forked from expo/troggle
SurvexBlocks now importing in deatil
This commit is contained in:
parent
8cc768e5b6
commit
df434cd399
@ -99,6 +99,7 @@ class SurvexBlockLookUpManager(models.Manager):
|
||||
class SurvexBlock(models.Model):
|
||||
objects = SurvexBlockLookUpManager()
|
||||
name = models.CharField(max_length=100)
|
||||
title = models.CharField(max_length=100)
|
||||
parent = models.ForeignKey('SurvexBlock', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
cave = models.ForeignKey('Cave', blank=True, null=True,on_delete=models.SET_NULL)
|
||||
|
||||
|
@ -78,6 +78,8 @@ def caveKey(x):
|
||||
Note that cave kataster numbers are not generally integers.
|
||||
This needs to be fixed make a decent sort order.
|
||||
"""
|
||||
if not x.kataster_number:
|
||||
return "~"
|
||||
return x.kataster_number
|
||||
|
||||
def getnotablecaves():
|
||||
|
@ -3,6 +3,7 @@ import os
|
||||
import time
|
||||
import timeit
|
||||
import json
|
||||
import resource
|
||||
|
||||
import settings
|
||||
os.environ['PYTHONPATH'] = settings.PYTHON_PATH
|
||||
@ -11,13 +12,13 @@ os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
|
||||
print(" - settings on loading databaseReset.py")
|
||||
|
||||
import django
|
||||
print(" - django.setup - next")
|
||||
print(" - Memory footprint before loading Django: {:.3f} MB".format(resource.getrusage(resource.RUSAGE_SELF)[2]/1024.0))
|
||||
try:
|
||||
django.setup()
|
||||
except:
|
||||
print(" ! Cyclic reference failure. Can occur when the initial db is empty. Fixed now (in UploadFileForm) but easy to reintroduce..")
|
||||
raise
|
||||
print(" - django.setup - done")
|
||||
print(" - Memory footprint after loading Django: {:.3f} MB".format(resource.getrusage(resource.RUSAGE_SELF)[2]/1024.0))
|
||||
|
||||
import troggle.flatpages.models
|
||||
import troggle.core.models
|
||||
@ -194,7 +195,7 @@ class JobQueue():
|
||||
memend = get_process_memory()
|
||||
duration = time.time()-start
|
||||
#print(" - MEMORY start:{:.3f} MB end:{:.3f} MB change={:.3f} MB".format(memstart,memend, ))
|
||||
print("\n*- Ended \"", runfunction[0], "\" {:.1f} seconds + {:.3f} MB".format(duration, memend-memstart))
|
||||
print("\n*- Ended \"", runfunction[0], "\" {:.1f} seconds + {:.3f} MB ({:.3f} MB)".format(duration, memend-memstart, memend))
|
||||
self.results[runfunction[0]].pop() # the null item
|
||||
self.results[runfunction[0]].append(duration)
|
||||
|
||||
@ -307,7 +308,7 @@ if __name__ == "__main__":
|
||||
runlabel = sys.argv[len(sys.argv)-1]
|
||||
else:
|
||||
runlabel=None
|
||||
|
||||
|
||||
jq = JobQueue(runlabel)
|
||||
|
||||
if len(sys.argv)==1:
|
||||
|
@ -192,6 +192,8 @@ def readcave(filename):
|
||||
url = url[0],
|
||||
filename = filename)
|
||||
except:
|
||||
# this slow db query happens on every cave, but on import we have all this in memory
|
||||
# and don't need to do a db query. Fix this to speed it up!
|
||||
# need to cope with duplicates
|
||||
print(" ! FAILED to get only one CAVE when updating using: "+filename)
|
||||
kaves = models_caves.Cave.objects.all().filter(kataster_number=kataster_number[0])
|
||||
@ -206,6 +208,8 @@ def readcave(filename):
|
||||
c = k
|
||||
|
||||
for area_slug in areas:
|
||||
# this slow db query happens on every cave, but on import we have all this in memory
|
||||
# and don't need to do a db query. Fix this to speed it up!
|
||||
area = models_caves.Area.objects.filter(short_name = area_slug)
|
||||
if area:
|
||||
newArea = area[0]
|
||||
@ -216,6 +220,8 @@ def readcave(filename):
|
||||
primary = True
|
||||
for slug in slugs:
|
||||
try:
|
||||
# this slow db query happens on every cave, but on import we have all this in memory
|
||||
# and don't need to do a db query. Fix this to speed it up!
|
||||
cs = models_caves.CaveSlug.objects.update_or_create(cave = c,
|
||||
slug = slug,
|
||||
primary = primary)
|
||||
@ -225,10 +231,13 @@ def readcave(filename):
|
||||
print(message)
|
||||
|
||||
primary = False
|
||||
|
||||
for entrance in entrances:
|
||||
slug = getXML(entrance, "entranceslug", maxItems = 1, context = context)[0]
|
||||
letter = getXML(entrance, "letter", maxItems = 1, context = context)[0]
|
||||
try:
|
||||
# this slow db query happens on every entrance, but on import we have all this in memory
|
||||
# and don't need to do a db query. Fix this to speed it up!
|
||||
entrance = models_caves.Entrance.objects.get(entranceslug__slug = slug)
|
||||
ce = models_caves.CaveAndEntrance.objects.update_or_create(cave = c, entrance_letter = letter, entrance = entrance)
|
||||
except:
|
||||
|
@ -16,21 +16,21 @@ import troggle.parsers.logbooks
|
||||
import troggle.parsers.QMs
|
||||
|
||||
def import_caves():
|
||||
print("Importing Caves to ",end="")
|
||||
print("-- Importing Caves to ",end="")
|
||||
print(django.db.connections.databases['default']['NAME'])
|
||||
troggle.parsers.caves.readcaves()
|
||||
|
||||
def import_people():
|
||||
print("Importing People (folk.csv) to ",end="")
|
||||
print("-- Importing People (folk.csv) to ",end="")
|
||||
print(django.db.connections.databases['default']['NAME'])
|
||||
troggle.parsers.people.LoadPersonsExpos()
|
||||
|
||||
def import_surveyscans():
|
||||
print("Importing Survey Scans")
|
||||
print("-- Importing Survey Scans")
|
||||
troggle.parsers.surveys.LoadListScans()
|
||||
|
||||
def import_logbooks():
|
||||
print("Importing Logbooks")
|
||||
print("-- Importing Logbooks")
|
||||
troggle.parsers.logbooks.LoadLogbooks()
|
||||
|
||||
def import_QMs():
|
||||
@ -40,7 +40,7 @@ def import_QMs():
|
||||
def import_survex():
|
||||
# when this import is moved to the top with the rest it all crashes horribly
|
||||
import troggle.parsers.survex
|
||||
print("Importing Survex Blocks")
|
||||
print("-- Importing Survex Blocks")
|
||||
print(" - Survex Blocks")
|
||||
troggle.parsers.survex.LoadSurvexBlocks()
|
||||
print(" - Survex entrances x/y/z Positions")
|
||||
@ -53,6 +53,6 @@ def import_loadpos():
|
||||
troggle.parsers.survex.LoadPos()
|
||||
|
||||
def import_drawingsfiles():
|
||||
print("Importing Drawings files")
|
||||
print("-- Importing Drawings files")
|
||||
troggle.parsers.surveys.LoadDrawingFiles()
|
||||
|
||||
|
@ -114,6 +114,7 @@ def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_
|
||||
expeditionday = expedition.get_expedition_day(date)
|
||||
lookupAttribs={'date':date, 'title':title}
|
||||
# 'cave' is converted to a string doing this, which renders as the cave slug.
|
||||
# but it is a db query which we should try to avoid - rewrite this
|
||||
nonLookupAttribs={'place':place, 'text':text, 'expedition':expedition, 'cave_slug':str(cave), 'slug':slugify(title)[:50], 'entry_type':entry_type}
|
||||
lbo, created=save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)
|
||||
|
||||
@ -356,6 +357,8 @@ def SetDatesFromLogbookEntries(expedition):
|
||||
Sets the date_from and date_to field for an expedition based on persontrips.
|
||||
Then sets the expedition date_from and date_to based on the personexpeditions.
|
||||
"""
|
||||
# Probably a faster way to do this. This uses a lot of db queries, but we have all this
|
||||
# in memory..
|
||||
for personexpedition in expedition.personexpedition_set.all():
|
||||
persontrips = personexpedition.persontrip_set.order_by('logbook_entry__date')
|
||||
# sequencing is difficult to do
|
||||
|
@ -324,7 +324,8 @@ class LoadingSurvex():
|
||||
return self.caveslist[g]
|
||||
print(' ! Failed to find cave for {}'.format(cavepath.lower()))
|
||||
else:
|
||||
print(' ! No regex cave match for %s' % cavepath.lower())
|
||||
# not a cave, but that is fine.
|
||||
# print(' ! No regex(standard identifier) cave match for %s' % cavepath.lower())
|
||||
return None
|
||||
|
||||
def GetSurvexDirectory(self, headpath):
|
||||
@ -353,17 +354,17 @@ class LoadingSurvex():
|
||||
print("\n"+message,file=sys.stderr)
|
||||
models.DataIssue.objects.create(parser='survex', message=message)
|
||||
|
||||
def LoadSurvexFile(self, includelabel):
|
||||
def LoadSurvexFile(self, svxid):
|
||||
"""Creates SurvexFile in the database, and SurvexDirectory if needed
|
||||
with links to 'cave'
|
||||
Creates a new current survexblock with valid .survexfile and valid .survexdirectory
|
||||
Creates a new current survexfile and valid .survexdirectory
|
||||
The survexblock passed-in is not necessarily the parent. FIX THIS.
|
||||
"""
|
||||
depth = " " * self.depthbegin
|
||||
print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, includelabel))
|
||||
headpath, tail = os.path.split(includelabel)
|
||||
print("{:2}{} - NEW survexfile:'{}'".format(self.depthbegin, depth, svxid))
|
||||
headpath = os.path.dirname(svxid)
|
||||
|
||||
newfile = models_survex.SurvexFile(path=includelabel)
|
||||
newfile = models_survex.SurvexFile(path=svxid)
|
||||
newfile.save() # until we do this there is no internal id so no foreign key works
|
||||
self.currentsurvexfile = newfile
|
||||
newdirectory = self.GetSurvexDirectory(headpath)
|
||||
@ -383,10 +384,10 @@ class LoadingSurvex():
|
||||
newfile.cave = cave
|
||||
#print("\n"+str(newdirectory.cave),file=sys.stderr)
|
||||
else:
|
||||
self.ReportNonCaveIncludes(headpath, includelabel)
|
||||
self.ReportNonCaveIncludes(headpath, svxid)
|
||||
|
||||
if not newfile.survexdirectory:
|
||||
message = " ! SurvexDirectory NOT SET in new SurvexFile {} ".format(includelabel)
|
||||
message = " ! SurvexDirectory NOT SET in new SurvexFile {} ".format(svxid)
|
||||
print(message)
|
||||
print(message,file=sys.stderr)
|
||||
models.DataIssue.objects.create(parser='survex', message=message)
|
||||
@ -401,7 +402,7 @@ class LoadingSurvex():
|
||||
def ProcessIncludeLine(self, included):
|
||||
svxid = included.groups()[0]
|
||||
#depth = " " * self.depthbegin
|
||||
#print("{:2}{} - Include survexfile:'{}'".format(self.depthbegin, depth, svxid))
|
||||
#print("{:2}{} - Include survexfile:'{}' {}".format(self.depthbegin, depth, svxid, included))
|
||||
self.LoadSurvexFile(svxid)
|
||||
self.stacksvxfiles.append(self.currentsurvexfile)
|
||||
|
||||
@ -426,8 +427,10 @@ class LoadingSurvex():
|
||||
self.LoadSurvexQM(survexblock, qmline)
|
||||
|
||||
included = self.rx_comminc.match(comment)
|
||||
# ;*include means we have been included; not 'proceed to include' which *include means
|
||||
# ;*include means 'we have been included'; whereas *include means 'proceed to include'
|
||||
if included:
|
||||
#depth = " " * self.depthbegin
|
||||
#print("{:2}{} - Include comment:'{}' {}".format(self.depthbegin, depth, comment, included))
|
||||
self.ProcessIncludeLine(included)
|
||||
|
||||
edulcni = self.rx_commcni.match(comment)
|
||||
@ -457,7 +460,7 @@ class LoadingSurvex():
|
||||
|
||||
def LinearLoad(self, survexblock, path, svxlines):
|
||||
"""Loads a single survex file. Usually used to import all the survex files which have been collated
|
||||
into a single file. Loads the begin/end blocks recursively.
|
||||
into a single file. Loads the begin/end blocks using a stack for labels.
|
||||
"""
|
||||
self.relativefilename = path
|
||||
cave = self.IdentifyCave(path) # this will produce null for survex files which are geographic collections
|
||||
@ -466,19 +469,25 @@ class LoadingSurvex():
|
||||
self.currentsurvexfile.save() # django insists on this although it is already saved !?
|
||||
|
||||
blockcount = 0
|
||||
lineno = 0
|
||||
def tickle():
|
||||
nonlocal blockcount
|
||||
blockcount +=1
|
||||
if blockcount % 10 ==0 :
|
||||
print(".", file=sys.stderr,end='')
|
||||
if blockcount % 500 ==0 :
|
||||
if blockcount % 200 ==0 :
|
||||
print("\n", file=sys.stderr,end='')
|
||||
sys.stderr.flush();
|
||||
print(" - MEM:{:7.3f} MB in use".format(models.get_process_memory()),file=sys.stderr)
|
||||
sys.stderr.flush()
|
||||
|
||||
for svxline in svxlines:
|
||||
sline, comment = self.rx_comment.match(svxline.strip()).groups()
|
||||
lineno += 1
|
||||
sline, comment = self.rx_comment.match(svxline).groups()
|
||||
if comment:
|
||||
depth = " " * self.depthbegin
|
||||
print("{:4} {:2}{} - Include comment:'{}' {}".format(lineno, self.depthbegin, depth, comment, sline))
|
||||
self.LoadSurvexComment(survexblock, comment) # this catches the ;*include and ;*edulcni lines too
|
||||
|
||||
if not sline:
|
||||
continue # skip blank lines
|
||||
|
||||
@ -503,10 +512,10 @@ class LoadingSurvex():
|
||||
pathlist += "." + id
|
||||
newsurvexblock = models_survex.SurvexBlock(name=blockid, parent=survexblock,
|
||||
survexpath=pathlist,
|
||||
title = survexblock.title, # copy parent inititally
|
||||
cave=self.currentcave, survexfile=self.currentsurvexfile,
|
||||
legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
|
||||
newsurvexblock.save()
|
||||
newsurvexblock.title = "("+survexblock.title+")" # copy parent inititally
|
||||
survexblock = newsurvexblock
|
||||
# survexblock.survexfile.save()
|
||||
survexblock.save() # django insists on this , but we want to save at the end !
|
||||
@ -564,7 +573,7 @@ class LoadingSurvex():
|
||||
else:
|
||||
pass # ignore all other sorts of data
|
||||
|
||||
def RecursiveScan(self, survexblock, survexfile, fin, flinear, fcollate):
|
||||
def RecursiveScan(self, survexblock, path, fin, flinear, fcollate):
|
||||
"""Follows the *include links in all the survex files from the root file 1623.svx
|
||||
and reads only the *include and *begin and *end statements. It produces a linearised
|
||||
list of the include tree
|
||||
@ -577,27 +586,27 @@ class LoadingSurvex():
|
||||
if self.callcount % 500 ==0 :
|
||||
print("\n", file=sys.stderr,end='')
|
||||
|
||||
if survexfile in self.svxfileslist:
|
||||
message = " * Warning. Survex file already seen: {}".format(survexfile.path)
|
||||
if path in self.svxfileslist:
|
||||
message = " * Warning. Duplicate in *include list at:{} depth:{} file:{}".format(self.callcount, self.depthinclude, path)
|
||||
print(message)
|
||||
print(message,file=flinear)
|
||||
print(message,file=sys.stderr)
|
||||
print("\n"+message,file=sys.stderr)
|
||||
models.DataIssue.objects.create(parser='survex', message=message)
|
||||
if self.svxfileslist.count(survexfile) > 20:
|
||||
message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(survexfile.path)
|
||||
if self.svxfileslist.count(path) > 20:
|
||||
message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(path)
|
||||
print(message)
|
||||
print(message,file=flinear)
|
||||
print(message,file=sys.stderr)
|
||||
models.DataIssue.objects.create(parser='survex', message=message)
|
||||
return
|
||||
self.svxfileslist.append(survexfile)
|
||||
self.svxfileslist.append(path)
|
||||
|
||||
svxlines = fin.read().splitlines()
|
||||
for svxline in svxlines:
|
||||
self.lineno += 1
|
||||
includestmt =self.rx_include.match(svxline)
|
||||
if not includestmt:
|
||||
fcollate.write("{}\n".format(svxline))
|
||||
fcollate.write("{}\n".format(svxline.strip()))
|
||||
|
||||
sline, comment = self.rx_comment.match(svxline.strip()).groups()
|
||||
mstar = self.rx_star.match(sline)
|
||||
@ -605,40 +614,35 @@ class LoadingSurvex():
|
||||
cmd, args = mstar.groups()
|
||||
cmd = cmd.lower()
|
||||
if re.match("(?i)include$", cmd):
|
||||
includepath = os.path.normpath(os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", args)))
|
||||
path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
|
||||
includepath = os.path.normpath(os.path.join(os.path.split(path)[0], re.sub(r"\.svx$", "", args)))
|
||||
#path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
|
||||
|
||||
includesurvexfile = models_survex.SurvexFile(path=includepath)
|
||||
includesurvexfile.save()
|
||||
|
||||
if includesurvexfile.exists():
|
||||
# do not create SurvexFile in DB here by doing includesurvexfile.save(). Do it when reading data.
|
||||
fullpath = os.path.join(settings.SURVEX_DATA, includepath + ".svx")
|
||||
if os.path.isfile(fullpath):
|
||||
#--------------------------------------------------------
|
||||
self.depthinclude += 1
|
||||
fininclude = includesurvexfile.OpenFile()
|
||||
fcollate.write(";*include {}\n".format(includesurvexfile.path))
|
||||
flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includesurvexfile.path))
|
||||
push = includesurvexfile.path.lower()
|
||||
fininclude = open(fullpath,'r')
|
||||
fcollate.write(";*include {}\n".format(includepath))
|
||||
flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includepath))
|
||||
push = includepath.lower()
|
||||
self.stackinclude.append(push)
|
||||
#-----------------
|
||||
self.RecursiveScan(survexblock, includesurvexfile, fininclude, flinear, fcollate)
|
||||
self.RecursiveScan(survexblock, includepath, fininclude, flinear, fcollate)
|
||||
#-----------------
|
||||
pop = self.stackinclude.pop()
|
||||
if pop != push:
|
||||
message = "!!!!!!! ERROR pop != push {} != {} {}".format(pop, push, self.stackinclude)
|
||||
message = "!! ERROR mismatch *include pop!=push {}".format(pop, push, self.stackinclude)
|
||||
print(message)
|
||||
print(message,file=flinear)
|
||||
print(message,file=sys.stderr)
|
||||
models.DataIssue.objects.create(parser='survex', message=message)
|
||||
includesurvexfile.path += "-TEMP"
|
||||
includesurvexfile = None
|
||||
flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, pop))
|
||||
fcollate.write(";*edulcni {}\n".format(pop))
|
||||
fininclude.close()
|
||||
self.depthinclude -= 1
|
||||
#--------------------------------------------------------
|
||||
else:
|
||||
message = " ! ERROR *include file not found for [{}]:'{}'".format(includesurvexfile, includepath)
|
||||
message = " ! ERROR *include file not found for:'{}'".format(includepath)
|
||||
print(message)
|
||||
print(message,file=sys.stderr)
|
||||
models.DataIssue.objects.create(parser='survex', message=message)
|
||||
@ -659,7 +663,7 @@ class LoadingSurvex():
|
||||
args = " "
|
||||
popargs = self.stackbegin.pop()
|
||||
if popargs != args.lower():
|
||||
message = "!!!!!!! ERROR BEGIN/END pop != push {} != {}\n{}".format(popargs, args, self. stackbegin)
|
||||
message = "!! ERROR mismatch in BEGIN/END labels pop!=push '{}'!='{}'\n{}".format(popargs, args, self. stackbegin)
|
||||
print(message)
|
||||
print(message,file=flinear)
|
||||
print(message,file=sys.stderr)
|
||||
@ -701,7 +705,7 @@ def FindAndLoadSurvex(survexblockroot):
|
||||
fcollate.write(";*include {}\n".format(survexfileroot.path))
|
||||
flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
|
||||
#----------------------------------------------------------------
|
||||
svx_scan.RecursiveScan(survexblockroot, survexfileroot, finroot, flinear, fcollate)
|
||||
svx_scan.RecursiveScan(survexblockroot, survexfileroot.path, finroot, flinear, fcollate)
|
||||
#----------------------------------------------------------------
|
||||
flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
|
||||
fcollate.write(";*edulcni {}\n".format(survexfileroot.path))
|
||||
@ -712,7 +716,7 @@ def FindAndLoadSurvex(survexblockroot):
|
||||
flinear.write(" - {:,} survex files in linear include list \n".format(len(svxfileslist)))
|
||||
flinear.close()
|
||||
fcollate.close()
|
||||
svx_scan = None
|
||||
svx_scan = None # Hmm. Does this actually delete all the instance variables if they are lists, dicts etc.?
|
||||
print("\n - {:,} survex files in linear include list \n".format(len(svxfileslist)),file=sys.stderr)
|
||||
|
||||
mem1 = models.get_process_memory()
|
||||
@ -724,7 +728,7 @@ def FindAndLoadSurvex(survexblockroot):
|
||||
# entrance locations currently loaded after this by LoadPos(), but could better be done before ?
|
||||
# look in MapLocations() for how we find the entrances
|
||||
|
||||
print('\n - Loading All Survex Blocks (LinearRecursive)',file=sys.stderr)
|
||||
print('\n - Loading All Survex Blocks (LinearLoad)',file=sys.stderr)
|
||||
svx_load = LoadingSurvex()
|
||||
|
||||
svx_load.svxdirs[""] = survexfileroot.survexdirectory
|
||||
|
@ -17,6 +17,498 @@ from __future__ import unicode_literals
|
||||
from django.db import models
|
||||
|
||||
|
||||
class AuthGroup(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
name = models.CharField(unique=True, max_length=80)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'auth_group'
|
||||
# Unable to inspect table 'auth_group_permissions'
|
||||
# The error was: list index out of range
|
||||
|
||||
|
||||
class AuthPermission(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING)
|
||||
codename = models.CharField(max_length=100)
|
||||
name = models.CharField(max_length=255)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'auth_permission'
|
||||
unique_together = (('content_type', 'codename'),)
|
||||
|
||||
|
||||
class AuthUser(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
password = models.CharField(max_length=128)
|
||||
last_login = models.DateTimeField(blank=True, null=True)
|
||||
is_superuser = models.BooleanField()
|
||||
first_name = models.CharField(max_length=30)
|
||||
last_name = models.CharField(max_length=30)
|
||||
email = models.CharField(max_length=254)
|
||||
is_staff = models.BooleanField()
|
||||
is_active = models.BooleanField()
|
||||
date_joined = models.DateTimeField()
|
||||
username = models.CharField(unique=True, max_length=150)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'auth_user'
|
||||
# Unable to inspect table 'auth_user_groups'
|
||||
# The error was: list index out of range
|
||||
# Unable to inspect table 'auth_user_user_permissions'
|
||||
# The error was: list index out of range
|
||||
|
||||
|
||||
class CoreArea(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
short_name = models.CharField(max_length=100)
|
||||
name = models.CharField(max_length=200, blank=True, null=True)
|
||||
description = models.TextField(blank=True, null=True)
|
||||
parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_area'
|
||||
|
||||
|
||||
class CoreCave(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
official_name = models.CharField(max_length=160)
|
||||
kataster_code = models.CharField(max_length=20, blank=True, null=True)
|
||||
kataster_number = models.CharField(max_length=10, blank=True, null=True)
|
||||
unofficial_number = models.CharField(max_length=60, blank=True, null=True)
|
||||
explorers = models.TextField(blank=True, null=True)
|
||||
underground_description = models.TextField(blank=True, null=True)
|
||||
equipment = models.TextField(blank=True, null=True)
|
||||
references = models.TextField(blank=True, null=True)
|
||||
survey = models.TextField(blank=True, null=True)
|
||||
kataster_status = models.TextField(blank=True, null=True)
|
||||
underground_centre_line = models.TextField(blank=True, null=True)
|
||||
notes = models.TextField(blank=True, null=True)
|
||||
length = models.CharField(max_length=100, blank=True, null=True)
|
||||
depth = models.CharField(max_length=100, blank=True, null=True)
|
||||
extent = models.CharField(max_length=100, blank=True, null=True)
|
||||
survex_file = models.CharField(max_length=100, blank=True, null=True)
|
||||
description_file = models.CharField(max_length=200, blank=True, null=True)
|
||||
url = models.CharField(max_length=200, blank=True, null=True)
|
||||
filename = models.CharField(max_length=200)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_cave'
|
||||
|
||||
|
||||
class CoreCaveArea(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
cave = models.ForeignKey(CoreCave, models.DO_NOTHING)
|
||||
area = models.ForeignKey(CoreArea, models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_cave_area'
|
||||
unique_together = (('cave', 'area'),)
|
||||
|
||||
|
||||
class CoreCaveandentrance(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
entrance_letter = models.CharField(max_length=20, blank=True, null=True)
|
||||
cave = models.ForeignKey(CoreCave, models.DO_NOTHING)
|
||||
entrance = models.ForeignKey('CoreEntrance', models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_caveandentrance'
|
||||
|
||||
|
||||
class CoreCavedescription(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
short_name = models.CharField(unique=True, max_length=50)
|
||||
long_name = models.CharField(max_length=200, blank=True, null=True)
|
||||
description = models.TextField(blank=True, null=True)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_cavedescription'
|
||||
|
||||
|
||||
class CoreCavedescriptionLinkedEntrances(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
cavedescription = models.ForeignKey(CoreCavedescription, models.DO_NOTHING)
|
||||
entrance = models.ForeignKey('CoreEntrance', models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_cavedescription_linked_entrances'
|
||||
unique_together = (('cavedescription', 'entrance'),)
|
||||
|
||||
|
||||
class CoreCavedescriptionLinkedQms(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
cavedescription = models.ForeignKey(CoreCavedescription, models.DO_NOTHING)
|
||||
qm = models.ForeignKey('CoreQm', models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_cavedescription_linked_qms'
|
||||
unique_together = (('cavedescription', 'qm'),)
|
||||
|
||||
|
||||
class CoreCavedescriptionLinkedSubcaves(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
cavedescription = models.ForeignKey(CoreCavedescription, models.DO_NOTHING)
|
||||
newsubcave = models.ForeignKey('CoreNewsubcave', models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_cavedescription_linked_subcaves'
|
||||
unique_together = (('cavedescription', 'newsubcave'),)
|
||||
|
||||
|
||||
class CoreCaveslug(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
slug = models.CharField(unique=True, max_length=50)
|
||||
primary = models.BooleanField()
|
||||
cave = models.ForeignKey(CoreCave, models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_caveslug'
|
||||
|
||||
|
||||
class CoreDataissue(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
date = models.DateTimeField()
|
||||
parser = models.CharField(max_length=50, blank=True, null=True)
|
||||
message = models.CharField(max_length=400, blank=True, null=True)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_dataissue'
|
||||
|
||||
|
||||
class CoreEntrance(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
name = models.CharField(max_length=100, blank=True, null=True)
|
||||
entrance_description = models.TextField(blank=True, null=True)
|
||||
explorers = models.TextField(blank=True, null=True)
|
||||
map_description = models.TextField(blank=True, null=True)
|
||||
location_description = models.TextField(blank=True, null=True)
|
||||
approach = models.TextField(blank=True, null=True)
|
||||
underground_description = models.TextField(blank=True, null=True)
|
||||
photo = models.TextField(blank=True, null=True)
|
||||
marking = models.CharField(max_length=2)
|
||||
marking_comment = models.TextField(blank=True, null=True)
|
||||
findability = models.CharField(max_length=1, blank=True, null=True)
|
||||
findability_description = models.TextField(blank=True, null=True)
|
||||
alt = models.TextField(blank=True, null=True)
|
||||
northing = models.TextField(blank=True, null=True)
|
||||
easting = models.TextField(blank=True, null=True)
|
||||
tag_station = models.TextField(blank=True, null=True)
|
||||
exact_station = models.TextField(blank=True, null=True)
|
||||
other_station = models.TextField(blank=True, null=True)
|
||||
other_description = models.TextField(blank=True, null=True)
|
||||
bearings = models.TextField(blank=True, null=True)
|
||||
url = models.CharField(max_length=200, blank=True, null=True)
|
||||
filename = models.CharField(max_length=200)
|
||||
cached_primary_slug = models.CharField(max_length=200, blank=True, null=True)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_entrance'
|
||||
|
||||
|
||||
class CoreEntranceslug(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
slug = models.CharField(unique=True, max_length=50)
|
||||
primary = models.BooleanField()
|
||||
entrance = models.ForeignKey(CoreEntrance, models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_entranceslug'
|
||||
|
||||
|
||||
class CoreExpedition(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
year = models.CharField(unique=True, max_length=20)
|
||||
name = models.CharField(max_length=100)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_expedition'
|
||||
|
||||
|
||||
class CoreExpeditionday(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
date = models.DateField()
|
||||
expedition = models.ForeignKey(CoreExpedition, models.DO_NOTHING)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_expeditionday'
|
||||
|
||||
|
||||
class CoreLogbookentry(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
date = models.DateField()
|
||||
title = models.CharField(max_length=200)
|
||||
cave_slug = models.CharField(max_length=50, blank=True, null=True)
|
||||
place = models.CharField(max_length=100, blank=True, null=True)
|
||||
text = models.TextField()
|
||||
slug = models.CharField(max_length=50)
|
||||
filename = models.CharField(max_length=200, blank=True, null=True)
|
||||
entry_type = models.CharField(max_length=50, blank=True, null=True)
|
||||
expedition = models.ForeignKey(CoreExpedition, models.DO_NOTHING, blank=True, null=True)
|
||||
expeditionday = models.ForeignKey(CoreExpeditionday, models.DO_NOTHING, blank=True, null=True)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_logbookentry'
|
||||
|
||||
|
||||
class CoreNewsubcave(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
new_since_parsing = models.BooleanField()
|
||||
non_public = models.BooleanField()
|
||||
name = models.CharField(unique=True, max_length=200)
|
||||
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'core_newsubcave'
|
||||
|
||||
|
||||
class CoreOthercavename(models.Model):
    """Unmanaged mapping of `core_othercavename` (via `inspectdb`).

    An alternative name for a cave, linked back to its `CoreCave` row.
    """
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    new_since_parsing = models.BooleanField()
    non_public = models.BooleanField()
    name = models.CharField(max_length=160)
    cave = models.ForeignKey(CoreCave, models.DO_NOTHING)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_othercavename'
|
||||
|
||||
|
||||
class CorePerson(models.Model):
    """Unmanaged mapping of the legacy `core_person` table (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    new_since_parsing = models.BooleanField()
    non_public = models.BooleanField()
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    fullname = models.CharField(max_length=200)
    is_vfho = models.BooleanField()
    mug_shot = models.CharField(max_length=100, blank=True, null=True)
    blurb = models.TextField(blank=True, null=True)
    orderref = models.CharField(max_length=200)
    # NOTE(review): ForeignKey(..., unique=True) is what Django recommends
    # writing as OneToOneField; kept as generated to preserve the existing
    # reverse-accessor API.
    user = models.ForeignKey(AuthUser, models.DO_NOTHING, unique=True, blank=True, null=True)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_person'
|
||||
|
||||
|
||||
class CorePersonexpedition(models.Model):
    """Unmanaged mapping of `core_personexpedition` (via `inspectdb`).

    Join row linking a `CorePerson` to a `CoreExpedition` they attended.
    """
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    new_since_parsing = models.BooleanField()
    non_public = models.BooleanField()
    slugfield = models.CharField(max_length=50, blank=True, null=True)
    is_guest = models.BooleanField()
    expo_committee_position = models.CharField(max_length=200, blank=True, null=True)
    nickname = models.CharField(max_length=100, blank=True, null=True)
    expedition = models.ForeignKey(CoreExpedition, models.DO_NOTHING)
    person = models.ForeignKey(CorePerson, models.DO_NOTHING)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_personexpedition'
|
||||
|
||||
|
||||
class CorePersontrip(models.Model):
    """Unmanaged mapping of `core_persontrip` (via `inspectdb`).

    Links a person-on-expedition to a logbook entry describing a trip.
    """
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    new_since_parsing = models.BooleanField()
    non_public = models.BooleanField()
    time_underground = models.FloatField()  # units not recorded here — presumably hours; verify against writers
    is_logbook_entry_author = models.BooleanField()
    logbook_entry = models.ForeignKey(CoreLogbookentry, models.DO_NOTHING)
    personexpedition = models.ForeignKey(CorePersonexpedition, models.DO_NOTHING, blank=True, null=True)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_persontrip'
|
||||
|
||||
|
||||
class CoreQm(models.Model):
    """Unmanaged mapping of the legacy `core_qm` table (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    new_since_parsing = models.BooleanField()
    non_public = models.BooleanField()
    number = models.IntegerField()
    grade = models.CharField(max_length=1)
    location_description = models.TextField()
    nearest_station_description = models.CharField(max_length=400, blank=True, null=True)
    nearest_station_name = models.CharField(max_length=200, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    completion_description = models.TextField(blank=True, null=True)
    comment = models.TextField(blank=True, null=True)
    # NOTE(review): found_by and ticked_off_by are two FKs to the same model
    # with no related_name; Django's system checks normally flag the clashing
    # reverse accessors (fields.E304) — confirm how this passes checks.
    found_by = models.ForeignKey(CoreLogbookentry, models.DO_NOTHING, blank=True, null=True)
    nearest_station = models.ForeignKey('CoreSurvexstation', models.DO_NOTHING, blank=True, null=True)
    ticked_off_by = models.ForeignKey(CoreLogbookentry, models.DO_NOTHING, blank=True, null=True)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_qm'
|
||||
|
||||
|
||||
class CoreScansfolder(models.Model):
    """Unmanaged mapping of the legacy `core_scansfolder` table (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    fpath = models.CharField(max_length=200)
    walletname = models.CharField(max_length=200)

    class Meta:
        managed = False  # Django must never create/alter/drop this table
        db_table = 'core_scansfolder'
|
||||
|
||||
|
||||
class CoreSinglescan(models.Model):
    """Unmanaged mapping of `core_singlescan` (via `inspectdb`).

    One scanned file, optionally grouped under a `CoreScansfolder`.
    """
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    ffile = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    scansfolder = models.ForeignKey(CoreScansfolder, models.DO_NOTHING, blank=True, null=True)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_singlescan'
|
||||
|
||||
|
||||
class CoreSurvexblock(models.Model):
    """Unmanaged mapping of the legacy `core_survexblock` table (via `inspectdb`).

    A survex block node; `parent` makes this a self-referential tree.
    Mirrors the managed `SurvexBlock` model (which also carries name/title/
    cave/parent fields) — keep the two in sync when the schema changes.
    """
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    name = models.CharField(max_length=100)
    title = models.CharField(max_length=100)
    date = models.DateField(blank=True, null=True)
    survexpath = models.CharField(max_length=200)
    # Leg counters — presumably: all legs / splay legs / surface legs; verify against the importer.
    legsall = models.IntegerField(blank=True, null=True)
    legssplay = models.IntegerField(blank=True, null=True)
    legssurfc = models.IntegerField(blank=True, null=True)
    totalleglength = models.FloatField(blank=True, null=True)
    cave = models.ForeignKey(CoreCave, models.DO_NOTHING, blank=True, null=True)
    expedition = models.ForeignKey(CoreExpedition, models.DO_NOTHING, blank=True, null=True)
    expeditionday = models.ForeignKey(CoreExpeditionday, models.DO_NOTHING, blank=True, null=True)
    scansfolder = models.ForeignKey(CoreScansfolder, models.DO_NOTHING, blank=True, null=True)
    survexfile = models.ForeignKey('CoreSurvexfile', models.DO_NOTHING, blank=True, null=True)
    parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)  # tree structure: root blocks have parent NULL

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_survexblock'
|
||||
|
||||
|
||||
class CoreSurvexdirectory(models.Model):
    """Unmanaged mapping of the legacy `core_survexdirectory` table (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    path = models.CharField(max_length=200)
    cave = models.ForeignKey(CoreCave, models.DO_NOTHING, blank=True, null=True)
    # 'CoreSurvexfile' is a forward/string reference: inspectdb failed to
    # generate that model (see the "Unable to inspect" comments below), so
    # this FK will not resolve until CoreSurvexfile is defined.
    primarysurvexfile = models.ForeignKey('CoreSurvexfile', models.DO_NOTHING, blank=True, null=True)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_survexdirectory'
|
||||
# inspectdb could not reflect the following tables; the models referenced
# above as string FKs ('CoreSurvexfile', 'CoreSurvexstation') are therefore
# missing and must be written by hand:
# Unable to inspect table 'core_survexfile'
# The error was: list index out of range
# Unable to inspect table 'core_survexpersonrole'
# The error was: list index out of range
# Unable to inspect table 'core_survexstation'
# The error was: list index out of range
# Unable to inspect table 'core_survextitle'
# The error was: list index out of range
|
||||
|
||||
|
||||
class CoreTunnelfile(models.Model):
    """Unmanaged mapping of the legacy `core_tunnelfile` table (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    tunnelpath = models.CharField(max_length=200)
    tunnelname = models.CharField(max_length=200)
    bfontcolours = models.BooleanField()
    filesize = models.IntegerField()  # units not recorded here — presumably bytes; verify
    npaths = models.IntegerField()

    class Meta:
        managed = False  # Django must never create/alter/drop this table
        db_table = 'core_tunnelfile'
|
||||
|
||||
|
||||
class CoreTunnelfileManyscansfolders(models.Model):
    """Unmanaged mapping of the M2M join table `core_tunnelfile_manyscansfolders` (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    tunnelfile = models.ForeignKey(CoreTunnelfile, models.DO_NOTHING)
    scansfolder = models.ForeignKey(CoreScansfolder, models.DO_NOTHING)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_tunnelfile_manyscansfolders'
        unique_together = (('tunnelfile', 'scansfolder'),)  # each pairing appears at most once
|
||||
|
||||
|
||||
class CoreTunnelfileScans(models.Model):
    """Unmanaged mapping of the M2M join table `core_tunnelfile_scans` (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    tunnelfile = models.ForeignKey(CoreTunnelfile, models.DO_NOTHING)
    singlescan = models.ForeignKey(CoreSinglescan, models.DO_NOTHING)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_tunnelfile_scans'
        unique_together = (('tunnelfile', 'singlescan'),)  # each pairing appears at most once
|
||||
|
||||
|
||||
class CoreTunnelfileSurvexfiles(models.Model):
    """Unmanaged mapping of the M2M join table `core_tunnelfile_survexfiles` (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    tunnelfile = models.ForeignKey(CoreTunnelfile, models.DO_NOTHING)
    # String reference: CoreSurvexfile could not be generated by inspectdb
    # (see the "Unable to inspect" comments above) and must exist for this to resolve.
    survexfile = models.ForeignKey('CoreSurvexfile', models.DO_NOTHING)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_tunnelfile_survexfiles'
        unique_together = (('tunnelfile', 'survexfile'),)  # each pairing appears at most once
|
||||
|
||||
|
||||
class CoreTunnelfileTunnelcontains(models.Model):
    """Unmanaged mapping of the self-referential M2M join table `core_tunnelfile_tunnelcontains` (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    # NOTE(review): two FKs to the same model with no related_name normally
    # trigger Django's reverse-accessor clash check (fields.E304) — confirm.
    from_tunnelfile = models.ForeignKey(CoreTunnelfile, models.DO_NOTHING)
    to_tunnelfile = models.ForeignKey(CoreTunnelfile, models.DO_NOTHING)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'core_tunnelfile_tunnelcontains'
        unique_together = (('from_tunnelfile', 'to_tunnelfile'),)  # each pairing appears at most once
|
||||
# inspectdb could not reflect the admin log table either; write this model
# by hand if it is needed:
# Unable to inspect table 'django_admin_log'
# The error was: list index out of range
|
||||
|
||||
|
||||
class DjangoContentType(models.Model):
    """Unmanaged mapping of Django's own `django_content_type` table (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    app_label = models.CharField(max_length=100)
    model = models.CharField(max_length=100)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'django_content_type'
        unique_together = (('app_label', 'model'),)  # one content-type row per model
|
||||
|
||||
|
||||
class DjangoMigrations(models.Model):
|
||||
id = models.IntegerField(primary_key=True) # AutoField?
|
||||
app = models.CharField(max_length=255)
|
||||
@ -26,3 +518,43 @@ class DjangoMigrations(models.Model):
|
||||
class Meta:
|
||||
managed = False
|
||||
db_table = 'django_migrations'
|
||||
|
||||
|
||||
class DjangoSession(models.Model):
    """Unmanaged mapping of Django's own `django_session` table (via `inspectdb`)."""
    session_key = models.CharField(primary_key=True, max_length=40)
    session_data = models.TextField()
    expire_date = models.DateTimeField()

    class Meta:
        managed = False  # Django must never create/alter/drop this table
        db_table = 'django_session'
|
||||
|
||||
|
||||
class FlatpagesEntranceredirect(models.Model):
    """Unmanaged mapping of `flatpages_entranceredirect` (via `inspectdb`).

    Redirects an old URL to a cave-entrance page.
    """
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    originalurl = models.CharField(db_column='originalURL', max_length=200)  # Field name made lowercase.
    entrance = models.ForeignKey(CoreEntrance, models.DO_NOTHING)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'flatpages_entranceredirect'
|
||||
|
||||
|
||||
class FlatpagesRedirect(models.Model):
    """Unmanaged mapping of `flatpages_redirect` (via `inspectdb`).

    A plain old-URL -> new-URL redirect pair.
    """
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    originalurl = models.CharField(db_column='originalURL', unique=True, max_length=200)  # Field name made lowercase.
    newurl = models.CharField(db_column='newURL', max_length=200)  # Field name made lowercase.

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'flatpages_redirect'
|
||||
|
||||
|
||||
class RegistrationRegistrationprofile(models.Model):
    """Unmanaged mapping of `registration_registrationprofile` (via `inspectdb`)."""
    id = models.IntegerField(primary_key=True)  # AutoField?  inspectdb cannot tell; TODO confirm
    activation_key = models.CharField(max_length=40)
    # NOTE(review): ForeignKey(..., unique=True) is what Django recommends
    # writing as OneToOneField; kept as generated to preserve the existing
    # reverse-accessor API.
    user = models.ForeignKey(AuthUser, models.DO_NOTHING, unique=True)

    class Meta:
        managed = False  # read-only reflection of an existing table
        db_table = 'registration_registrationprofile'
|
||||
|
Loading…
Reference in New Issue
Block a user