diff --git a/core/views.py b/core/views.py
index d81e03c..b74b709 100644
--- a/core/views.py
+++ b/core/views.py
@@ -5,4 +5,4 @@ from . import views_caves
from . import views_survex
from . import views_logbooks
from . import views_other
-
+from . import views_statistics
diff --git a/core/views_logbooks.py b/core/views_logbooks.py
index 4c36d04..27a236e 100644
--- a/core/views_logbooks.py
+++ b/core/views_logbooks.py
@@ -220,7 +220,7 @@ def pathsreport(request):
def experimental(request):
- blockroots = models.SurvexBlock.objects.filter(name="root")
+ blockroots = models.SurvexBlock.objects.filter(name="rootblock")
if len(blockroots)>1:
print(" ! more than one root survexblock {}".format(len(blockroots)))
for sbr in blockroots:
diff --git a/core/views_other.py b/core/views_other.py
index c31777a..9fa34c0 100644
--- a/core/views_other.py
+++ b/core/views_other.py
@@ -20,29 +20,6 @@ print("** importing troggle/core/views_other.py")
def showrequest(request):
return HttpResponse(request.GET)
-def stats(request):
- statsDict={}
- statsDict['expoCount'] = int(Expedition.objects.count())
- statsDict['caveCount'] = int(Cave.objects.count())
- statsDict['personCount'] = int(Person.objects.count())
- statsDict['logbookEntryCount'] = int(LogbookEntry.objects.count())
-
- legsbyexpo = [ ]
- for expedition in Expedition.objects.all():
- survexblocks = expedition.survexblock_set.all()
- survexlegs = [ ]
- survexleglength = 0.0
- for survexblock in survexblocks:
- survexlegs.extend(survexblock.survexleg_set.all())
- survexleglength += survexblock.totalleglength
- legsbyexpo.append((expedition, {"nsurvexlegs":len(survexlegs), "survexleglength":survexleglength/1000}))
- legsbyexpo.reverse()
- survexlegs = SurvexLeg.objects.all()
- totalsurvexlength = sum([survexleg.tape for survexleg in survexlegs])
-
- renderDict = {**statsDict, **{ "nsurvexlegs":len(survexlegs), "totalsurvexlength":totalsurvexlength/1000, "legsbyexpo":legsbyexpo }} # new syntax
- return render(request,'statistics.html', renderDict)
-
def frontpage(request):
if request.user.is_authenticated():
return render(request,'tasks.html')
diff --git a/core/views_statistics.py b/core/views_statistics.py
new file mode 100644
index 0000000..0b1db33
--- /dev/null
+++ b/core/views_statistics.py
@@ -0,0 +1,112 @@
+import datetime
+import os.path
+import re
+
+import django.db.models
+from django.db.models import Min, Max
+from django.core.urlresolvers import reverse
+from django.http import HttpResponse, HttpResponseRedirect
+from django.shortcuts import render, render_to_response
+from django.template import Context, loader
+from django.template.defaultfilters import slugify
+from django.utils import timezone
+from django.views.generic.list import ListView
+
+from troggle.core.models import Expedition, Person, PersonExpedition
+from troggle.core.models_caves import Cave, LogbookEntry
+from troggle.core.models_survex import SurvexLeg, SurvexBlock
+
+import troggle.settings as settings
+from settings import *
+
+
+def pathsreport(request):
+ pathsdict={
+ "ADMIN_MEDIA_PREFIX" : ADMIN_MEDIA_PREFIX,
+ "ADMIN_MEDIA_PREFIX" : ADMIN_MEDIA_PREFIX,
+ "CAVEDESCRIPTIONS" : CAVEDESCRIPTIONS,
+ "DIR_ROOT" : DIR_ROOT,
+ "ENTRANCEDESCRIPTIONS" : ENTRANCEDESCRIPTIONS,
+ "EXPOUSER_EMAIL" : EXPOUSER_EMAIL,
+ "EXPOUSERPASS" :"",
+ "EXPOUSER" : EXPOUSER,
+ "EXPOWEB" : EXPOWEB,
+ "EXPOWEB_URL" : EXPOWEB_URL,
+ "FILES" : FILES,
+ "JSLIB_URL" : JSLIB_URL,
+ "LOGFILE" : LOGFILE,
+ "LOGIN_REDIRECT_URL" : LOGIN_REDIRECT_URL,
+ "MEDIA_ADMIN_DIR" : MEDIA_ADMIN_DIR,
+ "MEDIA_ROOT" : MEDIA_ROOT,
+ "MEDIA_URL" : MEDIA_URL,
+ #"PHOTOS_ROOT" : PHOTOS_ROOT,
+ "PHOTOS_URL" : PHOTOS_URL,
+ "PYTHON_PATH" : PYTHON_PATH,
+ "REPOS_ROOT_PATH" : REPOS_ROOT_PATH,
+ "ROOT_URLCONF" : ROOT_URLCONF,
+ "STATIC_ROOT" : STATIC_ROOT,
+ "STATIC_URL" : STATIC_URL,
+ "SURVEX_DATA" : SURVEX_DATA,
+ "SURVEY_SCANS" : SURVEY_SCANS,
+ "SURVEYS" : SURVEYS,
+ "SURVEYS_URL" : SURVEYS_URL,
+ "SVX_URL" : SVX_URL,
+ "TEMPLATE_DIRS" : TEMPLATE_DIRS,
+ "THREEDCACHEDIR" : THREEDCACHEDIR,
+ "TINY_MCE_MEDIA_ROOT" : TINY_MCE_MEDIA_ROOT,
+ "TINY_MCE_MEDIA_URL" : TINY_MCE_MEDIA_URL,
+ "TUNNEL_DATA" : TUNNEL_DATA,
+ "URL_ROOT" : URL_ROOT
+ }
+
+ ncodes = len(pathsdict)
+
+ bycodeslist = sorted(pathsdict.items())
+ bypathslist = sorted(iter(pathsdict.items()), key=lambda x: x[1])
+
+ return render(request, 'pathsreport.html', {
+ "pathsdict":pathsdict,
+ "bycodeslist":bycodeslist,
+ "bypathslist":bypathslist,
+ "ncodes":ncodes})
+
+def stats(request):
+ statsDict={}
+ statsDict['expoCount'] = "{:,}".format(Expedition.objects.count())
+ statsDict['caveCount'] = "{:,}".format(Cave.objects.count())
+ statsDict['personCount'] = "{:,}".format(Person.objects.count())
+ statsDict['logbookEntryCount'] = "{:,}".format(LogbookEntry.objects.count())
+
+ blockroots = SurvexBlock.objects.filter(name="rootblock")
+ if len(blockroots)>1:
+ print(" ! more than one root survexblock {}".format(len(blockroots)))
+ for sbr in blockroots:
+ print("{} {} {} {}".format(sbr.id, sbr.name, sbr.text, sbr.date))
+ sbr = blockroots[0]
+ totalsurvexlength = sbr.totalleglength
+ try:
+ nimportlegs = int(sbr.text)
+ except (ValueError, TypeError):
+ print("{} {} {} {}".format(sbr.id, sbr.name, sbr.text, sbr.date))
+ nimportlegs = -1
+
+ legsbyexpo = [ ]
+ addupsurvexlength = 0
+ for expedition in Expedition.objects.all():
+ survexblocks = expedition.survexblock_set.all()
+ legsyear=0
+ survexleglength = 0.0
+ for survexblock in survexblocks:
+ survexleglength += survexblock.totalleglength
+ try:
+ legsyear += int(survexblock.text)
+ except (ValueError, TypeError):
+ pass
+ addupsurvexlength += survexleglength
+ legsbyexpo.append((expedition, {"nsurvexlegs": "{:,}".format(legsyear),
+ "survexleglength":"{:,.0f}".format(survexleglength)}))
+ legsbyexpo.reverse()
+ survexlegs = SurvexLeg.objects.all()
+
+ renderDict = {**statsDict, **{ "nsurvexlegs": "{:,}".format(nimportlegs), "totalsurvexlength":totalsurvexlength/1000, "addupsurvexlength":addupsurvexlength/1000, "legsbyexpo":legsbyexpo }} # new syntax
+ return render(request,'statistics.html', renderDict)
diff --git a/databaseReset.py b/databaseReset.py
index c85be8c..a09a3d5 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -18,7 +18,6 @@ print(" 1 settings on loading databaseReset.py")
from troggle.core.models_caves import Cave, Entrance
import troggle.parsers.caves
-#import troggle.settings
import troggle.flatpages.models
import troggle.logbooksdump
import troggle.parsers.people
@@ -54,6 +53,8 @@ def reinit_db():
in memory (django python models, not the database), so there is already a full load
of stuff known. Deleting the db file does not clear memory.
"""
+ print("Reinitialising db ",end="")
+ print(django.db.connections.databases['default']['NAME'])
currentdbname = settings.DATABASES['default']['NAME']
if currentdbname == ':memory:':
# closing connections should wipe the in-memory database
@@ -62,11 +63,15 @@ def reinit_db():
print(" ! Closing another connection to db...")
conn.close()
elif django.db.connections.databases['default']['ENGINE'] == 'django.db.backends.sqlite3':
- try:
- os.remove(currentdbname)
- except OSError:
- print(" ! OSError on removing: " + currentdbname + " (Is the file open in another app?\n")
- raise
+ if os.path.isfile(currentdbname):
+ try:
+ print(" - deleting " + currentdbname)
+ os.remove(currentdbname)
+ except OSError:
+ print(" ! OSError on removing: " + currentdbname + " (Is the file open in another app?)\n")
+ raise
+ else:
+ print(" - No database file found: " + currentdbname + " ..continuing, will create it.\n")
else:
cursor = django.db.connection.cursor()
cursor.execute("DROP DATABASE %s" % currentdbname)
@@ -74,15 +79,14 @@ def reinit_db():
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % currentdbname)
cursor.execute("USE %s" % currentdbname)
- #Sync user - needed after reload
- print(" - Migrating: " + settings.DATABASES['default']['NAME'])
- print(django.db.connections.databases['default']['NAME'])
+ #Sync user - needed after reload
+ print(" - Migrating: " + django.db.connections.databases['default']['NAME'])
management.call_command('migrate', interactive=False)
print(" - done migration on: " + settings.DATABASES['default']['NAME'])
+ print("users in db already: ",len(User.objects.all()))
try:
- print(" - Setting up admin user on: " + settings.DATABASES['default']['NAME'])
- print(django.db.connections.databases['default']['NAME'])
+ print(" - Setting up admin user on: " + django.db.connections.databases['default']['NAME'])
print(" - user: {} ({:.5}...) <{}> ".format(expouser, expouserpass, expouseremail))
user = User.objects.create_user(expouser, expouseremail, expouserpass)
user.is_staff = True
@@ -93,6 +97,8 @@ def reinit_db():
print(django.db.connections.databases['default']['NAME'])
print(" ! You probably have not got a clean db when you thought you had.\n")
print(" ! Also you are probably NOT running an in-memory db now.\n")
+ print("users in db: ",len(User.objects.all()))
+ print("tables in db: ",len(connection.introspection.table_names()))
memdumpsql(fn='integrityfail.sql')
django.db.connections.databases['default']['NAME'] = ':memory:'
#raise
@@ -123,6 +129,9 @@ def restore_dbsettings():
def set_in_memory_dbsettings():
django.db.close_old_connections() # needed if MySQL running?
+ for conn in django.db.connections.all():
+ print(" ! Closing another connection to db in set_in_memory_dbsettings")
+ conn.close()
settings.DATABASES['default'] = {'ENGINE': 'django.db.backends.sqlite3',
'AUTOCOMMIT': True,
'ATOMIC_REQUESTS': False,
@@ -250,7 +259,6 @@ class JobQueue():
with open(self.tfile, 'w') as f:
json.dump(self.results, f)
return True
-
def runqonce(self):
"""Run all the jobs in the queue provided - once
@@ -309,10 +317,8 @@ class JobQueue():
self.loadprofiles()
store_dbsettings()
- print("-- start ", settings.DATABASES['default']['ENGINE'], settings.DATABASES['default']['NAME'])
- print(django.db.connections.databases['default']['NAME'])
+ print("-- start ", django.db.connections.databases['default']['ENGINE'], django.db.connections.databases['default']['NAME'])
-
if dbname ==":memory:":
# just run, and save the sql file
self.runqonce()
@@ -426,7 +432,7 @@ def usage():
QMs - read in the QM csv files (older caves only)
scans - the survey scans in all the wallets (must run before survex)
survex - read in the survex files - all the survex blocks but not the x/y/z positions
- survexpos - just the x/y/z Pos out of the survex files (not needed)
+ survexpos - just the x/y/z Pos out of the survex files (not needed) -- Never used.
tunnel - read in the Tunnel files - which scans the survey scans too
@@ -459,15 +465,7 @@ if __name__ == "__main__":
else:
runlabel=None
- store_dbsettings()
- set_in_memory_dbsettings()
- print(" - django.setup - next")
- try:
- django.setup()
- except:
- print(" ! COMPLICATED FAILURE. Does not occur with a valid 'troggle.sqlite' database in place.")
- raise
- print(" - django.setup - done")
+
jq = JobQueue(runlabel)
@@ -493,9 +491,9 @@ if __name__ == "__main__":
jq.enq("scans",import_surveyscans)
jq.enq("logbooks",import_logbooks)
jq.enq("QMs",import_QMs)
- jq.enq("survexblks",import_survexblks)
- jq.enq("survexpos",import_survexpos)
jq.enq("tunnel",import_tunnelfiles)
+ jq.enq("survexblks",import_survexblks)
+ #jq.enq("survexpos",import_survexpos)
elif "scans" in sys.argv:
jq.enq("scans",import_surveyscans)
elif "survex" in sys.argv:
@@ -524,7 +522,18 @@ if __name__ == "__main__":
print("%s not recognised as a command." % sys.argv[1])
exit()
- #jq.run_now_django_tests(1)
+ store_dbsettings()
+ #set_in_memory_dbsettings()
+ print(" - django.setup - next")
+ try:
+ django.setup()
+ except:
+ print(" ! Cyclic reference failure. Can occur when the initial db is empty. Fixed now (in UploadFileForm) but easy to reintroduce..")
+ raise
+ print(" - django.setup - done")
+
+ #set_in_memory_dbsettings() # seems to be ignored: appears to be set, but in reality it is not.
+ #jq.run_now_django_tests(1) # actually does set db to :memory: - but invisibly !
jq.run()
jq.showprofile()
diff --git a/parsers/caves.py b/parsers/caves.py
index 22b195e..219ae25 100644
--- a/parsers/caves.py
+++ b/parsers/caves.py
@@ -30,6 +30,7 @@ def readentrance(filename):
with open(os.path.join(settings.ENTRANCEDESCRIPTIONS, filename)) as f:
contents = f.read()
context = "in file %s" % filename
+ #print("Reading file ENTRANCE {} / {}".format(settings.ENTRANCEDESCRIPTIONS, filename))
entrancecontentslist = getXML(contents, "entrance", maxItems = 1, context = context)
if len(entrancecontentslist) == 1:
entrancecontents = entrancecontentslist[0]
@@ -83,10 +84,26 @@ def readentrance(filename):
cached_primary_slug = slugs[0])
primary = True
for slug in slugs:
- #print slug, filename
- cs = models_caves.EntranceSlug.objects.update_or_create(entrance = e,
- slug = slug,
- primary = primary)
+ #print("entrance slug:{} filename:{}".format(slug, filename))
+ try:
+ cs = models_caves.EntranceSlug.objects.update_or_create(entrance = e,
+ slug = slug,
+ primary = primary)
+ except:
+ # need to cope with duplicates
+ print(" ! FAILED to get only one ENTRANCE when updating using: "+filename)
+ kents = models_caves.EntranceSlug.objects.all().filter(entrance = e,
+ slug = slug,
+ primary = primary)
+ for k in kents:
+ message = " ! - DUPLICATE in db. entrance:"+ str(k.entrance) + ", slug:" + str(k.slug)
+ models.DataIssue.objects.create(parser='caves', message=message)
+ print(message)
+ for k in kents:
+ if k.slug != None:
+ print(" ! - OVERWRITING this one: slug:"+ str(k.slug))
+ k.notes = "DUPLICATE entrance found on import. Please fix\n" + k.notes
+ c = k
primary = False
def readcave(filename):
@@ -94,7 +111,7 @@ def readcave(filename):
with open(os.path.join(settings.CAVEDESCRIPTIONS, filename)) as f:
contents = f.read()
context = " in file %s" % filename
- #print "Reading file %s" % filename
+ #print("Reading file CAVE {}".format(filename))
cavecontentslist = getXML(contents, "cave", maxItems = 1, context = context)
#print cavecontentslist
if len(cavecontentslist) == 1:
@@ -145,7 +162,7 @@ def readcave(filename):
filename = filename)
except:
# need to cope with duplicates
- print(" ! FAILED to get only one cave when updating using: "+filename)
+ print(" ! FAILED to get only one CAVE when updating using: "+filename)
kaves = models_caves.Cave.objects.all().filter(kataster_number=kataster_number[0])
for k in kaves:
message = " ! - DUPLICATES in db. kataster:"+ str(k.kataster_number) + ", slug:" + str(k.slug())
diff --git a/parsers/survex.py b/parsers/survex.py
index 56017f3..c13f65f 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -116,6 +116,7 @@ def LoadSurvexEquate(survexblock, sline):
def LoadSurvexLinePassage(survexblock, stardata, sline, comment):
+ # do not import *data passage.. data which is LRUD not tape/compass/clino
pass
stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
@@ -371,7 +372,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
#survexblock.text = "".join(textlines)
# .text not used, using it for number of legs per block
legsinblock = survexlegsnumber - previousnlegs
- print("LEGS: {} (previous: {}, now:{})".format(legsinblock,previousnlegs,survexlegsnumber))
+ print(insp+"LEGS: {} (previous: {}, now:{})".format(legsinblock,previousnlegs,survexlegsnumber))
survexblock.text = str(legsinblock)
survexblock.save()
# print(insp+' - End found: ')
@@ -483,7 +484,7 @@ def LoadAllSurvexBlocks():
#Load all
# this is the first so id=1
- survexblockroot = models.SurvexBlock(name="root", survexpath="", begin_char=0, cave=None, survexfile=survexfile, totalleglength=0.0)
+ survexblockroot = models_survex.SurvexBlock(name="rootblock", survexpath="", begin_char=0, cave=None, survexfile=survexfile, totalleglength=0.0)
survexblockroot.save()
fin = survexfile.OpenFile()
textlines = [ ]
diff --git a/templates/statistics.html b/templates/statistics.html
index 63c1dfc..0cbe115 100644
--- a/templates/statistics.html
+++ b/templates/statistics.html
@@ -10,18 +10,19 @@
{{ expoCount }} expeditions: {{ personCount }} people have contributed {{ caveCount }} caves and {{ logbookEntryCount }} logbook entries.
Number of survey legs: {{nsurvexlegs}}
-Total length: {{totalsurvexlength|stringformat:".3f"}} m on importing survex files.
-Total length: {{addupsurvexlength|stringformat:".3f"}} m adding up all the years below.
+Total length: {{totalsurvexlength|stringformat:".1f"}} km added-up on importing survex files.
+Total length: {{addupsurvexlength|stringformat:".1f"}} km adding up all the years below.
+These are raw tape lengths which include pitches, splays and surface-surveys.
-Year | Surveys | Survey Legs | Total length (km) |
+Year | Survex Surveys | Survey Legs | Total length (m) |
{% for legs in legsbyexpo %}
{{legs.0.year}} |
{{legs.0.survexblock_set.all|length}} |
{{legs.1.nsurvexlegs|rjust:"10"}} |
- {{legs.1.survexleglength|stringformat:".3f"}} |
+ {{legs.1.survexleglength}} |
{% endfor %}