mirror of https://expo.survex.com/repositories/troggle/.git (synced 2025-04-03 17:31:47 +01:00)
tidying and prep for python3
This commit is contained in:
parent 49d5857b36
commit b69bdcd126
@@ -44,12 +44,12 @@ pip install pygraphviz   # fails to install
 pip install pyparsing pydot    # installs fine
 django extension graph_models  # https://django-extensions.readthedocs.io/en/latest/graph_models.html
 
-Or use a python3 virtual environment:
+Or use a python3 virtual environment: (python3.5 not later)
 $ cd troggle
 $ cd ..
-$ python3 -m venv pyth3d2
+$ python3.5 -m venv pyth35d2
 (creates folder with virtual env)
-cd pyth3d2
+cd pyth35d2
 bin/activate
 (now install everything - not working yet..)
 $ pip install -r requirements.txt
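Since the note above pins the virtual environment to python3.5 and not later, the same constraint can be checked from inside a script. A minimal sketch, not part of this commit; the guard and its wording are illustrative only:

    import sys

    # Refuse to run under anything other than the 3.5 series,
    # matching the "python3.5 not later" note above.
    if sys.version_info[:2] != (3, 5):
        raise SystemExit("This setup expects Python 3.5, found %d.%d"
                         % sys.version_info[:2])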
@@ -1,20 +1,26 @@
+from __future__ import (absolute_import, division,
+                        print_function)
 import os
 import time
 import timeit
+import json
 
 import settings
 os.environ['PYTHONPATH'] = settings.PYTHON_PATH
 os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
 
 from django.core import management
 from django.db import connection, close_old_connections
 from django.contrib.auth.models import User
 from django.http import HttpResponse
 from django.core.urlresolvers import reverse
 
 from troggle.core.models import Cave, Entrance
+import troggle.settings
 import troggle.flatpages.models
-import json
 import troggle.logbooksdump
 
-# NOTE databaseRest.py is *imported* by views_other.py as it is used in the control panel
+# NOTE databaseReset.py is *imported* by views_other.py as it is used in the control panel
 # presented there.
 
 expouser=settings.EXPOUSER
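For context, the environment lines above are what let this standalone script use Django models: the settings module is named before anything from django.* is imported. A minimal self-contained sketch of that pattern, not troggle code; on current Django an explicit django.setup() call is also required:

    import os
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

    import django
    django.setup()                 # initialise the app registry

    # Only after setup() is it safe to import and query models:
    from django.contrib.auth.models import User
    print(User.objects.count())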
@@ -60,52 +66,52 @@ def dirsredirect():
         f.save()
 
 def import_caves():
-    import parsers.caves
+    import troggle.parsers.caves
     print("Importing Caves")
-    parsers.caves.readcaves()
+    troggle.parsers.caves.readcaves()
 
 def import_people():
-    import parsers.people
+    import troggle.parsers.people
     print("Importing People (folk.csv)")
-    parsers.people.LoadPersonsExpos()
+    troggle.parsers.people.LoadPersonsExpos()
 
 def import_logbooks():
-    import parsers.logbooks
+    import troggle.parsers.logbooks
     print("Importing Logbooks")
-    parsers.logbooks.LoadLogbooks()
+    troggle.parsers.logbooks.LoadLogbooks()
 
 def import_QMs():
     print("Importing QMs (old caves)")
-    import parsers.QMs
+    import troggle.parsers.QMs
     # import process itself runs on qm.csv in only 3 old caves, not the modern ones!
 
 def import_survexblks():
-    import parsers.survex
+    import troggle.parsers.survex
     print("Importing Survex Blocks")
-    parsers.survex.LoadAllSurvexBlocks()
+    troggle.parsers.survex.LoadAllSurvexBlocks()
 
 def import_survexpos():
-    import parsers.survex
+    import troggle.parsers.survex
     print("Importing Survex x/y/z Positions")
-    parsers.survex.LoadPos()
+    troggle.parsers.survex.LoadPos()
 
 def import_surveyimgs():
     """This appears to store data in unused objects. The code is kept
     for future re-working to manage progress against notes, plans and elevs.
     """
-    import parsers.surveys
-    print("Importing survey images")
-    parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
+    #import troggle.parsers.surveys
+    print("NOT Importing survey images")
+    #troggle.parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
 
 def import_surveyscans():
-    import parsers.surveys
+    import troggle.parsers.surveys
     print("Importing Survey Scans")
-    parsers.surveys.LoadListScans()
+    troggle.parsers.surveys.LoadListScans()
 
 def import_tunnelfiles():
-    import parsers.surveys
+    import troggle.parsers.surveys
     print("Importing Tunnel files")
-    parsers.surveys.LoadTunnelFiles()
+    troggle.parsers.surveys.LoadTunnelFiles()
 
 # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 # These functions moved to a different file - not used currently.
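The pattern above keeps the heavy parser imports inside each function, so the module itself loads quickly, and the new troggle.-prefixed paths mean each parser is always registered under one canonical module name rather than being importable both as parsers.caves and troggle.parsers.caves. An illustrative sketch with hypothetical package names, not troggle code:

    def import_widgets():
        # Lazy, fully-qualified import: nothing is loaded until the job runs,
        # and the package always appears in sys.modules under one name.
        import mysite.loaders.widgets
        print("Importing Widgets")
        mysite.loaders.widgets.load_all()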
@@ -152,7 +158,7 @@ class JobQueue():
                 for j in data:
                     self.results[j] = data[j]
             except:
-                print "FAILURE parsing JSON file %s" % (self.tfile)
+                print("FAILURE parsing JSON file %s" % (self.tfile))
                 # Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
             f.close()
         for j in self.results_order:
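The profile handling above amounts to reading a JSON file of past timings and tolerating a corrupt or missing file. A minimal standalone sketch of that idea (hypothetical file name; it also narrows the bare except to the exceptions open() and json actually raise):

    import json

    def load_profiles(tfile="import_profile.json"):   # hypothetical file name
        results = {}
        try:
            with open(tfile) as f:
                data = json.load(f)
            results.update(data)
        except (IOError, OSError, ValueError):
            # ValueError covers malformed JSON on both Python 2 and 3
            print("FAILURE parsing JSON file %s" % tfile)
        return results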
@@ -176,7 +182,7 @@ class JobQueue():
         """Run all the jobs in the queue provided - once
         """
 
-        print "** Running job ", self.runlabel
+        print("** Running job ", self.runlabel)
         jobstart = time.time()
         self.results["date"].pop()
         self.results["date"].append(jobstart)
@@ -187,14 +193,14 @@ class JobQueue():
             start = time.time()
             i[1]()  # looks ugly but invokes function passed in the second item in the tuple
             duration = time.time()-start
-            print "\n*- Ended \"", i[0], "\" %.1f seconds" % duration
+            print("\n*- Ended \"", i[0], "\" %.1f seconds" % duration)
             self.results[i[0]].pop()  # the null item
             self.results[i[0]].append(duration)
 
 
         jobend = time.time()
         jobduration = jobend-jobstart
-        print "** Ended job %s - %.1f seconds total." % (self.runlabel,jobduration)
+        print("** Ended job %s - %.1f seconds total." % (self.runlabel,jobduration))
 
         return True
 
@@ -221,7 +227,7 @@ class JobQueue():
         else:
             skipmem = True
 
-        print "-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
+        print("-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE'])
         #print "-- DATABASES.default", settings.DATABASES['default']
 
         if dbname ==":memory:":
@@ -251,8 +257,8 @@ class JobQueue():
                 'PORT': ''}
 
 
-            print "-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
-            print "-- DATABASES.default", settings.DATABASES['default']
+            print("-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE'])
+            #print("-- DATABASES.default", settings.DATABASES['default'])
 
             # but because the user may be expecting to add this to a db with lots of tables already there,
             # the jobqueue may not start from scratch so we need to initialise the db properly first
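The two comments above are about re-pointing Django's default database at run time and then making sure its tables exist before the job queue starts filling them. A simplified sketch of that idea, not the troggle code; it assumes an already-configured Django project and uses an in-memory SQLite database:

    from django.conf import settings
    from django.core import management
    from django.db import close_old_connections

    settings.DATABASES['default'] = {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
    close_old_connections()           # drop connections to the previous database
    management.call_command('migrate', interactive=False)   # create the tables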
@@ -282,7 +288,7 @@ class JobQueue():
         settings.DATABASES['default'] = dbdefault
         settings.DATABASES['default']['ENGINE'] = dbengine
         settings.DATABASES['default']['NAME'] = dbname
-        print "-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
+        print("-- ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE'])
 
         django.db.close_old_connections()  # maybe not needed here
         for j in self.results_order:
@@ -308,9 +314,9 @@ class JobQueue():
             elif k =="test":
                 break
             elif k =="date":
-                print " days ago ",
+                print(" days ago ", end=' ')
             else:
-                print '%10s (s)' % k,
+                print('%10s (s)' % k, end=' ')
             percen=0
             r = self.results[k]
 
@@ -320,26 +326,30 @@ class JobQueue():
                         rp = r[i]
                     else:
                         rp = " - "
-                    print '%8s' % rp,
+                    print('%8s' % rp, end=' ')
                 elif k =="date":
                     # Calculate dates as days before present
                     if r[i]:
                         if i == len(r)-1:
-                            print " this",
+                            print(" this", end=' ')
                         else:
                             # prints one place to the left of where you expect
-                            days = (r[i]-r[len(r)-1])/(24*60*60)
-                            print '%8.2f' % days,
+                            if r[len(r)-1]:
+                                s = r[i]-r[len(r)-1]
+                            else:
+                                s = 0
+                            days = (s)/(24*60*60)
+                            print('%8.2f' % days, end=' ')
                 elif r[i]:
-                    print '%8.1f' % r[i],
+                    print('%8.1f' % r[i], end=' ')
                     if i == len(r)-1 and r[i-1]:
                         percen = 100* (r[i] - r[i-1])/r[i-1]
                         if abs(percen) >0.1:
-                            print '%8.1f%%' % percen,
+                            print('%8.1f%%' % percen, end=' ')
                 else:
-                    print " - ",
-            print ""
-        print "\n"
+                    print(" - ", end=' ')
+            print("")
+        print("\n")
         return True
 
 
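Two Python 3 conversions carry this table-printing code: the trailing-comma form of print becomes print(..., end=' '), and the __future__ division import added at the top of the file keeps '/' as true division so the days figure stays fractional. A standalone illustration with a made-up number:

    from __future__ import division, print_function

    elapsed = 93000                      # seconds, made-up value
    days = elapsed / (24*60*60)          # 1.08, a float even on Python 2
    print('%8.2f' % days, end=' ')       # stays on the same output line
    print('(days ago)')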
@@ -438,13 +448,15 @@ if __name__ == "__main__":
     elif "dumplogbooks" in sys.argv:  # untested in 2020
         dumplogbooks()
     elif "profile" in sys.argv:
+        jq.loadprofiles()
         jq.showprofile()
+        exit()
     elif "help" in sys.argv:
         usage()
         exit()
     else:
         usage()
-        print("%s not recognised as a command." % sys.argv[1])
+        print(("%s not recognised as a command." % sys.argv[1]))
         exit()
 
     jq.run()

@@ -27,6 +27,8 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
     ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])
 
     survexleg = models.SurvexLeg(block=survexblock, stationfrom=ssfrom, stationto=ssto)
+    # this next fails for two surface survey svx files which use / for decimal point
+    # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
     if stardata["type"] == "normal":
         try:
             survexleg.tape = float(ls[stardata["tape"]])
@@ -34,7 +36,7 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
             print("! Tape misread in", survexblock.survexfile.path)
             print(" Stardata:", stardata)
             print(" Line:", ls)
-            message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
+            message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
             models.DataIssue.objects.create(parser='survex', message=message)
             survexleg.tape = 1000
         try:
@@ -43,7 +45,7 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
             print("! Clino misread in", survexblock.survexfile.path)
             print(" Stardata:", stardata)
             print(" Line:", ls)
-            message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
+            message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
             models.DataIssue.objects.create(parser='survex', message=message)
             lclino = error
         try:
@@ -52,7 +54,7 @@ def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
             print("! Compass misread in", survexblock.survexfile.path)
             print(" Stardata:", stardata)
             print(" Line:", ls)
-            message = ' ! Value Error: line %s in %s' % (ls, survexblock.survexfile.path)
+            message = ' ! Value Error: Compass misread in line %s in %s' % (ls, survexblock.survexfile.path)
             models.DataIssue.objects.create(parser='survex', message=message)
             lcompass = error
         if lclino == "up":
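The comment added in the first hunk above notes tape readings such as '29/09' (slash used as a decimal point) or '(06.05)' (bracketed value), which make float() raise the ValueError these handlers catch. One possible workaround, sketched here with a hypothetical helper name and not part of this commit:

    import re

    def parse_tape(raw):
        # Accept '29/09' and '(06.05)' as well as plain '6.05';
        # anything else still raises ValueError, just as float() would.
        cleaned = raw.strip().strip("()")
        cleaned = re.sub(r"^(\d+)/(\d+)$", r"\1.\2", cleaned)
        return float(cleaned)

    # parse_tape("29/09") -> 29.09    parse_tape("(06.05)") -> 6.05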
@@ -82,14 +82,14 @@ def get_or_create_placeholder(year):
     # logging.info("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")
 
 # dead
-def listdir(*directories):
-    try:
-        return os.listdir(os.path.join(settings.SURVEYS, *directories))
-    except:
-        import urllib.request, urllib.parse, urllib.error
-        url = settings.SURVEYS + reduce(lambda x, y: x + "/" + y, ["listdir"] + list(directories))
-        folders = urllib.request.urlopen(url.replace("#", "%23")).readlines()
-        return [folder.rstrip(r"/") for folder in folders]
+# def listdir(*directories):
+#     try:
+#         return os.listdir(os.path.join(settings.SURVEYS, *directories))
+#     except:
+#         import urllib.request, urllib.parse, urllib.error
+#         url = settings.SURVEYS + reduce(lambda x, y: x + "/" + y, ["listdir"] + list(directories))
+#         folders = urllib.request.urlopen(url.replace("#", "%23")).readlines()
+#         return [folder.rstrip(r"/") for folder in folders]
 
 # add survey scans
 # def parseSurveyScans(expedition, logfile=None):
@@ -171,13 +171,13 @@ def listdir(*directories):
 # parseSurveyScans(expedition)
 
 # dead
-def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
-    file=Image.open(filePath)
-    print(filePath)
-    if 'interlace' in file.info:
-        return file.info['interlace']
-    else:
-        return False
+# def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
+#     file=Image.open(filePath)
+#     print(filePath)
+#     if 'interlace' in file.info:
+#         return file.info['interlace']
+#     else:
+#         return False
 
 
 # handles url or file, so we can refer to a set of scans on another server