mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-22 07:11:52 +00:00

reducing clutter in output

Philip Sargent 2020-04-28 01:18:57 +01:00
parent a8460065a4
commit 4be8c81291
4 changed files with 55 additions and 78 deletions

View File

@@ -75,24 +75,22 @@ def import_QMs():
     import parsers.QMs
     # import process itself runs on qm.csv in only 3 caves, not 264!

-def import_survex():
+def import_survexblks():
     import parsers.survex
     print("Importing Survex Blocks")
     parsers.survex.LoadAllSurvexBlocks()
-    print("Importing Survex Positions")
-    parsers.survex.LoadPos()

 def import_survexpos():
     import parsers.survex
-    print("Importing Survex Positions")
+    print("Importing Survex x/y/z Positions")
     parsers.survex.LoadPos()

-def import_surveys():
+def import_surveyimgs():
     """This appears to store data in unused objects. The code is kept
     for future re-working to manage progress against notes, plans and elevs.
     """
     import parsers.surveys
-    print("Importing surveys")
+    print("Importing survey images")
     parsers.surveys.parseSurveys(logfile=settings.LOGFILE)

 def import_surveyscans():
@@ -170,8 +168,8 @@ class JobQueue():
         self.results = {}
         self.results_order=[
             "date","runlabel","reinit", "caves", "people",
-            "logbooks", "scans", "QMs", "survex",
-            "tunnel", "surveys", "test", "dirsredirect", "syncuser", "survexpos" ]
+            "logbooks", "scans", "QMs", "survexblks",
+            "tunnel", "surveyimgs", "test", "dirsredirect", "syncuser", "survexpos" ]
         for k in self.results_order:
             self.results[k]=[]
         self.tfile = "import_profile.json"
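[Editor's note] The results_order keys above index a per-job timing table. As a rough orientation only, here is a minimal sketch of that enqueue-and-time pattern: enq() and the import_profile.json filename come from this diff, while run() and the JSON dump are assumptions about how the real JobQueue records its results.

# Minimal sketch only, not the real JobQueue: times each queued import step
# and stores the durations under the same keys as results_order.
import json
import time

class MiniJobQueue(object):
    def __init__(self, runlabel):
        self.runlabel = runlabel
        self.queue = []        # list of (jobname, jobfunction) in run order
        self.results = {}      # jobname -> list of elapsed seconds

    def enq(self, label, func):
        """Add a named import step to the queue (names match the enq() calls below)."""
        self.queue.append((label, func))

    def run(self):
        """Run every queued step, recording how long each one took (assumed behaviour)."""
        for label, func in self.queue:
            start = time.time()
            func()
            self.results.setdefault(label, []).append(time.time() - start)
        with open("import_profile.json", "w") as f:
            json.dump(self.results, f)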
@@ -288,8 +286,8 @@ def usage():
         QMs - read in the QM csv files (older caves only)
         reinit - clear database (delete everything) and make empty tables. Import nothing.
         scans - the survey scans in all the wallets
-        survex - read in the survex files - all the survex blocks
-        survexpos - just the Pos out of the survex files (not part of reset)
+        survex - read in the survex files - all the survex blocks and the x/y/z positions
+        survexpos - just the x/y/z Pos out of the survex files
         tunnel - read in the Tunnel files - which scans the survey scans too
@@ -298,7 +296,7 @@ def usage():
         autologbooks - read in autologbooks (what are these?)
         dumplogbooks - write out autologbooks (not working?)
         syncuser - needed after reloading database from SQL backup
-        surveys - read in scans by expo, must run after "people". Not used.
+        surveyimgs - read in scans by expo, must run after "people". Not used.
         test - testing...

         and [runlabel] is an optional string identifying this run of the script
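[Editor's note] For orientation, a hedged illustration of how these options are typically invoked; the script name databaseReset.py and the run labels are assumptions, not confirmed by this page.

# Hypothetical invocations only (script name and labels assumed):
#
#   python databaseReset.py reset     full-rebuild-2020-04-28
#   python databaseReset.py survexpos pos-only-refresh
#   python databaseReset.py caves
#
# The first argument selects which job(s) to enqueue; the optional [runlabel]
# identifies the run in the timing records written to import_profile.json.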
@@ -328,7 +326,8 @@ if __name__ == "__main__":
         jq.enq("reinit",reinit_db)
         jq.enq("dirsredirect",dirsredirect)
         jq.enq("caves",import_caves)
-        #jq.enq("people",import_people)
+        jq.enq("people",import_people)
+        jq.enq("survex",import_survexblks)
         #jq.enq("logbooks",import_logbooks)
     elif "caves" in sys.argv:
         jq.enq("caves",import_caves)
@@ -348,16 +347,18 @@ if __name__ == "__main__":
         jq.enq("scans",import_surveyscans)
         jq.enq("logbooks",import_logbooks)
         jq.enq("QMs",import_QMs)
-        jq.enq("survex",import_survex)
+        jq.enq("survexblks",import_survexblks)
+        jq.enq("survexpos",import_survexpos)
         jq.enq("tunnel",import_tunnelfiles)
     elif "scans" in sys.argv:
         jq.enq("scans",import_surveyscans)
     elif "survex" in sys.argv:
-        jq.enq("survex",import_survex)
+        jq.enq("survexblks",import_survexblks)
+        jq.enq("survexpos",import_survexpos)
     elif "survexpos" in sys.argv:
         jq.enq("survexpos",import_survexpos)
     elif "surveys" in sys.argv:
-        jq.enq("surveys",import_surveys)
+        jq.enq("surveyimgs",import_surveyimgs)
     elif "tunnel" in sys.argv:
         jq.enq("tunnel",import_tunnelfiles)
     elif "help" in sys.argv:
@@ -365,7 +366,7 @@ if __name__ == "__main__":
     elif "resetend" in sys.argv:
         jq.enq("QMs",import_QMs)
         jq.enq("tunnel",import_tunnelfiles)
-        jq.enq("surveys",import_surveys)
+        jq.enq("surveyimgs",import_surveyimgs)
         #import_descriptions() # no longer present
         #parse_descriptions() # no longer present
 # elif "writeCaves" in sys.argv:

View File

@@ -5,7 +5,7 @@ import sys
 DATABASES = {
     'default': {
         'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
-        'NAME' : 'troggle',        # Or path to database file if using sqlite3.
+        'NAME' : 'troggle.sqlite', # Or path to database file if using sqlite3.
         'USER' : 'expo',           # Not used with sqlite3.
         'PASSWORD' : 'sekrit',     # Not used with sqlite3.
         'HOST' : '',               # Set to empty string for localhost. Not used with sqlite3.
@@ -17,6 +17,12 @@ EXPOUSER = 'expo'
 EXPOUSERPASS = 'nnn:ggggggr'
 EXPOUSER_EMAIL = 'philip.sargent@gmail.com'
 REPOS_ROOT_PATH = '/mnt/d/CUCC-Expo/'
 sys.path.append(REPOS_ROOT_PATH)
@@ -37,13 +43,13 @@ SURVEY_SCANS = '/mnt/f/expofiles/'
 #FILES = REPOS_ROOT_PATH + 'expofiles'
 FILES = '/mnt/f/expofiles'
-EXPOWEB_URL = REPOS_ROOT_PATH + 'expoweb/'
+EXPOWEB_URL = ''
 SURVEYS_URL = '/survey_scans/'
 PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
-#URL_ROOT = 'http://127.0.0.1:8000/'
-URL_ROOT = "/mnt/f/expofiles/"
+URL_ROOT = 'http://127.0.0.1:8000/'
+#URL_ROOT = "/mnt/d/CUCC-Expo/expoweb/"
 DIR_ROOT = ''#this should end in / if a value is given
@@ -51,7 +57,7 @@ DIR_ROOT = ''#this should end in / if a value is given
 #MEDIA_URL = URL_ROOT + DIR_ROOT + '/site_media/'
 MEDIA_URL = '/site_media/'
-MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'
+MEDIA_ROOT = REPOS_ROOT_PATH + 'troggle/media/'
 MEDIA_ADMIN_DIR = '/usr/lib/python2.7/site-packages/django/contrib/admin/media/'
 STATIC_URL = URL_ROOT + 'static/'
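[Editor's note] A hedged illustration of the MEDIA_ROOT fix above: REPOS_ROOT_PATH already ends in '/', so prefixing 'troggle/media/' with another '/' produced a double slash (visible as '/mnt/d/CUCC-Expo//troggle/media/' in the deleted settings dump further down this page). The variable names below mirror the settings; the os.path.join alternative is an editor's suggestion, not part of the commit.

import os.path

REPOS_ROOT_PATH = '/mnt/d/CUCC-Expo/'
old_media_root = REPOS_ROOT_PATH + '/troggle/media/'  # '/mnt/d/CUCC-Expo//troggle/media/'
new_media_root = REPOS_ROOT_PATH + 'troggle/media/'   # '/mnt/d/CUCC-Expo/troggle/media/'
alt_media_root = os.path.join(REPOS_ROOT_PATH, 'troggle', 'media', '')  # same result, slash-safe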

View File

@@ -149,7 +149,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
             survexblock.survexscansfolder = survexscansfolders[0]
             #survexblock.refscandir = "%s/%s%%23%s" % (mref.group(1), mref.group(1), mref.group(2))
             survexblock.save()
-            print('Wallet *ref - %s' % refscan)
+            print(' - Wallet *ref - %s' % refscan)
             continue
     # This whole section should be moved if we can have *QM become a proper survex command
@@ -160,7 +160,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
     # ;QM1 a hobnob_hallway_2.42 - junction of keyhole passage
     qmline = comment and regex_qm.match(comment)
     if qmline:
-        print(qmline.groups())
+        # print(qmline.groups())
         #(u'1', u'B', u'miraclemaze', u'1.17', u'-', None, u'\tcontinuation of rift')
         qm_no = qmline.group(1)
         qm_grade = qmline.group(2)
@@ -170,31 +170,32 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
         qm_resolve_station = qmline.group(7)
         qm_notes = qmline.group(8)
-        print('Cave - %s' % survexfile.cave)
-        print('QM no %d' % int(qm_no))
-        print('QM grade %s' % qm_grade)
-        print('QM section %s' % qm_from_section)
-        print('QM station %s' % qm_from_station)
-        print('QM res section %s' % qm_resolve_section)
-        print('QM res station %s' % qm_resolve_station)
-        print('QM notes %s' % qm_notes)
+        # print('Cave - %s' % survexfile.cave)
+        # print('QM no %d' % int(qm_no))
+        # print('QM grade %s' % qm_grade)
+        # print('QM section %s' % qm_from_section)
+        # print('QM station %s' % qm_from_station)
+        # print('QM res section %s' % qm_resolve_section)
+        # print('QM res station %s' % qm_resolve_station)
+        # print('QM notes %s' % qm_notes)

         # If the QM isn't resolved (has a resolving station) thn load it
         if not qm_resolve_section or qm_resolve_section is not '-' or qm_resolve_section is not 'None':
             from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
             # If we can find a section (survex note chunck, named)
             if len(from_section) > 0:
-                print(from_section[0])
+                # print(from_section[0])
                 from_station = models.SurvexStation.objects.filter(block=from_section[0], name=qm_from_station)
                 # If we can find a from station then we have the nearest station and can import it
                 if len(from_station) > 0:
-                    print(from_station[0])
+                    # print(from_station[0])
                     qm = models.QM.objects.create(number=qm_no,
                                                   nearest_station=from_station[0],
                                                   grade=qm_grade.upper(),
                                                   location_description=qm_notes)
         else:
-            print('QM found but resolved')
+            # print(' - QM found but resolved')
+            pass
     #print('Cave -sline ' + str(cave))
     if not sline:
@@ -208,7 +209,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
         # print(survexblock)
         LoadSurvexLineLeg(survexblock, stardata, sline, comment, survexfile.cave)
         # print(' - From: ')
-        #print(stardata)
+        # print(stardata)
         pass
     elif stardata["type"] == "passage":
         LoadSurvexLinePassage(survexblock, stardata, sline, comment)
@@ -247,7 +248,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
     path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", newsvxpath)
     if path_match:
         pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
-        print(pos_cave)
+        # print(pos_cave)
         cave = models.getCaveByReference(pos_cave)
         if cave:
             survexfile.cave = cave
@@ -350,7 +351,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
 def LoadAllSurvexBlocks():
-    print('Loading All Survex Blocks...')
+    print(' - Flushing All Survex Blocks...')
     models.SurvexBlock.objects.all().delete()
     models.SurvexFile.objects.all().delete()
@@ -362,6 +363,7 @@ def LoadAllSurvexBlocks():
     models.SurvexStation.objects.all().delete()
     print(" - Data flushed")
+    print(' - Loading All Survex Blocks...')
     survexfile = models.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
     survexfile.save()
@@ -377,14 +379,19 @@ def LoadAllSurvexBlocks():
     fin.close()
     survexblockroot.text = "".join(textlines)
     survexblockroot.save()
     print(' - Loaded All Survex Blocks.')

 poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

 def LoadPos():
-    print('Loading Pos....')
+    """Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of
+    all survey point positions. Then lookup each position by name to see if we have it in the database
+    and if we do, then save the x/y/z coordinates.
+    If we don't have it in the database, print an error message and discard it.
+    """
+    print(' - Generating a list of Pos and then loading them....')
     call([settings.CAVERN, "--output=%s%s.3d" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME), "%s%s.svx" % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME)])
     call([settings.THREEDTOPOS, '%s%s.3d' % (settings.SURVEX_DATA, settings.SURVEX_TOPNAME)], cwd = settings.SURVEX_DATA)
@@ -398,7 +405,7 @@ def LoadPos():
             ss = models.SurvexStation.objects.lookup(name)
             ss.x = float(x)
             ss.y = float(y)
-            ss.z = float(z)
+            ss.z = float(z)
             ss.save()
         except:
-            print "%s not parsed in survex %s.pos" % (name, settings.SURVEX_TOPNAME)
+            print "%s in %s.pos not found in lookup of SurvexStation.objects" % (name, settings.SURVEX_TOPNAME)

plist (37 lines deleted)
View File

@@ -1,37 +0,0 @@
ADMIN_MEDIA_PREFIX /troggle/media-admin/
ATHREEDCACHEDIRAA /mnt/d/CUCC-Expo/expowebcache/3d/
CAVEDESCRIPTIONSX /mnt/d/CUCC-Expo/expoweb/cave_data
DIR_ROOT
EMAIL_HOST smtp.gmail.com
EMAIL_HOST_USER philip.sargent@gmail.com
ENTRANCEDESCRIPTIONS /mnt/d/CUCC-Expo/expoweb/entrance_data
EXPOUSER_EMAIL philip.sargent@gmail.com
EXPOUSERPASS <redacted>
EXPOUSER expo
EXPOWEB /mnt/d/CUCC-Expo/expoweb/
EXPOWEB_URL /mnt/d/CUCC-Expo/expoweb/
FILES /mnt/f/expofiles
JSLIB_URL /mnt/f/expofiles/javascript/
LOGFILE /mnt/d/CUCC-Expo/troggle/troggle_log.txt
LOGIN_REDIRECT_URL /
MEDIA_ADMIN_DIR /usr/lib/python2.7/site-packages/django/contrib/admin/media/
MEDIA_ROOT /mnt/d/CUCC-Expo//troggle/media/
MEDIA_URL /site_media/
PHOTOS_ROOT /mnt/d/CUCC-Expo/expoweb/photos
PHOTOS_URL /photos/
PYTHON_PATH /mnt/d/CUCC-Expo/troggle/
REPOS_ROOT_PATH /mnt/d/CUCC-Expo/
ROOT_URLCONF troggle.urls
STATIC_ROOT /mnt/d/CUCC-Expo/
STATIC_URL /mnt/f/expofiles/static/
SURVEX_DATA /mnt/d/CUCC-Expo/loser/
SURVEY_SCANS /mnt/f/expofiles/
SURVEYS /mnt/d/CUCC-Expo/
SURVEYS_URL /survey_scans/
SVX_URL /survex/
TEMPLATE_DIRS ('/mnt/d/CUCC-Expo/troggle/templates',)
TINY_MCE_MEDIA_ROOT /usr/share/tinymce/www/
TINY_MCE_MEDIA_URL /mnt/f/expofiles//tinymce_media/
TUNNEL_DATA /mnt/d/CUCC-Expo/drawings/
URL_ROOT /mnt/f/expofiles/