mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-22 07:11:52 +00:00

Progress dots on importing data

Philip Sargent 2020-06-01 00:42:48 +01:00
parent f949bb8dc0
commit 8c4c2ad1cf
3 changed files with 30 additions and 20 deletions
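The change adds a simple progress indicator to the long-running import steps: a dot is written to stderr every few items, so progress stays visible even while normal output is redirected to a log file. As a rough illustration of the pattern (the names below are invented for the sketch, not troggle's own):

import sys

def process_items(items, dot_every=10):
    count = 0
    for item in items:
        handle(item)                        # stand-in for the real per-item work
        count += 1
        if count >= dot_every:
            count = 0
            print(".", file=sys.stderr, end='')
            sys.stderr.flush()              # unbuffered, so each dot appears immediately
    print("+", file=sys.stderr)             # end-of-run marker, as in the survex loader

def handle(item):
    pass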


@@ -125,6 +125,8 @@ def import_tunnelfiles():
#def dumplogbooks():
#def writeCaves():
# Writes out all cave and entrance HTML files to
# folder specified in settings.CAVEDESCRIPTIONS
# for cave in Cave.objects.all():
# cave.writeDataFile()
# for entrance in Entrance.objects.all():
@@ -141,8 +143,8 @@ class JobQueue():
self.results = {}
self.results_order=[
"date","runlabel","reinit", "caves", "people",
"logbooks", "QMs", "survexblks", "survexpos",
"tunnel", "scans", "surveyimgs", "test", "dirsredirect", "syncuser" ]
"logbooks", "QMs", "scans", "survexblks", "survexpos",
"tunnel", "surveyimgs", "test", "dirsredirect", "syncuser" ]
for k in self.results_order:
self.results[k]=[]
self.tfile = "import_profile.json"
@@ -320,6 +322,8 @@ class JobQueue():
for k in self.results_order:
if k =="dirsredirect":
break
if k =="surveyimgs":
break
elif k =="syncuser":
break
elif k =="test":
@@ -371,17 +375,15 @@ def usage():
profile - print the profile from previous runs. Import nothing.
reset - normal usage: clear database and reread everything from files - time-consuming
caves - read in the caves
caves - read in the caves (must run first after reset)
people - read in the people from folk.csv (must run before logbooks)
logbooks - read in the logbooks
people - read in the people from folk.csv
QMs - read in the QM csv files (older caves only)
scans - the survey scans in all the wallets
scans - the survey scans in all the wallets (must run before survex)
survex - read in the survex files - all the survex blocks but not the x/y/z positions
survexpos - just the x/y/z Pos out of the survex files
survexall - both survex and survexpos
tunnel - read in the Tunnel files - which scans the survey scans too
drawings - Tunnel, QMs, scans
reinit - clear database (delete everything) and make empty tables. Import nothing.
syncuser - needed after reloading database from SQL backup
@@ -395,6 +397,8 @@ def usage():
caves and logbooks must be run on an empty db before the others as they
set up db tables used by the others.
the in-memory phase is on an empty db, so always runs reinit, caves & people for this phase
""")
if __name__ == "__main__":
@@ -428,8 +432,6 @@ if __name__ == "__main__":
jq.enq("people",import_people)
elif "QMs" in sys.argv:
jq.enq("QMs",import_QMs)
elif "reinit" in sys.argv:
jq.enq("reinit",reinit_db)
elif "reset" in sys.argv:
jq.enq("reinit",reinit_db)
jq.enq("dirsredirect",dirsredirect)
@@ -449,19 +451,14 @@ if __name__ == "__main__":
jq.enq("survexpos",import_survexpos)
elif "tunnel" in sys.argv:
jq.enq("tunnel",import_tunnelfiles)
elif "survexall" in sys.argv:
jq.enq("survexblks",import_survexblks)
jq.enq("survexpos",import_survexpos)
elif "drawings" in sys.argv:
jq.enq("QMs",import_QMs)
jq.enq("scans",import_surveyscans)
jq.enq("tunnel",import_tunnelfiles)
elif "surveyimgs" in sys.argv:
jq.enq("surveyimgs",import_surveyimgs) # imports into tables which are never read
elif "autologbooks" in sys.argv: # untested in 2020
import_auto_logbooks()
elif "dumplogbooks" in sys.argv: # untested in 2020
dumplogbooks()
# elif "writecaves" in sys.argv: # untested in 2020 - will overwrite input files!!
# writeCaves()
elif "profile" in sys.argv:
jq.loadprofiles()
jq.showprofile()
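The databaseReset.py changes reorder the job labels so that "scans" is timed ahead of the survex steps, drop the redundant "reinit", "survexall" and "drawings" dispatch branches, and spell out the ordering constraints in usage(). The dispatch relies on a label-keyed job queue; the sketch below is an illustrative reconstruction with invented internals, not troggle's actual class:

import sys, time

class JobQueue():
    """Sketch: jobs are enqueued under a label and run in order, recording
    the elapsed time per label so import profiles can be compared across runs."""
    def __init__(self, runlabel):
        self.runlabel = runlabel
        self.queue = []                # (label, function) pairs, run in enq order
        self.results = {}              # label -> elapsed seconds

    def enq(self, label, func):
        self.queue.append((label, func))
        return True

    def run(self):
        for label, func in self.queue:
            start = time.time()
            func()
            self.results[label] = time.time() - start

def import_caves():  print("importing caves...")
def import_people(): print("importing people...")

if __name__ == "__main__":
    jq = JobQueue("demo")
    if "caves" in sys.argv:
        jq.enq("caves", import_caves)
    elif "people" in sys.argv:
        jq.enq("people", import_people)
    jq.run()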


@@ -1,3 +1,5 @@
from __future__ import (absolute_import, division,
print_function)
import sys
import os
import re
@@ -131,7 +133,7 @@ regex_team_member = re.compile(r" and | / |, | & | \+ |^both$|^none$(?i)"
regex_qm = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
insp = ""
callcount = 0
def RecursiveLoad(survexblock, survexfile, fin, textlines):
"""Follows the *include links in all the survex files from the root file 1623.svx
and reads in the survex blocks, other data and the wallet references (survexscansfolder) as it
@@ -143,11 +145,18 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
stardata = stardatadefault
teammembers = [ ]
global insp
global callcount
# uncomment to print out all files during parsing
print((insp+" - Reading file: " + survexblock.survexfile.path + " <> " + survexfile.path))
print(insp+" - Reading file: " + survexblock.survexfile.path + " <> " + survexfile.path)
stamp = datetime.now()
lineno = 0
sys.stderr.flush();
callcount +=1
if callcount >=10:
callcount=0
print(".", file=sys.stderr,end='')
# Try to find the cave in the DB if not use the string as before
path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
@@ -447,7 +456,7 @@ def LoadAllSurvexBlocks():
models.DataIssue.objects.filter(parser='survex').delete()
print(' - Loading All Survex Blocks...')
print(' - redirecting stdout to loadsurvexblks.log ...')
print(' - redirecting stdout to loadsurvexblks.log...')
stdout_orig = sys.stdout
# Redirect sys.stdout to the file
sys.stdout = open('loadsurvexblks.log', 'w')
@@ -469,6 +478,10 @@ def LoadAllSurvexBlocks():
# Close the file
sys.stdout.close()
print("+", file=sys.stderr)
sys.stderr.flush();
# Restore sys.stdout to our old saved file handler
sys.stdout = stdout_orig
print(' - Loaded All Survex Blocks.')
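In the survex parser the dots come from a module-level call counter in RecursiveLoad, while LoadAllSurvexBlocks redirects stdout to loadsurvexblks.log for the duration of the parse and finishes with a "+" on stderr. A minimal sketch of that redirect-and-restore wrapping, using an invented function name:

import sys

def run_with_logfile(logpath, work):
    """Send all print() output to logpath while work() runs, keeping
    stderr (and therefore the progress dots) on the terminal."""
    stdout_orig = sys.stdout
    sys.stdout = open(logpath, 'w')
    try:
        work()                          # e.g. the recursive survex parse
    finally:
        sys.stdout.close()
        sys.stdout = stdout_orig        # restore the terminal stream
        print("+", file=sys.stderr)     # visible end-of-run marker
        sys.stderr.flush()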


@@ -147,7 +147,7 @@ def LoadListScansFile(survexscansfolder):
survexscansingle.save()
c+=1
if c>=10:
print(".", end=' ')
print(".", end='')
c = 0