dbReset now loads into memory first (fast err checking), then into db
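Read as a pattern, the commit message describes a two-phase load: parse everything into plain Python objects first, so malformed input fails fast, and only write to the database once parsing has succeeded. A minimal sketch of that pattern follows; Entry, parse_lines and save_all are hypothetical names for illustration, not troggle's actual API.

# Sketch of the two-phase "load into memory first, then into db" pattern
# named in the commit message. Entry, parse_lines and save_all are
# hypothetical illustrations, not troggle's actual API.
from dataclasses import dataclass

@dataclass
class Entry:
    name: str
    text: str

def parse_lines(lines):
    # Phase 1: build everything in memory, so format errors surface
    # immediately instead of part-way through a slow database load.
    entries = []
    for n, line in enumerate(lines, start=1):
        parts = line.split(None, 1)
        if not parts:
            continue
        if len(parts) != 2:
            raise ValueError("malformed line %d: %r" % (n, line))
        entries.append(Entry(name=parts[0], text=parts[1]))
    return entries

def save_all(entries, save):
    # Phase 2: only touch the database once phase 1 succeeded.
    for e in entries:
        save(e)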
@@ -10,7 +10,9 @@ from django.utils.timezone import make_aware
 
 import re
 import os
+import time
+from datetime import datetime, timedelta
 import sys
 
 line_leg_regex = re.compile(r"[\d\-+.]+$")
 
@@ -179,7 +181,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
             # print('QM res station %s' % qm_resolve_station)
             # print('QM notes %s' % qm_notes)
 
-            # If the QM isn't resolved (has a resolving station) thn load it
+            # If the QM isn't resolved (has a resolving station) then load it
             if not qm_resolve_section or qm_resolve_section is not '-' or qm_resolve_section is not 'None':
                 from_section = models.SurvexBlock.objects.filter(name=qm_from_section)
                 # If we can find a section (survex note chunck, named)
@@ -364,6 +366,11 @@ def LoadAllSurvexBlocks():
 
     print(" - Data flushed")
     print(' - Loading All Survex Blocks...')
 
+    print(' - redirecting stdout to loadsurvexblks.log ...')
+    stdout_orig = sys.stdout
+    # Redirect sys.stdout to the file
+    sys.stdout = open('loadsurvexblks.log', 'w')
+
     survexfile = models.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
     survexfile.save()
@@ -379,6 +386,11 @@ def LoadAllSurvexBlocks():
     fin.close()
     survexblockroot.text = "".join(textlines)
     survexblockroot.save()
+
+    # Close the file
+    sys.stdout.close()
+    # Restore sys.stdout to our old saved file handler
+    sys.stdout = stdout_orig
     print(' - Loaded All Survex Blocks.')
 
 
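The redirect/restore pair added in the two hunks above is the standard manual idiom. A short sketch of the same idea wrapped in try/finally, so stdout comes back even if the load raises; this is a possible hardening, not what the commit itself does, and run_with_log is a hypothetical name.

# Sketch: the stdout-redirect idiom from the hunks above, wrapped in
# try/finally so the original stream is restored even on an exception.
# run_with_log, logpath and work are hypothetical names for illustration.
import sys

def run_with_log(logpath, work):
    stdout_orig = sys.stdout
    sys.stdout = open(logpath, 'w')
    try:
        work()                  # anything work() prints goes to the log file
    finally:
        sys.stdout.close()      # flush and close the log file
        sys.stdout = stdout_orig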
@@ -399,13 +411,18 @@ def LoadPos():
     # but without cave import being run before,
     # then *everything* may be in the fresh 'not found' cache file.
 
-    cachefile = settings.SURVEX_DATA + "posnotfound"
+    cachefile = settings.SURVEX_DATA + "posnotfound.cache"
     notfoundbefore = {}
     if os.path.isfile(cachefile):
         updtsvx = os.path.getmtime(topdata + ".svx")
         updtcache = os.path.getmtime(cachefile)
         age = updtcache - updtsvx
-        print(' svx: %s cache: %s cache age: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) )))
+        print(' svx: %s cache: %s not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) )))
+
+        now = time.time()
+        if now - updtcache > 30*24*60*60:
+            print " cache is more than 30 days old. Deleting."
+            os.remove(cachefile)
         if age < 0 :
             print " cache is stale."
             os.remove(cachefile)
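The staleness rules this hunk adds (delete the not-found cache when it is over 30 days old, or when the .svx data is newer than the cache) can be read as a single predicate. A sketch, with cachefile and svxfile as placeholder paths:

# Sketch of the mtime-based staleness test the hunk above adds.
# cache_is_stale is a hypothetical name; cachefile and svxfile are
# placeholder paths for illustration.
import os
import time
from datetime import timedelta

def cache_is_stale(cachefile, svxfile, max_age_days=30):
    if not os.path.isfile(cachefile):
        return True
    updtsvx = os.path.getmtime(svxfile)
    updtcache = os.path.getmtime(cachefile)
    age = updtcache - updtsvx
    print(' not-found cache is fresher by: %s' % timedelta(seconds=age))
    if time.time() - updtcache > max_age_days * 24 * 60 * 60:
        return True    # the cache itself is more than max_age_days old
    return age < 0     # the survex data has changed since the cache was built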
@@ -432,6 +449,8 @@ def LoadPos():
     # cavern defaults to using same cwd as supplied input file
     call([settings.CAVERN, "--output=%s.3d" % (topdata), "%s.svx" % (topdata)])
     call([settings.THREEDTOPOS, '%s.3d' % (topdata)], cwd = settings.SURVEX_DATA)
+    print " - This next bit takes a while. Matching ~32,000 survey positions. Be patient..."
+
     posfile = open("%s.pos" % (topdata))
     posfile.readline() #Drop header
     for line in posfile.readlines():
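For context, the .pos file that 3dtopos produces conventionally has one station per line in the form "( x, y, z ) station.name", and the loop above matches each name against SurvexStation objects. A minimal line-parser sketch; the regex is an assumption about that format, not troggle's actual code.

# Sketch: parse one line of a Survex .pos file, conventionally of the
# form "(    x,    y,    z ) station.name". The regex is an assumption
# about that format for illustration, not troggle's actual parser.
import re

pos_line = re.compile(r"\(\s*([\d\-+.]+),\s*([\d\-+.]+),\s*([\d\-+.]+)\s*\)\s+(\S+)")

def parse_pos_line(line):
    m = pos_line.match(line.strip())
    if not m:
        return None
    x, y, z, name = m.groups()
    return float(x), float(y), float(z), name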
@@ -449,9 +468,8 @@ def LoadPos():
                 ss.save()
                 found += 1
             except:
                 #print "%s in %s.pos not found in lookup of SurvexStation.objects" % (name, settings.SURVEX_TOPNAME)
                 notfoundnow.append(name)
-    print " - %s stations NOT found in lookup of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip))
+    print " - %s stations not found in lookup of SurvexStation.objects. %s found. %s skipped." % (len(notfoundnow),found, len(skip))
 
     if found > 10: # i.e. a previous cave import has been done
         try: