mirror of https://expo.survex.com/repositories/troggle/.git
synced 2024-11-25 08:41:51 +00:00

get survey scans into database

parent 1294444026
commit 2be3e4ce9d
@@ -1,3 +1,6 @@
import sys
sys.path.append("/home/expo/troggle")

DATABASE_ENGINE = 'mysql' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'troggle' # Or path to database file if using sqlite3.
DATABASE_USER = 'undemocracy' # Not used with sqlite3.
@@ -5,27 +8,43 @@ DATABASE_PASSWORD = 'aiGohsh5' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.

SURVEX_DATA = '/home/mjg/loser/'
SURVEX_DATA = '/home/expo/loser/'
CAVERN = 'cavern'
EXPOWEB = '/home/mjg/expoweb/'
SURVEYS = '/home/mjg/surveys/'
EXPOWEB = '/home/expo/expoweb/'
SURVEYS = '/home/expo/'
SURVEY_SCANS = '/home/expo/expoimages/'
FILES = '/home/expo/expoimages'

SURVEYS_URL = 'http://framos.lawoftheland.co.uk/troggle/survey_scans/'
FILES = "http://framos.lawoftheland.co.uk/troggle/survey_files/"
SURVEYS_URL = 'http://troggle.cavingexpedition.com/survey_scans/'

SVX_URL = 'http://framos.lawoftheland.co.uk/troggle/survex/'

PYTHON_PATH = '/home/mjg/expoweb/troggle/'
PYTHON_PATH = '/home/expo/troggle/'

MEDIA_URL = 'http://framos.lawoftheland.co.uk/troggle/site_media/'
MEDIA_URL = 'http://troggle.cavingexpedition.com/site_media/'

MEDIA_ROOT = '/home/mjg/expoweb/troggle/media/'
MEDIA_ROOT = '/home/expo/troggle/media/'
MEDIA_ADMIN_DIR = '/usr/lib/python2.4/site-packages/django/contrib/admin/media/'

URL_ROOT = "http://framos.lawoftheland.co.uk/troggle/"
URL_ROOT = "http://troggle.cavingexpedition.com/"

TEMPLATE_DIRS = (
    "/home/mjg/expoweb/troggle/templates",
    "/home/expo/troggle/templates",
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

LOGFILE = '/home/expo/troggle/troggle_log.txt'

FEINCMS_ADMIN_MEDIA='/site_media/feincms/'

EMAIL_HOST = "smtp.gmail.com"

EMAIL_HOST_USER = "cuccexpo@gmail.com"

EMAIL_HOST_PASSWORD = "khvtffkhvtff"

EMAIL_PORT=587

EMAIL_USE_TLS = True
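For orientation, a minimal sketch (not part of this commit) of how the parser code further down resolves scan locations from these settings; the django.conf import path is an assumption, while the SURVEY_SCANS value and the "surveyscans"/"Surveys.csv" names appear in the diff below.

import os
from django.conf import settings   # assumed import path; the values come from the settings above

def surveyscans_root():
    # '/home/expo/expoimages/surveyscans' with the SURVEY_SCANS value above
    return os.path.join(settings.SURVEY_SCANS, "surveyscans")

def surveys_csv_path():
    # readSurveysFromCSV() opens Surveys.csv directly under SURVEY_SCANS
    return os.path.join(settings.SURVEY_SCANS, "Surveys.csv")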
@@ -1,4 +1,4 @@
import sys, os, types, logging, stat
import sys, os, types, logging
#sys.path.append('C:\\Expo\\expoweb')
#from troggle import *
#os.environ['DJANGO_SETTINGS_MODULE']='troggle.settings'
@@ -24,13 +24,12 @@ def get_or_create_placeholder(year):
    placeholder_logbook_entry, newly_created = save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)
    return placeholder_logbook_entry

# dead
def readSurveysFromCSV():
    try: # could probably combine these two
        surveytab = open(os.path.join(settings.SURVEY_SCANS, "Surveys.csv"))
    except IOError:
        import cStringIO, urllib
        surveytab = cStringIO.StringIO(urllib.urlopen(settings.SURVEY_SCANS + "/Surveys.csv").read())
        surveytab = cStringIO.StringIO(urllib.urlopen(settings.SURVEY_SCANS + "Surveys.csv").read())
    dialect=csv.Sniffer().sniff(surveytab.read())
    surveytab.seek(0,0)
    surveyreader = csv.reader(surveytab,dialect=dialect)
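For illustration, a tiny standalone example (sample rows made up) of the csv.Sniffer pattern used in readSurveysFromCSV above: sniff the dialect from the stream, rewind, then parse.

import csv, cStringIO

sample = "Year\tWallet\tSurvey name\n2009\t11\tExample passage\n"   # made-up data
surveytab = cStringIO.StringIO(sample)
dialect = csv.Sniffer().sniff(surveytab.read())   # detects the tab delimiter
surveytab.seek(0, 0)                              # rewind, otherwise the reader sees no rows
for row in csv.reader(surveytab, dialect=dialect):
    print row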
@@ -76,7 +75,6 @@ def readSurveysFromCSV():

        logging.info("added survey " + survey[header['Year']] + "#" + surveyobj.wallet_number + "\r")

# dead
def listdir(*directories):
    try:
        return os.listdir(os.path.join(settings.SURVEYS, *directories))
@@ -136,13 +134,11 @@ def parseSurveyScans(year, logfile=None):
                continue
            scanObj.save()

# dead
def parseSurveys(logfile=None):
    readSurveysFromCSV()
    for year in Expedition.objects.filter(year__gte=2000): #expos since 2000, because paths and filenames were nonstandard before then
        parseSurveyScans(year)

# dead
def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the thumbnail engine can't handle them (uses PIL)
    file=Image.open(filePath)
    print filePath
@@ -152,11 +148,10 @@ def isInterlacedPNG(filePath): #We need to check for interlaced PNGs because the
    return False


# handles url or file, so we can refer to a set of scans on another server
# handles url or file
def GetListDir(sdir):
    res = [ ]
    if sdir[:7] == "http://":
        assert False, "Not written"
        s = urllib.urlopen(sdir)
    else:
        for f in os.listdir(sdir):
@@ -165,136 +160,44 @@ def GetListDir(sdir):
            res.append((f, ff, os.path.isdir(ff)))
    return res


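For illustration, a hedged sketch of consuming the (name, fullpath, isdir) tuples GetListDir returns; the example directory is an assumption built from the SURVEY_SCANS setting above.

# Assumed example path; GetListDir yields (filename, full path, is-directory) tuples.
for name, fullpath, isdir in GetListDir("/home/expo/expoimages/surveyscans/2009"):
    if isdir:
        print "wallet folder:", name        # e.g. a 2009#11 style wallet directory
    else:
        print "loose file:", fullpath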
def LoadListScansFile(survexscansfolder):
    gld = [ ]

    # flatten out any directories in these book files
    for (fyf, ffyf, fisdiryf) in GetListDir(survexscansfolder.fpath):
        if fisdiryf:
            gld.extend(GetListDir(ffyf))
        else:
            gld.append((fyf, ffyf, fisdiryf))

    for (fyf, ffyf, fisdiryf) in gld:
        assert not fisdiryf, ffyf
        if re.search("\.(?:png|jpg|jpeg)(?i)$", fyf):
            survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
            survexscansingle.save()

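For illustration, how the extension filter above decides which wallet files get a SurvexScanSingle record (the filenames are made-up samples):

import re

# Same pattern as in LoadListScansFile; (?i) makes the extension check case-insensitive.
print bool(re.search("\.(?:png|jpg|jpeg)(?i)$", "plan1.JPG"))   # True  -> a SurvexScanSingle is saved
print bool(re.search("\.(?:png|jpg|jpeg)(?i)$", "notes.txt"))   # False -> skipped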
# this iterates through the scans directories (either here or on the remote server)
# and builds up the models we can access later
def LoadListScans():
def LoadListScans(surveyscansdir):
    SurvexScanSingle.objects.all().delete()
    SurvexScansFolder.objects.all().delete()

    # first do the smkhs (large kh survey scans) directory
    survexscansfoldersmkhs = SurvexScansFolder(fpath=os.path.join(settings.SURVEY_SCANS, "smkhs"), walletname="smkhs")
    if os.path.isdir(survexscansfoldersmkhs.fpath):
        survexscansfoldersmkhs.save()
        LoadListScansFile(survexscansfoldersmkhs)


    # iterate into the surveyscans directory
    for f, ff, fisdir in GetListDir(os.path.join(settings.SURVEY_SCANS, "surveyscans")):
    for f, ff, fisdir in GetListDir(surveyscansdir):
        if not fisdir:
            continue

        # do the year folders
        if re.match("\d\d\d\d$", f):
            for fy, ffy, fisdiry in GetListDir(ff):
                assert fisdiry, ffy
                if not fisdiry:
                    assert fy == "index", ffy
                    continue
                survexscansfolder = SurvexScansFolder(fpath=ffy, walletname=fy)
                survexscansfolder.save()
                LoadListScansFile(survexscansfolder)

                # do the
                for fyf, ffyf, fisdiryf in GetListDir(ffy):
                    assert not fisdiryf, ffyf
                    survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
                    survexscansingle.save()
        elif f != "thumbs":
            survexscansfolder = SurvexScansFolder(fpath=ff, walletname=f)
            survexscansfolder.save()
            LoadListScansFile(survexscansfolder)
            gld = [ ]

            # flatten out any directories in these book files
            for (fyf, ffyf, fisdiryf) in GetListDir(ff):
                if fisdiryf:
                    gld.extend(GetListDir(ffyf))
                else:
                    gld.append((fyf, ffyf, fisdiryf))

            for (fyf, ffyf, fisdiryf) in gld:
                assert not fisdiryf, ffyf
                survexscansingle = SurvexScanSingle(ffile=ffyf, name=fyf, survexscansfolder=survexscansfolder)
                survexscansingle.save()

def FindTunnelScan(tunnelfile, path):
    scansfolder, scansfile = None, None
    mscansdir = re.search("(\d\d\d\d#\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg))$", path)
    if mscansdir:
        scansfolderl = SurvexScansFolder.objects.filter(walletname=mscansdir.group(1))
        if len(scansfolderl):
            assert len(scansfolderl) == 1
            scansfolder = scansfolderl[0]
        if scansfolder:
            scansfilel = scansfolder.survexscansingle_set.filter(name=mscansdir.group(2))
            if len(scansfilel):
                assert len(scansfilel) == 1
                scansfile = scansfilel[0]

        if scansfolder:
            tunnelfile.survexscansfolders.add(scansfolder)
        if scansfile:
            tunnelfile.survexscans.add(scansfile)

    elif path and not re.search("\.(?:png|jpg)$(?i)", path):
        name = os.path.split(path)[1]
        print "ttt", tunnelfile.tunnelpath, path, name
        rtunnelfilel = TunnelFile.objects.filter(tunnelname=name)
        if len(rtunnelfilel):
            assert len(rtunnelfilel) == 1, ("two paths with name of", path, "need more discrimination coded")
            rtunnelfile = rtunnelfilel[0]
            #print "ttt", tunnelfile.tunnelpath, path, name, rtunnelfile.tunnelpath
            tunnelfile.tunnelcontains.add(rtunnelfile)

    tunnelfile.save()

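To make the wallet-matching regex above concrete, a small worked example (the sample path is made up, in the yyyy#nn wallet naming scheme the pattern expects):

import re

m = re.search("(\d\d\d\d#\d+\w?|1995-96kh|92-94Surveybookkh|1991surveybook|smkhs)/(.*?(?:png|jpg))$",
              "surveyscans/2009#11/notes2.png")
print m.group(1)   # "2009#11"   -> wallet name used to look up SurvexScansFolder
print m.group(2)   # "notes2.png" -> filename used to look up SurvexScanSingle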
def SetTunnelfileInfo(tunnelfile):
    ff = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
    tunnelfile.filesize = os.stat(ff)[stat.ST_SIZE]
    fin = open(ff)
    ttext = fin.read()
    fin.close()

    mtype = re.search("<(fontcolours|sketch)", ttext)
    assert mtype, ff
    tunnelfile.bfontcolours = (mtype.group(1)=="fontcolours")
    tunnelfile.npaths = len(re.findall("<skpath", ttext))
    tunnelfile.save()

    # <tunnelxml tunnelversion="version2009-06-21 Matienzo" tunnelproject="ireby" tunneluser="goatchurch" tunneldate="2009-06-29 23:22:17">
    # <pcarea area_signal="frame" sfscaledown="12.282584" sfrotatedeg="-90.76982" sfxtrans="11.676667377221136" sfytrans="-15.677173422877454" sfsketch="204description/scans/plan(38).png" sfstyle="" nodeconnzsetrelative="0.0">
    for path, style in re.findall('<pcarea area_signal="frame".*?sfsketch="([^"]*)" sfstyle="([^"]*)"', ttext):
        FindTunnelScan(tunnelfile, path)

    # should also scan and look for survex blocks that might have been included
    # and also survex titles as well.

    tunnelfile.save()

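For illustration, the sfsketch extraction above applied to the sample <pcarea> line quoted in the comment; only the sfsketch and sfstyle attributes are captured.

import re

sample = '<pcarea area_signal="frame" sfscaledown="12.282584" sfrotatedeg="-90.76982" sfxtrans="11.676667377221136" sfytrans="-15.677173422877454" sfsketch="204description/scans/plan(38).png" sfstyle="" nodeconnzsetrelative="0.0">'
print re.findall('<pcarea area_signal="frame".*?sfsketch="([^"]*)" sfstyle="([^"]*)"', sample)
# -> [('204description/scans/plan(38).png', '')]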
def LoadTunnelFiles():
    tunneldatadir = settings.TUNNEL_DATA
    TunnelFile.objects.all().delete()
    tunneldirs = [ "" ]
    while tunneldirs:
        tunneldir = tunneldirs.pop()
        for f in os.listdir(os.path.join(tunneldatadir, tunneldir)):
            if f[0] == "." or f[-1] == "~":
                continue
            lf = os.path.join(tunneldir, f)
            ff = os.path.join(tunneldatadir, lf)
            if os.path.isdir(ff):
                tunneldirs.append(lf)
            elif f[-4:] == ".xml":
                tunnelfile = TunnelFile(tunnelpath=lf, tunnelname=os.path.split(f[:-4])[1])
                tunnelfile.save()

    for tunnelfile in TunnelFile.objects.all():
        SetTunnelfileInfo(tunnelfile)

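A hedged sketch of how these loaders might be driven together; the calling order and the surveyscans argument are assumptions, since this commit only defines the functions.

# Assumed driver, not part of this commit: build the scan-wallet models first,
# then the Tunnel sketch models, which link back to wallets via FindTunnelScan.
import os
from django.conf import settings   # assumed import path

LoadListScans(os.path.join(settings.SURVEY_SCANS, "surveyscans"))
LoadTunnelFiles()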
@@ -12,7 +12,7 @@
<tr>
  <td class="survexscansingle"><a href="{{survexscansingle.get_absolute_url}}">{{survexscansingle.name}}</a></td>
  <td>
    {% for survexblock in survexscansingle.survexblock_set %}
    {% for survexblock in survexscansingle.survexblock_set.all %}
      {{survexblock}}
    {% endfor %}
  </td>
@@ -20,4 +20,14 @@
{% endfor %}
</table>

{% endblock %}
<h3>Surveys referring to this wallet</h3>

<table>
{% for survexblock in survexscansfolder.survexblock_set.all %}
  <tr>
    <td><a href="{% url svx survexblock.survexfile.path %}">{{survexblock}}</a></td>
  </tr>
{% endfor %}
</table>

{% endblock %}
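On the template change above: a reverse-relation manager is not itself iterable in a Django template, which is why the loops use survexblock_set.all rather than survexblock_set. The equivalent query from Python looks like this (the wallet instance is assumed to exist already):

# survexscansfolder is an existing SurvexScansFolder instance (assumed);
# .survexblock_set is the reverse manager, .all() gives an iterable QuerySet.
for survexblock in survexscansfolder.survexblock_set.all():
    print survexblock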