import sys
import os
import subprocess
import types
import stat
import csv
import re
import datetime
import shutil, filecmp

from functools import reduce
from pathlib import Path

import settings
from troggle.core.models.survex import SingleScan, Wallet, DrawingFile
from troggle.core.models.troggle import DataIssue
from troggle.core.utils import save_carefully, GetListDir
from troggle.core.views.scans import datewallet

'''Searches through all the survey scans directories (wallets) in expofiles, looking for images to be referenced.
'''

contentsjson = "contents.json"
indexhtml = "walletindex.html"
git = settings.GIT

# to do: create a 'low priority' field, so that any such wallet does not appear in summary reports
wallet_blank_json = {
    "cave": "",
    "date": "",
    "description url": "/caves",
    "description written": False,
    "electronic survey": False,
    "elev drawn": False,
    "elev not required": False,
    "name": "",
    "people": [
        "Unknown"
    ],
    "plan drawn": False,
    "plan not required": False,
    "qms written": False,
    "survex file": [],
    "survex not required": False,
    "website updated": False}
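
# A purely illustrative example of what a filled-in contents.json might look like, using the
# same keys as wallet_blank_json above. Every value below (cave number, date, names, survex
# path) is hypothetical and is not taken from any real wallet.
#
# {
#     "cave": "1623-999",
#     "date": "2019-07-21",
#     "description url": "/caves",
#     "description written": true,
#     "electronic survey": true,
#     "elev drawn": true,
#     "elev not required": false,
#     "name": "Example Passage",
#     "people": ["Fred Bloggs", "Joe Soap"],
#     "plan drawn": true,
#     "plan not required": false,
#     "qms written": true,
#     "survex file": ["caves-1623/999/example.svx"],
#     "survex not required": false,
#     "website updated": true
# }
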
wallet_blank_html = '''<html><body><H1>Wallet WALLET</H1>
<p>List of trips: <a href="http://expo.survex.com/expedition/YEAR">expedition/YEAR</a>
- troggle-processed .svx files and logbook entries on server</p>
<p>Date: </p><p>People: Unknown,</p>
<p>Cave <a href='http://expo.survex.com/caves/'>Guidebook description</a>
- A description is indicated as being needed, so may need adding into this cave page.
<p>Survex file: not identified yet
<H2>Issues</H2>
<p>The description needs writing</p>
<p>The QMs need writing</p><p>The website is marked as needing updating (using the guidebook description)</p>
<p>Tunnel / Therion drawing files need drawing</p>
<H2>Files</H2>
<UL>
</UL>
</body></html>
'''
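
# A minimal, hypothetical sketch (not called anywhere in this module) of how the two blank
# templates above could be used to seed a brand-new wallet folder. The real seeding logic
# lives elsewhere in troggle, so the function name and substitution details here are
# assumptions for illustration only.
def _example_seed_blank_wallet(walletdir, walletname, year):
    import json
    wpath = Path(walletdir)
    wpath.mkdir(parents=True, exist_ok=True)
    # write an empty contents.json from the blank template
    with open(wpath / contentsjson, "w") as jf:
        json.dump(wallet_blank_json, jf, indent=1)
    # fill in the WALLET and YEAR placeholders in the blank index page
    html = wallet_blank_html.replace("WALLET", walletname).replace("YEAR", str(year))
    with open(wpath / indexhtml, "w") as hf:
        hf.write(html)
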
def CheckEmptyDate(wallet):
    '''If date is not set, get it from a linked survex file.
    Could also look at filedates for the scans in expofiles/surveyscans/ , but these can be re-set by copying.
    '''
    earliest = datetime.datetime.now().date()

    # This is not working, can't see why. A scans parser run now takes a very long time..
    #datewallet(wallet, earliest)
    return

def CheckEmptyPeople(wallet):
    '''If the people list is empty, copy them from the survex files: all of them.

    To be a Troggle model change; a many:many relationship between wallets and people,
    as well as being a list in the JSON file (which is the permanent repository). We want the many:many
    relationship so that we can filter wallets based on a person.

    For the moment, we will just get a list..
    '''
    return

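# A minimal, hypothetical sketch of reading the people list straight from the wallet's
# contents.json (the "permanent repository" mentioned above) rather than from survex files.
# It is illustration only and is not called by the parser; the function name and the
# fallback value are assumptions.
def _example_read_people(wallet):
    import json
    jsonfile = Path(wallet.fpath, contentsjson)
    if not jsonfile.is_file():
        return ["Unknown"]
    with open(jsonfile) as jf:
        wdata = json.load(jf)
    return wdata.get("people", ["Unknown"])
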
def LoadListScansFile(wallet):
    '''Loads the scan files (images and PDFs) in one wallet folder into SingleScan objects.'''
    gld = [ ]
    # flatten out any directories in these wallet folders - should not be any
    for (fyf, ffyf, fisdiryf) in GetListDir(wallet.fpath):
        if fisdiryf:
            gld.extend(GetListDir(ffyf))
        else:
            gld.append((fyf, ffyf, fisdiryf))

    c=0
    for (fyf, ffyf, fisdiryf) in gld:
        # inline (?i) flag moved to the start of the pattern: global flags mid-pattern are
        # deprecated in Python 3.11 and an error from 3.12
        if re.search(r"(?i)\.(?:png|jpg|jpeg|pdf|svg|gif)$", fyf):
            singlescan = SingleScan(ffile=ffyf, name=fyf, wallet=wallet)
            singlescan.save()
            c+=1
            # print a progress dot for every 10 scans loaded
            if c>=10:
                print(".", end='')
                c = 0

def CopyWalletData(wallet):
    '''Copies this wallet's contents.json to a parallel set of folders in the drawings repo.
    Refreshes everything during a full import, but it should all be up to date as every time
    wallet data gets saved it should also be copied across and committed.
    '''
    # not needed now that the drawings repo is the master copy of the wallet data
    return

    # unreachable code below, kept for reference: this is what the copy-and-commit used to do
    year = wallet.walletname[0:4]
    destfolder = Path(settings.DRAWINGS_DATA, 'walletjson', year, wallet.walletname)
    destjson = destfolder / contentsjson
    sourcejson = Path(wallet.fpath, contentsjson)
    if not os.path.exists(Path(destfolder)):
        try:
            os.makedirs(destfolder)
            print(f' - created folder {destfolder}..')
        except PermissionError:
            print(f"CANNOT save this JSON file.\nPERMISSIONS incorrectly set on server for this folder {destfolder}. Ask a nerd to fix this.")
    if os.path.isfile(sourcejson):
        try:
            if not os.path.isfile(destjson) or not filecmp.cmp(sourcejson, destjson):
                shutil.copy(sourcejson, destjson)
                print(f' - Copied {sourcejson} to {destjson}')
                dr_add = subprocess.run([git, "add", contentsjson], cwd=destfolder, capture_output=True, text=True)
                if dr_add.returncode != 0:
                    msgdata = 'Ask a nerd to fix this.\n\n' + dr_add.stderr + '\n\n' + dr_add.stdout + '\n\nreturn code: ' + str(dr_add.returncode)
                    message = f'CANNOT git on server for this file {contentsjson}. Edits saved but not added to git.\n\n' + msgdata
                    print(message)
                else:
                    # ideally we would commit many changes to many wallets just once. But most of the time only a couple of files will change.
                    dr_commit = subprocess.run([git, "commit", "-m", f'Update of {contentsjson} in wallet'], cwd=destfolder, capture_output=True, text=True)
                    # Note: git commit returns 1 when there is nothing to commit, so that case lands here too
                    if dr_commit.returncode != 0:
                        msgdata = 'Ask a nerd to fix this.\n\n' + dr_commit.stderr + '\n\n' + dr_commit.stdout + '\n\nreturn code: ' + str(dr_commit.returncode)
                        message = f'Error code with git on server for this {contentsjson}. File is copied, added to git, but NOT committed.\n\n' + msgdata
                        print(message)

        except PermissionError:
            print(f"CANNOT copy this JSON file.\nPERMISSIONS incorrectly set on server for this file {destjson}. Ask a nerd to fix this.")

def load_all_scans():
    '''This iterates through the scans directories (either here or on the remote server)
    and builds up the models we can access later.

    It does NOT read or validate anything in the JSON data attached to each wallet. Those checks
    are done at runtime, when a wallet is accessed, not at import time.
    '''
    print(' - Loading Survey Scans')

    SingleScan.objects.all().delete()
    Wallet.objects.all().delete()
    print(' - deleting all Wallet and SingleScan objects')
    DataIssue.objects.filter(parser='scans').delete()

    # first do the smkhs (large kh survey scans) directory
    # this seems never to be used ?!
    # We should load all the scans, even for nonstandard names.
    manywallets_smkhs = Wallet(fpath=os.path.join(settings.SCANS_ROOT, "../surveys/smkhs"), walletname="smkhs")
    print("smkhs", end=' ')
    if os.path.isdir(manywallets_smkhs.fpath):
        manywallets_smkhs.save()
        LoadListScansFile(manywallets_smkhs)
    else:
        print("smkhs NOT LOADED", end=' ')

    # iterate into the surveyscans directory
    print(' - ', end=' ')
    for walletname, fpath, fisdir in GetListDir(settings.SCANS_ROOT):
        if not fisdir:
            continue

        # do the year folders
        if re.match(r"\d\d\d\d$", walletname):
            print(f"{walletname}", end=' ')
            for walletname, fpath, fisdir in GetListDir(fpath):
                if fisdir:
                    wallet = Wallet(fpath=fpath, walletname=walletname)
                    # this is where we should load the contents.json for people so we can report on them later
                    # this is where we should record the year explicitly
                    # see line 347 of view/uploads.py, which needs refactoring for loading contentsjson
                    CheckEmptyDate(wallet)
                    CheckEmptyPeople(wallet)
                    wallet.save()
                    LoadListScansFile(wallet)
                    CopyWalletData(wallet)

        else:
            # but we should load all the scans, even for nonstandard names
            print(f'\n - IGNORE {walletname} - {fpath}')

    print("", flush=True)
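
# Typical invocation, given as an assumption: troggle normally runs its parsers from its
# database reset/import machinery, so the module path below is illustrative rather than
# definitive.
#
#   $ python manage.py shell
#   >>> from troggle.parsers.scans import load_all_scans
#   >>> load_all_scans()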