mirror of
https://expo.survex.com/repositories/expoweb/.git/
synced 2024-11-25 16:52:00 +00:00
wallets now do list of survex files
This commit is contained in:
parent
c7f276c378
commit
cc6605d06b
31
noinfo/walletscripts/scp-index-pages.sh
Normal file
31
noinfo/walletscripts/scp-index-pages.sh
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
#/bin/sh
|
||||||
|
# Because wallets.py carefully re-sets the timestamp for the generated walletindex.html files to be the same as the contents.json,
|
||||||
|
# there is no way for rsync to recognise that new pages have been generated. So I use this scp script to update them
|
||||||
|
# on the server after running wallets.py on my home machine
|
||||||
|
# 2020-01-01 Philip Sargent
|
||||||
|
# 2022-03-25 also coopying contents.json as I have updated soem of them
|
||||||
|
|
||||||
|
date >scp-log.txt
|
||||||
|
start=$SECONDS
|
||||||
|
|
||||||
|
# copy myself
|
||||||
|
scp scp-index-pages.sh expo@expo.survex.com:expofiles/surveyscans/ >>../scp-log.txt
|
||||||
|
|
||||||
|
# to do, add in seconds the number of days since 1970. Hmm.
|
||||||
|
|
||||||
|
for i in 2012 2013 2014 2015 2016 2017 2018 2019; do
|
||||||
|
#for i in 2019; do
|
||||||
|
echo $i
|
||||||
|
echo $i >>scp-log.txt
|
||||||
|
cd $i
|
||||||
|
for j in `ls -d ${i:0:4}*` ; do
|
||||||
|
echo $i/$j
|
||||||
|
scp $j/walletindex.html expo@expo.survex.com:expofiles/surveyscans/$i/$j >>../scp-log.txt
|
||||||
|
scp $j/contents.json expo@expo.survex.com:expofiles/surveyscans/$i/$j >>../scp-log.txt
|
||||||
|
done
|
||||||
|
cd ..
|
||||||
|
duration=$(( SECONDS - start ))
|
||||||
|
echo $duration seconds so far
|
||||||
|
done
|
||||||
|
duration=$(( SECONDS - start ))
|
||||||
|
echo $duration seconds overall
|
@ -18,6 +18,7 @@ from pathlib import Path
|
|||||||
# want it to appear in the reports under "UNKNOWN"
|
# want it to appear in the reports under "UNKNOWN"
|
||||||
# 2021-04-24 Converted from python2 to python3 - god almighty did I really once think this was an
|
# 2021-04-24 Converted from python2 to python3 - god almighty did I really once think this was an
|
||||||
# acceptable python layout?
|
# acceptable python layout?
|
||||||
|
# 2022-03-15 Changing single survex file to a list of survex files
|
||||||
|
|
||||||
'''This stand-alone programme processes all the wallet folders for one year and produces the
|
'''This stand-alone programme processes all the wallet folders for one year and produces the
|
||||||
list of actions that need to be done.
|
list of actions that need to be done.
|
||||||
@ -30,14 +31,15 @@ It produces
|
|||||||
It scans the subdirectories only one level deep
|
It scans the subdirectories only one level deep
|
||||||
e.g. we are in /2020/ so it scans /2020/2020#01, /2020/2020#02 et seq.
|
e.g. we are in /2020/ so it scans /2020/2020#01, /2020/2020#02 et seq.
|
||||||
|
|
||||||
All the files in one folder must be for only one cave, but in principle could be for several trips.
|
All the files in one folder must be for only one cave, but in principle could be for several trips.
|
||||||
However all the files in one folder should relate to a single survex file (troggle assumes this) and
|
However all the files in one folder should relate to one or more survex files and
|
||||||
a survex file should relate to a single trip (we do this, the Austrians and Germans don't)
|
a survex file should relate to a single trip (we do this, the Austrians and Germans don't)
|
||||||
'''
|
'''
|
||||||
|
|
||||||
loser_dir = "/home/expo/loser"
|
loser_dir = "/home/expo/loser"
|
||||||
#loser_dir = "/mnt/d/CUCC-Expo/Loser/" # when running on Win10-WSL1
|
#loser_dir = "/mnt/d/CUCC-Expo/Loser/" # when running on Win10-WSL1
|
||||||
#loser_dir = "/media/philip/SD-huge/CUCC-Expo/loser/" # when running on xubuntu laptop 'barbie'
|
#loser_dir = "/media/philip/SD-huge/CUCC-Expo/loser/" # when running on xubuntu laptop 'barbie'
|
||||||
|
#loser_dir = "D:/CUCC-Expo/Loser/" # when running on Win10-WSL1
|
||||||
|
|
||||||
# GLOBALS
|
# GLOBALS
|
||||||
wallets_needing_scanning = set()
|
wallets_needing_scanning = set()
|
||||||
@ -55,7 +57,7 @@ html_year_wallet_entry = "<tr><td><a href='%(walletindex)s'>%(walletname)s %(cav
|
|||||||
html_person_wallet_entry = "<li><a href='%(walletindex)s'>%(walletname)s</a> <UL>\n%(complaints)s</ul></li>\n"
|
html_person_wallet_entry = "<li><a href='%(walletindex)s'>%(walletname)s</a> <UL>\n%(complaints)s</ul></li>\n"
|
||||||
html_year_scanning_entry = "<li><a href='%(walletindex)s'>%(walletname)s %(cave)s %(name)s</a></li>\n"
|
html_year_scanning_entry = "<li><a href='%(walletindex)s'>%(walletname)s %(cave)s %(name)s</a></li>\n"
|
||||||
html_wallet_file_entry = "<li><a href='%(fileurl)s'>%(filename)s</a></li>\n"
|
html_wallet_file_entry = "<li><a href='%(fileurl)s'>%(filename)s</a></li>\n"
|
||||||
html_wallet_index = html_base % {"body": "<H1>%(title)s : %(cave)s : %(name)s</H1>\n<p>List of trips: <a href=\"http://expo.survex.com/expedition/%(year)s\">expedition/%(year)s</a> - troggle-processed .svx files and logbook entries on server</p>\n<p>Date: %(date)s</p><p>People: %(people)s</p>\n<p>Cave <a href='%(description)s'>Guidebook description</a> - %(description_needed)s \n<p>Survex file:<br> <br> Local: <a href='file:///%(loser_dir)s/%(survex)s' download>file:///%(loser_dir)s/%(survex)s</a><br> Server: <a href='http://expo.survex.com/survexfile/%(survex)s' download>http://expo.survex.com/survexfile/%(survex)s</a></p><a href='../walletindex.html'>Wallet index for this year</a><br/>Local location for ::loser:: repo specified on command line is <a href='file:///%(loser_dir)s'>%(loser_dir)s</a>. </p>\n<H2>Issues</H2>\n%(complaints)s\n<H2>Files</H2>\n<UL>\n%(files)s</UL>\n"}
|
html_wallet_index = html_base % {"body": "<H1>%(title)s : %(cave)s : %(name)s</H1>\n<p>List of trips: <a href=\"http://expo.survex.com/expedition/%(year)s\">expedition/%(year)s</a> - troggle-processed .svx files and logbook entries on server</p>\n<p>Date: %(date)s</p><p>People: %(people)s</p>\n<p>Cave <a href='%(description)s'>Guidebook description</a> - %(description_needed)s <a href='../walletindex.html'>Wallet index for this year</a><br/>Local location for ::loser:: repo specified on command line is <a href='file:///%(loser_dir)s'>%(loser_dir)s</a>. </p>\n<H2>Issues</H2>\n%(complaints)s\n<H2>Files</H2>\n<UL>\n%(files)s</UL>\n"}
|
||||||
html_survex_required = {True: "Survex ", False: ""}
|
html_survex_required = {True: "Survex ", False: ""}
|
||||||
html_plan_scanned = {True: "", False: "Plan "}
|
html_plan_scanned = {True: "", False: "Plan "}
|
||||||
html_elev_scanned = {True: "", False: "Elev "}
|
html_elev_scanned = {True: "", False: "Elev "}
|
||||||
@ -81,7 +83,7 @@ blank_json = {
|
|||||||
"plan drawn": False,
|
"plan drawn": False,
|
||||||
"plan not required": False,
|
"plan not required": False,
|
||||||
"qms written": False,
|
"qms written": False,
|
||||||
"survex file": "",
|
"survex file": [],
|
||||||
"survex not required": False,
|
"survex not required": False,
|
||||||
"website updated": False}
|
"website updated": False}
|
||||||
|
|
||||||
@ -144,18 +146,30 @@ def do_item(year, item):
|
|||||||
|
|
||||||
#make wallet descriptions
|
#make wallet descriptions
|
||||||
|
|
||||||
#Survex
|
#Survex. Make it a list if it is not already
|
||||||
not_req = (data["survex not required"] and data["survex file"] == "")
|
if data["survex file"]:
|
||||||
req = (not data["survex not required"] and os.path.isfile(os.path.join(loser_dir, data["survex file"])))
|
if not isinstance(data["survex file"], list):
|
||||||
survex_required = not_req or not req
|
data["survex file"] = [data["survex file"]]
|
||||||
|
|
||||||
survex_complaint = ""
|
survex_complaint = ""
|
||||||
if data["survex not required"] and data["survex file"] != "":
|
if data["survex not required"] and data["survex file"] != []:
|
||||||
survex_complaint = "Survex is not required and yet there is a survex file!"
|
survex_complaint = "Survex is not required and yet there is a survex file!"
|
||||||
if not data["survex not required"] and data["survex file"] == "":
|
if not data["survex not required"] and data["survex file"] == []:
|
||||||
survex_complaint = "A survex file is required, but has not been specified!"
|
survex_complaint = "A survex file is required, but has not been specified!"
|
||||||
if not data["survex not required"] and not os.path.isfile(os.path.join(loser_dir, data["survex file"])):
|
|
||||||
survex_complaint = "The specified survex file (%s) does not exist here!" % os.path.join(loser_dir, data["survex file"])
|
survexok = True
|
||||||
|
for svx in data["survex file"]:
|
||||||
|
if not (Path(loser_dir) / svx).is_file():
|
||||||
|
survexok = False
|
||||||
|
message = f"! {item} Incorrect survex file in wallet data: {svx} not found in LOSER repo"
|
||||||
|
print(message)
|
||||||
|
# DataIssue.objects.create(parser='scans', message=message, url=wurl) # set URL to this wallet folder
|
||||||
|
survex_complaint = f"The specified survex file {svx} does not exist here!"
|
||||||
|
|
||||||
|
not_needed = (data["survex not required"] and data["survex file"] == [])
|
||||||
|
req = not data["survex not required"] and survexok
|
||||||
|
survex_required = not_needed or req
|
||||||
|
|
||||||
complaints = []
|
complaints = []
|
||||||
person_complaints = []
|
person_complaints = []
|
||||||
if survex_required:
|
if survex_required:
|
||||||
@ -217,17 +231,16 @@ def do_item(year, item):
|
|||||||
else:
|
else:
|
||||||
survex_description = "Not specified"
|
survex_description = "Not specified"
|
||||||
|
|
||||||
wallet_index_file = open(os.path.join(item, "walletindex.html"), "w")
|
with open(os.path.join(item, "walletindex.html"), "w") as wallet_index_file:
|
||||||
wallet_index_file.write(html_wallet_index % {"title": item, "year": year,
|
wallet_index_file.write(html_wallet_index % {"title": item, "year": year,
|
||||||
"cave": data["cave"],
|
"cave": data["cave"],
|
||||||
"name": data["name"],
|
"name": data["name"],
|
||||||
"date": data["date"],
|
"date": data["date"],
|
||||||
"people": reduce(operator.add, [" %s," % person for person in data["people"]], ""),
|
"people": reduce(operator.add, [" %s," % person for person in data["people"]], ""),
|
||||||
"description": "http://expo.survex.com"+data["description url"],
|
"description": "http://expo.survex.com"+data["description url"],
|
||||||
"description_needed": description_needed,
|
"description_needed": description_needed,
|
||||||
"loser_dir": loser_dir,
|
"loser_dir": loser_dir,
|
||||||
"loser_dirw": loser_dir[5].upper() + ':/' + loser_dir[7:],
|
"loser_dirw": loser_dir[5].upper() + ':/' + loser_dir[7:],
|
||||||
"survex": survex_description,
|
|
||||||
"complaints": reduce(operator.add, ["<p>" + complaint + "</p>" for complaint in complaints], ""),
|
"complaints": reduce(operator.add, ["<p>" + complaint + "</p>" for complaint in complaints], ""),
|
||||||
"files": reduce(operator.add,
|
"files": reduce(operator.add,
|
||||||
[html_wallet_file_entry % {"fileurl": urllib.parse.quote(f),
|
[html_wallet_file_entry % {"fileurl": urllib.parse.quote(f),
|
||||||
@ -235,15 +248,19 @@ def do_item(year, item):
|
|||||||
for f
|
for f
|
||||||
in files],
|
in files],
|
||||||
"")})
|
"")})
|
||||||
wallet_index_file.close()
|
wallet_index_file.write("<h2>Survex file(s):</h2>")
|
||||||
|
for svx in data["survex file"]:
|
||||||
|
svxfile = Path(loser_dir) / svx
|
||||||
|
wallet_index_file.write(f" Local file:<br> <a href='file:////{svxfile}' download>file:////{svxfile}</a>")
|
||||||
|
wallet_index_file.write("\n<br> Server file:<br>")
|
||||||
|
wallet_index_file.write(f" <a href='http://expo.survex.com/survexfile/{svx}' download>http://expo.survex.com/survexfile/{svx}<br><br></a>")
|
||||||
|
|
||||||
wallets.append((item, data["cave"], data["name"], survex_required, plan_scanned, elev_scanned, data["description written"], data["qms written"]))
|
wallets.append((item, data["cave"], data["name"], survex_required, plan_scanned, elev_scanned, data["description written"], data["qms written"]))
|
||||||
# Set modification time to be the same as that of contents.json
|
# Set modification time to be the same as that of contents.json
|
||||||
index_file = item+"/walletindex.html"
|
index_file = item+"/walletindex.html"
|
||||||
os.utime(index_file, ( json_mtime,json_mtime))
|
os.utime(index_file, ( json_mtime,json_mtime))
|
||||||
|
|
||||||
|
|
||||||
#People
|
#People
|
||||||
|
|
||||||
for person in data["people"]:
|
for person in data["people"]:
|
||||||
# delete all person.html as we are recreating all the ones that matter and old ones have old data
|
# delete all person.html as we are recreating all the ones that matter and old ones have old data
|
||||||
if os.path.isfile(person + ".html"):
|
if os.path.isfile(person + ".html"):
|
||||||
|
Loading…
Reference in New Issue
Block a user