diff --git a/noinfo/walletscripts/mkdirs.sh b/noinfo/walletscripts/not-needed/mkdirs.sh
similarity index 100%
rename from noinfo/walletscripts/mkdirs.sh
rename to noinfo/walletscripts/not-needed/mkdirs.sh
diff --git a/noinfo/walletscripts/scp-index-pages.sh b/noinfo/walletscripts/scp-index-pages.sh
new file mode 100644
index 000000000..d4e9a4432
--- /dev/null
+++ b/noinfo/walletscripts/scp-index-pages.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# Because wallets.py carefully re-sets the timestamp for the generated walletindex.html files to be the same as the contents.json,
+# there is no way for rsync to recognise that new pages have been generated. So I use this scp script to update them
+# on the server after running wallets.py on my home machine
+# 2020-01-01 Philip Sargent
+# 2022-03-25 also copying contents.json as I have updated some of them
+
+date >scp-log.txt
+start=$SECONDS
+
+# copy myself
+scp scp-index-pages.sh expo@expo.survex.com:expofiles/surveyscans/ >>scp-log.txt
+
+# to do, add in seconds the number of days since 1970. Hmm.
+
+for i in 2012 2013 2014 2015 2016 2017 2018 2019; do
+#for i in 2019; do
+echo $i
+echo $i >>scp-log.txt
+ cd $i
+ for j in `ls -d ${i:0:4}*` ; do
+ echo $i/$j
+ scp $j/walletindex.html expo@expo.survex.com:expofiles/surveyscans/$i/$j >>../scp-log.txt
+ scp $j/contents.json expo@expo.survex.com:expofiles/surveyscans/$i/$j >>../scp-log.txt
+ done
+ cd ..
+duration=$(( SECONDS - start ))
+echo $duration seconds so far
+done
+duration=$(( SECONDS - start ))
+echo $duration seconds overall
\ No newline at end of file
diff --git a/noinfo/walletscripts/wallets.py b/noinfo/walletscripts/wallets.py
index 179e87c3b..4fe36c0b9 100644
--- a/noinfo/walletscripts/wallets.py
+++ b/noinfo/walletscripts/wallets.py
@@ -18,6 +18,7 @@ from pathlib import Path
# want it to appear in the reports under "UNKNOWN"
# 2021-04-24 Converted from python2 to python3 - god almighty did I really once think this was an
# acceptable python layout?
+# 2022-03-15 Changing single survex file to a list of survex files
'''This stand-alone programe processes all the wallet folders for one year and produces the
list of actions that need to be done.
@@ -30,14 +31,15 @@ It produces
It scans the subdirectories only one level deep
e.g. we are in /2020/ so it scans /2020/2020#01, /2020/2020#02 et seq.
-All the files in one folder must be for only one cave, but in principle coule be for several trips.
-However all the files in one folder should relate to a single survex file (troggle assumes this) and
+All the files in one folder must be for only one cave, but in principle could be for several trips.
+However all the files in one folder should relate to one or more survex files and
a survex file should relate to a single trip (we do this, the Austrians and Germans don't)
'''
loser_dir = "/home/expo/loser"
#loser_dir = "/mnt/d/CUCC-Expo/Loser/" # when running on Win10-WSL1
#loser_dir = "/media/philip/SD-huge/CUCC-Expo/loser/" # when running on xubuntu laptop 'barbie'
+#loser_dir = "D:/CUCC-Expo/Loser/" # when running on Win10-WSL1
# GLOBALS
wallets_needing_scanning = set()
@@ -55,7 +57,7 @@ html_year_wallet_entry = "
%(walletname)s %(cav
html_person_wallet_entry = "%(walletname)s \n"
html_year_scanning_entry = "%(walletname)s %(cave)s %(name)s\n"
html_wallet_file_entry = "%(filename)s\n"
-html_wallet_index = html_base % {"body": "%(title)s : %(cave)s : %(name)s\nList of trips: expedition/%(year)s - troggle-processed .svx files and logbook entries on server \nDate: %(date)s People: %(people)s \nCave Guidebook description - %(description_needed)s \n Survex file: Local: file:///%(loser_dir)s/%(survex)s Server: http://expo.survex.com/survexfile/%(survex)s Wallet index for this year Local location for ::loser:: repo specified on command line is %(loser_dir)s. \nIssues\n%(complaints)s\nFiles\n\n"}
+html_wallet_index = html_base % {"body": "%(title)s : %(cave)s : %(name)s\nList of trips: expedition/%(year)s - troggle-processed .svx files and logbook entries on server \nDate: %(date)s People: %(people)s \nCave Guidebook description - %(description_needed)s Wallet index for this year Local location for ::loser:: repo specified on command line is %(loser_dir)s. \nIssues\n%(complaints)s\nFiles\n\n"}
html_survex_required = {True: "Survex ", False: ""}
html_plan_scanned = {True: "", False: "Plan "}
html_elev_scanned = {True: "", False: "Elev "}
@@ -81,7 +83,7 @@ blank_json = {
"plan drawn": False,
"plan not required": False,
"qms written": False,
- "survex file": "",
+ "survex file": [],
"survex not required": False,
"website updated": False}
@@ -144,18 +146,30 @@ def do_item(year, item):
#make wallet descriptions
- #Survex
- not_req = (data["survex not required"] and data["survex file"] == "")
- req = (not data["survex not required"] and os.path.isfile(os.path.join(loser_dir, data["survex file"])))
- survex_required = not_req or not req
+ #Survex. Make it a list if it is not already
+ if data["survex file"]:
+ if not isinstance(data["survex file"], list):
+ data["survex file"] = [data["survex file"]]
survex_complaint = ""
- if data["survex not required"] and data["survex file"] != "":
+ if data["survex not required"] and data["survex file"] != []:
survex_complaint = "Survex is not required and yet there is a survex file!"
- if not data["survex not required"] and data["survex file"] == "":
+ if not data["survex not required"] and data["survex file"] == []:
survex_complaint = "A survex file is required, but has not been specified!"
- if not data["survex not required"] and not os.path.isfile(os.path.join(loser_dir, data["survex file"])):
- survex_complaint = "The specified survex file (%s) does not exist here!" % os.path.join(loser_dir, data["survex file"])
+
+ survexok = True
+ for svx in data["survex file"]:
+ if not (Path(loser_dir) / svx).is_file():
+ survexok = False
+ message = f"! {item} Incorrect survex file in wallet data: {svx} not found in LOSER repo"
+ print(message)
+ # DataIssue.objects.create(parser='scans', message=message, url=wurl) # set URL to this wallet folder
+ survex_complaint = f"The specified survex file {svx} does not exist here!"
+
+ not_needed = (data["survex not required"] and data["survex file"] == [])
+ req = not data["survex not required"] and survexok
+    survex_required = not_needed or not req
+
complaints = []
person_complaints = []
if survex_required:
@@ -217,17 +231,16 @@ def do_item(year, item):
else:
survex_description = "Not specified"
- wallet_index_file = open(os.path.join(item, "walletindex.html"), "w")
- wallet_index_file.write(html_wallet_index % {"title": item, "year": year,
+ with open(os.path.join(item, "walletindex.html"), "w") as wallet_index_file:
+ wallet_index_file.write(html_wallet_index % {"title": item, "year": year,
"cave": data["cave"],
"name": data["name"],
"date": data["date"],
"people": reduce(operator.add, [" %s," % person for person in data["people"]], ""),
"description": "http://expo.survex.com"+data["description url"],
- "description_needed": description_needed,
- "loser_dir": loser_dir,
- "loser_dirw": loser_dir[5].upper() + ':/' + loser_dir[7:],
- "survex": survex_description,
+ "description_needed": description_needed,
+ "loser_dir": loser_dir,
+ "loser_dirw": loser_dir[5].upper() + ':/' + loser_dir[7:],
"complaints": reduce(operator.add, ["" + complaint + " " for complaint in complaints], ""),
"files": reduce(operator.add,
[html_wallet_file_entry % {"fileurl": urllib.parse.quote(f),
@@ -235,15 +248,19 @@ def do_item(year, item):
for f
in files],
"")})
- wallet_index_file.close()
+ wallet_index_file.write("Survex file(s):")
+ for svx in data["survex file"]:
+ svxfile = Path(loser_dir) / svx
+ wallet_index_file.write(f" Local file: file:////{svxfile}")
+ wallet_index_file.write("\n Server file: ")
+ wallet_index_file.write(f" http://expo.survex.com/survexfile/{svx}
")
+
wallets.append((item, data["cave"], data["name"], survex_required, plan_scanned, elev_scanned, data["description written"], data["qms written"]))
# Set modification time to be the same as that of contents.json
index_file = item+"/walletindex.html"
os.utime(index_file, ( json_mtime,json_mtime))
-
#People
-
for person in data["people"]:
# delete all person.html as we are recreating all the ones that matter and old ones have old data
if os.path.isfile(person + ".html"):
|