diff --git a/noinfo/wallets.py b/noinfo/wallets.py
index d44917ed4..b5891a6d6 100755
--- a/noinfo/wallets.py
+++ b/noinfo/wallets.py
@@ -8,10 +8,10 @@ from datetime import datetime
# 2019-12-17 extra output of links to troggle-generated trip data
# 2019-12-31 bits to make website link-checker not barf so much. Added endswith() to .startswith() for notes, elev, plan filenames
# 2020-01-21 Now we are using Windows10-WSL1, +links to expedition logbook on every generated page
-# 2020-03-14 Adding timestamp to visible outputs
+# 2020-03-15 Adding timestamp to visible outputs, changing name of produced files to walletindex.html so that contents can be browsed
loser_dir = "/home/expo/loser"
-#loser_dir = "/mnt/d/CUCC-Expo/loser/" # when running on Win10-WSL1
+#loser_dir = "/mnt/d/CUCC-Expo/Loser/" # when running on Win10-WSL1
#loser_dir = "/media/philip/SD-huge/CUCC-Expo/loser/" # when running on xubuntu laptop 'barbie'
if len(sys.argv) > 1 :
@@ -27,13 +27,13 @@ drawings_dir = loser_dir[0:len(loser_dir)-5] + "drawings"
print "Drawings repo (for drawings files) is assumed to be in: " + drawings_dir + "/"
html_base = "%(body)s"
-html_year_index = html_base % {"body": "%(year)s surveys: wallets status\nList of trips: expedition/%(year)s - troggle-processed .svx files and logbook entries on server\nAs of %(timestamp)s\nPersons\n\nWallets\n\nNeeding Scanning\n\nWebsite (Guidebook description) needing updating\n\n\n%(website needing updating)s\n"}
+html_year_index = html_base % {"body": "%(year)s surveys: wallets status\nList of trips: expedition/%(year)s - troggle-processed .svx files and logbook entries on server\nAs of %(timestamp)s\nPersons\n\nWallets\n\nNeeding Scanning\n\nWebsite (Guidebook description) needing updating\n\n\n%(website needing updating)s\n"}
html_year_person = "%(person)s\n"
html_year_wallet_entry = "%(walletname)s %(cave)s %(name)s | %(complaints)s |\n"
html_person_wallet_entry = "%(walletname)s \n"
html_year_scanning_entry = "%(walletname)s %(cave)s %(name)s\n"
html_wallet_file_entry = "%(filename)s\n"
-html_wallet_index = html_base % {"body": "%(title)s : %(cave)s : %(name)s\nList of trips: expedition/%(year)s - troggle-processed .svx files and logbook entries on server\nDate: %(date)s People: %(people)s\nCave Guidebook description - %(description_needed)s \nSurvex file: Local: file:///%(loser_dir)s/%(survex)s Server: http://expo.survex.com/survexfile/%(survex)s Wallet index for this year Local location for ::loser:: repo specified on command line is %(loser_dir)s. \nIssues\n%(complaints)s\nFiles\n\n"}
+html_wallet_index = html_base % {"body": "%(title)s : %(cave)s : %(name)s\nList of trips: expedition/%(year)s - troggle-processed .svx files and logbook entries on server\nDate: %(date)s People: %(people)s\nCave Guidebook description - %(description_needed)s \nSurvex file: Local: file:///%(loser_dir)s/%(survex)s Server: http://expo.survex.com/survexfile/%(survex)s Wallet index for this year Local location for ::loser:: repo specified on command line is %(loser_dir)s. \nIssues\n%(complaints)s\nFiles\n\n"}
html_survex_required = {True: "Survex ", False: ""}
html_plan_scanned = {True: "", False: "Plan "}
html_elev_scanned = {True: "", False: "Elev "}
@@ -81,23 +81,24 @@ for item in sorted(os.listdir(".")):
if os.path.isdir(item):
files = []
for f in os.listdir(os.path.join(".", item)):
- if f not in ["contents.json", "contents.json~","index.html"] and os.path.isfile(os.path.join(".", item, f)):
+ if f not in ["contents.json", "contents.json~","walletindex.html"] and os.path.isfile(os.path.join(".", item, f)):
files.append(f)
contents_path = os.path.join(".", item, "contents.json")
-# print "Trying to read file %s" % (contents_path)
+ #print "Trying to read file %s" % (contents_path)
if not os.path.isfile(contents_path):
print "Creating file %s from template" % (contents_path)
json_file = open(contents_path, "w")
json.dump(blank_json, json_file, sort_keys=True, indent = 1)
json_file.close()
- # print "Reading file %s" % (contents_path)
+ #print "Reading file %s" % (contents_path)
json_file = open(contents_path)
- #print json_file
- data = json.load(json_file)
- if not data["people"]:
- data["people"]=["NOBODY"]
-
-
+ try:
+ data = json.load(json_file)
+ except:
+ print "FAILURE parsing JSON file %s" % (contents_path)
+ # Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
+ if not data["people"]:
+ data["people"]=["NOBODY"]
json_file.close()
write_required = False
try:
@@ -199,7 +200,7 @@ for item in sorted(os.listdir(".")):
else:
survex_description = "Not specified"
- wallet_index_file = open(os.path.join(item, "index.html"), "w")
+ wallet_index_file = open(os.path.join(item, "walletindex.html"), "w")
wallet_index_file.write(html_wallet_index % {"title": item, "year": year,
"cave": data["cave"],
"name": data["name"],
@@ -220,7 +221,7 @@ for item in sorted(os.listdir(".")):
wallet_index_file.close()
wallets.append((item, data["cave"], data["name"], survex_required, plan_scanned, elev_scanned, data["description written"], data["qms written"]))
# Set modification time to be the same as that of contents.json
- index_file = item+"/index.html"
+ index_file = item+"/walletindex.html"
os.utime(index_file, ( json_mtime,json_mtime))
@@ -253,7 +254,7 @@ for person, person_wallets in people.items():
person_summary = dict(person_summary)
-year_index_file = open("index.html", "w")
+year_index_file = open("walletindex.html", "w")
year_index_file.write(html_year_index % {"year": year, "timestamp": timestamp, "persons": reduce(operator.add, [html_year_person % {"person": person,
"complaints": reduce(operator.add,
[html_complaint_items % {"complaint": complaint,
@@ -266,20 +267,20 @@ year_index_file.write(html_year_index % {"year": year, "timestamp": timestamp, "
"needing scanning": reduce(operator.add, [html_year_scanning_entry % {"walletname": wallet,
"cave": cave,
"name": name,
- "walletindex": urllib.quote(wallet) + "/index.html"}
+ "walletindex": urllib.quote(wallet) + "/walletindex.html"}
for (wallet)
in wallets_needing_scanning], ""),
"website needing updating": reduce(operator.add, [html_year_scanning_entry % {"walletname": wallet,
"cave": cave,
"name": name,
- "walletindex": urllib.quote(wallet) + "/index.html"}
+ "walletindex": urllib.quote(wallet) + "/walletindex.html"}
for (wallet)
in website_needing_updating], ""),
"wallets": reduce(operator.add,
[html_year_wallet_entry % {"walletname": wallet,
"cave": cave,
"name": name,
- "walletindex": urllib.quote(wallet) + "/index.html",
+ "walletindex": urllib.quote(wallet) + "/walletindex.html",
"complaints": html_status[survex_required or not plan_scanned or not elev_scanned or description_written] + html_survex_required[survex_required] + html_plan_scanned[plan_scanned] + html_elev_scanned[elev_scanned] + html_description_written[description_written] + html_qms_written[qms_written] }
for (wallet, cave, name, survex_required, plan_scanned, elev_scanned, description_written, qms_written)
in wallets])})
@@ -289,7 +290,7 @@ for person, item_complaint_list in people.items():
person_file = open(person + ".html", "w")
person_file.write(html_person % {"person": person, "year": year, "timestamp": timestamp,
"wallets": reduce(operator.add, [html_person_wallet_entry % {"walletname": wallet,
- "walletindex": urllib.quote(wallet) + "/index.html",
+ "walletindex": urllib.quote(wallet) + "/walletindex.html",
"complaints": reduce(operator.add,
[html_items % complaint
for complaint
diff --git a/svx-refs.err.html b/svx-refs.err.html
index 21b904520..70ff867a6 100644
--- a/svx-refs.err.html
+++ b/svx-refs.err.html
@@ -53,6 +53,4 @@ mkdir /mnt/f/expofiles/surveyscans/2040/2040#24
mkdir /mnt/f/expofiles/surveyscans/2040/2040#27
mkdir /mnt/f/expofiles/surveyscans/2040/2040#28
mkdir /mnt/f/expofiles/surveyscans/2040/2040#31
-sed -i '/survex file/ s;"";"caves-1623/204/deepsouth/razor6.svx";' /mnt/f/expofiles/surveyscans/2003/2003#32/contents.json
-UNSET json 2003/2003#32 caves-1623/204/deepsouth/razor6.svx
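
For reference, a minimal sketch of the JSON-loading pattern that the new try/except in wallets.py is aiming at, written so that data is always defined even when a wallet's contents.json is missing or malformed. The helper name load_contents and the fall-back-to-blank_json behaviour are assumptions for illustration; only contents_path, blank_json and the "people" default come from the script itself.

import json

def load_contents(contents_path, blank_json):
    # Hypothetical helper, not in wallets.py: read a wallet's contents.json and
    # fall back to a copy of the blank template if the file is missing or
    # malformed, so that 'data' is always defined afterwards.
    try:
        with open(contents_path) as json_file:
            data = json.load(json_file)
    except (IOError, ValueError):  # json.load raises ValueError on bad JSON
        print("FAILURE parsing JSON file %s" % contents_path)
        data = dict(blank_json)
    if not data.get("people"):
        data["people"] = ["NOBODY"]
    return data

A caller would then use data = load_contents(contents_path, blank_json) in place of the open/json.load/close sequence shown in the hunk above.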