Mirror of https://expo.survex.com/repositories/expoweb/.git/

Commit: wallets typos
@@ -8,10 +8,10 @@ from datetime import datetime
 # 2019-12-17 extra output of links to troggle-generated trip data
 # 2019-12-31 bits to make website link-checker not barf so much. Added endswith() to .startswith() for notes, elev, plan filenames
 # 2020-01-21 Now we are using Windows10-WSL1, +links to expedition logbook on every generated page
-# 2020-03-14 Adding timestamp tovisible outputs
+# 2020-03-15 Adding timestamp tovisible outputs, changing name of produced files to walletindex.html so that contents can be browsed

 loser_dir = "/home/expo/loser"
-#loser_dir = "/mnt/d/CUCC-Expo/loser/" # when running on Win10-WSL1
+#loser_dir = "/mnt/d/CUCC-Expo/Loser/" # when running on Win10-WSL1
 #loser_dir = "/media/philip/SD-huge/CUCC-Expo/loser/" # when running on xubuntu laptop 'barbie'

 if len(sys.argv) > 1 :
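Note: the commented-out loser_dir lines are per-machine defaults, and the wallet index template later in the file says the repo location is "specified on command line", so the if above presumably overrides the default from sys.argv. A hedged sketch of that pattern; the body of the if is not shown in this hunk, so the assignment is an assumption:

    # Sketch only (Python 3 syntax). The sys.argv[1] assignment is assumed,
    # not taken from the commit, since the body of the 'if' is outside this hunk.
    import sys

    loser_dir = "/home/expo/loser"        # default: checkout on the expo server
    if len(sys.argv) > 1:
        loser_dir = sys.argv[1]           # assumed: local path passed on the command line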
@@ -27,13 +27,13 @@ drawings_dir = loser_dir[0:len(loser_dir)-5] + "drawings"
 print "Drawings repo (for drawings files) is assumed to be in: " + drawings_dir + "/"

 html_base = "<html><body>%(body)s</body></html>"
-html_year_index = html_base % {"body": "<H1>%(year)s surveys: wallets status</H1>\n<p>List of trips: <a href=\"http://expo.survex.com/expedition/%(year)s\">expedition/%(year)s</a> - troggle-processed .svx files and logbook entries on server</p>\nAs of %(timestamp)s\n<H2>Persons</H2>\n<UL>\n%(persons)s</UL>\n<H2>Wallets</H2>\n<table>%(wallets)s</table>\n<H2>Needing Scanning</H2>\n<UL>\n%(needing scanning)s</ul>\n<H2>Website (Guidebook description) needing updating\n</H2>\n<UL>\n%(website needing updating)s</ul>\n"}
+html_year_index = html_base % {"body": "<H1>%(year)s surveys: wallets status</H1>\n<p>List of trips: <a href=\"http://expo.survex.com/expedition/%(year)s\">expedition/%(year)s</a> - troggle-processed .svx files and logbook entries on server</p>\nAs of %(timestamp)s\n<H2>Persons</H2>\n<UL>\n%(persons)s</UL>\n<H2>Wallets</H2>\n<table>%(wallets)s</table>\n<H2>Needing Scanning</H2>\n<UL>\n%(needing scanning)s</ul>\n<H2>Website (Guidebook description) needing updating\n</H2>\n<UL style=\"column-count: 3; \">\n%(website needing updating)s</ul>\n"}
 html_year_person = "<li><a href='%(person)s.html'>%(person)s</a><UL>\n%(complaints)s</ul></li>\n"
 html_year_wallet_entry = "<tr><td><a href='%(walletindex)s'>%(walletname)s %(cave)s %(name)s</a></td> <td>%(complaints)s</td></tr>\n"
 html_person_wallet_entry = "<li><a href='%(walletindex)s'>%(walletname)s</a> <UL>\n%(complaints)s</ul></li>\n"
 html_year_scanning_entry = "<li><a href='%(walletindex)s'>%(walletname)s %(cave)s %(name)s</a></li>\n"
 html_wallet_file_entry = "<li><a href='%(fileurl)s'>%(filename)s</a></li>\n"
-html_wallet_index = html_base % {"body": "<H1>%(title)s : %(cave)s : %(name)s</H1>\n<p>List of trips: <a href=\"http://expo.survex.com/expedition/%(year)s\">expedition/%(year)s</a> - troggle-processed .svx files and logbook entries on server</p>\n<p>Date: %(date)s</p><p>People: %(people)s</p>\n<p>Cave <a href='%(description)s'>Guidebook description</a> - %(description_needed)s \n<p>Survex file:<br> <br> Local: <a href='file:///%(loser_dir)s/%(survex)s' download>file:///%(loser_dir)s/%(survex)s</a><br> Server: <a href='http://expo.survex.com/survexfile/%(survex)s' download>http://expo.survex.com/survexfile/%(survex)s</a></p><a href='../index.html'>Wallet index for this year</a><br/>Local location for ::loser:: repo specified on command line is <a href='file:///%(loser_dir)s'>%(loser_dir)s</a>. </p>\n<H2>Issues</H2>\n%(complaints)s\n<H2>Files</H2>\n<UL>\n%(files)s</UL>\n"}
+html_wallet_index = html_base % {"body": "<H1>%(title)s : %(cave)s : %(name)s</H1>\n<p>List of trips: <a href=\"http://expo.survex.com/expedition/%(year)s\">expedition/%(year)s</a> - troggle-processed .svx files and logbook entries on server</p>\n<p>Date: %(date)s</p><p>People: %(people)s</p>\n<p>Cave <a href='%(description)s'>Guidebook description</a> - %(description_needed)s \n<p>Survex file:<br> <br> Local: <a href='file:///%(loser_dir)s/%(survex)s' download>file:///%(loser_dir)s/%(survex)s</a><br> Server: <a href='http://expo.survex.com/survexfile/%(survex)s' download>http://expo.survex.com/survexfile/%(survex)s</a></p><a href='../walletindex.html'>Wallet index for this year</a><br/>Local location for ::loser:: repo specified on command line is <a href='file:///%(loser_dir)s'>%(loser_dir)s</a>. </p>\n<H2>Issues</H2>\n%(complaints)s\n<H2>Files</H2>\n<UL>\n%(files)s</UL>\n"}
 html_survex_required = {True: "Survex ", False: ""}
 html_plan_scanned = {True: "", False: "Plan "}
 html_elev_scanned = {True: "", False: "Elev "}
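Note: these templates are composed with Python %-formatting over dicts, including keys that contain spaces such as %(needing scanning)s. A minimal sketch, not part of the commit, of how the nested substitution expands; the year value is made up:

    # The inner template is filled first, then inserted into html_base.
    html_base = "<html><body>%(body)s</body></html>"
    body = "<H1>%(year)s surveys: wallets status</H1>" % {"year": "2019"}
    page = html_base % {"body": body}
    # page == "<html><body><H1>2019 surveys: wallets status</H1></body></html>"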
@@ -81,7 +81,7 @@ for item in sorted(os.listdir(".")):
 if os.path.isdir(item):
 files = []
 for f in os.listdir(os.path.join(".", item)):
-if f not in ["contents.json", "contents.json~","index.html"] and os.path.isfile(os.path.join(".", item, f)):
+if f not in ["contents.json", "contents.json~","walletindex.html"] and os.path.isfile(os.path.join(".", item, f)):
 files.append(f)
 contents_path = os.path.join(".", item, "contents.json")
 #print "Trying to read file %s" % (contents_path)
@@ -92,12 +92,13 @@ for item in sorted(os.listdir(".")):
 json_file.close()
 #print "Reading file %s" % (contents_path)
 json_file = open(contents_path)
-#print json_file
+try:
 data = json.load(json_file)
+except:
+print "FAILURE parsing JSON file %s" % (contents_path)
+# Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
 if not data["people"]:
 data["people"]=["NOBODY"]


 json_file.close()
 write_required = False
 try:
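Note: the added lines guard the contents.json parse so one malformed wallet file reports its path instead of aborting the whole run. A minimal sketch of that guard, not part of the commit, in Python 3 syntax with a made-up wallet path:

    import json

    contents_path = "2019/2019#01/contents.json"   # hypothetical wallet folder
    data = {"people": []}                          # fallback if the JSON is broken
    try:
        with open(contents_path) as json_file:
            data = json.load(json_file)
    except ValueError:                             # json.JSONDecodeError subclasses ValueError
        print("FAILURE parsing JSON file %s" % contents_path)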
@@ -199,7 +200,7 @@ for item in sorted(os.listdir(".")):
 else:
 survex_description = "Not specified"

-wallet_index_file = open(os.path.join(item, "index.html"), "w")
+wallet_index_file = open(os.path.join(item, "walletindex.html"), "w")
 wallet_index_file.write(html_wallet_index % {"title": item, "year": year,
 "cave": data["cave"],
 "name": data["name"],
@@ -220,7 +221,7 @@ for item in sorted(os.listdir(".")):
 wallet_index_file.close()
 wallets.append((item, data["cave"], data["name"], survex_required, plan_scanned, elev_scanned, data["description written"], data["qms written"]))
 # Set modification time to be the same as that of contents.json
-index_file = item+"/index.html"
+index_file = item+"/walletindex.html"
 os.utime(index_file, ( json_mtime,json_mtime))


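Note: the os.utime call stamps each generated page with the modification time of its source contents.json, so the renamed walletindex.html keeps the same dating behaviour. A minimal sketch with hypothetical paths:

    import os

    # Copy the mtime of the metadata file onto the generated page.
    json_mtime = os.path.getmtime("2019/2019#01/contents.json")
    os.utime("2019/2019#01/walletindex.html", (json_mtime, json_mtime))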
@@ -253,7 +254,7 @@ for person, person_wallets in people.items():

 person_summary = dict(person_summary)

-year_index_file = open("index.html", "w")
+year_index_file = open("walletindex.html", "w")
 year_index_file.write(html_year_index % {"year": year, "timestamp": timestamp, "persons": reduce(operator.add, [html_year_person % {"person": person,
 "complaints": reduce(operator.add,
 [html_complaint_items % {"complaint": complaint,
@@ -266,20 +267,20 @@ year_index_file.write(html_year_index % {"year": year, "timestamp": timestamp, "
 "needing scanning": reduce(operator.add, [html_year_scanning_entry % {"walletname": wallet,
 "cave": cave,
 "name": name,
-"walletindex": urllib.quote(wallet) + "/index.html"}
+"walletindex": urllib.quote(wallet) + "/walletindex.html"}
 for (wallet)
 in wallets_needing_scanning], ""),
 "website needing updating": reduce(operator.add, [html_year_scanning_entry % {"walletname": wallet,
 "cave": cave,
 "name": name,
-"walletindex": urllib.quote(wallet) + "/index.html"}
+"walletindex": urllib.quote(wallet) + "/walletindex.html"}
 for (wallet)
 in website_needing_updating], ""),
 "wallets": reduce(operator.add,
 [html_year_wallet_entry % {"walletname": wallet,
 "cave": cave,
 "name": name,
-"walletindex": urllib.quote(wallet) + "/index.html",
+"walletindex": urllib.quote(wallet) + "/walletindex.html",
 "complaints": html_status[survex_required or not plan_scanned or not elev_scanned or description_written] + html_survex_required[survex_required] + html_plan_scanned[plan_scanned] + html_elev_scanned[elev_scanned] + html_description_written[description_written] + html_qms_written[qms_written] }
 for (wallet, cave, name, survex_required, plan_scanned, elev_scanned, description_written, qms_written)
 in wallets])})
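Note: the year index is assembled by rendering one HTML fragment per wallet and concatenating them with reduce(operator.add, ...). A minimal sketch of that pattern, not part of the commit; the wallet names are invented:

    import operator
    from functools import reduce   # also available in Python 2's functools

    html_year_scanning_entry = "<li>%(walletname)s</li>\n"
    wallets_needing_scanning = ["2019#01", "2019#07"]
    listing = reduce(operator.add,
                     [html_year_scanning_entry % {"walletname": w}
                      for w in wallets_needing_scanning], "")
    # listing == "<li>2019#01</li>\n<li>2019#07</li>\n"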
@@ -289,7 +290,7 @@ for person, item_complaint_list in people.items():
 person_file = open(person + ".html", "w")
 person_file.write(html_person % {"person": person, "year": year, "timestamp": timestamp,
 "wallets": reduce(operator.add, [html_person_wallet_entry % {"walletname": wallet,
-"walletindex": urllib.quote(wallet) + "/index.html",
+"walletindex": urllib.quote(wallet) + "/walletindex.html",
 "complaints": reduce(operator.add,
 [html_items % complaint
 for complaint

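Note: the wallet name is passed through urllib.quote before being used in the link because wallet names contain '#', which must be percent-encoded inside an href. A short sketch, shown with Python 3's equivalent of Python 2's urllib.quote:

    from urllib.parse import quote

    print(quote("2019#07") + "/walletindex.html")   # -> 2019%2307/walletindex.html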
@@ -53,6 +53,4 @@ mkdir /mnt/f/expofiles/surveyscans/2040/2040#24
 mkdir /mnt/f/expofiles/surveyscans/2040/2040#27
 mkdir /mnt/f/expofiles/surveyscans/2040/2040#28
 mkdir /mnt/f/expofiles/surveyscans/2040/2040#31
-sed -i '/survex file/ s;"";"caves-1623/204/deepsouth/razor6.svx";' /mnt/f/expofiles/surveyscans/2003/2003#32/contents.json
-UNSET json 2003/2003#32 caves-1623/204/deepsouth/razor6.svx
 </pre></body></html>

@@ -81,6 +81,7 @@
 2003#29 :: OK - svx filenames match - caves-1623/240/240.svx
 2003#30 :: OK - svx filenames match - surface/1623/244.svx
 2003#31 :: OK - svx filenames match - caves-1623/245/245.svx
+2003#32 :: OK - svx filenames match - caves-1623/204/deepsouth/razor6.svx
 2003#33 :: OK - svx filenames match - caves-1623/244/skinny.svx
 2003#35 :: OK - svx filenames match - caves-1623/204/gaffered/sirens2.svx
 2003#36 :: OK - svx filenames match - caves-1623/204/rhino/faith.svx