mirror of
https://expo.survex.com/repositories/expoweb/.git/
synced 2024-11-28 02:01:55 +00:00
minor edits
This commit is contained in:
parent
ab17849725
commit
9ee80df3a1
12
aausee.html
12
aausee.html
@ -1,10 +1,13 @@
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>1623: Altausseer See to Hochklapf area</title>
|
||||
<link rel="stylesheet" type="text/css" href="css/main2.css" />
|
||||
</head>
|
||||
<body>
|
||||
<h1>Augstbach valley and villages</h1>
|
||||
|
||||
<body>
|
||||
<h1>N & NE shore of Altausseer See</h1>
|
||||
|
||||
@ -24,7 +27,7 @@ or much higher on the hillside. The closest to the lake, <a id="id2"
|
||||
href="noinfo/1623/2.htm">Wasserlöcher</a>, is associated with a
|
||||
significant cave, <a id="id1"
|
||||
href="noinfo/1623/1.htm">Liägerhöhle</a>. Another major stream
|
||||
course enters the lake further west, near some boat houses and places or
|
||||
course enters the lake further west, near some boat houses and places of
|
||||
refreshment at the end of the motorable road. This is normally dry, but
|
||||
apparently carries huge volumes of flood water from risings at about 1080m in
|
||||
<b>Kiler Lahn</b>, a steep gully. We know of no documented caves in this gully,
|
||||
@ -40,10 +43,11 @@ href="noinfo/1623/51.htm">a few caves</a><a id="id68" /> west of this
|
||||
path, below the cliffs of Weiße Wand.</p>
|
||||
|
||||
<hr />
|
||||
<div id="menu">
|
||||
<ul id="links">
|
||||
<li><a href="indxal.htm">Full Index to 1623</a></li>
|
||||
<li><a href="areas.htm">1623 Area/Subarea description</a></li>
|
||||
<li><a href="index.htm">Back to Expedition Intro page</a></li>
|
||||
</ul>
|
||||
</ul></div>
|
||||
</body>
|
||||
</html>
|
||||
|
18
augstb.html
18
augstb.html
@ -8,23 +8,17 @@
|
||||
<body>
|
||||
<h1>Augstbach valley and villages</h1>
|
||||
|
||||
<p>There are a small number of caves in the area; none of these were explored
|
||||
by CUCC, so they are stored in the "noinfo" protected part of the archive.</p>
|
||||
|
||||
<table class="trad">
|
||||
<tr><td><a id="id3">3</a></td><td><a href="noinfo/augstb/3.htm">Gellerofen (= Göller Loch)</a></td></tr>
|
||||
<tr><td><a id="id4">4</a></td><td><a href="noinfo/augstb/4.htm">Ritscherbachhöhle (am Lecker)</a></td></tr>
|
||||
<tr><td><a id="id53">53</a></td><td><a href="noinfo/augstb/53.htm">Gellerliäger</a></td></tr>
|
||||
<tr><td><a id="id54">54</a></td><td><a href="noinfo/augstb/54.htm">Seehöhle</a></td></tr>
|
||||
<tr><td><a id="id66">66</a></td><td><a href="noinfo/augstb/66.htm">Löckerweghöhle</a></td></tr>
|
||||
</table>
|
||||
<p>There are a small number of caves in the area<br>
|
||||
3, 4, 53, 54, 66; none of these were explored
|
||||
by CUCC, so they used to be stored in the "noinfo" protected part of the archive.
|
||||
Now they are stored along with all the rest.</p>
|
||||
|
||||
<hr />
|
||||
<!-- LINKS -->
|
||||
<div id="menu">
|
||||
<ul id="links">
|
||||
<li><a href="indxal.htm">Full Index to 1623</a></li>
|
||||
<li><a href="areas.htm">1623 Area/Subarea description</a></li>
|
||||
<li><a href="index.htm">Back to Expedition Intro page</a></li>
|
||||
</ul>
|
||||
</ul></div>
|
||||
</body>
|
||||
</html>
|
||||
|
@ -34,6 +34,12 @@ If a heading is in italics, then there are hidden items.
|
||||
|
||||
<h2>Cave Data Updating</h2>
|
||||
<p>To see the "handbook updating" and "troggle/system" to-do lists, go to <a href="todo.html">the other TO-DO list</a>
|
||||
<dl>
|
||||
<dt>Update Cave Number &lt;=&gt; Kataster list
|
||||
<dd>:expoweb:/noinfo/cave-number-index last updated in 2016
|
||||
<dt>Katasterable Or Not
|
||||
<dd>:expoweb:/noinfo/katasterable-or-not needs updating
|
||||
</dl>
|
||||
|
||||
<h3>Logbook fettling</h3>
|
||||
<dl>
|
||||
|
@ -17,7 +17,7 @@
|
||||
<li><a href="scriptscurrent.html#photos">updatephotos</a> generates navigable webpages from annual directories of photo images
|
||||
|
||||
<li><a href="scriptscurrent.html#area">make-areaindices.py</a> (unmaintained?)
|
||||
<li><a href="scriptscurrent.html#prosp">make-prospectingguide-new.py</a> and <a href="">prospecting_guide_short.py</a> - (unmaintained?)</li><br />
|
||||
<li><a href="scriptscurrent.html#prosp">make-prospectingguide-new.py</a> and <a href="">prospecting_guide_short.py</a> <a href="../../noinfo/make-areaindices.py">make-areaindices.py</a> - (unmaintained?)</li><br />
|
||||
<li><a href="../bankofexpo.html">boe</a> Bank of Expo (perl) runs the accounts preceding and during expo
|
||||
<li><a href="scriptscurrent.html#latex">bierbook.tex</a> LaTeX script for generating the bierbook - a new list of names and dates each year
|
||||
<li><a href="scriptscurrent.html#latex">seshbook.tex</a> LaTeX script for generating the seshbook - works from the same list of names
|
||||
|
252
noinfo/make.py
252
noinfo/make.py
@ -1,252 +0,0 @@
|
||||
#!/usr/bin/env python
"""Build status index pages for one year's survey-wallet directories.

Run from inside a year directory (e.g. .../surveyscans/2017/). For every
wallet sub-directory it reads (creating if absent) contents.json, back-fills
missing keys, then writes:
  - <wallet>/index.html        - per-wallet file list and outstanding issues
  - index.html                 - year summary (persons, wallets, scanning list)
  - <person>.html              - one page per person with outstanding wallets

Ported to Python 3 (print(), dict membership, urllib.parse.quote, no reduce).

2018-08-27 edited Philip Sargent
2017 originally by Martin Green
"""
import json
import os
import re
import urllib.parse

# Root of the survex data tree; "survex file" entries in contents.json are
# paths relative to this directory.
loser_dir = "/home/expo/loser/"

# ---- HTML templates (old-style %-formatting; some keys contain spaces) ----
html_base = "<html><body>%(body)s</body></html>"
html_year_index = html_base % {"body": "<H1>%(year)s surveys</H1><H2>Persons</H2><UL>%(persons)s</UL><H2>Wallets</H2><table>%(wallets)s</table><H2>Needing Scanning</H2><ul>%(needing scanning)s</ul><H2>Website needing updating</H2><ul>%(website needing updating)s</ul>"}
html_year_person = "<li><a href='%(person)s.html'>%(person)s</a><ul>%(complaints)s</ul></li>"
html_year_wallet_entry = "<tr><td><a href='%(walletindex)s'>%(walletname)s %(cave)s %(name)s</a></td> <td>%(complaints)s</td></tr>"
html_person_wallet_entry = "<li><a href='%(walletindex)s'>%(walletname)s</a> <ul>%(complaints)s</ul></li>"
html_year_scanning_entry = "<li><a href='%(walletindex)s'>%(walletname)s %(cave)s %(name)s</a></li>"
html_wallet_file_entry = "<li><a href='%(fileurl)s'>%(filename)s</a></li>"
html_wallet_index = html_base % {"body": "<H1>%(title)s : %(cave)s : %(name)s</H1><p>Date: %(date)s</p><p>People: %(people)s</p><p> <a href='/%(description)s'>Description</a></p><p>Survex file: %(survex)s</p><H2>Issues</H2>%(complaints)s<H2>Files</H2><UL>%(files)s</UL>"}
# Status-column fragments: non-empty text marks outstanding work.
html_survex_required = {True: "Survex ", False: ""}
html_plan_scanned = {True: "", False: "Plan "}
html_elev_scanned = {True: "", False: "Elev "}
html_description_written = {True: "", False: "Desc "}
html_qms_written = {True: "", False: "QMs "}
html_status = {True: "Issues: ", False: ""}
html_person = html_base % {"body": "<H1>%(person)s</H1><H2>Outstanding Wallets</H2><UL>%(wallets)s</UL>"}
html_complaint_items = "<li>%(count)i %(complaint)s</li>"
html_items = "<li>%s</li>"

# Template for a fresh contents.json; also used to back-fill missing keys.
# (Bug fix: the original listed "elev not required" twice.)
blank_json = {"survex file": "",
              "survex not required": False,
              "plan not required": False,
              "elev not required": False,
              "plan drawn": False,
              "elev drawn": False,
              "description written": False,
              "qms written": False,
              "website updated": False,
              "electronic survey": False,
              "date": "",
              "people": ["Unknown"],
              "description url": "",
              "cave": "",
              "name": ""}

# Wallet directory names look like "2017#03-204 RiggingGuide".
wallet_re = re.compile(r"(\d\d\d\d#\d\d)-(.*) (.*)")


def wallet_cave_name(item):
    """Return (wallet, cave, name) parsed from a directory name, or blanks
    when the name does not match the wallet naming convention."""
    m = wallet_re.match(item)
    return m.groups() if m else ("", "", "")


#need to use wallets as a dict/tuple (id,cave,name) - not sure how.
wallets = []                      # (dir, cave, name, survex_required, plan_scanned, elev_scanned, desc_written, qms_written)
wallets_needing_scanning = set()  # wallet dir names
website_needing_updating = set()  # wallet dir names
people = {}                       # person -> [(wallet dir, person_complaints)]

#use dir this file is in to get current year
path, year = os.path.split(os.path.dirname(os.path.realpath(__file__)))

for item in os.listdir("."):
    if not os.path.isdir(item):
        continue
    # Every real file in the wallet except the metadata/index files.
    files = [f for f in os.listdir(os.path.join(".", item))
             if f not in ("contents.json", "contents.json~", "index.html")
             and os.path.isfile(os.path.join(".", item, f))]
    contents_path = os.path.join(".", item, "contents.json")
    print("Reading file %s" % contents_path)
    if not os.path.isfile(contents_path):
        # First visit to this wallet: seed it with a blank metadata file.
        with open(contents_path, "w") as json_file:
            json.dump(blank_json, json_file, indent=1)
    with open(contents_path) as json_file:
        data = json.load(json_file)
    wallet, cave, name = wallet_cave_name(item)
    # Back-fill keys that older contents.json files may lack; rewrite only
    # when something actually changed.
    write_required = False
    for k, v in blank_json.items():
        if k not in data:
            if k == "cave":
                data[k] = cave
            elif k == "name":
                data[k] = name
            else:
                data[k] = v
            write_required = True
    if write_required:
        with open(contents_path, "w") as json_file:
            json.dump(data, json_file, indent=1)

    #make wallet descriptions

    #Survex
    # NOTE(review): condition preserved from the original; it also fires when
    # survex is marked "not required" but no file is named - confirm intended.
    survex_required = (data["survex not required"] and data["survex file"] == "") or \
        not (not data["survex not required"] and os.path.isfile(os.path.join(loser_dir, data["survex file"])))
    survex_complaint = ""
    if data["survex not required"] and data["survex file"] != "":
        survex_complaint = "Survex is not required and yet there is a survex file!"
    if not data["survex not required"] and data["survex file"] == "":
        survex_complaint = "A survex file is required, but has not been specified!"
    if not data["survex not required"] and not os.path.isfile(os.path.join(loser_dir, data["survex file"])):
        survex_complaint = "The specified survex file (%s) does not exist here!" % data["survex file"]
    complaints = []          # shown on the wallet's own index page
    person_complaints = []   # shown on each team member's page
    if survex_required:
        complaints.append(survex_complaint)
        person_complaints.append(survex_complaint)

    #Notes
    notes_scanned = any(f.startswith("note") for f in files)
    if not notes_scanned:
        complaints.append("The notes needs scanning (no noteN.jpg file found)")
        wallets_needing_scanning.add(item)

    #Plan drawing required
    plan_scanned = any(f.startswith("plan") for f in files)
    plan_drawing_required = not (plan_scanned or data["plan drawn"])
    if plan_drawing_required:
        complaints.append("The plan needs drawing (no planN.jpg file found)")
        person_complaints.append(" plan(s) needs drawing (no planN.jpg file found)")
    if not plan_drawing_required and not plan_scanned:
        complaints.append("The plan needs scanning (no planN.jpg file found)")
        wallets_needing_scanning.add(item)

    #Elev drawing required
    elev_scanned = any(f.startswith("elev") for f in files)
    elev_drawing_required = not (elev_scanned or data["elev drawn"])
    if elev_drawing_required:
        complaints.append("The elev needs drawing (no elevN.jpg file found)")
        person_complaints.append(" elev(s) needs drawing (no elevN.jpg file found)")
    if not elev_drawing_required and not elev_scanned:
        complaints.append("The elev needs scanning (no elevN.jpg file found)")
        wallets_needing_scanning.add(item)

    #Description
    if not data["description written"]:
        complaints.append("The description needs writing")
        person_complaints.append(" description(s) needs writing")

    #QMS
    if not data["qms written"]:
        complaints.append("The QMs needs writing")
        person_complaints.append(" set(s) of QMs needs writing")

    #Website
    if not data["website updated"]:
        complaints.append("The guidebook description on website needs updating")
        website_needing_updating.add(item)

    #Electronic Surveys
    if not data["electronic survey"]:
        complaints.append("Tunnel / Therion files need drawing")

    survex_description = data["survex file"] if data["survex file"] else "Not specified"

    with open(os.path.join(item, "index.html"), "w") as wallet_index_file:
        wallet_index_file.write(html_wallet_index % {
            "title": item,
            "cave": data["cave"],
            "name": data["name"],
            "date": data["date"],
            "people": "".join(" %s," % person for person in data["people"]),
            "description": data["description url"],
            "survex": survex_description,
            "complaints": "".join("<p>" + complaint + "</p>" for complaint in complaints),
            "files": "".join(html_wallet_file_entry % {"fileurl": urllib.parse.quote(f),
                                                       "filename": f}
                             for f in files)})
    wallets.append((item, data["cave"], data["name"], survex_required,
                    plan_scanned, elev_scanned,
                    data["description written"], data["qms written"]))

    #People
    # delete all person.html as we are recreating all the ones that matter and old ones have old data
    for person in data["people"]:
        if os.path.isfile(person + ".html"):
            os.remove(person + ".html")
    if person_complaints:
        for person in data["people"]:
            people.setdefault(person, []).append((item, person_complaints))


wallets.sort()
website_needing_updating = sorted(website_needing_updating)
wallets_needing_scanning = sorted(wallets_needing_scanning)

# person -> [(complaint, occurrence count), ...] de-duplicated per person.
person_summary = {}
for person, person_wallets in people.items():
    all_complaints = []
    for _w, wallet_complaints in person_wallets:
        all_complaints.extend(wallet_complaints)
    person_summary[person] = [(c, all_complaints.count(c)) for c in set(all_complaints)]


def scanning_list_html(wallet_dirs):
    """Render <li> entries for a list of wallet directory names.

    Bug fix: the original referenced the stale `cave`/`name` variables left
    over from the last main-loop iteration; each wallet's own cave and name
    are parsed from its directory name instead."""
    out = []
    for w in wallet_dirs:
        _wallet, cave, name = wallet_cave_name(w)
        out.append(html_year_scanning_entry % {
            "walletname": w,
            "cave": cave,
            "name": name,
            "walletindex": urllib.parse.quote(w) + "/index.html"})
    return "".join(out)


persons_html = "".join(
    html_year_person % {
        "person": person,
        "complaints": "".join(html_complaint_items % {"complaint": complaint,
                                                      "count": count}
                              for complaint, count in complaint_counts)}
    for person, complaint_counts in person_summary.items())

wallet_rows = []
for (w, cave, name, survex_required, plan_scanned, elev_scanned,
        description_written, qms_written) in wallets:
    # Bug fix: the original indexed html_status with the un-negated
    # description_written flag and omitted qms_written, so the "Issues:"
    # prefix could appear/disappear incorrectly.
    has_issues = (survex_required or not plan_scanned or not elev_scanned
                  or not description_written or not qms_written)
    wallet_rows.append(html_year_wallet_entry % {
        "walletname": w,
        "cave": cave,
        "name": name,
        "walletindex": urllib.parse.quote(w) + "/index.html",
        "complaints": html_status[has_issues]
                      + html_survex_required[survex_required]
                      + html_plan_scanned[plan_scanned]
                      + html_elev_scanned[elev_scanned]
                      + html_description_written[description_written]
                      + html_qms_written[qms_written]})

with open("index.html", "w") as year_index_file:
    year_index_file.write(html_year_index % {
        "year": year,
        "persons": persons_html,
        "needing scanning": scanning_list_html(wallets_needing_scanning),
        "website needing updating": scanning_list_html(website_needing_updating),
        "wallets": "".join(wallet_rows)})

# One page per person listing their outstanding wallets and complaints.
for person, item_complaint_list in people.items():
    with open(person + ".html", "w") as person_file:
        person_file.write(html_person % {
            "person": person,
            "wallets": "".join(
                html_person_wallet_entry % {
                    "walletname": w,
                    "walletindex": urllib.parse.quote(w) + "/index.html",
                    "complaints": "".join(html_items % complaint
                                          for complaint in wallet_complaints)}
                for w, wallet_complaints in item_complaint_list)})
|
Loading…
Reference in New Issue
Block a user