diff --git a/aausee.html b/aausee.html
index 91b18255d..2de597dc9 100644
--- a/aausee.html
+++ b/aausee.html
@@ -1,10 +1,13 @@
-
+
-
+
1623: Altausseer See to Hochklapf area
+
+

Augstbach valley and villages

+

N & NE shore of Altausseer See

@@ -24,7 +27,7 @@
or much higher on the hillside. The closest to the lake,
Wasserlöcher, is associated
with a significant cave, Liägerhöhle. Another major stream
-course enters the lake further west, near some boat houses and places or
+course enters the lake further west, near some boat houses and places of
refreshment at the end of the motorable road. This is normally dry, but
apparently carries huge volumes of flood water from risings at about 1080m
in Kiler Lahn, a steep gully. We know of no documented caves in this gully,
@@ -40,10 +43,11 @@
href="noinfo/1623/51.htm">a few caves west of this path, below the cliffs
of Weiße Wand.


+
diff --git a/augstb.html b/augstb.html
index d1b31797f..102743f71 100644
--- a/augstb.html
+++ b/augstb.html
@@ -8,23 +8,17 @@

Augstbach valley and villages

-

-There are a small number of caves in the area; none of these were explored
-by CUCC, so they are stored in the "noinfo" protected part of the archive.

-
-
-
-
-
-
-
-
-3   Gellerofen (= Göller Loch)
-4   Ritscherbachhöhle (am Lecker)
-53  Gellerliäger
-54  Seehöhle
-66  Löckerweghöhle
+

+There are a small number of caves in the area
+3, 4, 53, 54, 66; none of these were explored
+by CUCC, so they used to be stored in the "noinfo" protected part of the archive.
+Now they are stored along with all the rest.


-
+
diff --git a/handbook/computing/todo-data.html b/handbook/computing/todo-data.html
index ea3571bdf..559c0d130 100644
--- a/handbook/computing/todo-data.html
+++ b/handbook/computing/todo-data.html
@@ -34,6 +34,12 @@ If a heading is in italics, then there are hidden items.

Cave Data Updating

To see the "handbook updating" and "troggle/system" to-do lists, go to the other TO-DO list +

+
Update Cave Number <=> Kataster list +
:expoweb:/noinfo/cave-number-index last updated in 2016 +
Kataserable Or Not +
:expoweb:/noinfo/katasterable-or-not needs updating +

Logbook fettling

diff --git a/handbook/troggle/otherscripts.html b/handbook/troggle/otherscripts.html
index 000abdd30..9280c49c4 100644
--- a/handbook/troggle/otherscripts.html
+++ b/handbook/troggle/otherscripts.html
@@ -17,7 +17,7 @@
  • updatephotos generates navigable webpages from annual directories of photo images
- • make-areaindices.py (unmaintained?)
- • make-prospectingguide-new.py and prospecting_guide_short.py - (unmaintained?)
+ • make-prospectingguide-new.py and prospecting_guide_short.py make-areaindices.py - (unmaintained?)

  • boe Bank of Expo (perl) runs the accounts preceding and during expo
  • bierbook.tex LaTeX script for generating the bierbook - a new list of names and dates each year
  • seshbook.tex LaTeX script for generating the seshbook - works from the same list of names
diff --git a/noinfo/make.py b/noinfo/make.py
deleted file mode 100644
index 10a8e3056..000000000
--- a/noinfo/make.py
+++ /dev/null
@@ -1,252 +0,0 @@
-#!/usr/bin/env python
-import os, operator, urllib, json, re
-
-# 2018-08-27 edited Philip Sargent
-# 2017 originally by Martin Green
-
-loser_dir = "/home/expo/loser/"
-html_base = "%(body)s"
-html_year_index = html_base % {"body": "… %(year)s surveys … Persons … Wallets … %(wallets)s … Needing Scanning … Website needing updating …"}
-html_year_person = "… %(person)s …"
-html_year_wallet_entry = "%(walletname)s %(cave)s %(name)s %(complaints)s"
-html_person_wallet_entry = "… %(walletname)s …"
-html_year_scanning_entry = "… %(walletname)s %(cave)s %(name)s …"
-html_wallet_file_entry = "… %(filename)s …"
-html_wallet_index = html_base % {"body": "… %(title)s : %(cave)s : %(name)s … Date: %(date)s … People: %(people)s … Description … Survex file: %(survex)s … Issues … %(complaints)s … Files …"}
-html_survex_required = {True: "Survex ", False: ""}
-html_plan_scanned = {True: "", False: "Plan "}
-html_elev_scanned = {True: "", False: "Elev "}
-html_description_written = {True: "", False: "Desc "}
-html_qms_written = {True: "", False: "QMs "}
-html_status = {True: "Issues: ", False: ""}
-html_person = html_base % {"body": "… %(person)s … Outstanding Wallets …"}
-html_complaint_items = "… %(count)i %(complaint)s …"
-html_items = "… %s …"
-
-blank_json = {"survex file": "",
-              "survex not required": False,
-              "plan not required": False,
-              "elev not required": False,
-              "plan drawn": False,
-              "elev drawn": False,
-              "description written": False,
-              "qms written": False,
-              "website updated": False,
-              "electronic survey": False,
-              "elev not required": False,
-              "date": "",
-              "people": ["Unknown"],
-              "description url": "",
-              "cave": "",
-              "name": ""}
-
-#need to use wallets as a dict/tuple (id,cave,name) - not sure how.
-wallets = []
-wallets_needing_scanning = set()
-website_needing_updating = set()
-people = {}
-
-#use dir this file is in to get current year
-path,year = os.path.split(os.path.dirname(os.path.realpath(__file__)))
-
-for item in os.listdir("."):
-    if os.path.isdir(item):
-        files = []
-        for f in os.listdir(os.path.join(".", item)):
-            if f not in ["contents.json", "contents.json~","index.html"] and os.path.isfile(os.path.join(".", item, f)):
-                files.append(f)
-        contents_path = os.path.join(".", item, "contents.json")
-        print "Reading file %s" % (contents_path)
-        if not os.path.isfile(contents_path):
-            json_file = open(contents_path, "w")
-            json.dump(blank_json, json_file, indent = 1)
-            json_file.close()
-        json_file = open(contents_path)
-        #print json_file
-        data = json.load(json_file)
-        json_file.close()
-        write_required = False
-        try:
-            wallet, cave, name = re.match("(\d\d\d\d#\d\d)-(.*) (.*)", item).groups()
-        except:
-            wallet, cave, name = "", "", ""
-        #print data
-        for k, v in blank_json.items():
-            if not data.has_key(k):
-                if k == "cave":
-                    data[k] = cave
-                elif k == "name":
-                    data[k] = name
-                else:
-                    data[k] = v
-                write_required = True
-        #print write_required
-        if write_required:
-            json_file = open(contents_path, "w")
-            json.dump(data, json_file, indent = 1)
-            json_file.close()
-
-
-        #make wallet descriptions
-
-
-        #Survex
-        survex_required = (data["survex not required"] and data["survex file"] == "") or \
-                          not (not data["survex not required"] and os.path.isfile(os.path.join(loser_dir, data["survex file"])))
-        survex_complaint = ""
-        if data["survex not required"] and data["survex file"] != "":
-            survex_complaint = "Survex is not required and yet there is a survex file!"
-        if not data["survex not required"] and data["survex file"] == "":
-            survex_complaint = "A survex file is required, but has not been specified!"
-        if not data["survex not required"] and not os.path.isfile(os.path.join(loser_dir, data["survex file"])):
-            survex_complaint = "The specified survex file (%s) does not exist here!" % data["survex file"]
-        complaints = []
-        person_complaints = []
-        if survex_required:
-            complaints.append(survex_complaint)
-            person_complaints.append(survex_complaint)
-
-        #Notes
-        notes_scanned = reduce(operator.or_, [f.startswith("note") for f in files], False)
-        if not notes_scanned:
-            complaints.append("The notes needs scanning (no noteN.jpg file found)")
-            wallets_needing_scanning.add(item)
-
-        #Plan drawing required
-        plan_scanned = reduce(operator.or_, [f.startswith("plan") for f in files], False)
-        plan_drawing_required = not (plan_scanned or data["plan drawn"])
-        if plan_drawing_required:
-            complaints.append("The plan needs drawing (no planN.jpg file found)")
-            person_complaints.append(" plan(s) needs drawing (no planN.jpg file found)")
-        if not plan_drawing_required and not plan_scanned:
-            complaints.append("The plan needs scanning (no planN.jpg file found)")
-            wallets_needing_scanning.add(item)
-
-
-        #Elev drawing required
-        elev_scanned = reduce(operator.or_, [f.startswith("elev") for f in files], False)
-        elev_drawing_required = not (elev_scanned or data["elev drawn"])
-        if elev_drawing_required:
-            complaints.append("The elev needs drawing (no elevN.jpg file found)")
-            person_complaints.append(" elev(s) needs drawing (no elevN.jpg file found)")
-        if not elev_drawing_required and not elev_scanned:
-            complaints.append("The elev needs scanning (no elevN.jpg file found)")
-            wallets_needing_scanning.add(item)
-
-        #Description
-        if not data["description written"]:
-            complaints.append("The description needs writing")
-            person_complaints.append(" description(s) needs writing")
-
-        #QMS
-        if not data["qms written"]:
-            complaints.append("The QMs needs writing")
-            person_complaints.append(" set(s) of QMs needs writing")
-
-        #Website
-        if not data["website updated"]:
-            complaints.append("The guidebook description on website needs updating")
-            website_needing_updating.add(item)
-
-        #Electronic Surveys
-        if not data["electronic survey"]:
-            complaints.append("Tunnel / Therion files need drawing")
-
-        if data["survex file"]:
-            survex_description = data["survex file"]
-        else:
-            survex_description = "Not specified"
-
-        wallet_index_file = open(os.path.join(item, "index.html"), "w")
-        wallet_index_file.write(html_wallet_index % {"title": item,
-                                "cave": data["cave"],
-                                "name": data["name"],
-                                "date": data["date"],
-                                "people": reduce(operator.add, [" %s," % person for person in data["people"]], ""),
-                                "description": data["description url"],
-                                "survex": survex_description,
-                                "complaints": reduce(operator.add, ["…" + complaint + "…" for complaint in complaints], ""),
-                                "files": reduce(operator.add,
-                                                [html_wallet_file_entry % {"fileurl": urllib.quote(f),
-                                                                           "filename": f}
-                                                 for f
-                                                 in files],
-                                                "")})
-        wallet_index_file.close()
-        wallets.append((item, data["cave"], data["name"], survex_required, plan_scanned, elev_scanned, data["description written"], data["qms written"]))
-
-        #People
-
-        for person in data["people"]:
-            # delete all person.html as we are recreating all the ones that matter and old ones have old data
-            if os.path.isfile(person + ".html"):
-                os.remove(person + ".html")
-        if person_complaints:
-            for person in data["people"]:
-                if not people.has_key(person):
-                    people[person] = []
-                people[person].append((item, person_complaints))
-
-
-wallets.sort()
-website_needing_updating = list(website_needing_updating)
-website_needing_updating.sort()
-wallets_needing_scanning = list(wallets_needing_scanning)
-wallets_needing_scanning.sort()
-
-person_summary = []
-for person, person_wallets in people.items():
-    complaints = reduce(operator.add, [complaints for wallet, complaints in person_wallets], [])
-    complaints_summary = []
-    for complaint in set(complaints):
-        complaints_summary.append((complaint, complaints.count(complaint)))
-    person_summary.append((person, complaints_summary))
-
-person_summary = dict(person_summary)
-
-year_index_file = open("index.html", "w")
-year_index_file.write(html_year_index % {"year": year,
-                      "persons": reduce(operator.add, [html_year_person % {"person": person,
-                                                       "complaints": reduce(operator.add,
-                                                                            [html_complaint_items % {"complaint": complaint,
-                                                                                                     "count": count}
-                                                                             for complaint, count
-                                                                             in complaints],
-                                                                            "")}
-                                                       for person, complaints
-                                                       in person_summary.items()], ""),
-                      "needing scanning": reduce(operator.add, [html_year_scanning_entry % {"walletname": wallet,
-                                                                "cave": cave,
-                                                                "name": name,
-                                                                "walletindex": urllib.quote(wallet) + "/index.html"}
-                                                                for (wallet)
-                                                                in wallets_needing_scanning], ""),
-                      "website needing updating": reduce(operator.add, [html_year_scanning_entry % {"walletname": wallet,
-                                                                        "cave": cave,
-                                                                        "name": name,
-                                                                        "walletindex": urllib.quote(wallet) + "/index.html"}
-                                                                        for (wallet)
-                                                                        in website_needing_updating], ""),
-                      "wallets": reduce(operator.add,
-                                        [html_year_wallet_entry % {"walletname": wallet,
-                                                                   "cave": cave,
-                                                                   "name": name,
-                                                                   "walletindex": urllib.quote(wallet) + "/index.html",
-                                                                   "complaints": html_status[survex_required or not plan_scanned or not elev_scanned or description_written] + html_survex_required[survex_required] + html_plan_scanned[plan_scanned] + html_elev_scanned[elev_scanned] + html_description_written[description_written] + html_qms_written[qms_written] }
-                                         for (wallet, cave, name, survex_required, plan_scanned, elev_scanned, description_written, qms_written)
-                                         in wallets])})
-year_index_file.close()
-
-for person, item_complaint_list in people.items():
-    person_file = open(person + ".html", "w")
-    person_file.write(html_person % {"person": person,
-                      "wallets": reduce(operator.add, [html_person_wallet_entry % {"walletname": wallet,
-                                                       "walletindex": urllib.quote(wallet) + "/index.html",
-                                                       "complaints": reduce(operator.add,
-                                                                            [html_items % complaint
-                                                                             for complaint
-                                                                             in complaints],
-                                                                            "") }
-                                                       for wallet, complaints
-                                                       in item_complaint_list], "")
-                      })
-    person_file.close()