diff --git a/databaseReset.py b/databaseReset.py
index dbba3a5..4b45f36 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -161,7 +161,7 @@ class JobQueue():
         self.results = {}
         self.results_order=[
         "date","runlabel","reload", "caves", "people",
-        "logbooks", "surveyscans", "QMs", "survex"
+        "logbooks", "scans", "QMs", "survex", "tunnel", "surveys", "test", "makedirs", "redirect"
         ]
         for k in self.results_order:
             self.results[k]=[]
@@ -182,7 +182,7 @@ class JobQueue():
 
     def run(self):
         import json
-        tfile = "import_times.json"
+        tfile = "import_profile.json"
         if os.path.isfile(tfile):
             try:
                 f = open(tfile, "r")
@@ -194,13 +194,17 @@ class JobQueue():
             # Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
             f.close()
 
+        for i in self.queue:
+            print i, self.results[i[0]]
+            self.results[i[0]].append(1.0)
+
         print "** Running job ", self.runlabel
         for i in self.queue:
             #print "*- Running \"", i[0], "\""
             start = time.time()
             i[1]()
             duration = time.time()-start
-            #print "\n*- Ended \"", i[0], "\""
+            print "\n*- Ended \"", i[0], "\" %.1f seconds" % duration
             self.results[i[0]].append(duration)
             self.results["date"].append(start)
             self.results["runlabel"].append(self.runlabel)
@@ -210,21 +214,21 @@ class JobQueue():
         with open(tfile, 'w') as f:
             json.dump(self.results, f)
 
-        for i in self.results_order:
+        for k in self.results_order:
             percen=0
-            if i == "runlabel":
+            if k == "runlabel":
                 pass
-            if i =="date":
+            if k =="date":
                 # Calculate dates as days before present to one decimal place
                 pass
-            elif len(self.results[i])>0:
-                lst = self.results[i]
+            elif len(self.results[k])>3:
+                lst = self.results[k]
                 e = len(lst)-1
                 percen = 100* (lst[e] - lst[e-1])/lst[e-1]
                 if abs(percen) >0.1:
-                    print '%15s %8.1f%%' % (i, percen)
+                    print '%15s %8.1f%%' % (k, percen)
                 else:
-                    print '%15s ' % (i)
+                    print '%15s ' % (k)
         return True
 
 
@@ -238,14 +242,14 @@ def importtest():
     return True
 
 def usage():
-    print("""Usage is 'python databaseReset.py <command>'
+    print("""Usage is 'python databaseReset.py <command> [runlabel]'
     where command is:
          reset - this is normal usage, clear database and reread everything from files - time-consuming
          caves - read in the caves
          logbooks - read in just the logbooks
          people - read in the people from folk.csv
-         QMs - read in the QM files
-         reload_db - clear database i.e. delete everything
+         QMs - read in the QM csv files
+         reload_db - clear database (delete everything) and make empty tables
          scans - NOT the scanned surveynotes ?!
          survex - read in the survex files - all the survex blocks
          surveys - read in the scanned surveynotes
@@ -258,6 +262,9 @@ def usage():
          autologbooks - read in autologbooks (what are these?)
          dumplogbooks - write out autologbooks (not working?)
          test - testing...
+
+         and [runlabel] is an optional string identifying this run of the script
+         in the stored profiling data 'import_profile.json'
     """)
 
 if __name__ == "__main__":
@@ -269,7 +276,6 @@ if __name__ == "__main__":
 
     runlabel = sys.argv[len(sys.argv)-1]
     jq = JobQueue(runlabel)
-
    if "test" in sys.argv:
        jq.enq("test",importtest)
        jq.enq("caves",importtest)
@@ -284,13 +290,13 @@ if __name__ == "__main__":
    elif "QMs" in sys.argv:
        jq.enq("QMs",import_QMs)
    elif "reload_db" in sys.argv:
-       jq.enq("reload_db",reload_db)
+       jq.enq("reload",reload_db)
    elif "reset" in sys.argv:
        jq.enq("reload",reload_db)
        jq.enq("makedirs",make_dirs)
        jq.enq("redirect",pageredirects)
        jq.enq("caves",import_caves)
-       jq.enq("logbooks",import_people)
+       jq.enq("people",import_people)
        jq.enq("scans",import_surveyscans)
        jq.enq("logbooks",import_logbooks)
        jq.enq("QMs",import_QMs)
diff --git a/parsers/survex.py b/parsers/survex.py
index 1a6118c..3cda53a 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -15,7 +15,7 @@ from datetime import datetime
 line_leg_regex = re.compile(r"[\d\-+.]+$")
 
 def LoadSurvexLineLeg(survexblock, stardata, sline, comment, cave):
-    # The try catches here need replacing as they are relativly expensive
+    # The try catches here need replacing as they are relatively expensive
     ls = sline.lower().split()
     ssfrom = survexblock.MakeSurvexStation(ls[stardata["from"]])
     ssto = survexblock.MakeSurvexStation(ls[stardata["to"]])
@@ -401,4 +401,4 @@ def LoadPos():
             ss.z = float(z)
             ss.save()
         except:
-            print("%s not parsed in survex" % name)
+            print("%s not parsed in survex %s" % (name, pos))
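For context on the profiling this patch introduces: the file written by JobQueue.run() is plain JSON, one list per entry in results_order, with a new duration appended on each run. The sketch below is not part of the patch; the job names and sample figures are invented. It shows the assumed layout of 'import_profile.json' and reproduces the percentage-change report that run() prints once a job has more than three recorded timings.

    import json

    # Invented sample of the accumulated profile: one duration (seconds)
    # per run for each job, plus the parallel "date" and "runlabel" lists.
    results = {
        "date":     [1588000000.0, 1588086400.0, 1588172800.0, 1588259200.0],
        "runlabel": ["run1", "run2", "run3", "run4"],
        "caves":    [12.0, 11.8, 11.9, 13.1],
        "survex":   [140.2, 139.7, 141.0, 120.4],
    }

    # Persist the profile, as JobQueue.run() does after the jobs finish.
    with open("import_profile.json", 'w') as f:
        json.dump(results, f)

    # Percentage change of the latest run against the one before, mirroring
    # the report loop in run(): only computed once a job has more than three
    # timings, and only printed as a figure when it exceeds 0.1% either way.
    for k in ("caves", "survex"):
        lst = results[k]
        if len(lst) > 3:
            percen = 100 * (lst[-1] - lst[-2]) / lst[-2]
            if abs(percen) > 0.1:
                print('%15s %8.1f%%' % (k, percen))
            else:
                print('%15s ' % (k))

The runlabel stored alongside each run is simply the last command-line argument, so a labelled, profiled run would look like 'python databaseReset.py reset pre-expo-cleanup' (the label here is invented for illustration).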