Mirror of https://expo.survex.com/repositories/expoweb/.git/ (synced 2024-11-22 07:11:55 +00:00)
Convert .format() to f-strings with flynt

parent aca92cb1af
commit aaf6a6c7cf
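
flynt mechanically rewrites %-interpolation and str.format() calls into equivalent f-strings without changing what the strings evaluate to, which is all this commit does. A minimal sketch of that equivalence, using one of the lines converted below (the value assigned to beginname is invented; only the formatting behaviour is the point):

    beginname = "p2007-71"                  # invented sample value

    old_style = "*end %s\n" % (beginname)   # %-interpolation, as before this commit
    new_style = f"*end {beginname}\n"       # f-string, as emitted by flynt

    assert old_style == new_style
    print(new_style, end="")

flynt is normally run directly over the files or directories to convert (for example `flynt scripts/`); the exact flags available depend on the installed version.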
@@ -45,10 +45,10 @@ for i, row in enumerate(rows[1:]):
 
     if cave != prevlinecave:
         if beginname != None:
-            fout.write("*end %s\n" % (beginname))
+            fout.write(f"*end {beginname}\n")
             beginname = None
         if row != i: #don't start new begin for last line
-            fout.write("\n*begin %s\n" % (cave))
+            fout.write(f"\n*begin {cave}\n")
             beginname = cave
 
 
@@ -86,23 +86,23 @@ for i, row in enumerate(rows[1:]):
 
     #if i < 10: print data
 
-    sfrom = "%s-%s" % (data['RGang'], data['RPunkt'])
-    sto = "%s-%s" % (data['Gang'], data['Punkt'])
+    sfrom = f"{data['RGang']}-{data['RPunkt']}"
+    sto = f"{data['Gang']}-{data['Punkt']}"
     if data['Description']:
-        fout.write(";%s\n" % data['Description'])
+        fout.write(f";{data['Description']}\n")
     if sfrom == sto:
         if data['RefX'] == data['X'] and data['RefY'] == data['Y'] and data['RefZ'] == data['Z']:
-            fout.write("*fix %s %f %f %f\n" % (sfrom, data['RefX']-450000, data['RefY']-200000, data['RefZ']))
+            fout.write(f"*fix {sfrom} {data['RefX'] - 450000:f} {data['RefY'] - 200000:f} {data['RefZ']:f}\n")
         else:
             print ("Fix 'leg' with non-matching co-ordinates - line i\n")
     else:
-        fout.write("%s %s\t%s\t%s\t%s\n" % (sfrom, sto, data['Tape'], bearing, slope))
+        fout.write(f"{sfrom} {sto}\t{data['Tape']}\t{bearing}\t{slope}\n")
 #    if sfrom not in sfromfixes:
 #        fout.write("*fix %s %f %f %f\n" % (sfrom, data['RefX']-450000, data['RefY']-200000, data['RefZ']))
 #        sfromfixes.add(sfrom)
 
 if beginname != None:
-    fout.write("*end %s\n" % (beginname))
+    fout.write(f"*end {beginname}\n")
 
 fout.write("\n*end 1626\n")
 fout.close()
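
The *fix line in the hunk above is the only conversion in this commit that needs explicit format specs: each %f in the old string becomes a :f spec inside the replacement field, so the fixed-point rendering of the coordinates is preserved. A small illustrative check (the coordinates and station name are invented; only the %f vs :f behaviour matters):

    ref_x, ref_y, ref_z = 451234.5, 283456.25, 1622.0   # invented coordinates
    sfrom = "136-7"                                      # invented station name

    old = "*fix %s %f %f %f\n" % (sfrom, ref_x - 450000, ref_y - 200000, ref_z)
    new = f"*fix {sfrom} {ref_x - 450000:f} {ref_y - 200000:f} {ref_z:f}\n"

    assert old == new   # both render six-decimal fixed point, e.g. 1234.500000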
@@ -103,7 +103,7 @@ def do_item(year, item):
     contents_path = os.path.join(".", item, "contents.json")
     #print "Trying to read file %s" % (contents_path)
     if not os.path.isfile(contents_path):
-        print("Creating file %s from template" % (contents_path))
+        print(f"Creating file {contents_path} from template")
         json_file = open(contents_path, "w")
         json.dump(blank_json, json_file, sort_keys=True, indent = 1)
         json_file.close()
@@ -112,7 +112,7 @@ def do_item(year, item):
     try:
         data = json.load(json_file)
     except:
-        print("FAILURE parsing JSON file %s" % (contents_path))
+        print(f"FAILURE parsing JSON file {contents_path}")
         # Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
         raise
     if not data["people"]:
@@ -135,7 +135,7 @@ def do_item(year, item):
             write_required = True
     #print write_required
     if write_required:
-        print("Writing file %s" % (contents_path))
+        print(f"Writing file {contents_path}")
         json_file = open(contents_path, "w")
         json.dump(data, json_file, indent = 1)
         json_file.close()
@@ -236,7 +236,7 @@ def do_item(year, item):
         "cave": data["cave"],
         "name": data["name"],
         "date": data["date"],
-        "people": reduce(operator.add, [" %s," % person for person in data["people"]], ""),
+        "people": reduce(operator.add, [f" {person}," for person in data["people"]], ""),
         "description": "http://expo.survex.com"+data["description url"],
         "description_needed": description_needed,
         "loser_dir": loser_dir,
@@ -65,7 +65,7 @@ for r in lines[1:]:
     if mug:
         if not (os.path.isfile(mug)):
             print("ERROR: --------------- mug file does not exist: ", mug, file=sys.stderr)
-    if mug: output += '<a href=%s><img alt=":-)" src="i/mug.png" /></a>' % mug
+    if mug: output += f'<a href={mug}><img alt=":-)" src="i/mug.png" /></a>'
     output += '</td>'
     for y in range(len(years)):
         if(years[y]):
@@ -80,7 +80,7 @@ class QmExtracter:
         grade = grade.upper()
         if grade not in ['A', 'B', 'C', 'D', 'E', 'X']:
             self.__print_error(svx_file, line,
-                               'Unknown QM grade ‘%s’' % grade)
+                               f'Unknown QM grade ‘{grade}’')
             continue
 
         # Sanitise the resolution station.
@@ -232,10 +232,9 @@ class QmExtracter:
                 }[grade]
             except KeyError:
                 grade_colour = '00'
-            formatted_grade = '\033[{}m{}\033[0m'.format(grade_colour,
-                                                         grade)
-            formatted_survey_name = '\033[4m{}\033[0m'.format(survey_name)
-            formatted_name = '\033[4m{}\033[0m'.format(name)
+            formatted_grade = f'\033[{grade_colour}m{grade}\033[0m'
+            formatted_survey_name = f'\033[4m{survey_name}\033[0m'
+            formatted_name = f'\033[4m{name}\033[0m'
         else:
             formatted_grade = grade
             formatted_survey_name = survey_name
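
In the hunk above the \033[…m ANSI colour escapes pass through the conversion unchanged inside the f-string. A short check (the grade_colour and grade values are invented; any SGR code behaves the same):

    grade_colour = '31'   # invented SGR colour code
    grade = 'B'           # invented grade

    old = '\033[{}m{}\033[0m'.format(grade_colour, grade)
    new = f'\033[{grade_colour}m{grade}\033[0m'

    assert old == new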
@@ -254,11 +253,10 @@ class QmExtracter:
         if n_printed == 0 and not qms:
             print('No QMs found')
         elif n_printed == 0:
-            print('No unresolved QMs found (but %u resolved ones were)' %
-                  len(qms))
+            print(f'No unresolved QMs found (but {len(qms)} resolved ones were)')
 
     def __print_error(self, svx_file, line, exc):
-        sys.stderr.write('%s: %s\n %s\n' % (svx_file, exc, line))
+        sys.stderr.write(f'{svx_file}: {exc}\n {line}\n')
 
 
 def main():
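
The last hunk also drops the %u conversion: in Python's %-formatting %u is just an alias for %d, so a plain {len(qms)} replacement field prints the same decimal count. For example (the qms list is an invented stand-in for the parsed QM entries; only its length matters):

    qms = ['qm 1', 'qm 2', 'qm 3']   # invented stand-in list

    old = 'No unresolved QMs found (but %u resolved ones were)' % len(qms)
    new = f'No unresolved QMs found (but {len(qms)} resolved ones were)'

    assert old == new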