mirror of https://expo.survex.com/repositories/expoweb/.git/
synced 2024-11-21 23:01:55 +00:00
Convert .format() to f-strings with flynt
This commit is contained in:
parent
aca92cb1af
commit
aaf6a6c7cf
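
flynt is a codemod tool that rewrites old-style % interpolation and str.format() calls into f-strings while leaving the rendered text unchanged; it is normally pointed at a source tree and edits files in place (for example, flynt . from the repository root - an example invocation, not necessarily the one used for this commit). A minimal, self-contained sketch of the rewrite shown in the hunks below, with a made-up value for beginname:

beginname = "p2002-05"

# Old-style formatting, as on the removed lines of this diff:
old_percent = "*end %s\n" % (beginname)
old_format = "*end {}\n".format(beginname)

# f-string form, as flynt writes it on the added lines:
new_fstring = f"*end {beginname}\n"

assert old_percent == old_format == new_fstring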
@@ -45,10 +45,10 @@ for i, row in enumerate(rows[1:]):

     if cave != prevlinecave:
         if beginname != None:
-            fout.write("*end %s\n" % (beginname))
+            fout.write(f"*end {beginname}\n")
             beginname = None
         if row != i: #don't start new begin for last line
-            fout.write("\n*begin %s\n" % (cave))
+            fout.write(f"\n*begin {cave}\n")
             beginname = cave
@@ -86,23 +86,23 @@ for i, row in enumerate(rows[1:]):

     #if i < 10: print data

-    sfrom = "%s-%s" % (data['RGang'], data['RPunkt'])
-    sto = "%s-%s" % (data['Gang'], data['Punkt'])
+    sfrom = f"{data['RGang']}-{data['RPunkt']}"
+    sto = f"{data['Gang']}-{data['Punkt']}"
     if data['Description']:
-        fout.write(";%s\n" % data['Description'])
+        fout.write(f";{data['Description']}\n")
     if sfrom == sto:
         if data['RefX'] == data['X'] and data['RefY'] == data['Y'] and data['RefZ'] == data['Z']:
-            fout.write("*fix %s %f %f %f\n" % (sfrom, data['RefX']-450000, data['RefY']-200000, data['RefZ']))
+            fout.write(f"*fix {sfrom} {data['RefX'] - 450000:f} {data['RefY'] - 200000:f} {data['RefZ']:f}\n")
         else:
             print ("Fix 'leg' with non-matching co-ordinates - line i\n")
     else:
-        fout.write("%s %s\t%s\t%s\t%s\n" % (sfrom, sto, data['Tape'], bearing, slope))
+        fout.write(f"{sfrom} {sto}\t{data['Tape']}\t{bearing}\t{slope}\n")
     # if sfrom not in sfromfixes:
     #     fout.write("*fix %s %f %f %f\n" % (sfrom, data['RefX']-450000, data['RefY']-200000, data['RefZ']))
     #     sfromfixes.add(sfrom)

 if beginname != None:
-    fout.write("*end %s\n" % (beginname))
+    fout.write(f"*end {beginname}\n")

 fout.write("\n*end 1626\n")
 fout.close()
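
One detail in the *fix line above: the old %f placeholders become :f format specs inside the f-string, so the fixed-point rendering of the offsets (six decimal places by default) is unchanged. A small standalone check with a made-up easting value:

x = 450123.25
offset_old = "%f" % (x - 450000)
offset_new = f"{x - 450000:f}"
assert offset_old == offset_new == "123.250000"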
@@ -103,7 +103,7 @@ def do_item(year, item):
     contents_path = os.path.join(".", item, "contents.json")
     #print "Trying to read file %s" % (contents_path)
     if not os.path.isfile(contents_path):
-        print("Creating file %s from template" % (contents_path))
+        print(f"Creating file {contents_path} from template")
         json_file = open(contents_path, "w")
         json.dump(blank_json, json_file, sort_keys=True, indent = 1)
         json_file.close()
@@ -112,7 +112,7 @@ def do_item(year, item):
     try:
         data = json.load(json_file)
     except:
-        print("FAILURE parsing JSON file %s" % (contents_path))
+        print(f"FAILURE parsing JSON file {contents_path}")
         # Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
         raise
     if not data["people"]:
@@ -135,7 +135,7 @@ def do_item(year, item):
             write_required = True
     #print write_required
     if write_required:
-        print("Writing file %s" % (contents_path))
+        print(f"Writing file {contents_path}")
         json_file = open(contents_path, "w")
         json.dump(data, json_file, indent = 1)
         json_file.close()
@@ -236,7 +236,7 @@ def do_item(year, item):
         "cave": data["cave"],
         "name": data["name"],
         "date": data["date"],
-        "people": reduce(operator.add, [" %s," % person for person in data["people"]], ""),
+        "people": reduce(operator.add, [f" {person}," for person in data["people"]], ""),
         "description": "http://expo.survex.com"+data["description url"],
         "description_needed": description_needed,
         "loser_dir": loser_dir,
@@ -65,7 +65,7 @@ for r in lines[1:]:
     if mug:
         if not (os.path.isfile(mug)):
             print("ERROR: --------------- mug file does not exist: ", mug, file=sys.stderr)
-    if mug: output += '<a href=%s><img alt=":-)" src="i/mug.png" /></a>' % mug
+    if mug: output += f'<a href={mug}><img alt=":-)" src="i/mug.png" /></a>'
     output += '</td>'
     for y in range(len(years)):
         if(years[y]):
@@ -80,7 +80,7 @@ class QmExtracter:
             grade = grade.upper()
             if grade not in ['A', 'B', 'C', 'D', 'E', 'X']:
                 self.__print_error(svx_file, line,
-                                   'Unknown QM grade ‘%s’' % grade)
+                                   f'Unknown QM grade ‘{grade}’')
                 continue

             # Sanitise the resolution station.
@@ -232,10 +232,9 @@ class QmExtracter:
                 }[grade]
             except KeyError:
                 grade_colour = '00'
-            formatted_grade = '\033[{}m{}\033[0m'.format(grade_colour,
-                                                         grade)
-            formatted_survey_name = '\033[4m{}\033[0m'.format(survey_name)
-            formatted_name = '\033[4m{}\033[0m'.format(name)
+            formatted_grade = f'\033[{grade_colour}m{grade}\033[0m'
+            formatted_survey_name = f'\033[4m{survey_name}\033[0m'
+            formatted_name = f'\033[4m{name}\033[0m'
         else:
             formatted_grade = grade
             formatted_survey_name = survey_name
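
The \033[...m sequences in this hunk are ANSI escape codes for colour and underline; only the formatting style changes, the escape characters themselves carry over into the f-strings. A short standalone check (the colour code 33 is an arbitrary example, not a value taken from the script):

grade = 'B'
grade_colour = '33'  # arbitrary example value
old_style = '\033[{}m{}\033[0m'.format(grade_colour, grade)
new_style = f'\033[{grade_colour}m{grade}\033[0m'
assert old_style == new_style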
@@ -254,11 +253,10 @@ class QmExtracter:
         if n_printed == 0 and not qms:
             print('No QMs found')
         elif n_printed == 0:
-            print('No unresolved QMs found (but %u resolved ones were)' %
-                  len(qms))
+            print(f'No unresolved QMs found (but {len(qms)} resolved ones were)')

     def __print_error(self, svx_file, line, exc):
-        sys.stderr.write('%s: %s\n %s\n' % (svx_file, exc, line))
+        sys.stderr.write(f'{svx_file}: {exc}\n {line}\n')


 def main():