Convert .format() to f-strings with flynt

Author: Philip Sargent
Date: 2022-11-23 10:51:49 +00:00
Parent: aca92cb1af
Commit: aaf6a6c7cf
4 changed files with 19 additions and 21 deletions
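
For context, flynt is a command-line tool that mechanically rewrites old %-style and str.format() formatting into f-strings, which is the kind of change shown in the hunks below. It is normally run over a file or directory, e.g. "flynt path/to/script.py", where the path is a placeholder here. A minimal sketch of the rewrite, using illustrative values that are not taken from the repository:

    import sys

    beginname = "example"   # illustrative value only
    fout = sys.stdout       # stand-in for the script's output file

    fout.write("*end %s\n" % (beginname))   # before: %-style formatting
    fout.write(f"*end {beginname}\n")       # after: the equivalent f-string flynt emits

Both calls produce the same output; flynt only changes how the string is built, not what is written.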


@@ -45,10 +45,10 @@ for i, row in enumerate(rows[1:]):
     if cave != prevlinecave:
         if beginname != None:
-            fout.write("*end %s\n" % (beginname))
+            fout.write(f"*end {beginname}\n")
             beginname = None
         if row != i: #don't start new begin for last line
-            fout.write("\n*begin %s\n" % (cave))
+            fout.write(f"\n*begin {cave}\n")
             beginname = cave
@@ -86,23 +86,23 @@ for i, row in enumerate(rows[1:]):
     #if i < 10: print data
-    sfrom = "%s-%s" % (data['RGang'], data['RPunkt'])
-    sto = "%s-%s" % (data['Gang'], data['Punkt'])
+    sfrom = f"{data['RGang']}-{data['RPunkt']}"
+    sto = f"{data['Gang']}-{data['Punkt']}"
     if data['Description']:
-        fout.write(";%s\n" % data['Description'])
+        fout.write(f";{data['Description']}\n")
     if sfrom == sto:
         if data['RefX'] == data['X'] and data['RefY'] == data['Y'] and data['RefZ'] == data['Z']:
-            fout.write("*fix %s %f %f %f\n" % (sfrom, data['RefX']-450000, data['RefY']-200000, data['RefZ']))
+            fout.write(f"*fix {sfrom} {data['RefX'] - 450000:f} {data['RefY'] - 200000:f} {data['RefZ']:f}\n")
         else:
             print ("Fix 'leg' with non-matching co-ordinates - line i\n")
     else:
-        fout.write("%s %s\t%s\t%s\t%s\n" % (sfrom, sto, data['Tape'], bearing, slope))
+        fout.write(f"{sfrom} {sto}\t{data['Tape']}\t{bearing}\t{slope}\n")
     # if sfrom not in sfromfixes:
     #     fout.write("*fix %s %f %f %f\n" % (sfrom, data['RefX']-450000, data['RefY']-200000, data['RefZ']))
     #     sfromfixes.add(sfrom)
 if beginname != None:
-    fout.write("*end %s\n" % (beginname))
+    fout.write(f"*end {beginname}\n")
 fout.write("\n*end 1626\n")
 fout.close()


@@ -103,7 +103,7 @@ def do_item(year, item):
     contents_path = os.path.join(".", item, "contents.json")
     #print "Trying to read file %s" % (contents_path)
     if not os.path.isfile(contents_path):
-        print("Creating file %s from template" % (contents_path))
+        print(f"Creating file {contents_path} from template")
         json_file = open(contents_path, "w")
         json.dump(blank_json, json_file, sort_keys=True, indent = 1)
         json_file.close()
@@ -112,7 +112,7 @@ def do_item(year, item):
     try:
         data = json.load(json_file)
     except:
-        print("FAILURE parsing JSON file %s" % (contents_path))
+        print(f"FAILURE parsing JSON file {contents_path}")
         # Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
         raise
     if not data["people"]:
@@ -135,7 +135,7 @@ def do_item(year, item):
         write_required = True
     #print write_required
     if write_required:
-        print("Writing file %s" % (contents_path))
+        print(f"Writing file {contents_path}")
         json_file = open(contents_path, "w")
         json.dump(data, json_file, indent = 1)
         json_file.close()
@@ -236,7 +236,7 @@ def do_item(year, item):
"cave": data["cave"],
"name": data["name"],
"date": data["date"],
"people": reduce(operator.add, [" %s," % person for person in data["people"]], ""),
"people": reduce(operator.add, [f" {person}," for person in data["people"]], ""),
"description": "http://expo.survex.com"+data["description url"],
"description_needed": description_needed,
"loser_dir": loser_dir,