mirror of https://expo.survex.com/repositories/troggle/.git synced 2024-11-21 06:41:53 +00:00

Convert .format() to f-strings with flynt

Philip Sargent 2022-11-23 10:48:39 +00:00
parent 45a640dfe9
commit b06d1dae42
14 changed files with 69 additions and 69 deletions
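
flynt (installed with pip install flynt) mechanically rewrites printf-style % interpolation and .format() calls into f-strings; the exact invocation is not recorded in this commit, but it is normally run over a source tree as flynt <path>. The rewrite is behaviour-preserving for the simple substitutions seen below, since all three styles produce the same text. A minimal sanity check, not part of the commit, using made-up values:

    # Illustrative values only: the three formatting styles agree.
    kat_area, kataster_number = "1623", "264"
    old = "%s-%s" % (kat_area, kataster_number)
    new = "{}-{}".format(kat_area, kataster_number)
    fstr = f"{kat_area}-{kataster_number}"
    assert old == new == fstr == "1623-264"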

View File

@@ -44,11 +44,11 @@ class SmartAppendSlashMiddleware(object):
else:
if settings.DEBUG and request.method == 'POST':
# replace this exception with a redirect to an error page
raise RuntimeError("You called this URL via POST, but the URL doesn't end in a slash and you have SMART_APPEND_SLASH set. Django can't redirect to the slash URL while maintaining POST data. Change your form to point to %s%s (note the trailing slash), or set SMART_APPEND_SLASH=False in your Django settings." % (new_url[0], new_url[1]))
raise RuntimeError(f"You called this URL via POST, but the URL doesn't end in a slash and you have SMART_APPEND_SLASH set. Django can't redirect to the slash URL while maintaining POST data. Change your form to point to {new_url[0]}{new_url[1]} (note the trailing slash), or set SMART_APPEND_SLASH=False in your Django settings.")
if new_url != old_url:
# Redirect
if new_url[0]:
newurl = "%s://%s%s" % (request.is_secure() and 'https' or 'http', new_url[0], new_url[1])
newurl = f"{request.is_secure() and 'https' or 'http'}://{new_url[0]}{new_url[1]}"
else:
newurl = new_url[1]
if request.GET:
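
flynt lifts the old "request.is_secure() and 'https' or 'http'" idiom into the replacement field unchanged; f-strings accept arbitrary expressions, so the generated URL is identical. A small sketch (a plain boolean stands in for request.is_secure(), and the host is made up) showing the idiom agrees with the clearer conditional expression:

    # 'secure' stands in for request.is_secure(); the host is illustrative.
    for secure in (True, False):
        old_idiom = f"{secure and 'https' or 'http'}://expo.survex.com/"
        explicit = f"{'https' if secure else 'http'}://expo.survex.com/"
        assert old_idiom == explicit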

View File

@@ -144,9 +144,9 @@ class Cave(TroggleModel):
def reference(self):
if self.kataster_number:
return "%s-%s" % (self.kat_area(), self.kataster_number)
return f"{self.kat_area()}-{self.kataster_number}"
else:
return "%s-%s" % (self.kat_area(), self.unofficial_number)
return f"{self.kat_area()}-{self.unofficial_number}"
def get_absolute_url(self):
if self.kataster_number:
@@ -332,21 +332,21 @@ class Entrance(TroggleModel):
if self.tag_station:
try:
s = SurvexStation.objects.lookup(self.tag_station)
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
except:
return r + "%s Tag Station not in dataset" % self.tag_station
return r + f"{self.tag_station} Tag Station not in dataset"
if self.exact_station:
try:
s = SurvexStation.objects.lookup(self.exact_station)
return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
except:
return r + "%s Exact Station not in dataset" % self.tag_station
return r + f"{self.tag_station} Exact Station not in dataset"
if self.other_station:
try:
s = SurvexStation.objects.lookup(self.other_station)
return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
except:
return r + "%s Other Station not in dataset" % self.tag_station
return r + f"{self.tag_station} Other Station not in dataset"
if self.FINDABLE_CHOICES == "S":
r += "ERROR, Entrance has been surveyed but has no survex point"
if self.bearings:
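
The printf conversion %0.0f maps directly onto the format spec 0.0f after the colon (the spec mini-language is shared with str.format()), so the station coordinates round exactly as before. A quick check with made-up coordinates:

    # Made-up coordinates, just to show the two specs agree.
    x, y, z = 36758.4, 83412.7, 1623.6
    old = "%0.0fE %0.0fN %0.0fAlt" % (x, y, z)
    new = f"{x:0.0f}E {y:0.0f}N {z:0.0f}Alt"
    assert old == new == "36758E 83413N 1624Alt"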

View File

@@ -63,7 +63,7 @@ class DataIssue(TroggleModel):
ordering = ['date']
def __str__(self):
return "%s - %s" % (self.parser, self.message)
return f"{self.parser} - {self.message}"
#
# single Expedition, usually seen by year
@@ -90,7 +90,7 @@ class Expedition(TroggleModel):
if len(expeditiondays) == 1:
return expeditiondays[0]
else:
message ='! - more than one datum in an expeditionday: {}'.format(date)
message =f'! - more than one datum in an expeditionday: {date}'
DataIssue.objects.create(parser='expedition', message=message)
return expeditiondays[0]
res = ExpeditionDay(expedition=self, date=date)
@@ -139,7 +139,7 @@ class Person(TroggleModel):
def __str__(self):
if self.last_name:
return "%s %s" % (self.first_name, self.last_name)
return f"{self.first_name} {self.last_name}"
return self.first_name
@@ -205,14 +205,14 @@ class PersonExpedition(TroggleModel):
#order_with_respect_to = 'expedition'
def __str__(self):
return "%s: (%s)" % (self.person, self.expedition)
return f"{self.person}: ({self.expedition})"
#why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
def name(self):
if self.nickname:
return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
return f"{self.person.first_name} ({self.nickname}) {self.person.last_name}"
if self.person.last_name:
return "%s %s" % (self.person.first_name, self.person.last_name)
return f"{self.person.first_name} {self.person.last_name}"
return self.person.first_name
def get_absolute_url(self):

View File

@@ -5,5 +5,5 @@ register = template.Library()
@register.filter()
def link(value):
return mark_safe("<a href=\'%s\'>"%value.get_absolute_url()+str(value)+"</a>")
return mark_safe(f"<a href='{value.get_absolute_url()}'>"+str(value)+"</a>")
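
The old string escaped its single quotes unnecessarily; in the f-string the outer quotes are double, so bare single quotes inside are fine. (Before Python 3.12 the only real restriction is that the expression inside the braces may not reuse the string's own quote character.) A stand-alone sketch of the same pattern with the Django pieces stubbed out:

    # mark_safe() and the model are replaced by plain stand-ins.
    class FakeCave:
        def get_absolute_url(self):
            return "/cave/1623-264/"
        def __str__(self):
            return "1623-264"

    value = FakeCave()
    old = "<a href=\'%s\'>" % value.get_absolute_url() + str(value) + "</a>"
    new = f"<a href='{value.get_absolute_url()}'>" + str(value) + "</a>"
    assert old == new == "<a href='/cave/1623-264/'>1623-264</a>"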

View File

@@ -141,7 +141,7 @@ def write_and_commit(files, message):
msgdata = 'Ask a nerd to fix this.\n\n' + cp_add.stderr + '\n\n' + cp_add.stdout + '\n\nreturn code: ' + str(cp_add.returncode)
raise WriteAndCommitError(f'CANNOT git on server for this file {filename}. Edits saved but not added to git.\n\n' + msgdata)
else:
print("No change %s" % filepah)
print(f"No change {filepah}")
cp_commit = subprocess.run([git, "commit", "-m", message], cwd=cwd, capture_output=True, text=True)
cp_status = subprocess.run([git, "status"], cwd=cwd, capture_output=True, text=True)
# This produces return code = 1 if it commits OK, but when the repo still needs to be pushed to origin/expoweb
@@ -205,7 +205,7 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
except:
print(" !! - FAIL in SAVE CAREFULLY ===================", objectType)
print(" !! - -- objects.get_or_create()")
print(" !! - lookupAttribs:{}\n !! - nonLookupAttribs:{}".format(lookupAttribs,nonLookupAttribs))
print(f" !! - lookupAttribs:{lookupAttribs}\n !! - nonLookupAttribs:{nonLookupAttribs}")
raise
if not created and not instance.new_since_parsing:
for k, v in list(nonLookupAttribs.items()): #overwrite the existing attributes from the logbook text (except date and title)
@@ -215,12 +215,12 @@ def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
except:
print(" !! - SAVE CAREFULLY ===================", objectType)
print(" !! - -- instance.save()")
print(" !! - lookupAttribs:{}\n !! - nonLookupAttribs:{}".format(lookupAttribs,nonLookupAttribs))
print(f" !! - lookupAttribs:{lookupAttribs}\n !! - nonLookupAttribs:{nonLookupAttribs}")
raise
try:
msg = str(instance)
except:
msg = "FAULT getting __str__ for instance with lookupattribs: {}:".format(lookupAttribs)
msg = f"FAULT getting __str__ for instance with lookupattribs: {lookupAttribs}:"
if created:
logging.info(str(instance) + ' was just added to the database for the first time. \n')

View File

@@ -155,7 +155,7 @@ def file3d(request, cave, cave_id):
#print(" - - Regeneration ABORT\n - - from '{}'".format(survexpath))
pass
try:
completed_process = subprocess.run([settings.CAVERN, "--log", "--output={}".format(settings.SURVEX_DATA), "{}".format(survexpath)])
completed_process = subprocess.run([settings.CAVERN, "--log", f"--output={settings.SURVEX_DATA}", f"{survexpath}"])
except OSError as ex:
# propagate this to caller.
raise OSError(completed_process.stdout) from ex
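
Wrapping survexpath in an f-string is simply str(survexpath); since Python 3.6 subprocess.run() also accepts Path objects in its argument list, so the wrapper only preserves the old stringifying behaviour. A minimal check with an illustrative path:

    from pathlib import Path

    # Illustrative path: the f-string is just str() of the Path.
    survexpath = Path("caves-1623/264/264.svx")
    assert f"{survexpath}" == "{}".format(survexpath) == str(survexpath)
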
@@ -164,7 +164,7 @@ def file3d(request, cave, cave_id):
op3dlog = Path(op3d.with_suffix('.log'))
if not op3d.is_file():
print(" - - Regeneration FAILED\n - - from '{}'\n - - to '{}'".format(survexpath, op3d))
print(f" - - Regeneration FAILED\n - - from '{survexpath}'\n - - to '{op3d}'")
print(" - - Regeneration stdout: ", completed_process.stdout)
print(" - - Regeneration cavern log output: ", op3dlog.read_text())
@@ -172,10 +172,10 @@ def file3d(request, cave, cave_id):
def return3d(threedpath):
if threedpath.is_file():
response = HttpResponse(content=open(threedpath, 'rb'), content_type='application/3d')
response['Content-Disposition'] = 'attachment; filename={}'.format(threedpath.name)
response['Content-Disposition'] = f'attachment; filename={threedpath.name}'
return response
else:
message = '<h1>Path provided does not correspond to any actual 3d file.</h1><p>path: "{}"'.format(threedpath)
message = f'<h1>Path provided does not correspond to any actual 3d file.</h1><p>path: "{threedpath}"'
#print(message)
return HttpResponseNotFound(message)
@@ -205,10 +205,10 @@ def file3d(request, cave, cave_id):
# Get here if cave.survex_file was set but did not correspond to a valid svx file
if survexpath.is_file():
# a file, but invalid format
message='<h1>File is not valid .svx format.</h1><p>Could not generate 3d file from "{}"'.format(survexpath)
message=f'<h1>File is not valid .svx format.</h1><p>Could not generate 3d file from "{survexpath}"'
else:
# we could try to guess that 'caves-1623/' is missing,... nah.
message = '<h1>Path provided does not correspond to any actual file.</h1><p>path: "{}"'.format(survexpath)
message = f'<h1>Path provided does not correspond to any actual file.</h1><p>path: "{survexpath}"'
return HttpResponseNotFound(message)
@@ -325,9 +325,9 @@ def edit_cave(request, path = "", slug=None):
if a.kat_area():
myArea = a.kat_area()
if form.cleaned_data["kataster_number"]:
myslug = "%s-%s" % (myArea, form.cleaned_data["kataster_number"])
myslug = f"{myArea}-{form.cleaned_data['kataster_number']}"
else:
myslug = "%s-%s" % (myArea, form.cleaned_data["unofficial_number"])
myslug = f"{myArea}-{form.cleaned_data['unofficial_number']}"
else:
myslug = slug
# Converting a PENDING cave to a real cave by saving this form
@@ -345,7 +345,7 @@ def edit_cave(request, path = "", slug=None):
try:
cave_file = cave.file_output()
print(cave_file)
write_and_commit([cave_file], "Online edit of %s" % cave)
write_and_commit([cave_file], f"Online edit of {cave}")
# leave other exceptions unhandled so that they bubble up to user interface
except PermissionError:
message = f'CANNOT save this file.\nPERMISSIONS incorrectly set on server for this file {cave.filename}. Ask a nerd to fix this.'
@@ -414,7 +414,7 @@ def edit_entrance(request, path = "", caveslug=None, slug=None):
es.save()
entrance_file = entrance.file_output()
cave_file = cave.file_output()
write_and_commit([entrance_file, cave_file], "Online edit of %s%s" % (cave, entletter))
write_and_commit([entrance_file, cave_file], f"Online edit of {cave}{entletter}")
entrance.save()
if slug is None:
entrance_letter.save()

View File

@@ -39,13 +39,13 @@ def image_selector(request, path):
base = f"{directory}/"
else:
base = ""
thumbnail_url = reverse('expopage', args=["%st/%s" % (base, f.name)])
thumbnail_url = reverse('expopage', args=[f"{base}t/{f.name}"])
name_base = f.name.rsplit('.', 1)[0]
page_path_base = Path(settings.EXPOWEB) / directory / "l"
if ((page_path_base / ("%s.htm" % name_base)).is_file()):
page_url = reverse('expopage', args=["%sl/%s.htm" % (base, name_base)])
if ((page_path_base / (f"{name_base}.htm")).is_file()):
page_url = reverse('expopage', args=[f"{base}l/{name_base}.htm"])
else:
page_url = reverse('expopage', args=["%s/l/%s.html" % (base, name_base)])
page_url = reverse('expopage', args=[f"{base}/l/{name_base}.html"])
thumbnails.append({"thumbnail_url": thumbnail_url, "page_url": page_url})
@@ -128,7 +128,7 @@ class NewWebImageForm(forms.Form):
def clean_file_(self):
for rel_path, full_path in zip(self.get_rel_paths(), self.get_full_paths()):
if full_path.exists():
raise forms.ValidationError("File already exists in %s" % rel_path)
raise forms.ValidationError(f"File already exists in {rel_path}")
return self.cleaned_data['file_']
class HTMLarea(forms.Textarea):

View File

@@ -219,7 +219,7 @@ def expopage(request, path):
#print(" - EXPOPAGES delivering the file: '{}':{} as MIME type: {}".format(request.path, path,getmimetype(path)),flush=True)
if path.startswith("noinfo") and settings.PUBLIC_SITE and not request.user.is_authenticated:
return HttpResponseRedirect(urljoin(reverse("auth_login"),'?next={}'.format(request.path)))
return HttpResponseRedirect(urljoin(reverse("auth_login"),f'?next={request.path}'))
if path.startswith("admin/"):
# don't even attempt to handle these sorts of mistakes
@@ -354,7 +354,7 @@ def editexpopage(request, path):
postbody = "</html>\n"
body = pageform.cleaned_data["html"]
body = body.replace("\r", "")
result = "%s<head%s>%s</head>%s<body%s>\n%s</body>%s" % (preheader, headerargs, head, postheader, bodyargs, body, postbody)
result = f"{preheader}<head{headerargs}>{head}</head>{postheader}<body{bodyargs}>\n{body}</body>{postbody}"
if not filefound or result != html: # Check if content changed at all
try:

View File

@@ -238,4 +238,4 @@ def get_people(request, expeditionslug):
def get_logbook_entries(request, expeditionslug):
exp = Expedition.objects.get(year = expeditionslug)
return render(request,'options.html', {"items": [(le.slug, "%s - %s" % (le.date, le.title)) for le in exp.logbookentry_set.all()]})
return render(request,'options.html', {"items": [(le.slug, f"{le.date} - {le.title}") for le in exp.logbookentry_set.all()]})

View File

@@ -125,10 +125,10 @@ def pathsreport(request):
def stats(request):
statsDict={}
statsDict['expoCount'] = "{:,}".format(Expedition.objects.count())
statsDict['caveCount'] = "{:,}".format(Cave.objects.count())
statsDict['personCount'] = "{:,}".format(Person.objects.count())
statsDict['logbookEntryCount'] = "{:,}".format(LogbookEntry.objects.count())
statsDict['expoCount'] = f"{Expedition.objects.count():,}"
statsDict['caveCount'] = f"{Cave.objects.count():,}"
statsDict['personCount'] = f"{Person.objects.count():,}"
statsDict['logbookEntryCount'] = f"{LogbookEntry.objects.count():,}"
legsbyexpo = [ ]
addupsurvexlength = 0
@@ -142,8 +142,8 @@ def stats(request):
legsyear += int(survexblock.legsall)
addupsurvexlength += survexleglength
addupsurvexlegs += legsyear
legsbyexpo.append((expedition, {"nsurvexlegs": "{:,}".format(legsyear),
"survexleglength":"{:,.0f}".format(survexleglength)}))
legsbyexpo.append((expedition, {"nsurvexlegs": f"{legsyear:,}",
"survexleglength":f"{survexleglength:,.0f}"}))
legsbyexpo.reverse()
renderDict = {**statsDict, **{ "addupsurvexlength":addupsurvexlength/1000, "legsbyexpo":legsbyexpo, "nsurvexlegs":addupsurvexlegs }} # new syntax
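
The grouping and precision specs carry over unchanged after the colon, so the comma-grouped counts and rounded leg lengths render exactly as before:

    # Same grouping/rounding in .format() and in an f-string.
    count, length = 1234567, 9876.54
    assert "{:,}".format(count) == f"{count:,}" == "1,234,567"
    assert "{:,.0f}".format(length) == f"{length:,.0f}" == "9,877"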

View File

@@ -26,13 +26,13 @@ print(" - settings on loading databaseReset.py")
import django
print(" - Memory footprint before loading Django: {:.3f} MB".format(resource.getrusage(resource.RUSAGE_SELF)[2]/1024.0))
print(f" - Memory footprint before loading Django: {resource.getrusage(resource.RUSAGE_SELF)[2] / 1024.0:.3f} MB")
try:
django.setup()
except:
print(" ! Cyclic reference failure. Can occur when the initial db is empty. Fixed now (in UploadFileForm) but easy to reintroduce..")
raise
print(" - Memory footprint after loading Django: {:.3f} MB".format(resource.getrusage(resource.RUSAGE_SELF)[2]/1024.0))
print(f" - Memory footprint after loading Django: {resource.getrusage(resource.RUSAGE_SELF)[2] / 1024.0:.3f} MB")
from troggle.core.models.troggle import DataIssue
import troggle.core.models.survex
@@ -93,10 +93,10 @@ def reinit_db():
# this is now completely failing to nuke MariaDB adequately, and it crashes when creating Area objects with a no null parent message
# when null parents are explciitly allowed in the model.
cursor = django.db.connection.cursor()
cursor.execute("DROP DATABASE %s" % currentdbname)
cursor.execute("CREATE DATABASE %s" % currentdbname)
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % currentdbname)
cursor.execute("USE %s" % currentdbname)
cursor.execute(f"DROP DATABASE {currentdbname}")
cursor.execute(f"CREATE DATABASE {currentdbname}")
cursor.execute(f"ALTER DATABASE {currentdbname} CHARACTER SET=utf8")
cursor.execute(f"USE {currentdbname}")
print(f" - Nuked : {currentdbname}\n")
print(" - Migrating: " + django.db.connections.databases['default']['NAME'])
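
These DDL statements build their SQL text by interpolating the configured database name, exactly as the % versions did; DROP/CREATE DATABASE generally cannot take bound parameters anyway, so the f-string form is a straight textual equivalent. With a stand-in name:

    # Stand-in database name: identical SQL text either way.
    currentdbname = "troggle"
    assert ("DROP DATABASE %s" % currentdbname
            == f"DROP DATABASE {currentdbname}"
            == "DROP DATABASE troggle")
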
@@ -117,7 +117,7 @@ def reinit_db():
with transaction.atomic():
try:
print(" - Setting up expo user on: " + django.db.connections.databases['default']['NAME'])
print(" - user: {} ({:.5}...) <{}> ".format(expouser, expouserpass, expouseremail))
print(f" - user: {expouser} ({expouserpass:.5}...) <{expouseremail}> ")
user = User.objects.create_user(expouser, expouseremail, expouserpass)
user.is_staff = False
user.is_superuser = False
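
A precision spec applied to a string truncates it, so {expouserpass:.5} prints only the first five characters of the password, just as the old {:.5} did. For example, with a made-up value:

    # Made-up password: precision on a str keeps the first 5 characters.
    expouserpass = "correct-horse-battery"
    assert "{:.5}".format(expouserpass) == f"{expouserpass:.5}" == "corre"
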
@@ -136,7 +136,7 @@ def reinit_db():
with transaction.atomic():
try:
print(" - Setting up expoadmin user on: " + django.db.connections.databases['default']['NAME'])
print(" - user: {} ({:.5}...) <{}> ".format(expoadminuser, expoadminuserpass, expoadminuseremail))
print(f" - user: {expoadminuser} ({expoadminuserpass:.5}...) <{expoadminuseremail}> ")
user = User.objects.create_user(expoadminuser, expoadminuseremail, expoadminuserpass)
user.is_staff = True
user.is_superuser = True
@@ -157,7 +157,7 @@ def memdumpsql(fn):
from dump import _iterdump
with open(fn, 'w') as f:
for line in _iterdump(djconn):
f.write('%s\n' % line.encode("utf8"))
f.write(f"{line.encode('utf8')}\n")
return True
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
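
One subtlety in memdumpsql(): interpolating a bytes object into a str writes its repr (b'...'), not decoded text. That was already true of the % version, and the f-string preserves it exactly, so the dump output is unchanged:

    # Both styles write the bytes repr, not the decoded text.
    line = "Höhle"
    old = '%s\n' % line.encode("utf8")
    new = f"{line.encode('utf8')}\n"
    assert old == new == "b'H\\xc3\\xb6hle'\n"
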
@@ -195,7 +195,7 @@ class JobQueue():
for j in data:
self.results[j] = data[j]
except:
print("FAILURE parsing JSON file %s" % (self.tfile))
print(f"FAILURE parsing JSON file {self.tfile}")
# Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
f.close()
for j in self.results_order:
@@ -229,7 +229,7 @@ class JobQueue():
print("** Running job ", self.runlabel,end=" to ")
print(django.db.connections.databases['default']['NAME'])
jobstart = time.time()
print("-- Initial memory in use {:.3f} MB".format(get_process_memory()))
print(f"-- Initial memory in use {get_process_memory():.3f} MB")
self.results["date"].pop()
self.results["date"].append(jobstart)
self.results["runlabel"].pop()
@@ -244,14 +244,14 @@ class JobQueue():
memend = get_process_memory()
duration = time.time()-start
#print(" - MEMORY start:{:.3f} MB end:{:.3f} MB change={:.3f} MB".format(memstart,memend, ))
print("\n*- Ended \"", runfunction[0], "\" {:.1f} seconds + {:.3f} MB ({:.3f} MB)".format(duration, memend-memstart, memend))
print("\n*- Ended \"", runfunction[0], f"\" {duration:.1f} seconds + {memend - memstart:.3f} MB ({memend:.3f} MB)")
self.results[runfunction[0]].pop() # the null item
self.results[runfunction[0]].append(duration)
jobend = time.time()
jobduration = jobend-jobstart
print("** Ended job %s - %.1f seconds total." % (self.runlabel,jobduration))
print(f"** Ended job {self.runlabel} - {jobduration:.1f} seconds total.")
return True
@@ -310,13 +310,13 @@ class JobQueue():
else:
s = 0
days = (s)/(24*60*60)
print('%8.2f' % days, end=' ')
print(f'{days:8.2f}', end=' ')
elif r[i]:
print('%8.1f' % r[i], end=' ')
print(f'{r[i]:8.1f}', end=' ')
if i == len(r)-1 and r[i-1]:
percen = 100* (r[i] - r[i-1])/r[i-1]
if abs(percen) >0.1:
print('%8.1f%%' % percen, end=' ')
print(f'{percen:8.1f}%', end=' ')
else:
print(" - ", end=' ')
print("")
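
Two details in this hunk: width specs like %8.1f become :8.1f unchanged, and the doubled %% needed for a literal percent sign in printf formatting becomes a single % once the string is an f-string. With an illustrative value:

    # Illustrative value: width/precision spec plus a literal % sign.
    percen = -3.21
    assert "%8.1f%%" % percen == f"{percen:8.1f}%" == "    -3.2%"
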
@@ -441,7 +441,7 @@ if __name__ == "__main__":
exit()
else:
usage()
print("%s not recognised as a command." % sys.argv[1])
print(f"{sys.argv[1]} not recognised as a command.")
exit()
jq.run()

View File

@@ -46,11 +46,11 @@ def _iterdump(connection):
# qtable,
# sql.replace("''")))
else:
yield('{0};'.format(sql))
yield(f'{sql};')
# Build the insert statement for each row of the current table
table_name_ident = table_name.replace('"', '""')
res = cu.execute('PRAGMA table_info("{0}")'.format(table_name_ident))
res = cu.execute(f'PRAGMA table_info("{table_name_ident}")')
column_names = [str(table_info[1]) for table_info in res.fetchall()]
q = """SELECT 'INSERT INTO "{0}" VALUES({1})' FROM "{0}";""".format(
table_name_ident,
@@ -68,6 +68,6 @@ def _iterdump(connection):
"""
schema_res = cu.execute(q)
for name, type, sql in schema_res.fetchall():
yield('{0};'.format(sql))
yield(f'{sql};')
yield('COMMIT;')
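
The remaining .format() call in this file (the q template that reuses the positional placeholder {0} twice) is left alone, presumably because f-strings have no way to reuse a placeholder: the expression would have to be spelled out each time. A sketch of the difference with a stand-in identifier:

    # Stand-in identifier: .format() passes the value once and reuses it;
    # an f-string repeats the expression.
    table_name_ident = "core_cave"
    via_format = 'SELECT * FROM "{0}" WHERE "{0}".id > 0;'.format(table_name_ident)
    via_fstring = f'SELECT * FROM "{table_name_ident}" WHERE "{table_name_ident}".id > 0;'
    assert via_format == via_fstring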

View File

@@ -116,7 +116,7 @@ def generate_dot(app_labels, **kwargs):
for app_label in app_labels:
app = models.get_app(app_label)
graph = Context({
'name': '"%s"' % app.__name__,
'name': f'"{app.__name__}"',
'disable_fields': disable_fields,
'models': []
})

View File

@@ -62,14 +62,14 @@ def delete_sqlite3():
if os.path.exists(db_file):
try:
os.remove(db_file)
print("\n>>> troggle.sqlite: {} DELETED\n".format(db_file))
print(f"\n>>> troggle.sqlite: {db_file} DELETED\n")
except:
print("troggle.sqlite: {} NOT deleted".format(db_file))
print(f"troggle.sqlite: {db_file} NOT deleted")
def main():
global folders
print("base directory used: {}".format(base_dir))
print(f"base directory used: {base_dir}")
try: