forked from expo/troggle
ruff removed unused imports
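Besides the unused-import removal named in the message, the hunks below carry a handful of other ruff autofixes: membership tests rewritten as `not in`, `== True` comparisons rewritten as `is True`, unused local bindings dropped (keeping the right-hand call where it may have side effects), and f-strings without placeholders reduced to plain strings. A rough illustrative summary, using simplified stand-in values rather than the real troggle objects (the rule codes in the comments are the usual pyflakes/pycodestyle names, not something this commit states):

    # Illustrative stand-ins only -- not troggle code.
    from datetime import datetime, timezone  # was: date, datetime, timedelta, timezone (unused imports, F401)

    flagsstar = {"surface": False, "splay": False}
    group2 = "Nobody"

    weird = group2.lower() not in ("none", "both")                     # was: not group2.lower() in (...)  (E713)
    skip = flagsstar["surface"] is True or flagsstar["splay"] is True  # was: ... == True or ... == True   (E712)

    str(datetime.now(timezone.utc))[:4]  # was: qmyear = str(...)[:4] -- unused binding dropped, call kept (F841)
    print("*begin unseens")              # was: print(f"*begin unseens") -- f-string with no placeholders  (F541)

    print(weird, skip)  # use the illustrative values so nothing here is itself "unused"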
@@ -4,13 +4,12 @@ import re
 import subprocess
 import sys
 import time
-from datetime import date, datetime, timedelta, timezone
+from datetime import datetime, timezone
 from pathlib import Path

 from django.utils.timezone import get_current_timezone, make_aware

 import troggle.settings as settings
-from troggle.core.models.caves import QM, Cave, Entrance, LogbookEntry
+from troggle.core.models.caves import QM, Cave, Entrance
 from troggle.core.models.survex import SurvexBlock, SurvexDirectory, SurvexFile, SurvexPersonRole, SurvexStation, Wallet
 from troggle.core.models.troggle import DataIssue, Expedition
 from troggle.core.utils import chaosmonkey, get_process_memory
@@ -335,7 +334,7 @@ class LoadingSurvex:
 if tm:
     record_team_member(tm, survexblock)
 else:
-    if not mteammember.group(2).lower() in ("none", "both"):
+    if mteammember.group(2).lower() not in ("none", "both"):
         message = f"! Weird *team '{mteammember.group(2)}' newstyle line: '{line}' ({survexblock}) {survexblock.survexfile.path}"
         print(self.insp + message)
         DataIssue.objects.create(
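The only change in this hunk is the membership test. Because `in` binds more tightly than `not`, `not x in y` and `x not in y` are the same test; the rewrite is purely stylistic. A standalone check with a made-up value:

    group2 = "Nobody"
    assert (not group2.lower() in ("none", "both")) == (group2.lower() not in ("none", "both"))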
@@ -793,15 +792,13 @@ class LoadingSurvex:

 # Older troggle/CSV assumes a logbook entry 'found_by' for each QM, with a date.
 # We don't need this anymore so we don't need to create a placeholder logbook entry.
-qmyear = str(survexblock.date)[:4]
+str(survexblock.date)[:4]
 blockname = survexblock.name[:6] + survexblock.name[-1:]
 # logslug = f'D{int(qmyear)}_{blockname}_{int(qm_no):03d}'
 if survexblock.survexfile.cave:
-    caveslug = survexblock.survexfile.cave.slug()
-    place = survexblock.survexfile.cave
+    survexblock.survexfile.cave.slug()
 else:
-    caveslug = None
-    place = None
+    pass

 try:
     qm = QM.objects.create(
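This hunk drops bindings that are never read again (`qmyear`, `caveslug`, `place`) but keeps `survexblock.survexfile.cave.slug()` as a bare expression, so whatever the call does still happens even though its result is now discarded. A minimal standalone sketch of that transformation, with a hypothetical function standing in for the troggle call:

    def lookup_slug(cave_id):
        """Hypothetical stand-in for survexblock.survexfile.cave.slug()."""
        print(f"slug looked up for {cave_id}")  # pretend side effect
        return f"cave-{cave_id}"

    # before: caveslug = lookup_slug(290)   # 'caveslug' never used afterwards
    # after:
    lookup_slug(290)  # binding removed; the call itself (and any side effect) kept

Whether the now-unused call is worth keeping at all is a separate judgement the autofix does not make.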
@@ -930,7 +927,7 @@ class LoadingSurvex:

 # if self.flagsstar["duplicate"] == True or self.flagsstar["surface"] == True or self.flagsstar["splay"] == True:
 # actually we do want to count duplicates as this is for "effort expended in surveying underground"
-if self.flagsstar["surface"] == True or self.flagsstar["splay"] == True:
+if self.flagsstar["surface"] is True or self.flagsstar["splay"] is True:
     self.flagsstar["skiplegs"] = True
     if debugprint:
         print(
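`== True` and `is True` agree as long as the `self.flagsstar` values really are the bool singletons (not shown in this hunk, but that is the assumption behind the rewrite); the two spellings only diverge for truthy non-bool values. A standalone illustration:

    surface = True  # a genuine bool: both spellings agree
    assert surface == True and surface is True

    surface = 1     # truthy but not a bool: the spellings diverge
    assert surface == True
    assert surface is not True  # equal to True, but not the same object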
@@ -1147,7 +1144,6 @@ class LoadingSurvex:
 def LoadSurvexSetup(self, survexblock, survexfile):
     self.depthbegin = 0
     self.datastar = self.datastardefault
-    blocklegs = self.legsnumber
     print(
         self.insp
         + f" - MEM:{get_process_memory():.3f} Reading. parent:{survexblock.survexfile.path} <> {survexfile.path} "
@@ -1181,7 +1177,7 @@ class LoadingSurvex:
 slengthtotal = 0.0
 nlegstotal = 0
 self.relativefilename = path
-cave = self.IdentifyCave(path) # this will produce null for survex files which are geographic collections
+self.IdentifyCave(path) # this will produce null for survex files which are geographic collections

 self.currentsurvexfile = survexblock.survexfile
 self.currentsurvexfile.save() # django insists on this although it is already saved !?
@@ -1628,7 +1624,7 @@ class LoadingSurvex:
 DataIssue.objects.create(parser="xEntrances", message=message, url=url)
 print(message)
 print(
-    f"stderr:\n\n" + str(sp.stderr) + "\n\n" + str(sp.stdout) + "\n\nreturn code: " + str(sp.returncode)
+    "stderr:\n\n" + str(sp.stderr) + "\n\n" + str(sp.stdout) + "\n\nreturn code: " + str(sp.returncode)
 )
 self.caverncount += 1

@@ -1643,7 +1639,7 @@ class LoadingSurvex:

 svxpath = Path(fullpath + ".svx")
 logpath = Path(fullpath + ".log")
-outputdir = Path(svxpath).parent
+Path(svxpath).parent

 if not svxpath.is_file():
     message = f' ! BAD survex file "{fullpath}" specified in *include in {calledpath} '
@@ -1664,7 +1660,7 @@ class LoadingSurvex:
 DataIssue.objects.create(parser="entrances", message=message)
 print(message)
 print(
-    f"stderr:\n\n" + str(sp.stderr) + "\n\n" + str(sp.stdout) + "\n\nreturn code: " + str(sp.returncode)
+    "stderr:\n\n" + str(sp.stderr) + "\n\n" + str(sp.stdout) + "\n\nreturn code: " + str(sp.returncode)
 )
 self.caverndate = os.path.getmtime(sp.stdout.strip())
 else:
@@ -1786,7 +1782,7 @@ def FindAndLoadSurvex(survexblockroot):
     f"\n - {len(unseens)} survex files found which were not included in main tree. ({len(svx_scan.svxfileslist)} in main tree)",
     file=sys.stderr,
 )
-print(f" -- Now loading the previously-omitted survex files.", file=sys.stderr)
+print(" -- Now loading the previously-omitted survex files.", file=sys.stderr)

 with open(Path(settings.SURVEX_DATA, "_unseens.svx"), "w") as u:
     u.write(
@@ -1794,10 +1790,10 @@ def FindAndLoadSurvex(survexblockroot):
     )
     u.write(f"; autogenerated by parser/survex.py from databasereset.py on '{datetime.now(timezone.utc)}'\n")
     u.write(f"; omitting any file beginning with {excpts}\n\n")
-    u.write(f"*begin unseens\n")
+    u.write("*begin unseens\n")
     for x in sorted(unseens):
         u.write(f" *include {x}\n")
-    u.write(f"*end unseens\n")
+    u.write("*end unseens\n")

 survexfileroot = survexblockroot.survexfile # i.e. SURVEX_TOPNAME only

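Dropping the `f` prefix on the `*begin unseens` / `*end unseens` writes is safe because an f-string with no `{...}` placeholders is just an ordinary string literal; the `*include` line keeps its prefix since it interpolates `x`. A standalone sketch of the same writes, using io.StringIO and a made-up include path:

    from io import StringIO

    u = StringIO()
    u.write("*begin unseens\n")  # was: u.write(f"*begin unseens\n")
    for x in sorted(["some/omitted/file"]):  # hypothetical path, for illustration only
        u.write(f" *include {x}\n")          # keeps the f-prefix: it has a placeholder
    u.write("*end unseens\n")                # was: u.write(f"*end unseens\n")
    print(u.getvalue())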
@@ -2065,7 +2061,6 @@ def LoadPositions():
 print(f" - Generating a list of Pos from {topdata}.svx and then loading...")

 found = 0
-skip = {}
 print("\n") # extra line because cavern overwrites the text buffer somehow
 # cavern defaults to using same cwd as supplied input file

@@ -2113,7 +2108,7 @@ def LoadPositions():
 try:
     survexblockroot = SurvexBlock.objects.get(id=1)
 except:
-    message = f" ! FAILED to find root SurvexBlock"
+    message = " ! FAILED to find root SurvexBlock"
     print(message)
     DataIssue.objects.create(parser="entrances", message=message)
     raise
@@ -2131,17 +2126,16 @@ def LoadPositions():
 try:
     sbqs = SurvexBlock.objects.filter(survexpath=blockpath)
     if len(sbqs) == 1:
-        sb = sbqs[0]
+        sbqs[0]
     if len(sbqs) > 1:
         message = f" ! MULTIPLE SurvexBlocks {len(sbqs):3} matching Entrance point {blockpath} {sid} '{id}'"
         print(message)
         DataIssue.objects.create(parser="entrances", message=message)
-        sb = sbqs[0]
+        sbqs[0]
     elif len(sbqs) <= 0:
         message = f" ! ZERO SurvexBlocks matching Entrance point {blockpath} {sid} '{id}'"
         print(message)
         DataIssue.objects.create(parser="entrances", message=message)
-        sb = survexblockroot
 except:
     message = f" ! FAIL in getting SurvexBlock matching Entrance point {blockpath} {sid}"
     print(message)