diff --git a/core/views/survex.py b/core/views/survex.py
index d68ee41..83d9d79 100644
--- a/core/views/survex.py
+++ b/core/views/survex.py
@@ -139,7 +139,7 @@ def get_survexfile(filename):
         print(f"Number of SurvexFile objects found: {len(refs)}")
         for s in refs:
             print (s.path, s.primary, s.cave)
-    # print(type(survexfile), filename)
+    print(type(survexfile), filename)
     return survexfile
 
 class SvxForm(forms.Form):
@@ -165,7 +165,12 @@ class SvxForm(forms.Form):
             self.survexfile = False
             return survextemplatefile
         if not self.survexfile:
-            self.survexfile = get_survexfile(self.data["filename"])
+            if sf := get_survexfile(self.data["filename"]): # walrus!
+                self.survexfile = sf
+            else:
+                print(">>> >>> WARNING - svx file not a SurvexFile object yet", fname, flush=True)
+                self.survexfile = fname
+
         try:
             fin = open(fname, "r", encoding="utf8", newline="")
             svxtext = fin.read()
@@ -284,7 +289,7 @@ def svx(request, survex_file):
     """
     warning = False
 
-    print(survex_file)
+    print(f"svx(): {survex_file=}")
     if survex_file.lower().endswith(".svx"): #cope with ".svx.svx" bollox
         survex_file = survex_file[:-4]
 
@@ -344,7 +349,8 @@ def svx(request, survex_file):
            form.data["code"] = rcode
 
     # GET, also fall-through after POST-specific handling
-    svxfile = get_survexfile(survex_file)
+    if svxfile := get_survexfile(survex_file):
+        print(f"svx(): a real SurvexFile object {svxfile=} {svxfile.id=}")
 
     if "code" not in form.data:
         form.data["code"] = form.GetDiscCode()
@@ -377,6 +383,9 @@ def svx(request, survex_file):
         except AttributeError:  # some survexfiles just *include files and have no blocks themselves
             svxblocksall = []
     else:
+        svxfile = survex_file
+        print(f"svx(): NOT a real SurvexFile object '{svxfile=}'")
+
         svxblocks = []
         svxblocksall = []
         svxlength = 0.0
diff --git a/parsers/survex.py b/parsers/survex.py
index fb0f8d3..20d3f79 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -829,7 +829,6 @@ class LoadingSurvex:
                 parser="survex", message=message, url=None, sb=(survexblock.survexfile.path)
             )
             print(f" {type(survexblock)=}")  # survexblock.parent fails as a SurvexFile has no .parent ...ugh.
-            print(f" {survexblock.survexpath=}")
             print(f" {survexblock.survexfile=}")
             # Not setting 'year' crashes entire import on databaseReset.
             year = line[:4]
@@ -1148,7 +1147,7 @@ class LoadingSurvex:
         elif ls[0] == "normal" or ls[0] == "topofil":
             if not ("from" in datastar and "to" in datastar):
                 message = (
-                    f" ! - Unrecognised *data normal statement '{args}' {survexblock.name}|{survexblock.survexpath}"
+                    f" ! - Unrecognised *data normal statement '{args}' {survexblock.name}"
                 )
                 print(message)
                 print(message, file=sys.stderr)
@@ -1178,19 +1177,19 @@ class LoadingSurvex:
                 self.datastar = copy.deepcopy(datastar)
                 return
         elif ls[0] == "passage" or ls[0] == "nosurvey" or ls[0] == "diving" or ls[0] == "cylpolar":
-            # message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, survexblock.survexpath, args)
+            # message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, args)
             # print(message)
             # print(message,file=sys.stderr)
             # stash_data_issue(parser='survex', message=message)
             self.datastar["type"] = ls[0]
         elif ls[0] == "cartesian":  # We should not ignore this ?! Default for Germans ?
-            # message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, survexblock.survexpath, args)
+            # message = " ! - *data {} blocks ignored. {}|{}" '{}' .format(ls[0].upper(), survexblock.name, args)
             # print(message)
             # print(message,file=sys.stderr)
             # stash_data_issue(parser='survex', message=message)
             self.datastar["type"] = ls[0]
         else:
-            message = f" ! - Unrecognised *data statement '{args}' {survexblock.name}|{survexblock.survexpath}"
+            message = f" ! - Unrecognised *data statement '{args}' {survexblock.name}"
             print(message)
             print(message, file=sys.stderr)
             stash_data_issue(
@@ -1258,7 +1257,7 @@ class LoadingSurvex:
         kataster fixedpts/gps
         and everything at top level, directly in caves-1623/ not in a subdir
-        NOTE self.cavelist is a superset of GCaveLookup, which already contains both uppercase and lowercase aliases
+        NOTE self.cavelist is a superset of GetCaveLookup, which already contains both uppercase and lowercase aliases
 
         why is this called with cavepath="caves-1623/2023-kt-02"
         when this is a cave where the files are in "caves-1623/2023-kt-02/"
         cavepath = 'surface/1623' when svxis is 'surface/1623/2004-18to298.svx'
@@ -1281,7 +1280,7 @@ class LoadingSurvex:
             # print(message, file=sys.stderr)
             return False
 
-        if cavepath.lower() in self.caveslist:  # primed with GCaveLookup
+        if cavepath.lower() in self.caveslist:  # primed with GetCaveLookup
             return self.caveslist[cavepath.lower()]
 
         rx_svxcollection = re.compile(r"(?i)caves-(\d\d\d\d)/(.*)$")
@@ -1290,7 +1289,7 @@ class LoadingSurvex:
         if path_match:
             area = path_match.group(1)
             caveid = path_match.group(2)
-            sluggy = f"{area}-{caveid}".lower()  # GCaveLookup is all UPPER() and all lower() but not mixed
+            sluggy = f"{area}-{caveid}".lower()  # GetCaveLookup is all UPPER() and all lower() but not mixed
             # if this comes from editing a survex file, we may already have loaded 3-digit aliases for 1623- from old wallets,
             # so be careful here..
             seek = {sluggy, sluggy.replace("1623-","")}  # {} is a set
@@ -1761,7 +1760,6 @@ class LoadingSurvex:
             newsurvexblock = SurvexBlock(
                 name=blkid,
                 parent=survexblock,
-                # survexpath=pathlist, # use the debug file, not this, for debugging
                 survexfile=self.currentsurvexfile,
                 legsall=0,
                 legslength=0.0,
@@ -2468,7 +2466,97 @@ def parse_one_file(fpath): # --------------------------------------in progress--
     In the initial file parsing in databaseReset, the *include expansion is done
     in an earlier stange than LinearLoad(). By the time LinearLoad() is called,
     all the *include expansion has happened.
+
+    There are two cases:
+    1. the path is for an existing cave 1626-359:
+        svxpath = 'caves-1626/359/new_passage'
+        or
+        svxpath = 'caves-1623/161/triassic/new_passage'
+        or possibly
+        svxpath = 'caves-1623/161/new_series/new_passage'
+
+
+    2. the path is for an entirely new cave which does not exist '2030-BL-99'
+        svxpath = 'caves-1626/2030-BL-99/first_explore'
+
     This creates its own LoadingSurvex() class instance called svx_load
     """
+    def find_cave_from_path(svxpath):
+        """Seems simple enough, but needs refactoring with the class method IdentifyCave().
+        This will normally be called from make_fileroot() only when creating a new survex file
+        from inside the survexfile editor page by overwriting the URL in the
+        browser bar.
+        """
+        cavelist = GetCaveLookup()
+        rx_cavepath = re.compile(r"(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)/?.*")
+        print(f"find_cave_from_path({svxpath})")
+        path_match = rx_cavepath.search(svxpath)
+        if path_match:
+            area = path_match.group(1)
+            caveid = path_match.group(2)
+            caveslug = f"{area}-{caveid}".lower()  # GetCaveLookup is all UPPER() and all lower() but not mixed
+
+            print(f"find_cave_from_path({svxpath}): {caveslug=}")
+            if caveslug.lower() in cavelist:
+                print(f"find_cave_from_path({svxpath}): {caveslug=} YES {cavelist[caveslug.lower()]=}")
+                return cavelist[caveslug.lower()]
+            else:
+                print(f"find_cave_from_path({svxpath}) FAIL with correct format, trying with short-form...")
+        rx_alias = re.compile(r"(?i)([-\d\w]+|\d\d\d\d-?\w+-\d+)/?.*")
+        print(f"find_cave_from_path({svxpath}) attempting short form alias")
+        path_match = rx_alias.search(svxpath)
+        if path_match:
+            caveid = path_match.group(1)
+            print(f"find_cave_from_path({svxpath}): {caveid=}")
+            if caveid.lower() in cavelist:
+                print(f"find_cave_from_path({svxpath}): {caveid=} YES {cavelist[caveid.lower()]=}")
+                return cavelist[caveid.lower()]
+
+    def make_fileroot(svxpath):
+        """Returns a SurvexFile object, not a file_object.path.
+        Used by the online survex file editor when re-parsing:
+        finds the primary survex file for this cave, or makes a new dummy root.
+        """
+        cave = find_cave_from_path(svxpath)
+        if cave:
+            cave_svxpath = cave.survex_file[:-4]  # remove .svx
+            fileroot = SurvexFile.objects.get(path=cave_svxpath)
+            print(f" - Setting the root survexfile for this import: {svxpath} to be that for cave {cave}")
+            return fileroot
+
+        # make a dummy SurvexFile object, which will be removed later
+        dummyroot = SurvexFile(path=svxpath)
+        dummyroot.save()
+        print(f" - Making/finding a new dummy root survexfile for this import: {svxpath}")
+        print(f" - new fileroot {type(dummyroot)} for {svxpath} with cave {cave}\n - {dummyroot.primary=} {dummyroot.path=} {dummyroot.cave=} ")
+        return dummyroot
+    """ for f in IGNOREFILES:
+            if svxid.lower().startswith(f):
+                return False
+        for i in IGNOREPREFIX:
+            if cavepath.lower().startswith(i) or cavepath[11:].lower().startswith(i):
+                # message = (f" - {cavepath} is an (while looking at '{svxid}.svx' )")
+                # print(message, file=sys.stderr)
+                return False
+
+            cave = create_new_cave(cavepath, svxid, f"Cave mentioned only in a survex file {svxid=}")  # uses the pending code
+            self.caveslist[cavepath.lower()] = cave
+            return cave
+        else:
+            path_match = rx_svxcollection.search(svxid)
+            if path_match:
+                # message = f" ! Recognised survex file in area {path_match.group(1)} which is not a cave at {svxid=}"
+                # stash_data_issue(parser="survex", message=message, url=None, sb=(svxid))
+                # print(message, file=sys.stderr)
+                return False
+            else:  # probably a top level file immediately in the loser directory. No worries.
+                message = f" ! Warning: no cave identifiable for '{svxid}.svx' {cavepath=} "
+                print("\n" + message)
+                stash_data_issue(parser="survex", message=message, url="{svxid}.svx", sb=(svxid))
+                return False
+    """
+
+
     def parse_new_svx(fpath, svx_load, svxfileroot=None):
         """We need a dummy survex block which has the survexfile being parsed as
         its .survexfile field. But it is used in two ways, it is also
@@ -2478,21 +2566,27 @@ def parse_one_file(fpath): # --------------------------------------in progress--
         We also need to re-plumb the fileroot after importing, so that
         the new survexfile appears in the survexdirectory lists?
+
+        Rather than just arbitrarily creating something,
+        we should see if this is a known cave first. If it isn't, don't bother,
+        as it might be a fixedpts survex file, not a cave survex file.
+
         """
         if svxfileroot == None:
-            svxfileroot = MakeFileRoot(fpath)
-            svxfileroot.save()
-
+            # Not seen this survexfile before, so it does not exist yet.
+            # make_fileroot() finds the cave's primary survex file, or makes a dummy root.
+            svxfileroot = make_fileroot(fpath)
+
+        print(f"## parse_new_svx(): {svxfileroot=} from {fpath}")
         # It is vital that the block has attached the survexfile object which is being parsed.
-        block_dummy = SurvexBlock(
-            name="dummy", survexfile=svxfileroot, legsall=0, legslength=0.0
+        block_dummy = SurvexBlock( name="",
+            survexfile=svxfileroot, legsall=0, legslength=0.0
         )
-        svxfileroot.save()
         block_dummy.save()
-        newname = f"#{block_dummy.id}_" + str(Path(str(svxfileroot)).name)
-        block_dummy.name = newname
-        block_dummy.save()
-        print(f" - block_dummy now '{block_dummy}' {type(block_dummy)} id={block_dummy.id} f:{block_dummy.survexfile}")
+        #svxfileroot.save()
+        # block_dummy.id only exists after the first save(); beware a trailing comma here,
+        # which would silently turn .name into a tuple.
+        block_dummy.name = f"#{block_dummy.id}_{str(Path(str(svxfileroot)))}"
+        block_dummy.save()
+        print(f" - block_dummy now '{block_dummy}' {type(block_dummy)} id={block_dummy.id} f:{block_dummy.survexfile}\n -- {block_dummy.name=}")
 
         # ----------------------------------------------------------------
         svx_load.LinearLoad(block_dummy, svxfileroot.path, fname)
@@ -2573,24 +2667,7 @@ def parse_one_file(fpath): # --------------------------------------in progress--
     svx_load = None
 
     return True
 
-def MakeFileRoot(svxpath):
-    """Returns a file_object.path
-    Used by the online survex file editor when re-parsing
-    or tries to find the primary survex file for this cave
-    Looks horrible, rewrite all this..
-    """
-    cave = IdentifyCave(svxpath)
-    if not cave:
-        if svxpath != UNSEENS:
-            cave = create_new_cave(svxpath, "", "Make dummy Cave for MakeFileRoot {svxpath}")
-    # is this really necessary ?!
-    fileroot = SurvexFile(path=svxpath, cave=cave)
-    fileroot.save()
-    print(f" - Making/finding a new dummy root survexfile for this import: {svxpath} with cave {cave}")
-    print(f" - new fileroot {type(fileroot)} for {svxpath} with cave {cave}\n - {fileroot.primary} {fileroot.path} {fileroot.cave} ")
-
-    return fileroot
 
 def set_survexblocks():
     """Need to find the optimal Django way of doing this query.
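
Reviewer note (not part of the patch): the lookup order in the new find_cave_from_path() helper is easier to check in isolation. The sketch below mirrors that order as reconstructed above (full caves-YYYY/<id> form first, then the bare short-form alias, otherwise None) against the example paths in the new docstring. It is a minimal sketch only: the cavelist dict stands in for GetCaveLookup(), which in troggle returns Cave objects keyed by both upper- and lower-case slugs, and find_cave_sketch() is a hypothetical name, not a function in the codebase.

import re

# Same regexes as the patch introduces in find_cave_from_path()
rx_cavepath = re.compile(r"(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)/?.*")
rx_alias = re.compile(r"(?i)([-\d\w]+|\d\d\d\d-?\w+-\d+)/?.*")

# Stand-in for GetCaveLookup(): keys are lower-case cave slugs
cavelist = {"1626-359": "<Cave 1626-359>", "1623-161": "<Cave 1623-161>"}

def find_cave_sketch(svxpath):
    """Sketch of the patched lookup order: full caves-YYYY/<id> form first,
    then the short-form alias, else None (an unknown or brand-new cave)."""
    m = rx_cavepath.search(svxpath)
    if m:
        caveslug = f"{m.group(1)}-{m.group(2)}".lower()
        if caveslug in cavelist:
            return cavelist[caveslug]
    m = rx_alias.search(svxpath)
    if m and m.group(1).lower() in cavelist:
        return cavelist[m.group(1).lower()]
    return None

print(find_cave_sketch("caves-1626/359/new_passage"))           # existing cave: found
print(find_cave_sketch("caves-1626/2030-BL-99/first_explore"))  # new cave: None, so make_fileroot() makes a dummy root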