import sys
import os
import re
import time
import copy

from datetime import datetime, timedelta
from subprocess import call, run

from django.utils.timezone import get_current_timezone
from django.utils.timezone import make_aware

import troggle.settings as settings
import troggle.core.models as models
import troggle.core.models_caves as models_caves
import troggle.core.models_survex as models_survex
from troggle.utils import ChaosMonkey
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.parsers.logbooks import GetCaveLookup
from troggle.core.views_caves import MapLocations

survexblockroot = None   # root of the SurvexBlock tree; presumably set by the top-level loader (not visible in this chunk)
ROOTBLOCK = "rootblock"  # name for the synthetic block that parents all top-level included files

class SurvexLeg():
    """No longer a models.Model subclass, so no longer a database table

    Plain value-holder for one survey leg. The class attributes act as
    per-instance defaults; they are floats (immutable), so assigning on an
    instance never mutates the shared class values.
    """
    tape        = 0.0   # measured length -- the only field the parser actually keeps
    compass     = 0.0   # bearing in degrees (720.0 used as the 'invalid' sentinel); discarded after checks
    clino       = 0.0   # inclination in degrees (+90 up / -90 down); discarded after checks

class LoadingSurvex():
    """A 'survex block' is a *begin...*end set of cave data.
    A survex file can contain many begin-end blocks, which can be nested, and which can *include
    other survex files.
    A 'scansfolder' is what we today call a "survey scans folder" or a "wallet".
    """

    # --- precompiled regexes used throughout parsing ---
    rx_flags   = re.compile(r"not\s")   # rewrites "not <flag>" into the single token "not<flag>"
    rx_linelen = re.compile(r"[\d\-+.]+$")
    rx_team    = re.compile(r"(?i)(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$")
    rx_person  = re.compile(r"(?i) and | / |, | & | \+ |^both$|^none$")   # separators between people's names
    rx_qm      = re.compile(r'(?i)^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
#   remember there is also QM_PATTERN used in views_other and set in settings.py

    rx_cave    = re.compile(r'(?i)caves-(\d\d\d\d)/([-\d\w]+|\d\d\d\d-?\w+-\d+)')
    rx_comment = re.compile(r'([^;]*?)\s*(?:;\s*(.*))?\n?$')   # splits a line into (code, comment) parts
    rx_comminc = re.compile(r'(?i)^\*include[\s]*([-\w/]*).*$') # inserted by linear collate ;*include
    rx_commcni = re.compile(r'(?i)^\*edulcni[\s]*([-\w/]*).*$') # inserted by linear collate ;*edulcni
    rx_include = re.compile(r'(?i)^\s*(\*include[\s].*)$')
    rx_ref     = re.compile(r'(?i)^\s*ref[\s.:]*(\d+)\s*#\s*(X)?\s*(\d+)')
    rx_star    = re.compile(r'(?i)\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')   # any *command with its args, trailing comment stripped
    rx_starref = re.compile(r'(?i)^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')
    rx_argsref = re.compile(r'(?i)^[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$')

    # This interprets the survex "*data normal" command which sets out the order of the fields in the data, e.g.
    # *DATA normal from to length gradient bearing ignore ignore ignore ignore
    datastardefault = {"type":"normal", "from":0, "to":1, "tape":2, "compass":3, "clino":4}
    flagsdefault = {"duplicate":False, "surface":False, "splay":False, "any":False}

    # NOTE(review): everything below is a CLASS attribute, shared by every instance.
    # The methods rebind most of them (self.datastar = ..., creating instance
    # attributes), but any in-place mutation of these containers is shared state.
    datastar ={}
    flagsstar = {}
    survexlegsalllength = 0.0   # running total of tape lengths across all files
    survexlegsnumber = 0        # running count of successfully-parsed legs
    depthbegin = 0              # current *begin nesting depth
    depthinclude = 0            # current *include nesting depth
    stackbegin =[]
    flagsstack =[]
    datastack =[]
    stackinclude = []
    stacksvxfiles = []
    svxfileslist = []
    svxdirs = {}                # lowercased directory path -> SurvexDirectory
    survexdict = {} # each key is a directory, and its value is a list of files
    lineno = 0
    insp = ""                   # indent prefix for progress/diagnostic messages
    callcount = 0
    ignoreprefix = ["surface", "kataster", "fixedpts", "gpx"]
    ignorenoncave = ["caves-1623", "caves-1623/2007-neu"]
    includedfilename =""
    currentsurvexblock = None
    currentsurvexfile = None
    currentcave = None
    caverndate = None

    def __init__(self):
        """Prime the cave-lookup cache used by IdentifyCave().

        (The dead 'pass' after the assignment has been removed.)
        """
        self.caveslist = GetCaveLookup()

    def LoadSurvexIgnore(self, survexblock, line, cmd):
        """Handle survex *commands we deliberately do not parse.

        Known-and-harmless commands are skipped silently; commands we really
        should parse, and unknown commands, are logged as DataIssues.
        """
        if cmd == "require":
            # should we check survex version available for processing?
            return
        harmless = ("equate", "fix", "alias", "calibrate", "cs", "entrance", "export",
                    "case", "declination", "infer", "instrument", "sd", "units")
        if cmd in harmless:
            # we ignore all these, which is fine.
            return
        # Anything that reaches here is worth reporting.
        if cmd in ("include", "data", "flags", "title", "set", "ref"):
            message = "! Unparsed [*{}]: '{}' {}".format(cmd, line, survexblock.survexfile.path)
        else:
            message = "! Bad svx command: [*{}] {} ({}) {}".format(cmd, line, survexblock, survexblock.survexfile.path)
        print((self.insp+message))
        models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexTeam(self, survexblock, line):
        """Parse a '*team <role> <names>' line and create SurvexPersonRole rows.

        The role (Insts, Notes, Tape, ...) is group(1) of rx_team; the rest of
        the line is split into individual names on separators such as
        'and', '/', ',', '&'.
        """
        teammembers = [ ]
        mteammember = self.rx_team.match(line)
        if mteammember:
            for tm in self.rx_person.split(mteammember.group(2)):
                if tm:
                    # 'and' short-circuit: None when the block has no expedition,
                    # otherwise the PersonExpedition matching this name (may be None).
                    personexpedition = survexblock.expedition and GetPersonExpeditionNameLookup(survexblock.expedition).get(tm.lower())
                    if (personexpedition, tm) not in teammembers:  # de-duplicate names on the same line
                        teammembers.append((personexpedition, tm))
                        personrole = models_survex.SurvexPersonRole(survexblock=survexblock, nrole=mteammember.group(1).lower(), personexpedition=personexpedition, personname=tm)
                        personrole.save()
                        personrole.expeditionday = survexblock.expeditionday
                        if personexpedition:
                            personrole.person=personexpedition.person
                        # NOTE(review): saved twice -- the first save() looks redundant
                        # (no primary key is needed in between); confirm before removing.
                        personrole.save()

    def LoadSurvexDate(self, survexblock, line):
        """Parse a '*date yyyy.mm.dd' line onto the block and link its Expedition.

        Only a single full date (exactly 10 characters) is handled; date ranges
        and partial dates are silently ignored.
        """
        # we should make this a date range for everything
        if len(line) == 10:
            survexblock.date = make_aware(datetime.strptime(re.sub(r"\.", "-", line), '%Y-%m-%d'), get_current_timezone())
            expeditions = models.Expedition.objects.filter(year=line[:4])
            if expeditions:
                # NOTE(review): assert is stripped under 'python -O' -- duplicate
                # Expedition rows for a year would then pass silently.
                assert len(expeditions) == 1
                survexblock.expedition = expeditions[0]
                survexblock.expeditionday = survexblock.expedition.get_expedition_day(survexblock.date)
                survexblock.save()

    def LoadSurvexLineLeg(self, survexblock, sline, comment):
        """This reads compass, clino and tape data but only keeps the tape lengths,
        the rest is discarded after error-checking.

        Side effects: increments self.survexlegsnumber and adds the tape length
        to self.survexlegsalllength and survexblock.totalleglength. Returns None.
        """
        #print("! LEG datastar type:{}++{}\n{} ".format(self.datastar["type"].upper(), survexblock.survexfile.path, sline))
        # SKIP PASSAGES  *data passage
        # Only "normal" style data carries a leg we can measure; every other
        # *data style is skipped outright.
        if self.datastar["type"] == "passage":
            return
        if self.datastar["type"] == "cartesian":
            return
        if self.datastar["type"] == "nosurvey":
            return
        if self.datastar["type"] == "diving":
            return
        if self.datastar["type"] == "cylpolar":
            return
        #print(" !! LEG data lineno:{}\n !! sline:'{}'\n !! datastar['tape']: {}".format(self.lineno, sline, self.datastar["tape"]))
            # # For speed this should come first. But we are checking validity too.
            # if self.flagsstar["any"]:
                # survexleg.tape = invalid_tape
                # #return
        if self.datastar["type"] != "normal":
            return

        # sentinel values recorded when a field cannot be parsed
        invalid_clino = 180.0
        invalid_compass = 720.0
        invalid_tape = 0.0
        datastar = self.datastar # shallow copy: alias but the things inside are the same things
        survexleg = SurvexLeg()

        ls = sline.lower().split()

        try:
            # field position comes from the current *data ordering
            tape = ls[datastar["tape"]]
        except:
            # NOTE(review): bare except -- any IndexError/KeyError/TypeError lands here
            print(("! datastar parsing incorrect", survexblock.survexfile.path))
            print(("  datastar:", datastar))
            print(("  Line:", ls))
            message = ' ! datastar parsing incorrect in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survexleg', message=message)
            survexleg.tape = invalid_tape
            return
        # this next fails for two surface survey svx files which use / for decimal point 
        # e.g. '29/09' in the tape measurement, or use decimals but in brackets, e.g. (06.05)
        tape = tape.replace("(","")
        tape = tape.replace(")","")
        tape = tape.replace("/",".")
        try:
            survexleg.tape = float(tape)
            # only legs whose tape parses cleanly are counted
            self.survexlegsnumber += 1
        except ValueError:
            print(("! Tape misread in", survexblock.survexfile.path))
            print(("  datastar:", datastar))
            print(("  Line:", ls))
            message = ' ! Value Error: Tape misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survexleg', message=message)
            survexleg.tape = invalid_tape
        try:
            survexblock.totalleglength += survexleg.tape
            self.survexlegsalllength   += survexleg.tape
        except ValueError:
            message = ' ! Value Error: Tape length not added  %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survexleg', message=message)

        # Compass and clino are read only to validate the line; their values
        # die with the SurvexLeg object below.
        try:
            lcompass = ls[datastar["compass"]]
        except:
            print(("! Compass not found in", survexblock.survexfile.path))
            print(("  datastar:", datastar))
            print(("  Line:", ls))
            message = ' ! Value Error: Compass not found in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survexleg', message=message)
            lcompass = invalid_compass

        try:
            lclino = ls[datastar["clino"]]
        except:
            print(("! Clino misread in", survexblock.survexfile.path))
            print(("  datastar:", datastar))
            print(("  Line:", ls))
            message = ' ! Value Error: Clino misread in line %s in %s' % (ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survexleg', message=message)
            lclino = invalid_clino

        # Vertical legs: the compass reading is meaningless, so blank it out.
        if lclino == "up":
            survexleg.clino = 90.0
            lcompass = invalid_compass
        elif lclino == "down":
            survexleg.clino = -90.0
            lcompass = invalid_compass
        elif lclino == "-" or lclino == "level":
            # NOTE(review): "level" is given clino -90.0; 0.0 looks intended,
            # but the value is discarded below so this is currently harmless.
            survexleg.clino = -90.0

        try:
            survexleg.compass = float(lcompass)
        except ValueError:
            print(("! Compass misread in", survexblock.survexfile.path))
            print(("  datastar:", datastar))
            print(("  Line:", ls))
            message = " ! Value Error: lcompass:'{}' line {} in '{}'".format(lcompass, 
                    ls, survexblock.survexfile.path)
            models.DataIssue.objects.create(parser='survexleg', message=message)
            survexleg.compass = invalid_compass

        # For speed this should come first. But we are checking validity too.
        if self.flagsstar["any"]:
            pass
            # Comment out until we have the *data commands working!
            #survexleg.tape = invalid_tape
            #return

        # delete the object to save memory
        survexleg = None

    def LoadSurvexRef(self, survexblock, args):
        """Parse a wallet reference ('*ref' or '; ref' comment) and attach the
        matching ScansFolder to the block.

        The canonical wallet name is 'yyyy#Xnn' (year, optional X, zero-padded
        number); every failure mode is recorded as a DataIssue.
        """
        # *REF but also ; Ref      years from 1960 to 2039
        if len(args)< 4:
            message = " ! Empty or BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)
            return

        argsgps = self.rx_argsref.match(args)
        if argsgps:
            yr, letterx, wallet = argsgps.groups()
        else:
            message = " ! BAD *REF command '{}' in '{}'".format(args, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)
            return

        if not letterx:
            letterx = ""
        else:
            letterx = "X"
        if len(wallet)<2:
            wallet = "0" + wallet   # zero-pad single-digit wallet numbers
        # NOTE(review): assert is stripped under 'python -O'; out-of-range years
        # would then slip through unvalidated.
        assert (int(yr)>1960 and int(yr)<2039), "Wallet year out of bounds: %s" % yr
        refscan = "%s#%s%s" % (yr, letterx, wallet)
        try:
            if int(wallet)>100:
                message = " ! Wallet *REF {} - too big in '{}'".format(refscan, survexblock.survexfile.path)
                print((self.insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
        except:
            # bare except: int() failing on a non-numeric wallet lands here
            message = " ! Wallet *REF {} - not numeric in '{}'".format(refscan, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)
        manyscansfolders = models_survex.ScansFolder.objects.filter(walletname=refscan)
        if manyscansfolders:
            survexblock.scansfolder = manyscansfolders[0]
            survexblock.save()
            if len(manyscansfolders) > 1:
                message = " ! Wallet *REF {} - {} scan folders from DB search in {}".format(refscan, len(manyscansfolders), survexblock.survexfile.path)
                print((self.insp+message))
                models.DataIssue.objects.create(parser='survex', message=message)
        else:
            message = " ! Wallet *REF '{}' - NOT found in DB search '{}'".format(refscan, survexblock.survexfile.path)
            print((self.insp+message))
            models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexQM(self, survexblock, qmline):
        """Create a QM (question mark, i.e. a lead) from a ';QM...' comment.

        qmline is a match from rx_qm: group(1) serial number, group(2) grade
        A-D/X, group(3).group(4) nearest station, groups(6,7) resolution
        station or '-', group(8) free-text description.

        Spec of QM in SVX files:
        ;Serial number   grade(A/B/C/D/X)  nearest-station  resolution-station description
        ;QM1	a	hobnob_hallway_2.42	hobnob-hallway_3.42	junction of keyhole passage
        ;QM1	a	hobnob_hallway_2.42	-	junction of keyhole passage
        """
        insp = self.insp
        qm_no = qmline.group(1)
        qm_grade = qmline.group(2)
        qm_nearest = ""   # initialised so it can never be unbound in the except clause below
        if qmline.group(3):  # usual closest survey station
            qm_nearest = qmline.group(3)
            if qmline.group(4):
                qm_nearest = qm_nearest +"."+ qmline.group(4)

        if qmline.group(6) and qmline.group(6) != '-':
            qm_resolve_station = qmline.group(6)
            if qmline.group(7):
                qm_resolve_station = qm_resolve_station +"."+ qmline.group(7)
        else:
            qm_resolve_station = ""
        qm_notes = qmline.group(8)

        # NB none of the SurveyStations are in the DB now, so if we want to link to aSurvexStation
        # we would have to create one. But that is not obligatory and no QMs loaded from CSVs have one
        try:
            # NOTE(review): the resolution station is stored in
            # nearest_station_description -- looks like a field mix-up; confirm
            # against the QM model before changing.
            qm = models_caves.QM.objects.create(number=qm_no,
                                            # nearest_station=a_survex_station_object, # can be null
                                              nearest_station_description=qm_resolve_station,
                                              nearest_station_name=qm_nearest,
                                              grade=qm_grade.upper(),
                                              location_description=qm_notes)
            # Fixed: was 'qm.save' (missing parentheses) -- a bare attribute
            # access that never called save(). objects.create() already saves,
            # so this is belt-and-braces.
            qm.save()
            # message = " ! QM{} '{}' CREATED in DB in '{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
            # print(insp+message)
            # models.DataIssue.objects.create(parser='survex', message=message)
        except:
            message = " ! QM{} FAIL to create {} in'{}'".format(qm_no, qm_nearest,survexblock.survexfile.path)
            print(insp+message)
            models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexDataCmd(self,survexblock,args):
        """Sets the order for data elements in this and following blocks, e.g.
        *data normal from to compass clino tape
        *data normal from to tape compass clino
        We are only collecting length data so we are disinterested in from, to, LRUD etc.

        Updates self.datastar; never mutates self.datastardefault.
        """
        # datastardefault = { # included here as reference to help understand the code
                        # "type":"normal",
                        # "t":"leg",
                        # "from":0,
                        # "to":1,
                        # "tape":2,
                        # "compass":3,
                        # "clino":4}
        if not args.strip():
            # naked '*data' which is relevant only for passages. Ignore. Continue with previous settings.
            return
        # DEFAULT | NORMAL | CARTESIAN| NOSURVEY |PASSAGE | TOPOFIL | CYLPOLAR | DIVING
        ls = args.lower().split()
        if ls[0] == "default":
            self.datastar = copy.deepcopy(self.datastardefault)
        elif ls[0] == "normal" or ls[0] == "topofil":
            # BUG FIX: this used to do 'datastar = self.datastardefault', which
            # aliased -- and then mutated -- the shared default dict, corrupting
            # the field ordering for every later *data default/normal block.
            # Always work on a fresh copy.
            datastar = copy.deepcopy(self.datastardefault)
            # NOTE(review): the original also guarded on "from"/"to" being keys
            # of the default dict, which was always true (dead code). If the
            # intent was to validate the *data line itself, that would be
            # checking membership in ls -- confirm before adding such a check.
            # ls = ["normal", "from", "to", "tape", "compass", "clino" ]
            for i in range(1, len(ls)): # ls[0] is "normal"
                if ls[i] in ["bearing","compass"]:
                    datastar["compass"] = i-1
                if ls[i] in ["clino","gradient"]:
                    datastar["clino"] = i-1
                if ls[i] in ["tape","length"]:
                    datastar["tape"] = i-1
            self.datastar = datastar
            return
        elif ls[0] == "cartesian" or ls[0] == "nosurvey" or ls[0] == "diving" or ls[0] == "cylpolar" or ls[0] == "passage":
            message = " ! - *data {}  blocks ignored. {}|{}"   '{}' .format(ls[0].upper(), survexblock.name, survexblock.survexpath, args)
            print(message)
            #print(message,file=sys.stderr)
            #models.DataIssue.objects.create(parser='survex', message=message)
            self.datastar["type"] = ls[0]
        else:
            message = " ! - Unrecognised *data statement '{}' {}|{}".format(args, survexblock.name, survexblock.survexpath)
            print(message)
            print(message,file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)

    def LoadSurvexFlags(self, args):
        """Interpret a '*flags' line.

        Valid flags are DUPLICATE, SPLAY and SURFACE; each may be prefixed
        with NOT to switch it off. Parsing always restarts from the defaults,
        so flags do not accumulate across successive *flags lines.
        """
        self.flagsstar = copy.deepcopy(self.flagsdefault)
        # squash "not <flag>" into the single token "not<flag>" before splitting
        tokens = self.rx_flags.sub("not", args).split()

        for flag in ("duplicate", "surface", "splay"):
            if flag in tokens:
                self.flagsstar[flag] = True
            if "not" + flag in tokens:
                self.flagsstar[flag] = False

        # if self.flagsstar["duplicate"] == True or self.flagsstar["surface"] == True or self.flagsstar["splay"] == True:
        # actually we do want to count duplicates as this is for "effort expended in surveying underground"
        if self.flagsstar["surface"] or self.flagsstar["splay"]:
            self.flagsstar["any"] = True

    def IdentifyCave(self, cavepath):
        """Return the Cave for a survex path, or None when the path is not a cave.

        Results are memoised in self.caveslist. BUG FIX: the memo was written
        under the raw-case path but read back under the lowercased path, so the
        cache entry added here was never hit; we now key it lowercased.
        """
        if cavepath.lower() in self.caveslist:
            return self.caveslist[cavepath.lower()]
        # TO DO - some of this is already done in generating self.caveslist so simplify this
        # esp. as it is in a loop.
        path_match = self.rx_cave.search(cavepath)
        if path_match:
            sluggy = '{}-{}'.format(path_match.group(1), path_match.group(2))
            guesses = [sluggy.lower(), path_match.group(2).lower()]
            for g in guesses:
                if g in self.caveslist:
                    self.caveslist[cavepath.lower()] = self.caveslist[g]
                    return self.caveslist[g]
            print('    ! Failed to find cave for {}'.format(cavepath.lower()))
            # falls through, returning None implicitly
        else:
            # not a cave, but that is fine.
            # print('    ! No regex(standard identifier) cave match for %s' % cavepath.lower())
            return None

    def GetSurvexDirectory(self, headpath):
        """This creates a SurvexDirectory if it has not been seen before, and on creation
        it sets the primarysurvexfile. This is correct as it should be set on the first file
        in the directory, where first is defined by the *include ordering. Which is what we
        are doing.
        """
        if not headpath:
            # NOTE(review): raises KeyError unless a directory keyed "" has
            # already been cached -- confirm the root entry is seeded elsewhere.
            return self.svxdirs[""]
        if headpath.lower() not in self.svxdirs:
            # first sighting of this directory: create and persist it
            self.svxdirs[headpath.lower()] = models_survex.SurvexDirectory(path=headpath, primarysurvexfile=self.currentsurvexfile)
            self.svxdirs[headpath.lower()].save()
            self.survexdict[self.svxdirs[headpath.lower()]] = [] # list of the files in the directory
        return self.svxdirs[headpath.lower()]

    def ReportNonCaveIncludes(self, headpath, includelabel):
        """Record a DataIssue for a *include that is not a cave, unless the
        path is a known surface/kataster/gps collection we deliberately skip.
        """
        if headpath in self.ignorenoncave:
            return
        if any(headpath.startswith(prefix) for prefix in self.ignoreprefix):
            return
        message = " ! {} is not a cave. (while creating '{}' sfile & sdirectory)".format(headpath, includelabel)
        print("\n"+message)
        print("\n"+message,file=sys.stderr)
        models.DataIssue.objects.create(parser='survex', message=message)
        
    def LoadSurvexFile(self, svxid):
        """Creates SurvexFile in the database, and SurvexDirectory if needed
        with links to 'cave'
        Creates a new current survexfile and valid .survexdirectory
        The survexblock passed-in is not necessarily the parent. FIX THIS.

        (Only change: the loop variables were named 'dict', shadowing the
        builtin; renamed to 'datadict'.)
        """
        # self.datastar = self.datastardefault
        print(" # datastack in  LoadSurvexFile:{} 'type':".format(svxid), end="")
        for datadict in self.datastack:
            print("'{}'   ".format(datadict["type"].upper()), end="")
        print("")


        depth = " " * self.depthbegin
        print("{:2}{}   - NEW survexfile:'{}'".format(self.depthbegin, depth, svxid))
        headpath = os.path.dirname(svxid)

        newfile = models_survex.SurvexFile(path=svxid)
        newfile.save() # until we do this there is no internal id so no foreign key works
        self.currentsurvexfile = newfile
        newdirectory = self.GetSurvexDirectory(headpath)
        newdirectory.save()
        newfile.survexdirectory = newdirectory
        # NOTE(review): newdirectory is dereferenced here, before the not-None
        # check below -- if GetSurvexDirectory could return None this would
        # raise first; confirm and reorder if so.
        self.survexdict[newdirectory].append(newfile)
        cave = self.IdentifyCave(headpath) # cave already exists in db

        if not newdirectory:
            message = " ! 'None' SurvexDirectory returned from GetSurvexDirectory({})".format(headpath)
            print(message)
            print(message,file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)

        if cave:
            newdirectory.cave = cave
            newfile.cave   = cave
            #print("\n"+str(newdirectory.cave),file=sys.stderr)
        else:
            self.ReportNonCaveIncludes(headpath, svxid)

        if not newfile.survexdirectory:
            message = " ! SurvexDirectory NOT SET in new SurvexFile {} ".format(svxid)
            print(message)
            print(message,file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
        self.currentsurvexfile.save() # django insists on this although it is already saved !?
        try:
            newdirectory.save()
        except:
            print(newdirectory, file=sys.stderr)
            print(newdirectory.primarysurvexfile, file=sys.stderr)
            raise

        print(" # datastack end LoadSurvexFile:{} 'type':".format(svxid), end="")
        for datadict in self.datastack:
            print("'{}'   ".format(datadict["type"].upper()), end="")
        print("")

    def ProcessIncludeLine(self, included):
        """Handle a ';*include' marker: load the named survex file and push
        it onto the stack of files currently being processed.
        """
        filepath = included.groups()[0]
        self.LoadSurvexFile(filepath)
        self.stacksvxfiles.append(self.currentsurvexfile)

    def ProcessEdulcniLine(self, edulcni):
        """Handle a ';*edulcni' marker (end of an included file): save the
        finished survexfile and pop back to the including file.

        (Removed an unused local holding the matched filename and the dead
        commented-out debug print that was its only consumer.)
        """
        self.currentsurvexfile.save()
        self.currentsurvexfile = self.stacksvxfiles.pop()

    def LoadSurvexComment(self, survexblock, comment):
        """Dispatch a survex comment line.

        Only four comment forms matter: ';ref' (wallet reference), ';QM...'
        (question mark / lead), and the collation markers ';*include' /
        ';*edulcni' which track entering and leaving included files.
        All other comments are ignored.
        """
        refline = self.rx_ref.match(comment)
        if refline:
            # raw string fix: '\s' in a plain literal is an invalid escape
            # sequence (DeprecationWarning); the pattern value is unchanged.
            comment = re.sub(r'(?i)\s*ref[.;]?', "", comment.strip())
            self.LoadSurvexRef(survexblock, comment)

        qmline = self.rx_qm.match(comment)
        if qmline:
            self.LoadSurvexQM(survexblock, qmline)

        included = self.rx_comminc.match(comment)
        # ;*include means 'we have been included'; whereas *include means 'proceed to include'
        if included:
            self.ProcessIncludeLine(included)

        edulcni = self.rx_commcni.match(comment)
        # ;*edulcni means we are returning from an included file
        if edulcni:
            self.ProcessEdulcniLine(edulcni)

    def LoadSurvexSetup(self,survexblock, survexfile):
        """Reset per-file parser state and attach a Cave to survexfile when its
        path matches the standard caves-YYYY/NNN layout.

        (Removed the unused local 'blocklegs'; the *data interpretation is now
        reset from a deep copy so later mutation cannot corrupt the default,
        consistent with LinearLoad.)
        """
        self.depthbegin = 0
        self.datastar = copy.deepcopy(self.datastardefault)
        print(self.insp+"  - MEM:{:.3f} Reading. parent:{}  <> {} ".format(models.get_process_memory(),survexblock.survexfile.path, survexfile.path))
        self.lineno = 0
        sys.stderr.flush()
        # progress ticker: one dot per 10 calls, newline every 500
        self.callcount +=1
        if self.callcount % 10 ==0 :
            print(".", file=sys.stderr,end='')
        if self.callcount % 500 ==0 :
            print("\n", file=sys.stderr,end='')
        # Try to find the cave in the DB if not use the string as before
        path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", survexblock.survexfile.path)
        if path_match:
            pos_cave = '%s-%s' % (path_match.group(1), path_match.group(2))
            cave = models_caves.getCaveByReference(pos_cave)
            if cave:
                survexfile.cave = cave

    def LinearLoad(self, survexblock, path, svxlines):
        """Loads a single survex file. Usually used to import all the survex files which have been collated
        into a single file. Loads the begin/end blocks using a stack for labels.
        """
        self.relativefilename = path
        cave = self.IdentifyCave(path) # this will produce null for survex files which are geographic collections
        
        self.currentsurvexfile = survexblock.survexfile
        self.currentsurvexfile.save() # django insists on this although it is already saved !?

        self.datastar = copy.deepcopy(self.datastardefault)
        self.flagsstar = copy.deepcopy(self.flagsdefault)
        blkid = None
        pathlist = None
        args = None
        previousnlegs = None
        oldflags = None
        blockcount = 0
        self.lineno = 0

        def tickle():
            """Progress ticker: a dot per 10 blocks, memory report every 200."""
            nonlocal blockcount

            blockcount +=1
            if blockcount % 10 ==0 :
                print(".", file=sys.stderr,end='')
            if blockcount % 200 ==0 :
                print("\n", file=sys.stderr,end='')
                print(" - MEM:{:7.3f} MB in use".format(models.get_process_memory()),file=sys.stderr)
                print("    ", file=sys.stderr,end='')
            sys.stderr.flush()

        def printbegin():
            """Log a *begin and rebuild pathlist: the dotted chain of the
            enclosing block names, used as the new block's survexpath."""
            nonlocal blkid
            nonlocal pathlist

            depth = " " * self.depthbegin
            print("{:2}{}   - Begin for :'{}'".format(self.depthbegin,depth, blkid))
            pathlist = ""
            for id in self.stackbegin:   # nb 'id' shadows the builtin
                if len(id) > 0:
                    pathlist += "." + id

        def printend():
            """Log an *end and record how many legs this block contributed
            (difference of the global leg counter since the matching *begin)."""
            nonlocal args
            nonlocal previousnlegs

            depth = " " * self.depthbegin
            print("{:2}{}   - End   from:'{}'".format(self.depthbegin,depth,args))
            legsinblock = self.survexlegsnumber - previousnlegs
            print("{:2}{}   - LEGS: {} (previous: {}, now:{})".format(self.depthbegin,
                depth, legsinblock, previousnlegs, self.survexlegsnumber))
            survexblock.legsall = legsinblock

        def pushblock():
            """Snapshot the current *data and *flags interpretation onto their
            stacks so the matching *end can restore them (survex scopes both
            settings per *begin block)."""
            nonlocal blkid

            print(" # datastack at  1 *begin {} 'type':".format(blkid), end="")
            for dict in self.datastack:
                print("'{}'   ".format(dict["type"].upper()), end="")
            print("")
            print("'{}' self.datastar  ".format(self.datastar["type"].upper()))
            # ------------ * DATA
            self.datastack.append(copy.deepcopy(self.datastar))
            # ------------ * DATA
            print(" # datastack at  2 *begin {} 'type':".format(blkid), end="")
            for dict in self.datastack:
                print("'{}'   ".format(dict["type"].upper()), end="")
            print("")
            print("'{}' self.datastar  ".format(self.datastar["type"].upper()))
            # ------------ * FLAGS
            self.flagsstack.append(copy.deepcopy(self.flagsstar))
            # ------------ * FLAGS

        def popblock():
            """Restore the *data and *flags interpretation saved by the
            matching pushblock() at *begin."""
            nonlocal blkid
            nonlocal oldflags

            print(" # datastack  at  *end '{} 'type':".format(blkid), end="")
            for dict in self.datastack:
                print("'{}'   ".format(dict["type"].upper()), end="")
            print("")
            print("'{}' self.datastar  ".format(self.datastar["type"].upper()))
            # ------------ * DATA
            self.datastar  = copy.deepcopy(self.datastack.pop())
            # ------------ * DATA
            print(" # datastack  after *end '{} 'type':".format(blkid), end="")
            for dict in self.datastack:
                print("'{}'   ".format(dict["type"].upper()), end="")
            print("")
            print("'{}' self.datastar  ".format(self.datastar["type"].upper()))
            # ------------ * FLAGS
            self.flagsstar = copy.deepcopy(self.flagsstack.pop())
            # ------------ * FLAGS
            # NOTE(review): oldflags is initialised to None in LinearLoad and is
            # only set by a *flags or a previous *end -- a file whose first flag
            # event is *end would raise TypeError here; confirm ordering.
            if oldflags["any"] != self.flagsstar["any"]:
                print(" # POP  'any' flag now:'{}'  was:{} ".format(self.flagsstar["any"], oldflags["any"]))

        def starstatement(mstar):
            nonlocal survexblock
            nonlocal blkid
            nonlocal pathlist
            nonlocal args
            nonlocal previousnlegs
            nonlocal oldflags

            cmd, args = mstar.groups()
            cmd = cmd.lower()

            # ------------------------BEGIN
            if re.match("begin$(?i)", cmd):
                blkid = args.lower()
                # PUSH state ++++++++++++++
                self.stackbegin.append(blkid)
                pushblock()
                # PUSH state ++++++++++++++
                previousnlegs = self.survexlegsnumber
                printbegin()
                newsurvexblock = models_survex.SurvexBlock(name=blkid, parent=survexblock, 
                        survexpath=pathlist, 
                        cave=self.currentcave, survexfile=self.currentsurvexfile, 
                        legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
                newsurvexblock.save()
                newsurvexblock.title = "("+survexblock.title+")" # copy parent inititally
                survexblock = newsurvexblock
                survexblock.save() # django insists on this , but we want to save at the end !
                tickle()

            # ---------------------------END
            elif re.match("end$(?i)", cmd):
                printend()
                try:
                    survexblock.parent.save() # django insists on this although it is already saved !?
                except:
                    print(survexblock.parent, file=sys.stderr)
                    raise
                try:
                    survexblock.save() # save to db at end of block
                except:
                    print(survexblock, file=sys.stderr)
                    raise
               # POP  state ++++++++++++++
                popblock()
                blkid = self.stackbegin.pop()
                # POP  state ++++++++++++++
                self.currentsurvexblock = survexblock.parent
                survexblock = survexblock.parent
                oldflags = self.flagsstar
                self.depthbegin -= 1

            # -----------------------------
            elif re.match("(?i)title$", cmd):
                survexblock.title = args # block has own title, overwrite that from parent
            elif re.match("(?i)ref$", cmd):
                self.LoadSurvexRef(survexblock, args)
            elif re.match("(?i)flags$", cmd):
                oldflags = self.flagsstar
                self.LoadSurvexFlags(args)
                if oldflags["any"] != self.flagsstar["any"]:
                    print(" # CHANGE 'any' flag now:'{}'  was:{} ".format(self.flagsstar["any"], oldflags["any"]))

            elif re.match("(?i)data$", cmd):
                self.LoadSurvexDataCmd(survexblock, args)
            elif re.match("(?i)date$", cmd):
                self.LoadSurvexDate(survexblock, args)
            elif re.match("(?i)team$", cmd):
                self.LoadSurvexTeam(survexblock, args)
            elif re.match("(?i)set$", cmd) and re.match("(?i)names", args):
                pass
            elif re.match("(?i)include$", cmd):
                message = " ! -ERROR *include command not expected here {}. Re-run a full Survex import.".format(path)
                print(message)
                print(message,file=sys.stderr)
                models.DataIssue.objects.create(parser='survex', message=message)
            else:
                self.LoadSurvexIgnore(survexblock, args, cmd)


        # Main line-by-line pass over the collated survex source.
        # Each line is split into data and comment by rx_comment (which always
        # matches; comment is None when there is no ';' part).
        for svxline in svxlines:
            self.lineno += 1
            sline, comment = self.rx_comment.match(svxline).groups()
            if comment:
                # this catches the ;*include NEWFILE and ;*edulcni ENDOFFILE lines too
                self.LoadSurvexComment(survexblock, comment) 

            if not sline:
                continue # skip blank lines

            # detect a star command
            mstar = self.rx_star.match(sline)
            if mstar: # yes we are reading a *cmd
                starstatement(mstar)
            else: # not a *cmd so we are reading data OR rx_comment failed
                self.LoadSurvexLineLeg(survexblock, sline, comment)
 

    def RecursiveScan(self, survexblock, path, fin, flinear, fcollate):
        """Follows the *include links in all the survex files from the root file 1623.svx
        and reads only the *include and *begin and *end statements. It produces a linearised
        list of the include tree in flinear, and writes a collated copy of all the source
        to fcollate with each *include line replaced by ;*include / ;*edulcni markers.

        survexblock - current SurvexBlock, passed through to recursive calls unchanged
        path        - path of the file being scanned, relative to SURVEX_DATA
        fin         - open file object for path
        flinear     - log file receiving the linearised include tree
        fcollate    - output file receiving the collated survex source
        """
        indent = " " * self.depthinclude
        sys.stderr.flush()
        self.callcount += 1
        # progress ticker on stderr: one dot per 10 files, newline every 500
        if self.callcount % 10 == 0:
            print(".", file=sys.stderr, end='')
        if self.callcount % 500 == 0:
            print("\n    ", file=sys.stderr, end='')

        if path in self.svxfileslist:
            message = " * Warning. Duplicate in *include list at:{} depth:{} file:{}".format(self.callcount, self.depthinclude, path)
            print(message)
            print(message, file=flinear)
            print("\n"+message, file=sys.stderr)
            models.DataIssue.objects.create(parser='survex', message=message)
            # guard against circular *include chains: give up after 20 repeats
            if self.svxfileslist.count(path) > 20:
                message = " ! ERROR. Survex file already seen 20x. Probably an infinite loop so fix your *include statements that include this. Aborting. {}".format(path)
                print(message)
                print(message, file=flinear)
                print(message, file=sys.stderr)
                models.DataIssue.objects.create(parser='survex', message=message)
                return
        self.svxfileslist.append(path)

        svxlines = fin.read().splitlines()
        for svxline in svxlines:
            self.lineno += 1
            includestmt = self.rx_include.match(svxline)
            if not includestmt:
                fcollate.write("{}\n".format(svxline.strip()))

            sline, comment = self.rx_comment.match(svxline.strip()).groups()
            mstar = self.rx_star.match(sline)
            if mstar: # yes we are reading a *cmd
                cmd, args = mstar.groups()
                cmd = cmd.lower()
                if re.match("(?i)include$", cmd):
                    # *include paths are relative to the including file; the .svx suffix is optional
                    includepath = os.path.normpath(os.path.join(os.path.split(path)[0], re.sub(r"\.svx$", "", args)))

                    fullpath = os.path.join(settings.SURVEX_DATA, includepath + ".svx")
                    self.RunSurvexIfNeeded(os.path.join(settings.SURVEX_DATA, includepath))
                    if os.path.isfile(fullpath):
                        #--------------------------------------------------------
                        self.depthinclude += 1
                        fininclude = open(fullpath, 'r')
                        fcollate.write(";*include {}\n".format(includepath))
                        flinear.write("{:2} {} *include {}\n".format(self.depthinclude, indent, includepath))
                        push = includepath.lower()
                        self.stackinclude.append(push)
                        #-----------------
                        self.RecursiveScan(survexblock, includepath, fininclude, flinear, fcollate)
                        #-----------------
                        pop = self.stackinclude.pop()
                        if pop != push:
                            # bugfix: format string previously had a single {} so push and
                            # the remaining stack were silently dropped from the message
                            message = "!! ERROR mismatch *include pop!=push  '{}'!='{}'\n{}".format(pop, push, self.stackinclude)
                            print(message)
                            print(message, file=flinear)
                            print(message, file=sys.stderr)
                            models.DataIssue.objects.create(parser='survex', message=message)
                        flinear.write("{:2} {} *edulcni {}\n".format(self.depthinclude, indent, pop))
                        fcollate.write(";*edulcni {}\n".format(pop))
                        fininclude.close()
                        self.depthinclude -= 1
                        #--------------------------------------------------------
                    else:
                        message = "    ! ERROR *include file not found for:'{}'".format(includepath)
                        print(message)
                        print(message, file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)
                elif re.match("(?i)begin$", cmd):
                    self.depthbegin += 1
                    depth = " " * self.depthbegin
                    # an anonymous *begin pushes a single space so the pop still balances
                    if args:
                        pushargs = args
                    else:
                        pushargs = " "
                    self.stackbegin.append(pushargs.lower())
                    flinear.write("            {:2} {} *begin {}\n".format(self.depthbegin, depth, args))
                elif re.match("(?i)end$", cmd):
                    depth = " " * self.depthbegin
                    flinear.write("            {:2} {} *end   {}\n".format(self.depthbegin, depth, args))
                    if not args:
                        args = " "
                    popargs = self.stackbegin.pop()
                    if popargs != args.lower():
                        message = "!! ERROR mismatch in BEGIN/END labels pop!=push '{}'!='{}'\n{}".format(popargs, args, self.stackbegin)
                        print(message)
                        print(message, file=flinear)
                        print(message, file=sys.stderr)
                        models.DataIssue.objects.create(parser='survex', message=message)

                    self.depthbegin -= 1
                elif re.match("(?i)title$", cmd):
                    depth = " " * self.depthbegin
                    flinear.write("                    {:2} {} *title {}\n".format(self.depthbegin, depth, args))

    def RunSurvexIfNeeded(self, fullpath):
        """(Re)generate the cavern .log for fullpath.svx when it is missing or
        stale: older than its .svx source, more than 60 days old, older than
        the cavern binary itself, or when ChaosMonkey(200) fires.
        """
        now = time.time()
        # sentinel timestamps: treat everything as a year old until measured
        a_year_ago = now - 365 * 24 * 3600
        cav_t = a_year_ago
        log_t = a_year_ago
        svx_t = a_year_ago

        def runcavern():
            # report the ages (in days) that triggered the regeneration
            print(" -  Regenerating stale (or chaos-monkeyed) cavern .log and .3d for '{}'\n    days svx old: {:.1f}  cav old:{:.1f}   log old: {:.1f}".
                format(fullpath, (svx_t - log_t)/(24*3600), (cav_t - log_t)/(24*3600), (now - log_t)/(24*3600)))
            call([settings.CAVERN, "--log", "--output={}".format(fullpath), "{}.svx".format(fullpath)])

        svxpath = fullpath + ".svx"
        logpath = fullpath + ".log"

        # no log at all: cavern has never been run here
        if not os.path.isfile(logpath):
            runcavern()
            return

        if not self.caverndate:
            # cache the modification time of the cavern binary itself
            which_cavern = run(["which", "{}".format(settings.CAVERN)],
                               capture_output=True, check=True, text=True)
            self.caverndate = os.path.getmtime(which_cavern.stdout.strip())
        cav_t = self.caverndate
        log_t = os.path.getmtime(logpath)
        svx_t = os.path.getmtime(svxpath)
        now = time.time()

        # rebuild when: .svx newer than .log, .log over 60 days old, cavern
        # binary newer than .log, or the ChaosMonkey strikes (short-circuit
        # keeps the ChaosMonkey call conditional, as before)
        if (svx_t > log_t
                or now - log_t > 60 * 24 * 60 * 60
                or cav_t > log_t
                or ChaosMonkey(200)):
            runcavern()

def FindAndLoadSurvex(survexblockroot):
    """Follows the *include links recursively to find all the survex files,
    collating every line of source into one file, then loads all the survex
    blocks from that collated file in a single linear pass.

    Pass 1 (RecursiveScan) writes the collated source to _<topname>.svx and a
    linearised include tree to svxlinear.log. Pass 2 (LinearLoad) parses the
    collated file into SurvexBlock records.

    Returns (survexlegsnumber, survexlegsalllength) for the whole tree.
    While running, sys.stdout is redirected to svxblks.log; progress messages
    go to stderr.
    """
    print('  - redirecting stdout to svxblks.log...')
    stdout_orig = sys.stdout
    # Redirect sys.stdout to the file
    sys.stdout = open('svxblks.log', 'w')

    print('  - SCANNING All Survex Blocks...',file=sys.stderr)
    survexfileroot = survexblockroot.survexfile # i.e. SURVEX_TOPNAME only
    collatefilename = "_" + survexfileroot.path + ".svx"

    # ---- Pass 1: recursive scan of the *include tree --------------------
    svx_scan = LoadingSurvex()
    svx_scan.callcount = 0
    svx_scan.depthinclude = 0
    fullpathtotop = os.path.join(survexfileroot.survexdirectory.path, survexfileroot.path)
    print("  - RunSurvexIfNeeded cavern on '{}'".format(fullpathtotop), file=sys.stderr)
    svx_scan.RunSurvexIfNeeded(fullpathtotop)
    indent=""
    fcollate = open(collatefilename, 'w')

    mem0 = models.get_process_memory()
    print(" - MEM:{:7.2f} MB START".format(mem0),file=sys.stderr)
    flinear = open('svxlinear.log', 'w')
    flinear.write("    - MEM:{:7.2f} MB START {}\n".format(mem0,survexfileroot.path))
    print("    ", file=sys.stderr,end='')

    finroot = survexfileroot.OpenFile()
    fcollate.write(";*include {}\n".format(survexfileroot.path))
    flinear.write("{:2} {} *include {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    #----------------------------------------------------------------
    svx_scan.RecursiveScan(survexblockroot, survexfileroot.path, finroot, flinear, fcollate)
    #----------------------------------------------------------------
    finroot.close() # bugfix: root file handle was previously never closed
    flinear.write("{:2} {} *edulcni {}\n".format(svx_scan.depthinclude, indent, survexfileroot.path))
    fcollate.write(";*edulcni {}\n".format(survexfileroot.path))
    mem1 = models.get_process_memory()
    flinear.write("\n    - MEM:{:.2f} MB STOP {}\n".format(mem1,survexfileroot.path))
    flinear.write("    - MEM:{:.3f} MB USED\n".format(mem1-mem0))
    svxfileslist = svx_scan.svxfileslist
    flinear.write("    - {:,} survex files in linear include list \n".format(len(svxfileslist)))
    flinear.close()
    fcollate.close()
    svx_scan = None # Hmm. Does this actually delete all the instance variables if they are lists, dicts etc.?
    print("\n -  {:,} survex files in linear include list \n".format(len(svxfileslist)),file=sys.stderr)

    mem1 = models.get_process_memory()
    # bugfix: this line previously printed mem0 (the START figure) under an END label
    print(" - MEM:{:7.2f} MB END ".format(mem1),file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)
    svxfileslist = [] # free memory
 
    # Before doing this, it would be good to identify the *equate and *entrance we need that are relevant to the
    # entrance locations currently loaded after this by LoadPos(), but could better be done before ?
    # look in MapLocations() for how we find the entrances
   
    # ---- Pass 2: linear load of every block from the collated file ------
    print('\n  - Loading All Survex Blocks (LinearLoad)',file=sys.stderr)
    svx_load = LoadingSurvex()

    svx_load.survexdict[survexfileroot.survexdirectory] = []
    svx_load.survexdict[survexfileroot.survexdirectory].append(survexfileroot)
    svx_load.svxdirs[""] = survexfileroot.survexdirectory
    with open(collatefilename, "r") as fcollate:
            svxlines = fcollate.read().splitlines()
    #----------------------------------------------------------------
    svx_load.LinearLoad(survexblockroot,survexfileroot.path, svxlines)
    #----------------------------------------------------------------

    print("\n - MEM:{:7.2f} MB STOP".format(mem1),file=sys.stderr)
    print(" - MEM:{:7.3f} MB USED".format(mem1-mem0),file=sys.stderr)

    survexlegsnumber = svx_load.survexlegsnumber
    survexlegsalllength = svx_load.survexlegsalllength
    mem1 = models.get_process_memory()

    print("  - Number of SurvexDirectories: {}".format(len(svx_load.survexdict)))
    tf=0
    for d in svx_load.survexdict:
        tf += len(svx_load.survexdict[d])
    print("  - Number of SurvexFiles: {}".format(tf))
    svx_load = None

    # Close the logging file, Restore sys.stdout to our old saved file handle
    sys.stdout.close()
    print("+", file=sys.stderr)
    sys.stderr.flush()
    sys.stdout = stdout_orig
    return (survexlegsnumber, survexlegsalllength)

def MakeSurvexFileRoot():
    """Create and return the root SurvexFile record.

    The returned file_object.path is SURVEX_TOPNAME and it is attached to a
    SurvexDirectory whose path is SURVEX_DATA. File and directory reference
    each other, which is why the file record has to be saved twice.
    """
    root_file = models_survex.SurvexFile(path=settings.SURVEX_TOPNAME, cave=None)
    root_file.save()
    root_dir = models_survex.SurvexDirectory(path=settings.SURVEX_DATA, cave=None, primarysurvexfile=root_file)
    root_dir.save()
    # i.e. SURVEX_DATA/SURVEX_TOPNAME - mutually dependent objects need a double-save
    root_file.survexdirectory = root_dir
    root_file.save()
    return root_file

def LoadSurvexBlocks():
    """Flush all survex-derived records from the database and reload everything
    from the survex source files, hanging the whole tree off a fresh root block.
    """
    print(' - Flushing All Survex Blocks...')
    # wipe every survex-derived table in one sweep
    for model in (models_survex.SurvexBlock,
                  models_survex.SurvexFile,
                  models_survex.SurvexDirectory,
                  models_survex.SurvexPersonRole,
                  models_survex.SurvexStation):
        model.objects.all().delete()
    print(" - survex Data Issues flushed")
    models.DataIssue.objects.filter(parser='survex').delete()

    survexfileroot = MakeSurvexFileRoot()
    # make a block_object associated with a file_object.path = SURVEX_TOPNAME;
    # it is the first block created, so id=1
    survexblockroot = models_survex.SurvexBlock(name=ROOTBLOCK, survexpath="", cave=None, survexfile=survexfileroot,
            legsall=0, legssplay=0, legssurfc=0, totalleglength=0.0)
    survexblockroot.save()

    print(' - Loading Survex Blocks...')
    memstart = models.get_process_memory()
    #----------------------------------------------------------------
    survexlegsnumber, survexlegsalllength = FindAndLoadSurvex(survexblockroot)
    #----------------------------------------------------------------
    memend = models.get_process_memory()
    print(" - MEMORY start:{:.3f} MB end:{:.3f} MB increase={:.3f} MB".format(memstart,memend, memend-memstart))

    # record the grand totals on the root block
    survexblockroot.totalleglength = survexlegsalllength
    survexblockroot.legsall = survexlegsnumber
    survexblockroot.save()

    print(" - total number of survex legs: {}".format(survexlegsnumber))
    print(" - total leg lengths loaded: {}m".format(survexlegsalllength))
    print(' - Loaded All Survex Blocks.')

# Matches one line of a survex .pos file: "( x, y, z ) station.path.name"
# groups 1-3 are the signed decimal coordinates, group 4 the dotted station id
poslineregex = re.compile(r"^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

def LoadPos():
    """First load the survex stations for entrances and fixed points (about 600) into the database.
    Run cavern to produce a complete .3d file, then run 3dtopos to produce a table of 
    all survey point positions. Then lookup each position by name to see if we have it in the database 
    and if we do, then save the x/y/z coordinates. This gives us coordinates of the entrances.
    If we don't have it in the database, print an error message and discard it.
    """
    svx_t = 0
    d3d_t = 0

    def runcavern3d():
        # report the file ages (in days) that triggered the regeneration
        print(" -  Regenerating stale cavern .log and .3d for '{}'\n    days old: {:.1f}    {:.1f}    {:.1f}".
            format(topdata, (svx_t - d3d_t)/(24*3600), (cav_t - d3d_t)/(24*3600), (now - d3d_t)/(24*3600)))
        call([settings.CAVERN, "--log", "--output={}".format(topdata), "{}.svx".format(topdata)])
        call([settings.THREEDTOPOS, '{}.3d'.format(topdata)], cwd = settings.SURVEX_DATA)

    topdata = settings.SURVEX_DATA + settings.SURVEX_TOPNAME
    print((' - Generating a list of Pos from %s.svx and then loading...' % (topdata)))

    found = 0
    print("\n") # extra line because cavern overwrites the text buffer somehow
    # cavern defaults to using same cwd as supplied input file

    # modification time of the cavern binary itself: a newer cavern invalidates old .3d files
    completed_process = run(["which", "{}".format(settings.CAVERN)], 
                            capture_output=True, check=True, text=True)
    cav_t = os.path.getmtime(completed_process.stdout.strip())

    svxpath = topdata + ".svx"
    d3dpath = topdata + ".3d"

    svx_t = os.path.getmtime(svxpath)

    if os.path.isfile(d3dpath):
        # always fails to find log file if a double directory, e.g. caves-1623/B4/B4/B4.svx Why ?
        d3d_t = os.path.getmtime(d3dpath)

    now = time.time()
    # re-run cavern if the .3d is missing, older than the .svx source, more
    # than 60 days old, or older than the cavern binary
    if not os.path.isfile(d3dpath):
        runcavern3d()
    elif svx_t - d3d_t > 0:          # stale, 3d older than svx file
        runcavern3d()
    elif now - d3d_t > 60 *24*60*60: # >60 days old, re-run anyway
        runcavern3d()
    elif cav_t - d3d_t > 0:          # new version of cavern
        runcavern3d()

    # the set of station ids we care about: entrances and fixed points
    mappoints = {}
    for pt in MapLocations().points():
        svxid, number, point_type, label = pt
        mappoints[svxid] = True

    # find the root block to hang the stations off: by name, falling back to id=1
    try:
        survexblockroot = models_survex.SurvexBlock.objects.get(name=ROOTBLOCK)
    except Exception:
        try:
            survexblockroot = models_survex.SurvexBlock.objects.get(id=1)
        except Exception:
            message = ' ! FAILED to find root SurvexBlock'
            print(message)
            models.DataIssue.objects.create(parser='survex', message=message)
            raise

    # bugfix: the .pos file is now closed when done (it was left open before),
    # and we iterate it lazily instead of materialising it with readlines()
    with open("%s.pos" % (topdata)) as posfile:
        posfile.readline() # Drop header
        for line in posfile:
            r = poslineregex.match(line)
            if not r:
                continue
            x, y, z, stn_id = r.groups()  # stn_id renamed: 'id' shadowed the builtin
            for sid in mappoints:
                if stn_id.endswith(sid):
                    blockpath = "." + stn_id[:-len(sid)].strip(".")
                    # Historically we tried to match the survexblock for each of these via
                    # blockpath, but mostly they don't actually appear in any SVX file.
                    # They should be matched up via the cave data, not by a half-arsed
                    # syntactic match which almost never works. So all stations are
                    # attached to the root block instead.
                    try:
                        ss = models_survex.SurvexStation(name=stn_id, block=survexblockroot)
                        ss.x = float(x)
                        ss.y = float(y)
                        ss.z = float(z)
                        ss.save()
                        found += 1
                    except Exception:
                        message = ' ! FAIL to create SurvexStation Entrance point {} {}'.format(blockpath, sid)
                        print(message)
                        models.DataIssue.objects.create(parser='survex', message=message)
                        raise
    print(" - {} SurvexStation entrances found.".format(found))