diff --git a/core/models.py b/core/models.py
index 32b094a..0efe8c9 100644
--- a/core/models.py
+++ b/core/models.py
@@ -457,7 +457,7 @@ class Cave(TroggleModel):
         return urlparse.urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,}))
 
     def __unicode__(self, sep = u": "):
-        return unicode(self.slug())
+        return unicode("slug:"+self.slug())
 
     def get_QMs(self):
         return QM.objects.filter(found_by__cave_slug=self.caveslug_set.all())	
diff --git a/databaseReset.py b/databaseReset.py
index 2387a44..455de8d 100644
--- a/databaseReset.py
+++ b/databaseReset.py
@@ -266,16 +266,17 @@ class JobQueue():
             print "--  ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
 
             # but because the user may be expecting to add this to a db with lots of tables already there,
-            # the jobque may not start from scratch so we need to initialise the db properly first.
+            # the job queue may not start from scratch, so we need to initialise the db properly first
+            # because we are using an empty :memory: database
             # But initiating twice crashes, so be sure to do it once only.
             if ("reinit",reinit_db) not in self.queue:
                 reinit_db()
             if ("dirsredirect",dirsredirect) not in self.queue:
                 dirsredirect()
             if ("caves",import_caves) not in self.queue:
-                import_caves()
+                import_caves() # at some point, extract the initialising code from this and put it in reinit
             if ("people",import_people) not in self.queue:
-                import_people()
+                import_people() # at some point, extract the initialising code from this and put it in reinit
                 
             django.db.close_old_connections() # maybe not needed here
             
@@ -290,6 +291,7 @@ class JobQueue():
             settings.DATABASES['default']['NAME'] = dbname
             print "--  ", settings.DATABASES['default']['NAME'], settings.DATABASES['default']['ENGINE']
             
+            django.db.close_old_connections() # maybe not needed here
             for j in self.results_order:
                 self.results[j].pop() # throw away results from :memory: run
                 self.results[j].append(None) # append a placeholder
diff --git a/parsers/survex.py b/parsers/survex.py
index 6fb7c62..42a8a00 100644
--- a/parsers/survex.py
+++ b/parsers/survex.py
@@ -96,8 +96,10 @@ stardatadefault = {"type":"normal", "t":"leg", "from":0, "to":1, "tape":2, "comp
 stardataparamconvert = {"length":"tape", "bearing":"compass", "gradient":"clino"}
 
 regex_comment = re.compile(r"([^;]*?)\s*(?:;\s*(.*))?\n?$")
-regex_ref     = re.compile(r'.*?ref.*?(\d+)\s*#\s*(\d+)')
+regex_ref     = re.compile(r'.*?ref.*?(\d+)\s*#\s*(X)?\s*(\d+)')
 regex_star    = re.compile(r'\s*\*[\s,]*(\w+)\s*(.*?)\s*(?:;.*)?$')
+# years from 1960 to 2039
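+# groups: year, optional letter "X", wallet number - e.g. matches "*ref 2014#X01"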
+regex_starref = re.compile(r'^\s*\*ref[\s.:]*((?:19[6789]\d)|(?:20[0123]\d))\s*#?\s*(X)?\s*(.*?\d+.*?)$(?i)') 
 regex_team    = re.compile(r"(Insts|Notes|Tape|Dog|Useless|Pics|Helper|Disto|Consultant)\s+(.*)$(?i)")
 regex_team_member        = re.compile(r" and | / |, | & | \+ |^both$|^none$(?i)")
 regex_qm      = re.compile(r'^\s*QM(\d)\s+?([a-dA-DxX])\s+([\w\-]+)\.(\d+)\s+(([\w\-]+)\.(\d+)|\-)\s+(.+)$')
@@ -145,14 +147,23 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
         # detect ref line pointing to the scans directory
         mref = comment and regex_ref.match(comment)
         if mref:
-            refscan = "%s#%s" % (mref.group(1), mref.group(2))
+            yr, letterx, wallet = mref.groups()
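+            # normalise to the SurvexScansFolder walletname format, e.g. "1999#02" or "1999#X02"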
+            if not letterx:
+                letterx = ""
+            else:
+                letterx = "X"
+            if len(wallet)<2:
+                wallet = "0" + wallet
+            refscan = "%s#%s%s" % (yr, letterx, wallet )
+            #print(' - Wallet ;ref - %s - looking for survexscansfolder' % refscan)
             survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
             if survexscansfolders:
                 survexblock.survexscansfolder = survexscansfolders[0]
                 #survexblock.refscandir = "%s/%s%%23%s" % (mref.group(1), mref.group(1), mref.group(2))
                 survexblock.save()
-                print(' - Wallet *ref - %s' % refscan)
-            continue
+                # print(' - Wallet ; ref - %s - found in survexscansfolders' % refscan)
+            else:
+                print(' - Wallet ; ref - %s - NOT found in survexscansfolders %s-%s-%s' % (refscan,yr,letterx,wallet))
 
         # This whole section should be moved if we can have *QM become a proper survex command
         # Spec of QM in SVX files, currently commented out need to add to survex
@@ -203,6 +214,28 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
         if not sline:
             continue
 
+        # detect the star ref command 
+        mstar = regex_starref.match(sline)
+        if mstar:
+            yr,letterx,wallet = mstar.groups()
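+            # same walletname normalisation as for the ";ref" comment handling above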
+            if not letterx:
+                letterx = ""
+            else:
+                letterx = "X"
+            if len(wallet)<2:
+                wallet = "0" + wallet
+            assert (int(yr)>=1960 and int(yr)<=2039), "Wallet year out of bounds: %s" % yr
+            assert (int(wallet)<100), "Wallet number more than 99: %s" % wallet
+            refscan = "%s#%s%s" % (yr, letterx, wallet)
+            survexscansfolders = models.SurvexScansFolder.objects.filter(walletname=refscan)
+            if survexscansfolders:
+                survexblock.survexscansfolder = survexscansfolders[0]
+                survexblock.save()
+                # print(' - Wallet *REF - %s - found in survexscansfolders' % refscan)
+            else:
+                print(' - Wallet *REF - %s - NOT found in survexscansfolders %s-%s-%s' % (refscan,yr,letterx,wallet))
+            continue
+
         # detect the star command
         mstar = regex_star.match(sline)
         if not mstar:
@@ -224,7 +257,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
         cmd = cmd.lower()
         if re.match("include$(?i)", cmd):
             includepath = os.path.join(os.path.split(survexfile.path)[0], re.sub(r"\.svx$", "", line))
-            print('   - Include file found including - ' + includepath)
+            print('   - Include path found, including - ' + includepath)
             # Try to find the cave in the DB if not use the string as before
             path_match = re.search(r"caves-(\d\d\d\d)/(\d+|\d\d\d\d-?\w+-\d+)/", includepath)
             if path_match:
@@ -234,7 +267,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
                 if cave:
                     survexfile.cave = cave
             else:
-                print('    - No match (i) for %s' % includepath)
+                print('    - No match in DB (i) for %s, so loading...' % includepath)
             includesurvexfile = models.SurvexFile(path=includepath)
             includesurvexfile.save()
             includesurvexfile.SetDirectory()
@@ -345,7 +378,7 @@ def RecursiveLoad(survexblock, survexfile, fin, textlines):
         else:
             #print('   - Stuff')
             if cmd not in ["sd", "include", "units", "entrance", "data", "flags", "title", "export", "instrument",
-                           "calibrate", "set", "infer", "alias", "ref", "cs", "declination", "case"]:
+                           "calibrate", "set", "infer", "alias", "cs", "declination", "case"]:
                 print("Unrecognised command in line:", cmd, line, survexblock, survexblock.survexfile.path)
         endstamp = datetime.now()
         timetaken = endstamp - stamp
@@ -414,14 +447,17 @@ def LoadPos():
     cachefile = settings.SURVEX_DATA + "posnotfound.cache"
     notfoundbefore = {}
     if os.path.isfile(cachefile):
+        # this is not a good test. 1623.svx may never change but *included files may have done.
+        # When the *include is unrolled, we will have a proper timestamp to use
+        # and can increase the timeout from 3 days to 30 days.
         updtsvx = os.path.getmtime(topdata + ".svx")
         updtcache = os.path.getmtime(cachefile)
         age = updtcache - updtsvx
         print('   svx: %s    cache: %s    not-found cache is fresher by: %s' % (updtsvx, updtcache, str(timedelta(seconds=age) )))
         
         now = time.time()
-        if now - updtcache > 30*24*60*60:
-            print "   cache is more than 30 days old. Deleting."
+        if now - updtcache > 3*24*60*60:
+            print "   cache is more than 3 days old. Deleting."
             os.remove(cachefile)
         if age < 0 :
             print "   cache is stale."