diff --git a/core/admin.py b/core/admin.py
index 3bc14d4..2f07cba 100644
--- a/core/admin.py
+++ b/core/admin.py
@@ -6,7 +6,8 @@
 from django.core import serializers
 from troggle.core.views_other import downloadLogbook
 from troggle.core.models import *
-from troggle.core.models_caves import *
+from troggle.core.models_caves import Cave, Area, Entrance, CaveAndEntrance, NewSubCave, OtherCaveName, CaveDescription, LogbookEntry, PersonTrip, Survey, ScannedImage, QM
+from troggle.core.models_survex import SurvexBlock, SurvexPersonRole, SurvexStation, SurvexScansFolder, SurvexScanSingle

 #from troggle.reversion.admin import VersionAdmin #django-reversion version control

diff --git a/core/models_caves.py b/core/models_caves.py
index 44a5ac7..b3520b7 100644
--- a/core/models_caves.py
+++ b/core/models_caves.py
@@ -19,7 +19,7 @@
 from django.core.urlresolvers import reverse
 from django.template import Context, loader
 from troggle.core.models import TroggleModel, TroggleImageModel, Person, Expedition
-from troggle.core.models_survex import *
+from troggle.core.models_survex import SurvexStation

 class Area(TroggleModel):
     short_name = models.CharField(max_length=100)
@@ -444,7 +444,7 @@ class LogbookEntry(TroggleModel):
     def __init__(self, *args, **kwargs):
         if "cave" in list(kwargs.keys()):
             if kwargs["cave"] is not None:
-                kwargs["cave_slug"] = models_caves.CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
+                kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
                 kwargs.pop("cave")
         # parse error in python3.8
         return TroggleModel.__init__(self, *args, **kwargs) # seems OK in 3.5 & 3.8! failure later elsewhere with 3.8
diff --git a/parsers/logbooks.py b/parsers/logbooks.py
index f0ae2fa..cfc1a20 100644
--- a/parsers/logbooks.py
+++ b/parsers/logbooks.py
@@ -11,8 +11,8 @@
 from django.conf import settings
 from django.template.defaultfilters import slugify
 from django.utils.timezone import get_current_timezone, make_aware

-import troggle.core.models as models
-import troggle.core.models_caves as models_caves
+from troggle.core.models import DataIssue, Expedition
+from troggle.core.models_caves import Cave, OtherCaveName, getCaveByReference, LogbookEntry, PersonTrip
 from parsers.people import GetPersonExpeditionNameLookup
 from utils import save_carefully
@@ -40,7 +40,7 @@ def GetTripPersons(trippeople, expedition, logtime_underground):
         if not personyear:
             print((" - No name match for: '%s'" % tripperson))
             message = "No name match for: '%s' in year '%s'" % (tripperson, expedition.year)
-            models.DataIssue.objects.create(parser='logbooks', message=message)
+            DataIssue.objects.create(parser='logbooks', message=message)
         res.append((personyear, logtime_underground))
     if mul:
         author = personyear
@@ -55,18 +55,18 @@ def GetTripCave(place):
     # print "Getting cave for " , place
     try:
         katastNumRes=[]
-        katastNumRes=list(models_caves.Cave.objects.filter(kataster_number=int(place)))
+        katastNumRes=list(Cave.objects.filter(kataster_number=int(place)))
     except ValueError:
         pass

-    officialNameRes=list(models_caves.Cave.objects.filter(official_name=place))
+    officialNameRes=list(Cave.objects.filter(official_name=place))
     tripCaveRes=officialNameRes+katastNumRes

     if len(tripCaveRes)==1:
         # print "Place " , place , "entered as" , tripCaveRes[0]
         return tripCaveRes[0]

-    elif models_caves.OtherCaveName.objects.filter(name=place):
-        tripCaveRes=models_caves.OtherCaveName.objects.filter(name__icontains=place)[0].cave
+    elif OtherCaveName.objects.filter(name=place):
+        tripCaveRes=OtherCaveName.objects.filter(name__icontains=place)[0].cave
         # print "Place " , place , "entered as" , tripCaveRes
         return tripCaveRes
@@ -85,7 +85,7 @@ def GetCaveLookup():
     if Gcavelookup:
         return Gcavelookup
     Gcavelookup = {"NONEPLACEHOLDER":None}
-    for cave in models_caves.Cave.objects.all():
+    for cave in Cave.objects.all():
         Gcavelookup[cave.official_name.lower()] = cave
         if cave.kataster_number:
             Gcavelookup[cave.kataster_number] = cave
@@ -112,7 +112,7 @@ def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_
     if not author:
         print((" - Skipping logentry: " + title + " - no author for entry"))
         message = "Skipping logentry: %s - no author for entry in year '%s'" % (title, expedition.year)
-        models.DataIssue.objects.create(parser='logbooks', message=message)
+        DataIssue.objects.create(parser='logbooks', message=message)
         return

     #tripCave = GetTripCave(place)
@@ -125,13 +125,13 @@ def EnterLogIntoDbase(date, place, title, text, trippeople, expedition, logtime_
     expeditionday = expedition.get_expedition_day(date)
     lookupAttribs={'date':date, 'title':title}
     nonLookupAttribs={'place':place, 'text':text, 'expedition':expedition, 'cave':cave, 'slug':slugify(title)[:50], 'entry_type':entry_type}
-    lbo, created=save_carefully(models.LogbookEntry, lookupAttribs, nonLookupAttribs)
+    lbo, created=save_carefully(LogbookEntry, lookupAttribs, nonLookupAttribs)

     for tripperson, time_underground in trippersons:
         lookupAttribs={'personexpedition':tripperson, 'logbook_entry':lbo}
         nonLookupAttribs={'time_underground':time_underground, 'is_logbook_entry_author':(tripperson == author)}
-        save_carefully(models.PersonTrip, lookupAttribs, nonLookupAttribs)
+        save_carefully(PersonTrip, lookupAttribs, nonLookupAttribs)


 def ParseDate(tripdate, year):
     """ Interprets dates in the expo logbooks and returns a correct datetime.date object """
@@ -399,16 +399,16 @@ def LoadLogbookForExpedition(expedition):
             print(("Couldn't open default logbook file and nothing in settings for expo " + expedition.year))

-    #return "TOLOAD: " + year + " " + str(expedition.personexpedition_set.all()[1].logbookentry_set.count()) + " " + str(models.PersonTrip.objects.filter(personexpedition__expedition=expedition).count())
+    #return "TOLOAD: " + year + " " + str(expedition.personexpedition_set.all()[1].logbookentry_set.count()) + " " + str(PersonTrip.objects.filter(personexpedition__expedition=expedition).count())


 def LoadLogbooks():
     """ This is the master function for parsing all logbooks into the Troggle database.
     """
     # Clear the logbook data issues as we are reloading
-    models.DataIssue.objects.filter(parser='logbooks').delete()
+    DataIssue.objects.filter(parser='logbooks').delete()

     # Fetch all expos
-    expos = models.Expedition.objects.all()
+    expos = Expedition.objects.all()

     for expo in expos:
         print(("\nLoading Logbook for: " + expo.year))
@@ -442,9 +442,9 @@ def parseAutoLogBookEntry(filename):
     expeditionYearMatch = expeditionYearRegex.search(contents)
     if expeditionYearMatch:
         try:
-            expedition = models.Expedition.objects.get(year = expeditionYearMatch.groups()[0])
+            expedition = Expedition.objects.get(year = expeditionYearMatch.groups()[0])
             personExpeditionNameLookup = GetPersonExpeditionNameLookup(expedition)
-        except models.Expedition.DoesNotExist:
+        except Expedition.DoesNotExist:
             errors.append("Expedition not in database")
     else:
         errors.append("Expedition Year could not be parsed")
@@ -461,7 +461,7 @@ def parseAutoLogBookEntry(filename):
     if caveMatch:
         caveRef, = caveMatch.groups()
         try:
-            cave = models_caves.getCaveByReference(caveRef)
+            cave = getCaveByReference(caveRef)
         except AssertionError:
             cave = None
             errors.append("Cave not found in database")
@@ -506,14 +506,14 @@ def parseAutoLogBookEntry(filename):
             people.append((name, author, TU))
     if errors:
         return errors # Bail out before commiting to the database
-    logbookEntry = models.LogbookEntry(date = date,
+    logbookEntry = LogbookEntry(date = date,
                                 expedition = expedition,
                                 title = title,
                                 cave = cave,
                                 place = location,
                                 text = report,
                                 slug = slugify(title)[:50],
                                 filename = filename)
     logbookEntry.save()
     for name, author, TU in people:
-        models.PersonTrip(personexpedition = personExpo,
+        PersonTrip(personexpedition = personExpo,
                    time_underground = TU,
                    logbook_entry = logbookEntry,
                    is_logbook_entry_author = author).save()