import json
import locale
import os
import resource
import sys
import time

from django.core.management import call_command
from django.core.management.commands import flush

import settings

""" Command-line utility for loading cave data files into troggle ' s database.
The command line options select which combination of classes of data will be imported ,
e . g . cave data , logbook data , cavers ( people ) data . The set of imports requested are put
into a job queue in a valid order , toegether with any necessary initialisation if it is
a complete reset , and the queue is then executed .
In future all these functions may be moved to a control panel webpage running within the
troggle application .
"""

print(" - settings on loading databaseReset.py", flush=True)

os.environ["PYTHONPATH"] = str(settings.PYTHON_PATH)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

print(" - settings on loading databaseReset.py")

import django

print(f" - Memory footprint before loading Django: {resource.getrusage(resource.RUSAGE_SELF)[2] / 1024.0:.3f} MB")

try:
    django.setup()
except:
    print(
        "! Cyclic reference failure. Can occur when the initial db is empty. Fixed now (in UploadFileForm) but easy to reintroduce.."
    )
    raise

print(f" - Memory footprint after loading Django: {resource.getrusage(resource.RUSAGE_SELF)[2] / 1024.0:.3f} MB")

from django.contrib.auth.models import User
from django.core import management
from django.db import connection, transaction

from troggle.core.utils import get_process_memory
from troggle.parsers.imports import (
    import_caves,
    import_drawingsfiles,
    import_ents,
    import_loadpos,
    import_logbook,
    import_logbooks,
    import_people,
    import_QMs,
    import_survex,
    import_survex_checks,
    import_surveyscans,
)

if os.geteuid() == 0:
    # This protects the server from having the wrong file permissions written on logs and caches
    print("This script should be run as expo not root - quitting")
    exit()

expouser = settings.EXPOUSER
expouserpass = settings.EXPOUSERPASS
expouseremail = settings.EXPOUSER_EMAIL
expoadminuser = settings.EXPOADMINUSER
expoadminuserpass = settings.EXPOADMINUSERPASS
expoadminuseremail = settings.EXPOADMINUSER_EMAIL


def reinit_db():
    """Rebuild database from scratch. Deletes the file first if sqlite is used,
    otherwise it drops the database and creates it.

    Note - initial loading of troggle.sqlite will already have populated the models
    in memory (django python models, not the database), so there is already a full load
    of stuff known. Deleting the db file does not clear memory.
    """
    print("Reinitialising db ", end="")
    print(django.db.connections.databases["default"]["NAME"])
    currentdbname = settings.DATABASES["default"]["NAME"]
    if currentdbname == ":memory:":
        # closing connections should wipe the in-memory database
        django.db.close_old_connections()
        for conn in django.db.connections.all():
            print(" ! Closing another connection to db...")
            conn.close()
    elif django.db.connections.databases["default"]["ENGINE"] == "django.db.backends.sqlite3":
        if os.path.isfile(currentdbname):
            try:
                print(" - deleting " + currentdbname)
                os.remove(currentdbname)
            except OSError:
                print(
                    " ! OSError on removing: "
                    + currentdbname
                    + "\n ! Is the file open in another app? Is the server running?\n"
                )
                raise
        else:
            print(" - No database file found: " + currentdbname + " ..continuing, will create it.\n")
    else:
        print(f" - Attempting to nuke : {currentdbname}\n")
        # this is now completely failing to nuke MariaDB adequately, and it crashes when creating Area objects
        # with a "no null parent" message even though null parents are explicitly allowed in the model.
        cursor = django.db.connection.cursor()
        print(f" - - Using {cursor}")
        try:
            cursor.execute(f"DROP DATABASE {currentdbname}")
        except:
            print(f" - - Exception when attempting to: DROP DATABASE {currentdbname} with {cursor}")
        try:
            cursor.execute(f"CREATE DATABASE {currentdbname}")
        except:
            print(f" - - Exception when attempting to: CREATE DATABASE {currentdbname} with {cursor}")
        try:
            cursor.execute(f"ALTER DATABASE {currentdbname} CHARACTER SET=utf8")
        except:
            print(f" - - Exception when attempting to: ALTER DATABASE {currentdbname} CHARACTER SET=utf8")
        try:
            cursor.execute(f"USE {currentdbname}")
        except:
            print(f" - - Exception when attempting to: USE {currentdbname}")
        try:
            cmd = flush.Command()
            call_command(cmd, verbosity=0, interactive=False)
        except:
            print(" - - Exception when attempting to: FLUSH")
        print(f" - Nuked : {currentdbname}\n")

print ( " - Migrating: " + django . db . connections . databases [ " default " ] [ " NAME " ] )
2020-06-08 00:11:09 +01:00
2023-01-19 21:18:42 +00:00
if django . db . connections . databases [ " default " ] [ " ENGINE " ] == " django.db.backends.sqlite3 " :
# with transaction.atomic():
management . call_command ( " makemigrations " , " core " , interactive = False )
management . call_command ( " migrate " , interactive = False )
management . call_command ( " migrate " , " core " , interactive = False )
2020-07-22 23:51:50 +01:00
else :
2023-01-19 21:18:42 +00:00
management . call_command ( " makemigrations " , " core " , interactive = False )
management . call_command ( " migrate " , interactive = False )
management . call_command ( " migrate " , " core " , interactive = False )
2020-07-22 23:51:50 +01:00
2023-01-19 21:18:42 +00:00
print ( " - done migration on: " + settings . DATABASES [ " default " ] [ " NAME " ] )
print ( " users in db already: " , len ( User . objects . all ( ) ) )
    with transaction.atomic():
        try:
            print(" - Setting up expo user on: " + django.db.connections.databases["default"]["NAME"])
            print(f" - user: {expouser} ({expouserpass:.5}...) <{expouseremail}>")
            user = User.objects.create_user(expouser, expouseremail, expouserpass)
            user.is_staff = False
            user.is_superuser = False
            user.save()
        except:
            print(" ! INTEGRITY ERROR user on: " + settings.DATABASES["default"]["NAME"])
            print(django.db.connections.databases["default"]["NAME"])
            print(" ! You probably have not got a clean db when you thought you had.\n")
            print(" ! Also you are probably NOT running an in-memory db now.\n")
            print("users in db: ", len(User.objects.all()))
            print("tables in db: ", len(connection.introspection.table_names()))
            memdumpsql(fn="integrityfail.sql")
            django.db.connections.databases["default"]["NAME"] = ":memory:"
            # raise

    with transaction.atomic():
        try:
            print(" - Setting up expoadmin user on: " + django.db.connections.databases["default"]["NAME"])
            print(f" - user: {expoadminuser} ({expoadminuserpass:.5}...) <{expoadminuseremail}>")
            user = User.objects.create_user(expoadminuser, expoadminuseremail, expoadminuserpass)
            user.is_staff = True
            user.is_superuser = True
            user.save()
        except:
            print(" ! INTEGRITY ERROR user on: " + settings.DATABASES["default"]["NAME"])
            print(django.db.connections.databases["default"]["NAME"])
            print(" ! You probably have not got a clean db when you thought you had.\n")
            print(" ! Also you are probably NOT running an in-memory db now.\n")
            print("users in db: ", len(User.objects.all()))
            print("tables in db: ", len(connection.introspection.table_names()))
            memdumpsql(fn="integrityfail.sql")
            django.db.connections.databases["default"]["NAME"] = ":memory:"
            # raise


def memdumpsql(fn):
    """Unused option to dump SQL. Aborted attempt to create a cache for loading data"""
    djconn = django.db.connection
    from dump import _iterdump

    with open(fn, "w") as f:
        for line in _iterdump(djconn):
            f.write(f"{line.encode('utf8')}\n")
    return True


# ------------------------------------------------------------------------------------------
class JobQueue:
    """A list of import operations to run. Always reports profile times
    of the import operations in the same order.
    """

    def __init__(self, run):
        """Initialises the job queue object with a fixed order for reporting
        options during a run. Imports the timings from previous runs.
        """
        self.runlabel = run
        self.queue = []  # tuples of (jobname, jobfunction)
        self.results = {}
        self.results_order = [
            "date",
            "runlabel",
            "reinit",
            "caves",
            "people",
            "logbooks",
            "QMs",
            "scans",
            "survex",
            "drawings",
            "test",
        ]
        for k in self.results_order:
            self.results[k] = []
        self.tfile = "import_profile.json"
        self.htmlfile = "profile.html"  # for HTML results table. Not yet done.

    def enq(self, label, func):
        """Enqueue: add a (label, function) job to the end of the queue"""
        self.queue.append((label, func))
        return True

    def loadprofiles(self):
        """Load timings for previous imports for each data import type"""
        if os.path.isfile(self.tfile):
            try:
                f = open(self.tfile, "r")
                data = json.load(f)
                for j in data:
                    self.results[j] = data[j]
            except:
                print(f"FAILURE parsing JSON file {self.tfile}")
                # Python bug: https://github.com/ShinNoNoir/twitterwebsearch/issues/12
            f.close()
        for j in self.results_order:
            self.results[j].append(None)  # append a placeholder
        return True
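
    # Assumed shape of import_profile.json (sketch): one list per key in results_order, with
    # one entry per recorded run, e.g. {"date": [...], "runlabel": [...], "caves": [12.3, ...], ...}.
    # loadprofiles() appends a None placeholder to each list and runqonce() overwrites it with
    # the measured duration for that job.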

    def dellastprofile(self):
        """Trim the most recent set of timings from the results"""
        for j in self.results_order:
            self.results[j].pop()  # delete last item
        return True

    def delfirstprofile(self):
        """Trim the oldest set of timings from the results"""
        for j in self.results_order:
            self.results[j].pop(0)  # delete zeroth item
        return True

    def saveprofiles(self):
        """Save timings for the set of imports just completed"""
        with open(self.tfile, "w") as f:
            json.dump(self.results, f)
        return True

    def runqonce(self):
        """Run all the jobs in the queue provided - once"""
        print("** Running job ", self.runlabel, end=" to ")
        print(django.db.connections.databases["default"]["NAME"])
        jobstart = time.time()
        print(f"-- Initial memory in use {get_process_memory():.3f} MB")

        self.results["date"].pop()
        self.results["date"].append(jobstart)
        self.results["runlabel"].pop()
        self.results["runlabel"].append(self.runlabel)

        for runfunction in self.queue:
            start = time.time()
            memstart = get_process_memory()
            jobname, jobparser = runfunction
            # --------------------
            jobparser()  # invokes function passed in the second item in the tuple
            # --------------------
            memend = get_process_memory()
            duration = time.time() - start
            # print(" - MEMORY start:{:.3f} MB end:{:.3f} MB change={:.3f} MB".format(memstart,memend, ))
            print(
                '\n*- Ended "',
                jobname,
                f'" {duration:.1f} seconds + {memend - memstart:.3f} MB ({memend:.3f} MB)',
            )
            self.results[jobname].pop()  # the null item
            self.results[jobname].append(duration)

        jobend = time.time()
        jobduration = jobend - jobstart
        print(f"** Ended job {self.runlabel} - {jobduration:.1f} seconds total.")
        return True

    def append_placeholders(self):
        """Adds a dummy timing for each option, to fix an off-by-one error"""
        for j in self.results_order:
            self.results[j].append(None)  # append a placeholder

    def run_now_django_tests(self, n):
        """Runs the standard django test harness system which is in troggle/core/TESTS/tests.py"""
        management.call_command("test", verbosity=n)
        django.db.close_old_connections()

    def run(self):
        """Loads the profile timings record, runs the job queue, saves the imported data as an
        SQL image if using an in-memory db, and then saves the timing profile data."""
        self.loadprofiles()
        print(
            "-- start ",
            django.db.connections.databases["default"]["ENGINE"],
            django.db.connections.databases["default"]["NAME"],
        )
        self.runqonce()
        if settings.DATABASES["default"]["NAME"] == ":memory:":
            memdumpsql("memdump.sql")  # saved contents of in-memory db, could be imported later..
        self.saveprofiles()
        return True

    def showprofile(self):
        """Prints out the time it took to run the jobqueue"""
        for k in self.results_order:
            if k == "test":
                break
            elif k == "date":
                print("  days ago ", end=" ")
            else:
                print("%10s (s)" % k, end=" ")
            percen = 0
            r = self.results[k]

            for i in range(len(r)):
                if k == "runlabel":
                    if r[i]:
                        rp = r[i]
                    else:
                        rp = " - "
                    print("%8s" % rp, end=" ")
                elif k == "date":
                    # Calculate dates as days before present
                    if r[i]:
                        if i == len(r) - 1:
                            print("    this", end=" ")
                        else:
                            # prints one place to the left of where you expect
                            if r[len(r) - 1]:
                                s = r[i] - r[len(r) - 1]
                            elif r[len(r) - 2]:
                                s = r[i] - r[len(r) - 2]
                            else:
                                s = 0
                            days = (s) / (24 * 60 * 60)
                            print(f"{days:8.2f}", end=" ")
                elif r[i]:
                    print(f"{r[i]:8.1f}", end=" ")
                    if i == len(r) - 1 and r[i - 1]:
                        percen = 100 * (r[i] - r[i - 1]) / r[i - 1]
                        if abs(percen) > 0.1:
                            print(f"{percen:8.1f}%", end=" ")
                else:
                    print("       - ", end=" ")
            print("")
        print("\n")
        return True


def usage():
    """Prints command line options, can print history of previous runs with timings"""
    print(
        """Usage is 'python databaseReset.py <command> [runlabel]'
          where command is:
             test         - testing... imports people and prints profile. Deletes nothing.
             profile      - print the profile from previous runs. Import nothing.
                - del      - deletes last entry
                - delfirst - deletes first entry

             reset        - normal usage: clear database and reread everything from files

             init         - initialisation. Automatic if you run reset.
             caves        - read in the caves (must run first after initialisation)
             people       - read in the people from folk.csv (must run after 'caves')
             logbooks     - read in the logbooks
             QMs          - read in the QM csv files (older caves only)
             scans        - the survey scans in all the wallets (must run before survex)
             drawings     - read in the Tunnel & Therion files - which scans the survey scans too
             survex       - read in the survex files - all the survex blocks and entrances x/y/z
             survex_ck    - set caves and people on wallets, check wallets for *ref
             ents         - read just the entrances x/y/z (must run after survex)

             dumplogbooks - Not used. write out autologbooks (not working? use http://localhost:8000/controlpanel)
             logbook      - read a single logbook. Default set in python code

          and [runlabel] is an optional string identifying this run of the script
          in the stored profiling data 'import_profile.json'

          caves and logbooks must be run on an empty db before the others as they
          set up db tables used by the others.

          Note that running the subfunctions will not produce a consistent website
          - only the full 'reset' does that.
    """
    )
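
# Example invocations (sketch; 'nightly' is an arbitrary runlabel):
#   python databaseReset.py reset nightly
#   python databaseReset.py profile del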


if __name__ == "__main__":

    if os.geteuid() == 0:
        print("Do not run as root or using sudo - file permissions for cache files and logs will break")
        print("Aborting run.")
        exit()

    if sys.getfilesystemencoding() != "utf-8":
        print("UTF-8 is NOT the default file encoding. You must fix this.")
        print(f" - {sys.getdefaultencoding()=}")
        print(f" - {sys.getfilesystemencoding()=}")
        print(f" - {locale.getdefaultlocale()=}")
        print(f" - {locale.getpreferredencoding()=}")
        print("Aborting run.")
        exit()

    if len(sys.argv) > 2:
        runlabel = sys.argv[len(sys.argv) - 1]
    else:
        runlabel = None

    jq = JobQueue(runlabel)

    if len(sys.argv) == 1:
        usage()
        exit()
    elif "init" in sys.argv:
        jq.enq("reinit", reinit_db)
    elif "ents" in sys.argv:
        jq.enq("survex", import_ents)
    elif "survex_ck" in sys.argv:
        jq.enq("survex", import_survex_checks)
    elif "test2" in sys.argv:
        jq.enq("QMs", import_QMs)
        jq.enq("drawings", import_drawingsfiles)
        jq.enq("survex", import_survex)
    elif "caves" in sys.argv:
        jq.enq("caves", import_caves)
    elif "logbooks" in sys.argv:
        jq.enq("logbooks", import_logbooks)
    elif "logbook" in sys.argv:
        jq.enq("logbooks", import_logbook)  # default year set in imports.py
    elif "people" in sys.argv:
        jq.enq("people", import_people)
    elif "QMs" in sys.argv:
        jq.enq("QMs", import_QMs)
    elif "reset" in sys.argv:
        jq.enq("reinit", reinit_db)
        jq.enq("caves", import_caves)
        jq.enq("people", import_people)
        jq.enq("scans", import_surveyscans)
        jq.enq("logbooks", import_logbooks)
        jq.enq("QMs", import_QMs)
        jq.enq("drawings", import_drawingsfiles)
        jq.enq("survex", import_survex)
    elif "scans" in sys.argv:
        jq.enq("scans", import_surveyscans)
    elif "survex" in sys.argv:
        jq.enq("survex", import_survex)
    elif "loadpos" in sys.argv:
        jq.enq("survex", import_loadpos)
    elif "drawings" in sys.argv:
        jq.enq("drawings", import_drawingsfiles)
elif " profile " in sys . argv :
if runlabel == " del " :
2021-04-15 14:27:16 +01:00
jq . loadprofiles ( )
jq . dellastprofile ( )
2023-01-19 21:18:42 +00:00
jq . dellastprofile ( ) # twice because loadprofiles adds a dummy
2021-04-15 14:27:16 +01:00
jq . showprofile ( )
jq . saveprofiles ( )
2023-01-19 21:18:42 +00:00
if runlabel == " delfirst " :
2021-04-15 14:27:16 +01:00
jq . loadprofiles ( )
2023-01-19 21:18:42 +00:00
jq . dellastprofile ( ) # remove the dummy
jq . delfirstprofile ( )
2021-04-15 14:27:16 +01:00
jq . showprofile ( )
jq . saveprofiles ( )
else :
jq . loadprofiles ( )
jq . showprofile ( )
2020-05-24 13:35:47 +01:00
exit ( )
2020-05-20 12:45:10 +01:00
elif " help " in sys . argv :
usage ( )
exit ( )
2020-02-21 14:00:33 +00:00
else :
2013-06-24 01:31:14 +01:00
usage ( )
2022-11-23 10:48:39 +00:00
print ( f " { sys . argv [ 1 ] } not recognised as a command. " )
2020-04-27 23:51:41 +01:00
exit ( )
2020-04-15 04:09:28 +01:00
jq . run ( )
2020-04-16 20:36:42 +01:00
jq . showprofile ( )