troggle/core/utils.py

import string
import os
import datetime
import re
import resource
import random
import logging
import subprocess
from pathlib import Path
from urllib.parse import urljoin
from decimal import Decimal, getcontext
getcontext().prec = 2  # use 2 significant figures for Decimal calculations

import settings

from django.db import models
from django.contrib import admin
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from django.urls import reverse
from django.template import Context, loader

'''This file declares TROG, a globally visible object for caches.

TROG is a dictionary holding globally visible indexes and cache functions.
It is a Global Object, see https://python-patterns.guide/python/module-globals/
troggle.utils.TROG

chaosmonkey(n)   - used by the survex import to regenerate some .3d files
save_carefully() - core function that saves troggle objects in the database
various git add/commit functions that need refactoring together
'''
TROG = {
    'pagecache': {
        'expedition': {}
    },
    'issues': {
        'logdataissues': {}
    }
}
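
# Illustrative use of the TROG cache (a sketch - the year key and the cached values are
# assumptions for this example, not taken from the callers elsewhere in troggle):
#   TROG['pagecache']['expedition']["2022"] = rendered_html      # cache a rendered page
#   page = TROG['pagecache']['expedition'].get("2022")           # read it back, None if absent
#   TROG['issues']['logdataissues']["parser message id"] = "..." # record a data-import issue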

# This is module-level executable. This is a Bad Thing. Especially when it touches the file system.
try:
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
except:
    # Opening of file for writing is going to fail currently, so decide it doesn't matter for now
    pass


def get_process_memory():
    '''Returns the peak resident memory (ru_maxrss) used by this process, in MB.
    (getrusage reports ru_maxrss in KB on Linux.)'''
    usage = resource.getrusage(resource.RUSAGE_SELF)
    return usage[2]/1024.0


def chaosmonkey(n):
    '''Returns True once every n calls - randomly'''
    if random.randrange(0, n) != 0:
        return False
    # print("CHAOS strikes !", file=sys.stderr)
    return True
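
# Illustrative call pattern (a sketch): the survex import can use this to redo a small,
# random sample of the work on each run, e.g.
#   if chaosmonkey(200):          # on average, one call in 200 returns True
#       regenerate_3d(svxfile)    # 'regenerate_3d' and 'svxfile' are hypothetical names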


def GetListDir(sdir):
    '''Handles a url or a file path, so we can refer to a set of scans (not drawings) on another server.
    Returns a list of tuples (f, ff, is_dir): filename, full path, and whether it is a directory.
    '''
    res = []
    if type(sdir) is str and sdir[:7] == "http://":
        # s = urllib.request.urlopen(sdir)
        message = f"! Requesting loading from http:// NOT IMPLEMENTED. [{sdir}]"
        print(message)
        # NB DataIssue is not imported in this module; a local import of the model would be needed here.
        DataIssue.objects.create(parser='Drawings', message=message)
        sdir = sdir[7:]  # strip the "http://" prefix before attempting a local listing
    for f in os.listdir(sdir):
        if f[0] != ".":
            ff = os.path.join(sdir, f)
            res.append((f, ff, os.path.isdir(ff)))
    return res
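
# Illustrative return value (hypothetical paths): GetListDir("/expofiles/surveyscans/2022")
# might return
#   [("notes1.jpg", "/expofiles/surveyscans/2022/notes1.jpg", False),
#    ("wallet01",   "/expofiles/surveyscans/2022/wallet01",   True)]
# i.e. (filename, full path, is_dir) for every non-hidden entry in the directory.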


def only_commit(fname, message):
    '''Only used to commit a survex file edited and saved in view/survex.py
    '''
    git = settings.GIT
    cwd = fname.parent
    filename = fname.name
    # print(f'{fname=} ')

    try:
        cp_add = subprocess.run([git, "add", filename], cwd=cwd, capture_output=True, text=True)
        if cp_add.returncode != 0:
            msgdata = f'Ask a nerd to fix this problem in only_commit().\n\n{cp_add.stderr}\n\n{cp_add.stdout}\n\nreturn code:{str(cp_add.returncode)}'
            raise WriteAndCommitError(f'CANNOT git on server for this file {filename}. Edits saved but not added to git.\n\n' + msgdata)

        cp_commit = subprocess.run([git, "commit", "-m", message], cwd=cwd, capture_output=True, text=True)
        # This produces return code = 1 if it commits OK, but when the repo still needs to be pushed to origin/expoweb
        if cp_commit.returncode != 0 and cp_commit.stdout != 'nothing to commit, working tree clean':
            msgdata = f'Ask a nerd to fix this problem in only_commit().\n\n{cp_commit.stderr}\n\n{cp_commit.stdout}\n\nreturn code:{str(cp_commit.returncode)}'
            print(msgdata)
            raise WriteAndCommitError(f'Error code with git on server for this file {filename}. Edits saved, added to git, but NOT committed.\n\n' + msgdata)

    except subprocess.SubprocessError:
        raise WriteAndCommitError(f'CANNOT git on server for this file {filename}. Subprocess error. Edits not saved.\nAsk a nerd to fix this.')
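
# Illustrative call (a sketch - the path and the setting name are assumptions; the real
# caller is in core/views/survex.py):
#   only_commit(Path(settings.SURVEX_DATA) / "caves-1623" / "264" / "264.svx",
#               "Online edit of 264.svx")
# fname must be a pathlib.Path so that fname.parent and fname.name behave as used above.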


def write_and_commit(files, message):
    """Writes the content to each filepath, then adds and commits the file to git. If this fails,
    a WriteAndCommitError is raised.
    This does not create any needed intermediate folders, which is what we do when writing survex files,
    so the functionality here is duplicated in only_commit().
    These need refactoring.
    """
    git = settings.GIT

    try:
        for filepath, content, encoding in files:
            cwd = filepath.parent
            filename = filepath.name
            # GIT see also core/views/uploads.py dwgupload()
            # GIT see also core/views/expo.py editexpopage()
            if encoding:
                mode = "w"
                kwargs = {"encoding": encoding}
            else:
                mode = "wb"
                kwargs = {}
            try:
                with open(filepath, mode, **kwargs) as f:
                    print(f'WRITING{cwd}---{filename} ')
                    # as the wsgi process www-data, we have group write-access but are not owner, so cannot chmod.
                    # os.chmod(filepath, 0o664) # set file permissions to rw-rw-r--
                    f.write(content)
            except PermissionError:
                raise WriteAndCommitError(f'CANNOT save this file.\nPERMISSIONS incorrectly set on server for this file {filename}. Ask a nerd to fix this.')

            cp_add = subprocess.run([git, "add", filename], cwd=cwd, capture_output=True, text=True)
            if cp_add.returncode != 0:
                msgdata = 'Ask a nerd to fix this.\n\n' + cp_add.stderr + '\n\n' + cp_add.stdout + '\n\nreturn code: ' + str(cp_add.returncode)
                raise WriteAndCommitError(f'CANNOT git on server for this file {filename}. Edits saved but not added to git.\n\n' + msgdata)

            cp_commit = subprocess.run([git, "commit", "-m", message], cwd=cwd, capture_output=True, text=True)
            # This produces return code = 1 if it commits OK, but when the repo still needs to be pushed to origin/expoweb
            if cp_commit.returncode != 0 and cp_commit.stdout != 'nothing to commit, working tree clean':
                msgdata = 'Ask a nerd to fix this.\n\n' + cp_commit.stderr + '\n\n' + cp_commit.stdout + '\n\nreturn code: ' + str(cp_commit.returncode)
                print(msgdata)
                raise WriteAndCommitError(f'Error code with git on server for this file {filename}. Edits saved, added to git, but NOT committed.\n\n' + msgdata)

    except subprocess.SubprocessError:
        raise WriteAndCommitError(f'CANNOT git on server for this file {filename}. Subprocess error. Edits not saved.\nAsk a nerd to fix this.')
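
# Illustrative call (a sketch - the paths and content names are made up for this example):
#   write_and_commit([(Path("/expoweb/documents/report.html"), html_text, "utf-8"),
#                     (Path("/expoweb/documents/map.pdf"),      pdf_bytes, None)],
#                    "Online edit: report and map")
# Each item is a (filepath, content, encoding) triple; a falsy encoding means the content
# is bytes and the file is written in binary ("wb") mode.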


class WriteAndCommitError(Exception):
    """Exception class for errors writing files and committing them to git"""

    def __init__(self, message):
        self.message = message

    def __str__(self):
        return f'WriteAndCommitError: {self.message}'


def writetrogglefile(filepath, filecontent):
    '''Commits the newly saved file to git.
    Callers of cave.writeDataFile() or entrance.writeDataFile() should handle the exception PermissionError explicitly.
    '''
    # GIT see also core/views/expo.py editexpopage()
    # GIT see also core/views/uploads.py dwgupload()
    # Called from core/models/caves.py Cave.writeDataFile() Entrance.writeDataFile()
    filepath = Path(filepath)
    cwd = filepath.parent
    filename = filepath.name
    git = settings.GIT

    # as the wsgi process www-data, we have group write-access but are not owner, so cannot chmod.
    # do not trap exceptions, pass them up to the view that called this function
    print(f'WRITING{cwd}---{filename} ')
    with open(filepath, "w") as f:
        f.write(filecontent)
    # os.chmod(filepath, 0o664) # set file permissions to rw-rw-r--

    # with check=True a non-zero return code raises CalledProcessError, so these returncode tests are belt-and-braces
    sp = subprocess.run([git, "add", filename], cwd=cwd, capture_output=True, check=True, text=True)
    if sp.returncode != 0:
        print(f'git ADD {cwd}:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))

    sp = subprocess.run([git, "commit", "-m", f'Troggle online: cave or entrance edit -{filename}'], cwd=cwd, capture_output=True, check=True, text=True)
    if sp.returncode != 0:
        print(f'git COMMIT {cwd}:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))

    # not catching and re-raising any exceptions yet, inc. the stderr etc. We should do that.
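
# Illustrative call (a sketch - the setting name and filename are assumptions):
#   writetrogglefile(Path(settings.CAVEDESCRIPTIONS) / "1623-264.html", xml_text)
# which writes the file and makes a commit such as
# "Troggle online: cave or entrance edit -1623-264.html" in that directory's repository.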


def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
    """Looks up an instance using lookupAttribs and carries out the following:
    - if the instance does not exist in the DB: adds the instance to the DB, returns (new instance, True)
    - if the instance exists in the DB and was modified using Troggle: does nothing, returns (existing instance, False)
    - if the instance exists in the DB and was not modified using Troggle: overwrites the instance, returns (instance, False)

    The checking is accomplished using Django's get_or_create and the new_since_parsing boolean field
    defined in core.models.TroggleModel.

    We are not using new_since_parsing - it is a fossil from Aaron Curtis's design in 2006. So it is always False.
    """
    try:
        instance, created = objectType.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)
    except:
        print(" !! - FAIL in SAVE CAREFULLY ===================", objectType)
        print(" !! - -- objects.get_or_create()")
        print(" !! - lookupAttribs:{}\n !! - nonLookupAttribs:{}".format(lookupAttribs, nonLookupAttribs))
        raise

    if not created and not instance.new_since_parsing:
        for k, v in list(nonLookupAttribs.items()):  # overwrite the existing attributes from the logbook text (except date and title)
            setattr(instance, k, v)
        try:
            instance.save()
        except:
            print(" !! - SAVE CAREFULLY ===================", objectType)
            print(" !! - -- instance.save()")
            print(" !! - lookupAttribs:{}\n !! - nonLookupAttribs:{}".format(lookupAttribs, nonLookupAttribs))
            raise

    try:
        msg = str(instance)
    except:
        msg = "FAULT getting __str__ for instance with lookupattribs: {}:".format(lookupAttribs)

    if created:
        logging.info(str(instance) + ' was just added to the database for the first time. \n')

    if not created and instance.new_since_parsing:
        logging.info(str(instance) + " has been modified using Troggle since parsing, so the current script has left it as is. \n")

    if not created and not instance.new_since_parsing:
        logging.info(" instance:<" + str(instance) + "> existed in the database unchanged since the last parse. It has been overwritten.")

    return (instance, created)
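
# Illustrative call (a sketch - the model and field names are assumptions, not taken from this file):
#   lookupAttribs = {'year': "2022", 'name': "CUCC expo 2022"}
#   nonLookupAttribs = {'logbookfile': "logbook.html"}
#   expedition, created = save_carefully(Expedition, lookupAttribs, nonLookupAttribs)
# The lookup attributes identify the object uniquely; the non-lookup attributes are the
# fields that may be overwritten when the data is re-parsed.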