forked from expo/troggle
Compare commits
1 Commit
master...django-upg
| Author | SHA1 | Date |
|---|---|---|
| | 6984f66794 | |
.gitignore (vendored): 74 lines changed
@@ -1,74 +0,0 @@
# use glob syntax
syntax: glob

*.orig
*.pyc
*.sql
*.sqlite
*.prof
*~
.idea/*
.swp
.vscode/*
_1623.3d
_1623.err
_1623.pos
_1623.svx
_16230.svx
cave-lookup.json
core/migrations/*
db*
desktop.ini
diffsettings.txt
ignored-files.log
import_profile.json
lines-of-python.txt
lines-of-templates.txt
loadlogbk.log
loadsurvexblks.log
logbktrips.shelve
memdump.sql
my_project.dot
parsing_log.txt
svxblks.log
svxlinear.log
troggle
troggle-inspectdb.py
troggle-sqlite.sql
troggle.log
troggle.sqlite
troggle.sqlite-journal
troggle_log.txt
tunnel-import.log
logbktrips.shelve.db

credentials.py
localsettings.py
localsettings-expo-live.py
_deploy/old/localsettings-expo-live.py
_deploy/old/localsettings.py
debian/localsettings.py
debian/credentials.py
wsl/localsettings.py
wsl/credentials.py
media/jslib/*
!media/jslib/readme.txt

_test_response.html
_deploy/wsl/localsettingsWSL.py.bak
therionrefs.log
_1623-and-1626.svx
_1623-and-1626-no-schoenberg-hs.svx
localsettings-oldMuscogee.py
troggle.sqlite-journal - Shortcut.lnk
troggle.sqlite - Shortcut.lnk

_deploy/debian/localsettings-jan.py
_deploy/debian/localsettings-nw.py
py310d32
_deploy/debian/localsettingsserver2023-01-secret.py
_deploy/debian/localsettings2023-04-05-secret.py
pydebianbullseye

javascript

.hgignore (new file): 16 lines added
@@ -0,0 +1,16 @@
# use glob syntax
syntax: glob

*.pyc
db*
localsettings.py
*~
parsing_log.txt
troggle
troggle_log.txt
.idea/*
*.orig
media/images/*
.vscode/*
.swp
imagekit-off/

README.txt: 216 lines changed
@@ -1,214 +1,46 @@
Updated 2 May 2023

Troggle is an application for caving expedition data management,
originally created for use on Cambridge University Caving Club (CUCC) expeditions
and licensed under the GNU Lesser General Public License.

Troggle has been forked into two projects. The original one is maintained by Aaron Curtis
and was used for Erebus caves in Antarctica.
The CUCC variant uses files as the definitive data, not the database, and lives at http://expo.survex.com/repositories/troggle/.git/

For the server setup, see /_deploy/debian/wookey-exposerver-recipe.txt
and see http://expo.survex.com/handbook/troggle/serverconfig.html

Much material which was in this file has been moved to
http://expo.survex.com/handbook/troggle/serverconfig.html

See copyright notices in
http://expo.survex.com/handbook/computing/contribute.html
and for context see
http://expo.survex.com/handbook/computing/onlinesystems.html

Troggle setup
=============
0. Read the very extensive online documentation and stop reading this README...
   Well, come back to this README after you have read the HTML pages. Not everything has been transferred:

   http://expo.survex.com/handbook/troggle/troglaptop.html
   http://expo.survex.com/handbook/troggle/serverconfig.html
   http://expo.survex.com/handbook/troggle/trogdangoup.html
   and at troggle/debian/serversetup

1. Set up the ssh key-exchange with the git server so you can clone troggle:
   http://expo.survex.com/handbook/computing/keyexchange.html
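
A minimal sketch of that key exchange (the key type and comment are your choice;
sending the public key to the admins is described in the handbook page above):

$ ssh-keygen -t ed25519 -C "you@example.com"  # generate a key pair if you don't have one
$ cat ~/.ssh/id_ed25519.pub                   # the public key to send to the expo admins
$ ssh expo@expo.survex.com                    # once installed, this should log in without a password
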
Setting up directories
----------------------
see http://expo.survex.com/handbook/troggle/troglaptop.html and
http://expo.survex.com/handbook/troggle/serverconfig.html

Next, you need to fill in your local settings. Copy _deploy/WSL/localsettingsWSL.py
to a new file called localsettings.py, then edit it and settings.py to match
your machine's file locations.
Follow the instructions contained in the file to fill out your settings.
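
A sketch of that copy step, assuming your troggle checkout is the current directory
(your editor, and the exact case of the _deploy path, may differ on your machine):

$ cp _deploy/WSL/localsettingsWSL.py localsettings.py
$ nano localsettings.py   # fix the directory locations for your machine
$ nano settings.py
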
{ in _deploy/old/ we have these, which are all very out of date:
localsettings-expo-live.py is the python2.7 settings for the server.
localsettingsubuntu.py
localsettingsdocker.py
localsettingswindows.py
localsettingspotatohut.py
}

Python, Django, and Database setup
-----------------------------------
We are now using Django 3.2 and will move to 4.2 in 2024.
We are installing with python 3.11 (the server is running 3.9).

Install Django using pip, not with apt, on your test system in a venv.
Conventionally on our main master expo server we install everything that we can as debian packages, not using pip.

[installation instructions removed - now in http://expo.survex.com/handbook/troggle/troglaptop.html ]
[venv description removed - read it in http://expo.survex.com/handbook/troggle/troglaptop.html ]

READ the os-trog.sh script !
READ the venv-trog.sh script !

If you want to use MySQL or Postgresql, download and install them. However, you can also use Django with Sqlite3, which is included in Python and thus requires no extra installation.
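
A hedged sketch of the pip-in-a-venv route (the venv name ~/trogvenv is just an example;
venv-trog.sh does all of this properly):

$ python3.11 -m venv ~/trogvenv
$ source ~/trogvenv/bin/activate
$ pip install --upgrade pip setuptools
$ pip install Django==3.2    # the version stated above
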
Automatic Provisioning and Configuration
----------------------------------------
We don't do this - yet.

The most appropriate configuration tools today (2021) appear to be Bolt or Ansible:
https://puppet.com/docs/bolt/latest/bolt.html (declarative, local)
https://docs.ansible.com/ansible/latest/user_guide/intro_getting_started.html (procedural, remote)
https://puppet.com/blog/automating-from-zero-to-something/

We don't need anything for the deploy server itself, but we could do with something for setting
up test servers quickly to help get newbie developers up to speed faster. But learning a new tool
creates a barrier in itself. This is one reason most of us don't use Docker.

CSS and media files
-------------------
We are not using the STATICFILES capability.
We are serving css files from troggle/media/.. (see urls.py)

Plain CSS pages
---------------
When running the test server
    manage.py runserver 0.0.0.0:8000
and without Apache running, we are serving CSS using this Django 'view':
    view_surveys.cssfilessingle
i.e. cssfilessingle() in core/view_surveys.py

Setting up survex
-----------------
You need to have survex installed, as the command line tool 'cavern' is
used as part of the survex import process.
$ sudo apt install survex
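
After installing, a quick sanity check that the tool is on your PATH:

$ which cavern
$ cavern --version    # should print the installed survex version
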
Setting up tables and importing survey data
-------------------------------------------
Running
$ sudo python databaseReset.py
from the troggle directory will give you instructions.
Once troggle is running, you can also log in and then go to "Import / export" data under "admin" on the menu.

[ NB Adding a new year/expedition requires adding a column to the
folk/folk.csv table - a year doesn't exist until that is done.]
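
For example (a sketch; the checkout location is an assumption, and 'reset' is the full
rebuild command used elsewhere in this file):

$ cd ~/troggle
$ sudo python databaseReset.py        # with no arguments it prints the available commands
$ sudo python databaseReset.py reset  # full wipe and re-import
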
MariaDB database
----------------
Start it up with
$ sudo mysql -u expo -p
when it will prompt you to type in the password. Get this by reading the settings.py file in use on the server.
then
> CREATE DATABASE troggle;
> use troggle;
> exit;
Note the semicolons.

You can check the status of the db service:
$ sudo systemctl status mysql

You can start and stop the db service with
$ sudo systemctl restart mysql.service
$ sudo systemctl stop mysql.service
$ sudo systemctl start mysql.service

While logged in at a terminal session as expo on expo.survex.com

$ mysql -h localhost -u expo -p<password>
will get you the MariaDB command prompt: https://www.hostwinds.com/guide/how-to-use-mysql-mariadb-from-command-line/

then (Note the SEMICOLONS !):
> drop database troggle;
> create database troggle;
> quit

Somewhere I have notes for the GRANT PRIVS type runes...
Ah yes:
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword'; FLUSH PRIVILEGES; (at mysql root prompt)

(explained on https://chartio.com/resources/tutorials/how-to-grant-all-privileges-on-a-database-in-mysql/)
(but you need to create the database too)

The GRANT ALL PRIVILEGES bit requires you to log in to MariaDB as root; sudo doesn't cut it.
These permissions are set in a different 'info' database which usually is untouched even if database troggle gets creamed.

The 'somepassword' is specified in the localsettings.py file.
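
A one-shot sketch of that create-and-grant sequence, in the same prompt style as above
('somepassword' must match the PASSWORD in your localsettings.py):

$ mysql -u root -p    # log in as the MariaDB root user, not via sudo
> CREATE DATABASE troggle;
> GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
> FLUSH PRIVILEGES;
> quit
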
PERMISSIONS
https://linuxize.com/post/usermod-command-in-linux/

THIS MAY BE OUT OF DATE - from 2022 we are running Apache as user 'expo' not 'www-data',
so that the online editing system for SVX files works.
The same goes for /expoweb/ files, so that "edit this page" works and the New Cave
and New Entrance forms work.

$ sudo usermod -a -G expocvs expo
the expocvs group is used for git
all the users should be in this group
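
A sketch for checking and extending that group membership (the username fred is an example):

$ id expo                           # lists the groups the expo user is in
$ getent group expocvs              # lists everyone in the expocvs group
$ sudo usermod -a -G expocvs fred   # add another user to the group
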
Running a Troggle server with Apache
------------------------------------
For high volume use, Troggle should be run using a web server like apache. However, a quick
way to get started is to use the development server built into Django: run
"python manage.py runserver" from the troggle directory.

Troggle also needs these aliases to be configured. These are set in
/home/expo/config/apache/expo.conf
on the expo server.

At least these need setting:
DocumentRoot /home/expo/expoweb
WSGIScriptAlias / /home/expo/troggle/wsgi.py
<Directory /home/expo/troggle>
    <Files wsgi.py>
        Require all granted
    </Files>
</Directory>

The instructions for apache Alias commands are in comments at the end of
the urls.py file.

Unlike the django "manage.py runserver" method, apache requires a restart before it will use
any changed files:

sudo service apache2 restart
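
It is worth checking that the configuration parses before restarting (a quick sanity check):

$ sudo apachectl configtest    # should report 'Syntax OK'
$ sudo service apache2 restart
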
Olly's comments 20 July 2020:
olly: looking at /lib/systemd/system/apache2.service suggests so
olly: ExecStart=/usr/sbin/apachectl start
olly: ExecStop=/usr/sbin/apachectl stop
olly: ExecReload=/usr/sbin/apachectl graceful

Additions
---------
The python code has been manually cleaned using the 'black' and 'ruff' lint tools,
and the 'deptry' dependency checker. This needs doing every year or so.
See dependencies-check-deptry.txt

See troggle/pyproject.toml for configurations.

Experimental additions
----------------------
These are untried tools which may help us document how troggle works in future.

pip install pygraphviz
pip install pyparsing pydot # installs fine
django extension graph_models # https://django-extensions.readthedocs.io/en/latest/graph_models.html
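
A hedged sketch of using graph_models (this assumes 'django_extensions' is added to
INSTALLED_APPS, which troggle's settings.py does not currently do; the output file name is arbitrary):

$ pip install django-extensions pyparsing pydot
$ python manage.py graph_models -a -o troggle-models.png   # one diagram of all apps' models
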
@@ -1,27 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Troggle - Coding Documentation</title>
<link rel="stylesheet" type="text/css" href="../media/css/main2.css" />
</head>
<body>
<h1>Troggle Code - README</h1>
<h2>Contents of README.txt file</h2>

<iframe name="erriframe" width="70%" height="500"
src="../README.txt" frameborder="1" ></iframe>

<h2>Troggle documentation in the Expo Handbook</h2>
<ul>
<li><a href="http://expo.survex.com/handbook/troggle/trogintro.html">Intro</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogindex.html">Troggle manual INDEX</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogarch.html">Troggle data model</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogimport.html">Troggle importing data</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogdesign.html">Troggle design decisions</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogdesignx.html">Troggle future architectures</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogsimpler.html">a kinder simpler Troggle?</a>
</ul>
<hr />
</body></html>

Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,160 +0,0 @@
import os
import sys
import urllib.parse
from pathlib import Path

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

NOTE this file is vastly out of sync with troggle/_deploy/wsl/localsettings.py
which is the most recent version used in active maintenance. There should be
essential differences, but there are many, many non-essential differences which
should be eliminated for clarity and to use modern idioms. 8 March 2023.
"""

print(" * importing troggle/localsettings.py")

# DO NOT check this file into the git repo - it contains real passwords.

EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True  # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle',             # Or path to database file if using sqlite3.
        'USER': 'expo',                # Not used with sqlite3.
        'PASSWORD': '123456789012345', # Not used with sqlite3. Not a real password.
        'HOST': '',                    # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                    # Set to empty string for default. Not used with sqlite3.
    }
}

EXPOUSER = 'expo'
EXPOUSERPASS = 'Not a real password'
EXPOADMINUSER = 'expoadmin'
EXPOADMINUSERPASS = 'Not a real password'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'

REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'

PHOTOS_YEAR = "2023"
# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = ["290", "291", "359", "264", "258", "204", "76", "107"]

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            PYTHON_PATH + "templates"
        ],
        'OPTIONS': {
            'debug': 'DEBUG',
            'context_processors': [
                # django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
                'django.contrib.auth.context_processors.auth',  # knowledge of logged-on user & permissions
                'core.context.troggle_context',  # in core/troggle.py
                'django.template.context_processors.debug',
                #'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',   # includes a variable MEDIA_URL
                'django.template.context_processors.static',  # includes a variable STATIC_URL
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',  # For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
                # insert your own TEMPLATE_LOADERS here
            ]
        },
    },
]

PUBLIC_SITE = True

# This should be False for normal running
DEBUG = False
CACHEDPAGES = True  # experimental page cache for a handful of page types

# executables:
CAVERN = 'cavern'          # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport'  # for parsing .3d files and producing .pos files

PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV

EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
SURVEYS = REPOS_ROOT_PATH
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib'  # used for CaveViewer JS utility

CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"

PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'

#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = Path("")  # this should end in / if a value is given
EXPOWEB_URL = '/'
SURVEYS_URL = '/survey_scans/'

REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)

SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"

EXPOFILES = REPOS_ROOT_PATH / "expofiles"
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"

#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')

# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'

STATIC_URL = urllib.parse.urljoin(URL_ROOT, '/static/')      # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT, '/javascript/')   # always fails, try to revive it ?
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py

#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'  # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/'    # not needed while TinyMCE not installed

LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'

# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"

print(" + finished importing troggle/localsettings.py")

@@ -1,160 +0,0 @@
import os
import sys
import urllib.parse
from pathlib import Path

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

NOTE this file is vastly out of sync with troggle/_deploy/wsl/localsettings.py
which is the most recent version used in active maintenance. There should be
essential differences, but there are many, many non-essential differences which
should be eliminated for clarity and to use modern idioms. 8 March 2023.
"""

print(" * importing troggle/localsettings.py")

# DO NOT check this file into the git repo - it contains real passwords.

EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True  # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle',              # Or path to database file if using sqlite3.
        'USER': 'expo',                 # Not used with sqlite3.
        'PASSWORD': 'uFqP56B4XleeyIW',  # Not used with sqlite3.
        'HOST': '',                     # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                     # Set to empty string for default. Not used with sqlite3.
    }
}

EXPOUSER = 'expo'
EXPOUSERPASS = '161:gosser'
EXPOADMINUSER = 'expoadmin'
EXPOADMINUSERPASS = 'gosser:161'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'

REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'

PHOTOS_YEAR = "2023"
# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = ["290", "291", "359", "264", "258", "204", "76", "107"]

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            PYTHON_PATH + "templates"
        ],
        'OPTIONS': {
            'debug': 'DEBUG',
            'context_processors': [
                # django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
                'django.contrib.auth.context_processors.auth',  # knowledge of logged-on user & permissions
                'core.context.troggle_context',  # in core/troggle.py
                'django.template.context_processors.debug',
                #'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',   # includes a variable MEDIA_URL
                'django.template.context_processors.static',  # includes a variable STATIC_URL
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',  # For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
                # insert your own TEMPLATE_LOADERS here
            ]
        },
    },
]

PUBLIC_SITE = True

# This should be False for normal running
DEBUG = False
CACHEDPAGES = True  # experimental page cache for a handful of page types

# executables:
CAVERN = 'cavern'          # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport'  # for parsing .3d files and producing .pos files

PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV

EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
SURVEYS = REPOS_ROOT_PATH
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib'  # used for CaveViewer JS utility

CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"

PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'

#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = Path("")  # this should end in / if a value is given
EXPOWEB_URL = '/'
SURVEYS_URL = '/survey_scans/'

REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)

SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"

EXPOFILES = REPOS_ROOT_PATH / "expofiles"
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"

#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')

# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'

STATIC_URL = urllib.parse.urljoin(URL_ROOT, '/static/')      # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT, '/javascript/')   # always fails, try to revive it ?
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py

#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'  # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/'    # not needed while TinyMCE not installed

LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'

# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"

print(" + finished importing troggle/localsettings.py")

@@ -1,164 +0,0 @@
import os
import sys
import urllib.parse
from pathlib import Path

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""

print(" * importing troggle/localsettings.py")

# DO NOT check this file into the git repo - it contains real passwords.

EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True  # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle',             # Or path to database file if using sqlite3.
        'USER': 'expo',                # Not used with sqlite3.
        'PASSWORD': '123456789012345', # Not used with sqlite3. Not the real password.
        'HOST': '',                    # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                    # Set to empty string for default. Not used with sqlite3.
    }
}

EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'

SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
EXPOUSERPASS = "nope"
EXPOADMINUSERPASS = "nope"
EMAIL_HOST_PASSWORD = "nope"

REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PHOTOS_YEAR = "2022"
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            PYTHON_PATH + "templates"
        ],
        'OPTIONS': {
            'debug': 'DEBUG',
            'context_processors': [
                # django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
                'django.contrib.auth.context_processors.auth',  # knowledge of logged-on user & permissions
                'core.context.troggle_context',  # in core/troggle.py
                'django.template.context_processors.debug',
                #'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',   # includes a variable MEDIA_URL
                'django.template.context_processors.static',  # includes a variable STATIC_URL
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',  # For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
                # insert your own TEMPLATE_LOADERS here
            ]
        },
    },
]

PUBLIC_SITE = True

# This should be False for normal running
DEBUG = True
CACHEDPAGES = True  # experimental page cache for a handful of page types

SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'

# executables:
CAVERN = 'cavern'          # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport'  # for parsing .3d files and producing .pos files

EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
#SURVEYS = REPOS_ROOT_PATH
SCANS_ROOT = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib'  # used for CaveViewer JS utility

CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")

# CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
# THREEDCACHEDIR = CACHEDIR + '3d/'
# THUMBNAILCACHE = CACHEDIR + 'thumbs'

PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV

# Note that all these *_URL constants are not actually used in urls.py, they should be..
#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = ''  # this should end in / if a value is given
EXPOWEB_URL = '/'
SCANS_URL = '/survey_scans/'
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')

# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'

STATIC_URL = urllib.parse.urljoin(URL_ROOT, '/static/')      # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT, '/javascript/')   # always fails, try to revive it ?

#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'  # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/'    # not needed while TinyMCE not installed

LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'

# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = ["290", "291", "359", "264", "258", "204", "76", "107"]

# Sanitise these to be strings as all other code is expecting strings
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
LOGFILE = os.fspath(LOGFILE)
#SURVEYS = os.fspath(SURVEYS)
EXPOWEB = os.fspath(EXPOWEB)
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
SURVEX_DATA = os.fspath(SURVEX_DATA)
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
TEMPLATE_PATH = os.fspath(TROGGLE_PATH)
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
SCANS_ROOT = os.fspath(SCANS_ROOT)
LIBDIR = os.fspath(LIBDIR)

print(" + finished importing troggle/localsettings.py")

@@ -1,70 +0,0 @@
#!/bin/bash
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in the troggle directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 22.04 relatively clean install.

sudo apt install python-is-python3 -y
python --version  # ensure python is an alias for python3, not python2.7
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
sudo apt install sqlite3 -y
sudo apt install python3-pip -y

# this installs a shed-load of other stuff: binutils etc.
sudo apt install survex-aven
sudo apt install git openssh-client -y
# On a clean debian 11 (bullseye) installation with Xfce & ssh,

# on ubuntu 20.04:
# Package sftp is not available, but is referred to by another package.
# This may mean that the package is missing, has been obsoleted, or
# is only available from another source
# E: Package 'sftp' has no installation candidate

# On Ubuntu 20.04, with python10, the pip install fails.
# So you need to get the pip from source
# sudo curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
# but really you should be using 22.04
# and also, if using debian,
# sudo python3.10 -m pip install -U virtualenv

# as debian does not install everything that ubuntu does, you need:
sudo useradd expo
sudo usermod -a -G sudo expo  # to put expo in sudoers group, re-login required
sudo apt install python3-venv -y
sudo apt install python3-dev -y

# default since 22.04
# sudo apt install python3.10
sudo apt install python3.10-venv -y
sudo apt install python3.10-dev -y
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.10 1

sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y

sudo python -m pip install --upgrade pip

sudo apt install sftp -y
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y

# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start

git config --global user.email "you@example.com"
git config --global user.name "Your Name"

echo '###'
echo '### Currently set version of python'
python --version

echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'

@@ -1,147 +0,0 @@
"""
Django settings for troggle project.

For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Imports should be grouped in the following order:

# 1. Standard library imports.
# 2. Related third party imports.
# 3. Local application/library specific imports.
# 4. You should put a blank line between each group of imports.


print("* importing troggle/settings.py")

# default value, then gets overwritten by real secrets
SECRET_KEY = "not-the-real-secret-key-a#vaeozn0---^fj!355qki*vj2"

GIT = "git"  # command for running git

# Note that this builds upon the django system installed
# global settings in
# django/conf/global_settings.py which is automatically loaded first.
# read https://docs.djangoproject.com/en/dev/topics/settings/

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Django settings for troggle project.

ALLOWED_HOSTS = ["*", "expo.survex.com", ".survex.com", "localhost", "127.0.0.1", "192.168.0.5"]

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS

# LOGIN_URL = '/accounts/login/'  # this is the default value so does not need to be set

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
USE_TZ = True
TIME_ZONE = "Europe/London"

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-uk"

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
USE_L10N = True

FIX_PERMISSIONS = []

# top-level survex file basename (without .svx)
SURVEX_TOPNAME = "1623-and-1626-no-schoenberg-hs"

# Caves for which survex files exist, but are not otherwise registered
# replaced (?) by expoweb/cave_data/pendingcaves.txt
# PENDING = ["1626-361", "2007-06", "2009-02",
#            "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
#            "2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
#            "2018-pf-01", "2018-pf-02"]

APPEND_SLASH = (
    False  # never relevant because we have urls that match unknown files and produce an 'edit this page' response
)
SMART_APPEND_SLASH = True  # not working as middleware is different after Dj2.0

LOGIN_REDIRECT_URL = "/"  # does not seem to have any effect

SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
# SESSION_COOKIE_SECURE = True  # if enabled, cannot login to Django control panel, bug elsewhere?
# CSRF_COOKIE_SECURE = True  # if enabled only sends cookies over SSL
X_FRAME_OPTIONS = "DENY"  # changed to "DENY" after I eliminated all the iframes e.g. /xmlvalid.html

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"  # from Django 3.2

INSTALLED_APPS = (
    "django.contrib.admin",
    "django.contrib.auth",  # includes the url redirections for login, logout
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.admindocs",
    "django.forms",  # Required to customise widget templates
    # 'django.contrib.staticfiles',  # We put our CSS etc explicitly in the right place so do not need this
    "troggle.core",
)

FORM_RENDERER = "django.forms.renderers.TemplatesSetting"  # Required to customise widget templates

# See the recommended order of these in https://docs.djangoproject.com/en/dev/ref/middleware/
# Note that this is a radically different onion architecture from earlier versions though it looks the same,
# see https://docs.djangoproject.com/en/dev/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
# Seriously, read this: https://www.webforefront.com/django/middlewaredjango.html which is MUCH BETTER than the docs
MIDDLEWARE = [
    #'django.middleware.security.SecurityMiddleware',  # SECURE_SSL_REDIRECT and SECURE_SSL_HOST # we don't use this
    "django.middleware.gzip.GZipMiddleware",  # not needed when expofiles and photos served by apache
    "django.contrib.sessions.middleware.SessionMiddleware",  # Manages sessions, if CSRF_USE_SESSIONS then it needs to be early
    "django.middleware.common.CommonMiddleware",  # DISALLOWED_USER_AGENTS, APPEND_SLASH and PREPEND_WWW
    "django.middleware.csrf.CsrfViewMiddleware",  # Cross Site Request Forgeries by adding hidden form fields to POST
    "django.contrib.auth.middleware.AuthenticationMiddleware",  # Adds the user attribute, representing the currently-logged-in user
    "django.contrib.admindocs.middleware.XViewMiddleware",  # this and docutils needed by admindocs
    "django.contrib.messages.middleware.MessageMiddleware",  # Cookie-based and session-based message support. Needed by admin system
    "django.middleware.clickjacking.XFrameOptionsMiddleware",  # clickjacking protection via the X-Frame-Options header
    #'django.middleware.security.SecurityMiddleware',  # SECURE_HSTS_SECONDS, SECURE_CONTENT_TYPE_NOSNIFF, SECURE_BROWSER_XSS_FILTER, SECURE_REFERRER_POLICY, and SECURE_SSL_REDIRECT
    #'troggle.core.middleware.SmartAppendSlashMiddleware'  # needs adapting after Dj2.0
]

ROOT_URLCONF = "troggle.urls"

WSGI_APPLICATION = "troggle.wsgi.application"  # change to asgi as soon as we upgrade to Django 3.0

ACCOUNT_ACTIVATION_DAYS = 3

# AUTH_PROFILE_MODULE = 'core.person'  # used by removed profiles app ?

QM_PATTERN = r"\[\[\s*[Qq][Mm]:([ABC]?)(\d{4})-(\d*)-(\d*)\]\]"

# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
#     'plugins': "table,spellchecker,paste,searchreplace",
#     'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True

TEST_RUNNER = "django.test.runner.DiscoverRunner"

from localsettings import *

# localsettings needs to take precedence. Call it to override any existing vars.

@@ -1,173 +0,0 @@
#!/bin/bash
# Crowley has python 3.9.2
# Taken from: footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog-crowley.sh'
echo '-- DONT RUN THIS - messes up permissions!'

echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog-crowley.sh"'
# use the script os-trog-crowley.sh

# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11

# NOW we set up troggle
PYTHON=python3.9
VENAME=p9d4  # python3.x and django 4
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will be created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"

if [ ! -f requirements.txt ]; then
    echo "-- No requirements.txt found. Copy it from your most recent installation."
    exit 1
fi
echo "## Using requirements.txt :"
cat requirements.txt
echo "##"

$PYTHON --version

# NOTE that when using a later or earlier version of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html

# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venv
cd ~

if [ ! -d $VENAME ]; then
    echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
    $PYTHON -m venv $VENAME
else
    echo "## /$VENAME/ already exists ! Delete it first."
    exit 1
fi

# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"

cd $VENAME
echo "-- now in: ${PWD}"
source bin/activate
echo "### Activated."
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3

# update local version of setuptools, more recent than OS version, needed for packages without wheels

echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools

PIP=pip

$PIP list > original-pip.list
$PIP freeze > original.txt

# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings

# fudge for philip's machine
if [ -d ${TROGDIR}/../expofiles ]; then
    ln -s ${TROGDIR}/../expofiles expofiles
else
    if [ ! -d /mnt/f/expofiles ]; then
        sudo mkdir /mnt/f
        sudo mount -t drvfs F: /mnt/f
    else
        ln -s /mnt/f/expofiles expofiles
    fi
fi

echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
#sudo chmod -R 0777 *

echo "### links to expoweb, troggle etc. complete:"
ls -tla

echo "###"
echo "### now installing ${TROGDIR}/requirements.txt"
echo "###"

# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
# seen on wsl2 as well as wsl1
# which ALSO ruins EXISTING permissions !
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04

$PIP install -r ${TROGDIR}/requirements.txt
echo '### install from requirements.txt completed.'
echo '### '

$PIP freeze > requirements.txt
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort requirements.txt > 2
comm -3 1 2 --check-order | awk '{ print $1}' > fresh-requirements.txt
rm 1
rm 2

cp requirements.txt requirements-$VENAME.txt
cp requirements-$VENAME.txt troggle/requirements-$VENAME.txt

$PIP list > installed-pip.list
$PIP list -o > installed-pip-o.list

REQ=installation-record
mkdir $REQ
mv requirements-$VENAME.txt $REQ
mv original.txt $REQ
mv requirements.txt $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
cp fresh-requirements.txt ../requirements.txt
mv fresh-requirements.txt $REQ
cp troggle/`basename "$0"` $REQ

$PYTHON --version
python --version
echo "Django version:`django-admin --version`"

echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'

## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors
# Ran 85 tests in 83.492s
# FAILED (failures=5)
## So you will need to run
#$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chown only takes effect then.

'./pre-run.sh' (runs the migrations and then the tests)

'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
if [ ! -d /mnt/f/expofiles ]; then
    echo '### No valid expofiles directory. Fix this before any tests will work.'
fi

@@ -1,227 +0,0 @@
# This is the main Apache server configuration file. It contains the
# configuration directives that give the server its instructions.
# See http://httpd.apache.org/docs/2.4/ for detailed information about
# the directives and /usr/share/doc/apache2/README.Debian about Debian specific
# hints.
#
#
# Summary of how the Apache 2 configuration works in Debian:
# The Apache 2 web server configuration in Debian is quite different to
# upstream's suggested way to configure the web server. This is because Debian's
# default Apache2 installation attempts to make adding and removing modules,
# virtual hosts, and extra configuration directives as flexible as possible, in
# order to make automating the changes and administering the server as easy as
# possible.

# It is split into several files forming the configuration hierarchy outlined
# below, all located in the /etc/apache2/ directory:
#
#	/etc/apache2/
#	|-- apache2.conf
#	|	`-- ports.conf
#	|-- mods-enabled
#	|	|-- *.load
#	|	`-- *.conf
#	|-- conf-enabled
#	|	`-- *.conf
#	`-- sites-enabled
#		`-- *.conf
#
#
# * apache2.conf is the main configuration file (this file). It puts the pieces
#   together by including all remaining configuration files when starting up the
#   web server.
#
# * ports.conf is always included from the main configuration file. It is
#   supposed to determine listening ports for incoming connections which can be
#   customized anytime.
#
# * Configuration files in the mods-enabled/, conf-enabled/ and sites-enabled/
#   directories contain particular configuration snippets which manage modules,
#   global configuration fragments, or virtual host configurations,
#   respectively.
#
#   They are activated by symlinking available configuration files from their
#   respective *-available/ counterparts. These should be managed by using our
#   helpers a2enmod/a2dismod, a2ensite/a2dissite and a2enconf/a2disconf. See
#   their respective man pages for detailed information.
#
# * The binary is called apache2. Due to the use of environment variables, in
#   the default configuration, apache2 needs to be started/stopped with
#   /etc/init.d/apache2 or apache2ctl. Calling /usr/bin/apache2 directly will not
#   work with the default configuration.


# Global configuration
#

#
# ServerRoot: The top of the directory tree under which the server's
# configuration, error, and log files are kept.
#
# NOTE! If you intend to place this on an NFS (or otherwise network)
# mounted filesystem then please read the Mutex documentation (available
# at <URL:http://httpd.apache.org/docs/2.4/mod/core.html#mutex>);
# you will save yourself a lot of trouble.
#
# Do NOT add a slash at the end of the directory path.
#
#ServerRoot "/etc/apache2"

#
# The accept serialization lock file MUST BE STORED ON A LOCAL DISK.
#
#Mutex file:${APACHE_LOCK_DIR} default

#
# The directory where shm and other runtime files will be stored.
#

DefaultRuntimeDir ${APACHE_RUN_DIR}

#
# PidFile: The file in which the server should record its process
# identification number when it starts.
# This needs to be set in /etc/apache2/envvars
#
PidFile ${APACHE_PID_FILE}

#
# Timeout: The number of seconds before receives and sends time out.
#
Timeout 300

#
# KeepAlive: Whether or not to allow persistent connections (more than
# one request per connection). Set to "Off" to deactivate.
#
KeepAlive On

#
# MaxKeepAliveRequests: The maximum number of requests to allow
# during a persistent connection. Set to 0 to allow an unlimited amount.
# We recommend you leave this number high, for maximum performance.
#
MaxKeepAliveRequests 100

#
# KeepAliveTimeout: Number of seconds to wait for the next request from the
# same client on the same connection.
#
KeepAliveTimeout 5


# These need to be set in /etc/apache2/envvars
User ${APACHE_RUN_USER}
Group ${APACHE_RUN_GROUP}

#
# HostnameLookups: Log the names of clients or just their IP addresses
# e.g., www.apache.org (on) or 204.62.129.132 (off).
# The default is off because it'd be overall better for the net if people
# had to knowingly turn this feature on, since enabling it means that
# each client request will result in AT LEAST one lookup request to the
# nameserver.
#
HostnameLookups Off

# ErrorLog: The location of the error log file.
# If you do not specify an ErrorLog directive within a <VirtualHost>
# container, error messages relating to that virtual host will be
# logged here. If you *do* define an error logfile for a <VirtualHost>
# container, that host's errors will be logged there and not here.
#
ErrorLog ${APACHE_LOG_DIR}/error.log

#
# LogLevel: Control the severity of messages logged to the error_log.
# Available values: trace8, ..., trace1, debug, info, notice, warn,
# error, crit, alert, emerg.
# It is also possible to configure the log level for particular modules, e.g.
# "LogLevel info ssl:warn"
#
LogLevel warn

# Include module configuration:
IncludeOptional mods-enabled/*.load
IncludeOptional mods-enabled/*.conf

# Include list of ports to listen on
Include ports.conf


# Sets the default security model of the Apache2 HTTPD server. It does
# not allow access to the root filesystem outside of /usr/share and /var/www.
# The former is used by web applications packaged in Debian,
# the latter may be used for local directories served by the web server. If
# your system is serving content from a sub-directory in /srv you must allow
# access here, or in any related virtual host.
<Directory />
	Options FollowSymLinks
	AllowOverride None
	Require all denied
</Directory>

<Directory /usr/share>
	AllowOverride None
	Require all granted
</Directory>

<Directory /var/www/>
	Options Indexes FollowSymLinks
	AllowOverride None
	Require all granted
</Directory>

#<Directory /srv/>
#	Options Indexes FollowSymLinks
#	AllowOverride None
#	Require all granted
#</Directory>


# AccessFileName: The name of the file to look for in each directory
# for additional configuration directives. See also the AllowOverride
# directive.
#
AccessFileName .htaccess

#
# The following lines prevent .htaccess and .htpasswd files from being
# viewed by Web clients.
#
<FilesMatch "^\.ht">
	Require all denied
</FilesMatch>


#
# The following directives define some format nicknames for use with
# a CustomLog directive.
#
# These deviate from the Common Log Format definitions in that they use %O
# (the actual bytes sent including headers) instead of %b (the size of the
# requested file), because the latter makes it impossible to detect partial
# requests.
#
# Note that the use of %{X-Forwarded-For}i instead of %h is not recommended.
# Use mod_remoteip instead.
#
LogFormat "%v:%p %h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" vhost_combined
LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" combined
LogFormat "%h %l %u %t \"%r\" %>s %O" common
LogFormat "%{Referer}i -> %U" referer
LogFormat "%{User-agent}i" agent

# Include of directories ignores editors' and dpkg's backup files,
# see README.Debian for details.

# Include generic snippets of statements
IncludeOptional conf-enabled/*.conf

# Include the virtual host configurations:
IncludeOptional sites-enabled/*.conf

# vim: syntax=apache ts=4 sw=4 sts=4 sr noet
@@ -1,47 +0,0 @@
# envvars - default environment variables for apache2ctl

# this won't be correct after changing uid
unset HOME

# for supporting multiple apache2 instances
if [ "${APACHE_CONFDIR##/etc/apache2-}" != "${APACHE_CONFDIR}" ] ; then
	SUFFIX="-${APACHE_CONFDIR##/etc/apache2-}"
else
	SUFFIX=
fi

# Since there is no sane way to get the parsed apache2 config in scripts, some
# settings are defined via environment variables and then used in apache2ctl,
# /etc/init.d/apache2, /etc/logrotate.d/apache2, etc.
export APACHE_RUN_USER=expo
export APACHE_RUN_GROUP=expo
# temporary state file location. This might be changed to /run in Wheezy+1
export APACHE_PID_FILE=/var/run/apache2$SUFFIX/apache2.pid
export APACHE_RUN_DIR=/var/run/apache2$SUFFIX
export APACHE_LOCK_DIR=/var/lock/apache2$SUFFIX
# Only /var/log/apache2 is handled by /etc/logrotate.d/apache2.
export APACHE_LOG_DIR=/var/log/apache2$SUFFIX

## The locale used by some modules like mod_dav
#export LANG=C
## Uncomment the following line to use the system default locale instead:
. /etc/default/locale

export LANG

## The command to get the status for 'apache2ctl status'.
## Some packages providing 'www-browser' need '--dump' instead of '-dump'.
#export APACHE_LYNX='www-browser -dump'

## If you need a higher file descriptor limit, uncomment and adjust the
## following line (default is 8192):
#APACHE_ULIMIT_MAX_FILES='ulimit -n 65536'

## If you would like to pass arguments to the web server, add them below
## to the APACHE_ARGUMENTS environment.
#export APACHE_ARGUMENTS=''

## Enable the debug mode for maintainer scripts.
## This will produce a verbose output on package installations of web server modules and web application
## installations which interact with Apache
#export APACHE2_MAINTSCRIPT_DEBUG=1
@@ -1,121 +0,0 @@
import os
import sys
import urllib.parse

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
"""

print(" * importing troggle/localsettings.py")

# DO NOT check this file into the git repo - it contains real passwords. [not this copy]
SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
EXPOUSERPASS = "nope"
EXPOADMINUSERPASS = "nope"
EMAIL_HOST_PASSWORD = "nope"


DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle',        # Or path to database file if using sqlite3.
        'USER': 'expo',           # Not used with sqlite3.
        'PASSWORD': 'not a real password',  # Not used with sqlite3.
        'HOST': '',               # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',               # Set to empty string for default. Not used with sqlite3.
    }
}


EXPOUSER = 'expo'
EXPOUSERPASS = "nnn:gggggg"
EXPOUSER_EMAIL = 'wookey@wookware.org'

REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            PYTHON_PATH + "templates"
        ],
        'OPTIONS': {
            'debug': 'DEBUG',
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'core.context.troggle_context',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
                # insert your TEMPLATE_LOADERS here
            ]
        },
    },
]

PUBLIC_SITE = True

# This should be False for normal running
DEBUG = True

SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'

CAVERN = 'cavern'
THREEDTOPOS = 'survexport'
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
SURVEYS = REPOS_ROOT_PATH
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")

CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
THREEDCACHEDIR = CACHEDIR + '3d/'
THUMBNAILCACHE = CACHEDIR + 'thumbs'

PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'

URL_ROOT = 'http://expo.survex.com/'
DIR_ROOT = ''  # this should end in / if a value is given
EXPOWEB_URL = '/'
SURVEYS_URL = '/survey_scans/'
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')

# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views/surveys.py
MEDIA_URL = '/site_media/'

MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'

STATIC_URL = urllib.parse.urljoin(URL_ROOT, '/static/')  # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT, '/javascript/')  # always fails, try to revive it ?

#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'  # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/'    # not needed while TinyMCE not installed

LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'

# add in 290, 291, 358 when they don't make it crash horribly
NOTABLECAVESHREFS = ["264", "258", "204", "76", "107"]
@@ -1,164 +0,0 @@
import os
import sys
import urllib.parse
from pathlib import Path

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""

print(" * importing troggle/localsettings.py")

# DO NOT check this file into the git repo - it contains real passwords.

EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True  # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle',       # Or path to database file if using sqlite3.
        'USER': 'expo',          # Not used with sqlite3.
        'PASSWORD': 'uFqP56B4XleeyIW',  # Not used with sqlite3.
        'HOST': '',              # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',              # Set to empty string for default. Not used with sqlite3.
    }
}


EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'

SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
EXPOUSERPASS = "nope"
EXPOADMINUSERPASS = "nope"
EMAIL_HOST_PASSWORD = "nope"

REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PHOTOS_YEAR = "2022"
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            PYTHON_PATH + "templates"
        ],
        'OPTIONS': {
            'debug': 'DEBUG',
            'context_processors': [
                # django.template.context_processors.csrf,  # is always enabled and cannot be removed, sets csrf_token
                'django.contrib.auth.context_processors.auth',  # knowledge of logged-on user & permissions
                'core.context.troggle_context',  # in core/troggle.py
                'django.template.context_processors.debug',
                #'django.template.context_processors.request',  # copy of current request, added in trying to make csrf work
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',   # includes a variable MEDIA_URL
                'django.template.context_processors.static',  # includes a variable STATIC_URL
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',  # For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
                # insert your own TEMPLATE_LOADERS here
            ]
        },
    },
]

PUBLIC_SITE = True

# This should be False for normal running
DEBUG = True
CACHEDPAGES = True  # experimental page cache for a handful of page types

SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'

# executables:
CAVERN = 'cavern'          # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport'  # for parsing .3d files and producing .pos files

EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
#SURVEYS = REPOS_ROOT_PATH
SCANS_ROOT = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib'  # used for CaveViewer JS utility


CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")

# CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
# THREEDCACHEDIR = CACHEDIR + '3d/'
# THUMBNAILCACHE = CACHEDIR + 'thumbs'

PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV

# Note that all these *_URL constants are not actually used in urls.py, they should be..
#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = ''  # this should end in / if a value is given
EXPOWEB_URL = '/'
SCANS_URL = '/survey_scans/'
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')

# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'


STATIC_URL = urllib.parse.urljoin(URL_ROOT, '/static/')  # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT, '/javascript/')  # always fails, try to revive it ?

#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'  # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/'    # not needed while TinyMCE not installed

LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'

# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = ["290", "291", "359", "264", "258", "204", "76", "107"]

# Sanitise these to be strings as all other code is expecting strings
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
LOGFILE = os.fspath(LOGFILE)
#SURVEYS = os.fspath(SURVEYS)
EXPOWEB = os.fspath(EXPOWEB)
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
SURVEX_DATA = os.fspath(SURVEX_DATA)
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
TEMPLATE_PATH = os.fspath(TEMPLATE_PATH)  # was os.fspath(TROGGLE_PATH), which assigned the wrong directory
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
SCANS_ROOT = os.fspath(SCANS_ROOT)
LIBDIR = os.fspath(LIBDIR)

print(" + finished importing troggle/localsettings.py")
@@ -1,23 +0,0 @@
# This requirements.txt matches the libraries as of 2023-07-09 on expo.survex.com <Debian GNU/Linux 11 (bullseye)>

# NB on the server asgiref==3.3.0, however this conflicts with the Django==3.2.12 requirement
asgiref==3.3.2
Django==3.2.12
docutils==0.16
packaging==20.9
Pillow==8.1.2
pytz==2021.1
sqlparse==0.4.1
Unidecode==1.2.0
beautifulsoup4==4.9.3
piexif==1.1.3

# Not installed on expo.survex.com
#black==23.3
#click==8.1.3
#coverage==7.2
#isort==5.12.0
#mypy-extensions==1.0.0
#pathspec==0.11
#platformdirs==3.8
#ruff==0.0.245
@@ -1,93 +0,0 @@
Instructions for setting up new expo debian server/VM
For Debian Stretch, June 2019.

[Note added March 2021:
See also http://expo.survex.com/handbook/troggle/serverconfig.html
and troggle/README.txt
]

adduser expo
apt install openssh-server mosh tmux mc zile emacs-nox most ncdu
apt install python-django apache2 mysql-server survex make rsync
apt install libjs-openlayers make
apt install git mercurial mercurial-server?

for boe:
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl

obsolete-packages:
 bins (move to jigl?) (for photos)
 python-django 1.7
backports: survex therion
not-packaged: caveview

make these dirs available at top documentroot:
 cuccfiles
 expofiles
 loser (link to repo)
 tunneldata (link to repo)
 troggle (link to repo)
 expoweb (link to repo)
 boc/boe

config
 containing:

setup apache configs for cucc and expo
#disable default website
a2dissite 000-default
a2ensite cucc
a2ensite expo
a2enmod cgid

Boe config:
Alias /boe /home/expo/boe/boc/boc.pl
<Directory /home/expo/boe/boc>
    AddHandler cgi-script .pl
    SetHandler cgi-script
    Options +ExecCGI
    Require all granted
</Directory>
And remember to set both program and data dir to be
www-data:www-data
(optionally make file group read/write by treasurer account)
create empty repo by clicking create in boe interface
then set names in 'settings'

Set up mysql (as root)
mysql -p
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';

install django:
NO!
This was: sudo apt install python-django python-django-registration python-django-imagekit python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi
Should be ?
sudo apt install python-django python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi

Check if this is correct:
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
(both modified for stretch/python2). packages under /home/wookey/packages/

need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
need libapache2-mod-wsgi for apache wsgi support.

On stretch the django 1.10 is no use so get rid of that:
apt remove python3-django python-django python-django-common python-django-doc

Then replace with django 1.7 (needs to be built for stretch):
apt install python-django python-django-common python-django-doc
apt install python-django-registration python-django-imagekit python-django-tinymce

then hold them to stop them being upgraded by unattended upgrades:
echo "python-django hold" | sudo dpkg --set-selections
echo "python-django-common hold" | sudo dpkg --set-selections
echo "python-django-doc hold" | sudo dpkg --set-selections

#troggle has to have a writable logfile otherwise the website explodes
# 500 error on the server, and apache error log has non-reentrant errors
create /var/log/troggle/troggle.log
chown www-data:adm /var/log/troggle/troggle.log
chmod 660 /var/log/troggle/troggle.log
@@ -1,7 +0,0 @@
# install the apport exception handler if available
try:
    import apport_python_hook
except ImportError:
    pass
else:
    apport_python_hook.install()
@@ -1,103 +0,0 @@
adduser expo
apt install openssh-server mosh tmux mc zile emacs-nox most ncdu
apt install python-django apache2 mysql-server survex make rsync
apt install libjs-openlayers make
apt install git mercurial mercurial-server?

for boe:
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl

apt install ufraw   (for PEF image decoding)
sudo apt install python-django python-django-registration fonts-freefont-ttf libapache2-mod-wsgi python3-gdbm
# sudo apt install python-django-imagekit python-django-tinymce

obsolete-packages: bins (move to jigl?)
older python-django?
backports: survex therion
not-packaged: caveview


make these dirs available at top documentroot:
 cuccfiles
 expofiles
 loser
 tunneldata
 troggle
 expoweb
 boc/boe

config
 containing:

setup apache configs for cucc and expo
#disable default website
a2dissite 000-default
a2ensite cucc
a2ensite expo
a2enmod cgid

Boe config:
Alias /boe /home/expo/boe/boc/boc.pl
<Directory /home/expo/boe/boc>
    AddHandler cgi-script .pl
    SetHandler cgi-script
    Options +ExecCGI
    Require all granted
</Directory>
And remember to set both program and data dir to be
www-data:www-data
(optionally make file group read/write by treasurer account)
create empty repo by clicking create in boe interface
then set names in 'settings'

Set up mysql (as root)
mysql -p
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
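To check that the grant took effect, standard MySQL/MariaDB syntax is (shown only as an illustration, not part of the original recipe):
SHOW GRANTS FOR 'expo'@'localhost';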
Ctrl-D to exit

somepassword is set in localsettings.py
sudo service mariadb stop
sudo service mariadb start

to delete the database, it is
DROP DATABASE troggle;

install django:
sudo apt install python-django python-django-registration python-django-imagekit python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi

python-django-imagekit comes from https://salsa.debian.org/python-team/modules/python-django-imagekit
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce

need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
need libapache2-mod-wsgi for apache wsgi support.

On stretch the django 1.10 is no use so get rid of that:
apt remove python3-django python-django python-django-common python-django-doc

Then replace with django 1.7 (needs to be built for stretch):
apt install python-django python-django-common python-django-doc
apt install python-django-registration python-django-imagekit python-django-tinymce

then hold them to stop them being upgraded by unattended upgrades:
echo "python-django hold" | sudo dpkg --set-selections
echo "python-django-common hold" | sudo dpkg --set-selections
echo "python-django-doc hold" | sudo dpkg --set-selections

Optimizing server
I've tweaked the apache and mysql settings to make them a bit more suitable for a small machine. Seems to have shaved 200MB or so off the idling footprint.
https://www.narga.net/optimizing-apachephpmysql-low-memory-server/

(just discovered 'ab' for running apache performance tests - handy).
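For example, a quick smoke test against the local server could look like this (illustrative invocation; pick your own request count and concurrency):
ab -n 1000 -c 10 http://localhost/
where -n is the total number of requests and -c the number of concurrent clients; it reports requests per second and latency figures.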
Do the edit to site-packages/django/db/backends/mysql/base.py
to comment out the requirement for mysqlclient >1.3.13
as we run perfectly happily with Django 2.2.19 & mysqlclient 1.3.10:

    version = Database.version_info
    # test nobbled by Wookey 2021-04-08 as 1.3.13 is not available on stable
    #if version < (1, 3, 13):
    #    raise ImproperlyConfigured('mysqlclient 1.3.13 or newer is required; you have %s.' % Database.__version__)
@@ -1,9 +0,0 @@
Django==1.7.11
django-registration==2.1.2
mysql
#imagekit
django-imagekit
Image
django-tinymce==2.7.0
smartencoding
unidecode
@@ -1,18 +0,0 @@
2023-07-17 Philip Sargent

Trying to sort out configurations, as we got into a bit of a mess on
Expo in the last couple of weeks with two (notionally identical Debian
Bullseye) expo laptops: Crowley (which has troggle installed locally and
can run it locally) and Aziraphale (which has a local copy of the troggle
repo but is not configured to run it locally); Martin Green's laptop
(Ubuntu 22.04.2); and Philip's Barbie laptop (Ubuntu 22.04.3). And of
course the server itself, expo.survex.com, which is running Debian
Bullseye. But most development recently had been done on Philip's two
other machines, a desktop and a PC, both running Ubuntu on WSL on
Windows and both using venv environments, which Crowley also does.

- settings.py
    is common to all configurations,
but these are all different on each machine (see the sketch below for
how the local file overrides the common one):
- localsettings.py
- requirements.txt
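For reference, the override mechanism the localsettings variants rely on is just a
module-level wildcard import at the tail of settings.py, so whatever
localsettings.py assigns wins. A minimal sketch (the exact import line in
troggle's own settings.py may be worded differently):

    # tail of a hypothetical settings.py: defaults first, local overrides last
    DEBUG = False          # default, replaced if localsettings.py assigns DEBUG
    PUBLIC_SITE = False

    # anything localsettings.py assigns replaces the defaults above
    from localsettings import *  # noqa: F401,F403  (assumed import style)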
@@ -1,3 +0,0 @@
The copy in this /_deploy/ folder may not be the latest if active development
has been going on in the parent folder. Check there for a later copy of
the localsettingsWSL file.
@@ -1,188 +0,0 @@
import os
import sys
from pathlib import Path

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""

print(" * importing troggle/localsettings.py")

# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets imported from credentials.py

SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"

EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True  # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely

SERVERPORT = "8000"  # not needed

PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)

# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/

# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / "lib" / PV


TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib"  # used for CaveViewer JS utility

EXPOFILES = REPOS_ROOT_PATH / "expofiles"

SCANS_ROOT = EXPOFILES / "surveyscans"
# PHOTOS_ROOT = EXPOFILES / 'photos'
PHOTOS_ROOT = Path("/mnt/d/EXPO/PHOTOS")
PHOTOS_YEAR = "2023"

NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]

# PYTHON_PATH = os.fspath(PYTHON_PATH)
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
LOGFILE = PYTHON_PATH / "troggle.log"
SQLITEDB = PYTHON_PATH / "troggle.sqlite"
KMZ_ICONS_PATH = PYTHON_PATH / "kmz_icons"


# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = "/site-media/"

DIR_ROOT = Path("")  # this should end in / if a value is given
URL_ROOT = "/"
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'

# Note that these constants are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")

STATIC_URL = Path(URL_ROOT, "/static/")  # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/")  # used for CaveViewer JS utility

# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------

PUBLIC_SITE = True
DEBUG = True  # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True  # experimental page cache for a handful of page types

# executables:
CAVERN = "cavern"          # for parsing .svx files and producing .3d files
SURVEXPORT = "survexport"  # for parsing .3d files and producing .pos files

DBSQLITE = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        #'NAME' : 'troggle.sqlite',
        "NAME": str(SQLITEDB),
        "USER": "expo",        # Not used with sqlite3.
        "PASSWORD": "sekrit",  # Not used with sqlite3.
        "HOST": "",            # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "",            # Set to empty string for default. Not used with sqlite3.
    }
}
DBMARIADB = {
    "default": {
        "ENGINE": "django.db.backends.mysql",  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        "NAME": "troggle",  # Or path to database file if using sqlite3.
        "USER": "expo",
        "PASSWORD": "my-secret-password-schwatzmooskogel",
        "HOST": "",         # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "",         # Set to empty string for default. Not used with sqlite3.
    }
}

# default database for me is sqlite
DBSWITCH = "sqlite"

if DBSWITCH == "sqlite":
    DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
    DATABASES = DBMARIADB


TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [TEMPLATE_PATH],
        "OPTIONS": {
            "debug": "DEBUG",
            "context_processors": [
                # django.template.context_processors.csrf,  # is always enabled and cannot be removed, sets csrf_token
                "django.contrib.auth.context_processors.auth",  # knowledge of logged-on user & permissions
                "core.context.troggle_context",  # in core/troggle.py - only used in expedition.html
                "django.template.context_processors.debug",
                "django.template.context_processors.i18n",
                "django.template.context_processors.media",   # includes a variable MEDIA_URL
                "django.template.context_processors.static",  # includes a variable STATIC_URL used by admin pages
                "django.template.context_processors.tz",
                "django.template.context_processors.request",  # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
                "django.contrib.messages.context_processors.messages",
            ],
            "loaders": [
                "django.template.loaders.filesystem.Loader",  # default location is troggle/templates/
                "django.template.loaders.app_directories.Loader",  # needed for admin 'app'
            ],
        },
    },
]

EXPOUSER = "expo"
EXPOUSER_EMAIL = "philip.sargent@gmail.com"
EXPOADMINUSER = "expoadmin"
EXPOADMINUSER_EMAIL = "philip.sargent@gmail.com"

EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.net"  # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "django-test@klebos.net"

SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"

EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"

EXPOWEB_URL = ""
# SCANS_URL = '/survey_scans/'  # defunct, removed.

sys.path.append(str(REPOS_ROOT_PATH))
sys.path.append(str(PYTHON_PATH))

# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)  # needs the 'import os' added at the top of this file
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)

STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
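The str(...) + "/" repairs at the bottom of that file exist because pathlib
normalises away trailing slashes, which URL prefixes need. A minimal sketch of a
plain-string alternative (illustrative only; url_dir is a hypothetical helper,
not part of troggle):

    from urllib.parse import urljoin

    def url_dir(root: str, leaf: str) -> str:
        """Join URL fragments and guarantee exactly one trailing slash."""
        joined = urljoin(root, leaf)
        return joined if joined.endswith("/") else joined + "/"

    STATIC_URL = url_dir("/", "static")      # -> '/static/'
    MEDIA_URL = url_dir("/", "site_media")   # -> '/site_media/'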
@@ -1,12 +0,0 @@
asgiref==3.3.4
confusable-homoglyphs==3.2.0
coverage==5.5
Django==3.2.12
docutils==0.14
gunicorn==20.1.0
Pillow==9.0.1
pytz==2019.1
reportlab==3.6.8
sqlparse==0.2.4
typing-extensions==3.7.4.3
Unidecode==1.0.23
@@ -1,20 +0,0 @@
# Philip bleeding edge config
asgiref==3.6.0
beautifulsoup4==4.12.2
black==23.1.0
click==8.1.3
coverage==7.1.0
Django==4.2
docutils==0.19
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
Pillow==9.4.0
platformdirs==3.0.0
pytz==2022.7
ruff==0.0.245
soupsieve==2.4.1
sqlparse==0.4.3
Unidecode==1.3.6
piexif==1.1.3
@@ -1,17 +0,0 @@
asgiref==3.6.0
black==23.1.0
click==8.1.3
coverage==7.1.0
Django==4.2
docutils==0.19
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
Pillow==9.4.0
platformdirs==3.0.0
pytz==2022.7
ruff==0.0.245
sqlparse==0.4.3
tomli==2.0.1
Unidecode==1.3.6
@@ -1,9 +0,0 @@
asgiref==3.5.2
coverage==6.5.0
Django==3.2.16
docutils==0.19
Pillow==9.3.0
pytz==2022.6
sqlparse==0.4.3
typing_extensions==4.4.0
Unidecode==1.3.6
@@ -1,16 +0,0 @@
asgiref==3.6.0
black==23.1.0
click==8.1.3
coverage==7.1.0
Django==4.2
docutils==0.19
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
Pillow==9.4.0
platformdirs==3.0.0
pytz==2022.7
ruff==0.0.245
sqlparse==0.4.3
Unidecode==1.3.6
@@ -1,9 +0,0 @@
asgiref==3.5.2
coverage==6.5.0
Django==3.2.16
docutils==0.19
Pillow==9.3.0
pytz==2022.6
sqlparse==0.4.3
typing_extensions==4.4.0
Unidecode==1.3.6
@@ -1,21 +0,0 @@
asgiref==3.7.0
beautifulsoup4==4.12.0
black==23.3.0
click==8.1.3
coverage==7.2.0
Django==4.2
docutils==0.20
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
Pillow==10.0.0
pkg_resources==0.0.0
platformdirs==3.8.0
pytz==2023.3
ruff==0.0.245
soupsieve==2.4.1
sqlparse==0.4.0
tomli==2.0.1
typing_extensions==4.7.1
Unidecode==1.3.6
@@ -1,17 +0,0 @@
asgiref==3.5.0
confusable-homoglyphs==3.2.0
Django==3.2
docutils==0.14
gunicorn==20.1.0
Pillow==9.1.0
sqlparse==0.2.4
typing-extensions==3.7.4.3
Unidecode==1.0.23
mariadb==1.0.11
mysql-connector-python==8.0.29
mysqlclient==2.1.0
pytz==2022.5
# duplicate pins removed: the file listed both Pillow==5.4.1 and Pillow==9.1.0,
# both asgiref==3.3.4 and asgiref==3.5.0, and gunicorn==20.1.0 twice,
# which pip rejects as conflicting requirements; the later pins are kept.
@@ -1,171 +0,0 @@
#!/bin/bash
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'

# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog.sh

# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11

# NOW we set up troggle
PYTHON=python3.11
VENAME=p11d4 # python3.x and django 4.2
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will be created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"

if [ ! -f requirements.txt ]; then    # was '[ -d requirements.txt ]', which always failed: -d tests for a directory
    echo "-- No requirements.txt found. You should be in the /troggle/ folder. Copy it from your most recent installation."
    exit 1
fi
echo "## Using requirements.txt :"
cat requirements.txt
echo "##"

$PYTHON --version

# NOTE that when using a later or earlier version of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html

# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venvs
cd ~

if [ ! -d $VENAME ]; then
    echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have the ${PYTHON}-venv package installed and/or use an Ubuntu window)"
    $PYTHON -m venv $VENAME
else
    echo "## /$VENAME/ already exists ! Delete it first."
    exit 1
fi

# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"

cd $VENAME
echo "-- now in: ${PWD}"
source bin/activate
echo "### Activated."
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3

# update local version of setuptools, more recent than OS version, needed for packages without wheels

echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools

PIP=pip

$PIP list > original-pip.list
$PIP freeze > original.txt

# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings
#ln -s ${TROGDIR}/../expofiles expofiles

# fudge for philip's machine
if [ ! -d /mnt/d/EXPO ]; then
    sudo mkdir /mnt/d
    sudo mount -t drvfs D: /mnt/d
fi

if [ -d ${TROGDIR}/../expofiles ]; then
    ln -s ${TROGDIR}/../expofiles expofiles
else
    ln -s /mnt/d/EXPO/expofiles expofiles
fi

echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
sudo chmod -R 777 *

echo "### links to expoweb, troggle etc. complete:"
ls -tla
echo "###"
echo "### now installing ${TROGDIR}/requirements.txt"
echo "###"

# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
# seen on wsl2 as well as wsl1
# which ALSO ruins EXISTING permissions !
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04

$PIP install -r ${TROGDIR}/requirements.txt
echo '### install from requirements.txt completed.'
echo '### '

$PIP freeze > requirements.txt
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort requirements.txt > 2
comm -3 1 2 --check-order | awk '{ print $1 }' > fresh-requirements.txt
rm 1
rm 2

cp requirements.txt requirements-$VENAME.txt
cp requirements-$VENAME.txt troggle/requirements-$VENAME.txt

$PIP list > installed-pip.list
$PIP list -o > installed-pip-o.list

REQ=installation-record
mkdir $REQ
mv requirements-$VENAME.txt $REQ
mv original.txt $REQ
mv requirements.txt $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
cp fresh-requirements.txt ../requirements.txt
mv fresh-requirements.txt $REQ
cp troggle/`basename "$0"` $REQ


$PYTHON --version
python --version
echo "Django version: `django-admin --version`"

echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'

## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors

## So you will need to run
# 'sudo chown -Rhv philip:philip ~/$VENAME' (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chown only takes effect then.

'python manage.py test -v 2'
'./pre-run.sh' (runs the tests again)

'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
if [ ! -d /mnt/d/expofiles ]; then
    echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
fi
@@ -1,181 +0,0 @@
import sys
from pathlib import Path

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""

print(" * importing troggle/localsettings.py")

# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets imported from credentials.py

SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"

EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to the remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True  # breaks 7 tests in the test suite (301 not 200 or 302) and runserver fails completely

SERVERPORT = '8000'  # not needed

PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)

# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib, which has been standard since python 3.4.
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/

# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / 'lib' / PV

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib'  # used for CaveViewer JS utility

# EXPOFILES = REPOS_ROOT_PATH / "expofiles"
EXPOFILES = Path('/media/philip/sd-huge1/cucc-expo/expofiles/')

SCANS_ROOT = EXPOFILES / 'surveyscans'
PHOTOS_ROOT = EXPOFILES / 'photos'
PHOTOS_YEAR = "2023"

NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]

# PYTHON_PATH = os.fspath(PYTHON_PATH)
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
LOGFILE = PYTHON_PATH / "troggle.log"

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = '/site-media/'

DIR_ROOT = Path("")  # this should end in / if a value is given
URL_ROOT = '/'
# URL_ROOT = 'http://localhost:' + SERVERPORT + '/'

# Note that these constants are not actually used in urls.py, though they should be..
# and they all need to end with / so using 'Path' doesn't work..
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")

STATIC_URL = Path(URL_ROOT, "/static/")  # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/")  # used for CaveViewer JS utility

# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------

PUBLIC_SITE = True
DEBUG = True  # Always keep this True, even on the public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True  # experimental page cache for a handful of page types

# executables:
CAVERN = 'cavern'  # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport'  # for parsing .3d files and producing .pos files

DBSQLITE = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle.sqlite',
        'USER': 'expo',  # Not used with sqlite3.
        'PASSWORD': 'sekrit',  # Not used with sqlite3.
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    }
}
DBMARIADB = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle',  # Or path to database file if using sqlite3.
        'USER': 'expo',
        'PASSWORD': 'my-secret-password-schwatzmooskogel',
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    }
}

# default database for me is sqlite
DBSWITCH = "sqlite"

if DBSWITCH == "sqlite":
    DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
    DATABASES = DBMARIADB

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATE_PATH],
        'OPTIONS': {
            'debug': 'DEBUG',
            'context_processors': [
                # django.template.context_processors.csrf,  # is always enabled and cannot be removed, sets csrf_token
                'django.contrib.auth.context_processors.auth',  # knowledge of logged-on user & permissions
                'core.context.troggle_context',  # in core/troggle.py - only used in expedition.html
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',  # includes a variable MEDIA_URL
                'django.template.context_processors.static',  # includes a variable STATIC_URL used by admin pages
                'django.template.context_processors.tz',
                'django.template.context_processors.request',  # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                'django.template.loaders.filesystem.Loader',  # default location is troggle/templates/
                'django.template.loaders.app_directories.Loader',  # needed for admin 'app'
            ],
        },
    },
]

EXPOUSER = "expo"
EXPOUSER_EMAIL = "philip.sargent@gmail.com"
EXPOADMINUSER = "expoadmin"
EXPOADMINUSER_EMAIL = "philip.sargent@gmail.com"

EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.net"  # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "django-test@klebos.net"

SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"

EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"

EXPOWEB_URL = ''
# SCANS_URL = '/survey_scans/'  # defunct, removed.

sys.path.append(str(REPOS_ROOT_PATH))
sys.path.append(str(PYTHON_PATH))

# Sanitise these to be strings as all other code is expecting strings
# and we have not made the change to the pathlib Path type in the other localsettings-* variants yet.
# CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
# ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)

STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
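The trailing-slash workaround at the end of that file is easier to see in isolation. A minimal sketch, editorial only; PurePosixPath is used so the behaviour is the same on any OS:

from pathlib import PurePosixPath
from urllib.parse import urljoin

URL_ROOT = '/'
print(PurePosixPath(URL_ROOT, '/static/'))             # /static   <- Path drops the trailing slash Django needs
print(str(PurePosixPath(URL_ROOT, '/static/')) + '/')  # /static/  <- the str(...) + "/" fix applied above
print(urljoin(URL_ROOT, '/static/'))                   # /static/  <- what the next variant uses instead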
@@ -1,196 +0,0 @@
import sys
import os
import urllib.parse
from pathlib import Path

"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""

print(" * importing troggle/localsettings.py")

# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets imported from credentials.py

SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"

EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to the remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True  # breaks 7 tests in the test suite (301 not 200 or 302) and runserver fails completely

SERVERPORT = '8000'  # not needed

PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)

# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib, which has been standard since python 3.4.
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/

# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / 'lib' / PV
# LIBDIR = REPOS_ROOT_PATH / 'lib' / 'python3.9'  # should be finding this automatically: python --version etc.

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib'  # used for CaveViewer JS utility

# FILES = Path('/mnt/d/expofiles/')
EXPOFILES = Path('/media/philip/sd-huge1/cucc-expo/expofiles/')
SCANS_ROOT = EXPOFILES / 'surveyscans'
PHOTOS_ROOT = EXPOFILES / 'photos'
PHOTOS_YEAR = "2022"

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = '/site-media/'

DIR_ROOT = ''  # this should end in / if a value is given
URL_ROOT = '/'
# URL_ROOT = 'http://localhost:' + SERVERPORT + '/'

# Note that these constants are not actually used in urls.py, though they should be..
MEDIA_URL = urllib.parse.urljoin(URL_ROOT, '/site_media/')
SCANS_URL = urllib.parse.urljoin(URL_ROOT, '/survey_scans/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
SVX_URL = urllib.parse.urljoin(URL_ROOT, '/survex/')

STATIC_URL = urllib.parse.urljoin(URL_ROOT, '/static/')  # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT, '/javascript/')  # used for CaveViewer JS utility

# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------

PUBLIC_SITE = True
DEBUG = True  # Always keep this True, even on the public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True  # experimental page cache for a handful of page types

# executables:
CAVERN = 'cavern'  # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport'  # for parsing .3d files and producing .pos files

DBSQLITE = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle.sqlite',
        # 'NAME': ':memory:',
        'USER': 'expo',  # Not used with sqlite3.
        'PASSWORD': 'sekrit',  # Not used with sqlite3.
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    }
}
DBMARIADB = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'troggle',  # Or path to database file if using sqlite3.
        'USER': 'expo',
        'PASSWORD': 'my-secret-password-schwatzmooskogel',
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    }
}

# default database for me is sqlite
DBSWITCH = "sqlite"

if DBSWITCH == "sqlite":
    DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
    DATABASES = DBMARIADB

NOTABLECAVESHREFS = ["290", "291", "359", "264", "258", "204", "76", "107"]

PYTHON_PATH = REPOS_ROOT_PATH / 'troggle'
sys.path.append(os.fspath(REPOS_ROOT_PATH))
sys.path.append(os.fspath(PYTHON_PATH))

LOGFILE = PYTHON_PATH / 'troggle.log'
PYTHON_PATH = os.fspath(PYTHON_PATH)

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            TEMPLATE_PATH
        ],
        'OPTIONS': {
            'debug': 'DEBUG',
            'context_processors': [
                # django.template.context_processors.csrf,  # is always enabled and cannot be removed, sets csrf_token
                'django.contrib.auth.context_processors.auth',  # knowledge of logged-on user & permissions
                'core.context.troggle_context',  # in core/troggle.py - only used in expedition.html
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',  # includes a variable MEDIA_URL
                'django.template.context_processors.static',  # includes a variable STATIC_URL used by admin pages
                'django.template.context_processors.tz',
                'django.template.context_processors.request',  # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
                'django.contrib.messages.context_processors.messages',
            ],
            'loaders': [
                'django.template.loaders.filesystem.Loader',  # default location is troggle/templates/
                'django.template.loaders.app_directories.Loader',  # needed for admin 'app'
            ]
        },
    },
]

EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"

EXPOUSER = 'expo'
EXPOUSER_EMAIL = 'philip.sargent@gmail.com'
EXPOADMINUSER = 'expoadmin'
EXPOADMINUSER_EMAIL = 'philip.sargent@gmail.com'

EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.net"  # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = 'django-test@klebos.net'

SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"

EXPOWEB = REPOS_ROOT_PATH / "expoweb"
# SURVEYS = REPOS_ROOT_PATH
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
EXPOWEB_URL = ''
# SCANS_URL = '/survey_scans/'  # defunct, removed.

# Sanitise these to be strings as all other code is expecting strings
# and we have not made the change to the pathlib Path type in the other localsettings-* variants yet.
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
LOGFILE = os.fspath(LOGFILE)
# SURVEYS = os.fspath(SURVEYS)
EXPOWEB = os.fspath(EXPOWEB)
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
SURVEX_DATA = os.fspath(SURVEX_DATA)
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
TEMPLATE_PATH = os.fspath(TEMPLATE_PATH)  # was os.fspath(TROGGLE_PATH), a copy-paste slip
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
SCANS_ROOT = os.fspath(SCANS_ROOT)
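Both variants rely on the override mechanism described in their docstrings: troggle/settings.py defines defaults and then pulls this file in at the end, so the last assignment wins. A minimal sketch of the idiom; the exact import line in troggle's settings.py may differ:

# at the end of a hypothetical settings.py
DEBUG = False            # project default
MEDIA_URL = '/media/'    # project default

from localsettings import *  # noqa: F401,F403 - every name defined in
                             # localsettings.py replaces the default above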
@@ -1,46 +0,0 @@
#! /bin/sh
# create and sanitise files for pushing to the repo, for Babie laptop

echo deprecations.
python -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
echo diffsettings.
rm diffsettings.txt
if test -f "diffsettings.txt"; then
    echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
    exit
fi
python manage.py diffsettings | grep "###" > diffsettings.txt
echo pip freeze.
pip freeze > requirements.txt
echo inspectdb.
# this next line requires the database setting to be troggle.sqlite:
python manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettingsXubuntu.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"

sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: EXPOADMINUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"

sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password---imported-from-localsettings.py\""

sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""

mv _deploy/xubuntu/localsettingsXubuntu.py _deploy/xubuntu/localsettingsXubuntu.py.bak
mv localsettingsXubuntu.py _deploy/xubuntu
#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# python reset-django.py
# python manage.py makemigrations
# python manage.py test
# python manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py
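A quick way to confirm the sed passes above caught everything before pushing is to re-scan the sanitised files. A sketch, editorial only; the SECRETS tuple lists the four settings the script scrubs, the file names follow the script:

from pathlib import Path

SECRETS = ("EXPOUSERPASS", "EXPOADMINUSERPASS", "EMAIL_HOST_PASSWORD", "SECRET_KEY")

def unsanitised(path):
    """Return lines that still set a secret without the placeholder text."""
    flagged = []
    for line in Path(path).read_text().splitlines():
        for s in SECRETS:
            if line.startswith(s) and "imported-from-localsettings.py" not in line:
                flagged.append(line)
    return flagged

for f in ("diffsettings.txt", "_deploy/xubuntu/localsettingsXubuntu.py"):
    for line in unsanitised(f):
        print(f"{f}: NOT sanitised: {line}")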
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,214 +0,0 @@
"""
Modified for Expo April 2021.
"""

import re
from http import HTTPStatus

from django.test import Client, TestCase

from troggle.core.models.caves import Area, Cave
from troggle.core.models.troggle import Person, PersonExpedition

# import troggle.settings as settings
# FIXTURE_DIRS = settings.PYTHON_PATH / "core" / "fixtures"


class FixtureTests(TestCase):
    """These just hit the database.
    They do not exercise the GET and url functions.
    """

    fixtures = ["auth_users", "expo_areas", "expo_caves", "expo_exped"]
    ph = r"and leads in 800m of tortuous going to"

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_fix_person_loaded(self):
        p = Person.objects.get(fullname="Michael Sargent")
        self.assertEqual(str(p.first_name), "Michael")

    def test_fix_personexpedition_loaded(self):  # renamed: was a duplicate of test_fix_person_loaded, which silently shadowed the test above
        pe = PersonExpedition.objects.get(pk="681")
        self.assertEqual(str(pe.person.fullname), "Michael Sargent")
        self.assertEqual(str(pe.expedition.year), "2019")

    def test_fix_area_loaded(self):
        a = Area.objects.get(short_name="1623")
        self.assertEqual(str(a.short_name), "1623")

    def test_fix_cave_loaded115(self):
        c = Cave.objects.get(kataster_number="115")
        self.assertEqual(str(c.description_file), "1623/115.htm")
        self.assertEqual(str(c.url), "1623/115.url")  # intentional
        self.assertEqual(str(c.filename), "1623-115.html")

        # c.area is a 'ManyRelatedManager' object and not iterable
        # self.assertEqual(str(c.[0].short_name), "1623")

        ph = self.ph
        phmatch = re.search(ph, c.underground_description)
        self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")

    def test_fix_cave_loaded284(self):
        c = Cave.objects.get(kataster_number="284")
        self.assertEqual(str(c.description_file), "")
        self.assertEqual(str(c.url), "1623/284/284.html")
        self.assertEqual(str(c.filename), "1623-284.html")

        ph = r"at a depth of 72m, there are large round blocks"
        phmatch = re.search(ph, c.notes)
        self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")

    def test_page_personexpedition(self):
        response = self.client.get("/personexpedition/MichaelSargent/2019")
        content = response.content.decode()
        # with open('testresponse.html', 'w') as tr:
        #     tr.writelines(content)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        for ph in [r"Michael Sargent", r"Table of all trips and surveys aligned by date"]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
        # Need to add a fixture so that this actually has a logbook entry and a trip/svx in it.


class FixturePageTests(TestCase):
    """Currently nothing that runs troggle works - all do 404. Must be something in a template rendering crash?
    Ordinary pages are OK, and expopages and expofiles are OK, even though they come through troggle. And the
    fixtures are certainly loaded into the db, as the other tests show.
    """

    # The fixtures have a password hash which is compatible with the plain-text password 'secretword'
    fixtures = ["auth_users", "expo_areas", "expo_caves", "expo_exped"]
    ph = r"and leads in 800m of tortuous going to"

    @classmethod
    def setUpTestData(cls):
        pass

    def setUp(self):
        from django.contrib.auth.models import User

        self.user = User.objects.get(username="expotest")

        # Every test needs a client.
        self.client = Client()

    def tearDown(self):
        pass

    def test_fix_expedition(self):
        response = self.client.get("/expedition/2019")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"Michael Sargent"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('exped-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_personexped(self):
        response = self.client.get("/personexpedition/MichaelSargent/2019")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"Table of all trips and surveys aligned by date"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('persexped-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_person(self):
        response = self.client.get("/person/MichaelSargent")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"second-generation expo caver "

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('person-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_url115(self):
        ph = self.ph
        response = self.client.get("/1623/115.url")  # yes this is intentional, see the inserted data above & fixture
        self.assertEqual(response.status_code, HTTPStatus.OK)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_url284(self):
        response = self.client.get("/1623/284/284.html")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"at a depth of 72m, there are large round blocks"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('cave-url284.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_bare_url115(self):
        """Expect to get Page Not Found and status 404"""
        ph = "Probably a mistake."
        response = self.client.get("/1623/115")
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")  # 404 & Page Not Found

    def test_fix_cave_slug115(self):
        """Expect to get Page Not Found and status 404"""
        ph = "Probably a mistake."
        response = self.client.get("/1623-115")
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")  # 404 & Page Not Found

    def test_fix_caves284(self):
        response = self.client.get("/caves")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"284 Seetrichter"
        phmatch = re.search(ph, content)
        # with open('_cave_fix_caves.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    # Although the Cave object exists, it looks like we get a bad slug error when trying to get a QM page.

    # def test_fix_qms(self):
    #     response = self.client.get("/cave/qms/1623-284")
    #     self.assertEqual(response.status_code, HTTPStatus.OK)
    #     content = response.content.decode()
    #     ph = r"Question marks for 284 - Seetrichter"
    #     phmatch = re.search(ph, content)
    #     with open('_cave-fixqms.html', 'w') as f:
    #         f.write(content)
    #     self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    # def test_fix_openqms(self):
    #     response = self.client.get("/cave/openqms/1623-284")
    #     self.assertEqual(response.status_code, HTTPStatus.OK)
    #     content = response.content.decode()
    #     ph = r"Open Leads for 284 - Seetrichter"
    #     phmatch = re.search(ph, content)
    #     with open('_cave-fixopenqms.html', 'w') as f:
    #         f.write(content)
    #     self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
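The GET-then-re.search pattern repeated throughout that file could be factored into a single helper. A sketch, editorial only; the helper name and signature are illustrative, not troggle code:

import re
from http import HTTPStatus

def assert_page_contains(testcase, url, phrases, status=HTTPStatus.OK):
    """GET url with the Django test client and assert each regex in phrases matches the body."""
    response = testcase.client.get(url)
    testcase.assertEqual(response.status_code, status)
    content = response.content.decode()
    for ph in phrases:
        testcase.assertIsNotNone(
            re.search(ph, content), "Failed to find expected text: '" + ph + "'"
        )

# e.g. inside FixturePageTests:
#     assert_page_contains(self, "/expedition/2019", [r"Michael Sargent"])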
@@ -1,277 +0,0 @@
"""
We are using unittest for troggle.

Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.

The simple redirections to files which exist, e.g. in
/expoweb/
/expofiles/
/expofiles/documents/
etc. using parameters in localsettings such as PHOTOS_ROOT will test fine.

But paths like this:
/survey_scans/
/caves/
which rely on database resolution will fail unless a fixture has been set up for
them.

https://docs.djangoproject.com/en/dev/topics/testing/tools/
"""
import re
import subprocess
import unittest

from django.test import Client, SimpleTestCase, TestCase


class SimpleTest(SimpleTestCase):
    def test_test_setting(self):
        from django.conf import settings

        self.assertEqual(settings.EMAIL_BACKEND, "django.core.mail.backends.locmem.EmailBackend")
        import troggle.settings as settings

    def test_import_TroggleModel(self):
        from troggle.core.models.troggle import TroggleModel

    def test_import_Cave(self):
        from troggle.core.models.caves import Cave

    def test_import_parsers_surveys(self):
        # from PIL import Image
        from functools import reduce

        from troggle.core.utils import save_carefully

    def test_import_parsers_survex(self):
        import troggle.core.models.caves as models_caves
        import troggle.core.models.survex as models_survex
        import troggle.core.models.troggle as models
        import troggle.settings as settings
        from troggle.core.views import caves, drawings, other, scans, statistics, survex, uploads
        from troggle.core.views.caves import cavepage, ent
        from troggle.core.views.other import frontpage
        from troggle.parsers.people import GetPersonExpeditionNameLookup

    def test_import_views_uploads(self):
        from troggle.core.views.uploads import dwgupload

    def test_import_views_walletedit(self):
        from troggle.core.views.wallets_edit import walletedit

    def test_import_parsers_QMs(self):
        from troggle.core.models.logbooks import QM

    def test_import_parsers_people(self):
        from html import unescape

        from unidecode import unidecode

    def test_import_parsers_logbooks(self):
        from django.template.defaultfilters import slugify
        from django.utils.timezone import get_current_timezone, make_aware

        from parsers.people import GetPersonExpeditionNameLookup
        from troggle.core.models.logbooks import CaveSlug, QM, LogbookEntry, PersonLogEntry
        from troggle.core.models.troggle import DataIssue, Expedition

    def test_import_core_views_caves(self):
        from django.conf import settings
        from django.contrib.auth.decorators import login_required
        from django.http import HttpResponse, HttpResponseRedirect
        from django.shortcuts import get_object_or_404, render

        import troggle.core.views.expo
        from troggle.core.forms import CaveAndEntranceFormSet, CaveForm, EntranceForm, EntranceLetterForm
        from troggle.core.models.caves import Area, Cave, CaveAndEntrance, Entrance, SurvexStation  # EntranceSlug,
        from troggle.core.models.troggle import Expedition
        from troggle.core.views.auth import login_required_if_public

    def test_import_parsers_mix(self):
        import troggle.parsers.caves
        import troggle.parsers.drawings
        import troggle.parsers.logbooks
        import troggle.parsers.people
        import troggle.parsers.QMs
        import troggle.parsers.scans
        import troggle.parsers.survex
        import troggle.settings
        from troggle.parsers.logbooks import GetCaveLookup

    def test_import_imports(self):
        from django.contrib.auth.models import User
        from django.core import management
        from django.db import close_old_connections, connection, connections
        from django.http import HttpResponse
        from django.urls import reverse

    def test_import_urls(self):
        from django.conf import settings
        # from django.conf.urls import include, url
        from django.contrib import admin, auth
        from django.urls import resolve, reverse
        from django.views.generic.base import RedirectView
        from django.views.generic.edit import UpdateView
        from django.views.generic.list import ListView

        from troggle.core.views import caves, other, statistics, survex
        from troggle.core.views.auth import expologin, expologout
        from troggle.core.views.caves import cavepage, ent
        from troggle.core.views.expo import (
            editexpopage,
            expofiles_redirect,
            expofilessingle,
            expopage,
            map,
            mapfile,
            mediapage,
        )
        from troggle.core.views.logbooks import (
            Expeditions_jsonListView,
            Expeditions_tsvListView,
            expedition,
            get_logbook_entries,
            get_people,
            logbookentry,
            notablepersons,
            person,
            personexpedition,
        )
        from troggle.core.views.other import controlpanel
        from troggle.core.views.prospect import prospecting, prospecting_image
        from troggle.core.views.statistics import dataissues, pathsreport, stats
        from troggle.core.views.survex import survexcavesingle, survexcaveslist, svx


class ImportTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        import troggle.settings as settings
        from troggle.parsers.logbooks import LOGBOOKS_DIR, DEFAULT_LOGBOOK_FILE

        LOGBOOKS_PATH = settings.EXPOWEB / LOGBOOKS_DIR
        test_year = "1986"
        cls.test_logbook = LOGBOOKS_PATH / test_year / DEFAULT_LOGBOOK_FILE

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_logbook_exists(self):
        self.assertTrue(self.test_logbook.is_file())


class SubprocessTest(TestCase):
    @classmethod
    def setUpTestData(cls):
        pass

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_utf8(self):
        """Expects that utf8 is the default encoding when opening files"""
        import locale
        import sys

        self.assertTrue(
            sys.getdefaultencoding() == "utf-8", f"{sys.getdefaultencoding()} - UTF8 error in getdefaultencoding"
        )
        self.assertTrue(
            sys.getfilesystemencoding() == "utf-8",
            f"{sys.getfilesystemencoding()} - UTF8 error in getfilesystemencoding",
        )
        self.assertTrue(
            locale.getdefaultlocale()[1] == "UTF-8",
            f"{locale.getdefaultlocale()} - UTF8 error in locale.getdefaultlocale",
        )
        self.assertTrue(
            locale.getpreferredencoding() == "UTF-8",
            f"{locale.getpreferredencoding()} - UTF8 error in locale.getpreferredencoding",
        )

    def test_installs(self):
        """Expects external software installed: cavern, survexport, git
        (but not whether it actually works)
        """
        import troggle.settings as settings

        for i in [settings.CAVERN, settings.SURVEXPORT, settings.GIT]:
            # Define command as string and then split() into list format
            cmd = f"which {i}".split()
            try:
                sp = subprocess.check_call(cmd, shell=False)
            except subprocess.CalledProcessError:
                self.assertTrue(False, f"no {i} installed")

    def test_repos_git_status(self):
        """Expects clean git repos with no added files and no merge failures"""
        from pathlib import Path

        import troggle.settings as settings

        TROGGLE_PATH = Path(settings.REPOS_ROOT_PATH) / "troggle"
        for cwd in [settings.SURVEX_DATA, settings.EXPOWEB, settings.DRAWINGS_DATA, TROGGLE_PATH]:
            sp = subprocess.run([settings.GIT, "status"], cwd=cwd, capture_output=True, text=True)
            out = str(sp.stdout)
            if len(out) > 160:
                out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
            if sp.returncode != 0:
                print(f"git output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}")

            self.assertTrue(sp.returncode == 0, f"{cwd} - git is unhappy")

            content = sp.stdout
            ph = r"nothing to commit, working tree clean"
            phmatch = re.search(ph, content)
            msg = f'{cwd} - Failed to find expected git output: "{ph}"'
            self.assertIsNotNone(phmatch, msg)

            # ph1 = r"no changes added to commit"
            # phmatch1 = re.search(ph1, content)
            # ph2 = r"nothing to commit"
            # phmatch2 = re.search(ph2, content)
            # phmatch = phmatch1 or phmatch2
            # msg = f'{cwd} - Failed to find expected git output: "{ph1}" or "{ph2}"'
            # self.assertIsNotNone(phmatch, msg)

    def test_loser_survex_status(self):
        """Expects no failures of survex files"""
        from pathlib import Path

        import troggle.settings as settings

        cwd = settings.SURVEX_DATA
        for survey in ["1623-and-1626-no-schoenberg-hs.svx"]:
            sp = subprocess.run([settings.CAVERN, survey], cwd=cwd, capture_output=True, text=True)
            out = str(sp.stdout)
            if len(out) > 160:
                out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
            # print(f'survex output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}')
            if sp.returncode != 0:
                print(
                    f"survex output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}"
                )

            self.assertTrue(sp.returncode == 0, f"{cwd} - survex is unhappy")

            content = sp.stdout
            ph = r"Total length of survey legs"
            phmatch = re.search(ph, content)
            msg = f'{cwd} - Failed to find expected survex output: "{ph}"'
            self.assertIsNotNone(phmatch, msg)

            ph1 = r"Time used"
            phmatch1 = re.search(ph1, content)
            ph2 = r"vertical length of survey le"
            phmatch2 = re.search(ph2, content)

            phmatch = phmatch1 or phmatch2
            msg = f'{cwd} - Failed to find expected survex output: "{ph1}" or "{ph2}"'
            self.assertIsNotNone(phmatch, msg)
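As an aside, test_installs above shells out to `which`; the standard library can make the same check without a subprocess, which also works where `which` is absent. A sketch, editorial only:

import shutil

def missing_programs(*programs):
    """Return the programs that cannot be found on PATH."""
    return [p for p in programs if shutil.which(p) is None]

missing = missing_programs("cavern", "survexport", "git")
assert not missing, f"not installed: {missing}"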
@@ -1,451 +0,0 @@
"""
Originally written for CUYC
Philip Sargent (Feb.2021)

Modified for Expo April 2021.
"""

import pathlib
import re
from http import HTTPStatus

from django.test import Client, TestCase

import troggle.settings as settings
from troggle.core.models.wallets import Wallet
from troggle.core.models.troggle import Expedition


class DataTests(TestCase):
    """These check that the NULL and NON-UNIQUE constraints are working in the database"""

    @classmethod
    def setUpTestData(cls):
        pass

    def setUp(self):
        from django.contrib.auth.models import User

        u = User()
        u.pk = 9000
        u.user_id = 8000
        u.username, u.password = "stinker", "secretword"
        u.email = "philip.sargent+SP@gmail.com"
        u.first_name, u.last_name = "Stinker", "Pinker"
        u.save()
        self.user = u

    def tearDown(self):
        # self.member.delete()  # must delete member before user
        # self.user.delete()  # horrible crash, why?
        pass


class FixturePageTests(TestCase):
    # The fixtures have a password hash which is compatible with the plain-text password 'secretword'
    fixtures = ["auth_users"]

    def setUp(self):
        from django.contrib.auth.models import User

        self.user = User.objects.get(username="expotest")

    def tearDown(self):
        pass

    def test_fix_admin_login_fail(self):
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")

        logged_in = c.login(username=u.username, password="secretword")  # fails to work if password=u.password !
        self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")

        response = c.get("/admin/")
        content = response.content.decode()
        # with open('admin-op.html', 'w') as f:
        #     f.write(content)
        t = re.search(r"Troggle administration", content)
        self.assertIsNone(t, "Logged in as '" + u.username + "' (not staff) but still managed to get the Admin page")


class PostTests(TestCase):
    """Tests the walletedit form"""

    fixtures = ["auth_users"]

    @classmethod
    def setUpTestData(cls):
        pass

    def setUp(self):
        from django.contrib.auth.models import User

        self.user = User.objects.get(username="expotest")
        self.client = Client()

        testyear = "2022"
        wname = f"{testyear}:00"
        self.testyear = testyear
        w = Wallet()
        w.pk = 9100
        w.fpath = str(pathlib.Path(settings.SCANS_ROOT, wname))
        w.walletname = wname
        w.save()
        self.wallet = w

        e = Expedition()
        e.year = testyear
        e.save()
        self.expedition = e

    def test_file_permissions(self):
        """Expect to be allowed to write to SCANS_ROOT, DRAWINGS_DATA, SURVEX_DATA, EXPOWEB.
        Need to login first.
        """
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")
        testyear = self.testyear

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
        c.login(username=u.username, password="secretword")

        for p in [settings.SCANS_ROOT,
                  settings.DRAWINGS_DATA / "walletjson",
                  settings.EXPOWEB / "documents",
                  settings.SURVEX_DATA / "docs"
                  ]:

            _test_file_path = pathlib.Path(p, "_created_by_test_suite.txt")
            self.assertEqual(_test_file_path.is_file(), False)

            with open(_test_file_path, "w") as f:
                f.write("test string: can we write to this directory?")
            self.assertEqual(_test_file_path.is_file(), True)
            _test_file_path.unlink()

    def test_scan_upload(self):
        """Expect scan upload to a wallet to work on any file.
        Need to login first.

        This upload form looks for the Cave and the Wallet, so the test fails if the database is not loaded with the cave
        identified in the wallet.
        """
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")
        testyear = self.testyear

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
        c.login(username=u.username, password="secretword")

        with open("core/fixtures/test_upload_file.txt", "r") as testf:
            response = self.client.post(
                f"/walletedit/{testyear}:00", data={"name": "test_upload_file.txt", "uploadfiles": testf}
            )
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        # with open("_test_response.html", "w") as f:
        #     f.write(content)
        for ph in [
            r"test_upload_",
            rf"← {testyear}#00 →",
            r"description written",
            r"Plan not required",
            r"edit settings or upload a file",
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

        # Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with a random name
        # added each time it is run. The name of the uploaded file is only available within the code where it happens.
        remove_file = pathlib.Path(settings.SCANS_ROOT) / f'{testyear}' / f'{testyear}#00' / 'test_upload_file.txt'
        remove_file.unlink()

        # Just uploading a file does NOT do any git commit.
        # You need to create or edit a contents.json file for that to happen.

    def test_photo_upload(self):
        """Expect photo upload to work on any file (contrary to the msg on screen).
        Uploads into the current default year, settings.PHOTOS_YEAR.
        Deletes the file afterwards.
        Need to login first.
        """
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
        c.login(username=u.username, password="secretword")

        with open("core/fixtures/test_upload_file.txt", "r") as testf:
            response = self.client.post(
                "/photoupload/", data={"name": "test_upload_file.txt", "renameto": "", "uploadfiles": testf}
            )
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        for ph in [
            r"test_upload_",
            r"Upload photos into /photos/" + str(settings.PHOTOS_YEAR),
            r" you can create a new folder in your name",
            r"Create new Photographer folder",
            r"only photo image files are accepted",
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

        # Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with a random name
        # added each time it is run. The name of the uploaded file is only available within the code where it happens.
        remove_file = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / "test_upload_file.txt"
        remove_file.unlink()

    def test_photo_upload_rename(self):
        """Expect photo upload to work on any file (contrary to the msg on screen).
        Uploads into the current default year, settings.PHOTOS_YEAR.
        Deletes the file afterwards.
        Need to login first.
        """
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
        c.login(username=u.username, password="secretword")

        rename = "RENAMED-FILE.JPG"
        with open("core/fixtures/test_upload_file.txt", "r") as testf:
            response = self.client.post(
                "/photoupload/", data={"name": "test_upload_file.txt", "renameto": rename, "uploadfiles": testf}
            )
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        for ph in [rename]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

        # Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with a random name
        # added each time it is run. The name of the uploaded file is only available within the code where it happens.
        remove_file = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / rename
        remove_file.unlink()

    def test_photo_folder_create(self):
        """Create a folder for a new user.
        Creates it in the current default year, settings.PHOTOS_YEAR.
        Deletes the folder afterwards.
        Need to login first.
        """
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
        c.login(username=u.username, password="secretword")

        response = self.client.post("/photoupload/", data={"photographer": "GussieFinkNottle"})
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        for ph in [r"/GussieFinkNottle/", r"Create new Photographer folder"]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

        remove_dir = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / "GussieFinkNottle"
        remove_dir.rmdir()

    def test_dwg_upload_txt(self):
        """Expect a .pdf file to be refused upload.
        Need to login first.
        """
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
        c.login(username=u.username, password="secretword")

        with open("core/fixtures/test_upload_file.pdf", "r") as testf:
            response = self.client.post(
                "/dwgupload/uploads", data={"name": "test_upload_file.txt", "uploadfiles": testf}
            )
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        t = re.search("Files refused:", content)
        self.assertIsNotNone(t, 'Logged in but failed to see "Files refused:"')

    def test_dwg_upload_drawing(self):
        """Expect a no-suffix file to upload.
        Note that this skips the git commit process. That would need a new test.
        Need to login first.
        """
        c = self.client
        from django.contrib.auth.models import User

        u = User.objects.get(username="expotest")

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
        c.login(username=u.username, password="secretword")

        with open("core/fixtures/test_upload_nosuffix", "r") as testf:
            response = self.client.post(
                "/dwguploadnogit/uploads", data={"name": "test_upload_nosuffix", "uploadfiles": testf}
            )
        content = response.content.decode()
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        for ph in [
            r"test_upload_nosuffix",
            r"You cannot create folders here",
            r"Creating a folder is done by a nerd",
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(
                phmatch, "Expect no-suffix file to upload OK. Failed to find expected text: '" + ph + "'"
            )

        # Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with a random name
        # added each time it is run. The name of the uploaded file is only available within the code where it happens.
        # UploadedFile.name see https://docs.djangoproject.com/en/4.1/ref/files/uploads/#django.core.files.uploadedfile.UploadedFile
        remove_file = pathlib.Path(settings.DRAWINGS_DATA) / "uploads" / "test_upload_nosuffix"
        remove_file.unlink()


class ComplexLoginTests(TestCase):
    """These test the login and capabilities of logged-in users; they do not use fixtures"""

    def setUp(self):
        """setUp runs once for each test in this class"""
        from django.contrib.auth.models import User

        u = User()
        u.pk = 9000
        u.user_id = 8000
        u.username, u.password = "expotest", "secretword"
        u.email = "philip.sargent+ET@gmail.com"
        u.first_name, u.last_name = "ExpoTest", "Caver"
        u.is_staff = True
        u.is_superuser = True

        u.set_password(u.password)  # This creates a new salt and thus a new key for EACH test
        u.save()  # vital that we save all this before attempting login
        # print('\n', u.password)
        self.user = u

    def tearDown(self):
        self.client.logout()  # not needed as each test creates a new self.client
        # self.member.delete()
        # self.user.delete()  # id attribute set to None !
        pass

    # def test_login_redirect_for_non_logged_on_user(self):  # need to fix this in the real system
    #     c = self.client
    #     # Need to login first. Tests that we are redirected to the login page if not logged in
    #     response = c.get('noinfo/cave-number-index')
    #     self.assertRedirects(response, "/login/?next=/committee/appointments/")

    def test_ordinary_login(self):
        c = self.client
        u = self.user

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")

        logged_in = c.login(username=u.username, password="secretword")  # fails to work if password=u.password !
        self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")

        response = c.get("/accounts/login/")  # defined by the auth system
        content = response.content.decode()
        t = re.search(r"You are now logged in", content)
        self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get 'Now you can' greeting")

    def test_authentication_login(self):
        c = self.client
        u = self.user

        self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")

        # This is weird. I thought that the user had to login before she was in the authenticated state
        self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED before login")

        logged_in = c.login(username=u.username, password="secretword")  # fails to work if password=u.password !
        self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")

        self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED after login")

        # c.logout()  # This next test always means the user is still authenticated after logout. Surely not?
        # self.assertFalse(u.is_authenticated, "User '" + u.username + "' is STILL AUTHENTICATED after logout")

    def test_admin_login(self):
        c = self.client
        u = self.user

        logged_in = c.login(username=u.username, password="secretword")  # fails to work if password=u.password !
        self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")

        response = c.get("/admin/")
        content = response.content.decode()
        # with open('admin-op.html', 'w') as f:
        #     f.write(content)
        t = re.search(r"Troggle database administration", content)
        self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get the Troggle Admin page")

    def test_noinfo_login(self):
        c = self.client  # inherited from TestCase
        u = self.user

        logged_in = c.login(username=u.username, password="secretword")  # fails if password=u.password !
        self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
        response = c.get("/stats")  # a page with the Troggle menus
        content = response.content.decode()
        t = re.search(r"User\:expotest", content)
        self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get 'User:expotest' heading")

        response = c.get("/noinfo/cave-number-index")
        content = response.content.decode()
        t = re.search(r"2001-07 Hoffnungschacht", content)
        self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get /noinfo/ content")

    def test_user_force(self):
        c = self.client
        u = self.user

        try:
            c.force_login(u)
        except:
            self.assertIsNotNone(
                None,
                "Unexpected exception trying to force_login as '"
                + u.username
                + "' but failed (Bad Django documentation?)",
            )

        response = c.get("/stats")  # a page with the Troggle menus
        content = response.content.decode()
        t = re.search(r"Log out", content)
        self.assertIsNotNone(t, "Forced logged in as '" + u.username + "' but failed to get Log out heading")

        response = c.get("/accounts/login/")
        content = response.content.decode()
        t = re.search(r"You are now logged in", content)
        self.assertIsNotNone(t, "Forced logged in as '" + u.username + "' but failed to get /accounts/profile/ content")
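The recurring "Bug: accumulates one file with a random name" comments in that file all stem from the same gap: Django may rename an upload on a name collision, and only the code doing the save learns the final name. A sketch of the shape of a fix, editorial only; save_upload and its caller are illustrative, not troggle's actual view code:

from django.core.files.storage import FileSystemStorage

def save_upload(uploaded_file, directory):
    """Save an uploaded file and return the name Django actually stored it under."""
    fs = FileSystemStorage(location=directory)
    saved_name = fs.save(uploaded_file.name, uploaded_file)  # may differ from uploaded_file.name
    return saved_name  # report this back (e.g. in the template context) so tests can delete the right file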
@@ -1,137 +0,0 @@
"""
We are using unittest for troggle.

Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.

The simple redirections to files which exist, e.g. in
/expoweb/
/photos/
etc. will test fine.

But paths like this:
/survey_scans/
/caves/
which rely on database resolution will fail unless a fixture has been set up for them.

https://docs.djangoproject.com/en/dev/topics/testing/tools/
"""
import re
import subprocess
import unittest
from http import HTTPStatus

from django.test import Client, SimpleTestCase, TestCase

from troggle.core.models.troggle import Expedition, DataIssue, Person, PersonExpedition
import troggle.parsers.logbooks as lbp

TEST_YEAR = "1986"
lbp.ENTRIES[TEST_YEAR] = 4  # number of entries in the test logbook

class ImportTest(TestCase):

    @classmethod
    def setUpTestData(cls):
        def make_person(firstname, lastname, nickname=False, vfho=False, guest=False):
            fullname = f"{firstname} {lastname}"
            lookupAttribs = {"first_name": firstname, "last_name": (lastname or "")}
            nonLookupAttribs = {"is_vfho": vfho, "fullname": fullname, "nickname": nickname}
            person = Person.objects.create(**nonLookupAttribs, **lookupAttribs)

            lookupAttribs = {"person": person, "expedition": cls.test_expo}
            nonLookupAttribs = {"is_guest": guest}
            pe = PersonExpedition.objects.create(**nonLookupAttribs, **lookupAttribs)

            return person

        import troggle.settings as settings

        LOGBOOKS_PATH = settings.EXPOWEB / lbp.LOGBOOKS_DIR

        cls.test_logbook = LOGBOOKS_PATH / TEST_YEAR / lbp.DEFAULT_LOGBOOK_FILE
        frontmatter_file = LOGBOOKS_PATH / TEST_YEAR / "frontmatter.html"
        if frontmatter_file.is_file():
            frontmatter_file.unlink()  # delete if it exists

        lookupAttribs = {"year": TEST_YEAR}
        nonLookupAttribs = {"name": f"CUCC expo-test {TEST_YEAR}"}
        cls.test_expo = Expedition.objects.create(**nonLookupAttribs, **lookupAttribs)

        fred = make_person("Fred", "Smartarse", nickname="freddy")
        phil = make_person("Phil", "Tosser", nickname="tosspot")
        dave = make_person("David", "Smartarse", "")
        mike = make_person("Michael", "Wideboy", "WB", vfho=True)
        # NOT created Kurt, as the whole point is that he is a guest.
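
    # The lookupAttribs / nonLookupAttribs split above mirrors Django's
    # get_or_create() convention, where the lookup fields identify the row and
    # the rest go into defaults. A sketch of that equivalent call:
    # person, created = Person.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)
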
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_logbook_exists(self):
        self.assertTrue(self.test_logbook.is_file())

    def test_logbook_parse(self):
        lbp.LoadLogbook(self.test_expo)

        issues = DataIssue.objects.all()
        messages = []
        for i in issues:
            if i.parser == "logbooks":
                # f"{self.parser} - {self.message}"
                messages.append(i.message)
                print(f"'{i.message}'")

        expected = [
            " ! - 1986 No name match for: 'Kurt Keinnamen' in entry tid='1986_s02' for this expedition year.",
        ]

        not_expected = [
            " ! - 1986 No name match for: 'Dave Smartarse' in entry tid='1986_s01' for this expedition year.",
            " ! - 1986 Warning: logentry: surface - stupour - no expo member author for entry '1986_s03'",
            " ! - 1986 Warning: logentry: 123 - wave 2 - no expo member author for entry '1986_s02'",
        ]

        for e in expected:
            self.assertIn(e, messages)
        for e in not_expected:
            self.assertNotIn(e, messages)

    def test_aliases(self):
        # Problem: '' empty string appears as valid alias for David Smartarse
        response = self.client.get(f"/aliases/{TEST_YEAR}")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        ph = "'fsmartarse'"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_survexfiles(self):
        # Needs another test with test data
        response = self.client.get("/survexfile/caves/")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        ph = "Caves with subdirectories"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_people(self):
        # Needs another test with test data
        response = self.client.get("/people")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        ph = f"<td><a href=\"/personexpedition/FredSmartarse/{TEST_YEAR}\">{TEST_YEAR}</a></td>"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
@@ -1,162 +0,0 @@
"""
We are using unittest for troggle.

Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.

https://docs.djangoproject.com/en/dev/topics/testing/tools/

We are not using
https://github.com/FactoryBoy/factory_boy
because we are trying to minimise the number of 3rd-party packages, as they expose us to update hell,
as experienced in 2019-2020.
However we could use
https://docs.python.org/dev/library/unittest.mock.html
as this is now part of Python - if we can get our heads around it.

The tests in this file:

The code {% url THING %} or {% url THING PARAMETER %} appears a hundred times or more in the troggle/templates/ HTML template files.
This is the template syntax for
reverse('THING')
or
reverse('THING', args=[PARAMETER])

It is the URLs which take parameters which need understanding and testing. The reverse() calls which take no
parameters should be fine as this is fundamental Django stuff which will have been tested to death.

But the reverse() function is purely syntactic: the PARAMETER is just a string which is applied to
the url. So this is not testing anything important really. See the test_url_threed() below.

These url lines all come from templates/*.html

1. No tests: No parameters

{% url "caveindex" %}
{% url "controlpanel" %}
{% url "dataissues" %}
{% url "dwgallfiles" %}
{% url "dwgupload" %}
{% url "eastings" %}
{% url "exportlogbook" %}
{% url "newcave" %}
{% url "notablepersons" %}
{% url "photoupload" %}
{% url "walletedit" %}

Tests exist:
{% url "stats" %}
{% url "allscans" %}
{% url "survexcaveslist" %}

2. With parameter

{% url "caveQMs" "1623-290" %}
{% url "cave_openQMs" "1623-290" %}
{% url "cavewallets" cave_id %}
{% url "dwgfilesingle" drawing.dwgpath %}
{% url "edit_cave" cave.url_parent cave.slug %}
{% url "editentrance" cave.slug ent.entrance.slug %}
{% url "editexpopage" path %}
{% url "err" title %}
{% url "expedition" 2022 %}
{% url "newentrance" cave.slug %}
{% url "survexcavessingle" cavedir %}
{% url "survexcavessingle" cavefiles.0.1 %}
{% url "svx" cavepath %}
{% url "svx" survexfile.path %}
{% url "svxlog" title %}
{% url 'caveQMs' '1623-161' %}
{% url 'image_selector' path %}
{% url 'new_image_form' path %}

Tests exist:
{% url "threed" title %}
"""

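# How {% url %} maps onto reverse(), as described in the docstring above.
# A minimal sketch, assuming a url pattern such as
#     path("expedition/<int:year>", expedition, name="expedition")
# (the exact pattern signature is an assumption, not copied from urls.py):
#
#     {% url "expedition" 2022 %}          in a template
#     reverse("expedition", args=[2022])   in Python
#
# both resolve to "/expedition/2022".
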
todo = """These just do {% url THING %} with no parameter; we also need tests which take a parameter

- Read all this https://developer.mozilla.org/en-US/docs/Learn/Server-side/Django/Testing

- Read all this https://realpython.com/testing-in-django-part-1-best-practices-and-examples/

- add 'coverage' to all tests

- statistics also needs a test when we have put data into the database
"""

import re
from http import HTTPStatus

from django.test import Client, TestCase
from django.urls import reverse, path


# class SimplePageTest(unittest.TestCase):
class URLTests(TestCase):
    """These tests may appear to be redundant, but in fact they exercise different bits of code. The urls.py
    dispatcher is sending these URLs via different 'view' handlers, and they all need verifying.
    """

    @classmethod
    def setUpTestData(cls):
        # Set up data for the whole TestCase
        # cls.foo = Foo.objects.create(bar="Test")
        # Some test using self.foo in tests below..
        # read in some SQL ?
        pass

    def setUp(self):
        # Every test needs a client.
        self.client = Client()

    def test_statistics(self):
        response = self.client.get("/statistics")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"0 expeditions: 0 people, 0 caves and 0 logbook entries."
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_stats(self):
        # Needs another test with test data
        response = self.client.get("/stats")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        # with open('_test_response.html', 'w') as f:
        #     f.write(content)
        ph = r"Total length: 0.0 km adding up the total for each year."
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_url_stats(self):
        """Test the {% url "stats" %} reverse resolution
        path('statistics', statistics.stats, name="stats"),
        path('stats', statistics.stats, name="stats"),
        """
        reversed_url = reverse('stats')  # NB _ must be written as - if present in name
        self.assertEqual(reversed_url, "/stats")

    def test_url_allscans(self):
        """Test the {% url "allscans" %} reverse resolution
        path('survey_scans/', allscans, name="allscans"),  # all the scans in all wallets
        """
        reversed_url = reverse('allscans')  # NB _ must be written as - if present in name
        self.assertEqual(reversed_url, "/survey_scans/")

    def test_url_survexcaveslist(self):
        """Test the {% url "survexcaveslist" %} reverse resolution
        path('survexfile/caves', survex.survexcaveslist, name="survexcaveslist"),
        path('survexfile/caves/', survex.survexcaveslist, name="survexcaveslist"),  # auto slash not working
        """
        reversed_url = reverse('survexcaveslist')  # NB _ must be written as - if present in name
        self.assertEqual(reversed_url, "/survexfile/caves/")

    def test_url_threed(self):
        """Test the {% url "threed" %} reverse resolution
        path('survexfile/<path:survex_file>.3d', survex.threed, name="threed"),
        """
        reversed_url = reverse('threed', args=['zilch'])  # NB _ must be written as - if present in name
        self.assertEqual(reversed_url, "/survexfile/zilch.3d")
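
    # A sketch of the parameterised reverse() tests the todo string asks for,
    # using a name listed in the module docstring (the expected url string is
    # an assumption that would need checking against urls.py):
    # def test_url_expedition(self):
    #     reversed_url = reverse("expedition", args=["2022"])
    #     self.assertEqual(reversed_url, "/expedition/2022")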
@@ -1,632 +0,0 @@
"""
IGNORED tests
- all test files with hyphens in the filename are ignored
- filenames with _ are OK

$ python manage.py test cuy.photologue --parallel
only runs the photologue tests. Working. (well, it was working..)

$ python manage.py test cuy.mailman --parallel

$ python manage.py test paypal.standard --parallel
needs work: a very large test suite

$ python manage.py test tagging --parallel
a huge suite - needs a lot of work to work with Django 1.11 & python3

$ python manage.py test cuy.club --parallel
Runs the tests in this file only
"""

import re
import unittest

from django.test import Client, SimpleTestCase, TestCase, TransactionTestCase


class ImportTest(TestCase):
    def test_import_imports(self):
        # Need to go through all modules and copy all imports here
        from io import StringIO

        from cuy.club.models import (Article, Event, Member, Webpage,
                                     WebpageCategory)
        from cuy.website.views.generic import PUBLIC_LOGIN
        from django.conf import settings
        from django.contrib.auth.decorators import login_required
        from django.contrib.auth.models import User
        from django.core import management
        from django.db import connection, connections
        from django.db.utils import IntegrityError
        from django.http import HttpResponse, HttpResponseRedirect
        from django.shortcuts import get_object_or_404, render
        from django.template.defaultfilters import slugify
        from django.utils.timezone import get_current_timezone, make_aware


class SimpleTest(SimpleTestCase):
    def test_arith_mult(self):
        """
        Tests that 10 x 10 always equals 100.
        """
        self.assertEqual(10*10, 100)


class DataTests(TestCase):
    '''These check that the NULL and NON-UNIQUE constraints are working in the database'''
    @classmethod
    def setUpTestData(cls):
        pass

    def setUp(self):
        from cuy.club.models import Member
        from django.contrib.auth.models import User
        m = Member()
        m.pk = 8000
        m.user_id = 9000  # not NULL constraint
        m.save()
        self.member = m

        u = User()
        u.pk = 9000
        u.user_id = 8000
        u.username, u.password = 'stinker', 'secretword'
        u.email = 'philip.sargent+SP@gmail.com'
        u.first_name, u.last_name = 'Stinker', 'Pinker'
        u.save()
        self.user = u

    def tearDown(self):
        # self.member.delete()  # must delete member before user
        # self.user.delete()  # horrible crash, why?
        pass

    def test_member_not_null_field(self):
        from cuy.club.models import Member
        from django.db.utils import IntegrityError
        n = Member()
        try:
            n.save()
        except IntegrityError as ex:
            t = re.search(r'NOT NULL constraint failed: club_member.user_id', str(ex))
            self.assertIsNotNone(t, "Exception is not the expected 'NOT NULL constraint failed'")
        n.user_id = 1000
        try:
            n.save()  # was 'n.save' without parentheses, which never actually saved anything
        except:
            return self.assertIsNotNone(None, "Failed to save valid Member to database")

    def test_member_not_unique_field(self):
        from cuy.club.models import Member
        from django.db.utils import IntegrityError
        m1 = Member()
        m2 = Member()
        m1.user_id = 1000
        m2.user_id = m1.user_id
        m1.save()
        try:
            m2.save()
        except IntegrityError as ex:
            t = re.search(r'UNIQUE constraint failed: club_member.user_id', str(ex))
            return self.assertIsNotNone(t, "IntegrityError as expected but message is not the expected 'UNIQUE constraint failed'")
        self.assertIsNotNone(None, "Failed to enforce 'UNIQUE constraint' on saving two Member objects with same user_id")
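
    # The same constraint check written with the standard unittest idiom, as a
    # sketch; assertRaises makes the intent explicit and removes the
    # try/except/return plumbing above:
    # def test_member_not_unique_field_v2(self):
    #     from cuy.club.models import Member
    #     from django.db.utils import IntegrityError
    #     Member(user_id=1000).save()
    #     with self.assertRaises(IntegrityError):
    #         Member(user_id=1000).save()
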
    def test_article_invalid_date(self):
        from cuy.club.models import Article, Member
        from django.core.exceptions import ValidationError
        from django.db.utils import IntegrityError

        a = Article()
        m = self.member
        a.author_id = m.user_id

        a.publish = "not a valid datetime"
        try:
            a.save()
        except ValidationError as ex:
            t = re.search(r'value has an invalid format. It must be in YYYY-MM-DD HH:MM', str(ex))
            self.assertIsNotNone(t, "Exception is not the expected 'invalid format'")

    def test_article_and_author_not_null(self):
        from cuy.club.models import Article, Member
        from django.core.exceptions import ValidationError
        from django.db.utils import IntegrityError

        a2 = Article()
        a2.publish = "2021-02-17 17:25"
        a2.author_id = None

        try:
            a2.save()
        except IntegrityError as ex:
            t = re.search(r'NOT NULL constraint failed: club_article.author_id', str(ex))
            self.assertIsNotNone(t, "Exception is not the expected 'NOT NULL constraint failed'")
        except:
            self.assertIsNotNone(None, "Exception is not the expected 'NOT NULL constraint failed' IntegrityError")

    def test_article_and_author_ok(self):
        from cuy.club.models import Article, Member
        from django.core.exceptions import ValidationError
        from django.db.utils import IntegrityError
        m = self.member

        a3 = Article()
        a3.pk = 5000
        a3.publish = "2021-02-17 17:25"

        a3.author_id = m.pk
        try:
            a3.save()
        except:
            return self.assertIsNotNone(None, "Failed to save valid Article to database")


    def test_member_and_user(self):
        u = self.user
        m = self.member

        m.user = u
        self.assertEqual(m.user.last_name, 'Pinker')
        m.save()
        u.save()

class FixturePageTests(TestCase):
    fixtures = ['cuyc_basic_data.json', 'test_data.json', 'auth_user_gussie']

    def setUp(self):
        from django.contrib.auth.models import User
        self.user = User.objects.get(username='gussie')
        self.member = self.user.profile

    def tearDown(self):
        pass

    def test_fix_event_loaded(self):
        from cuy.club.models import Event
        e = Event.objects.get(slug='spring-in-the-med')
        self.assertEqual(str(e.shore_contact.first_name()), 'Stiffy')
        self.assertEqual(str(e.organiser.last_name()), 'Fittleworth')

    def test_fix_page_all_trips(self):
        response = self.client.get('/programme/')
        content = response.content.decode()
        t = re.search(r'Spring in the Arctic', content)
        self.assertIsNotNone(t, "Failed to see Event loaded from fixture")
        t = re.search(r'High Summer in the Irish Sea', content)
        self.assertIsNotNone(t, "Failed to see Event loaded from fixture")

    def test_fix_page_event(self):
        response = self.client.get('/programme/events/spring-in-the-arctic/')
        content = response.content.decode()
        t = re.search(r'Spring in the Arctic', content)
        self.assertIsNotNone(t, "Failed to see Event loaded from fixture")

    def test_fix_admin_login_fail(self):
        c = self.client
        from cuy.club.models import Member
        from django.contrib.auth.models import User
        m = Member.objects.get(pk=9002)
        u = User.objects.get(username='bingo')

        self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')

        logged_in = c.login(username=u.username, password='secretword')  # fails to work if password=u.password !
        self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')

        response = c.get('/admin/')
        content = response.content.decode()
        with open('admin-op.html', 'w') as f:
            f.write(content)
        t = re.search(r'Site administration', content)
        self.assertIsNone(t, 'Logged in as \'' + u.username + '\' (not staff) but still managed to get the Admin page')

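
# Fixtures like those named in FixturePageTests above are normally produced
# with Django's dumpdata command, e.g. (the app label and output path here
# are illustrative assumptions, not copied from this project's scripts):
#   python manage.py dumpdata club --indent 2 > cuy/club/fixtures/test_data.json
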
class ComplexLoginTests(TestCase):
    '''These test the login and capabilities of logged-in users'''
    def setUp(self):
        '''setUp runs once for each test in this class'''
        from cuy.club.models import AFFILIATION, MEMBER_TYPES, Member
        from django.contrib.auth.models import User
        m = Member()
        m.pk = 8000
        m.user_id = 9000  # not NULL constraint
        m.email = "philip.sargent+HG@gmail.com"
        m.member_type = MEMBER_TYPES[1]
        m.affiliation = AFFILIATION[3]
        m.committee_email_prefix = 'honoria'

        u = User()
        u.pk = 9000
        u.user_id = 8000
        u.username, u.password = 'honoria', 'secretword'
        u.email = 'philip.sargent+HG@gmail.com'
        u.first_name, u.last_name = 'Honoria', 'Glossop'
        u.is_staff = True
        u.is_superuser = True

        u.set_password(u.password)  # This creates a new salt and thus a new key for EACH test
        u.save()  # vital that we save all this before attempting login
        # print('\n', u.password)
        m.save()
        self.user = u
        self.member = m

        from cuy.club.models import ClubRole, Elected
        cr = ClubRole()
        cr.id = 7000
        cr.title = 'Skipper'
        cr.short_description = 'Club skipper who can lead trips'
        cr.committee_position = True
        cr.rank = 8
        cr.save()
        self.clubrole = cr

        e = Elected()
        e.member = m
        e.club_role = cr
        e.save()
        self.elected = e

    def tearDown(self):
        self.client.logout()  # not needed as each test creates a new self.client
        # self.member.delete()
        # self.user.delete()  # id attribute set to None !
        pass

    def test_login_redirect_for_non_logged_on_user(self):
        c = self.client
        # Need to login first. Tests that we are redirected to login page if not logged in
        response = c.get('/committee/appointments/')
        self.assertRedirects(response, "/login/?next=/committee/appointments/")

    def test_ordinary_login(self):
        c = self.client
        u = self.user

        self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')

        logged_in = c.login(username=u.username, password='secretword')  # fails to work if password=u.password !
        self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
        response = c.get('/')
        content = response.content.decode()
        t = re.search(r'Hello Honoria', content)
        self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get personal greeting')

    def test_authentication_login(self):
        c = self.client
        u = self.user

        self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')

        # This is weird. I thought that the user had to login before she was in the authenticated state.
        # (Explanation: User.is_authenticated is a read-only property which is always True on a
        # concrete User instance and always False on AnonymousUser; it does not track session state.)
        self.assertTrue(u.is_authenticated, 'User \'' + u.username + '\' is NOT AUTHENTICATED before login')

        logged_in = c.login(username=u.username, password='secretword')  # fails to work if password=u.password !
        self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')

        self.assertTrue(u.is_authenticated, 'User \'' + u.username + '\' is NOT AUTHENTICATED after login')

        c.logout()
        # u.is_authenticated would still be True here (see above), so test the
        # user attached to a fresh request instead:
        response = c.get('/')
        self.assertFalse(response.wsgi_request.user.is_authenticated, 'User \'' + u.username + '\' is STILL AUTHENTICATED after logout')

    def test_admin_login(self):
        c = self.client
        u = self.user
        m = self.member

        m.user = u

        logged_in = c.login(username=u.username, password='secretword')  # fails to work if password=u.password !
        self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')

        response = c.get('/admin/')
        content = response.content.decode()
        # with open('admin-op.html', 'w') as f:
        #     f.write(content)
        t = re.search(r'Site administration', content)
        self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get the Admin page')

    def test_user_account_login(self):
        # User must be associated with a Member for whom is_committee() is True
        c = self.client
        u = self.user
        m = self.member

        m.user = u

        logged_in = c.login(username=u.username, password='secretword')  # fails if password=u.password !
        self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')

        response = c.get('/accounts/profile/')
        content = response.content.decode()
        # with open('account-profile-op.html', 'w') as f:
        #     f.write(content)
        t = re.search(r'CUYC Member Profile - Cambridge University Yacht Club', content)
        self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get /accounts/profile/ content')

    def test_committee_login(self):
        from django.contrib.auth.models import User

        # User must be associated with a Member for whom is_committee() is True
        c = self.client  # inherited from TestCase
        u = self.user
        m = self.member
        cr = self.clubrole
        e = self.elected

        m.user = u

        logged_in = c.login(username=u.username, password='secretword')  # fails if password=u.password !
        self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
        response = c.get('/')
        content = response.content.decode()
        t = re.search(r'Hello Honoria', content)
        self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get personal greeting')

        response = c.get('/committee/appointments/')
        content = response.content.decode()
        # with open('cmttee-op.html', 'w') as f:
        #     f.write(content)
        t = re.search(r'A word of warning...', content)
        self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get /committee/ content')

    def test_user_force(self):
        from django.conf import settings
        c = self.client
        u = self.user
        m = self.member

        m.user = u

        try:
            c.force_login(u)
        except:
            self.assertIsNotNone(None, 'Unexpected exception trying to force_login as \'' + u.username + '\' but failed (Bad Django documentation?)')

        response = c.get('/')
        content = response.content.decode()
        t = re.search(r'Hello Honoria', content)
        self.assertIsNotNone(t, 'Forced logged in as \'' + u.username + '\' but failed to get personal greeting')

        response = c.get('/accounts/profile/')
        content = response.content.decode()
        t = re.search(r'From here you can update your', content)
        self.assertIsNotNone(t, 'Forced logged in as \'' + u.username + '\' but failed to get /accounts/profile/ content')


class DynamicPageTests(TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_empty_yachts(self):
        # no page there initially
        response = self.client.get('/yachts/')
        content = response.content.decode()
        self.assertEqual(response.status_code, 404)

    def test_full_yachts(self):
        '''Creating a WebpageCategory and an index webpage creates a valid url
        '''
        from cuy.club.models import Webpage, WebpageCategory
        wc = WebpageCategory()
        wc.pk = 8000
        wc.id = 8000
        wc.name, wc.slug = 'Yachts', 'yachts'
        wc.save()
        self.webcategory = wc

        p = Webpage()
        p.pk = 9000
        p.id = 9000
        p.category_id = wc.id
        p.description = "Current Yacht"
        p.edited = 1
        p.event_id = None
        p.index = 1
        p.markup = "<h1>Skylark</h1>"
        p.ordering = 10
        p.slug = "yacht"
        p.title = "Skylark Yacht"
        p.save()
        self.webpage = p

        response = self.client.get('/yachts/')
        content = response.content.decode()
        self.assertEqual(response.status_code, 200)

class PageTests(TestCase):
    def setUp(self):
        # Every test needs a client.
        # new in Django 1.5 no need to create self.client first
        # https://docs.djangoproject.com/en/dev/topics/testing/tools/#django.test.LiveServerTestCase
        # self.client = Client()
        pass

    def tearDown(self):
        pass

    def test_basic_admin(self):
        response = self.client.get('/admin/login/')
        self.assertEqual(response.status_code, 200)

    def test_basic_admindoc(self):
        # Need to login first. Tests that we are redirected
        response = self.client.get('/admin/doc/models/')
        self.assertRedirects(response, "/admin/login/?next=/admin/doc/models/")

    def test_basic_programme_status(self):
        # renamed: a later test_basic_programme in this class was silently shadowing this method
        response = self.client.get('/programme/')
        self.assertEqual(response.status_code, 200)

    def test_basic_login(self):
        # Need to login first
        response = self.client.post('/login/', {'username': 'gussie', 'password': 'secretword'})
        if response.status_code == 302:
            print(response['location'])
        self.assertEqual(response.status_code, 200)  # fails because user does not exist

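    # A sketch of how test_basic_login could pass, by creating the user it logs
    # in with first (create_user hashes the password properly):
    # def test_basic_login_with_user(self):
    #     from django.contrib.auth.models import User
    #     User.objects.create_user(username='gussie', password='secretword')
    #     response = self.client.post('/login/', {'username': 'gussie', 'password': 'secretword'})
    #     self.assertEqual(response.status_code, 302)  # a successful login redirects
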
    def test_basic_committee(self):
        # Need to login first. Tests that we are redirected to login page
        response = self.client.get('/committee/')
        self.assertRedirects(response, "/login/?next=/committee/")

    # --- Check non-logged-in users cannot see these
    def test_basic_gallery(self):
        response = self.client.get('/gallery/')
        self.assertEqual(response.status_code, 200)

    def test_basic_sitemap(self):
        response = self.client.get('/site-map/')
        self.assertEqual(response.status_code, 200)


    # --- public club pages created by content in templates/*.html
    def test_basic_club(self):
        response = self.client.get('/club/')
        content = response.content.decode()
        t = re.search(r'offers opportunities for members of the university to sail yachts', content)
        self.assertIsNotNone(t)

    def test_basic_programme(self):
        response = self.client.get('/programme/')
        content = response.content.decode()
        t = re.search(r'If you would like to go on any of these events', content)
        self.assertIsNotNone(t)

    def test_basic_programme_onshore(self):
        response = self.client.get('/programme/on_shore/')
        content = response.content.decode()
        t = re.search(r'All Upcoming Shore Based Events', content)
        self.assertIsNotNone(t)

    def test_page_equal_opps(self):
        response = self.client.get('/club/equal-opps/')
        content = response.content.decode()
        t = re.search(r'commitment to a policy of equal opportunities', content)
        self.assertIsNotNone(t)

    def test_page_safety(self):
        response = self.client.get('/club/safety/')
        content = response.content.decode()
        t = re.search(r'endeavour to maintain the highest levels of safety', content)
        self.assertIsNotNone(t)

    def test_page_safety_risk(self):
        response = self.client.get('/club/safety/risk/')
        content = response.content.decode()
        t = re.search(r'rules for the use of safety lines to be described and monitored by the skipper.', content)
        self.assertIsNotNone(t)

    def test_page_safetypolicy(self):
        response = self.client.get('/club/safetypolicy/')
        content = response.content.decode()
        t = re.search(r'should be capable of swimming at least fifty meters in clothing and keeping afloat for at least five minutes', content)
        self.assertIsNotNone(t)

    def test_page_safety_rules(self):
        response = self.client.get('/club/safety/rules/')
        content = response.content.decode()
        t = re.search(r'Safety Officer is responsible for the maintenance of safety records', content)
        self.assertIsNotNone(t)

    def test_page_regulations(self):
        response = self.client.get('/club/regulations/')
        content = response.content.decode()
        t = re.search(r'Sanger Institute, the Babraham Institute, Wellcome and MRC Research Laboratories', content)
        self.assertIsNotNone(t)

    def test_page_constitution(self):
        response = self.client.get('/club/constitution/')
        content = response.content.decode()
        t = re.search(r'to provide a wide variety of safe and affordable yacht sailing', content)
        self.assertIsNotNone(t)

    def test_page_clubcommittee(self):
        response = self.client.get('/club/committee/')
        content = response.content.decode()
        t = re.search(r'CUYC elects new officers as needed, usually at the beginning of each term', content)
        self.assertIsNotNone(t)

    def test_page_damages(self):
        response = self.client.get('/club/damages/')
        content = response.content.decode()
        t = re.search(r'all crew participants may be required to contribute to the payment of damages', content)
        self.assertIsNotNone(t)

    def test_page_training(self):
        response = self.client.get('/training/')
        content = response.content.decode()
        t = re.search(r'members of the club are always happy to pass on informal training tips', content)
        self.assertIsNotNone(t)

    def test_page_racing(self):
        response = self.client.get('/racing/')
        content = response.content.decode()
        t = re.search(r'CUYC Racing Squad', content)
        self.assertIsNotNone(t)

    def test_page_blog(self):
        response = self.client.get('/blog/')
        content = response.content.decode()
        t = re.search(r'Latest Posts', content)
        self.assertIsNotNone(t)

    def test_page_gallery(self):
        response = self.client.get('/gallery/')
        content = response.content.decode()
        t = re.search(r'Photo Galleries', content)
        self.assertIsNotNone(t)

    def test_page_about_photos(self):
        response = self.client.get('/about_photos/')
        content = response.content.decode()
        t = re.search(r'have been supplied by members of CUYC', content)
        self.assertIsNotNone(t)

    def test_page_loginhelp(self):
        response = self.client.get('/login/help/')
        content = response.content.decode()
        t = re.search(r'Existing CUYC Member, without an account?', content)
        self.assertIsNotNone(t)

    def test_page_loginregister(self):
        response = self.client.get('/login/register/')
        content = response.content.decode()
        t = re.search(r'If you are, or have ever been, a CUYC or CUCrC member', content)
        self.assertIsNotNone(t)

    # --- These pages are not connected to top level public menus but are in fact public
    def test_page_club_tripinformation(self):
        response = self.client.get('/club/trip-information/')
        content = response.content.decode()
        t = re.search(r'organisers have a choice to add a sum to the trip fee quoted on the website to cover expenses', content)
        self.assertIsNotNone(t)

    def test_page_club_trippayment(self):
        response = self.client.get('/club/trip-information/payment/')
        content = response.content.decode()
        t = re.search(r'All payments to the club should be sent via Paypal', content)
        self.assertIsNotNone(t)

    def test_page_club_trip_typical_day(self):
        response = self.client.get('/club/trip-information/typical-day/')
        content = response.content.decode()
        t = re.search(r'Skipper and first mate crawl out of their sleeping bags early', content)
        self.assertIsNotNone(t)

    def test_page_club_trip_faq(self):
        response = self.client.get('/club/trip-information/faq/')
        content = response.content.decode()
        t = re.search(r'Different people are seasick in different ways', content)
        self.assertIsNotNone(t)

    def test_page_club_trip_kit(self):
        response = self.client.get('/club/trip-information/kit/')
        content = response.content.decode()
        t = re.search(r'appropriate quantity of base layer clothes to match the duration', content)
        self.assertIsNotNone(t)

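    # All of the test_page_* methods above follow one pattern; a table-driven
    # sketch of the same idea using subTest (url/phrase pairs copied from two
    # of the tests above):
    # PUBLIC_PAGES = {
    #     '/club/damages/': r'all crew participants may be required to contribute to the payment of damages',
    #     '/racing/': r'CUYC Racing Squad',
    # }
    # def test_public_pages(self):
    #     for url, phrase in self.PUBLIC_PAGES.items():
    #         with self.subTest(url=url):
    #             content = self.client.get(url).content.decode()
    #             self.assertIsNotNone(re.search(phrase, content))
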
@@ -1,564 +0,0 @@
"""
We are using unittest for troggle.

Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.

The simple redirections to files which exist, e.g. in
/expoweb/
/photos/
etc. will test fine.

But paths like this:
/survey_scans/
/caves/
which rely on database resolution will fail unless a fixture has been set up for them.

https://docs.djangoproject.com/en/dev/topics/testing/tools/
"""


todo = """ADD TESTS when we are redirecting /expofiles/ to a remote file-delivering site

- Add test for running cavern to produce a .3d file
"""

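# A sketch of the cavern test suggested in the todo string above. It assumes
# the survex 'cavern' binary is on the PATH; the .svx input path is a made-up
# placeholder, not a real file in this repository:
# import subprocess
# def test_cavern_produces_3d():
#     result = subprocess.run(["cavern", "--output=/tmp/test", "path/to/some.svx"],
#                             capture_output=True)
#     assert result.returncode == 0
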
import re
from http import HTTPStatus

from django.test import Client, TestCase


# class SimplePageTest(unittest.TestCase):
class PageTests(TestCase):
    """These tests may appear to be redundant, but in fact they exercise different bits of code. The urls.py
    dispatcher is sending these URLs via different 'view' handlers, and they all need verifying.
    """

    @classmethod
    def setUpTestData(cls):
        # Set up data for the whole TestCase
        # cls.foo = Foo.objects.create(bar="Test")
        # Some test using self.foo in tests below..
        # read in some SQL ?
        pass

    def setUp(self):
        # Every test needs a client.
        self.client = Client()

    def test_expoweb_root(self):
        response = self.client.get("")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        ph = r"CUCC in Austria"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_root_slash(self):
        response = self.client.get("/")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        ph = r"CUCC in Austria"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_paths(self):
        response = self.client.get("/pathsreport")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"This report is generated from"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_dir(self):
        response = self.client.get("/handbook")
        response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.FOUND)  # 302 directory, so redirects to /index.htm

    def test_expoweb_dirslash(self):
        response = self.client.get("/handbook/")
        response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.FOUND)  # 302 directory, so redirects to /index.htm

    def test_expoweb_dir_no_index(self):
        response = self.client.get("/handbook/troggle")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
        ph = r"Page not found handbook/troggle/index.html"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_dir_with_index_htm(self):
        response = self.client.get("/years/1999/index.htm")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)  # an explicit index.htm file, so served directly
        ph = r"Passage descriptions for 1999"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_dir_with_index_html(self):
        response = self.client.get("/years/2015/index.html")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)  # an explicit index.html file, so served directly
        ph = r"Things left at top camp 2014"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_dir_with_index2(self):
        response = self.client.get("/handbook/index.htm")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        ph = r"Introduction to expo"
        phmatch = re.search(ph, content)
        # print("\n ! - test_expoweb_dir_with_index2\n{}\n{}".format(response.reason_phrase, content))
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_htm(self):
        response = self.client.get("/handbook/index.htm")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        ph = r"Introduction to expo"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_notfound(self):
        response = self.client.get("/handbook/_test_zyxxypqrqx.html")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
        ph = r"<h1>Page not found"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_no_dir(self):
        # slash where there should not be one
        response = self.client.get("/handbook/_test_zyxxypqrqx/")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"<h1>Directory not found"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_troggle_default(self):
        # default page after logon
        response = self.client.get("/troggle")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"expeditions the club has undertaken"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_troggle_default_slash(self):
        response = self.client.get("/troggle/")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"<h1>Directory not found"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_expoweb_via_areaid(self):
        # the dispatcher takes a detour via the cave rendering procedure for this
        response = self.client.get("/guidebook/t/via201.jpg")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertEqual(len(response.content), 6057)

    def test_cave_kataster_not_found(self):
        # database not loaded, so no caves found; so looks for a generic expopage and fails
        response = self.client.get("/1623/115.htm")
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
        content = response.content.decode()
        ph = r"Page not found 1623/115.htm"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_caves_page(self):
        response = self.client.get("/caves")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"Cave Number Index - kept updated"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_caves_page_kataster_not_found(self):
        response = self.client.get("/caves")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"115"
        phmatch = re.search(ph, content)
        self.assertIsNone(phmatch, "Found text which should be absent when the database is empty: '" + ph + "'")

    def test_page_ss(self):
        response = self.client.get("/survey_scans/")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"All Survey scans folders "
        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_admin(self):
        # see the login page
        response = self.client.get("/admin/login/")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        ph = r'<h1 id="site-name">Troggle database administration</h1>'
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_admindocs_exped(self):
        # Get redirected to login page
        response = self.client.get("/admin/doc/models/core.expedition/")
        response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.FOUND)  # 302

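    # The bare 302 check above could equally use Django's redirect assertion,
    # a sketch (assuming the default admin login url is in use):
    # self.assertRedirects(response, "/admin/login/?next=/admin/doc/models/core.expedition/")
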
    def test_page_expofiles_root_dir(self):
        # Root expofiles - odd interaction with url parsing so needs testing
        response = self.client.get("/expofiles")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        for ph in [
            r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
            r'<a href="/expofiles/photos">/photos/',
            r'<a href="/expofiles/surveyscans">/surveyscans/',
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

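    # The paired if-tests above accept either 200 (served directly) or 302
    # (redirected); the same intent written more directly, as a sketch:
    # def assert_ok_or_found(self, response):
    #     self.assertIn(response.status_code, (HTTPStatus.OK, HTTPStatus.FOUND))
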
    def test_page_expofiles_root_slash_dir(self):
        # Root expofiles - odd interaction with url parsing so needs testing
        response = self.client.get("/expofiles/")
        if response.status_code != HTTPStatus.OK:  # 200
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:  # 302
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        for ph in [
            r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
            r'<a href="/expofiles/photos">/photos/',
            r'<a href="/expofiles/surveyscans">/surveyscans/',
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_expofiles_badness(self):
        # should display expofiles directory contents not its parent
        response = self.client.get("/expofiles/99badness99")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        for ph in [
            r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
            r'<a href="/expofiles/photos">/photos/',
            r'<a href="/expofiles/surveyscans">/surveyscans/',
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_expofiles_docs_dir(self):
        # Flat file tests.
        response = self.client.get("/expofiles/documents/")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        for ph in [
            r'a href="/expofiles/documents/bier-tent-instructions.pdf">bier-tent-instructions.pdf',
            r'a href="/expofiles/documents/boc.pdf">boc.pdf',
            r'a href="/expofiles/documents/idiots-guide-expo-git.pdf"',
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_survey_scans_dir(self):
        # Flat file tests.
        response = self.client.get("/expofiles/surveyscans")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        for ph in [
            r'<a href="/expofiles/surveyscans/2004">/2004/',
            r'<a href="/expofiles/surveyscans/1989LUSS">/1989LUSS/',
            r'<a href="/expofiles/surveyscans/2018">/2018',
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_folk(self):
        # This page is separately generated, so it has the full data content
        response = self.client.get("/folk/index.htm")
        content = response.content.decode()
        self.assertEqual(response.status_code, HTTPStatus.OK)
        for ph in [
            r"involves some active contribution",
            r"Naomi Griffiths",
            r"Gail Smith",
            r"Phil Wigglesworth",
            r"A more obscure record of longest gap between expos has",
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_expofile_documents(self):
        # this gets an empty page as the database has not been loaded
        response = self.client.get("/expofiles/documents")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"notice_generale_cordes_courant"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_expofile_documents_slash(self):
        # this gets an empty page as the database has not been loaded
        response = self.client.get("/expofiles/documents/")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"notice_generale_cordes_courant"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_expofile_document_loeffler_pdf(self):
        # Flat file tests.
        response = self.client.get("/expofiles/documents/surveying/tunnel-loefflerCP35-only.pdf")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertEqual(len(response.content), 2299270)

    def test_page_expofile_document_rope_pdf(self):
        # Flat file tests.
        response = self.client.get("/expofiles/documents/ropes/rope-age-agm-2019.pdf")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertEqual(len(response.content), 76197)

    def test_page_expofile_document_png(self):
        # Flat file tests.
        response = self.client.get("/expofiles/documents/callout-2012.png")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertEqual(len(response.content), 69921)

    def test_page_expofile_writeup(self):
        # Flat file tests.
        response = self.client.get("/expofiles/writeups/1982/logbook1982.pdf")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertEqual(len(response.content), 12915413)

    def test_page_site_media_ok(self):
        # Flat file tests.
        response = self.client.get("/site_media/surveyHover.gif")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertEqual(len(response.content), 39482)  # need to check it is not just an error page

    def test_page_site_media_css(self):
        # Flat file tests.
        response = self.client.get("/site_media/css/trog3.css")
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()  # need to check it is not just an error page
        ph = r"This text is used by the test system to determine that trog3.css loaded correctly"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_photos_ok(self):
        # Flat file tests.
        response = self.client.get("/photos/2018/PhilipSargent/corin.jpg")  # exists
        if response.status_code != HTTPStatus.OK:
            self.assertEqual(response.status_code, HTTPStatus.FOUND)
        if response.status_code != HTTPStatus.FOUND:
            self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertEqual(len(response.content), 67487)  # need to check it is not just an error page

    def test_page_photos_not_ok(self):
        # Flat file tests.
        response = self.client.get("/photos/2018/PhilipSargent/_corin.jpeg")  # does not exist
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
        content = response.content.decode()
        ph = r"<title>Page not found 2018/PhilipSargent/_corin.jpeg</title>"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_photos_dir(self):
        # Flat file tests.
        response = self.client.get("/photos/2018/PhilipSargent/")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"Directory not displayed"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_survey_scans_empty(self):
        # this gets an empty page as the database has not been loaded
        response = self.client.get("/survey_scans/")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"contains the scanned original in-cave survey notes and sketches"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_dwgdataraw_empty(self):
        # this gets an empty page as the database has not been loaded
        response = self.client.get("/dwgdataraw/")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"<h1>Directory not found"
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_dwgallfiles_empty(self):
        # this gets an empty page as the database has not been loaded
        response = self.client.get("/dwgfiles")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        for ph in [
            r"All Tunnel and Therion files",
            r"<th>Wallets</th><th>Scan files in the wallets</th><th>Frames</th></tr>",
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_dwgallfiles_empty_slash(self):
        # this gets an empty page as the database has not been loaded
        response = self.client.get("/dwgfiles/")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        for ph in [
            r"All Tunnel and Therion files",
            r"<th>Wallets</th><th>Scan files in the wallets</th><th>Frames</th></tr>",
        ]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

def test_page_slash_empty(self):
|
||||
# tslash where there should not be one
|
||||
response = self.client.get("/expedition/1979/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<h1>Directory not found"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_not_found_survexfile_cave(self):
|
||||
response = self.client.get("/survexfile/not_a_real_cave_number")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"Cave Identifier not found in database"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
def test_dataissues(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/dataissues")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"as well as these import/parsing issues"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_therionissues(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/therionissues")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"! Un-parsed image filename"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_surveximport(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/surveximport")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = r"The number at the left-hand margin is the depth"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_survexdebug(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/survexdebug")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"Running list of warnings during import"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_eastings(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/eastings")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<tr><th>Survex Station</th><th>x</th><th>y</th></tr>"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
# ADD TESTS when we are redirecting /expofiles/ to get the actual files using e.g.
|
||||
# import requests
|
||||
# page = requests.get("http://dataquestio.github.io/web-scraping-pages/simple.html")
|
||||
|
||||
# these need a fixture to load the datbase before they will pass
|
||||
# we also need tests for invalid queries to check that error pages are right
|
||||
|
||||
# def test_page_survey_scans_khplan2_png(self):
|
||||
# # this has an error as the database has not been loaded yet in the tests
|
||||
# response = self.client.get('/survey_scans/smkhs/khplan2.png')
|
||||
# if response.status_code != HTTPStatus.OK:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
# if response.status_code != HTTPStatus.FOUND:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# self.assertEqual(len(response.content), 823304) # fails, but is working manually!
|
||||
|
||||
# def test_page_dwgdataraw_107sketch_xml(self):
|
||||
# # this has an error as the database has not been loaded yet in the tests
|
||||
# response = self.client.get('/dwgdataraw/107/107sketch-v2.xml')
|
||||
# if response.status_code != HTTPStatus.OK:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
# if response.status_code != HTTPStatus.FOUND:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# content = response.content.decode()
|
||||
# for ph in [ r'tunneldate="2014-08-21 11:34:00"',
|
||||
# r'<sketchsubset subname="Caves of the Loser Plateau"/>',
|
||||
# r'sfsketch="ollyjen107drawings',
|
||||
# r'sfsketch="surveyscans/2014/2014#01',
|
||||
# r'aa-js-plan.png"' ]:
|
||||
# phmatch = re.search(ph, content)
|
||||
# self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph +"'")
|
||||
|
||||
|
||||
# database not loaded yet:
|
||||
# response = self.client.get('/survey_scans/1991surveybook/page0002.png')
|
||||
# response = self.client.get('/survey_scans/1991surveybook/')
|
||||
# content = response.content.decode()
|
||||
# print(content)
|
||||
# png93 = re.search(r'/page0093.png">page0093.png</a></td>', content)
|
||||
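
Every test above repeats the same four steps: GET the page, check the status code, decode the body, and regex-search for a known phrase so that a 200-with-an-error-page does not pass silently. A minimal sketch of a shared helper that would factor this out; the name assertPageContains and the test class are hypothetical, not troggle code:

import re
from http import HTTPStatus

from django.test import TestCase


class PageContentSketch(TestCase):
    # Hypothetical helper, not part of troggle: fetch a page, check the
    # status code, then prove the body is real content (not just an error
    # page) by searching for a phrase known to be on the page.
    def assertPageContains(self, path, phrase, status=HTTPStatus.OK):
        response = self.client.get(path)
        self.assertEqual(response.status_code, status)
        content = response.content.decode()
        phmatch = re.search(phrase, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + phrase + "'")

    def test_dataissues(self):
        self.assertPageContains("/dataissues", r"as well as these import/parsing issues")
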
186
core/admin.py
@@ -1,41 +1,22 @@
from troggle.core.models import *
from django.contrib import admin
from django.core import serializers
from django.forms import ModelForm
import django.forms as forms
from django.http import HttpResponse

from troggle.core.models.caves import Area, Cave, CaveAndEntrance, Entrance
from troggle.core.models.logbooks import QM, LogbookEntry, PersonLogEntry, CaveSlug
from troggle.core.models.survex import (
    DrawingFile,
    SingleScan,
    SurvexBlock,
    SurvexDirectory,
    SurvexFile,
    SurvexPersonRole,
    SurvexStation,
)
from troggle.core.models.wallets import Wallet
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition

"""This code significantly adds to the capabilities of the Django Management control panel for Troggle data.
In particular, it enables JSON export of any data with 'export_as_json'
and configures the search fields to be used within the control panel.

What is the search path for the css and js inclusions in the Media subclasses though?!

The page looks for /static/jquery/jquery.min.js
"""
from django.core import serializers
from troggle.core.views_other import downloadLogbook
#from troggle.reversion.admin import VersionAdmin #django-reversion version control


class TroggleModelAdmin(admin.ModelAdmin):

    def save_model(self, request, obj, form, change):
        """overriding admin save to fill the new_since parsing_field

        new_since_parsing is not currently used in troggle. It is a fossil."""
        obj.new_since_parsing = True
        """overriding admin save to fill the new_since parsing_field"""
        obj.new_since_parsing=True
        obj.save()

    class Media:
        js = ("jquery/jquery.min.js", "js/QM_helper.js")  # not currently available to troggle, see media/js/README
        js = ('jquery/jquery.min.js','js/QM_helper.js')


class RoleInline(admin.TabularInline):
@@ -47,36 +28,58 @@ class SurvexBlockAdmin(TroggleModelAdmin):
    inlines = (RoleInline,)


# class QMsFoundInline(admin.TabularInline):
#     model = QM
#     fk_name = "found_by"
#     fields = ("number", "grade", "location_description", "comment")  # need to add foreignkey to cave part
#     extra = 1
class ScannedImageInline(admin.TabularInline):
    model = ScannedImage
    extra = 4


class PersonLogEntryInline(admin.TabularInline):
    model = PersonLogEntry
    raw_id_fields = ("personexpedition",)
class OtherCaveInline(admin.TabularInline):
    model = OtherCaveName
    extra = 1


class SurveyAdmin(TroggleModelAdmin):
    inlines = (ScannedImageInline,)
    search_fields = ('expedition__year','wallet_number')


class QMsFoundInline(admin.TabularInline):
    model=QM
    fk_name='found_by'
    fields=('number','grade','location_description','comment')  # need to add foreignkey to cave part
    extra=1


class PhotoInline(admin.TabularInline):
    model = DPhoto
    exclude = ['is_mugshot' ]
    extra = 1


class PersonTripInline(admin.TabularInline):
    model = PersonTrip
    raw_id_fields = ('personexpedition',)
    extra = 1


#class LogbookEntryAdmin(VersionAdmin):
class LogbookEntryAdmin(TroggleModelAdmin):
    prepopulated_fields = {"slug": ("title",)}
    search_fields = ("title", "expedition__year")
    date_heirarchy = "date"
    # inlines = (PersonLogEntryInline, QMsFoundInline)

    prepopulated_fields = {'slug':("title",)}
    search_fields = ('title','expedition__year')
    date_heirarchy = ('date')
    inlines = (PersonTripInline, PhotoInline, QMsFoundInline)
    class Media:
        css = {"all": ("css/troggleadmin.css",)}  # this does not exist

    actions = ("export_logbook_entries_as_html", "export_logbook_entries_as_txt")

    def export_logbook_entries_as_html(self, modeladmin, request, queryset):
        response = downloadLogbook(request=request, queryset=queryset, extension="html")  # fails, no queryset
    css = {
        "all": ("css/troggleadmin.css",)
    }
    actions=('export_logbook_entries_as_html','export_logbook_entries_as_txt')

    def export_logbook_entries_as_html(modeladmin, request, queryset):
        response=downloadLogbook(request=request, queryset=queryset, extension='html')
        return response

    def export_logbook_entries_as_txt(self, modeladmin, request, queryset):
        response = downloadLogbook(request=request, queryset=queryset, extension="txt")  # fails, no queryset

    def export_logbook_entries_as_txt(modeladmin, request, queryset):
        response=downloadLogbook(request=request, queryset=queryset, extension='txt')
        return response


@@ -86,89 +89,70 @@ class PersonExpeditionInline(admin.TabularInline):


class PersonAdmin(TroggleModelAdmin):
    search_fields = ("first_name", "last_name")
    search_fields = ('first_name','last_name')
    inlines = (PersonExpeditionInline,)


class QMAdmin(TroggleModelAdmin):
    search_fields = ("number", "expoyear")
    list_display = ("__str__", "grade")
    list_display_links = ("__str__",)
    # list_editable = ("comment", "page_ref", "grade")
    # list_per_page = 20
    # raw_id_fields = ("found_by", "ticked_off_by")
    search_fields = ('found_by__cave__kataster_number','number','found_by__date')
    list_display = ('__unicode__','grade','found_by','ticked_off_by')
    list_display_links = ('__unicode__',)
    list_editable = ('found_by','ticked_off_by','grade')
    list_per_page = 20
    raw_id_fields=('found_by','ticked_off_by')


class PersonExpeditionAdmin(TroggleModelAdmin):
    search_fields = ("person__first_name", "expedition__year")
    search_fields = ('person__first_name','expedition__year')


class CaveAdmin(TroggleModelAdmin):
    search_fields = ("official_name", "kataster_number", "unofficial_number")
    search_fields = ('official_name','kataster_number','unofficial_number')
    inlines = (OtherCaveInline,)
    extra = 4


class EntranceAdmin(TroggleModelAdmin):
    search_fields = ("caveandentrance__cave__kataster_number",)


class SurvexStationAdmin(TroggleModelAdmin):
    search_fields = ("name",)


class SurvexFileAdmin(TroggleModelAdmin):
    search_fields = ("path",)


class SurvexDirectoryAdmin(TroggleModelAdmin):
    search_fields = (
        "path",
        "survexdirectory",
    )


class DrawingFileAdmin(TroggleModelAdmin):
    search_fields = ("dwgname",)


class WalletAdmin(TroggleModelAdmin):
    search_fields = ("fpath",)
    search_fields = ('caveandentrance__cave__kataster_number',)


admin.site.register(DPhoto)
admin.site.register(Cave, CaveAdmin)
admin.site.register(Area)
#admin.site.register(OtherCaveName)
admin.site.register(CaveAndEntrance)
admin.site.register(NewSubCave)
admin.site.register(CaveDescription)
admin.site.register(Entrance, EntranceAdmin)
admin.site.register(CaveSlug)
admin.site.register(SurvexBlock, SurvexBlockAdmin)
admin.site.register(DrawingFile, DrawingFileAdmin)
admin.site.register(Expedition)
admin.site.register(Person, PersonAdmin)
admin.site.register(Person,PersonAdmin)
admin.site.register(SurvexPersonRole)
admin.site.register(SurvexDirectory, SurvexDirectoryAdmin)
admin.site.register(SurvexFile, SurvexFileAdmin)
admin.site.register(SurvexStation, SurvexStationAdmin)
admin.site.register(PersonExpedition, PersonExpeditionAdmin)
admin.site.register(PersonExpedition,PersonExpeditionAdmin)
admin.site.register(LogbookEntry, LogbookEntryAdmin)
#admin.site.register(PersonTrip)
admin.site.register(QM, QMAdmin)
admin.site.register(Wallet, WalletAdmin)
admin.site.register(SingleScan)
admin.site.register(DataIssue)
admin.site.register(Survey, SurveyAdmin)
admin.site.register(ScannedImage)
admin.site.register(SurvexStation)

admin.site.register(SurvexScansFolder)
admin.site.register(SurvexScanSingle)


def export_as_json(modeladmin, request, queryset):
    response = HttpResponse(content_type="text/json")
    response["Content-Disposition"] = "attachment; filename=troggle_output.json"
    response = HttpResponse(mimetype="text/json")
    response['Content-Disposition'] = 'attachment; filename=troggle_output.json'
    serializers.serialize("json", queryset, stream=response)
    return response


def export_as_xml(modeladmin, request, queryset):
    response = HttpResponse(content_type="text/xml")
    response["Content-Disposition"] = "attachment; filename=troggle_output.xml"
    response = HttpResponse(mimetype="text/xml")
    response['Content-Disposition'] = 'attachment; filename=troggle_output.xml'
    serializers.serialize("xml", queryset, stream=response)
    return response


admin.site.add_action(export_as_xml)
admin.site.add_action(export_as_json)
#admin.site.add_action(export_as_xml)
#admin.site.add_action(export_as_json)
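
The pairs of near-duplicate lines above are the two sides of the diff: the mimetype= keyword argument was removed in Django 1.7, so the upgrade replaces HttpResponse(mimetype=...) with HttpResponse(content_type=...). A minimal sketch of exercising the export_as_json action directly outside the admin; the test wrapper and the choice of the Person model are assumptions for illustration, not troggle code:

from django.test import RequestFactory, TestCase

from troggle.core.admin import export_as_json
from troggle.core.models.troggle import Person


class ExportActionSketch(TestCase):
    def test_export_as_json_headers(self):
        # The action ignores its modeladmin argument, so None is enough here.
        request = RequestFactory().get("/admin/")
        response = export_as_json(None, request, Person.objects.all())
        # The action should answer with a JSON attachment download.
        self.assertEqual(response["Content-Disposition"],
                         "attachment; filename=troggle_output.json")
        # serializers.serialize always emits a JSON list, "[]" when empty.
        self.assertTrue(response.content.decode().startswith("["))
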
@@ -1,22 +1,5 @@
from django.conf import settings

from troggle.core.models.troggle import Expedition

"""This is the only troggle-specific 'context processor' that troggle uses
in the processing of Django templates.

This seems to mean that every page produced has bundled in its context the complete 'settings' and
the Expedition class object, so all templates can do queries on Expedition.
https://betterprogramming.pub/django-quick-tips-context-processors-da74f887f1fc

If it is commented out, the logbookentry page goes crazy and it screws up all the site_media resolutions for CSS files!
It seems to be necessary to make {{settings.MEDIA_URL}} work. Which is obvious in retrospect.

It is VITAL that no database operations are done in any context processor, see
https://adamj.eu/tech/2023/03/23/django-context-processors-database-queries/
"""

from troggle.core.models import Expedition

def troggle_context(request):
    return {"settings": settings}
    # return {"settings": settings, "Expedition": Expedition}
    return { 'settings':settings, 'Expedition':Expedition }
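
A context processor only takes effect once it is listed in the settings. A sketch of that registration, assuming the function above lives at core.context.troggle_context; the diff does not show the module path, so that dotted name is an assumption:

# Sketch of registering troggle_context in settings.py (Django 1.8+ TEMPLATES
# layout). The dotted path "core.context.troggle_context" is an assumption.
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "core.context.troggle_context",  # makes {{ settings.MEDIA_URL }} resolve
            ],
        },
    },
]
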
43
core/fileAbstraction.py
Normal file
@@ -0,0 +1,43 @@
import troggle.settings as settings
import os
import urllib

def urljoin(x, y): return x + "/" + y

def listdir(*path):
    try:
        strippedpath = [p for p in path if p]
        root = os.path.join(settings.FILES, *strippedpath)
        l = ""
        #l = root + "\n"
        isdir = os.path.isdir(root)  # This seems to be required for os.path.isdir to work...
        #l += str(isdir) + "\n"
        for p in os.listdir(root):
            if os.path.isdir(os.path.join(root, p)):
                l += p + "/\n"
            elif os.path.isfile(os.path.join(root, p)):
                l += p + "\n"
            # Ignore non-files and non-directories
        return l
    except:
        if strippedpath:
            c = reduce(urljoin, strippedpath)
        else:
            c = ""
        c = c.replace("#", "%23")
        print("FILE: ", settings.FILES + "listdir/" + c)
        return urllib.urlopen(settings.FILES + "listdir/" + c).read()

def dirsAsList(*path):
    return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] == "/"]

def filesAsList(*path):
    return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] != "/"]

def readFile(*path):
    try:
        f = open(os.path.join(settings.FILES, *path))
    except:
        f = urllib.urlopen(settings.FILES + "download/" + reduce(urljoin, path))
    return f.read()
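
core/fileAbstraction.py as committed is Python 2 code: urllib.urlopen() and the bare reduce() built-in do not exist in Python 3. A sketch of the readFile() fallback ported to Python 3, under the same assumption the original makes that settings.FILES doubles as a URL prefix when the local file is missing; this is not the committed code:

# Python 3 sketch of the same local-file-then-URL fallback. Not committed code.
import os
import urllib.request
from functools import reduce  # reduce() moved to functools in Python 3

import troggle.settings as settings


def urljoin(x, y):
    return x + "/" + y


def readFile(*path):
    try:
        with open(os.path.join(settings.FILES, *path)) as f:
            return f.read()
    except OSError:
        url = settings.FILES + "download/" + reduce(urljoin, path)
        # Note: urlopen().read() returns bytes, unlike the text-mode open() above.
        with urllib.request.urlopen(url) as f:
            return f.read()
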
@@ -1,39 +0,0 @@
[
{"pk": 9010, "model": "auth.user", "fields":
    {"username": "expotest", "first_name": "ExpoTest", "last_name": "Caver", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expo@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},

{"pk": 9011, "model": "auth.user", "fields":
    {"username": "expotestadmin", "first_name": "ExpoTest", "last_name": "Admin", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expoadmin@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},

{"model": "auth.user", "pk": 8999, "fields":
    {
    "email": "philip.sargent+GFN@gmail.com",
    "first_name": "Gussie",
    "last_name": "Fink-Nottle",
    "id": 8999,
    "is_active": true,
    "is_staff": true,
    "is_superuser": true,
    "last_login": "2021-01-01 00:00:01+0100",
    "password": "pbkdf2_sha256$150000$EbI1VetXC8tM$pHb5Y7af/TCsNeD6H0EwGx4DWB7qyZyq1bUWKytuiTA=",
    "username": "gussie",
    "date_joined": "2021-01-01 00:00:00+0100"
    }},

{"pk": 9000, "model": "auth.user", "fields":
    {"username": "oofy", "first_name": "Oofy", "last_name": "Prosser", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-01-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+oofy@gmail.com", "date_joined": "2021-01-01 00:00:00+0100"}},

{"pk": 9001, "model": "auth.user", "fields":
    {"username": "stiffy", "first_name": "Stiffy", "last_name": "Byng", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+stiffy@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},

{"pk": 9002, "model": "auth.user", "fields":
    {"username": "bingo", "first_name": "Bingo", "last_name": "Little", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+bingo@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},

{"pk": 9003, "model": "auth.user", "fields":
    {"username": "spode", "first_name": "Roderick", "last_name": "Spode", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+spode@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},

{"pk": 9004, "model": "auth.user", "fields":
    {"username": "boko", "first_name": "Boko", "last_name": "Fittleworth", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+boko@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}}

]
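
The deleted .json files above and below are Django fixtures: serialized auth.user rows (the Drones Club names make clear this is test data). A sketch of how such a fixture is used, assuming it is saved as expo_users.json somewhere on the fixtures path; the diff does not show where the file lived:

# On the command line (file name assumed): python manage.py loaddata expo_users.json
from django.contrib.auth.models import User
from django.test import TestCase


class FixtureSketch(TestCase):
    # Django loads each named fixture into the test database for every test.
    fixtures = ["expo_users.json"]

    def test_expotest_is_not_admin(self):
        user = User.objects.get(username="expotest")
        self.assertFalse(user.is_superuser)
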
@@ -1,8 +0,0 @@
[
{"pk": 9010, "model": "auth.user", "fields":
    {"username": "expotest", "first_name": "ExpoTest", "last_name": "Caver", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expo@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},

{"pk": 9011, "model": "auth.user", "fields":
    {"username": "expotestadmin", "first_name": "ExpoTest", "last_name": "Admin", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expoadmin@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}}
]
@@ -1,292 +0,0 @@
[
{"pk": 1, "model": "club.boat", "fields":
    {"name": "Skylark", "cuy_boat": true, "berths": 8, "boat_type": "Beneteau First 40.7", "length": "41ft", "notes": "We bought her in June 2016 when she was based in Izola, Slovenia, then brought her home over the course of the 2016 Summer Programme."}},


{"pk": 1, "model": "club.clubrole", "fields":
    {"html_description": "Head of the Section: Overall responsibility for all the activities of CUY - authorises all activities, finances and external communication on behalf of the Club; Committee Management: Organisation of CUY Committee Meetings and Elections; Yacht Charter: Liaises with yacht charter companies to arrange yacht bookings for trips; Development: Organisation of long-term development plans for the Club;", "multiple": false, "title": "Commodore", "rank": 1, "short_description": "Chief", "committee_position": true, "club_email": "commodore@cuy.org.uk", "slug": "commodore"}},

{"pk": 2, "model": "club.clubrole", "fields":
    {"html_description": "House Officer Support: Authorizes the activities of all house officers (Purser, Social, Webmaster, Publicity and Sponsorship) and ensures they have details of their responsibilities and that they are properly informed and supported in thier positions. Works with the Rear-Commodore House on legal issues and documentation (see below). Manages Club Shop orders.", "multiple": false, "title": "Vice-Commodore House", "rank": 2, "short_description": "Blah", "committee_position": true, "club_email": "vc-house@cuy.org.uk", "slug": "vice-commodore-house"}},

{"pk": 3, "model": "club.clubrole", "fields":
    {"html_description": "Sailing Officer Support: Authorizes the activities of all sailing officers (Training and Racing) and ensures they have details of their responsibilities and that they are properly informed and supported in thier positions. Event Management: Manages the CUY program of trips and events by liaising with skippers, charterers and the commodore. Ensures a proper and accurate record is kept of trip and event information both before and after the trip or event. Liases with the Rear-Commodore Sailing about upcoming trips to ensure they are viable and sucessful.", "multiple": false, "title": "Vice-Commodore Sailing", "rank": 2, "short_description": "Blah", "committee_position": true, "club_email": "vc-sailing@cuy.org.uk", "slug": "vice-commodore-sailing"}},

{"pk": 5, "model": "club.clubrole", "fields":
    {"html_description": "Legal: Ensures CUY obtains and sustains insurance policies appropriate to Club activities. Monitors details of charter agreements. Manages contractual disputes with charterers. Liases with Club legal contacts. Documentation: Ensures CUY Regulations; CUY Crew Register; Safety Policy; House Style; Skipper Manual; Agenda and Minutes Committee Meetings and any other key club documentation stay up-to-date.\r\n\r\n", "multiple": false, "title": "Rear-Commodore House", "rank": 3, "short_description": "Blah", "committee_position": true, "club_email": "rc-house@cuy.org.uk", "slug": "rear-commodore-house"}},

{"pk": 6, "model": "club.clubrole", "fields":
    {"html_description": "Works with VC-Training to ensure a workable programme of practical and theory courses is made for each term. Responsible for liaising with instructors to ensure courses run smoothly.", "multiple": false, "title": "Rear-Commodore Training", "rank": 3, "short_description": "Blah", "committee_position": true, "club_email": "rc-training@cuy.org.uk", "slug": "rear-commodore-training"}},

{"pk": 7, "model": "club.clubrole", "fields":
    {"html_description": "Skipper Managament: Ensures skippers of upcoming trips are aware of standard club procedures detailed in the CUY Manual and that they have the necessary information and equipment. Ensures that the crew have completed Crew Registers and paid Membership Fees before going on trips. Ensures records are taken of travel arrangements to and from trip or event locations. Upon completion of trip ensures expenses and defect reports are collated.", "multiple": false, "title": "Rear-Commodore Sailing", "rank": 3, "short_description": "Blah", "committee_position": true, "club_email": "rc-sailing@cuy.org.uk", "slug": "rear-commodore-sailing"}},

{"pk": 8, "model": "club.clubrole", "fields":
    {"html_description": "Financial management; processing all payments and receipts for activities and permenent funds. Preparing the Financial Statement for termly audit and end of year Summary of Accounts. Membership; management of membership in liasion with Trip/Event organisers, Rear-Commodore Sailing, and the DB Admin. Grants applications; preparing funding applications for the Sports and Societies syndicates, and other funding source that may be available. Spending plans & strategy; preparing and presenting to the Committee financial forecasts and strategies for the investment and long term financial future of the Club", "multiple": false, "title": "Purser", "rank": 4, "short_description": "Blah", "committee_position": true, "club_email": "purser@cuy.org.uk", "slug": "purser"}},

{"pk": 9, "model": "club.clubrole", "fields":
    {"html_description": "Social programme; submission of dates for socials to the Vice-Commodore Sailing, and planning of socials, including end of term dinner. New & potential members introduction; acting at socials to welcome new & potential members and inform them about club activities.", "multiple": false, "title": "Social Officer", "rank": 5, "short_description": "Blah", "committee_position": true, "club_email": "social@cuy.org.uk", "slug": "social-officer"}},

{"pk": 10, "model": "club.clubrole", "fields":
    {"html_description": "Organising RYA Practical Training courses.", "multiple": false, "title": "Practical Training Officer", "rank": 5, "short_description": "Blah", "committee_position": true, "club_email": "practical@cuy.org.uk", "slug": "practical-training-officer"}},

{"pk": 11, "model": "club.clubrole", "fields":
    {"html_description": "CUY Racing Squad training and development; improving racing knowledge and skills. Race selection & entry management. Varsity Yacht Race; organising an annual race with Oxford as part of an RORC/JOG or similar offshore/coastal/inshore race.", "multiple": false, "title": "Racing Officer", "rank": 5, "short_description": "Blah", "committee_position": true, "club_email": "racing@cuy.org.uk", "slug": "racing-officer"}},

{"pk": 13, "model": "club.clubrole", "fields":
    {"html_description": "Webmaster; control and maintenance of style, scripts and code validity. Liasion with SRCF host; ensuring compliance with regulations and maintenance of filespace. DB Admin; development and administration of CUY Database and associated e-mail lists. Maintenance of Photos section of the website.", "multiple": false, "title": "Webmaster and Database Admin", "rank": 6, "short_description": "Blah", "committee_position": true, "club_email": "webgeek@cuy.org.uk", "slug": "webmaster-and-database-admin"}},

{"pk": 14, "model": "club.clubrole", "fields":
    {"html_description": "Boat management. Is responsible for the general upkeep of CUY yachts so that they are ready and safe to be sailed. Ensures that the correct equipment and information on its use is onboard and in the correct locations. Also liaises with VC-Sailing in order to create a workable trip plan.", "multiple": false, "title": "Bosun", "rank": 4, "short_description": "Blah", "committee_position": true, "club_email": "bosun@cuy.org.uk", "slug": "bosun"}},

{"pk": 4, "model": "club.clubrole", "fields":
    {"html_description": "Management and delegation of tasks to the Practical Training Officer and Theory Training Officer. Development and Evaluation of the CUY Training Scheme and courses run within the scheme. Ensuring compliance with CUY standards as set out in the training section of the CUY Manual. Training Programme; ensuring submission of dates to the Vice-Commodore Sailing for all training activities, with regard to the advice given in the Training section of the CUY Manual. Overseeing the editing and expanding the website training section.", "multiple": false, "title": "Vice-Commodore Training", "rank": 2, "short_description": "Blah", "committee_position": true, "club_email": "vc-training@cuy.org.uk", "slug": "vice-commodore-training"}},

{"pk": 15, "model": "club.clubrole", "fields":
    {"html_description": "Management and delegation of tasks to the Practical Training Officer and Theory Training Officer. Development and Evaluation of the CUY Training Scheme and courses run within the scheme. Ensuring compliance with RYA and CUY standards as set out in the training section of the CUY Manual. Training Programme; ensuring submission of dates to the Vice-Commodore Sailing for all training activities, with regard to the advice given in the Training section of the CUY Manual. Overseeing the editing and expanding the website training section. Management of the RYA Practical and Shorebased training centres.", "multiple": false, "title": "RYA Principal", "rank": 4, "short_description": "Blah", "committee_position": true, "club_email": "rya-principal@cuy.org.uk", "slug": "rya-principal"}},

{"pk": 12, "model": "club.clubrole", "fields":
    {"html_description": "Publicity: Publicity articles & campaigns; organising Freshers' Fair and Squash as well as ongoing publicity throughout the year. College Reps scheme; implementation and administration of College Reps scheme as a route of dissemination for publicity material and attracting new members. Sponsorship & funding in co-ordination with the rest of the CUY Committee", "multiple": false, "title": "Publicity and Sponsorship Officer", "rank": 6, "short_description": "Blah", "committee_position": true, "club_email": "sponsorship@cuy.org.uk", "slug": "publicity-and-sponsorship-officer"}},

{"pk": 16, "model": "club.clubrole", "fields":
    {"html_description": "Blah", "multiple": true, "title": "Skipper", "rank": 8, "short_description": "Blah", "committee_position": true, "club_email": "", "slug": "skipper"}},

{"pk": 17, "model": "club.clubrole", "fields":
    {"html_description": "Blah", "multiple": true, "title": "Instructor", "rank": 7, "short_description": "Blah", "committee_position": true, "club_email": "instructors@cuy.org.uk", "slug": "instructors"}},


{"pk": 5, "model": "club.eventtype", "fields":
    {"name": "Other", "default_role": 4,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "shorebased"}},

{"pk": 4, "model": "club.eventtype", "fields":
    {"name": "Theory Training", "default_role": 5,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "shorebased"}},

{"pk": 3, "model": "club.eventtype", "fields":
    {"name": "Practical Training", "default_role": 5,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "atsea"}},

{"pk": 2, "model": "club.eventtype", "fields":
    {"name": "Race", "default_role": 4,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "atsea"}},

{"pk": 6, "model": "club.eventtype", "fields":
    {"name": "Social", "default_role": null,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "social"}},

{"pk": 1, "model": "club.eventtype", "fields":
    {"name": "Cruising", "default_role": 4,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "atsea"}},

{"pk": 7, "model": "club.eventtype", "fields":
    {"name": "Trip", "default_role": 4,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "atsea"}},

{"pk": 8, "model": "club.eventtype", "fields":
    {"name": "Adventurous", "default_role": 4,
    "default_thumbnail": "images/HappySailing_square.jpeg",
    "event_type": "atsea"}},


{"pk": 1, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "rya-start-yachting", "title": "RYA Start Yachting"}},

{"pk": 2, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "rya-day-skipper-theory", "title": "RYA Day Skipper Theory"}},

{"pk": 3, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "rya-day-skipper-practical", "title": "RYA Day Skipper Practical"}},

{"pk": 4, "model": "club.qualification", "fields":
    {"rya": false, "qualification_type": "", "slug": "vhf-radio-licence", "title": "VHF SRC Radio Licence"}},

{"pk": 5, "model": "club.qualification", "fields":
    {"rya": false, "qualification_type": "", "slug": "first-aid-certificate", "title": "First Aid Certificate",
    "expires": true, "length": 3}},

{"pk": 6, "model": "club.qualification", "fields":
    {"rya": false, "qualification_type": "", "slug": "cuy-first-mate", "title": "CUYC First Mate"}},

{"pk": 7, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "rya-mca-costal-skipper-theory", "title": "RYA Costal Skipper/Yachtmaster Theory"}},

{"pk": 8, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "rya-compcrew", "title": "RYA Competent Crew"}},

{"pk": 9, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "rya-costal-skipper-practical-course", "title": "RYA Costal Skipper Practical Course"}},

{"pk": 10, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "rya-mca-costal-skipper-certificate-of-competence", "title": "RYA / MCA Yachtmaster Costal Certificate of Competence"}},

{"pk": 11, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "rya-mca-yachtmaster-offshore-certificate-of-compet", "title": "RYA / MCA Yachtmaster Offshore Certificate of Competence"}},

{"pk": 12, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "rya-mca-yachtmaster-ocean-certificate-of-competenc", "title": "RYA / MCA Yachtmaster Ocean Certificate of Competence"}},

{"pk": 13, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "rya-diesel-engine-course", "title": "RYA Diesel Engine Course"}},

{"pk": 14, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "rya-radar-course", "title": "RYA Radar Course"}},

{"pk": 15, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "rya-sea-survival-course", "title": "RYA Sea Survival Course"}},

{"pk": 16, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "rya-yachtmaster-ocean-theory", "title": "RYA Yachtmaster Ocean Theory"}},

{"pk": 17, "model": "club.qualification", "fields":
    {"rya": false, "qualification_type": "", "slug": "cuy-skipper", "title": "CUYC Skipper"}},

{"pk": 18, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "cuy-examiner-yacht", "title": "RYA Yachtmaster Examiner",
    "expires": false}},

{"pk": 19, "model": "club.qualification", "fields":
    {"rya": false, "qualification_type": "", "slug": "cuy-sail-trim", "title": "CUYC Sail Trim"}},

{"pk": 20, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-commercial", "title": "RYA Commercial Endorsement",
    "expires": true, "length": 5}},

{"pk": 21, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-ppr-course", "title": "RYA Professional Practices and Responsibilities"}},

{"pk": 22, "model": "club.qualification", "fields":
    {"rya": false, "qualification_type": "", "slug": "cuy-ml5", "title": "MCA ML5 Medical Certificate",
    "expires": true, "length": 5}},

{"pk": 23, "model": "club.qualification", "fields":
    {"rya": false, "qualification_type": "", "slug": "cuy-eng1", "title": "MCA ENG.1 Medical Certificate",
    "expires": true, "length": 2}},

{"pk": 24, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "cuy-instruct-cruise", "title": "RYA Cruising Instructor",
    "expires": true, "length": 5}},

{"pk": 25, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "Practical", "slug": "cuy-instruct-yacht", "title": "RYA Yachtmaster Instructor",
    "expires": true, "length": 5}},

{"pk": 26, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-instruct-shore", "title": "RYA Shorebased Instructor",
    "expires": false}},

{"pk": 27, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-instruct-diesel", "title": "RYA Diesel Engine Instructor",
    "expires": false}},

{"pk": 28, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-instruct-fistaid", "title": "RYA First Aid Instructor",
    "expires": false}},

{"pk": 29, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-instruct-survival", "title": "RYA Sea Survival Instructor",
    "expires": false}},

{"pk": 30, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-instruct-radar", "title": "RYA Radar Instructor",
    "expires": false}},

{"pk": 31, "model": "club.qualification", "fields":
    {"rya": true, "qualification_type": "", "slug": "cuy-instruct-vhf", "title": "RYA VHF Instructor",
    "expires": false}},


{"pk": 1, "model": "club.role", "fields":
    {"event_types": [5, 3, 2, 1], "name": "Skipper", "description": "Skipper"}},

{"pk": 2, "model": "club.role", "fields":
    {"event_types": [5, 3, 2, 1], "name": "First Mate", "description": "First Mate"}},

{"pk": 3, "model": "club.role", "fields":
    {"event_types": [5, 3, 2, 1], "name": "Watch Leader", "description": "Watch leader"}},

{"pk": 4, "model": "club.role", "fields":
    {"event_types": [5, 3, 2, 1], "name": "Crew", "description": "crew"}},

{"pk": 5, "model": "club.role", "fields":
    {"event_types": [5, 4, 3, 2], "name": "Student", "description": "student"}},

{"pk": 6, "model": "club.role", "fields":
    {"event_types": [5, 4, 3, 2], "name": "Instructor", "description": "Instructor"}},

{"pk": 7, "model": "club.role", "fields":
    {"event_types": [5, 2, 1], "name": "Helm", "description": "Helm"}},

{"pk": 8, "model": "club.role", "fields":
    {"event_types": [5, 2], "name": "Bow", "description": "Bowman"}},

{"pk": 9, "model": "club.role", "fields":
    {"event_types": [5, 2], "name": "Mast", "description": "Mastman"}},

{"pk": 10, "model": "club.role", "fields":
    {"event_types": [5, 2], "name": "Pit", "description": "Pit."}},

{"pk": 11, "model": "club.role", "fields":
    {"event_types": [5, 2], "name": "Trim", "description": "Trim"}},

{"pk": 12, "model": "club.role", "fields":
    {"event_types": [5, 2], "name": "Main Trim", "description": "Main trim."}},

{"pk": 13, "model": "club.role", "fields":
    {"event_types": [5, 2], "name": "Navigator", "description": "Navigator"}},

{"pk": 14, "model": "club.role", "fields":
    {"event_types": [5, 2], "name": "Tactics", "description": "Tactics"}},

{"pk": 15, "model": "club.role", "fields":
    {"event_types": [5, 3, 2, 1, 7, 8], "name": "Second Mate", "description": "Second Mate is usually third in charge, after the Skipper and the First Mate."}},

{"pk": 16, "model": "club.role", "fields":
    {"event_types": [6], "name": "Drinker", "description": "Someone who will drink."}},

{"pk": 17, "model": "club.role", "fields":
    {"event_types": [8, 1], "name": "Cook", "description": "Cooks food."}},


{"pk": 3, "model": "club.samplewebpage", "fields":
    {"markup": "<h1>(% event_name %)</h1>\r\n\r\n\r\nBlah Practical Training trip example webpage", "slug": "practical-training-index", "description": "Default page for a practical training trip.", "title": "Practical Training index"}},

{"pk": 4, "model": "club.samplewebpage", "fields":
    {"markup": "<h1>(% event_name %)</h1>\r\n\r\nTheory trip\r\n example webpage", "slug": "theory-training-index", "description": "ehcr", "title": "Theory Training Index"}},

{"pk": 5, "model": "club.samplewebpage", "fields":
    {"markup": "<h1>(% event_name %)</h1>\r\n\r\n\r\nBlah Social example webpage", "slug": "social-index", "description": "Balh", "title": "Social Index"}},

{"pk": 6, "model": "club.samplewebpage", "fields":
    {"markup": "<h1> Kit Page</h1>\r\n example webpage", "slug": "kit", "description": "Kit template page", "title": "Kit"}},

{"pk": 7, "model": "club.samplewebpage", "fields":
    {"markup": "<h1>Crew!</h1>\r\n\r\n example webpage", "slug": "crew", "description": "Crew page", "title": "Crew"}},

{"pk": 2, "model": "club.samplewebpage", "fields":
    {"markup": "<h1>(% event_name %)</h1>\r\n\r\n\r\nBlah Racing trip example webpage", "slug": "racing-index", "description": "Default Race trip index page.", "title": "Racing Index"}},

{"pk": 1, "model": "club.samplewebpage", "fields":
    {"markup": "<h1>(% event_name%)<h1>\r\n\r\nBlah blah Cruising trip example webpage blah.", "slug": "cruising-index", "description": "Default cruising trip index page.", "title": "Cruising Index"}},


{"pk": 1, "model": "photologue.photosize", "fields":
    {"name": "thumbnail", "watermark": null, "increment_count": false, "effect": null, "crop": true, "height": 75, "width": 75, "upscale": false, "pre_cache": true, "quality": 90}},

{"pk": 2, "model": "photologue.photosize", "fields":
    {"name": "small", "watermark": null, "increment_count": false, "effect": null, "crop": false, "height": 150, "width": 150, "upscale": false, "pre_cache": true, "quality": 90}},

{"pk": 3, "model": "photologue.photosize", "fields":
    {"name": "display", "watermark": null, "increment_count": true, "effect": null, "crop": false, "height": 500, "width": 500, "upscale": false, "pre_cache": false, "quality": 90}},

{"pk": 4, "model": "photologue.photosize", "fields":
    {"name": "large", "watermark": null, "increment_count": true, "effect": null, "crop": false, "height": 1000, "width": 1000, "upscale": false, "pre_cache": false, "quality": 90}}

]
@@ -1,500 +0,0 @@
[
{"model": "club.boat", "pk": 8000, "fields":
    {
    "berths": 4,
    "boat_type": null,
    "cuy_boat": 0,
    "id": 8000,
    "length": "35",
    "name": "Goblin",
    "notes": "We Didn't Mean to Go to Sea is the seventh book in Arthur Ransome's Swallows and Amazons series of children's books.\r\n\r\nThe book features a small sailing cutter, the Goblin, which is almost identical to Ransome's own boat Nancy Blackett. Ransome sailed Nancy Blackett across to Flushing by the same route as part of his research for the book. The navigational detail and the geography are both correct for the period when the story is set, unlike other books in the series."
    }},
{"model": "club.boat", "pk": 8001, "fields":
    {
    "berths": 0,
    "boat_type": "dinghy",
    "cuy_boat": 0,
    "id": 8001,
    "length": "13",
    "name": "Swallow",
    "notes": "Ransome and Ernest Altounyan bought two small dinghies called Swallow and Mavis. Ransome kept Swallow until he sold it a number of years later."
    }},
{"model": "club.boat", "pk": 8002, "fields":
    {
    "berths": 0,
    "boat_type": "dinghy",
    "cuy_boat": 0,
    "id": 8002,
    "length": "13",
    "name": "Amazon",
    "notes": "the Blackett children (Nancy and Peggy), who sail a dinghy named Amazon. \r\n\r\nSwallows and Amazons contains no sorcery; its plot is plausible, its characters ordinary children. Therein lies its enduring magic. A celebration of friendship, imagination, fair play, and exploration, Swallows and Amazons inspires even the most landlocked kid to dream of messing about in boats, building fires, camping out and navigating by the stars"
    }},

{"model": "club.webpagecategory", "pk": 8000, "fields":
    {
    "id": 8000,
    "name": "Yachts",
    "slug": "yachts"
    }},

{"model": "club.webpagecategory", "pk": 8001, "fields":
    {
    "id": 8001,
    "name": "Club",
    "slug": "club"
    }},

{"model": "club.webpagecategory", "pk": 8002, "fields":
    {
    "id": 8002,
    "name": "Summer",
    "slug": "summer"
    }},

{"model": "club.webpagecategory", "pk": 8002, "fields":
    {
    "id": 8003,
    "name": "Sailing",
    "slug": "sailing"
    }},

{"model": "club.webpagecategory_photos", "pk": 8000, "fields":
    {
    "clubphoto_id": 7000,
    "id": 5000,
    "webpagecategory_id": 8000
    }},
{"model": "club.clubphoto", "pk": 7000, "fields":
    {
    "id": 7000,
    "name": "IRPCS 4.4",
    "num_views": 0,
    "origional_image": "images/training/exams/IRPCS-4-4.png"
    }},
{"model": "club.webpage", "pk": 9000, "fields":
    {
    "category_id": 8000,
    "description": "Current Yacht",
    "edited": 1,
    "event_id": null,
    "id": 9000,
    "index": 1,
    "markup": "<h1>Skylark</h1>\r\n<p><strong> \r\n<table border=\"0\">\r\n<tbody>\r\n<tr>\r\n<td>\r\n<p><strong>Skylark, a Beneteau First 40.7, is our main and largest club yacht. </strong>We bought her in June 2016 when she was based in Izola, Slovenia, then brought her home over the course of the 2016 Summer Programme. She's been to Croatia, Greece, Italy, Spain and France on the way home - along with countless other stops along the way.</p>\r\n<p>Since arriving in the UK, she's spent time on the East and South coasts, pottering round the Solent or across the Channel, while Summer Programmes have taken her to the Norwegian Fjords, round the West Coast of Ireland, and all the way up to the Faeroes and Shetland.</p><img src='/site-media/images/training/exams/IRPCS-4-4.png'>",
    "ordering": 10,
    "slug": "yacht1",
    "title": "Skylark Yacht"
    }},

{"model": "club.webpage", "pk": 9001, "fields":
    { "category_id": 8001,
    "description": "Safeguarding Policy",
    "edited": 1,
    "event_id": null,
    "id": 9001,
    "index": 1,
    "markup": "<h1>Safeguarding Policy</h1><p>Content is here in the main backup database</p>",
    "ordering": 10,
    "slug": "safeguarding-policy",
    "title": "Safeguarding Policy"
    }},
{"model": "club.webpage", "pk": 9002, "fields":
    { "category_id": 8001,
    "description": "Complaints",
    "edited": 1,
    "event_id": null,
    "id": 9002,
    "index": 1,
    "markup": "<h1>Complaints</h1><p>Content is here in the main backup database</p>",
    "ordering": 10,
    "slug": "complaints",
    "title": "Complaints"
    }},
{"model": "club.webpage", "pk": 9003, "fields":
    { "category_id": 8001,
    "description": "Other Sailing Opportunities in Cambridge",
    "edited": 1,
    "event_id": null,
    "id": 9003,
    "index": 1,
    "markup": "<h1>Other Sailing Opportunities in Cambridge</h1><p>Content is here in the main backup database</p>",
    "ordering": 10,
    "slug": "other-sailing-in-camb",
    "title": "Other Sailing Opportunities in Cambridge"
    }},
{"model": "club.webpage", "pk": 9004, "fields":
    { "category_id": 8001,
    "description": "CUYC Privacy Notice",
    "edited": 1,
    "event_id": null,
    "id": 9004,
    "index": 1,
    "markup": "<h1>CUYC Privacy Notice</h1><p>Content is here in the main backup database</p>",
    "ordering": 10,
    "slug": "privacy-notice",
    "title": "CUYC Privacy Notice"
    }},
{"model": "club.webpage", "pk": 9005, "fields":
    { "category_id": 8003,
    "description": "FAQ",
    "edited": 1,
    "event_id": null,
    "id": 9005,
    "index": 0,
    "markup": "<h1>FAQ</h1><p>Content is here in the main backup database</p>",
    "ordering": 10,
    "slug": "faq",
    "title": "FAQ" }},
{"model": "club.webpage", "pk": 9006, "fields":
    { "category_id": 8002,
    "description": "Summer",
    "edited": 1,
    "event_id": null,
    "id": 9006,
    "index": 1,
    "markup": "<h1>Summer</h1><p>Content is here in the main backup database</p>",
    "ordering": 10,
    "slug": "summer",
    "title": "Summer"
    }},


{"pk": 9000, "model": "auth.user", "fields":
    {"username": "oofy", "first_name": "Oofy", "last_name": "Prosser", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-01-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+oofy@gmail.com", "date_joined": "2021-01-01 00:00:00"}},

{"pk": 9001, "model": "auth.user", "fields":
    {"username": "stiffy", "first_name": "Stiffy", "last_name": "Byng", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+stiffy@gmail.com", "date_joined": "2021-02-01 00:00:00"}},

{"pk": 9002, "model": "auth.user", "fields":
    {"username": "bingo", "first_name": "Bingo", "last_name": "Little", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+bingo@gmail.com", "date_joined": "2021-02-01 00:00:00"}},

{"pk": 9003, "model": "auth.user", "fields":
    {"username": "spode", "first_name": "Roderick", "last_name": "Spode", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+spode@gmail.com", "date_joined": "2021-02-01 00:00:00"}},

{"pk": 9004, "model": "auth.user", "fields":
    {"username": "boko", "first_name": "Boko", "last_name": "Fittleworth", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+boko@gmail.com", "date_joined": "2021-02-01 00:00:00"}},


{"model": "club.member", "pk": 9000, "fields":
    {"user": 9000, "title": "Millionaire", "email": "philip.sargent+oofy@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "senior", "affiliation": "", "affiliation_other": null, "portrait": "", "committee_email_prefix": "oofy", "bio": "Alexander Charles 'Oofy' Prosser is the richest member of the Drones Club, he is also a friend of Bertie Wooster.", "credit_rating": "ok", "crsid": null}},

{"model": "club.member", "pk": 9001, "fields":
    {"user": 9001, "title": "Niece and ward of Sir Watkyn Bassett", "email": "philip.sargent+stiffy@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "affiliate", "affiliation": "student", "affiliation_other": null, "portrait": "", "committee_email_prefix": "stiffy", "bio": "Stephanie 'Stiffy' Byng is the niece and ward of Sir Watkyn Bassett, she initially lives with him in Totleigh Towers. She is short and has blue eyes. She wears a wind-swept hairstyle, and has an Aberdeen terrier named Bartholomew. Stiffy often gets bright ideas that end up making trouble for others, and she is not above using blackmail to induce Bertie Wooster to do errands for her.", "credit_rating": "good", "crsid": null}},

{"model": "club.member", "pk": 9002, "fields":
    {"user": 9002, "title": "Described as long and thin", "email": "philip.sargent+bingo@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "senior", "affiliation": "unknown", "affiliation_other": null, "portrait": "", "committee_email_prefix": "bingo", "bio": "Bingo, who has an impulsive and romantic nature, falls in love with numerous women in quick succession, generally pursuing an absurd scheme to woo his latest love interest and invariably causing problems for his pal Bertie", "credit_rating": "ok", "crsid": null}},

{"model": "club.member", "pk": 9003, "fields":
    {"user": 9003, "title": "Dictator", "email": "philip.sargent+spode@gmail.com", "member_state": "active", "nice": "bad", "nice_ref": "fascist tendences", "member_type": "unknown", "affiliation": "external", "affiliation_other": null, "portrait": "", "committee_email_prefix": "spode", "bio": "The leader of a fascist group in London called the Saviours of Britain, also known as the Black Shorts.", "credit_rating": "good", "crsid": null}},

{"model": "club.member", "pk": 9004, "fields":
    {"user": 9004, "title": "An author with a unique dress sense", "email": "philip.sargent+boko@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "senior", "affiliation": "postdoc", "affiliation_other": null, "portrait": "", "committee_email_prefix": "boko", "bio": "According to Bertie, after Jeeves first saw him, Jeeves winced and tottered off to the kitchen, probably to pull himself together with cooking sherry. Boko is engaged to Zenobia 'Nobby' Hopwood", "credit_rating": "ok", "crsid": null}},


{"model": "club.article", "pk": 9000, "fields":
    {"title": "Blood orange and Campari steamed pudding", "publish": "2021-02-01 00:00:00", "hide": false,
    "author": 9000, "thumbnail": "images/training/exams/IRPCS-3-6.png", "slug":"blood_orange_campari",
    "short_summary": "A recipe for a sharp and delicious pudding",
    "tease": "Put the orange segments and pomegranate seeds in a bowl with the golden syrup, Campari and gin",
    "body": "This updated take on the traditional steamed pudding stars blood oranges and Campari. It can even be cooked in the microwave for a quick and easy hack. Serve with proper custard."}},

{"model": "club.article", "pk": 9001, "fields":
    {"title": "Orange-scented brioche pudding", "publish": "2021-02-01 00:00:00", "hide": false,
    "author": 9001, "thumbnail": "images/training/exams/IRPCS-3-5.png", "slug":"orange_brioche",
    "short_summary": "A fragrant bread and butter pudding.",
    "tease": "Put the sultanas and Grand Marnier into a small saucepan, bring to the boil and simmer",
    "body": "An old-fashioned bread and butter pudding with a fragrant flourish. You can get ready-sliced long brioche loaves, which makes life simpler, but if you need to get out a bread knife yourself, just try to slice thinly. Any good unchunky marmalade would do. I think this is better warm rather than hot straight from the oven."}},

{"model": "club.article", "pk": 9002, "fields":
    {"title": "Upside-down orange pudding", "publish": "2021-02-01 00:00:00", "hide": true,
    "author": 9002, "thumbnail": "images/training/exams/IRPCS-3-5.png", "slug":"upside_orange",
    "short_summary": "Very yummy.",
    "tease": "Yum",
    "body": "If you find puddings a bit heavy, you'll love this light upside-down pudding. And it's easy to make too."}},

{"model": "club.article", "pk": 9003, "fields":
    {"title": "Hot Citrus Pudding", "publish": "2021-02-01 00:00:00", "hide": false,
    "author": 9001, "thumbnail": "images/training/exams/IRPCS-3-6.png", "slug":"hot_citrus",
    "short_summary": "Although this pudding is served hot, it is just as nice cold. ",
    "tease": "Mind you, I doubt if there will be any left over.",
    "body": "There are two main types of oranges: sweet oranges and bitter (Seville) oranges. The former can be thick- or thin- skinned, with or without seeds, and has sweet-tasting orange or red-flecked flesh. Bitter oranges have aromatic dimpled skin with very bitter pith and very sour, pale-orange flesh. They always contain seeds."}},
|
||||
|
||||
{"model": "club.article", "pk": 9004, "fields":
|
||||
{"title": "Self-saucing Jaffa pudding", "publish": "2021-02-01 00:00:00", "hide": false,
|
||||
"author": 9001, "thumbnail": "images/training/exams/IRPCS-4-1.png", "slug":"jaffa_saucing",
|
||||
"short_summary": "An intense chocolate orange sponge bake. ",
|
||||
"tease": "Yum. This intense chocolate orange sponge bake with thick sauce is about as indulgent as a good pudding gets.",
|
||||
"body": "Mix ½ pint boiling water with sugar and cocoa then pour this over the batter. Return the pot to the slow cooker base, cover and cook on High for 3 hours until firm and risen."}},
|
||||
|
||||
{"model": "club.article", "pk": 9005, "fields":
|
||||
{"title": "Terry's Chocolate Orange Melt In The Middle Pudding", "publish": "2021-02-01 00:00:00", "hide": false,
|
||||
"author": 9001, "thumbnail": "images/training/exams/IRPCS-4-2.png", "slug":"chocolate_orange",
|
||||
"short_summary": "If you are fan of Chocolate Orange this is the pud for you.",
|
||||
"tease": "Yum. a beautifully light chocolate sponge pudding.",
|
||||
"body": "This beautifully light chocolate sponge pudding is encased around a whole Terry's Chocolate Orange and when served straight from the oven will create a gooey melt in the middle chocolate centre. This pudding is a great alternative to the traditional Christmas pudding or a deliciously indulgent finale to a weekend roast with the family"}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9000, "fields":
|
||||
{"member": 9000, "claim_date": "2021-02-01 00:00:01", "claim": "alum", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9001, "fields":
|
||||
{"member": 9001, "claim_date": "2021-02-01 00:00:01", "claim": "affiliate", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9002, "fields":
|
||||
{"member": 9002, "claim_date": "2021-02-01 00:00:01", "claim": "senior", "confirmed": true, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9003, "fields":
|
||||
{"member": 9003, "claim_date": "2021-02-01 00:00:01", "claim": "unknown", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9004, "fields":
|
||||
{"member": 9004, "claim_date": "2021-02-01 00:00:01", "claim": "senior", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
|
||||
{"model": "club.elected", "pk": 5000, "fields":
|
||||
{"member": 9000, "elected_until": "", "club_role": 9000
|
||||
}},
|
||||
|
||||
{"model": "club.elected", "pk": 5001, "fields":
|
||||
{"member": 9001, "elected_until": "", "club_role": 16
|
||||
}},
|
||||
|
||||
{"model": "club.elected", "pk": 5002, "fields":
|
||||
{"member": 9001, "elected_until": "", "club_role": 17
|
||||
}},
|
||||
|
||||
|
||||
{"model": "club.award", "pk": 6000, "fields":
|
||||
{"member": 9001, "award_date": "2000-01-01", "qualification": 11
|
||||
}},
|
||||
|
||||
{"model": "club.award", "pk": 6001, "fields":
|
||||
{"member": 9002, "award_date": "2000-01-01", "qualification": 11
|
||||
}},
|
||||
|
||||
{"model": "club.award", "pk": 6002, "fields":
|
||||
{"member": 9004, "award_date": "2000-01-01", "qualification": 3
|
||||
}},
|
||||
|
||||
{"model": "club.award", "pk": 6003, "fields":
|
||||
{"member": 9000, "award_date": "2019-03-10", "qualification": 5
|
||||
}},
|
||||
|
||||
{"model": "club.clubrole", "pk": 9000, "fields":
|
||||
{"title": "Drunken sailor", "slug": "drunk_sailor", "rank": 100, "multiple": true, "club_email": "", "short_description": "Traditional crew role", "html_description": "In the scuppers, early in the morning.", "committee_position": false, "division": null}},
|
||||
|
||||
|
||||
{"model": "club.crewregister", "pk": 10000, "fields":
|
||||
{"member": 9000,
|
||||
"encoded": true,
|
||||
"dob": "1920-02-01",
|
||||
"gender": "M",
|
||||
"cambridge_address": "The Drones Club, London",
|
||||
"vacation_landline": "01632 960374",
|
||||
"kin1_name": "Barmy Fotheringay-Phipps ",
|
||||
"kin1_address": "The Drones Club, London",
|
||||
"kin1_phone": "01632 960620",
|
||||
"log": 20,
|
||||
"days": 3,
|
||||
"seasickness": "severe",
|
||||
"can_swim": true,
|
||||
"accepted_conditions": true,
|
||||
"checked_up_to_date": true,
|
||||
"checked_date": "2021-02-01 00:00:02"
|
||||
}},
|
||||
|
||||
{"model": "club.crewregister", "pk": 10001, "fields":
|
||||
{"member": 9001,
|
||||
"encoded": true,
|
||||
"dob": "1920-02-01",
|
||||
"gender": "F",
|
||||
"cambridge_address": "Totleigh Towers",
|
||||
"vacation_landline": "01223 496 0551",
|
||||
"kin1_name": "Sir Watkyn Bassett",
|
||||
"kin1_address": "Totleigh Towers. (All this detail is because there a minimum set of fields to be completed.)",
|
||||
"kin1_phone": "01223 496 0551",
|
||||
"log": 450,
|
||||
"days": 45,
|
||||
"seasickness": "mild",
|
||||
"can_swim": true,
|
||||
"accepted_conditions": true,
|
||||
"checked_up_to_date": true,
|
||||
"checked_date": "2021-02-01 00:00:02"
|
||||
}},
|
||||
|
||||
|
||||
{"model": "club.event", "pk": 20000, "fields":
|
||||
{"name": "Spring in the Arctic",
|
||||
"slug": "spring-in-the-arctic",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9001, "shore_contact": 9002,
|
||||
"start_date": "2031-03-01 00:00:00",
|
||||
"end_date": "2031-03-03 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision1.png",
|
||||
"photos": [7000],
|
||||
"spaces": 10, "boats": [8001],
|
||||
"short_summary": "A wonderfully refreshing trip among the ice floes.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20000, "fields": {
|
||||
"event": 20000,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20001, "fields":
|
||||
{"name": "Spring in the Med",
|
||||
"slug": "spring-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-03-11 00:00:00",
|
||||
"end_date": "2031-03-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [7000],
|
||||
"spaces": 8, "boats": [8001],
|
||||
"short_summary": "A joyful celebration of spring flowers in the Cylades.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20001, "fields": {
|
||||
"event": 20001,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20002, "fields":
|
||||
{"name": "Spring in the Solent",
|
||||
"slug": "spring-in-the-solent",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-03-21 00:00:00",
|
||||
"end_date": "2031-03-23 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision3.png",
|
||||
"photos": [7000],
|
||||
"spaces": 8, "boats": [8001],
|
||||
"short_summary": "A rainy and blustery wet week discovering how to do tidal calculations at night.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20002, "fields": {
|
||||
"event": 20002,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20003, "fields":
|
||||
{"name": "Early Summer in the Med",
|
||||
"slug": "early-summer-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-05-11 00:00:00",
|
||||
"end_date": "2031-06-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision1.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "Sheer hedonism in the Cylades.",
|
||||
"summary": "This is going to be the most amazing trip: a flotilla of joyfulness."}},
|
||||
{"model": "club.eventsettings", "pk": 20003, "fields": {
|
||||
"event": 20003,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20004, "fields":
|
||||
{"name": "Summer in the Med",
|
||||
"slug": "summer-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-06-11 00:00:00",
|
||||
"end_date": "2031-07-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "The Dodecanese is spectacularly beautiful at this time of year.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20004, "fields": {
|
||||
"event": 20004,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20005, "fields":
|
||||
{"name": "High Summer in the Med",
|
||||
"slug": "high-summer-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-07-11 00:00:00",
|
||||
"end_date": "2031-08-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision3.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "The Saronic Gulf is busy and packed at this time of year.",
|
||||
"summary": "This is going to be the most amazing trip. Party, party, party!"}},
|
||||
{"model": "club.eventsettings", "pk": 20005, "fields": {
|
||||
"event": 20005,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20006, "fields":
|
||||
{"name": "High Summer in the Irish Sea",
|
||||
"slug": "high-summer-in-the-irish",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-07-11 00:00:00",
|
||||
"end_date": "2031-08-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "The Irish Sea is is wonderful at this time of year.",
|
||||
"summary": "Welsh and Irush coasts, Manx beer."}},
|
||||
{"model": "club.eventsettings", "pk": 20006, "fields": {
|
||||
"event": 20006,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20007, "fields":
|
||||
{"name": "RYA First Aid course",
|
||||
"slug": "rya-first-aid-2019",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2019-03-10 00:00:00",
|
||||
"end_date": "2019-03-10 00:00:00",
|
||||
"added_date": "2019-03-10 00:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [],
|
||||
"spaces": 12, "boats": [],
|
||||
"short_summary": "A one-day RYA First Aid Course",
|
||||
"summary": "A First Aid certificate is a requirement for candidates for the RYA Yachtmaster Exams."}},
|
||||
|
||||
{"model": "club.eventsettings", "pk": 20006, "fields": {
|
||||
"event": 20006,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
|
||||
{"model": "club.participate", "pk": 30000, "fields":
|
||||
{"person": 9001,
|
||||
"event": 20000,
|
||||
"state": "confirmed",
|
||||
"date_added": "2021-02-01 12:00:00",
|
||||
"role": 1}},
|
||||
|
||||
{"model": "club.participate", "pk": 30001, "fields":
|
||||
{"person": 9000,
|
||||
"event": 20007,
|
||||
"state": "confirmed",
|
||||
"date_added": "2019-03-10 00:00:00",
|
||||
"paid": true,
|
||||
"role": 5}}
|
||||
]
|
||||
|
||||
@@ -1,52 +0,0 @@
[
{"model": "core.area", "pk": 25, "fields":
{"short_name": "1626 or 6 (borderline)", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 24, "fields":
{"short_name": "8a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 23, "fields":
{"short_name": "2b or 4 (unclear)", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 22, "fields":
{"short_name": "11", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 21, "fields":
{"short_name": "3", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 20, "fields":
{"short_name": "4", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 19, "fields":
{"short_name": "1b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 18, "fields":
{"short_name": "8b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 17, "fields":
{"short_name": "2d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 16, "fields":
{"short_name": "7", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 15, "fields":
{"short_name": "2b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 14, "fields":
{"short_name": "8c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 13, "fields":
{"short_name": "2c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 12, "fields":
{"short_name": "8d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 11, "fields":
{"short_name": "", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 10, "fields":
{"short_name": "5", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 9, "fields":
{"short_name": "6", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 8, "fields":
{"short_name": "2a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 7, "fields":
{"short_name": "1c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 6, "fields":
{"short_name": "1d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 5, "fields":
{"short_name": "1a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 4, "fields":
{"short_name": "9", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 3, "fields":
{"short_name": "10", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 2, "fields":
{"short_name": "1626", "name": null, "description": null, "super": null, "new_since_parsing": false, "non_public": false}},
{"model": "core.area", "pk": 1, "fields":
{"short_name": "1623", "name": null, "description": null, "super": null, "new_since_parsing": false, "non_public": false}}
]
@@ -1,40 +0,0 @@
[{"model": "core.cave", "pk": 43, "fields":
{"new_since_parsing": false, "non_public": false,
"official_name": "Schnellzughöhle",
"kataster_code": "6/t/S/W x",
"kataster_number": "115",
"unofficial_number": "40m",
"explorers": "CUCC 1980-1985",
"underground_description": "This is the main entrance through which the majority of the <a href=\"41.htm\">Stellerweghöhle</a> system was explored. See the separate <a href=\"41/115.htm#ent115\">full guidebook description</a> for details; just an overview is given here.</p><p>The entrance leads to a non-obvious way on to the head of the short <b>Bell Pitch</b>, from where very awkward going leads out to a bigger passage to reach <b>The Ramp</b>, a series of off-vertical pitches. The damper but technically easier <b>Inlet Pitches</b> drop to a Big Chamber, from where <b>Pete's Purgatory</b> starts, and leads in 800m of tortuous going to <b>The Confluence</b> and the larger streamway leading to the deepest point.</p><p>Better is the <b>Purgatory Bypass</b> which starts as dry fossil tubes, with a choice of routes to reach <b>Junction Chamber</b> where the <b>Big Rift</b> of <a href=\"41.htm\">Stellerweghöhle</a> enters. Opposite, the huge fossil tube of <b>Dartford Tunnel</b> makes for easy progress to the Confluence, about halfway down the system. The continuing main streamway is interrupted by a bypassable sump and numerous pitches before a low airspace duck at the end of an unpromising canal leads to the spectacular <b>Orgasm Chasm</b>. Careful rigging avoids the water in this 140m shaft, ending in muddy passage and another short drop to a deep and terminal sump. ",
"equipment": "",
"references": "",
"survey": "CUCC's parts surveyed to Grade 5 but not all drawn up - see <a href=\"41/survey.htm\">here</a>",
"kataster_status": "",
"underground_centre_line": "In dataset",
"notes": "The Austrian Kataster has adopted a very perverse way of numbering things. Their numbers are as follows:</p><ul> <li>115a Stellerweghöhle entrance 41a</li> <li>115b Stellerweghöhle entrance 41b</li> <li>115c Stellerweghöhle entrance 41c ( where ? )</li> <li>115d Schnellzughöhle entrance 115</li> <li>115e unnamed entrance 142</li></ul><p>", "length": "SMK system total 54000m", "depth": "from entrance; SMK system total 1032m", "extent": "SMK system total 2812m",
"survex_file": "smk-system.svx",
"description_file": "1623/115.htm",
"url": "1623/115.url",
"filename": "1623-115.html",
"area": [1, 8]}},

{"model": "core.cave", "pk": 350, "fields":
{"new_since_parsing": false, "non_public": false,
"official_name": "Seetrichter",
"kataster_code": "",
"kataster_number": "284",
"unofficial_number": "",
"explorers": "<p></p>",
"underground_description": "",
"equipment": "<p></p>",
"references": "<p>",
"survey": "<p></p>",
"kataster_status": "",
"underground_centre_line": "",
"notes": "A 25m long (22m deep) resurgence in Altausee. At the bottom, at a depth of 72m, there are large round blocks.", "length": "", "depth": "", "extent": "",
"survex_file": "",
"description_file": "",
"url": "1623/284/284.html",
"filename": "1623-284.html",
"area": [1, 11]}}
]
@@ -1,17 +0,0 @@
[{"model": "core.expedition", "pk": 44, "fields":
{"new_since_parsing": false, "non_public": false,
"year": "2019", "name": "CUCC expo 2019"}},

{"model": "core.personexpedition", "pk": 681, "fields":
{"new_since_parsing": false, "non_public": false,
"expedition": 44,
"person": 250, "slugfield": null, "is_guest": false
}},

{"model": "core.person", "pk": 250, "fields":
{"new_since_parsing": false, "non_public": false,
"first_name": "Michael",
"last_name": "Sargent",
"fullname": "Michael Sargent", "is_vfho": false, "mug_shot": null,
"blurb": "\n\n\n\n\n\n<p><img class=\"onleft\" src=\"/folk/i/mikey0.jpg\">\n<img class=\"onright\" src=\"/folk/i/mikey1.jpg\" height=\"400\"\nalt=\"\" />\n<b>Michael Sargent</b> CUCC<br />\nExpeditions 2014, 15, 16, 17, 18, 19.\n<p>The first second-generation expo caver in 2014, later members of this exclusive group were Dan Lenartowicz and Sarah Connolly.\n\n\n<img class=\"onleft\" src=\"/folk/i/michaelsargent.jpg\">\n<im\n\n<hr style=\"clear: both\" /><p class=\"caption\">Pre-expo (pre-student) photos from President's Invite (OUCC) \nand first abseiling instruction (Cambridge).</p>\n", "orderref": ""}}
]
@@ -1,58 +0,0 @@
This folder is used by manage.py to load fixtures, as are all the folders
called /fixtures/ in any Django app here.

e.g. loading a list of the fixture files which live in those /fixtures/ folders:
$ python manage.py loaddata cuyc_basic_data test_data_1 test_data_1.1 test_data_2
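Fixtures like these can also be regenerated from a populated database with Django's
built-in dumpdata command; the app/model labels and output filename below are
illustrative only, not a fixed recipe:

$ python manage.py dumpdata auth.user core.area --indent 2 > core/fixtures/example_data.json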

$ python manage.py help migrate
usage: manage.py migrate [-h] [--noinput] [--database DATABASE] [--fake]
                         [--fake-initial] [--plan] [--run-syncdb] [--version]
                         [-v {0,1,2,3}] [--settings SETTINGS]
                         [--pythonpath PYTHONPATH] [--traceback] [--no-color]
                         [--force-color]
                         [app_label] [migration_name]

Updates database schema. Manages both apps with migrations and those without.

positional arguments:
  app_label             App label of an application to synchronize the state.
  migration_name        Database state will be brought to the state after that
                        migration. Use the name "zero" to unapply all
                        migrations.

optional arguments:
  --noinput, --no-input
                        Tells Django to NOT prompt the user for input of any
                        kind.
  --database DATABASE   Nominates a database to synchronize. Defaults to the
                        "default" database.
  --fake                Mark migrations as run without actually running them.
  --fake-initial        Detect if tables already exist and fake-apply initial
                        migrations if so. Make sure that the current database
                        schema matches your initial migration before using
                        this flag. Django will only check for an existing
                        table name.
  --plan                Shows a list of the migration actions that will be
                        performed.
  --run-syncdb          Creates tables for apps without migrations.
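
e.g. typical invocations, following the help text above:

$ python manage.py migrate                 # apply all unapplied migrations
$ python manage.py migrate core            # bring just the core app up to date
$ python manage.py migrate core zero       # unapply all of core's migrations
$ python manage.py migrate --plan          # show the actions without performing them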

$ python manage.py help loaddata
usage: manage.py loaddata [-h] [--database DATABASE] [--app APP_LABEL]
                          [--ignorenonexistent] [-e EXCLUDE] [--format FORMAT]
                          [--version] [-v {0,1,2,3}] [--settings SETTINGS]
                          [--pythonpath PYTHONPATH] [--traceback] [--no-color]
                          [--force-color]
                          fixture [fixture ...]

Installs the named fixture(s) in the database.

optional arguments:
  --app APP_LABEL       Only look for fixtures in the specified app.
  --ignorenonexistent, -i
                        Ignores entries in the serialized data for fields that
                        do not currently exist on the model.

positional arguments:
  fixture               Fixture labels.

@@ -1,5 +0,0 @@
This file is uploaded by the integration test suite as part of the tests.

It, and any other with similar names, e.g. test_upload_GPev9qN.txt, can be safely deleted,
EXCEPT for the original copy which lives in troggle/core/fixtures/

@@ -1,5 +0,0 @@
This file is uploaded by the integration test suite as part of the tests.

It, and any other with similar names, e.g. test_upload_GPev9qN.txt, can be safely deleted,
EXCEPT for the original copy which lives in troggle/core/fixtures/

@@ -1,7 +0,0 @@
This file is uploaded by the integration test suite as part of the tests.

This has no suffix so it is pretending to be a Therion config file.

It, and any other with similar names, e.g. test_upload_GPev9qN.txt, can be safely deleted,
EXCEPT for the original copy which lives in troggle/core/fixtures/

@@ -1 +0,0 @@
[{"model": "core.logbookentry", "pk": 7, "fields": {"new_since_parsing": false, "non_public": false, "date": "2019-07-11", "expeditionday": null, "expedition": 44, "title": "base camp - CUCC Austria Expedition 2019 Blog", "cave_slug": "None", "place": "base camp", "text": "<a href=\"https://ukcaving.com/board/index.php?topic=25249.msg311372#msg311372\">blog post</a> </br></br> At the time of writing, I am sat in the Tatty Hut at Base Camp in Bad Aussee. It is day five of expo and a lot has happened. We discovered on Sunday (day one - 07/07/2019) that our Top Camp, Steinbrueken, was full of snow: Meanwhile, Base Camp preparations were well underway: The beer tent was being hoisted (above) and the new rope (thanks to UK Caving and Spanset for the sponsorship!) was being soaked, coiled, and cut into usable lengths ready for caving. </br></br> The next few days consisted of Expo members undertaking multitudes of carrying trips up to top camp, and a few hardy folk doing their best to fettle the bivvy for habitability. Tuesday (09/07/2019) night saw the first people sleeping in Steinbrueken. Mostly, they described the experience as \"chilly\" but one person went as far as to claim he had been warmer there than at Base Camp. </br></br> Also on Tuesday (09/07/2019), a new route was devised and cairned directly from Heimkommen Hoehle to the tourist path on the col. The idea being that Homecoming could be close enough to push from Base Camp rather than Steinbrueken. This came with the discovery that Fischgesicht Hoehle's entrance was under two to three metres of snow: </br></br> On Wednesday (10/07/2019), Expo split into three groups. The majority went to Steinbrueken to commence the final push towards habitability while some went to investigate Balkonhoehle. Three of us (Dickon Morris, Daniel Heins, and myself) went to Heimkommen to rig to the pushing front (the decision to concentrate on Heimkommen and Balkon having been made for us by the plateau). </br></br> That's all for now, </br></br> Tom Crossley (11/07/2019)", "slug": "base-camp-cucc-austria-expedition-2019-blog", "filename": null, "entry_type": "html"}}]
370
core/forms.py
370
core/forms.py
@@ -1,239 +1,179 @@

import django.forms as forms
from django.forms import ModelForm
from models import Cave, Person, PersonExpedition, LogbookEntry, QM, Expedition, Entrance, CaveAndEntrance
from django.forms.models import modelformset_factory
from django.forms import formset_factory  # used by getTripForm() below; missing from the original imports

from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance
from troggle.core.views.editor_helpers import HTMLarea

from django.core.exceptions import ValidationError

# from tinymce.widgets import TinyMCE
import re


"""These are all the class-based Forms used by troggle.
There are other, simpler, upload forms in view/uploads.py

class-based forms are quicker to set up (for Django experts) but
are more difficult to maintain by non-Django experts.
"""

todo = """
"""

from django.contrib.admin.widgets import AdminDateWidget
import string
from datetime import date
from tinymce.widgets import TinyMCE


class CaveForm(ModelForm):
    """Only those fields for which we want to override defaults are listed here;
    the other fields are present on the form, but use the default presentation style.
    """

    official_name = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
    underground_description = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
    )
    explorers = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
    )
    equipment = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
    )
    survey = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
    )
    # survey = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    kataster_status = forms.CharField(required=False)
    underground_centre_line = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
    )
    notes = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
    )
    references = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
    )
    description_file = forms.CharField(required=False, label="Path of top-level description file for this cave, when a separate file is used. Otherwise blank.", widget=forms.TextInput(attrs={"size": "45"}), help_text="")
    survex_file = forms.CharField(
        required=False, label="Survex file eg. caves-1623/000/000.svx", widget=forms.TextInput(attrs={"size": "45"})
    )
    #url = forms.CharField(required=True, label="URL eg. 1623/000/000 (no .html)", widget=forms.TextInput(attrs={"size": "45"}))
    length = forms.CharField(required=False, label="Length (m)")
    depth = forms.CharField(required=False, label="Depth (m)")
    extent = forms.CharField(required=False, label="Extent (m)")

    #cave_slug = forms.CharField()

    underground_description = forms.CharField(required = False, widget=forms.Textarea())
    explorers = forms.CharField(required = False, widget=forms.Textarea())
    equipment = forms.CharField(required = False, widget=forms.Textarea())
    survey = forms.CharField(required = False, widget=forms.Textarea())
    kataster_status = forms.CharField(required = False, widget=forms.Textarea())
    underground_centre_line = forms.CharField(required = False, widget=forms.Textarea())
    notes = forms.CharField(required = False, widget=forms.Textarea())
    references = forms.CharField(required = False, widget=forms.Textarea())
    url = forms.CharField(required = True)

    class Meta:
        model = Cave
        exclude = ("filename",)

    field_order = ['area', 'unofficial_number', 'kataster_number', 'official_name', 'underground_description', 'explorers', 'equipment', 'survey', 'kataster_status', 'underground_centre_line', 'notes', 'references', 'description_file', 'survex_file', 'url', 'length', 'depth', 'extent']

    def get_area(self):
        for a in self.cleaned_data["area"]:
            if a.kat_area():
                return a.kat_area()

    def clean_cave_slug(self):
        if self.cleaned_data["cave_slug"] == "":
            myArea = ""
            for a in self.cleaned_data["area"]:
                if a.kat_area():
                    myArea = a.kat_area()
            if self.data["kataster_number"]:
                cave_slug = f"{myArea}-{self.cleaned_data['kataster_number']}"
            else:
                cave_slug = f"{myArea}-{self.cleaned_data['unofficial_number']}"
        else:
            cave_slug = self.cleaned_data["cave_slug"]
        # Converting a PENDING cave to a real cave by saving this form
        print("EEE", cave_slug.replace("-PENDING-", "-"))
        return cave_slug.replace("-PENDING-", "-")

    # def clean_url(self):
    #     data = self.cleaned_data["url"]
    #     if not re.match("\d\d\d\d/.", data):
    #         raise ValidationError("URL must start with a four digit Kataster area.")
    #     return data

    def clean(self):
        cleaned_data = super(CaveForm, self).clean()
        if self.data.get("kataster_number") == "" and self.data.get("unofficial_number") == "":
            self._errors["unofficial_number"] = self.error_class(
                ["Either the kataster or unofficial number is required."]
            )
        # if self.cleaned_data.get("kataster_number") != "" and self.cleaned_data.get("official_name") == "":
        #     self._errors["official_name"] = self.error_class(["This field is required when there is a kataster number."])
        if cleaned_data.get("area") == []:
            self._errors["area"] = self.error_class(["This field is required."])
        if self.cleaned_data.get("kataster_number") == "" and self.cleaned_data.get("unofficial_number") == "":
            self._errors["unofficial_number"] = self.error_class(["Either the kataster or unofficial number is required."])
        if self.cleaned_data.get("kataster_number") != "" and self.cleaned_data.get("official_name") == "":
            self._errors["official_name"] = self.error_class(["This field is required when there is a kataster number."])
        if self.cleaned_data.get("area") == []:
            self._errors["area"] = self.error_class(["This field is required."])
        if cleaned_data.get("url") and cleaned_data.get("url").startswith("/"):
            self._errors["url"] = self.error_class(["This field cannot start with a /."])
        return cleaned_data
        if self.cleaned_data.get("url") and self.cleaned_data.get("url").startswith("/"):
            self._errors["url"] = self.error_class(["This field can not start with a /."])
        return self.cleaned_data


class VersionControlCommentForm(forms.Form):
    description_of_change = forms.CharField(required = True, widget=forms.Textarea())


class EntranceForm(ModelForm):
    """Only those fields for which we want to override defaults are listed here;
    the other fields are present on the form, but use the default presentation style.
    """

    name = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
    entrance_description = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    explorers = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
    # explorers = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    map_description = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    location_description = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    lastvisit = forms.CharField(
        required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Date of last visit, e.g. 2023-07-11"
    )
    approach = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    underground_description = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    photo = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    marking_comment = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    findability_description = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    other_description = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    bearings = forms.CharField(
        required=False,
        widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
    )
    tag_station = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={"size": "50"}), label="Tag station: Survex station id, e.g. 1623.p2023-xx-01"
    )
    exact_station = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={"size": "50"}), label="Exact station: Survex station id, e.g. 1623.2023-xx-01.2"
    )
    other_station = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={"size": "50"}), label="Other station: Survex station id, e.g. 1623.2023-xx-01.33"
    )
    northing = forms.CharField(
        required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Northing (UTM) - from survex data"
    )
    easting = forms.CharField(
        required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Easting (UTM) - from survex data"
    )
    lat_wgs84 = forms.CharField(
        required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Latitude (WGS84) - if no other location"
    )
    long_wgs84 = forms.CharField(
        required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Longitude (WGS84) - if no other location"
    )
    alt = forms.CharField(required=False, label="Altitude (m)")
    url = forms.CharField(required=False, label="URL [usually blank]", widget=forms.TextInput(attrs={"size": "45"}))

    field_order = ['name', 'entrance_description', 'explorers', 'map_description', 'location_description', 'lastvisit', 'approach', 'underground_description', 'photo', 'marking_comment', 'findability_description', 'other_description', 'bearings', 'tag_station', 'exact_station', 'other_station', 'northing', 'easting', 'lat_wgs84', 'long_wgs84', 'alt', 'url']

    #underground_description = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
    #explorers = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    #equipment = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    #survey = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    #kataster_status = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    #underground_centre_line = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    #notes = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    #references = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
    other_station = forms.CharField(required=False)  # Trying to change this to a single-line entry
    tag_station = forms.CharField(required=False)  # Trying to change this to a single-line entry
    exact_station = forms.CharField(required=False)  # Trying to change this to a single-line entry
    northing = forms.CharField(required=False)  # Trying to change this to a single-line entry
    easting = forms.CharField(required=False)  # Trying to change this to a single-line entry
    alt = forms.CharField(required=False)  # Trying to change this to a single-line entry

    class Meta:
        model = Entrance
        exclude = (
            "cached_primary_slug",
            "filename",
        )

        exclude = ("cached_primary_slug", "filename",)

    def clean(self):
        if self.cleaned_data.get("url").startswith("/"):
            self._errors["url"] = self.error_class(["This field cannot start with a /."])
            self._errors["url"] = self.error_class(["This field can not start with a /."])
        return self.cleaned_data


# This next line is called from the templates/edit_cave.html template.
# This is sufficient to create an entire entry for the cave fields automatically
# http://localhost:8000/cave/new/
# using django built-in Deep Magic. https://docs.djangoproject.com/en/dev/topics/forms/modelforms/
# for forms which map directly onto a Django Model
CaveAndEntranceFormSet = modelformset_factory(CaveAndEntrance, exclude=("cave",))
# This is used only in edit_entrance() in views/caves.py

CaveAndEntranceFormSet = modelformset_factory(CaveAndEntrance, exclude=('cave',))
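# A hedged usage sketch (illustrative, not part of this file): how a view such as
# edit_entrance() in views/caves.py might drive this formset. The names 'cave'
# and 'request' here are assumptions.
#
#     formset = CaveAndEntranceFormSet(
#         request.POST or None,
#         queryset=CaveAndEntrance.objects.filter(cave=cave))
#     if formset.is_valid():
#         for ce in formset.save(commit=False):  # returns unsaved CaveAndEntrance instances
#             ce.cave = cave  # 'cave' is excluded from the form, so set it here
#             ce.save()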

class EntranceLetterForm(ModelForm):
    """Form to link entrances to caves, along with an entrance number.

    Nb. The relationship between caves and entrances has historically been a
    many-to-many relationship, with entrances gaining new caves and letters
    when caves are joined.
    """

    class Meta:
        model = CaveAndEntrance
        exclude = ("cave", "entrance")
        exclude = ('cave', 'entrance')

#class PersonForm(ModelForm):
#    class Meta:
#        model = Person

#class LogbookEntryForm(ModelForm):
#    class Meta:
#        model = LogbookEntry#

# def wikiLinkHints(LogbookEntry=None):
#     """
#     This function returns html-formatted paragraphs for each of the
#     wikilink types that are related to this logbookentry. Each paragraph
#     contains a list of all of the related wikilinks.
#
#     Perhaps an admin javascript solution would be better.
#     """
#     res = ["Please use the following wikilinks, which are related to this logbook entry:"]
#
#     res.append(r'</p><p style="float: left;"><b>QMs found:</b>')
#     for QM in LogbookEntry.instance.QMs_found.all():
#         res.append(QM.wiki_link())

#     res.append(r'</p><p style="float: left;"><b>QMs ticked off:</b>')
#     for QM in LogbookEntry.instance.QMs_ticked_off.all():
#         res.append(QM.wiki_link())

#     res.append(r'</p><p style="float: left; "><b>People</b>')
#     for persontrip in LogbookEntry.instance.persontrip_set.all():
#         res.append(persontrip.wiki_link())
#     res.append(r'</p>')

#     return string.join(res, r'<br />')

# def __init__(self, *args, **kwargs):
#     super(LogbookEntryForm, self).__init__(*args, **kwargs)
#     self.fields['text'].help_text=self.wikiLinkHints()#

#class CaveForm(forms.Form):
#    html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))

def getTripForm(expedition):

    class TripForm(forms.Form):
        date = forms.DateField()
        title = forms.CharField(max_length=200)
        caves = [cave.reference() for cave in Cave.objects.all()]
        caves.sort()
        caves = ["-----"] + caves
        cave = forms.ChoiceField([(c, c) for c in caves], required=False)
        location = forms.CharField(max_length=200, required=False)
        caveOrLocation = forms.ChoiceField([("cave", "Cave"), ("location", "Location")], widget = forms.widgets.RadioSelect())
        html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))

        def clean(self):
            print(dir(self))
            if self.cleaned_data.get("caveOrLocation") == "cave" and not self.cleaned_data.get("cave"):
                self._errors["cave"] = self.error_class(["This field is required"])
            if self.cleaned_data.get("caveOrLocation") == "location" and not self.cleaned_data.get("location"):
                self._errors["location"] = self.error_class(["This field is required"])
            return self.cleaned_data

    class PersonTripForm(forms.Form):
        names = [get_name(pe) for pe in PersonExpedition.objects.filter(expedition = expedition)]
        names.sort()
        names = ["-----"] + names
        name = forms.ChoiceField([(n, n) for n in names])
        TU = forms.FloatField(required=False)
        author = forms.BooleanField(required=False, initial=False)  # forms.BooleanField takes 'initial', not 'default'

    PersonTripFormSet = formset_factory(PersonTripForm, extra=1)

    return PersonTripFormSet, TripForm

def get_name(pe):
    if pe.nickname:
        return pe.nickname
    else:
        return pe.person.first_name

#class UploadFileForm(forms.Form):
#    title = forms.CharField(max_length=50)
#    file = forms.FileField()
#    html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
#    lon_utm = forms.FloatField(required=False)
#    lat_utm = forms.FloatField(required=False)
#    slug = forms.CharField(max_length=50)
#    date = forms.DateField(required=False)

#    caves = [cave.slug for cave in Cave.objects.all()]
#    caves.sort()
#    caves = ["-----"] + caves
#    cave = forms.ChoiceField([(c, c) for c in caves], required=False)

#    entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
#    qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)

#    expeditions = [e.year for e in Expedition.objects.all()]
#    expeditions.sort()
#    expeditions = ["-----"] + expeditions
#    expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)

#    logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)

#    person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)

#    survey_point = forms.CharField()


# Nb. this full_clean() reads as if it belongs inside EntranceLetterForm above.
def full_clean(self):
    super(EntranceLetterForm, self).full_clean()
    try:
        self.instance.validate_unique()
    except forms.ValidationError as e:
        self._update_errors(e)
22
core/imagekit_specs.py
Normal file
22
core/imagekit_specs.py
Normal file
@@ -0,0 +1,22 @@

from imagekit.specs import ImageSpec
from imagekit import processors

class ResizeThumb(processors.Resize):
    width = 100
    crop = False

class ResizeDisplay(processors.Resize):
    width = 600

#class EnhanceThumb(processors.Adjustment):
#    contrast = 1.2
#    sharpness = 2

class Thumbnail(ImageSpec):
    access_as = 'thumbnail_image'
    pre_cache = True
    processors = [ResizeThumb]

class Display(ImageSpec):
    increment_count = True
    processors = [ResizeDisplay]
BIN
core/imageposcalc.ods
Normal file
BIN
core/imageposcalc.ods
Normal file
Binary file not shown.
@@ -1,36 +0,0 @@

from django.core.management.base import BaseCommand


"""this is now replaced by databaseReset.py

This is an example of how to create our own bespoke commandline
commands.

Good articles on creating Django commands at
https://www.mattlayman.com/understand-django/command-apps/
https://www.geeksforgeeks.org/custom-django-management-commands/

Django docs:
https://docs.djangoproject.com/en/dev/howto/custom-management-commands/

We might use this mechanism to replace/enhance the
folk, wallets and any cron jobs or other standalone scripts.
"""


class Command(BaseCommand):
    def add_arguments(self, parser):
        # Positional arguments
        parser.add_argument("posargs", nargs="+", type=int)

        # Named (optional) arguments
        parser.add_argument(
            "--delete",
            action="store_true",
            help="Removed as redundant - use databaseReset.py",
        )

    def handle(self, *args, **options):
        print(args)
        print(options)
182
core/management/commands/reset_db.py
Normal file
182
core/management/commands/reset_db.py
Normal file
@@ -0,0 +1,182 @@

from optparse import make_option

# stdlib and django imports used throughout this command
import os
import time

from django.contrib.auth.models import User
from django.core import management
from django.core.management.base import BaseCommand, CommandError
from django.db import connection
from django.urls import reverse

import settings
import troggle.flatpages.models
from troggle.core.models import Cave, Entrance

databasename = settings.DATABASES['default']['NAME']
expouser = settings.EXPOUSER
expouserpass = settings.EXPOUSERPASS
expouseremail = settings.EXPOUSER_EMAIL

class Command(BaseCommand):
    help = 'This is normal usage, clear database and reread everything'

    option_list = BaseCommand.option_list + (
        make_option('--foo',
            action='store_true',
            dest='foo',
            default=False,
            help='test'),
    )

    def add_arguments(self, parser):
        parser.add_argument(
            '--foo',
            action='store_true',
            dest='foo',
            help='Help text',
        )

    def handle(self, *args, **options):
        print(args)
        print(options)
        if "desc" in args:
            self.resetdesc()
        elif "scans" in args:
            self.import_surveyscans()
        elif "caves" in args:
            self.reload_db()
            self.make_dirs()
            self.pageredirects()
            self.import_caves()
        elif "people" in args:
            self.import_people()
        elif "QMs" in args:
            self.import_QMs()
        elif "tunnel" in args:
            self.import_tunnelfiles()
        elif "reset" in args:
            self.reset()
        elif "survex" in args:
            self.import_survex()
        elif "survexpos" in args:
            import parsers.survex
            parsers.survex.LoadPos()
        elif "logbooks" in args:
            self.import_logbooks()
        elif "autologbooks" in args:
            self.import_auto_logbooks()
        elif "dumplogbooks" in args:
            self.dumplogbooks()
        elif "writeCaves" in args:
            self.writeCaves()
        elif "foo" in args:
            self.stdout.write('Testing....')
        else:
            self.stdout.write("%s not recognised" % args)
            self.usage(options)

    # the helpers below take self: handle() invokes them as methods
    def reload_db(self):
        if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
            try:
                os.remove(databasename)
            except OSError:
                pass
        else:
            cursor = connection.cursor()
            cursor.execute("DROP DATABASE %s" % databasename)
            cursor.execute("CREATE DATABASE %s" % databasename)
            cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
            cursor.execute("USE %s" % databasename)
        management.call_command('migrate', interactive=False)
        # management.call_command('syncdb', interactive=False)
        user = User.objects.create_user(expouser, expouseremail, expouserpass)
        user.is_staff = True
        user.is_superuser = True
        user.save()

    def make_dirs(self):
        """Make directories that troggle requires"""
        # should also deal with permissions here.
        if not os.path.isdir(settings.PHOTOS_ROOT):
            os.mkdir(settings.PHOTOS_ROOT)

    def import_caves(self):
        import parsers.caves
        print("importing caves")
        parsers.caves.readcaves()

    def import_people(self):
        import parsers.people
        parsers.people.LoadPersonsExpos()

    def import_logbooks(self):
        # The below line was causing errors I didn't understand (it said LOGFILE
        # was a string), and I couldn't be bothered to figure out what was going
        # on, so I just catch the error with a try. - AC 21 May
        try:
            settings.LOGFILE.write('\nBegun importing logbooks at ' + time.asctime() + '\n' + '-' * 60)
        except:
            pass

        import parsers.logbooks
        parsers.logbooks.LoadLogbooks()

    def import_survex(self):
        import parsers.survex
        parsers.survex.LoadAllSurvexBlocks()
        parsers.survex.LoadPos()

    def import_QMs(self):
        import parsers.QMs

    def import_surveys(self):
        import parsers.surveys
        parsers.surveys.parseSurveys(logfile=settings.LOGFILE)

    def import_surveyscans(self):
        import parsers.surveys
        parsers.surveys.LoadListScans()

    def import_tunnelfiles(self):
        import parsers.surveys
        parsers.surveys.LoadTunnelFiles()

    def reset(self):
        """Wipe the troggle database and import everything from legacy data"""
        self.reload_db()
        self.make_dirs()
        self.pageredirects()
        self.import_caves()
        self.import_people()
        self.import_surveyscans()
        self.import_survex()
        self.import_logbooks()
        self.import_QMs()
        try:
            self.import_tunnelfiles()
        except:
            print("Tunnel files parser broken.")

        self.import_surveys()

    def pageredirects(self):
        for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
            f = troggle.flatpages.models.Redirect(originalURL=oldURL, newURL=newURL)
            f.save()

    def writeCaves(self):
        for cave in Cave.objects.all():
            cave.writeDataFile()
        for entrance in Entrance.objects.all():
            entrance.writeDataFile()

    def usage(self, parser):
        print("""Usage is 'manage.py reset_db <command>'
where command is:
   reset - this is normal usage, clear database and reread everything
   desc
   caves - read in the caves
   logbooks - read in the logbooks
   autologbooks
   dumplogbooks
   people
   QMs - read in the QM files
   resetend
   scans - read in the scanned survey notes
   survex - read in the survex files
   survexpos
   tunnel - read in the Tunnel files
   writeCaves
""")
|
||||
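For orientation, a minimal sketch of how this command is driven once it is registered
as the reset_db management command (its file path is not shown in this hunk).
django.core.management.call_command is the standard programmatic equivalent of
running `manage.py reset_db <command>` from a shell:

    from django.core import management

    # equivalent to: python manage.py reset_db caves
    management.call_command("reset_db", "caves")

    # equivalent to: python manage.py reset_db reset  (full wipe and re-import)
    management.call_command("reset_db", "reset")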
@@ -1,77 +0,0 @@

from django import http
from django.conf import settings
from django.urls import Resolver404, resolve

"""Non-standard django middleware is loaded from this file.
"""
todo = """SmartAppendSlashMiddleware(object) Not Working.
It needs re-writing to be compatible with Django v2.0 and later
"""


class SmartAppendSlashMiddleware(object):
    """
    "SmartAppendSlash" middleware for taking care of URL rewriting.

    This middleware appends a missing slash, if:
    * the SMART_APPEND_SLASH setting is True
    * the URL without the slash does not exist
    * the URL with an appended slash does exist.
    Otherwise it won't touch the URL.
    """

    def process_request(self, request):
        """Called for every url so return as quickly as possible.
        Append a slash if SMART_APPEND_SLASH is set, the resulting URL resolves and it doesn't without the /
        """
        if not settings.SMART_APPEND_SLASH:
            return None

        if request.path.endswith("/"):
            return None

        if request.path.endswith("_edit"):
            return None

        host = http.HttpRequest.get_host(request)
        old_url = [host, request.path]
        if _resolves(old_url[1]):
            return None

        # So: it does not resolve according to our criteria, i.e. _edit doesn't count
        new_url = old_url[:]
        new_url[1] = new_url[1] + "/"
        if not _resolves(new_url[1]):
            return None
        else:
            if settings.DEBUG and request.method == "POST":
                # replace this exception with a redirect to an error page
                raise RuntimeError(
                    f"You called this URL via POST, but the URL doesn't end in a slash and you have SMART_APPEND_SLASH set. Django can't redirect to the slash URL while maintaining POST data. Change your form to point to {new_url[0]}{new_url[1]} (note the trailing slash), or set SMART_APPEND_SLASH=False in your Django settings."
                )
            if new_url != old_url:
                # Redirect
                if new_url[0]:
                    newurl = f"{request.is_secure() and 'https' or 'http'}://{new_url[0]}{new_url[1]}"
                else:
                    newurl = new_url[1]
                if request.GET:
                    newurl += "?" + request.GET.urlencode()
                return http.HttpResponsePermanentRedirect(newurl)

        return None


def _resolves(url):
    try:
        # If the URL does not resolve, the function raises a Resolver404 exception (a subclass of Http404)
        resolve(url)
        # this will ALWAYS be resolved by expopages because it will produce pagenotfound if not the thing asked for
        # so handle this in expopages, not in middleware
        return True
    except Resolver404:
        return False
    except:
        print(url)
        raise
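A sketch of the intended behaviour, assuming SMART_APPEND_SLASH = True in settings and
hypothetical URLs where /caves/ resolves but /caves does not:

    # GET /caves      -> 301 permanent redirect to /caves/
    # GET /caves/     -> untouched, resolved as usual
    # GET /cave_edit  -> untouched, the "_edit" suffix is exempted above
    # POST /caves     -> RuntimeError when DEBUG is set, since the POST body
    #                    would be lost in a redirect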
863
core/models.py
Normal file
@@ -0,0 +1,863 @@
import urllib, urlparse, string, os, datetime, logging, re
import subprocess
from django.forms import ModelForm
from django.db import models
from django.contrib import admin
from django.core.files.storage import FileSystemStorage
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db.models import Min, Max
from django.conf import settings
from decimal import Decimal, getcontext
from django.core.urlresolvers import reverse
from imagekit.models import ProcessedImageField #ImageModel
from django.template import Context, loader
import settings  # NB: rebinds 'settings' from django.conf.settings to troggle's own settings module
getcontext().prec = 2  # use 2 significant figures for decimal calculations

from troggle.core.models_survex import *


def get_related_by_wikilinks(wiki_text):
    found = re.findall(settings.QM_PATTERN, wiki_text)
    res = []
    for wikilink in found:
        qmdict = {'urlroot': settings.URL_ROOT, 'cave': wikilink[2], 'year': wikilink[1], 'number': wikilink[3]}
        try:
            cave_slugs = CaveSlug.objects.filter(cave__kataster_number=qmdict['cave'])
            qm = QM.objects.get(found_by__cave_slug__in=cave_slugs,
                                found_by__date__year=qmdict['year'],
                                number=qmdict['number'])
            res.append(qm)
        except QM.DoesNotExist:
            print('fail on ' + str(wikilink))

    return res
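# Worked example (hedged: the exact regex lives in settings.QM_PATTERN and is not
# shown in this hunk; the group order is inferred from the qmdict mapping above).
# Given text containing a QM wikilink of the shape produced by QM.wiki_link()
# further down in this file, e.g.
#     "Continues too tight, see [[QM:204-1999-12]]"
# get_related_by_wikilinks() would look up the 1999 QM number 12 in the cave
# with kataster number 204 and return it in the result list, printing a
# 'fail on' line instead if no such QM exists.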

try:
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
except:
    subprocess.call(settings.FIX_PERMISSIONS)
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')

# This class is for adding fields and methods which all of our models will have.
class TroggleModel(models.Model):
    new_since_parsing = models.BooleanField(default=False, editable=False)
    non_public = models.BooleanField(default=False)

    def object_name(self):
        return self._meta.object_name

    def get_admin_url(self):
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True

class TroggleImageModel(models.Model):
    new_since_parsing = models.BooleanField(default=False, editable=False)

    def object_name(self):
        return self._meta.object_name

    def get_admin_url(self):
        return urlparse.urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True

#
# single Expedition, usually seen by year
#
class Expedition(TroggleModel):
    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.year

    class Meta:
        ordering = ('-year',)
        get_latest_by = 'year'

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))

    # construction function. should be moved out
    def get_expedition_day(self, date):
        expeditiondays = self.expeditionday_set.filter(date=date)
        if expeditiondays:
            assert len(expeditiondays) == 1
            return expeditiondays[0]
        res = ExpeditionDay(expedition=self, date=date)
        res.save()
        return res

    def day_min(self):
        res = self.expeditionday_set.all()
        return res and res[0] or None

    def day_max(self):
        res = self.expeditionday_set.all()
        return res and res[len(res) - 1] or None


class ExpeditionDay(TroggleModel):
    expedition = models.ForeignKey("Expedition")
    date = models.DateField()

    class Meta:
        ordering = ('date',)

    def GetPersonTrip(self, personexpedition):
        personexpeditions = self.persontrip_set.filter(expeditionday=self)
        return personexpeditions and personexpeditions[0] or None


#
# single Person, can go on many years
#
class Person(TroggleModel):
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    is_vfho = models.BooleanField(help_text="VFHO is the Verein für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
    mug_shot = models.CharField(max_length=100, blank=True, null=True)
    blurb = models.TextField(blank=True, null=True)

    #href = models.CharField(max_length=200)
    orderref = models.CharField(max_length=200)  # for alphabetic ordering

    # the below have been removed and made methods. I'm not sure what the b in bisnotable stands for. - AC 16 Feb
    #notability = models.FloatField() # for listing the top 20 people
    #bisnotable = models.BooleanField(default=False)
    user = models.OneToOneField(User, null=True, blank=True)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('person', kwargs={'first_name': self.first_name, 'last_name': self.last_name}))

    class Meta:
        verbose_name_plural = "People"
        ordering = ('orderref',)  # "Wookey" makes this too complex for: ('last_name', 'first_name')

    def __unicode__(self):
        if self.last_name:
            return "%s %s" % (self.first_name, self.last_name)
        return self.first_name

    def notability(self):
        notability = Decimal(0)
        for personexpedition in self.personexpedition_set.all():
            if not personexpedition.is_guest:
                notability += Decimal(1) / (2012 - int(personexpedition.expedition.year))
        return notability

    def bisnotable(self):
        return self.notability() > Decimal(1) / Decimal(3)
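    # Worked example of the notability() weighting above (illustrative only):
    # a non-guest member of the 2008 and 2010 expeditions scores
    #     1/(2012-2008) + 1/(2012-2010) = 0.25 + 0.5 = 0.75
    # which exceeds the 1/3 threshold, so bisnotable() returns True.
    # Guest years contribute nothing, and recent years weigh more heavily.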

    def surveyedleglength(self):
        return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])

    def first(self):
        return self.personexpedition_set.order_by('-expedition')[0]

    def last(self):
        return self.personexpedition_set.order_by('expedition')[0]

    #def Sethref(self):
    #    if self.last_name:
    #        self.href = self.first_name.lower() + "_" + self.last_name.lower()
    #        self.orderref = self.last_name + " " + self.first_name
    #    else:
    #        self.href = self.first_name.lower()
    #        self.orderref = self.first_name
    #    self.notability = 0.0 # set temporarily


#
# Person's attendance at one Expo
#
class PersonExpedition(TroggleModel):
    expedition = models.ForeignKey(Expedition)
    person = models.ForeignKey(Person)
    slugfield = models.SlugField(max_length=50, blank=True, null=True)

    is_guest = models.BooleanField(default=False)
    COMMITTEE_CHOICES = (
        ('leader', 'Expo leader'),
        ('medical', 'Expo medical officer'),
        ('treasurer', 'Expo treasurer'),
        ('sponsorship', 'Expo sponsorship coordinator'),
        ('research', 'Expo research coordinator'),
    )
    expo_committee_position = models.CharField(blank=True, null=True, choices=COMMITTEE_CHOICES, max_length=200)
    nickname = models.CharField(max_length=100, blank=True, null=True)

    def GetPersonroles(self):
        res = []
        for personrole in self.personrole_set.order_by('survexblock'):
            if res and res[-1]['survexpath'] == personrole.survexblock.survexpath:
                res[-1]['roles'] += ", " + str(personrole.role)
            else:
                res.append({'date': personrole.survexblock.date, 'survexpath': personrole.survexblock.survexpath, 'roles': str(personrole.role)})
        return res

    class Meta:
        ordering = ('-expedition',)
        #order_with_respect_to = 'expedition'

    def __unicode__(self):
        return "%s: (%s)" % (self.person, self.expedition)

    # why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
    def name(self):
        if self.nickname:
            return "%s (%s) %s" % (self.person.first_name, self.nickname, self.person.last_name)
        if self.person.last_name:
            return "%s %s" % (self.person.first_name, self.person.last_name)
        return self.person.first_name

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition', kwargs={'first_name': self.person.first_name, 'last_name': self.person.last_name, 'year': self.expedition.year}))

    def surveyedleglength(self):
        survexblocks = [personrole.survexblock for personrole in self.personrole_set.all()]
        return sum([survexblock.totalleglength for survexblock in set(survexblocks)])

    # would prefer to return actual person trips so we could link to first and last ones
    def day_min(self):
        res = self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))
        return res["day_min"]

    def day_max(self):
        res = self.persontrip_set.all().aggregate(day_max=Max("expeditionday__date"))
        return res["day_max"]

#
# Single parsed entry from Logbook
#
class LogbookEntry(TroggleModel):
    date = models.DateField()  # MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)  # MJG wants to KILL THIS (redundant information)
    expedition = models.ForeignKey(Expedition, blank=True, null=True)  # yes this is double-
    #author = models.ForeignKey(PersonExpedition,blank=True,null=True) # the person who writes it up doesn't have to have been on the trip.
    # Re: the above - so this field should be "typist" or something, not "author". - AC 15 jun 09
    # MJG wants to KILL THIS, as it is typically redundant with PersonTrip.is_logbook_entry_author; in the
    # rare case it was not redundant and of actual interest, it could be added to the text.
    title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
    cave_slug = models.SlugField(max_length=50)
    place = models.CharField(max_length=100, blank=True, null=True, help_text="Only use this if you haven't chosen a cave")
    text = models.TextField()
    slug = models.SlugField(max_length=50)
    filename = models.CharField(max_length=200, null=True)

    class Meta:
        verbose_name_plural = "Logbook Entries"
        # several PersonTrips point in to this object
        ordering = ('-date',)

    def __getattribute__(self, item):
        if item == "cave":  # Allow a logbookentry's cave to be directly accessed despite not having a proper foreignkey
            return CaveSlug.objects.get(slug=self.cave_slug).cave
        return super(LogbookEntry, self).__getattribute__(item)
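    # Illustrative consequence of the __getattribute__ hook above (the slug value
    # is hypothetical):
    #     lbe = LogbookEntry.objects.get(slug="2009-07-21a")
    #     lbe.cave   # no 'cave' field exists; this resolves via CaveSlug
    #                # using lbe.cave_slug, at the cost of an extra query
    #                # (and raises CaveSlug.DoesNotExist if the slug is absent)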

    def __init__(self, *args, **kwargs):
        if "cave" in kwargs.keys():
            if kwargs["cave"] is not None:
                kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
            kwargs.pop("cave")
        return super(LogbookEntry, self).__init__(*args, **kwargs)

    def isLogbookEntry(self):  # Function used in templates
        return True

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('logbookentry', kwargs={'date': self.date, 'slug': self.slug}))

    def __unicode__(self):
        return "%s: (%s)" % (self.date, self.title)

    def get_next_by_id(self):
        LogbookEntry.objects.get(id=self.id + 1)

    def get_previous_by_id(self):
        LogbookEntry.objects.get(id=self.id - 1)

    def new_QM_number(self):
        """Returns the next unused QM number for this entry's cave, or None if no cave is set."""
        if self.cave:
            nextQMnumber = self.cave.new_QM_number(self.date.year)
        else:
            return None
        return nextQMnumber

    def new_QM_found_link(self):
        """Produces a link to a new QM with the next number filled in and this LogbookEntry set as 'found by' """
        return settings.URL_ROOT + r'/admin/core/qm/add/?' + r'found_by=' + str(self.pk) + '&number=' + str(self.new_QM_number())

    def DayIndex(self):
        return list(self.expeditionday.logbookentry_set.all()).index(self)

#
# Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
#
class PersonTrip(TroggleModel):
    personexpedition = models.ForeignKey("PersonExpedition", null=True)

    #expeditionday = models.ForeignKey("ExpeditionDay") # MJG wants to KILL THIS (redundant information)
    #date = models.DateField() # MJG wants to KILL THIS (redundant information)
    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry)
    is_logbook_entry_author = models.BooleanField(default=False)

    # sequencing by person (difficult to solve locally)
    #persontrip_next = models.ForeignKey('PersonTrip', related_name='pnext', blank=True,null=True) # MJG wants to KILL THIS (and use function persontrip_next_auto)
    #persontrip_prev = models.ForeignKey('PersonTrip', related_name='pprev', blank=True,null=True) # MJG wants to KILL THIS (and use function persontrip_prev_auto)

    def persontrip_next(self):
        futurePTs = PersonTrip.objects.filter(personexpedition=self.personexpedition, logbook_entry__date__gt=self.logbook_entry.date).order_by('logbook_entry__date').all()
        if len(futurePTs) > 0:
            return futurePTs[0]
        else:
            return None

    def persontrip_prev(self):
        pastPTs = PersonTrip.objects.filter(personexpedition=self.personexpedition, logbook_entry__date__lt=self.logbook_entry.date).order_by('-logbook_entry__date').all()
        if len(pastPTs) > 0:
            return pastPTs[0]
        else:
            return None

    def place(self):
        return self.logbook_entry.cave and self.logbook_entry.cave or self.logbook_entry.place

    def __unicode__(self):
        return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)


##########################################
# move following classes into models_cave
##########################################

class Area(TroggleModel):
    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    parent = models.ForeignKey('Area', blank=True, null=True)

    def __unicode__(self):
        if self.parent:
            return unicode(self.parent) + u" - " + unicode(self.short_name)
        else:
            return unicode(self.short_name)

    def kat_area(self):
        if self.short_name in ["1623", "1626"]:
            return self.short_name
        elif self.parent:
            return self.parent.kat_area()

class CaveAndEntrance(models.Model):
    cave = models.ForeignKey('Cave')
    entrance = models.ForeignKey('Entrance')
    entrance_letter = models.CharField(max_length=20, blank=True, null=True)

    def __unicode__(self):
        return unicode(self.cave) + unicode(self.entrance_letter)

class CaveSlug(models.Model):
    cave = models.ForeignKey('Cave')
    slug = models.SlugField(max_length=50, unique=True)
    primary = models.BooleanField(default=False)


class Cave(TroggleModel):
    # too much here perhaps,
    official_name = models.CharField(max_length=160)
    area = models.ManyToManyField(Area, blank=True, null=True)
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
    explorers = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    equipment = models.TextField(blank=True, null=True)
    references = models.TextField(blank=True, null=True)
    survey = models.TextField(blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    underground_centre_line = models.TextField(blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    depth = models.CharField(max_length=100, blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    url = models.CharField(max_length=200, blank=True, null=True)
    filename = models.CharField(max_length=200)

    #class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need separating

    #href = models.CharField(max_length=100)

    class Meta:
        ordering = ('kataster_code', 'unofficial_number')

    def hassurvey(self):
        if not self.underground_centre_line:
            return "No"
        if (self.survey.find("<img") > -1 or self.survey.find("<a") > -1 or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        if not self.underground_centre_line:
            return "No"
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        else:
            slugs = self.caveslug_set.filter()
            if slugs:
                return slugs[0].slug

    def ours(self):
        return bool(re.search(r'CUCC', self.explorers))

    def reference(self):
        if self.kataster_number:
            return "%s-%s" % (self.kat_area(), self.kataster_number)
        else:
            return "%s-%s" % (self.kat_area(), self.unofficial_number)

    def get_absolute_url(self):
        if self.kataster_number:
            href = self.kataster_number
        elif self.unofficial_number:
            href = self.unofficial_number
        else:
            href = self.official_name.lower()
        #return settings.URL_ROOT + '/cave/' + href + '/'
        return urlparse.urljoin(settings.URL_ROOT, reverse('cave', kwargs={'cave_id': href, }))

    def __unicode__(self, sep=u": "):
        return unicode(self.slug())

    def get_QMs(self):
        return QM.objects.filter(found_by__cave_slug__in=self.caveslug_set.all())

    def new_QM_number(self, year=datetime.date.today().year):
        """Given a cave and the current year, returns the next QM number."""
        try:
            res = QM.objects.filter(found_by__date__year=year, found_by__cave=self).order_by('-number')[0]
        except IndexError:
            return 1
        return res.number + 1

    def kat_area(self):
        for a in self.area.all():
            if a.kat_area():
                return a.kat_area()

    def entrances(self):
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            rs.append(e.entrance_letter)
        rs.sort()
        prevR = None
        n = 0
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                prevR = r
                n = 0
                res += r
        if n == 0:
            res += ", " + prevR
        else:
            res += "–" + prevR
        return res
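    # Illustrative trace of entrancelist() above: for entrance letters A, B, C the
    # consecutive run is collapsed and the final flush closes the range, giving
    # "A–C". Note that the non-consecutive branch never advances prevR, so mixed
    # sequences such as A, C do not come out as intended.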

    def writeDataFile(self):
        try:
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/cave.xml')
        c = Context({'cave': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

    def getArea(self):
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.parent in areas:
                try:
                    lowestareas.remove(area.parent)
                except:
                    pass
        return lowestareas[0]

def getCaveByReference(reference):
    areaname, code = reference.split("-", 1)
    print(areaname, code)
    area = Area.objects.get(short_name=areaname)
    print(area)
    foundCaves = list(Cave.objects.filter(area=area, kataster_number=code).all()) + list(Cave.objects.filter(area=area, unofficial_number=code).all())
    print(list(foundCaves))
    assert len(foundCaves) == 1
    return foundCaves[0]

class OtherCaveName(TroggleModel):
    name = models.CharField(max_length=160)
    cave = models.ForeignKey(Cave)

    def __unicode__(self):
        return unicode(self.name)

class EntranceSlug(models.Model):
    entrance = models.ForeignKey('Entrance')
    slug = models.SlugField(max_length=50, unique=True)
    primary = models.BooleanField(default=False)

class Entrance(TroggleModel):
    name = models.CharField(max_length=100, blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    map_description = models.TextField(blank=True, null=True)
    location_description = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    MARKING_CHOICES = (
        ('P', 'Paint'),
        ('P?', 'Paint (?)'),
        ('T', 'Tag'),
        ('T?', 'Tag (?)'),
        ('R', 'Needs Retag'),
        ('S', 'Spit'),
        ('S?', 'Spit (?)'),
        ('U', 'Unmarked'),
        ('?', 'Unknown'))
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True, null=True)
    FINDABLE_CHOICES = (
        ('?', 'To be confirmed ...'),
        ('S', 'Coordinates'),
        ('L', 'Lost'),
        ('R', 'Refindable'))
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True, null=True)
    alt = models.TextField(blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    url = models.CharField(max_length=200, blank=True, null=True)
    filename = models.CharField(max_length=200)
    cached_primary_slug = models.CharField(max_length=200, blank=True, null=True)

    def __unicode__(self):
        return unicode(self.slug())

    def exact_location(self):
        return SurvexStation.objects.lookup(self.exact_station)

    def other_location(self):
        return SurvexStation.objects.lookup(self.other_station)

    def find_location(self):
        r = {'': 'To be entered ',
             '?': 'To be confirmed:',
             'S': '',
             'L': 'Lost:',
             'R': 'Refindable:'}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Tag Station not in dataset" % self.tag_station
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Exact Station not in dataset" % self.exact_station
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
            except:
                return r + "%s Other Station not in dataset" % self.other_station
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        if self.photo:
            if (self.photo.find("<img") > -1 or self.photo.find("<a") > -1 or self.photo.find("<IMG") > -1 or self.photo.find("<A") > -1):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return SurvexStation.objects.lookup(self.tag_station)

    def needs_surface_work(self):
        return self.findability != "S" or not self.has_photo or self.marking != "T"

    def get_absolute_url(self):
        # NB: get_ancestors(), get_root() and title are not defined on Entrance in this file;
        # this looks copied from an older subcave model.
        ancestor_titles = '/'.join([subcave.title for subcave in self.get_ancestors()])
        if ancestor_titles:
            res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
        else:
            res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def slug(self):
        if not self.cached_primary_slug:
            primarySlugs = self.entranceslug_set.filter(primary=True)
            if primarySlugs:
                self.cached_primary_slug = primarySlugs[0].slug
                self.save()
            else:
                slugs = self.entranceslug_set.filter()
                if slugs:
                    self.cached_primary_slug = slugs[0].slug
                    self.save()
        return self.cached_primary_slug

    def writeDataFile(self):
        try:
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/entrance.xml')
        c = Context({'entrance': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

class CaveDescription(TroggleModel):
    short_name = models.CharField(max_length=50, unique=True)
    long_name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True, null=True)
    linked_entrances = models.ManyToManyField("Entrance", blank=True, null=True)
    linked_qms = models.ManyToManyField("QM", blank=True, null=True)

    def __unicode__(self):
        if self.long_name:
            return unicode(self.long_name)
        else:
            return unicode(self.short_name)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

    def save(self):
        """
        Overridden save method which stores wikilinks in text as links in database.
        """
        super(CaveDescription, self).save()
        qm_list = get_related_by_wikilinks(self.description)
        for qm in qm_list:
            self.linked_qms.add(qm)
        super(CaveDescription, self).save()

class NewSubCave(TroggleModel):
    name = models.CharField(max_length=200, unique=True)

    def __unicode__(self):
        return unicode(self.name)

class QM(TroggleModel):
    # based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    # "Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"
    found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found', blank=True, null=True)
    ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off', null=True, blank=True)
    #cave = models.ForeignKey(Cave)
    #expedition = models.ForeignKey(Expedition)

    number = models.IntegerField(help_text="this is the sequential number in the year", )
    GRADE_CHOICES = (
        ('A', 'A: Large obvious lead'),
        ('B', 'B: Average lead'),
        ('C', 'C: Tight unpromising lead'),
        ('D', 'D: Dig'),
        ('X', 'X: Unclimbable aven')
    )
    grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
    location_description = models.TextField(blank=True)
    # should be a foreignkey to surveystation
    nearest_station_description = models.CharField(max_length=400, null=True, blank=True)
    nearest_station = models.CharField(max_length=200, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    completion_description = models.TextField(blank=True, null=True)
    comment = models.TextField(blank=True, null=True)

    def __unicode__(self):
        return u"%s %s" % (self.code(), self.grade)

    def code(self):
        return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)

    def get_absolute_url(self):
        #return settings.URL_ROOT + '/cave/' + self.found_by.cave.kataster_number + '/' + str(self.found_by.date.year) + '-' + '%02d' % self.number
        return urlparse.urljoin(settings.URL_ROOT, reverse('qm', kwargs={'cave_id': self.found_by.cave.kataster_number, 'year': self.found_by.date.year, 'qm_id': self.number, 'grade': self.grade}))

    def get_next_by_id(self):
        return QM.objects.get(id=self.id + 1)

    def get_previous_by_id(self):
        return QM.objects.get(id=self.id - 1)

    def wiki_link(self):
        return u"%s%s%s" % ('[[QM:', self.code(), ']]')

photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)

class DPhoto(TroggleImageModel):
    caption = models.CharField(max_length=1000, blank=True, null=True)
    contains_logbookentry = models.ForeignKey(LogbookEntry, blank=True, null=True)
    contains_person = models.ManyToManyField(Person, blank=True, null=True)
    file = models.ImageField(storage=photoFileStorage, upload_to='.', )
    is_mugshot = models.BooleanField(default=False)
    contains_cave = models.ForeignKey(Cave, blank=True, null=True)
    contains_entrance = models.ForeignKey(Entrance, related_name="photo_file", blank=True, null=True)
    #nearest_survey_point = models.ForeignKey(SurveyStation, blank=True, null=True)
    nearest_QM = models.ForeignKey(QM, blank=True, null=True)
    lon_utm = models.FloatField(blank=True, null=True)
    lat_utm = models.FloatField(blank=True, null=True)

    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    def __unicode__(self):
        return self.caption

scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)

def get_scan_path(instance, filename):
    year = instance.survey.expedition.year
    #print("WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number, instance.survey.wallet_letter)
    number = str(instance.survey.wallet_number)
    if str(instance.survey.wallet_letter) != "None":
        number = str(instance.survey.wallet_letter) + number  # two-string formatting because the convention is 2009#01 or 2009#X01
    return os.path.join('./', year, year + r'#' + number, str(instance.contents) + str(instance.number_in_wallet) + r'.jpg')
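# Worked example of the wallet path convention in get_scan_path() (values are
# hypothetical): a 'notes' scan that is image 3 in wallet 13 of the 2009
# expedition, with wallet letter X, is stored at
#     ./2009/2009#X13/notes3.jpg
# With no wallet letter the directory segment is just 2009#13.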

class ScannedImage(TroggleImageModel):
    file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
    scanned_by = models.ForeignKey(Person, blank=True, null=True)
    scanned_on = models.DateField(null=True)
    survey = models.ForeignKey('Survey')
    contents = models.CharField(max_length=20, choices=(('notes', 'notes'), ('plan', 'plan_sketch'), ('elevation', 'elevation_sketch')))
    number_in_wallet = models.IntegerField(null=True)
    lon_utm = models.FloatField(blank=True, null=True)
    lat_utm = models.FloatField(blank=True, null=True)

    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    # This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is to write
    # a custom file storage backend which calls urlencode on the name for making file.url but not file.path.
    def correctURL(self):
        return string.replace(self.file.url, r'#', r'%23')

    def __unicode__(self):
        return get_scan_path(self, '')

class Survey(TroggleModel):
    expedition = models.ForeignKey('Expedition')  # REDUNDANT (logbook_entry)
    wallet_number = models.IntegerField(blank=True, null=True)
    wallet_letter = models.CharField(max_length=1, blank=True, null=True)
    comments = models.TextField(blank=True, null=True)
    location = models.CharField(max_length=400, blank=True, null=True)  # REDUNDANT
    subcave = models.ForeignKey('NewSubCave', blank=True, null=True)
    #notes_scan = models.ForeignKey('ScannedImage', related_name='notes_scan', blank=True, null=True)  # Replaced by contents field of ScannedImage model
    survex_block = models.OneToOneField('SurvexBlock', blank=True, null=True)
    logbook_entry = models.ForeignKey('LogbookEntry')
    centreline_printed_on = models.DateField(blank=True, null=True)
    centreline_printed_by = models.ForeignKey('Person', related_name='centreline_printed_by', blank=True, null=True)
    #sketch_scan = models.ForeignKey(ScannedImage, blank=True, null=True)  # Replaced by contents field of ScannedImage model
    tunnel_file = models.FileField(upload_to='surveyXMLfiles', blank=True, null=True)
    tunnel_main_sketch = models.ForeignKey('Survey', blank=True, null=True)
    integrated_into_main_sketch_on = models.DateField(blank=True, null=True)
    integrated_into_main_sketch_by = models.ForeignKey('Person', related_name='integrated_into_main_sketch_by', blank=True, null=True)
    rendered_image = models.ImageField(upload_to='renderedSurveys', blank=True, null=True)

    def __unicode__(self):
        return self.expedition.year + "#" + "%02d" % int(self.wallet_number)

    def notes(self):
        return self.scannedimage_set.filter(contents='notes')

    def plans(self):
        return self.scannedimage_set.filter(contents='plan')

    def elevations(self):
        return self.scannedimage_set.filter(contents='elevation')
@@ -1,693 +0,0 @@
import os
import re
from collections import defaultdict
from datetime import datetime, timezone
from pathlib import Path

from django.db import models
from django.template import loader

import settings
from troggle.core.models.logbooks import QM
from troggle.core.models.survex import SurvexStation
from troggle.core.models.troggle import DataIssue, TroggleModel
from troggle.core.utils import TROG, writetrogglefile

# Use the TROG global object to cache the cave lookup list. No good for multi-user..
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

Gcavelookup = None
Gcave_count = None

"""The model declarations for Areas, Caves and Entrances
"""

todo = """
- Find out why we have separate CaveSlug objects and why these are not just a single
  field on the Model. Do we ever need more than one slug per cave or entrance?
  Surely that would break everything??

- Can we rewrite things to eliminate the CaveSlug objects? Surely foreign keys work fine?!

- Why do we have CaveAndEntrance objects? Surely entrance_letter belongs on the Entrance object?

- move the aliases list from the code and put into an editable file

- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
"""


class Area(TroggleModel):
    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    super = models.ForeignKey("Area", blank=True, null=True, on_delete=models.SET_NULL)

    def __str__(self):
        if self.super:
            return str(self.super) + " - " + str(self.short_name)
        else:
            return str(self.short_name)

    def kat_area(self):
        if self.short_name in ["1623", "1626", "1624", "1627"]:
            return self.short_name
        elif self.super:
            return self.super.kat_area()


class CaveAndEntrance(models.Model):
    """This class is ONLY used to create a FormSet for editing the cave and all its
    entrances in one form.
    CASCADE means that if the cave or the entrance is deleted, then this CaveAndEntrance
    is deleted too.
    """
    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    entrance_letter = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        unique_together = [["cave", "entrance"], ["cave", "entrance_letter"]]
        ordering = ["entrance_letter"]

    def __str__(self):
        return str(self.cave) + str(self.entrance_letter)

# class CaveSlug(models.Model):
# moved to models/logbooks.py to avoid cyclic import problem

class Cave(TroggleModel):
    # too much here perhaps,
    area = models.ManyToManyField(Area, blank=False)
    depth = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
    equipment = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    filename = models.CharField(max_length=200)
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    official_name = models.CharField(max_length=160)
    references = models.TextField(blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)  # should be a foreign key
    survey = models.TextField(blank=True, null=True)
    underground_centre_line = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True, unique=True)

    # class Meta:
    #     unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need separating

    # href = models.CharField(max_length=100)

    class Meta:
        ordering = ("kataster_code", "unofficial_number")

    def hassurvey(self):
        """This is almost certainly a fossil - needs checking..."""
        if not self.underground_centre_line:
            return "No"
        if (
            self.survey.find("<img") > -1
            or self.survey.find("<a") > -1
            or self.survey.find("<IMG") > -1
            or self.survey.find("<A") > -1
        ):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        if not self.underground_centre_line:
            return "No"
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        else:
            slugs = self.caveslug_set.filter()
            if slugs:
                return slugs[0].slug

    def ours(self):
        return bool(re.search(r"CUCC", self.explorers))

    def number(self):
        if self.kataster_number:
            return self.kataster_number
        else:
            return self.unofficial_number

    def reference(self):
        return f"{self.kat_area()}-{self.number()}"

    def get_absolute_url(self):
        # the branches below are legacy and now do nothing: the URL comes from self.url
        if self.kataster_number:
            pass
        elif self.unofficial_number:
            pass
        else:
            self.official_name.lower()
        return Path(settings.URL_ROOT) / self.url  # not good Django style.. NEEDS actual URL

    def url_parent(self):
        return self.url.rsplit("/", 1)[0]

    def __str__(self, sep=": "):
        return str(self.slug())

    def get_open_QMs(self):
        """Searches for all open QMs that reference this cave."""
        # qms = self.qm_set.all().order_by('expoyear', 'block__date')
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )  # a QuerySet, see https://docs.djangoproject.com/en/dev/ref/models/querysets/#order-by
        qmsopen = qms.filter(ticked=False)
        return qmsopen  # a QuerySet

    def get_ticked_QMs(self):
        """Searches for all ticked-off QMs that reference this cave."""
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )
        qmticked = qms.filter(ticked=True)
        return qmticked  # a QuerySet

    def get_QMs(self):
        qms = self.get_open_QMs() | self.get_ticked_QMs()  # QuerySet union operation
        return qms  # a QuerySet

    def kat_area(self):
        try:
            for a in self.area.all():
                if a.kat_area():
                    return a.kat_area()
        except:
            return ""

    def entrances(self):
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entrance_letter:
                rs.append(e.entrance_letter)
        rs.sort()
        prevR = ""
        n = 0
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                prevR = r
                n = 0
                res += r
        if n == 0:
            if res:
                res += ", " + prevR
        else:
            res += "–" + prevR
        return res

    def writeDataFile(self):
        filepath = os.path.join(settings.CAVEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        print(now)
        c = dict({"cave": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def file_output(self):
        filepath = Path(os.path.join(settings.CAVEDESCRIPTIONS, self.filename))

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def getArea(self):
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.super in areas:
                try:
                    lowestareas.remove(area.super)
                except:
                    pass
        return lowestareas[0]

class Entrance(TroggleModel):
    MARKING_CHOICES = (
        ("P", "Paint"),
        ("P?", "Paint (?)"),
        ("T", "Tag"),
        ("T?", "Tag (?)"),
        ("R", "Needs Retag"),
        ("S", "Spit"),
        ("S?", "Spit (?)"),
        ("U", "Unmarked"),
        ("?", "Unknown"),
    )
    FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))
    alt = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    filename = models.CharField(max_length=200)
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True, null=True)
    lastvisit = models.TextField(blank=True, null=True)
    lat_wgs84 = models.TextField(blank=True, null=True)
    location_description = models.TextField(blank=True, null=True)
    long_wgs84 = models.TextField(blank=True, null=True)
    map_description = models.TextField(blank=True, null=True)
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    slug = models.SlugField(max_length=50, unique=True, default="default_slug_id")
    tag_station = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True)

    class Meta:
        ordering = ["caveandentrance__entrance_letter"]

    def __str__(self):
        return str(self.slug)

    def single(self, station):
        try:
            single = SurvexStation.objects.get(name=station)
            return single
        except:
            stations = SurvexStation.objects.filter(name=station)
            if len(stations) > 1:
                print(f" # MULTIPLE stations found with same name '{station}' in Entrance {self}:")
                for s in stations:
                    print(f" # {s.id=} - {s.name} {s.latlong()}")  # .id is Django's internal field, not one of ours
                return stations[0]
            else:
                return None

    def exact_location(self):
        return self.single(self.exact_station)

    def other_location(self):
        return self.single(self.other_station)

    def find_location(self):
        r = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except:
                return r + f"{self.tag_station} Tag Station not in dataset"
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except:
                return r + f"{self.exact_station} Exact Station not in dataset"
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
            except:
                return r + f"{self.other_station} Other Station not in dataset"
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        if self.photo:
            if (
                self.photo.find("<img") > -1
                or self.photo.find("<a") > -1
                or self.photo.find("<IMG") > -1
                or self.photo.find("<A") > -1
            ):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return self.single(self.tag_station)

    def needs_surface_work(self):
        return self.findability != "S" or not self.has_photo or self.marking != "T"

    def get_absolute_url(self):
        res = "/".join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def cavelist(self):
        rs = []
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                rs.append(e.cave)
        return rs

    def get_file_path(self):
        return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)

    def file_output(self):
        filepath = Path(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename))

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        filepath = os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def url_parent(self):
        if self.url:
            return self.url.rsplit("/", 1)[0]
        else:
            cavelist = self.cavelist()
            if len(self.cavelist()) == 1:
                return cavelist[0].url_parent()
            else:
                return ""

    def latlong(self):
        station = None
        if self.other_station:
            try:
                station = SurvexStation.objects.get(name=self.other_station)
            except:
                pass
        if self.tag_station:
            try:
                station = SurvexStation.objects.get(name=self.tag_station)
            except:
                pass
        if self.exact_station:
            try:
                station = SurvexStation.objects.get(name=self.exact_station)
            except:
                pass
        if station:
            return station.latlong()


def GetCaveLookup():
    """A very relaxed way of finding probably the right cave given almost any string which might serve to identify it

    lookup function modelled on GetPersonExpeditionNameLookup
    repeated assignment each call, needs refactoring

    Used when parsing wallets contents.json file too in views/uploads.py

    Does NOT detect duplicates! Needs fixing.
    Needs to be a proper function that raises an exception if there is a duplicate.
    OR we could set it to return None if there are duplicates, and require the caller to
    fall back on doing the actual database query it wants rather than using this cache shortcut
    """
||||
|
||||
duplicates = {}
|
||||
|
||||
def checkcaveid(cave, id):
|
||||
global Gcavelookup
|
||||
if id not in Gcavelookup:
|
||||
Gcavelookup[id] = cave
|
||||
Gcave_count[id] += 1
|
||||
else:
|
||||
if cave == Gcavelookup[id]:
|
||||
pass # same id, same cave
|
||||
else: # same id but different cave
|
||||
# message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
|
||||
# print(message)
|
||||
# DataIssue.objects.create(parser="aliases", message=message)
|
||||
duplicates[id] = 1
|
||||
|
||||
global Gcavelookup
|
||||
if Gcavelookup:
|
||||
return Gcavelookup
|
||||
Gcavelookup = {"NONEPLACEHOLDER": None}
|
||||
global Gcave_count
|
||||
Gcave_count = defaultdict(int) # sets default value to int(0)
|
||||
|
||||
DataIssue.objects.filter(parser="aliases").delete()
|
||||
DataIssue.objects.filter(parser="aliases ok").delete()
|
||||
|
||||
for cave in Cave.objects.all():
|
||||
key = cave.official_name.lower()
|
||||
if key != "" and key != "unamed" and key != "unnamed":
|
||||
if Gcave_count[key] > 0:
|
||||
# message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
|
||||
# print(message)
|
||||
# DataIssue.objects.create(parser="aliases", message=message)
|
||||
duplicates[key] = 1
|
||||
else:
|
||||
Gcavelookup[key] = cave
|
||||
Gcave_count[key] += 1
|
||||
if cave.kataster_number:
|
||||
checkcaveid(cave, cave.kataster_number) # we do expect 1623/55 and 1626/55 to cause a warning message
|
||||
|
||||
# the rest of these are 'nice to have' but may validly already be set
|
||||
if cave.unofficial_number:
|
||||
unoffn = cave.unofficial_number.lower()
|
||||
checkcaveid(cave, unoffn)
|
||||
|
||||
if cave.filename:
|
||||
# this is the slug - usually.. but usually done as as f'{cave.area}-{cave.kataster_number}'
|
||||
fn = cave.filename.replace(".html", "").lower()
|
||||
checkcaveid(cave, fn)
|
||||
|
||||
if cave.slug():
|
||||
# also possibly done already
|
||||
slug = cave.slug().lower()
|
||||
checkcaveid(cave, slug)
|
||||
|
||||
# These might alse create more duplicate entries
|
||||
# Yes, this should be set in, and imported from, settings.py
|
||||
aliases = [
|
||||
("1987-02", "267"),
|
||||
("1990-01", "171"),
|
||||
("1990-02", "172"),
|
||||
("1990-03", "173"),
|
||||
("1990-04", "174"),
|
||||
("1990-05", "175"),
|
||||
("1990-06", "176"),
|
||||
("1990-07", "177"),
|
||||
("1990-08", "178"),
|
||||
("1990-09", "179"),
|
||||
("1990-10", "180"),
|
||||
("1990-11", "181"),
|
||||
("1990-12", "182"),
|
||||
("1990-13", "183"),
|
||||
("1990-14", "184"),
|
||||
("1990-18", "188"),
|
||||
("1990-adam", "225"),
|
||||
("1993-01", "200"),
|
||||
("1996-02", "224"),
|
||||
("1996-03", "223"),
|
||||
("1996-04", "222"),
|
||||
("1996wk2", "207"),
|
||||
("1996wk3", "208"),
|
||||
("1996wk5", "219"),
|
||||
("1996wk6", "218"),
|
||||
("1996wk8", "209"),
|
||||
("1996wk11", "268"),
|
||||
("96wk11", "268"),
|
||||
("1998-01", "201"),
|
||||
("1998-03", "210"),
|
||||
("1999-03", "204"),
|
||||
("1999-04", "230"),
|
||||
("1999-10", "162"),
|
||||
("1999-bo-01", "205"),
|
||||
("1999-ob-03", "226"),
|
||||
("1999-ob-04", "227"),
|
||||
("2000-01", "231"),
|
||||
("2000-03", "214"),
|
||||
("2000-04", "220"),
|
||||
("2000-05", "215"),
|
||||
("2000-06", "216"),
|
||||
("2000-07", "217"),
|
||||
("2000-09", "234"),
|
||||
("2000-aa-01", "250"),
|
||||
("2001-04", "239"),
|
||||
("2001-05", "243"),
|
||||
("2002-01", "249"),
|
||||
("2002-02", "234"),
|
||||
("2002-04", "242"),
|
||||
("2002-05", "294"),
|
||||
("2003-01", "256"),
|
||||
("2003-02", "248"),
|
||||
("2003-03", "247"),
|
||||
("2003-04", "241"),
|
||||
("2003-05", "246"),
|
||||
("2003-06", "161"),
|
||||
("2003-08", "240"),
|
||||
("2003-09", "245"),
|
||||
("2003-10", "244"),
|
||||
("2004-01", "269"),
|
||||
("2004-03", "270"),
|
||||
("2004-11", "251"),
|
||||
("2004-12", "161"),
|
||||
("2004-15", "253"),
|
||||
("2004-19", "254"),
|
||||
("2004-20", "255"),
|
||||
("2005-04", "204"),
|
||||
("2005-05", "264"),
|
||||
("2005-07", "257"),
|
||||
("2006-08", "285"),
|
||||
("2006-09", "298"),
|
||||
("2007-71", "271"),
|
||||
("2010-01", "263"),
|
||||
("2010-03", "293"),
|
||||
("2011-01", "292"),
|
||||
("2012-dd-05", "286"),
|
||||
("2012-ns-13", "292"),
|
||||
("2014-neo-01", "273"),
|
||||
("2014-sd-01", "274"),
|
||||
("2014-ms-14", "287"),
|
||||
("2015-mf-06", "288"),
|
||||
("2016-jb-01", "289"),
|
||||
("2017-pw-01", "277"),
|
||||
("2018-dm-07", "359"), # NB this is 1626
|
||||
("2017_cucc_24", "291"), # note _ not -
|
||||
("2017_cucc_23", "295"), # note _ not -
|
||||
("2017_cucc_28", "290"), # note _ not -
|
||||
("bs17", "283"),
|
||||
("1976/b11", "198"),
|
||||
("1976/b8", "197"),
|
||||
("1976/b9", "190"),
|
||||
("b11", "1976/b11"),
|
||||
("b8", "1976/b8"),
|
||||
("b9", "1976/b9"),
|
||||
("2011-01-bs30", "190"),
|
||||
("bs30", "190"),
|
||||
("2011-01", "190"),
|
||||
("quarriesd", "2002-08"),
|
||||
("2002-x11", "2005-08"),
|
||||
("2002-x12", "2005-07"),
|
||||
("2002-x13", "2005-06"),
|
||||
("2002-x14", "2005-05"),
|
||||
("kh", "161"),
|
||||
("161-kh", "161"),
|
||||
("204-steinBH", "204"),
|
||||
("stonebridge", "204"),
|
||||
("hauchhole", "234"),
|
||||
("hauch", "234"),
|
||||
("234-hauch", "234"),
|
||||
("tunnocks", "258"),
|
||||
("balcony", "264"),
|
||||
("balkon", "264"),
|
||||
("fgh", "290"),
|
||||
("gsh", "291"),
|
||||
("homecoming", "2018-dm-07"),
|
||||
("heimkommen", "2018-dm-07"),
|
||||
("Heimkehr", "2018-dm-07"),
|
||||
("99ob02", "1999-ob-02"),
|
||||
]
|
||||
|
||||
for i in aliases:
|
||||
if i[1] in Gcavelookup:
|
||||
if i[0] in Gcavelookup:
|
||||
# already set by a different method, but is it the same cave?
|
||||
if Gcavelookup[i[0]] == Gcavelookup[i[1]]:
|
||||
pass
|
||||
else:
|
||||
Gcave_count[i[0]] += 1
|
||||
Gcavelookup[i[0]] = Gcavelookup[i[1]]
|
||||
else:
|
||||
message = f" * Coding or cave existence mistake, cave for id '{i[1]}' does not exist. Expecting to set alias '{i[0]}' to it"
|
||||
# print(message)
|
||||
DataIssue.objects.create(parser="aliases", message=message)
|
||||
|
||||
addmore = {}
|
||||
for id in Gcavelookup:
|
||||
addmore[id.replace("-", "_")] = Gcavelookup[id]
|
||||
addmore[id.replace("_", "-")] = Gcavelookup[id]
|
||||
addmore[id.upper()] = Gcavelookup[id]
|
||||
Gcavelookup = {**addmore, **Gcavelookup}
|
||||
|
||||
addmore = {}
|
||||
|
||||
ldup = []
|
||||
for d in duplicates:
|
||||
Gcavelookup.pop(d)
|
||||
Gcave_count.pop(d)
|
||||
ldup.append(d)
|
||||
if ldup:
|
||||
message = f" - Ambiguous aliases removed: {ldup}"
|
||||
print(message)
|
||||
DataIssue.objects.create(parser="aliases ok", message=message)
|
||||
|
||||
for c in Gcave_count:
|
||||
if Gcave_count[c] > 1:
|
||||
message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{Gcavelookup[c]}' cave __str__:'{c}'"
|
||||
print(message)
|
||||
DataIssue.objects.create(parser="aliases", message=message)
|
||||
|
||||
return Gcavelookup
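
# Illustrative usage (ids invented): once built, the cache resolves many spellings of
# the same identifier to one Cave object, so e.g. Gcavelookup.get("2018-dm-07") and
# Gcavelookup.get("homecoming") should return the same cave when both aliases are present.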
@@ -1,226 +0,0 @@
from pathlib import Path
from urllib.parse import urljoin

from django.db import models
from django.urls import reverse

import settings
from troggle.core.models.troggle import Expedition, TroggleModel

"""The model declarations LogBookEntry, PersonLogEntry, QM
"""

todo = """
- Can we rewrite things to eliminate the CaveSlug objects? No.
  Surely foreign keys work fine?! No.

  Foreign keys do not allow for there being multiple ways to refer to a cave, e.g. 1623-1999-03 aka 1623-204.
  Having slugs allows for much more loose coupling to caves, which removes a lot of the need to reset the database, which interrupts work flow.
  It also means we do not have to be creating temporary cave objects in the database, where we do not have the underlying file in cave_data.

- To Do: move CaveSlug back to troggle.core.models
"""


class CaveSlug(models.Model):
    """Moved here to avoid nasty cyclic import error
    CASCADE means that if the Cave is deleted, this is too
    """

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    slug = models.SlugField(max_length=50, unique=True)
    primary = models.BooleanField(default=False)

    def __str__(self):
        return f"{self.slug}: {self.cave}"


class LogbookEntry(TroggleModel):
    """Single parsed entry from Logbook
    Gets deleted if the Expedition gets deleted"""

    date = (
        models.DateField()
    )  # MJG wants to turn this into a datetime so that multiple Logbook entries on the same day can be ordered
    expedition = models.ForeignKey(Expedition, blank=True, null=True, on_delete=models.CASCADE)  # yes this is double-
    title = models.CharField(max_length=200)
    cave_slug = models.SlugField(max_length=50, blank=True, null=True)
    place = models.CharField(
        max_length=100, blank=True, null=True, help_text="Only use this if you haven't chosen a cave"
    )
    text = models.TextField()
    slug = models.SlugField(max_length=50)
    time_underground = models.FloatField(null=True, help_text="In decimal hours")

    class Meta:
        verbose_name_plural = "Logbook Entries"
        # several PersonLogEntrys point in to this object
        ordering = ("-date",)

    def cave(self):  # Why didn't he just make this a foreign key to Cave ?
        c = CaveSlug.objects.get(slug=self.cave_slug, primary=True).cave
        return c

    def isLogbookEntry(self):  # Function used in templates
        return True

    def get_absolute_url(self):
        return urljoin(settings.URL_ROOT, reverse("logbookentry", kwargs={"date": self.date, "slug": self.slug}))

    def __str__(self):
        return f"{self.date}: {self.title}"

    def get_next_by_id(self):
        return LogbookEntry.objects.get(id=self.id + 1)  # 'return' was missing, so this always returned None

    def get_previous_by_id(self):
        return LogbookEntry.objects.get(id=self.id - 1)  # 'return' was missing, so this always returned None

    def DayIndex(self):
        """This is used to set different colours for the different trips on
        the calendar view of the expedition"""
        mx = 10
        todays = list(LogbookEntry.objects.filter(date=self.date))
        if self in todays:
            index = todays.index(self)
        else:
            print(f"DayIndex: Synchronization error in logbook entries. Restart server or do full reset. {self}")
            index = 0

        if index not in range(0, mx):
            print(f"DayIndex: More than {mx-1} LogbookEntry items on one day '{index}' {self}, restarting colour sequence.")
            index = index % mx
        return index
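
        # Illustrative behaviour (inferred from the code above): with mx = 10, the 11th
        # entry written up for one day gets index 10, which wraps back to colour slot 0
        # (10 % 10), so colours repeat rather than running out.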


class PersonLogEntry(TroggleModel):
    """Single Person going on a trip, which may or may not be written up.
    It could account for different T/U for people in the same logbook entry.

    CASCADE means that if the personexpedition or the logbookentry is deleted,
    then this PersonLogEntry is deleted too
    """

    personexpedition = models.ForeignKey("PersonExpedition", null=True, on_delete=models.CASCADE)
    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry, on_delete=models.CASCADE)
    is_logbook_entry_author = models.BooleanField(default=False)

    class Meta:
        ordering = ("-personexpedition",)
        # order_with_respect_to = 'personexpedition'

    def next_personlog(self):
        futurePTs = (
            PersonLogEntry.objects.filter(
                personexpedition=self.personexpedition, logbook_entry__date__gt=self.logbook_entry.date
            )
            .order_by("logbook_entry__date")
            .all()
        )
        if len(futurePTs) > 0:
            return futurePTs[0]
        else:
            return None

    def prev_personlog(self):
        pastPTs = (
            PersonLogEntry.objects.filter(
                personexpedition=self.personexpedition, logbook_entry__date__lt=self.logbook_entry.date
            )
            .order_by("-logbook_entry__date")
            .all()
        )
        if len(pastPTs) > 0:
            return pastPTs[0]
        else:
            return None

    def place(self):
        # cave() is a method, so the old 'x and x or y' idiom always picked the bound
        # method object; call it when a cave slug is set, else fall back to the free-text place
        return self.logbook_entry.cave() if self.logbook_entry.cave_slug else self.logbook_entry.place

    def __str__(self):
        return f"{self.personexpedition} ({self.logbook_entry.date})"


class QM(TroggleModel):
    """This is based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    "Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"

    All the stuff handling TICK QMs is INCOMPLETE
    """

    number = models.IntegerField(
        help_text="this is the sequential number in the year, only unique for CSV imports",
    )
    grade = models.CharField(max_length=1, blank=True, null=True, help_text="A/B/C/D/X")
    cave = models.ForeignKey("Cave", related_name="QMs", blank=True, null=True, on_delete=models.SET_NULL)
    block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL)  # only for QMs from survex files
    blockname = models.TextField(blank=True, null=True)  # NB truncated copy of survexblock name with last char added
    expoyear = models.CharField(max_length=4, blank=True, null=True)
    ticked = models.BooleanField(default=False)
    location_description = models.TextField(blank=True, null=True)
    completion_description = models.TextField(blank=True, null=True)
    completion_date = models.DateField(blank=True, null=True)
    nearest_station_name = models.CharField(max_length=200, blank=True, null=True)
    resolution_station_name = models.CharField(max_length=200, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    page_ref = models.TextField(blank=True, null=True)
    comment = models.TextField(blank=True, null=True)

    def __str__(self):
        return f"{self.code()}"

    def code(self):
        if self.cave:
            cavestr = str(self.cave.slug())[5:]
        else:
            cavestr = ""
        if self.expoyear:
            expoyearstr = str(self.expoyear)
        else:
            expoyearstr = str(self.cave.slug())[5:9]
        if self.blockname:
            blocknamestr = "-" + str(self.blockname)
        else:
            blocknamestr = ""
        return f"{cavestr}-{expoyearstr}-{self.number}{self.grade}{blocknamestr}"

    # def get_completion_url(self):
    #     """assumes html file named is in same folder as cave description file
    #     WRONG - needs rewriting!"""
    #     cd = None
    #     if self.completion_description:
    #         try:
    #             dir = Path(self.cave.url).parent
    #             cd = dir / self.completion_description
    #         except:
    #             cd = None
    #     return cd

    def newslug(self):
        qmslug = f"{str(self.cave)}-{self.expoyear}-{self.blockname}{self.number}{self.grade}"
        return qmslug

    def get_absolute_url(self):
        # This reverse resolution stuff is pure magic. Just change the regex in urls.py and everything changes to suit. Whacky.
        return urljoin(
            settings.URL_ROOT,
            reverse(
                "qm",
                kwargs={
                    "cave_id": self.cave.slug(),
                    "year": self.expoyear,
                    "blockname": self.blockname,
                    "qm_id": self.number,
                    "grade": self.grade,
                },
            ),
        )

    def get_next_by_id(self):  # called in template
        return QM.objects.get(id=self.id + 1)

    def get_previous_by_id(self):  # called in template
        return QM.objects.get(id=self.id - 1)
@@ -1,298 +0,0 @@
import math  # moved up from mid-file below; used by utmToLatLng()
import os
import re
from urllib.parse import urljoin
from pathlib import Path

from django.conf import settings
from django.db import models
from django.urls import reverse


# from troggle.core.models.troggle import DataIssue  # circular import. Hmm


class SurvexDirectory(models.Model):
    """This relates a Cave to the primary SurvexFile which is the 'head' of the survex tree for
    that cave. Surely this could just be a property of Cave ? No. Several subdirectories
    all relate to the same Cave
    """
    path = models.CharField(max_length=200)
    cave = models.ForeignKey("Cave", blank=True, null=True, on_delete=models.SET_NULL)
    primarysurvexfile = models.ForeignKey(
        "SurvexFile", related_name="primarysurvexfile", blank=True, null=True, on_delete=models.SET_NULL
    )
    # could also include files in directory but not referenced

    class Meta:
        ordering = ("id",)
        verbose_name_plural = "Survex directories"

    def contents(self):
        return "[SvxDir:" + str(self.path) + " | Primary svx:" + str(self.primarysurvexfile.path) + ".svx ]"

    def __str__(self):
        return "[SvxDir:" + str(self.path) + "]"


class SurvexFile(models.Model):
    path = models.CharField(max_length=200)
    survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True, on_delete=models.SET_NULL)
    cave = models.ForeignKey("Cave", blank=True, null=True, on_delete=models.SET_NULL)

    class Meta:
        ordering = ("id",)

    # Don't change from the default as that breaks troggle webpages and internal referencing!
    # def __str__(self):
    #     return "[SurvexFile:"+str(self.path) + "-" + str(self.survexdirectory) + "-" + str(self.cave)+"]"

    def exists(self):
        """This is only used within the Django templates"""
        fname = Path(settings.SURVEX_DATA, self.path + ".svx")
        return fname.is_file()

    def SetDirectory(self):
        dirpath = os.path.split(self.path)[0]
        # pointless search every time we import a survex file if we know there are no duplicates..
        # don't use this for initial import.
        survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
        if survexdirectorylist:
            self.survexdirectory = survexdirectorylist[0]
        else:
            survexdirectory = SurvexDirectory(path=dirpath, cave=self.cave, primarysurvexfile=self)
            survexdirectory.save()
            self.survexdirectory = survexdirectory
        self.save()

    # Don't change from the default as that breaks troggle webpages and internal referencing!
    # def __str__(self):
    #     return "[SurvexFile:"+str(self.path) + "-" + str(self.survexdirectory) + "-" + str(self.cave)+"]"

    def __str__(self):
        return self.path


class SurvexStationLookUpManager(models.Manager):
    """For what this does, see
    https://docs.djangoproject.com/en/dev/topics/db/managers/

    This changes the .objects manager to use a case-insensitive match name__iexact,
    so that SurvexStation.objects.lookup() works as a case-insensitive match.
    """
    def lookup(self, name):
        blocknames, sep, stationname = name.rpartition(".")
        return self.get(block=SurvexBlock.objects.lookup(blocknames), name__iexact=stationname)
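
        # Illustrative usage (names invented):
        #   SurvexStation.objects.lookup("204.trip1.3")
        # partitions into block path "204.trip1" and station name "3", matched case-insensitively.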


class SurvexStation(models.Model):
    name = models.CharField(max_length=100)
    # block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL)
    # block not used since 2020. survex station objects are only used for entrance locations and all taken from the .3d file
    objects = SurvexStationLookUpManager()  # overwrites SurvexStation.objects and enables lookup()
    x = models.FloatField(blank=True, null=True)
    y = models.FloatField(blank=True, null=True)
    z = models.FloatField(blank=True, null=True)

    # def path(self):
    #     r = self.name
    #     b = self.block
    #     while True:
    #         if b.name:
    #             r = b.name + "." + r
    #         if b.parent:
    #             b = b.parent
    #         else:
    #             return r

    class Meta:
        ordering = ("id",)

    def __str__(self):
        return self.name and str(self.name) or "no name"

    def latlong(self):
        return utmToLatLng(33, self.x, self.y, northernHemisphere=True)


def utmToLatLng(zone, easting, northing, northernHemisphere=True):
    if not northernHemisphere:
        northing = 10000000 - northing

    a = 6378137
    e = 0.081819191
    e1sq = 0.006739497
    k0 = 0.9996

    arc = northing / k0
    mu = arc / (a * (1 - math.pow(e, 2) / 4.0 - 3 * math.pow(e, 4) / 64.0 - 5 * math.pow(e, 6) / 256.0))

    ei = (1 - math.pow((1 - e * e), (1 / 2.0))) / (1 + math.pow((1 - e * e), (1 / 2.0)))

    ca = 3 * ei / 2 - 27 * math.pow(ei, 3) / 32.0

    cb = 21 * math.pow(ei, 2) / 16 - 55 * math.pow(ei, 4) / 32
    cc = 151 * math.pow(ei, 3) / 96
    cd = 1097 * math.pow(ei, 4) / 512
    phi1 = mu + ca * math.sin(2 * mu) + cb * math.sin(4 * mu) + cc * math.sin(6 * mu) + cd * math.sin(8 * mu)

    n0 = a / math.pow((1 - math.pow((e * math.sin(phi1)), 2)), (1 / 2.0))

    r0 = a * (1 - e * e) / math.pow((1 - math.pow((e * math.sin(phi1)), 2)), (3 / 2.0))
    fact1 = n0 * math.tan(phi1) / r0

    _a1 = 500000 - easting
    dd0 = _a1 / (n0 * k0)
    fact2 = dd0 * dd0 / 2

    t0 = math.pow(math.tan(phi1), 2)
    Q0 = e1sq * math.pow(math.cos(phi1), 2)
    fact3 = (5 + 3 * t0 + 10 * Q0 - 4 * Q0 * Q0 - 9 * e1sq) * math.pow(dd0, 4) / 24

    fact4 = (61 + 90 * t0 + 298 * Q0 + 45 * t0 * t0 - 252 * e1sq - 3 * Q0 * Q0) * math.pow(dd0, 6) / 720

    lof1 = _a1 / (n0 * k0)
    lof2 = (1 + 2 * t0 + Q0) * math.pow(dd0, 3) / 6.0
    lof3 = (5 - 2 * Q0 + 28 * t0 - 3 * math.pow(Q0, 2) + 8 * e1sq + 24 * math.pow(t0, 2)) * math.pow(dd0, 5) / 120
    _a2 = (lof1 - lof2 + lof3) / math.cos(phi1)
    _a3 = _a2 * 180 / math.pi

    latitude = 180 * (phi1 - fact1 * (fact2 + fact3 + fact4)) / math.pi

    if not northernHemisphere:
        latitude = -latitude

    longitude = ((zone > 0) and (6 * zone - 183.0) or 3.0) - _a3

    return (latitude, longitude)
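
# Illustrative check (numbers invented): a UTM zone 33N point near the expo area,
# roughly easting 410000, northing 5283000, should come back around lat 47.7 N, lon 13.8 E:
#   lat, lon = utmToLatLng(33, 410000, 5283000)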

#
# Single SurvexBlock
#
class SurvexBlockLookUpManager(models.Manager):
    """For what this does, see
    https://docs.djangoproject.com/en/dev/topics/db/managers/

    This adds a method to the .objects manager using a case-insensitive match name__iexact,
    so that SurvexBlock.objects.lookup() works as a case-insensitive match.
    This is used in lookup() in SurvexStationLookUpManager(),
    which is used in Entrance().other_location(), which is used in the Cave webpage
    """
    def lookup(self, name):
        if name == "":
            blocknames = []
        else:
            blocknames = name.split(".")
        block = SurvexBlock.objects.get(parent=None, survexfile__path=settings.SURVEX_TOPNAME)
        for blockname in blocknames:
            block = SurvexBlock.objects.get(parent=block, name__iexact=blockname)
        return block
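
        # Illustrative usage (block names invented): lookup("") returns the root block
        # of SURVEX_TOPNAME; lookup("caves-1623.204") walks root -> "caves-1623" -> "204".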


class SurvexBlock(models.Model):
    """One begin..end block within a survex file. The basic element of a survey trip.
    Multiple anonymous survex blocks are possible within the same survex file.
    Blocks can span several *included survex files though.
    """

    objects = SurvexBlockLookUpManager()  # overwrites SurvexBlock.objects and enables lookup()
    name = models.CharField(max_length=100)
    title = models.CharField(max_length=200)
    parent = models.ForeignKey("SurvexBlock", blank=True, null=True, on_delete=models.SET_NULL)

    date = models.DateField(blank=True, null=True)
    expedition = models.ForeignKey("Expedition", blank=True, null=True, on_delete=models.SET_NULL)
    # if the survexfile object is deleted, then all the survex blocks in it should be too,
    # though a block can span more than one file...
    survexfile = models.ForeignKey("SurvexFile", blank=True, null=True, on_delete=models.CASCADE)
    survexpath = models.CharField(max_length=200)  # the path for the survex stations

    scanswallet = models.ForeignKey(
        "Wallet", null=True, on_delete=models.SET_NULL
    )  # only ONE wallet per block. The most recent seen overwrites.. ugh.

    legsall = models.IntegerField(null=True)  # summary data for this block
    legslength = models.FloatField(null=True)

    class Meta:
        ordering = ("id",)

    # def __str__(self):
    #     return "[SurvexBlock:" + str(self.name) + "-path:" + str(self.survexpath) + "-cave:" + str(self.cave) + "]"

    def __str__(self):
        return self.name and str(self.name) or "no_name-#" + str(self.id)

    def isSurvexBlock(self):  # Function used in templates
        return True

    def DayIndex(self):
        """This is used to set different colours for the different trips on
        the calendar view of the expedition"""
        # print(f"SurvexBlock DayIndex {self.name} '{self.date}' {len(list(SurvexBlock.objects.filter(date=self.date)))} on this date")
        mx = 10
        todays = list(SurvexBlock.objects.filter(date=self.date))
        if self in todays:
            index = todays.index(self)
        else:
            print(f"DayIndex: Synchronization error in survex blocks. Restart server or do full reset. {self}")
            index = 0
        if index not in range(0, mx):
            print(f"DayIndex: More than {mx-1} SurvexBlock items on one day '{index}' {self}, restarting colour sequence.")
            index = index % mx
        # return list(self.survexblock_set.all()).index(self)
        return index


class SurvexPersonRole(models.Model):
    """The CASCADE means that if a SurvexBlock or a Person is deleted, then the SurvexPersonRole
    is deleted too
    """
    survexblock = models.ForeignKey("SurvexBlock", on_delete=models.CASCADE)
    # increasing levels of precision. Surely we only need survexblock and person now that we have no link to a logbook entry?
    personname = models.CharField(max_length=100)
    person = models.ForeignKey("Person", blank=True, null=True, on_delete=models.CASCADE)  # not needed
    personexpedition = models.ForeignKey("PersonExpedition", blank=True, null=True, on_delete=models.SET_NULL)

    def __str__(self):
        return str(self.personname) + " - " + str(self.survexblock)


class SingleScan(models.Model):
    """A single file holding an image. Could be raw notes, an elevation plot or whatever"""

    ffile = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    wallet = models.ForeignKey("Wallet", null=True, on_delete=models.SET_NULL)

    class Meta:
        ordering = ("name",)

    def get_absolute_url(self):
        return urljoin(
            settings.URL_ROOT,
            reverse("scansingle", kwargs={"path": re.sub("#", "%23", self.wallet.walletname), "file": self.name}),
        )

    def __str__(self):
        return "Scan Image: " + str(self.name) + " in " + str(self.wallet)


class DrawingFile(models.Model):
    """A file holding a Therion (several types) or a Tunnel drawing
    Most of the implied capabilities are not implemented yet"""

    dwgpath = models.CharField(max_length=200)
    dwgname = models.CharField(max_length=200)
    dwgwallets = models.ManyToManyField("Wallet")  # implicitly links via folders to scans to SVX files
    scans = models.ManyToManyField("SingleScan")  # implicitly links via scans to SVX files
    dwgcontains = models.ManyToManyField("DrawingFile")  # case when it's a frame type
    filesize = models.IntegerField(default=0)
    npaths = models.IntegerField(default=0)
    survexfiles = models.ManyToManyField("SurvexFile")  # direct link to SVX files - not populated yet

    class Meta:
        ordering = ("dwgpath",)

    def __str__(self):
        return "Drawing File: " + str(self.dwgname) + " (" + str(self.filesize) + " bytes)"
@@ -1,187 +0,0 @@
from decimal import Decimal, getcontext
from urllib.parse import urljoin

getcontext().prec = 2  # use 2 significant figures for decimal calculations

from django.db import models
from django.urls import reverse

import settings

"""This file declares TroggleModel which inherits from django.db.models.Model
All TroggleModel and models.Model subclasses inherit persistence in the django relational database. This is known as
the django Object Relational Mapping (ORM).
There are more subclasses defined in models/caves.py models/survex.py etc.
"""


class TroggleModel(models.Model):
    """This class is for adding fields and methods which all of our models will have."""

    new_since_parsing = models.BooleanField(default=False, editable=False)
    non_public = models.BooleanField(default=False)

    def object_name(self):
        return self._meta.object_name

    def get_admin_url(self):
        return urljoin(settings.URL_ROOT, "/admin/core/" + self.object_name().lower() + "/" + str(self.pk))

    class Meta:
        abstract = True


class DataIssue(TroggleModel):
    """When importing cave data any validation problems produce a message which is
    recorded as a DataIssue. The django admin system automatically produces a page listing
    these at /admin/core/dataissue/
    This is a use of the NOTIFICATION pattern:
    https://martinfowler.com/eaaDev/Notification.html

    We have replaced all assertions in the code with messages and local fix-ups or skips:
    https://martinfowler.com/articles/replaceThrowWithNotification.html

    See also the use of stash_data_issue() & store_data_issues() in parsers/survex.py which defer writing to the database until the end of the import.
    """

    date = models.DateTimeField(auto_now_add=True, blank=True)
    parser = models.CharField(max_length=50, blank=True, null=True)
    message = models.CharField(max_length=800, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True)  # link to offending object

    class Meta:
        ordering = ["date"]

    def __str__(self):
        return f"{self.parser} - {self.message}"

#
# single Expedition, usually seen by year
#
class Expedition(TroggleModel):
    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)
    logbookfile = models.CharField(max_length=100, blank=True, null=True)

    def __str__(self):
        return self.year

    class Meta:
        ordering = ("-year",)
        get_latest_by = "year"

    def get_absolute_url(self):
        return urljoin(settings.URL_ROOT, reverse("expedition", args=[self.year]))


# class ExpeditionDay(TroggleModel):
#     """Exists only on Expedition now. Removed links from logbookentry, personlogentry, survex stuff etc.
#     """
#     expedition = models.ForeignKey("Expedition",on_delete=models.CASCADE)
#     date = models.DateField()

#     class Meta:
#         ordering = ('date',)


class Person(TroggleModel):
    """single Person, can go on many years"""

    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    fullname = models.CharField(max_length=200)
    nickname = models.CharField(max_length=200)
    is_vfho = models.BooleanField(
        help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.",
        default=False,
    )
    mug_shot = models.CharField(max_length=100, blank=True, null=True)
    blurb = models.TextField(blank=True, null=True)
    orderref = models.CharField(max_length=200)  # for alphabetic ordering

    def get_absolute_url(self):
        return urljoin(
            settings.URL_ROOT, reverse("person", kwargs={"first_name": self.first_name, "last_name": self.last_name})
        )

    class Meta:
        verbose_name_plural = "People"
        ordering = ("orderref",)  # "Wookey" makes it too complex for: ('last_name', 'first_name')

    def __str__(self):
        if self.last_name:
            return f"{self.first_name} {self.last_name}"
        return self.first_name

    def notability(self):
        """This is actually recency: all recent cavers, weighted by number of expos"""
        notability = Decimal(0)
        max_expo_val = 0

        max_expo_year = Expedition.objects.all().aggregate(models.Max("year"))
        max_expo_val = int(max_expo_year["year__max"]) + 1

        for personexpedition in self.personexpedition_set.all():
            if not personexpedition.is_guest:
                notability += Decimal(1) / (max_expo_val - int(personexpedition.expedition.year))
        return notability
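
        # Worked example (years invented): if the most recent expo is 2023, max_expo_val is 2024;
        # a non-guest attendance in 2023 then adds 1/1 and one in 2019 adds 1/5, giving 1.2.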

    def bisnotable(self):
        """Boolean: is this person notable?"""
        return self.notability() > Decimal(1) / Decimal(3)

    def surveyedleglength(self):
        return sum([personexpedition.surveyedleglength() for personexpedition in self.personexpedition_set.all()])

    def first(self):
        return self.personexpedition_set.order_by("-expedition")[0]

    def last(self):
        return self.personexpedition_set.order_by("expedition")[0]

    # moved from personexpedition
    def name(self):
        if self.nickname:
            return f"{self.first_name} ({self.nickname}) {self.last_name}"
        if self.last_name:
            return f"{self.first_name} {self.last_name}"
        return self.first_name


class PersonExpedition(TroggleModel):
    """Person's attendance at one Expo
    CASCADE means that if an expedition or a person is deleted, the PersonExpedition
    is deleted too
    """

    expedition = models.ForeignKey(Expedition, on_delete=models.CASCADE)
    person = models.ForeignKey(Person, on_delete=models.CASCADE)
    slugfield = models.SlugField(max_length=50, blank=True, null=True)  # 2022 to be used in future

    is_guest = models.BooleanField(default=False)

    class Meta:
        ordering = ("-expedition",)
        # order_with_respect_to = 'expedition'

    def __str__(self):
        return f"{self.person}: ({self.expedition})"

    def get_absolute_url(self):
        return urljoin(
            settings.URL_ROOT,
            reverse(
                "personexpedition",
                kwargs={
                    "first_name": self.person.first_name,
                    "last_name": self.person.last_name,
                    "year": self.expedition.year,
                },
            ),
        )

    def surveyedleglength(self):
        """Survey length for this person on all survex trips on this expedition"""
        survexblocks = [personrole.survexblock for personrole in self.survexpersonrole_set.all()]
        return sum([survexblock.legslength for survexblock in set(survexblocks)])
@@ -1,339 +0,0 @@
import datetime
import json
import operator
import re
from functools import reduce
from pathlib import Path
from urllib.parse import urljoin

from django.conf import settings
from django.db import models
from django.urls import reverse

# from troggle.core.models.survex import SurvexBlock
# from troggle.core.models.troggle import DataIssue  # circular import. Hmm

YEAR_RANGE = (1975, 2050)

def make_valid_date(date):
    """Take whatever garbage some fool has typed in and try to make it into a valid ISO-format date"""
    datestr = date.replace(".", "-")
    try:
        samedate = datetime.date.fromisoformat(datestr)
        return samedate
    except ValueError:
        # Could be in std euro format e.g. 14/07/2023
        match = re.search(r'(\d{1,2})/(\d{1,2})/(\d{2,4})', datestr)
        if match:
            d = int(match.group(1))
            m = int(match.group(2))
            y = int(match.group(3))
            if y < 2000:
                y = y + 2000
            try:
                samedate = datetime.date(y, m, d)
                print(f"- - Warning, not in ISO format. '{datestr=}' but we coped: {samedate.isoformat()} ")
                return samedate
            except:
                print(f"! - Fail, tried to decompose date in dd/mm/yyyy format but failed: {datestr=} ")
                return None
        # probably a single digit day number or month number
        match = re.search(r'(\d{4})-(\d{1,2})-(\d{1,2})', datestr)
        if match:
            y = int(match.group(1))
            m = int(match.group(2))
            d = int(match.group(3))
            try:
                samedate = datetime.date(y, m, d)
                print(f"- - Warning, 1 digit only for month or day '{datestr=}' but we coped: {samedate.isoformat()} ")
                return samedate
            except:
                print(f"! - Fail, tried to decompose date in yyyy-mm-d or yyyy-m-dd format but failed: {datestr=} ")
                return None

        print(f"! - Failed to understand date, none of our tricks worked {datestr=} ")
        return None
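
# Illustrative behaviour (inputs invented): each of these returns datetime.date(2023, 7, 14):
#   make_valid_date("2023-07-14")   # already ISO
#   make_valid_date("2023.07.14")   # dots are swapped for dashes first
#   make_valid_date("14/07/2023")   # caught by the dd/mm/yyyy fallback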

class Wallet(models.Model):
    """We do not keep the JSON values in the database, we query them afresh each time,
    but we will change this when we need to do a Django query on e.g. personname
    """

    fpath = models.CharField(max_length=200)
    walletname = models.CharField(max_length=200)
    walletdate = models.DateField(blank=True, null=True)
    walletyear = models.DateField(blank=True, null=True)

    class Meta:
        ordering = ("walletname",)

    def get_absolute_url(self):
        return urljoin(settings.URL_ROOT, reverse("singlewallet", kwargs={"path": re.sub("#", "%23", self.walletname)}))

    def get_json(self):
        """Read the JSON file for the wallet and do stuff.
        Do it every time it is queried, to be sure the result is fresh.

        import DataIssue locally to prevent import cycle problem"""
        # jsonfile = Path(self.fpath, 'contents.json')

        # Get from git repo instead
        # :drawings: walletjson/2022/2022#01/contents.json
        # fpath = /mnt/d/EXPO/expofiles/surveyscans/1999/1999#02
        fp = Path(self.fpath)
        wname = fp.name
        wyear = fp.parent.name
        wurl = f"/walletedit/{self.walletname}".replace('#', ':')

        if len(wyear) != 4 or len(wname) != 6:
            # no contents.json for old-style wallets
            # but this ruined all the tick-list displays.. why?!
            # return None
            pass

        jsonfile = Path(settings.DRAWINGS_DATA, "walletjson") / wyear / wname / "contents.json"
        if not Path(jsonfile).is_file():
            message = f"! {jsonfile} is not a file {wyear=} {wname=} "
            from troggle.core.models.troggle import DataIssue
            print(message)
            DataIssue.objects.update_or_create(parser="wallets", message=message, url=wurl)
            return None
        else:
            with open(jsonfile) as json_f:
                try:
                    waldata = json.load(json_f)
                except:
                    message = f"! {str(self.walletname)} Failed to load {jsonfile} JSON file"
                    from troggle.core.models.troggle import DataIssue  # was missing on this path, causing a NameError
                    print(message)
                    DataIssue.objects.update_or_create(parser="wallets", message=message, url=wurl)
                    return None

        if waldata["date"]:
            thisdate = make_valid_date(waldata["date"])
            if thisdate:
                self.walletdate = thisdate
                self.save()
                waldata["date"] = thisdate.isoformat()
            else:
                message = f"! {str(self.walletname)} Date format not ISO {waldata['date']}. Failed to load from {jsonfile} JSON file"
                from troggle.core.models.troggle import DataIssue
                DataIssue.objects.update_or_create(parser="wallets", message=message, url=wurl)
        return waldata
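
    # Illustrative path (wallet name invented): for a wallet "2022#01" under fpath ".../2022/2022#01",
    # this reads DRAWINGS_DATA/walletjson/2022/2022#01/contents.json and reports errors
    # against wurl "/walletedit/2022:01" (the "#" is swapped for ":").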

    def year(self):
        """This gets the year syntactically without opening and reading the JSON"""
        if len(self.walletname) < 5:
            return None
        if self.walletname[4] != "#":
            return None
        year = int(self.walletname[0:4])
        ymin, ymax = YEAR_RANGE
        if year < ymin or year > ymax:
            return None
        else:
            self.walletyear = datetime.date(year, 1, 1)
            self.save()
            return str(year)
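
    # Illustrative behaviour: year() parses "2019#07" to "2019" purely from the wallet name,
    # and returns None for old-style wallet names that lack the "yyyy#nn" pattern.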

    # Yes this is horribly, horribly inefficient, esp. for a page that has date, people and cave in it
    def date(self):
        """Reads all the JSON data just to get the JSON date."""
        if self.walletdate:
            return self.walletdate
        if not (jsondata := self.get_json()):  # WALRUS
            return None

        datestr = jsondata["date"]
        if not datestr:
            return None
        else:
            datestr = datestr.replace(".", "-")
            try:
                samedate = datetime.date.fromisoformat(datestr)
                self.walletdate = samedate.isoformat()
            except:
                try:
                    samedate = datetime.date.fromisoformat(datestr[:10])
                    self.walletdate = samedate.isoformat()
                except:
                    samedate = None
            self.save()
            return self.walletdate

    def people(self):
        # read the JSON once, not twice
        if not (jsondata := self.get_json()):
            return None
        return jsondata["people"]

    def cave(self):
        # read the JSON once, not twice
        if not (jsondata := self.get_json()):
            return None
        return jsondata["cave"]

    def name(self):
        # read the JSON once, not twice
        if not (jsondata := self.get_json()):
            return None
        return jsondata["name"]

    def get_fnames(self):
        '''Filenames without the suffix, i.e. without the ".jpg"'''
        dirpath = Path(settings.SCANS_ROOT, self.fpath)  # does nowt as fpath is a rooted path already
        files = []
        if not self.fpath:
            files.append(f"Incorrect path to wallet contents: '{self.fpath}'")
            return files
        if not dirpath.is_dir():
            files.append(f"Incorrect path to wallet contents: '{self.fpath}'")
            return files
        else:
            try:
                for f in dirpath.iterdir():
                    if f.is_file():
                        files.append(Path(f.name).stem)
                    else:
                        files.append(f"-{Path(f.name).stem}-")
            except FileNotFoundError:
                files.append("FileNotFoundError")
                pass
        return files

    def fixsurvextick(self, tick):
        blocks = self.survexblock_set.all()
        # blocks = SurvexBlock.objects.filter(scanswallet = self)
        result = tick
        for b in blocks:
            if b.survexfile:  # if any exist in db, no check for validity or a real file. Refactor.
                result = "seagreen"  # slightly different shade of green
        return result

    def get_ticks(self):
        """Reads all the JSON data and sets the colour of the completion tick for each condition"""
        ticks = {}

        waldata = self.get_json()
        if not waldata:
            ticks["S"] = "darkgrey"
            ticks["C"] = "darkgrey"
            ticks["Q"] = "darkgrey"
            ticks["N"] = "darkgrey"
            ticks["P"] = "darkgrey"
            ticks["E"] = "darkgrey"
            ticks["T"] = "darkgrey"
            ticks["W"] = "darkgrey"
            return ticks

        # Initially, are there any required survex files present ?
        # Note that we can't set the survexblock here on the wallet as that info is only available while parsing the survex file
        survexok = "red"
        ticks["S"] = "red"
        if waldata["survex not required"]:
            survexok = "green"
            ticks["S"] = "green"
        else:
            if waldata["survex file"]:
                if not type(waldata["survex file"]) == list:  # a string also is a sequence type, so do it this way
                    waldata["survex file"] = [waldata["survex file"]]
                ngood = 0
                nbad = 0
                ticks["S"] = "purple"
                for sx in waldata["survex file"]:
                    # this logic appears in several places (inc. uploads.py). Refactor.
                    if sx != "":
                        if Path(sx).suffix.lower() != ".svx":
                            sx = sx + ".svx"
                        if (Path(settings.SURVEX_DATA) / sx).is_file():
                            ngood += 1
                        else:
                            nbad += 1
                if nbad == 0 and ngood >= 1:  # all valid
                    ticks["S"] = "green"
                elif nbad >= 1 and ngood >= 1:  # some valid, some invalid
                    ticks["S"] = "orange"
                elif nbad >= 1 and ngood == 0:  # all bad
                    ticks["S"] = "red"
                elif nbad == 0 and ngood == 0:  # list of blank strings
                    ticks["S"] = "red"
                else:
                    ticks["S"] = "fuchsia"  # have fun working out what this means

        # Cave Description
        if waldata["description written"]:
            ticks["C"] = "green"
        else:
            ticks["C"] = survexok
        # QMs
        if waldata["qms written"]:
            ticks["Q"] = "green"
        else:
            ticks["Q"] = survexok
        if not self.year():
            ticks["Q"] = "darkgrey"
        else:
            if int(self.year()) < 2015:
                ticks["Q"] = "lightgrey"

        if 'notes not required' not in waldata:
            waldata['notes not required'] = False

        # Notes, Plan, Elevation
        files = self.get_fnames()

        # Notes required
        notes_scanned = reduce(operator.or_, [f.startswith("note") for f in files], False)
        notes_scanned = reduce(operator.or_, [f.endswith("notes") for f in files], notes_scanned)
        notes_required = not (notes_scanned or waldata["notes not required"])
        if notes_required:
            ticks["N"] = "red"
        else:
            ticks["N"] = "green"
        # print(f"{self.walletname} {ticks['N'].upper()} {notes_scanned=} {notes_required=} {waldata['notes not required']=}")

        # Plan drawing required
        plan_scanned = reduce(operator.or_, [f.startswith("plan") for f in files], False)
        plan_scanned = reduce(operator.or_, [f.endswith("plan") for f in files], plan_scanned)
        plan_drawing_required = not (plan_scanned or waldata["plan drawn"] or waldata["plan not required"])
        if plan_drawing_required:
            ticks["P"] = "red"
        else:
            ticks["P"] = "green"

        # Elev drawing required
        elev_scanned = reduce(operator.or_, [f.startswith("elev") for f in files], False)
        elev_scanned = reduce(operator.or_, [f.endswith("elev") for f in files], elev_scanned)
        elev_scanned = reduce(operator.or_, [f.endswith("elevation") for f in files], elev_scanned)
        elev_drawing_required = not (elev_scanned or waldata["elev drawn"] or waldata["elev not required"])
        if elev_drawing_required:
            ticks["E"] = "red"
        else:
            ticks["E"] = "green"

        # if electronic, don't require P or E
        if waldata["electronic survey"]:
            # ticks["N"] = "green"
            ticks["P"] = "green"
            ticks["E"] = "green"
            # ticks["T"] = "green"  # No, this does not mean it has been 'tunneled' properly

        # Tunnel / Therion
        if elev_drawing_required or plan_drawing_required:
            ticks["T"] = "red"
        else:
            ticks["T"] = "green"

        # Website
        if waldata["website updated"]:
            ticks["W"] = "green"
        else:
            ticks["W"] = "red"

        return ticks
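
    # Summary of the tick colours set above: green = done or not required, red = outstanding,
    # orange = mixed valid/invalid survex files, purple/fuchsia = unexpected survex-file states,
    # darkgrey = no JSON data (or no wallet year for QMs), lightgrey = pre-2015 QMs.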

    def __str__(self):
        return "[" + str(self.walletname) + " (Wallet)]"
228
core/models_survex.py
Normal file
228
core/models_survex.py
Normal file
@@ -0,0 +1,228 @@
from django.db import models
from django.conf import settings
import os
import urlparse
import re
from django.core.urlresolvers import reverse


###########################################################
# These will allow browsing and editing of the survex data
###########################################################
# Needs to add:
#   Equates
#   reloading

class SurvexDirectory(models.Model):
    path = models.CharField(max_length=200)
    cave = models.ForeignKey('Cave', blank=True, null=True)
    primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True)
    # could also include files in directory but not referenced

    class Meta:
        ordering = ('id',)

class SurvexFile(models.Model):
    path = models.CharField(max_length=200)
    survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True)
    cave = models.ForeignKey('Cave', blank=True, null=True)

    class Meta:
        ordering = ('id',)

    def exists(self):
        fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
        return os.path.isfile(fname)

    def OpenFile(self):
        fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
        return open(fname)

    def SetDirectory(self):
        dirpath = os.path.split(self.path)[0]
        survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
        if survexdirectorylist:
            self.survexdirectory = survexdirectorylist[0]
        else:
            survexdirectory = SurvexDirectory(path=dirpath, cave=self.cave, primarysurvexfile=self)
            survexdirectory.save()
            self.survexdirectory = survexdirectory
        self.save()

class SurvexEquate(models.Model):
    cave = models.ForeignKey('Cave', blank=True, null=True)

class SurvexStationLookUpManager(models.Manager):
    def lookup(self, name):
        blocknames, sep, stationname = name.rpartition(".")
        return self.get(block = SurvexBlock.objects.lookup(blocknames),
                        name__iexact = stationname)

class SurvexStation(models.Model):
    name = models.CharField(max_length=100)
    block = models.ForeignKey('SurvexBlock')
    equate = models.ForeignKey('SurvexEquate', blank=True, null=True)
    objects = SurvexStationLookUpManager()
    x = models.FloatField(blank=True, null=True)
    y = models.FloatField(blank=True, null=True)
    z = models.FloatField(blank=True, null=True)

    def path(self):
        r = self.name
        b = self.block
        while True:
            if b.name:
                r = b.name + "." + r
            if b.parent:
                b = b.parent
            else:
                return r

class SurvexLeg(models.Model):
    block = models.ForeignKey('SurvexBlock')
    #title = models.ForeignKey('SurvexTitle')
    stationfrom = models.ForeignKey('SurvexStation', related_name='stationfrom')
    stationto = models.ForeignKey('SurvexStation', related_name='stationto')
    tape = models.FloatField()
    compass = models.FloatField()
    clino = models.FloatField()


#
# Single SurvexBlock
#
class SurvexBlockLookUpManager(models.Manager):
    def lookup(self, name):
        if name == "":
            blocknames = []
        else:
            blocknames = name.split(".")
        block = SurvexBlock.objects.get(parent=None, survexfile__path="all")
        for blockname in blocknames:
            block = SurvexBlock.objects.get(parent=block, name__iexact=blockname)
        return block

class SurvexBlock(models.Model):
    objects = SurvexBlockLookUpManager()
    name = models.CharField(max_length=100)
    parent = models.ForeignKey('SurvexBlock', blank=True, null=True)
    text = models.TextField()
    cave = models.ForeignKey('Cave', blank=True, null=True)

    date = models.DateField(blank=True, null=True)
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)
    expedition = models.ForeignKey('Expedition', blank=True, null=True)

    survexfile = models.ForeignKey("SurvexFile", blank=True, null=True)
    begin_char = models.IntegerField()  # code for where in the survex data files this block sits
    survexpath = models.CharField(max_length=200)  # the path for the survex stations

    survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)
    #refscandir = models.CharField(max_length=100)

    totalleglength = models.FloatField()

    class Meta:
        ordering = ('id',)

    def isSurvexBlock(self):  # Function used in templates
        return True

    def __unicode__(self):
        return self.name and unicode(self.name) or 'no name'

    def GetPersonroles(self):
        res = [ ]
        for personrole in self.personrole_set.order_by('personexpedition'):
            if res and res[-1]['person'] == personrole.personexpedition.person:
                res[-1]['roles'] += ", " + str(personrole.role)
            else:
                res.append({'person':personrole.personexpedition.person, 'expeditionyear':personrole.personexpedition.expedition.year, 'roles':str(personrole.role)})
        return res

    def MakeSurvexStation(self, name):
        ssl = self.survexstation_set.filter(name=name)
        if ssl:
            assert len(ssl) == 1
            return ssl[0]
        #print name
        ss = SurvexStation(name=name, block=self)
        ss.save()
        return ss

    def DayIndex(self):
        return list(self.expeditionday.survexblock_set.all()).index(self)


class SurvexTitle(models.Model):
    survexblock = models.ForeignKey('SurvexBlock')
    title = models.CharField(max_length=200)
    cave = models.ForeignKey('Cave', blank=True, null=True)

#
# member of a SurvexBlock
#
ROLE_CHOICES = (
    ('insts','Instruments'),
    ('dog','Other'),
    ('notes','Notes'),
    ('pics','Pictures'),
    ('tape','Tape measure'),
    ('useless','Useless'),
    ('helper','Helper'),
    ('disto','Disto'),
    ('consultant','Consultant'),
)

class SurvexPersonRole(models.Model):
    survexblock = models.ForeignKey('SurvexBlock')
    nrole = models.CharField(choices=ROLE_CHOICES, max_length=200, blank=True, null=True)
    # increasing levels of precision
    personname = models.CharField(max_length=100)
    person = models.ForeignKey('Person', blank=True, null=True)
    personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True)
    persontrip = models.ForeignKey('PersonTrip', blank=True, null=True)
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)

    def __unicode__(self):
        return unicode(self.person) + " - " + unicode(self.survexblock) + " - " + unicode(self.nrole)


class SurvexScansFolder(models.Model):
    fpath = models.CharField(max_length=200)
    walletname = models.CharField(max_length=200)

    class Meta:
        ordering = ('walletname',)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansfolder', kwargs={"path":re.sub("#", "%23", self.walletname)}))

class SurvexScanSingle(models.Model):
    ffile = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)

    class Meta:
        ordering = ('name',)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansingle', kwargs={"path":re.sub("#", "%23", self.survexscansfolder.walletname), "file":self.name}))


class TunnelFile(models.Model):
    tunnelpath = models.CharField(max_length=200)
    tunnelname = models.CharField(max_length=200)
    bfontcolours = models.BooleanField(default=False)
    survexscansfolders = models.ManyToManyField("SurvexScansFolder")
    survexscans = models.ManyToManyField("SurvexScanSingle")
    survexblocks = models.ManyToManyField("SurvexBlock")
    tunnelcontains = models.ManyToManyField("TunnelFile")  # case when its a frame type
    filesize = models.IntegerField(default=0)
    npaths = models.IntegerField(default=0)
    survextitles = models.ManyToManyField("SurvexTitle")

    class Meta:
        ordering = ('tunnelpath',)

10
core/templatetags/csrffaker.py
Normal file
@@ -0,0 +1,10 @@
import django
from django import template

register = template.Library()

if django.VERSION[0] >= 1 and django.VERSION[1] > 1:
    pass
else:
    @register.simple_tag
    def csrf_token():
        return ""
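What the csrffaker shim above achieves, as a sketch: on Django versions before the built-in CSRF tag existed, a template can still say

    {% load csrffaker %}
    <form method="post">{% csrf_token %} ... </form>

and the {% csrf_token %} tag simply renders as an empty string, so the same template works on both old and new Django (assuming the file sits in an app's templatetags package so the load tag can find it).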
@@ -3,7 +3,7 @@ from django.utils.safestring import mark_safe

register = template.Library()

@register.filter()
def link(value):
    return mark_safe(f"<a href='{value.get_absolute_url()}'>" + str(value) + "</a>")
    return mark_safe("<a href=\'%s\'>" % value.get_absolute_url() + unicode(value) + "</a>")

52
core/templatetags/survex_markup.py
Normal file
@@ -0,0 +1,52 @@
from django import template
from django.utils.html import conditional_escape
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
import re

register = template.Library()

# seems to add extra lines between the commented lines, which isn't so great.
regexes = []
regexes.append((re.compile(r"(;.*)$", re.IGNORECASE|re.MULTILINE),
                r'<span class = "comment">\1</span>\n'))
regexes.append((re.compile(r"^(\s*)(\*include)(\s+)([^\s]*)(.svx)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3<a href="\4.index">\4\5</a>'))
regexes.append((re.compile(r"^(\s*)(\*include)(\s+)([^\s]*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3<a href="\4.index">\4</a>'))
regexes.append((re.compile(r"^(\s*)(\*team\s+(?:notes|tape|insts|pics))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*(?:begin|end|copyright|date|entrance|equate|export|fix|prefix|require|SOLVE|title|truncate))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*calibrate\s+(?:TAPE|COMPASS|CLINO|COUNTER|DEPTH|DECLINATION|X|Y|Z)+)(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*data\s+(?:DEFAULT|NORMAL|DIVING|CARTESIAN|TOPOFIL|CYLPOLAR|NOSURVEY|passage)(?:\s+station|\s+from|\s+to|\s+FROMDEPTH|\s+TODEPTH|\s+DEPTHCHANGE|\s+newline|\s+direction|\s+tape|\s+compass|\s+clino|\s+northing|\s+easting|\s+altitude|\s+length|\s+bearing|\s+gradient|\s+ignoreall|\sleft|\sright|\sup|\sdown)*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>'))
regexes.append((re.compile(r"^(\s*)(\*default\s+(?:CALIBRATE|DATA|UNITS)+)(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*flags\s+(?:DUPLICATE|SPLAY|SURFACE|not DUPLICATE|not SPLAY|not SURFACE))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*infer\s+(?:plumbs|equates|exports))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*instrument\s+(?:compass|clino|tape))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*sd\s+(?:TAPE|COMPASS|CLINO|COUNTER|DEPTH|DECLINATION|DX|DY|DZ))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*set\s+(?:BLANK|COMMENT|DECIMAL|EOL|KEYWORD|MINUS|NAMES|OMIT|PLUS|ROOT|SEPARATOR))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*units\s+(?:TAPE|LENGTH|COMPASS|BEARING|CLINO|GRADIENT|COUNTER|DEPTH|DECLINATION|X|Y|Z))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(.*)$", re.IGNORECASE|re.MULTILINE),
                r'<div>\1 </div>\n'))

@register.filter()
@stringfilter
def survex_to_html(value, autoescape=None):
    if autoescape:
        value = conditional_escape(value)
    for regex, sub in regexes:
        print(sub)
        value = regex.sub(sub, value)
    return mark_safe(value)
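A usage sketch for the survex_to_html filter above (the survex fragment is illustrative):

    src = "*begin gourt\n*team notes Becka\n; dug through boulders\n*end gourt"
    html = survex_to_html(src)
    # *begin/*end and *team lines gain <span class = "command"> wrappers,
    # the ";" comment gains <span class = "comment">, and the final
    # catch-all regex wraps every line in a <div>.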
169
core/templatetags/wiki_markup.py
Normal file
@@ -0,0 +1,169 @@
from django import template
from django.utils.html import conditional_escape
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from django.conf import settings
from troggle.core.models import QM, DPhoto, LogbookEntry, Cave
import re, urlparse

register = template.Library()

@register.filter()
def plusone(n):
    return n + 1

def wiki_list(line, listdepth):
    l = ""
    for d in listdepth:
        l += d
    mstar = re.match(l + "\*(.*)", line)
    if mstar:
        listdepth.append("\*")
        return ("<ul>\n" + " " * len(listdepth) + "<li>%s</li>\n" % mstar.groups()[0], listdepth)
    mhash = re.match(l + "#(.*)", line)
    if mhash:
        listdepth.append("#")
        return ("<ol>\n" + " " * len(listdepth) + "<li>%s</li>\n" % mhash.groups()[0], listdepth)
    mflat = re.match(l + "(.*)", line)
    if mflat and listdepth:
        return (" " * len(listdepth) + "<li>%s</li>\n" % mflat.groups()[0], listdepth)
    if listdepth:
        prev = listdepth.pop()
        if prev == "\*":
            t, l = wiki_list(line, listdepth)
            return ("</ul>\n" + t, l)
        if prev == "#":
            t, l = wiki_list(line, listdepth)
            return ("</ol>\n" + t, l)
    return (line, listdepth)
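# A sketch of how wiki_list() consumes one line at a time while tracking nesting
# (illustrative values; the callers below close any lists left open at the end):
#
#   listdepth = []
#   t, listdepth = wiki_list("*top item", listdepth)   # "<ul>\n <li>top item</li>\n", ["\*"]
#   t, listdepth = wiki_list("**nested", listdepth)    # "<ul>\n  <li>nested</li>\n", ["\*", "\*"]
#   t, listdepth = wiki_list("plain text", listdepth)  # "</ul>\n</ul>\nplain text", []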
@register.filter()
@stringfilter
def wiki_to_html(value, autoescape=None):
    """
    This is the tag which turns wiki syntax into html. It is intended for long pieces of wiki.
    Hence it splits the wiki into HTML paragraphs based on double line feeds.
    """
    # find paragraphs
    outValue = ""
    for paragraph in re.split("\n\s*?\n", value, flags=re.DOTALL):
        outValue += "<p>"
        outValue += wiki_to_html_short(paragraph, autoescape)
        outValue += "</p>\n"
    return mark_safe(outValue)
@register.filter()
@stringfilter
def wiki_to_html_short(value, autoescape=None):
    """
    This is the tag which turns wiki syntax into html. It is intended for short pieces of wiki.
    Hence it does not split the wiki into paragraphs where it finds double line feeds.
    """
    if autoescape:
        value = conditional_escape(value)
    # de-escape doubly escaped characters
    value = re.sub("&amp;(.*?);", r"&\1;", value, flags=re.DOTALL)
    # italics and bold
    value = re.sub("''''([^']+)''''", r"<b><i>\1</i></b>", value, flags=re.DOTALL)
    value = re.sub("'''([^']+)'''", r"<b>\1</b>", value, flags=re.DOTALL)
    value = re.sub("''([^']+)''", r"<i>\1</i>", value, flags=re.DOTALL)

    # make headers
    def headerrepl(matchobj):
        number = len(matchobj.groups()[0])
        num = str(number)
        if number > 1:
            return '<h' + num + '>' + matchobj.groups()[1] + '</h' + num + '>'
        else:
            print('morethanone')
            return matchobj.group()
    value = re.sub(r"(?m)^(=+)([^=]+)(=+)$", headerrepl, value)

    # make qm links. this takes a little doing
    qmMatchPattern = settings.QM_PATTERN
    def qmrepl(matchobj):
        """
        A function for replacing wikicode qm links with html qm links.
        Given a matchobj matching a wikilink in the format
        [[QM:C204-1999-24]]
        If the QM does not exist, the function will return a link for creating it.
        """
        qmdict = {'urlroot': settings.URL_ROOT, 'cave': matchobj.groups()[2], 'year': matchobj.groups()[1], 'number': matchobj.groups()[3]}
        try:
            qm = QM.objects.get(found_by__cave__kataster_number = qmdict['cave'],
                                found_by__date__year = qmdict['year'],
                                number = qmdict['number'])
            return r'<a href="%s" id="q%s">%s</a>' % (qm.get_absolute_url(), qm.code, unicode(qm))
        except QM.DoesNotExist: #bother aaron to make him clean up the below code - AC
            try:
                placeholder = LogbookEntry.objects.get(date__year=qmdict['year'], cave__kataster_number=qmdict['cave'], title__icontains='placeholder')
            except LogbookEntry.DoesNotExist:
                placeholder = LogbookEntry(
                    date='01-01' + qmdict['year'],
                    cave=Cave.objects.get(kataster_number=qmdict['cave']),
                    title='placeholder'
                )
            qm = QM(found_by = placeholder, number = qmdict['number'])
            return r'<a class="redtext" href="%s" id="q%s">%s</a>' % (qm.get_absolute_url(), qm.code, unicode(qm))

    value = re.sub(qmMatchPattern, qmrepl, value, flags=re.DOTALL)
    # make photo links for [[photo:filename]] or [[photo:filename linktext]], and
    # insert photos for [[display:left photo:filename]]
    photoLinkPattern = "\[\[\s*photo:(?P<photoName>[^\s]+)\s*(?P<linkText>.*)\]\]"
    photoSrcPattern = "\[\[\s*display:(?P<style>[^\s]+) photo:(?P<photoName>[^\s]+)\s*\]\]"
    def photoLinkRepl(matchobj):
        matchdict = matchobj.groupdict()
        try:
            linkText = matchdict['linkText']
        except KeyError:
            linkText = None

        try:
            photo = DPhoto.objects.get(file=matchdict['photoName'])
            if not linkText:
                linkText = str(photo)
            res = r'<a href=' + photo.get_admin_url() + '>' + linkText + '</a>'
        except DPhoto.DoesNotExist:
            res = r'<a class="redtext" href="">make new photo</a>'
        return res

    def photoSrcRepl(matchobj):
        matchdict = matchobj.groupdict()
        style = matchdict['style']
        try:
            photo = DPhoto.objects.get(file=matchdict['photoName'])
            res = r'<a href=' + photo.file.url + '><img src=' + photo.thumbnail_image.url + ' class=' + style + ' /></a>'
        except DPhoto.DoesNotExist:
            res = r'<a class="redtext" href="">make new photo</a>'
        return res
    value = re.sub(photoLinkPattern, photoLinkRepl, value, flags=re.DOTALL)
    value = re.sub(photoSrcPattern, photoSrcRepl, value, flags=re.DOTALL)

    # make cave links
    value = re.sub("\[\[\s*cave:([^\s]+)\s*\s*\]\]", r'<a href="%scave/\1/">\1</a>' % settings.URL_ROOT, value, flags=re.DOTALL)
    # make people links
    value = re.sub("\[\[\s*person:(.+)\|(.+)\]\]", r'<a href="%sperson/\1/">\2</a>' % settings.URL_ROOT, value, flags=re.DOTALL)
    # make subcave links
    value = re.sub("\[\[\s*subcave:(.+)\|(.+)\]\]", r'<a href="%ssubcave/\1/">\2</a>' % settings.URL_ROOT, value, flags=re.DOTALL)
    # make cavedescription links
    value = re.sub("\[\[\s*cavedescription:(.+)\|(.+)\]\]", r'<a href="%scavedescription/\1/">\2</a>' % settings.URL_ROOT, value, flags=re.DOTALL)

    # Make lists from lines starting with lists of [stars and hashes]
    outValue = ""
    listdepth = []
    for line in value.split("\n"):
        t, listdepth = wiki_list(line, listdepth)
        outValue += t
    for item in listdepth:
        if item == "\*":
            outValue += "</ul>\n"
        elif item == "#":
            outValue += "</ol>\n"
    return mark_safe(outValue)

wiki_to_html.needs_autoescape = True
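A worked sketch of the whole wiki_to_html pipeline above (illustrative input; it needs a configured troggle environment because of the model imports):

    wiki = "== Pitch ==\n\nThe rope is '''not''' rigged.\n\n*bring a ladder\n*bring a spanner"
    wiki_to_html(wiki)
    # roughly: "<p><h2> Pitch </h2></p>\n<p>The rope is <b>not</b> rigged.</p>\n
    #           <p><ul>\n <li>bring a ladder</li>\n <li>bring a spanner</li>\n</ul>\n</p>\n"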
295
core/utils.py
@@ -1,295 +0,0 @@
import logging
import random
import resource
import subprocess
import os
from decimal import getcontext
from pathlib import Path

getcontext().prec = 2  # use 2 significant figures for decimal calculations

import settings

"""This file declares TROG, a globally visible object for caches.

TROG is a dictionary holding globally visible indexes and cache functions.
It is a Global Object, see https://python-patterns.guide/python/module-globals/
troggle.utils.TROG

chaosmonkey(n) - used by survex import to regenerate some .3d files
save_carefully() - core function that saves troggle objects in the database

various git add/commit functions that need refactoring together

NOTE that TROG is not serialized! Two users can update it and conflict!!
This needs to be in a multi-user database with transactions. However it is
useful when doing a data import with databaseReset.py as that has a single
thread.
"""

TROG = {"pagecache": {"expedition": {}}, "caves": {"gcavelookup": {}, "gcavecount": {}}}

# This is module-level executable code. This is a Bad Thing. Especially when it touches the file system.
try:
    logging.basicConfig(level=logging.DEBUG, filename=settings.LOGFILE, filemode="w")
except:
    # Opening the file for writing is going to fail currently, so decide it doesn't matter for now
    pass

def get_process_memory():
    usage = resource.getrusage(resource.RUSAGE_SELF)
    return usage[2] / 1024.0

def chaosmonkey(n):
    """returns True once every n calls - randomly"""
    if random.randrange(0, n) != 0:
        return False
    # print("CHAOS strikes !", file=sys.stderr)
    return True

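# A sketch of the intended use of chaosmonkey(): cheaply re-doing a random ~1/n
# sample of some repeated work on each import run (names here are hypothetical):
#
#   for path in all_survex_paths:       # hypothetical list of .svx files
#       if is_stale(path) or chaosmonkey(200):
#           regenerate_3d(path)         # ~0.5% of fresh files redone per run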
def only_commit(fname, message):
    """Only used to commit a survex file edited and saved in view/survex.py"""
    git = settings.GIT
    cwd = fname.parent
    filename = fname.name
    # print(f'{fname=} ')

    try:
        cp_add = subprocess.run([git, "add", filename], cwd=cwd, capture_output=True, text=True)
        if cp_add.returncode != 0:
            msgdata = f"Ask a nerd to fix this problem in only_commit().\n--{cp_add.stderr}\n--{cp_add.stdout}\n--return code:{str(cp_add.returncode)}"
            raise WriteAndCommitError(
                f"CANNOT git ADD on server for this file {filename}. Edits saved but not added to git.\n\n" + msgdata
            )

        cp_commit = subprocess.run([git, "commit", "-m", message], cwd=cwd, capture_output=True, text=True)
        # This produces return code = 1 if it commits OK, but when the local repo still needs to be pushed to origin/loser
        # which will be the case when running a test troggle system on a development machine
        devok_text = """On branch master
Your branch is ahead of 'origin/master' by 1 commit.
  (use "git push" to publish your local commits)

nothing to commit, working tree clean
"""
        if cp_commit.returncode == 1 and cp_commit.stdout == devok_text:
            pass
        else:
            if cp_commit.returncode != 0 and not cp_commit.stdout.strip().endswith(
                "nothing to commit, working tree clean"
            ):
                msgdata = f'--Ask a nerd to fix this problem in only_commit().\n--{cp_commit.stderr}\n--"{cp_commit.stdout}"\n--return code:{str(cp_commit.returncode)}'
                print(msgdata)
                raise WriteAndCommitError(
                    f"Error code with git on server for this file {filename}. Edits saved, added to git, but NOT committed.\n\n"
                    + msgdata
                )

    except subprocess.SubprocessError:
        raise WriteAndCommitError(
            f"CANNOT git COMMIT on server for this file {filename}. Subprocess error. Edits not saved.\nAsk a nerd to fix this."
        )

def write_and_commit(files, message):
    """Writes the content to the filepath and adds and commits the file to git. If this fails, a WriteAndCommitError is raised.
    This creates any needed intermediate folders, which is what we need when writing survex files, so functionality here
    is duplicated in only_commit()

    These need refactoring
    """
    git = settings.GIT
    commands = []
    try:
        for filepath, content, encoding in files:
            cwd = filepath.parent
            filename = filepath.name
            # GIT see also core/views/uploads.py dwgupload()
            # GIT see also core/views/expo.py editexpopage()
            os.makedirs(os.path.dirname(filepath), exist_ok=True)
            if encoding:
                mode = "w"
                kwargs = {"encoding": encoding}
            else:
                mode = "wb"
                kwargs = {}
            try:
                with open(filepath, mode, **kwargs) as f:
                    print(f"WRITING {cwd}---{filename} ")
                    # as the wsgi process www-data, we have group write-access but are not owner, so cannot chmod.
                    # os.chmod(filepath, 0o664)  # set file permissions to rw-rw-r--
                    f.write(content)
            except PermissionError:
                raise WriteAndCommitError(
                    f"CANNOT save this file.\nPERMISSIONS incorrectly set on server for this file {filename}. Ask a nerd to fix this."
                )
            cmd_diff = [git, "diff", filename]
            cp_diff = subprocess.run(cmd_diff, cwd=cwd, capture_output=True, text=True)
            commands.append(cmd_diff)
            if cp_diff.returncode == 0:
                cmd_add = [git, "add", filename]
                cp_add = subprocess.run(cmd_add, cwd=cwd, capture_output=True, text=True)
                commands.append(cmd_add)
                if cp_add.returncode != 0:
                    msgdata = (
                        "Ask a nerd to fix this.\n\n"
                        + cp_add.stderr
                        + "\n\n"
                        + cp_add.stdout
                        + "\n\nreturn code: "
                        + str(cp_add.returncode)
                    )
                    raise WriteAndCommitError(
                        f"CANNOT git on server for this file {filename}. Edits saved but not added to git.\n\n"
                        + msgdata
                    )
            else:
                print(f"No change {filepath}")
        filepaths = [filepath for filepath, content, encoding in files]
        cmd_commit = [git, "commit"] + filepaths + ["-m", message]
        cm_status = subprocess.run(cmd_commit, cwd=cwd, capture_output=True, text=True)
        commands.append(cmd_commit)
        if cm_status.returncode != 0:
            msgdata = (
                "Commands: " + str(commands)
                + "Ask a nerd to fix this.\n\n"
                + "Stderr: " + cm_status.stderr
                + "\n\n"
                + "Stdout: " + cm_status.stdout
                + "\n\nreturn code: " + str(cm_status.returncode)
            )
            raise WriteAndCommitError(
                f"Error committing. Edits saved, added to git, but NOT committed.\n\n"
                + msgdata
            )
        cmd_status = [git, "status"] + filepaths
        cp_status = subprocess.run(cmd_status, cwd=cwd, capture_output=True, text=True)
        commands.append(cp_status)
        # This produces return code = 1 if it commits OK, but when the repo still needs to be pushed to origin/expoweb
        if (not cp_status.stdout) or len(cp_status.stdout) < 2 or cp_status.stdout.split("\n")[-2] != "nothing to commit, working tree clean":
            msgdata = (
                str(commands)
                + "Ask a nerd to fix this.\n\n"
                + "Stderr: " + cp_status.stderr
                + "\n\n"
                + "Stdout: " + cp_status.stdout
                + "\n\nreturn code: " + str(cp_status.returncode)
            )
            raise WriteAndCommitError(
                f"Error code with git on server for this file {filename}. Edits saved, added to git, but NOT committed.\n\n"
                + msgdata
            )
    except subprocess.SubprocessError:
        raise WriteAndCommitError(
            f"CANNOT git on server for this file {filename}. Subprocess error. Edits not saved.\nAsk a nerd to fix this."
        )

class WriteAndCommitError(Exception):
    """Exception class for errors writing files and committing them to git"""

    def __init__(self, message):
        self.message = message

    def __str__(self):
        return f"WriteAndCommitError: {self.message}"

def writetrogglefile(filepath, filecontent):
    """Commit the new saved file to git
    Callers to cave.writeDataFile() or entrance.writeDataFile() should handle the exception PermissionError explicitly
    """
    # GIT see also core/views/expo.py editexpopage()
    # GIT see also core/views/uploads.py dwgupload()
    # Called from core/models/caves.py Cave.writeDataFile() Entrance.writeDataFile()
    filepath = Path(filepath)
    cwd = filepath.parent
    filename = filepath.name
    git = settings.GIT

    # as the wsgi process www-data, we have group write-access but are not owner, so cannot chmod.
    # do not trap exceptions, pass them up to the view that called this function
    print(f"WRITING {cwd}---{filename} ")
    with open(filepath, "w") as f:
        f.write(filecontent)
    # os.chmod(filepath, 0o664)  # set file permissions to rw-rw-r--
    sp = subprocess.run([git, "add", filename], cwd=cwd, capture_output=True, check=True, text=True)
    if sp.returncode != 0:
        out = sp.stdout
        if len(out) > 160:
            out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
        print(f"git ADD {cwd}:\n\n" + str(sp.stderr) + "\n\n" + out + "\n\nreturn code: " + str(sp.returncode))

    sp = subprocess.run(
        [git, "commit", "-m", f"Troggle online: cave or entrance edit -{filename}"],
        cwd=cwd,
        capture_output=True,
        check=True,
        text=True,
    )
    if sp.returncode != 0:
        out = sp.stdout
        if len(out) > 160:
            out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
        print(f"git COMMIT {cwd}:\n\n" + str(sp.stderr) + "\n\n" + out + "\n\nreturn code: " + str(sp.returncode))
    # not catching and re-raising any exceptions yet, inc. the stderr etc. We should do that.

def save_carefully(objectType, lookupAttribs={}, nonLookupAttribs={}):
    """Looks up instance using lookupAttribs and carries out the following:
    -if instance does not exist in DB: add instance to DB, return (new instance, True)
    -if instance exists in DB and was modified using Troggle: do nothing, return (existing instance, False)
    -if instance exists in DB and was not modified using Troggle: overwrite instance, return (instance, False)

    The checking is accomplished using Django's get_or_create and the new_since_parsing boolean field
    defined in core.models.TroggleModel.

    We are not using new_since_parsing - it is a fossil from Aaron Curtis's design in 2006. So it is always false.

    NOTE: this takes twice as long as simply creating a new object with the given values.

    As of Jan. 2023 this function is not used anywhere in troggle.
    """
    try:
        instance, created = objectType.objects.get_or_create(defaults=nonLookupAttribs, **lookupAttribs)
    except:
        print(" !! - FAIL in SAVE CAREFULLY ===================", objectType)
        print(" !! - -- objects.get_or_create()")
        print(f" !! - lookupAttribs:{lookupAttribs}\n !! - nonLookupAttribs:{nonLookupAttribs}")
        raise
    if not created and not instance.new_since_parsing:
        for k, v in list(
            nonLookupAttribs.items()
        ):  # overwrite the existing attributes from the logbook text (except date and title)
            setattr(instance, k, v)
        try:
            instance.save()
        except:
            print(" !! - SAVE CAREFULLY ===================", objectType)
            print(" !! - -- instance.save()")
            print(f" !! - lookupAttribs:{lookupAttribs}\n !! - nonLookupAttribs:{nonLookupAttribs}")
            raise
    try:
        str(instance)
    except:
        pass
    if created:
        logging.info(str(instance) + " was just added to the database for the first time. \n")

    if not created and instance.new_since_parsing:
        logging.info(
            str(instance) + " has been modified using Troggle since parsing, so the current script left it as is. \n"
        )

    if not created and not instance.new_since_parsing:
        logging.info(
            " instance:<"
            + str(instance)
            + "> existed in the database unchanged since last parse. It has been overwritten."
        )
    return (instance, created)
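A usage sketch for save_carefully() above (model and field names are illustrative):

    lookup = {"slug": "1623-290"}
    nonlookup = {"official_name": "Fischgesicht Höhle"}
    cave, created = save_carefully(Cave, lookupAttribs=lookup, nonLookupAttribs=nonlookup)
    # first run: created is True and a new row is saved; later runs: created is
    # False and official_name is overwritten in place, because new_since_parsing
    # is always False in current troggle.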
235
core/view_surveys.py
Normal file
@@ -0,0 +1,235 @@
from django.conf import settings
import fileAbstraction
from django.shortcuts import render_to_response
from django.http import HttpResponse, Http404
import os, stat
import re
from troggle.core.models import SurvexScansFolder, SurvexScanSingle, SurvexBlock, TunnelFile
import parsers.surveys
import urllib

# inline fileabstraction into here if it's not going to be useful anywhere else
# keep things simple and ignore exceptions everywhere for now

def getMimeType(extension):
    try:
        return {"txt": "text/plain",
                "html": "text/html",
               }[extension]
    except:
        print("unknown file type")
        return "text/plain"

def listdir(request, path):
    #try:
    return HttpResponse(fileAbstraction.listdir(path), content_type="text/plain")
    #except:
    #    raise Http404

def upload(request, path):
    pass

def download(request, path):
    #try:
    return HttpResponse(fileAbstraction.readFile(path), content_type=getMimeType(path.split(".")[-1]))
    #except:
    #    raise Http404

#
# julian's quick hack for something that works
# could signal directories by ending with /, and forward cases where it's missing
#
extmimetypes = {".txt": "text/plain",
                ".html": "text/html",
                ".png": "image/png",
                ".jpg": "image/jpeg",
                ".jpeg": "image/jpeg",
               }
# dead
def jgtfile(request, f):
    fp = os.path.join(settings.SURVEYS, f)
    # could also surf through SURVEX_DATA

    # directory listing
    if os.path.isdir(fp):
        listdirfiles = []
        listdirdirs = []

        for lf in sorted(os.listdir(fp)):
            hpath = os.path.join(f, lf)  # not absolute path
            if lf[0] == "." or lf[-1] == "~":
                continue

            hpath = hpath.replace("\\", "/")  # for windows users
            href = hpath.replace("#", "%23")  # '#' in file name annoyance

            flf = os.path.join(fp, lf)
            if os.path.isdir(flf):
                nfiles = len([sf for sf in os.listdir(flf) if sf[0] != "."])
                listdirdirs.append((href, hpath + "/", nfiles))
            else:
                listdirfiles.append((href, hpath, os.path.getsize(flf)))

        upperdirs = []
        lf = f
        while lf:
            hpath = lf.replace("\\", "/")  # for windows users
            if hpath[-1] != "/":
                hpath += "/"
            href = hpath.replace("#", "%23")
            lf = os.path.split(lf)[0]
            upperdirs.append((href, hpath))
        upperdirs.append(("", "/"))

        return render_to_response('listdir.html', {'file': f, 'listdirfiles': listdirfiles, 'listdirdirs': listdirdirs, 'upperdirs': upperdirs, 'settings': settings})

    # flat output of file when loaded
    if os.path.isfile(fp):
        ext = os.path.splitext(fp)[1].lower()
        mimetype = extmimetypes.get(ext, "text/plain")
        fin = open(fp)
        ftext = fin.read()
        fin.close()
        return HttpResponse(ftext, content_type=mimetype)

    return HttpResponse("unknown file::%s::" % f, content_type="text/plain")

def UniqueFile(fname):
    while True:
        if not os.path.exists(fname):
            break
        mname = re.match(r"(?i)(.*?)(?:-(\d+))?\.(png|jpg|jpeg)$", fname)
        if mname:
            fname = "%s-%d.%s" % (mname.group(1), int(mname.group(2) or "0") + 1, mname.group(3))
    return fname
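# UniqueFile() probes the filesystem and bumps a numeric suffix until the name
# is free, so successive uploads of the same image name go (sketch):
#
#   UniqueFile("plan.png")    # -> "plan.png"    while nothing exists yet
#   UniqueFile("plan.png")    # -> "plan-1.png"  once plan.png exists
#   UniqueFile("plan-1.png")  # -> "plan-2.png"  once plan-1.png exists too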
# join it all up and then split them off for the directories that don't exist
# anyway, this mkdir doesn't work
def SaveImageInDir(name, imgdir, project, fdata, bbinary):
    print("hihihihi", fdata, settings.SURVEYS)
    fimgdir = os.path.join(settings.SURVEYS, imgdir)
    if not os.path.isdir(fimgdir):
        print("*** Making directory", fimgdir)
        os.mkdir(fimgdir)
    fprojdir = os.path.join(fimgdir, project)
    if not os.path.isdir(fprojdir):
        print("*** Making directory", fprojdir)
        os.mkdir(fprojdir)
    print("hhh")

    fname = os.path.join(fprojdir, name)
    print(fname, "fff")
    fname = UniqueFile(fname)

    p2, p1 = os.path.split(fname)
    p3, p2 = os.path.split(p2)
    p4, p3 = os.path.split(p3)
    res = os.path.join(p3, p2, p1)

    print("saving file", fname)
    fout = open(fname, (bbinary and "wb" or "w"))
    fout.write(fdata.read())
    fout.close()
    res = os.path.join(imgdir, name)
    return res.replace("\\", "/")
# do we want to consider saving project/field rather than field/project
def jgtuploadfile(request):
    filesuploaded = []
    project, user, password, tunnelversion = request.POST["tunnelproject"], request.POST["tunneluser"], request.POST["tunnelpassword"], request.POST["tunnelversion"]
    print(project, user, tunnelversion)
    for uploadedfile in request.FILES.values():
        if uploadedfile.field_name in ["tileimage", "backgroundimage"] and \
           uploadedfile.content_type in ["image/png", "image/jpeg"]:
            fname = user + "_" + re.sub("[\\\\/]", "-", uploadedfile.name)  # very escaped \
            print(fname)
            fileuploaded = SaveImageInDir(fname, uploadedfile.field_name, project, uploadedfile, True)
            filesuploaded.append(settings.URL_ROOT + "/jgtfile/" + fileuploaded)
        if uploadedfile.field_name in ["sketch"] and \
           uploadedfile.content_type in ["text/plain"]:
            fname = user + "_" + re.sub("[\\\\/]", "-", uploadedfile.name)  # very escaped \
            print(fname)
            fileuploaded = SaveImageInDir(fname, uploadedfile.field_name, project, uploadedfile, False)
            filesuploaded.append(settings.URL_ROOT + "/jgtfile/" + fileuploaded)
    #print "FF", request.FILES
    #print ("FFF", request.FILES.values())
    message = ""
    print("gothere")
    return render_to_response('fileupload.html', {'message': message, 'filesuploaded': filesuploaded, 'settings': settings})
def surveyscansfolder(request, path):
    #print [ s.walletname for s in SurvexScansFolder.objects.all() ]
    survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
    return render_to_response('survexscansfolder.html', {'survexscansfolder': survexscansfolder, 'settings': settings})

def surveyscansingle(request, path, file):
    survexscansfolder = SurvexScansFolder.objects.get(walletname=urllib.unquote(path))
    survexscansingle = SurvexScanSingle.objects.get(survexscansfolder=survexscansfolder, name=file)
    return HttpResponse(content=open(survexscansingle.ffile), content_type=getMimeType(path.split(".")[-1]))
    #return render_to_response('survexscansfolder.html', { 'survexscansfolder':survexscansfolder, 'settings': settings })

def surveyscansfolders(request):
    survexscansfolders = SurvexScansFolder.objects.all()
    return render_to_response('survexscansfolders.html', {'survexscansfolders': survexscansfolders, 'settings': settings})

def tunneldata(request):
    tunnelfiles = TunnelFile.objects.all()
    return render_to_response('tunnelfiles.html', {'tunnelfiles': tunnelfiles, 'settings': settings})

def tunnelfile(request, path):
    tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
    tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)
    return HttpResponse(content=open(tfile), content_type="text/plain")

def tunnelfileupload(request, path):
    tunnelfile = TunnelFile.objects.get(tunnelpath=urllib.unquote(path))
    tfile = os.path.join(settings.TUNNEL_DATA, tunnelfile.tunnelpath)

    project, user, password, tunnelversion = request.POST["tunnelproject"], request.POST["tunneluser"], request.POST["tunnelpassword"], request.POST["tunnelversion"]
    print(project, user, tunnelversion)

    assert len(request.FILES.values()) == 1, "only one file to upload"

    uploadedfile = list(request.FILES.values())[0]

    if uploadedfile.field_name != "sketch":
        return HttpResponse(content="Error: non-sketch file uploaded", content_type="text/plain")
    if uploadedfile.content_type != "text/plain":
        return HttpResponse(content="Error: non-plain content type", content_type="text/plain")

    # could use this to add new files
    if os.path.split(path)[1] != uploadedfile.name:
        return HttpResponse(content="Error: name disagrees", content_type="text/plain")

    orgsize = tunnelfile.filesize  # = os.stat(tfile)[stat.ST_SIZE]

    ttext = uploadedfile.read()

    # could check that the user and projects agree here

    fout = open(tfile, "w")
    fout.write(ttext)
    fout.close()

    # redo its settings
    parsers.surveys.SetTunnelfileInfo(tunnelfile)
    tunnelfile.save()

    uploadedfile.close()
    message = "File size %d overwritten with size %d" % (orgsize, tunnelfile.filesize)
    return HttpResponse(content=message, content_type="text/plain")
8
core/views.py
Normal file
@@ -0,0 +1,8 @@
# primary namespace

import view_surveys
import views_caves
import views_survex
import views_logbooks
import views_other

@@ -1,86 +0,0 @@
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth import forms as auth_forms
from django.contrib.auth import login, logout
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect, render
from django.utils.http import url_has_allowed_host_and_scheme

"""This enforces the login requirement for non-public pages using
the decorator mechanism.
https://www.fullstackpython.com/django-contrib-auth-decorators-login-required-examples.html
"""

class login_required_if_public(object):
    def __init__(self, f):
        if settings.PUBLIC_SITE:
            self.f = login_required(f)
        else:
            self.f = f

    def __call__(self, *args, **kwargs):
        return self.f(*args, **kwargs)
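# Usage sketch: applied to a view exactly like django.contrib.auth's
# login_required, but it only bites when settings.PUBLIC_SITE is true:
#
#   @login_required_if_public
#   def edit_cave(request, path="", slug=None):
#       ...
#
# On a local development instance (PUBLIC_SITE false) the view is served
# unwrapped, so no login is needed while testing.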
# This is copied from CUYC.cuy.website.view.auth
# If we want to do the whole online-email thing, we would also need to copy across the code in these
# imported files and delete what is superfluous.
# Or we could just load the latest version of the django-registration app.
# from cuy.club.models import Member, Message
# from ..forms import WebsiteLoginForm, WebsiteRegisterForm
# from ...common import mail_site_error
# from .generic import user_is_active

"""The login and logout functions.
This is also where we would manage registration: for people wanting to create and validate their individual
logon accounts / forgotten passwords"""

############################
# Authentication Functions #
############################

def expologout(request):
    login_form = auth_forms.AuthenticationForm()
    logout(request)

    return render(request, "login/logout.html", {"form": login_form})

def expologin(request):
    # GET
    if not request.method == "POST":
        if (not request.user.is_authenticated) or (not request.user.is_active):
            return render(request, "login/index.html", {})
        else:
            # going to the login page when you are already logged in
            return render(request, "tasks.html", {})

    # POST
    username = request.POST["username"]
    password = request.POST["password"]

    user = authenticate(username=username, password=password)
    if user is None:
        return render(request, "login/index.html", {"invalid": True, "username": username})
    if not user.is_active:
        return render(request, "login/enable.html", {"login_state": "notenabled"})

    try:
        login(request, user)
        # Should do the ?next= stuff here..
        return redirect_after_login(request)
    except:
        return render(request, "errors/generic.html", {})

def redirect_after_login(request):
    nxt = request.GET.get("next", None)
    if nxt is None:
        return redirect(settings.LOGIN_REDIRECT_URL)
    elif not url_has_allowed_host_and_scheme(url=nxt, allowed_hosts={request.get_host()}, require_https=request.is_secure()):
        return redirect(settings.LOGIN_REDIRECT_URL)
    else:
        return redirect(nxt)
@@ -1,688 +0,0 @@
import os
import re
import subprocess
import tempfile
import zipfile
import urllib
from bs4 import BeautifulSoup

from pathlib import Path

from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseRedirect, FileResponse
from django.shortcuts import render
from django.urls import NoReverseMatch, reverse

import troggle.settings as settings
from troggle.core.forms import CaveAndEntranceFormSet, CaveForm, EntranceForm, EntranceLetterForm
from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance, GetCaveLookup
from troggle.core.models.logbooks import CaveSlug, QM
from troggle.core.utils import write_and_commit
from troggle.core.views import expo
from troggle.settings import CAVEDESCRIPTIONS, ENTRANCEDESCRIPTIONS
from troggle.parsers.caves import read_cave, read_entrance

from django.template import loader
from django.utils.safestring import mark_safe

from .auth import login_required_if_public

"""Manages the complex procedures to assemble a cave description out of its components.
Manages the use of cavern to parse survex files to produce 3d and pos files
"""

todo = """
- Fix rendercave() so that CaveView works

- in getCaves() search Gcavelookup first, which should raise a MultipleObjectsReturned
  exception if there are duplicates

- Learn to use Django .select_related() and .prefetch_related() to speed things up
  especially on the big report pages
  https://zerotobyte.com/how-to-use-django-select-related-and-prefetch-related/
"""

def getCaves(cave_id):
    """Only gets called if a call to getCave() raises a MultipleObjects exception

    TO DO: search Gcavelookup first, which should raise a MultipleObjectsReturned exception if there
    are duplicates"""
    try:
        caves = Cave.objects.filter(kataster_number=cave_id)
        caveset = set(caves)

        Gcavelookup = GetCaveLookup()  # dictionary mapping strings to Cave objects
        if cave_id in Gcavelookup:
            caveset.add(Gcavelookup[cave_id])
        return list(caveset)
    except:
        return []
def getCave(cave_id):
    """Returns a cave object when given a cave name or number. It is used by views including cavehref, ent, and qm.

    TO DO: search Gcavelookup first, which should raise a MultipleObjectsReturned exception if there
    are duplicates"""
    try:
        cave = Cave.objects.get(kataster_number=cave_id)
        return cave
    except Cave.MultipleObjectsReturned as ex:
        raise MultipleObjectsReturned("Duplicate kataster number") from ex  # propagate this up

    except Cave.DoesNotExist as ex:
        Gcavelookup = GetCaveLookup()  # dictionary mapping strings to Cave objects
        if cave_id in Gcavelookup:
            return Gcavelookup[cave_id]
        else:
            raise ObjectDoesNotExist("No cave found with this identifier in any id field") from ex  # propagate this up
    except:
        raise ObjectDoesNotExist("No cave found with this identifier in any id field")

def pad5(x):
    return "0" * (5 - len(x.group(0))) + x.group(0)

def padnumber(x):
    return re.sub("\d+", pad5, x)

def numericalcmp(x, y):
    return cmp(padnumber(x), padnumber(y))

def caveKey(c):
    """This function goes into a lexicographic sort function, and the values are strings,
    but we want to sort numerically on kataster number before sorting on unofficial number.
    """
    if not c.kataster_number:
        return "9999." + c.unofficial_number
    else:
        if int(c.kataster_number) >= 100:
            return "99." + c.kataster_number
        if int(c.kataster_number) >= 10:
            return "9." + c.kataster_number
        return c.kataster_number
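# caveKey() builds string keys whose lexicographic order matches numeric order
# of kataster numbers (sketch):
#
#   kataster "7"   -> "7"
#   kataster "41"  -> "9.41"
#   kataster "115" -> "99.115"
#   no kataster, unofficial "2012-ns-10" -> "9999.2012-ns-10"
#
# and "7" < "9.41" < "99.115" < "9999...." in string order, so one-, two- and
# three-digit kataster numbers sort before all unofficial numbers.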
def getnotablecaves():
    notablecaves = []
    for kataster_number in settings.NOTABLECAVESHREFS:
        try:
            cave = Cave.objects.get(kataster_number=kataster_number)
            notablecaves.append(cave)
        except:
            # print(" ! FAILED to get only one cave per kataster_number OR invalid number for: "+kataster_number)
            caves = Cave.objects.all().filter(kataster_number=kataster_number)
            for c in caves:
                # print(c.kataster_number, c.slug())
                if c.slug() is not None:
                    notablecaves.append(c)
    return notablecaves

def caveindex(request):
    #Cave.objects.all()
    caves1623 = list(Cave.objects.filter(area__short_name="1623"))
    caves1626 = list(Cave.objects.filter(area__short_name="1626"))
    caves1627 = list(Cave.objects.filter(area__short_name="1627"))
    caves1623.sort(key=caveKey)
    caves1626.sort(key=caveKey)
    caves1627.sort(key=caveKey)
    return render(
        request,
        "caveindex.html",
        {"caves1623": caves1623, "caves1626": caves1626, "caves1627": caves1627, "notablecaves": getnotablecaves(), "cavepage": True},
    )

def entranceindex(request):
    ents = Entrance.objects.all().order_by("slug")

    return render(
        request,
        "entranceindex.html",
        {"entrances": ents},
    )
def cave3d(request, cave_id=""):
|
||||
"""This is used to create a download url in templates/cave.html if anyone wants to download the .3d file
|
||||
The caller template tries kataster first, then unofficial_number if that kataster number does not exist
|
||||
but only if Cave.survex_file is non-empty
|
||||
|
||||
But the template file cave.html has its own ideas about the name of the file and thus the href. Ouch.
|
||||
/cave/3d/<cave_id>
|
||||
"""
|
||||
try:
|
||||
cave = getCave(cave_id)
|
||||
except ObjectDoesNotExist:
|
||||
return HttpResponseNotFound
|
||||
except Cave.MultipleObjectsReturned:
|
||||
# But only one might have survex data? So scan and return the first that works.
|
||||
caves = getCaves(cave_id)
|
||||
for c in caves:
|
||||
if c.survex_file:
|
||||
# exists, but may not be a valid file path to a valid .svx file in the Loser repo
|
||||
return file3d(request, c, c.slug)
|
||||
else:
|
||||
return file3d(request, cave, cave_id)
|
||||
|
||||
|
||||
def file3d(request, cave, cave_id):
|
||||
"""Produces a .3d file directly for download.
|
||||
survex_file should be in valid path format 'caves-1623/264/264.svx' but it might be mis-entered as simply '2012-ns-10.svx'
|
||||
|
||||
Also the cave.survex_file may well not match the cave description path:
|
||||
e.g. it might be to the whole system 'smk-system.svx' instead of just for the specific cave.
|
||||
|
||||
- If the expected .3d file corresponding to cave.survex_file is present, return it.
|
||||
- If the cave.survex_file exists, generate the 3d file, cache it and return it
|
||||
- Use the cave_id to guess what the 3d file might be and, if in the cache, return it
|
||||
- Use the cave_id to guess what the .svx file might be and generate the .3d file and return it
|
||||
- (Use the incomplete cave.survex_file and a guess at the missing directories to guess the real .svx file location ?)
|
||||
"""
|
||||
|
||||
def runcavern(survexpath):
|
||||
"""This has not yet been properly updated with respect to putting the .3d file in the same folder as the .svx filse
|
||||
as done in runcavern3d() in parsers/survex.py
|
||||
Needs testing.
|
||||
"""
|
||||
# print(" - Regenerating cavern .log and .3d for '{}'".format(survexpath))
|
||||
if not survexpath.is_file():
|
||||
# print(" - - Regeneration ABORT\n - - from '{}'".format(survexpath))
|
||||
pass
|
||||
try:
|
||||
completed_process = subprocess.run(
|
||||
[settings.CAVERN, "--log", f"--output={settings.SURVEX_DATA}", f"{survexpath}"]
|
||||
)
|
||||
except OSError as ex:
|
||||
# propagate this to caller.
|
||||
raise OSError(completed_process.stdout) from ex
|
||||
|
||||
op3d = (Path(settings.SURVEX_DATA) / Path(survexpath).name).with_suffix(".3d")
|
||||
op3dlog = Path(op3d.with_suffix(".log"))
|
||||
|
||||
if not op3d.is_file():
|
||||
print(f" - - Regeneration FAILED\n - - from '{survexpath}'\n - - to '{op3d}'")
|
||||
print(" - - Regeneration stdout: ", completed_process.stdout)
|
||||
print(" - - Regeneration cavern log output: ", op3dlog.read_text())
|
||||
|
||||
def return3d(threedpath):
|
||||
if threedpath.is_file():
|
||||
response = HttpResponse(content=open(threedpath, "rb"), content_type="application/3d")
|
||||
response["Content-Disposition"] = f"attachment; filename={threedpath.name}"
|
||||
return response
|
||||
else:
|
||||
message = f'<h1>Path provided does not correspond to any actual 3d file.</h1><p>path: "{threedpath}"'
|
||||
# print(message)
|
||||
return HttpResponseNotFound(message)
|
||||
|
||||
survexname = Path(cave.survex_file).name # removes directories
|
||||
survexpath = Path(settings.SURVEX_DATA, cave.survex_file)
|
||||
threedname = Path(survexname).with_suffix(".3d") # removes .svx, replaces with .3d
|
||||
threedpath = Path(settings.SURVEX_DATA, threedname)
|
||||
|
||||
# These if statements need refactoring more cleanly
|
||||
if cave.survex_file:
|
||||
# print(" - cave.survex_file '{}'".format(cave.survex_file))
|
||||
if threedpath.is_file():
|
||||
# print(" - threedpath '{}'".format(threedpath))
|
||||
# possible error here as several .svx files of same names in different directories will overwrite in /3d/
|
||||
if survexpath.is_file():
|
||||
if os.path.getmtime(survexpath) > os.path.getmtime(threedpath):
|
||||
runcavern(survexpath)
|
||||
return return3d(threedpath)
|
||||
else:
|
||||
# print(" - - survexpath '{}'".format(survexpath))
|
||||
if survexpath.is_file():
|
||||
# print(" - - - survexpath '{}'".format(survexpath))
|
||||
runcavern(survexpath)
|
||||
return return3d(threedpath)
|
||||
|
||||
# Get here if cave.survex_file was set but did not correspond to a valid svx file
|
||||
if survexpath.is_file():
|
||||
# a file, but invalid format
|
||||
message = f'<h1>File is not valid .svx format.</h1><p>Could not generate 3d file from "{survexpath}"'
|
||||
else:
|
||||
# we could try to guess that 'caves-1623/' is missing,... nah.
|
||||
message = f'<h1>Path provided does not correspond to any actual file.</h1><p>path: "{survexpath}"'
|
||||
|
||||
return HttpResponseNotFound(message)
|
||||
|
||||
|
||||
def rendercave(request, cave, slug, cave_id=""):
|
||||
"""Gets the data and files ready and then triggers Django to render the template.
|
||||
The resulting html contains urls which are dispatched independently, e.g. the 'download' link
|
||||
"""
|
||||
# print(" ! rendercave:'{}' START slug:'{}' cave_id:'{}'".format(cave, slug, cave_id))
|
||||
|
||||
if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated:
|
||||
return render(request, "nonpublic.html", {"instance": cave, "cavepage": True, "cave_id": cave_id})
|
||||
else:
|
||||
# print(f" ! rendercave: slug:'{slug}' survex file:'{cave.survex_file}'")
|
||||
try:
|
||||
svx3d = Path(cave.survex_file).stem
|
||||
svxstem = Path(settings.SURVEX_DATA) / Path(cave.survex_file)
|
||||
# print(f" ! rendercave: slug:'{slug}' '' ++ '{svxstem}'")
|
||||
except:
|
||||
svx3d = ""
|
||||
svxstem = ""
|
||||
print(f" ! rendercave: slug:'{slug}' FAIL TO MANAGE survex file:'{cave.survex_file}'")
|
||||
# NOTE the template itself loads the 3d file using javascript before it loads anything else.
|
||||
# Django cannot see what this javascript is doing, so we need to ensure that the 3d file exists first.
|
||||
# So only do this render if a valid .3d file exists. TO BE DONE -Not yet as CaveView is currently disabled
|
||||
# see design docum in troggle/templates/cave.html
|
||||
# see rendercave() in troggle/core/views/caves.py
|
||||
templatefile = "cave.html"
|
||||
|
||||
if not cave_id:
|
||||
cave_id = slug # cave.unofficial_number
|
||||
context = {
|
||||
"cave_editable": True,
|
||||
"settings": settings,
|
||||
"cave": cave,
|
||||
"cavepage": True,
|
||||
"cave_id": cave_id,
|
||||
"svxstem": str(svxstem),
|
||||
"svx3d": svx3d,
|
||||
}
|
||||
|
||||
# Do not catch any exceptions here: propagate up to caller
|
||||
r = render(
|
||||
request, templatefile, context
|
||||
) # crashes here with NoReverseMatch if url not set up for 'edit_cave' in urls.py
|
||||
return r
|
||||
|
||||
|
||||
def cavepage(request, karea, subpath):
|
||||
"""Displays a cave description page
|
||||
accessed by kataster area number specifically
|
||||
OR
|
||||
accessed by cave.url specifically set in data, e.g.
|
||||
"1623/000/000" <= cave-data/1623-000.html
|
||||
"1623/41/115.htm" <= cave-data/1623-115.html
|
||||
so we have to query the database to fine the URL as we cannot rely on the url actually telling us the cave by inspection.
|
||||
|
||||
NOTE that old caves have ".html" (or ".htm") in the URL as they used to be actual files. But since 2006 these URLs
|
||||
refer to virtual pages generated on the fly by troggle, so the".html" is confusing and redundant.
|
||||
|
||||
There are also A LOT OF URLS to e.g. /1623/161/l/rl89a.htm which are IMAGES and real html files
|
||||
in cave descriptions. These need to be handled HERE too (accident of history).
|
||||
"""
|
||||
kpath = karea + subpath
|
||||
# print(f" ! cavepage:'{kpath}' kataster area:'{karea}' rest of path:'{subpath}'")
|
||||
|
||||
try:
|
||||
cave = Cave.objects.get(url=kpath) # ideally this will be unique
|
||||
except Cave.DoesNotExist:
|
||||
# probably a link to text or an image e.g. 1623/161/l/rl89a.htm i.e. an expoweb page
|
||||
# cannot assume that this is a simple cave page, for a cave we don't know.
|
||||
# print(f" ! cavepage: url={kpath} A cave of this name does not exist")
|
||||
return expo.expopage(request, kpath)
|
||||
except Cave.MultipleObjectsReturned:
|
||||
caves = Cave.objects.filter(url=kpath)
|
||||
# print(f" ! cavepage: url={kpath} multiple caves exist")
|
||||
# we should have a -several variant for the cave pages, not just the svxcaves:
|
||||
return render(request, "svxcaveseveral.html", {"settings": settings, "caves": caves})
|
||||
|
||||
try:
|
||||
r = rendercave(request, cave, cave.slug())
|
||||
return r
|
||||
except NoReverseMatch:
|
||||
if settings.DEBUG:
|
||||
raise
|
||||
else:
|
||||
message = f"Failed to render cave: {kpath} (it does exist and is unique) because of a Django URL resolution error. Check urls.py."
|
||||
return render(request, "errors/generic.html", {"message": message})
|
||||
except:
|
||||
# anything else is a new problem. Add in specific error messages here as we discover new types of error
|
||||
raise
|
||||
|
||||
@login_required_if_public
def edit_cave(request, path="", slug=None):
    """This is the form that edits all the cave data and writes out an XML file in the :expoweb: repo folder.
    The format for the file being saved is in templates/dataformat/cave.xml
    Warning. This uses Django deep magic.

    It saves the data into the database and into the html file, which it then commits to git.
    """
    message = ""
    if slug is not None:
        try:
            cave = Cave.objects.get(caveslug__slug=slug)
        except:
            return render(request, "errors/badslug.html", {"badslug": f"{slug} - from edit_cave()"})
    else:
        cave = Cave()
    if request.POST:
        form = CaveForm(request.POST, instance=cave)
        #ceFormSet = CaveAndEntranceFormSet(request.POST)
        if form.is_valid():  # and ceFormSet.is_valid():
            # print(f'! POST is valid. {cave}')
            cave = form.save(commit=False)
            if not cave.filename:
                cave.filename = form.get_area() + "-" + cave.number() + ".html"
            if not cave.url:
                cave.url = form.get_area() + "/" + cave.number() + ".html"
            cave.save()
            form.save_m2m()
            if slug is None:
                cs = CaveSlug(cave=cave, slug=cave.reference(), primary=True)
                cs.save()
            #ceinsts = ceFormSet.save(commit=False)
            #for ceinst in ceinsts:
            #    ceinst.cave = cave
            #    ceinst.save()
            try:
                cave_file = cave.file_output()
                write_and_commit([cave_file], f"Online edit of cave {cave}")
            # leave other exceptions unhandled so that they bubble up to the user interface
            except PermissionError:
                message = f"CANNOT save this file.\nPERMISSIONS incorrectly set on server for this file {cave.filename}. Ask a nerd to fix this."
                return render(request, "errors/generic.html", {"message": message})
            except subprocess.SubprocessError:
                message = f"CANNOT git on server for this file {cave.filename}. Edits may not be committed.\nAsk a nerd to fix this."
                return render(request, "errors/generic.html", {"message": message})
            if cave.entrances().count() > 0:
                return HttpResponseRedirect("/" + cave.url)
            else:
                return HttpResponseRedirect(reverse("newentrance", args=[cave.url_parent(), cave.slug()]))

    else:
        if slug is not None:
            # re-read cave data from file.
            if cave.filename:
                read_cave(cave.filename, cave=cave)

            form = CaveForm(instance=cave, initial={'cave_slug': cave.slug()})
            #ceFormSet = CaveAndEntranceFormSet(queryset=cave.caveandentrance_set.all())
        else:
            form = CaveForm()
            #ceFormSet = CaveAndEntranceFormSet(queryset=CaveAndEntrance.objects.none())

    return render(
        request,
        "editcave.html",
        {
            "form": form,
            "cave": cave,
            "message": message,
            #"caveAndEntranceFormSet": ceFormSet,
            "path": path + "/",
        },
    )
@login_required_if_public
def edit_entrance(request, path="", caveslug=None, entslug=None):
    """This is the form that edits the entrance data for a single entrance and writes out
    an XML file in the :expoweb: repo folder

    The format for the file being saved is in templates/dataformat/entrance.xml

    Warning. This uses Django deep magic for multiple forms and the CaveAndEntrance class.

    It does save the data into the database directly, not by parsing the file.
    """

    try:
        cave = Cave.objects.get(caveslug__slug=caveslug)
    except:
        return render(request, "errors/badslug.html", {"badslug": f"for cave {caveslug} - from edit_entrance()"})

    if entslug:
        try:
            entrance = Entrance.objects.get(slug=entslug)
        except:
            return render(request, "errors/badslug.html", {"badslug": f"for entrance {entslug} - from edit_entrance()"})
    else:
        entrance = None

    if entslug:
        # print(f"{caveslug=} {entslug=} {path=}")
        caveAndEntrance = CaveAndEntrance.objects.get(entrance=entrance, cave=cave)
        entlettereditable = False
    else:
        caveAndEntrance = CaveAndEntrance(cave=cave, entrance=Entrance())
        entlettereditable = True

    if request.POST:
        form = EntranceForm(request.POST, instance=entrance)
        entletter = EntranceLetterForm(request.POST, instance=caveAndEntrance)
        if form.is_valid() and entletter.is_valid():
            entrance = form.save(commit=False)
            entrance_letter = entletter.save(commit=False)
            # print(f"- POST {caveslug=} {entslug=} {path=}")
            if entslug is None:
                if entletter.cleaned_data["entrance_letter"]:
                    slugname = cave.slug() + entletter.cleaned_data["entrance_letter"]
                else:
                    slugname = cave.slug()
                entrance.slug = slugname
                entrance.cached_primary_slug = slugname
                entrance.filename = slugname + ".html"
            else:
                entrance.slug = entslug
                entrance.cached_primary_slug = entslug
                entrance.filename = entslug + ".html"
            entrance.save()
            entrance_letter.entrance = entrance
            entrance_letter.save()

            entrance_file = entrance.file_output()
            # print(f"Online edit of entrance {entrance.slug}")
            cave_file = cave.file_output()
            write_and_commit([entrance_file, cave_file], f"Online edit of entrance {entrance.slug}")
            return HttpResponseRedirect("/" + cave.url)

    else:  # GET the page, not POST, or if either of the forms were invalid when POSTed
        if entrance:
            # re-read entrance data from file.
            filename = str(entrance.slug + ".html")
            read_entrance(filename, ent=entrance)

            form = EntranceForm(instance=entrance)
            if entslug is None:
                entletter = EntranceLetterForm()
            else:
                entletter = caveAndEntrance.entrance_letter
        else:
            form = EntranceForm()
            entletter = EntranceLetterForm()

    return render(
        request,
        "editentrance.html",
        {
            "form": form,
            "cave": cave,
            "entletter": entletter,
            "entlettereditable": entlettereditable,
            "path": path + "/",
        },
    )
def ent(request, cave_id, ent_letter):
    cave = Cave.objects.filter(kataster_number=cave_id)[0]
    cave_and_ent = CaveAndEntrance.objects.filter(cave=cave).filter(entrance_letter=ent_letter)[0]
    return render(
        request,
        "entrance.html",
        {
            "cave": cave,
            "entrance": cave_and_ent.entrance,
            "letter": cave_and_ent.entrance_letter,
        },
    )


def cave_debug(request):
    ents = Entrance.objects.all().order_by("id")
    return render(
        request,
        "cave_debug.html",
        {"ents": ents},
    )


def get_entrances(request, caveslug):
    try:
        cave = Cave.objects.get(caveslug__slug=caveslug)
    except:
        return render(request, "errors/badslug.html", {"badslug": f"{caveslug} - from get_entrances()"})
    return render(
        request, "options.html", {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]}
    )


def caveQMs(request, slug, open=False):
    """Lists all the QMs on a particular cave.
    Relies on the template to find all the QMs for the cave specified in the slug, e.g. '1623-161'.
    Now working in July 2022.
    """
    try:
        cave = Cave.objects.get(caveslug__slug=slug)
    except:
        return render(request, "errors/badslug.html", {"badslug": f"{slug} - from caveQMs()"})

    if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated:
        return render(request, "nonpublic.html", {"instance": cave})
    elif open:
        return render(request, "cave_open_qms.html", {"cave": cave})
    else:
        return render(request, "cave_qms.html", {"cave": cave})


def qm(request, cave_id, qm_id, year, grade=None, blockname=None):
    """Reports on one specific QM.
    Fixed and working in July 2022 for CSV-imported QMs.

    Needs refactoring though! Uses an extremely baroque way of getting the QMs instead of querying for QM objects
    directly, presumably as a result of a baroque history.

    Many caves have several QMs with the same number, grade, year (2018) and first 8 chars of the survexblock.
    This crashes things, so the terminal char of the survexblock name was added.
    """

    year = int(year)

    if blockname == "" or not blockname:
        # CSV-imported QMs, use the old technique
        try:
            c = getCave(cave_id)
            manyqms = c.get_open_QMs() | c.get_ticked_QMs()  # set union operation
            qm = manyqms.get(number=qm_id, expoyear=year, grade=grade)
            return render(request, "qm.html", {"qm": qm})
        except QM.DoesNotExist:
            # raise
            return render(
                request,
                "errors/badslug.html",
                {
                    "badslug": f"QM.DoesNotExist blockname is empty string: {cave_id=} {year=} {qm_id=} {grade=} {blockname=}"
                },
            )
        except QM.MultipleObjectsReturned:
            # raise
            qms = manyqms.filter(number=qm_id, expoyear=year)
            return render(
                request,
                "errors/badslug.html",
                {
                    "badslug": f"QM.MultipleObjectsReturned {cave_id=} {year=} {qm_id=} {grade=} {blockname=} {qms=}"
                },
            )

    else:
        try:
            qmslug = f"{cave_id}-{year}-{blockname=}{qm_id}{grade}"  # only used in the debug prints below
            print(f"{qmslug=}")
            c = getCave(cave_id)
            manyqms = c.get_open_QMs() | c.get_ticked_QMs()  # set union operation
            qmqs = manyqms.filter(expoyear=year, blockname=blockname, number=qm_id, grade=grade)
            if len(qmqs) > 1:
                for q in qmqs:
                    print(q)
                message = f"Multiple QMs with the same cave, year, number, grade AND first-several+terminal chars of the survexblock name. (Could be caused by an incomplete databasereset.) Fix this in the survex file(s). {cave_id=} {year=} {qm_id=} {blockname=}"
                return render(request, "errors/generic.html", {"message": message})
            else:
                qm = qmqs.get(expoyear=year, blockname=blockname, number=qm_id, grade=grade)
                if qm:
                    print(
                        qm,
                        f"{qmslug=}:{cave_id=} {year=} {qm_id=} {blockname=} {qm.expoyear=} {qm.completion_description=}",
                    )
                    return render(request, "qm.html", {"qm": qm})
                else:
                    # raise
                    return render(
                        request,
                        "errors/badslug.html",
                        {"badslug": f"Failed get {cave_id=} {year=} {qm_id=} {grade=} {blockname=}"},
                    )
        except MultipleObjectsReturned:
            message = f"Multiple QMs with the same cave, year, number, grade AND first-several+terminal chars of the survexblock name. (Could be caused by an incomplete databasereset.) Fix this in the survex file(s). {cave_id=} {year=} {qm_id=} {blockname=}"
            return render(request, "errors/generic.html", {"message": message})
        except QM.DoesNotExist:
            # raise
            return render(
                request,
                "errors/badslug.html",
                {
                    "badslug": f"QM.DoesNotExist blockname is not empty string {cave_id=} {year=} {qm_id=} {grade=} {blockname=}"
                },
            )


def expo_kml(request):
    return render(
        request,
        "expo.kml",
        {
            "entrances": Entrance.objects.all()
        },
        content_type="application/vnd.google-earth.kml+xml",
    )


def expo_kmz(request):
    notablecaves = set(getnotablecaves())
    # The zip file is written to a temporary file, to save this function from using too much memory
    with tempfile.TemporaryDirectory() as tmpdirname:
        zippath = os.path.join(tmpdirname, "expo.kmz")
        with zipfile.ZipFile(zippath, "w", compression=zipfile.ZIP_DEFLATED) as myzip:
            entrances = []
            for e in Entrance.objects.all():
                html = loader.get_template("entrance_html.kml").render({"entrance": e}, request)
                soup = BeautifulSoup(html, "html.parser")
                for img in soup.find_all("img"):
                    # src_orig = img['src']
                    src = urllib.parse.urljoin(e.cavelist()[0].url.rpartition("/")[0] + "/", img["src"])
                    img["src"] = src
                    p = os.path.join(settings.EXPOWEB, src)
                    # print(e.cavelist()[0].url, e.cavelist()[0].url.rpartition("/")[0] + "/", src_orig, p)
                    if os.path.isfile(p):
                        myzip.write(p, src)
                for a in soup.find_all("a"):
                    try:
                        ao = a["href"]
                        aa = urllib.parse.urljoin(e.cavelist()[0].url.rpartition("/")[0] + "/", ao)
                        a["href"] = urllib.parse.urljoin("https://expo.survex.com/", aa)
                        print(e.cavelist()[0].url.rpartition("/")[0] + "/", ao, a["href"])
                    except:
                        pass
                html = mark_safe(soup.prettify("utf-8").decode("utf-8"))

                size = {True: "large", False: "small"}[bool(set(e.cavelist()) & notablecaves)]

                entrances.append(
                    loader.get_template("entrance.kml").render({"entrance": e, "html": html, "size": size}, request)
                )

            s = loader.get_template("expo.kml").render({"entrances": entrances}, request)
            myzip.writestr("expo.kml", s)
            for f in os.listdir(settings.KMZ_ICONS_PATH):
                p = os.path.join(settings.KMZ_ICONS_PATH, f)
                if os.path.isfile(p):
                    myzip.write(p, os.path.join("icons", f))
        return FileResponse(open(zippath, "rb"), content_type="application/vnd.google-earth.kmz")


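# Note: a KMZ file is simply a zip archive containing a KML document (plus any
# icons/images it references), which is why expo_kmz() above builds "expo.kml"
# and an icons/ folder inside a ZipFile and serves the result as one download.

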
@@ -1,112 +0,0 @@
from pathlib import Path

from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render

from troggle.core.models.survex import DrawingFile
from troggle.core.views.expo import getmimetype

# import parsers.surveys

"""Some of these views serve files as binary blobs, and simply set the mime type based on the file extension,
as does the urls.py dispatcher which sends them here. Here they should actually have the filetype checked
by looking inside the file before being served.
"""

todo = """- Need to check whether an invalid query string fails cleanly or produces multiple replies,
and render a user-friendly error page.
"""


def unescape(input):
    """These look like HTML entities, but they are not. They are tunnel-specific encodings."""
    codes = {
        "&space;": " ",
        "&quot;": '"',
        "&tab;": "\t",
        "&backslash;": "\\",
        "&newline;": "\n|\t",
        "&apostrophe": "'",
    }
    for c in codes:
        # print(c, codes[c])
        input = input.replace(c, codes[c])
    return input


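# A minimal usage sketch (hypothetical input, not from a real tunnel file):
# the encodings are plain text substitutions, applied in dictionary order, e.g.
#   unescape("up&space;the&space;pitch&quot;")  ->  'up the pitch"'

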
def dwgallfiles(request):
    """Report on all the drawing files in the system. These were loaded by parsing the entire directory tree."""
    dwgfiles = DrawingFile.objects.all()
    return render(request, "dwgfiles.html", {"dwgfiles": dwgfiles, "settings": settings})


def dwgfilesingle(request, path):
    """Sends a single binary file to the user. It could be an old PNG, PDF or SVG,
    not just Tunnel or Therion.

    The db records created on database reset import are not used when we look for an individual drawing, only
    for collections of them.

    Note the infelicity that this will deliver files that exist, but are hidden on the previous
    webpage /dwgupload/... if the user types the filename into the browser bar. Could be a problem?
    Should we validate using uploads.py dwgvaliddisp() here too?
    """
    tfile = Path(settings.DRAWINGS_DATA, path.replace(":", "#"))
    if not tfile.is_file():
        message = f"Drawing file not found in filesystem at '{path}' \n\t\tMaybe a new dataimport needs to be done to get up to date."
        return render(request, "errors/generic.html", {"message": message})

    if Path(tfile).suffix in [
        ".xml"
    ]:  # tunnel files are usually 'us-ascii' (!). And may not close all XML tags properly either.
        for encoding in ["us-ascii", "iso-8859-1", "utf-8"]:
            try:
                # print(f'attempting {encoding} for {tfile}')
                with open(tfile, encoding=encoding, errors="strict") as f:
                    print(f"- before reading any {encoding}")
                    lines = f.readlines()
                    # print(f'- finished reading {encoding}')
                    clean = []
                    for l in lines:
                        clean.append(unescape(l))  # deals with strangely embedded survex file
                    # print(f'- Cleaned and stripped.')
                    try:
                        return HttpResponse(content=clean, content_type="text/xml")
                    except:
                        return HttpResponse(
                            content=f"Render fail for this file: {tfile} Please report to a nerd. Probably Julian's fault."
                        )

            except:
                print(f"! Exception when reading {encoding}")
                continue

        print(f"! None of those encodings worked for {tfile}")
        try:
            return HttpResponse(content=open(tfile, errors="ignore"), content_type=getmimetype(tfile))
        except:
            return HttpResponse(
                content=f"Unable to understand the encoding for this file: {tfile} Please report to a nerd."
            )

    if Path(tfile).suffix in [".th2", ".th"]:
        try:
            return HttpResponse(content=open(tfile, errors="strict"), content_type="text/plain")  # default utf-8
        except:
            return HttpResponse(
                content=f"Unable to understand the encoding for this file: {tfile} Please report to a nerd."
            )

    else:  # SVG, JPG etc
        try:
            return HttpResponse(content=open(tfile, mode="rb"), content_type=getmimetype(tfile))
        except:
            try:
                return HttpResponse(content=open(tfile, mode="rb"))
            except:
                return HttpResponse(
                    content=f"Unable to understand the encoding '{getmimetype(tfile)}' for this file: {tfile} Note that Apache will do its own thing here. Please report to a nerd."
                )

@@ -1,208 +0,0 @@
import io
from pathlib import Path

import django.forms as forms
from django.http import JsonResponse
from django.shortcuts import render
from django.template import loader
from django.urls import reverse
from django.views.decorators.csrf import ensure_csrf_cookie
from PIL import Image

import piexif

import troggle.settings as settings
from troggle.core.utils import WriteAndCommitError, write_and_commit

from .auth import login_required_if_public

MAX_IMAGE_WIDTH = 1000
MAX_IMAGE_HEIGHT = 800

THUMBNAIL_WIDTH = 200
THUMBNAIL_HEIGHT = 200


def get_dir(path):
    "From a path sent from urls.py, determine the directory."
    if "/" in path:
        return path.rsplit("/", 1)[0]
    else:
        return ""


def image_selector(request, path):
    """Returns available images"""
    directory = get_dir(path)
    thumbnailspath = Path(settings.EXPOWEB) / directory / "t"
    thumbnails = []
    if thumbnailspath.is_dir():
        for f in thumbnailspath.iterdir():
            if f.is_file():
                if directory:
                    base = f"{directory}/"
                else:
                    base = ""
                thumbnail_url = reverse("expopage", args=[f"{base}t/{f.name}"])
                name_base = f.name.rsplit(".", 1)[0]
                page_path_base = Path(settings.EXPOWEB) / directory / "l"
                if (page_path_base / (f"{name_base}.htm")).is_file():
                    page_url = reverse("expopage", args=[f"{base}l/{name_base}.htm"])
                else:
                    page_url = reverse("expopage", args=[f"{base}l/{name_base}.html"])

                thumbnails.append({"thumbnail_url": thumbnail_url, "page_url": page_url})

    return render(request, "image_selector.html", {"thumbnails": thumbnails})


def reorient_image(img, exif_dict):
    if piexif.ImageIFD.Orientation in exif_dict["0th"]:
        print(exif_dict)
        orientation = exif_dict["0th"].pop(piexif.ImageIFD.Orientation)

        if orientation == 2:
            img = img.transpose(Image.FLIP_LEFT_RIGHT)
        elif orientation == 3:
            img = img.rotate(180)
        elif orientation == 4:
            img = img.rotate(180).transpose(Image.FLIP_LEFT_RIGHT)
        elif orientation == 5:
            img = img.rotate(-90, expand=True).transpose(Image.FLIP_LEFT_RIGHT)
        elif orientation == 6:
            img = img.rotate(-90, expand=True)
        elif orientation == 7:
            img = img.rotate(90, expand=True).transpose(Image.FLIP_LEFT_RIGHT)
        elif orientation == 8:
            img = img.rotate(90, expand=True)
    return img


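# Usage sketch (hypothetical filename): strip the EXIF Orientation tag and bake
# the rotation into the pixels before re-saving, so that browsers which ignore
# EXIF still display the image the right way up:
#   img = Image.open("photo.jpg")
#   exif_dict = piexif.load(img.info["exif"])
#   img = reorient_image(img, exif_dict)

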
@login_required_if_public
@ensure_csrf_cookie
def new_image_form(request, path):
    """Manages a form to upload new images"""
    directory = get_dir(path)
    if request.method == "POST":
        form = NewWebImageForm(request.POST, request.FILES, directory=directory)
        if form.is_valid():
            f = request.FILES["file_"]
            binary_data = io.BytesIO()
            for chunk in f.chunks():
                binary_data.write(chunk)
            i = Image.open(binary_data)
            if "exif" in i.info:
                exif_dict = piexif.load(i.info["exif"])
                i = reorient_image(i, exif_dict)
                exif_dict["Exif"][41729] = b"1"  # tag 41729 (SceneType) must be bytes for piexif to dump it
                exif = piexif.dump(exif_dict)  # NB: currently computed but not passed to i.save()
            else:
                exif = None

            width, height = i.size
            if width > MAX_IMAGE_WIDTH or height > MAX_IMAGE_HEIGHT:
                scale = max(width / MAX_IMAGE_WIDTH, height / MAX_IMAGE_HEIGHT)
                i = i.resize((int(width / scale), int(height / scale)), Image.ANTIALIAS)
            tscale = max(width / THUMBNAIL_WIDTH, height / THUMBNAIL_HEIGHT)
            thumbnail = i.resize((int(width / tscale), int(height / tscale)), Image.ANTIALIAS)

            ib = io.BytesIO()
            i = i.convert("RGB")
            i.save(ib, format="jpeg", quality=75)
            tb = io.BytesIO()
            thumbnail = thumbnail.convert("RGB")
            thumbnail.save(tb, format="jpeg", quality=70)
            image_rel_path, thumb_rel_path, desc_rel_path = form.get_rel_paths()
            image_page_template = loader.get_template("image_page_template.html")
            image_page = image_page_template.render(
                {
                    "header": form.cleaned_data["header"],
                    "description": form.cleaned_data["description"],
                    "photographer": form.cleaned_data["photographer"],
                    "year": form.cleaned_data["year"],
                    "filepath": f"/{image_rel_path}",
                }
            )
            image_path, thumb_path, desc_path = form.get_full_paths()
            # Create directories if required
            for full_path in image_path, thumb_path, desc_path:
                print(full_path, full_path.parent)
                full_path.parent.mkdir(parents=True, exist_ok=True)
            try:
                change_message = form.cleaned_data["change_message"]
                write_and_commit(
                    [
                        (desc_path, image_page, "utf-8"),
                        (image_path, ib.getbuffer(), False),
                        (thumb_path, tb.getbuffer(), False),
                    ],
                    f"{change_message} - online adding of an image",
                )
            except WriteAndCommitError as e:
                return JsonResponse({"error": e.message})
            linked_image_template = loader.get_template("linked_image_template.html")
            html_snippet = linked_image_template.render(
                {"thumbnail_url": f"/{thumb_rel_path}", "page_url": f"/{desc_rel_path}"}, request
            )
            return JsonResponse({"html": html_snippet})
    else:
        form = NewWebImageForm(directory=directory)
    template = loader.get_template("new_image_form.html")
    htmlform = template.render({"form": form, "path": path}, request)
    return JsonResponse({"form": htmlform})


class NewWebImageForm(forms.Form):
    """The form used by the new_image_form function"""

    header = forms.CharField(
        widget=forms.TextInput(
            attrs={"size": "60", "placeholder": "Enter title (displayed as a header and in the tab)"}
        )
    )
    file_ = forms.FileField()
    description = forms.CharField(
        widget=forms.Textarea(attrs={"cols": 80, "rows": 20, "placeholder": "Describe the photo (using HTML)"})
    )
    photographer = forms.CharField(
        widget=forms.TextInput(attrs={"size": "60", "placeholder": "Photographer's name"}), required=False
    )
    year = forms.CharField(
        widget=forms.TextInput(attrs={"size": "60", "placeholder": "Year photo was taken"}), required=False
    )
    change_message = forms.CharField(
        widget=forms.Textarea(attrs={"cols": 80, "rows": 3, "placeholder": "Describe the change made (for git)"})
    )

    def __init__(self, *args, **kwargs):
        self.directory = Path(kwargs.pop("directory"))
        super(forms.Form, self).__init__(*args, **kwargs)

    def get_rel_paths(self):
        f = self.cleaned_data["file_"]
        return [
            self.directory / "i" / (f.name.rsplit(".", 1)[0] + ".jpg"),
            self.directory / "t" / (f.name.rsplit(".", 1)[0] + ".jpg"),
            self.directory / "l" / (f.name.rsplit(".", 1)[0] + ".html"),
        ]

    def get_full_paths(self):
        return [Path(settings.EXPOWEB) / x for x in self.get_rel_paths()]

    def clean_file_(self):
        for rel_path, full_path in zip(self.get_rel_paths(), self.get_full_paths()):
            if full_path.exists():
                raise forms.ValidationError(f"File already exists in {rel_path}")
        return self.cleaned_data["file_"]


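# The three parallel paths above follow the expoweb image-directory convention
# (as used by image_selector() earlier in this file): for an upload "pitch.png"
# in directory "years/2019", get_rel_paths() returns
#   years/2019/i/pitch.jpg   - the resized image
#   years/2019/t/pitch.jpg   - the thumbnail
#   years/2019/l/pitch.html  - the generated description page

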
class HTMLarea(forms.Textarea):
    template_name = "widgets/HTMLarea.html"

    def __init__(self, *args, **kwargs):
        self.preview = kwargs.pop("preview", False)
        super(forms.Textarea, self).__init__(*args, **kwargs)

    def get_context(self, name, value, attrs):
        c = super(forms.Textarea, self).get_context(name, value, attrs)
        c["preview"] = self.preview
        return c
@@ -1,476 +0,0 @@
import os
import re
from pathlib import Path
from sys import getfilesystemencoding as sys_getfilesystemencoding
from urllib.parse import unquote as urlunquote
from urllib.parse import urljoin

import django.forms as forms
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect, render
from django.urls import reverse
from django.views.decorators.csrf import ensure_csrf_cookie

import troggle.core.views.caves
import troggle.settings as settings
from troggle.core.models.caves import Cave
from troggle.core.utils import WriteAndCommitError, write_and_commit
from troggle.core.views.editor_helpers import HTMLarea

from .auth import login_required_if_public

"""Formerly a separate package called 'flatpages', written by Martin Green in 2011.
This was NOT django.contrib.flatpages, which stores HTML in the database, so the name was changed to expopages.
Then it was incorporated into troggle directly, rather than being an unnecessary external package.
"""

default_head = """<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<title>CUCC Expedition - index</title>
<link rel="stylesheet" type="text/css" href="../css/main2.css" />
<link rel="stylesheet" type="text/css" href="../../css/main2.css" />
<link rel="stylesheet" type="text/css" href="../../../css/main2.css" />
</head>
<body>
<h1>Expo</h1>
<h2 id="tophead">CUCC Expedition</h2>

<ul id="links">
<li><a href="/index.htm">Home</a></li>
<li><a href="/infodx.htm">Main Index</a></li>
<li><a href="/handbook/index.htm">Handbook</a></li>
<li><a href="/handbook/computing/onlinesystems.html">Online systems</a></li>
<li><a href="/pubs.htm">Reports</a></li>
<li><a href="/areas.htm">Areas</a></li>
<li><a href="/caves">Caves</a></li>
<li><a href="/expedition/2019">Troggle</a></li>
<li><form name=P method=get action="/search" target="_top">
<input id="omega-autofocus" type=search name=P size=8 autofocus>
<input type=submit value="Search"></li>
</ul>"""  # this gets overwritten by templates/menu.html by django for most normal pages


def expofiles_redirect(request, filepath):
    """This is used only when running as a test system without a local copy of /expofiles/,
    i.e. when settings.EXPOFILESREMOTE is True
    """
    return redirect(urljoin("http://expo.survex.com/expofiles/", filepath))


def spider(request, _):
    return redirect("/?#")  # so that suffixes applied by spider are no longer part of the url


def map(request):
    """Serves unadorned the expoweb/map/map.html file"""
    fn = Path(settings.EXPOWEB, "map", "map.html")
    return HttpResponse(content=open(fn, "r"), content_type="text/html")


def mapfile(request, path):
    """Serves an unadorned file"""
    fn = Path(settings.EXPOWEB, "map", path)
    return HttpResponse(content=open(fn, "r"), content_type=getmimetype(fn))


def expofilessingle(request, filepath):
    """Sends a single binary file to the user; if not found, shows the parent directory.
    If the path actually is a directory, then shows that.
    """
    # print(f' - expofilessingle {filepath}')
    if filepath == "" or filepath == "/":
        return expofilesdir(request, settings.EXPOFILES, "")

    fn = urlunquote(filepath)
    fn = Path(settings.EXPOFILES, filepath)  # NB: this overwrites the unquoted value; filepath is used as-is
    if fn.is_dir():
        return expofilesdir(request, Path(fn), Path(filepath))
    if fn.is_file():
        return HttpResponse(content=open(fn, "rb"), content_type=getmimetype(filepath))  # any file
    else:
        # not a file, so show parent directory - DANGER need to check this is limited to below expofiles
        if Path(fn).parent == Path(settings.EXPOFILES).parent:
            return expofilesdir(request, Path(settings.EXPOFILES), Path(filepath).parent)
        else:
            return expofilesdir(request, Path(fn).parent, Path(filepath).parent)


def expofilesdir(request, dirpath, filepath):
    """Does a directory display. If there is an index.html file we should display that.
    - dirpath is a full Path() resolved including local machine /expofiles/
    - filepath is a Path() and it does not have /expofiles/ in it
    """
    # print(f' - expofilesdir {dirpath} settings.EXPOFILESREMOTE: {settings.EXPOFILESREMOTE}')
    if filepath:
        urlpath = "expofiles" / Path(filepath)
    else:
        urlpath = Path("expofiles")
    try:
        # probe the directory: iterdir() raises FileNotFoundError if it does not exist
        for f in dirpath.iterdir():
            pass
    except FileNotFoundError:
        # print(f' - expofilesdir error {dirpath}')
        return expofilesdir(request, dirpath.parent, filepath.parent)

    fileitems = []
    diritems = []
    for f in dirpath.iterdir():
        if f.is_dir():
            diritems.append((urlpath / f.parts[-1], str(f.parts[-1])))
        else:
            # if f.parts[-1].lower() == 'index.htm' or f.parts[-1].lower() == 'index.html':  # css cwd problem
            #     return HttpResponse(content=open(f, "rb"), content_type=getmimetype(filepath))  # any file
            #     return expofilessingle(request, str(Path(filepath / f.parts[-1])))
            fileitems.append((Path(urlpath) / f.parts[-1], str(f.parts[-1]), getmimetype(f)))
    return render(
        request,
        "dirdisplay.html",
        {"filepath": urlpath, "fileitems": fileitems, "diritems": diritems, "settings": settings},
    )


def expowebpage(request, expowebpath, path):
    """Adds menus and serves an HTML page"""
    if not os.path.isfile(expowebpath / path):
        # Should not get here if the path has suffix "_edit"
        print(f" - 404 error in expowebpage() {path}")
        return render(request, "pagenotfound.html", {"path": path}, status="404")

    # print(f' - {sys_getfilesystemencoding()=}')
    if sys_getfilesystemencoding() != "utf-8":
        return HttpResponse(
            default_head
            + "<h3>UTF-8 Parsing Failure:<br>Default file encoding on this Troggle installation is not UTF-8:<br>failure detected in expowebpage in views.expo.py</h3> Please reconfigure Debian/Apache/Django to fix this, i.e. contact Wookey. </body>"
        )

    # This next bit can be drastically simplified now that we know that the system encoding actually is utf-8
    try:
        with open(expowebpath / path, "r", encoding="utf-8") as o:
            html = o.read()
    except:
        # exception raised on debian with python 3.9.2 but not on WSL Ubuntu with python 3.9.5,
        # because debian was assuming the default text encoding was 'ascii'. Now specified explicitly, so should be OK
        try:
            with open(expowebpath / path, "rb") as o:
                html = str(o.read()).replace("<h1>", "<h1>BAD NON-UTF-8 characters here - ")
            html = html.replace("\\n", "\n")
            html = html.replace("\\r", "")
            html = html.replace("\\t", "\t")
            html = html.replace("\\'", "'")
        except:
            return HttpResponse(
                default_head
                + "<h3>UTF-8 Parsing Failure:<br>Page could not be parsed using UTF-8:<br>failure detected in expowebpage in views.expo.py</h3> Please edit this <var>:expoweb:</var> page to replace dubious umlauts and £ symbols with correct HTML entities e.g. <em>&amp;pound;</em>. </body>"
            )

    m = re.search(
        r"(.*)<\s*head([^>]*)>(.*)<\s*/head\s*>(.*)<\s*body([^>]*)>(.*)<\s*/body\s*>(.*)",
        html,
        re.DOTALL + re.IGNORECASE,
    )
    if m:
        preheader, headerattrs, head, postheader, bodyattrs, body, postbody = m.groups()
    else:
        return HttpResponse(
            default_head
            + html
            + "<h3>HTML Parsing failure:<br>Page could not be parsed into header and body:<br>failure detected in expowebpage in views.expo.py</h3> Please edit this <var>:expoweb:</var> page to be in the expected full HTML format </body>"
        )
    m = re.search(r"<title>(.*)</title>", head, re.DOTALL + re.IGNORECASE)
    if m:
        (title,) = m.groups()
    else:
        title = ""
    m = re.search(r"noedit", head, re.DOTALL + re.IGNORECASE)
    if m:
        editable = False
        # print(f"NOEDIT set")
    else:
        editable = os.access(expowebpath / path, os.W_OK)  # are file permissions writeable?
        # print(f"EDITABLE ? {editable}\n{head}")
    has_menu = False
    menumatch = re.match(r'(.*)<ul id="links">', body, re.DOTALL + re.IGNORECASE)
    if menumatch:
        has_menu = False  # NB: both branches leave has_menu False, so menumatch is currently unused

    # Determine which caves this page relates to
    m = re.search(r"(162\d\/[^\/]+)[\/\.]", path, re.DOTALL + re.IGNORECASE)
    if m:
        (path_start,) = m.groups()
        parent_caves = Cave.objects.filter(url__startswith=path_start)
    else:
        parent_caves = None

    # Determine if this page relates to a particular year
    m = re.search(r"years\/(\d\d\d\d)\/.*", path, re.DOTALL + re.IGNORECASE)
    if m:
        (year,) = m.groups()
    else:
        year = None

    # Determine if this page is part of the handbook
    handbook = path.startswith("handbook")

    return render(
        request,
        "expopage.html",
        {
            "editable": editable,
            "path": path,
            "title": title,
            "body": body,
            "homepage": (path == "index.htm"),
            "has_menu": has_menu,
            "year": year,
            "handbook": handbook,
            "parent_caves": parent_caves,
        },
    )


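# Sketch of what the page-splitting regex in expowebpage() captures, on a
# minimal (hypothetical) page:
#   html = "<html><head><title>T</title></head><body>B</body></html>"
#   m.groups() -> ("<html>", "", "<title>T</title>", "", "", "B", "</html>")
# i.e. preheader, headerattrs, head, postheader, bodyattrs, body, postbody.

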
def mediapage(request, subpath=None, doc_root=None):
    """This is for special prefix paths /photos/, /site_media/, /static/ etc.
    as defined in urls.py. If given a directory, gives a failure page.
    """
    # print(" - XXXXX_ROOT: {} ...{}".format(doc_root, subpath))
    if doc_root is not None:
        filetobeopened = Path(doc_root, subpath)
        if filetobeopened.is_dir():
            return render(request, "nodirlist.html", {"path": subpath})
        try:
            return HttpResponse(content=open(filetobeopened, "rb"), content_type=getmimetype(subpath))
        except IOError:
            return render(request, "pagenotfound.html", {"path": subpath}, status="404")
    else:
        return render(request, "pagenotfound.html", {"path": subpath}, status="404")


def expopage(request, path):
    """Either renders an HTML page from expoweb with all the menus,
    or serves an unadorned binary file with a mime type
    """
    # print(" - EXPOPAGES delivering the file: '{}':{} as MIME type: {}".format(request.path, path, getmimetype(path)), flush=True)

    if path.startswith("noinfo") and settings.PUBLIC_SITE and not request.user.is_authenticated:
        return HttpResponseRedirect(urljoin(reverse("expologin"), f"?next={request.path}"))

    if path.startswith("admin/"):
        # don't even attempt to handle these sorts of mistakes
        return HttpResponseRedirect("/admin/")

    expowebpath = Path(settings.EXPOWEB)

    if path == "":
        return expowebpage(request, expowebpath, "index.htm")

    if path.endswith(".htm") or path.endswith(".html"):
        return expowebpage(request, expowebpath, path)

    if Path(expowebpath / path).is_dir():
        for p in ["index.html", "index.htm"]:
            if (expowebpath / path / p).is_file():
                # This needs to reset the path to the new subdirectory
                return HttpResponseRedirect("/" + str(Path(path) / p))
        return render(request, "pagenotfound.html", {"path": Path(path) / "index.html"}, status="404")

    if path.endswith("/"):
        # we already know it is not a directory.
        # the final / may have been appended by middleware if there was no page without it.
        # do not redirect to a file path without the slash as we may get in a loop. Let the user fix it:
        return render(request, "dirnotfound.html", {"path": path, "subpath": path[0:-1]})

    # So it must be a file in /expoweb/ but not .htm or .html - probably an image, maybe a txt file
    filetobeopened = expowebpath / path

    # print(f' - {sys_getfilesystemencoding()=}')
    if sys_getfilesystemencoding() != "utf-8":
        return HttpResponse(
            default_head
            + "<h3>UTF-8 Parsing Failure:<br>Default file encoding on this Troggle installation is not UTF-8:<br>failure detected in expowebpage in views.expo.py</h3> Please reconfigure Debian/Apache/Django to fix this, i.e. contact Wookey. </body>"
        )

    try:
        content = open(filetobeopened, "rb")
        content_type = getmimetype(path)
        return HttpResponse(content=content, content_type=content_type)
    except IOError:
        return render(request, "pagenotfound.html", {"path": path}, status="404")


def getmimetype(path):
    """Our own version rather than relying on what is provided by the python library. Note that when
    Apache or nginx is used to deliver /expofiles/ it will use its own idea of mimetypes and
    not these.
    """
    path = str(path)
    if path.lower().endswith(".css"):
        return "text/css"
    if path.lower().endswith(".txt"):
        return "text/plain"
    if path.lower().endswith(".js"):
        return "application/javascript"
    if path.lower().endswith(".json"):
        return "application/json"
    if path.lower().endswith(".ico"):
        return "image/vnd.microsoft.icon"
    if path.lower().endswith(".png"):
        return "image/png"
    if path.lower().endswith(".tif"):
        return "image/tiff"
    if path.lower().endswith(".gif"):
        return "image/gif"
    if path.lower().endswith(".jpeg"):
        return "image/jpeg"
    if path.lower().endswith(".jpg"):
        return "image/jpeg"
    if path.lower().endswith("svg"):
        return "image/svg+xml"
    if path.lower().endswith("xml"):
        return "application/xml"  # we use "text/xml" for tunnel files
    if path.lower().endswith(".pdf"):
        return "application/pdf"
    if path.lower().endswith(".ps"):
        return "application/postscript"
    if path.lower().endswith(".svx"):
        return "application/x-survex-svx"
    if path.lower().endswith(".3d"):
        return "application/x-survex-3d"
    if path.lower().endswith(".pos"):
        return "application/x-survex-pos"
    if path.lower().endswith(".err"):
        return "application/x-survex-err"
    if path.lower().endswith(".odt"):
        return "application/vnd.oasis.opendocument.text"
    if path.lower().endswith(".ods"):
        return "application/vnd.oasis.opendocument.spreadsheet"
    if path.lower().endswith(".docx"):
        return "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
    if path.lower().endswith(".xlsx"):
        return "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    if path.lower().endswith(".gz"):
        return "application/gzip"
    if path.lower().endswith(".7z"):
        return "application/x-7z-compressed"
    if path.lower().endswith(".zip"):
        return "application/zip"
    return ""


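# Usage sketch: unknown extensions fall through to "", which leaves the browser
# to guess. E.g. getmimetype("1623/204/204.svx") -> "application/x-survex-svx",
# getmimetype("report.pdf") -> "application/pdf", getmimetype("foo.xyz") -> "".

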
@login_required_if_public
@ensure_csrf_cookie
def editexpopage(request, path):
    """Manages the 'Edit this Page' capability for expo handbook and other html pages.
    Relies on HTML5 or javascript to provide the in-browser editing environment.
    """
    try:
        # if it is a cave, not a webpage at all.
        r = Cave.objects.get(url=path)
        return troggle.core.views.caves.editCave(request, r.cave.slug)
    except Cave.DoesNotExist:
        pass

    print(f" - {sys_getfilesystemencoding()=}")
    if sys_getfilesystemencoding() != "utf-8":
        return HttpResponse(
            default_head
            + "<h3>UTF-8 Parsing Failure:<br>Default file encoding on this Troggle installation is not UTF-8:<br>failure detected in expowebpage in views.expo.py</h3> Please reconfigure Debian/Apache/Django to fix this, i.e. contact Wookey. </body>"
        )

    try:
        filepath = Path(settings.EXPOWEB) / path
        o = open(filepath, "r", encoding="utf8")
        html = o.read()
        autogeneratedmatch = re.search(
            r"\<\!--\s*(.*?(Do not edit|It is auto-generated).*?)\s*--\>", html, re.DOTALL + re.IGNORECASE
        )
        if autogeneratedmatch:
            return HttpResponse(autogeneratedmatch.group(1))
        m = re.search(r"(.*)<head([^>]*)>(.*)</head>(.*)<body([^>]*)>(.*)</body>(.*)", html, re.DOTALL + re.IGNORECASE)
        if m:
            filefound = True
            preheader, headerargs, head, postheader, bodyargs, body, postbody = m.groups()
            # linksmatch = re.match(r'(.*)(<ul\s+id="links">.*)', body, re.DOTALL + re.IGNORECASE)
            # if linksmatch:
            #     body, links = linksmatch.groups()

        else:
            return HttpResponse(
                default_head
                + html
                + "<h3>HTML Parsing failure:<br>Page could not be parsed into header and body:<br>failure detected in expowebpage in views.expo.py</h3> Please edit this <var>:expoweb:</var> page to be in the expected full HTML format.</body>"
            )
    except IOError:
        print("### File not found ### ", filepath)
        filefound = False

    if request.method == "POST":  # If the form has been submitted...
        pageform = ExpoPageForm(request.POST)  # A form bound to the POST data
        if pageform.is_valid():  # Form valid, therefore write file
            # print("### \n", str(pageform)[0:300])
            # print("### \n csrfmiddlewaretoken: ", request.POST['csrfmiddlewaretoken'])
            if filefound:
                headmatch = re.match(r"(.*)<title>.*</title>(.*)", head, re.DOTALL + re.IGNORECASE)
                if headmatch:
                    head = (
                        headmatch.group(1)
                        + "<title>"
                        + pageform.cleaned_data["title"]
                        + "</title>"
                        + headmatch.group(2)
                    )
                else:
                    head = "<title>" + pageform.cleaned_data["title"] + "</title>"
            else:
                head = "<title>" + pageform.cleaned_data["title"] + "</title>"
                preheader = "<html>"
                headerargs = ""
                postheader = ""
                bodyargs = ""
                postbody = "</html>\n"
            body = pageform.cleaned_data["html"]
            body = body.replace("\r", "")
            result = f"{preheader}<head{headerargs}>{head}</head>{postheader}<body{bodyargs}>\n{body}</body>{postbody}"

            if not filefound or result != html:  # Check if the content changed at all
                try:
                    change_message = pageform.cleaned_data["change_message"]
                    write_and_commit([(filepath, result, "utf-8")], f"{change_message} - online edit of {path}")
                except WriteAndCommitError as e:
                    return render(request, "errors/generic.html", {"message": e.message})

            return HttpResponseRedirect(reverse("expopage", args=[path]))  # Redirect after POST
    else:
        if filefound:
            m = re.search(r"<title>(.*)</title>", head, re.DOTALL + re.IGNORECASE)
            if m:
                (title,) = m.groups()
            else:
                title = ""
            pageform = ExpoPageForm(initial={"html": body, "title": title})
        else:
            pageform = ExpoPageForm()
    return render(
        request,
        "editexpopage.html",
        {
            "path": path,
            "form": pageform,
        },
    )


class ExpoPageForm(forms.Form):
    """The form used by the editexpopage function"""

    title = forms.CharField(
        widget=forms.TextInput(attrs={"size": "60", "placeholder": "Enter title (displayed in tab)"})
    )
    html = forms.CharField(
        widget=HTMLarea(
            attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}, preview=True
        ),
        required=False,
    )
    change_message = forms.CharField(
        widget=forms.Textarea(
            attrs={"cols": 80, "rows": 3, "placeholder": "Describe the change made (for version control records)"}
        )
    )
@@ -1,248 +0,0 @@
from django.db.models import Q
from django.shortcuts import render
from django.views.generic.list import ListView

import troggle.settings as settings
from troggle.core.models.logbooks import LogbookEntry, PersonLogEntry, QM
from troggle.core.models.survex import SurvexBlock, SurvexFile
from troggle.core.models.troggle import Expedition, Person
from troggle.core.models.wallets import Wallet
from troggle.core.utils import TROG
from troggle.parsers.imports import import_logbook


"""These views are for logbook items when they appear in an 'expedition' page,
and for persons: their individual pages and their personexpedition pages.

It uses the global object TROG to hold some cached pages.
"""

todo = """Fix the get_person_chronology() display bug.
"""


def notablepersons(request):
    def notabilitykey(person):
        return person.notability()

    persons = Person.objects.all()
    # From what I can tell, "persons" seems to be the table rows, while "pcols" is the table columns. - AC 16 Feb 09
    pcols = []
    ncols = 4
    nc = int((len(persons) + ncols - 1) / ncols)
    for i in range(ncols):
        pcols.append(persons[i * nc : (i + 1) * nc])

    notablepersons = []
    # Needed recoding because of Django CVE-2021-45116
    for person in persons:
        if person.bisnotable():
            notablepersons.append(person)
    notablepersons.sort(key=notabilitykey, reverse=True)

    return render(
        request, "notablepersons.html", {"persons": persons, "pcols": pcols, "notablepersons": notablepersons}
    )


def expedition(request, expeditionname):
    """Returns a rendered page for one expedition, specified by the year, e.g. '2019'.
    If page caching is enabled, it caches the dictionaries used to render the template page.

    This is not as difficult to understand as it looks. Yes, there are many levels of indirection, with
    multiple trees being traversed at the same time. And the Django special syntax makes this hard for
    normal Python programmers.

    Remember that 'personexpedition__expedition' is interpreted by Django to mean the
    'expedition' object which is connected by a foreign key to the 'personexpedition'
    object, which is a field of the PersonLogEntry object:
        PersonLogEntry.objects.filter(personexpedition__expedition=expo)

    Queries are not evaluated to hit the database until a result is actually used. Django
    does lazy evaluation.
    """
    try:
        expo = Expedition.objects.get(year=int(expeditionname))
    except:
        message = (
            "Expedition not found - database apparently empty, you probably need to do a full re-import of all data."
        )
        return render(request, "errors/generic.html", {"message": message})

    if request.user.is_authenticated:
        logged_in = True
        if "reload" in request.GET:
            expo.logbookentry_set.all().delete()
            import_logbook(year=expo.year)
    else:
        logged_in = False

    ts = TROG["pagecache"]["expedition"]  # not much use unless single user!
    if settings.CACHEDPAGES:
        nexpos = len(TROG["pagecache"]["expedition"])
        # print(f'! - expo {expeditionname} CACHEDPAGES {nexpos} expo pages in cache.')
        if expeditionname in ts:
            # print(f'! - expo {expeditionname} using cached page')
            return render(request, "expedition.html", {**ts[expeditionname], "logged_in": logged_in})

    expeditions = Expedition.objects.all()  # top menu only, evaluated only when the template renders

    entries = expo.logbookentry_set.all()
    blocks = expo.survexblock_set.all()
    dateditems = list(entries) + list(blocks)  # evaluates the Django query and hits the db
    dates = sorted(set([item.date for item in dateditems]))

    allpersonlogentries = PersonLogEntry.objects.filter(personexpedition__expedition=expo)

    personexpodays = []
    for personexpedition in expo.personexpedition_set.all():
        expotrips = allpersonlogentries.filter(personexpedition=personexpedition)  # lazy
        expoblocks = blocks.filter(survexpersonrole__personexpedition=personexpedition)

        prow = []

        for date in dates:
            personentries = expotrips.filter(logbook_entry__date=date)  # lazy
            personblocks = set(expoblocks.filter(date=date))  # not lazy
            pcell = {}
            pcell["personentries"] = personentries
            pcell["survexblocks"] = personblocks
            if issunday := (date.weekday() == 6):  # WALRUS
                pcell["sunday"] = issunday
            prow.append(pcell)
        personexpodays.append({"personexpedition": personexpedition, "personrow": prow})

    ts[expeditionname] = {
        "expedition": expo,
        "expeditions": expeditions,
        "personexpodays": personexpodays,
        "settings": settings,
        "dateditems": dateditems,
        "dates": dates,
    }
    TROG["pagecache"]["expedition"][expeditionname] = ts[expeditionname]

    return render(request, "expedition.html", {**ts[expeditionname], "logged_in": logged_in})


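# The double-underscore traversal described in the docstring above, spelled out
# (a sketch; 'expo' is an Expedition instance):
#   PersonLogEntry.objects.filter(personexpedition__expedition=expo)
# is roughly the SQL join
#   SELECT ... FROM personlogentry
#     JOIN personexpedition ON personlogentry.personexpedition_id = personexpedition.id
#    WHERE personexpedition.expedition_id = <expo.id>;
# and, being a queryset, nothing hits the database until it is iterated.

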
class Expeditions_tsvListView(ListView):
    """This uses the Django built-in shortcut mechanism.
    It defaults to using a template with the name <app-label>/<model-name>_list.html.
    https://www.agiliq.com/blog/2017/12/when-and-how-use-django-listview/
    https://developer.mozilla.org/en-US/docs/Learn/Server-side/Django/Generic_views
    Either a queryset variable or a get_queryset() function is used, but neither is needed
    if you want all the objects of a particular type, in which case just set model = <object>
    """

    template_name = "core/expeditions_tsv_list.html"  # if not present then uses core/expedition_list.html
    # queryset = Expedition.objects.all()
    # context_object_name = 'expedition'
    model = Expedition  # equivalent to .objects.all() for a queryset


class Expeditions_jsonListView(ListView):
    template_name = "core/expeditions_json_list.html"
    model = Expedition


class QMs_jsonListView(ListView):
    template_name = "core/QMs_json_list.html"
    model = QM


def person(
    request,
    first_name="",
    last_name="",
):
    try:
        this_person = Person.objects.get(first_name=first_name, last_name=last_name)
    except:
        message = f"Person not found '{first_name} {last_name}' - possibly Scottish? (See our <a href=\"/handbook/troggle/namesredesign.html\">Proposal to fix this</a>)"
        return render(request, "errors/generic.html", {"message": message})

    return render(request, "person.html", {"person": this_person})


def get_person_chronology(personexpedition):
    """
    This is just a nasty convoluted way of trying to make the template do more work than it is sensible to ask it to do.
    Rewrite more simply with the logic in the python, not in the Django template language (you bastard Curtis).
    """
    res = {}
    for personlogentry in personexpedition.personlogentry_set.all():
        a = res.setdefault(personlogentry.logbook_entry.date, {})
        a.setdefault("personlogentries", []).append(personlogentry)

    for personrole in personexpedition.survexpersonrole_set.all():
        if personrole.survexblock.date:  # avoid bad data from another bug
            a = res.setdefault(personrole.survexblock.date, {})
            a.setdefault("personroles", []).append(personrole.survexblock)

    # build up the tables
    rdates = sorted(list(res.keys()))

    res2 = []
    for rdate in rdates:
        personlogentries = res[rdate].get("personlogentries", [])
        personroles = res[rdate].get("personroles", [])
        for n in range(max(len(personlogentries), len(personroles))):
            res2.append(
                (
                    (n == 0 and rdate or "--"),
                    (n < len(personlogentries) and personlogentries[n]),
                    (n < len(personroles) and personroles[n]),
                )
            )

    return res2


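# Shape of the rows returned above (a sketch, values hypothetical): the date
# appears only on the first row for that day, the shorter column is padded
# with False, and "--" marks the repeated-date rows:
#   [(date(2019, 7, 20), <PersonLogEntry>, <SurvexBlock>),
#    ("--", <PersonLogEntry>, False),
#    (date(2019, 7, 21), False, <SurvexBlock>)]

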
def personexpedition(request, first_name="", last_name="", year=""):
    person = Person.objects.get(first_name=first_name, last_name=last_name)
    this_expedition = Expedition.objects.get(year=year)
    personexpedition = person.personexpedition_set.get(expedition=this_expedition)
    personchronology = get_person_chronology(personexpedition)
    # for pc in personchronology:
    #     print(pc)
    return render(
        request, "personexpedition.html", {"personexpedition": personexpedition, "personchronology": personchronology}
    )


def logbookentry(request, date, slug):
    # start = time.time()
    trips = LogbookEntry.objects.filter(date=date)  # all the trips, not just this one
    this_logbookentry = trips.filter(date=date, slug=slug)

    if this_logbookentry:
        if len(this_logbookentry) > 1:
            # BUG
            return render(request, "object_list.html", {"object_list": this_logbookentry})
        else:
            # https://stackoverflow.com/questions/739776/how-do-i-do-an-or-filter-in-a-django-query
            wallets = Wallet.objects.filter(Q(survexblock__date=date) | Q(walletdate=date)).distinct()
            svxothers = SurvexFile.objects.filter(survexblock__date=date).distinct()

            this_logbookentry = this_logbookentry[0]
            # This is the only page that uses next_.. and prev_..
            # and it is calculated on the fly in the model
            return render(
                request,
                "logbookentry.html",
                {"logbookentry": this_logbookentry, "trips": trips, "svxothers": svxothers, "wallets": wallets},
            )
    else:
        msg = f' Logbook entry slug:"{slug}" not found in database on date:"{date}" '
        print(msg)
        return render(request, "errors/generic.html", {"message": msg})


def get_people(request, expeditionslug):
    exp = Expedition.objects.get(year=expeditionslug)
    return render(request, "options.html", {"items": [(pe.slug, pe.name) for pe in exp.personexpedition_set.all()]})


def get_logbook_entries(request, expeditionslug):
    exp = Expedition.objects.get(year=expeditionslug)
    return render(
        request, "options.html", {"items": [(le.slug, f"{le.date} - {le.title}") for le in exp.logbookentry_set.all()]}
    )
@@ -1,239 +0,0 @@
import re
from pathlib import Path

from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render
from django.template import loader

from troggle.core.models.caves import Cave
from troggle.core.models.logbooks import LogbookEntry  # , PersonLogEntry

# from databaseReset import reinit_db  # don't do this. databaseReset runs code *at import time*
from troggle.core.models.troggle import Expedition
from troggle.parsers.imports import (
    import_caves,
    import_drawingsfiles,
    import_logbooks,
    import_people,
    import_QMs,
    import_survex,
    import_surveyscans,
)

from .auth import login_required_if_public

"""Utility functions and code to serve the control panel and an individual user's
progress and task list (deprecated, as we do not have individual user login).
"""

todo = """
- [Low priority] Fix Login page so that it produces the frontpage or
  redirects to the page which produced the login prompt requirement.
"""


def todos(request, module):
    """Produces the todo text from each module.
    We could automate this to find all those strings automatically.
    """
    from troggle.core.TESTS.tests import todo as tests
    from troggle.core.forms import todo as forms
    from troggle.core.middleware import todo as middleware
    from troggle.core.models.caves import todo as modelcaves
    from troggle.core.models.logbooks import todo as modellogbooks
    from troggle.core.views.caves import todo as viewcaves
    from troggle.core.views.drawings import todo as viewdrawings
    from troggle.core.views.logbooks import todo as viewlogbooks
    from troggle.core.views.other import todo as viewother
    from troggle.core.views.survex import todo as viewsurvex
    from troggle.core.views.uploads import todo as viewuploads
    from troggle.core.views.wallets_edit import todo as viewwallets_edit
    from troggle.parsers.caves import todo as parserscaves
    from troggle.parsers.drawings import todo as parsersdrawings
    from troggle.parsers.locations import todo as parserslocations
    from troggle.parsers.logbooks import todo as parserslogbooks
    from troggle.parsers.survex import todo as parserssurvex
    from troggle.urls import todo as todourls

    tododict = {
        "tests": tests,
        "forms": forms,
        "middleware": middleware,
        "models/caves": modelcaves,
        "models/logbooks": modellogbooks,
        "views/caves": viewcaves,
        "views/drawings": viewdrawings,
        "views/logbooks": viewlogbooks,
        "views/other": viewother,
        "views/survex": viewsurvex,
        "views/uploads": viewuploads,
        "views/wallets_edit": viewwallets_edit,
        "parsers/caves": parserscaves,
        "parsers/drawings": parsersdrawings,
        "parsers/locations": parserslocations,
        "parsers/logbooks": parserslogbooks,
        "parsers/survex": parserssurvex,
        "urls": todourls,
    }
    return render(request, "core/todos.html", {"tododict": tododict})


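# The convention relied on above: each listed module exposes a module-level
# string named "todo", and this view just collects them onto one page, e.g.
#   todo = """- first outstanding task
#   - second outstanding task
#   """

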
def troggle404(request): # cannot get this to work. Handler404 in urls.py not right syntax
|
||||
"""Custom 404 page to be used even when Debug=True
|
||||
https://blog.juanwolf.fr/posts/programming/how-to-create-404-page-django/
|
||||
"""
|
||||
context = RequestContext(request)
|
||||
# context['caves'] = Cave.objects.all()
|
||||
return render(request, ("errors/generic.html", context.flatten()))
|
||||
|
||||
|
||||
def frontpage(request):
|
||||
"""never seen in common practice. Logon should redirect here when this is more useful"""
|
||||
# the messages system does a popup on this page if there is a recent message, e.g. from the admin site actions.
|
||||
# via django.contrib.messages.middleware.MessageMiddleware
|
||||
# this is set in the templates.
|
||||
if request.user.is_authenticated:
|
||||
return render(request, "tasks.html")
|
||||
|
||||
expeditions = Expedition.objects.order_by("-year")
|
||||
logbookentry = LogbookEntry
|
||||
cave = Cave
|
||||
# from django.contrib.admin.templatetags import log
|
||||
return render(request, "frontpage.html", locals())
|
||||
|
||||
|
||||
@login_required_if_public
|
||||
def controlpanel(request):
|
||||
"""Admin requires expoadmin user logged on
|
||||
Mostly disabled apart from logbook export
|
||||
DANGEROUS, these import functions kill the ground under your feet !
|
||||
"""
|
||||
jobs_completed = []
|
||||
|
||||
def process_imports():
|
||||
"""databaseReset.py
|
||||
jq.enq("reinit",reinit_db)
|
||||
jq.enq("caves",import_caves)
|
||||
jq.enq("people",import_people)
|
||||
jq.enq("scans",import_surveyscans)
|
||||
jq.enq("logbooks",import_logbooks)
|
||||
jq.enq("QMs",import_QMs)
|
||||
jq.enq("drawings",import_drawingsfiles)
|
||||
jq.enq("survex",import_survex)
|
||||
"""
|
||||
if request.POST.get("import_caves", False):
|
||||
import_caves()
|
||||
jobs_completed.append("Caves")
|
||||
if request.POST.get("import_people", False):
|
||||
import_people()
|
||||
jobs_completed.append("People")
|
||||
if request.POST.get("import_surveyscans", False):
|
||||
import_surveyscans()
|
||||
jobs_completed.append("Scans")
|
||||
if request.POST.get("import_logbooks", False):
|
||||
import_logbooks()
|
||||
jobs_completed.append("Logbooks")
|
||||
if request.POST.get("import_QMs", False):
|
||||
import_QMs()
|
||||
jobs_completed.append("QMs")
|
||||
if request.POST.get("import_drawingsfiles", False):
|
||||
import_drawingsfiles()
|
||||
jobs_completed.append("Drawings")
|
||||
if request.POST.get("import_survex", False):
|
||||
import_survex()
|
||||
jobs_completed.append("Survex")
|
||||
|
||||
print("", flush=True)
|
||||
|
||||
if not request.user.is_superuser: # expoadmin is both .is_staff and ._is_superuser
|
||||
return render(
|
||||
request,
|
||||
"controlPanel.html",
|
||||
{"error": 'You are logged in, but not logged in as "expoadmin". \nLogout and login again to contnue.'},
|
||||
)
|
||||
else:
|
||||
if request.method == "POST":
|
||||
# reinit_db()
|
||||
process_imports()
|
||||
return render(
|
||||
request,
|
||||
"controlPanel.html",
|
||||
{"expeditions": Expedition.objects.all(), "jobs_completed": jobs_completed},
|
||||
)
|
||||
else:
|
||||
return render(
|
||||
request,
|
||||
"controlPanel.html",
|
||||
{"expeditions": Expedition.objects.all(), "jobs_completed": jobs_completed},
|
||||
)
|
||||
|
||||
|
||||
def exportlogbook(request, year=None, extension=None):
|
||||
"""Constructs, from the database, a complete HTML formatted logbook
|
||||
for the current year. Formats available are HTML2005. Other formats
|
||||
have been retired.
|
||||
|
||||
There are no images stored in the database. However links to images work in the HTML text of a logbook entry.
|
||||
|
||||
This function is the recipient of the POST action of the export form in the control panel
|
||||
"""
|
||||
|
||||
def lbeKey(lbe):
|
||||
"""This function goes into a lexicographic sort function"""
|
||||
return str(lbe.date)
|
||||
|
||||
if not request.method == "POST":
|
||||
return render(request, "controlPanel.html", {"expeditions": Expedition.objects.all(), "jobs_completed": ""})
|
||||
else:
|
||||
print(f"Logbook export {request.POST}")
|
||||
|
||||
year = request.POST["year"]
|
||||
current_expedition = Expedition.objects.get(year=year)
|
||||
logbook_entries = LogbookEntry.objects.filter(expedition=current_expedition).order_by(
|
||||
"date"
|
||||
) # need to be sorted by date!
|
||||
|
||||
print(f"Logbook has {len(logbook_entries)} entries in it.")
|
||||
|
||||
extension = "html"
|
||||
response = HttpResponse(content_type="text/html")
|
||||
style = "2005"
|
||||
|
||||
filename = "logbook-new-format." + extension
|
||||
template = "logbook" + style + "style." + extension
|
||||
response["Content-Disposition"] = "attachment; filename=" + filename
|
||||
t = loader.get_template(template)
|
||||
logbookfile = t.render({"logbook_entries": logbook_entries})
|
||||
|
||||
endpath = Path(settings.EXPOWEB, "years", year, "endmatter.html")
|
||||
endmatter = ""
|
||||
if endpath.is_file():
|
||||
try:
|
||||
with open(endpath, "r") as end:
|
||||
endmatter = end.read()
|
||||
except:
|
||||
print(" ! Very Bad Error opening " + endpath)
|
||||
|
||||
frontpath = Path(settings.EXPOWEB, "years", year, "frontmatter.html")
|
||||
if frontpath.is_file():
|
||||
try:
|
||||
with open(frontpath, "r") as front:
|
||||
frontmatter = front.read()
|
||||
except:
|
||||
print(" ! Very Bad Error opening " + frontpath)
|
||||
logbookfile = re.sub(r"<body>", "<body>\n" + frontmatter + endmatter, logbookfile)
|
||||
else:
|
||||
logbookfile = re.sub(r"<body>", f"<body>\n<h1>Expo {year}</h1>\n" + endmatter, logbookfile)
|
||||
|
||||
dir = Path(settings.EXPOWEB) / "years" / year
|
||||
filepath = Path(dir, filename)
|
||||
with open(filepath, "w") as lb:
|
||||
lb.writelines(logbookfile)
|
||||
|
||||
# print(f'Logbook exported to {filepath}')
|
||||
completed = f'Logbook exported to <a href="/years/{year}/{filename}">{filename}</a>'
|
||||
|
||||
return render(
|
||||
request, "controlPanel.html", {"expeditions": Expedition.objects.all(), "jobs_completed": [completed]}
|
||||
)
|
||||
@@ -1,317 +0,0 @@
|
||||
import os
|
||||
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import render
|
||||
|
||||
import troggle.settings as settings
|
||||
from troggle.core.models.caves import Area, Cave, Entrance
|
||||
from troggle.core.views.caves import caveKey
|
||||
|
||||
# from pathlib import Path
|
||||
|
||||
# from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
|
||||
|
||||
|
||||
""" Generates the prospecting guide document.
|
||||
|
||||
Also produces the overlay of points on top of a prospecting_image map - to be deleted.
|
||||
Not working with the recent PIL (Pillow) image package, so the image imports were removed.
|
||||
"""
|
||||
|
||||
AREANAMES = [
|
||||
# ('', 'Location unclear'),
|
||||
("1a", "1a – Plateau: around Top Camp"),
|
||||
("1b", "1b – Western plateau near 182"),
|
||||
("1c", "1c – Eastern plateau near 204 walk-in path"),
|
||||
("1d", "1d – Further plateau around 76"),
|
||||
("2a", "2a – Southern Schwarzmooskogel near 201 path and the Nipple"),
|
||||
("2b", "2b – Eishöhle area"),
|
||||
("2b or 4 (unclear)", "2b or 4 (unclear)"),
|
||||
("2c", "2c – Kaninchenhöhle area"),
|
||||
("2d", "2d – Steinbrückenhöhle area"),
|
||||
("3", "3 – Bräuning Alm"),
|
||||
("4", "4 – Kratzer valley"),
|
||||
("5", "5 – Schwarzmoos-Wildensee"),
|
||||
("6", "6 – Far plateau"),
|
||||
("1626 or 6 (borderline)", "1626 or 6 (borderline)"),
|
||||
("7", "7 – Egglgrube"),
|
||||
("8a", "8a – Loser south face"),
|
||||
("8b", "8b – Loser below Dimmelwand"),
|
||||
("8c", "8c – Augst See"),
|
||||
("8d", "8d – Loser-Hochganger ridge"),
|
||||
("9", "9 – Gschwandt Alm"),
|
||||
("10", "10 – Altaussee"),
|
||||
("11", "11 – Augstbach"),
|
||||
]
|
||||
|
||||
|
||||
def prospecting(request):
|
||||
"""This produces the multipage 'prospecting guide' document,
|
||||
intended to be printed and carried into the field - in 1999.
|
||||
|
||||
All the formatting and selection cleverness is in the template file.
|
||||
|
||||
This produces a vast number of bad 404 URLs as many URLs in the cave_data
|
||||
XML files refer to other caves, assuming that they are in the same directory
|
||||
as the prospecting guide. But since the introduction of the 1623/ level, this is
|
||||
not true. e.g. 163 refers to 162 as href="../162.htm" which is valid in the cave
|
||||
description page but not when navigating from the prospecting guide page.
|
||||
|
||||
Since this vast number of broken links is getting in the way of finding real errors, the guide
|
||||
has been disabled.
|
||||
"""
|
||||
message = (
|
||||
"This prospecting guide text report contains many broken URLs because of a major redesign\n"
|
||||
+ " to put caves into 1623/ and 1624/ folders in 2017. It was mostly useless because recent QM info was not in it anyway.\n\n"
|
||||
+ 'It is disabled in the python code in "prospecting(request):" in troggle/core/views/prospect.py'
|
||||
)
|
||||
return render(request, "errors/disabled.html", {"message": message})
|
||||
|
||||
areas = []
|
||||
for key, name in AREANAMES:
|
||||
a = Area.objects.get(short_name=key) # assumes unique
|
||||
caves = list(a.cave_set.all())
|
||||
caves.sort(key=caveKey)
|
||||
areas.append((name, a, caves))
|
||||
return render(request, "prospecting.html", {"areas": areas})
|
||||
|
||||
|
||||
# Parameters for big map and zoomed subarea maps:
|
||||
# big map first (zoom factor ignored)
|
||||
|
||||
# These are the values for the url /prospecting/[mapcode].png
|
||||
|
||||
maps = {
|
||||
# id left top right bottom zoom
|
||||
# G&K G&K G&K G&K factor
|
||||
"all": [33810.4, 85436.5, 38192.0, 81048.2, 0.35, "All"],
|
||||
"40": [36275.6, 82392.5, 36780.3, 81800.0, 3.0, "Eishöhle"],
|
||||
"76": [35440.0, 83220.0, 36090.0, 82670.0, 1.3, "Eislufthöhle"],
|
||||
"204": [36354.1, 84154.5, 37047.4, 83300, 3.0, "Steinbrückenhöhle"],
|
||||
"tc": [35230.0, 82690.0, 36110.0, 82100.0, 3.0, "Near Top Camp"],
|
||||
"grieß": [36000.0, 86300.0, 38320.0, 84400.0, 4.0, "Grießkogel Area"],
|
||||
}
|
||||
|
||||
for n in list(maps.keys()):
|
||||
L, T, R, B, S, name = maps[n]
|
||||
W = (R - L) / 2
|
||||
H = (T - B) / 2
|
||||
for i in range(2):
|
||||
for j in range(2):
|
||||
maps["%s%i%i" % (n, i, j)] = [L + i * W, T - j * H, L + (i + 1) * W, T - (j + 1) * H, S, name]
|
||||
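# The loop above generates four zoomed quadrant maps per base map, keyed e.g.
# "all00".."all11" (i selects the left/right half, j the top/bottom half).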
# Keys in the order in which we want the maps output
|
||||
mapcodes = ["all", "grieß", "40", "76", "204", "tc"]
|
||||
# Field codes
|
||||
L = 0
|
||||
T = 1
|
||||
R = 2
|
||||
B = 3
|
||||
ZOOM = 4
|
||||
DESC = 5
|
||||
SIZE = 5
|
||||
|
||||
areacolours = {
|
||||
"1a": "#00ffff",
|
||||
"1b": "#ff00ff",
|
||||
"1c": "#ffff00",
|
||||
"1d": "#ffffff",
|
||||
"2a": "#ff0000",
|
||||
"2b": "#00ff00",
|
||||
"2c": "#008800",
|
||||
"2d": "#ff9900",
|
||||
"3": "#880000",
|
||||
"4": "#0000ff",
|
||||
"6": "#000000", # doubles for surface fixed pts, and anything else
|
||||
"7": "#808080",
|
||||
}
|
||||
|
||||
for FONT in [
|
||||
"/usr/share/fonts/truetype/freefont/FreeSans.ttf",
|
||||
"/usr/X11R6/lib/X11/fonts/truetype/arial.ttf",
|
||||
"/mnt/c/windows/fonts/arial.ttf",
|
||||
"C:\WINNT\Fonts\ARIAL.TTF",
|
||||
]:
|
||||
if os.path.isfile(FONT):
|
||||
break
|
||||
TEXTSIZE = 16
|
||||
CIRCLESIZE = 8
|
||||
LINEWIDTH = 2
|
||||
# myFont = ImageFont.truetype(FONT, TEXTSIZE) # disabled as not importing PIL
|
||||
# print(f' - myFont {myFont} {FONT} {TEXTSIZE}')
|
||||
|
||||
|
||||
def mungecoord(x, y, mapcode, img):
|
||||
# Top of Zinken is 73 1201 = dataset 34542 81967
|
||||
# Top of Hinter is 1073 562 = dataset 36670 83317
|
||||
# image is 1417 by 2201
|
||||
# FACTOR1 = 1000.0 / (36670.0-34542.0)
|
||||
# FACTOR2 = (1201.0-562.0) / (83317 - 81967)
|
||||
# FACTOR = (FACTOR1 + FACTOR2)/2
|
||||
# The factors aren't the same as the scanned map's at a slight angle. I
|
||||
# can't be bothered to fix this. Since we zero on the Hinter it makes
|
||||
# very little difference for caves in the areas round 76 or 204.
|
||||
# xoffset = (x - 36670)*FACTOR
|
||||
# yoffset = (y - 83317)*FACTOR
|
||||
# return (1073 + xoffset, 562 - yoffset)
|
||||
|
||||
m = maps[mapcode]
|
||||
factorX, factorY = img.size[0] / (m[R] - m[L]), img.size[1] / (m[T] - m[B])
|
||||
return ((x - m[L]) * factorX, (m[T] - y) * factorY)
|
||||
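# Worked example (illustrative): for any map, a point at the left edge (x == m[L])
# maps to pixel 0, and a point at the top edge (y == m[T]) maps to pixel 0;
# factorX/factorY are pixels per G&K metre for that map crop.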
|
||||
|
||||
COL_TYPES = {True: "red", False: "#dddddd", "Reference": "#dddddd"}
|
||||
|
||||
|
||||
def prospecting_image(request, name):
|
||||
"""This draws map outlines on an existing map image.
|
||||
But getting the entrances plotted is broken by later changes elsewhere in the system since this code was written.
|
||||
|
||||
SurvexStations are in x=latitude, y=longitude - these are what appear in essentials.gpx
|
||||
Entrances are in northing, easting
|
||||
|
||||
which is why we can't simply plot all the Entrances...
|
||||
|
||||
We should replace all this with something that exports an overlay for Google Maps and OpenStreetView
|
||||
"""
|
||||
|
||||
mainImage = Image.open(os.path.join(settings.EXPOFILES, "location_maps", "pguidemap.jpg"))
|
||||
# if settings.PUBLIC_SITE and not request.user.is_authenticated:
|
||||
# mainImage = Image.new("RGB", mainImage.size, '#ffffff')
|
||||
m = maps[name]
|
||||
imgmaps = []
|
||||
if name == "all":
|
||||
img = mainImage
|
||||
else:
|
||||
M = maps["all"]
|
||||
W, H = mainImage.size
|
||||
l = int((m[L] - M[L]) / (M[R] - M[L]) * W)
|
||||
t = int((m[T] - M[T]) / (M[B] - M[T]) * H)
|
||||
r = int((m[R] - M[L]) / (M[R] - M[L]) * W)
|
||||
b = int((m[B] - M[T]) / (M[B] - M[T]) * H)
|
||||
img = mainImage.crop((l, t, r, b))
|
||||
w = int(round(m[ZOOM] * (m[R] - m[L]) / (M[R] - M[L]) * W))
|
||||
h = int(round(m[ZOOM] * (m[B] - m[T]) / (M[B] - M[T]) * H))
|
||||
img = img.resize((w, h), Image.BICUBIC)
|
||||
draw = ImageDraw.Draw(img)
|
||||
# draw.setfont(myFont)
|
||||
if name == "all":
|
||||
for maparea in list(maps.keys()):
|
||||
if maparea == "all":
|
||||
continue
|
||||
localm = maps[maparea]
|
||||
l, t = mungecoord(localm[L], localm[T], "all", img)
|
||||
r, b = mungecoord(localm[R], localm[B], "all", img)
|
||||
text = maparea + " map"
|
||||
textlen = draw.textsize(text)[0] + 3
|
||||
draw.rectangle([l, t, l + textlen, t + TEXTSIZE + 2], fill="#ffffff")
|
||||
draw.text((l + 2, t + 1), text, fill="#000000", font=myFont)
|
||||
imgmaps.append([l, t, l + textlen, t + SIZE + 2, "submap" + maparea, maparea + " subarea map"])
|
||||
draw.line([l, t, r, t], fill="#777777", width=LINEWIDTH)
|
||||
draw.line([l, b, r, b], fill="#777777", width=LINEWIDTH)
|
||||
draw.line([l, t, l, b], fill="#777777", width=LINEWIDTH)
|
||||
draw.line([r, t, r, b], fill="#777777", width=LINEWIDTH)
|
||||
draw.line([l, t, l + textlen, t], fill="#777777", width=LINEWIDTH)
|
||||
draw.line([l, t + TEXTSIZE + 2, l + textlen, t + TEXTSIZE + 2], fill="#777777", width=LINEWIDTH)
|
||||
draw.line([l, t, l, t + TEXTSIZE + 2], fill="#777777", width=LINEWIDTH)
|
||||
draw.line([l + textlen, t, l + textlen, t + TEXTSIZE + 2], fill="#777777", width=LINEWIDTH)
|
||||
# imgmaps[maparea] = []
|
||||
|
||||
# Draw scale bar
|
||||
m100 = int(100 / (m[R] - m[L]) * img.size[0])
|
||||
draw.line([10, TEXTSIZE * 3, 10, TEXTSIZE * 2], fill="#000000", width=LINEWIDTH)
|
||||
draw.line([10, TEXTSIZE * 2, 10 + m100, TEXTSIZE * 2], fill="#000000", width=LINEWIDTH)
|
||||
draw.line([10 + m100, TEXTSIZE * 3, 10 + m100, TEXTSIZE * 2], fill="#000000", width=LINEWIDTH)
|
||||
label = "100m"
|
||||
draw.text([10 + (m100 - draw.textsize(label)[0]) / 2, TEXTSIZE / 2], label, fill="#000000", font=myFont)
|
||||
|
||||
# Draw the circles for known points
|
||||
# Northing, Easting, Diameter - but N&E are swapped re database
|
||||
for (N, E, D, num) in [
|
||||
(35975.37, 83018.21, 100, "177"), # Calculated from bearings
|
||||
(35350.00, 81630.00, 50, "71"), # From Auer map
|
||||
(36025.00, 82475.00, 50, "146"), # From mystery map
|
||||
(35600.00, 82050.00, 50, "35"), # From Auer map
|
||||
(35650.00, 82025.00, 50, "44"), # From Auer map
|
||||
(36200.00, 82925.00, 50, "178"), # Calculated from bearings
|
||||
(35232.64, 82910.37, 25, "181"), # Calculated from bearings
|
||||
(35323.60, 81357.83, 50, "74"), # From Auer map
|
||||
]:
|
||||
(N, E, D) = list(map(float, (N, E, D)))
|
||||
maparea = Cave.objects.get(kataster_number=num).getArea().short_name
|
||||
lo = mungecoord(N - D, E + D, name, img)
|
||||
hi = mungecoord(N + D, E - D, name, img)
|
||||
lpos = mungecoord(N - D, E, name, img)
|
||||
draw.ellipse([lo, hi], outline="#000000")
|
||||
draw.ellipse([lo[0] + 1, lo[1] + 1, hi[0] - 1, hi[1] - 1], outline=areacolours[maparea])
|
||||
draw.ellipse([lo[0] + 2, lo[1] + 2, hi[0] - 2, hi[1] - 2], outline=areacolours[maparea])
|
||||
draw.rectangle(
|
||||
[lpos[0], lpos[1] - TEXTSIZE / 2, lpos[0] + draw.textsize(name)[0], lpos[1] + TEXTSIZE / 2], fill="#ffffff"
|
||||
)
|
||||
draw.text((lpos[0], lpos[1] - TEXTSIZE / 2), num, fill="#000000")
|
||||
# print(f' CIRCLES - {num} {(N,E,D)}')
|
||||
|
||||
# ml = MapLocations()
|
||||
# for p in ml.points():
|
||||
# surveypoint, number, point_type, label = p
|
||||
# print(f'{surveypoint}, {number}, {point_type}, {label}')
|
||||
# plot(surveypoint, number, True, label, name, draw, img)
|
||||
# print(f'{name},\n{draw},\n{img}')
|
||||
|
||||
ents = Entrance.objects.all() # only has entrances and fixed points in it these days,
|
||||
# but there are only 11 Entrances with northing, easting and a useable tag!
|
||||
D = 50
|
||||
for e in ents:
|
||||
try:
|
||||
E, N = e.easting, e.northing
|
||||
if e.tag_station:
|
||||
st = e.tag_station
|
||||
elif e.exact_station:
|
||||
st = e.exact_station
|
||||
else:
|
||||
# print(f' No tag - {e.name} ')
|
||||
continue
|
||||
if not e.northing:
|
||||
continue
|
||||
lo = mungecoord(N - D, E + D, name, img)  # 'name' is the map code; passing the station 'st' here made every lookup fail
|
||||
hi = mungecoord(N + D, E - D, name, img)
|
||||
lpos = mungecoord(N - D, E, name, img)
|
||||
|
||||
draw.ellipse([lo, hi], outline="#000000")
|
||||
draw.ellipse([lo[0] + 1, lo[1] + 1, hi[0] - 1, hi[1] - 1], outline="#ffffff")
|
||||
draw.ellipse([lo[0] + 2, lo[1] + 2, hi[0] - 2, hi[1] - 2], outline="#ffffff")
|
||||
draw.rectangle(
|
||||
[lpos[0], lpos[1] - TEXTSIZE / 2, lpos[0] + draw.textsize(str(st))[0], lpos[1] + TEXTSIZE / 2],
|
||||
fill="#ffffff",
|
||||
)
|
||||
draw.text((lpos[0], lpos[1] - TEXTSIZE / 2), str(st), fill="#000000")  # label with the station, not the stale 'num' from the earlier loop
|
||||
|
||||
# draw.ellipse([(x-CIRCLESIZE,y-CIRCLESIZE),(x+CIRCLESIZE,y+CIRCLESIZE)], fill="red", outline="blue")
|
||||
# draw.rectangle([(x+CIRCLESIZE, y-TEXTSIZE/2), (x+CIRCLESIZE*2+draw.textsize(shortnumber)[0], y+TEXTSIZE/2)], fill="#ffffff")
|
||||
# draw.text((x+CIRCLESIZE * 1.5,y-TEXTSIZE/2), shortnumber, fill="#000000")
|
||||
# print(f' SUCCESS - {st} {(E, N)} ')
|
||||
except:
|
||||
# print(f' FAIL - {st} {(E, N)} ')
|
||||
pass
|
||||
|
||||
response = HttpResponse(content_type="image/png")
|
||||
del draw
|
||||
img.save(response, "PNG")
|
||||
return response
|
||||
|
||||
|
||||
# def plot(surveypoint, number, point_type, label, mapcode, draw, img):
|
||||
# try:
|
||||
# ss = SurvexStation.objects.lookup(surveypoint)
|
||||
# E, N = ss.x, ss.y
|
||||
# shortnumber = number.replace("—","")
|
||||
# (x,y) = list(map(int, mungecoord(E, N, mapcode, img)))
|
||||
# imgmaps[maparea].append( [x-4, y-SIZE/2, x+4+draw.textsize(shortnumber)[0], y+SIZE/2, shortnumber, label] )
|
||||
# draw.rectangle([(x+CIRCLESIZE, y-TEXTSIZE/2), (x+CIRCLESIZE*2+draw.textsize(shortnumber)[0], y+TEXTSIZE/2)], fill="#ffffff")
|
||||
# draw.text((x+CIRCLESIZE * 1.5,y-TEXTSIZE/2), shortnumber, fill="#000000")
|
||||
# draw.ellipse([(x-CIRCLESIZE,y-CIRCLESIZE),(x+CIRCLESIZE,y+CIRCLESIZE)], fill=COL_TYPES[point_type], outline="#000000")
|
||||
# print(f' SUCCESS - YES {surveypoint}, {number}, {point_type}, {label}')
|
||||
# except:
|
||||
# print(f' - NO {surveypoint}, {number}, {point_type}, {label}')
|
||||
# pass
|
||||
@@ -1,348 +0,0 @@
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
from urllib.parse import unquote as urlunquote
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import render
|
||||
|
||||
from troggle.core.models.caves import GetCaveLookup
|
||||
from troggle.core.models.survex import SingleScan, SurvexBlock
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.models.troggle import DataIssue, Expedition, Person
|
||||
from troggle.core.views.expo import getmimetype
|
||||
from troggle.parsers.survex import set_walletdate
|
||||
from troggle.parsers.caves import add_cave_to_pending_list
|
||||
|
||||
# from troggle.parsers.people import GetPersonExpeditionNameLookup
|
||||
# import parsers.surveys
|
||||
|
||||
"""one of these views serves files as binary blobs, and simply set the mime type based on the file extension,
|
||||
as does the urls.py dispatcher which sends them here. Here they should actually have the filetype checked
|
||||
by looking inside the file before being served.
|
||||
|
||||
Need to check whether an invalid query string fails cleanly or produces multiple replies,
|
||||
and render a user-friendly error page.
|
||||
|
||||
Note that caveifywallet() etc do NOT save the object to the db. They are ephemeral, just for the page rendering of the
|
||||
manywallets dict.
|
||||
|
||||
TODO
|
||||
cave for a wallet - just gets the last one, randomly. Should make this a list or many:many ideally
|
||||
|
||||
add this file to the todo-list mechanism.
|
||||
"""
|
||||
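# A hedged sketch of the filetype check the docstring asks for: sniff the file's
# magic bytes instead of trusting its extension. The signature table below is
# illustrative, not exhaustive.
MAGIC_SIGNATURES = {
    b"\x89PNG\r\n\x1a\n": "image/png",
    b"\xff\xd8\xff": "image/jpeg",
    b"%PDF": "application/pdf",
    b"GIF8": "image/gif",
}

def sniff_mimetype(filepath, fallback="application/octet-stream"):
    with open(filepath, "rb") as f:
        header = f.read(8)  # longest signature above is 8 bytes
    for sig, mime in MAGIC_SIGNATURES.items():
        if header.startswith(sig):
            return mime
    return fallback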
|
||||
|
||||
def populatewallet(w):
|
||||
"""Copy survex data here just for display, not permanently
|
||||
|
||||
Only gets data that was parsed from the survex file on import,
|
||||
so doesn't work if there is no *ref value
|
||||
"""
|
||||
survexpeople = []
|
||||
blocks = SurvexBlock.objects.filter(scanswallet=w)
|
||||
for b in blocks:
|
||||
for personrole in b.survexpersonrole_set.all():
|
||||
survexpeople.append(personrole.personname)
|
||||
w.persons = list(set(survexpeople))
|
||||
|
||||
|
||||
|
||||
def caveifywallet(w):
|
||||
"""Gets the cave from the list of survex files,
|
||||
only selects one of them though. Only used for display.
|
||||
"""
|
||||
# print(f' - Caveify {w=}')
|
||||
blocknames = []
|
||||
blocks = SurvexBlock.objects.filter(scanswallet=w)
|
||||
for b in blocks:
|
||||
# NB b.cave is not populated by parser. Use b.survexfile.cave instead, or we could parse b.survexpath
|
||||
if b.survexfile.cave:
|
||||
w.caveobj = (
|
||||
b.survexfile.cave
|
||||
)  # just gets the last one, randomly. Should make this a list or many:many ideally
|
||||
w.cave = w.caveobj
|
||||
if b.name:
|
||||
blocknames.append(b.name)
|
||||
|
||||
if w.name():
|
||||
w.displaynames = [w.name()]
|
||||
else:
|
||||
w.displaynames = blocknames
|
||||
|
||||
|
||||
def fillblankpeople(w):
|
||||
# this only works if the wallet has a *ref and has been imported
|
||||
wp = w.people()
|
||||
w.persons = wp
|
||||
if not wp:
|
||||
populatewallet(w)
|
||||
else:
|
||||
if len(wp) == 1:
|
||||
# print(f' - {wp=}')
|
||||
nobody = wp[0].lower()
|
||||
if nobody == "unknown" or nobody == "nobody" or nobody == " " or nobody == "":
|
||||
# print(f' - {wp=} {nobody=}')
|
||||
populatewallet(w)
|
||||
|
||||
def is_cave(id):
|
||||
Gcavelookup = GetCaveLookup()
|
||||
id = id.strip("' []'")
|
||||
if id in Gcavelookup:
|
||||
return True
|
||||
else:
|
||||
print(f" - Failed to find cave object from id <{id}>")
|
||||
if id.lower() != "unknown" and id != "":
|
||||
print(f" - adding <{id}> to pendingcaves.txt list")
|
||||
add_cave_to_pending_list(id)
|
||||
return False
|
||||
|
||||
def fillblankothers(w):
|
||||
"""This is on the way to having a many:many relationship between Caves and Wallets
|
||||
"""
|
||||
if not w.walletdate:
|
||||
set_walletdate(w)
|
||||
|
||||
Gcavelookup = GetCaveLookup()
|
||||
|
||||
wcaveid = w.cave()
|
||||
if not wcaveid or wcaveid == "":
|
||||
caveifywallet(w)
|
||||
else:
|
||||
if isinstance(wcaveid, list):
|
||||
for i in wcaveid:
|
||||
i = i.strip("' []'")
|
||||
if is_cave(i):
|
||||
w.caveobj = Gcavelookup[i] # just sets it to the last one found. nasty. bug waiting to happen
|
||||
elif wcaveid.find(',') != -1:
|
||||
# it's a list of cave ids as a string
|
||||
ids = wcaveid.split(',')
|
||||
for i in ids:
|
||||
i = i.strip("' []'")
|
||||
if is_cave(i):
|
||||
w.caveobj = Gcavelookup[i] # just sets it to the last one found. nasty. bug waiting to happen
|
||||
else:
|
||||
if is_cave(wcaveid):
|
||||
w.caveobj = Gcavelookup[wcaveid.strip("' []'")]
|
||||
|
||||
|
||||
|
||||
|
||||
def fixsurvextick(w, ticks):
|
||||
ticks["S"] = w.fixsurvextick(ticks["S"])
|
||||
|
||||
|
||||
def walletslistperson(request, first_name, last_name):
|
||||
"""Page which displays a list of all the wallets for a specific person
|
||||
HORRIBLE linear search through everything. Index and do the SQL query properly; see the sketch after this function.
|
||||
"""
|
||||
# This is where we face having to re-do everything to do with names properly, rather than the horrible series of hacks over 20 years.
|
||||
# GetPersonExpeditionNameLookup
|
||||
def tickspersonwallet(p):
|
||||
manywallets = []
|
||||
wallets = Wallet.objects.all()
|
||||
for w in wallets:
|
||||
w.persons = w.people() # ephemeral attribute for web page
|
||||
fillblankpeople(w)
|
||||
if w.persons:
|
||||
if p.fullname in w.persons:
|
||||
manywallets.append(w)
|
||||
fillblankothers(w)
|
||||
w.ticks = w.get_ticks() # the complaints in colour form
|
||||
fixsurvextick(w, w.ticks)
|
||||
return manywallets
|
||||
|
||||
# print("-walletslistperson")
|
||||
|
||||
try:
|
||||
if last_name:
|
||||
p = Person.objects.get(fullname=f"{first_name} {last_name}")
|
||||
else:
|
||||
# special Wookey-hack
|
||||
p = Person.objects.get(first_name=f"{first_name}")
|
||||
except:
|
||||
# raise
|
||||
return render(
|
||||
request,
|
||||
"errors/generic.html",
|
||||
{"message": f'Unrecognised name of a expo person: "{first_name} {last_name}"'},
|
||||
)
|
||||
|
||||
manywallets = tickspersonwallet(p)
|
||||
expeditions = Expedition.objects.all()
|
||||
print("--")
|
||||
return render(
|
||||
request,
|
||||
"personwallets.html",
|
||||
{"manywallets": manywallets, "settings": settings, "person": p, "expeditions": expeditions},
|
||||
)
|
||||
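# A hedged sketch of replacing the linear scan with one database query. It
# assumes the reverse accessors used elsewhere in this module (Wallet via
# survexblock, and SurvexBlock via survexpersonrole), and it only covers names
# recorded via survex roles, not those found in the wallet JSON by w.people().
def wallets_for_person(p):
    return Wallet.objects.filter(
        survexblock__survexpersonrole__personname=p.fullname
    ).distinct()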
|
||||
|
||||
def setwalletsdates():
|
||||
"""This sets all the undated wallets, but they should already all be dated on
|
||||
import or on edit"""
|
||||
wallets = Wallet.objects.filter(walletdate=None)
|
||||
print(f"undated wallets: {len(wallets)}")
|
||||
for w in wallets:
|
||||
w.walletdate = w.date()
|
||||
w.save()
|
||||
|
||||
|
||||
def walletslistyear(request, year):
|
||||
"""Page which displays a list of all the wallets in a specific year.
|
||||
We have a field .walletyear, which we set on import.
|
||||
"""
|
||||
|
||||
def ticksyearwallet(year):
|
||||
manywallets = []
|
||||
wallets = Wallet.objects.filter(walletyear__year=year)
|
||||
for w in wallets:
|
||||
manywallets.append(w)
|
||||
fillblankpeople(w)
|
||||
fillblankothers(w)
|
||||
w.ticks = w.get_ticks() # the complaints in colour form, from the json file on disc
|
||||
fixsurvextick(w, w.ticks)
|
||||
|
||||
return manywallets
|
||||
|
||||
# print("-walletslistyear")
|
||||
if year < 1976 or year > 2050:
|
||||
return render(request, "errors/generic.html", {"message": "Year out of range. Must be between 1976 and 2050"})
|
||||
|
||||
# return render(request, 'errors/generic.html', {'message': 'This page logic not implemented yet'})
|
||||
|
||||
year = str(year)
|
||||
manywallets = ticksyearwallet(year)
|
||||
expeditions = Expedition.objects.all() #bad Django style
|
||||
expedition = expeditions.filter(year=year)
|
||||
length_ug = 0.0
|
||||
for w in manywallets:
|
||||
for sb in w.survexblock_set.all():
|
||||
length_ug += sb.legslength
|
||||
print("--")
|
||||
return render(
|
||||
request,
|
||||
"yearwallets.html",
|
||||
{
|
||||
"manywallets": manywallets,
|
||||
"settings": settings,
|
||||
"year": year,
|
||||
"expeditions": expeditions,
|
||||
"expedition": expedition,
|
||||
"length_ug": length_ug,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def cavewallets(request, caveid):
|
||||
"""Returns all the wallets for just one cave"""
|
||||
print("-cavewallets")
|
||||
|
||||
Gcavelookup = GetCaveLookup()
|
||||
if caveid in Gcavelookup:
|
||||
cave = Gcavelookup[caveid]
|
||||
else:
|
||||
return render(request, "errors/badslug.html", {"badslug": f"{caveid} - from cavewallets()"})
|
||||
|
||||
# remove duplication. Sorting is done in the template
|
||||
# But this only gets wallets which have survex files attached.
|
||||
wallets = set(Wallet.objects.filter(survexblock__survexfile__cave=cave))
|
||||
|
||||
# all the ones without a survexblock attached via a *ref, search for match in JSON
|
||||
zilchwallets = set(Wallet.objects.exclude(survexblock__survexfile__cave=cave))
|
||||
for z in zilchwallets:
|
||||
zcaveid = z.cave()
|
||||
if zcaveid:
|
||||
cleanid = str(zcaveid).strip("' []'")
|
||||
|
||||
if cleanid.find(',') != -1:
|
||||
# it's a list of cave ids
|
||||
wurl = f"/walletedit/{z.walletname.replace('#',':')}"
|
||||
message = f" ! In {z.walletname} we do not handle lists of cave ids yet '{cleanid}'"
|
||||
print(message)
|
||||
DataIssue.objects.update_or_create(parser="scans", message=message, url=wurl)
|
||||
|
||||
# it's a list of cave ids as a string. Identify any orphan caves hidden here
|
||||
ids = cleanid.split(',')
|
||||
for i in ids:
|
||||
i = i.strip("' []'")
|
||||
if is_cave(i):
|
||||
fcave = Gcavelookup[i.strip("' []'")] # just sets it to the last one found. nasty. bug waiting to happen
|
||||
|
||||
elif cleanid in Gcavelookup:
|
||||
fcave = Gcavelookup[cleanid]
|
||||
if str(fcave.slug()) == caveid:
|
||||
# print(f' - Found one ! {z.walletname=} {zcaveid=}')
|
||||
wallets.add(z)
|
||||
elif cleanid in ['surface', 'unknown', '']:
|
||||
message = f" ! In {z.walletname} ignore '{cleanid}' "
|
||||
print(message)
|
||||
pass
|
||||
else:
|
||||
wurl = f"/walletedit/{z.walletname.replace('#',':')}"
|
||||
message = f" ! In {z.walletname} there is an unrecognised cave name '{cleanid}', adding to pending list."
|
||||
print(message)
|
||||
DataIssue.objects.update_or_create(parser="scans", message=message, url=wurl)
|
||||
add_cave_to_pending_list(cleanid)
|
||||
|
||||
manywallets = list(wallets)  # wallets is already a set
|
||||
for w in manywallets:
|
||||
fillblankpeople(w)
|
||||
fillblankothers(w)
|
||||
w.ticks = w.get_ticks() # the complaints in colour form, from the json file on disc
|
||||
fixsurvextick(w, w.ticks)
|
||||
expeditions = Expedition.objects.all()
|
||||
print("--")
|
||||
return render(
|
||||
request,
|
||||
"cavewallets.html",
|
||||
{"manywallets": manywallets, "settings": settings, "cave": cave, "expeditions": expeditions},
|
||||
)
|
||||
|
||||
|
||||
def oldwallet(request, path):
|
||||
"""Now called only for non-standard wallet structures for pre-2000 wallets"""
|
||||
# print([ s.walletname for s in Wallet.objects.all() ])
|
||||
print(f"! - oldwallet path:{path}")
|
||||
try:
|
||||
wallet = Wallet.objects.get(walletname=urlunquote(path))
|
||||
return render(request, "wallet_old.html", {"wallet": wallet, "settings": settings})
|
||||
except:
|
||||
message = f"Scan folder error or not found '{path}' ."
|
||||
return render(request, "errors/generic.html", {"message": message})
|
||||
|
||||
|
||||
def scansingle(request, path, file):
|
||||
"""sends a single binary file to the user for display - browser decides how using mimetype
|
||||
This is very unsafe"""
|
||||
try:
|
||||
wallet = Wallet.objects.get(walletname=urlunquote(path))
|
||||
singlescan = SingleScan.objects.get(wallet=wallet, name=file)
|
||||
imagefile = Path(singlescan.ffile, file)
|
||||
if imagefile.is_file():
|
||||
message = f" - scansingle {imagefile} {path}:{file}:{getmimetype(file)}:"
|
||||
print(message)
|
||||
return HttpResponse(content=open(imagefile, "rb"), content_type=getmimetype(file)) # any type of image
|
||||
else:
|
||||
message = f"Scan folder file '{imagefile}' not found. {path=} {file=}"
|
||||
print(message)
|
||||
return render(request, "errors/generic.html", {"message": message})
|
||||
|
||||
except:
|
||||
message = f"Scan folder or scan item access error '{path}' and '{file}'."
|
||||
return render(request, "errors/generic.html", {"message": message})
|
||||
|
||||
|
||||
def allscans(request):
|
||||
"""Returns all the wallets in the system, we would like to use
|
||||
the Django queryset SQL optimisation https://docs.djangoproject.com/en/dev/ref/models/querysets/#prefetch-related
|
||||
to get the related singlescan and survexblock objects but that requires rewriting this to do the query on those, not on
|
||||
the wallets
|
||||
"""
|
||||
manywallets = Wallet.objects.all() # NB all of them
|
||||
# manywallets = Wallet.objects.all().prefetch_related('singlescan') fails as the link is defined on 'singlescan' not on 'wallet'
|
||||
expeditions = Expedition.objects.all()
|
||||
return render(
|
||||
request, "manywallets.html", {"manywallets": manywallets, "settings": settings, "expeditions": expeditions}
|
||||
)
|
||||
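# A hedged sketch of querying from the related side instead, so the join happens
# in one SQL query. It assumes SingleScan's ForeignKey to Wallet is named
# "wallet", as used by scansingle() above.
scans = SingleScan.objects.select_related("wallet")
by_wallet = {}
for s in scans:
    by_wallet.setdefault(s.wallet, []).append(s)  # one query, grouped in Python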
@@ -1,254 +0,0 @@
|
||||
from collections import OrderedDict
|
||||
from pathlib import Path
|
||||
|
||||
from django.shortcuts import render
|
||||
|
||||
import troggle.settings as settings
|
||||
from troggle.core.models.caves import Cave, Entrance
|
||||
from troggle.core.models.logbooks import LogbookEntry
|
||||
from troggle.core.models.survex import SurvexStation
|
||||
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
|
||||
from troggle.parsers.people import GetPersonExpeditionNameLookup, foreign_friends
|
||||
|
||||
# from django.views.generic.list import ListView
|
||||
"""Very simple report pages summarizing data about the whole set of expeditions and of
|
||||
the status of data inconsistencies
|
||||
"""
|
||||
|
||||
|
||||
def therionissues(request):
|
||||
"""Page displaying contents of a file produced during data import"""
|
||||
logname = "therionrefs.log"
|
||||
logpath = Path(settings.PYTHON_PATH, logname)
|
||||
therionlog = []
|
||||
newlog = []
|
||||
|
||||
if logpath.is_file():  # is_file is a method; without () the test was always True
|
||||
with open(logpath, "r") as f:
|
||||
therionlog = f.readlines()
|
||||
print(f"{logpath} has {len(therionlog)} entries")
|
||||
else:
|
||||
print(f"{logpath} NOT FOUND {len(therionlog)}")
|
||||
|
||||
for line in therionlog:
|
||||
line = line.replace("! Un-parsed image filename:", "")
|
||||
newlog.append(line)
|
||||
return render(request, "therionreport.html", {"therionlog": newlog})
|
||||
|
||||
|
||||
def surveximport(request):
|
||||
"""Page displaying contents of a file produced during data import"""
|
||||
logname = "svxlinear.log"
|
||||
logpath = Path(settings.PYTHON_PATH, logname)
|
||||
|
||||
contents = ""
|
||||
if logpath.is_file():  # is_file is a method; without () the test was always True
|
||||
with open(logpath, "r") as f:
|
||||
contents = f.read()
|
||||
else:
|
||||
print(f"{logpath} NOT FOUND {len(contents)}")
|
||||
return render(request, "survexreport.html", {"log": contents})
|
||||
|
||||
|
||||
def survexdebug(request):
|
||||
"""Page displaying contents of a file produced during data import"""
|
||||
logname = "svxblks.log"
|
||||
logpath = Path(settings.PYTHON_PATH, logname)
|
||||
|
||||
contents = ""
|
||||
if logpath.is_file():  # is_file is a method; without () the test was always True
|
||||
with open(logpath, "r") as f:
|
||||
contents = f.read()
|
||||
else:
|
||||
print(f"{logpath} NOT FOUND {len(contents)}")
|
||||
return render(request, "survexdebug.html", {"log": contents})
|
||||
|
||||
|
||||
def pathsreport(request):
|
||||
"""The CONSTANTs declared in the settings and localsettings and how they have
|
||||
been evaluated for this specific installation - live"""
|
||||
pathsdict = OrderedDict()
|
||||
try:
|
||||
pathsdict = {
|
||||
# "BOGUS" : str( settings.BOGUS),
|
||||
"JSLIB_URL": str(settings.JSLIB_URL),
|
||||
"JSLIB_ROOT": str(settings.JSLIB_ROOT),
|
||||
# "CSSLIB_URL" : str( settings.CSSLIB_URL),
|
||||
"CAVEDESCRIPTIONS": str(settings.CAVEDESCRIPTIONS),
|
||||
"DIR_ROOT": str(settings.DIR_ROOT),
|
||||
"ENTRANCEDESCRIPTIONS": str(settings.ENTRANCEDESCRIPTIONS),
|
||||
"EXPOUSER_EMAIL": str(settings.EXPOUSER_EMAIL),
|
||||
"EXPOUSERPASS": str("<redacted>"),
|
||||
"EXPOUSER": str(settings.EXPOUSER),
|
||||
"EXPOWEB": str(settings.EXPOWEB),
|
||||
# "EXPOWEB_URL": str(settings.EXPOWEB_URL),
|
||||
# "FILES" : str( settings.FILES),
|
||||
"LIBDIR": str(settings.LIBDIR),
|
||||
"LOGFILE": str(settings.LOGFILE),
|
||||
"LOGIN_REDIRECT_URL": str(settings.LOGIN_REDIRECT_URL),
|
||||
"MEDIA_ROOT": str(settings.MEDIA_ROOT),
|
||||
"MEDIA_URL": str(settings.MEDIA_URL),
|
||||
"PHOTOS_URL": str(settings.PHOTOS_URL),
|
||||
"PHOTOS_ROOT": str(settings.PHOTOS_ROOT),
|
||||
"PYTHON_PATH": str(settings.PYTHON_PATH),
|
||||
"REPOS_ROOT_PATH": str(settings.REPOS_ROOT_PATH),
|
||||
"ROOT_URLCONF": str(settings.ROOT_URLCONF),
|
||||
"STATIC_URL": str(settings.STATIC_URL),
|
||||
"SURVEX_DATA": str(settings.SURVEX_DATA),
|
||||
"SCANS_ROOT": str(settings.SCANS_ROOT),
|
||||
# "SURVEYS" : str( settings.SURVEYS),
|
||||
# "SCANS_URL" : str( settings.SCANS_URL),
|
||||
"SURVEXPORT": str(settings.SURVEXPORT),
|
||||
"DRAWINGS_DATA": str(settings.DRAWINGS_DATA),
|
||||
"URL_ROOT": str(settings.URL_ROOT),
|
||||
}
|
||||
except:
|
||||
pathsdict["! EXCEPTION !"] = "missing or exta string constant in troggle/settings"
|
||||
|
||||
pathstype = OrderedDict()
|
||||
try:
|
||||
pathstype = {
|
||||
# "BOGUS" : type(settings.BOGUS),
|
||||
"JSLIB_URL": type(settings.JSLIB_URL),
|
||||
"JSLIB_ROOT": type(settings.JSLIB_ROOT),
|
||||
# "CSSLIB_URL" : type(settings.CSSLIB_URL),
|
||||
"CAVEDESCRIPTIONS": type(settings.CAVEDESCRIPTIONS),
|
||||
"DIR_ROOT": type(settings.DIR_ROOT),
|
||||
"ENTRANCEDESCRIPTIONS": type(settings.ENTRANCEDESCRIPTIONS),
|
||||
"EXPOUSER_EMAIL": type(settings.EXPOUSER_EMAIL),
|
||||
"EXPOUSERPASS": type(settings.EXPOUSERPASS),
|
||||
"EXPOUSER": type(settings.EXPOUSER),
|
||||
"EXPOWEB": type(settings.EXPOWEB),
|
||||
# "EXPOWEB_URL": type(settings.EXPOWEB_URL),
|
||||
# "FILES" : type(settings.FILES),
|
||||
"LIBDIR": type(settings.LIBDIR),
|
||||
"LOGFILE": type(settings.LOGFILE),
|
||||
"LOGIN_REDIRECT_URL": type(settings.LOGIN_REDIRECT_URL),
|
||||
"MEDIA_ROOT": type(settings.MEDIA_ROOT),
|
||||
"MEDIA_URL": type(settings.MEDIA_URL),
|
||||
"PHOTOS_ROOT": type(settings.PHOTOS_ROOT),
|
||||
"PHOTOS_URL": type(settings.PHOTOS_URL),
|
||||
"PYTHON_PATH": type(settings.PYTHON_PATH),
|
||||
"REPOS_ROOT_PATH": type(settings.REPOS_ROOT_PATH),
|
||||
"ROOT_URLCONF": type(settings.ROOT_URLCONF),
|
||||
"STATIC_URL": type(settings.STATIC_URL),
|
||||
"SURVEX_DATA": type(settings.SURVEX_DATA),
|
||||
"SCANS_ROOT": type(settings.SCANS_ROOT),
|
||||
# "SURVEYS" : type(settings.SURVEYS),
|
||||
# "SCANS_URL" : type(settings.SCANS_URL),
|
||||
"SURVEXPORT": type(settings.SURVEXPORT),
|
||||
"DRAWINGS_DATA": type(settings.DRAWINGS_DATA),
|
||||
"URL_ROOT": type(settings.URL_ROOT),
|
||||
}
|
||||
except:
|
||||
pathstype["! EXCEPTION !"] = "missing or exta string constant in troggle/settings"
|
||||
raise
|
||||
|
||||
# settings are unique but paths are not
|
||||
ncodes = len(pathsdict)
|
||||
bycodeslist = sorted(pathsdict.items()) # a list of tuples
|
||||
bycodeslist2 = []
|
||||
|
||||
for k, p in bycodeslist:
|
||||
bycodeslist2.append((k, p, str(pathstype[k])))
|
||||
|
||||
bypaths = sorted(pathsdict.values()) # a list
|
||||
bypathslist = []
|
||||
|
||||
for p in bypaths:
|
||||
for k in pathsdict.keys():
|
||||
if pathsdict[k] == p:
|
||||
bypathslist.append((p, k, str(pathstype[k])))
|
||||
del pathsdict[k]
|
||||
break
|
||||
|
||||
return render(
|
||||
request,
|
||||
"pathsreport.html",
|
||||
{"pathsdict": pathsdict, "bycodeslist": bycodeslist2, "bypathslist": bypathslist, "ncodes": ncodes},
|
||||
)
|
||||
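# A hedged sketch of deriving both hand-maintained tables above from a single
# list of setting names, so they cannot drift apart. SETTING_NAMES is
# illustrative; it would list every name shown above.
SETTING_NAMES = ["JSLIB_URL", "JSLIB_ROOT", "CAVEDESCRIPTIONS", "EXPOWEB", "URL_ROOT"]

pathsdict = {n: str(getattr(settings, n, "<missing>")) for n in SETTING_NAMES}
pathstype = {n: type(getattr(settings, n, None)) for n in SETTING_NAMES}
pathsdict["EXPOUSERPASS"] = "<redacted>"  # never display the password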
|
||||
|
||||
def stats(request):
|
||||
"""Calculates number of survey blocks, the number of survey legs and the survey length for each year.
|
||||
This is only underground survey legs, but includes ARGE as well as Expo survex files.
|
||||
"""
|
||||
statsDict = {}
|
||||
statsDict["expoCount"] = f"{Expedition.objects.count():,}"
|
||||
statsDict["caveCount"] = f"{Cave.objects.count():,}"
|
||||
statsDict["personCount"] = f"{Person.objects.count():,}"
|
||||
statsDict["logbookEntryCount"] = f"{LogbookEntry.objects.count():,}"
|
||||
|
||||
legsbyexpo = []
|
||||
addupsurvexlength = 0.0
|
||||
addupsurvexlegs = 0
|
||||
for expedition in Expedition.objects.all():
|
||||
survexblocks = expedition.survexblock_set.all()
|
||||
legsyear = 0
|
||||
survexleglength = 0.0
|
||||
for survexblock in survexblocks:
|
||||
survexleglength += survexblock.legslength
|
||||
legsyear += int(survexblock.legsall)
|
||||
addupsurvexlength += survexleglength
|
||||
addupsurvexlegs += legsyear
|
||||
legsbyexpo.append((expedition, {"nsurvexlegs": legsyear, "survexleglength": survexleglength}))
|
||||
legsbyexpo.reverse()
|
||||
|
||||
renderDict = {
|
||||
**statsDict,
|
||||
**{"addupsurvexlength": addupsurvexlength / 1000, "legsbyexpo": legsbyexpo, "nsurvexlegs": addupsurvexlegs},
|
||||
} # new syntax
|
||||
return render(request, "statistics.html", renderDict)
|
||||
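# A hedged sketch of pushing the per-expedition loop into the database with
# Django aggregation, using the field names seen above (legslength, legsall):
from django.db.models import Sum

totals = expedition.survexblock_set.aggregate(
    length=Sum("legslength"), legs=Sum("legsall")
)  # e.g. {"length": 1234.5, "legs": 678}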
|
||||
|
||||
def dataissues(request):
|
||||
"""Each issue has a parser, a message and a url linking to the offending object after loading"""
|
||||
|
||||
def myFunc(di):
|
||||
return di.parser.lower() + di.message.lower()
|
||||
|
||||
dilist = list(DataIssue.objects.all())
|
||||
dilist.sort(key=myFunc)
|
||||
|
||||
return render(request, "dataissues.html", {"didict": dilist})
|
||||
|
||||
|
||||
def eastings(request):
|
||||
"""report each Northing/Easting pair wherever recorded"""
|
||||
ents = []
|
||||
entrances = Entrance.objects.all()
|
||||
for e in entrances:
|
||||
if e.easting or e.northing:
|
||||
ents.append(e)
|
||||
|
||||
stations = SurvexStation.objects.all()
|
||||
|
||||
return render(request, "eastings.html", {"ents": ents, "stations": stations})
|
||||
|
||||
|
||||
def aliases(request, year):
|
||||
"""Page which displays a list of all the person aliases in a specific year"""
|
||||
|
||||
if not year:
|
||||
year = 1998
|
||||
expo = Expedition.objects.filter(year=year)[0]  # filter() returns a queryset, even though we know there is only one
|
||||
personexpeditions = PersonExpedition.objects.filter(expedition=expo)
|
||||
persons = list(Person.objects.all().order_by("last_name"))
|
||||
|
||||
aliases = GetPersonExpeditionNameLookup(expo)
|
||||
|
||||
aliasdict = {}
|
||||
for i in sorted(aliases):
|
||||
aliasdict[i] = aliases[i]
|
||||
invert = {}
|
||||
|
||||
return render(
|
||||
request,
|
||||
"aliases.html",
|
||||
{
|
||||
"year": year,
|
||||
"aliasdict": aliasdict,
|
||||
"foreign_friends": foreign_friends,
|
||||
"invert": invert,
|
||||
"personexpeditions": personexpeditions,
|
||||
"persons": persons,
|
||||
},
|
||||
)
|
||||
@@ -1,715 +0,0 @@
|
||||
import datetime
|
||||
import difflib
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
from pathlib import Path
|
||||
from collections import namedtuple
|
||||
|
||||
from django import forms
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
|
||||
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import render
|
||||
from django.views.decorators.csrf import ensure_csrf_cookie
|
||||
|
||||
import troggle.settings as settings
|
||||
from troggle.core.models.logbooks import LogbookEntry
|
||||
from troggle.core.models.caves import Cave
|
||||
from troggle.core.models.survex import SurvexFile, SurvexBlock
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.utils import only_commit
|
||||
from troggle.parsers.survex import parse_one_file
|
||||
|
||||
"""Everything that views survexfiles
|
||||
but also displays data on a cave or caves when there is ambiguity
|
||||
"""
|
||||
|
||||
todo = """- survexcavesingle is not properly producing any result for Homecoming, 1626-359, 2018-dm-07
|
||||
even though there are dozens of surveys.
|
||||
|
||||
- REFACTOR the very impenetrable code for scanning subdirectories, replace with modern python pathlib
|
||||
|
||||
- filter out the non-public caves from display UNLESS LOGGED IN
|
||||
|
||||
- Never actually uses the object for the survexfile, works entirely from the filepath! Make it check and validate
|
||||
|
||||
- the primary survex file in each cave directory should be in a configuration, not buried in the code...
|
||||
|
||||
- Save and re-parse an edited survexfile which already exists in the db, and update
|
||||
all its dependencies (work in progress)
|
||||
|
||||
- overlapping and cross-calling when things fail makes this hard to understand, e.g. svx() and
|
||||
survexcavessingle() can get called for a survex file depending on whether the URL ends in ".svx" or not,
|
||||
but each tries to handle the other case too.
|
||||
|
||||
"""
|
||||
|
||||
SVXPATH = Path(settings.SURVEX_DATA)
|
||||
|
||||
# NB this template text must be identical to that in :loser:/templates/template.svx
|
||||
survextemplatefile = """; *** THIS IS A TEMPLATE FILE NOT WHAT YOU MIGHT BE EXPECTING ***
|
||||
|
||||
*** DO NOT SAVE THIS FILE WITHOUT RENAMING IT !! ***
|
||||
;[Stuff in square brackets is example text to be replaced with real data,
|
||||
; removing the square brackets]
|
||||
|
||||
*begin [surveyname]
|
||||
|
||||
; stations linked into other surveys (or likely to)
|
||||
*export [1 8 12 34]
|
||||
|
||||
; Cave:
|
||||
; Area in cave/QM:
|
||||
*title ""
|
||||
*date [2040.07.04] ; <-- CHANGE THIS DATE
|
||||
*team Insts [Fred Fossa]
|
||||
*team Notes [Brenda Badger]
|
||||
*team Pics [Luke Lynx]
|
||||
*team Tape [Albert Aadvark]
|
||||
*instrument [SAP #+Laser Tape/DistoX/Compass # ; Clino #]
|
||||
; Calibration: [Where, readings]
|
||||
*ref [2040#00] ; <-- CHANGE THIS TOO
|
||||
; the #number is on the clear pocket containing the original notes
|
||||
|
||||
; if using a tape:
|
||||
*calibrate tape +0.0 ; +ve if tape was too short, -ve if too long
|
||||
|
||||
; Centreline data
|
||||
*data normal from to length bearing gradient ignoreall
|
||||
[ 1 2 5.57 034.5 -12.8 ]
|
||||
|
||||
;-----------
|
||||
;recorded station details (leave commented out)
|
||||
;(NP=Nail Polish, LHW/RHW=Left/Right Hand Wall)
|
||||
;Station Left Right Up Down Description
|
||||
;[Red] nail varnish markings
|
||||
[;1 0.8 0 5.3 1.6 ; NP on boulder. pt 23 on foo survey ]
|
||||
[;2 0.3 1.2 6 1.2 ; NP '2' LHW ]
|
||||
[;3 1.3 0 3.4 0.2 ; Rock on floor - not refindable ]
|
||||
|
||||
|
||||
;LRUDs arranged into passage tubes
|
||||
;new *data command for each 'passage',
|
||||
;repeat stations and adjust numbers as needed
|
||||
*data passage station left right up down
|
||||
;[ 1 0.8 0 5.3 1.6 ]
|
||||
;[ 2 0.3 1.2 6 1.2 ]
|
||||
*data passage station left right up down
|
||||
;[ 1 1.3 1.5 5.3 1.6 ]
|
||||
;[ 3 2.4 0 3.4 0.2 ]
|
||||
|
||||
|
||||
;-----------
|
||||
;Question Mark List ;(keep initial semi-colon on each line)
|
||||
; The nearest-station is the name of the survey and station which are nearest to
|
||||
; the QM. The resolution-station is either '-' to indicate that the QM hasn't
|
||||
; been checked; or the name of the survey and station which push that QM. If a
|
||||
; QM doesn't go anywhere, set the resolution-station to be the same as the
|
||||
; nearest-station. Include any relevant details of how to find or push the QM in
|
||||
; the textual description.
|
||||
;Serial number grade(A/B/C/D/X) nearest-station resolution-station description
|
||||
;[ QM1 A surveyname.3 - description of QM ]
|
||||
;[ QM2 B surveyname.5 - description of QM ]
|
||||
|
||||
;TICKed off QMs
|
||||
; in the past, if another survey existed, the resolution-station
|
||||
; field was filled in, e.g.
|
||||
;[ QM2 B surveyname.5 anothersurvey.7 description of QM and description of progress ]
|
||||
|
||||
; or we can use the trial format
|
||||
;Serial number TICK date resolution description
|
||||
;[QM2 TICK 2022-07-20 This is an example ticked QM]
|
||||
|
||||
;------------
|
||||
;Cave description ;(leave commented-out)
|
||||
;Freeform text describing this section of the cave
|
||||
|
||||
*end [surveyname]
|
||||
"""
|
||||
|
||||
def get_survexfile(filename):
|
||||
"""Gets the SurvexFile object from the survex path for the file
|
||||
in a robust way
|
||||
"""
|
||||
refs = SurvexFile.objects.filter(path=filename)
|
||||
if len(refs)==0: # new survex file, not created in db yet
|
||||
survexfile = False
|
||||
elif len(refs)==1:
|
||||
survexfile = SurvexFile.objects.get(path=filename)
|
||||
else:
|
||||
survexfile = refs[0]
|
||||
# OK this is due to a bug in the import file parsing, whoops. Now fixed ?!
|
||||
print("BUG - to be fixed in the survex parser - not critical..")
|
||||
print(f"Number of SurvexFile objects found: {len(refs)}")
|
||||
for s in refs:
|
||||
print (s.path, s.survexdirectory, s.cave)
|
||||
# print(type(survexfile), filename)
|
||||
return survexfile
|
||||
|
||||
class SvxForm(forms.Form):
|
||||
"""Two-pane form, upper half is the raw survex file, lower half (with green background)
|
||||
is the output: of running 'cavern' on the survex file, of running a 'difference', or of
|
||||
checking that there are no square brackets left.
|
||||
"""
|
||||
|
||||
dirname = forms.CharField(widget=forms.TextInput(attrs={"readonly": True}))
|
||||
filename = forms.CharField(widget=forms.TextInput(attrs={"readonly": True}))
|
||||
datetime = forms.DateTimeField(widget=forms.TextInput(attrs={"readonly": True}))
|
||||
outputtype = forms.CharField(widget=forms.TextInput(attrs={"readonly": True}))
|
||||
code = forms.CharField(widget=forms.Textarea(attrs={"cols": 140, "rows": 36}))
|
||||
survexfile = models.ForeignKey(SurvexFile, blank=True, null=True, on_delete=models.SET_NULL) # 1:1 ?
|
||||
|
||||
template = False
|
||||
|
||||
def GetDiscCode(self):
|
||||
fname = SVXPATH / (self.data["filename"] + ".svx")
|
||||
if not fname.is_file():
|
||||
print(">>> >>> WARNING - svx file not found, showing TEMPLATE SVX", fname, flush=True)
|
||||
self.template = True
|
||||
self.survexfile = False
|
||||
return survextemplatefile
|
||||
if not self.survexfile:
|
||||
self.survexfile = get_survexfile(self.data["filename"])
|
||||
try:
|
||||
fin = open(fname, "r", encoding="utf8", newline="")
|
||||
svxtext = fin.read()
|
||||
fin.close()
|
||||
except UnicodeDecodeError:
|
||||
# hack. Replace this with something better.
|
||||
fin = open(fname, "r", encoding="iso-8859-1", newline="")
|
||||
svxtext = fin.read()
|
||||
fin.close()
|
||||
return svxtext
|
||||
|
||||
def DiffCode(self, rcode):
|
||||
code = self.GetDiscCode()
|
||||
difftext = difflib.unified_diff(code.splitlines(), rcode.splitlines())
|
||||
difflist = [diffline.strip() for diffline in difftext if not re.match(r"\s*$", diffline)]
|
||||
return difflist
|
||||
|
||||
def SaveCode(self, rcode):
|
||||
fname = SVXPATH / (self.data["filename"] + ".svx")
|
||||
if not fname.is_file():
|
||||
if re.search(r"\[|\]", rcode):
|
||||
errmsg = "Error: remove all []s from the text.\nEverything inside [] are only template guidance.\n\n"
|
||||
errmsg += "All [] must be edited out and replaced with real data before you can save this file.\n"
|
||||
return errmsg
|
||||
mbeginend = re.search(r"(?s)\*begin\s+(\w+).*?\*end\s+(\w+)", rcode)
|
||||
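# NB this regex pairs the first *begin with the first *end it finds, so nested
# begin/end blocks are not fully validated (an assumption worth noting).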
if not mbeginend:
|
||||
return "Error: no begin/end block here"
|
||||
if mbeginend.group(1) != mbeginend.group(2):
|
||||
return "Error: mismatching begin/end labels"
|
||||
|
||||
# Make this create new survex folders if needed
|
||||
try:
|
||||
fout = open(fname, "w", encoding="utf8", newline="\n")
|
||||
except FileNotFoundError:
|
||||
pth = os.path.dirname(self.data["filename"])
|
||||
newpath = SVXPATH / pth
|
||||
if not os.path.exists(newpath):
|
||||
os.makedirs(newpath)
|
||||
fout = open(fname, "w", encoding="utf8", newline="\n")
|
||||
except PermissionError:
|
||||
return (
|
||||
"CANNOT save this file.\nPERMISSIONS incorrectly set on server for this file. Ask a nerd to fix this."
|
||||
)
|
||||
|
||||
# javascript seems to insert CRLF on WSL1 whatever you say. So fix that:
|
||||
fout.write(rcode.replace("\r", ""))
|
||||
fout.write("\n")
|
||||
fout.close()
|
||||
|
||||
if socket.gethostname() == "expo":
|
||||
comment = f"Online survex edit: {self.data['filename']}.svx"
|
||||
else:
|
||||
comment = f"Online survex edit: {self.data['filename']}.svx on dev machine '{socket.gethostname()}' "
|
||||
only_commit(fname, comment)
|
||||
|
||||
msg = "SAVED and committed to git (if there were differences)"
|
||||
# should only call this if something changed
|
||||
if parse_one_file(self.data["filename"]):
|
||||
return msg
|
||||
else:
|
||||
return msg + "\nBUT PARSING failed. Do a completely new databaseReset."
|
||||
|
||||
def Process(self):
|
||||
print(">>>>....\n....Processing\n")
|
||||
froox = os.fspath(SVXPATH / (self.data["filename"] + ".svx"))
|
||||
froog = os.fspath(SVXPATH / (self.data["filename"] + ".log"))
|
||||
cwd = os.getcwd()
|
||||
os.chdir(os.path.split(froox)[0])
|
||||
os.system(settings.CAVERN + " --log " + froox)
|
||||
os.chdir(cwd)
|
||||
|
||||
# Update this to use the new syntax..
|
||||
# sp = subprocess.run([settings.CAVERN, "--log", f'--output={outputdir}', f'{fullpath}.svx'],
|
||||
# capture_output=True, check=False, text=True)
|
||||
# if sp.returncode != 0:
|
||||
# message = f' ! Error running {settings.CAVERN}: {fullpath}'
|
||||
# DataIssue.objects.create(parser='entrances', message=message)
|
||||
# print(message)
|
||||
# print(f'stderr:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))
|
||||
|
||||
filepatherr = Path(SVXPATH / str(self.data["filename"] + ".err"))
|
||||
if filepatherr.is_file():
|
||||
if filepatherr.stat().st_size == 0:
|
||||
filepatherr.unlink() # delete empty closure error file
|
||||
|
||||
fin = open(froog, "r", encoding="utf8")
|
||||
log = fin.read()
|
||||
fin.close()
|
||||
# log = re.sub("(?s).*?(Survey contains)", "\\1", log) # this omits any ERROR MESSAGES ! Don't do it.
|
||||
for s in [
|
||||
"Removing trailing traverses...\n\n",
|
||||
"Concatenating traverses...\n\n" "Simplifying network...\n\n",
|
||||
"Calculating network...\n\n",
|
||||
"Calculating traverses...\n\n",
|
||||
"Calculating trailing traverses...\n\n",
|
||||
"Calculating statistics...\n\n",
|
||||
]:
|
||||
log = log.replace(s, "")
|
||||
return log
|
||||
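# A hedged sketch of the modern replacement for the os.system/os.chdir dance,
# completing the commented-out subprocess code above (same flags, same settings.CAVERN):
import subprocess

def run_cavern(fullpath, outputdir):
    sp = subprocess.run(
        [settings.CAVERN, "--log", f"--output={outputdir}", f"{fullpath}.svx"],
        capture_output=True, check=False, text=True,
    )
    if sp.returncode != 0:
        print(f" ! Error running {settings.CAVERN}: {fullpath}")
        print(f"stderr:\n{sp.stderr}\nstdout:\n{sp.stdout}\nreturn code: {sp.returncode}")
    return sp.returncode == 0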
|
||||
|
||||
@ensure_csrf_cookie
|
||||
def svx(request, survex_file):
|
||||
"""Displays a single survex file in an textarea window (using a javascript online editor to enable
|
||||
editing) with buttons which allow SAVE, check for DIFFerences from saved, and RUN (which runs the
|
||||
cavern executable and displays the output below the main textarea).
|
||||
Requires CSRF to be set up correctly, and requires permission to write to the filesystem.
|
||||
|
||||
Originally the non-existence of difflist was used as a marker to say that the file had been saved
|
||||
and that thus there were no differences. This is inadequate, as a new file which has not been saved
|
||||
also has no difflist.
|
||||
|
||||
Needs refactoring. Too many piecemeal edits and odd state dependencies.
|
||||
|
||||
On GET this does the SAME THING as svxcavesingle but is called when the .svx suffix is MISSING
|
||||
"""
|
||||
warning = False
|
||||
|
||||
print(survex_file)
|
||||
if survex_file.lower().endswith(".svx"):
|
||||
#cope with ".svx.svx" bollox
|
||||
survex_file = survex_file[:-4]
|
||||
print(survex_file)
|
||||
|
||||
# get the basic data from the file given in the URL
|
||||
dirname = os.path.split(survex_file)[0] # replace with proper pathlib function..
|
||||
dirname += "/"
|
||||
nowtime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
outputtype = "normal"
|
||||
form = SvxForm({"filename": survex_file, "dirname": dirname, "datetime": nowtime, "outputtype": outputtype})
|
||||
|
||||
# if the form has been returned
|
||||
difflist = []
|
||||
logmessage = ""
|
||||
message = ""
|
||||
|
||||
if request.method == "POST": # If the form has been submitted...
|
||||
rform = SvxForm(request.POST) #
|
||||
if rform.is_valid(): # All validation rules pass (how do we check it against the filename and users?)
|
||||
rcode = rform.cleaned_data["code"]
|
||||
outputtype = rform.cleaned_data["outputtype"] # used by CodeMirror ajax I think
|
||||
difflist = form.DiffCode(rcode)
|
||||
# keys = []
|
||||
# for key in rform.data:
|
||||
# keys.append(key)
|
||||
# print(">>>> ", keys)
|
||||
sfile = form.survexfile
|
||||
|
||||
if "revert" in rform.data:
|
||||
pass
|
||||
|
||||
if "process" in rform.data:
|
||||
if difflist:
|
||||
message = "SAVE FILE FIRST"
|
||||
form.data["code"] = rcode
|
||||
elif sfile:
|
||||
logmessage = form.Process()
|
||||
if logmessage:
|
||||
message = f"OUTPUT FROM PROCESSING\n{logmessage}"
|
||||
else:
|
||||
message = "SAVE VALID FILE FIRST"
|
||||
form.data["code"] = rcode
|
||||
if "save" in rform.data:
|
||||
if request.user.is_authenticated:
|
||||
if difflist:
|
||||
message = form.SaveCode(rcode)
|
||||
else:
|
||||
message = "NO DIFFERENCES - so not saving the file"
|
||||
else:
|
||||
message = "You do not have authority to save this file. Please log in."
|
||||
if message != "SAVED":
|
||||
form.data["code"] = rcode
|
||||
|
||||
if "diff" in rform.data:
|
||||
print("Differences: ")
|
||||
form.data["code"] = rcode
|
||||
|
||||
# GET, also fall-through after POST-specific handling
|
||||
svxfile = get_survexfile(survex_file)
|
||||
|
||||
if "code" not in form.data:
|
||||
form.data["code"] = form.GetDiscCode()
|
||||
if form.template:
|
||||
warning = True
|
||||
if not difflist:
|
||||
if svxfile:
|
||||
difflist.append("No differences from last saved file.")
|
||||
else:
|
||||
difflist.append("No differences from last saved file (or from initial template).")
|
||||
if message:
|
||||
difflist.insert(0, message)
|
||||
|
||||
svxincludes = re.findall(r"(?i)\*include\s+(\S+)", form.data["code"] or "")
|
||||
|
||||
# collect all the survex blocks which actually have a valid date
|
||||
if svxfile:
|
||||
has_3d = (Path(SVXPATH) / Path(survex_file + ".3d")).is_file()
|
||||
try:
|
||||
svxblocks = svxfile.survexblock_set.filter(date__isnull=False).order_by('date')
|
||||
except:
|
||||
svxblocks = []
|
||||
try:
|
||||
svxblocksall = svxfile.survexblock_set.all()
|
||||
svxlength = 0.0
|
||||
for b in svxblocksall:
|
||||
svxlength += b.legslength
|
||||
# print(svxlength,b, b.legsall)
|
||||
except AttributeError: # some survexfiles just *include files and have no blocks themselves
|
||||
svxblocksall = []
|
||||
else:
|
||||
svxblocks = []
|
||||
svxblocksall = []
|
||||
svxlength = 0.0
|
||||
has_3d = False
|
||||
if not difflist:
|
||||
difflist = ["Survex file does not exist yet"]
|
||||
|
||||
|
||||
events = events_on_dates(svxblocks)
|
||||
|
||||
vmap = {
|
||||
"settings": settings,
|
||||
"warning": warning,
|
||||
"has_3d": has_3d,
|
||||
"survexfile": svxfile,
|
||||
"svxlength": svxlength,
|
||||
"svxblocks": svxblocks,
|
||||
"svxincludes": svxincludes,
|
||||
"difflist": difflist,
|
||||
"logmessage": logmessage,
|
||||
"form": form,
|
||||
"events": events,
|
||||
}
|
||||
|
||||
if outputtype == "ajax": # used by CodeMirror ajax I think
|
||||
return render(request, "svxfiledifflistonly.html", vmap)
|
||||
|
||||
return render(request, "svxfile.html", vmap)


SameDateEvents = namedtuple('SameDateEvents', ['trips', 'svxfiles', 'wallets', 'blocks'])


def events_on_dates(svxblocks):
    """Returns a dictionary indexed by date. For each date there is a named tuple of four lists:
    logbook entries (trips), survex files (NB files, not blocks), wallets, and survex block names.
    """
    # deduplicate but maintain date order
    dates = []
    for b in svxblocks:
        if b.date not in dates:
            dates.append(b.date)
            # print(f"- {b.date}")

    events = {}
    for date in dates:
        trips = LogbookEntry.objects.filter(date=date)

        svxfiles = SurvexFile.objects.filter(survexblock__date=date).distinct()

        # https://stackoverflow.com/questions/739776/how-do-i-do-an-or-filter-in-a-django-query
        wallets = Wallet.objects.filter(Q(survexblock__date=date) | Q(walletdate=date)).distinct()

        blocks = []
        for b in svxblocks:
            if b.date == date:
                blocks.append(b.name)

        events[date] = SameDateEvents(trips=trips, svxfiles=svxfiles, wallets=wallets, blocks=blocks)
    # print(events)
    return events
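
# Illustrative shape of the returned mapping (hypothetical values, for orientation only):
#   {datetime.date(2018, 7, 10): SameDateEvents(trips=<QuerySet of LogbookEntry>,
#                                               svxfiles=<QuerySet of SurvexFile>,
#                                               wallets=<QuerySet of Wallet>,
#                                               blocks=["gschwand1", "gschwand2"])}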


# The cavern running function. This is NOT where it is run inside the form! See SvxForm.Process() for that.
def process(survex_file):
    """This runs cavern only where a .3d, .log or .err file is requested."""
    filepathsvx = SVXPATH / str(survex_file + ".svx")
    cwd = os.getcwd()
    os.chdir(os.path.split(os.fspath(SVXPATH / survex_file))[0])
    os.system(settings.CAVERN + " --log " + str(filepathsvx))
    os.chdir(cwd)

    # Update this to use the new syntax:
    # sp = subprocess.run([settings.CAVERN, "--log", f'--output={outputdir}', f'{fullpath}.svx'],
    #                     capture_output=True, check=False, text=True)
    # if sp.returncode != 0:
    #     message = f' ! Error running {settings.CAVERN}: {fullpath}'
    #     DataIssue.objects.create(parser='entrances', message=message)
    #     print(message)
    #     print('stderr:\n\n' + str(sp.stderr) + '\n\n' + str(sp.stdout) + '\n\nreturn code: ' + str(sp.returncode))

    filepatherr = Path(SVXPATH / str(survex_file + ".err"))
    if filepatherr.is_file():
        if filepatherr.stat().st_size == 0:
            filepatherr.unlink()  # delete empty closure error file
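

# A minimal sketch of the subprocess-based replacement flagged in the TODO comment above,
# assuming subprocess is imported at module level (the commented code above already relies
# on it); "process_subprocess" is a hypothetical name, not original troggle code.
def process_subprocess(survex_file):
    filepathsvx = SVXPATH / str(survex_file + ".svx")
    outputdir = filepathsvx.parent
    # run cavern without os.chdir() or shell string interpolation
    sp = subprocess.run(
        [settings.CAVERN, "--log", f"--output={outputdir}", str(filepathsvx)],
        capture_output=True, check=False, text=True,
    )
    if sp.returncode != 0:
        print(f" ! Error running {settings.CAVERN}: {filepathsvx}\n{sp.stderr}")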


def threed(request, survex_file):
    filepath3d = SVXPATH / str(survex_file + ".3d")
    if filepath3d.is_file():
        threed = open(filepath3d, "rb")
        return HttpResponse(threed, content_type="application/x-aven")
    else:
        process(survex_file)  # should not need to do this if it already exists, as it should
        log = open(SVXPATH / str(survex_file + ".log"), "r", encoding="utf-8")
        return HttpResponse(log, content_type="text")


def svxlog(request, survex_file):
    """Used for rendering .log files from survex outputtype"""
    filepathlog = SVXPATH / str(survex_file + ".log")
    if not filepathlog.is_file():
        process(survex_file)
    log = open(filepathlog, "r")
    return HttpResponse(log, content_type="text/plain; charset=utf-8")  # default: "text/html; charset=utf-8"


def err(request, survex_file):
    filepatherr = SVXPATH / str(survex_file + ".err")
    if not filepatherr.is_file():  # probably not there because it was empty, but re-run anyway
        process(survex_file)
    if filepatherr.is_file():
        err = open(filepatherr, "r")
        return HttpResponse(err, content_type="text/plain; charset=utf-8")
    else:
        return HttpResponse(
            f"No closure errors. \nEmpty {filepatherr} file produced. \nSee the .log file.",
            content_type="text/plain; charset=utf-8",
        )


def identifycavedircontents(gcavedir):
    """Find the primary survex file in each cave directory.
    This should be in a configuration, not buried in the code...

    For god's sake someone refactor this monstrosity using pathlib.
    """
    name = os.path.split(gcavedir)[1]
    subdirs = []
    subsvx = []
    primesvx = None
    for f in os.listdir(gcavedir):  # These may get outdated as data gets tidied up. This should not be in the code!
        if name == "204" and (f in ["skel.svx", "template.svx", "204withents.svx"]):
            pass
        elif name == "136" and (f in ["136-noents.svx"]):
            pass
        elif name == "115" and (f in ["115cufix.svx", "115fix.svx"]):
            pass

        elif os.path.isdir(os.path.join(gcavedir, f)):
            if f[0] != ".":
                subdirs.append(f)
        elif f[-4:] == ".svx":
            nf = f[:-4]

            if (
                nf.lower() == name.lower()
                or nf[:3] == "all"
                or (name, nf) in [("resurvey2005", "145-2005"), ("cucc", "cu115")]
            ):
                if primesvx:
                    if nf[:3] == "all":
                        # assert primesvx[:3] != "all", (name, nf, primesvx, gcavedir, subsvx)
                        primesvx = nf
                    else:
                        # assert primesvx[:3] == "all", (name, nf, primesvx, gcavedir, subsvx)
                        pass
                else:
                    primesvx = nf
            else:
                subsvx.append(nf)
        else:
            pass
            # assert re.match(".*?(?:.3d|.log|.err|.txt|.tmp|.diff|.e?spec|~)$", f), (gcavedir, f)
    subsvx.sort()
    # assert primesvx, (gcavedir, subsvx)
    if primesvx:
        subsvx.insert(0, primesvx)
    return subdirs, subsvx
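

# A hedged pathlib sketch of the directory scan asked for in the docstring above;
# "identify_pathlib" is a hypothetical name, and this deliberately omits the
# per-cave special cases and primesvx promotion, so it is not a drop-in replacement:
# def identify_pathlib(gcavedir):
#     gcavedir = Path(gcavedir)
#     subdirs = [f.name for f in gcavedir.iterdir() if f.is_dir() and not f.name.startswith(".")]
#     subsvx = sorted(f.stem for f in gcavedir.glob("*.svx"))
#     return subdirs, subsvx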


def get_survexareapath(area):
    return SVXPATH / str("caves-" + area)


# direct local non-database browsing through the svx file repositories
# every time the page is viewed! Should cache this.
def survexcaveslist(request):
    """This reads the entire list of caves in the Loser repo directory and produces a complete report.
    It can find caves which have not yet been properly registered in the system by databaseReset.py because
    someone may have uploaded the survex files without doing the rest of the integration process.

    It uses very impenetrable code in identifycavedircontents().
    """
    # TO DO - filter out the non-public caves from display UNLESS LOGGED IN
    # This is very impenetrable code, original from Aaron Curtis I think.
    onefilecaves = []
    multifilecaves = []
    subdircaves = []
    fnumlist = []

    for area in ["1623", "1626", "1624", "1627"]:
        cavesdir = get_survexareapath(area)
        arealist = sorted([(area, -int(re.match(r"\d*", f).group(0) or "0"), f) for f in os.listdir(cavesdir)])
        fnumlist += arealist

    # print(fnumlist)

    # go through the list and identify the contents of each cave directory
    for area, num, cavedir in fnumlist:

        # these have sub dirs /cucc/ /arge/ /old/ but that is no reason to hide them in this webpage
        # so these are now treated the same as 142 and 113 which also have a /cucc/ sub dir
        # if cavedir in ["144", "40"]:
        #     continue

        # This all assumes that the first .svx file has the same name as the cave name,
        # which is usually but not always true. e.g. caves-1623/78/allkaese.svx not caves-1623/78/78.svx
        # which is why we now also pass through the cavedir

        # Still fails for loutitohoehle etc even though this is set correctly when the pending cave is created
        cavesdir = get_survexareapath(area)
        gcavedir = os.path.join(cavesdir, cavedir)
        if os.path.isdir(gcavedir) and cavedir[0] != ".":
            subdirs, subsvx = identifycavedircontents(gcavedir)

            check_cave_registered(
                area, cavedir
            )  # should do this only once per database load or it will be slow
            survdirobj = []
            for lsubsvx in subsvx:
                survdirobj.append(("caves-" + area + "/" + cavedir + "/" + lsubsvx, lsubsvx))

            # caves with subdirectories
            if subdirs:
                subsurvdirs = []
                for subdir in subdirs:
                    dsubdirs, dsubsvx = identifycavedircontents(os.path.join(gcavedir, subdir))
                    # assert not dsubdirs  # handle case of empty sub directory
                    lsurvdirobj = []
                    for lsubsvx in dsubsvx:
                        lsurvdirobj.append(("caves-" + area + "/" + cavedir + "/" + subdir + "/" + lsubsvx, lsubsvx))
                    if len(dsubsvx) >= 1:
                        subsurvdirs.append(
                            (subdir, lsurvdirobj[0], lsurvdirobj[0:])
                        )  # list now includes the first item too
                if survdirobj:
                    subdircaves.append((cavedir, (survdirobj[0], survdirobj[1:]), subsurvdirs))
                else:
                    print(f" ! Subdirectory containing empty subdirectory {subdirs} in {gcavedir}")

            # multifile caves
            elif len(survdirobj) > 1:
                multifilecaves.append((survdirobj[0], cavedir, survdirobj[1:]))
            # single file caves
            elif len(survdirobj) == 1:
                onefilecaves.append(survdirobj[0])

    return render(
        request,
        "svxfilecavelist.html",
        {
            "settings": settings,
            "onefilecaves": onefilecaves,
            "multifilecaves": multifilecaves,
            "subdircaves": subdircaves,
        },
    )


def survexcavesingle(request, survex_cave):
    """Parses all the survex files of a single cave and shows that it's consistent and can find all
    the files and people. Should explicitly fix the kataster number thing.
    Kataster numbers are not unique across areas. This used to be a db constraint but we need to manage
    this ourselves as we don't want the parser aborting with an error message.

    Should use getCave() from models_caves.
    """
    sc = survex_cave
    try:
        cave = Cave.objects.get(kataster_number=sc)  # This may not be unique.
        return render(request, "svxcavesingle.html", {"settings": settings, "cave": cave})

    except ObjectDoesNotExist:
        # can get here if the survex file is in a directory labelled with unofficial number not kataster number.
        # maybe - and _ mixed up, or CUCC-2017- instead of 2017-CUCC-, or CUCC2015DL01. Let's not get carried away..

        # or it might be an exact search for a specific survex file but just missing the '.svx'.
        if (SVXPATH / Path(survex_cave + ".svx")).is_file():
            return svx(request, survex_cave)

        for unoff in [sc, sc.replace("-", "_"), sc.replace("_", "-"), sc.replace("-", ""), sc.replace("_", "")]:
            try:
                cave = Cave.objects.get(unofficial_number=unoff)  # return on first one we find
                return render(request, "svxcavesingle.html", {"settings": settings, "cave": cave})
            except ObjectDoesNotExist:
                continue  # next attempt in for loop
        return render(request, "errors/svxcavesingle404.html", {"settings": settings, "cave": sc})

    except MultipleObjectsReturned:
        caves = Cave.objects.filter(kataster_number=survex_cave)
        return render(request, "svxcaveseveral.html", {"settings": settings, "caves": caves})

    except:
        return render(request, "errors/svxcavesingle404.html", {"settings": settings, "cave": sc})


def check_cave_registered(area, survex_cave):
    """Checks whether a cave has been properly registered when it is found in the Loser repo.
    This should really be called by databaseReset, not here in a view.
    Currently Caves are only registered if they are listed in :expoweb: settings.CAVEDESCRIPTIONS
    so we need to add in any more here.

    This function runs but does not seem to be used?!
    A serious bodge anyway.
    """
    try:
        cave = Cave.objects.get(kataster_number=survex_cave)
        return str(cave)

    except MultipleObjectsReturned:
        caves = Cave.objects.filter(kataster_number=survex_cave)
        for c in caves:
            if str(c) == area + "-" + survex_cave:
                return str(c)  # just get the first that matches
        return None  # many returned but none in correct area

    except ObjectDoesNotExist:
        pass

    try:
        cave = Cave.objects.get(unofficial_number=survex_cave)  # should be unique!
        if cave.kataster_number:
            return str(cave)
        else:
            return None
    except ObjectDoesNotExist:
        pass

    return None
@@ -1,484 +0,0 @@
import subprocess
from pathlib import Path

from django import forms
from django.core.files.storage import FileSystemStorage
from django.shortcuts import render, redirect

import settings
from troggle.core.models.survex import DrawingFile

# from databaseReset import reinit_db  # don't do this. databaseReset runs code *at import time*

from .auth import login_required_if_public

"""File upload 'views'
Note that there are other file upload forms in views/wallet_edit.py
and that core/forms.py contains Django class-based forms for caves and entrances.
"""

todo = """
- Ideally we should validate an uploaded file as being a valid file type, not a dubious script or hack.
  Validate image files using a magic recogniser in walletedit()
  https://pypi.org/project/reportlab/ or
  https://stackoverflow.com/questions/889333/how-to-check-if-a-file-is-a-valid-image-file

- Write an equivalent GPX upload form system, similar to walletedit() but in expofiles/gpslogs/
  Need to validate it as being a valid GPX file using an XML parser, not a dubious script or hack.

- Validate Tunnel & Therion files using an XML parser in dwgupload(). Though Julian says
  Tunnel produces only mostly-correct XML, and it does fail at least one XML parser.

- Parse the uploaded drawing file for links to wallets and scan files as done
  in parsers/drawings.py

- Enable folder creation in dwgupload or as a separate form

- Enable file rename on expofiles, not just for /surveyscans/ (aka wallets)

- Make the file rename utility less ugly.
"""


class FilesForm(forms.Form):  # not a model-form, just a form-form
    uploadfiles = forms.FileField()


class FilesRenameForm(forms.Form):  # not a model-form, just a form-form
    uploadfiles = forms.FileField()
    renameto = forms.CharField(strip=True, required=False)


class TextForm(forms.Form):  # not a model-form, just a form-form
    photographer = forms.CharField(strip=True)


class ExpofileRenameForm(forms.Form):  # not a model-form, just a form-form
    renameto = forms.CharField(strip=True, required=False)


@login_required_if_public
def expofilerename(request, filepath):
    """Rename any single file in /expofiles/ - eventually.
    Currently this just does files within wallets, i.e. in /surveyscans/,
    and it returns control to the original wallet edit page.
    """
    if filepath:
        actualpath = Path(settings.EXPOFILES) / Path(filepath)
    else:
        message = f'\n File to rename not specified "{filepath}"'
        print(message)
        return render(request, "errors/generic.html", {"message": message})

    if not actualpath.is_file():
        message = f'\n File not found when attempting rename "{filepath}"'
        print(message)
        return render(request, "errors/generic.html", {"message": message})
    else:
        filename = Path(filepath).name
        folder = actualpath.parent
        filesize = f"{actualpath.stat().st_size:,}"

    if not actualpath.is_relative_to(Path(settings.SCANS_ROOT)):
        message = f'\n Can only do rename within wallets (expofiles/surveyscans/) currently, sorry. "{actualpath}" '
        print(message)
        return render(request, "errors/generic.html", {"message": message})

    if request.method == "POST":
        form = ExpofileRenameForm(request.POST)
        if not form.is_valid():
            message = f'Invalid form response for file renaming "{request.POST}"'
            print(message)
            return render(request, "errors/generic.html", {"message": message})
        else:
            renameto = request.POST["renameto"]

            if (folder / renameto).is_file() or (folder / renameto).is_dir():
                rename_bad = renameto
                message = f'\n Cannot rename to an existing file or folder. "{filename}" -> "{(folder / renameto)}"'
                print(message)
                return render(
                    request,
                    "renameform.html",
                    {
                        "form": form,
                        "filepath": filepath,
                        "filename": filename,
                        "filesize": filesize,
                        "rename_bad": rename_bad,
                    },
                )
            else:
                actualpath.rename((folder / renameto))
                message = f'\n RENAMED "{filename}" -> "{(folder / renameto)}"'
                print(message)
                walletid = actualpath.relative_to(Path(settings.SCANS_ROOT)).parent.stem.replace("#", ":")
                print(walletid)
                return redirect(f'/survey_scans/{walletid}/')

    else:
        form = ExpofileRenameForm()
    return render(
        request,
        "renameform.html",
        {
            "form": form,
            "filepath": filepath,
            "filename": filename,
            "filesize": filesize,
        },
    )


@login_required_if_public
def photoupload(request, folder=None):
    """Upload photo image files into /expofiles/photos/<year>/<photographer>/
    This does NOT use a Django model linked to a Django form. Just a simple Django form.
    You will find the Django documentation on forms very confusing; this is simpler.

    When uploading from a phone, it is useful to be able to rename the file to something
    meaningful, as this is difficult to do on a phone. Previously we had assumed files would
    be renamed to something useful before starting the upload.
    Unfortunately this only works when uploading one file at a time,
    which is inevitable once you think about it.

    Pending a more generic file-renaming capability.
    """
    year = settings.PHOTOS_YEAR
    filesaved = False
    actual_saved = []

    context = {"year": year, "placeholder": "AnathemaDevice"}

    yearpath = Path(settings.PHOTOS_ROOT, year)

    if folder == str(year) or folder == str(year) + "/":
        folder = None

    if folder is None:
        folder = ""  # improve this later
        dirpath = Path(settings.PHOTOS_ROOT, year)
        urlfile = f"/expofiles/photos/{year}"
        urldir = f"/photoupload/{year}"
    else:  # it will contain the year as well as the photographer
        dirpath = Path(settings.PHOTOS_ROOT, folder)
        if dirpath.is_dir():
            urlfile = f"/expofiles/photos/{folder}"
            urldir = Path("/photoupload") / folder
        else:
            folder = ""  # improve this later
            dirpath = Path(settings.PHOTOS_ROOT, year)
            urlfile = f"/expofiles/photos/{year}"
            urldir = f"/photoupload/{year}"

    form = FilesRenameForm()
    formd = TextForm()

    if request.method == "POST":
        if "photographer" in request.POST:
            formd = TextForm(request.POST)
            if formd.is_valid():
                newphotographer = request.POST["photographer"]
                try:
                    (yearpath / newphotographer).mkdir(exist_ok=True)
                except:
                    message = f'\n !! Permissions failure ?! 0 attempting to mkdir "{(yearpath / newphotographer)}"'
                    print(message)
                    return render(request, "errors/generic.html", {"message": message})

        else:
            form = FilesRenameForm(request.POST, request.FILES)
            if form.is_valid():
                f = request.FILES["uploadfiles"]
                multiple = request.FILES.getlist("uploadfiles")
                # NO CHECK that the files being uploaded are image files
                fs = FileSystemStorage(dirpath)

                renameto = request.POST["renameto"]

                actual_saved = []
                if multiple:
                    if len(multiple) == 1:
                        if renameto != "":
                            try:  # crashes in Django os.chmod call if on WSL, but does save file!
                                saved_filename = fs.save(renameto, content=f)
                            except:
                                print(
                                    f'\n !! Permissions failure ?! 1 attempting to save "{f.name}" in "{dirpath}" {renameto=}'
                                )
                            if "saved_filename" in locals():
                                if (dirpath / saved_filename).is_file():  # fs.save() returns a str, not a Path
                                    actual_saved.append(saved_filename)
                                    filesaved = True
                        else:  # multiple is the uploaded content
                            try:  # crashes in Django os.chmod call if on WSL, but does save file!
                                saved_filename = fs.save(f.name, content=f)
                            except:
                                print(
                                    f'\n !! Permissions failure ?! 2 attempting to save "{f.name}" in "{dirpath}" {renameto=}'
                                )
                            if "saved_filename" in locals():
                                if (dirpath / saved_filename).is_file():
                                    actual_saved.append(saved_filename)
                                    filesaved = True
                    else:  # multiple is a list of content
                        for f in multiple:
                            try:  # crashes in Django os.chmod call if on WSL, but does save file!
                                saved_filename = fs.save(f.name, content=f)
                            except:
                                print(
                                    f'\n !! Permissions failure ?! 3 attempting to save "{f.name}" in "{dirpath}" {renameto=}'
                                )
                            if "saved_filename" in locals():
                                if (dirpath / saved_filename).is_file():
                                    actual_saved.append(saved_filename)
                                    filesaved = True
    files = []
    dirs = []
    try:
        for f in dirpath.iterdir():
            if f.is_dir():
                dirs.append(f.name)
            if f.is_file():
                files.append(f.name)
    except FileNotFoundError:
        files.append("(no folder yet - would be created)")
    if len(files) > 0:
        files = sorted(files)

    if dirs:
        dirs = sorted(dirs)

    return render(
        request,
        "photouploadform.html",
        {
            "form": form,
            **context,
            "urlfile": urlfile,
            "urldir": urldir,
            "folder": folder,
            "files": files,
            "dirs": dirs,
            "filesaved": filesaved,
            "actual_saved": actual_saved,
        },
    )
@login_required_if_public
def dwgupload(request, folder=None, gitdisable="no"):
    """Upload DRAWING files (tunnel or therion) into the upload folder in :drawings:
    AND register them in the :drawings: git repo.

    This does NOT use a Django model linked to a Django form. Just a simple Django form.
    You will find the Django documentation on forms very confusing; this is simpler.

    We could validate the uploaded files as being valid files using an XML parser, not a dubious script or hack,
    but this won't work on Tunnel files as Tunnel does not produce exactly valid XML.

    We use get_or_create instead of simply creating a new object in case someone uploads the same file
    several times in one session, and expects them to be overwritten in the database. Although
    the actual file will be duplicated in the filesystem with a different random name ending.
    """

    def dwgvalid(name):
        if name in [
            ".gitignore",
        ]:
            return False
        if Path(name).suffix.lower() in [".xml", ".th", ".th2", "", ".svg", ".txt"]:
            return True  # dangerous, we should check the actual file binary signature
        return False

    def dwgvaliddisp(name):
        """OK to display, even if we are not going to allow a new one to be uploaded"""
        if name in [
            ".gitignore",
        ]:
            return False
        if Path(name).suffix.lower() in [
            ".xml",
            ".th",
            ".th2",
            "",
            ".svg",
            ".txt",
            ".jpg",
            ".jpeg",
            ".png",
            ".pdf",
            ".top",
            ".topo",
        ]:
            return True  # dangerous, we should check the actual file binary signature
        return False
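
    # A hedged sketch of the binary-signature check flagged as missing in the two
    # validators above; "looks_like_png_or_jpeg" is a hypothetical helper and these
    # magic numbers cover only PNG and JPEG:
    # def looks_like_png_or_jpeg(fileobj):
    #     magic = fileobj.read(8)
    #     fileobj.seek(0)
    #     return magic.startswith(b"\x89PNG\r\n\x1a\n") or magic.startswith(b"\xff\xd8\xff")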

    filesaved = False
    actual_saved = []
    refused = []
    doesnotexist = ""
    # print(f'! - FORM dwgupload - start "{folder}" - gitdisable "{gitdisable}"')
    if folder is None:
        folder = ""  # improve this later
        dirpath = Path(settings.DRAWINGS_DATA)
        urlfile = "/dwgdataraw"
        urldir = "/dwgupload"
    else:
        dirpath = Path(settings.DRAWINGS_DATA, folder)
        urlfile = Path("/dwgdataraw/") / folder
        urldir = Path("/dwgupload/") / folder

    form = FilesForm()

    if request.method == "POST":
        form = FilesForm(request.POST, request.FILES)
        if form.is_valid():
            # print(f'! - FORM dwgupload - POST valid: "{request.FILES["uploadfiles"]}" ')
            f = request.FILES["uploadfiles"]
            multiple = request.FILES.getlist("uploadfiles")
            savepath = Path(settings.DRAWINGS_DATA, folder)
            fs = FileSystemStorage(savepath)

            actual_saved = []
            refused = []

            # GIT see also core/views/expo.py editexpopage()
            # GIT see also core/models/cave.py writetrogglefile()
            if gitdisable != "yes":  # set in url 'dwguploadnogit/'
                git = settings.GIT
            else:
                git = "echo"
                # print(f'git DISABLED {f.name}')

            if multiple:
                for f in multiple:
                    # print(f'! - FORM dwgupload - file {f} in {multiple=}')
                    if dwgvalid(f.name):
                        try:  # crashes in Django os.chmod call if on WSL without metadata drvfs, but does save file!
                            saved_filename = fs.save(f.name, content=f)
                        except:
                            print(
                                f'! - FORM dwgupload - \n!! Permissions failure ?! on attempting to save file "{f.name}" in "{savepath}". Attempting to continue..'
                            )
                        if "saved_filename" in locals():
                            if Path(dirpath, saved_filename).is_file():
                                actual_saved.append(saved_filename)
                                if gitdisable != "yes":
                                    dr_add = subprocess.run(
                                        [git, "add", saved_filename], cwd=dirpath, capture_output=True, text=True
                                    )
                                    msgdata = (
                                        dr_add.stderr
                                        + "\n"
                                        + dr_add.stdout
                                        + "\nreturn code: "
                                        + str(dr_add.returncode)
                                    )
                                    # message = f'! - FORM dwgupload - Success: git ADD on server for this file {saved_filename}.' + msgdata
                                    # print(message)
                                    if dr_add.returncode != 0:
                                        msgdata = (
                                            "Ask a nerd to fix this.\n\n"
                                            + dr_add.stderr
                                            + "\n\n"
                                            + dr_add.stdout
                                            + "\n\nreturn code: "
                                            + str(dr_add.returncode)
                                        )
                                        message = (
                                            f"! - FORM dwgupload - CANNOT git ADD on server for this file {saved_filename}. Edits saved but not added to git.\n"
                                            + msgdata
                                        )
                                        print(message)
                                        return render(request, "errors/generic.html", {"message": message})
                                dwgfile, created = DrawingFile.objects.get_or_create(
                                    dwgpath=saved_filename, dwgname=Path(f.name).stem, filesize=f.size
                                )
                                dwgfile.save()
                            else:
                                message = f"! - FORM dwgupload - NOT A FILE {Path(dirpath, saved_filename)=}. "
                                print(message)
                        else:
                            message = f"! - FORM dwgupload - Save failure for {f.name}. Changes NOT saved."
                            print(message)
                            return render(request, "errors/generic.html", {"message": message})

                        if saved_filename != f.name:
                            # message = f'! - FORM dwgupload - Save RENAME {f.name} renamed as {saved_filename}. This is OK.'
                            # print(message)
                            pass

                    else:
                        refused.append(f.name)
                        # print(f'REFUSED {f.name}')

            if actual_saved:
                filesaved = True
                if len(actual_saved) > 1:
                    dots = "..."
                else:
                    dots = ""
                if gitdisable != "yes":
                    dr_commit = subprocess.run(
                        [git, "commit", "-m", f"Drawings upload - {actual_saved[0]}{dots}"],
                        cwd=dirpath,
                        capture_output=True,
                        text=True,
                    )
                    # message = f'! - FORM dwgupload - For uploading {actual_saved[0]}{dots}. Edits saved, added to git, and COMMITTED.\n' + msgdata
                    # print(message)
                    # This produces return code = 1 if it commits OK
                    if dr_commit.returncode != 0:
                        msgdata = (
                            "Ask a nerd to fix this.\n\n"
                            + dr_commit.stderr
                            + "\n"
                            + dr_commit.stdout
                            + "\nreturn code: "
                            + str(dr_commit.returncode)
                        )
                        message = (
                            f"! - FORM dwgupload - Error code with git on server for this {actual_saved[0]}{dots}. Edits saved, added to git, but NOT committed.\n"
                            + msgdata
                        )
                        print(message)
                        return render(request, "errors/generic.html", {"message": message})
                else:
                    print(f' git disabled "{git=}"')
            else:  # maybe all were refused by the suffix test in dwgvalid()
                message = f"! - FORM dwgupload - Nothing actually saved. All were refused. {actual_saved=}"
                print(message)

    files = []
    dirs = []
    # print(f'! - FORM dwgupload - start {folder=} \n"{dirpath=}" \n"{dirpath.parent=}" \n"{dirpath.exists()=}"')
    try:
        for f in dirpath.iterdir():
            if f.is_dir():
                if f.name not in [".git"]:
                    dirs.append(f.name)
                continue
            if f.is_file():
                if dwgvaliddisp(f.name):
                    files.append(f.name)
                continue
    except FileNotFoundError:
        doesnotexist = True
    if files:
        files = sorted(files)

    if dirs:
        dirs = sorted(dirs)

    return render(
        request,
        "dwguploadform.html",
        {
            "form": form,
            "doesnotexist": doesnotexist,
            "urlfile": urlfile,
            "urldir": urldir,
            "folder": folder,
            "files": files,
            "dirs": dirs,
            "filesaved": filesaved,
            "actual_saved": actual_saved,
            "refused": refused,
        },
    )
@@ -1,915 +0,0 @@
import datetime
import json
import os
import re
import socket
import subprocess
import urllib
from pathlib import Path

from django import forms
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.core.files.storage import FileSystemStorage
from django.http import HttpResponseRedirect
from django.shortcuts import render

import settings
from troggle.core.models.caves import Cave
from troggle.core.models.logbooks import LogbookEntry  # , PersonLogEntry
from troggle.core.models.survex import SurvexBlock, SurvexFile, SurvexPersonRole
from troggle.core.models.troggle import DataIssue, Expedition
from troggle.core.models.wallets import Wallet, YEAR_RANGE, make_valid_date

from troggle.core.views.auth import login_required_if_public
from troggle.core.views.caves import getCave
from troggle.core.views.scans import caveifywallet, oldwallet
from troggle.core.views.uploads import FilesForm

from troggle.parsers.scans import contentsjson


"""Main wallet editing form, which includes scan file upload into the wallet
"""

todo = """
- Nasty bug in navigating to 'previous wallet' when we have a 2-year gap in expos.
  The xxxx#00 wallet is not getting edited correctly. Something is off by one somewhere..

- Register uploaded filenames in the Django db without needing to wait for a reset & bulk file import

- Refactor walletedit() as it contains all the wallets 'complaints' code from the pre-2022
  script 'wallets.py'

- We should validate uploaded files as being valid image files, not a dubious script or hack?
"""
WALLET_BLANK_JSON = {
    "cave": "",
    "date": "",
    "free text": "",
    # "description url": "1623/NNN",
    "description written": False,
    "electronic survey": False,
    "elev drawn": False,
    "elev not required": False,
    "name": "",
    "people": ["Unknown"],
    "plan drawn": False,
    "plan not required": False,
    "notes not required": False,
    "qms written": False,
    "survex file": [],
    "survex not required": False,
    "website updated": False,
}


class WalletGotoForm(forms.Form):  # not a model-form, just a form-form
    walletgoto = forms.CharField(strip=True, required=False)


class WalletForm(forms.Form):  # not a model-form, just a form-form
    descriptionw = forms.CharField(strip=True, required=False)
    people = forms.CharField(strip=True, required=False)
    survexnr = forms.CharField(strip=True, required=False)
    qmsw = forms.CharField(strip=True, required=False)
    date = forms.CharField(strip=True, required=True)  # the only required field
    websiteupt = forms.CharField(strip=True, required=False)
    elevnr = forms.CharField(strip=True, required=False)
    cave = forms.CharField(strip=True, required=False)
    psg = forms.CharField(strip=True, required=False)
    freetext = forms.CharField(strip=True, required=False)
    plannr = forms.CharField(strip=True, required=False)
    notesnr = forms.CharField(strip=True, required=False)
    electronic = forms.CharField(strip=True, required=False)
    pland = forms.CharField(strip=True, required=False)
    elevd = forms.CharField(strip=True, required=False)
    # url = forms.CharField(strip=True, required=False)
    survex = forms.CharField(strip=True, required=False)


xlate = {
    # "url": "description url",
    "descriptionw": "description written",
    "people": "people",
    "date": "date",
    "cave": "cave",
    "plannr": "plan not required",
    "notesnr": "notes not required",
    "survexnr": "survex not required",
    "qmsw": "qms written",
    "elevnr": "elev not required",
    "websiteupt": "website updated",
    "electronic": "electronic survey",
    "pland": "plan drawn",
    "elevd": "elev drawn",
    "psg": "name",  # a name for this wallet
    "freetext": "free text",
    "survex": "survex file",
}


def get_complaints(complaints, waldata, svxfiles, files, wallet, wurl):
    """Taken from the old script wallets.py and edited to make it more comprehensible.
    Loads the survex file names and processes all complaints.
    """
    # If skipping through the wallets on the upload form, the wallet may not yet exist
    try:
        w = Wallet.objects.get(walletname=wallet)
    except ObjectDoesNotExist:
        return None, None

    # Date
    if not waldata["date"]:
        complaints.append(
            "A date is mandatory. No data can be updated or edited unless you specify a date. Look in the survex file if there is one."
        )

    # People
    if (
        not waldata["people"]
        or waldata["people"] == ["NOBODY"]
        or waldata["people"] == ["Unknown"]
        or waldata["people"] == [""]
    ):
        complaints.append(
            "Somebody must have done this. Look in the survex file, or in the logbook entries for this date, for the people who created this data."
        )

    # survex, but get_ticks has already done much of this ??
    survex_complaint = ""

    if waldata["survex file"]:
        if not type(waldata["survex file"]) == list:  # a string also is a sequence type, so do it this way
            waldata["survex file"] = [waldata["survex file"]]
        for sx in waldata["survex file"]:
            # this logic appears in several places, inc get_ticks(). Refactor.
            if sx != "":
                if Path(sx).suffix.lower() != ".svx":
                    sx = sx + ".svx"
                svxfiles.append(sx)
                if not (Path(settings.SURVEX_DATA) / sx).is_file():
                    file_complaint = f"{wallet} Incorrect survex file name. File {sx} was not found in LOSER repo"
                    complaints.append(file_complaint)
                    message = f"! {file_complaint}"
                    print(message)
                    DataIssue.objects.update_or_create(
                        parser="scans", message=message, url=wurl
                    )  # set URL to this wallet folder
                else:
                    try:
                        sxpath = str(Path(sx).with_suffix(""))
                        SurvexFile.objects.get(path=sxpath)
                    except MultipleObjectsReturned:
                        # can happen if connecting a wallet to a survex file.. i think..
                        QSsvxfiles = SurvexFile.objects.filter(path=sxpath)
                        for s in QSsvxfiles:
                            print(s.path, s.cave, s.survexdirectory)
                        # QSsvxfiles[0]  # don't know how this happened, fix later..
                    except:
                        file_complaint = (
                            f"{wallet} Survex file {sx} exists, but is not registered in the database {sxpath}. How?.."
                        )
                        complaints.append(file_complaint)
                        message = f"! {file_complaint}"
                        print(message)
                        DataIssue.objects.update_or_create(
                            parser="scans", message=message, url=wurl
                        )  # set URL to this wallet folder

    if waldata["survex not required"] and waldata["survex file"] != [""]:
        survex_complaint = (
            f'Survex is stated as not required and yet there is a survex file! ({waldata["survex file"]})'
        )
    if not waldata["survex not required"] and waldata["survex file"] == [""]:
        survex_complaint = "A survex file is required, but has not been specified!"
    if survex_complaint:
        complaints.append(survex_complaint)

    ticks = w.get_ticks()

    # Notes required
    if ticks["N"] != "green":
        complaints.append(
            "The notes need scanning (or renaming), or tick the 'Notes not required' checkbox. No noteNN.jpg or XXnote.jpg file was found. Needed even for an electronic survey."
        )

    # Plan drawing required
    if ticks["P"] != "green":
        complaints.append(
            "The plan needs drawing (or renaming, or tick the 'Plan drawn' checkbox or 'Plan not required' checkbox): no planNN.jpg or XXplan.jpg file found."
        )

    # Elev drawing required
    if ticks["E"] != "green":
        complaints.append(
            "The elevation needs drawing (or renaming, or tick the 'Elev drawn' checkbox or 'Elev not required' checkbox): no elevNN.jpg or XXelev.jpg file found."
        )

    # Therion
    if ticks["T"] != "green":
        complaints.append(
            "Tunnel or Therion drawing files need drawing, or tick the 'Plan/Elev drawn' checkboxes or the 'Plan/Elev not required' checkboxes"
        )

    # Description
    if not waldata["description written"]:
        complaints.append(
            "The guidebook description needs writing into the survex file. Tick the 'Cave description written' checkbox when this is done."
        )
    # QMs
    if not waldata["qms written"] and w.year() and int(w.year()) >= 2015:
        complaints.append(
            "The QMs need writing into the survex file. Tick the 'QMs written' checkbox when this is done."
        )

    # Website
    if not waldata["website updated"]:
        complaints.append(
            "The cave description website is marked as needing updating using the guidebook description from the survex file. Tick the 'Website updated' checkbox when this is done."
        )

    # Find the cave, if it exists
    if waldata["cave"]:
        try:
            caveid = waldata["cave"]
            if type(caveid) is list:
                for i in caveid:
                    i = i.replace("/", "-")
                    caveobject = getCave(i)  # only the last one gets recorded.. ouch.
            else:
                try:
                    caveobject = getCave(caveid)  # may fail if garbage value, e.g. space, in wallet data
                except:
                    caveobject = None
                print(f'getCave for id "{waldata["cave"]}" {caveobject}')
                # if not caveobject.url == waldata["description url"]:
                #     complaints.append(f'The URL of cave description "{waldata["description url"]}" does not match the one on record for this cave which is: "{caveobject.url}". If the wallet is not for a cave, put a useful URL here.')
        except Cave.MultipleObjectsReturned:
            complaints.append(f'The cave ID \'{waldata["cave"]}\' is AMBIGUOUS. Please fix it.')
            caveobject = None
        except ObjectDoesNotExist:
            complaints.append(f'The cave ID \'{waldata["cave"]}\' is not recognised. Please fix it.')
            caveobject = None
    else:
        complaints.append(
            'No cave ID is given. If there is no survex file, please give something, even if it is just "1623-000", "surface survey" or "scraps found in hut"'
        )
        caveobject = None

    return complaints, caveobject


@login_required_if_public
def walletedit(request, path=None):
    """Create a new wallet or upload scanned image files into a wallet on /expofiles.
    Also display AND EDIT the contents.json data in the wallet.

    This is the main wallet display and edit page.

    The Wallet object and the contents.json file are created when the user
    creates the wallet AND THEN SAVES IT WITH A DATE.

    This does NOT use a Django model linked to a Django form. Just a simple Django form.
    You will find the Django documentation on forms very confusing,
    as it covers many very different things we do not need. This is simpler.
    (See also views/uploads.py for other simpler forms, as opposed to core/forms.py
    which contains a couple of Django class-based forms.)

    This subsumes much of the code which was in the pre-2022 non-troggle wallets.py script,
    and so this function is very long indeed and needs refactoring.

    Much of the logic used here lives in the class methods of Wallet.

    REWRITE bits using the ticklist, dateify, caveify, populate etc. utility functions in core/views/scans.py
    """
    git = settings.GIT
    filesaved = False
    actual_saved = []

    def get_next_empty():
        """Gets the number for a new wallet, one above the most recent dated wallet in
        the db. A wallet with no date set is ignored, as it was only just created."""
        latest = Wallet.objects.filter(walletname__startswith="20", walletdate__isnull=False).latest('walletname')
        next = int(latest.walletname[5:]) + 1
        return f"{latest.walletname[:4]}:{next:02d}"

    def preprocess_path(path):
        if path:
            wpath = urllib.parse.unquote(path)
        else:
            return (None, get_next_empty())

        try:
            year = wpath[:4]  # if path too short, exception catches it
            sepr = wpath[4]
            y = int(year)  # exception catches non-integer [:4]
            wnumber = int(wpath[5:])  # exception catches non-numeric wallet number
            if sepr != "#" and sepr != ":":
                return (oldwallet(request, path), None)
        except:
            # if non-numeric wpath name for example
            return (oldwallet(request, path), None)

        if not re.match(r"(19|20)\d\d[:#]\d\d\d?", wpath):
            return (None, get_next_empty())

        ymin, ymax = YEAR_RANGE
        if int(year) < ymin:
            year = str(ymin + 2)
        if int(year) > ymax:
            return (None, get_next_empty())

        wallet = f"{year}:{wnumber:02d}"
        return (None, wallet)
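        # Illustrative outcomes (hypothetical inputs): "2019%2312" unquotes to
        # "2019#12" and yields (None, "2019:12"); a short or malformed path such
        # as "notawallet" is handed off to oldwallet() instead.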

    def identify_most_recent_wallet(wallet, currentyear):
        """Need to find the last wallet of the previous year.
        Has to cope with years when there are no wallets.
        Has to cope with weirdly named imported wallets from 1999 & earlier.
        Has to cope if the 'current' wallet is one that happens to 'not exist' too.

        Frankly this has just become too bizarre and we should devise a quite different
        navigation system.
        """
        current_name = wallet.replace(":", "#")

        try:
            allwallets = Wallet.objects.all().order_by('walletname')
            recent_wallet = allwallets.first()
            for w in allwallets:
                if len(w.walletname) < 5:
                    continue
                if w.walletname[4] != "#":
                    continue

                if w.walletname == current_name:
                    break
                if int(w.walletyear.year) >= int(currentyear):
                    break
                recent_wallet = w
            recent_name = recent_wallet.walletname
        except:
            raise

        recent_year = recent_name[:4]
        recent_number = recent_name[5:]

        # print(f"---identify_most_recent_wallet: {recent_year=} {recent_number=}")
        return recent_year, recent_number

    def create_nav_links(wallet):
        """Find the previous wallet and next wallet and create navigation shortcuts"""
        y = wallet[:4]
        n = wallet[5:]

        if int(n) == 0:
            recent_year, recent_number = identify_most_recent_wallet(wallet, y)
            prevy = recent_year  # same as most recent wallet
            recent_number = f"{int(recent_number):02d}"
        else:
            prevy = f"{int(y)-1}"  # the previous year
            recent_year = y  # current year
            recent_number = f"{int(n)-1:02d}"  # previous number

        nexty = f"{int(y)+1}"
        next = f"{int(n)+1:02d}"
        return prevy, recent_year, recent_number, y, next, nexty

    def read_json(contents_path):
        """Read JSON from the wallet metadata file in the repo,
        or fill with blank data if that file can't be read.

        Should sanitise to ensure no spurious backslashes, e.g. in Windows-style paths."""
        waldata = {}
        if contents_path.is_file():
            with open(contents_path) as json_file:
                try:
                    waldata = json.load(json_file)
                except:
                    message = f"! {wallet} Failed to load {contents_path} JSON file"
                    print(message)
                    DataIssue.objects.create(parser="scans", message=message, url=wurl)  # set URL to this wallet folder
                    raise
        else:  # no JSON file exists
            print("--- No JSON exists, so using default copy")
            waldata = WALLET_BLANK_JSON.copy()
        if not waldata["survex file"]:
            try:
                w = Wallet.objects.get(walletname=wallet)
                b = SurvexBlock.objects.filter(scanswallet=w)
                waldata["survex file"] = []
                for bsf in b:
                    waldata["survex file"].append(bsf.survexfile.path)
            except:
                print(f"--- No wallet {wallet} exists in database")
        return waldata

    def save_json(jsondict):
        # print(f'--- Wallet directory in :drawings: repo {newfolder=} {jsondict}')
        if not os.path.exists(contents_path.parent):
            print(f"--- No wallet directory in :drawings: repo, so creating it {contents_path.parent}")
            os.makedirs(contents_path.parent)

        with open(contents_path, "w") as jfile:
            json.dump(jsondict, jfile, indent=1)
        # print(f'--- FINISHED saving to JSON at {contents_path}')

    def make_wallet(walletname):
        """We need a Wallet object so that the Django template stuff can find the files,
        BUT we must restrict this to logged-in users otherwise spiderbots get at
        the hidden Submit button and create zillions of the buggers"""
        try:
            w, created = Wallet.objects.get_or_create(walletname=walletname)
            # print(f"--- Wallet string {walletname}, wallet object {w} created new?: {created}")
            if created:
                w.fpath = Path(settings.SCANS_ROOT, walletname[0:4], walletname)
                _ = w.year()  # sets the walletyear property as a side-effect
                w.save()
        except:
            print(f"!-- Wallet string {walletname}, FAIL TO GET or create WALLET OBJECT")
            raise
        return w

    def commit_json(waldata):
        destfolder = contents_path.parent
        dr_add = subprocess.run([git, "add", contentsjson], cwd=destfolder, capture_output=True, text=True)
        if dr_add.returncode != 0:
            msgdata = (
                "Ask a nerd to fix this.\n--"
                + dr_add.stderr
                + "\n--"
                + dr_add.stdout
                + "\n--return code: "
                + str(dr_add.returncode)
            )
            message = (
                f"CANNOT git on server for this file {contentsjson}. Edits saved but not added to git.\n\n" + msgdata
            )
            print(message)
            return render(request, "errors/generic.html", {"message": message})
        else:

            if socket.gethostname() != "expo":
                comment = f"on dev machine '{socket.gethostname()}' "
            else:
                comment = ""
            if "cave" in waldata:
                label = waldata["cave"]
            else:
                if "name" in waldata:
                    label = waldata["name"]
                else:
                    label = ""

            dr_commit = subprocess.run(
                [git, "commit", "-m", f"JSON update wallet {wallet} {label} {comment}"],
                cwd=destfolder,
                capture_output=True,
                text=True,
            )
            # This produces return code = 1 if it commits OK
            if dr_commit.returncode != 0:
                msgdata = (
                    "Ask a nerd to fix this.\n\nstderr: "
                    + dr_commit.stderr
                    + "\n\nstdout: "
                    + dr_commit.stdout
                    + "\n\nreturn code: "
                    + str(dr_commit.returncode)
                )
                message = (
                    f"Error code with git on server for this {contentsjson}. File is added to git, but NOT committed.\n"
                    + msgdata
                )
                print(message)
                return render(request, "errors/generic.html", {"message": message})

    def get_logbook_trips():
        return None

    def no_people(team):
        return (team == ["Unknown"]
                or team == [""]
                or team == "")

    def empty_string(thing):
        return (thing == [""]
                or thing == []
                or thing == ""
                or thing == "[]"
                or thing is None)

    def scan_survexblocks(svxfile):
        """Scans for *team people attached to all the survex blocks in this svxfile.
        This could be rather a lot for some compendious survex files! So it would need
        culling manually, with only those relevant put in the JSON file."""
        wallet_refs = []
        dates = []
        blocknames = []
        team = []
        try:
            blocks = SurvexBlock.objects.filter(survexfile=svxfile)
            for b in blocks:
                # print(f" - - - - {b=} {b.scanswallet.walletname} {b.date=} ")
                if b.scanswallet:
                    wallet_refs.append(b.scanswallet)  # other wallets
                    # if b.scanswallet.walletname == wallet:  # only if we assume *ref all correct!
                if b.date:
                    dates.append(b.date)
                if b.name != b.title:
                    blocknames.append(str(b.name) + "|" + str(b.title))
                else:
                    blocknames.append(str(b.name))
                QSpeople = SurvexPersonRole.objects.filter(survexblock=b)
                # print(f" - - {QSpeople=}")
                for p in QSpeople:
                    # print(f" - - {p.personname} ")
                    team.append(p.personname)
        except:
            message = " - No associated survex blocks found for this wallet"
            print(message)
        # print(" - - - ", wallet_refs, dates, blocknames, team)
        return wallet_refs, dates, blocknames, team

    def scan_survexfiles(survex_paths):
        """Read data from the list of survex file names attached to the wallet JSON file.

        NEED TO ALSO CHECK survex files which have a *ref to this wallet!
        """
        cave_refs = []
        wallet_refs = []
        caves = []
        dates = []
        names = []
        team = []

        if not type(survex_paths) == list:  # a string also is a sequence type, so do it this way
            survex_paths = [survex_paths]

        for svxf in survex_paths:
            if not svxf:  # skip blank strings
                continue

            # print(f" - - {svxf=} ")
            svx = Path(svxf)
            if svx.suffix.lower() != ".svx":
                svx = svx.with_suffix(".svx")
            f = Path(settings.SURVEX_DATA) / svx
            if not f.is_file():
                continue

            fpath = svx.parent / svx.stem
            # print(f' - {fpath=}')
            try:
                svxfile = SurvexFile.objects.get(path=fpath)

                if svxfile.cave:
                    caves.append(svxfile.cave)
                    cave_refs.append(svxfile.cave.reference())  # this is a string?!

                w, d, n, t = scan_survexblocks(svxfile)
                wallet_refs.extend(w)
                dates.extend(d)
                names.extend(n)
                team.extend(t)
            except:
                message = "Specified survex file not found - database may be empty."
                print(message)
                # This failure will also get picked up by the "S" colour code red or orange

        caves = list(set(caves))
        if len(caves) == 1:
            caves = caves[0]
        elif len(caves) > 1:
            print(
                f" - More than one Cave {caves} in this wallet {wallet}. Not managed in this troggle release."
            )

        if len(names) == 1:
            names = names[0]
        elif len(names) > 1:
            names = f"several, please edit: {names}"
            print(
                f" - More than one block name is relevant {names} in this wallet {wallet}. Not managed in this troggle release."
            )

        cave_refs = list(set(cave_refs))
        firstdate = None
        if dates:
            firstdate = min(dates).isoformat()
        return firstdate, list(set(team)), caves, cave_refs, wallet_refs, names

    checkboxes = [
        "description written",
        "survex not required",
        "qms written",
        "website updated",
        "plan not required",
        "plan drawn",
        "elev not required",
        "elev drawn",
        "notes not required",
        "electronic survey",
    ]
|
||||
|
||||
redirect, wallet = preprocess_path(path)
|
||||
if redirect:
|
||||
return redirect
|
||||
prevy, recent_year, recent_number, year, next, nexty = create_nav_links(wallet)
|
||||
|
||||
wurl = f"/walletedit/{wallet}".replace("#", ":")
|
||||
wallet = wallet.replace(":", "#")
|
||||
dirpath = Path(settings.SCANS_ROOT, year, wallet)
|
||||
contents_path = Path(settings.DRAWINGS_DATA, "walletjson") / year / wallet / contentsjson
|
||||
|
||||
fresh_wallet = False
|
||||
|
||||
form = FilesForm()
|
||||
|
||||
if request.method == "POST":
        # print(f'--- POST processing starts {wallet=} {path=}')
        if "psg" in request.POST:  # handle the metadata form
            formj = WalletForm(request.POST)
            # Beware. All fields are returned as strings, so they must be re-typed
            # as lists etc. before using or re-saving.

            # Unset checkboxes do not return any value, checked ones return "True",
            # so all of them need initialising to False.
            if formj.is_valid():
                posted = request.POST.copy()
                posted.pop("csrfmiddlewaretoken")  # discard this
                wd = WALLET_BLANK_JSON.copy()
                for f in checkboxes:
                    wd[f] = False
                    # print(f'--- wd ${f}$ - {wd[f]}')
                for f in posted:
                    wd[xlate[f]] = posted[f].replace("'", '"')
                    print(f"'{f}' -{xlate[f]}- {posted[f]}")
                    if posted[f] == "True":
                        wd[xlate[f]] = True

                newdate = make_valid_date(posted["date"])
                wd["people"] = wd["people"][1:-1].replace('"', "").split(",")
                for i, elem in enumerate(wd["people"]):
                    wd["people"][i] = elem.strip()
                if wd["cave"]:
                    if wd["cave"][0] == "[":
                        wd["cave"] = wd["cave"][1:-1].replace('"', "").split(",")
                        for i, elem in enumerate(wd["cave"]):
                            wd["cave"][i] = elem.strip()

                if wd["survex file"]:  # allow for no survex file at all
                    if wd["survex file"][0] == "[":
                        wd["survex file"] = wd["survex file"][1:-1]
                        wd["survex file"] = wd["survex file"].replace('"', "").split(",")
                        for i, elem in enumerate(wd["survex file"]):
                            wd["survex file"][i] = elem.strip()

                save_json(wd)
                # walletobject will already exist as creation does not happen here anymore
                walletobject = make_wallet(wallet)
                walletobject.walletdate = newdate  # must be a valid date
                walletobject.save()
                print(f"--- Set VALID new date in db {walletobject} {walletobject.walletdate}")
                commit_json(wd)

            else:
                print("--- INVALID JSON Update form submitted")
                print(formj.errors)
                return render(request, "errors/generic.html", {"message": formj.errors})

        elif "walletgoto" in request.POST:
            # not editing wallet data or uploading a file: going direct to a named wallet
            formg = WalletGotoForm(request.POST, request.FILES)
            if formg.is_valid():
                walletgoto = request.POST["walletgoto"]

                return HttpResponseRedirect(f'/walletedit/{walletgoto.replace("#",":")}')

        else:  # Creating a wallet.
            # NOT editing wallet data, or uploading a file. Should not overwrite metadata at all.
            if "submitbutton" in request.POST:
                print(f"--- Submit button value {request.POST['submitbutton']}")
                if request.POST['submitbutton'] == "Create":
                    w = WALLET_BLANK_JSON.copy()
                    save_json(w)
                    walletobject = make_wallet(wallet)  # no date set yet
                    commit_json(w)

            form = FilesForm(request.POST, request.FILES)
            if form.is_valid():
                # print(f'--- FORM walletedit multiple BUT EMPTY METADATA supposedly {WALLET_BLANK_JSON["date"]=}')
                multiple = request.FILES.getlist("uploadfiles")
                fs = FileSystemStorage(os.path.join(dirpath))  # creates the wallet folder if necessary

                waldata = read_json(contents_path)
                actual_saved = []
                if multiple:
                    for f in multiple:
                        try:  # crashes in Django's os.chmod call if on WSL, but does save the file!
                            saved_filename = fs.save(f.name, content=f)
                        except:
                            print(f"\n !! Permissions failure ?! on attempting to save scanfile {f.name}")
                        if "saved_filename" in locals():
                            if (dirpath / saved_filename).is_file():  # fs.save returns a name relative to the storage location, not a Path (assumed fix)
                                actual_saved.append(saved_filename)
                    # print(f'! - FORM walletedit multiple {actual_saved}')
                    filesaved = True
                    # print(f'--- FORM walletedit multiple BUT EMPTY METADATA supposedly {WALLET_BLANK_JSON["date"]=}')
                    save_json(waldata)
                    walletobject = make_wallet(wallet)
                    commit_json(waldata)
            else:
                print("--- Upload files form invalid, which is correct if just created.")
    #
    # Not a POST, so a GET starts here. And control also gets here after a POST is processed.
    #
    files = []
    dirs = []
    # print(f'! - FORM walletedit - start {wallet} {dirpath}')
    if dirpath.is_dir():
        create = False  # wallet exists because the folder exists, even if there is nothing in it
        try:
            for f in dirpath.iterdir():
                if f.is_dir():
                    for d in f.iterdir():
                        dirs.append(f"{f.name}/{d.name}")
                if f.is_file():
                    files.append(f.name)
        except FileNotFoundError:
            files.append(
                "(No wallet yet. It would be created if you upload a scan and then save the form with a date.)"
            )
    else:
        # either on GET or on dropping through after the POST creating a new wallet object:
        if Wallet.objects.filter(walletname=wallet).exists():
            create = False
        else:
            create = True

    if len(files) > 0:
        files = sorted(files)

    if dirs:
        dirs = sorted(dirs)
    try:
        waldata = read_json(contents_path)
    except:
        message = f"Nasty failure in parsing wallet metadata in {contents_path}. Probably a backslash instead of a forward slash in a filename path."
        return render(request, "errors/generic.html", {"message": message})

    jsonfile = Path(settings.DRAWINGS_DATA, "walletjson") / wallet[0:4] / wallet / "contents.json"
    # print(f'! - FORM walletedit - jsonfile {jsonfile}')
    if not Path(jsonfile).is_file():
        metadataurl = ""
        metadata = ""
    else:
        metadataurl = Path("/dwgdataraw", "walletjson") / wallet[0:4] / wallet.replace("#", ":") / "contents.json"
        with open(jsonfile, 'r') as f:
            metadata = f.read()
    psg = ""
    freetext = ""
    chkplannr = ""
    chknotesnr = ""
    chkpland = ""
    svxfiles = []
    trips = []
    checked = {}
    context = {}
    if not waldata:  # should never happen: waldata is populated with blank data if the json file doesn't exist
        message = " !! No wallet data initialised or read! This should not happen."
        print(message)
        return render(request, "errors/generic.html", {"message": message})

    refs = []

    if "survex file" in waldata:
        date, team, caves, caverefs, wallet_refs, names = scan_survexfiles(waldata["survex file"])
        # Override the discovered values with those in the JSON file:
        if not waldata["date"]:  # either absent or an empty string
            waldata["date"] = date

        if no_people(waldata["people"]):
            people = team
            waldata["people"] = team
        else:
            people = waldata["people"]  # text string

        if empty_string(waldata["cave"]):
            cave = caverefs  # a list, potentially
            waldata["cave"] = cave
        else:
            cave = waldata["cave"]  # text string

        if empty_string(waldata["name"]):
            psg = names
            waldata["name"] = names
        else:
            psg = waldata["name"]

        if "free text" in waldata:
            freetext = waldata["free text"]

        if 'notes not required' not in waldata:  # cope with schema change
            waldata['notes not required'] = False

        # find trips and survex files of the same date
        walletobject = make_wallet(wallet)
        if waldata["date"]:
            samedate = make_valid_date(waldata["date"])
            walletobject.walletdate = samedate
            walletobject.save()

            try:
                thisexpo = Expedition.objects.get(year=int(year))
            except:  # perhaps creating a wallet for an expo that does not exist
                message = f"Trying to access an Expo for '{year}' which does not exist (yet)."
                message += " See /handbook/computing/newyear.html"
                print(message)
                return render(request, "errors/generic.html", {"message": message})
            if samedate:
                svxothers = SurvexFile.objects.filter(survexblock__date=samedate).distinct()
                trips = LogbookEntry.objects.filter(date=samedate)
                wallets = Wallet.objects.filter(walletdate=samedate)
            else:
                svxothers = None
                trips = None
                wallets = None

        else:
            svxothers = None
            trips = None
            wallets = None

    # Survex and survex complaints; this comes from the json file on disc, not pre-populated as above
    complaints, caveobject = get_complaints([], waldata, svxfiles, files, wallet, wurl)
    # print(f' - {caveobject=}')

    for f in checkboxes:
        if waldata[f]:
            checked[f] = "checked"

    survexsize = str(min(len(str(waldata["survex file"])), 46))

    try:
        thiswallet = walletobject  # Wallet.objects.get(walletname=wallet)
        caveifywallet(thiswallet)
        thiswallet.ticks = thiswallet.get_ticks()  # the complaints in colour form
        # fixsurvextick(thiswallet, thiswallet.ticks)
        # print(f"--- {wallet} {thiswallet} walletdate={thiswallet.walletdate} immediately before form render")
    except:
        thiswallet = None
    context = {
        "year": year,
        "recent_year": recent_year,
        "recent_number": recent_number,
        "next": next,
        "prevy": prevy,
        "nexty": nexty,
        "files": files,
        "dirs": dirs,
        "waldata": waldata,
        "svxfiles": svxfiles,
        "survex": waldata["survex file"],
        "survexsize": survexsize,
        "checked": checked,
        "trips": trips,
        "manywallets": [thiswallet],
        "wallets": wallets,
        "svxothers": svxothers,
        "create": create,
        "metadataurl": metadataurl,
        "metadata": metadata,
        "complaints": complaints,
        "caveobject": caveobject,
        "people": people,
        "peoplesize": str(len(str(people))),
        "filesaved": filesaved,
        "actual_saved": actual_saved,
    }

    return render(
        request,
        "walletform.html",
        {
            "form": form,
            "wallet": wallet,
            **context,
            "date": waldata["date"],
            # 'url': waldata["description url"], 'urlsize': str(len(str(waldata["description url"]))),
            "cave": cave,
            "psg": psg,
            "freetext": freetext,
            "psgsize": str(max(12, len(str(psg)))),
            "freetextsize": str(max(60, len(str(freetext)))),
        },
    )

525 core/views_caves.py Normal file
@@ -0,0 +1,525 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

from troggle.core.models import CaveSlug, Cave, CaveAndEntrance, Survey, Expedition, QM, CaveDescription, EntranceSlug, Entrance, Area, SurvexStation
from troggle.core.forms import CaveForm, CaveAndEntranceFormSet, VersionControlCommentForm, EntranceForm, EntranceLetterForm
import troggle.core.models as models
import troggle.settings as settings
from troggle.helper import login_required_if_public

from django.forms.models import modelformset_factory
from django import forms
from django.core.urlresolvers import reverse
from utils import render_with_context  # see views_logbooks for an explanation of this
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
import re, urlparse
from django.shortcuts import get_object_or_404

from PIL import Image, ImageDraw, ImageFont
import string, os, sys, subprocess

def getCave(cave_id):
    """Returns a cave object when given a cave name or number. It is used by views including cavehref, ent, and qm."""
    try:
        cave = Cave.objects.get(kataster_number=cave_id)
    except Cave.DoesNotExist:
        cave = Cave.objects.get(unofficial_number=cave_id)
    return cave


def pad5(x):
    return "0" * (5 - len(x.group(0))) + x.group(0)

def padnumber(x):
    return re.sub("\d+", pad5, x)

def numericalcmp(x, y):
    return cmp(padnumber(x), padnumber(y))


def caveCmp(x, y):
    if x.kataster_number:
        if y.kataster_number:
            return numericalcmp(x.kataster_number, y.kataster_number)  # Note that cave kataster numbers are not generally integers.
        else:
            return -1
    else:
        if y.kataster_number:
            return 1
        else:
            return numericalcmp(x.unofficial_number, y.unofficial_number)
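
# Worked example (not in the original): padnumber() zero-pads every digit run to five
# characters so that plain string comparison sorts numerically:
#   padnumber("40a")  -> "00040a"
#   padnumber("107")  -> "00107"
# hence numericalcmp() orders "40a" before "107", as kataster numbering expects.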

def caveindex(request):
    caves = Cave.objects.all()
    notablecavehrefs = settings.NOTABLECAVESHREFS
    notablecaves = [Cave.objects.get(kataster_number=kataster_number) for kataster_number in notablecavehrefs]
    caves1623 = list(Cave.objects.filter(area__short_name="1623"))
    caves1626 = list(Cave.objects.filter(area__short_name="1626"))
    caves1623.sort(caveCmp)
    caves1626.sort(caveCmp)
    return render_with_context(request, 'caveindex.html', {'caves1623': caves1623, 'caves1626': caves1626, 'notablecaves': notablecaves, 'cavepage': True})

def millenialcaves(request):
    # RW messing-around area
    return HttpResponse("Test text", content_type="text/plain")


def cave3d(request, cave_id=''):
    cave = getCave(cave_id)
    survexfilename = settings.SURVEX_DATA + cave.survex_file
    threedfilename = settings.THREEDCACHEDIR + '%s.3d' % cave_id
    # the "True or" forces a cavern re-run every time; the mtime comparison is currently dead code
    if True or os.path.getmtime(survexfilename) > os.path.getmtime(threedfilename):
        subprocess.call(["cavern", "--output=%s" % threedfilename, survexfilename])
    test_file = open(threedfilename, 'rb')
    response = HttpResponse(content=test_file, content_type='application/3d')  # mimetype was replaced by content_type for django 1.7
    response['Content-Disposition'] = 'attachment; filename=%s.3d' % cave_id
    # response['X-Sendfile'] = "%s.3d" % cave_id
    # It's usually a good idea to set the 'Content-Length' header too.
    # You can also set any other required headers: Cache-Control, etc.
    return response
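
# Illustrative (paths invented for the example): for cave 1623-204 the call above amounts to
#   cavern --output=<THREEDCACHEDIR>/204.3d <SURVEX_DATA>/<cave.survex_file>
# after which the freshly-built .3d file is streamed back as an attachment.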

def cave(request, cave_id='', offical_name=''):  # sic: "offical_name" presumably has to match the URL pattern kwarg
    cave = getCave(cave_id)
    if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
        return render_with_context(request, 'nonpublic.html', {'instance': cave, 'cavepage': True, 'cave_id': cave_id})
    else:
        return render_with_context(request, 'cave.html', {'settings': settings, 'cave': cave, 'cavepage': True, 'cave_id': cave_id})

def caveEntrance(request, slug):
    cave = Cave.objects.get(caveslug__slug=slug)
    if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
        return render_with_context(request, 'nonpublic.html', {'instance': cave})
    else:
        return render_with_context(request, 'cave_entrances.html', {'cave': cave})

def caveDescription(request, slug):
    cave = Cave.objects.get(caveslug__slug=slug)
    if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
        return render_with_context(request, 'nonpublic.html', {'instance': cave})
    else:
        return render_with_context(request, 'cave_uground_description.html', {'cave': cave})

def caveQMs(request, slug):
    cave = Cave.objects.get(caveslug__slug=slug)
    if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
        return render_with_context(request, 'nonpublic.html', {'instance': cave})
    else:
        return render_with_context(request, 'cave_qms.html', {'cave': cave})

def caveLogbook(request, slug):
    cave = Cave.objects.get(caveslug__slug=slug)
    if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
        return render_with_context(request, 'nonpublic.html', {'instance': cave})
    else:
        return render_with_context(request, 'cave_logbook.html', {'cave': cave})

def caveSlug(request, slug):
    cave = Cave.objects.get(caveslug__slug=slug)
    if cave.non_public and settings.PUBLIC_SITE and not request.user.is_authenticated():
        return render_with_context(request, 'nonpublic.html', {'instance': cave, 'cave_editable': slug})
    else:
        return render_with_context(request, 'cave.html', {'cave': cave, 'cave_editable': slug})

@login_required_if_public
def edit_cave(request, slug=None):
    if slug is not None:
        cave = Cave.objects.get(caveslug__slug=slug)
    else:
        cave = Cave()
    if request.POST:
        form = CaveForm(request.POST, instance=cave)
        ceFormSet = CaveAndEntranceFormSet(request.POST)
        versionControlForm = VersionControlCommentForm(request.POST)
        if form.is_valid() and ceFormSet.is_valid() and versionControlForm.is_valid():
            cave = form.save(commit=False)
            if slug is None:
                for a in form.cleaned_data["area"]:
                    if a.kat_area():
                        myArea = a.kat_area()
                if form.cleaned_data["kataster_number"]:
                    myslug = "%s-%s" % (myArea, form.cleaned_data["kataster_number"])
                else:
                    myslug = "%s-%s" % (myArea, form.cleaned_data["unofficial_number"])
            else:
                myslug = slug
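            # Illustrative: a new cave in kataster area 1623 with kataster_number 204
            # gets myslug "1623-204", and hence filename "1623-204.html" below.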
            cave.filename = myslug + ".html"
            cave.save()
            form.save_m2m()
            if slug is None:
                cs = CaveSlug(cave=cave, slug=myslug, primary=True)
                cs.save()
            ceinsts = ceFormSet.save(commit=False)
            for ceinst in ceinsts:
                ceinst.cave = cave
                ceinst.save()
            cave.writeDataFile()
            return HttpResponseRedirect("/" + cave.url)
    else:
        form = CaveForm(instance=cave)
        ceFormSet = CaveAndEntranceFormSet(queryset=cave.caveandentrance_set.all())
        versionControlForm = VersionControlCommentForm()

    return render_with_context(request,
                               'editcave2.html',
                               {'form': form,
                                'caveAndEntranceFormSet': ceFormSet,
                                'versionControlForm': versionControlForm
                                })

@login_required_if_public
def editEntrance(request, caveslug, slug=None):
    cave = Cave.objects.get(caveslug__slug=caveslug)
    if slug is not None:
        entrance = Entrance.objects.get(entranceslug__slug=slug)
    else:
        entrance = Entrance()
    if request.POST:
        form = EntranceForm(request.POST, instance=entrance)
        versionControlForm = VersionControlCommentForm(request.POST)
        if slug is None:
            entletter = EntranceLetterForm(request.POST)
        else:
            entletter = None
        if form.is_valid() and versionControlForm.is_valid() and (slug is not None or entletter.is_valid()):
            entrance = form.save(commit=False)
            if slug is None:
                slugname = cave.slug() + entletter.cleaned_data["entrance_letter"]
                entrance.cached_primary_slug = slugname
                entrance.filename = slugname + ".html"
            entrance.save()
            if slug is None:
                es = EntranceSlug(entrance=entrance, slug=slugname, primary=True)
                es.save()
                el = entletter.save(commit=False)
                el.cave = cave
                el.entrance = entrance
                el.save()
            entrance.writeDataFile()
            return HttpResponseRedirect("/" + cave.url)
    else:
        form = EntranceForm(instance=entrance)
        versionControlForm = VersionControlCommentForm()
        if slug is None:
            entletter = EntranceLetterForm()  # unbound: request.POST is empty on a GET
        else:
            entletter = None
    return render_with_context(request,
                               'editentrance.html',
                               {'form': form,
                                'versionControlForm': versionControlForm,
                                'entletter': entletter
                                })

def qm(request, cave_id, qm_id, year, grade=None):
    year = int(year)
    try:
        qm = getCave(cave_id).get_QMs().get(number=qm_id, found_by__date__year=year)
        return render_with_context(request, 'qm.html', locals())

    except QM.DoesNotExist:
        url = urlparse.urljoin(settings.URL_ROOT, r'/admin/core/qm/add/' + '?' + r'number=' + qm_id)
        if grade:
            url += r'&grade=' + grade
        return HttpResponseRedirect(url)

def ent(request, cave_id, ent_letter):
    cave = Cave.objects.filter(kataster_number=cave_id)[0]
    cave_and_ent = CaveAndEntrance.objects.filter(cave=cave).filter(entrance_letter=ent_letter)[0]
    return render_with_context(request, 'entrance.html', {'cave': cave,
                                                          'entrance': cave_and_ent.entrance,
                                                          'letter': cave_and_ent.entrance_letter, })

def entranceSlug(request, slug):
    entrance = Entrance.objects.get(entranceslug__slug=slug)
    if entrance.non_public and not request.user.is_authenticated():
        return render_with_context(request, 'nonpublic.html', {'instance': entrance})
    else:
        return render_with_context(request, 'entranceslug.html', {'entrance': entrance})

def survexblock(request, survexpath):
    survexpath = re.sub("/", ".", survexpath)
    # print "jjjjjj", survexpath
    survexblock = models.SurvexBlock.objects.get(survexpath=survexpath)
    # ftext = survexblock.filecontents()
    ftext = survexblock.text
    return render_with_context(request, 'survexblock.html', {'survexblock': survexblock, 'ftext': ftext, })

def surveyindex(request):
    surveys = Survey.objects.all()
    expeditions = Expedition.objects.order_by("-year")
    return render_with_context(request, 'survey.html', locals())

def survey(request, year, wallet_number):
    surveys = Survey.objects.all()
    expeditions = Expedition.objects.order_by("-year")
    current_expedition = Expedition.objects.filter(year=year)[0]

    if wallet_number != '':
        current_survey = Survey.objects.filter(expedition=current_expedition, wallet_number=wallet_number)[0]
        notes = current_survey.scannedimage_set.filter(contents='notes')
        planSketches = current_survey.scannedimage_set.filter(contents='plan')
        elevationSketches = current_survey.scannedimage_set.filter(contents='elevation')

    return render_with_context(request, 'survey.html', locals())

def cave_description(request, cavedescription_name):
    cave_description = get_object_or_404(CaveDescription, short_name=cavedescription_name)
    return render_with_context(request, 'cave_description.html', locals())

def get_entrances(request, caveslug):
    cave = Cave.objects.get(caveslug__slug=caveslug)
    return render_with_context(request, 'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})

def get_qms(request, caveslug):
    # NB this is currently a copy of get_entrances and returns entrances, not QMs
    cave = Cave.objects.get(caveslug__slug=caveslug)
    return render_with_context(request, 'options.html', {"items": [(e.entrance.slug(), e.entrance.slug()) for e in cave.entrances()]})

areanames = [
    # ('', 'Location unclear'),
    ('1a', '1a – Plateau: around Top Camp'),
    ('1b', '1b – Western plateau near 182'),
    ('1c', '1c – Eastern plateau near 204 walk-in path'),
    ('1d', '1d – Further plateau around 76'),
    ('2a', '2a – Southern Schwarzmooskogel near 201 path and the Nipple'),
    ('2b', '2b – Eishöhle area'),
    ('2b or 4 (unclear)', '2b or 4 (unclear)'),
    ('2c', '2c – Kaninchenhöhle area'),
    ('2d', '2d – Steinbrückenhöhle area'),
    ('3', '3 – Bräuning Alm'),
    ('4', '4 – Kratzer valley'),
    ('5', '5 – Schwarzmoos-Wildensee'),
    ('6', '6 – Far plateau'),
    ('1626 or 6 (borderline)', '1626 or 6 (borderline)'),
    ('7', '7 – Egglgrube'),
    ('8a', '8a – Loser south face'),
    ('8b', '8b – Loser below Dimmelwand'),
    ('8c', '8c – Augst See'),
    ('8d', '8d – Loser-Hochganger ridge'),
    ('9', '9 – Gschwandt Alm'),
    ('10', '10 – Altaussee'),
    ('11', '11 – Augstbach'),
]


def prospecting(request):
    # for key, name in areanames:
    #     print key, Area.objects.get(short_name = key)
    areas = []
    for key, name in areanames:
        a = Area.objects.get(short_name=key)
        caves = list(a.cave_set.all())
        caves.sort(caveCmp)
        areas.append((name, a, caves))
    return render_with_context(request, 'prospecting.html', {"areas": areas})

# Parameters for the big map and the zoomed subarea maps:
# big map first (its zoom factor is ignored)

maps = {
    # id      left      top      right    bottom   zoom
    #         G&K       G&K      G&K      G&K      factor
    "all":   [33810.4, 85436.5, 38192.0, 81048.2, 0.35,
              "All"],
    "40":    [36275.6, 82392.5, 36780.3, 81800.0, 3.0,
              "Eishöhle"],
    "76":    [35440.0, 83220.0, 36090.0, 82670.0, 1.3,
              "Eislufthöhle"],
    "204":   [36354.1, 84154.5, 37047.4, 83300,   3.0,
              "Steinbrückenhöhle"],
    "tc":    [35230.0, 82690.0, 36110.0, 82100.0, 3.0,
              "Near Top Camp"],
    "grieß": [36000.0, 86300.0, 38320.0, 84400.0, 4.0,
              "Grießkogel Area"],
}

for n in maps.keys():  # in Python 2, keys() is a list snapshot, so inserting into maps below is safe
    L, T, R, B, S, name = maps[n]
    W = (R - L) / 2
    H = (T - B) / 2
    for i in range(2):
        for j in range(2):
            maps["%s%i%i" % (n, i, j)] = [L + i * W, T - j * H, L + (i + 1) * W, T - (j + 1) * H, S, name]
# Keys in the order in which we want the maps output
mapcodes = ["all", "grieß", "40", "76", "204", "tc"]

# Field codes (indexes into the per-map lists above)
L = 0
T = 1
R = 2
B = 3
ZOOM = 4
DESC = 5

areacolours = {
    '1a': '#00ffff',
    '1b': '#ff00ff',
    '1c': '#ffff00',
    '1d': '#ffffff',
    '2a': '#ff0000',
    '2b': '#00ff00',
    '2c': '#008800',
    '2d': '#ff9900',
    '3':  '#880000',
    '4':  '#0000ff',
    '6':  '#000000',  # doubles for surface fixed points, and anything else
    '7':  '#808080',
}


for FONT in [
        "/usr/share/fonts/truetype/freefont/FreeSans.ttf",
        "/usr/X11R6/lib/X11/fonts/truetype/arial.ttf",
        r"C:\WINNT\Fonts\ARIAL.TTF",
]:
    if os.path.isfile(FONT):
        break
TEXTSIZE = 16
CIRCLESIZE = 8
LINEWIDTH = 2
myFont = ImageFont.truetype(FONT, TEXTSIZE)

def mungecoord(x, y, mapcode, img):
    # Top of Zinken is 73 1201 = dataset 34542 81967
    # Top of Hinter is 1073 562 = dataset 36670 83317
    # image is 1417 by 2201
    # FACTOR1 = 1000.0 / (36670.0-34542.0)
    # FACTOR2 = (1201.0-562.0) / (83317 - 81967)
    # FACTOR = (FACTOR1 + FACTOR2)/2
    # The factors aren't the same as the scanned map is at a slight angle. I
    # can't be bothered to fix this. Since we zero on the Hinter it makes
    # very little difference for caves in the areas round 76 or 204.
    # xoffset = (x - 36670)*FACTOR
    # yoffset = (y - 83317)*FACTOR
    # return (1073 + xoffset, 562 - yoffset)

    m = maps[mapcode]
    factorX, factorY = img.size[0] / (m[R] - m[L]), img.size[1] / (m[T] - m[B])
    return ((x - m[L]) * factorX, (m[T] - y) * factorY)
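
# Worked example (not in the original): on the "all" map (L=33810.4, R=38192.0) with an
# image 1417 px wide, a station at x = 36001.2 lands at
#   (36001.2 - 33810.4) * (1417 / (38192.0 - 33810.4)) ≈ 708 px
# from the left edge; y is scaled the same way but measured down from the top edge T.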

COL_TYPES = {True: "red",
             False: "#dddddd",
             "Reference": "#dddddd"}

def plot(surveypoint, number, point_type, label, mapcode, draw, img):
    try:
        ss = SurvexStation.objects.lookup(surveypoint)
        E, N = ss.x, ss.y
        shortnumber = number.replace("—", "")
        (x, y) = map(int, mungecoord(E, N, mapcode, img))
        # imgmaps[maparea].append( [x-4, y-SIZE/2, x+4+draw.textsize(shortnumber)[0], y+SIZE/2, shortnumber, label] )
        draw.rectangle([(x + CIRCLESIZE, y - TEXTSIZE / 2), (x + CIRCLESIZE * 2 + draw.textsize(shortnumber)[0], y + TEXTSIZE / 2)], fill="#ffffff")
        draw.text((x + CIRCLESIZE * 1.5, y - TEXTSIZE / 2), shortnumber, fill="#000000")
        draw.ellipse([(x - CIRCLESIZE, y - CIRCLESIZE), (x + CIRCLESIZE, y + CIRCLESIZE)], fill=COL_TYPES[point_type], outline="#000000")
    except:
        pass  # stations that cannot be looked up are simply not plotted

def prospecting_image(request, name):

    mainImage = Image.open(os.path.join(settings.SURVEY_SCANS, "location_maps", "pguidemap.jpg"))
    if settings.PUBLIC_SITE and not request.user.is_authenticated():
        mainImage = Image.new("RGB", mainImage.size, '#ffffff')
    m = maps[name]
    # imgmaps = []
    if name == "all":
        img = mainImage
    else:
        M = maps['all']
        W, H = mainImage.size
        l = int((m[L] - M[L]) / (M[R] - M[L]) * W)
        t = int((m[T] - M[T]) / (M[B] - M[T]) * H)
        r = int((m[R] - M[L]) / (M[R] - M[L]) * W)
        b = int((m[B] - M[T]) / (M[B] - M[T]) * H)
        img = mainImage.crop((l, t, r, b))
        w = int(round(m[ZOOM] * (m[R] - m[L]) / (M[R] - M[L]) * W))
        h = int(round(m[ZOOM] * (m[B] - m[T]) / (M[B] - M[T]) * H))
        img = img.resize((w, h), Image.BICUBIC)
    draw = ImageDraw.Draw(img)
    draw.setfont(myFont)
    if name == "all":
        for maparea in maps.keys():
            if maparea == "all":
                continue
            localm = maps[maparea]
            l, t = mungecoord(localm[L], localm[T], "all", img)
            r, b = mungecoord(localm[R], localm[B], "all", img)
            text = maparea + " map"
            textlen = draw.textsize(text)[0] + 3
            draw.rectangle([l, t, l + textlen, t + TEXTSIZE + 2], fill='#ffffff')
            draw.text((l + 2, t + 1), text, fill="#000000")
            # imgmaps.append( [l, t, l+textlen, t+SIZE+2, "submap" + maparea, maparea + " subarea map"] )
            draw.line([l, t, r, t], fill='#777777', width=LINEWIDTH)
            draw.line([l, b, r, b], fill='#777777', width=LINEWIDTH)
            draw.line([l, t, l, b], fill='#777777', width=LINEWIDTH)
            draw.line([r, t, r, b], fill='#777777', width=LINEWIDTH)
            draw.line([l, t, l + textlen, t], fill='#777777', width=LINEWIDTH)
            draw.line([l, t + TEXTSIZE + 2, l + textlen, t + TEXTSIZE + 2], fill='#777777', width=LINEWIDTH)
            draw.line([l, t, l, t + TEXTSIZE + 2], fill='#777777', width=LINEWIDTH)
            draw.line([l + textlen, t, l + textlen, t + TEXTSIZE + 2], fill='#777777', width=LINEWIDTH)
            # imgmaps[maparea] = []
    # Draw the scale bar
    m100 = int(100 / (m[R] - m[L]) * img.size[0])
    draw.line([10, TEXTSIZE * 3, 10, TEXTSIZE * 2], fill='#000000', width=LINEWIDTH)
    draw.line([10, TEXTSIZE * 2, 10 + m100, TEXTSIZE * 2], fill='#000000', width=LINEWIDTH)
    draw.line([10 + m100, TEXTSIZE * 3, 10 + m100, TEXTSIZE * 2], fill='#000000', width=LINEWIDTH)
    label = "100m"
    draw.text([10 + (m100 - draw.textsize(label)[0]) / 2, TEXTSIZE / 2], label, fill='#000000')
    plot("laser.0_7", "BNase", "Reference", "Bräuning Nase laser point", name, draw, img)
    plot("226-96", "BZkn", "Reference", "Bräuning Zinken trig point", name, draw, img)
    plot("vd1", "VD1", "Reference", "VD1 survey point", name, draw, img)
    plot("laser.kt114_96", "HSK", "Reference", "Hinterer Schwarzmooskogel trig point", name, draw, img)
    plot("2000", "Nipple", "Reference", "Nipple (Weiße Warze)", name, draw, img)
    plot("3000", "VSK", "Reference", "Vorderer Schwarzmooskogel summit", name, draw, img)
    plot("topcamp", "TC", "Reference", "Top Camp", name, draw, img)
    plot("laser.0", "LSR0", "Reference", "Laser Point 0", name, draw, img)
    plot("laser.0_1", "LSR1", "Reference", "Laser Point 0/1", name, draw, img)
    plot("laser.0_3", "LSR3", "Reference", "Laser Point 0/3", name, draw, img)
    plot("laser.0_5", "LSR5", "Reference", "Laser Point 0/5", name, draw, img)
    plot("225-96", "BAlm", "Reference", "Bräuning Alm trig point", name, draw, img)
    for entrance in Entrance.objects.all():
        station = entrance.best_station()
        if station:
            # try:
            areaName = entrance.caveandentrance_set.all()[0].cave.getArea().short_name
            plot(station, "%s-%s" % (areaName, str(entrance)[5:]), entrance.needs_surface_work(), str(entrance), name, draw, img)
            # except:
            #     pass

    # Circles for caves with only approximate locations; NB the tuples are named
    # (N, E, D, num) but the first two values are passed to mungecoord() as (x, y)
    for (N, E, D, num) in [(35975.37, 83018.21, 100, "177"),  # Calculated from bearings
                           (35350.00, 81630.00, 50, "71"),    # From Auer map
                           (36025.00, 82475.00, 50, "146"),   # From mystery map
                           (35600.00, 82050.00, 50, "35"),    # From Auer map
                           (35650.00, 82025.00, 50, "44"),    # From Auer map
                           (36200.00, 82925.00, 50, "178"),   # Calculated from bearings
                           (35232.64, 82910.37, 25, "181"),   # Calculated from bearings
                           (35323.60, 81357.83, 50, "74")     # From Auer map
                           ]:
        (N, E, D) = map(float, (N, E, D))
        maparea = Cave.objects.get(kataster_number=num).getArea().short_name
        lo = mungecoord(N - D, E + D, name, img)
        hi = mungecoord(N + D, E - D, name, img)
        lpos = mungecoord(N - D, E, name, img)
        draw.ellipse([lo, hi], outline="#000000")
        draw.ellipse([lo[0] + 1, lo[1] + 1, hi[0] - 1, hi[1] - 1], outline=areacolours[maparea])
        draw.ellipse([lo[0] + 2, lo[1] + 2, hi[0] - 2, hi[1] - 2], outline=areacolours[maparea])
        draw.rectangle([lpos[0], lpos[1] - TEXTSIZE / 2, lpos[0] + draw.textsize(num)[0], lpos[1] + TEXTSIZE / 2], fill="#ffffff")
        draw.text((lpos[0], lpos[1] - TEXTSIZE / 2), num, fill="#000000")
    response = HttpResponse(content_type="image/png")
    del draw
    img.save(response, "PNG")
    return response


STATIONS = {}
poslineregex = re.compile("^\(\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*),\s*([+-]?\d*\.\d*)\s*\)\s*([^\s]+)$")

def LoadPos():
    subprocess.call([settings.CAVERN, "--output=%s/all.3d" % settings.SURVEX_DATA, "%s/all.svx" % settings.SURVEX_DATA])
    subprocess.call([settings.THREEDTOPOS, '%sall.3d' % settings.SURVEX_DATA], cwd=settings.SURVEX_DATA)
    posfile = open("%sall.pos" % settings.SURVEX_DATA)
    posfile.readline()  # drop the header line
    for line in posfile.readlines():
        r = poslineregex.match(line)
        if r:
            x, y, z, name = r.groups()
            STATIONS[name] = (x, y, z)
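
# Illustrative (format inferred from poslineregex above): a line of all.pos such as
#   (36670.37, 83317.43, 1903.97 ) caves-1623.204.trunk.entrance
# ends up in STATIONS as {"caves-1623.204.trunk.entrance": ("36670.37", "83317.43", "1903.97")}
# -- note that the coordinates are kept as strings.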

270 core/views_logbooks.py Normal file
@@ -0,0 +1,270 @@
from django.shortcuts import render_to_response
from troggle.core.models import Expedition, Person, PersonExpedition, PersonTrip, LogbookEntry, SurvexBlock
import troggle.core.models as models
import troggle.settings as settings
import django.db.models
from troggle.parsers.logbooks import LoadLogbookForExpedition
from troggle.parsers.people import GetPersonExpeditionNameLookup
from troggle.core.forms import getTripForm, get_name, PersonForm  # get_name and PersonForm are used below
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.template import Context, loader
from utils import render_with_context
import os.path
import troggle.parsers.logbooks as logbookparsers
from django.template.defaultfilters import slugify
from troggle.helper import login_required_if_public
import search  # used by logbookSearch; assumed to be troggle's root-level search helper module
import datetime

from django.views.generic.list import ListView
from django.utils import timezone


# Django uses Context, not RequestContext, when you call render_to_response.
# We always want to use RequestContext, so that django adds the context from
# settings.TEMPLATE_CONTEXT_PROCESSORS. This way we automatically get the
# necessary settings variables passed to each template. So we use a custom
# method, render_with_context, instead of render_to_response. Hopefully
# future Django releases will make this unnecessary.
# from troggle.alwaysUseRequestContext import render_response
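
# A minimal sketch (an assumption -- the real helper lives in utils.py, imported above)
# of what render_with_context does:
#
#     from django.template import RequestContext
#     from django.shortcuts import render_to_response
#
#     def render_with_context(request, template_name, context_dict):
#         # RequestContext runs TEMPLATE_CONTEXT_PROCESSORS, so the settings
#         # variables reach every template automatically.
#         return render_to_response(template_name, context_dict,
#                                   context_instance=RequestContext(request))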

import re


def getNotablePersons():
    notablepersons = []
    for person in Person.objects.all():
        if person.bisnotable():
            notablepersons.append(person)
    return notablepersons


def personindex(request):
    persons = Person.objects.all()
    # From what I can tell, "persons" seems to be the table rows, while "personss" is the table columns. - AC 16 Feb 09
    personss = []
    ncols = 4
    nc = (len(persons) + ncols - 1) / ncols
    for i in range(ncols):
        personss.append(persons[i * nc: (i + 1) * nc])

    notablepersons = []
    for person in Person.objects.all():
        if person.bisnotable():
            notablepersons.append(person)

    return render_with_context(request, 'personindex.html', {'persons': persons, 'personss': personss, 'notablepersons': notablepersons, })


def expedition(request, expeditionname):
    this_expedition = Expedition.objects.get(year=int(expeditionname))
    expeditions = Expedition.objects.all()
    personexpeditiondays = []
    dateditems = list(this_expedition.logbookentry_set.all()) + list(this_expedition.survexblock_set.all())
    dates = list(set([item.date for item in dateditems]))
    dates.sort()
    for personexpedition in this_expedition.personexpedition_set.all():
        prow = []
        for date in dates:
            pcell = {"persontrips": PersonTrip.objects.filter(personexpedition=personexpedition,
                                                              logbook_entry__date=date)}
            pcell["survexblocks"] = set(SurvexBlock.objects.filter(survexpersonrole__personexpedition=personexpedition,
                                                                   date=date))
            prow.append(pcell)
        personexpeditiondays.append({"personexpedition": personexpedition, "personrow": prow})

    message = ""
    if "reload" in request.GET:
        message = LoadLogbookForExpedition(this_expedition)
    return render_with_context(request, 'expedition.html', {'expedition': this_expedition, 'expeditions': expeditions, 'personexpeditiondays': personexpeditiondays, 'message': message, 'settings': settings, 'dateditems': dateditems})

@django.db.models.permalink  # this allows the nice get_absolute_url syntax we are using
def get_absolute_url(self):
    # looks as if it is intended to be attached to Expedition as a method
    return ('expedition', (self.year,))


class ExpeditionListView(ListView):

    model = Expedition

    def get_context_data(self, **kwargs):
        context = super(ExpeditionListView, self).get_context_data(**kwargs)
        context['now'] = timezone.now()
        return context


def person(request, first_name='', last_name='', ):
    this_person = Person.objects.get(first_name=first_name, last_name=last_name)

    # This is for removing the reference to the user's profile, in case they set it to the wrong person
    if request.method == 'GET':
        if request.GET.get('clear_profile') == 'True':
            this_person.user = None
            this_person.save()
            return HttpResponseRedirect(reverse('profiles_select_profile'))

    return render_with_context(request, 'person.html', {'person': this_person, })


def GetPersonChronology(personexpedition):
    res = {}
    for persontrip in personexpedition.persontrip_set.all():
        a = res.setdefault(persontrip.date, {})
        a.setdefault("persontrips", []).append(persontrip)

    for personrole in personexpedition.survexpersonrole_set.all():
        a = res.setdefault(personrole.survexblock.date, {})
        a.setdefault("personroles", []).append(personrole.survexblock)

    # build up the tables
    rdates = res.keys()
    rdates.sort()

    res2 = []
    for rdate in rdates:
        persontrips = res[rdate].get("persontrips", [])
        personroles = res[rdate].get("personroles", [])
        for n in range(max(len(persontrips), len(personroles))):
            res2.append(((n == 0 and rdate or "--"), (n < len(persontrips) and persontrips[n]), (n < len(personroles) and personroles[n])))

    return res2
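
# Illustrative (not in the original): res2 is a list of display rows, one per trip or
# survex role, with the date shown only on the first row for each day; missing cells are False:
#   [(datetime.date(2009, 7, 21), <PersonTrip: ...>, <SurvexBlock: ...>),
#    ("--", <PersonTrip: ...>, False)]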


def personexpedition(request, first_name='', last_name='', year=''):
    person = Person.objects.get(first_name=first_name, last_name=last_name)
    this_expedition = Expedition.objects.get(year=year)
    personexpedition = person.personexpedition_set.get(expedition=this_expedition)
    personchronology = GetPersonChronology(personexpedition)
    return render_with_context(request, 'personexpedition.html', {'personexpedition': personexpedition, 'personchronology': personchronology})


def logbookentry(request, date, slug):
    this_logbookentry = LogbookEntry.objects.filter(date=date, slug=slug)

    if len(this_logbookentry) > 1:
        return render_with_context(request, 'object_list.html', {'object_list': this_logbookentry})
    else:
        this_logbookentry = this_logbookentry[0]
        return render_with_context(request, 'logbookentry.html', {'logbookentry': this_logbookentry})


def logbookSearch(request, extra):
    query_string = ''
    found_entries = None
    if ('q' in request.GET) and request.GET['q'].strip():
        query_string = request.GET['q']
        entry_query = search.get_query(query_string, ['text', 'title', ])
        found_entries = LogbookEntry.objects.filter(entry_query)

    return render_with_context(request, 'logbooksearch.html',
                               {'query_string': query_string, 'found_entries': found_entries, })
    # context_instance=RequestContext(request))

def personForm(request, pk):
    person = Person.objects.get(pk=pk)
    form = PersonForm(instance=person)
    return render_with_context(request, 'personform.html', {'form': form, })


def experimental(request):
    legsbyexpo = []
    for expedition in Expedition.objects.all():
        survexblocks = expedition.survexblock_set.all()
        survexlegs = []
        survexleglength = 0.0
        for survexblock in survexblocks:
            survexlegs.extend(survexblock.survexleg_set.all())
            survexleglength += survexblock.totalleglength
        legsbyexpo.append((expedition, {"nsurvexlegs": len(survexlegs), "survexleglength": survexleglength}))
    legsbyexpo.reverse()

    survexlegs = models.SurvexLeg.objects.all()
    totalsurvexlength = sum([survexleg.tape for survexleg in survexlegs])
    return render_with_context(request, 'experimental.html', {"nsurvexlegs": len(survexlegs), "totalsurvexlength": totalsurvexlength, "legsbyexpo": legsbyexpo})

@login_required_if_public
def newLogbookEntry(request, expeditionyear, pdate=None, pslug=None):
    expedition = Expedition.objects.get(year=expeditionyear)
    PersonTripFormSet, TripForm = getTripForm(expedition)
    if pslug and pdate:
        previousdate = datetime.date(*[int(x) for x in pdate.split("-")])
        previouslbe = LogbookEntry.objects.get(slug=pslug, date=previousdate, expedition=expedition)
        assert previouslbe.filename
    if request.method == 'POST':  # If the form has been submitted...
        tripForm = TripForm(request.POST)  # A form bound to the POST data
        personTripFormSet = PersonTripFormSet(request.POST)
        if tripForm.is_valid() and personTripFormSet.is_valid():  # All validation rules pass
            dateStr = tripForm.cleaned_data["date"].strftime("%Y-%m-%d")
            directory = os.path.join(settings.EXPOWEB,
                                     "years",
                                     expedition.year,
                                     "autologbook")
            filename = os.path.join(directory,
                                    dateStr + "." + slugify(tripForm.cleaned_data["title"])[:50] + ".html")
            if not os.path.isdir(directory):
                os.mkdir(directory)
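            # Illustrative (title invented): a trip dated 2009-07-21 titled "Surveying in
            # the gaffered traverses" would be written to
            #   <EXPOWEB>/years/2009/autologbook/2009-07-21.surveying-in-the-gaffered-traverses.html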
            if pslug and pdate:
                delLogbookEntry(previouslbe)
            f = open(filename, "w")
            template = loader.get_template('dataformat/logbookentry.html')
            context = Context({'trip': tripForm.cleaned_data,
                               'persons': personTripFormSet.cleaned_data,
                               'date': dateStr,
                               'expeditionyear': expeditionyear})
            f.write(template.render(context))
            f.close()
            print(logbookparsers.parseAutoLogBookEntry(filename))
            return HttpResponseRedirect(reverse('expedition', args=[expedition.year]))  # Redirect after POST
    else:
        if pslug and pdate:
            if previouslbe.cave:
                tripForm = TripForm(initial={"date": previousdate,
                                             "title": previouslbe.title,
                                             "cave": previouslbe.cave.reference(),
                                             "location": None,
                                             "caveOrLocation": "cave",
                                             "html": previouslbe.text})
            else:
                tripForm = TripForm(initial={"date": previousdate,
                                             "title": previouslbe.title,
                                             "cave": None,
                                             "location": previouslbe.place,
                                             "caveOrLocation": "location",
                                             "html": previouslbe.text})
            personTripFormSet = PersonTripFormSet(initial=[{"name": get_name(py.personexpedition),
                                                            "TU": py.time_underground,
                                                            "author": py.is_logbook_entry_author}
                                                           for py in previouslbe.persontrip_set.all()])
        else:
            tripForm = TripForm()  # An unbound form
            personTripFormSet = PersonTripFormSet()

    return render_with_context(request, 'newlogbookentry.html', {
        'tripForm': tripForm,
        'personTripFormSet': personTripFormSet,
    })

@login_required_if_public
def deleteLogbookEntry(request, expeditionyear, date=None, slug=None):
    expedition = Expedition.objects.get(year=expeditionyear)
    previousdate = datetime.date(*[int(x) for x in date.split("-")])
    previouslbe = LogbookEntry.objects.get(slug=slug, date=previousdate, expedition=expedition)
    delLogbookEntry(previouslbe)
    return HttpResponseRedirect(reverse('expedition', args=[expedition.year]))  # Redirect after POST

def delLogbookEntry(lbe):
    for pt in lbe.persontrip_set.all():
        pt.delete()
    lbe.delete()
    os.remove(lbe.filename)

def get_people(request, expeditionslug):
    exp = Expedition.objects.get(year=expeditionslug)
    return render_with_context(request, 'options.html', {"items": [(pe.slug, pe.name) for pe in exp.personexpedition_set.all()]})

def get_logbook_entries(request, expeditionslug):
    exp = Expedition.objects.get(year=expeditionslug)
    return render_with_context(request, 'options.html', {"items": [(le.slug, "%s - %s" % (le.date, le.title)) for le in exp.logbookentry_set.all()]})

Some files were not shown because too many files have changed in this diff.