mirror of https://expo.survex.com/repositories/troggle/.git synced 2025-12-17 16:27:07 +00:00

1 Commit

Author: Sam Wenham
SHA1: 6984f66794
Message: Updates required to move to django 1.8
Date: 2019-03-02 14:10:51 +00:00
550 changed files with 10947 additions and 144152 deletions

.gitignore (vendored): 69 lines removed

@@ -1,69 +0,0 @@
# use glob syntax
syntax: glob
# Virtual environments
.venv
__pycache__/
*.py[oc]
build/
dist/
wheels/
*.egg-info
*.orig
*.sql
*.sqlite
*.prof
*~
.idea/*
.swp
.vscode/*
_1623.3d
_1623.err
_1623.pos
_1623.svx
_16230.svx
_troggle_import_root.svx
troggle_import_root.log
cave-lookup.json
core/migrations/*
db*
desktop.ini
diffsettings.txt
ignored-files.log
import_profile.json
lines-of-python.txt
lines-of-templates.txt
loadlogbk.log
loadsurvexblks.log
logbktrips.shelve
memdump.sql
parsing_log.txt
svxblks.log
svxlinear.log
troggle
troggle-inspectdb.py
troggle-sqlite.sql
troggle.log
troggle.sqlite
troggle.sqlite-journal
troggle_log.txt
tunnel-import.log
secret_credentials.py
localsettings.py
media/jslib/*
!media/jslib/readme.txt
_test_response.html
therionrefs.log
_1623-and-1626.svx
_1623-and-1626-no-schoenberg-hs.svx
troggle.sqlite-journal - Shortcut.lnk
troggle.sqlite - Shortcut.lnk
javascript
mvscript.sh

.hgignore (new file): 16 lines added

@@ -0,0 +1,16 @@
# use glob syntax
syntax: glob
*.pyc
db*
localsettings.py
*~
parsing_log.txt
troggle
troggle_log.txt
.idea/*
*.orig
media/images/*
.vscode/*
.swp
imagekit-off/


@@ -1,807 +0,0 @@
cd /home/philip/expo/expoweb/1623
mkdir -p 2023-ASH-06/i
mkdir -p 2023-ASH-06/l
mkdir -p 2023-ASH-06/t
mkdir -p 307/i
mkdir -p 307/l
mkdir -p 307/t
mkdir -p 2023-ASH-13/i
mkdir -p 2023-ASH-13/l
mkdir -p 2023-ASH-13/t
mkdir -p 2018-pf-03/i
mkdir -p 2018-pf-03/l
mkdir -p 2018-pf-03/t
mkdir -p 2023-ASH-07/i
mkdir -p 2023-ASH-07/l
mkdir -p 2023-ASH-07/t
mkdir -p 2013-BL-01/i
mkdir -p 2013-BL-01/l
mkdir -p 2013-BL-01/t
mkdir -p 2023-ASH-03/i
mkdir -p 2023-ASH-03/l
mkdir -p 2023-ASH-03/t
mkdir -p 2017-AA-01/i
mkdir -p 2017-AA-01/l
mkdir -p 2017-AA-01/t
mkdir -p 2023-ASH-02/i
mkdir -p 2023-ASH-02/l
mkdir -p 2023-ASH-02/t
mkdir -p 306/i
mkdir -p 306/l
mkdir -p 306/t
mkdir -p 2012-sw-01/i
mkdir -p 2012-sw-01/l
mkdir -p 2012-sw-01/t
mkdir -p 315/i
mkdir -p 315/l
mkdir -p 315/t
mkdir -p 2018-NTU-01/i
mkdir -p 2018-NTU-01/l
mkdir -p 2018-NTU-01/t
mkdir -p 303/i
mkdir -p 303/l
mkdir -p 303/t
mkdir -p 2023-ASH-12/i
mkdir -p 2023-ASH-12/l
mkdir -p 2023-ASH-12/t
mkdir -p 2023-ASH-11/i
mkdir -p 2023-ASH-11/l
mkdir -p 2023-ASH-11/t
mkdir -p 311/i
mkdir -p 311/l
mkdir -p 311/t
mkdir -p 2017-AMS-02/i
mkdir -p 2017-AMS-02/l
mkdir -p 2017-AMS-02/t
mkdir -p 2013-06/i
mkdir -p 2013-06/l
mkdir -p 2013-06/t
mkdir -p 2023-ASH-01/i
mkdir -p 2023-ASH-01/l
mkdir -p 2023-ASH-01/t
mkdir -p 2017-NR-01/i
mkdir -p 2017-NR-01/l
mkdir -p 2017-NR-01/t
mkdir -p 308/i
mkdir -p 308/l
mkdir -p 308/t
mkdir -p 2012-sw-03/i
mkdir -p 2012-sw-03/l
mkdir -p 2012-sw-03/t
mkdir -p 2023-ASH-04/i
mkdir -p 2023-ASH-04/l
mkdir -p 2023-ASH-04/t
mkdir -p 2023-BL-11/i
mkdir -p 2023-BL-11/l
mkdir -p 2023-BL-11/t
mkdir -p 2023-ASH-14/i
mkdir -p 2023-ASH-14/l
mkdir -p 2023-ASH-14/t
mkdir -p 309/i
mkdir -p 309/l
mkdir -p 309/t
mkdir -p 2023-ASH-10/i
mkdir -p 2023-ASH-10/l
mkdir -p 2023-ASH-10/t
cd /home/philip/expo/expoweb/1623
mv l/"2012-sw-01_i1.html" 2012-sw-01/l
sed -i 's|\/1623\/i\/|/1623/2012-sw-01/i/|g' 2012-sw-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2012-sw-01/i/|g" 2012-sw-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2012-sw-01/i/|g' ../entrance_data/'1623-2012-sw-01.html'
sed -i "s|\/1623\/i\/|/1623/2012-sw-01/i/|g" ../entrance_data/'1623-2012-sw-01.html'
sed -i 's|\/1623\/l\/|/1623/2012-sw-01/l/|g' ../entrance_data/'1623-2012-sw-01.html'
sed -i "s|\/1623\/l\/|/1623/2012-sw-01/l/|g" ../entrance_data/'1623-2012-sw-01.html'
sed -i 's|\/1623\/t\/|/1623/2012-sw-01/t/|g' ../entrance_data/'1623-2012-sw-01.html'
sed -i "s|\/1623\/t\/|/1623/2012-sw-01/t/|g" ../entrance_data/'1623-2012-sw-01.html'
mv t/"2012-sw-01_i1.jpg" 2012-sw-01/t
mv i/"2012-sw-01_i1.jpg" 2012-sw-01/i
mv l/"2012-sw-03_i2.html" 2012-sw-03/l
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' 2012-sw-03/l/*.html
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" 2012-sw-03/l/*.html
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' ../entrance_data/'1623-2012-sw-03.html'
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" ../entrance_data/'1623-2012-sw-03.html'
sed -i 's|\/1623\/l\/|/1623/2012-sw-03/l/|g' ../entrance_data/'1623-2012-sw-03.html'
sed -i "s|\/1623\/l\/|/1623/2012-sw-03/l/|g" ../entrance_data/'1623-2012-sw-03.html'
sed -i 's|\/1623\/t\/|/1623/2012-sw-03/t/|g' ../entrance_data/'1623-2012-sw-03.html'
sed -i "s|\/1623\/t\/|/1623/2012-sw-03/t/|g" ../entrance_data/'1623-2012-sw-03.html'
mv l/"2012-sw-03_i1.html" 2012-sw-03/l
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' 2012-sw-03/l/*.html
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" 2012-sw-03/l/*.html
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' ../entrance_data/'1623-2012-sw-03.html'
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" ../entrance_data/'1623-2012-sw-03.html'
sed -i 's|\/1623\/l\/|/1623/2012-sw-03/l/|g' ../entrance_data/'1623-2012-sw-03.html'
sed -i "s|\/1623\/l\/|/1623/2012-sw-03/l/|g" ../entrance_data/'1623-2012-sw-03.html'
sed -i 's|\/1623\/t\/|/1623/2012-sw-03/t/|g' ../entrance_data/'1623-2012-sw-03.html'
sed -i "s|\/1623\/t\/|/1623/2012-sw-03/t/|g" ../entrance_data/'1623-2012-sw-03.html'
mv t/"2012-sw-03_i1.jpg" 2012-sw-03/t
mv i/"2012-sw-03_i1.jpg" 2012-sw-03/i
mv t/"2012-sw-03_i2.jpg" 2012-sw-03/t
mv i/"2012-sw-03_i2.jpg" 2012-sw-03/i
mv i/"2013wallet23.jpg" 2013-06/i
mv l/"2013wallet23.html" 2013-06/l
sed -i 's|\/1623\/i\/|/1623/2013-06/i/|g' 2013-06/l/*.html
sed -i "s|\/1623\/i\/|/1623/2013-06/i/|g" 2013-06/l/*.html
sed -i 's|\/1623\/i\/|/1623/2013-06/i/|g' ../cave_data/'1623-2013-06.html'
sed -i "s|\/1623\/i\/|/1623/2013-06/i/|g" ../cave_data/'1623-2013-06.html'
sed -i 's|\/1623\/l\/|/1623/2013-06/l/|g' ../cave_data/'1623-2013-06.html'
sed -i "s|\/1623\/l\/|/1623/2013-06/l/|g" ../cave_data/'1623-2013-06.html'
sed -i 's|\/1623\/t\/|/1623/2013-06/t/|g' ../cave_data/'1623-2013-06.html'
sed -i "s|\/1623\/t\/|/1623/2013-06/t/|g" ../cave_data/'1623-2013-06.html'
mv l/"2013-BL-01.html" 2013-BL-01/l
sed -i 's|\/1623\/i\/|/1623/2013-BL-01/i/|g' 2013-BL-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2013-BL-01/i/|g" 2013-BL-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2013-BL-01/i/|g' ../cave_data/'1623-2013-BL-01.html'
sed -i "s|\/1623\/i\/|/1623/2013-BL-01/i/|g" ../cave_data/'1623-2013-BL-01.html'
sed -i 's|\/1623\/l\/|/1623/2013-BL-01/l/|g' ../cave_data/'1623-2013-BL-01.html'
sed -i "s|\/1623\/l\/|/1623/2013-BL-01/l/|g" ../cave_data/'1623-2013-BL-01.html'
sed -i 's|\/1623\/t\/|/1623/2013-BL-01/t/|g' ../cave_data/'1623-2013-BL-01.html'
sed -i "s|\/1623\/t\/|/1623/2013-BL-01/t/|g" ../cave_data/'1623-2013-BL-01.html'
mv t/"2013-BL-01.jpg" 2013-BL-01/t
mv i/"2013-BL-01.jpg" 2013-BL-01/i
mv l/"aa-1-2017_with-tag.html" 2017-AA-01/l
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' 2017-AA-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" 2017-AA-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' ../entrance_data/'1623-2017-AA-01.html'
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" ../entrance_data/'1623-2017-AA-01.html'
sed -i 's|\/1623\/l\/|/1623/2017-AA-01/l/|g' ../entrance_data/'1623-2017-AA-01.html'
sed -i "s|\/1623\/l\/|/1623/2017-AA-01/l/|g" ../entrance_data/'1623-2017-AA-01.html'
sed -i 's|\/1623\/t\/|/1623/2017-AA-01/t/|g' ../entrance_data/'1623-2017-AA-01.html'
sed -i "s|\/1623\/t\/|/1623/2017-AA-01/t/|g" ../entrance_data/'1623-2017-AA-01.html'
mv t/"aa-1-2017_with-tag.jpg" 2017-AA-01/t
mv i/"aa-1-2017_with-tag.jpg" 2017-AA-01/i
mv l/"aa-1-2017_looking-down.html" 2017-AA-01/l
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' 2017-AA-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" 2017-AA-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' ../entrance_data/'1623-2017-AA-01.html'
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" ../entrance_data/'1623-2017-AA-01.html'
sed -i 's|\/1623\/l\/|/1623/2017-AA-01/l/|g' ../entrance_data/'1623-2017-AA-01.html'
sed -i "s|\/1623\/l\/|/1623/2017-AA-01/l/|g" ../entrance_data/'1623-2017-AA-01.html'
sed -i 's|\/1623\/t\/|/1623/2017-AA-01/t/|g' ../entrance_data/'1623-2017-AA-01.html'
sed -i "s|\/1623\/t\/|/1623/2017-AA-01/t/|g" ../entrance_data/'1623-2017-AA-01.html'
mv t/"aa-1-2017_looking-down.jpg" 2017-AA-01/t
mv i/"aa-1-2017_looking-down.jpg" 2017-AA-01/i
mv t/"ent081-20170807.jpg" 2017-AMS-02/t
mv i/"ent081-20170807.jpg" 2017-AMS-02/i
mv l/"near-ent-2017-ams-02.html" 2017-AMS-02/l
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' 2017-AMS-02/l/*.html
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" 2017-AMS-02/l/*.html
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' ../entrance_data/'1623-2017-AMS-02.html'
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" ../entrance_data/'1623-2017-AMS-02.html'
sed -i 's|\/1623\/l\/|/1623/2017-AMS-02/l/|g' ../entrance_data/'1623-2017-AMS-02.html'
sed -i "s|\/1623\/l\/|/1623/2017-AMS-02/l/|g" ../entrance_data/'1623-2017-AMS-02.html'
sed -i 's|\/1623\/t\/|/1623/2017-AMS-02/t/|g' ../entrance_data/'1623-2017-AMS-02.html'
sed -i "s|\/1623\/t\/|/1623/2017-AMS-02/t/|g" ../entrance_data/'1623-2017-AMS-02.html'
mv t/"near-ent-2017-ams-02.jpg" 2017-AMS-02/t
mv i/"near-ent-2017-ams-02.jpg" 2017-AMS-02/i
mv l/"ent081-20170807.html" 2017-AMS-02/l
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' 2017-AMS-02/l/*.html
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" 2017-AMS-02/l/*.html
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' ../entrance_data/'1623-2017-AMS-02.html'
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" ../entrance_data/'1623-2017-AMS-02.html'
sed -i 's|\/1623\/l\/|/1623/2017-AMS-02/l/|g' ../entrance_data/'1623-2017-AMS-02.html'
sed -i "s|\/1623\/l\/|/1623/2017-AMS-02/l/|g" ../entrance_data/'1623-2017-AMS-02.html'
sed -i 's|\/1623\/t\/|/1623/2017-AMS-02/t/|g' ../entrance_data/'1623-2017-AMS-02.html'
sed -i "s|\/1623\/t\/|/1623/2017-AMS-02/t/|g" ../entrance_data/'1623-2017-AMS-02.html'
mv l/"2017-NR-01_03.html" 2017-NR-01/l
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' 2017-NR-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" 2017-NR-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' ../entrance_data/'1623-2017-NR-01.html'
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" ../entrance_data/'1623-2017-NR-01.html'
sed -i 's|\/1623\/l\/|/1623/2017-NR-01/l/|g' ../entrance_data/'1623-2017-NR-01.html'
sed -i "s|\/1623\/l\/|/1623/2017-NR-01/l/|g" ../entrance_data/'1623-2017-NR-01.html'
sed -i 's|\/1623\/t\/|/1623/2017-NR-01/t/|g' ../entrance_data/'1623-2017-NR-01.html'
sed -i "s|\/1623\/t\/|/1623/2017-NR-01/t/|g" ../entrance_data/'1623-2017-NR-01.html'
mv t/"2017-NR-01_03.jpg" 2017-NR-01/t
mv i/"2017-NR-01_03.jpg" 2017-NR-01/i
mv t/"2017-NR-01_04.jpg" 2017-NR-01/t
mv i/"2017-NR-01_04.jpg" 2017-NR-01/i
mv l/"2017-NR-01_04.html" 2017-NR-01/l
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' 2017-NR-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" 2017-NR-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' ../entrance_data/'1623-2017-NR-01.html'
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" ../entrance_data/'1623-2017-NR-01.html'
sed -i 's|\/1623\/l\/|/1623/2017-NR-01/l/|g' ../entrance_data/'1623-2017-NR-01.html'
sed -i "s|\/1623\/l\/|/1623/2017-NR-01/l/|g" ../entrance_data/'1623-2017-NR-01.html'
sed -i 's|\/1623\/t\/|/1623/2017-NR-01/t/|g' ../entrance_data/'1623-2017-NR-01.html'
sed -i "s|\/1623\/t\/|/1623/2017-NR-01/t/|g" ../entrance_data/'1623-2017-NR-01.html'
mv t/"2018-ntu-01_looking_down_shaft-and-rift.jpg" 2018-NTU-01/t
mv i/"2018-ntu-01_looking_down_shaft-and-rift.jpg" 2018-NTU-01/i
mv t/"2018-ntu-01_tag_arrows.jpg" 2018-NTU-01/t
mv i/"2018-ntu-01_tag_arrows.jpg" 2018-NTU-01/i
mv t/"2018-ntu-01_neil_view_west.jpg" 2018-NTU-01/t
mv i/"2018-ntu-01_neil_view_west.jpg" 2018-NTU-01/i
mv l/"2018-ntu-01_looking_down_shaft-and-rift.html" 2018-NTU-01/l
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' 2018-NTU-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" 2018-NTU-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" ../entrance_data/'1623-2018-NTU-01.html'
sed -i 's|\/1623\/l\/|/1623/2018-NTU-01/l/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/l\/|/1623/2018-NTU-01/l/|g" ../entrance_data/'1623-2018-NTU-01.html'
sed -i 's|\/1623\/t\/|/1623/2018-NTU-01/t/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/t\/|/1623/2018-NTU-01/t/|g" ../entrance_data/'1623-2018-NTU-01.html'
mv l/"2018-ntu-01_tag_arrows.html" 2018-NTU-01/l
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' 2018-NTU-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" 2018-NTU-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" ../entrance_data/'1623-2018-NTU-01.html'
sed -i 's|\/1623\/l\/|/1623/2018-NTU-01/l/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/l\/|/1623/2018-NTU-01/l/|g" ../entrance_data/'1623-2018-NTU-01.html'
sed -i 's|\/1623\/t\/|/1623/2018-NTU-01/t/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/t\/|/1623/2018-NTU-01/t/|g" ../entrance_data/'1623-2018-NTU-01.html'
mv l/"2018-ntu-01_neil_view_west.html" 2018-NTU-01/l
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' 2018-NTU-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" 2018-NTU-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" ../entrance_data/'1623-2018-NTU-01.html'
sed -i 's|\/1623\/l\/|/1623/2018-NTU-01/l/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/l\/|/1623/2018-NTU-01/l/|g" ../entrance_data/'1623-2018-NTU-01.html'
sed -i 's|\/1623\/t\/|/1623/2018-NTU-01/t/|g' ../entrance_data/'1623-2018-NTU-01.html'
sed -i "s|\/1623\/t\/|/1623/2018-NTU-01/t/|g" ../entrance_data/'1623-2018-NTU-01.html'
mv t/"2018-pf-03_and_pf-02_arrows.jpg" 2018-pf-03/t
mv i/"2018-pf-03_and_pf-02_arrows.jpg" 2018-pf-03/i
mv l/"2018-pf-03_and_pf-02_arrows.html" 2018-pf-03/l
sed -i 's|\/1623\/i\/|/1623/2018-pf-03/i/|g' 2018-pf-03/l/*.html
sed -i "s|\/1623\/i\/|/1623/2018-pf-03/i/|g" 2018-pf-03/l/*.html
sed -i 's|\/1623\/i\/|/1623/2018-pf-03/i/|g' ../entrance_data/'1623-2018-pf-03.html'
sed -i "s|\/1623\/i\/|/1623/2018-pf-03/i/|g" ../entrance_data/'1623-2018-pf-03.html'
sed -i 's|\/1623\/l\/|/1623/2018-pf-03/l/|g' ../entrance_data/'1623-2018-pf-03.html'
sed -i "s|\/1623\/l\/|/1623/2018-pf-03/l/|g" ../entrance_data/'1623-2018-pf-03.html'
sed -i 's|\/1623\/t\/|/1623/2018-pf-03/t/|g' ../entrance_data/'1623-2018-pf-03.html'
sed -i "s|\/1623\/t\/|/1623/2018-pf-03/t/|g" ../entrance_data/'1623-2018-pf-03.html'
mv l/"2023-ASH-01-entrance1.html" 2023-ASH-01/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' 2023-ASH-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" 2023-ASH-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' ../entrance_data/'1623-2023-ASH-01a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" ../entrance_data/'1623-2023-ASH-01a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-01/l/|g' ../entrance_data/'1623-2023-ASH-01a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-01/l/|g" ../entrance_data/'1623-2023-ASH-01a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-01/t/|g' ../entrance_data/'1623-2023-ASH-01a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-01/t/|g" ../entrance_data/'1623-2023-ASH-01a.html'
mv l/"2023-ASH-01-entrance2.html" 2023-ASH-01/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' 2023-ASH-01/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" 2023-ASH-01/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' ../entrance_data/'1623-2023-ASH-01a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" ../entrance_data/'1623-2023-ASH-01a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-01/l/|g' ../entrance_data/'1623-2023-ASH-01a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-01/l/|g" ../entrance_data/'1623-2023-ASH-01a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-01/t/|g' ../entrance_data/'1623-2023-ASH-01a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-01/t/|g" ../entrance_data/'1623-2023-ASH-01a.html'
mv t/"2023-ASH-01-entrance2.jpg" 2023-ASH-01/t
mv i/"2023-ASH-01-entrance2.jpg" 2023-ASH-01/i
mv t/"2023-ASH-01-entrance1.jpg" 2023-ASH-01/t
mv i/"2023-ASH-01-entrance1.jpg" 2023-ASH-01/i
mv l/"2023-ASH-02-entrance1.html" 2023-ASH-02/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' 2023-ASH-02/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" 2023-ASH-02/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' ../entrance_data/'1623-2023-ASH-02a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" ../entrance_data/'1623-2023-ASH-02a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-02/l/|g' ../entrance_data/'1623-2023-ASH-02a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-02/l/|g" ../entrance_data/'1623-2023-ASH-02a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-02/t/|g' ../entrance_data/'1623-2023-ASH-02a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-02/t/|g" ../entrance_data/'1623-2023-ASH-02a.html'
mv t/"2023-ASH-02-entrance1.jpg" 2023-ASH-02/t
mv i/"2023-ASH-02-entrance1.jpg" 2023-ASH-02/i
mv l/"2023-ASH-02-entrance2.html" 2023-ASH-02/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' 2023-ASH-02/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" 2023-ASH-02/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' ../entrance_data/'1623-2023-ASH-02a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" ../entrance_data/'1623-2023-ASH-02a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-02/l/|g' ../entrance_data/'1623-2023-ASH-02a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-02/l/|g" ../entrance_data/'1623-2023-ASH-02a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-02/t/|g' ../entrance_data/'1623-2023-ASH-02a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-02/t/|g" ../entrance_data/'1623-2023-ASH-02a.html'
mv t/"2023-ASH-02-entrance2.jpg" 2023-ASH-02/t
mv i/"2023-ASH-02-entrance2.jpg" 2023-ASH-02/i
mv l/"2023-ASH-03-entrance2.html" 2023-ASH-03/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' 2023-ASH-03/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" 2023-ASH-03/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' ../entrance_data/'1623-2023-ASH-03a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" ../entrance_data/'1623-2023-ASH-03a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-03/l/|g' ../entrance_data/'1623-2023-ASH-03a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-03/l/|g" ../entrance_data/'1623-2023-ASH-03a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-03/t/|g' ../entrance_data/'1623-2023-ASH-03a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-03/t/|g" ../entrance_data/'1623-2023-ASH-03a.html'
mv t/"2023-ASH-03-entrance2.jpg" 2023-ASH-03/t
mv i/"2023-ASH-03-entrance2.jpg" 2023-ASH-03/i
mv l/"2023-ASH-03-entrance1.html" 2023-ASH-03/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' 2023-ASH-03/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" 2023-ASH-03/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' ../entrance_data/'1623-2023-ASH-03a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" ../entrance_data/'1623-2023-ASH-03a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-03/l/|g' ../entrance_data/'1623-2023-ASH-03a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-03/l/|g" ../entrance_data/'1623-2023-ASH-03a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-03/t/|g' ../entrance_data/'1623-2023-ASH-03a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-03/t/|g" ../entrance_data/'1623-2023-ASH-03a.html'
mv t/"2023-ASH-03-entrance1.jpg" 2023-ASH-03/t
mv i/"2023-ASH-03-entrance1.jpg" 2023-ASH-03/i
mv t/"2023-ASH-04-entrance2.jpg" 2023-ASH-04/t
mv i/"2023-ASH-04-entrance2.jpg" 2023-ASH-04/i
mv t/"2023-ASH-04-entrance1.jpg" 2023-ASH-04/t
mv i/"2023-ASH-04-entrance1.jpg" 2023-ASH-04/i
mv l/"2023-ASH-04-entrance1.html" 2023-ASH-04/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' 2023-ASH-04/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" 2023-ASH-04/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' ../entrance_data/'1623-2023-ASH-04a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" ../entrance_data/'1623-2023-ASH-04a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-04/l/|g' ../entrance_data/'1623-2023-ASH-04a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-04/l/|g" ../entrance_data/'1623-2023-ASH-04a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-04/t/|g' ../entrance_data/'1623-2023-ASH-04a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-04/t/|g" ../entrance_data/'1623-2023-ASH-04a.html'
mv l/"2023-ASH-04-entrance2.html" 2023-ASH-04/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' 2023-ASH-04/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" 2023-ASH-04/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' ../entrance_data/'1623-2023-ASH-04a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" ../entrance_data/'1623-2023-ASH-04a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-04/l/|g' ../entrance_data/'1623-2023-ASH-04a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-04/l/|g" ../entrance_data/'1623-2023-ASH-04a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-04/t/|g' ../entrance_data/'1623-2023-ASH-04a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-04/t/|g" ../entrance_data/'1623-2023-ASH-04a.html'
mv t/"2023-ASH-06-entrance2.jpg" 2023-ASH-06/t
mv i/"2023-ASH-06-entrance2.jpg" 2023-ASH-06/i
mv l/"2023-ASH-06-entrance1.html" 2023-ASH-06/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' 2023-ASH-06/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" 2023-ASH-06/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' ../entrance_data/'1623-2023-ASH-06a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" ../entrance_data/'1623-2023-ASH-06a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-06/l/|g' ../entrance_data/'1623-2023-ASH-06a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-06/l/|g" ../entrance_data/'1623-2023-ASH-06a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-06/t/|g' ../entrance_data/'1623-2023-ASH-06a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-06/t/|g" ../entrance_data/'1623-2023-ASH-06a.html'
mv l/"2023-ASH-06-entrance2.html" 2023-ASH-06/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' 2023-ASH-06/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" 2023-ASH-06/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' ../entrance_data/'1623-2023-ASH-06a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" ../entrance_data/'1623-2023-ASH-06a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-06/l/|g' ../entrance_data/'1623-2023-ASH-06a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-06/l/|g" ../entrance_data/'1623-2023-ASH-06a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-06/t/|g' ../entrance_data/'1623-2023-ASH-06a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-06/t/|g" ../entrance_data/'1623-2023-ASH-06a.html'
mv t/"2023-ASH-06-entrance1.jpg" 2023-ASH-06/t
mv i/"2023-ASH-06-entrance1.jpg" 2023-ASH-06/i
mv l/"2023-ASH-07-entrance1.html" 2023-ASH-07/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' 2023-ASH-07/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" 2023-ASH-07/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' ../entrance_data/'1623-2023-ASH-07a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" ../entrance_data/'1623-2023-ASH-07a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-07/l/|g' ../entrance_data/'1623-2023-ASH-07a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-07/l/|g" ../entrance_data/'1623-2023-ASH-07a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-07/t/|g' ../entrance_data/'1623-2023-ASH-07a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-07/t/|g" ../entrance_data/'1623-2023-ASH-07a.html'
mv t/"2023-ASH-07-entrance1.jpg" 2023-ASH-07/t
mv i/"2023-ASH-07-entrance1.jpg" 2023-ASH-07/i
mv t/"2023-ASH-07-entrance2.jpg" 2023-ASH-07/t
mv i/"2023-ASH-07-entrance2.jpg" 2023-ASH-07/i
mv l/"2023-ASH-07-entrance2.html" 2023-ASH-07/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' 2023-ASH-07/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" 2023-ASH-07/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' ../entrance_data/'1623-2023-ASH-07a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" ../entrance_data/'1623-2023-ASH-07a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-07/l/|g' ../entrance_data/'1623-2023-ASH-07a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-07/l/|g" ../entrance_data/'1623-2023-ASH-07a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-07/t/|g' ../entrance_data/'1623-2023-ASH-07a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-07/t/|g" ../entrance_data/'1623-2023-ASH-07a.html'
mv l/"2023-ASH-10-entrance2.html" 2023-ASH-10/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' 2023-ASH-10/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" 2023-ASH-10/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' ../entrance_data/'1623-2023-ASH-10a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" ../entrance_data/'1623-2023-ASH-10a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-10/l/|g' ../entrance_data/'1623-2023-ASH-10a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-10/l/|g" ../entrance_data/'1623-2023-ASH-10a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-10/t/|g' ../entrance_data/'1623-2023-ASH-10a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-10/t/|g" ../entrance_data/'1623-2023-ASH-10a.html'
mv t/"2023-ASH-10-entrance1.jpg" 2023-ASH-10/t
mv i/"2023-ASH-10-entrance1.jpg" 2023-ASH-10/i
mv t/"2023-ASH-10-entrance2.jpg" 2023-ASH-10/t
mv i/"2023-ASH-10-entrance2.jpg" 2023-ASH-10/i
mv l/"2023-ASH-10-entrance1.html" 2023-ASH-10/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' 2023-ASH-10/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" 2023-ASH-10/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' ../entrance_data/'1623-2023-ASH-10a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" ../entrance_data/'1623-2023-ASH-10a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-10/l/|g' ../entrance_data/'1623-2023-ASH-10a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-10/l/|g" ../entrance_data/'1623-2023-ASH-10a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-10/t/|g' ../entrance_data/'1623-2023-ASH-10a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-10/t/|g" ../entrance_data/'1623-2023-ASH-10a.html'
mv t/"2023-ASH-11-entrance2.jpg" 2023-ASH-11/t
mv i/"2023-ASH-11-entrance2.jpg" 2023-ASH-11/i
mv l/"2023-ASH-11-entrance2.html" 2023-ASH-11/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
mv l/"2023-ASH-11-entrance4.html" 2023-ASH-11/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
mv l/"2023-ASH-11-entrance3.html" 2023-ASH-11/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
mv l/"2023-ASH-11-entrance1.html" 2023-ASH-11/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
mv t/"2023-ASH-11-entrance1.jpg" 2023-ASH-11/t
mv i/"2023-ASH-11-entrance1.jpg" 2023-ASH-11/i
mv t/"2023-ASH-11-entrance3.jpg" 2023-ASH-11/t
mv i/"2023-ASH-11-entrance3.jpg" 2023-ASH-11/i
mv t/"2023-ASH-11-entrance4.jpg" 2023-ASH-11/t
mv i/"2023-ASH-11-entrance4.jpg" 2023-ASH-11/i
mv l/"2023-ASH-12-entrance1.html" 2023-ASH-12/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' 2023-ASH-12/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" 2023-ASH-12/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' ../entrance_data/'1623-2023-ASH-12.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" ../entrance_data/'1623-2023-ASH-12.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-12/l/|g' ../entrance_data/'1623-2023-ASH-12.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-12/l/|g" ../entrance_data/'1623-2023-ASH-12.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-12/t/|g' ../entrance_data/'1623-2023-ASH-12.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-12/t/|g" ../entrance_data/'1623-2023-ASH-12.html'
mv t/"2023-ASH-12-entrance1.jpg" 2023-ASH-12/t
mv i/"2023-ASH-12-entrance1.jpg" 2023-ASH-12/i
mv t/"2023-ASH-12-entrance2.jpg" 2023-ASH-12/t
mv i/"2023-ASH-12-entrance2.jpg" 2023-ASH-12/i
mv l/"2023-ASH-12-entrance2.html" 2023-ASH-12/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' 2023-ASH-12/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" 2023-ASH-12/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' ../entrance_data/'1623-2023-ASH-12.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" ../entrance_data/'1623-2023-ASH-12.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-12/l/|g' ../entrance_data/'1623-2023-ASH-12.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-12/l/|g" ../entrance_data/'1623-2023-ASH-12.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-12/t/|g' ../entrance_data/'1623-2023-ASH-12.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-12/t/|g" ../entrance_data/'1623-2023-ASH-12.html'
mv l/"2023-ASH-13-entrance1.html" 2023-ASH-13/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' 2023-ASH-13/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" 2023-ASH-13/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' ../entrance_data/'1623-2023-ASH-13.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" ../entrance_data/'1623-2023-ASH-13.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-13/l/|g' ../entrance_data/'1623-2023-ASH-13.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-13/l/|g" ../entrance_data/'1623-2023-ASH-13.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-13/t/|g' ../entrance_data/'1623-2023-ASH-13.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-13/t/|g" ../entrance_data/'1623-2023-ASH-13.html'
mv l/"2023-ASH-13-entrance2.html" 2023-ASH-13/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' 2023-ASH-13/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" 2023-ASH-13/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' ../entrance_data/'1623-2023-ASH-13.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" ../entrance_data/'1623-2023-ASH-13.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-13/l/|g' ../entrance_data/'1623-2023-ASH-13.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-13/l/|g" ../entrance_data/'1623-2023-ASH-13.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-13/t/|g' ../entrance_data/'1623-2023-ASH-13.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-13/t/|g" ../entrance_data/'1623-2023-ASH-13.html'
mv t/"2023-ASH-13-entrance2.jpg" 2023-ASH-13/t
mv i/"2023-ASH-13-entrance2.jpg" 2023-ASH-13/i
mv t/"2023-ASH-13-entrance1.jpg" 2023-ASH-13/t
mv i/"2023-ASH-13-entrance1.jpg" 2023-ASH-13/i
mv l/"2023-ASH-13and14-entrances.html" 2023-ASH-14/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' 2023-ASH-14/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" 2023-ASH-14/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' ../entrance_data/'1623-2023-ASH-14.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" ../entrance_data/'1623-2023-ASH-14.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-14/l/|g' ../entrance_data/'1623-2023-ASH-14.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-14/l/|g" ../entrance_data/'1623-2023-ASH-14.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-14/t/|g' ../entrance_data/'1623-2023-ASH-14.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-14/t/|g" ../entrance_data/'1623-2023-ASH-14.html'
mv t/"2023-ASH-14-entrance1.jpg" 2023-ASH-14/t
mv i/"2023-ASH-14-entrance1.jpg" 2023-ASH-14/i
mv l/"2023-ASH-14-entrance1.html" 2023-ASH-14/l
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' 2023-ASH-14/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" 2023-ASH-14/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' ../entrance_data/'1623-2023-ASH-14.html'
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" ../entrance_data/'1623-2023-ASH-14.html'
sed -i 's|\/1623\/l\/|/1623/2023-ASH-14/l/|g' ../entrance_data/'1623-2023-ASH-14.html'
sed -i "s|\/1623\/l\/|/1623/2023-ASH-14/l/|g" ../entrance_data/'1623-2023-ASH-14.html'
sed -i 's|\/1623\/t\/|/1623/2023-ASH-14/t/|g' ../entrance_data/'1623-2023-ASH-14.html'
sed -i "s|\/1623\/t\/|/1623/2023-ASH-14/t/|g" ../entrance_data/'1623-2023-ASH-14.html'
mv t/"2023-ASH-13and14-entrances.jpg" 2023-ASH-14/t
mv i/"2023-ASH-13and14-entrances.jpg" 2023-ASH-14/i
mv t/"2023-BL-11-sketch.jpg" 2023-BL-11/t
mv i/"2023-BL-11-sketch.jpg" 2023-BL-11/i
mv t/"20230802_165708.jpg" 2023-BL-11/t
mv i/"20230802_165708.jpg" 2023-BL-11/i
mv t/"20230802_165823.jpg" 2023-BL-11/t
mv i/"20230802_165823.jpg" 2023-BL-11/i
mv l/"20230802_165708.html" 2023-BL-11/l
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' 2023-BL-11/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" 2023-BL-11/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' ../entrance_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" ../entrance_data/'1623-2023-BL-11.html'
sed -i 's|\/1623\/l\/|/1623/2023-BL-11/l/|g' ../entrance_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/l\/|/1623/2023-BL-11/l/|g" ../entrance_data/'1623-2023-BL-11.html'
sed -i 's|\/1623\/t\/|/1623/2023-BL-11/t/|g' ../entrance_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/t\/|/1623/2023-BL-11/t/|g" ../entrance_data/'1623-2023-BL-11.html'
mv l/"2023-BL-11-sketch.html" 2023-BL-11/l
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' 2023-BL-11/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" 2023-BL-11/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' ../cave_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" ../cave_data/'1623-2023-BL-11.html'
sed -i 's|\/1623\/l\/|/1623/2023-BL-11/l/|g' ../cave_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/l\/|/1623/2023-BL-11/l/|g" ../cave_data/'1623-2023-BL-11.html'
sed -i 's|\/1623\/t\/|/1623/2023-BL-11/t/|g' ../cave_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/t\/|/1623/2023-BL-11/t/|g" ../cave_data/'1623-2023-BL-11.html'
mv l/"20230802_165823.html" 2023-BL-11/l
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' 2023-BL-11/l/*.html
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" 2023-BL-11/l/*.html
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' ../entrance_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" ../entrance_data/'1623-2023-BL-11.html'
sed -i 's|\/1623\/l\/|/1623/2023-BL-11/l/|g' ../entrance_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/l\/|/1623/2023-BL-11/l/|g" ../entrance_data/'1623-2023-BL-11.html'
sed -i 's|\/1623\/t\/|/1623/2023-BL-11/t/|g' ../entrance_data/'1623-2023-BL-11.html'
sed -i "s|\/1623\/t\/|/1623/2023-BL-11/t/|g" ../entrance_data/'1623-2023-BL-11.html'
mv t/"2023-ASH-15-entrance3.jpg" 303/t
mv i/"2023-ASH-15-entrance3.jpg" 303/i
mv l/"2023-ASH-15-entrance3.html" 303/l
sed -i 's|\/1623\/i\/|/1623/303/i/|g' 303/l/*.html
sed -i "s|\/1623\/i\/|/1623/303/i/|g" 303/l/*.html
sed -i 's|\/1623\/i\/|/1623/303/i/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/i\/|/1623/303/i/|g" ../entrance_data/'1623-303.html'
sed -i 's|\/1623\/l\/|/1623/303/l/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/l\/|/1623/303/l/|g" ../entrance_data/'1623-303.html'
sed -i 's|\/1623\/t\/|/1623/303/t/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/t\/|/1623/303/t/|g" ../entrance_data/'1623-303.html'
mv t/"2023-ASH-15-entrance1.jpg" 303/t
mv i/"2023-ASH-15-entrance1.jpg" 303/i
mv l/"2023-ASH-15-entrance2.html" 303/l
sed -i 's|\/1623\/i\/|/1623/303/i/|g' 303/l/*.html
sed -i "s|\/1623\/i\/|/1623/303/i/|g" 303/l/*.html
sed -i 's|\/1623\/i\/|/1623/303/i/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/i\/|/1623/303/i/|g" ../entrance_data/'1623-303.html'
sed -i 's|\/1623\/l\/|/1623/303/l/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/l\/|/1623/303/l/|g" ../entrance_data/'1623-303.html'
sed -i 's|\/1623\/t\/|/1623/303/t/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/t\/|/1623/303/t/|g" ../entrance_data/'1623-303.html'
mv l/"2023-ASH-15-entrance1.html" 303/l
sed -i 's|\/1623\/i\/|/1623/303/i/|g' 303/l/*.html
sed -i "s|\/1623\/i\/|/1623/303/i/|g" 303/l/*.html
sed -i 's|\/1623\/i\/|/1623/303/i/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/i\/|/1623/303/i/|g" ../entrance_data/'1623-303.html'
sed -i 's|\/1623\/l\/|/1623/303/l/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/l\/|/1623/303/l/|g" ../entrance_data/'1623-303.html'
sed -i 's|\/1623\/t\/|/1623/303/t/|g' ../entrance_data/'1623-303.html'
sed -i "s|\/1623\/t\/|/1623/303/t/|g" ../entrance_data/'1623-303.html'
mv t/"2023-ASH-15-entrance2.jpg" 303/t
mv i/"2023-ASH-15-entrance2.jpg" 303/i
mv l/"2023-ASH-17-bothentrances.html" 306/l
sed -i 's|\/1623\/i\/|/1623/306/i/|g' 306/l/*.html
sed -i "s|\/1623\/i\/|/1623/306/i/|g" 306/l/*.html
sed -i 's|\/1623\/i\/|/1623/306/i/|g' ../entrance_data/'1623-306b.html'
sed -i "s|\/1623\/i\/|/1623/306/i/|g" ../entrance_data/'1623-306b.html'
sed -i 's|\/1623\/l\/|/1623/306/l/|g' ../entrance_data/'1623-306b.html'
sed -i "s|\/1623\/l\/|/1623/306/l/|g" ../entrance_data/'1623-306b.html'
sed -i 's|\/1623\/t\/|/1623/306/t/|g' ../entrance_data/'1623-306b.html'
sed -i "s|\/1623\/t\/|/1623/306/t/|g" ../entrance_data/'1623-306b.html'
mv l/"2023-ASH-17-bothentrances.html" 306/l
sed -i 's|\/1623\/i\/|/1623/306/i/|g' 306/l/*.html
sed -i "s|\/1623\/i\/|/1623/306/i/|g" 306/l/*.html
sed -i 's|\/1623\/i\/|/1623/306/i/|g' ../entrance_data/'1623-306a.html'
sed -i "s|\/1623\/i\/|/1623/306/i/|g" ../entrance_data/'1623-306a.html'
sed -i 's|\/1623\/l\/|/1623/306/l/|g' ../entrance_data/'1623-306a.html'
sed -i "s|\/1623\/l\/|/1623/306/l/|g" ../entrance_data/'1623-306a.html'
sed -i 's|\/1623\/t\/|/1623/306/t/|g' ../entrance_data/'1623-306a.html'
sed -i "s|\/1623\/t\/|/1623/306/t/|g" ../entrance_data/'1623-306a.html'
mv i/"2023-ASH-17-sketch.jpg" 306/i
mv l/"2023-ASH-17-sketch.html" 306/l
sed -i 's|\/1623\/i\/|/1623/306/i/|g' 306/l/*.html
sed -i "s|\/1623\/i\/|/1623/306/i/|g" 306/l/*.html
sed -i 's|\/1623\/i\/|/1623/306/i/|g' ../cave_data/'1623-306.html'
sed -i "s|\/1623\/i\/|/1623/306/i/|g" ../cave_data/'1623-306.html'
sed -i 's|\/1623\/l\/|/1623/306/l/|g' ../cave_data/'1623-306.html'
sed -i "s|\/1623\/l\/|/1623/306/l/|g" ../cave_data/'1623-306.html'
sed -i 's|\/1623\/t\/|/1623/306/t/|g' ../cave_data/'1623-306.html'
sed -i "s|\/1623\/t\/|/1623/306/t/|g" ../cave_data/'1623-306.html'
mv t/"2023-ASH-17-bothentrances.jpg" 306/t
mv i/"2023-ASH-17-bothentrances.jpg" 306/i
mv t/"2023-ASH-17-bothentrances.jpg" 306/t
mv i/"2023-ASH-17-bothentrances.jpg" 306/i
mv l/"2023-ASH-05-entrance-b1.html" 307/l
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../entrance_data/'1623-307b.html'
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../entrance_data/'1623-307b.html'
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../entrance_data/'1623-307b.html'
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../entrance_data/'1623-307b.html'
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../entrance_data/'1623-307b.html'
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../entrance_data/'1623-307b.html'
mv l/"2023-ASH-05-entrance-a1.html" 307/l
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../entrance_data/'1623-307a.html'
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../entrance_data/'1623-307a.html'
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../entrance_data/'1623-307a.html'
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../entrance_data/'1623-307a.html'
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../entrance_data/'1623-307a.html'
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../entrance_data/'1623-307a.html'
mv t/"2023-ASH-05-entrance-a3.jpg" 307/t
mv i/"2023-ASH-05-entrance-a3.jpg" 307/i
mv l/"2023-ASH-05-entrance-a2.html" 307/l
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../entrance_data/'1623-307a.html'
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../entrance_data/'1623-307a.html'
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../entrance_data/'1623-307a.html'
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../entrance_data/'1623-307a.html'
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../entrance_data/'1623-307a.html'
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../entrance_data/'1623-307a.html'
mv t/"2023-ASH-05-entrance-a2.jpg" 307/t
mv i/"2023-ASH-05-entrance-a2.jpg" 307/i
mv i/"2023-ASH-05-sketch.jpg" 307/i
mv t/"2023-ASH-05-entrance-a1.jpg" 307/t
mv i/"2023-ASH-05-entrance-a1.jpg" 307/i
mv l/"2023-ASH-05-sketch.html" 307/l
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../cave_data/'1623-307.html'
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../cave_data/'1623-307.html'
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../cave_data/'1623-307.html'
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../cave_data/'1623-307.html'
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../cave_data/'1623-307.html'
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../cave_data/'1623-307.html'
mv l/"2023-ASH-05-entrance-a3.html" 307/l
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../cave_data/'1623-307.html'
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../cave_data/'1623-307.html'
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../cave_data/'1623-307.html'
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../cave_data/'1623-307.html'
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../cave_data/'1623-307.html'
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../cave_data/'1623-307.html'
mv t/"2023-ASH-05-entrance-b1.jpg" 307/t
mv i/"2023-ASH-05-entrance-b1.jpg" 307/i
mv l/"2023-ASH-08-entrance-a3.html" 308/l
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../cave_data/'1623-308.html'
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../cave_data/'1623-308.html'
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../cave_data/'1623-308.html'
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../cave_data/'1623-308.html'
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../cave_data/'1623-308.html'
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../cave_data/'1623-308.html'
mv l/"2023-ASH-08-sketch.html" 308/l
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../cave_data/'1623-308.html'
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../cave_data/'1623-308.html'
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../cave_data/'1623-308.html'
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../cave_data/'1623-308.html'
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../cave_data/'1623-308.html'
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../cave_data/'1623-308.html'
mv l/"2023-ASH-08-entrance-a1.html" 308/l
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../entrance_data/'1623-308a.html'
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../entrance_data/'1623-308a.html'
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../entrance_data/'1623-308a.html'
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../entrance_data/'1623-308a.html'
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../entrance_data/'1623-308a.html'
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../entrance_data/'1623-308a.html'
mv t/"2023-ASH-08-entrance-b1.jpg" 308/t
mv i/"2023-ASH-08-entrance-b1.jpg" 308/i
mv l/"2023-ASH-08-entrance-b1.html" 308/l
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../entrance_data/'1623-308b.html'
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../entrance_data/'1623-308b.html'
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../entrance_data/'1623-308b.html'
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../entrance_data/'1623-308b.html'
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../entrance_data/'1623-308b.html'
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../entrance_data/'1623-308b.html'
mv t/"2023-ASH-08-entrance-a1.jpg" 308/t
mv i/"2023-ASH-08-entrance-a1.jpg" 308/i
mv t/"2023-ASH-08-entrance-a2.jpg" 308/t
mv i/"2023-ASH-08-entrance-a2.jpg" 308/i
mv i/"2023-ASH-08-sketch.jpg" 308/i
mv l/"2023-ASH-08-entrance-a2.html" 308/l
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../entrance_data/'1623-308a.html'
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../entrance_data/'1623-308a.html'
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../entrance_data/'1623-308a.html'
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../entrance_data/'1623-308a.html'
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../entrance_data/'1623-308a.html'
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../entrance_data/'1623-308a.html'
mv t/"2023-ASH-08-entrance-a3.jpg" 308/t
mv i/"2023-ASH-08-entrance-a3.jpg" 308/i
mv l/"2023-ASH-09-plan.html" 309/l
sed -i 's|\/1623\/i\/|/1623/309/i/|g' 309/l/*.html
sed -i "s|\/1623\/i\/|/1623/309/i/|g" 309/l/*.html
sed -i 's|\/1623\/i\/|/1623/309/i/|g' ../cave_data/'1623-309.html'
sed -i "s|\/1623\/i\/|/1623/309/i/|g" ../cave_data/'1623-309.html'
sed -i 's|\/1623\/l\/|/1623/309/l/|g' ../cave_data/'1623-309.html'
sed -i "s|\/1623\/l\/|/1623/309/l/|g" ../cave_data/'1623-309.html'
sed -i 's|\/1623\/t\/|/1623/309/t/|g' ../cave_data/'1623-309.html'
sed -i "s|\/1623\/t\/|/1623/309/t/|g" ../cave_data/'1623-309.html'
mv l/"2023-ASH-09-entrance2.html" 309/l
sed -i 's|\/1623\/i\/|/1623/309/i/|g' 309/l/*.html
sed -i "s|\/1623\/i\/|/1623/309/i/|g" 309/l/*.html
sed -i 's|\/1623\/i\/|/1623/309/i/|g' ../entrance_data/'1623-309a.html'
sed -i "s|\/1623\/i\/|/1623/309/i/|g" ../entrance_data/'1623-309a.html'
sed -i 's|\/1623\/l\/|/1623/309/l/|g' ../entrance_data/'1623-309a.html'
sed -i "s|\/1623\/l\/|/1623/309/l/|g" ../entrance_data/'1623-309a.html'
sed -i 's|\/1623\/t\/|/1623/309/t/|g' ../entrance_data/'1623-309a.html'
sed -i "s|\/1623\/t\/|/1623/309/t/|g" ../entrance_data/'1623-309a.html'
mv i/"2023-ASH-09-plan.jpg" 309/i
mv t/"2023-ASH-09-entrance1.jpg" 309/t
mv i/"2023-ASH-09-entrance1.jpg" 309/i
mv t/"2023-ASH-09-entrance2.jpg" 309/t
mv i/"2023-ASH-09-entrance2.jpg" 309/i
mv l/"2023-ASH-09-entrance1.html" 309/l
sed -i 's|\/1623\/i\/|/1623/309/i/|g' 309/l/*.html
sed -i "s|\/1623\/i\/|/1623/309/i/|g" 309/l/*.html
sed -i 's|\/1623\/i\/|/1623/309/i/|g' ../entrance_data/'1623-309a.html'
sed -i "s|\/1623\/i\/|/1623/309/i/|g" ../entrance_data/'1623-309a.html'
sed -i 's|\/1623\/l\/|/1623/309/l/|g' ../entrance_data/'1623-309a.html'
sed -i "s|\/1623\/l\/|/1623/309/l/|g" ../entrance_data/'1623-309a.html'
sed -i 's|\/1623\/t\/|/1623/309/t/|g' ../entrance_data/'1623-309a.html'
sed -i "s|\/1623\/t\/|/1623/309/t/|g" ../entrance_data/'1623-309a.html'
mv l/"2023-ASH-16-entrance2.html" 311/l
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../entrance_data/'1623-311.html'
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../entrance_data/'1623-311.html'
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../entrance_data/'1623-311.html'
mv t/"2023-ASH-16-entrance3.jpg" 311/t
mv i/"2023-ASH-16-entrance3.jpg" 311/i
mv l/"2023-ASH-16-entrance3.html" 311/l
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../entrance_data/'1623-311.html'
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../entrance_data/'1623-311.html'
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../entrance_data/'1623-311.html'
mv t/"2023-ASH-16-entrance2.jpg" 311/t
mv i/"2023-ASH-16-entrance2.jpg" 311/i
mv l/"2023-ASH-16-entrance1.html" 311/l
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../entrance_data/'1623-311.html'
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../entrance_data/'1623-311.html'
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../entrance_data/'1623-311.html'
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../entrance_data/'1623-311.html'
mv i/"2023-ASH-16-sketch.jpg" 311/i
mv l/"2023-ASH-16-sketch.html" 311/l
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../cave_data/'1623-311.html'
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../cave_data/'1623-311.html'
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../cave_data/'1623-311.html'
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../cave_data/'1623-311.html'
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../cave_data/'1623-311.html'
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../cave_data/'1623-311.html'
mv t/"2023-ASH-16-entrance1.jpg" 311/t
mv i/"2023-ASH-16-entrance1.jpg" 311/i
mv t/"photo_2023-10-30_22-07-43.jpg" 315/t
mv i/"photo_2023-10-30_22-07-43.jpg" 315/i
mv l/"photo_2023-10-30_22-07-43.html" 315/l
sed -i 's|\/1623\/i\/|/1623/315/i/|g' 315/l/*.html
sed -i "s|\/1623\/i\/|/1623/315/i/|g" 315/l/*.html
sed -i 's|\/1623\/i\/|/1623/315/i/|g' ../cave_data/'1623-315.html'
sed -i "s|\/1623\/i\/|/1623/315/i/|g" ../cave_data/'1623-315.html'
sed -i 's|\/1623\/l\/|/1623/315/l/|g' ../cave_data/'1623-315.html'
sed -i "s|\/1623\/l\/|/1623/315/l/|g" ../cave_data/'1623-315.html'
sed -i 's|\/1623\/t\/|/1623/315/t/|g' ../cave_data/'1623-315.html'
sed -i "s|\/1623\/t\/|/1623/315/t/|g" ../cave_data/'1623-315.html'


@@ -1,141 +0,0 @@
range(303, 316) 312
2023-RAWDB-02 312
range(303, 316) 313
2023-RAWDB-01 313
range(303, 316) 314
2023-KT-02 314
range(303, 316) 315
2023-JSS-01 315
('c', 'i', '2013-06', '1623-2013-06.html', '2013wallet23.jpg')
('c', 'l', '2013-06', '1623-2013-06.html', '2013wallet23.html')
('c', 'l', '2013-BL-01', '1623-2013-BL-01.html', '2013-BL-01.html')
('c', 't', '2013-BL-01', '1623-2013-BL-01.html', '2013-BL-01.jpg')
('c', 't', '2023-BL-11', '1623-2023-BL-11.html', '2023-BL-11-sketch.jpg')
('c', 'l', '2023-BL-11', '1623-2023-BL-11.html', '2023-BL-11-sketch.html')
('c', 'i', '306', '1623-306.html', '2023-ASH-17-sketch.jpg')
('c', 'l', '306', '1623-306.html', '2023-ASH-17-sketch.html')
('c', 't', '307', '1623-307.html', '2023-ASH-05-entrance-a3.jpg')
('c', 'i', '307', '1623-307.html', '2023-ASH-05-sketch.jpg')
('c', 'l', '307', '1623-307.html', '2023-ASH-05-sketch.html')
('c', 'l', '307', '1623-307.html', '2023-ASH-05-entrance-a3.html')
('c', 'l', '308', '1623-308.html', '2023-ASH-08-entrance-a3.html')
('c', 'l', '308', '1623-308.html', '2023-ASH-08-sketch.html')
('c', 'i', '308', '1623-308.html', '2023-ASH-08-sketch.jpg')
('c', 't', '308', '1623-308.html', '2023-ASH-08-entrance-a3.jpg')
('c', 'l', '309', '1623-309.html', '2023-ASH-09-plan.html')
('c', 'i', '309', '1623-309.html', '2023-ASH-09-plan.jpg')
('c', 'i', '311', '1623-311.html', '2023-ASH-16-sketch.jpg')
('c', 'l', '311', '1623-311.html', '2023-ASH-16-sketch.html')
('c', 't', '315', '1623-315.html', 'photo_2023-10-30_22-07-43.jpg')
('c', 'l', '315', '1623-315.html', 'photo_2023-10-30_22-07-43.html')
('e', 'l', '2012-sw-01', '1623-2012-sw-01.html', '2012-sw-01_i1.html')
('e', 't', '2012-sw-01', '1623-2012-sw-01.html', '2012-sw-01_i1.jpg')
('e', 'l', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i2.html')
('e', 'l', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i1.html')
('e', 't', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i1.jpg')
('e', 't', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i2.jpg')
('e', 'l', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_with-tag.html')
('e', 't', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_with-tag.jpg')
('e', 'l', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_looking-down.html')
('e', 't', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_looking-down.jpg')
('e', 't', '2017-AMS-02', '1623-2017-AMS-02.html', 'ent081-20170807.jpg')
('e', 'l', '2017-AMS-02', '1623-2017-AMS-02.html', 'near-ent-2017-ams-02.html')
('e', 't', '2017-AMS-02', '1623-2017-AMS-02.html', 'near-ent-2017-ams-02.jpg')
('e', 'l', '2017-AMS-02', '1623-2017-AMS-02.html', 'ent081-20170807.html')
('e', 'l', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_03.html')
('e', 't', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_03.jpg')
('e', 't', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_04.jpg')
('e', 'l', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_04.html')
('e', 't', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_looking_down_shaft-and-rift.jpg')
('e', 't', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_tag_arrows.jpg')
('e', 't', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_neil_view_west.jpg')
('e', 'l', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_looking_down_shaft-and-rift.html')
('e', 'l', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_tag_arrows.html')
('e', 'l', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_neil_view_west.html')
('e', 't', '2018-pf-03', '1623-2018-pf-03.html', '2018-pf-03_and_pf-02_arrows.jpg')
('e', 'l', '2018-pf-03', '1623-2018-pf-03.html', '2018-pf-03_and_pf-02_arrows.html')
('e', 'l', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance1.html')
('e', 'l', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance2.html')
('e', 't', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance2.jpg')
('e', 't', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance1.jpg')
('e', 'l', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance1.html')
('e', 't', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance1.jpg')
('e', 'l', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance2.html')
('e', 't', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance2.jpg')
('e', 'l', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance2.html')
('e', 't', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance2.jpg')
('e', 'l', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance1.html')
('e', 't', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance1.jpg')
('e', 't', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance2.jpg')
('e', 't', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance1.jpg')
('e', 'l', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance1.html')
('e', 'l', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance2.html')
('e', 't', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance2.jpg')
('e', 'l', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance1.html')
('e', 'l', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance2.html')
('e', 't', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance1.jpg')
('e', 'l', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance1.html')
('e', 't', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance1.jpg')
('e', 't', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance2.jpg')
('e', 'l', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance2.html')
('e', 'l', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance2.html')
('e', 't', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance1.jpg')
('e', 't', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance2.jpg')
('e', 'l', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance1.html')
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance2.jpg')
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance2.html')
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance4.html')
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance3.html')
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance1.html')
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance1.jpg')
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance3.jpg')
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance4.jpg')
('e', 'l', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance1.html')
('e', 't', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance1.jpg')
('e', 't', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance2.jpg')
('e', 'l', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance2.html')
('e', 'l', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance1.html')
('e', 'l', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance2.html')
('e', 't', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance2.jpg')
('e', 't', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance1.jpg')
('e', 'l', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-13and14-entrances.html')
('e', 't', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-14-entrance1.jpg')
('e', 'l', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-14-entrance1.html')
('e', 't', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-13and14-entrances.jpg')
('e', 't', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165708.jpg')
('e', 't', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165823.jpg')
('e', 'l', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165708.html')
('e', 'l', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165823.html')
('e', 't', '303', '1623-303.html', '2023-ASH-15-entrance3.jpg')
('e', 'l', '303', '1623-303.html', '2023-ASH-15-entrance3.html')
('e', 't', '303', '1623-303.html', '2023-ASH-15-entrance1.jpg')
('e', 'l', '303', '1623-303.html', '2023-ASH-15-entrance2.html')
('e', 'l', '303', '1623-303.html', '2023-ASH-15-entrance1.html')
('e', 't', '303', '1623-303.html', '2023-ASH-15-entrance2.jpg')
('e', 'l', '306', '1623-306b.html', '2023-ASH-17-bothentrances.html')
('e', 'l', '306', '1623-306a.html', '2023-ASH-17-bothentrances.html')
('e', 't', '306', '1623-306a.html', '2023-ASH-17-bothentrances.jpg')
('e', 't', '306', '1623-306b.html', '2023-ASH-17-bothentrances.jpg')
('e', 'l', '307', '1623-307b.html', '2023-ASH-05-entrance-b1.html')
('e', 'l', '307', '1623-307a.html', '2023-ASH-05-entrance-a1.html')
('e', 'l', '307', '1623-307a.html', '2023-ASH-05-entrance-a2.html')
('e', 't', '307', '1623-307a.html', '2023-ASH-05-entrance-a2.jpg')
('e', 't', '307', '1623-307a.html', '2023-ASH-05-entrance-a1.jpg')
('e', 't', '307', '1623-307b.html', '2023-ASH-05-entrance-b1.jpg')
('e', 'l', '308', '1623-308a.html', '2023-ASH-08-entrance-a1.html')
('e', 't', '308', '1623-308b.html', '2023-ASH-08-entrance-b1.jpg')
('e', 'l', '308', '1623-308b.html', '2023-ASH-08-entrance-b1.html')
('e', 't', '308', '1623-308a.html', '2023-ASH-08-entrance-a1.jpg')
('e', 't', '308', '1623-308a.html', '2023-ASH-08-entrance-a2.jpg')
('e', 'l', '308', '1623-308a.html', '2023-ASH-08-entrance-a2.html')
('e', 'l', '309', '1623-309a.html', '2023-ASH-09-entrance2.html')
('e', 't', '309', '1623-309a.html', '2023-ASH-09-entrance1.jpg')
('e', 't', '309', '1623-309a.html', '2023-ASH-09-entrance2.jpg')
('e', 'l', '309', '1623-309a.html', '2023-ASH-09-entrance1.html')
('e', 'l', '311', '1623-311.html', '2023-ASH-16-entrance2.html')
('e', 't', '311', '1623-311.html', '2023-ASH-16-entrance3.jpg')
('e', 'l', '311', '1623-311.html', '2023-ASH-16-entrance3.html')
('e', 't', '311', '1623-311.html', '2023-ASH-16-entrance2.jpg')
('e', 'l', '311', '1623-311.html', '2023-ASH-16-entrance1.html')
('e', 't', '311', '1623-311.html', '2023-ASH-16-entrance1.jpg')
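
The tuples above pair each entrance description page with the files it links to. Reading the
fields as (kind, subfolder, cave, entrance page, file) - 'e' for entrance, 'l' for a linked
caption page, 't' for a thumbnail - is inferred from context, and entrance-files.txt below is
just a placeholder name for this list; a minimal sketch that turns the tuples into the
per-cave target paths:
grep "^('e'" entrance-files.txt | tr -d "()' " |
while IFS=, read -r kind subfolder cave page file; do
    echo "1623/$cave/$subfolder/$file   (referenced from $page)"
done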


@@ -1,84 +0,0 @@
---------- 1626 fixing i/l/t folders for these caves and entrances
1626-2018-AD-02
1626-2018-AD-02 1626-2018-AD-02.html
1626-2018-DM-01
1626-2018-DM-01 1626-2018-DM-01.html
1626-2018-DM-02
1626-2018-DM-02 1626-2018-DM-02.html
1626-2018-DM-03
1626-2018-DM-03 1626-2018-DM-03.html
1626-2018-DM-05
1626-2018-DM-05 1626-2018-DM-05.html
1626-2018-DM-06
1626-2018-DM-06 1626-2018-DM-06.html
1626-2018-ms-02
1626-2018-ms-02 1626-2018-ms-02.html
1626-2018-ms-03
1626-2018-ms-03 1626-2018-ms-03.html
1626-2018-ms-04
1626-2018-ms-04 1626-2018-ms-04.html
1626-2018-pw-01
1626-2018-pw-01 1626-2018-pw-01.html
1626-2018-pw-02
1626-2018-pw-02 1626-2018-pw-02.html
1626-2018-pw-03
1626-2018-pw-03 1626-2018-pw-03.html
1626-2018-tk-01
1626-2018-tk-01 1626-2018-tk-01.html
1626-2018-tk-02
1626-2018-tk-02 1626-2018-tk-02.html
1626-2019-HT-01
1626-2019-HT-01 1626-2019-HT-01.html
1626-2019-HT-02
1626-2019-HT-02 1626-2019-HT-02.html
1626-2019-rh-01
1626-2019-rh-01 1626-2019-rh-01.html
1626-2019-rh-02
1626-2019-rh-02 1626-2019-rh-02.html
1626-2023-BL-01
1626-2023-BL-01 1626-2023-BL-01.html
1626-2023-BL-03
1626-2023-BL-03 1626-2023-BL-03.html
1626-2023-BL-04
1626-2023-BL-04a 1626-2023-BL-04a.html
1626-2023-BL-04b 1626-2023-BL-04b.html
1626-2023-BL-05
1626-2023-BL-05 1626-2023-BL-05.html
1626-2023-BL-06
1626-2023-BL-06 1626-2023-BL-06.html
1626-2023-BL-07
1626-2023-BL-07 1626-2023-BL-07.html
1626-2023-BL-09
1626-2023-BL-09 1626-2023-BL-09.html
1626-2023-BZ-01
1626-2023-BZ-01 1626-2023-BZ-01.html
1626-2023-ww-01
1626-2023-ww-01 1626-2023-ww-01.html
1626-2024-pb-01
1626-2024-pb-01 1626-2024-pb-01.html
1626-361
1626-361 1626-361.html
1626-E02
1626-E02 1626-E02.html
1626-E09
1626-E09 1626-E09.html
1626-E16
1626-E16 1626-E16.html
1626-E28
1626-E28 1626-E28.html
1626-casino-01
1626-casino-01 1626-casino-01.html
1626-loutoti-01
1626-loutoti-01 1626-loutoti-01.html
1626-upside-down-01
1626-upside-down-01 1626-upside-down-01.html
1626-2018-ad-03
1626-2018-ad-03 1626-2018-ad-03.html
1626-2023-BL-08
1626-2023-BL-08 1626-2023-BL-08.html
1626-LA11
1626-LA11 1626-LA11.html
1626-LA34
1626-LA34 1626-LA34.html
1626-LA12
1626-LA12 1626-LA12.html


@@ -1,77 +0,0 @@
# create an oci container image with
# cd /home/expo && podman build -t expo:dev --rm -f troggle/Containerfile
#
FROM docker.io/library/debian:bookworm
WORKDIR /home/expo2
RUN apt update && \
apt install -y postgresql apache2 survex rsync git cgit proftpd \
python3 python3-django python3-pil python3-piexif \
python3-bs4 python3-unidecode python3-cryptography \
libjs-codemirror
# do we need libjs-leaflet? libjs-sizzle? libjs-mgrs?
# Install non-packaged dependencies
# apt install CaveView bins libjs-proj4 ufraw from local repo/backports?
# bins - photo processing
# ufraw - raw images in photo collection. do we need ufraw-batch? brings in libtiff5 libgtkimageview0 libexiv2-14 libwebp6
# kanboard - kanboard organiser
# caveview.js - rotating cave on each page. brings in libjs-proj4 which brings in libjs-mgrs. We should update.
# tinymce (html editor)
# we also have host and rssh. probably not needed?
# Copy only the dependency files first
#wget troggle/pyproject.toml troggle/uv.lock
#RUN wget troggle/pyproject.toml && uv sync --frozen
RUN useradd -m expo -G sudo -s /bin/bash
# Optional:install and configure BoE
#add apache config, enable modules
#configure postgres
#Start up services for apache, proftpd, postgresql, cron?
#end of system stage
# User files - separate layer?
RUN chown expo:expo .
USER expo
RUN mkdir -p repositories/git && cd repositories/git && \
git clone http://expo.survex.com/repositories/troggle/.git && \
git clone http://expo.survex.com/repositories/expoweb/.git && \
git clone http://expo.survex.com/repositories/loser/.git && \
git clone http://expo.survex.com/repositories/drawings/.git
RUN ln -s repositories/git/troggle troggle && \
ln -s repositories/git/expoweb expoweb && \
ln -s repositories/git/loser loser && \
ln -s repositories/git/drawings drawings
RUN git config --global user.email "expo@potato.hut"
RUN git config --global user.name "expo"
RUN git config --global pull.rebase true
#rsync -az expo.survex.com:expofiles expofiles
#demo short version
#rsync -az expo.survex.com:expofiles/surveyscans/2018 expofiles/surveyscans/2018
#rsync -az expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
#/bin/sh is missing at this point - why?
RUN cd troggle && python3 databaseReset.py reset INIT
EXPOSE 8080
#Run postres process
CMD ["uv", "run", "python", "troggle/manage.py", "runserver", "0.0.0.0:8080"]
CMD ["bash"]
# move this file to the directory above troggle, loser etc before running the podman image build command.
# used image with:
# podman run -it --network=host --rm expo:dev

ERROR.txt  (557 lines deleted)

@@ -1,557 +0,0 @@
Creating test database for alias 'default' ('file:memorydb_default?mode=memory&cache=shared')...
test_fix_cave_bare_url115 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_cave_bare_url115)
Expect to get Page Not Found and status 404 ... ok
test_fix_cave_slug115 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_cave_slug115)
Expect to get Page Not Found and status 404 ... ok
test_fix_cave_url115 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_cave_url115) ... ERROR
test_fix_cave_url284 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_cave_url284) ... ERROR
test_fix_caves284 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_caves284) ... ERROR
test_fix_expedition (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_expedition) ... ERROR
test_fix_person (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_person) ... ok
test_fix_personexped (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_personexped) ... ok
test_fix_cave_loaded115 (troggle.core.TESTS.test_caves.FixtureTests.test_fix_cave_loaded115) ... ok
test_fix_cave_loaded284 (troggle.core.TESTS.test_caves.FixtureTests.test_fix_cave_loaded284) ... ok
test_fix_person_loaded (troggle.core.TESTS.test_caves.FixtureTests.test_fix_person_loaded) ... ok
test_page_personexpedition (troggle.core.TESTS.test_caves.FixtureTests.test_page_personexpedition) ... ok
test_logbook_exists (troggle.core.TESTS.test_imports.ImportTest.test_logbook_exists) ... ok
test_installs (troggle.core.TESTS.test_imports.SubprocessTest.test_installs)
Expects external software installed: cavern, survexport, git ... ok
test_loser_survex_status (troggle.core.TESTS.test_imports.SubprocessTest.test_loser_survex_status)
Expects no failures of survex files ... FAIL
test_repos_git_status (troggle.core.TESTS.test_imports.SubprocessTest.test_repos_git_status)
Expects clean git repos with no added files and no merge failures ... FAIL
test_utf8 (troggle.core.TESTS.test_imports.SubprocessTest.test_utf8)
Expects that utf8 is the default encoding when opening files ... ok
test_admin_login (troggle.core.TESTS.test_logins.ComplexLoginTests.test_admin_login) ... ok
test_authentication_login (troggle.core.TESTS.test_logins.ComplexLoginTests.test_authentication_login) ... ok
test_noinfo_login (troggle.core.TESTS.test_logins.ComplexLoginTests.test_noinfo_login) ... ok
test_ordinary_login (troggle.core.TESTS.test_logins.ComplexLoginTests.test_ordinary_login) ... ok
test_user_force (troggle.core.TESTS.test_logins.ComplexLoginTests.test_user_force) ... ok
test_fix_admin_login_fail (troggle.core.TESTS.test_logins.FixturePageTests.test_fix_admin_login_fail) ... ok
test_dwg_upload_drawing (troggle.core.TESTS.test_logins.PostTests.test_dwg_upload_drawing)
Expect no-suffix file to upload ... ok
test_dwg_upload_txt (troggle.core.TESTS.test_logins.PostTests.test_dwg_upload_txt)
Expect .pdf file to be refused upload ... ok
test_file_permissions (troggle.core.TESTS.test_logins.PostTests.test_file_permissions)
Expect to be allowed to write to SCANS_ROOT, DRAWINGS_DATA, SURVEX_DATA, EXPOWEB ... ok
test_photo_folder_create (troggle.core.TESTS.test_logins.PostTests.test_photo_folder_create)
Create folder for new user ... ERROR
test_photo_upload (troggle.core.TESTS.test_logins.PostTests.test_photo_upload)
Expect photo upload to work on any file (contrary to msg on screen) ... ERROR
test_photo_upload_rename (troggle.core.TESTS.test_logins.PostTests.test_photo_upload_rename)
Expect photo upload to work on any file (contrary to msg on screen) ... ERROR
test_scan_upload (troggle.core.TESTS.test_logins.PostTests.test_scan_upload)
Expect scan upload to wallet to work on any file ... ok
test_aliases (troggle.core.TESTS.test_parsers.ImportTest.test_aliases) ... ok
test_lbe (troggle.core.TESTS.test_parsers.ImportTest.test_lbe) ... ok
test_lbe_edit (troggle.core.TESTS.test_parsers.ImportTest.test_lbe_edit)
This page requires the user to be logged in first, hence the extra shenanigans ... ok
test_lbe_new (troggle.core.TESTS.test_parsers.ImportTest.test_lbe_new)
This page requires the user to be logged in first, hence the extra shenanigans ... ERROR
test_logbook_exists (troggle.core.TESTS.test_parsers.ImportTest.test_logbook_exists) ... ok
test_logbook_parse_issues (troggle.core.TESTS.test_parsers.ImportTest.test_logbook_parse_issues)
This is just testing the db not the web page ... ok
test_people (troggle.core.TESTS.test_parsers.ImportTest.test_people) ... ok
test_survexfiles (troggle.core.TESTS.test_parsers.ImportTest.test_survexfiles) ... ERROR
test_statistics (troggle.core.TESTS.test_urls.URLTests.test_statistics) ... ok
test_stats (troggle.core.TESTS.test_urls.URLTests.test_stats) ... ok
test_url_allscans (troggle.core.TESTS.test_urls.URLTests.test_url_allscans)
Test the {% url "allscans" %} reverse resolution ... ok
test_url_stats (troggle.core.TESTS.test_urls.URLTests.test_url_stats)
Test the {% url "stats" %} reverse resolution ... ok
test_url_survexcaveslist (troggle.core.TESTS.test_urls.URLTests.test_url_survexcaveslist)
Test the {% url "allscans" %} reverse resolution ... ok
test_url_threed (troggle.core.TESTS.test_urls.URLTests.test_url_threed)
Test the {% url "threed" %} reverse resolution ... ok
test_cave_kataster_not_found (troggle.core.TESTS.tests.PageTests.test_cave_kataster_not_found) ... ok
test_caves_page (troggle.core.TESTS.tests.PageTests.test_caves_page) ... ok
test_caves_page_kataster_not_found (troggle.core.TESTS.tests.PageTests.test_caves_page_kataster_not_found) ... ok
test_dataissues (troggle.core.TESTS.tests.PageTests.test_dataissues) ... ok
test_expoweb_dir (troggle.core.TESTS.tests.PageTests.test_expoweb_dir) ... ok
test_expoweb_dir_no_index (troggle.core.TESTS.tests.PageTests.test_expoweb_dir_no_index) ... ok
test_expoweb_dir_with_index2 (troggle.core.TESTS.tests.PageTests.test_expoweb_dir_with_index2) ... ok
test_expoweb_dir_with_index_htm (troggle.core.TESTS.tests.PageTests.test_expoweb_dir_with_index_htm) ... ok
test_expoweb_dir_with_index_html (troggle.core.TESTS.tests.PageTests.test_expoweb_dir_with_index_html) ... ok
test_expoweb_dirslash (troggle.core.TESTS.tests.PageTests.test_expoweb_dirslash) ... ok
test_expoweb_htm (troggle.core.TESTS.tests.PageTests.test_expoweb_htm) ... ok
test_expoweb_no_dir (troggle.core.TESTS.tests.PageTests.test_expoweb_no_dir) ... ok
test_expoweb_notfound (troggle.core.TESTS.tests.PageTests.test_expoweb_notfound) ... ok
test_expoweb_paths (troggle.core.TESTS.tests.PageTests.test_expoweb_paths) ... ok
test_expoweb_root (troggle.core.TESTS.tests.PageTests.test_expoweb_root) ... ok
test_expoweb_root_slash (troggle.core.TESTS.tests.PageTests.test_expoweb_root_slash) ... ok
test_expoweb_troggle_default (troggle.core.TESTS.tests.PageTests.test_expoweb_troggle_default) ... ok
test_expoweb_troggle_default_slash (troggle.core.TESTS.tests.PageTests.test_expoweb_troggle_default_slash) ... ok
test_expoweb_via_areaid (troggle.core.TESTS.tests.PageTests.test_expoweb_via_areaid) ... ok
test_not_found_survexfile_cave (troggle.core.TESTS.tests.PageTests.test_not_found_survexfile_cave) ... ok
test_page_admin (troggle.core.TESTS.tests.PageTests.test_page_admin) ... ok
test_page_admindocs_exped (troggle.core.TESTS.tests.PageTests.test_page_admindocs_exped) ... ok
test_page_dwgallfiles_empty (troggle.core.TESTS.tests.PageTests.test_page_dwgallfiles_empty) ... ok
test_page_dwgallfiles_empty_slash (troggle.core.TESTS.tests.PageTests.test_page_dwgallfiles_empty_slash) ... ok
test_page_dwgdataraw_empty (troggle.core.TESTS.tests.PageTests.test_page_dwgdataraw_empty) ... ok
test_page_expofile_document_loeffler_pdf (troggle.core.TESTS.tests.PageTests.test_page_expofile_document_loeffler_pdf) ... ok
test_page_expofile_document_png (troggle.core.TESTS.tests.PageTests.test_page_expofile_document_png) ... ok
test_page_expofile_document_rope_pdf (troggle.core.TESTS.tests.PageTests.test_page_expofile_document_rope_pdf) ... ok
test_page_expofile_documents (troggle.core.TESTS.tests.PageTests.test_page_expofile_documents) ... ok
test_page_expofile_documents_slash (troggle.core.TESTS.tests.PageTests.test_page_expofile_documents_slash) ... ok
test_page_expofile_writeup (troggle.core.TESTS.tests.PageTests.test_page_expofile_writeup) ... ok
test_page_expofiles_badness (troggle.core.TESTS.tests.PageTests.test_page_expofiles_badness) ... ok
test_page_expofiles_docs_dir (troggle.core.TESTS.tests.PageTests.test_page_expofiles_docs_dir) ... ok
test_page_expofiles_root_dir (troggle.core.TESTS.tests.PageTests.test_page_expofiles_root_dir) ... ok
test_page_expofiles_root_slash_dir (troggle.core.TESTS.tests.PageTests.test_page_expofiles_root_slash_dir) ... ok
test_page_folk (troggle.core.TESTS.tests.PageTests.test_page_folk) ... ok
test_page_photos_dir (troggle.core.TESTS.tests.PageTests.test_page_photos_dir) ... ok
test_page_photos_not_ok (troggle.core.TESTS.tests.PageTests.test_page_photos_not_ok) ... ok
test_page_photos_ok (troggle.core.TESTS.tests.PageTests.test_page_photos_ok) ... ok
test_page_site_media_css (troggle.core.TESTS.tests.PageTests.test_page_site_media_css) ... ok
test_page_site_media_ok (troggle.core.TESTS.tests.PageTests.test_page_site_media_ok) ... ok
test_page_slash_empty (troggle.core.TESTS.tests.PageTests.test_page_slash_empty) ... ok
test_page_ss (troggle.core.TESTS.tests.PageTests.test_page_ss) ... ok
test_page_survey_scans_dir (troggle.core.TESTS.tests.PageTests.test_page_survey_scans_dir) ... ok
test_page_survey_scans_empty (troggle.core.TESTS.tests.PageTests.test_page_survey_scans_empty) ... ok
test_stations (troggle.core.TESTS.tests.PageTests.test_stations) ... ok
test_survexdebug (troggle.core.TESTS.tests.PageTests.test_survexdebug) ... ok
test_surveximport (troggle.core.TESTS.tests.PageTests.test_surveximport) ... ok
test_therionissues (troggle.core.TESTS.tests.PageTests.test_therionissues) ... ok
test_allscans_view (troggle.core.TESTS.tests_copilot.TestSurveyScansView.test_allscans_view) ... ok
test_walletslistyear_view (troggle.core.TESTS.tests_copilot.TestSurveyScansView.test_walletslistyear_view) ... ok
test_import_Cave (troggle.core.TESTS.test_imports.SimpleTest.test_import_Cave) ... ok
test_import_TroggleModel (troggle.core.TESTS.test_imports.SimpleTest.test_import_TroggleModel) ... ok
test_import_core_views_caves (troggle.core.TESTS.test_imports.SimpleTest.test_import_core_views_caves) ... ok
test_import_imports (troggle.core.TESTS.test_imports.SimpleTest.test_import_imports) ... ok
test_import_parsers_QMs (troggle.core.TESTS.test_imports.SimpleTest.test_import_parsers_QMs) ... ok
test_import_parsers_logbooks (troggle.core.TESTS.test_imports.SimpleTest.test_import_parsers_logbooks) ... ok
test_import_parsers_mix (troggle.core.TESTS.test_imports.SimpleTest.test_import_parsers_mix) ... ok
test_import_parsers_people (troggle.core.TESTS.test_imports.SimpleTest.test_import_parsers_people) ... ok
test_import_parsers_survex (troggle.core.TESTS.test_imports.SimpleTest.test_import_parsers_survex) ... ok
test_import_parsers_surveys (troggle.core.TESTS.test_imports.SimpleTest.test_import_parsers_surveys) ... ok
test_import_urls (troggle.core.TESTS.test_imports.SimpleTest.test_import_urls) ... ok
test_import_views_uploads (troggle.core.TESTS.test_imports.SimpleTest.test_import_views_uploads) ... ok
test_import_views_walletedit (troggle.core.TESTS.test_imports.SimpleTest.test_import_views_walletedit) ... ok
test_test_setting (troggle.core.TESTS.test_imports.SimpleTest.test_test_setting) ... ok
======================================================================
ERROR: test_fix_cave_url115 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_cave_url115)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_caves.py", line 140, in test_fix_cave_url115
response = self.client.get("/1623/115.url") # yes this is intentional, see the inserted data above & fixture
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1049, in get
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 465, in get
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/caves.py", line 353, in cavepage
return rendercave(request, cave, cave.slug())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/caves.py", line 301, in rendercave
"year": current_expo()
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_fix_cave_url284 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_cave_url284)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_caves.py", line 148, in test_fix_cave_url284
response = self.client.get("/1623/284/284.html")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1049, in get
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 465, in get
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/caves.py", line 353, in cavepage
return rendercave(request, cave, cave.slug())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/caves.py", line 301, in rendercave
"year": current_expo()
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_fix_caves284 (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_caves284)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_caves.py", line 190, in test_fix_caves284
response = self.client.get("/caves")
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1049, in get
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 465, in get
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/caves.py", line 156, in caveindex
"cavepage": True, "year": current_expo()},
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_fix_expedition (troggle.core.TESTS.test_caves.FixturePageTests.test_fix_expedition)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_caves.py", line 103, in test_fix_expedition
response = self.client.get("/expedition/2019")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1049, in get
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 465, in get
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/logbooks.py", line 69, in expedition
current = current_expo() # creates new expo after 31st Dec.
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_photo_folder_create (troggle.core.TESTS.test_logins.PostTests.test_photo_folder_create)
Create folder for new user
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_logins.py", line 259, in test_photo_folder_create
response = self.client.post("/photoupload/", data={"photographer": "GussieFinkNottle"})
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1070, in post
response = super().post(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 490, in post
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/auth.py", line 25, in __call__
return self.f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapper_view
return view_func(request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/uploads.py", line 611, in photoupload
year = current_expo()
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_photo_upload (troggle.core.TESTS.test_logins.PostTests.test_photo_upload)
Expect photo upload to work on any file (contrary to msg on screen)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_logins.py", line 189, in test_photo_upload
response = self.client.post(
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1070, in post
response = super().post(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 490, in post
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/auth.py", line 25, in __call__
return self.f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapper_view
return view_func(request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/uploads.py", line 611, in photoupload
year = current_expo()
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_photo_upload_rename (troggle.core.TESTS.test_logins.PostTests.test_photo_upload_rename)
Expect photo upload to work on any file (contrary to msg on screen)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_logins.py", line 228, in test_photo_upload_rename
response = self.client.post(
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1070, in post
response = super().post(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 490, in post
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/auth.py", line 25, in __call__
return self.f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapper_view
return view_func(request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/uploads.py", line 611, in photoupload
year = current_expo()
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_lbe_new (troggle.core.TESTS.test_parsers.ImportTest.test_lbe_new)
This page requires the user to be logged in first, hence the extra shenanigans
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_parsers.py", line 141, in test_lbe_new
response = self.client.get(f"/logbookedit/")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1049, in get
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 465, in get
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/auth.py", line 25, in __call__
return self.f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapper_view
return view_func(request, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/uploads.py", line 192, in logbookedit
year = current_expo()
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
ERROR: test_survexfiles (troggle.core.TESTS.test_parsers.ImportTest.test_survexfiles)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_parsers.py", line 195, in test_survexfiles
response = self.client.get("/survexfile/caves/")
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1049, in get
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 465, in get
return self.generic(
^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 617, in generic
return self.request(**r)
^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 1013, in request
self.check_exception(response)
File "/home/philip/p12d5/lib/python3.12/site-packages/django/test/client.py", line 743, in check_exception
raise exc_value
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/exception.py", line 55, in inner
response = get_response(request)
^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/core/handlers/base.py", line 197, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/troggle/core/views/survex.py", line 655, in survexcaveslist
"year": current_expo(),
^^^^^^^^^^^^^^
File "/home/philip/troggle/core/utils.py", line 126, in current_expo
make_new_expo(str(y))
File "/home/philip/troggle/core/utils.py", line 96, in make_new_expo
u = User.objects.get(username='expo')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/manager.py", line 87, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/philip/p12d5/lib/python3.12/site-packages/django/db/models/query.py", line 649, in get
raise self.model.DoesNotExist(
django.contrib.auth.models.User.DoesNotExist: User matching query does not exist.
======================================================================
FAIL: test_loser_survex_status (troggle.core.TESTS.test_imports.SubprocessTest.test_loser_survex_status)
Expects no failures of survex files
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_imports.py", line 261, in test_loser_survex_status
self.assertTrue(sp.returncode == 0, f"{cwd} - survex is unhappy")
AssertionError: False is not true : /home/philip/loser - survex is unhappy
======================================================================
FAIL: test_repos_git_status (troggle.core.TESTS.test_imports.SubprocessTest.test_repos_git_status)
Expects clean git repos with no added files and no merge failures
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/philip/troggle/core/TESTS/test_imports.py", line 233, in test_repos_git_status
self.assertIsNotNone(phmatch, msg)
AssertionError: unexpectedly None : /home/philip/troggle - Failed to find expected git output: "nothing to commit, working tree clean"
----------------------------------------------------------------------
Ran 109 tests in 13.383s
FAILED (failures=2, errors=9)
Destroying test database for alias 'default' ('file:memorydb_default?mode=memory&cache=shared')...
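
All nine ERRORs above bottom out in the same call: make_new_expo() in core/utils.py doing a bare
User.objects.get(username='expo') against a test database that has no such user, so the fix
presumably belongs in that helper (or in the test fixtures), not in the individual tests. A quick
sanity check before touching anything, assuming this log is saved as ERROR.txt; the two FAILs are
separate local issues (a dirty git tree and unhappy survex data):
grep -c "User matching query does not exist" ERROR.txt    # expect 9, one per ERROR
grep -n "make_new_expo" ERROR.txt | head                  # every hit goes via core/utils.py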


@@ -1,217 +1,46 @@
Updated 2 May 2023
Troggle is an application for caving expedition data management, originally created for use on Cambridge University Caving Club (CUCC) expeditions and licensed under the GNU Lesser General Public License.
Troggle is an application for caving expedition data management,
originally created for use on Cambridge University Caving Club (CUCC) expeditions
and licensed under the GNU Lesser General Public License.
Troggle has been forked into two projects. The original one is maintained by Aaron Curtis
and was used for Erebus caves in Antarctica.
The CUCC variant uses files as the definitive data, not the database, and lives at http://expo.survex.com/repositories/troggle/.git/
The versions have diverged markedly, not just in the software but also in the implicit conventions of how the directory structures of the survex files, the drawings and
the scans are arranged.
For the server setup, see /_deploy/debian/wookey-exposerver-recipe.txt
and see http://expo.survex.com/handbook/troggle/serverconfig.html
Much material which was in this file has been moved to
http://expo.survex.com/handbook/troggle/serverconfig.html
See copyright notices in
http://expo.survex.com/handbook/computing/contribute.html
and for context see
http://expo.survex.com/handbook/computing/onlinesystems.html
Troggle has been forked into two projects. The original one is maintained by Aaron Curtis and is used for Erebus caves. The CUCC variant uses files as the definitive data, not the database, and lives at expo.survex.com/troggle.
Troggle setup
=============
0. read the very extensive online documentation and stop reading this README...
well, come back to this README after you have read the HTML pages. Not everything has been transferred.
==========
http://expo.survex.com/handbook/troggle/troglaptop.html
http://expo.survex.com/handbook/troggle/serverconfig.html
http://expo.survex.com/handbook/troggle/trogdangoup.html
and at troggle/debian/serversetup
1. set up the ssh key-exchange with the git server so you can clone troggle
http://expo.survex.com/handbook/computing/keyexchange.html
Setting up directories
----------------------
see http://expo.survex.com/handbook/troggle/troglaptop.html and
http://expo.survex.com/handbook/troggle/serverconfig.html
Next, you need to fill in your local settings. Copy _deploy/WSL/localsettingsWSL.py
to a new file called localsettings.py and edit it and settings.py to match
your machine's file locations.
Follow the instructions contained in the file to fill out your settings.
{ in _deploy/old/ we have these which are all very out of date:
localsettings-expo-live.py is the python2.7 settings for the server.
localsettingsubuntu.py
localsettingsdocker.py
localsettingswindows.py
localsettingspotatohut.py
}
Python3, Django, and Database setup
Python, Django, and Database setup
-----------------------------------
We are now using Django 3.2 and will move to 4.2 in 2024
We are installing with python 3.11 (the server is running 3.9)
Troggle requires Django 1.4 or greater, and any version of Python that works with it.
Install Django with the following command:
Install Django using pip, not with apt, on your test system in a venv.
Conventionally on our main master expo server we install everything that we can as debian packages, not using pip.
apt-get install python-django (on debian/ubuntu)
[installation instructions removed - now in http://expo.survex.com/handbook/troggle/troglaptop.html ]
[venv description removed - read it in http://expo.survex.com/handbook/troggle/troglaptop.html ]
READ the os-trog.sh script !
READ the venv-trog.sh script !
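A minimal sketch of that venv route, assuming Debian and the Django 3.2 target mentioned above
(the canonical steps and the full package list are in venv-trog.sh and troglaptop.html):
python3 -m venv ~/troggle-venv          # directory name is just an example
source ~/troggle-venv/bin/activate
pip install --upgrade pip
pip install "Django==3.2.*"
pip install pillow piexif beautifulsoup4 unidecode   # the libraries the Containerfile gets via apt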
If you want to use MySQL or Postgresql, download and install them. However, you can also use Django with Sqlite3, which is included in Python and thus requires no extra installation.
Automatic Provisioning and Configuration
----------------------------------------
We don't do this - yet.
The most appropriate configuration tools today (2021) appear to be Bolt or Ansible
https://puppet.com/docs/bolt/latest/bolt.html (declarative, local)
https://docs.ansible.com/ansible/latest/user_guide/intro_getting_started.html (procedural, remote)
https://puppet.com/blog/automating-from-zero-to-something/
Troggle itself
-------------
Choose a directory where you will keep troggle, and svn check out Troggle into it using the following command:
We don't need anything for the deploy server itself, but we could do with something for setting
up test servers quickly to help get newbie developers up to speed faster. But learning a new tool
creates a barrier in itself. This is one reason most of us don't use Docker.
CSS and media files
-------------------
We are not using the STATICFILES capability.
We are serving css files from troggle/media/.. (see urls.py)
Plain CSS pages
---------------
When running the test server
manage.py runserver 0.0.0.0:8000
and without Apache running, we are serving CSS using this Django 'view':
view_surveys.cssfilessingle
i.e.
cssfilessingle() in core/view_surveys.py
Setting up survex
-----------------
You need to have survex installed as the command-line tool 'cavern' is
used as part of the survex import process.
$ sudo apt install survex
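A quick check that the import toolchain is really on the PATH (the .svx path below is a placeholder):
which cavern survexport || sudo apt install survex
cavern --version
cavern some/cave.svx        # produces a .3d file; use --output to control where it goes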
Setting up tables and importing survey data
-------------------------------------------
Run
$ sudo python databaseReset.py
from the troggle directory will give you instructions.
[ NB Adding a new year/expedition requires adding a column to the
folk/folk.csv table - a year doesn't exist until that is done.]
svn co http://troggle.googlecode.com/svn/
MariaDB database
----------------
Start it up with
$ sudo mysql -u expo -p
when it will prompt you to type in the password. Get this by reading the settings.py file in use on the server.
then
> CREATE DATABASE troggle;
> use troggle;
> exit;
If you want to work on the source code and be able to commit, you will need to use https instead of http, and your google account will need to be added to the troggle project members list. Contact aaron dot curtis at cantab dot net to get this set up.
Note the semicolons.
You can check the status of the db service:
$ sudo systemctl status mysql
You can start and stop the db service with
$ sudo systemctl restart mysql.service
$ sudo systemctl stop mysql.service
$ sudo systemctl start mysql.service
While logged in at a terminal session as expo on expo.survex.com
$ mysql -h localhost -u expo -p<password>
will get you the MariaDB command prompt: https://www.hostwinds.com/guide/how-to-use-mysql-mariadb-from-command-line/
then (Note the SEMICOLONS !):
>drop database troggle;
>create database troggle;
>quit
Somewhere I have notes for the GRANT PRIVS type runes...
Ah yes (at the MariaDB root prompt):
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
FLUSH PRIVILEGES;
(explained on https://chartio.com/resources/tutorials/how-to-grant-all-privileges-on-a-database-in-mysql/
- but you need to create the database too, as above)
The GRANT ALL PRIVILEGES step requires you to log in to MariaDB as root; sudo doesn't cut it.
These permissions live in MariaDB's own system database, which usually is untouched even if the troggle database gets creamed.
The 'somepassword' is specified in the localsettings.py file.
Next, you need to fill in your local settings. Copy either localsettingsubuntu.py or localsettingsserver.py to a new file called localsettings.py. Follow the instructions contained in the file to fill out your settings.
PERMISSIONS
https://linuxize.com/post/usermod-command-in-linux/
Setting up tables and importing legacy data
------------------------------------------
Run "python databaseReset.py reset" from the troggle directory.
THIS MAY BE OUT OF DATE - from 2022 we are running Apache as user 'expo' not 'www-data'
Once troggle is running, you can also log in and then go to "Import / export" data under "admin" on the menu.
so that the online editing system for SVX files works.
The same goes for /expoweb/ files, so that "edit this page" works and the New Cave
and New Entrance forms work.
sudo usermod -a -G expocvs expo
the expocvs group is used for git
all the users should be in this group
Adding a new year/expedition requires adding a column to the
noinfo/folk.csv table - a year doesn't exist until that is done.
Running a Troggle server with Apache
------------------------------------
Troggle also needs these aliases to be configured. These are set in
/home/expo/config/apache/expo.conf
on the expo server.
Running a Troggle server
------------------------
For high volume use, Troggle should be run using a web server like apache. However, a quick way to get started is to use the development server built into Django.
At least these need setting:
DocumentRoot /home/expo/expoweb
WSGIScriptAlias / /home/expo/troggle/wsgi.py
<Directory /home/expo/troggle>
<Files wsgi.py>
Require all granted
</Files>
</Directory>
To do this, run "python manage.py runserver" from the troggle directory.
the instructions for apache Alias commands are in comments at the end of
the urls.py file.
Unlike the django "manage.py runserver" method, apache requires a restart before it will use
any changed files:
sudo service apache2 restart
Olly's comments 20 July 2020:
olly: looking at /lib/systemd/system/apache2.service suggests so
olly: ExecStart=/usr/sbin/apachectl start
olly: ExecStop=/usr/sbin/apachectl stop
olly: ExecReload=/usr/sbin/apachectl graceful
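On a Debian box the WSGI side of that config also needs mod_wsgi installed and enabled before the
restart will pick it up; a sketch, assuming the packaged module rather than a pip-installed one:
sudo apt install libapache2-mod-wsgi-py3   # provides mod_wsgi for python3
sudo a2enmod wsgi                          # usually already enabled by the package install
sudo apachectl configtest                  # check expo.conf parses before restarting
sudo service apache2 restart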
Additions
---------
The python code has been manually cleaned using the 'black' and 'ruff' lint tools,
and the 'deptry' dependency checker. This needs doing every year or so.
See dependencies-check-deptry.txt
See troggle/pyproject.toml for configurations
Experimental additions
----------------------
These are untried tools which may help us document how troggle works in future.
pip install pygraphviz
pip install pyparsing pydot # installs fine
django extension graph_models # https://django-extensions.readthedocs.io/en/latest/graph_models.html
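As it says, untried here; for the record, the usual invocation (assuming django-extensions is
installed and "django_extensions" has been added to INSTALLED_APPS) would be:
pip install django-extensions pygraphviz
python manage.py graph_models -a -o troggle-models.png   # one diagram of all the models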


@@ -1,27 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Troggle - Coding Documentation</title>
<link rel="stylesheet" type="text/css" href="../media/css/main2.css" />
</head>
<body>
<h1>Troggle Code - README</h1>
<h2>Contents of README.txt file</h2>
<iframe name="erriframe" width="70%" height="500"
src="../README.txt" frameborder="1" ></iframe>
<h2>Troggle documentation in the Expo Handbook</h2>
<ul>
<li><a href="http://expo.survex.com/handbook/troggle/trogintro.html">Intro</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogindex.html">Troggle manual INDEX</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogarch.html">Troggle data model</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogimport.html">Troggle importing data</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogdesign.html">Troggle design decisions</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogdesignx.html">Troggle future architectures</a>
<li><a href="http://expo.survex.com/handbook/troggle/trogsimpler.html">a kinder simpler Troggle?</a>
</ul>
<hr />
</body></html>

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -1,397 +0,0 @@
<!DOCTYPE html>
<!-- cavebase.html - this text visible because this template has been included -->
<html lang="en">
<head>
<script>document.interestCohort = null;</script> <!-- Turn off Google FLoC -->
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>
</title>
<link rel="stylesheet" type="text/css" href="/css/main2.css" />
</head>
<body>
<!-- the year now as used in header text -->
<br />
<style>
.toolbarlinks
{
padding:5px;
background-color:#9ff;
text-align:center;
font-weight:bold;
}
.toolbarlinkslocal
{
padding:5px;
background-color:#f9f;
text-align:center;
font-weight:bold;
}
</style>
<div class="toolbarlinkslocal">
<a href="/logbookedit/">Logbook Entry</a> |
<a id="cavesLink" href="/caves_recent">Caves</a> |
<a id="qmsLink" href="/cave/qms/1623-290">QMs</a> |
<a href="/survexfile/caves">Survex files</a> |
<a href="/survey_scans">Scans</a> |
<a href="/walletedit">Upload Scans</a> |
<a href="/dwgfiles">Drawings</a> |
<a href="/dwgupload">Upload Drawings</a> |
<a href="/photoupload">Upload Photos</a> |
<a href="/gpxupload">Upload GPX</a> |
<br>
<a href="/dataissues">Data Issues</a> |
<a id="entsLink" href="/entrances">Entrances</a> |
<a id="entsLink" href="/stations">Stations</a> |
<a id="folklink" href="/folk">expoers</a> |
<a id="caversLink" href="/people">survey lengths</a> |
<a href="/stats">statistics</a> |
<a href="/wallets/year/2025">Wallets(2025)</a> |
<a href="/logreport/2025">Logbook(2025)</a> |
<a href="/expedition/2025">Expo(2025)</a> |
<a href="/controlpanel">Control panel</a>
</div>
<p>
<h1>Cave Index</h1>
<h3>Notable caves</h3>
<ul>
<li> 1623 <a href="/None">
290 <em></em></a>
<li> 1623 <a href="/None">
264 <em></em></a>
<li> 1623 <a href="/None">
258 <em></em></a>
<li> 1623 <a href="/None">
161 <em></em></a>
<li> 1623 <a href="/None">
204 <em></em></a>
<li> 1626 <a href="/None">
359 <em></em></a>
</ul>
Red star <span style="color: red">&#10033;</span> against a name indicates that no survex file is explicitly associated with the cave<br />
Blue star <span style="color: blue">&#10033;</span> against a name indicates that no survex file is explicitly associated with the cave but it is marked as 'fully explored'<br />
Blue triangle <span style="color: #43C6DB">&#x25BC;</span> against a name indicates that the cave is 'pending' creation properly.<br />
Orange triangle <span style="color: orange">&#x25B2;</span> against a name indicates that the cave has no Entrance (and is not 'pending').<br />
Black triangle <span style="color: black">&#x25B2;</span> against a name indicates that the cave has an Entrance, but no entrances have valid located survey stations.<br />
Red triangle <span style="color: red">&#x25BC;</span> against a name indicates that the cave has unticked QMs<br />
<span style="color:mediumvioletred">Cavename in this colour</span> means that the cave is undescended/unexplored.
<p>See <em><a href="/caves_undropped">Undropped Caves</a></em> for all unexplored caves<br />
See <em> <a href="/enttags">Lost Caves</a></em> for caves we have mislaid.<br />
See <em> <a href="/caves_recent">Recent Caves</a></em> for a shorter list of recent caves.
<p style="text-align:right">
<a href="/newcave/">New Cave</a><br>
<a href="/noinfo/cave-number-index">Cave Number Index - kept updated</a>
</p>
<h3>This year's caves</h3>
<div style="column-count: 3;">
<table class="searchable">
</table>
</div>
<h3>1623</h3>
<div style="column-count: 3;">
<table class="searchable">
<a href="/1623/115.url">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
115 <em>Schnellzugh&ouml;hle</em></a>
<span title="the cave has no Entrance (and is not 'pending')" style="color: orange">&#x25B2;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
<a href="/None">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
161 <em></em></a>
<span title="the cave is 'pending' creation properly" style="color: #43C6DB">&#x25BC;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
<a href="/None">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
204 <em></em></a>
<span title="the cave is 'pending' creation properly" style="color: #43C6DB">&#x25BC;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
<a href="/None">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
258 <em></em></a>
<span title="the cave is 'pending' creation properly" style="color: #43C6DB">&#x25BC;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
<a href="/None">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
264 <em></em></a>
<span title="the cave is 'pending' creation properly" style="color: #43C6DB">&#x25BC;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
<a href="/1623/284/284.html">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
284 <em>Seetrichter (Lake bottom)</em></a>
<span title="the cave has no Entrance (and is not 'pending')" style="color: orange">&#x25B2;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
<a href="/None">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
290 <em></em></a>
<span title="the cave is 'pending' creation properly" style="color: #43C6DB">&#x25BC;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
</table>
</div>
<p style="text-align:right">
<a href="/newcave/">New Cave</a><br>
<a href="/noinfo/cave-number-index">Cave Number Index - kept updated</a>
</p>
<h3>1626</h3>
<div style="column-count: 3;">
<table class="searchable">
<a href="/None">
<span style="color:mediumvioletred" title="The cave is marked as unexplored/undescended">
359 <em></em></a>
<span title="the cave is 'pending' creation properly" style="color: #43C6DB">&#x25BC;</span>
<span title="no survex file is explicitly associated with the cave" style="color: red">&#10033;</span>
</span>
<br />
</table>
</div>
<p style="text-align:right">
<a href="/newcave/">New Cave</a><br>
<a href="/noinfo/cave-number-index">Cave Number Index - kept updated</a>
</p>
<h3>1627</h3>
<div style="column-count: 3;">
<table class="searchable">
</table>
</div>
<p style="text-align:right">
<a href="/newcave/">New Cave</a><br>
<a href="/noinfo/cave-number-index">Cave Number Index - kept updated</a>
</p>
<h3>1624</h3>
<div style="column-count: 3;">
<table class="searchable">
</table>
</div>
<p style="text-align:right">
<a href="/newcave/">New Cave</a><br>
<a href="/noinfo/cave-number-index">Cave Number Index - kept updated</a>
</p>
<div id="menu">
<ul id="menulinks">
<li><a href="/index.htm">Home</a></li>
<li><a href="/handbook/index.htm">Handbook</a>
</li>
<li><a href="/handbook/computing/onlinesystems.html">Online systems</a></li>
<li><a href="/handbook/logbooks.html#form">Make Logbook Entry</a></li>
<li><a href="/caves_recent">Caves</a>
</li>
<li><a href="/infodx.htm">Site index</a></li>
<li><a href="/pubs.htm">Reports</a></li>
<ul><li><a href="/years/2025">2025</a></li></ul>
<li><a href="https://expo.survex.com/kanboard/board/2">Kanboard</a></li>
<li><a href="/handbook/troggle/training/trogbegin.html">Troggle</a></li>
<li><form name=P method=get
action="https://expo.survex.com/search"
target="_top">
<input id="omega-autofocus" type=search name=P size=8 autofocus>
<input type=submit value="Search"></form></li>
<li> <b style="color:red">RUNNING ON LOCALSERVER</b> <br>slug:<br>newslug:<br>url:
</ul>
</div>
</body>
</html>


@@ -1,179 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
# MARIADB_SERVER_PASSWORD =
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
# default values, real secrets will be imported from credentials.py
#SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
SQLITEFILE = str(Path(__file__).parent.parent / "troggle.sqlite") # can be ':memory:'
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default
ADMINS = (
('Philip', 'philip.sargent@klebos.eu'), # only on dev
)
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
sys.path.append(str(REPOS_ROOT_PATH))
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
PHOTOS_YEAR = "2025"
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
# URL that handles the media served from MEDIA_ROOT.
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
URL_ROOT = "/"
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
DBSQLITE = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"NAME": SQLITEFILE,
# 'NAME' : ':memory:',
"USER": "expo", # Not used with sqlite3.
"PASSWORD": "sekrit", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
"default": {
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4",
},
"NAME": "troggle", # Or path to database file if using sqlite3.
"USER": "expo",
"PASSWORD": MARIADB_SERVER_PASSWORD,
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_PATH],
"OPTIONS": {
"debug": "DEBUG",
"context_processors": [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
"core.context.troggle_context", # in core/context.py - only used in expedition.html
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media", # includes a variable MEDIA_URL
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
"django.template.context_processors.tz",
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
"django.contrib.messages.context_processors.messages",
],
"loaders": [
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
],
},
},
]
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
print(" + finished importing troggle/localsettings.py")


@@ -1,22 +0,0 @@
#!/bin/bash
# Run this in a terminal : 'bash os-survey.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-survey.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install survex-aven -y
sudo apt install gpsprune qgis -y
cd ~/expo
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos


@@ -1,92 +0,0 @@
#!/bin/bash
# Run this in a terminal in your home directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
# sudo apt install python-is-python3 -y
python --version # ensure python is an alias for python3 not python2.7
ssh -V
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
# Already in Ubuntu 24.04 on WSL:
# sudo apt install git -y
# sudo apt install wget gpg
# sudo apt install sftp -y
# sudo apt install openssh-client -y
# sudo apt install rsync
# Now using uv not pip:
# sudo apt install python3-pip -y
sudo apt install sqlite3 -y
sudo apt install gedit -y
sudo apt install tig gitg meld -y
# python formatting https://docs.astral.sh/ruff/
sudo snap install ruff
# # do not actually use this any more
# sudo useradd expo
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# sudo apt install python3-distutils -y
# install uv
curl -LsSf https://astral.sh/uv/install.sh | sh
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start
# We don't install the later version or the earlier versions of python - for dev and "server mimic" environments
# we leave that to uv to install now.
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
# sudo apt install software-properties-common apt-transport-https
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
# sudo apt update
# sudo apt install code
mkdir ~/expo
cd ~/expo
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
echo "### because you can't clone the repos without a key"
git config --global user.email "philip.sargent@gmail.com"
git config --global user.name "Philip Sargent"
git config --global pull.rebase true
#Change this to clone using https?? at least for troggle?
git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings
mkdir expofiles
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos


@@ -1,67 +0,0 @@
#! /bin/bash
# create and sanitise files for pushing to repo
# catastrophically forgot to sanitise localsettingsWSL.py - oops.
# Make sure you have the WSL permissions system working, or you will push unsanitised files as this will fail
# Philip Sargent 2022/04/12
HOSTNAME=`hostname`
echo "** This copies file to _deploy/${HOSTNAME}/ !"
cd ..
cd troggle
echo `pwd`
echo deprecations.
PYTHON="uv run"
source .venv/bin/activate
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
deactivate
echo diffsettings.
rm diffsettings.txt
if test -f "diffsettings.txt"; then
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
exit
fi
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
$PYTHON manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettings-${HOSTNAME}.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
mkdir -p _deploy/${HOSTNAME}
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
cp uv.lock _deploy/${HOSTNAME}
cp *.sh _deploy/${HOSTNAME}
ls -tlr *.toml
uv tree
#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# $PYTHON reset-django.py
# $PYTHON manage.py makemigrations
# $PYTHON manage.py test
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py


@@ -1,36 +0,0 @@
#! /bin/bash
# Do these before final testing, *not* just before pushing:
# Changed to use uv not pip, requires manage.py to have uv structured uv comment in it.
PYTHON="uv run"
echo "** Run inspectdb:"
$PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -in "unable|error" troggle-inspectdb.py
echo ""
# count non-blank lines of python and template HTML code
# includes all variants of settings.py files
# fix this as core/utils.py has 28,000 lines of numbers.
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
echo "** Run reset-django.py - which deletes the database"
# This deletes the database so must run after generating troggle-inspectdb.py
$PYTHON reset-django.py
echo "** After cleanup deletion, remake all migrations."
$PYTHON manage.py makemigrations >/dev/null
$PYTHON manage.py migrate
echo "** Now running self check"
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
$PYTHON manage.py check -v 3 --deploy
echo "** Now running test suite"
# $PYTHON manage.py test -v 1
echo ""
echo `tail -1 lines-of-python.txt` non-comment lines of python.
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
echo '** If you have an error running manage.py, maybe you are not in an activated venv ? or your manage.py is not managed by uv properly ?'


@@ -1,235 +0,0 @@
version = 1
requires-python = ">=3.13"
[[package]]
name = "asgiref"
version = "3.9.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790 },
]
[[package]]
name = "beautifulsoup4"
version = "4.13.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "soupsieve" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/85/2e/3e5079847e653b1f6dc647aa24549d68c6addb4c595cc0d902d1b19308ad/beautifulsoup4-4.13.5.tar.gz", hash = "sha256:5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695", size = 622954 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", size = 105113 },
]
[[package]]
name = "coverage"
version = "7.10.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/61/83/153f54356c7c200013a752ce1ed5448573dca546ce125801afca9e1ac1a4/coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", size = 821662 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9f/08/4166ecfb60ba011444f38a5a6107814b80c34c717bc7a23be0d22e92ca09/coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", size = 217106 },
{ url = "https://files.pythonhosted.org/packages/25/d7/b71022408adbf040a680b8c64bf6ead3be37b553e5844f7465643979f7ca/coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", size = 217353 },
{ url = "https://files.pythonhosted.org/packages/74/68/21e0d254dbf8972bb8dd95e3fe7038f4be037ff04ba47d6d1b12b37510ba/coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", size = 248350 },
{ url = "https://files.pythonhosted.org/packages/90/65/28752c3a896566ec93e0219fc4f47ff71bd2b745f51554c93e8dcb659796/coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", size = 250955 },
{ url = "https://files.pythonhosted.org/packages/a5/eb/ca6b7967f57f6fef31da8749ea20417790bb6723593c8cd98a987be20423/coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", size = 252230 },
{ url = "https://files.pythonhosted.org/packages/bc/29/17a411b2a2a18f8b8c952aa01c00f9284a1fbc677c68a0003b772ea89104/coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", size = 250387 },
{ url = "https://files.pythonhosted.org/packages/c7/89/97a9e271188c2fbb3db82235c33980bcbc733da7da6065afbaa1d685a169/coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", size = 248280 },
{ url = "https://files.pythonhosted.org/packages/d1/c6/0ad7d0137257553eb4706b4ad6180bec0a1b6a648b092c5bbda48d0e5b2c/coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", size = 249894 },
{ url = "https://files.pythonhosted.org/packages/84/56/fb3aba936addb4c9e5ea14f5979393f1c2466b4c89d10591fd05f2d6b2aa/coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", size = 219536 },
{ url = "https://files.pythonhosted.org/packages/fc/54/baacb8f2f74431e3b175a9a2881feaa8feb6e2f187a0e7e3046f3c7742b2/coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", size = 220330 },
{ url = "https://files.pythonhosted.org/packages/64/8a/82a3788f8e31dee51d350835b23d480548ea8621f3effd7c3ba3f7e5c006/coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", size = 218961 },
{ url = "https://files.pythonhosted.org/packages/d8/a1/590154e6eae07beee3b111cc1f907c30da6fc8ce0a83ef756c72f3c7c748/coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", size = 217819 },
{ url = "https://files.pythonhosted.org/packages/0d/ff/436ffa3cfc7741f0973c5c89405307fe39b78dcf201565b934e6616fc4ad/coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", size = 218040 },
{ url = "https://files.pythonhosted.org/packages/a0/ca/5787fb3d7820e66273913affe8209c534ca11241eb34ee8c4fd2aaa9dd87/coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", size = 259374 },
{ url = "https://files.pythonhosted.org/packages/b5/89/21af956843896adc2e64fc075eae3c1cadb97ee0a6960733e65e696f32dd/coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", size = 261551 },
{ url = "https://files.pythonhosted.org/packages/e1/96/390a69244ab837e0ac137989277879a084c786cf036c3c4a3b9637d43a89/coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", size = 263776 },
{ url = "https://files.pythonhosted.org/packages/00/32/cfd6ae1da0a521723349f3129b2455832fc27d3f8882c07e5b6fefdd0da2/coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", size = 261326 },
{ url = "https://files.pythonhosted.org/packages/4c/c4/bf8d459fb4ce2201e9243ce6c015936ad283a668774430a3755f467b39d1/coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", size = 259090 },
{ url = "https://files.pythonhosted.org/packages/f4/5d/a234f7409896468e5539d42234016045e4015e857488b0b5b5f3f3fa5f2b/coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", size = 260217 },
{ url = "https://files.pythonhosted.org/packages/f3/ad/87560f036099f46c2ddd235be6476dd5c1d6be6bb57569a9348d43eeecea/coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", size = 220194 },
{ url = "https://files.pythonhosted.org/packages/36/a8/04a482594fdd83dc677d4a6c7e2d62135fff5a1573059806b8383fad9071/coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", size = 221258 },
{ url = "https://files.pythonhosted.org/packages/eb/ad/7da28594ab66fe2bc720f1bc9b131e62e9b4c6e39f044d9a48d18429cc21/coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", size = 219521 },
{ url = "https://files.pythonhosted.org/packages/d3/7f/c8b6e4e664b8a95254c35a6c8dd0bf4db201ec681c169aae2f1256e05c85/coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", size = 217090 },
{ url = "https://files.pythonhosted.org/packages/44/74/3ee14ede30a6e10a94a104d1d0522d5fb909a7c7cac2643d2a79891ff3b9/coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", size = 217365 },
{ url = "https://files.pythonhosted.org/packages/41/5f/06ac21bf87dfb7620d1f870dfa3c2cae1186ccbcdc50b8b36e27a0d52f50/coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", size = 248413 },
{ url = "https://files.pythonhosted.org/packages/21/bc/cc5bed6e985d3a14228539631573f3863be6a2587381e8bc5fdf786377a1/coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", size = 250943 },
{ url = "https://files.pythonhosted.org/packages/8d/43/6a9fc323c2c75cd80b18d58db4a25dc8487f86dd9070f9592e43e3967363/coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", size = 252301 },
{ url = "https://files.pythonhosted.org/packages/69/7c/3e791b8845f4cd515275743e3775adb86273576596dc9f02dca37357b4f2/coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", size = 250302 },
{ url = "https://files.pythonhosted.org/packages/5c/bc/5099c1e1cb0c9ac6491b281babea6ebbf999d949bf4aa8cdf4f2b53505e8/coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", size = 248237 },
{ url = "https://files.pythonhosted.org/packages/7e/51/d346eb750a0b2f1e77f391498b753ea906fde69cc11e4b38dca28c10c88c/coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", size = 249726 },
{ url = "https://files.pythonhosted.org/packages/a3/85/eebcaa0edafe427e93286b94f56ea7e1280f2c49da0a776a6f37e04481f9/coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", size = 219825 },
{ url = "https://files.pythonhosted.org/packages/3c/f7/6d43e037820742603f1e855feb23463979bf40bd27d0cde1f761dcc66a3e/coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", size = 220618 },
{ url = "https://files.pythonhosted.org/packages/4a/b0/ed9432e41424c51509d1da603b0393404b828906236fb87e2c8482a93468/coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", size = 219199 },
{ url = "https://files.pythonhosted.org/packages/2f/54/5a7ecfa77910f22b659c820f67c16fc1e149ed132ad7117f0364679a8fa9/coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", size = 217833 },
{ url = "https://files.pythonhosted.org/packages/4e/0e/25672d917cc57857d40edf38f0b867fb9627115294e4f92c8fcbbc18598d/coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", size = 218048 },
{ url = "https://files.pythonhosted.org/packages/cb/7c/0b2b4f1c6f71885d4d4b2b8608dcfc79057adb7da4143eb17d6260389e42/coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", size = 259549 },
{ url = "https://files.pythonhosted.org/packages/94/73/abb8dab1609abec7308d83c6aec547944070526578ee6c833d2da9a0ad42/coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", size = 261715 },
{ url = "https://files.pythonhosted.org/packages/0b/d1/abf31de21ec92731445606b8d5e6fa5144653c2788758fcf1f47adb7159a/coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", size = 263969 },
{ url = "https://files.pythonhosted.org/packages/9c/b3/ef274927f4ebede96056173b620db649cc9cb746c61ffc467946b9d0bc67/coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", size = 261408 },
{ url = "https://files.pythonhosted.org/packages/20/fc/83ca2812be616d69b4cdd4e0c62a7bc526d56875e68fd0f79d47c7923584/coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", size = 259168 },
{ url = "https://files.pythonhosted.org/packages/fc/4f/e0779e5716f72d5c9962e709d09815d02b3b54724e38567308304c3fc9df/coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", size = 260317 },
{ url = "https://files.pythonhosted.org/packages/2b/fe/4247e732f2234bb5eb9984a0888a70980d681f03cbf433ba7b48f08ca5d5/coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", size = 220600 },
{ url = "https://files.pythonhosted.org/packages/a7/a0/f294cff6d1034b87839987e5b6ac7385bec599c44d08e0857ac7f164ad0c/coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", size = 221714 },
{ url = "https://files.pythonhosted.org/packages/23/18/fa1afdc60b5528d17416df440bcbd8fd12da12bfea9da5b6ae0f7a37d0f7/coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", size = 219735 },
{ url = "https://files.pythonhosted.org/packages/08/b6/fff6609354deba9aeec466e4bcaeb9d1ed3e5d60b14b57df2a36fb2273f2/coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", size = 208736 },
]
[[package]]
name = "django"
version = "5.2.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref" },
{ name = "sqlparse" },
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4c/8c/2a21594337250a171d45dda926caa96309d5136becd1f48017247f9cdea0/django-5.2.6.tar.gz", hash = "sha256:da5e00372763193d73cecbf71084a3848458cecf4cee36b9a1e8d318d114a87b", size = 10858861 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/af/6593f6d21404e842007b40fdeb81e73c20b6649b82d020bb0801b270174c/django-5.2.6-py3-none-any.whl", hash = "sha256:60549579b1174a304b77e24a93d8d9fafe6b6c03ac16311f3e25918ea5a20058", size = 8303111 },
]
[[package]]
name = "piexif"
version = "1.1.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fa/84/a3f25cec7d0922bf60be8000c9739d28d24b6896717f44cc4cfb843b1487/piexif-1.1.3.zip", hash = "sha256:83cb35c606bf3a1ea1a8f0a25cb42cf17e24353fd82e87ae3884e74a302a5f1b", size = 1011134 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/d8/6f63147dd73373d051c5eb049ecd841207f898f50a5a1d4378594178f6cf/piexif-1.1.3-py2.py3-none-any.whl", hash = "sha256:3bc435d171720150b81b15d27e05e54b8abbde7b4242cddd81ef160d283108b6", size = 20691 },
]
[[package]]
name = "pillow"
version = "11.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328 },
{ url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652 },
{ url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443 },
{ url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474 },
{ url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038 },
{ url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407 },
{ url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094 },
{ url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503 },
{ url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574 },
{ url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060 },
{ url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407 },
{ url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841 },
{ url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450 },
{ url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055 },
{ url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110 },
{ url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547 },
{ url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554 },
{ url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132 },
{ url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001 },
{ url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814 },
{ url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124 },
{ url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186 },
{ url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546 },
{ url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102 },
{ url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803 },
{ url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520 },
{ url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116 },
{ url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597 },
{ url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246 },
{ url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336 },
{ url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699 },
{ url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789 },
{ url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386 },
{ url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911 },
{ url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383 },
{ url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385 },
{ url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129 },
{ url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580 },
{ url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860 },
{ url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694 },
{ url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888 },
{ url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330 },
{ url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089 },
{ url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206 },
{ url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370 },
{ url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500 },
{ url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835 },
]
[[package]]
name = "pyaes"
version = "1.6.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/44/66/2c17bae31c906613795711fc78045c285048168919ace2220daa372c7d72/pyaes-1.6.1.tar.gz", hash = "sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f", size = 28536 }
[[package]]
name = "soupsieve"
version = "2.8"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679 },
]
[[package]]
name = "sqlparse"
version = "0.5.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415 },
]
[[package]]
name = "troggle"
version = "2025.9.26"
source = { virtual = "." }
[package.dev-dependencies]
dev = [
{ name = "beautifulsoup4" },
{ name = "coverage" },
{ name = "django" },
{ name = "piexif" },
{ name = "pillow" },
{ name = "pyaes" },
{ name = "unidecode" },
]
[package.metadata]
[package.metadata.requires-dev]
dev = [
{ name = "beautifulsoup4", specifier = ">=4.12.3" },
{ name = "coverage", specifier = ">=7.6.9" },
{ name = "django", specifier = ">=5.2.3" },
{ name = "piexif", specifier = ">=1.1.3" },
{ name = "pillow", specifier = ">=11.0.0" },
{ name = "pyaes", specifier = ">=1.6.1" },
{ name = "unidecode", specifier = ">=1.3.8" },
]
[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 },
]
[[package]]
name = "tzdata"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 },
]
[[package]]
name = "unidecode"
version = "1.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/7d/a8a765761bbc0c836e397a2e48d498305a865b70a8600fd7a942e85dcf63/Unidecode-1.4.0.tar.gz", hash = "sha256:ce35985008338b676573023acc382d62c264f307c8f7963733405add37ea2b23", size = 200149 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8f/b7/559f59d57d18b44c6d1250d2eeaa676e028b9c527431f5d0736478a73ba1/Unidecode-1.4.0-py3-none-any.whl", hash = "sha256:c3c7606c27503ad8d501270406e345ddb480a7b5f38827eafe4fa82a137f0021", size = 235837 },
]
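The [package.metadata.requires-dev] table above mirrors the dev dependency group that uv resolves into the pinned [[package]] entries. As a minimal sketch (not part of the repository; it assumes the lock file is saved under its usual name uv.lock next to pyproject.toml), the pinned versions can be listed with Python's standard tomllib:

# Sketch: list the package versions pinned in uv.lock (assumed filename and location).
import tomllib  # standard library since Python 3.11
from pathlib import Path

with Path("uv.lock").open("rb") as f:
    lock = tomllib.load(f)

for pkg in lock.get("package", []):
    print(f"{pkg['name']} == {pkg['version']}")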


@@ -1,53 +0,0 @@
#!/bin/bash
# now using uv, unbelievably simpler.
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
# Expects an Ubuntu 24.04 with all the gubbins already installed
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog24.04.sh running it in /home/username/
python3 --version
cd ~/expo/troggle
echo "-- EXPO folder [current directory]: `pwd`"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder: ${TROGDIR}"
cp dev.toml pyproject.toml
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
uv self update
uv sync
# fudge for philip's laptop prior to M2 SSD upgrade
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
uv pip list
echo "Django version:`uv run django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/expo/troggle'
'uv run django-admin'
'uv run manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
'uv run manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'uv run databaseReset.py reset INIT'
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi


@@ -1,200 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
Subsystem for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets will be imported from credentials.py in future
SQLITEFILE = "/home/expo/troggle.sqlite" # can be ':memory:'
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default
ADMINS = (
('Philip', 'philip.sargent@klebos.eu'),
)
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
# FILES = Path('/mnt/d/expofiles/')
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"
PHOTOS_YEAR = "2025"
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
LOGFILE = PYTHON_PATH / "troggle.log"
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = "/site-media/"
DIR_ROOT = Path("") # this should end in / if a value is given
URL_ROOT = "/"
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
# Note that these constants are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
# executables:
CAVERN = "cavern" # for parsing .svx files and producing .3d files
SURVEXPORT = "survexport" # for parsing .3d files and producing .pos files
DBSQLITE = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"NAME": SQLITEFILE,
# 'NAME' : ':memory:',
"USER": "expo", # Not used with sqlite3.
"PASSWORD": "sekrit", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
"default": {
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4",
},
"NAME": "troggle", # Or path to database file if using sqlite3.
"USER": "expo",
"PASSWORD": MARIADB_SERVER_PASSWORD,
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_PATH],
"OPTIONS": {
"debug": "DEBUG",
"context_processors": [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
"core.context.troggle_context", # in core/context.py - only used in expedition.html
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media", # includes a variable MEDIA_URL
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
"django.template.context_processors.tz",
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
"django.contrib.messages.context_processors.messages",
],
"loaders": [
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
],
},
},
]
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
# EXPOWEB_URL = "" # defunct, removed.
# SCANS_URL = '/survey_scans/' # defunct, removed.
sys.path.append(str(REPOS_ROOT_PATH))
sys.path.append(str(PYTHON_PATH))
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
# TEST_RUNNER = "django.test.runner.DiscoverRunner"
print(" + finished importing troggle/localsettings.py")
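The comments above note that the *_URL constants must end in "/" and that building them with Path() loses that trailing slash, which is why the file finishes by re-stringifying STATIC_URL and MEDIA_URL. A small standalone sketch (illustrative only; run on Linux/WSL, where Path is a PosixPath) of the behaviour being worked around:

# Sketch: Path() normalises away the trailing slash that Django URL settings need.
from pathlib import Path

static_url = Path("/", "/static/")   # same construction as STATIC_URL above
print(static_url)                    # /static  -- trailing slash gone
print(str(static_url) + "/")         # /static/ -- the sanitised form Django expects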


@@ -1,22 +0,0 @@
#!/bin/bash
# Run this in a terminal : 'bash os-survey.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-survey.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install survex-aven -y
sudo apt install gpsprune qgis -y
cd ~/expo
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos


@@ -1,92 +0,0 @@
#!/bin/bash
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
# sudo apt install python-is-python3 -y
python --version   # ensure python is an alias for python3, not python2.7
ssh -V
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
# Already in Ubuntu 24.04 on WSL:
# sudo apt install git -y
# sudo apt install wget gpg
# sudo apt install sftp -y
# sudo apt install openssh-client -y
# sudo apt install rsync
# Now using uv not pip:
# sudo apt install python3-pip -y
sudo apt install sqlite3 -y
sudo apt install gedit -y
sudo apt install tig gitg meld -y
# python formatting https://docs.astral.sh/ruff/
sudo snap install ruff
# # do not actually use this any more
# sudo useradd expo
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# sudo apt install python3-distutils -y
# install uv
curl -LsSf https://astral.sh/uv/install.sh | sh
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start
# We don't install the later version or the earlier versions of python - for dev and "server mimic" environments
# we leave that to uv to install now.
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
# sudo apt install software-properties-common apt-transport-https
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
# sudo apt update
# sudo apt install code
mkdir ~/expo
cd ~/expo
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
echo "### because you can't clone the repos without a key"
git config --global user.email "philip.sargent@gmail.com"
git config --global user.name "Philip Sargent"
git config --global pull.rebase true
#Change this to clone using https?? at least for troggle?
git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings
mkdir expofiles
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
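The clone and rsync steps above build the sibling layout under ~/expo (troggle, expoweb, loser, drawings, expofiles) that the localsettings files later point REPOS_ROOT_PATH at. A minimal sketch, not part of the repo, to confirm that layout exists before going any further:

# Sketch: check the expected sibling checkouts under ~/expo (folder names taken from the clones above).
from pathlib import Path

root = Path.home() / "expo"   # matches 'mkdir ~/expo' above
for name in ("troggle", "expoweb", "loser", "drawings", "expofiles"):
    print("ok     " if (root / name).is_dir() else "MISSING", root / name)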


@@ -1,63 +0,0 @@
#! /bin/bash
# create and sanitise files for pushing to repo
# catastrophically forgot to sanitize localsettingsWSL.py - oops.
# Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
# Philip Sargent 2022/04/12
HOSTNAME=`hostname`
echo "** This copies file to _deploy/${HOSTNAME}/ !"
cd ..
cd troggle
echo `pwd`
echo deprecations.
PYTHON="uv run"
source .venv/bin/activate
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
deactivate
echo diffsettings.
rm diffsettings.txt
if test -f "diffsettings.txt"; then
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
exit
fi
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
$PYTHON manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettings-${HOSTNAME}.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
mkdir -p _deploy/${HOSTNAME}
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
cp *.sh _deploy/${HOSTNAME}
#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# $PYTHON reset-django.py
# $PYTHON manage.py makemigrations
# $PYTHON manage.py test
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py
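The sed rules above blank out EXPOUSERPASS, EXPOADMINUSERPASS, EMAIL_HOST_PASSWORD and SECRET_KEY in the copies before they go anywhere near a repo. The same idea sketched in Python, purely for illustration (the deploy script itself uses sed, and the placeholder text here is shortened):

# Sketch only: blank out secret-bearing assignments in a settings copy, like the sed rules above.
import re
from pathlib import Path

SECRETS = ("EXPOUSERPASS", "EXPOADMINUSERPASS", "EMAIL_HOST_PASSWORD", "SECRET_KEY")

def scrub(path):
    text = Path(path).read_text()
    for name in SECRETS:
        text = re.sub(rf"^{name}\s*=.*$",
                      f'{name} = "scrubbed - real value stays in localsettings.py"',
                      text, flags=re.MULTILINE)
    Path(path).write_text(text)

# scrub("localsettings-myhost.py")  # hypothetical filename, for illustration only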


@@ -1,36 +0,0 @@
#! /bin/bash
# Do these before final testing, *not* just before pushing:
# Changed to use uv not pip; requires manage.py to have the uv inline script metadata comment block in it.
PYTHON="uv run"
echo "** Run inspectdb:"
$PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -in "unable|error" troggle-inspectdb.py
echo ""
# count non-blank lines of python and template HTML code
# includes all variants of settings.py files
# fix this as core/utils.py has 28,000 lines of numbers.
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
echo "** Run reset-django.py - which deletes the database"
# This deletes the database so must run after generating troggle-inspectdb.py
$PYTHON reset-django.py
echo "** After cleanup deletion, remake all migrations."
$PYTHON manage.py makemigrations >/dev/null
$PYTHON manage.py migrate
echo "** Now running self check"
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
$PYTHON manage.py check -v 3 --deploy
echo "** Now running test suite"
# $PYTHON manage.py test -v 1
echo ""
echo `tail -1 lines-of-python.txt` non-comment lines of python.
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
echo '** If you have an error running manage.py, maybe you are not in an activated venv ? or your manage.py is not managed by uv properly ?'
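The two find/egrep/awk pipelines above count non-blank, non-comment lines per file plus a grand total. A rough Python equivalent of the same total, shown only as a sketch (it copies the egrep rule, quirks included: any line containing a '#' anywhere is dropped):

# Sketch: count non-blank, non-comment .py lines, mirroring the pipeline's rules above.
from pathlib import Path

total = 0
for path in Path(".").rglob("*.py"):
    if ".venv" in path.parts or "migrations" in path.parts or path.name == "troggle-inspectdb.py":
        continue  # mirrors the grep -v exclusions
    lines = path.read_text(errors="ignore").splitlines()
    total += sum(1 for line in lines if line.strip() and "#" not in line)
print(total, "non-comment lines of python")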


@@ -1,53 +0,0 @@
#!/bin/bash
# now using uv, unbelieveably simpler.
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
# Expects an Ubuntu 24.04 with all the gubbins already installed
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog24.04.sh running it in /home/username/
python3 --version
cd ~/expo/troggle
echo "-- EXPO folder [current directory]: `pwd`"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder: ${TROGDIR}"
cp dev.toml pyproject.toml
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
uv self update
uv sync
# fudge for philip's laptop prior to M2 SSD upgrade
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
uv pip list
echo "Django version:`uv run django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/expo/troggle'
'uv run django-admin'
'uv run manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
'uv run manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'uv run databaseReset.py reset INIT'
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi


@@ -1,160 +0,0 @@
import os
import sys
import urllib.parse
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
Subsystem for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
NOTE this file is vastly out of sync with troggle/_deploy/wsl/localsettings.py
which is the most recent version used in active maintenance. There should be
essential differences, but there are many, many non-essential differences which
should be eliminated for clarity and to use modern idioms. 8 March 2023.
"""
print(" * importing troggle/localsettings.py")
# DO NOT check this file into the git repo - it contains real passwords.
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'expo', # Not used with sqlite3.
'PASSWORD' : '123456789012345', # Not used with sqlite3. Not a real password.
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
EXPOUSER = 'expo'
EXPOUSERPASS = 'Not a real password'
EXPOADMINUSER = 'expoadmin'
EXPOADMINUSERPASS = 'Not a real password'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PHOTOS_YEAR = "2023"
# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
PYTHON_PATH + "templates"
],
'OPTIONS': {
'debug': 'DEBUG',
'context_processors': [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
'core.context.troggle_context', # in core/context.py
'django.template.context_processors.debug',
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
'django.template.context_processors.i18n',
'django.template.context_processors.media', # includes a variable MEDIA_URL
'django.template.context_processors.static', # includes a variable STATIC_URL
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
# insert your own TEMPLATE_LOADERS here
]
},
},
]
PUBLIC_SITE = True
# This should be False for normal running
DEBUG = False
CACHEDPAGES = True # experimental page cache for a handful of page types
# executables:
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
SURVEYS = REPOS_ROOT_PATH
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = Path("") #this should end in / if a value is given
EXPOWEB_URL = '/'
SURVEYS_URL = '/survey_scans/'
REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"
#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
print(" + finished importing troggle/localsettings.py")


@@ -1,164 +0,0 @@
import os
import sys
import urllib.parse
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
Subsystem for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
# DO NOT check this file into the git repo - it contains real passwords.
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'expo', # Not used with sqlite3.
'PASSWORD' : '123456789012345', # Not used with sqlite3. Not the real password
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
EXPOUSERPASS = "nope"
EXPOADMINUSERPASS = "nope"
EMAIL_HOST_PASSWORD = "nope"
REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PHOTOS_YEAR = "2022"
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
PYTHON_PATH + "templates"
],
'OPTIONS': {
'debug': 'DEBUG',
'context_processors': [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
'core.context.troggle_context', # in core/context.py
'django.template.context_processors.debug',
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
'django.template.context_processors.i18n',
'django.template.context_processors.media', # includes a variable MEDIA_URL
'django.template.context_processors.static', # includes a variable STATIC_URL
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
# insert your own TEMPLATE_LOADERS here
]
},
},
]
PUBLIC_SITE = True
# This should be False for normal running
DEBUG = True
CACHEDPAGES = True # experimental page cache for a handful of page types
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'
# executables:
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
#SURVEYS = REPOS_ROOT_PATH
SCANS_ROOT = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")
# CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
# THREEDCACHEDIR = CACHEDIR + '3d/'
# THUMBNAILCACHE = CACHEDIR + 'thumbs'
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
#Note that all these *_URL constants are not actually used in urls.py, they should be..
#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = ''#this should end in / if a value is given
EXPOWEB_URL = '/'
SCANS_URL = '/survey_scans/'
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'
# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
# Sanitise these to be strings as all other code is expecting strings
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
LOGFILE = os.fspath(LOGFILE)
#SURVEYS = os.fspath(SURVEYS)
EXPOWEB = os.fspath(EXPOWEB)
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
SURVEX_DATA = os.fspath(SURVEX_DATA)
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
TEMPLATE_PATH = os.fspath(TROGGLE_PATH)
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
SCANS_ROOT = os.fspath(SCANS_ROOT)
LIBDIR = os.fspath(LIBDIR)
print(" + finished importing troggle/localsettings.py")


@@ -1,70 +0,0 @@
#!/bin/bash
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in the troggle directory: "bash venv-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 22.04 relatively clean install.
sudo apt install python-is-python3 -y
python --version   # ensure python is an alias for python3, not python2.7
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
sudo apt install sqlite3 -y
sudo apt install python3-pip -y
# this installs a shed-load of other stuff: binutils etc.
sudo apt install survex-aven -y
sudo apt install git openssh-client -y
# On a clean debian 11 (bullseye) installation with Xfce & ssh,
#on ubuntu 20.04:
#Package sftp is not available, but is referred to by another package.
#This may mean that the package is missing, has been obsoleted, or
#is only available from another source
#E: Package 'sftp' has no installation candidate
# On Ubuntu 20.04, with python10, the pip install fails.
# So you need to get the pip from source
# sudo curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
# but really you should be using 22.04
# and also, if using debian,
# sudo python3.10 -m pip install -U virtualenv
# as debian does not install everything that ubuntu does, you need:
sudo useradd expo
sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# default since 22.04
# sudo apt install python3.10
sudo apt install python3.11-venv -y
sudo apt install python3.11-dev -y
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y
sudo python -m pip install --upgrade pip
sudo apt install sftp -y
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start
git config --global user.email "you@example.com"
git config --global user.name "Your Name"
echo '###'
echo '### Currently set version of python'
python --version
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'


@@ -1,147 +0,0 @@
"""
Django settings for troggle project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Imports should be grouped in the following order:
# 1.Standard library imports.
# 2.Related third party imports.
# 3.Local application/library specific imports.
# 4.You should put a blank line between each group of imports.
print("* importing troggle/settings.py")
# default value, then gets overwritten by real secrets
SECRET_KEY = "not-the-real-secret-key-a#vaeozn0---^fj!355qki*vj2"
GIT = "git" # command for running git
# Note that this builds upon the django system installed
# global settings in
# django/conf/global_settings.py which is automatically loaded first.
# read https://docs.djangoproject.com/en/dev/topics/settings/
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Django settings for troggle project.
ALLOWED_HOSTS = ["*", "expo.survex.com", ".survex.com", "localhost", "127.0.0.1", "192.168.0.5"]
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
# LOGIN_URL = '/accounts/login/' # this is the default value so does not need to be set
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
USE_TZ = True
TIME_ZONE = "Europe/London"
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-uk"
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
USE_L10N = True
FIX_PERMISSIONS = []
# top-level survex file basename (without .svx)
SURVEX_TOPNAME = "1623-and-1626-no-schoenberg-hs"
# Caves for which survex files exist, but are not otherwise registered
# replaced (?) by expoweb/cave_data/pendingcaves.txt
# PENDING = ["1626-361", "2007-06", "2009-02",
# "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
# "2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
# "2018-pf-01", "2018-pf-02"]
APPEND_SLASH = (
False # never relevant because we have urls that match unknown files and produce an 'edit this page' response
)
SMART_APPEND_SLASH = True # not working as middleware is different after Dj2.0
LOGIN_REDIRECT_URL = "/" # does not seem to have any effect
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
# SESSION_COOKIE_SECURE = True # if enabled, cannot login to Django control panel, bug elsewhere?
# CSRF_COOKIE_SECURE = True # if enabled only sends cookies over SSL
X_FRAME_OPTIONS = "DENY" # changed to "DENY" after I eliminated all the iframes e.g. /xmlvalid.html
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" # from Django 3.2
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth", # includes the url redirections for login, logout
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.admindocs",
"django.forms", # Required to customise widget templates
# 'django.contrib.staticfiles', # We put our CSS etc explicitly in the right place so do not need this
"troggle.core",
)
FORM_RENDERER = "django.forms.renderers.TemplatesSetting" # Required to customise widget templates
# See the recommended order of these in https://docs.djangoproject.com/en/dev/ref/middleware/
# Note that this is a radically different onion architecture from earlier versions though it looks the same,
# see https://docs.djangoproject.com/en/dev/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
# Seriously, read this: https://www.webforefront.com/django/middlewaredjango.html which is MUCH BETTER than the docs
MIDDLEWARE = [
#'django.middleware.security.SecurityMiddleware', # SECURE_SSL_REDIRECT and SECURE_SSL_HOST # we don't use this
"django.middleware.gzip.GZipMiddleware", # not needed when expofiles and photos served by apache
"django.contrib.sessions.middleware.SessionMiddleware", # Manages sessions, if CSRF_USE_SESSIONS then it needs to be early
"django.middleware.common.CommonMiddleware", # DISALLOWED_USER_AGENTS, APPEND_SLASH and PREPEND_WWW
"django.middleware.csrf.CsrfViewMiddleware", # Cross Site Request Forgeries by adding hidden form fields to POST
"django.contrib.auth.middleware.AuthenticationMiddleware", # Adds the user attribute, representing the currently-logged-in user
"django.contrib.admindocs.middleware.XViewMiddleware", # this and docutils needed by admindocs
"django.contrib.messages.middleware.MessageMiddleware", # Cookie-based and session-based message support. Needed by admin system
"django.middleware.clickjacking.XFrameOptionsMiddleware", # clickjacking protection via the X-Frame-Options header
#'django.middleware.security.SecurityMiddleware', # SECURE_HSTS_SECONDS, SECURE_CONTENT_TYPE_NOSNIFF, SECURE_BROWSER_XSS_FILTER, SECURE_REFERRER_POLICY, and SECURE_SSL_REDIRECT
#'troggle.core.middleware.SmartAppendSlashMiddleware' # needs adapting after Dj2.0
]
ROOT_URLCONF = "troggle.urls"
WSGI_APPLICATION = "troggle.wsgi.application" # change to asgi as soon as we upgrade to Django 3.0
ACCOUNT_ACTIVATION_DAYS = 3
# AUTH_PROFILE_MODULE = 'core.person' # used by removed profiles app ?
QM_PATTERN = "\[\[\s*[Qq][Mm]:([ABC]?)(\d{4})-(\d*)-(\d*)\]\]"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
TEST_RUNNER = "django.test.runner.DiscoverRunner"
from localsettings import *
# localsettings needs to take precedence. Call it to override any existing vars.
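QM_PATTERN above encodes the [[QM:...]] question-mark references with four capture groups; reading the pattern, they look like an optional grade letter, a four-digit year, and two further numbers. A short sketch of what it captures, using an invented sample string (the group interpretation is a reading of the regex, not taken from other troggle code):

# Sketch: what QM_PATTERN captures, run against an invented example string.
import re

QM_PATTERN = r"\[\[\s*[Qq][Mm]:([ABC]?)(\d{4})-(\d*)-(\d*)\]\]"
m = re.search(QM_PATTERN, "dig lead at [[QM:B2018-290-12]] still open")
print(m.groups())   # ('B', '2018', '290', '12')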


@@ -1,147 +0,0 @@
"""
Django settings for troggle project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Imports should be grouped in the following order:
# 1.Standard library imports.
# 2.Related third party imports.
# 3.Local application/library specific imports.
# 4.You should put a blank line between each group of imports.
print("* importing troggle/settings.py")
# default value, then gets overwritten by real secrets
SECRET_KEY = "not-the-real-secret-key-a#vaeozn0---^fj!355qki*vj2"
GIT = "git" # command for running git
# Note that this builds upon the django system installed
# global settings in
# django/conf/global_settings.py which is automatically loaded first.
# read https://docs.djangoproject.com/en/dev/topics/settings/
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
# BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Django settings for troggle project.
ALLOWED_HOSTS = ["*", "expo.survex.com", ".survex.com", "localhost", "127.0.0.1", "192.168.0.5"]
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
# LOGIN_URL = '/accounts/login/' # this is the default value so does not need to be set
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
USE_TZ = True
TIME_ZONE = "Europe/London"
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-uk"
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
USE_L10N = True
FIX_PERMISSIONS = []
# top-level survex file basename (without .svx)
SURVEX_TOPNAME = "1623-and-1626-no-schoenberg-hs"
# Caves for which survex files exist, but are not otherwise registered
# replaced (?) by expoweb/cave_data/pendingcaves.txt
# PENDING = ["1626-361", "2007-06", "2009-02",
# "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
# "2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
# "2018-pf-01", "2018-pf-02"]
APPEND_SLASH = (
False # never relevant because we have urls that match unknown files and produce an 'edit this page' response
)
SMART_APPEND_SLASH = True # not working as middleware is different after Dj2.0
LOGIN_REDIRECT_URL = "/" # does not seem to have any effect
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
# SESSION_COOKIE_SECURE = True # if enabled, cannot login to Django control panel, bug elsewhere?
# CSRF_COOKIE_SECURE = True # if enabled only sends cookies over SSL
X_FRAME_OPTIONS = "DENY" # changed to "DENY" after I eliminated all the iframes e.g. /xmlvalid.html
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" # from Django 3.2
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth", # includes the url redirections for login, logout
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.admindocs",
"django.forms", # Required to customise widget templates
# 'django.contrib.staticfiles', # We put our CSS etc explicitly in the right place so do not need this
"troggle.core",
)
FORM_RENDERER = "django.forms.renderers.TemplatesSetting" # Required to customise widget templates
# See the recommended order of these in https://docs.djangoproject.com/en/dev/ref/middleware/
# Note that this is a radically different onion architecture from earlier versions though it looks the same,
# see https://docs.djangoproject.com/en/dev/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
# Seriously, read this: https://www.webforefront.com/django/middlewaredjango.html which is MUCH BETTER than the docs
MIDDLEWARE = [
#'django.middleware.security.SecurityMiddleware', # SECURE_SSL_REDIRECT and SECURE_SSL_HOST # we don't use this
"django.middleware.gzip.GZipMiddleware", # not needed when expofiles and photos served by apache
"django.contrib.sessions.middleware.SessionMiddleware", # Manages sessions, if CSRF_USE_SESSIONS then it needs to be early
"django.middleware.common.CommonMiddleware", # DISALLOWED_USER_AGENTS, APPEND_SLASH and PREPEND_WWW
"django.middleware.csrf.CsrfViewMiddleware", # Cross Site Request Forgeries by adding hidden form fields to POST
"django.contrib.auth.middleware.AuthenticationMiddleware", # Adds the user attribute, representing the currently-logged-in user
"django.contrib.admindocs.middleware.XViewMiddleware", # this and docutils needed by admindocs
"django.contrib.messages.middleware.MessageMiddleware", # Cookie-based and session-based message support. Needed by admin system
"django.middleware.clickjacking.XFrameOptionsMiddleware", # clickjacking protection via the X-Frame-Options header
#'django.middleware.security.SecurityMiddleware', # SECURE_HSTS_SECONDS, SECURE_CONTENT_TYPE_NOSNIFF, SECURE_BROWSER_XSS_FILTER, SECURE_REFERRER_POLICY, and SECURE_SSL_REDIRECT
#'troggle.core.middleware.SmartAppendSlashMiddleware' # needs adapting after Dj2.0
]
ROOT_URLCONF = "troggle.urls"
WSGI_APPLICATION = "troggle.wsgi.application" # change to asgi as soon as we upgrade to Django 3.0
ACCOUNT_ACTIVATION_DAYS = 3
# AUTH_PROFILE_MODULE = 'core.person' # used by removed profiles app ?
QM_PATTERN = "\[\[\s*[Qq][Mm]:([ABC]?)(\d{4})-(\d*)-(\d*)\]\]"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
TEST_RUNNER = "django.test.runner.DiscoverRunner"
from localsettings import *
# localsettings needs to take precedence. Call it to override any existing vars.


@@ -1,173 +0,0 @@
#!/bin/bash
# Crowley has python 3.9.2
# Taken from: footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog-crowley.sh'
echo '-- DONT RUN THIS - messes up permissions!'
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog-crowley.sh"'
# use the script os-trog-crowley.sh
# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
# NOW we set up troggle
PYTHON=python3.9
VENAME=p9d4 # python3.x and django 4
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"
if [ ! -f requirements.txt ]; then
echo "-- No requirements.txt found. Copy it from your most recent installation."
exit 1
fi
echo "## Using requirements.txt :"
cat requirements.txt
echo "##"
$PYTHON --version
# NOTE that when using a later or earlier version of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venv
cd ~
if [ ! -d $VENAME ]; then
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
$PYTHON -m venv $VENAME
else
echo "## /$VENAME/ already exists ! Delete it first."
exit 1
fi
# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"
cd $VENAME
echo "-- now in: ${PWD}"
source bin/activate
echo "### Activated."
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
# update local version of setuptools, more recent than OS version, needed for packages without wheels
echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools
PIP=pip
$PIP list > original-pip.list
$PIP freeze >original.txt
# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings
# fudge for philip's machine
if [ -d ${TROGDIR}/../expofiles ]; then
ln -s ${TROGDIR}/../expofiles expofiles
else
if [ ! -d /mnt/f/expofiles ]; then
sudo mkdir /mnt/f
sudo mount -t drvfs F: /mnt/f
else
ln -s /mnt/f/expofiles expofiles
fi
fi
echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
#sudo chmod -R 0777 *
echo "### links to expoweb, troggle etc. complete:"
ls -tla
echo "###"
echo "### now installing ${TROGDIR}/requirements.txt"
echo "###"
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
# seen on wsl2 as well as wsl1
# which ALSO ruins EXISTING permissions !
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
$PIP install -r ${TROGDIR}/requirements.txt
echo '### install from requirements.txt completed.'
echo '### '
$PIP freeze > requirements.txt
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort requirements.txt >2
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-requirements.txt
rm 1
rm 2
cp requirements.txt requirements-$VENAME.txt
cp requirements-$VENAME.txt troggle/requirements-$VENAME.txt
$PIP list > installed-pip.list
$PIP list -o > installed-pip-o.list
REQ=installation-record
mkdir $REQ
mv requirements-$VENAME.txt $REQ
mv original.txt $REQ
mv requirements.txt $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
cp fresh-requirements.txt ../requirements.txt
mv fresh-requirements.txt $REQ
cp troggle/`basename "$0"` $REQ
$PYTHON --version
python --version
echo "Django version:`django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors
# Ran 85 tests in 83.492s
# FAILED (failures=5)
## So you will need to run
#$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chmod only takes effect then.
'./pre-run.sh' (runs the migrations and then the tests)
'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
if [ ! -d /mnt/f/expofiles ]; then
echo '### No valid expofiles directory. Fix this before any tests will work.'
fi

View File

@@ -1,227 +0,0 @@
# This is the main Apache server configuration file. It contains the
# configuration directives that give the server its instructions.
# See http://httpd.apache.org/docs/2.4/ for detailed information about
# the directives and /usr/share/doc/apache2/README.Debian about Debian specific
# hints.
#
#
# Summary of how the Apache 2 configuration works in Debian:
# The Apache 2 web server configuration in Debian is quite different to
# upstream's suggested way to configure the web server. This is because Debian's
# default Apache2 installation attempts to make adding and removing modules,
# virtual hosts, and extra configuration directives as flexible as possible, in
# order to make automating the changes and administering the server as easy as
# possible.
# It is split into several files forming the configuration hierarchy outlined
# below, all located in the /etc/apache2/ directory:
#
# /etc/apache2/
# |-- apache2.conf
# | `-- ports.conf
# |-- mods-enabled
# | |-- *.load
# | `-- *.conf
# |-- conf-enabled
# | `-- *.conf
# `-- sites-enabled
# `-- *.conf
#
#
# * apache2.conf is the main configuration file (this file). It puts the pieces
# together by including all remaining configuration files when starting up the
# web server.
#
# * ports.conf is always included from the main configuration file. It is
# supposed to determine listening ports for incoming connections which can be
# customized anytime.
#
# * Configuration files in the mods-enabled/, conf-enabled/ and sites-enabled/
# directories contain particular configuration snippets which manage modules,
# global configuration fragments, or virtual host configurations,
# respectively.
#
# They are activated by symlinking available configuration files from their
# respective *-available/ counterparts. These should be managed by using our
# helpers a2enmod/a2dismod, a2ensite/a2dissite and a2enconf/a2disconf. See
# their respective man pages for detailed information.
#
# * The binary is called apache2. Due to the use of environment variables, in
# the default configuration, apache2 needs to be started/stopped with
# /etc/init.d/apache2 or apache2ctl. Calling /usr/bin/apache2 directly will not
# work with the default configuration.
# Global configuration
#
#
# ServerRoot: The top of the directory tree under which the server's
# configuration, error, and log files are kept.
#
# NOTE! If you intend to place this on an NFS (or otherwise network)
# mounted filesystem then please read the Mutex documentation (available
# at <URL:http://httpd.apache.org/docs/2.4/mod/core.html#mutex>);
# you will save yourself a lot of trouble.
#
# Do NOT add a slash at the end of the directory path.
#
#ServerRoot "/etc/apache2"
#
# The accept serialization lock file MUST BE STORED ON A LOCAL DISK.
#
#Mutex file:${APACHE_LOCK_DIR} default
#
# The directory where shm and other runtime files will be stored.
#
DefaultRuntimeDir ${APACHE_RUN_DIR}
#
# PidFile: The file in which the server should record its process
# identification number when it starts.
# This needs to be set in /etc/apache2/envvars
#
PidFile ${APACHE_PID_FILE}
#
# Timeout: The number of seconds before receives and sends time out.
#
Timeout 300
#
# KeepAlive: Whether or not to allow persistent connections (more than
# one request per connection). Set to "Off" to deactivate.
#
KeepAlive On
#
# MaxKeepAliveRequests: The maximum number of requests to allow
# during a persistent connection. Set to 0 to allow an unlimited amount.
# We recommend you leave this number high, for maximum performance.
#
MaxKeepAliveRequests 100
#
# KeepAliveTimeout: Number of seconds to wait for the next request from the
# same client on the same connection.
#
KeepAliveTimeout 5
# These need to be set in /etc/apache2/envvars
User ${APACHE_RUN_USER}
Group ${APACHE_RUN_GROUP}
#
# HostnameLookups: Log the names of clients or just their IP addresses
# e.g., www.apache.org (on) or 204.62.129.132 (off).
# The default is off because it'd be overall better for the net if people
# had to knowingly turn this feature on, since enabling it means that
# each client request will result in AT LEAST one lookup request to the
# nameserver.
#
HostnameLookups Off
# ErrorLog: The location of the error log file.
# If you do not specify an ErrorLog directive within a <VirtualHost>
# container, error messages relating to that virtual host will be
# logged here. If you *do* define an error logfile for a <VirtualHost>
# container, that host's errors will be logged there and not here.
#
ErrorLog ${APACHE_LOG_DIR}/error.log
#
# LogLevel: Control the severity of messages logged to the error_log.
# Available values: trace8, ..., trace1, debug, info, notice, warn,
# error, crit, alert, emerg.
# It is also possible to configure the log level for particular modules, e.g.
# "LogLevel info ssl:warn"
#
LogLevel warn
# Include module configuration:
IncludeOptional mods-enabled/*.load
IncludeOptional mods-enabled/*.conf
# Include list of ports to listen on
Include ports.conf
# Sets the default security model of the Apache2 HTTPD server. It does
# not allow access to the root filesystem outside of /usr/share and /var/www.
# The former is used by web applications packaged in Debian,
# the latter may be used for local directories served by the web server. If
# your system is serving content from a sub-directory in /srv you must allow
# access here, or in any related virtual host.
<Directory />
Options FollowSymLinks
AllowOverride None
Require all denied
</Directory>
<Directory /usr/share>
AllowOverride None
Require all granted
</Directory>
<Directory /var/www/>
Options Indexes FollowSymLinks
AllowOverride None
Require all granted
</Directory>
#<Directory /srv/>
# Options Indexes FollowSymLinks
# AllowOverride None
# Require all granted
#</Directory>
# AccessFileName: The name of the file to look for in each directory
# for additional configuration directives. See also the AllowOverride
# directive.
#
AccessFileName .htaccess
#
# The following lines prevent .htaccess and .htpasswd files from being
# viewed by Web clients.
#
<FilesMatch "^\.ht">
Require all denied
</FilesMatch>
#
# The following directives define some format nicknames for use with
# a CustomLog directive.
#
# These deviate from the Common Log Format definitions in that they use %O
# (the actual bytes sent including headers) instead of %b (the size of the
# requested file), because the latter makes it impossible to detect partial
# requests.
#
# Note that the use of %{X-Forwarded-For}i instead of %h is not recommended.
# Use mod_remoteip instead.
#
LogFormat "%v:%p %h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" vhost_combined
LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" combined
LogFormat "%h %l %u %t \"%r\" %>s %O" common
LogFormat "%{Referer}i -> %U" referer
LogFormat "%{User-agent}i" agent
# Include of directories ignores editors' and dpkg's backup files,
# see README.Debian for details.
# Include generic snippets of statements
IncludeOptional conf-enabled/*.conf
# Include the virtual host configurations:
IncludeOptional sites-enabled/*.conf
# vim: syntax=apache ts=4 sw=4 sts=4 sr noet

View File

@@ -1,22 +0,0 @@
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4", # To permit emojis in logbook entries and elsewhere
}, 'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'expo', # Not used with sqlite3.
'PASSWORD' : MARIADB_SERVER_PASSWORD, # Not used with sqlite3.
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}

View File

@@ -1,47 +0,0 @@
# envvars - default environment variables for apache2ctl
# this won't be correct after changing uid
unset HOME
# for supporting multiple apache2 instances
if [ "${APACHE_CONFDIR##/etc/apache2-}" != "${APACHE_CONFDIR}" ] ; then
SUFFIX="-${APACHE_CONFDIR##/etc/apache2-}"
else
SUFFIX=
fi
# Since there is no sane way to get the parsed apache2 config in scripts, some
# settings are defined via environment variables and then used in apache2ctl,
# /etc/init.d/apache2, /etc/logrotate.d/apache2, etc.
export APACHE_RUN_USER=expo
export APACHE_RUN_GROUP=expo
# temporary state file location. This might be changed to /run in Wheezy+1
export APACHE_PID_FILE=/var/run/apache2$SUFFIX/apache2.pid
export APACHE_RUN_DIR=/var/run/apache2$SUFFIX
export APACHE_LOCK_DIR=/var/lock/apache2$SUFFIX
# Only /var/log/apache2 is handled by /etc/logrotate.d/apache2.
export APACHE_LOG_DIR=/var/log/apache2$SUFFIX
## The locale used by some modules like mod_dav
#export LANG=C
## Uncomment the following line to use the system default locale instead:
. /etc/default/locale
export LANG
## The command to get the status for 'apache2ctl status'.
## Some packages providing 'www-browser' need '--dump' instead of '-dump'.
#export APACHE_LYNX='www-browser -dump'
## If you need a higher file descriptor limit, uncomment and adjust the
## following line (default is 8192):
#APACHE_ULIMIT_MAX_FILES='ulimit -n 65536'
## If you would like to pass arguments to the web server, add them below
## to the APACHE_ARGUMENTS environment.
#export APACHE_ARGUMENTS=''
## Enable the debug mode for maintainer scripts.
## This will produce a verbose output on package installations of web server modules and web application
## installations which interact with Apache
#export APACHE2_MAINTSCRIPT_DEBUG=1

View File

@@ -1,179 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
NOTE this file is out of sync with troggle/_deploy/wsl/localsettings.py
which is the most recent version used in active maintenance. There should be no
essential differences, but there are many, many non-essential differences which
should be eliminated for clarity and to use modern idioms.
Edited 31/12/2024
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "django-test@klebos.net"
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4", # To permit emojis in logbook entries and elsewhere
}, 'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'expo', # Not used with sqlite3.
'PASSWORD' : MARIADB_SERVER_PASSWORD, # Not used with sqlite3.
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PHOTOS_YEAR = "2024"
# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
PYTHON_PATH + "templates"
],
'OPTIONS': {
'debug': 'DEBUG',
'context_processors': [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
'core.context.troggle_context', # in core/troggle.py
'django.template.context_processors.debug',
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
'django.template.context_processors.i18n',
'django.template.context_processors.media', # includes a variable MEDIA_URL
'django.template.context_processors.static', # includes a variable STATIC_URL
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
# insert your own TEMPLATE_LOADERS here
]
},
},
]
PUBLIC_SITE = True
# This should be False for normal running
DEBUG = True
CACHEDPAGES = True # experimental page cache for a handful of page types
# executables:
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
SURVEYS = REPOS_ROOT_PATH
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = Path("") #this should end in / if a value is given
EXPOWEB_URL = '/'
SURVEYS_URL = '/survey_scans/'
REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"
#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = Path(URL_ROOT, "/photos/")
#PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
LOGFILE = '/var/log/troggle/troggle.log' # hmm. Not used since 2022
IMPORTLOGFILE = '/var/log/troggle/import.log' # hmm. Not used since 2022
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
print(" + finished importing troggle/localsettings.py")

View File

@@ -1,164 +0,0 @@
import os
import sys
import urllib.parse
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
NOTE this file is out of sync with troggle/_deploy/wsl/localsettings.py
which is the most recent version used in active maintenance. There should be no
essential differences, but there are many, many non-essential differences which
should be eliminated for clarity and to use modern idioms.
Edited 31/12/2024
"""
print(" * importing troggle/localsettings.py")
# DO NOT check this file into the git repo - it contains real passwords.
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4", # To permit emojis in logbook entries and elsewhere
},
'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'expo', # Not used with sqlite3.
'PASSWORD' : 'not-the-real-password', # Not used with sqlite3.
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
EXPOUSER = 'expo'
EXPOUSERPASS = 'not-the-real-password'
EXPOADMINUSER = 'expoadmin'
EXPOADMINUSERPASS = 'not-the-real-password'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
REPOS_ROOT_PATH = '/home/expo/'
sys.path.append(REPOS_ROOT_PATH)
sys.path.append(REPOS_ROOT_PATH + 'troggle')
# Define the path to the django app (troggle in this case)
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
PHOTOS_YEAR = "2024"
# add in 358 when they don't make it crash horribly
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
PYTHON_PATH + "templates"
],
'OPTIONS': {
'debug': 'DEBUG',
'context_processors': [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
'core.context.troggle_context', # in core/troggle.py
'django.template.context_processors.debug',
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
'django.template.context_processors.i18n',
'django.template.context_processors.media', # includes a variable MEDIA_URL
'django.template.context_processors.static', # includes a variable STATIC_URL
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
# insert your own TEMPLATE_LOADERS here
]
},
},
]
PUBLIC_SITE = True
# This should be False for normal running
DEBUG = True
CACHEDPAGES = True # experimental page cache for a handful of page types
# executables:
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
SURVEYS = REPOS_ROOT_PATH
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
FILES = REPOS_ROOT_PATH + 'expofiles'
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
#URL_ROOT = 'http://expo.survex.com/'
URL_ROOT = '/'
DIR_ROOT = Path("") #this should end in / if a value is given
EXPOWEB_URL = '/'
SURVEYS_URL = '/survey_scans/'
REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"
#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
MEDIA_URL = '/site_media/'
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
LOGFILE = '/var/log/troggle/troggle.log'
IMPORTLOGFILE = '/var/log/troggle/import.log'
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
#STATIC_URL = str(STATIC_URL) + "/"
#MEDIA_URL = str(MEDIA_URL) + "/"
print(" + finished importing troggle/localsettings.py")

View File

@@ -1,190 +0,0 @@
Instructions for setting up new expo debian server/VM
W says: Tue, Apr 23, 2024
Javascript gets installed in /usr/share/javascript. You can find that out by asking dpkg: dpkg -S openlayers (or reading debian wiki for javascript packaging)
If you use npm it just puts packages 'here' (i.e. in a node_packages dir in the current directory). I've been avoiding that so far.
openlayers wasn't in the old prospecting map - that was just JPEGs. It was in the slippy map I never really got working properly. It's also in martin's map-app.
But they were just examples of javascript packages.
=======
See also http://expo.survex.com/handbook/troggle/serverconfig.html
and troggle/README.txt
For Debian Bullseye (Debian 11) June 2022
adduser expo
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
apt install python-django apache2 certbot mysql-server survex make rsync
apt install libjs-openlayers
apt install git mercurial
(certbot does https certs)
for boe:
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl
setup apache configs for cucc and expo
#disable default website
a2dissite 000-default
a2ensite cucc-ssl
a2ensite expo-ssl
#a2enmod cgid
Boe config:
Alias /boe /home/expo/boe/boc/boc.pl
<Directory /home/expo/boe/boc>
AddHandler cgi-script .pl
SetHandler cgi-script
Options +ExecCGI
Require all granted
</Directory>
And remember to set both program and data dir to be
www-data:www-data
(optionally make file group read/write by treasurer account)
create empty repo by clicking create in boe interface
then set names in 'settings'
Set up mysql (as root)
mysql -p
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
install django:
sudo apt install python3-django python3-django-registration python3-django-imagekit python3-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi-py3
python-django-imagekit comes from https://salsa.debian.org/python-team/modules/python-django-imagekit
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
(both modified for stretch/python2). packages under /home/wookey/packages/
need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
need libapache2-mod-wsgi for apache wsgi support.
To keep bots from overloading the server use mpm_event instead of mpm_worker
sudo a2dismod mpm_prefork
sudo a2enmod mpm_event
Also adjust the numbers in the config file (~/config/apache/mods-available/mpm_event.conf)
for our tiddly server:
StartServers 1
MinSpareThreads 2
MaxSpareThreads 15
ThreadLimit 25
ThreadsPerChild 5
MaxRequestWorkers 25
MaxConnectionsPerChild 500
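For reference, in the stock Debian layout those directives sit inside an IfModule block, so the edited file ends up looking roughly like the sketch below (values as listed above; the wrapper is the packaged default, not copied from the live server):
<IfModule mpm_event_module>
        StartServers            1
        MinSpareThreads         2
        MaxSpareThreads         15
        ThreadLimit             25
        ThreadsPerChild         5
        MaxRequestWorkers       25
        MaxConnectionsPerChild  500
</IfModule>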
Kanboard:
debian python3-kanboard is a bit too simple, so use http://kanboard.org/ instead
unpack release files into /home/expo/kanboard
add this stanza to ~/config/apache/expo.conf
Alias /kanboard /home/expo/kanboard
<Directory /home/expo/kanboard>
AllowOverride All
Require all granted
</Directory>
Dependencies are php and php-bcmath
php with mpm_worker and cgi is simple, but we are not using
mpm_worker any more so this is not possible anyway.
php with mpm_event needs fpm mechanism and apache proxy_fcgi enabled
This mechanism is a lot more efficient on the server.
Good docs here: https://www.digitalocean.com/community/tutorials/how-to-configure-apache-http-with-mpm-event-and-php-fpm-on-ubuntu-18-04
apt install php-fpm libapache2-mod-fcgid
sudo a2dismod php7.4 (this normal config works via mpm_worker)
sudo a2enconf php7.4-fpm (this one works with mpm_event via proxy magic)
sudo a2enmod proxy
sudo a2enmod proxy_fcgi
apt install php-bcmath (for kanboard)
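The 'proxy magic' that a2enconf php7.4-fpm switches on is essentially a handler stanza of this shape (this is the form of the Debian-shipped snippet, not an excerpt from our server - check /etc/apache2/conf-available/php7.4-fpm.conf for the real thing):
<FilesMatch ".+\.ph(ar|p|tml)$">
    SetHandler "proxy:unix:/run/php/php7.4-fpm.sock|fcgi://localhost"
</FilesMatch>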
------------------------------
For Debian Stretch, June 2019.
-----------------------------
adduser expo
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
apt install python-django apache2 mysql-server survex make rsync
apt install libjs-openlayers make
apt install git mercurial mercurial-server?
for boe:
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl
obsolete-packages:
bins (move to jigl?) (for photos)
python-django 1.7
backports: survex therion
not-packaged: caveview
make these dirs available at top documentroot:
cuccfiles
expofiles
loser (link to repo)
tunneldata (link to repo)
troggle (link to repo)
expoweb (link to repo)
boc/boe
config
containing:
setup apache configs for cucc and expo
#disable default website
a2dissite 000-default
a2ensite cucc
a2ensite expo
a2enmod cgid
Boe config:
Alias /boe /home/expo/boe/boc/boc.pl
<Directory /home/expo/boe/boc>
AddHandler cgi-script .pl
SetHandler cgi-script
Options +ExecCGI
Require all granted
</Directory>
And remember to set both program and data dir to be
www-data:www-data
(optionally make file group read/write by treasurer account)
create empty repo by clicking create in boe interface
then set names in 'settings'
Set up mysql (as root)
mysql -p
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
install django:
NO!
This was:sudo apt install python-django python-django-registration python-django-imagekit python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi
Should be ?
sudo apt install python-django python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi
Check if this is correct:
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
(both modified for stretch/python2). packages under /home/wookey/packages/
need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
need libapache2-mod-wsgi for apache wsgi support.
On stretch the django 1.10 is no use so get rid of that:
apt remove python3-django python-django python-django-common python-django-doc
Then replace with django 1.7 (Needs to be built for stretch)
apt install python-django python-django-common python-django-doc
apt install python-django-registration python-django-imagekit python-django-tinymce
then hold them to stop them being upgraded by unattended upgrades:
echo "python-django hold" | sudo dpkg --set-selections
echo "python-django-common hold" | sudo dpkg --set-selections
echo "python-django-doc hold" | sudo dpkg --set-selections
#troggle has to have a writable logfile otherwise the website explodes
# 500 error on the server, and apache error log has non-rentrant errors
create /var/log/troggle/troggle.log
chown www-data:adm /var/log/troggle/troggle.log
chmod 660 /var/log/troggle/troggle.log

View File

@@ -1,7 +0,0 @@
# install the apport exception handler if available
try:
import apport_python_hook
except ImportError:
pass
else:
apport_python_hook.install()

View File

@@ -1 +0,0 @@
see .gitignore for those files which DO contain the secret passwords
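On a fresh checkout you have to create secret_credentials.py yourself. A minimal sketch is below; the names are the ones the settings files in this repo try to import, every value is a placeholder, and exactly which of them live here rather than in localsettings.py may differ on your machine:
SECRET_KEY = "placeholder-generate-your-own"
EXPOUSERPASS = "placeholder"
EXPOADMINUSERPASS = "placeholder"
EMAIL_HOST_PASSWORD = "placeholder"
MARIADB_SERVER_PASSWORD = "placeholder"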

View File

@@ -1,109 +0,0 @@
W says: Tue, Apr 23, 2024
Javascript gets installed in /usr/share/javascript. You can find that out by asking dpkg: dpkg -S openlayers (or reading debian wiki for javascript packaging)
If you use npm it just puts packages 'here' (i.e. in a node_packages dir in the current directory). I've been avoiding that so far.
openlayers wasn't in the old prospecting map - that was just JPEGs. It was in the slippy map I never really got working properly. It's also in martin's map-app.
But they were just examples of javascript packages.
adduser expo
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
apt install python-django apache2 mysql-server survex make rsync
apt install libjs-openlayers make
apt install git mercurial mercurial-server?
for boe:
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl
apt install ufraw for PEF image decoding.
sudo apt install python-django python-django-registration fonts-freefont-ttf libapache2-mod-wsgi python3-gdbm
# sudo apt install python-django-imagekit python-django-tinymce
obsolete-packages: bins (move to jigl?)
older python-django?
backports: survex therion
not-packaged: caveview
make these dirs available at top documentroot:
cuccfiles
expofiles
loser
tunneldata
troggle
expoweb
boc/boe
config
containing:
setup apache configs for cucc and expo
#disable default website
a2dissite 000-default
a2ensite cucc
a2ensite expo
a2enmod cgid
Boe config:
Alias /boe /home/expo/boe/boc/boc.pl
<Directory /home/expo/boe/boc>
AddHandler cgi-script .pl
SetHandler cgi-script
Options +ExecCGI
Require all granted
</Directory>
And remember to set both program and data dir to be
www-data:www-data
(optionally make file group read/write by treasurer account)
create empty repo by clicking create in boe interface
then set names in 'settings'
Set up mysql (as root)
mysql -p
CREATE DATABASE troggle;
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
Ctrl-D to exit
somepassword is set in localsettings.py
sudo service mariadb stop
sudo service mariadb start
to delete the database, it is
DROP DATABASE troggle;
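A quick sanity check that the grant took effect (illustrative invocation, use the real password when prompted):
mysql -u expo -p -e "SHOW DATABASES; USE troggle; SELECT 1;"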
install django:
sudo apt install python-django python-django-registration python-django-imagekit python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi
python-django-imagekit comes from https://salsa.debian.org/python-team/modules/python-django-imagekit
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
need libapache2-mod-wsgi for apache wsgi support.
On stretch the django 1.10 is no use so get rid of that:
apt remove python3-django python-django python-django-common python-django-doc
Then replace with django 1.7 (Needs to be built for stretch)
apt install python-django python-django-common python-django-doc
apt install python-django-registration python-django-imagekit python-django-tinymce
then hold them to stop them being upgraded by unattended upgrades:
echo "python-django hold" | sudo dpkg --set-selections
echo "python-django-common hold" | sudo dpkg --set-selections
echo "python-django-doc hold" | sudo dpkg --set-selections
Optimizing server
I've tweaked the apache and mysql settings to make them a bit more suitable for a small machine. Seems to have shaved 200MB or so off the idling footprint.
https://www.narga.net/optimizing-apachephpmysql-low-memory-server/
(just discovered 'ab' for running apache performance tests - handy).
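A typical invocation looks like the line below, purely as an example (the numbers are arbitrary, not a benchmark we actually ran):
ab -n 200 -c 10 http://localhost/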
Do the edit to site-packages/django/db/backends/mysql/base.py
to comment out the requirement for mysqlclient >1.3.13
as we run perfectly happily with Django 2.2.19 & mysqlclient 1.3.10:
version = Database.version_info
#test nobbled by Wookey 2021-04-08 as 1.3.13 is not available on stable
#if version < (1, 3, 13):
# raise ImproperlyConfigured('mysqlclient 1.3.13 or newer is required; you have %s.' % Database.__version__)

View File

@@ -1,9 +0,0 @@
Django==1.7.11
django-registration==2.1.2
mysql
#imagekit
django-imagekit
Image
django-tinymce==2.7.0
smartencoding
unidecode

View File

@@ -1,200 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets will be imported from credentials.py in future
SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default
ADMINS = (
('Philip', 'philip.sargent@klebos.eu'),
)
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
# FILES = Path('/mnt/d/expofiles/')
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"
PHOTOS_YEAR = "2023"
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
LOGFILE = PYTHON_PATH / "troggle.log"
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = "/site-media/"
DIR_ROOT = Path("") # this should end in / if a value is given
URL_ROOT = "/"
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
# Note that these constants are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
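# Illustration (not in the original file) of why the str() + "/" fix-up further down is needed:
# pathlib drops the trailing slash, e.g. str(Path("/", "/static/")) == "/static",
# so str(STATIC_URL) + "/" is what restores "/static/" for Django.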
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
# executables:
CAVERN = "cavern" # for parsing .svx files and producing .3d files
SURVEXPORT = "survexport" # for parsing .3d files and producing .pos files
DBSQLITE = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"NAME": SQLITEFILE,
# 'NAME' : ':memory:',
"USER": "expo", # Not used with sqlite3.
"PASSWORD": "sekrit", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
"default": {
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4",
},
"NAME": "troggle", # Or path to database file if using sqlite3.
"USER": "expo",
"PASSWORD": MARIADB_SERVER_PASSWORD,
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_PATH],
"OPTIONS": {
"debug": "DEBUG",
"context_processors": [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
"core.context.troggle_context", # in core/context.py - only used in expedition.html
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media", # includes a variable MEDIA_URL
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
"django.template.context_processors.tz",
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
"django.contrib.messages.context_processors.messages",
],
"loaders": [
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
],
},
},
]
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
# EXPOWEB_URL = "" # defunct, removed.
# SCANS_URL = '/survey_scans/' # defunct, removed.
sys.path.append(str(REPOS_ROOT_PATH))
sys.path.append(str(PYTHON_PATH))
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
# TEST_RUNNER = "django.test.runner.DiscoverRunner"
print(" + finished importing troggle/localsettings.py")

View File

@@ -1,22 +0,0 @@
#!/bin/bash
# Run this in a terminal : 'bash os-survey.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install survex-aven -y
sudo apt install gpsprune qgis -y
cd ~/expo
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos

View File

@@ -1,92 +0,0 @@
#!/bin/bash
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
# sudo apt install python-is-python3 -y
python --version # ensure python is an alias for python3 not python2.7
ssh -V
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
# Already in Ubuntu 24.04 on WSL:
# sudo apt install git -y
# sudo apt install wget gpg
# sudo apt install sftp -y
# sudo apt install openssh-client -y
# sudo apt install rsync
# Now using uv not pip:
# sudo apt install python3-pip -y
sudo apt install sqlite3 -y
sudo apt install gedit -y
sudo apt install tig gitg meld -y
# python formatting https://docs.astral.sh/ruff/
sudo snap install ruff
# # do not actually use this any more
# sudo useradd expo
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# sudo apt install python3-distutils -y
# install uv
curl -LsSf https://astral.sh/uv/install.sh | sh
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start
# We don't install the later version or the earlier versions of python - for dev and "server mimic" environments
# we leave that to uv to install now.
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
# sudo apt install software-properties-common apt-transport-https
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
# sudo apt update
# sudo apt install code
mkdir ~/expo
cd ~/expo
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
echo "### because you can't clone the repos without a key"
git config --global user.email "philip.sargent@gmail.com"
git config --global user.name "Philip Sargent"
git config --global pull.rebase true
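# If you do not yet have a key registered with the expo server, one can be generated with
# something like the line below (illustrative; see the handbook key-exchange page,
# https://expo.survex.com/handbook/computing/keyexchange.html, for how it gets installed):
# ssh-keygen -t ed25519 -C "your.email@example.com"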
#Change this to clone using https?? at least for troggle?
git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings
mkdir expofiles
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos

View File

@@ -1,63 +0,0 @@
#! /bin/bash
# create and sanitise files for pushing to repo
# catastrophically forgot to sanitize localsettingsWSL.py - oops.
#Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
# Philip Sargent 2022/04/12
HOSTNAME=`hostname`
echo "** This copies file to _deploy/${HOSTNAME}/ !"
cd ..
cd troggle
echo `pwd`
echo deprecations.
PYTHON="uv run"
source .venv/bin/activate
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
deactivate
echo diffsettings.
rm diffsettings.txt
if test -f "diffsettings.txt"; then
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
exit
fi
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
$PYTHON manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettings-${HOSTNAME}.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
mkdir -p _deploy/${HOSTNAME}
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
cp *.sh _deploy/${HOSTNAME}
#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# $PYTHON reset-django.py
# $PYTHON manage.py makemigrations
# $PYTHON manage.py test
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py

View File

@@ -1,36 +0,0 @@
#! /bin/bash
# Do these before final testing, *not* just before pushing:
# Need to be in an ALREADY activated venv
PYTHON="python"
echo "** Run inspectdb:"
$PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -in "unable|error" troggle-inspectdb.py
echo ""
# count non-blank lines of python and template HTML code
# includes all variants of settings.py files
# fix this as core/utils.py has 28,000 lines of numbers.
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
echo "** Run reset-django.py - which deletes the database"
# This deletes the database so must run after generating troggle-inspectdb.py
$PYTHON reset-django.py
echo "** After cleanup deletion, remake all migrations."
$PYTHON manage.py makemigrations >/dev/null
$PYTHON manage.py migrate
echo "** Now running self check"
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
$PYTHON manage.py check -v 3 --deploy
echo "** Now running test suite"
# $PYTHON manage.py test -v 1
echo ""
echo `tail -1 lines-of-python.txt` non-comment lines of python. But core/utils.py has 28,000 lines of numbers.
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
echo '** If you have an error running manage.py, maybe you are not in an activated venv ?'

View File

@@ -1,53 +0,0 @@
#!/bin/bash
# now using uv, unbelievably simpler.
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
# Expects an Ubuntu 24.04 with all the gubbins already installed
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog24.04.sh running it in /home/username/
python3 --version
cd ~/expo/troggle
echo "-- EXPO folder [current directory]: `pwd`"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder: ${TROGDIR}"
cp dev.toml pyproject.toml
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
uv self update
uv sync
# fudge for philip's laptop prior to M2 SSD upgrade
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
uv pip list
echo "Django version:`uv run django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/expo/troggle'
'uv run django-admin'
'uv run manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
'uv run manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'uv run databaseReset.py reset INIT'
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi

View File

@@ -1,183 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
# MARIADB_SERVER_PASSWORD =
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets will be imported from credentials.py in future
SQLITEFILE = str(Path(__file__).parent.parent / "troggle.sqlite") # can be ':memory:'
print(f"SQLITEFILE is {SQLITEFILE}")
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default
ADMINS = (
('Philip', 'philip.sargent@klebos.eu'), # only on dev
)
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
sys.path.append(str(REPOS_ROOT_PATH))
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
PHOTOS_YEAR = "2025"
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
# URL that handles the media served from MEDIA_ROOT.
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
URL_ROOT = "/"
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
DBSQLITE = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"NAME": SQLITEFILE,
# 'NAME' : ':memory:',
"USER": "expo", # Not used with sqlite3.
"PASSWORD": "sekrit", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
"default": {
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4",
},
"NAME": "troggle", # Or path to database file if using sqlite3.
"USER": "expo",
"PASSWORD": MARIADB_SERVER_PASSWORD,
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_PATH],
"OPTIONS": {
"debug": "DEBUG",
"context_processors": [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
"core.context.troggle_context", # in core/context.py - only used in expedition.html
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media", # includes a variable MEDIA_URL
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
"django.template.context_processors.tz",
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
"django.contrib.messages.context_processors.messages",
],
"loaders": [
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
],
},
},
]
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
print(" + finished importing troggle/localsettings.py")

View File

@@ -1,183 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
# MARIADB_SERVER_PASSWORD =
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets will be imported from credentials.py in future
SQLITEFILE = str(Path(__file__).parent.parent / "troggle.sqlite") # can be ':memory:'
print(f"SQLITEFILE is {SQLITEFILE}")
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default
ADMINS = (
('Philip', 'philip.sargent@klebos.eu'), # only on dev
)
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
sys.path.append(str(REPOS_ROOT_PATH))
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
PHOTOS_YEAR = "2025"
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
# URL that handles the media served from MEDIA_ROOT.
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
URL_ROOT = "/"
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
DBSQLITE = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"NAME": SQLITEFILE,
# 'NAME' : ':memory:',
"USER": "expo", # Not used with sqlite3.
"PASSWORD": "sekrit", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
"default": {
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4",
},
"NAME": "troggle", # Or path to database file if using sqlite3.
"USER": "expo",
"PASSWORD": MARIADB_SERVER_PASSWORD,
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_PATH],
"OPTIONS": {
"debug": "DEBUG",
"context_processors": [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
"core.context.troggle_context", # in core/context.py - only used in expedition.html
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media", # includes a variable MEDIA_URL
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
"django.template.context_processors.tz",
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
"django.contrib.messages.context_processors.messages",
],
"loaders": [
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
],
},
},
]
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
print(" + finished importing troggle/localsettings.py")

View File

@@ -1,22 +0,0 @@
#!/bin/bash
# Run this in a terminal : 'bash os-survey.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-survey.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install survex-aven -y
sudo apt install gpsprune qgis -y
cd ~/expo
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos

View File

@@ -1,92 +0,0 @@
#!/bin/bash
# Run this in a terminal in your home directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
# sudo apt install python-is-python3 -y
python --version  # ensure python is an alias for python3 not python2.7
ssh -V
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
# Already in Ubuntu 24.04 on WSL:
# sudo apt install git -y
# sudo apt install wget gpg
# sudo apt install sftp -y
# sudo apt install openssh-client -y
# sudo apt install rsync
# Now using uv not pip:
# sudo apt install python3-pip -y
sudo apt install sqlite3 -y
sudo apt install gedit -y
sudo apt install tig gitg meld -y
# python formatting https://docs.astral.sh/ruff/
sudo snap install ruff
# # do not actually use this any more
# sudo useradd expo
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# sudo apt install python3-distutils -y
# install uv
curl -LsSf https://astral.sh/uv/install.sh | sh
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start
# We don't install the later version or the earlier versions of python - for dev and "server mimic" environments
# we leave that to uv to install now.
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
# sudo apt install software-properties-common apt-transport-https
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
# sudo apt update
# sudo apt install code
mkdir ~/expo
cd ~/expo
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
echo "### because you can't clone the repos without a key"
git config --global user.email "philip.sargent@gmail.com"
git config --global user.name "Philip Sargent"
git config --global pull.rebase true
#Change this to clone using https?? at least for troggle?
git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings
mkdir expofiles
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
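A quick post-install check - a sketch, assuming uv's installer has put uv on your PATH (you may need a fresh shell first) and that your ssh key is already registered on the server as per the handbook keyexchange page:
# sketch: confirm the tools and the ssh key work before relying on the clones above
uv --version
ssh expo@expo.survex.com exit && echo 'ssh key OK' || echo 'ssh key NOT set up yet'
git -C ~/expo/troggle status --short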

View File

@@ -1,63 +0,0 @@
#! /bin/bash
# create and sanitise files for pushing to repo
# catastrophically forgot to sanitize localsettingsWSL.py - oops.
# Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
# Philip Sargent 2022/04/12
HOSTNAME=`hostname`
echo "** This copies file to _deploy/${HOSTNAME}/ !"
cd ..
cd troggle
echo `pwd`
echo deprecations.
PYTHON="uv run"
source .venv/bin/activate
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
deactivate
echo diffsettings.
rm diffsettings.txt
if test -f "diffsettings.txt"; then
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
exit
fi
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
$PYTHON manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettings-${HOSTNAME}.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
mkdir -p _deploy/${HOSTNAME}
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
cp *.sh _deploy/${HOSTNAME}
#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# $PYTHON reset-django.py
# $PYTHON manage.py makemigrations
# $PYTHON manage.py test
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py
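A final sanity check before pushing - a sketch, assuming the sanitised copy now sits in _deploy/${HOSTNAME}/ as above; every match printed should be one of the placeholder strings, not a real secret:
# sketch: eyeball that only placeholder values survived the sed pass
grep -nE 'EXPOUSERPASS|EXPOADMINUSERPASS|EMAIL_HOST_PASSWORD|SECRET_KEY' \
    _deploy/$(hostname)/localsettings-$(hostname).py diffsettings.txt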

View File

@@ -1,36 +0,0 @@
#! /bin/bash
# Do these before final testing, *not* just before pushing:
# Changed to use uv not pip; requires manage.py to have the uv-structured script comment in it.
PYTHON="uv run"
echo "** Run inspectdb:"
$PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -in "unable|error" troggle-inspectdb.py
echo ""
# count non-blank lines of python and template HTML code
# includes all variants of settings.py files
# fix this as core/utils.py has 28,000 lines of numbers.
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
echo "** Run reset-django.py - which deletes the database"
# This deletes the database so must run after generating troggle-inspectdb.py
$PYTHON reset-django.py
echo "** After cleanup deletion, remake all migrations."
$PYTHON manage.py makemigrations >/dev/null
$PYTHON manage.py migrate
echo "** Now running self check"
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
$PYTHON manage.py check -v 3 --deploy
echo "** Now running test suite"
# $PYTHON manage.py test -v 1
echo ""
echo `tail -1 lines-of-python.txt` non-comment lines of python.
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
echo '** If you have an error running manage.py, maybe you are not in an activated venv ? or your manage.py is not managed by uv properly ?'
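One way to address the core/utils.py caveat noted in the comment above - a sketch, leaving the rest of the counting pipeline as it is:
# sketch: exclude the data-heavy core/utils.py from the python line count
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" |
  grep -v "/migrations/" | grep -v "troggle-inspectdb.py" | grep -v "core/utils.py" |
  awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}' | sort -n > lines-of-python.txt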

View File

@@ -1,53 +0,0 @@
#!/bin/bash
# now using uv, unbelievably simpler.
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
# Expects an Ubuntu 24.04 with all the gubbins already installed
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog24.04.sh, running it in /home/username/
python3 --version
cd ~/expo/troggle
echo "-- EXPO folder [current directory]: `pwd`"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder: ${TROGDIR}"
cp dev.toml pyproject.toml
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
uv self update
uv sync
# fudge for philip's laptop prior to M2 SSD upgrade
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
uv pip list
echo "Django version:`uv run django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/expo/troggle'
'uv run django-admin'
'uv run manage.py check'
## this tests if you have set up ssh correctly. Refer to the documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
'uv run manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'uv run databaseReset.py reset INIT'
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi

View File

@@ -1,24 +0,0 @@
2024-12-15 Philip Sargent
You will need your own localsettings.py, but the copies in these
subdirectories are all out of date except for the one in /wsl/ . So copy that
one to /troggle/ and edit it to suit your own machine and file locations
(a sketch of this follows the list below).
- settings.py is common to all configurations,
but these are different:
- localsettings.py
- dev.toml
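In practice that copy-and-adapt step looks something like this - a minimal sketch, assuming the ~/expo/troggle layout and the uv-based workflow used in the scripts above:
# sketch: start from the maintained WSL variant, then adapt it to this machine
cd ~/expo/troggle
cp _deploy/wsl/localsettingsWSL.py localsettings.py
# edit EXPOFILES, SQLITEFILE, DBSWITCH etc. to match your own machine, then:
uv run manage.py check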
2023-07-17 Philip Sargent
Trying to sort out configurations, as we got into a bit of a mess on
Expo in the last couple of weeks with the two (notionally identical Debian
Bullseye) expo laptops: Crowley (which has troggle installed locally and
can run it) and Aziraphale (which has a local copy of the troggle repo but
is not configured to run it); plus Martin Green's laptop (Ubuntu 22.04.2)
and Philip's Barbie laptop (Ubuntu 22.04.3). And of course the server itself,
expo.survex.com, which is running Debian Bullseye. But most recent development
has been done on Philip's two other machines, a desktop and a PC,
both running Ubuntu on WSL on Windows and both using venv environments,
as Crowley also does.

View File

@@ -1,3 +0,0 @@
The copy in this /_deploy/ folder may not be the latest if active development
has been going on in the parent folder. Check there for a later copy of
the localsettingsWSL file.

View File

@@ -1,18 +0,0 @@
#! /bin/bash
echo troggle
cd ~/expo/troggle
git pull
echo expoweb
cd ../expoweb
git pull
echo drawings
cd ../drawings
git pull
echo loser
cd ../loser
git pull
cd ../troggle
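The same update written as a loop - a minimal sketch, equivalent in effect to the script above:
#! /bin/bash
# sketch: pull all four expo repositories from ~/expo in one loop
for repo in troggle expoweb drawings loser; do
    echo "$repo"
    (cd ~/expo/"$repo" && git pull)
done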

View File

@@ -1,182 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
# MARIADB_SERVER_PASSWORD =
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets will be imported from credentials.py in future
SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default
ADMINS = (
('Philip', 'philip.sargent@klebos.eu'), # only on dev
)
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
sys.path.append(str(REPOS_ROOT_PATH))
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
PHOTOS_YEAR = "2025"
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
# URL that handles the media served from MEDIA_ROOT.
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
URL_ROOT = "/"
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
DBSQLITE = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"NAME": SQLITEFILE,
# 'NAME' : ':memory:',
"USER": "expo", # Not used with sqlite3.
"PASSWORD": "sekrit", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
"default": {
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4",
},
"NAME": "troggle", # Or path to database file if using sqlite3.
"USER": "expo",
"PASSWORD": MARIADB_SERVER_PASSWORD,
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_PATH],
"OPTIONS": {
"debug": "DEBUG",
"context_processors": [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
"core.context.troggle_context", # in core/context.py - only used in expedition.html
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media", # includes a variable MEDIA_URL
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
"django.template.context_processors.tz",
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
"django.contrib.messages.context_processors.messages",
],
"loaders": [
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
],
},
},
]
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
print(" + finished importing troggle/localsettings.py")

View File

@@ -1,182 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
from secret_credentials import *
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
# MARIADB_SERVER_PASSWORD =
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets will be imported from credentials.py in future
SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default
ADMINS = (
('Philip', 'philip.sargent@klebos.eu'), # only on dev
)
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
sys.path.append(str(REPOS_ROOT_PATH))
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
PHOTOS_YEAR = "2025"
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
# URL that handles the media served from MEDIA_ROOT.
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
URL_ROOT = "/"
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
DBSQLITE = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"NAME": SQLITEFILE,
# 'NAME' : ':memory:',
"USER": "expo", # Not used with sqlite3.
"PASSWORD": "sekrit", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
"default": {
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
"OPTIONS": {
"charset": "utf8mb4",
},
"NAME": "troggle", # Or path to database file if using sqlite3.
"USER": "expo",
"PASSWORD": MARIADB_SERVER_PASSWORD,
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [TEMPLATE_PATH],
"OPTIONS": {
"debug": "DEBUG",
"context_processors": [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
"core.context.troggle_context", # in core/context.py - only used in expedition.html
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media", # includes a variable MEDIA_URL
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
"django.template.context_processors.tz",
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
"django.contrib.messages.context_processors.messages",
],
"loaders": [
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
],
},
},
]
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
# 'plugins': "table,spellchecker,paste,searchreplace",
# 'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
print(" + finished importing troggle/localsettings.py")

View File

@@ -1,22 +0,0 @@
#!/bin/bash
# Run this in a terminal : 'bash os-survey.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-survey.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install survex-aven -y
sudo apt install gpsprune qgis -y
cd ~/expo
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos

View File

@@ -1,92 +0,0 @@
#!/bin/bash
# Run this in a terminal in your home directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12
# sudo apt install python-is-python3 -y
python --version  # ensure python is an alias for python3 not python2.7
ssh -V
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
# Already in Ubuntu 24.04 on WSL:
# sudo apt install git -y
# sudo apt install wget gpg
# sudo apt install sftp -y
# sudo apt install openssh-client -y
# sudo apt install rsync
# Now using uv not pip:
# sudo apt install python3-pip -y
sudo apt install sqlite3 -y
sudo apt install gedit -y
sudo apt install tig gitg meld -y
# python formatting https://docs.astral.sh/ruff/
sudo snap install ruff
# # do not actually use this any more
# sudo useradd expo
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# sudo apt install python3-distutils -y
# install uv
curl -LsSf https://astral.sh/uv/install.sh | sh
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start
# We don't install the later version or the earlier versions of python - for dev and "server mimic" environments
# we leave that to uv to install now.
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
# sudo apt install software-properties-common apt-transport-https
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
# sudo apt update
# sudo apt install code
mkdir ~/expo
cd ~/expo
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
echo "### because you can't clone the repos without a key"
git config --global user.email "philip.sargent@gmail.com"
git config --global user.name "Philip Sargent"
git config --global pull.rebase true
#Change this to clone using https?? at least for troggle?
git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings
mkdir expofiles
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos

View File

@@ -1,62 +0,0 @@
#! /bin/bash
# create and sanitise files for pushing to repo
# catastrophically forgot to sanitize localsettingsWSL.py - oops.
# Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
# Philip Sargent 2022/04/12
echo "** This copies file to _deploy/wsl/ !"
HOSTNAME=`hostname`
echo "** This copies file to _deploy/${HOSTNAME}/ !"
cd ..
cd troggle
echo `pwd`
echo deprecations.
PYTHON="uv run"
source .venv/bin/activate
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
deactivate
echo diffsettings.
rm diffsettings.txt
if test -f "diffsettings.txt"; then
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
exit
fi
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
$PYTHON manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettingsWSL.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettingsWSL.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettingsWSL.py
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettingsWSL.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettingsWSL.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
mv _deploy/wsl/localsettingsWSL.py _deploy/wsl/localsettingsWSL.py.bak
mv localsettingsWSL.py _deploy/wsl
cp *.sh _deploy/wsl
#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# $PYTHON reset-django.py
# $PYTHON manage.py makemigrations
# $PYTHON manage.py test
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py

View File

@@ -1,36 +0,0 @@
#! /bin/bash
# Do these before final testing, *not* just before pushing:
# Need to be in an ALREADY activated venv
PYTHON="python"
echo "** Run inspectdb:"
$PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -in "unable|error" troggle-inspectdb.py
echo ""
# count non-blank lines of python and template HTML code
# includes all variants of settings.py files
# fix this as core/utils.py has 28,000 lines of numbers.
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
echo "** Run reset-django.py - which deletes the database"
# This deletes the database so must run after generating troggle-inspectdb.py
$PYTHON reset-django.py
echo "** After cleanup deletion, remake all migrations."
$PYTHON manage.py makemigrations >/dev/null
$PYTHON manage.py migrate
echo "** Now running self check"
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
$PYTHON manage.py check -v 3 --deploy
echo "** Now running test suite"
# $PYTHON manage.py test -v 1
echo ""
echo `tail -1 lines-of-python.txt` non-comment lines of python. But core/utils.py has 28,000 lines of numbers.
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
echo '** If you have an error running manage.py, maybe you are not in an activated venv ?'

View File

@@ -1,12 +0,0 @@
asgiref==3.3.4
confusable-homoglyphs==3.2.0
coverage==5.5
Django==3.2.12
docutils==0.14
gunicorn==20.1.0
Pillow==9.0.1
pytz==2019.1
reportlab==3.6.8
sqlparse==0.2.4
typing-extensions==3.7.4.3
Unidecode==1.0.23

View File

@@ -1,20 +0,0 @@
# Philip bleeding edge config
asgiref==3.6.0
beautifulsoup4==4.12.2
black==23.1.0
click==8.1.3
coverage==7.1.0
Django==4.2
docutils==0.19
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
Pillow==9.4.0
platformdirs==3.0.0
pytz==2022.7
ruff==0.0.245
soupsieve==2.4.1
sqlparse==0.4.3
Unidecode==1.3.6
piexif==1.1.3

View File

@@ -1,44 +0,0 @@
For debian bullseye (11)
python3-django (3.2.12)
tinymce (from where?)
mariadb-server
apache2
libapache2-mod-wsgi-py3
python3-django-registration ?
Django==1.7
django-extensions==2.2.9
django-registration==2.0
django-tinymce==2.0.1
six==1.14.0
Unidecode==1.1.1 python3-unidecode
Pillow==7.1.2 python3-willow
asgiref==3.7.2
attrs==22.2.0
beautifulsoup4==4.12.2
black==23.11.0
bs4==0.0.1
click==8.1.3
colorama==0.4.6
coverage==6.5.0
Django==5.0
docutils==0.20
interrogate==1.5.0
isort==5.11.4
mypy-extensions==0.4.3
packaging==23.2
pathspec==0.10.3
piexif==1.1.3
Pillow==10.1.0
platformdirs==2.6.2
py==1.11.0
pytz==2022.6
ruff==0.1.0
soupsieve==2.5
sqlparse==0.4.0
tabulate==0.9.0
toml==0.10.2
typing_extensions==4.4.0
Unidecode==1.3.6

View File

@@ -1,17 +0,0 @@
asgiref==3.6.0
black==23.1.0
click==8.1.3
coverage==7.1.0
Django==4.2
docutils==0.19
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
Pillow==9.4.0
platformdirs==3.0.0
pytz==2022.7
ruff==0.0.245
sqlparse==0.4.3
tomli==2.0.1
Unidecode==1.3.6

View File

@@ -1,9 +0,0 @@
asgiref==3.5.2
coverage==6.5.0
Django==3.2.16
docutils==0.19
Pillow==9.3.0
pytz==2022.6
sqlparse==0.4.3
typing_extensions==4.4.0
Unidecode==1.3.6

View File

@@ -1,3 +0,0 @@
asgiref==3.8.1
Django==5.1b1
sqlparse==0.5.1

View File

@@ -1,21 +0,0 @@
asgiref==3.6.0
beautifulsoup4==4.12.0
black==23.1.0
chardet==5.1.0
click==8.1.0
coverage==7.1.0
deptry==0.12.0
Django==4.2
docutils==0.19
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
piexif==1.1.3
Pillow==9.4.0
platformdirs==3.0.0
pytz==2022.7
ruff==0.0.245
soupsieve==2.5
sqlparse==0.4.0
Unidecode==1.3.0

View File

@@ -1,21 +0,0 @@
# to be used with a pre-release Django install which installs other stuff too
beautifulsoup4==4.12
piexif==1.1
black==23.1
chardet==5.1
click==8.1
coverage==7.1
deptry==0.12
docutils==0.19
isort==5.12
mypy-extensions==1.0
packaging==23.0
pathspec==0.11
Pillow==9.4.0
platformdirs==3.0
pytz==2022.7
ruff==0.0.245
setuptools==67.7
soupsieve==2.5
Unidecode==1.3
piexif==1.1

View File

@@ -1,20 +0,0 @@
Pillow==10.2
Unidecode==1.3.8
asgiref==3.6
beautifulsoup4==4.12
black==24.2
chardet==5.2
click==8.1
coverage==7
deptry==0.12.0
docutils==0.20
isort==5
mypy-extensions==1.0
packaging==23
pathspec==0.12
platformdirs==4
pytz==2024.1
ruff==0.2
soupsieve==2.5
sqlparse
piexif

View File

@@ -1,21 +0,0 @@
asgiref==3.7.0
beautifulsoup4==4.12.0
black==23.3.0
click==8.1.3
coverage==7.2.0
Django==4.2
docutils==0.20
isort==5.12.0
mypy-extensions==1.0.0
packaging==23.0
pathspec==0.11.0
Pillow==10.0.0
pkg_resources==0.0.0
platformdirs==3.8.0
pytz==2023.3
ruff==0.0.245
soupsieve==2.4.1
sqlparse==0.4.0
tomli==2.0.1
typing_extensions==4.7.1
Unidecode==1.3.6

View File

@@ -1,17 +0,0 @@
asgiref==3.3.4
confusable-homoglyphs==3.2.0
Django==3.2
docutils==0.14
gunicorn==20.1.0
Pillow==5.4.1
sqlparse==0.2.4
typing-extensions==3.7.4.3
Unidecode==1.0.23
mariadb==1.0.11
mysql-connector-python==8.0.29
mysqlclient==2.1.0
Pillow==9.1.0
pytz==2022.5
asgiref==3.5.0
gunicorn==20.1.0

View File

@@ -1,184 +0,0 @@
#!/bin/bash
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog.sh
# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
# NOW we set up troggle
PYTHON=python3.11
VENAME=p11d32 # python3.x and django 4.2
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"
REQUIRE=requirements-$VENAME.txt
if [ ! -f $REQUIRE ]; then
echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
$PYTHON --version
# NOTE that when using a later or earlier version of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venv
cd ~
if [ ! -d $VENAME ]; then
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
$PYTHON -m venv $VENAME
else
echo "## /$VENAME/ already exists ! Delete it first."
exit 1
fi
# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"
cd $VENAME
echo "-- now in: ${PWD}"
ls -tlarg
source bin/activate
echo $VIRTUAL_ENV
if [ -d ~/$VENAME/bin ]; then
echo "### Activating."
else
echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
exit 1
fi
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
# update local version of setuptools, more recent than OS version, needed for packages without wheels
echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools
PIP=pip
$PIP list > original-pip.list
$PIP freeze >original.txt
# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings
#ln -s ${TROGDIR}/../expofiles expofiles
# fudge for philip's machine
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
if [ -d ${TROGDIR}/../expofiles ]; then
ln -s ${TROGDIR}/../expofiles expofiles
else
ln -s /mnt/d/EXPO/expofiles expofiles
fi
echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
sudo chmod -R 777 *
echo "### links to expoweb, troggle etc. complete:"
ls -tla
echo "###"
echo "### now installing ${TROGDIR}/${REQUIRE}"
echo "###"
cat ${TROGDIR}/${REQUIRE}
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
# seen on wsl2 as well as wsl1
# which ALSO ruins EXISTING permissions !
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
read -p "Press any key to resume ..."
$PIP install -r ${TROGDIR}/${REQUIRE}
echo "### install from ${TROGDIR}/${REQUIRE} completed."
echo '### '
$PIP install --pre django
$PIP freeze > $REQUIRE
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort $REQUIRE >2
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
rm 1
rm 2
# cp $REQUIRE requirements-$VENAME.txt
cp $REQUIRE troggle/$REQUIRE
$PIP list > installed-pip.list
$PIP list -o > installed-pip-o.list
REQ=installation-record
mkdir $REQ
mv original.txt $REQ
mv $REQUIRE $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
cp fresh-$REQUIRE ../$REQUIRE
mv fresh-$REQUIRE $REQ
cp troggle/`basename "$0"` $REQ
$PYTHON --version
python --version
echo "Django version:`django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correctly. Refer to the documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors
## So you will need to run
'sudo chown -Rhv philip:philip ~/$VENAME' (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chown only takes effect then.
'python manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi

View File

@@ -1,184 +0,0 @@
#!/bin/bash
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog.sh
# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
# NOW we set up troggle
PYTHON=python3.11
VENAME=p11d42 # python3.x and django 4.2
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"
REQUIRE=requirements-$VENAME.txt
if [ ! -f $REQUIRE ]; then
echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
$PYTHON --version
# NOTE that when using a later or earlier version of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venv
cd ~
if [ ! -d $VENAME ]; then
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
$PYTHON -m venv $VENAME
else
echo "## /$VENAME/ already exists ! Delete it first."
exit 1
fi
# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"
cd $VENAME
echo "-- now in: ${PWD}"
ls -tlarg
source bin/activate
echo $VIRTUAL_ENV
if [ -d ~/$VENAME/bin ]; then
echo "### Activating."
else
echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
exit 1
fi
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
# update local version of setuptools, more recent than OS version, needed for packages without wheels
echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools
PIP=pip
$PIP list > original-pip.list
$PIP freeze >original.txt
# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings
#ln -s ${TROGDIR}/../expofiles expofiles
# fudge for philip's machine
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
if [ -d ${TROGDIR}/../expofiles ]; then
ln -s ${TROGDIR}/../expofiles expofiles
else
ln -s /mnt/d/EXPO/expofiles expofiles
fi
echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
sudo chmod -R 777 *
echo "### links to expoweb, troggle etc. complete:"
ls -tla
echo "###"
echo "### now installing ${TROGDIR}/${REQUIRE}"
echo "###"
cat ${TROGDIR}/${REQUIRE}
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
# seen on wsl2 as well as wsl1
# which ALSO ruins EXISTING permissions !
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
read -p "Press any key to resume ..."
$PIP install -r ${TROGDIR}/${REQUIRE}
echo "### install from ${TROGDIR}/${REQUIRE} completed."
echo '### '
$PIP install --pre django
$PIP freeze > $REQUIRE
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort $REQUIRE >2
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
rm 1
rm 2
# cp $REQUIRE requirements-$VENAME.txt
cp $REQUIRE troggle/$REQUIRE
$PIP list > installed-pip.list
$PIP list -o > installed-pip-o.list
REQ=installation-record
mkdir $REQ
mv original.txt $REQ
mv $REQUIRE $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
cp fresh-$REQUIRE ../$REQUIRE
mv fresh-$REQUIRE $REQ
cp troggle/`basename "$0"` $REQ
$PYTHON --version
python --version
echo "Django version:`django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors
## So you will need to run
'sudo chown -Rhv philip:philip ~/$VENAME' (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chown only takes effect then.
'python manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi

View File

@@ -1,190 +0,0 @@
#!/bin/bash
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog.sh
# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.12
# NOW we set up troggle
PYTHON=python3.12
VENAME=p12d5 # python3.x and django version
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"
if [[ "${VENAME:(-1)}" == 5 ]]; then
echo "The variable '$VENAME' ends in 5."
else
echo "The variable '$VENAME' does not end in 5."
fi
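# A quick illustration (for the reader, not executed) of the test above:
# "${VENAME:(-1)}" is bash substring expansion taking the last character, so
#   VENAME=p12d5  ->  "${VENAME:(-1)}" is "5"
# and that trailing 5 is what later triggers the pre-release Django install below.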
REQUIRE=requirements-$VENAME.txt
if [ ! -f $REQUIRE ]; then
echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
$PYTHON --version
# NOTE that when using a later or earlier version of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venv
cd ~
if [ ! -d $VENAME ]; then
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.12-venv installed and/or use a Ubuntu window)"
$PYTHON -m venv $VENAME
else
echo "## /$VENAME/ already exists ! Delete it first."
exit 1
fi
# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"
cd $VENAME
echo "-- now in: ${PWD}"
ls -tlarg
source bin/activate
echo $VIRTUAL_ENV
if [ -d ~/$VENAME/bin ]; then
echo "### Activating."
else
echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
exit 1
fi
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
# update local version of setuptools, more recent than OS version, needed for packages without wheels
echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools
$PYTHON -m pip list > original-pip.list
$PYTHON -m pip freeze >original.txt
# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings
#ln -s ${TROGDIR}/../expofiles expofiles
# fudge for philip's machine
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
if [ -d ${TROGDIR}/../expofiles ]; then
ln -s ${TROGDIR}/../expofiles expofiles
else
ln -s /mnt/d/EXPO/expofiles expofiles
fi
echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
sudo chmod -R 777 *
echo "### links to expoweb, troggle etc. complete:"
ls -tla
echo "###"
echo "### now installing ${TROGDIR}/${REQUIRE}"
echo "###"
cat ${TROGDIR}/${REQUIRE}
cp -f ${TROGDIR}/${REQUIRE} ${TROGDIR}/${REQUIRE}.orig
read -p "Press any key to resume ..."
$PYTHON -m pip install -r ${TROGDIR}/${REQUIRE}
echo "### install from ${TROGDIR}/${REQUIRE} completed."
echo '### '
# this installs pre-release django 5.0
if [[ "${VENAME:(-1)}" == 5 ]]; then
echo "### Installing pre-release version of Django"
$PYTHON -m pip install --pre django
fi
$PYTHON -m pip freeze > $REQUIRE.freeze
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort $REQUIRE.freeze >2
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
rm 1
rm 2
# cp $REQUIRE requirements-$VENAME.txt
cp $REQUIRE troggle/$REQUIRE
$PYTHON -m pip list > installed-pip.list
$PYTHON -m pip list -o > installed-pip-o.list
REQ=installation-record
mkdir $REQ
mv $REQUIRE.freeze $REQ
mv original.txt $REQ
mv $REQUIRE $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
# cp fresh-$REQUIRE ../$REQUIRE
mv fresh-$REQUIRE $REQ
cp troggle/`basename "$0"` $REQ
$PYTHON --version
python --version
echo "Django version:`django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors
## So you will need to run
'sudo chown -Rhv philip:philip ~/$VENAME' (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chown only takes effect then.
'python manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi

View File

@@ -1,53 +0,0 @@
#!/bin/bash
# now using uv, unbelievably simpler.
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
# Expects an Ubuntu 24.04 with all the gubbins already installed
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog24.04.sh, running it in /home/username/
python3 --version
cd ~/expo/troggle
echo "-- EXPO folder [current directory]: `pwd`"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder: ${TROGDIR}"
cp dev.toml pyproject.toml
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
uv self update
uv sync
# fudge for philip's laptop prior to M2 SSD upgrade
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
uv pip list
echo "Django version:`uv run django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/expo/troggle'
'uv run django-admin'
'uv run manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
'uv run manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'uv run databaseReset.py reset INIT'
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi

View File

@@ -1,181 +0,0 @@
import sys
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
#-----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
#-----------------------------------------------------------------
# default values, real secrets imported from credentials.py
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = '8000' # not needed
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / 'lib' / PV
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
# EXPOFILES = REPOS_ROOT_PATH / "expofiles"
EXPOFILES = Path('/media/philip/sd-huge1/cucc-expo/expofiles/')
SCANS_ROOT = EXPOFILES / 'surveyscans'
PHOTOS_ROOT = EXPOFILES / 'photos'
PHOTOS_YEAR = "2023"
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
# PYTHON_PATH = os.fspath(PYTHON_PATH)
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
LOGFILE = PYTHON_PATH / "troggle.log"
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = '/site-media/'
DIR_ROOT = Path("") #this should end in / if a value is given
URL_ROOT = '/'
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
# Note that these constants are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
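# A minimal sketch (illustrative only, not used at runtime) of why pathlib is
# awkward for URLs: a later absolute segment replaces URL_ROOT and str() drops
# the trailing slash, e.g.
#   str(Path('/', '/site_media/'))  ->  '/site_media'
# which is why STATIC_URL and MEDIA_URL get a trailing "/" re-appended at the
# bottom of this file.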
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
# executables:
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
DBSQLITE = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME' : 'troggle.sqlite',
'USER' : 'expo', # Not used with sqlite3.
'PASSWORD' : 'sekrit', # Not used with sqlite3.
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'expo',
'PASSWORD' : 'my-secret-password-schwatzmooskogel',
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
"DIRS": [TEMPLATE_PATH],
'OPTIONS': {
'debug': 'DEBUG',
'context_processors': [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
'core.context.troggle_context', # in core/troggle.py - only used in expedition.html
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media', # includes a variable MEDIA_URL
'django.template.context_processors.static', # includes a variable STATIC_URL used by admin pages
'django.template.context_processors.tz',
'django.template.context_processors.request', # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader', # default location is troggle/templates/
'django.template.loaders.app_directories.Loader', # needed for admin 'app'
],
},
},
]
EXPOUSER = "expo"
EXPOUSER_EMAIL = "philip.sargent@gmail.com"
EXPOADMINUSER = "expoadmin"
EXPOADMINUSER_EMAIL = "philip.sargent@gmail.com"
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "django-test@klebos.net"
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
EXPOWEB_URL = ''
# SCANS_URL = '/survey_scans/' # defunct, removed.
sys.path.append(str(REPOS_ROOT_PATH))
sys.path.append(str(PYTHON_PATH))
# Sanitise these to be strings as all other code is expecting strings
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
#CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
#ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"

View File

@@ -1,196 +0,0 @@
import sys
import os
import urllib.parse
from pathlib import Path
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.
It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.
This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.
Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
print(" * importing troggle/localsettings.py")
#-----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
#-----------------------------------------------------------------
# default values, real secrets imported from credentials.py
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = '8000' # not needed
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / 'lib' / PV
#LIBDIR = REPOS_ROOT_PATH / 'lib' / 'python3.9' # should be finding this automatically: python --version etc.
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
MEDIA_ROOT = TROGGLE_PATH / 'media'
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
#FILES = Path('/mnt/d/expofiles/')
EXPOFILES = Path('/media/philip/sd-huge1/cucc-expo/expofiles/')
SCANS_ROOT = EXPOFILES / 'surveyscans'
PHOTOS_ROOT = EXPOFILES / 'photos'
PHOTOS_YEAR = "2022"
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
MEDIA_URL = '/site-media/'
DIR_ROOT = ''#this should end in / if a value is given
URL_ROOT = '/'
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
#Note that these constants are not actually used in urls.py, they should be..
MEDIA_URL = urllib.parse.urljoin(URL_ROOT , '/site_media/')
SCANS_URL = urllib.parse.urljoin(URL_ROOT , '/survey_scans/')
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT , '/photos/')
SVX_URL = urllib.parse.urljoin(URL_ROOT , '/survex/')
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # used for CaveViewer JS utility
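# A minimal sketch (illustrative only) of how urljoin treats these arguments:
# a second argument beginning with '/' is an absolute path, so it replaces any
# path in URL_ROOT rather than being appended to it, e.g.
#   urllib.parse.urljoin('/', '/site_media/')                      -> '/site_media/'
#   urllib.parse.urljoin('http://localhost:8000/', '/site_media/') -> 'http://localhost:8000/site_media/'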
#STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------
PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types
# executables:
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
DBSQLITE = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME' : 'troggle.sqlite',
# 'NAME' : ':memory:',
'USER' : 'expo', # Not used with sqlite3.
'PASSWORD' : 'sekrit', # Not used with sqlite3.
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
DBMARIADB = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME' : 'troggle', # Or path to database file if using sqlite3.
'USER' : 'expo',
'PASSWORD' : 'my-secret-password-schwatzmooskogel',
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
}
}
# default database for me is sqlite
DBSWITCH = "sqlite"
if DBSWITCH == "sqlite":
DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
DATABASES = DBMARIADB
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
PYTHON_PATH = REPOS_ROOT_PATH / 'troggle'
sys.path.append(os.fspath(REPOS_ROOT_PATH))
sys.path.append(os.fspath(PYTHON_PATH))
LOGFILE = PYTHON_PATH / 'troggle.log'
PYTHON_PATH = os.fspath(PYTHON_PATH)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
TEMPLATE_PATH
],
'OPTIONS': {
'debug': 'DEBUG',
'context_processors': [
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
'core.context.troggle_context', # in core/troggle.py - only used in expedition.html
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media', # includes a variable MEDIA_URL
'django.template.context_processors.static', # includes a variable STATIC_URL used by admin pages
'django.template.context_processors.tz',
'django.template.context_processors.request', # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader', # default location is troggle/templates/
'django.template.loaders.app_directories.Loader', # needed for admin 'app'
]
},
},
]
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
EXPOUSER = 'expo'
EXPOUSER_EMAIL = 'philip.sargent@gmail.com'
EXPOADMINUSER = 'expoadmin'
EXPOADMINUSER_EMAIL = 'philip.sargent@gmail.com'
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
EMAIL_PORT=587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = 'django-test@klebos.net'
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
#SURVEYS = REPOS_ROOT_PATH
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
EXPOWEB_URL = ''
# SCANS_URL = '/survey_scans/' # defunct, removed.
# Sanitise these to be strings as all other code is expecting strings
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
LOGFILE = os.fspath(LOGFILE)
#SURVEYS = os.fspath(SURVEYS)
EXPOWEB = os.fspath(EXPOWEB)
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
SURVEX_DATA = os.fspath(SURVEX_DATA)
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
TEMPLATE_PATH = os.fspath(TEMPLATE_PATH)
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
SCANS_ROOT = os.fspath(SCANS_ROOT)

View File

@@ -1,112 +0,0 @@
#!/bin/bash
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in the troggle directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 22.04 relatively clean install.
sudo apt install python-is-python3 -y
python --version # ensure python is an alias for python3 not python2.7
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
sudo apt install sqlite3 -y
sudo apt install python3-pip -y
# this installs a shed-load of other stuff: binutils etc.
sudo apt install survex-aven -y
sudo apt install git openssh-client -y
# On a clean debian 11 (bullseye) installation with Xfce & ssh,
#on ubuntu 20.04:
#Package sftp is not available, but is referred to by another package.
#This may mean that the package is missing, has been obsoleted, or
#is only available from another source
#E: Package 'sftp' has no installation candidate
# On Ubuntu 20.04, with python10, the pip install fails.
# So you need to get the pip from source
# sudo curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
# but really you should be using 22.04
# and also, if using debian,
# sudo python3.10 -m pip install -U virtualenv
# do not actually use this any more
sudo useradd expo
sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# default since 22.04
# sudo apt install python3.10
sudo apt install python3.11-venv -y
sudo apt install python3.11-dev -y
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y
sudo python -m pip install --upgrade pip
sudo apt install sftp -y
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install gedit
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start
echo "### python 3.12"
sudo add-apt-repository ppa:deadsnakes/ppa -y
sudo apt update
sudo apt install python3.12-full -y
sudo apt install python3.12-distutils -y
sudo apt install python3.12-venv -y
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.12 7
sudo update-alternatives --config python
sudo rm /usr/bin/python3
sudo ln -s /etc/alternatives/python /usr/bin/python3
sudo apt dist-upgrade
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
sudo apt install software-properties-common apt-transport-https wget gpg
wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
sudo apt update
sudo apt install code
#this next crashes, why?
#sudo python -m pip install --upgrade pip
sudo apt install gpsprune qgis gedit tig gitg meld rsync
git config --global user.email "you@example.com"
git config --global user.name "Your Name"
git config --global pull.rebase true
echo '###'
echo '### Currently set version of python'
python --version
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
echo "### because you can't clone the repos without a key"
# cd ~/expo
git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings
mkdir expofiles
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" expo@expo.survex.com:expofiles/ expofiles
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.s^Cvex.com:expofiles/photos/ expofiles/photos

View File

@@ -1,46 +0,0 @@
#! /bin/sh
# create and sanitise files for pushing to repo, for Babie laptop
echo deprecations.
python -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
echo diffsettings.
rm diffsettings.txt
if test -f "diffsettings.txt"; then
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
exit
fi
python manage.py diffsettings | grep "###" > diffsettings.txt
echo pip freeze.
pip freeze > requirements.txt
echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
python manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettingsXubuntu.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettingsXubuntu.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
mv _deploy/xubuntu/localsettingsXubuntu.py _deploy/xubuntu/localsettingsXubuntu.py.bak
mv localsettingsXubuntu.py _deploy/xubuntu
#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# python reset-django.py
# python manage.py makemigrations
# python manage.py test
# python manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py

View File

@@ -1,192 +0,0 @@
#!/bin/bash
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog.sh
# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.12
# NOW we set up troggle
PYTHON=python3.12
VENAME=p12d5 # python3.x and django 5
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"
REQUIRE=requirements-$VENAME.txt
if [ ! -f $REQUIRE ]; then
echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
$PYTHON --version
# NOTE that when using a later or earlier version of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venv
cd ~
if [ ! -d $VENAME ]; then
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
$PYTHON -m venv $VENAME
else
echo "## /$VENAME/ already exists ! Delete it first."
exit 1
fi
# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"
cd $VENAME
echo "-- now in: ${PWD}"
ls -tlarg
source bin/activate
echo $VIRTUAL_ENV
if [ -d ~/$VENAME/bin ]; then
echo "### Activating."
else
echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
exit 1
fi
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
# update local version of setuptools, more recent than OS version, needed for packages without wheels
echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools
PIP=pip
$PIP list > original-pip.list
$PIP freeze >original.txt
# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings
#ln -s ${TROGDIR}/../expofiles expofiles
# fudge for philip's machine
if [ ! -d /mnt/d/EXPO ]; then
sudo mkdir /mnt/d
sudo mount -t drvfs D: /mnt/d
fi
if [ -d ${TROGDIR}/../expofiles ]; then
ln -s ${TROGDIR}/../expofiles expofiles
else
ln -s /mnt/d/EXPO/expofiles expofiles
fi
echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
sudo chmod -R 777 *
echo "### links to expoweb, troggle etc. complete:"
ls -tla
echo "###"
echo "### now installing ${TROGDIR}/${REQUIRE}"
echo "###"
cat ${TROGDIR}/${REQUIRE}
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
# seen on wsl2 as well as wsl1
# which ALSO ruins EXISTING permissions !
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
read -p "Press any key to resume ..."
$PIP install -r ${TROGDIR}/${REQUIRE}
echo "### install from ${TROGDIR}/${REQUIRE} completed."
echo '### '
# this installs pre-release django 5.0
$PIP install --pre django
$PIP freeze > $REQUIRE
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort $REQUIRE >2
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
rm 1
rm 2
# cp $REQUIRE requirements-$VENAME.txt
cp $REQUIRE troggle/$REQUIRE
$PIP list > installed-pip.list
$PIP list -o > installed-pip-o.list
REQ=installation-record
mkdir $REQ
mv original.txt $REQ
mv $REQUIRE $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
cp fresh-$REQUIRE ../$REQUIRE
mv fresh-$REQUIRE $REQ
cp troggle/`basename "$0"` $REQ
# install VS code
sudo apt install software-properties-common apt-transport-https wget gpg
wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
sudo apt update
sudo apt install code
$PYTHON --version
python --version
echo "Django version:`django-admin --version`"
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correctly. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors
## So you will need to run
'sudo chown -Rhv philip:philip ~/$VENAME' (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chown only takes effect then.
'python manage.py test -v 2'
'./pre-run.sh' (runs the tests again)
'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi

View File

@@ -1,63 +0,0 @@
<!-- expobase.html - this text visible because this template has been included -->
<html lang="en">
<head>
<script>document.interestCohort = null;</script> <!-- Turn off Google FLoC -->
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Directory not found photoupload/</title>
<link rel="stylesheet" type="text/css" href="/css/main2.css" />
</head>
<body >
<h1>Directory not found 'photoupload/'</h1>
<h3>Click here: <a href="/photoupload">/photoupload</a> </h3>
<p>i.e. without the final '/'</p>
<!-- the year now -->
<div id="menu">
<ul id="menulinks">
<li><a href="/index.htm">Home</a></li>
<li><a href="/handbook/index.htm">Handbook</a>
</li>
<li><a href="/handbook/computing/onlinesystems.html">Online systems</a></li>
<li><a href="/handbook/logbooks.html#form">Make Logbook Entry</a></li>
<li><a href="/caves">Caves</a>
</li>
<li><a href="/infodx.htm">Site index</a></li>
<li><a href="/pubs.htm">Reports</a></li>
<li><a href="https://expo.survex.com/kanboard/board/2">Kanboard</a></li>
<li><a href="/handbook/troggle/training/trogbegin.html">Troggle</a></li>
<li><form name=P method=get
action="https://expo.survex.com/search"
target="_top">
<input id="omega-autofocus" type=search name=P size=8 autofocus>
<input type=submit value="Search"></form></li>
<li> <b style="color:red">RUNNING ON LOCALSERVER</b> <br>slug:<br>newslug:<br>url:
</ul>
</div>
</body>
</html>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,162 +0,0 @@
"""Simple test factories used to replace fixtures in tests.
These avoid adding external dependencies like factory_boy and provide small
helpers to create models with predictable defaults and optional PKs so tests
that relied on fixture PKs continue to work.
"""
from troggle.core.models.troggle import Expedition, Person, PersonExpedition
from troggle.core.models.caves import Cave
from django.contrib.auth.models import User
def create_user(username, first_name=None, last_name="Caver", is_superuser=False):
u = User()
u.username = username
u.email = f"{username}@example.test"
u.first_name = first_name or username
u.last_name = last_name
u.set_password("secretword")
u.is_superuser = is_superuser
u.save()
return u
def create_expedition(pk=None, year="2019", name="CUCC expo 2019"):
e = Expedition()
if pk is not None:
e.pk = pk
e.year = year
e.name = name
e.save()
return e
def create_person(pk=None, first_name="Michael", last_name="Sargent", fullname="Michael Sargent", slug="michael-sargent", blurb=None):
ms_blurb = """\n\n\n\n\n\n<p><img class=\"onleft\" src=\"/folk/i/mikey0.jpg\">\n
<img class=\"onright\" src=\"/folk/i/mikey1.jpg\" height=\"400\"\nalt=\"\" />\n
<b>Michael Sargent</b> CUCC<br />\nExpeditions 2014, 15, 16, 17, 18, 19.\n
<p>The first second-generation expo caver in 2014, later members of this exclusive group
were Dan Lenartowicz and Sarah Connolly.\n\n\n
<img class=\"onleft\" src=\"/folk/i/michaelsargent.jpg\">\n<im\n\n
<hr style=\"clear: both\" /><p class=\"caption\">Pre-expo (pre-student)
photos from President's Invite (OUCC) \nand first abseiling instruction (Cambridge).</p>\n
"""
p = Person()
if pk is not None:
p.pk = pk
p.first_name = first_name
p.last_name = last_name
p.fullname = fullname
p.slug = slug
# provide a small default blurb consistent with fixtures for pages
p.blurb = blurb if blurb is not None else ms_blurb
p.save()
return p
def create_personexpedition(pk=None, expedition=None, person=None):
pe = PersonExpedition()
if pk is not None:
pe.pk = pk
pe.expedition = expedition
pe.person = person
pe.save()
return pe
def create_cave(
pk=None,
areacode="1623",
kataster_number="115",
filename="1623-115.html",
url="1623/115.url",
description_file="1623/115.htm",
underground_description="",
notes="",
official_name="",
non_public=False,
kataster_code="",
unofficial_number="",
explorers="",
equipment="",
references="",
survey="",
length="",
depth="",
extent="",
survex_file="",
):
c = Cave()
if pk is not None:
c.pk = pk
c.areacode = areacode
c.non_public = non_public
c.kataster_code = kataster_code
c.kataster_number = kataster_number
c.unofficial_number = unofficial_number
c.explorers = explorers
# If an explicit official_name was provided use it; otherwise
# leave it unset
if official_name:
c.official_name = official_name
c.filename = filename
c.url = url
c.description_file = description_file
c.underground_description = underground_description
c.notes = notes
c.equipment = equipment
c.references = references
c.survey = survey
c.length = length
c.depth = depth
c.extent = extent
c.survex_file = survex_file
c.save()
return c
def create_expo_caves():
"""Create the two cave fixtures used historically by the test-suite (115 and 284).
This mirrors the content of `core/fixtures/expo_caves.json` so tests that
relied on those fixture rows can use this factory instead.
"""
# Cave 115 (Schnellzugh&ouml;hle) - includes an underground_description fragment
und_desc_115 = (
"This is the main entrance through which the majority of the "
"<a href=\"41.htm\">Stellerwegh&ouml;hle</a> system was explored. See the separate "
"<a href=\"41/115.htm#ent115\">full guidebook description</a> for details, just an overview is given here.</p>"
"<p>The entrance leads to a non-obvious way on to the head of the short <b>Bell Pitch</b>, from where very awkward going leads out to a bigger passage to reach <b>The Ramp</b> a series of off-vertical pitches. The damper but technically easier <b>Inlet Pitches</b> drop to a Big Chamber, from where <b>Pete's Purgatory</b> starts, and leads in 800m of tortuous going to <b>The Confluence</b> and the larger streamway leading to the deepest point.</p>"
)
create_cave(
pk=43,
areacode="1623",
kataster_number="115",
filename="1623-115.html",
url="1623/115.url",
description_file="1623/115.htm",
underground_description=und_desc_115,
official_name="Schnellzugh&ouml;hle",
notes=(
"The Austrian Kataster has adopted a very perverse way of numbering things. "
"Their numbers are as follows: 115a Stellerwegh&ouml;hle entrance 41a etc."
),
)
# Cave 284 (Seetrichter)
create_cave(
pk=350,
areacode="1623",
kataster_number="284",
filename="1623-284.html",
url="1623/284/284.html",
description_file="",
official_name="Seetrichter (Lake bottom)",
notes=(
"A 25m long (22m deep) resurgence in Altausee. At the bottom, at a depth of 72m, "
"there are large round blocks."
),
)
return Cave.objects.filter(pk__in=[43, 350])
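# A minimal usage sketch (hypothetical test, not part of this module) showing how
# these factories are intended to replace the old fixtures in a TestCase:
#
#   from .factories import create_expedition, create_person, create_expo_caves
#
#   class ExampleTests(TestCase):
#       @classmethod
#       def setUpTestData(cls):
#           exp = create_expedition(pk=44, year="2019")
#           create_person(pk=250, fullname="Michael Sargent", slug="michael-sargent")
#           create_expo_caves()   # recreates caves 1623-115 and 1623-284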

View File

@@ -1,284 +0,0 @@
"""
Modified for Expo April 2021.
"""
import re
from http import HTTPStatus
from django.contrib.auth.models import User
from django.test import Client, TestCase
import settings
from troggle.core.models.caves import Cave
from troggle.core.models.troggle import Expedition, Person, PersonExpedition
from troggle.core.utils import current_expo
current_year = current_expo()
def create_user(name=None, last_name="Caver", is_superuser=False):
u = User()
u.username = name
u.email = f"philip.sargent+{name}@gmail.com"
u.first_name, u.last_name = name, last_name
u.set_password("secretword") # all test users have same password
u.save()
return u
def create_cave(areacode="1623", kataster_number="000", official_name=""):
c = Cave(areacode=areacode, kataster_number=kataster_number, official_name=official_name)
c.save()
return c
# import troggle.settings as settings
# FIXTURE_DIRS = settings.PYTHON_PATH / "core" /"fixtures"
class FixtureTests(TestCase):
"""These just hit the database.
They do not exercise the GET and url functions
New: uses factories instead of fixtures so tests are self-contained.
"""
ph = r"and leads in 800m of tortuous going to"
@classmethod
def setUpTestData(cls):
# replicate the minimal data formerly provided by fixtures
from .factories import create_expedition, create_person, create_personexpedition, create_cave
exp = create_expedition(pk=44, year="2019", name="CUCC expo 2019")
person = create_person(pk=250, first_name="Michael", last_name="Sargent", fullname="Michael Sargent", slug="michael-sargent")
create_personexpedition(pk=681, expedition=exp, person=person)
# two notable caves used by tests
create_cave(pk=43, areacode="1623", kataster_number="115", filename="1623-115.html", url="1623/115.url", description_file="1623/115.htm", underground_description="This is the main entrance ... The entrance leads to a ... and leads in 800m of tortuous going to The Confluence")
create_cave(pk=350, areacode="1623", kataster_number="284", filename="1623-284.html", url="1623/284/284.html", description_file="", official_name="Seetrichter (Lake bottom)", notes="A 25m long (22m deep) resurgence in Altausee. At the bottom, at a depth of 72m, there are large round blocks.")
def setUp(self):
create_user(name="expo") # needed for current_year()
def tearDown(self):
User.objects.all().delete()
def test_fix_person_loaded_byname(self):
p = Person.objects.get(fullname="Michael Sargent")
self.assertEqual(str(p.first_name), "Michael")
def test_fix_personexped_loaded_bypk(self):
pe = PersonExpedition.objects.get(pk="681")
self.assertEqual(str(pe.person.fullname), "Michael Sargent")
self.assertEqual(str(pe.expedition.year), "2019")
def test_fix_expedition_loaded(self):
e = Expedition.objects.get(pk="44")
self.assertEqual(str(e.year), "2019")
def test_page_person(self):
response = self.client.get("/person/michael-sargent")
content = response.content.decode()
# with open('testresponseperson.html','w') as tr:
# tr.writelines(content)
self.assertEqual(response.status_code, HTTPStatus.OK)
for ph in [r"Michael Sargent", r"has been on expo in the following years"]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_personexpedition(self):
# Not working despite all components present and correct
response = self.client.get("/personexpedition/michael-sargent/2019")
content = response.content.decode()
# with open('testresponse.html','w') as tr:
# tr.writelines(content)
self.assertEqual(response.status_code, HTTPStatus.OK)
for ph in [r"Michael Sargent", r"Table of all trips and surveys aligned by date"]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
# Need to add a fixture so that this actually has a logbook entry and a trip/svx in it.
def test_fix_cave_loaded115(self):
c = Cave.objects.get(kataster_number="115")
self.assertEqual(str(c.description_file), "1623/115.htm")
self.assertEqual(str(c.url), "1623/115.url") # intentional
self.assertEqual(str(c.filename), "1623-115.html")
self.assertEqual(str(c.areacode), "1623")
ph = self.ph
phmatch = re.search(ph, c.underground_description)
self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")
def test_fix_cave_loaded284(self):
c = Cave.objects.get(kataster_number="284")
self.assertEqual(str(c.description_file), "")
self.assertEqual(str(c.url), "1623/284/284.html")
self.assertEqual(str(c.filename), "1623-284.html")
ph = r"at a depth of 72m, there are large round blocks"
phmatch = re.search(ph, c.notes)
self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")
class FixturePageTests(TestCase):
"""The fixtures have a password hash which is compatible with plain-text password 'secretword'
The hash CHANGES whenever Django upgrades the encryption key length. Better to create the test users
algorithmically and not via a fixture.
Uses factories to create the small amount of data required for these page tests.
"""
ph = r"and leads in 800m of tortuous going to"
@classmethod
def setUpTestData(cls):
# ensure cave stubs exist for the page tests (some tests create more caves in setUp)
from .factories import create_cave
create_cave(pk=43, areacode="1623", kataster_number="115", filename="1623-115.html", url="1623/115.url", description_file="1623/115.htm", underground_description="... leads in 800m of tortuous going to ...")
create_cave(pk=350, areacode="1623", kataster_number="284", filename="1623-284.html", url="1623/284/284.html", description_file="", notes="At the bottom, at a depth of 72m, there are large round blocks.")
# also create expedition/person data used by page rendering
from .factories import create_expedition, create_person, create_personexpedition
exp = create_expedition(pk=44, year="2019", name="CUCC expo 2019")
person = create_person(pk=250, first_name="Michael", last_name="Sargent", fullname="Michael Sargent", slug="michael-sargent")
create_personexpedition(pk=681, expedition=exp, person=person)
def setUp(self):
for kataster_number in settings.NOTABLECAVES1623:
create_cave(areacode="1623", kataster_number=kataster_number)
for kataster_number in settings.NOTABLECAVES1626:
create_cave(areacode="1626", kataster_number=kataster_number)
create_user(name="expo")
create_user(name="expotest")
create_user(name="expotestadmin", is_superuser = True)
self.user = User.objects.get(username="expotest")
# Every test needs a client.
self.client = Client()
def tearDown(self):
User.objects.all().delete()
Cave.objects.all().delete()
def test_fix_expedition(self):
response = self.client.get("/expedition/2019")
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"Michael Sargent"
content = response.content.decode()
phmatch = re.search(ph, content)
# with open('exped-op.html', 'w') as f:
# f.write(content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_fix_personexped(self):
response = self.client.get("/personexpedition/michael-sargent/2019")
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"Table of all trips and surveys aligned by date"
content = response.content.decode()
phmatch = re.search(ph, content)
# with open('persexped-op.html', 'w') as f:
# f.write(content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_fix_person(self):
response = self.client.get("/person/michael-sargent")
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"second-generation expo caver "
content = response.content.decode()
phmatch = re.search(ph, content)
# with open('person-op.html', 'w') as f:
# f.write(content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_fix_cave_url115(self):
ph = "leads in 800m of tortuous going to"
response = self.client.get("/1623/115.url") # yes this is intentional, see the inserted data above & fixture
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_fix_cave_url284(self):
response = self.client.get("/1623/284/284.html")
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"at a depth of 72m, there are large round blocks"
content = response.content.decode()
phmatch = re.search(ph, content)
# with open('cave-url284.html', 'w') as f:
# f.write(content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_fix_cave_bare_url115(self):
"""Expect to get Page Not Found and status 404"""
ph = self.ph
ph = "Probably a mistake."
response = self.client.get("/1623/115/115")
# content = response.content.decode()
# with open('_test_bare_url115.html', 'w') as f:
# f.write(content)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
content = response.content.decode()
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'") # 200 & Page Not Found
def test_fix_cave_slug115(self):
"""Expect to get Page Not Found and status 404
UPDATE THIS BACK to 1623-115 when the data is fixed so that we don't have the
internal redirections for cave ids"""
ph = self.ph
ph = "Probably a mistake."
# response = self.client.get("/1623-115")
response = self.client.get("/1234-123")
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
content = response.content.decode()
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'") # 302 & Page Not Found
def test_fix_caves284(self):
response = self.client.get("/caves")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"Seetrichter"
ph_alt = r"1623-284"
phmatch = re.search(ph, content) or re.search(ph_alt, content)
with open('_cave_caves284.html', 'w') as f:
f.write(content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "' or '" + ph_alt + "'")
# Although the Cave object exists, it looks like we get a bad slug error when trying to get a QM page.
# def test_fix_qms(self):
# response = self.client.get("/cave/qms/1623-284")
# self.assertEqual(response.status_code, HTTPStatus.OK)
# content = response.content.decode()
# ph = r"Question marks for 284 - Seetrichter"
# phmatch = re.search(ph, content)
# with open('_cave-fixqms.html', 'w') as f:
# f.write(content)
# self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
# def test_fix_openqms(self):
# response = self.client.get("/cave/openqms/1623-284")
# self.assertEqual(response.status_code, HTTPStatus.OK)
# content = response.content.decode()
# ph = r"Open Leads for 284 - Seetrichter"
# phmatch = re.search(ph, content)
# with open('_cave-fixopenqms.html', 'w') as f:
# f.write(content)
# self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

View File

@@ -1,186 +0,0 @@
import os
import pathlib
import tempfile
from django.test import TestCase
import settings
from troggle.parsers import drawings
from troggle.core.models.survex import DrawingFile
from troggle.core.models.wallets import Wallet
from troggle.core.models.survex import SingleScan
from troggle.core.models.troggle import DataIssue
class DrawingsPathlibTests(TestCase):
def test_load_drawings_creates_expected_entries(self):
with tempfile.TemporaryDirectory() as td:
# create a small tree
p = pathlib.Path(td)
(p / 'one.pdf').write_text('pdf')
(p / 'two.txt').write_text('txt')
sub = p / 'dir'
sub.mkdir()
(sub / 'three.png').write_text('png')
sub2 = p / 'dir2'
sub2.mkdir()
(sub2 / 'abc.th2').write_text('th2')
(sub2 / 'abc.th').write_text('th')
# point the module at our tempdir
settings.DRAWINGS_DATA = td
drawings.load_drawings_files()
# all files should be present
self.assertTrue(DrawingFile.objects.filter(dwgpath='one.pdf').exists())
self.assertTrue(DrawingFile.objects.filter(dwgpath='two.txt').exists())
self.assertTrue(DrawingFile.objects.filter(dwgpath='dir/three.png').exists())
self.assertTrue(DrawingFile.objects.filter(dwgpath='dir2/abc.th2').exists())
self.assertTrue(DrawingFile.objects.filter(dwgpath='dir2/abc.th').exists())
def test_hidden_and_backup_skipped(self):
with tempfile.TemporaryDirectory() as td:
p = pathlib.Path(td)
(p / '.hidden').write_text('hid')
(p / 'file~').write_text('bak')
settings.DRAWINGS_DATA = td
drawings.load_drawings_files()
# Should not import hidden or backup files
self.assertFalse(DrawingFile.objects.filter(dwgpath='.hidden').exists())
self.assertFalse(DrawingFile.objects.filter(dwgpath='file~').exists())
def test_no_extension_file(self):
with tempfile.TemporaryDirectory() as td:
p = pathlib.Path(td)
(p / 'noext').write_text('data')
settings.DRAWINGS_DATA = td
drawings.load_drawings_files()
self.assertTrue(DrawingFile.objects.filter(dwgpath='noext').exists())
def test_git_dir_skipped(self):
with tempfile.TemporaryDirectory() as td:
p = pathlib.Path(td)
g = p / '.git'
g.mkdir()
(g / 'secret.txt').write_text('top secret')
settings.DRAWINGS_DATA = td
drawings.load_drawings_files()
self.assertFalse(DrawingFile.objects.filter(dwgpath='.git/secret.txt').exists())
def test_bulk_create_chunks(self):
# Create more than chunk size files to ensure bulk_create is called in multiple chunks
count = 800
with tempfile.TemporaryDirectory() as td:
p = pathlib.Path(td)
for i in range(count):
(p / f'file{i}.txt').write_text('x')
settings.DRAWINGS_DATA = td
drawings.load_drawings_files()
self.assertEqual(DrawingFile.objects.count(), count)
def test_parse_tunnel_links_wallet_and_scan(self):
# Create a wallet and a singlescan, then ensure parse_tnl_file links them
w = Wallet.objects.create(fpath='x', walletname='2025#20')
ss = SingleScan.objects.create(ffile='x', name='notes.jpg', wallet=w)
df = DrawingFile.objects.create(dwgpath='tst.th', dwgname='tst')
drawings.parse_tnl_file(df, '2025#20/notes.jpg')
self.assertIn(w, df.dwgwallets.all())
self.assertIn(ss, df.scans.all())
def test_drawing_reference_multiple_creates_dataissue(self):
df1 = DrawingFile.objects.create(dwgpath='ref1', dwgname='shared')
df2 = DrawingFile.objects.create(dwgpath='ref2', dwgname='shared')
dfmain = DrawingFile.objects.create(dwgpath='main', dwgname='main')
drawings.parse_tnl_file(dfmain, 'shared')
di = DataIssue.objects.filter(parser='Tunnel', message__contains="files named 'shared'")
self.assertTrue(di.exists())
def test_drawing_reference_single_no_dataissue(self):
DrawingFile.objects.create(dwgpath='ref3', dwgname='unique')
dfmain = DrawingFile.objects.create(dwgpath='main2', dwgname='main2')
drawings.parse_tnl_file(dfmain, 'unique')
di = DataIssue.objects.filter(parser='Tunnel', message__contains="files named 'unique'")
self.assertFalse(di.exists())
def test_extension_helpers_and_constants(self):
# Helpers should recognise supported/image suffixes (case-insensitive)
self.assertTrue(drawings._is_supported_suffix('.png'))
self.assertTrue(drawings._is_supported_suffix('.xml'))
self.assertTrue(drawings._is_supported_suffix('.TH'))
self.assertFalse(drawings._is_supported_suffix(''))
self.assertFalse(drawings._is_supported_suffix('.exe'))
self.assertTrue(drawings._is_image_suffix('.png'))
self.assertTrue(drawings._is_image_suffix('.JPEG'))
self.assertFalse(drawings._is_image_suffix('.xml'))
self.assertFalse(drawings._is_image_suffix(''))
# Constants should include expected values and be consistent
self.assertIn('.png', drawings.IMAGE_EXTS)
self.assertEqual(set(drawings.IMAGE_LIKE_EXTS), set(drawings.IMAGE_EXTS))
self.assertIn('.th', drawings.SUPPORTED_EXTENSIONS)
self.assertIn('.png', drawings.SUPPORTED_EXTENSIONS)
def test_fetch_drawingfiles_by_paths_chunks(self):
# Create more items than typical SQLite parameter limit to ensure chunking
count = 1200
rel_paths = []
objs = []
for i in range(count):
rel = f'bigdir/file{i}.txt'
rel_paths.append(rel)
objs.append(DrawingFile(dwgpath=rel, dwgname=f'name{i}'))
# Bulk create them efficiently
DrawingFile.objects.bulk_create(objs)
mapping = drawings.fetch_drawingfiles_by_paths(rel_paths, chunk_size=500)
self.assertEqual(len(mapping), count)
# Spot-check a few entries
self.assertIn('bigdir/file0.txt', mapping)
self.assertIn(f'bigdir/file{count-1}.txt', mapping)
def test_assign_wallets_for_model_assigns_and_returns_wallets(self):
w = Wallet.objects.create(fpath='x', walletname='2025#20')
df = DrawingFile.objects.create(dwgpath='assign.th', dwgname='assign')
res = drawings._assign_wallets_for_model(df, '2025#20', parser_label='AssignTest')
self.assertTrue(res)
self.assertIn(w, df.dwgwallets.all())
def test_assign_wallets_for_model_creates_dataissue_on_missing(self):
df = DrawingFile.objects.create(dwgpath='missing.th', dwgname='missing')
drawings._assign_wallets_for_model(df, 'NONEXISTENT', parser_label='AssignMissing')
di = DataIssue.objects.filter(parser='AssignMissing', message__contains='not found')
self.assertTrue(di.exists())
def test_assign_wallets_for_model_records_dataissue_on_exception(self):
# Patch Wallet.objects.filter to raise an exception
from unittest.mock import patch
df = DrawingFile.objects.create(dwgpath='err.th', dwgname='err')
with patch('troggle.core.models.wallets.Wallet.objects.filter') as mock_filter:
mock_filter.side_effect = RuntimeError('boom')
drawings._assign_wallets_for_model(df, 'WHATEVER', parser_label='AssignError')
di = DataIssue.objects.filter(parser='AssignError', message__contains='Exception')
self.assertTrue(di.exists())

View File

@@ -1,290 +0,0 @@
"""
We are using unittest for troggle.
Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.
The simple redirections to files which exist, e.g. in
/expoweb/
/expofiles/
/expofiles/documents/
etc. using parameters in localsettings such as PHOTOS_ROOT will test fine.
But paths like this:
/survey_scans/
/caves/
which rely on database resolution will fail unless a fixture has been set up for
them.
https://docs.djangoproject.com/en/dev/topics/testing/tools/
"""
import re
import subprocess
import unittest
from django.test import Client, SimpleTestCase, TestCase
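# Editor's sketch (not part of the original suite): the module docstring above says that
# paths relying on database resolution need a fixture. A minimal fixture-backed test might
# look like the hypothetical class below; "test_caves.json" is an assumed filename, not a
# fixture that exists in this repository, so the class is skipped rather than run.
@unittest.skip("illustrative sketch only - the named fixture does not exist")
class FixtureSketchTest(TestCase):
    fixtures = ["test_caves.json"]  # hypothetical JSON fixture holding Cave rows
    def test_caves_page_renders_from_fixture(self):
        # with Cave rows loaded from the fixture, a database-backed path such as /caves
        # should render instead of failing against an empty database
        response = Client().get("/caves")
        self.assertEqual(response.status_code, 200)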
class SimpleTest(SimpleTestCase):
def test_test_setting(self):
from django.conf import settings
self.assertEqual(settings.EMAIL_BACKEND, "django.core.mail.backends.locmem.EmailBackend")
import troggle.settings as settings
def test_import_TroggleModel(self):
from troggle.core.models.troggle import TroggleModel
def test_import_Cave(self):
from troggle.core.models.caves import Cave
def test_import_parsers_surveys(self):
# from PIL import Image
from functools import reduce
def test_import_parsers_survex(self):
import troggle.core.models.caves as models_caves
import troggle.core.models.survex as models_survex
import troggle.core.models.troggle as models
import troggle.settings as settings
from troggle.core.views import caves, drawings, other, scans, statistics, survex, uploads
from troggle.core.views.caves import cavepage, ent
from troggle.core.views.other import frontpage
from troggle.parsers.people import GetPersonExpeditionNameLookup
def test_import_views_uploads(self):
from troggle.core.views.logbook_edit import logbookedit
from troggle.core.views.uploads import dwgupload
def test_import_views_walletedit(self):
from troggle.core.views.wallets_edit import walletedit
def test_import_parsers_QMs(self):
from troggle.core.models.logbooks import QM
def test_import_parsers_people(self):
from html import unescape
from unidecode import unidecode
def test_import_parsers_logbooks(self):
from django.template.defaultfilters import slugify
from django.utils.timezone import get_current_timezone, make_aware
from parsers.people import GetPersonExpeditionNameLookup
from troggle.core.models.logbooks import QM, LogbookEntry, PersonLogEntry
from troggle.core.models.troggle import DataIssue, Expedition
def test_import_core_views_caves(self):
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
import troggle.core.views.expo
from troggle.core.forms import CaveForm, EntranceForm, EntranceLetterForm
from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance, SurvexStation #EntranceSlug,
from troggle.core.models.troggle import Expedition
from troggle.core.views.auth import login_required_if_public
def test_import_parsers_mix(self):
import troggle.parsers.caves
import troggle.parsers.drawings
import troggle.parsers.logbooks
import troggle.parsers.people
import troggle.parsers.QMs
import troggle.parsers.scans
import troggle.parsers.survex
import troggle.parsers.users
import troggle.settings
from troggle.parsers.logbooks import GetCaveLookup
def test_import_imports(self):
from django.contrib.auth.models import User
from django.core import management
from django.db import close_old_connections, connection, connections
from django.http import HttpResponse
from django.urls import reverse
def test_import_users_urls(self):
import base64
import json
import os
from cryptography.fernet import Fernet
from pathlib import Path
from django.contrib.auth.models import User
def test_import_urls(self):
from django.conf import settings
#from django.conf.urls import include, url
from django.contrib import admin, auth
from django.urls import resolve, reverse
from django.views.generic.base import RedirectView
from django.views.generic.edit import UpdateView
from django.views.generic.list import ListView
from troggle.core.views import caves, other, statistics, survex
from troggle.core.views.auth import expologin, expologout
from troggle.core.views.caves import cavepage, ent
from troggle.core.views.expo import (
editexpopage,
expofiles_redirect,
expofilessingle,
expopage,
map,
mapfile,
mediapage,
)
from troggle.core.views.logbooks import (
Expeditions_jsonListView,
Expeditions_tsvListView,
expedition,
get_logbook_entries,
get_people,
logbookentry,
notablepersons,
person,
personexpedition,
)
from troggle.core.views.other import controlpanel
from troggle.core.views.prospect import prospecting, prospecting_image
from troggle.core.views.statistics import dataissues, pathsreport, stats
from troggle.core.views.survex import survexcavesingle, survexcaveslist, svx
class ImportTest(TestCase):
@classmethod
def setUpTestData(cls):
import troggle.settings as settings
from troggle.parsers.logbooks import DEFAULT_LOGBOOK_FILE, LOGBOOKS_DIR
LOGBOOKS_PATH = settings.EXPOWEB / LOGBOOKS_DIR
test_year = "1986"
cls.test_logbook = LOGBOOKS_PATH / test_year / DEFAULT_LOGBOOK_FILE
def setUp(self):
pass
def tearDown(self):
pass
def test_logbook_exists(self):
self.assertTrue(self.test_logbook.is_file())
class SubprocessTest(TestCase):
@classmethod
def setUpTestData(cls):
pass
def setUp(self):
pass
def tearDown(self):
pass
def test_utf8(self):
"""Expects that utf8 is the default encoding when opening files"""
import locale
import sys
self.assertTrue(
sys.getdefaultencoding() == "utf-8", f"{sys.getdefaultencoding()} - UTF8 error in getdefaultencoding"
)
self.assertTrue(
sys.getfilesystemencoding() == "utf-8",
f"{sys.getfilesystemencoding()} - UTF8 error in getfilesystemencoding",
)
self.assertTrue(
locale.getdefaultlocale()[1] == "UTF-8",
f"{locale.getdefaultlocale()} - UTF8 error in locale.getdefaultlocale",
)
self.assertTrue(
locale.getpreferredencoding() == "UTF-8",
f"{locale.getpreferredencoding()} - UTF8 error in locale.getpreferredencoding",
)
def test_installs(self):
"""Expects external software installed: cavern, survexport, git
(but not whether it actually works)
"""
import troggle.settings as settings
for i in [settings.CAVERN, settings.SURVEXPORT, settings.GIT]:
# Define command as string and then split() into list format
cmd = f"which {i}".split()
try:
sp = subprocess.check_call(cmd, shell=False)
except subprocess.CalledProcessError:
self.assertTrue(False, f"no {i} installed")
def test_repos_git_status(self):
"""Expects clean git repos with no added files and no merge failures"""
from pathlib import Path
import troggle.settings as settings
TROGGLE_PATH = Path(settings.REPOS_ROOT_PATH) / "troggle"
for cwd in [settings.SURVEX_DATA, settings.EXPOWEB, settings.DRAWINGS_DATA, TROGGLE_PATH]:
sp = subprocess.run([settings.GIT, "status"], cwd=cwd, capture_output=True, text=True)
out = str(sp.stdout)
if len(out) > 160:
out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
print(f"git output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}")
if sp.returncode != 0:
print(f"git output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}")
self.assertTrue(sp.returncode == 0, f"{cwd} - git is unhappy")
content = sp.stdout
phs = [r"Your branch is up to date", r"nothing to commit, working tree clean", r"Your branch is ahead"]
ok = None
for ph in phs:
if phmatch := re.search(ph, content): # WALRUS
ok = True
msg = f'{cwd} - Failed to find any nice git output: "{phs}"'
self.assertIsNotNone(ok, msg)
# ph1 = r"no changes added to commit"
# phmatch1 = re.search(ph1, content)
# ph2 = r"nothing to commit"
# phmatch2 = re.search(ph2, content)
# phmatch = phmatch1 or phmatch2
# msg = f'{cwd} - Failed to find expected git output: "{ph1}" or "{ph2}"'
# self.assertIsNotNone(phmatch, msg)
def test_loser_survex_status(self):
"""Expects no failures of survex files"""
from pathlib import Path
import troggle.settings as settings
cwd = settings.SURVEX_DATA
for survey in ["1623-and-1626-no-schoenberg-hs.svx"]:
sp = subprocess.run([settings.CAVERN, survey], cwd=cwd, capture_output=True, text=True)
out = str(sp.stdout)
if len(out) > 160:
out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
# print(f'survex output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}')
if sp.returncode != 0:
print(
f"survex output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}"
)
self.assertTrue(sp.returncode == 0, f"{cwd} - survex is unhappy")
content = sp.stdout
ph = r"Total plan length of survey"
phmatch = re.search(ph, content)
msg = f'{cwd} - Failed to find expected survex output: "{ph}"'
self.assertIsNotNone(phmatch, msg)
ph1 = r"Time used"
phmatch1 = re.search(ph1, content)
ph2 = r"vertical length of survey le"
phmatch2 = re.search(ph2, content)
phmatch = phmatch1 or phmatch2
msg = f'{cwd} - Failed to find expected survex output: "{ph1}" or "{ph2}"'
self.assertIsNotNone(phmatch, msg)

View File

@@ -1,442 +0,0 @@
"""
Originally written for CUYC
Philip Sargent (Feb.2021)
Modified for Expo April 2021.
To run just these, do
uv run manage.py test -v 3 troggle.core.TESTS.test_logins
"""
import pathlib
import re
import tempfile
from http import HTTPStatus
from django.contrib.auth.models import User
from django.test import Client, TestCase
import troggle.settings as settings
from troggle.core.models.troggle import Expedition
from troggle.core.models.wallets import Wallet
from troggle.core.utils import current_expo
current_year = current_expo()
def create_user(name=None, last_name="Caver", is_superuser=False):
u = User()
u.username = name
u.email = f"philip.sargent+{name}@gmail.com"
u.first_name, u.last_name = name, last_name
u.set_password("secretword") # all test users have same password
u.save()
return u
class DataTests(TestCase):
"""These check that the NULL and NON-UNIQUE constraints are working in the database
no tests here... !"""
@classmethod
def setUpTestData(cls):
pass
def setUp(self):
create_user(name="expo")
def tearDown(self):
User.objects.all().delete()
class LoginTests(TestCase):
def setUp(self):
create_user(name="expo")
create_user(name="expotest")
create_user(name="expotestadmin", is_superuser = True)
def tearDown(self):
User.objects.all().delete()
def test_fix_admin_login_fail(self):
c = self.client
u = User.objects.get(username="expotest")
response = c.get("/admin/")
content = response.content.decode()
# with open('admin-op.html', 'w') as f:
# f.write(content)
t = re.search(r"Troggle administration", content)
self.assertIsNone(t, "Logged in as '" + u.username + "' (not staff) but still managed to get the Admin page")
class PostTests(TestCase):
"""Tests walletedit form"""
@classmethod
def setUpTestData(cls):
pass
def setUp(self):
create_user(name="expo")
create_user(name="expotestadmin", is_superuser = True)
self.user = create_user(name="expotest")
c = self.client
testyear = "2022"
wname = f"{testyear}:00"
self.testyear = testyear
w = Wallet()
w.pk = 9100
w.fpath = str(pathlib.Path(settings.SCANS_ROOT, wname))
w.walletname = wname
w.save()
self.wallet = w
e = Expedition()
e.year = testyear
e.save()
self.expedition = e
def tearDown(self):
User.objects.all().delete()
Wallet.objects.all().delete()
Expedition.objects.all().delete()
def test_file_permissions(self):
"""Expect to be allowed to write to SCANS_ROOT, DRAWINGS_DATA, SURVEX_DATA, EXPOWEB
Need to login first.
"""
c = self.client
u = self.user
testyear = self.testyear
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
c.login(username=u.username, password="secretword")
for p in [settings.SCANS_ROOT,
settings.DRAWINGS_DATA / "walletjson",
settings.EXPOWEB / "documents",
settings.SURVEX_DATA / "docs"
]:
_test_file_path = pathlib.Path(p, "_created_by_test_suite.txt")
self.assertEqual(_test_file_path.is_file(), False)
with open(_test_file_path, "w") as f:
f.write("test string: can we write to this directory?")
self.assertEqual(_test_file_path.is_file(), True)
_test_file_path.unlink()
def test_scan_upload(self):
"""Expect scan upload to wallet to work on any file
Need to login first.
This upload form looks for the Cave and the Wallet, so the test fails if the database is not loaded with the cave
identified in the wallet
"""
c = self.client
from django.contrib.auth.models import User
u = User.objects.get(username="expotest")
testyear = self.testyear
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
c.login(username=u.username, password="secretword")
with open("core/fixtures/test_upload_file.txt", "r") as testf:
response = self.client.post(
f"/walletedit/{testyear}:00", data={"name": "test_upload_file.txt", "uploadfiles": testf, "who_are_you": "Gumby <gumby@tent.expo>"}
)
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
# with open('_test_response__scan_upload.html', 'w') as f:
# f.write(content)
for ph in [
r"test_upload_",
rf"&larr; {testyear}#00 &rarr;",
r"description written",
r"Plan not required",
r"edit settings or upload a file",
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
# added each time it is run. The name of the uploaded file is only available within the code where it happens
remove_file = pathlib.Path(settings.SCANS_ROOT) / f'{testyear}' / f'{testyear}#00'/ 'test_upload_file.txt'
remove_file.unlink()
# Just uploading a file does NOT do any git commit.
# You need to create or edit a contents.json file for that to happen.
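# Editor's sketch (an assumption, not the original author's fix): the comments above note that
# these tests cannot see the name Django actually writes to disk. Django's storage API does
# return that name, so a future cleanup could rely on FileSystemStorage rather than guessing
# the filename; this helper is illustrative only and is not called by any test.
def _sketch_tracked_upload_cleanup(self):
    from django.core.files.base import ContentFile
    from django.core.files.storage import FileSystemStorage
    storage = FileSystemStorage(location=settings.SCANS_ROOT)
    # save() returns the name actually used on disk (a suffix is appended on collision)
    saved_name = storage.save("test_upload_file.txt", ContentFile(b"test string"))
    storage.delete(saved_name)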
def test_photo_upload(self):
"""Expect photo upload to work on any file (contrary to msg on screen)
Upload into current default year.
Deletes file afterwards
Need to login first.
"""
c = self.client
u = self.user
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
c.login(username=u.username, password="secretword")
with open("core/fixtures/test_upload_file.txt", "r") as testf:
response = self.client.post(
"/photoupload", data={"name": "test_upload_file.txt", "renameto": "", "uploadfiles": testf}
)
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
# with open('_test_response_photo_upload.html', 'w') as f:
# f.write(content)
for ph in [
r"test_upload_",
r"Upload photos into /photos/" + str(current_year),
r" you can create a new folder in your name",
r"Create new Photographer folder",
r"only photo image files are accepted",
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
# added each time it is run. The name of the uploaded file is only available within the code where it happens
remove_file = pathlib.Path(settings.PHOTOS_ROOT, current_year) / "test_upload_file.txt"
remove_file.unlink()
def test_photo_upload_rename(self):
"""Expect photo upload to work on any file (contrary to msg on screen)
Upload into current default year.
Deletes file afterwards
Need to login first.
"""
c = self.client
u = self.user
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
c.login(username=u.username, password="secretword")
rename = "RENAMED-FILE.JPG"
with open("core/fixtures/test_upload_file.txt", "r") as testf:
response = self.client.post(
"/photoupload", data={"name": "test_upload_file.txt", "renameto": rename, "uploadfiles": testf}
)
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
# with open('_test_response.html', 'w') as f:
# f.write(content)
for ph in [rename]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
# added each time it is run. The name of the uploaded file is only available within the code where it happens
remove_file = pathlib.Path(settings.PHOTOS_ROOT, current_year) / rename
if remove_file.is_file():
remove_file.unlink()
def test_photo_folder_create(self):
"""Create folder for new user
Create in current year.
Deletes folder afterwards
Need to login first.
"""
c = self.client
u = self.user
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
c.login(username=u.username, password="secretword")
response = self.client.post("/photoupload", data={"photographer": "GussieFinkNottle"})
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
with open('_test_response.html', 'w') as f:
f.write(content)
for ph in [r"Create new Photographer folder", r"/GussieFinkNottle/"]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
# added each time it is run. The name of the uploaded file is only available within the code where it happens
remove_dir = pathlib.Path(settings.PHOTOS_ROOT, current_year) / "GussieFinkNottle"
if remove_dir.is_dir():
print(f"{remove_dir} was created, now removing it.")
remove_dir.rmdir()
def test_dwg_upload_txt(self):
"""Expect .pdf file to be refused upload
Need to login first.
"""
c = self.client
u = self.user
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
c.login(username=u.username, password="secretword")
with open("core/fixtures/test_upload_file.pdf", "r") as testf:
response = self.client.post(
"/dwgupload/uploads", data={"name": "test_upload_file.txt", "uploadfiles": testf, "who_are_you": "Gumby <gumby@tent.expo>"}
)
content = response.content.decode()
# with open('_test_response_dwg_upload_txt.html', 'w') as f:
# f.write(content)
self.assertEqual(response.status_code, HTTPStatus.OK)
t = re.search("Files refused:", content)
self.assertIsNotNone(t, 'Logged in but failed to see "Files refused:"')
def test_dwg_upload_drawing(self):
"""Expect no-suffix file to upload
Note that this skips the git commit process. That would need a new test.
Need to login first.
"""
c = self.client
u = self.user
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
c.login(username=u.username, password="secretword")
with open("core/fixtures/test_upload_nosuffix", "r") as testf:
response = self.client.post(
"/dwguploadnogit/uploads", data={"name": "test_upload_nosuffix", "uploadfiles": testf, "who_are_you": "Gumby <gumby@tent.expo>"}
)
content = response.content.decode()
# with open('_test_response_dwg_upload_drawing.html', 'w') as f:
# f.write(content)
self.assertEqual(response.status_code, HTTPStatus.OK)
for ph in [
r"test_upload_nosuffix",
r"You cannot create folders here",
r"Creating a folder is done by a nerd",
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(
phmatch, "Expect no-suffix file to upload OK. Failed to find expected text: '" + ph + "'"
)
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
# added each time it is run. The name of the uploaded file is only available within the code where it happens
# UploadedFile.name see https://docs.djangoproject.com/en/4.1/ref/files/uploads/#django.core.files.uploadedfile.UploadedFile
remove_file = pathlib.Path(settings.DRAWINGS_DATA) / "uploads" / "test_upload_nosuffix"
if remove_file.is_file():
remove_file.unlink()
class ComplexLoginTests(TestCase):
"""These test the login and capabilities of logged-in users, they do not use fixtures"""
def setUp(self):
"""setUp runs once for each test in this class"""
create_user(name="expo")
create_user(name="expotest")
create_user(name="expotestadmin", is_superuser = True)
def tearDown(self):
self.client.logout() # not needed as each test creates a new self.client
User.objects.all().delete()
# def test_login_redirect_for_non_logged_on_user(self): # need to fix this in real system
# c = self.client
# # Need to login first. Tests that we are redirected to login page if not logged in
# response = c.get('noinfo/cave-number-index')
# self.assertRedirects(response, "/login/?next=/committee/appointments/")
def test_ordinary_login(self):
c = self.client
u = User.objects.get(username="expotest")
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
logged_in = c.login(username=u.username, password="secretword")
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
response = c.get("/accounts/login/") # defined by auth system
content = response.content.decode()
t = re.search(r"You are now logged in", content)
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get 'Now you can' greeting")
def test_authentication_login(self):
c = self.client
u = User.objects.get(username="expotest")
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
# This is weird. I thought that the user had to login before she was in the authenticated state
self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED before login")
logged_in = c.login(username=u.username, password="secretword")
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED after login")
# c.logout() # This next test always means user is still authenticated after logout. Surely not?
# self.assertFalse(u.is_authenticated, 'User \'' + u.username + '\' is STILL AUTHENTICATED after logout')
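# Editor's sketch (not in the original file): the confusion in the comments above is that
# is_authenticated on a concrete User instance is a constant True; only AnonymousUser returns
# False, and the request-level check uses request.user, which becomes an AnonymousUser after logout.
def test_anonymous_is_not_authenticated_sketch(self):
    from django.contrib.auth.models import AnonymousUser
    self.assertFalse(AnonymousUser().is_authenticated)
    self.assertTrue(User.objects.get(username="expotest").is_authenticated)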
def test_admin_login(self):
c = self.client
u = User.objects.get(username="expotestadmin")
logged_in = c.login(username=u.username, password="secretword")
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
response = c.get("/admin/login/")
content = response.content.decode()
# fn='admin-op.html'
# print(f"Writing {fn}")
# with open(fn, 'w') as f:
# f.write(content)
t = re.search(r"Troggle database administration", content)
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get the Troggle Admin page")
def test_noinfo_login(self):
c = self.client # inherited from TestCase
u = User.objects.get(username="expotest")
logged_in = c.login(username=u.username, password="secretword")
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
response = c.get("/stats") # a page with the Troggle menus
content = response.content.decode()
t = re.search(r"User\:expotest", content)
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get 'User:expotest' heading")
response = c.get("/noinfo/cave-number-index")
content = response.content.decode()
t = re.search(r"2001-07 Hoffnungschacht", content)
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get /noinfo/ content")
def test_user_force(self):
c = self.client
u = User.objects.get(username="expotest")
try:
c.force_login(u)
except:
self.assertIsNotNone(
None,
"Unexpected exception trying to force_login as '"
+ u.username
+ "' but failed (Bad Django documentation?)",
)
response = c.get("/stats") # a page with the Troggle menus
content = response.content.decode()
t = re.search(r"Log out", content)
self.assertIsNotNone(t, "Forced logged in as '" + u.username + "' but failed to get Log out heading")
response = c.get("/accounts/login/")
content = response.content.decode()
t = re.search(r"You are now logged in", content)
self.assertIsNotNone(t, "Forced logged in as '" + u.username + "' but failed to get /accounts/profile/ content")

View File

@@ -1,228 +0,0 @@
"""
We are using unittest for troggle.
Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.
The simple redirections to files which exist, e.g. in
/expoweb/
/photos/
etc. will test fine.
But paths like this:
/survey_scans/
/caves/
which rely on database resolution will fail unless a fixture has been set up for
them.
https://docs.djangoproject.com/en/dev/topics/testing/tools/
"""
import re
import subprocess
import unittest
from http import HTTPStatus
from django.contrib.auth.models import User
from django.test import Client, SimpleTestCase, TestCase
import troggle.parsers.logbooks as lbp
from troggle.core.models.logbooks import LogbookEntry
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
from troggle.core.utils import current_expo
current_year = current_expo()
def create_user(name=None, last_name="Caver", is_superuser=False):
u = User()
u.username = name
u.email = f"philip.sargent+{name}@gmail.com"
u.first_name, u.last_name = name, last_name
u.set_password("secretword") # all test users have same password
u.save()
return u
def create_person(firstname, lastname, nickname=False, vfho=False, exped=None):
fullname = f"{firstname} {lastname}"
slug=f"{firstname.lower()}-{lastname.lower()}"
coUniqueAttribs = {"first_name": firstname, "last_name": (lastname or ""), "slug": slug,}
otherAttribs = {"is_vfho": vfho, "fullname": fullname, "nickname": nickname}
person = Person.objects.create(**otherAttribs, **coUniqueAttribs)
coUniqueAttribs = {"person": person, "expedition": exped}
otherAttribs = {}
pe = PersonExpedition.objects.create(**otherAttribs, **coUniqueAttribs)
return person
TEST_YEAR = "1986"
lbp.ENTRIES[TEST_YEAR] = 4 # number of entries in the test logbook
class ImportTest(TestCase):
# see test_logins.py for the tests to check that logged-in users work
@classmethod
def setUpTestData(cls):
import troggle.settings as settings
LOGBOOKS_PATH = settings.EXPOWEB / lbp.LOGBOOKS_DIR
cls.test_logbook = LOGBOOKS_PATH / TEST_YEAR / lbp.DEFAULT_LOGBOOK_FILE
frontmatter_file = LOGBOOKS_PATH / TEST_YEAR / "frontmatter.html"
if frontmatter_file.is_file():
frontmatter_file.unlink() # delete if it exists
coUniqueAttribs = {"year": TEST_YEAR}
otherAttribs = {"name": f"CUCC expo-test {TEST_YEAR}"}
cls.test_expo = Expedition.objects.create(**otherAttribs, **coUniqueAttribs)
fred = create_person("Fred", "Smartarse", nickname="freddy", exped=cls.test_expo)
phil = create_person("Phil", "Tosser", nickname="tosspot", exped=cls.test_expo)
dave = create_person("David", "Smartarse", "", exped=cls.test_expo)
mike = create_person("Michael", "Wideboy", "WB", vfho=True, exped=cls.test_expo)
# NOT created Kurt, as the whole point is that he is a guest.
def setUp(self):
create_user(name="expo") # needed for current_year()
self.user = create_user(name="expotest")
self.client = Client()
def tearDown(self):
User.objects.all().delete()
Person.objects.all().delete()
PersonExpedition.objects.all().delete()
Expedition.objects.all().delete()
def test_logbook_exists(self):
self.assertTrue(self.test_logbook.is_file())
def test_logbook_parse_issues(self):
"""This is just testing the db not the web page
"""
lbp.LoadLogbook(self.test_expo) # i.e. load the 1986 logbook
issues = DataIssue.objects.all()
messages = []
for i in issues:
if i.parser=="logbooks":
# f"{self.parser} - {self.message}"
messages.append(i.message)
print(f"'{i.message}'")
expected = [
"! - 1986 No name match for: 'Kurt Keinnamen' in entry",
]
not_expected = [
" ! - 1986 EXCEPTION:: 'Dave Smartarse' (Dave Smartarse) in entry tid='1986-07-27a' for this year.",
" ! - 1986 Warning: logentry: surface - stupour - no expo member author for entry '1986-07-31a'",
" ! - 1986 Warning: logentry: 123 - wave 2 - no expo member author for entry '1986-08-01a'",
]
# with open('_test_response.txt', 'w') as f:
# for m in messages:
# f.write(m)
messages_text = ", ".join(messages)
for e in expected:
phmatch = re.search(e, messages_text)
self.assertIsNotNone(phmatch, f"Failed to find expected text: '{e}' in\n{messages_text}")
for e in not_expected:
phmatch = re.search(e, messages_text)
self.assertIsNone(phmatch, f"Found unexpected text: '{e}' in\n{messages_text}")
def test_lbe(self):
lbp.LoadLogbook(self.test_expo) # i.e. load the 1986 logbook, which has this logbook entry
response = self.client.get(f"/logbookentry/1986-07-27/1986-07-27a")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response_1986-07-27a.html', 'w') as f:
# f.write(content)
expected = [
"<title>Logbook CUCC expo-test 1986 123 - 123 Wave 1</title>",
"Smartarse rig first section of new pitches. Second wave arrives and takes over rigging.",
]
for ph in expected:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_lbe_new(self):
"""This page requires the user to be logged in first, hence the extra shenanigans
"""
c = self.client
u = self.user
c.login(username=u.username, password="secretword")
response = self.client.get(f"/logbookedit/")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response.html', 'w') as f:
# f.write(content)
expected = [
"New Logbook Entry in ",
"Everyone else involved",
"Place: cave name, or 'plateau', 'topcamp' etc.",
]
for ph in expected:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, f"({response.status_code}) Failed to find expected text: '" + ph + "'")
def test_lbe_edit(self):
"""This page requires the user to be logged in first, hence the extra shenanigans
"""
c = self.client
u = self.user
c.login(username=u.username, password="secretword")
lbp.LoadLogbook(self.test_expo) # i.e. load the 1986 logbook, which has this logbook entry
# multiple loads are overwriting the lbes and incrementing the a, b, c etc, so get one that works
lbe = LogbookEntry.objects.get(date="1986-07-31") # only one on this date in fixture
response = self.client.get(f"/logbookedit/{lbe.slug}")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response_edit.html', 'w') as f:
# f.write(content)
expected = [
"Edit Existing Logbook Entry on 1986-07-31",
r"Other names \(comma separated\)", # regex match so slashes need to be espcaped
"Place: cave name, or 'plateau', 'topcamp' etc.",
]
for ph in expected:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, f"({response.status_code}) Failed to find expected text: '" + ph + "'")
def test_aliases(self):
# FIX THIS
# Problem: '' empty string appears as valid alias for David Smartarse
response = self.client.get(f"/aliases/{TEST_YEAR}")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_responsealiases.html', 'w') as f:
# f.write(content)
ph = f"'fsmartarse'"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_survexfiles(self):
# Needs another test with test data
response = self.client.get("/survexfile/caves")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response.html', 'w') as f:
# f.write(content)
ph = f"Caves with subdirectories"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_people(self):
# Needs another test with test data
response = self.client.get("/people")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response.html', 'w') as f:
# f.write(content)
ph = f"<td><a href=\"/personexpedition/fred-smartarse/{TEST_YEAR}\">{TEST_YEAR}</a></td>"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

View File

@@ -1,164 +0,0 @@
"""
We are using unittest for troggle.
Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.
https://docs.djangoproject.com/en/dev/topics/testing/tools/
We are not using
https://github.com/FactoryBoy/factory_boy
because we are trying to minimise the number of 3rd-party packages, which expose us to update hell,
as experienced in 2019-2020.
However we could use
https://docs.python.org/dev/library/unittest.mock.html
as this is now part of Python - if we can get our heads around it.
The tests in this file:
The code {% url THING %} or {% url THING PARAMETER %} appears a hundred times or more in the troggle/templates/ HTML template files.
This is the template syntax for
reverse('THING')
or
reverse('THING', args=[PARAMETER])
It is the URLs which take parameters that need understanding and testing. The reverse() calls which take no
parameters should be fine, as this is fundamental Django stuff which will have been tested to death.
But the reverse() function is purely syntactical: the PARAMETER is just a string which is applied to
the url. So this is not testing anything important really. See the test_url_threed() below.
These url lines all come from templates/*.html
1. No tests: No parameters
{% url "caveindex" %}
{% url "controlpanel" %}
{% url "dataissues" %}
{% url "dwgallfiles" %}
{% url "dwgupload" %}
{% url "stations" %}
{% url "exportlogbook" %}
{% url "newcave" %}
{% url "notablepersons" %}
{% url "photoupload" %}
{% url "walletedit" %}
Tests exist:
{% url "stats" %}
{% url "allscans" %}
{% url "survexcaveslist" %}
2. With parameter
{% url "caveQMs" "1623-290" %}
{% url "cave_openQMs" "1623-290" %}
{% url "cavewallets" cave_id %}
{% url "dwgfilesingle" drawing.dwgpath %}
{% url "edit_cave" cave.url_parent cave.slug %}
{% url "editentrance" cave.slug ent.entrance.slug %}
{% url "editexpopage" path %}
{% url "err" title %}
{% url "expedition" 2022 %}
{% url "newentrance" cave.slug %}
{% url "survexcavessingle" cavedir %}
{% url "survexcavessingle" cavefiles.0.1 %}
{% url "svx" cavepath %}
{% url "svx" survexfile.path %}
{% url "svxlog" title %}
{% url 'caveQMs' '1623-161' %}
{% url 'image_selector' path %}
{% url 'new_image_form' path %}
Tests exist:
{% url "threed" title %}
"""
todo = """These just do {% url THING %} with no parameter, we also need tests which take a parameter
- Read all this https://developer.mozilla.org/en-US/docs/Learn/Server-side/Django/Testing
- Read all this https://realpython.com/testing-in-django-part-1-best-practices-and-examples/
- add 'coverage' to all tests
- statistics also needs test when we have put data into the database
"""
import re
from http import HTTPStatus
from django.test import Client, TestCase
from django.urls import path, reverse
# class SimplePageTest(unittest.TestCase):
class URLTests(TestCase):
"""These tests may appear to be redundant, but in fact they exercise different bits of code. The urls.py
dispatcher is sending these URLs to views via different 'view' handlers, and they all need verifying.
"""
@classmethod
def setUpTestData(cls):
# Set up data for the whole TestCase
# cls.foo = Foo.objects.create(bar="Test")
# Some test using self.foo in tests below..
# read in some SQL ?
pass
def setUp(self):
# Every test needs a client.
self.client = Client()
def test_statistics(self):
response = self.client.get("/statistics")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response_statistics.html', 'w') as f:
# f.write(content)
ph = r"0 people, 0 caves, 0 wallets and 0 logbook entries"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_stats(self):
# Needs another test with test data
response = self.client.get("/stats")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response.html', 'w') as f:
# f.write(content)
ph = r"Total length: 0.0 km adding up the total for each year."
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_url_stats(self):
"""Test the {% url "stats" %} reverse resolution
path('statistics', statistics.stats, name="stats"),
path('stats', statistics.stats, name="stats"),
"""
reversed_url = reverse('stats') # NB _ must be written as - if present in name
self.assertEqual(reversed_url, "/stats")
def test_url_allscans(self):
"""Test the {% url "allscans" %} reverse resolution
path('survey_scans', allscans, name="allscans"), # all the scans in all wallets
"""
reversed_url = reverse('allscans') # NB _ must be written as - if present in name
self.assertEqual(reversed_url, "/survey_scans")
def test_url_survexcaveslist(self):
"""Test the {% url "allscans" %} reverse resolution
path('survexfile/caves', survex.survexcaveslist, name="survexcaveslist"),
"""
reversed_url = reverse('survexcaveslist') # NB _ must be written as - if present in name
self.assertEqual(reversed_url, "/survexfile/caves")
def test_url_threed(self):
"""Test the {% url "threed" %} reverse resolution
path('survexfile/<path:survex_file>.3d', survex.threed, name="threed"),
"""
reversed_url = reverse('threed', args=['zilch']) # NB _ must be written as - if present in name
self.assertEqual(reversed_url, "/survexfile/zilch.3d")
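# Editor's sketch (not in the original file): reverse() above is purely syntactical, so a
# resolve() round-trip is the complementary check that the generated URL maps back to the
# named route. This assumes the 'threed' path shown in the docstring is registered and that
# no earlier pattern shadows it.
def test_url_threed_resolves(self):
    from django.urls import resolve  # local import; resolve() is standard Django
    match = resolve("/survexfile/zilch.3d")
    self.assertEqual(match.url_name, "threed")
    self.assertEqual(match.kwargs["survex_file"], "zilch")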

View File

@@ -1,631 +0,0 @@
"""
IGNORED tests
- all test files with hyphens in the filename are ignored
- filenames with _ are OK
$ python manage.py test cuy.photologue --parallel
only runs the photologue tests. Working.(well, it was working..)
$ python manage.py test cuy.mailman --parallel
$ python manage.py test paypal.standard --parallel
needs work: a very large test suite
$ python manage.py test tagging --parallel
a huge suite - needs a lot of work to run with Django 1.11 & python3
$ python manage.py test cuy.club --parallel
Runs the tests in this file only
"""
import re
import unittest
from django.test import Client, SimpleTestCase, TestCase, TransactionTestCase
class ImportTest(TestCase):
def test_import_imports(self):
# Need to go through all modules and copy all imports here
from io import StringIO
from cuy.club.models import Article, Event, Member, Webpage, WebpageCategory
from cuy.website.views.generic import PUBLIC_LOGIN
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core import management
from django.db import connection, connections
from django.db.utils import IntegrityError
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.template.defaultfilters import slugify
from django.utils.timezone import get_current_timezone, make_aware
class SimpleTest(SimpleTestCase):
def test_arith_mult(self):
"""
Tests that 10 x 10 always equals 100.
"""
self.assertEqual(10*10, 100)
class DataTests(TestCase ):
'''These check that the NULL and NON-UNIQUE constraints are working in the database '''
@classmethod
def setUpTestData(cls):
pass
def setUp(self):
from cuy.club.models import Member
from django.contrib.auth.models import User
m = Member()
m.pk=8000
m.user_id = 9000 # not NULL constraint
m.save()
self.member = m
u = User()
u.pk = 9000
u.user_id = 8000
u.username, u.password ='stinker', 'secretword'
u.email='philip.sargent+SP@gmail.com'
u.first_name, u.last_name ='Stinker', 'Pinker'
u.save()
self.user = u
def tearDown(self):
#self.member.delete() # must delete member before user
#self.user.delete() # horrible crash, why?
pass
def test_member_not_null_field(self):
from cuy.club.models import Member
from django.db.utils import IntegrityError
n = Member()
try:
n.save()
except IntegrityError as ex:
t = re.search(r'NOT NULL constraint failed: club_member.user_id', str(ex))
self.assertIsNotNone(t, "Exception is not the expected 'NOT NULL constraint failed'")
n.user_id = 1000
try:
n.save()
except:
return self.assertIsNotNone(None, "Failed to save valid Member to database")
def test_member_not_unique_field(self):
from cuy.club.models import Member
from django.db.utils import IntegrityError
m1 = Member()
m2 = Member()
m1.user_id = 1000
m2.user_id = m1.user_id
m1.save()
try:
m2.save()
except IntegrityError as ex:
t = re.search(r'UNIQUE constraint failed: club_member.user_id', str(ex))
return self.assertIsNotNone(t, "IntegrityError as expected but message is not the expected 'UNIQUE constraint failed'" )
self.assertIsNotNone(None, "Failed to enforce 'UNIQUE constraint' on saving two Member objects with same user_id")
def test_article_invalid_date(self):
from cuy.club.models import Article, Member
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
a = Article()
m = self.member
a.author_id = m.user_id
a.publish="not a valid datetime"
try:
a.save()
except ValidationError as ex:
t = re.search(r'value has an invalid format. It must be in YYYY-MM-DD HH:MM', str(ex))
self.assertIsNotNone(t, "Exception is not the expected 'invalid format'")
def test_article_and_author_not_null(self):
from cuy.club.models import Article, Member
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
a2 = Article()
a2.publish ="2021-02-17 17:25"
a2.author_id = None
try:
a2.save()
except IntegrityError as ex:
t = re.search(r'NOT NULL constraint failed: club_article.author_id', str(ex))
self.assertIsNotNone(t, "Exception is not the expected 'NOT NULL constraint failed'")
except:
self.assertIsNotNone(None, "Exception is not the expected 'NOT NULL constraint failed' IntegrityError")
def test_article_and_author_ok(self):
from cuy.club.models import Article, Member
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
m = self.member
a3 = Article()
a3.pk = 5000
a3.publish ="2021-02-17 17:25"
a3.author_id = m.pk
try:
a3.save()
except:
return self.assertIsNotNone(None, "Failed to save valid Article to database")
def test_member_and_user(self):
u = self.user
m = self.member
m.user = u
self.assertEqual(m.user.last_name, 'Pinker')
m.save()
u.save()
class FixturePageTests(TestCase):
fixtures = ['cuyc_basic_data.json', 'test_data.json', 'auth_user_gussie']
def setUp(self):
from django.contrib.auth.models import User
self.user = User.objects.get(username='gussie')
self.member = self.user.profile
def tearDown(self):
pass
def test_fix_event_loaded(self):
from cuy.club.models import Event
e = Event.objects.get(slug='spring-in-the-med')
self.assertEqual(str(e.shore_contact.first_name()), 'Stiffy')
self.assertEqual(str(e.organiser.last_name()), 'Fittleworth')
def test_fix_page_all_trips(self):
response = self.client.get('/programme/')
content = response.content.decode()
t = re.search(r'Spring in the Arctic', content)
self.assertIsNotNone(t, "Failed to see Event loaded from fixture")
t = re.search(r'High Summer in the Irish Sea', content)
self.assertIsNotNone(t, "Failed to see Event loaded from fixture")
def test_fix_page_event(self):
response = self.client.get('/programme/events/spring-in-the-arctic/')
content = response.content.decode()
t = re.search(r'Spring in the Arctic', content)
self.assertIsNotNone(t, "Failed to see Event loaded from fixture")
def test_fix_admin_login_fail(self):
c = self.client
from cuy.club.models import Member
from django.contrib.auth.models import User
m = Member.objects.get(pk=9002)
u = User.objects.get(username='bingo')
self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
response = c.get('/admin/')
content = response.content.decode()
with open('admin-op.html', 'w') as f:
f.write(content)
t = re.search(r'Site administration', content)
self.assertIsNone(t, 'Logged in as \'' + u.username + '\' (not staff) but still managed to get the Admin page' )
class ComplexLoginTests(TestCase):
'''These test the login and capabilities of logged-in users'''
def setUp(self):
'''setUp runs once for each test in this class'''
from cuy.club.models import AFFILIATION, MEMBER_TYPES, Member
from django.contrib.auth.models import User
m = Member()
m.pk=8000
m.user_id = 9000 # not NULL constraint
m.email = "philip.sargent+HG@gmail.com"
m.member_type = MEMBER_TYPES[1]
m.affiliation = AFFILIATION[3]
m.committee_email_prefix = 'honoria'
u = User()
u.pk = 9000
u.user_id = 8000
u.username, u.password ='honoria', 'secretword'
u.email='philip.sargent+HG@gmail.com'
u.first_name, u.last_name ='Honoria', 'Glossop'
u.is_staff = True
u.is_superuser = True
u.set_password(u.password) # This creates a new salt and thus a new key for EACH test
u.save() # vital that we save all this before attempting login
#print ('\n',u.password)
m.save()
self.user = u
self.member = m
from cuy.club.models import ClubRole, Elected
cr = ClubRole()
cr.id = 7000
cr.title = 'Skipper'
cr.short_description = 'Club skipper who can lead trips'
cr.committee_position = True
cr.rank = 8
cr.save()
self.clubrole = cr
e = Elected()
e.member = m
e.club_role = cr
e.save()
self.elected = e
def tearDown(self):
self.client.logout() # not needed as each test creates a new self.client
#self.member.delete()
##self.user.delete() # id attribute set to None !
pass
def test_login_redirect_for_non_logged_on_user(self):
c = self.client
# Need to login first. Tests that we are redirected to login page if not logged in
response = c.get('/committee/appointments/')
self.assertRedirects(response, "/login/?next=/committee/appointments/")
def test_ordinary_login(self):
c = self.client
u = self.user
self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
response = c.get('/')
content = response.content.decode()
t = re.search(r'Hello Honoria', content)
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get personal greeting' )
def test_authentication_login(self):
c = self.client
u = self.user
self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')
# This is weird. I thought that the user had to login before she was in the authenticated state
self.assertTrue(u.is_authenticated, 'User \'' + u.username + '\' is NOT AUTHENTICATED before login')
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
self.assertTrue(u.is_authenticated, 'User \'' + u.username + '\' is NOT AUTHENTICATED after login')
c.logout()
# NB is_authenticated is always True on a concrete User instance (only AnonymousUser returns
# False), so the following assertion can never pass and is left commented out:
# self.assertFalse(u.is_authenticated, 'User \'' + u.username + '\' is STILL AUTHENTICATED after logout')
def test_admin_login(self):
c = self.client
u = self.user
m = self.member
m.user = u
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
response = c.get('/admin/')
content = response.content.decode()
# with open('admin-op.html', 'w') as f:
# f.write(content)
t = re.search(r'Site administration', content)
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get the Admin page' )
def test_user_account_login(self):
# User must be associated with a Member for whom is_committee() is True
c = self.client
u = self.user
m = self.member
m.user = u
logged_in = c.login(username=u.username, password='secretword') # fails if password=u.password !
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
response = c.get('/accounts/profile/')
content = response.content.decode()
# with open('account-profile-op.html', 'w') as f:
# f.write(content)
t = re.search(r'CUYC Member Profile - Cambridge University Yacht Club', content)
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get /accounts/profile/ content')
def test_committee_login(self):
from django.contrib.auth.models import User
# User must be associated with a Member for whom is_committee() is True
c = self.client # inherited from TestCase
u = self.user
m = self.member
cr = self.clubrole
e = self.elected
m.user = u
logged_in = c.login(username=u.username, password='secretword') # fails if password=u.password !
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
response = c.get('/')
content = response.content.decode()
t = re.search(r'Hello Honoria', content)
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get personal greeting' )
response = c.get('/committee/appointments/')
content = response.content.decode()
# with open('cmttee-op.html', 'w') as f:
# f.write(content)
t = re.search(r'A word of warning...', content)
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get /committee/ content')
def test_user_force(self):
from django.conf import settings
c = self.client
u = self.user
m = self.member
m.user = u
try:
c.force_login(u)
except:
self.assertIsNotNone(None, 'Unexpected exception trying to force_login as \'' + u.username + '\' but failed (Bad Django documentation?)')
response = c.get('/')
content = response.content.decode()
t = re.search(r'Hello Honoria', content)
self.assertIsNotNone(t, 'Forced logged in as \'' + u.username + '\' but failed to get personal greeting' )
response = c.get('/accounts/profile/')
content = response.content.decode()
t = re.search(r'From here you can update your', content)
self.assertIsNotNone(t, 'Forced logged in as \'' + u.username + '\' but failed to get /accounts/profile/ content')
class DynamicPageTests(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_empty_yachts(self):
# no page there initially
response = self.client.get('/yachts/')
content = response.content.decode()
self.assertEqual(response.status_code, 404)
def test_full_yachts(self):
'''Creating a WebpageCategory and an index webpage creates a valid url
'''
from cuy.club.models import Webpage, WebpageCategory
wc = WebpageCategory()
wc.pk = 8000
wc.id = 8000
wc.name, wc.slug ='Yachts', 'yachts'
wc.save()
self.webcategory = wc
p = Webpage()
p.pk = 9000
p.id = 9000
p.category_id = wc.id
p.description = "Current Yacht"
p.edited = 1
p.event_id = None
p.index = 1
p.markup = "<h1>Skylark</h1>"
p.ordering = 10
p.slug = "yacht"
p.title = "Skylark Yacht"
p.save()
self.webpage = p
response = self.client.get('/yachts/')
content = response.content.decode()
self.assertEqual(response.status_code, 200)
class PageTests(TestCase):
def setUp(self):
# Every test needs a client.
# new in Django 1.5 no need to create self.client first
# https://docs.djangoproject.com/en/dev/topics/testing/tools/#django.test.LiveServerTestCase
#self.client = Client()
pass
def tearDown(self):
pass
def test_basic_admin(self):
response = self.client.get('/admin/login/')
self.assertEqual(response.status_code, 200)
def test_basic_admindoc(self):
# Need to login first. Tests that we are redirected
response = self.client.get('/admin/doc/models/')
self.assertRedirects(response, "/admin/login/?next=/admin/doc/models/")
def test_basic_programme_status(self):  # status-only check; the content-checking test_basic_programme is further below
response = self.client.get('/programme/')
self.assertEqual(response.status_code, 200)
def test_basic_login (self):
# Need to login first
response = self.client.post('/login/', {'username': 'gussie', 'password': 'secretword'})
if response.status_code == 302:
print(response['location'])
self.assertEqual(response.status_code, 200) # fails because user does not exist
def test_basic_committee(self):
# Need to login first. Tests that we are redirected to login page
response = self.client.get('/committee/')
self.assertRedirects(response, "/login/?next=/committee/")
# --- Check non-logged-in users cannot see these
def test_basic_gallery(self):
response = self.client.get('/gallery/')
self.assertEqual(response.status_code, 200)
def test_basic_sitemap(self):
response = self.client.get('/site-map/')
self.assertEqual(response.status_code, 200)
# --- public club pages created by content in templates/*.html
def test_basic_club(self):
response = self.client.get('/club/')
content = response.content.decode()
t = re.search(r'offers opportunities for members of the university to sail yachts', content)
self.assertIsNotNone(t)
def test_basic_programme(self):
response = self.client.get('/programme/')
content = response.content.decode()
t = re.search(r'If you would like to go on any of these events', content)
self.assertIsNotNone(t)
def test_basic_programme_onshore(self):
response = self.client.get('/programme/on_shore/')
content = response.content.decode()
t = re.search(r'All Upcoming Shore Based Events', content)
self.assertIsNotNone(t)
def test_page_equal_opps(self):
response = self.client.get('/club/equal-opps/')
content = response.content.decode()
t = re.search(r'commitment to a policy of equal opportunities', content)
self.assertIsNotNone(t)
def test_page_safety(self):
response = self.client.get('/club/safety/')
content = response.content.decode()
t = re.search(r'endeavour to maintain the highest levels of safety', content)
self.assertIsNotNone(t)
def test_page_safety_risk(self):
response = self.client.get('/club/safety/risk/')
content = response.content.decode()
t = re.search(r'rules for the use of safety lines to be described and monitored by the skipper.', content)
self.assertIsNotNone(t)
def test_page_safetypolicy(self):
response = self.client.get('/club/safetypolicy/')
content = response.content.decode()
t = re.search(r'should be capable of swimming at least fifty meters in clothing and keeping afloat for at least five minutes', content)
self.assertIsNotNone(t)
def test_page_safety_rules(self):
response = self.client.get('/club/safety/rules/')
content = response.content.decode()
t = re.search(r'Safety Officer is responsible for the maintenance of safety records', content)
self.assertIsNotNone(t)
def test_page_regulations(self):
response = self.client.get('/club/regulations/')
content = response.content.decode()
t = re.search(r'Sanger Institute, the Babraham Institute, Wellcome and MRC Research Laboratories', content)
self.assertIsNotNone(t)
def test_page_constitution(self):
response = self.client.get('/club/constitution/')
content = response.content.decode()
t = re.search(r'to provide a wide variety of safe and affordable yacht sailing', content)
self.assertIsNotNone(t)
def test_page_clubcommittee(self):
response = self.client.get('/club/committee/')
content = response.content.decode()
t = re.search(r'CUYC elects new officers as needed, usually at the beginning of each term', content)
self.assertIsNotNone(t)
def test_page_damages(self):
response = self.client.get('/club/damages/')
content = response.content.decode()
t = re.search(r'all crew participants may be required to contribute to the payment of damages', content)
self.assertIsNotNone(t)
def test_page_training(self):
response = self.client.get('/training/')
content = response.content.decode()
t = re.search(r'members of the club are always happy to pass on informal training tips', content)
self.assertIsNotNone(t)
def test_page_racing(self):
response = self.client.get('/racing/')
content = response.content.decode()
t = re.search(r'CUYC Racing Squad', content)
self.assertIsNotNone(t)
def test_page_blog(self):
response = self.client.get('/blog/')
content = response.content.decode()
t = re.search(r'Latest Posts', content)
self.assertIsNotNone(t)
def test_page_gallery(self):
response = self.client.get('/gallery/')
content = response.content.decode()
t = re.search(r'Photo Galleries', content)
self.assertIsNotNone(t)
def test_page_about_photos(self):
response = self.client.get('/about_photos/')
content = response.content.decode()
t = re.search(r'have been supplied by members of CUYC', content)
self.assertIsNotNone(t)
def test_page_loginhelp(self):
response = self.client.get('/login/help/')
content = response.content.decode()
t = re.search(r'Existing CUYC Member, without an account?', content)
self.assertIsNotNone(t)
def test_page_loginregister(self):
response = self.client.get('/login/register/')
content = response.content.decode()
t = re.search(r'If you are, or have ever been, a CUYC or CUCrC member', content)
self.assertIsNotNone(t)
# --- These pages are not connected to top level public menus but are in fact public
def test_page_club_tripinformation(self):
response = self.client.get('/club/trip-information/')
content = response.content.decode()
t = re.search(r'organisers have a choice to add a sum to the trip fee quoted on the website to cover expenses', content)
self.assertIsNotNone(t)
def test_page_club_trippayment(self):
response = self.client.get('/club/trip-information/payment/')
content = response.content.decode()
t = re.search(r'All payments to the club should be sent via Paypal', content)
self.assertIsNotNone(t)
def test_page_club_trip_typical_day(self):
response = self.client.get('/club/trip-information/typical-day/')
content = response.content.decode()
t = re.search(r'Skipper and first mate crawl out of their sleeping bags early', content)
self.assertIsNotNone(t)
def test_page_club_trip_faq(self):
response = self.client.get('/club/trip-information/faq/')
content = response.content.decode()
t = re.search(r'Different people are seasick in different ways', content)
self.assertIsNotNone(t)
def test_page_club_trip_kit(self):
response = self.client.get('/club/trip-information/kit/')
content = response.content.decode()
t = re.search(r'appropriate quantity of base layer clothes to match the duration', content)
self.assertIsNotNone(t)


@@ -1,564 +0,0 @@
"""
We are using unittest for troggle.
Note that the database has not been parsed from the source files when these tests are run,
so any path that relies on data being in the database will fail.
The simple redirections to files which exist, e.g. in
/expoweb/
/photos/
etc. will test fine.
But paths like this:
/survey_scans/
/caves/
which rely on database resolution will fail unless a fixture has been set up for
them.
https://docs.djangoproject.com/en/dev/topics/testing/tools/
"""
todo = """ - ADD TESTS when we are redirecting /expofiles/ to a remote file-delivering site
- Add test for running cavern to produce a .3d file
- Add tests for editing the TXT files
- add "author" tests for the git add and commit stuff for uploaded files, inc on DEVSERVER or not
"""
import re
from http import HTTPStatus
from django.test import Client, TestCase
# class SimplePageTest(unittest.TestCase):
class PageTests(TestCase):
"""These tests may appear to be redundant, but in fact they exercise different bits of code. The urls.py
dispatcher is sending these URLs via different 'view' handlers, and they all need verifying.
"""
@classmethod
def setUpTestData(cls):
# Set up data for the whole TestCase
# cls.foo = Foo.objects.create(bar="Test")
# Some test using self.foo in tests below..
# read in some SQL ?
pass
def setUp(self):
# Every test needs a client.
self.client = Client()
def test_expoweb_root(self):
response = self.client.get("")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"CUCC in Austria"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_root_slash(self):
response = self.client.get("/")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"CUCC in Austria"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_paths(self):
response = self.client.get("/pathsreport")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"This report is generated from"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_dir(self):
response = self.client.get("/handbook")
response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.FOUND) # 302 directory, so redirects to /index.htm
def test_expoweb_dirslash(self):
response = self.client.get("/handbook/")
response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.FOUND) # 302 directory, so redirects to /index.htm
def test_expoweb_dir_no_index(self):
response = self.client.get("/handbook/troggle")
content = response.content.decode()
# with open('testresponse.html','w') as tr:
# tr.writelines(content)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
ph = r"Page not found 'handbook/troggle/index.html'"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_dir_with_index_htm(self):
response = self.client.get("/years/1999/index.htm")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK) # directory, so redirects to /index.htm
ph = r"Passage descriptions for 1999"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_dir_with_index_html(self):
response = self.client.get("/years/2015/index.html")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK) # directory, so redirects to /index.htm
ph = r"Things left at top camp 2014"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_dir_with_index2(self):
response = self.client.get("/handbook/index.htm")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"Introduction to expo"
phmatch = re.search(ph, content)
# print("\n ! - test_expoweb_dir_with_index2\n{}\n{}".format(response.reason_phrase, content))
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_htm(self):
response = self.client.get("/handbook/index.htm")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"Introduction to expo"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_notfound(self):
response = self.client.get("/handbook/_test_zyxxypqrqx.html")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
ph = r"<h1>Page not found"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_no_dir(self):
# slash where there should not be one
response = self.client.get("/handbook/_test_zyxxypqrqx/")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"<h1>Directory not found"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_troggle_default(self):
# default page after logon
response = self.client.get("/troggle")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"expeditions the club has undertaken"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_troggle_default_slash(self):
response = self.client.get("/troggle/")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"<h1>Directory not found"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_expoweb_via_areaid(self):
# the dispatcher takes a detour via the cave rendering procedure for this
response = self.client.get("/guidebook/t/via201.jpg")
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.content), 6057)
def test_cave_kataster_not_found(self):
# database not loaded, so no caves found; so looks for a generic expopage and fails
# NEEDS TO BE REDONE AFTER cave id rewriting is removed, once the data is fixed in all the cave description
# pages that link to photos
response = self.client.get("/1234/115.htm")
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
content = response.content.decode()
ph = r"Page not found '1234/115.htm'"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_caves_page(self):
# Throws up lots of cave error msgs because it is looking at something which is not loaded for the tests
# but the test itself does not fail
response = self.client.get("/caves")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"Cave Number Index - kept updated"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_caves_page_kataster_not_found(self):
response = self.client.get("/caves")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"115"
phmatch = re.search(ph, content)
self.assertIsNone(phmatch, "Unexpectedly found text: '" + ph + "'")
def test_page_ss(self):
response = self.client.get("/survey_scans")
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r"All Survey scans folders "
content = response.content.decode()
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_admin(self):
# see the login page
response = self.client.get("/admin/login/")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
ph = r'<h1 id="site-name">Troggle database administration</h1>'
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_admindocs_exped(self):
# Get redirected to login page
response = self.client.get("/admin/doc/models/core.expedition/")
response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.FOUND) # 302
def test_page_expofiles_root_dir(self):
# Root expofiles - odd interaction with url parsing so needs testing
response = self.client.get("/expofiles")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
for ph in [
r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
r'<a href="/expofiles/photos">/photos/',
r'<a href="/expofiles/surveyscans">/surveyscans/',
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_expofiles_root_slash_dir(self):
# Root expofiles - odd interaction with url parsing so needs testing
response = self.client.get("/expofiles/")
if response.status_code != HTTPStatus.OK: # 200
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND: # 302
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
for ph in [
r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
r'<a href="/expofiles/photos">/photos/',
r'<a href="/expofiles/surveyscans">/surveyscans/',
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_expofiles_badness(self):
# should display expofiles directory contents not its parent
response = self.client.get("/expofiles/99badness99")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
for ph in [
r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
r'<a href="/expofiles/photos">/photos/',
r'<a href="/expofiles/surveyscans">/surveyscans/',
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_expofiles_docs_dir(self):
# Flat file tests.
response = self.client.get("/expofiles/documents/")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
for ph in [
r'a href="/expofiles/documents/bier-tent-instructions.pdf">bier-tent-instructions.pdf',
r'a href="/expofiles/documents/boc.pdf">boc.pdf',
r'a href="/expofiles/documents/idiots-guide-expo-git.pdf"',
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_survey_scans_dir(self):
# Flat file tests.
response = self.client.get("/expofiles/surveyscans")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
for ph in [
r'<a href="/expofiles/surveyscans/2004">/2004/',
r'<a href="/expofiles/surveyscans/1989LUSS">/1989LUSS/',
r'<a href="/expofiles/surveyscans/2018">/2018',
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_folk(self):
# This page is separately generated, so it has the full data content
response = self.client.get("/folk/index.htm")
content = response.content.decode()
self.assertEqual(response.status_code, HTTPStatus.OK)
for ph in [
r"involves some active contribution",
r"Naomi Griffiths",
r"Gail Smith",
r"Phil Wigglesworth",
r"A more obscure record of longest gap between expos has",
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_expofile_documents(self):
# this gets a real page as it is looking at the filesystem
response = self.client.get("/expofiles/documents/ropes")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"notice_generale_cordes_courant"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_expofile_documents_slash(self):
# this gets a real page as it is looking at the filesystem
response = self.client.get("/expofiles/documents/ropes/")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"notice_generale_cordes_courant"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_expofile_document_loeffler_pdf(self):
# Flat file tests.
response = self.client.get("/expofiles/documents/surveying/tunnel-loefflerCP35-only.pdf")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.content), 2299270)
def test_page_expofile_document_rope_pdf(self):
# Flat file tests.
response = self.client.get("/expofiles/documents/ropes/rope-age-agm-2019.pdf")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.content), 76197)
def test_page_expofile_document_png(self):
# Flat file tests.
response = self.client.get("/expofiles/documents/callout-2012.png")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.content), 69921)
def test_page_expofile_writeup(self):
# Flat file tests.
response = self.client.get("/expofiles/writeups/1982/logbook1982.pdf")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.content), 12915413)
def test_page_site_media_ok(self):
# Flat file tests.
response = self.client.get("/site_media/surveyHover.gif")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.content), 39482) # need to check it is not just an error page
def test_page_site_media_css(self):
# Flat file tests.
response = self.client.get("/site_media/css/trog3.css")
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode() # need to check it is not just an error page
ph = r"This text is used by the test system to determine that trog3.css loaded correctly"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_photos_ok(self):
# Flat file tests.
response = self.client.get("/photos/2018/PhilipSargent/corin.jpg") # exists
if response.status_code != HTTPStatus.OK:
self.assertEqual(response.status_code, HTTPStatus.FOUND)
if response.status_code != HTTPStatus.FOUND:
self.assertEqual(response.status_code, HTTPStatus.OK)
self.assertEqual(len(response.content), 67487) # need to check it is not just an error page
def test_page_photos_not_ok(self):
# Flat file tests.
response = self.client.get("/photos/2018/PhilipSargent/_corin.jpeg") # does not exist
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
content = response.content.decode()
ph = r"<title>Page not found '2018/PhilipSargent/_corin.jpeg'</title>"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_photos_dir(self):
# Flat file tests.
response = self.client.get("/photos/2018/PhilipSargent/")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"Directory not displayed"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_survey_scans_empty(self):
# this gets an empty page as the database has not been loaded
response = self.client.get("/survey_scans")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"contains the scanned original in-cave survey notes and sketches"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_dwgdataraw_empty(self):
# this gets an empty page as the database has not been loaded
response = self.client.get("/dwgdataraw/")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"<h1>Directory not found"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_dwgallfiles_empty(self):
# this gets an empty page as the database has not been loaded
response = self.client.get("/dwgfiles")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response_dwgallfiles.html', 'w') as f:
# f.write(content)
for ph in [
r"All Tunnel and Therion files",
r"<th>Wallets</th><th>Scan files when the drawing was created</th><th>Frames</th></tr>",
]:
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_page_slash_empty(self):
# trailing slash where there should not be one
response = self.client.get("/expedition/1979/")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"<h1>Directory not found"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_not_found_survexfile_cave(self):
response = self.client.get("/survexfile/not_a_real_cave_number")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"Cave Identifier not found in database"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_dataissues(self):
# Needs another test with test data
response = self.client.get("/dataissues")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"as well as these import/parsing issues"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_therionissues(self):
# Needs another test with test data
response = self.client.get("/therionissues")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"! Un-parsed image filename"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_surveximport(self):
# Needs another test with test data
response = self.client.get("/surveximport")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
# with open('_test_response.html', 'w') as f:
# f.write(content)
ph = r"The number at the left-hand margin is the depth"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_survexdebug(self):
# Needs another test with test data
response = self.client.get("/survexdebug")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = r"Running list of warnings during import"
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
def test_stations(self):
# Needs another test with test data
response = self.client.get("/stations")
self.assertEqual(response.status_code, HTTPStatus.OK)
content = response.content.decode()
ph = "<tr><th>Survex Station</th><th>x "
phmatch = re.search(ph, content)
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
# ADD TESTS when we are redirecting /expofiles/ to get the actual files (see the sketch at the end of this file) using e.g.
# import requests
# page = requests.get("http://dataquestio.github.io/web-scraping-pages/simple.html")
# these need a fixture to load the database before they will pass
# we also need tests for invalid queries to check that error pages are right
# def test_page_survey_scans_khplan2_png(self):
# # this has an error as the database has not been loaded yet in the tests
# response = self.client.get('/survey_scans/smkhs/khplan2.png')
# if response.status_code != HTTPStatus.OK:
# self.assertEqual(response.status_code, HTTPStatus.FOUND)
# if response.status_code != HTTPStatus.FOUND:
# self.assertEqual(response.status_code, HTTPStatus.OK)
# self.assertEqual(len(response.content), 823304) # fails, but is working manually!
# def test_page_dwgdataraw_107sketch_xml(self):
# # this has an error as the database has not been loaded yet in the tests
# response = self.client.get('/dwgdataraw/107/107sketch-v2.xml')
# if response.status_code != HTTPStatus.OK:
# self.assertEqual(response.status_code, HTTPStatus.FOUND)
# if response.status_code != HTTPStatus.FOUND:
# self.assertEqual(response.status_code, HTTPStatus.OK)
# content = response.content.decode()
# for ph in [ r'tunneldate="2014-08-21 11:34:00"',
# r'<sketchsubset subname="Caves of the Loser Plateau"/>',
# r'sfsketch="ollyjen107drawings',
# r'sfsketch="surveyscans/2014/2014#01',
# r'aa-js-plan.png"' ]:
# phmatch = re.search(ph, content)
# self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph +"'")
# database not loaded yet:
# response = self.client.get('/survey_scans/1991surveybook/page0002.png')
# response = self.client.get('/survey_scans/1991surveybook/')
# content = response.content.decode()
# print(content)
# png93 = re.search(r'/page0093.png">page0093.png</a></td>', content)
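# A hedged sketch (not in the original file) of the /expofiles/ redirect test promised
# above: the remote file-delivery URL is entirely hypothetical, and
# fetch_redirect_response=False stops the test client trying to fetch the external target.
from django.test import TestCase


class ExpofilesRedirectSketch(TestCase):
    def test_expofiles_redirects_offsite(self):
        response = self.client.get("/expofiles/documents/boc.pdf")
        self.assertRedirects(
            response,
            "https://files.example.org/expofiles/documents/boc.pdf",  # hypothetical remote host
            fetch_redirect_response=False,  # do not GET the external URL during the test
        )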


@@ -1,48 +0,0 @@
# Copilot: Sure, based on your `urls.py` file, here's how you might write end-to-end tests for the `survey_scans` endpoints:
from django.test import Client, TestCase
from django.urls import reverse
# Copilot does not know that there is no data in the database, so the tests need to pre-populate the
# database with objects before 3 of these tests will work. These 3 are now commented out, pending proper integration.
# Philip, 2023-12-31
class TestSurveyScansView(TestCase):
def setUp(self):
self.client = Client()
def test_allscans_view(self):
response = self.client.get(reverse('allscans'))
self.assertEqual(response.status_code, 200)
# Add more assertions here based on the expected HTML content
# def test_cavewallets_view(self):
# response = self.client.get(reverse('cavewallets'))
# self.assertEqual(response.status_code, 200)
# # Add more assertions here based on the expected HTML content
# def test_scansingle_view(self):
# # You'll need to replace 'scan_id' with an actual scan ID from your database
# response = self.client.get(reverse('scansingle', kwargs={'scan_id': 1}))
# self.assertEqual(response.status_code, 200)
# # Add more assertions here based on the expected HTML content
# def test_walletslistperson_view(self):
# # You'll need to replace 'person_id' with an actual person ID from your database
# response = self.client.get(reverse('walletslistperson', kwargs={'person_id': 1}))
# self.assertEqual(response.status_code, 200)
# # Add more assertions here based on the expected HTML content
def test_walletslistyear_view(self):
# You'll need to replace 'year' with an actual year from your database
response = self.client.get(reverse('walletslistyear', kwargs={'year': 2020}))
self.assertEqual(response.status_code, 200)
# Add more assertions here based on the expected HTML content
"""
In this example, replace `'allscans'`, `'cavewallets'`, `'scansingle'`, `'walletslistperson'`, and `'walletslistyear'` with the actual names of your endpoints in your `urls.py` file. The `reverse` function is used to avoid hardcoding the URL. Instead, it generates the URL by looking up the URL pattern by name.
For each of your views, you'll want to create a new test method (like `test_allscans_view` above) where you use the client to make a request to your view and then assert that the response has the expected status code and content.
If you can provide more details about what each view is supposed to do, I can help you write more specific tests. For example, if your view is supposed to render a form, you might want to check that the form is present in the response content. If your view is supposed to handle a form submission, you might want to test that submitting the form results in the expected changes in the database. If your view requires a logged-in user, you might want to test both the logged-in and logged-out cases. And so on. Happy testing! 🚀
"""


@@ -1,148 +1,158 @@
from troggle.core.models import *
from django.contrib import admin
from django.core import serializers
from django.forms import ModelForm
import django.forms as forms
from django.http import HttpResponse
from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance
from troggle.core.models.logbooks import QM, LogbookEntry, PersonLogEntry
from troggle.core.models.survex import (
DrawingFile,
SingleScan,
SurvexBlock,
SurvexFile,
SurvexPersonRole,
SurvexStation,
)
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
from troggle.core.models.wallets import Wallet
"""This code significantly adds to the capabilities of the Django Management control panel for Troggle data.
In particular, it enables JSON export of any data with 'export_as_json'
and configures the search fields to be used within the control panel.
What is the search path for the css and js inclusions in the Media subclasses though?!
The page looks for /static/jquery/jquery.min.js
"""
from django.core import serializers
from troggle.core.views_other import downloadLogbook
#from troggle.reversion.admin import VersionAdmin #django-reversion version control
class TroggleModelAdmin(admin.ModelAdmin):
def save_model(self, request, obj, form, change):
"""overriding admin save to fill the new_since parsing_field
new_since_parsing is not currently used in troggle. It is a fossil."""
# obj.new_since_parsing = True
"""overriding admin save to fill the new_since parsing_field"""
obj.new_since_parsing=True
obj.save()
class Media:
js = ("jquery/jquery.min.js", "js/QM_helper.js") # not currently available to troggle, see media/js/README
js = ('jquery/jquery.min.js','js/QM_helper.js')
# class RoleInline(admin.TabularInline):
# model = SurvexPersonRole
# extra = 4
class RoleInline(admin.TabularInline):
model = SurvexPersonRole
extra = 4
class PersonLogEntryInline(admin.TabularInline):
model = PersonLogEntry
raw_id_fields = ("personexpedition",)
class SurvexBlockAdmin(TroggleModelAdmin):
inlines = (RoleInline,)
class ScannedImageInline(admin.TabularInline):
model = ScannedImage
extra = 4
class OtherCaveInline(admin.TabularInline):
model = OtherCaveName
extra = 1
class SurveyAdmin(TroggleModelAdmin):
inlines = (ScannedImageInline,)
search_fields = ('expedition__year','wallet_number')
class QMsFoundInline(admin.TabularInline):
model=QM
fk_name='found_by'
fields=('number','grade','location_description','comment')#need to add foreignkey to cave part
extra=1
class PhotoInline(admin.TabularInline):
model = DPhoto
exclude = ['is_mugshot' ]
extra = 1
class PersonTripInline(admin.TabularInline):
model = PersonTrip
raw_id_fields = ('personexpedition',)
extra = 1
#class LogbookEntryAdmin(VersionAdmin):
class LogbookEntryAdmin(TroggleModelAdmin):
prepopulated_fields = {"slug": ("title",)}
search_fields = ("title", "expedition__year")
date_hierarchy = "date"
#inlines = (PersonLogEntryInline, QMsFoundInline)
prepopulated_fields = {'slug':("title",)}
search_fields = ('title','expedition__year')
date_heirarchy = ('date')
inlines = (PersonTripInline, PhotoInline, QMsFoundInline)
class Media:
css = {"all": ("css/troggleadmin.css",)} # this does not exist
actions = ("export_logbook_entries_as_html", "export_logbook_entries_as_txt")
def export_logbook_entries_as_html(self, modeladmin, request, queryset):
response = downloadLogbook(request=request, queryset=queryset, extension="html") # fails, no queryset
css = {
"all": ("css/troggleadmin.css",)
}
actions=('export_logbook_entries_as_html','export_logbook_entries_as_txt')
def export_logbook_entries_as_html(modeladmin, request, queryset):
response=downloadLogbook(request=request, queryset=queryset, extension='html')
return response
def export_logbook_entries_as_txt(modeladmin, request, queryset):
response=downloadLogbook(request=request, queryset=queryset, extension='txt')
return response
def export_logbook_entries_as_txt(self, modeladmin, request, queryset):
response = downloadLogbook(request=request, queryset=queryset, extension="txt") # fails, no queryset
return response
class PersonExpeditionInline(admin.TabularInline):
model = PersonExpedition
extra = 1
class PersonAdmin(TroggleModelAdmin):
search_fields = ("first_name", "last_name", "slug")
list_display = ["slug", "first_name", "last_name" ]
search_fields = ('first_name','last_name')
inlines = (PersonExpeditionInline,)
class QMAdmin(TroggleModelAdmin):
search_fields = ("number", "cave", "expoyear")
list_display = ("__str__", "grade")
list_display_links = ("__str__",)
list_filter = ('grade', 'cave', 'expoyear')
search_fields = ('found_by__cave__kataster_number','number','found_by__date')
list_display = ('__unicode__','grade','found_by','ticked_off_by')
list_display_links = ('__unicode__',)
list_editable = ('found_by','ticked_off_by','grade')
list_per_page = 20
raw_id_fields=('found_by','ticked_off_by')
class PersonExpeditionAdmin(TroggleModelAdmin):
search_fields = ("person__first_name", "person__slug", "expedition__year")
search_fields = ('person__first_name','expedition__year')
class CaveAdmin(TroggleModelAdmin):
search_fields = ("areacode", "official_name", "kataster_number", "unofficial_number")
list_display = ["areacode", "official_name"]
search_fields = ('official_name','kataster_number','unofficial_number')
inlines = (OtherCaveInline,)
extra = 4
class EntranceAdmin(TroggleModelAdmin):
search_fields = ("caveandentrance__cave__kataster_number",)
class SurvexStationAdmin(TroggleModelAdmin):
search_fields = ("name",)
class SurvexFileAdmin(TroggleModelAdmin):
search_fields = ("path",)
# class SurvexBlockAdmin(TroggleModelAdmin):
# inlines = (RoleInline,)
class SurvexBlockAdmin(TroggleModelAdmin):
search_fields = ("name", "title")
list_display = ["survexfile", "name", "title", "scanswallet", "ref_text"]
class DrawingFileAdmin(TroggleModelAdmin):
search_fields = ("dwgname",)
class WalletAdmin(TroggleModelAdmin):
search_fields = ("fpath", "walletname", "walletyear")
search_fields = ('caveandentrance__cave__kataster_number',)
admin.site.register(DPhoto)
admin.site.register(Cave, CaveAdmin)
admin.site.register(Area)
#admin.site.register(OtherCaveName)
admin.site.register(CaveAndEntrance)
admin.site.register(NewSubCave)
admin.site.register(CaveDescription)
admin.site.register(Entrance, EntranceAdmin)
admin.site.register(DrawingFile, DrawingFileAdmin)
admin.site.register(Expedition)
admin.site.register(Person, PersonAdmin)
admin.site.register(SurvexPersonRole)
admin.site.register(SurvexFile, SurvexFileAdmin)
admin.site.register(SurvexBlock, SurvexBlockAdmin)
admin.site.register(SurvexStation, SurvexStationAdmin)
admin.site.register(PersonExpedition, PersonExpeditionAdmin)
admin.site.register(Expedition)
admin.site.register(Person,PersonAdmin)
admin.site.register(SurvexPersonRole)
admin.site.register(PersonExpedition,PersonExpeditionAdmin)
admin.site.register(LogbookEntry, LogbookEntryAdmin)
#admin.site.register(PersonTrip)
admin.site.register(QM, QMAdmin)
admin.site.register(Wallet, WalletAdmin)
admin.site.register(SingleScan)
admin.site.register(DataIssue)
admin.site.register(Survey, SurveyAdmin)
admin.site.register(ScannedImage)
admin.site.register(SurvexStation)
admin.site.register(SurvexScansFolder)
admin.site.register(SurvexScanSingle)
def export_as_json(modeladmin, request, queryset):
response = HttpResponse(content_type="text/json")
response["Content-Disposition"] = "attachment; filename=troggle_output.json"
response = HttpResponse(mimetype="text/json")
response['Content-Disposition'] = 'attachment; filename=troggle_output.json'
serializers.serialize("json", queryset, stream=response)
return response
def export_as_xml(modeladmin, request, queryset):
response = HttpResponse(content_type="text/xml")
response["Content-Disposition"] = "attachment; filename=troggle_output.xml"
response = HttpResponse(mimetype="text/xml")
response['Content-Disposition'] = 'attachment; filename=troggle_output.xml'
serializers.serialize("xml", queryset, stream=response)
return response
admin.site.add_action(export_as_xml)
admin.site.add_action(export_as_json)
#admin.site.add_action(export_as_xml)
#admin.site.add_action(export_as_json)


@@ -1,22 +1,5 @@
from django.conf import settings
# from troggle.core.models.troggle import Expedition
"""This is the only troggle-specific 'context processor' that troggle uses
in the processing of Django templates
This seems to mean that every page produced has bundled in its context the complete 'settings' and
the expedition class object, so all templates can do queries on Expedition.
https://betterprogramming.pub/django-quick-tips-context-processors-da74f887f1fc
If it is commented out, the logbookentry page goes crazy and it screws up all the site_media resolutions for the CSS files!
Seems to be necessary to make {{settings.MEDIA_URL}} work, which is obvious in retrospect.
It is VITAL that no database operations are done in any context processor, see
https://adamj.eu/tech/2023/03/23/django-context-processors-database-queries/
"""
from troggle.core.models import Expedition
def troggle_context(request):
return {"settings": settings}
# return {"settings": settings, "Expedition": Expedition}
return { 'settings':settings, 'Expedition':Expedition }
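# A hedged sketch (not in the original file) of the settings entry that makes this
# context processor run for every template render; the dotted path
# "core.context.troggle_context" is an assumption about where this module lives.
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "core.context.troggle_context",  # assumed dotted path to troggle_context above
            ],
        },
    },
]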
