forked from expo/troggle
Compare commits
1915 Commits
django-upg
...
front-end-
| Author | SHA1 | Date | |
|---|---|---|---|
| a0f85454f8 | |||
| c76c09fced | |||
| efa40b47ca | |||
| 6bca75b877 | |||
| 585eb534a9 | |||
| 7fc058b1da | |||
| 8d9b320d89 | |||
| 89c1c65340 | |||
| 5f07f234ef | |||
| af6081e406 | |||
| 1165b10fe4 | |||
| 4a7c14f8dc | |||
| 955fe9661a | |||
| d93133c338 | |||
|
|
a86f251423 | ||
| 6482aa697f | |||
| 0706d5dc77 | |||
| cdac10fdcf | |||
| 1f656b2101 | |||
| fab7adf079 | |||
| 72a6b091e6 | |||
| af552a3d62 | |||
| 5ce21564fc | |||
| 7d4ca5dae2 | |||
| 3c78ab79ca | |||
| 748cb91a20 | |||
|
|
8463f8947e | ||
|
|
380565c6f5 | ||
| 31c815eeb0 | |||
| c31615b1ff | |||
| 3bd7684d4b | |||
| 64fa602a07 | |||
| 9f2b77bf1d | |||
| 9473b22bd9 | |||
| 3ea014ec57 | |||
| 3ffb07371d | |||
| f76c22e843 | |||
| 33d279b95a | |||
| f8faf6c2ba | |||
| 6e452b2ee9 | |||
| dc06744444 | |||
| 26e96839e4 | |||
| 4e4824546a | |||
| 387bdf4f91 | |||
| b650095588 | |||
| 761a71930b | |||
| 127002d736 | |||
| a062e9ea44 | |||
|
|
380fe8cc32 | ||
|
|
bdb5e3208b | ||
|
|
c119c99308 | ||
|
|
2af4f4b10e | ||
|
|
a5968121a0 | ||
| 5eb6ef4d31 | |||
|
|
5f6359694d | ||
|
|
90a6eaa26d | ||
|
|
67361fa66c | ||
|
|
b27852c1f3 | ||
|
|
8ff438942d | ||
|
|
b3e2f34960 | ||
|
|
d0ccc46260 | ||
|
|
5a7f197bcb | ||
|
|
389fb6c409 | ||
|
|
4d48dd4386 | ||
|
|
086537cb56 | ||
|
|
beab42323b | ||
|
|
3d43c0ec12 | ||
| b1c5b03104 | |||
|
|
68724a0504 | ||
|
|
3359889d97 | ||
| 2f24a7f7bb | |||
|
|
66ee96cd63 | ||
|
|
ad37a82713 | ||
|
|
ffed6e3ba6 | ||
|
|
7b8e93cdb5 | ||
|
|
4158f5ba63 | ||
|
|
a70cf6cad3 | ||
|
|
63599cb27a | ||
|
|
a0fcb78e95 | ||
|
|
745ccd7f88 | ||
|
|
2b30b7b624 | ||
|
|
410a285085 | ||
|
|
d2bcef6e36 | ||
|
|
067fcd9892 | ||
|
|
c4095eb336 | ||
|
|
020a083afa | ||
|
|
fd9f21de2e | ||
|
|
7268eb4f30 | ||
|
|
910e0e3123 | ||
|
|
7db17154ad | ||
|
|
ebcc0db665 | ||
|
|
73675ca1b9 | ||
|
|
973d05f9fb | ||
|
|
52299fb6fd | ||
|
|
01964e7cf6 | ||
|
|
929f6732d0 | ||
|
|
b91223da66 | ||
| a04d9ef056 | |||
|
|
e4fe5eaf5d | ||
|
|
c7494fe379 | ||
|
|
e4112431be | ||
|
|
65eec8e91d | ||
|
|
0b0f2f07e1 | ||
|
|
ee34f87563 | ||
|
|
e42f0569fd | ||
|
|
d242a8bb1d | ||
|
|
0cd32d6a15 | ||
|
|
fc9977952e | ||
|
|
56e9273047 | ||
|
|
12cee59605 | ||
|
|
90862e9a89 | ||
|
|
ec3ebe8499 | ||
|
|
1ef636ca6f | ||
|
|
174d7bfe13 | ||
|
|
4e34ae0530 | ||
|
|
f477507d27 | ||
| 969ed6cce5 | |||
| 2e6b8d44f1 | |||
| 93201ab458 | |||
|
|
43724f1cf6 | ||
|
|
7d140af87a | ||
|
|
77ed1b444c | ||
|
|
f7fca58c57 | ||
|
|
ea7c29a54e | ||
|
|
0052385333 | ||
| 7f0e7d8fa3 | |||
|
|
ef467ad481 | ||
|
|
175307773b | ||
|
|
8e982bd6be | ||
|
|
783154d24a | ||
|
|
40126f6e2f | ||
|
|
1ccd9c418b | ||
|
|
896af43994 | ||
|
|
2ee63a9804 | ||
|
|
c7b3e8aff0 | ||
|
|
c50236575f | ||
|
|
e8d1265ee4 | ||
|
|
10ff8a5aab | ||
|
|
941100a8a3 | ||
|
|
374caa0d9a | ||
|
|
82aaa2b523 | ||
|
|
31a60ce85c | ||
|
|
0a3a6934c4 | ||
|
|
ed9f7b03bf | ||
|
|
fc8584775e | ||
|
|
b7ec4f2efe | ||
|
|
0b566575f3 | ||
|
|
73af227fb3 | ||
| 8aec40f951 | |||
| c5a9bdc724 | |||
| 30ef427b90 | |||
| 275adc8efa | |||
| 02e46ed723 | |||
| 94b8b357fb | |||
| 2ed66fe3d0 | |||
| 116cfc7c6e | |||
| f15555a3bd | |||
| dcfff1ede7 | |||
| 0f76422d85 | |||
| d3d983eedb | |||
| 2541766dd5 | |||
| e002a634ff | |||
| 489dd157b6 | |||
| 0a76acd664 | |||
| 4d8cb82ef9 | |||
| 9ffe3f690b | |||
| e7d9e9402a | |||
| 3ef5c1aa0b | |||
| 9a28e93ac6 | |||
| 5738da8566 | |||
| bbc13c4eb9 | |||
| 6ca5d5bfa8 | |||
| 2091eb8e8b | |||
| 860ce6b065 | |||
| 8b5887a87a | |||
| 770edd6391 | |||
| 562ef48f19 | |||
| 70d6d9eb77 | |||
| 2332bfdc57 | |||
| 1db7c5ffea | |||
| 838b358a4a | |||
| dd32114698 | |||
| 5f46d8fdc5 | |||
| ad48851118 | |||
| ead3f8dc8b | |||
| 6b3cb9f02e | |||
| 435f3cf00b | |||
| 7caf1690c6 | |||
| 4ae79a642e | |||
| 9489b9209d | |||
| 5a90e7b727 | |||
| d64948749e | |||
| 7345e3a328 | |||
| de54576d11 | |||
| 12c7b9b5d1 | |||
| c40b56844f | |||
| 09f5741b71 | |||
| 17fa306b09 | |||
| 3011e7b114 | |||
| 98066591da | |||
| 56c78611fb | |||
| a7660c8ec7 | |||
| 934a19b879 | |||
| c247636c4c | |||
| 85fab88ac9 | |||
| b428a87f1a | |||
| d0a05af9c6 | |||
| 83dc9e6c09 | |||
| 7aeffff00c | |||
| 94dd0fe1fd | |||
| 45fcaac47d | |||
| 35e9eb558d | |||
| b88b142332 | |||
| 870b290726 | |||
| 7b10aa0bdf | |||
| 4d66548920 | |||
| 7c923842ca | |||
| 02970512c1 | |||
| 2c36666d41 | |||
| b3d9eeecd2 | |||
| 301fa1fce1 | |||
| e71939fe00 | |||
| 859a28b4ee | |||
| 432759bfc1 | |||
| 94e145adce | |||
| ccfc44a423 | |||
| 8f3b329552 | |||
| 63dde36389 | |||
| 8fc25de794 | |||
| d5887e8f99 | |||
| b086348d38 | |||
| 4a2106183a | |||
| 06ac840dd0 | |||
| 6ab7a340e2 | |||
| 1cb81cbb09 | |||
| e8c824a396 | |||
| dc03016dbe | |||
| 5067ef2c8c | |||
| 5c3927c25d | |||
| 154722f765 | |||
| 6387de038b | |||
| 6de4fa66a2 | |||
| 394d94d5d6 | |||
| a3fc9a17ed | |||
| 3d38611e4f | |||
| d1dac92034 | |||
| bc9306fc1b | |||
| 4358e6440a | |||
| 6b5f048b4b | |||
| 709b720be9 | |||
| 19d9942676 | |||
| 0740f9ea5a | |||
| 28f87e82e9 | |||
| 175e71facf | |||
| c1231ff451 | |||
| e70d51e52b | |||
| 7f5bd2c17e | |||
| f7d91b5929 | |||
| e94a24bbd4 | |||
| e0a198bac5 | |||
| 25e00e02b7 | |||
| 13f0c6f988 | |||
| 9abfcdd091 | |||
| 18541de371 | |||
| 578f02db2d | |||
| 3b9695b4f3 | |||
| b55bfb8868 | |||
| 3a348d5d1a | |||
| 5ed91e1c15 | |||
| 8aa5a601e7 | |||
| 9d1c0ac395 | |||
| c7d88077ec | |||
| 5798e8dcd5 | |||
| 5ae37eef82 | |||
| 587ccff501 | |||
| 8e51f3aff8 | |||
| e38d29f33d | |||
| 3c7661836c | |||
| 1f5b56a593 | |||
| 57930b7aa5 | |||
| 76ed801077 | |||
| 241dde3a84 | |||
| b98d7d246c | |||
| 3aca0d0c76 | |||
| e35fccea5d | |||
| 7808005498 | |||
| d06dd3d166 | |||
| a12f666e96 | |||
| ebe86d73d4 | |||
| b29ff61871 | |||
| 58f7cf72d4 | |||
| 3742e0f367 | |||
| 7d98980121 | |||
| 89d0e1723e | |||
| 226cbb6b5c | |||
| a1c34f488d | |||
| 79bccade02 | |||
| 5639435058 | |||
| 432be660a4 | |||
| f73640522b | |||
| d9d4181dda | |||
| 071f68080c | |||
| 03fa5f5548 | |||
| 0d9d307490 | |||
| e6fd1f0ec5 | |||
| 2704fc42d4 | |||
| d9a4069662 | |||
| 9e71be8169 | |||
| db0504057b | |||
| e4c804b305 | |||
| e01bd39609 | |||
| 6565b3f9c4 | |||
| 2fee216e80 | |||
| 75834902f2 | |||
| 719e0fe918 | |||
| a321625f35 | |||
| 0c4ce6dc3c | |||
| 733765802e | |||
| 1be3a3892c | |||
| 89b0c0862e | |||
| ba2ae6cd82 | |||
| 0f8fe0e290 | |||
| be9fcc522a | |||
| 939d3970aa | |||
| e5a9330a91 | |||
| 1b70ccea3e | |||
| 822965ebe5 | |||
| 7738b2836e | |||
| 1ab7528f7b | |||
| de74cd4867 | |||
| 9dc1853e10 | |||
| 78740a1fc0 | |||
| b131e567b5 | |||
| 5bbb363f12 | |||
| 0e29cdd48c | |||
| 8374500da5 | |||
| 194470841e | |||
| a71c616afd | |||
| 9c39c9dcff | |||
| a72c2bd96a | |||
| f23764c486 | |||
| a7a126dd55 | |||
| d06af5b0ec | |||
| ec040824f6 | |||
| 517da57a0c | |||
| 5ee3ebad3e | |||
| 05df2e084c | |||
| dc3a61addd | |||
| 9169abdb05 | |||
| 81e95291e8 | |||
| f24f283a07 | |||
| bb14c94ab1 | |||
| 7e9bb73777 | |||
| 43a98b4421 | |||
| f1d5df9933 | |||
| 8ce86aabee | |||
| d1b94763b4 | |||
| 73b710d53f | |||
| 0a4471e039 | |||
| f80e4efed8 | |||
| 5e9fd7fd77 | |||
| 5cc6c26606 | |||
| cb50528e2d | |||
| 6dd8e5a75c | |||
| 0e47909704 | |||
| cabcada0b8 | |||
| 17b2b7b89c | |||
| 1eab261b30 | |||
|
|
b06d1dae42 | ||
|
|
45a640dfe9 | ||
|
|
ca9a3b5c7b | ||
|
|
0b32d51ba3 | ||
|
|
1a9e17a7e8 | ||
|
|
995df16bec | ||
|
|
259f85742a | ||
|
|
a795707552 | ||
|
|
bcb61f9cd9 | ||
|
|
4260b0f092 | ||
|
|
4514eda311 | ||
|
|
725c5ad0cd | ||
|
|
0b89979418 | ||
|
|
96281c33e8 | ||
|
|
43bf2620f1 | ||
| 5b3f91d3e5 | |||
| 73e57a19df | |||
| 275ce87e30 | |||
|
|
d9ed90b729 | ||
|
|
d82c521f4f | ||
|
|
2cafa32c7e | ||
|
|
830150ade6 | ||
|
|
55ac98ebe1 | ||
|
|
454c2c2830 | ||
|
|
2fa298cae9 | ||
|
|
3b106a3803 | ||
|
|
da09bc7968 | ||
|
|
e0ac09d5ec | ||
|
|
45f06293f5 | ||
|
|
004a3e2db1 | ||
|
|
b81b4ef2ef | ||
|
|
52c0ab213a | ||
|
|
c3bfd50cf1 | ||
|
|
39683cc413 | ||
|
|
47e2c6e214 | ||
|
|
ff8eaa241e | ||
|
|
52a035e4cf | ||
|
|
8c8b6966a7 | ||
|
|
861980a8e9 | ||
|
|
3c31c333f2 | ||
|
|
235bd86af3 | ||
|
|
e6ca20b1ed | ||
|
|
b470ab66e2 | ||
|
|
e9790e70d6 | ||
|
|
55bc042798 | ||
|
|
4e9680a3ad | ||
|
|
bec262bb2d | ||
|
|
74b3147076 | ||
|
|
f51d1e114e | ||
|
|
c76cd38d76 | ||
|
|
b4c4f2aefc | ||
|
|
d16226c879 | ||
|
|
1f70b77735 | ||
|
|
4a34986598 | ||
|
|
70709c505c | ||
|
|
8d08a67302 | ||
|
|
29c5c82337 | ||
|
|
7e47fe1f30 | ||
|
|
9e5bdace2c | ||
|
|
a6e60c0bf7 | ||
|
|
c8163ab0cd | ||
|
|
4495be2083 | ||
|
|
fe28d9ba39 | ||
|
|
b60e1f2493 | ||
|
|
78a62a1551 | ||
|
|
f0195682f2 | ||
|
|
2f64e2d4c1 | ||
|
|
829e18baef | ||
|
|
760dbc588a | ||
|
|
f3ecdd6d87 | ||
|
|
6e3fdd35c1 | ||
|
|
c3672b476c | ||
|
|
9d56e467cd | ||
|
|
97b0ce8c96 | ||
|
|
aa20692ad6 | ||
|
|
af88cb4d0f | ||
|
|
b4cf2bac95 | ||
|
|
5c0835e076 | ||
|
|
e2b280ccdc | ||
|
|
1971f51b52 | ||
|
|
11b1d41a55 | ||
|
|
86ea33bbce | ||
|
|
71bd07e70e | ||
|
|
94b49adc4e | ||
|
|
36995ec051 | ||
|
|
61f9863a06 | ||
|
|
47878d264b | ||
|
|
0611c3f00f | ||
|
|
13b57d2bb6 | ||
|
|
2648bada30 | ||
|
|
d2c6c4d7fb | ||
|
|
b5f8c5294e | ||
|
|
9cd009f8ba | ||
|
|
68865a80ef | ||
|
|
ddfc677a1e | ||
|
|
0ab3a4ff44 | ||
|
|
f12d0bd580 | ||
|
|
e28f04a51c | ||
|
|
9410dda69e | ||
|
|
04696b7b80 | ||
|
|
a41cd8eb24 | ||
|
|
29dc99c21f | ||
|
|
dfc903208e | ||
|
|
beecb4b0ac | ||
|
|
182df351b9 | ||
|
|
fd57071411 | ||
|
|
785845598f | ||
|
|
6452a7beed | ||
|
|
5c667c1826 | ||
|
|
dc2b8ad431 | ||
|
|
3af1112847 | ||
|
|
0853bbdd19 | ||
|
|
6daa96b69e | ||
|
|
9aaadafc13 | ||
|
|
6c384492be | ||
|
|
ab184bccf3 | ||
|
|
79672dd4b3 | ||
|
|
760abe1a9e | ||
|
|
8f03e590cc | ||
|
|
57c4732566 | ||
|
|
17bbbd6eab | ||
|
|
7e9fd0f353 | ||
|
|
8ca50d8fd4 | ||
|
|
43b6b590e8 | ||
|
|
e98ffced98 | ||
|
|
d37bacb91a | ||
|
|
96b2c6c9ed | ||
|
|
b8cd8c4785 | ||
|
|
a30a2b9ef9 | ||
|
|
e195497829 | ||
|
|
03cda8a897 | ||
|
|
13e3da8d26 | ||
|
|
ee7e3b6d41 | ||
|
|
d05294adaf | ||
|
|
f1aa6a9794 | ||
|
|
41c68aef26 | ||
|
|
e94dc6eb6f | ||
|
|
aaba4fd2a9 | ||
|
|
1a49e5347f | ||
|
|
51f5261bfc | ||
|
|
b2d8b21822 | ||
|
|
b2a26be8c8 | ||
|
|
dcc36f3286 | ||
|
|
3c13f62bd1 | ||
|
|
284e044a03 | ||
|
|
b093d00ff4 | ||
|
|
8e93680146 | ||
|
|
8fa25c815a | ||
|
|
edfba8d355 | ||
|
|
1eadc931cb | ||
| f3002a694d | |||
|
|
5149cf1ece | ||
|
|
1bbfd1e517 | ||
|
|
e35616a611 | ||
|
|
a0a1927437 | ||
|
|
3607b9f140 | ||
|
|
25c425cff8 | ||
|
|
7f335e082c | ||
|
|
9220dbf2e6 | ||
|
|
f33c6cc057 | ||
|
|
186eb20fb3 | ||
|
|
ac22a984ee | ||
|
|
61c04a1fb9 | ||
|
|
0fd3cf43e8 | ||
|
|
c1aaf07885 | ||
|
|
f491264b9e | ||
|
|
eed35d01a8 | ||
|
|
df42b1ccb3 | ||
|
|
129ea3cc5b | ||
|
|
fa6758b9a0 | ||
|
|
5da1fce41f | ||
|
|
a2a5e9200e | ||
|
|
94252a94fe | ||
|
|
c1ba6a39a5 | ||
|
|
724234949f | ||
|
|
bc3da1182b | ||
|
|
7872e98cb2 | ||
|
|
c29e240c2b | ||
|
|
93622b111f | ||
|
|
9a461c31a8 | ||
|
|
fea69c0371 | ||
|
|
dd0fcc28dd | ||
|
|
3d7cb78e47 | ||
|
|
1468c49723 | ||
|
|
c39fb30707 | ||
|
|
91568b7151 | ||
|
|
7090bab632 | ||
|
|
039792e320 | ||
|
|
cddcb0e321 | ||
|
|
f9a7ba7927 | ||
|
|
7785843597 | ||
|
|
c5b9cd57f2 | ||
|
|
3577d8cb68 | ||
|
|
9c090f0383 | ||
|
|
476ee482fa | ||
|
|
25d5361da4 | ||
|
|
de7388bdc5 | ||
|
|
5007393536 | ||
|
|
a1e538e93a | ||
|
|
7288bd9da3 | ||
|
|
f194126fb5 | ||
|
|
3b1b96c4c8 | ||
|
|
31b912f3ca | ||
|
|
38eb65ac0e | ||
|
|
796dbf1438 | ||
|
|
f46942fadf | ||
|
|
3a52d790f0 | ||
|
|
ce7dfd6510 | ||
|
|
312ecdcfe1 | ||
|
|
c747664a26 | ||
|
|
931c33cfdb | ||
|
|
f895a7e44c | ||
|
|
5161fce32e | ||
|
|
8245ee103e | ||
|
|
3e869ae76a | ||
|
|
dd0a448f90 | ||
|
|
3ab8a5d1ad | ||
|
|
83bbda7c40 | ||
|
|
d058942084 | ||
|
|
0a158db97d | ||
|
|
de37eea167 | ||
|
|
a215ebd62c | ||
|
|
549c1649b4 | ||
|
|
2a7f1506c9 | ||
|
|
9a395eafef | ||
|
|
2e14be61a2 | ||
|
|
6883ff49a0 | ||
|
|
d9d75b3aee | ||
|
|
1395ac76e9 | ||
|
|
b79eb9a969 | ||
|
|
23462df49c | ||
|
|
3db9c16082 | ||
|
|
6ec7071ffc | ||
|
|
4efeefe6c9 | ||
|
|
5b7c105c5f | ||
|
|
dd00ff69aa | ||
|
|
ee9b808461 | ||
|
|
8484f26ee9 | ||
|
|
deec330990 | ||
|
|
145540caf5 | ||
|
|
b7035f1574 | ||
|
|
6efbec7750 | ||
|
|
037a50cf47 | ||
|
|
bb65ffaee6 | ||
|
|
b20e6c5a58 | ||
|
|
7c82c2d97c | ||
|
|
810e058c07 | ||
|
|
8aab01c126 | ||
|
|
73e9ae54fa | ||
|
|
d4c213e0b3 | ||
|
|
3748840e23 | ||
|
|
02cf9b1c22 | ||
|
|
ef27901125 | ||
|
|
a1560c60c6 | ||
|
|
b8355cbf8d | ||
|
|
86a18c3ebc | ||
|
|
5582d545a1 | ||
|
|
c416de6e1e | ||
|
|
91c9cf0c31 | ||
|
|
278a84a485 | ||
|
|
8d31ab763d | ||
|
|
a4b1c7b142 | ||
|
|
776f9f7833 | ||
|
|
9803ebe2e8 | ||
|
|
ca5586fc42 | ||
|
|
d3572e18c3 | ||
|
|
7dc3cc3b91 | ||
|
|
ee4237b14c | ||
|
|
848043f7f4 | ||
|
|
da4d7d6d5e | ||
|
|
0ea3ed1ef2 | ||
|
|
2bd617b543 | ||
|
|
96101252bd | ||
|
|
9d4a97fc19 | ||
|
|
c9a33a4010 | ||
|
|
87fd260051 | ||
|
|
5d7d2b82b2 | ||
|
|
779afc2f2a | ||
|
|
33eb91346c | ||
|
|
a11541eb58 | ||
|
|
dbe6d10fff | ||
|
|
2af88353f3 | ||
|
|
82fe350493 | ||
|
|
47d1662033 | ||
|
|
4e5d8d1d76 | ||
|
|
f1fcef2a6f | ||
|
|
8f0ea8ed82 | ||
|
|
5fbe0b31c2 | ||
|
|
24a016e76a | ||
|
|
5de88ce92d | ||
|
|
20583b04c0 | ||
|
|
2f1ba9cb54 | ||
|
|
451326789b | ||
|
|
859ae9d825 | ||
|
|
364a636fa0 | ||
|
|
293eb10ffd | ||
|
|
b3d9e81499 | ||
|
|
74a5125cf9 | ||
|
|
d607b30953 | ||
|
|
abdea22899 | ||
|
|
48f82aaaca | ||
|
|
5ac2e24cc2 | ||
|
|
d6db942626 | ||
|
|
7db7c67065 | ||
|
|
54f47c58db | ||
|
|
ceb6d2fef1 | ||
|
|
b38412b145 | ||
|
|
ef68db080a | ||
|
|
97a9f2aae6 | ||
|
|
2f42f488ab | ||
|
|
f1e800d8bf | ||
|
|
16c6aed65f | ||
|
|
77cf3455a6 | ||
|
|
4fa8d18621 | ||
|
|
6de9181390 | ||
|
|
bd8d450542 | ||
|
|
7b0d90182b | ||
|
|
b5f2d0641d | ||
|
|
4662d10c4e | ||
|
|
13a63b64d4 | ||
|
|
4d8125a2fd | ||
|
|
ea880915b0 | ||
|
|
836387057a | ||
|
|
f0a9c33795 | ||
|
|
843bfa8ba6 | ||
|
|
654f8e8c6c | ||
|
|
c6272e4103 | ||
|
|
3420422f29 | ||
|
|
a664e8ce8c | ||
|
|
491fba7d64 | ||
|
|
3d35cf713f | ||
|
|
b75a91ed70 | ||
|
|
02a3d6a359 | ||
|
|
8372d9d85c | ||
|
|
945373df67 | ||
|
|
26e952154e | ||
|
|
e33ce724c0 | ||
|
|
86b24c9dfe | ||
|
|
3465e9bd16 | ||
|
|
c9d7931ccb | ||
|
|
aad6b70736 | ||
|
|
d560b17ab6 | ||
|
|
18dbb847e3 | ||
|
|
84693b6524 | ||
|
|
5ac6bfd49a | ||
|
|
5666f1e9a7 | ||
|
|
b39a57786d | ||
|
|
4fe306d35f | ||
|
|
60b2e5e8c7 | ||
|
|
18a58c1042 | ||
|
|
d9cfbc19ed | ||
|
|
372c7e0804 | ||
|
|
5f34a78d94 | ||
|
|
f0cfed2ef6 | ||
|
|
59a45871fd | ||
|
|
328347f8af | ||
|
|
845e70465e | ||
|
|
6bcf70bb8b | ||
|
|
41dfe08d2a | ||
|
|
38d23fd76b | ||
|
|
bf6c6e56a6 | ||
|
|
b259e43de2 | ||
|
|
1556ccd7f6 | ||
|
|
a7baf4f3e6 | ||
|
|
f0634ff164 | ||
|
|
322d454d41 | ||
|
|
cafde67c02 | ||
|
|
362aedc2ac | ||
|
|
b3b10b0db7 | ||
|
|
3528587890 | ||
|
|
3bd308effa | ||
|
|
fbffbf0909 | ||
|
|
f05e885517 | ||
|
|
9ead6b00f9 | ||
|
|
00eb978f5f | ||
|
|
c9931fd45e | ||
|
|
3813b21dcf | ||
|
|
ccd386ff4e | ||
|
|
d29f3030a4 | ||
|
|
fe53b08f35 | ||
|
|
5a64d9d3d0 | ||
|
|
4b1012cbb4 | ||
|
|
c0c4fb72ca | ||
|
|
ed71fa48f1 | ||
|
|
18c2892967 | ||
|
|
71ed0815cc | ||
|
|
41b2bcee4f | ||
| 1f3f60a6a3 | |||
| a9ef96f84e | |||
| 3390a62020 | |||
| 9461eed380 | |||
| aeb210bd30 | |||
| d7246cbb98 | |||
| 785500241e | |||
|
|
dddb9b1f57 | ||
| 7f16bca7f7 | |||
|
|
efeb0efd1e | ||
|
|
406259a8a2 | ||
|
|
7fd9497d5c | ||
|
|
3617f9b6d9 | ||
|
|
1589188988 | ||
|
|
a514355e5e | ||
|
|
be410d4d9d | ||
|
|
13ffe1fcc6 | ||
|
|
9ccf5912d4 | ||
|
|
4c7deadb9a | ||
|
|
42b615d16b | ||
|
|
70efb10ece | ||
|
|
8fd4f818b5 | ||
|
|
f1b206ad34 | ||
|
|
48171ae824 | ||
|
|
28fb4d1e94 | ||
|
|
1d504e4066 | ||
|
|
6a18511dd0 | ||
|
|
6f32364675 | ||
|
|
3b997a32bf | ||
|
|
8b889ade5f | ||
|
|
af7fc8f243 | ||
|
|
e4ee4abce8 | ||
|
|
2544bc5f3d | ||
|
|
19d017a457 | ||
|
|
e34f162688 | ||
|
|
60fc66cdf5 | ||
|
|
d3ddcba313 | ||
|
|
251e3bf844 | ||
|
|
6bdd9be092 | ||
|
|
3390f51049 | ||
|
|
fac748d2e2 | ||
|
|
ad1283662d | ||
|
|
bb8a92fff1 | ||
|
|
b65639df05 | ||
|
|
f99ebf84e9 | ||
|
|
8e78dd4a2e | ||
|
|
822f8a1699 | ||
|
|
488ce46d73 | ||
|
|
f32df567f2 | ||
|
|
d6cc32ee9a | ||
|
|
3ac617431f | ||
|
|
7a58aac08e | ||
|
|
a3a65524b8 | ||
|
|
88f5df0f19 | ||
|
|
5fe436e76a | ||
|
|
d7fd6b00ae | ||
|
|
32377f4e6c | ||
|
|
1b9fccc2a4 | ||
|
|
dc4374cb9e | ||
|
|
7f41017ce3 | ||
|
|
02d58d440e | ||
|
|
73b26ec206 | ||
|
|
601fc2cffc | ||
|
|
af50d4912d | ||
|
|
8bd20f9600 | ||
|
|
6d435ee473 | ||
|
|
7f542b8936 | ||
|
|
2c13c1b5f3 | ||
|
|
29c929aba4 | ||
|
|
a87ef54492 | ||
|
|
32e6d5f891 | ||
|
|
ab8813e389 | ||
|
|
21ad6ecffb | ||
|
|
b359937eab | ||
|
|
c0545b8777 | ||
|
|
4470c5abbd | ||
|
|
c3a54858d5 | ||
|
|
0a3037f077 | ||
|
|
84e165b8fc | ||
|
|
5bad82b4f0 | ||
|
|
d1e6125d15 | ||
|
|
26454bf6c6 | ||
|
|
1da2be03e6 | ||
|
|
5aac280618 | ||
|
|
928c451040 | ||
| 9b44ba3ef2 | |||
|
|
02e475642a | ||
|
|
406b4590a9 | ||
|
|
b3aa99f008 | ||
|
|
bb97b7c862 | ||
|
|
1aef81dccc | ||
| 8a43cf7dfb | |||
| 2391b5a504 | |||
| e61bc7416e | |||
| 64f89be6a9 | |||
| b72706356d | |||
| 73ffb509f7 | |||
| 5723f57fc5 | |||
| c38aa357b7 | |||
| df43aae33d | |||
| b461b87df6 | |||
| 7575e8c16f | |||
| 2869f228d4 | |||
| 252fcc4716 | |||
| 36f92c5c9c | |||
| f13a98e53b | |||
| 3f6fb260a4 | |||
| bf5d0556fc | |||
| c774b14e04 | |||
| b6bbec235c | |||
| 5533029072 | |||
| 5807e4a873 | |||
| f9e8cf60bc | |||
| 9294c8c2f1 | |||
| 0d7cbbea37 | |||
| 0fb6f1e4ed | |||
| 9130160bd6 | |||
| 6410cc1090 | |||
| ac11c56ca0 | |||
| 515a639fd0 | |||
| 4503751907 | |||
|
|
3cc9fe4dd9 | ||
|
|
e42cb582c7 | ||
|
|
39cd616c90 | ||
|
|
8c5fdf5021 | ||
|
|
1ff723554c | ||
|
|
41ed15f47f | ||
|
|
d916d4125c | ||
|
|
4877a7ddc4 | ||
|
|
c6bcb5fde9 | ||
|
|
4cd7367a7e | ||
|
|
47d9d7d242 | ||
|
|
c3c222e045 | ||
|
|
d374779c47 | ||
|
|
44b6770b6a | ||
|
|
1638f97d0c | ||
|
|
b3fcd7765e | ||
|
|
dc3379c186 | ||
|
|
b4abd7b6bc | ||
|
|
6d341a3cfe | ||
|
|
1d9d96f467 | ||
|
|
56c3517328 | ||
|
|
90bb0759a0 | ||
|
|
9ae2e18fe6 | ||
|
|
8ad791c594 | ||
|
|
e6adced39d | ||
|
|
fd95bb8198 | ||
|
|
9b9f6720e0 | ||
|
|
254b465755 | ||
|
|
5a085ba7ba | ||
|
|
4782f3b184 | ||
|
|
51da26564f | ||
|
|
a9ffae9b87 | ||
|
|
3393db0fbc | ||
|
|
100209ea16 | ||
|
|
425b534c30 | ||
|
|
63640db81f | ||
|
|
03160f3863 | ||
|
|
7368942488 | ||
|
|
9a69ce50f9 | ||
|
|
b545f8ed40 | ||
|
|
be0148d146 | ||
|
|
8f1d6e2cc2 | ||
|
|
fde30685a8 | ||
|
|
bdf535fcbf | ||
|
|
03a5f5989e | ||
|
|
e5cf1b5289 | ||
|
|
62799d196b | ||
|
|
cb6619a90a | ||
|
|
b9fad1f4fb | ||
|
|
5e478c7eb0 | ||
|
|
821aaa1f66 | ||
|
|
942cbdd4b2 | ||
|
|
13f3057185 | ||
|
|
81d58f1275 | ||
|
|
e236e792ec | ||
|
|
9e7414e0e0 | ||
|
|
e6eeaf1674 | ||
|
|
49b9225b6e | ||
|
|
7f64670f36 | ||
|
|
7dd5840353 | ||
|
|
72df5d5213 | ||
|
|
d43ce1bdb2 | ||
|
|
bd647b99ec | ||
|
|
0997fd0901 | ||
|
|
dc840c9bc7 | ||
|
|
37403a7234 | ||
|
|
f0d291f527 | ||
|
|
b8803c8e5b | ||
|
|
9e11c0814e | ||
|
|
72fa8a5883 | ||
|
|
a656ada67a | ||
|
|
20c42b14bf | ||
|
|
4e59c8791f | ||
|
|
8128870d57 | ||
|
|
b979bdb560 | ||
|
|
b7659a477c | ||
|
|
343d6cf350 | ||
|
|
dbd186e299 | ||
|
|
1a4be0f02e | ||
|
|
8f89b022c7 | ||
|
|
74403d28e9 | ||
|
|
1968db62ad | ||
|
|
bcdb3572fa | ||
|
|
18938c9fca | ||
|
|
8f0e7435d6 | ||
|
|
bad5484d12 | ||
|
|
b4ba3c40eb | ||
|
|
3b0c6ef2ea | ||
|
|
7a6578e205 | ||
|
|
a2083c5310 | ||
|
|
879f6c288e | ||
|
|
e17a21defd | ||
|
|
eea74406c9 | ||
|
|
d4317b5fd3 | ||
|
|
25b8fc2e1d | ||
|
|
f8b613e0aa | ||
|
|
4ad7033285 | ||
|
|
eca0bcc6d8 | ||
|
|
fa1df39923 | ||
|
|
49277216ba | ||
|
|
16ef4fa9fb | ||
|
|
9695e49024 | ||
|
|
540ce7c076 | ||
|
|
27491c933a | ||
|
|
7124d978d3 | ||
|
|
0fee2bb165 | ||
|
|
3e50d0edca | ||
|
|
38a63641bc | ||
|
|
5c4a33873f | ||
|
|
c2c7de4c59 | ||
|
|
d598a6d0f5 | ||
|
|
db3addc819 | ||
|
|
d8b1d59b12 | ||
|
|
54d98f58f3 | ||
|
|
4a13232467 | ||
|
|
ba0f573618 | ||
|
|
2f03f77ce4 | ||
|
|
daf58e9e45 | ||
|
|
2467065ac3 | ||
|
|
0820d7c0dc | ||
|
|
267741fa8b | ||
|
|
7bc73d1ca8 | ||
|
|
957169d9aa | ||
|
|
5b3b0e67e9 | ||
|
|
304bbd230a | ||
|
|
ca1df94be5 | ||
|
|
2a1710596a | ||
|
|
b602f3ae13 | ||
|
|
5024abc812 | ||
|
|
dbd9b1a095 | ||
|
|
f6f83c6f70 | ||
|
|
bc9b4f508b | ||
|
|
7f5ac93cc6 | ||
|
|
5d4ad93c51 | ||
|
|
a7e59b2bb0 | ||
|
|
876868506f | ||
|
|
6dc54adec8 | ||
|
|
16a6e05849 | ||
|
|
f16d9a5848 | ||
|
|
cb5b80353d | ||
|
|
b7d54111ba | ||
|
|
bf74913486 | ||
|
|
e3a341eb22 | ||
|
|
785d6360cd | ||
|
|
05ed8af158 | ||
|
|
d1cd72c5f8 | ||
|
|
6d6bec35f2 | ||
|
|
9db1a8490c | ||
|
|
409037bdf3 | ||
|
|
53fef14024 | ||
|
|
c08356876d | ||
|
|
4d7e3d6866 | ||
|
|
ab5512e9d6 | ||
|
|
f6ae46e352 | ||
|
|
7ee7a05ea1 | ||
|
|
e559a1dabd | ||
|
|
8707e4a819 | ||
|
|
912e447200 | ||
|
|
52c1dabd0e | ||
|
|
bd8d59b343 | ||
|
|
ea221281a3 | ||
|
|
cc5b4fa650 | ||
|
|
663d4a2a02 | ||
|
|
371542fb1e | ||
|
|
b71f2c4ebb | ||
|
|
d841faa057 | ||
|
|
c8cc1673e0 | ||
|
|
d6409b22c2 | ||
|
|
35697b9af5 | ||
|
|
7374244806 | ||
|
|
391790d648 | ||
|
|
f752f934b6 | ||
|
|
573dba4712 | ||
|
|
51de825189 | ||
|
|
4c0ad53b3a | ||
|
|
8f790309ce | ||
|
|
2690203912 | ||
|
|
9d8a44696b | ||
|
|
7cccf4daf1 | ||
|
|
e1cf43c260 | ||
|
|
a6ed0a964e | ||
|
|
3452c2c5d4 | ||
|
|
577bd51613 | ||
|
|
b3b2356a7e | ||
|
|
b75baffdcf | ||
|
|
cacae6a9cd | ||
|
|
0f024b27f0 | ||
|
|
c81f17c24b | ||
|
|
623483f3b1 | ||
|
|
0ecaa9b8ee | ||
|
|
a99020078c | ||
|
|
a4c892b696 | ||
|
|
c4cd2178f7 | ||
|
|
c7475cda83 | ||
|
|
ffaaea497c | ||
|
|
e7947069a2 | ||
|
|
0abd8aedff | ||
|
|
dba0fd8b20 | ||
|
|
ec83c1ff12 | ||
|
|
1c7e99e91b | ||
|
|
f5e799d632 | ||
|
|
2e068d3fbb | ||
|
|
713db304e2 | ||
|
|
3487c22da3 | ||
|
|
65c3cb31d7 | ||
|
|
80874887cc | ||
|
|
8723d62add | ||
|
|
213ada4ae9 | ||
|
|
70684a29c6 | ||
|
|
06c4d026f8 | ||
|
|
65be64c756 | ||
|
|
2fe2c0515f | ||
|
|
ecbef84c37 | ||
|
|
39194704f5 | ||
|
|
9a91487375 | ||
|
|
7f37327bcd | ||
|
|
a9fa251fee | ||
|
|
4e00645851 | ||
|
|
87b30741fc | ||
|
|
a0f504d1e2 | ||
|
|
24bab23508 | ||
|
|
a0c3ef8ea1 | ||
|
|
b4b343b578 | ||
|
|
18b570d750 | ||
|
|
314f600523 | ||
|
|
6ac65cf893 | ||
|
|
5836c6ff90 | ||
|
|
a4d7183260 | ||
|
|
d61c2b20c8 | ||
|
|
3dcc8883cd | ||
|
|
3574dd4b1e | ||
|
|
95b9daca80 | ||
|
|
42456e8e98 | ||
|
|
0094cf7054 | ||
|
|
d4c79ab66b | ||
|
|
57b8242f7e | ||
|
|
a624cc8a68 | ||
|
|
b5b0e4191a | ||
|
|
0403c68429 | ||
|
|
f1ceb38f5f | ||
|
|
0cf3b869af | ||
|
|
69b843a824 | ||
|
|
924c5a3bf8 | ||
|
|
809633bdd3 | ||
|
|
64727e0d3a | ||
|
|
2a0aee5bf5 | ||
|
|
c65544a377 | ||
|
|
a6ed0997e8 | ||
|
|
721341604c | ||
|
|
3e3cae507c | ||
|
|
1ef5924f0c | ||
|
|
070157eacb | ||
|
|
fbf5daff0e | ||
|
|
427afa9ebd | ||
|
|
190514597b | ||
|
|
92de606bc6 | ||
|
|
5aed96c5a6 | ||
|
|
5bc2c132fa | ||
|
|
f7db908cb2 | ||
|
|
1bc82dea15 | ||
|
|
f131509c56 | ||
|
|
8e77a70ad6 | ||
|
|
867486e72e | ||
|
|
28130de9cb | ||
|
|
1523586b37 | ||
|
|
d7838e2a42 | ||
|
|
eb923af44f | ||
|
|
edd5a3efd9 | ||
|
|
90dfa516da | ||
|
|
37620b4dbc | ||
|
|
71b5383090 | ||
|
|
52afb9f466 | ||
|
|
efc43b0863 | ||
|
|
ff3cdb10dc | ||
|
|
d27a74c97b | ||
|
|
d2833d26cc | ||
|
|
8530b0643d | ||
|
|
3f9971d2ee | ||
|
|
5be41c8163 | ||
|
|
51d0daafdd | ||
|
|
5ed6271c08 | ||
|
|
abbe8d467b | ||
|
|
fd6f0b0a35 | ||
|
|
67f66b72e8 | ||
|
|
e54436e818 | ||
|
|
9a514e7d3f | ||
|
|
6cd660982c | ||
|
|
df434cd399 | ||
|
|
8cc768e5b6 | ||
|
|
5feb07e3f6 | ||
|
|
514887d19f | ||
|
|
ae892a07d4 | ||
|
|
f76e0d3a16 | ||
|
|
0a57ac3132 | ||
|
|
6b0275d035 | ||
|
|
be2b17ea85 | ||
|
|
c4e2ae2395 | ||
|
|
39f042240d | ||
|
|
a60a495c83 | ||
|
|
459ed11b58 | ||
|
|
bf1c683fd0 | ||
|
|
122cdd7fc8 | ||
|
|
899ba13df4 | ||
|
|
467baec7da | ||
|
|
4716eaa4b6 | ||
|
|
c55716df08 | ||
|
|
ca6f7ed587 | ||
|
|
e2713cfe2d | ||
|
|
030c49ff7c | ||
|
|
d3654266ee | ||
|
|
04f14c91f0 | ||
|
|
664c18ebbe | ||
|
|
3645c98685 | ||
|
|
45bbfce4d3 | ||
|
|
bb69cc073a | ||
|
|
dc5a53376d | ||
|
|
6bf762b72f | ||
|
|
d6c4ffca5a | ||
|
|
c91aa4be47 | ||
|
|
9cd70b31ac | ||
|
|
b64c779a58 | ||
|
|
2e7cf188af | ||
|
|
674cea629d | ||
|
|
8199e67b79 | ||
|
|
7b260006bf | ||
|
|
f3232cc5df | ||
|
|
477a289c2e | ||
|
|
77c80d1a69 | ||
|
|
f608fc186e | ||
|
|
e697466557 | ||
|
|
b35a0b0d26 | ||
|
|
2c469718f6 | ||
|
|
4a51de95c4 | ||
|
|
ee1d4bb600 | ||
|
|
d9c6986a89 | ||
|
|
bd6490631f | ||
|
|
640e1e3b5e | ||
|
|
f4231456e7 | ||
|
|
222d85f052 | ||
|
|
ee92182163 | ||
|
|
55dd577275 | ||
|
|
4941d230e2 | ||
|
|
660479d692 | ||
|
|
b153fafa9f | ||
|
|
adc43324f3 | ||
|
|
f6bd08029f | ||
|
|
94e5a06a15 | ||
|
|
8fc0ba136f | ||
|
|
9f5e779b5e | ||
|
|
1b693da5ed | ||
|
|
4c44a504ed | ||
|
|
453af2851b | ||
|
|
30e560d808 | ||
|
|
277f60e3e2 | ||
|
|
77ca2d8830 | ||
|
|
d4deea2019 | ||
|
|
38fa552c00 | ||
|
|
f8fa426adb | ||
|
|
38beb34a38 | ||
|
|
fb0438d352 | ||
|
|
44fe6a8b89 | ||
|
|
269b8840ad | ||
|
|
b9a223c049 | ||
|
|
db37710b90 | ||
|
|
092c8bb913 | ||
|
|
d807e3de7d | ||
|
|
8c965015f3 | ||
|
|
538a3b6ca8 | ||
|
|
9237a6262e | ||
|
|
fcfda644d3 | ||
|
|
75bac01f3a | ||
|
|
72fd57ef76 | ||
|
|
fda50ed570 | ||
|
|
f8a3c8f5bc | ||
|
|
681bfcb4c4 | ||
|
|
27816724f8 | ||
|
|
ac9ac5e397 | ||
|
|
43c6e2f2e1 | ||
|
|
b7fea2042f | ||
|
|
c6d68749e0 | ||
|
|
ae89a707ec | ||
|
|
973c6f4ef8 | ||
|
|
4dd0a5ddf2 | ||
|
|
90dc3dac3b | ||
|
|
8c4c2ad1cf | ||
|
|
f949bb8dc0 | ||
|
|
c863bf6e1d | ||
|
|
5d89cf9474 | ||
|
|
09aedecc3b | ||
|
|
fe515e9f01 | ||
|
|
69d2c0887c | ||
|
|
8e577022b2 | ||
|
|
3088727fd4 | ||
|
|
b33ad5833e | ||
|
|
3264b6edef | ||
|
|
58c2650162 | ||
|
|
01e098339e | ||
|
|
d857cc9084 | ||
|
|
4205821bac | ||
|
|
0776978c9c | ||
|
|
6568cb8900 | ||
|
|
6a755598b2 | ||
|
|
df3917a677 | ||
|
|
d2192ffd21 | ||
|
|
cb4128436c | ||
|
|
6cc578435c | ||
|
|
73637ba53d | ||
|
|
c9657aeb8c | ||
|
|
dfb7cc88cd | ||
|
|
227120fd57 | ||
|
|
8b74ff4bb6 | ||
|
|
f4099c6929 | ||
|
|
6ae5c0d912 | ||
|
|
44d190e91c | ||
|
|
0a864c7f87 | ||
|
|
50d753a87b | ||
|
|
35f85c55f1 | ||
|
|
b69bdcd126 | ||
|
|
49d5857b36 | ||
|
|
40ad04b79f | ||
|
|
a3e564855a | ||
|
|
15d0d05185 | ||
|
|
819eca5dea | ||
|
|
edbe793c68 | ||
|
|
e017c6effc | ||
|
|
d4ac28af18 | ||
|
|
931aa4e3cb | ||
|
|
cc4017e481 | ||
|
|
38adb9a52f | ||
|
|
ccc5813b3f | ||
|
|
314d0e8b71 | ||
|
|
0338889905 | ||
|
|
876cd8909f | ||
|
|
ac7cb45f61 | ||
|
|
f326bf9148 | ||
|
|
b1596c0ac4 | ||
|
|
13d3f37f05 | ||
|
|
e4290c4ab0 | ||
|
|
2918b4b92c | ||
|
|
39c622d5bf | ||
|
|
76a6b501f3 | ||
|
|
ecf92e2079 | ||
|
|
b4c0c4d219 | ||
|
|
4be8c81291 | ||
|
|
a8460065a4 | ||
|
|
2b39dec560 | ||
| 0b85a9d330 | |||
| b123f6ada7 | |||
| e5c288c764 | |||
| 9db7d8e589 | |||
| 5e48687347 | |||
| 09bbf81915 | |||
| 78f8ea2b5b | |||
| e08b4275a9 | |||
| ac9f3cf061 | |||
| 98fd314a62 | |||
| 79a31a41f9 | |||
| 6aae9083c3 | |||
| d71e31417b | |||
| fbe6c0c859 | |||
| 53b797fb53 | |||
| 98eb9173ee | |||
| ecfa95310d | |||
| 0e75a9163b | |||
|
|
59633d94f5 | ||
| 53206ad1d7 | |||
| 9aa91bf3e2 | |||
| 867479e05d | |||
| bb1f69dd90 | |||
| d219f7b966 | |||
| 3f812e5275 | |||
| cdef395f89 | |||
|
|
66f6a9ce90 | ||
|
|
b07c888c7a | ||
|
|
d170a3c36e | ||
| 429c21a8e9 | |||
|
|
8c10908353 | ||
|
|
e0963a1c39 | ||
|
|
e77aa9fb84 | ||
|
|
f5fe2d9e33 | ||
|
|
5006342b7b | ||
|
|
3ce8b67b4f | ||
|
|
52cec290d9 | ||
|
|
a559151c57 | ||
|
|
2fc60f9f74 | ||
|
|
3b1fcb7feb | ||
|
|
2838f540d1 | ||
|
|
f5ec5a61a9 | ||
|
|
44caf35fd8 | ||
|
|
c5055e7f34 | ||
|
|
de14ecea22 | ||
|
|
f5174a3248 | ||
|
|
f0889ce0f8 | ||
|
|
b6dc711c14 | ||
|
|
04fb2e8701 | ||
|
|
c1439bed8d | ||
|
|
a88f326ee6 | ||
| 56618dbe65 | |||
|
|
71ef710d09 | ||
|
|
c74852b60b | ||
|
|
a26109cb30 | ||
|
|
6b5b9a5315 | ||
|
|
4ebf3d8a0e | ||
| 37d02b298d | |||
|
|
d6053322e8 | ||
|
|
5b5f385b67 | ||
|
|
04428c45c9 | ||
| a7f605ced9 | |||
|
|
0adb8e528d | ||
|
|
f4280f9907 | ||
| 2d7892e3b1 | |||
|
|
8edeb2f622 | ||
|
|
d157a081b1 | ||
|
|
fcc57cf365 | ||
| 12c8ab41bf | |||
|
|
9266e5460e | ||
|
|
ad45859071 | ||
|
|
ee759980c4 | ||
|
|
18b371bc15 | ||
|
|
9e77b8bb75 | ||
|
|
e6acd4bdbd | ||
|
|
424219fb6f | ||
|
|
2ebb37552f | ||
|
|
822359fe51 | ||
|
|
97426a0ddb | ||
|
|
3f78382d45 | ||
|
|
8a1be45aac | ||
|
|
b5cca8be3b | ||
|
|
4d2f9a2b39 | ||
|
|
8fe02e5c89 | ||
|
|
b2dd905f0e | ||
|
|
c06d372984 | ||
|
|
7a9aef6faf | ||
|
|
6889ae9fa3 | ||
|
|
02d3cc84d5 | ||
|
|
768ec83037 | ||
|
|
b42249890e | ||
|
|
2f9870644b | ||
|
|
cc313246bb | ||
|
|
4e187581b3 | ||
|
|
bfe018cde6 | ||
|
|
dc479b33c5 | ||
|
|
ae284a1f30 | ||
|
|
f1736c53c4 | ||
|
|
23df89cf31 | ||
|
|
05c5e26e99 | ||
|
|
d1d0c24ed8 | ||
|
|
c4301cf6df | ||
|
|
b3089fafe9 | ||
|
|
de7d68b1eb | ||
|
|
e913a56a6b | ||
|
|
bb8dbb381f | ||
|
|
39c61bd526 | ||
|
|
144610d6c2 | ||
|
|
10f1cdb458 | ||
|
|
40f413ba47 | ||
|
|
a588221524 | ||
|
|
9cd8734947 | ||
|
|
9df91b221b | ||
|
|
c8551991b2 | ||
|
|
64a4842dcb | ||
|
|
f666b9c396 | ||
|
|
a4532a29da | ||
|
|
5469794159 | ||
|
|
705dd51f30 | ||
|
|
1e26578305 | ||
|
|
ddb62f2897 | ||
|
|
8b5f81c8f8 | ||
|
|
f8be510509 | ||
|
|
27af84da65 | ||
|
|
121f0a6aac | ||
|
|
9646c32819 | ||
|
|
8932bdc466 | ||
|
|
c3ab5c6096 | ||
|
|
9fa93fdd15 | ||
|
|
7a7433bc84 | ||
|
|
b4296f1736 | ||
|
|
ff8c5ef0c1 | ||
|
|
1bac650aee | ||
|
|
a22b42e832 | ||
|
|
9fc80bed35 | ||
|
|
afa5a8b940 | ||
|
|
59f8647e0f | ||
|
|
f593104c04 | ||
|
|
384b0438b4 | ||
|
|
dc6d89b0ca | ||
|
|
e01507d541 | ||
| 9a7a1728a4 | |||
| 240c7eff10 | |||
| 6b59e3a689 | |||
| b505a26ce4 | |||
| ce268ec306 | |||
| a5e1529514 | |||
|
|
42d10cf43d | ||
|
|
2226aa34d5 | ||
|
|
0268ff46b3 | ||
| 1d7cf3f41a | |||
| 32c186afd7 | |||
| 54a9f7a37c | |||
| e4e8cc5993 | |||
| 8703ed5d94 | |||
| a4118261e1 | |||
| 6392c1f238 | |||
| 4148ece133 | |||
| c724f292ca | |||
| 53513b812b | |||
| beffdbd89d | |||
| 8bd0df1bab | |||
| 4ae43e94f4 | |||
| da88771fd4 | |||
| b6b7d2aa12 | |||
| c733b0f2eb | |||
| 9712bf6dfd | |||
| 5e4c1493a1 | |||
| 41b1334257 | |||
| a2fcbae129 | |||
| e9077542c9 | |||
| 79595521a9 | |||
| 38b658fd3f | |||
| a89123755c | |||
| 0fb9accd05 | |||
| f87df707ab | |||
| a2cb771fc1 | |||
| c888f59ff0 | |||
| 43ff6e09be | |||
| 810ab3ea4f | |||
| cb5978237b | |||
| 622d523c98 | |||
| ee7d2529e7 | |||
| 82de967f97 | |||
| 466e667e14 | |||
| 3c563ce665 | |||
| 19a061efa8 | |||
| a397eb9d00 | |||
| e5d864359a | |||
| b2adc285b6 | |||
| 8af604262d | |||
| b33ca2b290 | |||
| c4455168c6 | |||
| 1b4674acde | |||
| 4fac4317a3 | |||
| 78bf9986b7 | |||
| 5154c0d8e5 | |||
| b01fcc3a6d | |||
| e8585bec42 | |||
| 521f0241f8 | |||
| 0394becdac | |||
| e5fa636776 | |||
| 6beaf4afdd | |||
| 822812525e | |||
| a4a92483bd | |||
| 3254ba1443 | |||
| 4c3d0ce7fa | |||
| a99afe07c6 | |||
| 73bb60eff9 | |||
| 0a214c5d4b | |||
| 29c53f35ab | |||
| 3746dab5de | |||
| 18dbadd675 | |||
| ee2cd0d391 | |||
| 0cc4e7c7d3 | |||
|
|
478065786f | ||
|
|
e64d82cd92 | ||
|
|
12a991920a | ||
| 0758efb3ec | |||
| 54b782c67e | |||
| 78a5f656b9 | |||
| 6e23853759 | |||
| becfaa1504 | |||
|
|
77a6015ad6 | ||
|
|
7c15a7439d | ||
|
|
b4f4db5754 | ||
|
|
c6656e6642 | ||
|
|
e6fa54d0e5 | ||
|
|
f16b4e3f47 | ||
|
|
4ad5b68433 | ||
|
|
552730f0a3 | ||
|
|
a1f02e575f | ||
|
|
f58b1db920 | ||
|
|
3d2ac06a72 | ||
|
|
9802f45452 | ||
|
|
1ad58d6b5d | ||
|
|
6805bcb690 | ||
|
|
c162411f0b | ||
|
|
10a05d686e | ||
|
|
89ef5c19ff | ||
|
|
4385ce86c1 | ||
|
|
46124a770f | ||
|
|
6f6327d267 | ||
|
|
6710a469ee | ||
|
|
174c475ec7 | ||
|
|
d3b42a125d | ||
|
|
2f2f4d396d | ||
|
|
e1eea7088f | ||
|
|
760fa3114f | ||
|
|
798ae591c6 | ||
|
|
7877efba0a | ||
|
|
cfa888fde6 | ||
|
|
cedcb0988a | ||
|
|
c939013b14 | ||
|
|
458d0e1ebc | ||
|
|
776152ef47 | ||
|
|
9f285a9f34 | ||
|
|
302ad0632e | ||
|
|
ffb5d7bdda | ||
|
|
242cf4741a | ||
|
|
41a14f161d | ||
|
|
f0e1406c5f | ||
|
|
d7c6676c49 | ||
|
|
5e9dfc6ea6 | ||
|
|
27fca090fc | ||
|
|
716131f005 | ||
|
|
496280f3e6 | ||
|
|
0dd0951b28 | ||
|
|
b9597fbb57 | ||
|
|
edc6591554 | ||
|
|
560b9bf985 | ||
|
|
6652e3f160 | ||
|
|
b0f1f73ce4 | ||
|
|
214d887c57 | ||
|
|
6b16724c2a | ||
|
|
f1bb927063 | ||
|
|
eeda1bed73 | ||
|
|
751ec9517f | ||
|
|
228814be33 | ||
|
|
cebcbeb73a | ||
|
|
057b09dca9 | ||
|
|
480541ae54 | ||
|
|
60303d041c | ||
|
|
5a911ecec7 | ||
|
|
7056f9a8b2 | ||
|
|
34036581f2 | ||
|
|
dcc67fddda | ||
|
|
03cad0a37f | ||
|
|
a4651eaa0a | ||
|
|
7aed3d3b30 | ||
|
|
4771f52b20 | ||
|
|
77ad85b05c | ||
|
|
01d877d26e | ||
|
|
e84d990366 | ||
|
|
e06be10f7f | ||
|
|
fe6750e824 | ||
|
|
d29fe2ee1c | ||
|
|
1156b1d3ea | ||
|
|
126a10cf94 | ||
|
|
4560e0da84 | ||
|
|
f9c2e0e170 | ||
|
|
cf413dd03c | ||
|
|
4965678443 | ||
|
|
67f94f9436 | ||
|
|
1186662960 | ||
|
|
3010961383 | ||
|
|
806fd41130 | ||
|
|
af07161f05 | ||
|
|
5ff759db93 | ||
|
|
7f292d402b | ||
|
|
c180780da9 | ||
|
|
d75862bc41 | ||
|
|
7cdb603d75 | ||
|
|
94c44b0d7b | ||
|
|
4a3d181097 | ||
|
|
d8863dca48 | ||
|
|
e0c439e850 | ||
|
|
f4f1b3ca6d | ||
|
|
4a93790c7e | ||
|
|
5265acd9dc | ||
|
|
9f69bb5fca | ||
|
|
b1d6e1c3d5 | ||
|
|
659703b221 | ||
|
|
3869bd536e | ||
|
|
408d154d3f | ||
|
|
44e3eb8a18 | ||
|
|
51a3cecc02 | ||
|
|
6b4ea7b83e | ||
|
|
da71cca22f | ||
|
|
5c945e3431 | ||
|
|
ba5bc365c1 | ||
|
|
c362b1b529 | ||
|
|
f90b6dc7ab | ||
|
|
a6a9016548 | ||
|
|
5351108ec1 | ||
|
|
7759e481d4 | ||
|
|
69c3a06c98 | ||
|
|
d1ad8730d7 | ||
|
|
f3a570a21d | ||
|
|
f626d3304d | ||
|
|
7eb4c89bf0 | ||
|
|
9435be0f19 | ||
|
|
7f108f6d9a | ||
|
|
3f98470af8 | ||
|
|
e58b69782c | ||
|
|
e49e22b37c | ||
|
|
82e69b4f05 | ||
|
|
ea9266ecf9 | ||
|
|
99ea6778ad | ||
|
|
ccd80e74f8 | ||
|
|
3057d2a232 | ||
|
|
d1ac659d4f | ||
|
|
bb1989d0f0 | ||
|
|
418e5e1d3f | ||
|
|
3b12e6d975 | ||
|
|
54d7f1d097 | ||
|
|
cfc90deb83 | ||
|
|
1a0e577606 | ||
|
|
a05fe94d90 | ||
|
|
8e64062214 | ||
|
|
8c1882eec8 | ||
|
|
8dd51096cf | ||
|
|
ecd5bbcb1d | ||
|
|
6d5babd331 | ||
|
|
79b7d32664 | ||
|
|
dd66ad835a | ||
|
|
a29fd964bd | ||
|
|
1ef274ec1d | ||
|
|
0f5627505f | ||
|
|
c0782e1cca | ||
|
|
ed1d273e03 | ||
|
|
9654e5da1c | ||
|
|
8040b746b4 | ||
|
|
05004aa874 | ||
|
|
4a21720745 | ||
|
|
13cb2e9b0f | ||
|
|
0259947cda | ||
|
|
080684e56f | ||
|
|
4b269bb234 | ||
|
|
1a62931202 | ||
|
|
c2029df3c9 | ||
|
|
4a074295ad | ||
|
|
711fefb0da | ||
|
|
fd12e70f78 | ||
|
|
fac89bae30 | ||
|
|
ab97e367cb | ||
|
|
ae693ca4c5 | ||
|
|
77dea07b40 | ||
|
|
77dcf7f759 | ||
|
|
59e7c4d5df | ||
|
|
0b5e57b85e | ||
|
|
c623acf832 | ||
|
|
36b1888f46 | ||
|
|
c09a668620 | ||
|
|
e85c386375 | ||
|
|
c66ecc4d7f | ||
|
|
13fe89af9f | ||
|
|
d8fe39ae86 | ||
|
|
5f5359f933 | ||
|
|
e820a516de | ||
|
|
e9fdea80c0 | ||
|
|
9534bd8881 | ||
|
|
5be508620e | ||
|
|
82e968d5c7 | ||
|
|
b4b060a962 | ||
|
|
64e5e9d45c | ||
|
|
881215e815 | ||
|
|
35cd983cc9 | ||
|
|
0a70039dee | ||
|
|
18ccc57f87 | ||
|
|
c23fcc5b06 | ||
|
|
21ff3b8b5d | ||
|
|
97c388dba0 | ||
|
|
10799e2ce3 | ||
|
|
7ef6b1fcc2 | ||
|
|
7a220b4c87 | ||
|
|
dc1327674c | ||
|
|
c8ff8e3ef6 | ||
|
|
f766df597c | ||
|
|
bab92cb88c | ||
|
|
5d8a5494cd | ||
|
|
129d93dfa7 | ||
|
|
65c55f0f21 | ||
|
|
8578a3097a | ||
|
|
de5f68e42c | ||
|
|
f44b0be459 | ||
|
|
a128401d49 | ||
|
|
5075ded032 | ||
|
|
47c2e87979 | ||
|
|
53352e7987 | ||
|
|
44f86a7d6f | ||
|
|
c37124d9c4 | ||
|
|
69ab1e0249 | ||
|
|
2fd8052ac2 | ||
|
|
28924db9f8 | ||
|
|
50545af223 | ||
|
|
30829ff9c8 | ||
|
|
ede9e4a9bd | ||
|
|
04d0e80430 | ||
|
|
366d4736ca | ||
|
|
f3391a912e | ||
|
|
52eb4030d0 | ||
|
|
835680f0ee | ||
|
|
cdf54e0f9b | ||
|
|
b439d40120 | ||
|
|
cb744ddeef | ||
|
|
872ffe5882 | ||
|
|
671e946c6d | ||
|
|
3928609c29 | ||
|
|
e942c839a1 | ||
|
|
bff34aafb9 | ||
|
|
7623943f3e | ||
|
|
6d7691791a | ||
|
|
b001df1f53 | ||
|
|
1cc7f2d92e | ||
|
|
7a0a898bc6 | ||
|
|
41aca4e2d7 | ||
|
|
7e89b12004 | ||
|
|
7bac9f829e | ||
|
|
2435639498 | ||
|
|
2be3e4ce9d | ||
|
|
1294444026 | ||
|
|
7578b65573 | ||
|
|
ced45c92f7 | ||
|
|
f21cddb2d0 | ||
|
|
735b729a41 | ||
|
|
c5b933f922 | ||
|
|
ce6fe2590d | ||
|
|
7509a76eb0 | ||
|
|
41eaa06e55 | ||
|
|
7429749004 | ||
|
|
709f9954f4 | ||
|
|
29adaa03c6 | ||
|
|
9f169fb2b9 | ||
|
|
6b8294d9dc | ||
|
|
0ea70273fe | ||
|
|
c66b5e2dad | ||
|
|
9077462893 | ||
|
|
7158a79a34 | ||
|
|
68060d6118 | ||
|
|
ddbdc73e7e | ||
|
|
263b640641 | ||
|
|
84ad39f24a | ||
|
|
408a4c79aa | ||
|
|
b9bbccfe00 | ||
|
|
05d262e42b | ||
|
|
18e61d19f5 | ||
|
|
4a073ea161 | ||
|
|
2993ca74cc | ||
|
|
1566923d5c | ||
|
|
b0073caf5f | ||
|
|
8ad044cb2c | ||
|
|
8a9eb32aaf | ||
|
|
7f2199405d | ||
|
|
38a545e174 | ||
|
|
4f0271ad49 | ||
|
|
7fc1602f7a | ||
|
|
aa26690e33 | ||
|
|
09581829d1 | ||
|
|
3afb94f5d2 | ||
|
|
29f084613d | ||
|
|
dd76a1a0be | ||
|
|
c132477f80 | ||
|
|
92635f6f68 | ||
|
|
65ef255b99 | ||
|
|
854fe85132 | ||
|
|
4da6203828 | ||
|
|
7db1aae5ee | ||
|
|
b4388d838e | ||
|
|
8446047ab2 | ||
|
|
dc19150eba | ||
|
|
a89139763f | ||
|
|
dab138c731 | ||
|
|
205a73917d | ||
|
|
ae3fe8cd42 | ||
|
|
c0b274767b | ||
|
|
620040bde1 | ||
|
|
22aa9990a5 | ||
|
|
16b7404d9b | ||
|
|
db5e315db0 | ||
|
|
4c87ce59d3 | ||
|
|
ca7bc171c9 | ||
|
|
b55b17ccc1 | ||
|
|
59830c80af | ||
|
|
b4a63eca02 | ||
|
|
0306723c95 | ||
|
|
af9743026e | ||
|
|
9b44731c33 | ||
|
|
5946e159bc | ||
|
|
327ea9cacf | ||
|
|
6d6991e266 | ||
|
|
e4ea57932e | ||
|
|
484a17d496 | ||
|
|
1d421b2d7c | ||
|
|
4ce282b88b | ||
|
|
85ada36973 | ||
|
|
a3e42d3b19 | ||
|
|
542f55d43e | ||
|
|
d87f221a2b | ||
|
|
6237a19d17 | ||
|
|
17175637dc | ||
|
|
32b5c7fbb0 | ||
|
|
ef47d092e6 | ||
|
|
8648c85b67 | ||
|
|
657c37d45c | ||
|
|
006becf6ca | ||
|
|
012d948193 | ||
|
|
a048adcdac | ||
|
|
b091e8eb09 | ||
|
|
14b39d906c | ||
|
|
0508ba299c | ||
|
|
02db5a9170 | ||
|
|
93a68ff43e | ||
|
|
97e423ba86 | ||
|
|
3033f1eecd | ||
|
|
f4405a16f1 | ||
|
|
025b743070 | ||
|
|
e27f5565cb | ||
|
|
7fe5cd6ede | ||
|
|
7052355596 | ||
|
|
1e6d1a9f2f | ||
|
|
a776c6ba13 | ||
|
|
75f782ab71 | ||
|
|
832f56a6d0 | ||
|
|
f6d3a7c84e | ||
|
|
7769a35f07 | ||
|
|
c38dfd20a1 | ||
|
|
83634fe95a | ||
|
|
e336e9c770 | ||
|
|
3ac1169aa7 | ||
|
|
3d8a6fb55a | ||
|
|
891b3abb44 | ||
|
|
01b0980c44 | ||
|
|
2c2f11be39 | ||
|
|
d71078d03d | ||
|
|
12009e36df | ||
|
|
21c39f70de | ||
|
|
7566faf77b | ||
|
|
f27d5988f0 | ||
|
|
d8a215a575 | ||
|
|
118d132797 | ||
|
|
06487e5534 | ||
|
|
c0b73d4777 | ||
|
|
e9e755b517 | ||
|
|
191619e6d8 | ||
|
|
0f64e786b5 | ||
|
|
7164296c9d | ||
|
|
787445c071 | ||
|
|
d9d119c0c9 | ||
|
|
c45eb31e8f | ||
|
|
b31d022c1a | ||
|
|
919c7e932a | ||
|
|
9489fe56d9 |
74
.gitignore
vendored
Normal file
74
.gitignore
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
# use glob syntax
|
||||
syntax: glob
|
||||
|
||||
*.orig
|
||||
*.pyc
|
||||
*.sql
|
||||
*.sqlite
|
||||
*.prof
|
||||
*~
|
||||
.idea/*
|
||||
.swp
|
||||
.vscode/*
|
||||
_1623.3d
|
||||
_1623.err
|
||||
_1623.pos
|
||||
_1623.svx
|
||||
_16230.svx
|
||||
cave-lookup.json
|
||||
core/migrations/*
|
||||
db*
|
||||
desktop.ini
|
||||
diffsettings.txt
|
||||
ignored-files.log
|
||||
import_profile.json
|
||||
lines-of-python.txt
|
||||
lines-of-templates.txt
|
||||
loadlogbk.log
|
||||
loadsurvexblks.log
|
||||
logbktrips.shelve
|
||||
memdump.sql
|
||||
my_project.dot
|
||||
parsing_log.txt
|
||||
svxblks.log
|
||||
svxlinear.log
|
||||
troggle
|
||||
troggle-inspectdb.py
|
||||
troggle-sqlite.sql
|
||||
troggle.log
|
||||
troggle.sqlite
|
||||
troggle.sqlite-journal
|
||||
troggle_log.txt
|
||||
tunnel-import.log
|
||||
logbktrips.shelve.db
|
||||
|
||||
credentials.py
|
||||
localsettings.py
|
||||
localsettings-expo-live.py
|
||||
_deploy/old/localsettings-expo-live.py
|
||||
_deploy/old/localsettings.py
|
||||
debian/localsettings.py
|
||||
debian/credentials.py
|
||||
wsl/localsettings.py
|
||||
wsl/credentials.py
|
||||
media/jslib/*
|
||||
!media/jslib/readme.txt
|
||||
|
||||
_test_response.html
|
||||
_deploy/wsl/localsettingsWSL.py.bak
|
||||
therionrefs.log
|
||||
_1623-and-1626.svx
|
||||
_1623-and-1626-no-schoenberg-hs.svx
|
||||
localsettings-oldMuscogee.py
|
||||
troggle.sqlite-journal - Shortcut.lnk
|
||||
troggle.sqlite - Shortcut.lnk
|
||||
|
||||
_deploy/debian/localsettings-jan.py
|
||||
_deploy/debian/localsettings-nw.py
|
||||
py310d32
|
||||
_deploy/debian/localsettingsserver2023-01-secret.py
|
||||
_deploy/debian/localsettings2023-04-05-secret.py
|
||||
pydebianbullseye
|
||||
|
||||
javascript
|
||||
|
||||
16
.hgignore
16
.hgignore
@@ -1,16 +0,0 @@
|
||||
# use glob syntax
|
||||
syntax: glob
|
||||
|
||||
*.pyc
|
||||
db*
|
||||
localsettings.py
|
||||
*~
|
||||
parsing_log.txt
|
||||
troggle
|
||||
troggle_log.txt
|
||||
.idea/*
|
||||
*.orig
|
||||
media/images/*
|
||||
.vscode/*
|
||||
.swp
|
||||
imagekit-off/
|
||||
216
README.txt
216
README.txt
@@ -1,46 +1,214 @@
|
||||
Troggle is an application for caving expedition data management, originally created for use on Cambridge University Caving Club (CUCC)expeditions and licensed under the GNU Lesser General Public License.
|
||||
Updated 2 May 2023
|
||||
|
||||
Troggle has been forked into two projects. The original one is maintained by Aron Curtis and is used for Erebus caves. The CUCC variant uses files as the definitive data, not the database and lives at expo.sruvex.com/troggle.
|
||||
Troggle is an application for caving expedition data management,
|
||||
originally created for use on Cambridge University Caving Club (CUCC)expeditions
|
||||
and licensed under the GNU Lesser General Public License.
|
||||
|
||||
Troggle has been forked into two projects. The original one is maintained by Aaron Curtis
|
||||
and was used for Erebus caves in Antarctica.
|
||||
The CUCC variant uses files as the definitive data, not the database, and lives at http://expo.survex.com/repositories/troggle/.git/
|
||||
|
||||
For the server setup, see /_deploy/debian/wookey-exposerver-recipe.txt
|
||||
and see http://expo.survex.com/handbook/troggle/serverconfig.html
|
||||
|
||||
Much material which was in this file has been moved to
|
||||
http://expo.survex.com/handbook/troggle/serverconfig.html
|
||||
|
||||
See copyright notices in
|
||||
http://expo.survex.com/handbook/computing/contribute.html
|
||||
and for context see
|
||||
http://expo.survex.com/handbook/computing/onlinesystems.html
|
||||
|
||||
Troggle setup
|
||||
==========
|
||||
=============
|
||||
0. read the very extensive online documentation and stop reading this README...
|
||||
well, come back to this README after you have read the HTML pages. Not everything has been transferred.
|
||||
|
||||
Python, Django, and Database setup
|
||||
http://expo.survex.com/handbook/troggle/troglaptop.html
|
||||
http://expo.survex.com/handbook/troggle/serverconfig.html
|
||||
http://expo.survex.com/handbook/troggle/trogdangoup.html
|
||||
and at troggle/debian/serversetup
|
||||
1. set up the ssh key-exchange with the git server so you can clone troggle
|
||||
http://expo.survex.com/handbook/computing/keyexchange.html
|
||||
|
||||
Setting up directories
|
||||
----------------------
|
||||
see http://expo.survex.com/handbook/troggle/troglaptop.html and
|
||||
http://expo.survex.com/handbook/troggle/serverconfig.html
|
||||
|
||||
Next, you need to fill in your local settings. Copy _deploy/WSL/localsettingsWSL.py
|
||||
to a new file called localsettings.py and edit it and settings.py to match
|
||||
your machine's file locations.
|
||||
Follow the instructions contained in the file to fill out your settings.
|
||||
|
||||
{ in _deploy/old/ we have these which are all very out of date:
|
||||
localsettings-expo-live.py is the python2.7 settings for the server.
|
||||
localsettingsubuntu.py
|
||||
localsettingsdocker.py
|
||||
localsettingswindows.py
|
||||
localsettingspotatohut.py
|
||||
}
|
||||
|
||||
Python3, Django, and Database setup
|
||||
-----------------------------------
|
||||
Troggle requires Django 1.4 or greater, and any version of Python that works with it.
|
||||
Install Django with the following command:
|
||||
We are now using Django 3.2 and will move to 4.2 in 2024
|
||||
We are installing with python 3.11 (the server is running 3.9)
|
||||
|
||||
apt-get install python-django (on debian/ubuntu)
|
||||
Install Django using pip, not with apt, on your test system in a venv.
|
||||
Conventionally on our main master expo server we install everything that we can as debian packages, not using pip.
|
||||
|
||||
If you want to use MySQL or Postgresql, download and install them. However, you can also use Django with Sqlite3, which is included in Python and thus requires no extra installation.
|
||||
[installation instructions removed - now in http://expo.survex.com/handbook/troggle/troglaptop.html ]
|
||||
|
||||
[venv description removed - read it in http://expo.survex.com/handbook/troggle/troglaptop.html ]
|
||||
|
||||
READ the os-trog.sh script !
|
||||
READ the venv-trog.sh script !
|
||||
|
||||
|
||||
Automatic Provisioning and Configuration
|
||||
----------------------------------------
|
||||
We don't do this - yet.
|
||||
|
||||
Troggle itself
|
||||
-------------
|
||||
Choose a directory where you will keep troggle, and svn check out Troggle into it using the following command:
|
||||
The most appropriate configuration tools today (2021) appear to be Bolt or Ansible
|
||||
https://puppet.com/docs/bolt/latest/bolt.html (declarative, local)
|
||||
https://docs.ansible.com/ansible/latest/user_guide/intro_getting_started.html (procedural, remote)
|
||||
https://puppet.com/blog/automating-from-zero-to-something/
|
||||
|
||||
svn co http://troggle.googlecode.com/svn/
|
||||
We don't need anything for the deploy server itself, but we could do with something for setting
|
||||
up test servers quickly to help get newbie developers up to speed faster. But learning a new tool
|
||||
creates a barrier in itself. This is one reason most of us don't use Docker.
|
||||
|
||||
CSS and media files
|
||||
-------------------
|
||||
We are not using the STATICFILES capability.
|
||||
We are serving css files from troggle/media/.. (see urls.py)
|
||||
|
||||
Plain CSS pages
|
||||
---------------
|
||||
When running the test server
|
||||
manage.py runserver 0.0.0.0:8000
|
||||
and without Apache running, we are serving CSS using using this Django 'view':
|
||||
view_surveys.cssfilessingle
|
||||
i.e.
|
||||
cssfilessingle() in core/view_surveys.py
|
||||
|
||||
Setting up survex
|
||||
-----------------
|
||||
You need to have survex installed as the command line tools 'cavern' is
|
||||
used as part of the survex import process.
|
||||
$ sudo apt install survex
|
||||
|
||||
Setting up tables and importing survey data
|
||||
-------------------------------------------
|
||||
Run
|
||||
$ sudo python databaseReset.py
|
||||
from the troggle directory will give you instructions.
|
||||
|
||||
[ NB Adding a new year/expedition requires adding a column to the
|
||||
folk/folk.csv table - a year doesn't exist until that is done.]
|
||||
|
||||
|
||||
If you want to work on the source code and be able to commit, you will need to use https instead of http, and your google account will need to be added to the troggle project members list. Contact aaron dot curtis at cantab dot net to get this set up.
|
||||
MariaDB database
|
||||
----------------
|
||||
Start it up with
|
||||
$ sudo mysql -u -p
|
||||
when it will prompt you to type in the password. Get this by reading the settings.py file in use on the server.
|
||||
then
|
||||
> CREATE DATABASE troggle;
|
||||
> use troggle;
|
||||
> exit;
|
||||
|
||||
Next, you need to fill in your local settings. Copy either localsettingsubuntu.py or localsettingsserver.py to a new file called localsettings.py. Follow the instructions contained in the file to fill out your settings.
|
||||
Note the semicolons.
|
||||
|
||||
You can check the status of the db service:
|
||||
$ sudo systemctl status mysql
|
||||
|
||||
You can start and stop the db service with
|
||||
$ sudo systemctl restart mysql.service
|
||||
$ sudo systemctl stop mysql.service
|
||||
$ sudo systemctl start mysql.service
|
||||
|
||||
While logged in at a terminal session as expo on expo.survex.,com
|
||||
|
||||
$ mysql -h localhost -u expo -p<password>
|
||||
will get you the MariasDb command prompt: https://www.hostwinds.com/guide/how-to-use-mysql-mariadb-from-command-line/
|
||||
|
||||
then (Note the SEMICOLONS !):
|
||||
>drop database troggle;
|
||||
>create database troggle;
|
||||
>quit
|
||||
Somewhere I have notes for the GRANT PRIVS type runes...
|
||||
|
||||
Ah yes:
|
||||
CREATE DATABASE troggle;
|
||||
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword'; FLUSH PRIVILEGES; (at mysql root prompt)
|
||||
|
||||
(explained on https://chartio.com/resources/tutorials/how-to-grant-all-privileges-on-a-database-in-mysql/)
|
||||
(but you need to create the database too)
|
||||
|
||||
The GRANT ALL PRIVILEGES bit requires you to logon in to MariaDB as root. sudo doesn't cut it.
|
||||
these permissions are set in a different 'info' database which usually is untouched even if database troggle gets creamed.
|
||||
|
||||
The 'somepassword' is specified int he localsettings.py file.
|
||||
|
||||
|
||||
Setting up tables and importing legacy data
|
||||
------------------------------------------
|
||||
Run "python databaseReset.py reset" from the troggle directory.
|
||||
PERMISSIONS
|
||||
https://linuxize.com/post/usermod-command-in-linux/
|
||||
|
||||
Once troggle is running, you can also log in and then go to "Import / export" data under "admin" on the menu.
|
||||
THIS MAY BE OUT OF DATE - from 2022 we are running Apache as user 'expo' not 'www-data'
|
||||
|
||||
Adding a new year/expedition requires adding a column to the
|
||||
noinfo/folk.csv table - a year doesn't exist until that is done.
|
||||
so that the online editing system for SVX files works.
|
||||
The same goes for /expoweb/ files, so that "edit this page" works and the New Cave
|
||||
and New Entrance forms work.
|
||||
|
||||
sudo usermod -a expo expocvs
|
||||
the expocvs group is used for git
|
||||
|
||||
all the users should be in this group
|
||||
|
||||
|
||||
Running a Troggle server
|
||||
------------------------
|
||||
For high volume use, Troggle should be run using a web server like apache. However, a quick way to get started is to use the development server built into Django.
|
||||
Running a Troggle server with Apache
|
||||
------------------------------------
|
||||
Troggle also needs these aliases to be configured. These are set in
|
||||
/home/expo/config/apache/expo.conf
|
||||
on the expo server.
|
||||
|
||||
To do this, run "python manage.py runserver" from the troggle directory.
|
||||
At least these need setting:
|
||||
DocumentRoot /home/expo/expoweb
|
||||
WSGIScriptAlias / /home/expo/troggle/wsgi.py
|
||||
<Directory /home/expo/troggle>
|
||||
<Files wsgi.py>
|
||||
Require all granted
|
||||
</Files>
|
||||
</Directory>
|
||||
|
||||
the instructions for apache Alias commands are in comments at the end of
|
||||
the urls.py file.
|
||||
|
||||
Unlike the django "manage.py runserver" method, apache requires a restart before it will use
|
||||
any changed files:
|
||||
|
||||
sudo service apache2 restart
|
||||
|
||||
Olly's comments 20 July 2020:
|
||||
olly: looking at /lib/systemd/system/apache2.service suggests so
|
||||
|
||||
olly: ExecStart=/usr/sbin/apachectl start
|
||||
olly: ExecStop=/usr/sbin/apachectl stop
|
||||
olly: ExecReload=/usr/sbin/apachectl graceful
|
||||
|
||||
Additions
|
||||
---------
|
||||
The python code has been manually cleaned using the 'black' and 'ruff' lint tools,
|
||||
and the 'deptry' dependency checker. This needs doing every year or so.
|
||||
See dependencies-check-deptry.txt
|
||||
|
||||
See troggle/pyproject.toml for configurations
|
||||
|
||||
Experimental additions
|
||||
----------------------
|
||||
These are untried tools which may help us document how troggle works in future.
|
||||
|
||||
pip install pygraphviz
|
||||
pip install pyparsing pydot # installs fine
|
||||
django extension graph_models # https://django-extensions.readthedocs.io/en/latest/graph_models.html
|
||||
27
README/index.html
Normal file
27
README/index.html
Normal file
@@ -0,0 +1,27 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>Troggle - Coding Documentation</title>
|
||||
<link rel="stylesheet" type="text/css" href="..media/css/main2.css" />
|
||||
</head>
|
||||
<body>
|
||||
<h1>Troggle Code - README</h1>
|
||||
<h2>Contents of README.txt file</h2>
|
||||
|
||||
<iframe name="erriframe" width="70%" height="500"
|
||||
src="../README.txt" frameborder="1" ></iframe>
|
||||
|
||||
<h2>Troggle documentation in the Expo Handbook</h2>
|
||||
<ul>
|
||||
<li><a href="http://expo.survex.com/handbook/troggle/trogintro.html">Intro</a>
|
||||
<li><a href="http://expo.survex.com/handbook/troggle/trogindex.html">Troggle manual INDEX</a>
|
||||
<li><a href="http://expo.survex.com/handbook/troggle/trogarch.html">Troggle data model</a>
|
||||
<li><a href="http://expo.survex.com/handbook/troggle/trogimport.html">Troggle importing data</a>
|
||||
<li><a href="http://expo.survex.com/handbook/troggle/trogdesign.html">Troggle design decisions</a>
|
||||
<li><a href="http://expo.survex.com/handbook/troggle/trogdesignx.html">Troggle future architectures</a>
|
||||
<li><a href="http://expo.survex.com/handbook/troggle/trogsimpler.html">a kinder simpler Troggle?</a>
|
||||
|
||||
</ul>
|
||||
<hr />
|
||||
</body></html>
|
||||
BIN
README/troggle2020.docx
Normal file
BIN
README/troggle2020.docx
Normal file
Binary file not shown.
BIN
README/troggle2020.odt
Normal file
BIN
README/troggle2020.odt
Normal file
Binary file not shown.
BIN
README/troggle2020.pdf
Normal file
BIN
README/troggle2020.pdf
Normal file
Binary file not shown.
160
_deploy/debian-laptops/localsettings2023-04-05-cleansed.py
Normal file
160
_deploy/debian-laptops/localsettings2023-04-05-cleansed.py
Normal file
@@ -0,0 +1,160 @@
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
NOTE this file is vastly out of sync with troggle/_deploy/wsl/localsettings.py
|
||||
which is the most recent version used in active maintenance. There should be
|
||||
essential differences, but there and many, many non-essential differences which
|
||||
should be eliminated for clarity and to use modern idioms. 8 March 2023.
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# DO NOT check this file into the git repo - it contains real passwords.
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : '123456789012345', # Not used with sqlite3. Not a real password.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = 'Not a real password'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOADMINUSERPASS = 'Not a real password'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
# Define the path to the django app (troggle in this case)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
|
||||
PHOTOS_YEAR = "2023"
|
||||
# add in 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
PYTHON_PATH + "templates"
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
|
||||
'core.context.troggle_context', # in core/troggle.py
|
||||
'django.template.context_processors.debug',
|
||||
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media', # includes a variable MEDIA_URL
|
||||
'django.template.context_processors.static', # includes a variable STATIC_URL
|
||||
'django.template.context_processors.tz',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
|
||||
# insert your own TEMPLATE_LOADERS here
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = False
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
|
||||
|
||||
EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
|
||||
SURVEYS = REPOS_ROOT_PATH
|
||||
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
|
||||
MEDIA_ROOT = TROGGLE_PATH / 'media'
|
||||
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
|
||||
|
||||
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
|
||||
#URL_ROOT = 'http://expo.survex.com/'
|
||||
URL_ROOT = '/'
|
||||
DIR_ROOT = Path("") #this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SURVEYS_URL = '/survey_scans/'
|
||||
|
||||
REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos"
|
||||
|
||||
#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
LOGFILE = '/var/log/troggle/troggle.log'
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log'
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
160
_deploy/debian-laptops/localsettings2023-04-05-secret.py
Normal file
160
_deploy/debian-laptops/localsettings2023-04-05-secret.py
Normal file
@@ -0,0 +1,160 @@
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
NOTE this file is vastly out of sync with troggle/_deploy/wsl/localsettings.py
|
||||
which is the most recent version used in active maintenance. There should be
|
||||
essential differences, but there and many, many non-essential differences which
|
||||
should be eliminated for clarity and to use modern idioms. 8 March 2023.
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# DO NOT check this file into the git repo - it contains real passwords.
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : 'uFqP56B4XleeyIW', # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = '161:gosser'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOADMINUSERPASS = 'gosser:161'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
# Define the path to the django app (troggle in this case)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
|
||||
PHOTOS_YEAR = "2023"
|
||||
# add in 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
PYTHON_PATH + "templates"
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
|
||||
'core.context.troggle_context', # in core/troggle.py
|
||||
'django.template.context_processors.debug',
|
||||
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media', # includes a variable MEDIA_URL
|
||||
'django.template.context_processors.static', # includes a variable STATIC_URL
|
||||
'django.template.context_processors.tz',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
|
||||
# insert your own TEMPLATE_LOADERS here
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = False
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
|
||||
|
||||
EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
|
||||
SURVEYS = REPOS_ROOT_PATH
|
||||
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
|
||||
MEDIA_ROOT = TROGGLE_PATH / 'media'
|
||||
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
|
||||
|
||||
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
|
||||
#URL_ROOT = 'http://expo.survex.com/'
|
||||
URL_ROOT = '/'
|
||||
DIR_ROOT = Path("") #this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SURVEYS_URL = '/survey_scans/'
|
||||
|
||||
REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos"
|
||||
|
||||
#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
LOGFILE = '/var/log/troggle/troggle.log'
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log'
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
164
_deploy/debian-laptops/localsettingsserver2023-01-cleansed.py
Normal file
164
_deploy/debian-laptops/localsettingsserver2023-01-cleansed.py
Normal file
@@ -0,0 +1,164 @@
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# DO NOT check this file into the git repo - it contains real passwords.
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : '123456789012345', # Not used with sqlite3.Not the real password
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
|
||||
EXPOUSERPASS = "nope"
|
||||
EXPOADMINUSERPASS = "nope"
|
||||
EMAIL_HOST_PASSWORD = "nope"
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
# Define the path to the django app (troggle in this case)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
PHOTOS_YEAR = "2022"
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
PYTHON_PATH + "templates"
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
|
||||
'core.context.troggle_context', # in core/troggle.py
|
||||
'django.template.context_processors.debug',
|
||||
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media', # includes a variable MEDIA_URL
|
||||
'django.template.context_processors.static', # includes a variable STATIC_URL
|
||||
'django.template.context_processors.tz',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
|
||||
# insert your own TEMPLATE_LOADERS here
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = True
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
|
||||
#SURVEYS = REPOS_ROOT_PATH
|
||||
SCANS_ROOT = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
|
||||
MEDIA_ROOT = TROGGLE_PATH / 'media'
|
||||
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
|
||||
|
||||
|
||||
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
|
||||
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")
|
||||
|
||||
# CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
|
||||
# THREEDCACHEDIR = CACHEDIR + '3d/'
|
||||
# THUMBNAILCACHE = CACHEDIR + 'thumbs'
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
|
||||
|
||||
#Note that all these *_URL constants are not actually used in urls.py, they should be..
|
||||
#URL_ROOT = 'http://expo.survex.com/'
|
||||
URL_ROOT = '/'
|
||||
DIR_ROOT = ''#this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SCANS_URL = '/survey_scans/'
|
||||
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
|
||||
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
LOGFILE = '/var/log/troggle/troggle.log'
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log'
|
||||
|
||||
# add in 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
|
||||
|
||||
# Sanitise these to be strings as all other code is expecting strings
|
||||
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
|
||||
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
|
||||
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
|
||||
LOGFILE = os.fspath(LOGFILE)
|
||||
#SURVEYS = os.fspath(SURVEYS)
|
||||
EXPOWEB = os.fspath(EXPOWEB)
|
||||
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
|
||||
SURVEX_DATA = os.fspath(SURVEX_DATA)
|
||||
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
|
||||
TEMPLATE_PATH = os.fspath(TROGGLE_PATH)
|
||||
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
|
||||
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
|
||||
SCANS_ROOT = os.fspath(SCANS_ROOT)
|
||||
LIBDIR = os.fspath(LIBDIR)
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
70
_deploy/debian-laptops/os-trog.sh
Normal file
70
_deploy/debian-laptops/os-trog.sh
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in the troggle directory: "bash venv-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 22.04 relatively clean install.
|
||||
|
||||
sudo apt install python-is-python3 -y
|
||||
python --version : ensure python is an alias for python3 not python2.7
|
||||
sudo apt update -y
|
||||
sudo apt dist-upgrade -y
|
||||
sudo apt autoremove -y
|
||||
sudo apt install sqlite3 -y
|
||||
sudo apt install python3-pip -y
|
||||
|
||||
# this installs a shed-load of other stuff: binutils etc.sudo apt install survex-aven
|
||||
sudo apt install git openssh-client -y
|
||||
# On a clean debian 11 (bullseye) installation with Xfce & ssh,
|
||||
|
||||
#on ubuntu 20.04:
|
||||
#Package sftp is not available, but is referred to by another package.
|
||||
#This may mean that the package is missing, has been obsoleted, or
|
||||
#is only available from another source
|
||||
#E: Package 'sftp' has no installation candidate
|
||||
|
||||
|
||||
# On Ubuntu 20.04, with python10, the pip install fails.
|
||||
# So you need to get the pip from source
|
||||
# sudo curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
|
||||
# but really you should be using 22.04
|
||||
# and also, isf using debian,
|
||||
# sudo python3.10 -m pip install -U virtualenv
|
||||
|
||||
# as debian does not install everything that ubuntu does, you need:
|
||||
sudo useradd expo
|
||||
sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
|
||||
sudo apt install python3-venv -y
|
||||
sudo apt install python3-dev -y
|
||||
|
||||
# default since 22.04
|
||||
# sudo apt install python3.10
|
||||
sudo apt install python3.10-venv -y
|
||||
sudo apt install python3.10-dev -y
|
||||
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.10 1
|
||||
|
||||
sudo apt install mariadb-server -y
|
||||
sudo apt install libmariadb-dev -y
|
||||
|
||||
sudo python -m pip install --upgrade pip
|
||||
|
||||
sudo apt install sftp -y
|
||||
echo '###'
|
||||
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
|
||||
echo '###'
|
||||
sudo apt install tunnelx therion -y
|
||||
|
||||
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
|
||||
# sudo service mysql start
|
||||
|
||||
git config --global user.email "you@example.com"
|
||||
git config --global user.name "Your Name"
|
||||
|
||||
echo '###'
|
||||
echo '### Currently set version of python'
|
||||
python --version
|
||||
|
||||
echo '###'
|
||||
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
|
||||
|
||||
147
_deploy/debian-laptops/settings2023-02-10.py
Normal file
147
_deploy/debian-laptops/settings2023-02-10.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""
|
||||
Django settings for troggle project.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/dev/topics/settings/
|
||||
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/dev/ref/settings/
|
||||
"""
|
||||
# Imports should be grouped in the following order:
|
||||
|
||||
# 1.Standard library imports.
|
||||
# 2.Related third party imports.
|
||||
# 3.Local application/library specific imports.
|
||||
# 4.You should put a blank line between each group of imports.
|
||||
|
||||
|
||||
|
||||
print("* importing troggle/settings.py")
|
||||
|
||||
# default value, then gets overwritten by real secrets
|
||||
SECRET_KEY = "not-the-real-secret-key-a#vaeozn0---^fj!355qki*vj2"
|
||||
|
||||
GIT = "git" # command for running git
|
||||
|
||||
# Note that this builds upon the django system installed
|
||||
# global settings in
|
||||
# django/conf/global_settings.py which is automatically loaded first.
|
||||
# read https://docs.djangoproject.com/en/dev/topics/settings/
|
||||
|
||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||
# BASE_DIR = os.path.dirname(os.path.dirname(__file__))
|
||||
|
||||
# Django settings for troggle project.
|
||||
|
||||
ALLOWED_HOSTS = ["*", "expo.survex.com", ".survex.com", "localhost", "127.0.0.1", "192.168.0.5"]
|
||||
|
||||
ADMINS = (
|
||||
# ('Your Name', 'your_email@domain.com'),
|
||||
)
|
||||
MANAGERS = ADMINS
|
||||
|
||||
# LOGIN_URL = '/accounts/login/' # this is the default value so does not need to be set
|
||||
|
||||
# Local time zone for this installation. Choices can be found here:
|
||||
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
|
||||
# although not all choices may be available on all operating systems.
|
||||
# If running in a Windows environment this must be set to the same as your
|
||||
# system time zone.
|
||||
USE_TZ = True
|
||||
TIME_ZONE = "Europe/London"
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
LANGUAGE_CODE = "en-uk"
|
||||
|
||||
SITE_ID = 1
|
||||
|
||||
# If you set this to False, Django will make some optimizations so as not
|
||||
# to load the internationalization machinery.
|
||||
USE_I18N = True
|
||||
USE_L10N = True
|
||||
|
||||
FIX_PERMISSIONS = []
|
||||
|
||||
# top-level survex file basename (without .svx)
|
||||
SURVEX_TOPNAME = "1623-and-1626-no-schoenberg-hs"
|
||||
|
||||
|
||||
# Caves for which survex files exist, but are not otherwise registered
|
||||
# replaced (?) by expoweb/cave_data/pendingcaves.txt
|
||||
# PENDING = ["1626-361", "2007-06", "2009-02",
|
||||
# "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
|
||||
# "2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
|
||||
# "2018-pf-01", "2018-pf-02"]
|
||||
|
||||
APPEND_SLASH = (
|
||||
False # never relevant because we have urls that match unknown files and produce an 'edit this page' response
|
||||
)
|
||||
SMART_APPEND_SLASH = True # not eorking as middleware different after Dj2.0
|
||||
|
||||
|
||||
LOGIN_REDIRECT_URL = "/" # does not seem to have any effect
|
||||
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = True
|
||||
SECURE_BROWSER_XSS_FILTER = True
|
||||
# SESSION_COOKIE_SECURE = True # if enabled, cannot login to Django control panel, bug elsewhere?
|
||||
# CSRF_COOKIE_SECURE = True # if enabled only sends cookies over SSL
|
||||
X_FRAME_OPTIONS = "DENY" # changed to "DENY" after I eliminated all the iframes e.g. /xmlvalid.html
|
||||
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" # from Django 3.2
|
||||
|
||||
INSTALLED_APPS = (
|
||||
"django.contrib.admin",
|
||||
"django.contrib.auth", # includes the url redirections for login, logout
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.messages",
|
||||
"django.contrib.admindocs",
|
||||
"django.forms", # Required to customise widget templates
|
||||
# 'django.contrib.staticfiles', # We put our CSS etc explicitly in the right place so do not need this
|
||||
"troggle.core",
|
||||
)
|
||||
|
||||
FORM_RENDERER = "django.forms.renderers.TemplatesSetting" # Required to customise widget templates
|
||||
|
||||
# See the recommended order of these in https://docs.djangoproject.com/en/dev/ref/middleware/
|
||||
# Note that this is a radically different onion architecture from earlier versions though it looks the same,
|
||||
# see https://docs.djangoproject.com/en/dev/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
|
||||
# Seriously, read this: https://www.webforefront.com/django/middlewaredjango.html which is MUCH BETTER than the docs
|
||||
MIDDLEWARE = [
|
||||
#'django.middleware.security.SecurityMiddleware', # SECURE_SSL_REDIRECT and SECURE_SSL_HOST # we don't use this
|
||||
"django.middleware.gzip.GZipMiddleware", # not needed when expofiles and photos served by apache
|
||||
"django.contrib.sessions.middleware.SessionMiddleware", # Manages sessions, if CSRF_USE_SESSIONS then it needs to be early
|
||||
"django.middleware.common.CommonMiddleware", # DISALLOWED_USER_AGENTS, APPEND_SLASH and PREPEND_WWW
|
||||
"django.middleware.csrf.CsrfViewMiddleware", # Cross Site Request Forgeries by adding hidden form fields to POST
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware", # Adds the user attribute, representing the currently-logged-in user
|
||||
"django.contrib.admindocs.middleware.XViewMiddleware", # this and docutils needed by admindocs
|
||||
"django.contrib.messages.middleware.MessageMiddleware", # Cookie-based and session-based message support. Needed by admin system
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware", # clickjacking protection via the X-Frame-Options header
|
||||
#'django.middleware.security.SecurityMiddleware', # SECURE_HSTS_SECONDS, SECURE_CONTENT_TYPE_NOSNIFF, SECURE_BROWSER_XSS_FILTER, SECURE_REFERRER_POLICY, and SECURE_SSL_REDIRECT
|
||||
#'troggle.core.middleware.SmartAppendSlashMiddleware' # needs adapting after Dj2.0
|
||||
]
|
||||
|
||||
ROOT_URLCONF = "troggle.urls"
|
||||
|
||||
WSGI_APPLICATION = "troggle.wsgi.application" # change to asgi as soon as we upgrade to Django 3.0
|
||||
|
||||
ACCOUNT_ACTIVATION_DAYS = 3
|
||||
|
||||
# AUTH_PROFILE_MODULE = 'core.person' # used by removed profiles app ?
|
||||
|
||||
QM_PATTERN = "\[\[\s*[Qq][Mm]:([ABC]?)(\d{4})-(\d*)-(\d*)\]\]"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
|
||||
TEST_RUNNER = "django.test.runner.DiscoverRunner"
|
||||
|
||||
from localsettings import *
|
||||
|
||||
# localsettings needs to take precedence. Call it to override any existing vars.
|
||||
147
_deploy/debian-laptops/settings2023-04-23.py
Normal file
147
_deploy/debian-laptops/settings2023-04-23.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""
|
||||
Django settings for troggle project.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/dev/topics/settings/
|
||||
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/dev/ref/settings/
|
||||
"""
|
||||
# Imports should be grouped in the following order:
|
||||
|
||||
# 1.Standard library imports.
|
||||
# 2.Related third party imports.
|
||||
# 3.Local application/library specific imports.
|
||||
# 4.You should put a blank line between each group of imports.
|
||||
|
||||
|
||||
|
||||
print("* importing troggle/settings.py")
|
||||
|
||||
# default value, then gets overwritten by real secrets
|
||||
SECRET_KEY = "not-the-real-secret-key-a#vaeozn0---^fj!355qki*vj2"
|
||||
|
||||
GIT = "git" # command for running git
|
||||
|
||||
# Note that this builds upon the django system installed
|
||||
# global settings in
|
||||
# django/conf/global_settings.py which is automatically loaded first.
|
||||
# read https://docs.djangoproject.com/en/dev/topics/settings/
|
||||
|
||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||
# BASE_DIR = os.path.dirname(os.path.dirname(__file__))
|
||||
|
||||
# Django settings for troggle project.
|
||||
|
||||
ALLOWED_HOSTS = ["*", "expo.survex.com", ".survex.com", "localhost", "127.0.0.1", "192.168.0.5"]
|
||||
|
||||
ADMINS = (
|
||||
# ('Your Name', 'your_email@domain.com'),
|
||||
)
|
||||
MANAGERS = ADMINS
|
||||
|
||||
# LOGIN_URL = '/accounts/login/' # this is the default value so does not need to be set
|
||||
|
||||
# Local time zone for this installation. Choices can be found here:
|
||||
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
|
||||
# although not all choices may be available on all operating systems.
|
||||
# If running in a Windows environment this must be set to the same as your
|
||||
# system time zone.
|
||||
USE_TZ = True
|
||||
TIME_ZONE = "Europe/London"
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
LANGUAGE_CODE = "en-uk"
|
||||
|
||||
SITE_ID = 1
|
||||
|
||||
# If you set this to False, Django will make some optimizations so as not
|
||||
# to load the internationalization machinery.
|
||||
USE_I18N = True
|
||||
USE_L10N = True
|
||||
|
||||
FIX_PERMISSIONS = []
|
||||
|
||||
# top-level survex file basename (without .svx)
|
||||
SURVEX_TOPNAME = "1623-and-1626-no-schoenberg-hs"
|
||||
|
||||
|
||||
# Caves for which survex files exist, but are not otherwise registered
|
||||
# replaced (?) by expoweb/cave_data/pendingcaves.txt
|
||||
# PENDING = ["1626-361", "2007-06", "2009-02",
|
||||
# "2012-ns-01", "2012-ns-02", "2010-04", "2012-ns-05", "2012-ns-06",
|
||||
# "2012-ns-07", "2012-ns-08", "2012-ns-12", "2012-ns-14", "2012-ns-15", "2014-bl888",
|
||||
# "2018-pf-01", "2018-pf-02"]
|
||||
|
||||
APPEND_SLASH = (
|
||||
False # never relevant because we have urls that match unknown files and produce an 'edit this page' response
|
||||
)
|
||||
SMART_APPEND_SLASH = True # not eorking as middleware different after Dj2.0
|
||||
|
||||
|
||||
LOGIN_REDIRECT_URL = "/" # does not seem to have any effect
|
||||
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = True
|
||||
SECURE_BROWSER_XSS_FILTER = True
|
||||
# SESSION_COOKIE_SECURE = True # if enabled, cannot login to Django control panel, bug elsewhere?
|
||||
# CSRF_COOKIE_SECURE = True # if enabled only sends cookies over SSL
|
||||
X_FRAME_OPTIONS = "DENY" # changed to "DENY" after I eliminated all the iframes e.g. /xmlvalid.html
|
||||
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" # from Django 3.2
|
||||
|
||||
INSTALLED_APPS = (
|
||||
"django.contrib.admin",
|
||||
"django.contrib.auth", # includes the url redirections for login, logout
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.messages",
|
||||
"django.contrib.admindocs",
|
||||
"django.forms", # Required to customise widget templates
|
||||
# 'django.contrib.staticfiles', # We put our CSS etc explicitly in the right place so do not need this
|
||||
"troggle.core",
|
||||
)
|
||||
|
||||
FORM_RENDERER = "django.forms.renderers.TemplatesSetting" # Required to customise widget templates
|
||||
|
||||
# See the recommended order of these in https://docs.djangoproject.com/en/dev/ref/middleware/
|
||||
# Note that this is a radically different onion architecture from earlier versions though it looks the same,
|
||||
# see https://docs.djangoproject.com/en/dev/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
|
||||
# Seriously, read this: https://www.webforefront.com/django/middlewaredjango.html which is MUCH BETTER than the docs
|
||||
MIDDLEWARE = [
|
||||
#'django.middleware.security.SecurityMiddleware', # SECURE_SSL_REDIRECT and SECURE_SSL_HOST # we don't use this
|
||||
"django.middleware.gzip.GZipMiddleware", # not needed when expofiles and photos served by apache
|
||||
"django.contrib.sessions.middleware.SessionMiddleware", # Manages sessions, if CSRF_USE_SESSIONS then it needs to be early
|
||||
"django.middleware.common.CommonMiddleware", # DISALLOWED_USER_AGENTS, APPEND_SLASH and PREPEND_WWW
|
||||
"django.middleware.csrf.CsrfViewMiddleware", # Cross Site Request Forgeries by adding hidden form fields to POST
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware", # Adds the user attribute, representing the currently-logged-in user
|
||||
"django.contrib.admindocs.middleware.XViewMiddleware", # this and docutils needed by admindocs
|
||||
"django.contrib.messages.middleware.MessageMiddleware", # Cookie-based and session-based message support. Needed by admin system
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware", # clickjacking protection via the X-Frame-Options header
|
||||
#'django.middleware.security.SecurityMiddleware', # SECURE_HSTS_SECONDS, SECURE_CONTENT_TYPE_NOSNIFF, SECURE_BROWSER_XSS_FILTER, SECURE_REFERRER_POLICY, and SECURE_SSL_REDIRECT
|
||||
#'troggle.core.middleware.SmartAppendSlashMiddleware' # needs adapting after Dj2.0
|
||||
]
|
||||
|
||||
ROOT_URLCONF = "troggle.urls"
|
||||
|
||||
WSGI_APPLICATION = "troggle.wsgi.application" # change to asgi as soon as we upgrade to Django 3.0
|
||||
|
||||
ACCOUNT_ACTIVATION_DAYS = 3
|
||||
|
||||
# AUTH_PROFILE_MODULE = 'core.person' # used by removed profiles app ?
|
||||
|
||||
QM_PATTERN = "\[\[\s*[Qq][Mm]:([ABC]?)(\d{4})-(\d*)-(\d*)\]\]"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
|
||||
TEST_RUNNER = "django.test.runner.DiscoverRunner"
|
||||
|
||||
from localsettings import *
|
||||
|
||||
# localsettings needs to take precedence. Call it to override any existing vars.
|
||||
173
_deploy/debian-laptops/venv-trog-crowley.sh
Normal file
173
_deploy/debian-laptops/venv-trog-crowley.sh
Normal file
@@ -0,0 +1,173 @@
|
||||
#!/bin/bash
|
||||
# Crowley has python 3.9.2
|
||||
# Taken from: footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
|
||||
# Run this in a terminal in the troggle directory: 'bash venv-trog-crowley.sh'
|
||||
echo '-- DONT RUN THIS - messes up permissions!'
|
||||
|
||||
|
||||
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog-crowley.sh"'
|
||||
# use the script os-trog-crowley.sh
|
||||
|
||||
# If you are using Debian, then stick with the default version of python
|
||||
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
|
||||
|
||||
|
||||
# NOW we set up troggle
|
||||
PYTHON=python3.9
|
||||
VENAME=p9d4 # python3.x and django 4
|
||||
echo "** You are logged in as `id -u -n`"
|
||||
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
|
||||
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder (this script location): ${TROGDIR}"
|
||||
|
||||
if [ -d requirements.txt ]; then
|
||||
echo "-- No requirements.txt found. Copy it from your most recent installation."
|
||||
exit 1
|
||||
fi
|
||||
echo ## Using requirements.txt :
|
||||
cat requirements.txt
|
||||
echo ##
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
|
||||
# NOTE that when using a later or earlier verison of python, you MUST also
|
||||
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
|
||||
|
||||
# NOW set up link from expo user folder
|
||||
# needed for WSL2
|
||||
echo Creating links from Linux filesystem user
|
||||
# These links only need making once, for many venv
|
||||
cd ~
|
||||
|
||||
if [ ! -d $VENAME ]; then
|
||||
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
|
||||
$PYTHON -m venv $VENAME
|
||||
else
|
||||
echo "## /$VENAME/ already exists ! Delete it first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Activate the virtual env and see what the default packages are
|
||||
echo "### Activating $VENAME"
|
||||
|
||||
cd $VENAME
|
||||
echo "-- now in: ${PWD}"
|
||||
source bin/activate
|
||||
echo "### Activated."
|
||||
# update local version of pip, more recent than OS version
|
||||
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
|
||||
|
||||
# update local version of setuptools, more recent than OS version, needed for packages without wheels
|
||||
|
||||
echo "### installing later version of pip inside $VENAME"
|
||||
$PYTHON -m pip install --upgrade pip
|
||||
$PYTHON -m pip install --upgrade setuptools
|
||||
|
||||
PIP=pip
|
||||
|
||||
$PIP list > original-pip.list
|
||||
$PIP freeze >original.txt
|
||||
|
||||
# we are in /home/$USER/$VENAME/
|
||||
ln -s ${TROGDIR} troggle
|
||||
ln -s ${TROGDIR}/../expoweb expoweb
|
||||
ln -s ${TROGDIR}/../loser loser
|
||||
ln -s ${TROGDIR}/../drawings drawings
|
||||
|
||||
# fudge for philip's machine
|
||||
if [ -d ${TROGDIR}/../expofiles ]; then
|
||||
ln -s ${TROGDIR}/../expofiles expofiles
|
||||
else
|
||||
if [ ! -d /mnt/f/expofiles ]; then
|
||||
sudo mkdir /mnt/f
|
||||
sudo mount -t drvfs F: /mnt/f
|
||||
else
|
||||
ln -s /mnt/f/expofiles expofiles
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "### Setting file permissions.. may take a while.."
|
||||
git config --global --add safe.directory '*'
|
||||
#sudo chmod -R 0777 *
|
||||
|
||||
echo "### links to expoweb, troggle etc. complete:"
|
||||
ls -tla
|
||||
|
||||
echo "###"
|
||||
echo "### now installing ${TROGDIR}/requirements.txt"
|
||||
echo "###"
|
||||
|
||||
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
|
||||
# seen on wsl2 as well as wsl1
|
||||
# which ALSO ruins EXISTING permissions !
|
||||
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
|
||||
|
||||
$PIP install -r ${TROGDIR}/requirements.txt
|
||||
echo '### install from requirements.txt completed.'
|
||||
echo '### '
|
||||
|
||||
$PIP freeze > requirements.txt
|
||||
# so that we can track requirements more easily with git
|
||||
# because we do not install these with pip, but they are listed by the freeze command
|
||||
# Now find out what we actually installed by subtracting the stuff venv installed anyway
|
||||
sort original.txt > 1
|
||||
sort requirements.txt >2
|
||||
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-requirements.txt
|
||||
rm 1
|
||||
rm 2
|
||||
|
||||
cp requirements.txt requirements-$VENAME.txt
|
||||
cp requirements-$VENAME.txt troggle/requirements-$VENAME.txt
|
||||
|
||||
$PIP list > installed-pip.list
|
||||
$PIP list -o > installed-pip-o.list
|
||||
|
||||
REQ=installation-record
|
||||
mkdir $REQ
|
||||
mv requirements-$VENAME.txt $REQ
|
||||
mv original.txt $REQ
|
||||
mv requirements.txt $REQ
|
||||
mv original-pip.list $REQ
|
||||
mv installed-pip.list $REQ
|
||||
mv installed-pip-o.list $REQ
|
||||
cp fresh-requirements.txt ../requirements.txt
|
||||
mv fresh-requirements.txt $REQ
|
||||
cp troggle/`basename "$0"` $REQ
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
python --version
|
||||
echo "Django version:`django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/$VENAME'
|
||||
'source bin/activate'
|
||||
'cd troggle'
|
||||
'django-admin'
|
||||
'python manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
## the tests may ALSO fail because of ssh and permissions errors
|
||||
# Ran 85 tests in 83.492s
|
||||
# FAILED (failures=5)
|
||||
## So you will need to run
|
||||
#$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
|
||||
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
|
||||
# because this chmod only takes effect then.
|
||||
|
||||
'./pre-run.sh' (runs the migrations and then the tests)
|
||||
|
||||
'python databaseReset.py reset $VENAME'
|
||||
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
if [ ! -d /mnt/f/expofiles ]; then
|
||||
echo '### No valid expofiles directory. Fix this before any tests will work.
|
||||
fi
|
||||
227
_deploy/debian-server/apache2.conf
Normal file
227
_deploy/debian-server/apache2.conf
Normal file
@@ -0,0 +1,227 @@
|
||||
# This is the main Apache server configuration file. It contains the
|
||||
# configuration directives that give the server its instructions.
|
||||
# See http://httpd.apache.org/docs/2.4/ for detailed information about
|
||||
# the directives and /usr/share/doc/apache2/README.Debian about Debian specific
|
||||
# hints.
|
||||
#
|
||||
#
|
||||
# Summary of how the Apache 2 configuration works in Debian:
|
||||
# The Apache 2 web server configuration in Debian is quite different to
|
||||
# upstream's suggested way to configure the web server. This is because Debian's
|
||||
# default Apache2 installation attempts to make adding and removing modules,
|
||||
# virtual hosts, and extra configuration directives as flexible as possible, in
|
||||
# order to make automating the changes and administering the server as easy as
|
||||
# possible.
|
||||
|
||||
# It is split into several files forming the configuration hierarchy outlined
|
||||
# below, all located in the /etc/apache2/ directory:
|
||||
#
|
||||
# /etc/apache2/
|
||||
# |-- apache2.conf
|
||||
# | `-- ports.conf
|
||||
# |-- mods-enabled
|
||||
# | |-- *.load
|
||||
# | `-- *.conf
|
||||
# |-- conf-enabled
|
||||
# | `-- *.conf
|
||||
# `-- sites-enabled
|
||||
# `-- *.conf
|
||||
#
|
||||
#
|
||||
# * apache2.conf is the main configuration file (this file). It puts the pieces
|
||||
# together by including all remaining configuration files when starting up the
|
||||
# web server.
|
||||
#
|
||||
# * ports.conf is always included from the main configuration file. It is
|
||||
# supposed to determine listening ports for incoming connections which can be
|
||||
# customized anytime.
|
||||
#
|
||||
# * Configuration files in the mods-enabled/, conf-enabled/ and sites-enabled/
|
||||
# directories contain particular configuration snippets which manage modules,
|
||||
# global configuration fragments, or virtual host configurations,
|
||||
# respectively.
|
||||
#
|
||||
# They are activated by symlinking available configuration files from their
|
||||
# respective *-available/ counterparts. These should be managed by using our
|
||||
# helpers a2enmod/a2dismod, a2ensite/a2dissite and a2enconf/a2disconf. See
|
||||
# their respective man pages for detailed information.
|
||||
#
|
||||
# * The binary is called apache2. Due to the use of environment variables, in
|
||||
# the default configuration, apache2 needs to be started/stopped with
|
||||
# /etc/init.d/apache2 or apache2ctl. Calling /usr/bin/apache2 directly will not
|
||||
# work with the default configuration.
|
||||
|
||||
|
||||
# Global configuration
|
||||
#
|
||||
|
||||
#
|
||||
# ServerRoot: The top of the directory tree under which the server's
|
||||
# configuration, error, and log files are kept.
|
||||
#
|
||||
# NOTE! If you intend to place this on an NFS (or otherwise network)
|
||||
# mounted filesystem then please read the Mutex documentation (available
|
||||
# at <URL:http://httpd.apache.org/docs/2.4/mod/core.html#mutex>);
|
||||
# you will save yourself a lot of trouble.
|
||||
#
|
||||
# Do NOT add a slash at the end of the directory path.
|
||||
#
|
||||
#ServerRoot "/etc/apache2"
|
||||
|
||||
#
|
||||
# The accept serialization lock file MUST BE STORED ON A LOCAL DISK.
|
||||
#
|
||||
#Mutex file:${APACHE_LOCK_DIR} default
|
||||
|
||||
#
|
||||
# The directory where shm and other runtime files will be stored.
|
||||
#
|
||||
|
||||
DefaultRuntimeDir ${APACHE_RUN_DIR}
|
||||
|
||||
#
|
||||
# PidFile: The file in which the server should record its process
|
||||
# identification number when it starts.
|
||||
# This needs to be set in /etc/apache2/envvars
|
||||
#
|
||||
PidFile ${APACHE_PID_FILE}
|
||||
|
||||
#
|
||||
# Timeout: The number of seconds before receives and sends time out.
|
||||
#
|
||||
Timeout 300
|
||||
|
||||
#
|
||||
# KeepAlive: Whether or not to allow persistent connections (more than
|
||||
# one request per connection). Set to "Off" to deactivate.
|
||||
#
|
||||
KeepAlive On
|
||||
|
||||
#
|
||||
# MaxKeepAliveRequests: The maximum number of requests to allow
|
||||
# during a persistent connection. Set to 0 to allow an unlimited amount.
|
||||
# We recommend you leave this number high, for maximum performance.
|
||||
#
|
||||
MaxKeepAliveRequests 100
|
||||
|
||||
#
|
||||
# KeepAliveTimeout: Number of seconds to wait for the next request from the
|
||||
# same client on the same connection.
|
||||
#
|
||||
KeepAliveTimeout 5
|
||||
|
||||
|
||||
# These need to be set in /etc/apache2/envvars
|
||||
User ${APACHE_RUN_USER}
|
||||
Group ${APACHE_RUN_GROUP}
|
||||
|
||||
#
|
||||
# HostnameLookups: Log the names of clients or just their IP addresses
|
||||
# e.g., www.apache.org (on) or 204.62.129.132 (off).
|
||||
# The default is off because it'd be overall better for the net if people
|
||||
# had to knowingly turn this feature on, since enabling it means that
|
||||
# each client request will result in AT LEAST one lookup request to the
|
||||
# nameserver.
|
||||
#
|
||||
HostnameLookups Off
|
||||
|
||||
# ErrorLog: The location of the error log file.
|
||||
# If you do not specify an ErrorLog directive within a <VirtualHost>
|
||||
# container, error messages relating to that virtual host will be
|
||||
# logged here. If you *do* define an error logfile for a <VirtualHost>
|
||||
# container, that host's errors will be logged there and not here.
|
||||
#
|
||||
ErrorLog ${APACHE_LOG_DIR}/error.log
|
||||
|
||||
#
|
||||
# LogLevel: Control the severity of messages logged to the error_log.
|
||||
# Available values: trace8, ..., trace1, debug, info, notice, warn,
|
||||
# error, crit, alert, emerg.
|
||||
# It is also possible to configure the log level for particular modules, e.g.
|
||||
# "LogLevel info ssl:warn"
|
||||
#
|
||||
LogLevel warn
|
||||
|
||||
# Include module configuration:
|
||||
IncludeOptional mods-enabled/*.load
|
||||
IncludeOptional mods-enabled/*.conf
|
||||
|
||||
# Include list of ports to listen on
|
||||
Include ports.conf
|
||||
|
||||
|
||||
# Sets the default security model of the Apache2 HTTPD server. It does
|
||||
# not allow access to the root filesystem outside of /usr/share and /var/www.
|
||||
# The former is used by web applications packaged in Debian,
|
||||
# the latter may be used for local directories served by the web server. If
|
||||
# your system is serving content from a sub-directory in /srv you must allow
|
||||
# access here, or in any related virtual host.
|
||||
<Directory />
|
||||
Options FollowSymLinks
|
||||
AllowOverride None
|
||||
Require all denied
|
||||
</Directory>
|
||||
|
||||
<Directory /usr/share>
|
||||
AllowOverride None
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
<Directory /var/www/>
|
||||
Options Indexes FollowSymLinks
|
||||
AllowOverride None
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
#<Directory /srv/>
|
||||
# Options Indexes FollowSymLinks
|
||||
# AllowOverride None
|
||||
# Require all granted
|
||||
#</Directory>
|
||||
|
||||
|
||||
|
||||
|
||||
# AccessFileName: The name of the file to look for in each directory
|
||||
# for additional configuration directives. See also the AllowOverride
|
||||
# directive.
|
||||
#
|
||||
AccessFileName .htaccess
|
||||
|
||||
#
|
||||
# The following lines prevent .htaccess and .htpasswd files from being
|
||||
# viewed by Web clients.
|
||||
#
|
||||
<FilesMatch "^\.ht">
|
||||
Require all denied
|
||||
</FilesMatch>
|
||||
|
||||
|
||||
#
|
||||
# The following directives define some format nicknames for use with
|
||||
# a CustomLog directive.
|
||||
#
|
||||
# These deviate from the Common Log Format definitions in that they use %O
|
||||
# (the actual bytes sent including headers) instead of %b (the size of the
|
||||
# requested file), because the latter makes it impossible to detect partial
|
||||
# requests.
|
||||
#
|
||||
# Note that the use of %{X-Forwarded-For}i instead of %h is not recommended.
|
||||
# Use mod_remoteip instead.
|
||||
#
|
||||
LogFormat "%v:%p %h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" vhost_combined
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %O \"%{Referer}i\" \"%{User-Agent}i\"" combined
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %O" common
|
||||
LogFormat "%{Referer}i -> %U" referer
|
||||
LogFormat "%{User-agent}i" agent
|
||||
|
||||
# Include of directories ignores editors' and dpkg's backup files,
|
||||
# see README.Debian for details.
|
||||
|
||||
# Include generic snippets of statements
|
||||
IncludeOptional conf-enabled/*.conf
|
||||
|
||||
# Include the virtual host configurations:
|
||||
IncludeOptional sites-enabled/*.conf
|
||||
|
||||
# vim: syntax=apache ts=4 sw=4 sts=4 sr noet
|
||||
47
_deploy/debian-server/envvars
Normal file
47
_deploy/debian-server/envvars
Normal file
@@ -0,0 +1,47 @@
|
||||
# envvars - default environment variables for apache2ctl
|
||||
|
||||
# this won't be correct after changing uid
|
||||
unset HOME
|
||||
|
||||
# for supporting multiple apache2 instances
|
||||
if [ "${APACHE_CONFDIR##/etc/apache2-}" != "${APACHE_CONFDIR}" ] ; then
|
||||
SUFFIX="-${APACHE_CONFDIR##/etc/apache2-}"
|
||||
else
|
||||
SUFFIX=
|
||||
fi
|
||||
|
||||
# Since there is no sane way to get the parsed apache2 config in scripts, some
|
||||
# settings are defined via environment variables and then used in apache2ctl,
|
||||
# /etc/init.d/apache2, /etc/logrotate.d/apache2, etc.
|
||||
export APACHE_RUN_USER=expo
|
||||
export APACHE_RUN_GROUP=expo
|
||||
# temporary state file location. This might be changed to /run in Wheezy+1
|
||||
export APACHE_PID_FILE=/var/run/apache2$SUFFIX/apache2.pid
|
||||
export APACHE_RUN_DIR=/var/run/apache2$SUFFIX
|
||||
export APACHE_LOCK_DIR=/var/lock/apache2$SUFFIX
|
||||
# Only /var/log/apache2 is handled by /etc/logrotate.d/apache2.
|
||||
export APACHE_LOG_DIR=/var/log/apache2$SUFFIX
|
||||
|
||||
## The locale used by some modules like mod_dav
|
||||
#export LANG=C
|
||||
## Uncomment the following line to use the system default locale instead:
|
||||
. /etc/default/locale
|
||||
|
||||
export LANG
|
||||
|
||||
## The command to get the status for 'apache2ctl status'.
|
||||
## Some packages providing 'www-browser' need '--dump' instead of '-dump'.
|
||||
#export APACHE_LYNX='www-browser -dump'
|
||||
|
||||
## If you need a higher file descriptor limit, uncomment and adjust the
|
||||
## following line (default is 8192):
|
||||
#APACHE_ULIMIT_MAX_FILES='ulimit -n 65536'
|
||||
|
||||
## If you would like to pass arguments to the web server, add them below
|
||||
## to the APACHE_ARGUMENTS environment.
|
||||
#export APACHE_ARGUMENTS=''
|
||||
|
||||
## Enable the debug mode for maintainer scripts.
|
||||
## This will produce a verbose output on package installations of web server modules and web application
|
||||
## installations which interact with Apache
|
||||
#export APACHE2_MAINTSCRIPT_DEBUG=1
|
||||
121
_deploy/debian-server/localsettingsserver-old.py
Normal file
121
_deploy/debian-server/localsettingsserver-old.py
Normal file
@@ -0,0 +1,121 @@
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# DO NOT check this file into the git repo - it contains real passwords. [not this copy]
|
||||
SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
|
||||
EXPOUSERPASS = "nope"
|
||||
EXPOADMINUSERPASS = "nope"
|
||||
EMAIL_HOST_PASSWORD = "nope"
|
||||
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : 'not a real password', # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = "nnn:gggggg"
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
# Define the path to the django app (troggle in this case)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
PYTHON_PATH + "templates"
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
'django.contrib.auth.context_processors.auth',
|
||||
'core.context.troggle_context',
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media',
|
||||
'django.template.context_processors.static',
|
||||
'django.template.context_processors.tz',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader',
|
||||
# insert your TEMPLATE_LOADERS here
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = True
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
THREEDTOPOS = 'survexport'
|
||||
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
|
||||
SURVEYS = REPOS_ROOT_PATH
|
||||
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
|
||||
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")
|
||||
|
||||
CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
|
||||
THREEDCACHEDIR = CACHEDIR + '3d/'
|
||||
THUMBNAILCACHE = CACHEDIR + 'thumbs'
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
URL_ROOT = 'http://expo.survex.com/'
|
||||
DIR_ROOT = ''#this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SURVEYS_URL = '/survey_scans/'
|
||||
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views/surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
|
||||
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
LOGFILE = '/var/log/troggle/troggle.log'
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log'
|
||||
|
||||
# add in 290, 291, 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "264", "258", "204", "76", "107"]
|
||||
164
_deploy/debian-server/localsettingsserver2023-01-secret.py
Normal file
164
_deploy/debian-server/localsettingsserver2023-01-secret.py
Normal file
@@ -0,0 +1,164 @@
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# DO NOT check this file into the git repo - it contains real passwords.
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : 'uFqP56B4XleeyIW', # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
|
||||
EXPOUSERPASS = "nope"
|
||||
EXPOADMINUSERPASS = "nope"
|
||||
EMAIL_HOST_PASSWORD = "nope"
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
# Define the path to the django app (troggle in this case)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
PHOTOS_YEAR = "2022"
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
PYTHON_PATH + "templates"
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
|
||||
'core.context.troggle_context', # in core/troggle.py
|
||||
'django.template.context_processors.debug',
|
||||
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media', # includes a variable MEDIA_URL
|
||||
'django.template.context_processors.static', # includes a variable STATIC_URL
|
||||
'django.template.context_processors.tz',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
|
||||
# insert your own TEMPLATE_LOADERS here
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = True
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
|
||||
#SURVEYS = REPOS_ROOT_PATH
|
||||
SCANS_ROOT = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
|
||||
MEDIA_ROOT = TROGGLE_PATH / 'media'
|
||||
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
|
||||
|
||||
|
||||
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
|
||||
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")
|
||||
|
||||
# CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
|
||||
# THREEDCACHEDIR = CACHEDIR + '3d/'
|
||||
# THUMBNAILCACHE = CACHEDIR + 'thumbs'
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
|
||||
|
||||
#Note that all these *_URL constants are not actually used in urls.py, they should be..
|
||||
#URL_ROOT = 'http://expo.survex.com/'
|
||||
URL_ROOT = '/'
|
||||
DIR_ROOT = ''#this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SCANS_URL = '/survey_scans/'
|
||||
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
|
||||
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
LOGFILE = '/var/log/troggle/troggle.log'
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log'
|
||||
|
||||
# add in 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
|
||||
|
||||
# Sanitise these to be strings as all other code is expecting strings
|
||||
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
|
||||
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
|
||||
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
|
||||
LOGFILE = os.fspath(LOGFILE)
|
||||
#SURVEYS = os.fspath(SURVEYS)
|
||||
EXPOWEB = os.fspath(EXPOWEB)
|
||||
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
|
||||
SURVEX_DATA = os.fspath(SURVEX_DATA)
|
||||
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
|
||||
TEMPLATE_PATH = os.fspath(TROGGLE_PATH)
|
||||
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
|
||||
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
|
||||
SCANS_ROOT = os.fspath(SCANS_ROOT)
|
||||
LIBDIR = os.fspath(LIBDIR)
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
23
_deploy/debian-server/requirements-server.txt
Normal file
23
_deploy/debian-server/requirements-server.txt
Normal file
@@ -0,0 +1,23 @@
|
||||
#This requirements txt matches the libaries as of 2023-07-09 on expo.survex.com <Debian GNU/Linux 11 (bullseye)>
|
||||
|
||||
#Nb on the server asgiref==3.3.0, however this conflicts with the Django==3.2.12 requirement
|
||||
asgiref==3.3.2
|
||||
Django==3.2.12
|
||||
docutils==0.16
|
||||
packaging==20.9
|
||||
Pillow==8.1.2
|
||||
pytz==2021.1
|
||||
sqlparse==0.4.1
|
||||
Unidecode==1.2.0
|
||||
beautifulsoup4==4.9.3
|
||||
piexif==1.1.3
|
||||
|
||||
#Not installed on expo.survex.com
|
||||
#black==23.3
|
||||
#click==8.1.3
|
||||
#coverage==7.2
|
||||
#isort==5.12.0
|
||||
#mypy-extensions==1.0.0
|
||||
#pathspec==0.11
|
||||
#platformdirs==3.8
|
||||
#ruff==0.0.245
|
||||
93
_deploy/debian-server/serversetup
Normal file
93
_deploy/debian-server/serversetup
Normal file
@@ -0,0 +1,93 @@
|
||||
Instructions for setting up new expo debian server/VM
|
||||
For Debian Stretch, June 2019.
|
||||
|
||||
[Note added March 2021:
|
||||
See also http://expo.survex.com/handbook/troggle/serverconfig.html
|
||||
and troggle/README.txt
|
||||
]
|
||||
|
||||
adduser expo
|
||||
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
|
||||
apt install python-django apache2 mysql-server survex make rsync
|
||||
apt install libjs-openlayers make
|
||||
apt install git mercurial mercurial-server?
|
||||
|
||||
for boe:
|
||||
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl
|
||||
|
||||
obsolete-packages:
|
||||
bins (move to jigl?) (for photos)
|
||||
python-django 1.7
|
||||
backports: survex therion
|
||||
not-packaged: caveview
|
||||
|
||||
make these dirs available at top documentroot:
|
||||
cuccfiles
|
||||
expofiles
|
||||
loser (link to repo)
|
||||
tunneldata (link to repo)
|
||||
troggle (link to repo)
|
||||
expoweb (link to repo)
|
||||
boc/boe
|
||||
|
||||
|
||||
config
|
||||
containing:
|
||||
|
||||
setup apache configs for cucc and expo
|
||||
#disable default website
|
||||
a2dissite 000-default
|
||||
a2ensite cucc
|
||||
a2ensite expo
|
||||
a2enmod cgid
|
||||
|
||||
|
||||
Boe config:
|
||||
Alias /boe /home/expo/boe/boc/boc.pl
|
||||
<Directory /home/expo/boe/boc>
|
||||
AddHandler cgi-script .pl
|
||||
SetHandler cgi-script
|
||||
Options +ExecCGI
|
||||
Require all granted
|
||||
</Directory>
|
||||
And remember to set both program and data dir to be
|
||||
www-data:www-data
|
||||
(optionally make file group read/write by treasurer account)
|
||||
create empty repo by clicking create in boe interface
|
||||
then set names in 'settings'
|
||||
|
||||
Set up mysql (as root)
|
||||
mysql -p
|
||||
CREATE DATABASE troggle;
|
||||
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
|
||||
|
||||
install django:
|
||||
NO!
|
||||
This was:sudo apt install python-django python-django-registration python-django-imagekit python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi
|
||||
Should be ?
|
||||
sudo apt install python-django python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi
|
||||
|
||||
CHeck if this is correct:
|
||||
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
|
||||
(both modified for stretch/python2). packages under /home/wookey/packages/
|
||||
|
||||
need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
|
||||
need libapache2-mod-wsgi for apache wsgi support.
|
||||
|
||||
On stretch the django 1.10 is no use so get rid of that:
|
||||
apt remove python3-django python-django python-django-common python-django-doc
|
||||
|
||||
Then replace with django 1.7 (Needs to be built for stretch)
|
||||
apt install python-django python-django-common python-django-doc
|
||||
apt install python-django-registration python-django-imagekit python-django-tinymce
|
||||
|
||||
then hold them to stop them being upgraded by unattended upgrades:
|
||||
echo "python-django hold" | sudo dpkg --set-selections
|
||||
echo "python-django-common hold" | sudo dpkg --set-selections
|
||||
echo "python-django-doc hold" | sudo dpkg --set-selections
|
||||
|
||||
#troggle has to have a writable logfile otherwise the website explodes
|
||||
# 500 error on the server, and apache error log has non-rentrant errors
|
||||
create /var/log/troggle/troggle.log
|
||||
chown www-data:adm /var/log/troggle/troggle.log
|
||||
chmod 660 /var/log/troggle/troggle.log
|
||||
7
_deploy/debian-server/sitecustomize.py
Normal file
7
_deploy/debian-server/sitecustomize.py
Normal file
@@ -0,0 +1,7 @@
|
||||
# install the apport exception handler if available
|
||||
try:
|
||||
import apport_python_hook
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
apport_python_hook.install()
|
||||
103
_deploy/debian-server/wookey-exposerver-recipe.txt
Normal file
103
_deploy/debian-server/wookey-exposerver-recipe.txt
Normal file
@@ -0,0 +1,103 @@
|
||||
adduser expo
|
||||
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
|
||||
apt install python-django apache2 mysql-server survex make rsync
|
||||
apt install libjs-openlayers make
|
||||
apt install git mercurial mercurial-server?
|
||||
|
||||
for boe:
|
||||
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl
|
||||
|
||||
apt install ufraw for PEF image decoding.
|
||||
sudo apt install python-django python-django-registration e fonts-freefont-ttf libapache2-mod-wsgi python3-gdbm
|
||||
# sudo apt install python-django-imagekit python-django-tinymc
|
||||
|
||||
obsolete-packages: bins (move to jigl?)
|
||||
older python-django?
|
||||
backports: survex therion
|
||||
not-packaged: caveview
|
||||
|
||||
|
||||
make these dirs available at top documentroot:
|
||||
cuccfiles
|
||||
expofiles
|
||||
loser
|
||||
tunneldata
|
||||
troggle
|
||||
expoweb
|
||||
boc/boe
|
||||
|
||||
config
|
||||
containing:
|
||||
|
||||
setup apache configs for cucc and expo
|
||||
#disable default website
|
||||
a2dissite 000-default
|
||||
a2ensite cucc
|
||||
a2ensite expo
|
||||
a2enmod cgid
|
||||
|
||||
|
||||
Boe config:
|
||||
Alias /boe /home/expo/boe/boc/boc.pl
|
||||
<Directory /home/expo/boe/boc>
|
||||
AddHandler cgi-script .pl
|
||||
SetHandler cgi-script
|
||||
Options +ExecCGI
|
||||
Require all granted
|
||||
</Directory>
|
||||
And remember to set both program and data dir to be
|
||||
www-data:www-data
|
||||
(optionally make file group read/write by treasurer account)
|
||||
create empty repo by clicking create in boe interface
|
||||
then set names in 'settings'
|
||||
|
||||
Set up mysql (as root)
|
||||
mysql -p
|
||||
CREATE DATABASE troggle;
|
||||
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
|
||||
Ctrl-D to exit
|
||||
|
||||
somepassword is set in localsettings.py
|
||||
sudo service mariadb stop
|
||||
sudo service mariadb start
|
||||
|
||||
to delete the database, it is
|
||||
DROP DATABASE troggle;
|
||||
|
||||
install django:
|
||||
sudo apt install python-django python-django-registration python-django-imagekit python-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi
|
||||
|
||||
python-django-imagekit comes from https://salsa.debian.org/python-team/modules/python-django-imagekit
|
||||
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
|
||||
|
||||
need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
|
||||
need libapache2-mod-wsgi for apache wsgi support.
|
||||
|
||||
On stretch the django 1.10 is no use so get rid of that:
|
||||
apt remove python3-django python-django python-django-common python-django-doc
|
||||
|
||||
Then replace with django 1.7 (Needs to be built for stretch)
|
||||
apt install python-django python-django-common python-django-doc
|
||||
apt install python-django-registration python-django-imagekit python-django-tinymce
|
||||
|
||||
then hold them to stop them being upgraded by unattended upgrades:
|
||||
echo "python-django hold" | sudo dpkg --set-selections
|
||||
echo "python-django-common hold" | sudo dpkg --set-selections
|
||||
echo "python-django-doc hold" | sudo dpkg --set-selections
|
||||
|
||||
Optimizing server
|
||||
I've tweaked the apache and mysql settings to make them a bit more suitable for a small machine. Seems to have shaved 200MB or so off the idling footprint.
|
||||
https://www.narga.net/optimizing-apachephpmysql-low-memory-server/
|
||||
|
||||
(just discovered 'ab' for running apache performance tests - handy).
|
||||
|
||||
Do the edit to site-packages/django/db/backends/base.py
|
||||
to comment out the requirement for mysqlclient >1.3.13
|
||||
as we run perfectly happily with Django 2.2.19 & mysqlite 1.3.10
|
||||
:
|
||||
|
||||
version = Database.version_info
|
||||
#test nobbled by Wookey 2021-04-08 as 1.3.13 is not available on stable
|
||||
#if version < (1, 3, 13):
|
||||
# raise ImproperlyConfigured('mysqlclient 1.3.13 or newer is required; you have %s.' % Database.__version__)
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
# Running troggle on Docker
|
||||
|
||||
These notes written by Sam Wenham in Feb., 2019.
|
||||
These all pre-date the move to python3, later versions of Django (1.11.+) and debian.
|
||||
|
||||
## Install
|
||||
First you need to install
|
||||
- [docker-ce](https://docs.docker.com/install/)
|
||||
@@ -1,5 +1,6 @@
|
||||
import sys
|
||||
# link localsettings to this file for use on expo computer in austria
|
||||
|
||||
# This is the local settings for use with the docker compose dev setup. It is imported automatically
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
@@ -13,7 +14,7 @@ DATABASES = {
|
||||
}
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = 'somepasshere'
|
||||
EXPOUSERPASS = "nnn:gggggg"
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
REPOS_ROOT_PATH = '/expo/'
|
||||
@@ -23,7 +24,7 @@ sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
PUBLIC_SITE = False
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
THREEDTOPOS = '3dtopos'
|
||||
@@ -47,13 +48,13 @@ MEDIA_URL = URL_ROOT + DIR_ROOT + 'site_media/'
|
||||
MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'
|
||||
MEDIA_ADMIN_DIR = '/usr/lib/python2.7/site-packages/django/contrib/admin/media/'
|
||||
|
||||
STATIC_URL = URL_ROOT
|
||||
STATIC_ROOT = DIR_ROOT
|
||||
STATIC_URL = "/static/"
|
||||
STATIC_ROOT = "/expo/static"
|
||||
|
||||
JSLIB_URL = URL_ROOT + 'javascript/'
|
||||
|
||||
TINY_MCE_MEDIA_ROOT = '/usr/share/tinymce/www/'
|
||||
TINY_MCE_MEDIA_URL = URL_ROOT + DIR_ROOT + '/tinymce_media/'
|
||||
TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'
|
||||
TINY_MCE_MEDIA_URL = STATIC_ROOT + '/tiny_mce/'
|
||||
|
||||
TEMPLATE_DIRS = (
|
||||
PYTHON_PATH + "templates",
|
||||
9
_deploy/docker/requirements.txt
Normal file
9
_deploy/docker/requirements.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
Django==1.7.11
|
||||
django-registration==2.1.2
|
||||
mysql
|
||||
#imagekit
|
||||
django-imagekit
|
||||
Image
|
||||
django-tinymce==2.7.0
|
||||
smartencoding
|
||||
unidecode
|
||||
@@ -6,3 +6,4 @@ django-imagekit
|
||||
Image
|
||||
django-tinymce==2.7.0
|
||||
smartencoding
|
||||
unidecode
|
||||
18
_deploy/readme.txt
Normal file
18
_deploy/readme.txt
Normal file
@@ -0,0 +1,18 @@
|
||||
2023-07-17 Philip Sargent
|
||||
|
||||
Trying to sort out configurations as we got into a bit of a mess on
|
||||
Expo in the last couple of weeks with two (notionally identical Debian
|
||||
Bullseye) expo laptops Crowley (which has local troggle installed and
|
||||
can run locally) and Aziraphale (has local copy of troggle repo but is
|
||||
not configured to run locally), Martin Green's laptop (Ubuntu 22.04.2),
|
||||
Philip's Barbie laptop Ubuntu 22.04.3). And of course the server itself
|
||||
expo.survex.com which is running Debian Bullseye. But most development
|
||||
recently had been done on Philip's two other machines, desktop and PC,
|
||||
both running Ubuntu on WSL on Windows and both using venv environments,
|
||||
which Crowley also does.
|
||||
|
||||
- settings.py
|
||||
is common to all configurations,
|
||||
but these are all different:
|
||||
- localsettings.py
|
||||
- requirements.txt
|
||||
@@ -1,6 +1,11 @@
|
||||
import sys
|
||||
|
||||
# link localsettings to this file for use on expo computer in austria
|
||||
|
||||
# This will ALL NEED TO BE CHANGED to match localsettingsWSL / python3 / Django v2.2
|
||||
# This WILL NOT WORK as it is for an earlier version of Django
|
||||
# consult localsettingsWSL for updates required.
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
@@ -13,7 +18,7 @@ DATABASES = {
|
||||
}
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = 'realpasshere'
|
||||
EXPOUSERPASS = "nnn:gggggg"
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/expofiles/'
|
||||
@@ -24,7 +29,7 @@ sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
PUBLIC_SITE = False
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
TUNNEL_DATA = REPOS_ROOT_PATH + 'tunneldata/'
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'
|
||||
THREEDCACHEDIR = REPOS_ROOT_PATH + 'expowebcache/3d/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
@@ -0,0 +1,3 @@
|
||||
The copy in this /_deploy/ folder may not be the latest if active development
|
||||
has been going on in the parent folder. Check there for a later copy of
|
||||
the localsettingsWSL file.
|
||||
188
_deploy/wsl/localsettingsWSL.py
Normal file
188
_deploy/wsl/localsettingsWSL.py
Normal file
@@ -0,0 +1,188 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets imported from credentials.py
|
||||
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever. Tests are then less accurate.
|
||||
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
SERVERPORT = "8000" # not needed
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent
|
||||
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
|
||||
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / "templates"
|
||||
MEDIA_ROOT = TROGGLE_PATH / "media"
|
||||
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
# PHOTOS_ROOT = EXPOFILES / 'photos'
|
||||
PHOTOS_ROOT = Path("/mnt/d/EXPO/PHOTOS")
|
||||
PHOTOS_YEAR = "2023"
|
||||
|
||||
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
|
||||
|
||||
# PYTHON_PATH = os.fspath(PYTHON_PATH)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
|
||||
LOGFILE = PYTHON_PATH / "troggle.log"
|
||||
SQLITEDB = PYTHON_PATH / "troggle.sqlite"
|
||||
KMZ_ICONS_PATH = PYTHON_PATH / "kmz_icons"
|
||||
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||
# trailing slash if there is a path component (optional in other cases).
|
||||
MEDIA_URL = "/site-media/"
|
||||
|
||||
DIR_ROOT = Path("") # this should end in / if a value is given
|
||||
URL_ROOT = "/"
|
||||
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
|
||||
|
||||
# Note that these constants are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
# executables:
|
||||
CAVERN = "cavern" # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = "survexport" # for parsing .3d files and producing .pos files
|
||||
|
||||
DBSQLITE = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
#'NAME' : 'troggle.sqlite',
|
||||
"NAME": str(SQLITEDB),
|
||||
"USER": "expo", # Not used with sqlite3.
|
||||
"PASSWORD": "sekrit", # Not used with sqlite3.
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"NAME": "troggle", # Or path to database file if using sqlite3.
|
||||
"USER": "expo",
|
||||
"PASSWORD": "my-secret-password-schwatzmooskogel",
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [TEMPLATE_PATH],
|
||||
"OPTIONS": {
|
||||
"debug": "DEBUG",
|
||||
"context_processors": [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/troggle.py - only used in expedition.html
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.i18n",
|
||||
"django.template.context_processors.media", # includes a variable MEDIA_URL
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.tz",
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
"loaders": [
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
EXPOUSER = "expo"
|
||||
EXPOUSER_EMAIL = "philip.sargent@gmail.com"
|
||||
EXPOADMINUSER = "expoadmin"
|
||||
EXPOADMINUSER_EMAIL = "philip.sargent@gmail.com"
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "django-test@klebos.net"
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
EXPOWEB_URL = ""
|
||||
# SCANS_URL = '/survey_scans/' # defunct, removed.
|
||||
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
sys.path.append(str(PYTHON_PATH))
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
|
||||
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
|
||||
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
|
||||
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
12
_deploy/wsl/requirements-Dj3.2Bullseye.txt
Normal file
12
_deploy/wsl/requirements-Dj3.2Bullseye.txt
Normal file
@@ -0,0 +1,12 @@
|
||||
asgiref==3.3.4
|
||||
confusable-homoglyphs==3.2.0
|
||||
coverage==5.5
|
||||
Django==3.2.12
|
||||
docutils==0.14
|
||||
gunicorn==20.1.0
|
||||
Pillow==9.0.1
|
||||
pytz==2019.1
|
||||
reportlab==3.6.8
|
||||
sqlparse==0.2.4
|
||||
typing-extensions==3.7.4.3
|
||||
Unidecode==1.0.23
|
||||
20
_deploy/wsl/requirements-devupgrade.txt
Normal file
20
_deploy/wsl/requirements-devupgrade.txt
Normal file
@@ -0,0 +1,20 @@
|
||||
# Philip bleeding edge config
|
||||
asgiref==3.6.0
|
||||
beautifulsoup4==4.12.2
|
||||
black==23.1.0
|
||||
click==8.1.3
|
||||
coverage==7.1.0
|
||||
Django==4.2
|
||||
docutils==0.19
|
||||
isort==5.12.0
|
||||
mypy-extensions==1.0.0
|
||||
packaging==23.0
|
||||
pathspec==0.11.0
|
||||
Pillow==9.4.0
|
||||
platformdirs==3.0.0
|
||||
pytz==2022.7
|
||||
ruff==0.0.245
|
||||
soupsieve==2.4.1
|
||||
sqlparse==0.4.3
|
||||
Unidecode==1.3.6
|
||||
piexif==1.1.3
|
||||
17
_deploy/wsl/requirements-p10d4.txt
Normal file
17
_deploy/wsl/requirements-p10d4.txt
Normal file
@@ -0,0 +1,17 @@
|
||||
asgiref==3.6.0
|
||||
black==23.1.0
|
||||
click==8.1.3
|
||||
coverage==7.1.0
|
||||
Django==4.2
|
||||
docutils==0.19
|
||||
isort==5.12.0
|
||||
mypy-extensions==1.0.0
|
||||
packaging==23.0
|
||||
pathspec==0.11.0
|
||||
Pillow==9.4.0
|
||||
platformdirs==3.0.0
|
||||
pytz==2022.7
|
||||
ruff==0.0.245
|
||||
sqlparse==0.4.3
|
||||
tomli==2.0.1
|
||||
Unidecode==1.3.6
|
||||
9
_deploy/wsl/requirements-p10d5.txt
Normal file
9
_deploy/wsl/requirements-p10d5.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
asgiref==3.5.2
|
||||
coverage==6.5.0
|
||||
Django==3.2.16
|
||||
docutils==0.19
|
||||
Pillow==9.3.0
|
||||
pytz==2022.6
|
||||
sqlparse==0.4.3
|
||||
typing_extensions==4.4.0
|
||||
Unidecode==1.3.6
|
||||
16
_deploy/wsl/requirements-p11d4.txt
Normal file
16
_deploy/wsl/requirements-p11d4.txt
Normal file
@@ -0,0 +1,16 @@
|
||||
asgiref==3.6.0
|
||||
black==23.1.0
|
||||
click==8.1.3
|
||||
coverage==7.1.0
|
||||
Django==4.2
|
||||
docutils==0.19
|
||||
isort==5.12.0
|
||||
mypy-extensions==1.0.0
|
||||
packaging==23.0
|
||||
pathspec==0.11.0
|
||||
Pillow==9.4.0
|
||||
platformdirs==3.0.0
|
||||
pytz==2022.7
|
||||
ruff==0.0.245
|
||||
sqlparse==0.4.3
|
||||
Unidecode==1.3.6
|
||||
9
_deploy/wsl/requirements-p11d5.txt
Normal file
9
_deploy/wsl/requirements-p11d5.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
asgiref==3.5.2
|
||||
coverage==6.5.0
|
||||
Django==3.2.16
|
||||
docutils==0.19
|
||||
Pillow==9.3.0
|
||||
pytz==2022.6
|
||||
sqlparse==0.4.3
|
||||
typing_extensions==4.4.0
|
||||
Unidecode==1.3.6
|
||||
21
_deploy/wsl/requirements-p9d4.txt
Executable file
21
_deploy/wsl/requirements-p9d4.txt
Executable file
@@ -0,0 +1,21 @@
|
||||
asgiref==3.7.0
|
||||
beautifulsoup4==4.12.0
|
||||
black==23.3.0
|
||||
click==8.1.3
|
||||
coverage==7.2.0
|
||||
Django==4.2
|
||||
docutils==0.20
|
||||
isort==5.12.0
|
||||
mypy-extensions==1.0.0
|
||||
packaging==23.0
|
||||
pathspec==0.11.0
|
||||
Pillow==10.0.0
|
||||
pkg_resources==0.0.0
|
||||
platformdirs==3.8.0
|
||||
pytz==2023.3
|
||||
ruff==0.0.245
|
||||
soupsieve==2.4.1
|
||||
sqlparse==0.4.0
|
||||
tomli==2.0.1
|
||||
typing_extensions==4.7.1
|
||||
Unidecode==1.3.6
|
||||
17
_deploy/wsl/requirements3.2.txt
Normal file
17
_deploy/wsl/requirements3.2.txt
Normal file
@@ -0,0 +1,17 @@
|
||||
asgiref==3.3.4
|
||||
confusable-homoglyphs==3.2.0
|
||||
Django==3.2
|
||||
docutils==0.14
|
||||
gunicorn==20.1.0
|
||||
Pillow==5.4.1
|
||||
sqlparse==0.2.4
|
||||
typing-extensions==3.7.4.3
|
||||
Unidecode==1.0.23
|
||||
mariadb==1.0.11
|
||||
mysql-connector-python==8.0.29
|
||||
mysqlclient==2.1.0
|
||||
Pillow==9.1.0
|
||||
pytz==2022.5
|
||||
asgiref==3.5.0
|
||||
gunicorn==20.1.0
|
||||
|
||||
171
_deploy/wsl/venv-trog.sh
Normal file
171
_deploy/wsl/venv-trog.sh
Normal file
@@ -0,0 +1,171 @@
|
||||
#!/bin/bash
|
||||
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
|
||||
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog.sh
|
||||
|
||||
# If you are using Debian, then stick with the default version of python
|
||||
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
|
||||
|
||||
# NOW we set up troggle
|
||||
PYTHON=python3.11
|
||||
VENAME=p11d4 # python3.x and django 4.2
|
||||
echo "** You are logged in as `id -u -n`"
|
||||
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
|
||||
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder (this script location): ${TROGDIR}"
|
||||
|
||||
if [ -d requirements.txt ]; then
|
||||
echo "-- No requirements.txt found. You should be in the /troggle/ folder. Copy it from your most recent installation."
|
||||
exit 1
|
||||
fi
|
||||
echo ## Using requirements.txt :
|
||||
cat requirements.txt
|
||||
echo ##
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
|
||||
# NOTE that when using a later or earlier verison of python, you MUST also
|
||||
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
|
||||
|
||||
# NOW set up link from expo user folder
|
||||
# needed for WSL2
|
||||
echo Creating links from Linux filesystem user
|
||||
# These links only need making once, for many venv
|
||||
cd ~
|
||||
|
||||
if [ ! -d $VENAME ]; then
|
||||
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
|
||||
$PYTHON -m venv $VENAME
|
||||
else
|
||||
echo "## /$VENAME/ already exists ! Delete it first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Activate the virtual env and see what the default packages are
|
||||
echo "### Activating $VENAME"
|
||||
|
||||
cd $VENAME
|
||||
echo "-- now in: ${PWD}"
|
||||
source bin/activate
|
||||
echo "### Activated."
|
||||
# update local version of pip, more recent than OS version
|
||||
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
|
||||
|
||||
# update local version of setuptools, more recent than OS version, needed for packages without wheels
|
||||
|
||||
echo "### installing later version of pip inside $VENAME"
|
||||
$PYTHON -m pip install --upgrade pip
|
||||
$PYTHON -m pip install --upgrade setuptools
|
||||
|
||||
PIP=pip
|
||||
|
||||
$PIP list > original-pip.list
|
||||
$PIP freeze >original.txt
|
||||
|
||||
# we are in /home/$USER/$VENAME/
|
||||
ln -s ${TROGDIR} troggle
|
||||
ln -s ${TROGDIR}/../expoweb expoweb
|
||||
ln -s ${TROGDIR}/../loser loser
|
||||
ln -s ${TROGDIR}/../drawings drawings
|
||||
#ln -s ${TROGDIR}/../expofiles expofiles
|
||||
|
||||
# fudge for philip's machine
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
if [ -d ${TROGDIR}/../expofiles ]; then
|
||||
ln -s ${TROGDIR}/../expofiles expofiles
|
||||
else
|
||||
ln -s /mnt/d/EXPO/expofiles expofiles
|
||||
fi
|
||||
|
||||
echo "### Setting file permissions.. may take a while.."
|
||||
git config --global --add safe.directory '*'
|
||||
sudo chmod -R 777 *
|
||||
|
||||
echo "### links to expoweb, troggle etc. complete:"
|
||||
ls -tla
|
||||
echo "###"
|
||||
echo "### now installing ${TROGDIR}/requirements.txt"
|
||||
echo "###"
|
||||
|
||||
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
|
||||
# seen on wsl2 as well as wsl1
|
||||
# which ALSO ruins EXISTING permissions !
|
||||
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
|
||||
|
||||
$PIP install -r ${TROGDIR}/requirements.txt
|
||||
echo '### install from requirements.txt completed.'
|
||||
echo '### '
|
||||
|
||||
$PIP freeze > requirements.txt
|
||||
# so that we can track requirements more easily with git
|
||||
# because we do not install these with pip, but they are listed by the freeze command
|
||||
# Now find out what we actually installed by subtracting the stuff venv installed anyway
|
||||
sort original.txt > 1
|
||||
sort requirements.txt >2
|
||||
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-requirements.txt
|
||||
rm 1
|
||||
rm 2
|
||||
|
||||
cp requirements.txt requirements-$VENAME.txt
|
||||
cp requirements-$VENAME.txt troggle/requirements-$VENAME.txt
|
||||
|
||||
$PIP list > installed-pip.list
|
||||
$PIP list -o > installed-pip-o.list
|
||||
|
||||
REQ=installation-record
|
||||
mkdir $REQ
|
||||
mv requirements-$VENAME.txt $REQ
|
||||
mv original.txt $REQ
|
||||
mv requirements.txt $REQ
|
||||
mv original-pip.list $REQ
|
||||
mv installed-pip.list $REQ
|
||||
mv installed-pip-o.list $REQ
|
||||
cp fresh-requirements.txt ../requirements.txt
|
||||
mv fresh-requirements.txt $REQ
|
||||
cp troggle/`basename "$0"` $REQ
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
python --version
|
||||
echo "Django version:`django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/$VENAME'
|
||||
'source bin/activate'
|
||||
'cd troggle'
|
||||
'django-admin'
|
||||
'python manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
## the tests may ALSO fail because of ssh and permissions errors
|
||||
|
||||
## So you will need to run
|
||||
$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
|
||||
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
|
||||
# because this chmod only takes effect then.
|
||||
|
||||
'python manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'python databaseReset.py reset $VENAME'
|
||||
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
if [ ! -d /mnt/d/expofiles ]; then
|
||||
echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
fi
|
||||
181
_deploy/xubuntu/localsettingsXubuntu.py
Normal file
181
_deploy/xubuntu/localsettingsXubuntu.py
Normal file
@@ -0,0 +1,181 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
#-----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
#-----------------------------------------------------------------
|
||||
# default values, real secrets imported from credentials.py
|
||||
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever. Tests are then less accurate.
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
SERVERPORT = '8000' # not needed
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent
|
||||
LIBDIR = REPOS_ROOT_PATH / 'lib' / PV
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
|
||||
MEDIA_ROOT = TROGGLE_PATH / 'media'
|
||||
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
|
||||
|
||||
# EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
EXPOFILES = Path('/media/philip/sd-huge1/cucc-expo/expofiles/')
|
||||
|
||||
SCANS_ROOT = EXPOFILES / 'surveyscans'
|
||||
PHOTOS_ROOT = EXPOFILES / 'photos'
|
||||
PHOTOS_YEAR = "2023"
|
||||
|
||||
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
|
||||
|
||||
# PYTHON_PATH = os.fspath(PYTHON_PATH)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
|
||||
LOGFILE = PYTHON_PATH / "troggle.log"
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||
# trailing slash if there is a path component (optional in other cases).
|
||||
MEDIA_URL = '/site-media/'
|
||||
|
||||
DIR_ROOT = Path("") #this should end in / if a value is given
|
||||
URL_ROOT = '/'
|
||||
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
|
||||
|
||||
# Note that these constants are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
DBSQLITE = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle.sqlite',
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : 'sekrit', # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo',
|
||||
'PASSWORD' : 'my-secret-password-schwatzmooskogel',
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
"DIRS": [TEMPLATE_PATH],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
|
||||
'core.context.troggle_context', # in core/troggle.py - only used in expedition.html
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media', # includes a variable MEDIA_URL
|
||||
'django.template.context_processors.static', # includes a variable STATIC_URL used by admin pages
|
||||
'django.template.context_processors.tz',
|
||||
'django.template.context_processors.request', # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader', # default lcation is troggle/templates/
|
||||
'django.template.loaders.app_directories.Loader', # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
EXPOUSER = "expo"
|
||||
EXPOUSER_EMAIL = "philip.sargent@gmail.com"
|
||||
EXPOADMINUSER = "expoadmin"
|
||||
EXPOADMINUSER_EMAIL = "philip.sargent@gmail.com"
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "django-test@klebos.net"
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
EXPOWEB_URL = ''
|
||||
# SCANS_URL = '/survey_scans/' # defunct, removed.
|
||||
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
sys.path.append(str(PYTHON_PATH))
|
||||
|
||||
# Sanitise these to be strings as all other code is expecting strings
|
||||
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
|
||||
#CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
|
||||
#ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
|
||||
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
196
_deploy/xubuntu/localsettingsXubuntu.py.bak
Normal file
196
_deploy/xubuntu/localsettingsXubuntu.py.bak
Normal file
@@ -0,0 +1,196 @@
|
||||
import sys
|
||||
import os
|
||||
import urllib.parse
|
||||
from pathlib import Path
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
#-----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
#-----------------------------------------------------------------
|
||||
# default values, real secrets imported from credentials.py
|
||||
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever. Tests are then less accurate.
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
SERVERPORT = '8000' # not needed
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent
|
||||
LIBDIR = REPOS_ROOT_PATH / 'lib' / PV
|
||||
#LIBDIR = REPOS_ROOT_PATH / 'lib' / 'python3.9' # should be finding this automatically: python --version etc.
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
|
||||
MEDIA_ROOT = TROGGLE_PATH / 'media'
|
||||
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
|
||||
|
||||
#FILES = Path('/mnt/d/expofiles/')
|
||||
EXPOFILES = Path('/media/philip/sd-huge1/cucc-expo/expofiles/')
|
||||
SCANS_ROOT = EXPOFILES / 'surveyscans'
|
||||
PHOTOS_ROOT = EXPOFILES / 'photos'
|
||||
PHOTOS_YEAR = "2022"
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||
# trailing slash if there is a path component (optional in other cases).
|
||||
MEDIA_URL = '/site-media/'
|
||||
|
||||
DIR_ROOT = ''#this should end in / if a value is given
|
||||
URL_ROOT = '/'
|
||||
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
|
||||
|
||||
#Note that these constants are not actually used in urls.py, they should be..
|
||||
MEDIA_URL = urllib.parse.urljoin(URL_ROOT , '/site_media/')
|
||||
SCANS_URL = urllib.parse.urljoin(URL_ROOT , '/survey_scans/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT , '/photos/')
|
||||
SVX_URL = urllib.parse.urljoin(URL_ROOT , '/survex/')
|
||||
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # used for CaveViewer JS utility
|
||||
|
||||
#STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
DBSQLITE = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle.sqlite',
|
||||
# 'NAME' : ':memory:',
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : 'sekrit', # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo',
|
||||
'PASSWORD' : 'my-secret-password-schwatzmooskogel',
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH / 'troggle'
|
||||
sys.path.append(os.fspath(REPOS_ROOT_PATH))
|
||||
sys.path.append(os.fspath(PYTHON_PATH))
|
||||
|
||||
LOGFILE = PYTHON_PATH / 'troggle.log'
|
||||
PYTHON_PATH = os.fspath(PYTHON_PATH)
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
TEMPLATE_PATH
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
|
||||
'core.context.troggle_context', # in core/troggle.py - only used in expedition.html
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media', # includes a variable MEDIA_URL
|
||||
'django.template.context_processors.static', # includes a variable STATIC_URL used by admin pages
|
||||
'django.template.context_processors.tz',
|
||||
'django.template.context_processors.request', # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader', # default lcation is troggle/templates/
|
||||
'django.template.loaders.app_directories.Loader', # needed for admin 'app'
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSER_EMAIL = 'philip.sargent@gmail.com'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOADMINUSER_EMAIL = 'philip.sargent@gmail.com'
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
|
||||
EMAIL_PORT=587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = 'django-test@klebos.net'
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
#SURVEYS = REPOS_ROOT_PATH
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
EXPOWEB_URL = ''
|
||||
# SCANS_URL = '/survey_scans/' # defunct, removed.
|
||||
|
||||
# Sanitise these to be strings as all other code is expecting strings
|
||||
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
|
||||
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
|
||||
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
|
||||
LOGFILE = os.fspath(LOGFILE)
|
||||
#SURVEYS = os.fspath(SURVEYS)
|
||||
EXPOWEB = os.fspath(EXPOWEB)
|
||||
DRAWINGS_DATA = os.fspath(DRAWINGS_DATA)
|
||||
SURVEX_DATA = os.fspath(SURVEX_DATA)
|
||||
REPOS_ROOT_PATH = os.fspath(REPOS_ROOT_PATH)
|
||||
TEMPLATE_PATH = os.fspath(TROGGLE_PATH)
|
||||
MEDIA_ROOT = os.fspath(MEDIA_ROOT)
|
||||
JSLIB_ROOT = os.fspath(JSLIB_ROOT)
|
||||
SCANS_ROOT = os.fspath(SCANS_ROOT)
|
||||
46
_deploy/xubuntu/pre-push-barbie.sh
Normal file
46
_deploy/xubuntu/pre-push-barbie.sh
Normal file
@@ -0,0 +1,46 @@
|
||||
#! /bin/sh
|
||||
# create and sanitise files for pushing to repo, for Babie laptop
|
||||
|
||||
echo deprecations.
|
||||
python -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
|
||||
echo diffsettings.
|
||||
rm diffsettings.txt
|
||||
if test -f "diffsettings.txt"; then
|
||||
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
|
||||
exit
|
||||
fi
|
||||
python manage.py diffsettings | grep "###" > diffsettings.txt
|
||||
echo pip freeze.
|
||||
pip freeze > requirements.txt
|
||||
echo inspectdb.
|
||||
# this next line requires database setting to be troggle.sqlite:
|
||||
python manage.py inspectdb > troggle-inspectdb.py
|
||||
#egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo remove passwords.
|
||||
cp localsettings.py localsettingsXubuntu.py
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
|
||||
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
|
||||
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettingsXubuntu.py
|
||||
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
|
||||
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettingsXubuntu.py
|
||||
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
|
||||
|
||||
mv _deploy/xubuntu/localsettingsXubuntu.py _deploy/xubuntu/localsettingsXubuntu.py.bak
|
||||
mv localsettingsXubuntu.py _deploy/xubuntu
|
||||
#
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# in ./pre-run.sh
|
||||
# python reset-django.py
|
||||
# python manage.py makemigrations
|
||||
# python manage.py test
|
||||
# python manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -i "unable|error" troggle-inspectdb.py
|
||||
1
categories.json
Normal file
1
categories.json
Normal file
File diff suppressed because one or more lines are too long
1
confusables.json
Normal file
1
confusables.json
Normal file
File diff suppressed because one or more lines are too long
214
core/TESTS/test_caves.py
Normal file
214
core/TESTS/test_caves.py
Normal file
@@ -0,0 +1,214 @@
|
||||
"""
|
||||
Modified for Expo April 2021.
|
||||
"""
|
||||
|
||||
import re
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.test import Client, TestCase
|
||||
|
||||
from troggle.core.models.caves import Area, Cave
|
||||
from troggle.core.models.troggle import Person, PersonExpedition
|
||||
|
||||
# import troggle.settings as settings
|
||||
# FIXTURE_DIRS = settings.PYTHON_PATH / "core" /"fixtures"
|
||||
|
||||
class FixtureTests(TestCase):
|
||||
"""These just hit the database.
|
||||
They do not exercise the GET and url functions
|
||||
"""
|
||||
|
||||
fixtures = ["auth_users", "expo_areas", "expo_caves", "expo_exped"]
|
||||
ph = r"and leads in 800m of tortuous going to"
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_fix_person_loaded(self):
|
||||
p = Person.objects.get(fullname="Michael Sargent")
|
||||
self.assertEqual(str(p.first_name), "Michael")
|
||||
|
||||
def test_fix_person_loaded(self):
|
||||
pe = PersonExpedition.objects.get(pk="681")
|
||||
self.assertEqual(str(pe.person.fullname), "Michael Sargent")
|
||||
self.assertEqual(str(pe.expedition.year), "2019")
|
||||
|
||||
def test_fix_area_loaded(self):
|
||||
a = Area.objects.get(short_name="1623")
|
||||
self.assertEqual(str(a.short_name), "1623")
|
||||
|
||||
def test_fix_cave_loaded115(self):
|
||||
c = Cave.objects.get(kataster_number="115")
|
||||
self.assertEqual(str(c.description_file), "1623/115.htm")
|
||||
self.assertEqual(str(c.url), "1623/115.url") # intentional
|
||||
self.assertEqual(str(c.filename), "1623-115.html")
|
||||
|
||||
# c.area is a 'ManyRelatedManager' object and not iterable
|
||||
# self.assertEqual(str(c.[0].short_name), "1623")
|
||||
|
||||
ph = self.ph
|
||||
phmatch = re.search(ph, c.underground_description)
|
||||
self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_cave_loaded284(self):
|
||||
c = Cave.objects.get(kataster_number="284")
|
||||
self.assertEqual(str(c.description_file), "")
|
||||
self.assertEqual(str(c.url), "1623/284/284.html")
|
||||
self.assertEqual(str(c.filename), "1623-284.html")
|
||||
|
||||
ph = r"at a depth of 72m, there are large round blocks"
|
||||
phmatch = re.search(ph, c.notes)
|
||||
self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_personexpedition(self):
|
||||
response = self.client.get("/personexpedition/MichaelSargent/2019")
|
||||
content = response.content.decode()
|
||||
# with open('testresponse.html','w') as tr:
|
||||
# tr.writelines(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [r"Michael Sargent", r"Table of all trips and surveys aligned by date"]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
# Need to add a fixture so that this actually has a logbook entry and a trip/svx in it.
|
||||
|
||||
|
||||
class FixturePageTests(TestCase):
|
||||
"""Currently nothing that runs troggle works - all do 404. Must be something in a template rendering crash?
|
||||
ordinary pages are OK, and expopages and expofiles are OK, even though they come through troggle. And the
|
||||
fixtures are certainly loaded into the db as the other tests show.
|
||||
"""
|
||||
|
||||
# The fixtures have a password hash which is compatible with plain-text password 'secretword'
|
||||
fixtures = ["auth_users", "expo_areas", "expo_caves", "expo_exped"]
|
||||
ph = r"and leads in 800m of tortuous going to"
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
self.user = User.objects.get(username="expotest")
|
||||
|
||||
# Every test needs a client.
|
||||
self.client = Client()
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_fix_expedition(self):
|
||||
response = self.client.get("/expedition/2019")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"Michael Sargent"
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
# with open('exped-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_personexped(self):
|
||||
response = self.client.get("/personexpedition/MichaelSargent/2019")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"Table of all trips and surveys aligned by date"
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
# with open('persexped-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_person(self):
|
||||
response = self.client.get("/person/MichaelSargent")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"second-generation expo caver "
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
# with open('person-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_cave_url115(self):
|
||||
ph = self.ph
|
||||
response = self.client.get("/1623/115.url") # yes this is intentional, see the inserted data above & fixture
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_cave_url284(self):
|
||||
response = self.client.get("/1623/284/284.html")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"at a depth of 72m, there are large round blocks"
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
# with open('cave-url284.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_cave_bare_url115(self):
|
||||
"""Expect to get Page Not Found and status 404"""
|
||||
ph = self.ph
|
||||
ph = "Probably a mistake."
|
||||
response = self.client.get("/1623/115")
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'") # 200 & Page Not Found
|
||||
|
||||
def test_fix_cave_slug115(self):
|
||||
"""Expect to get Page Not Found and status 404"""
|
||||
ph = self.ph
|
||||
ph = "Probably a mistake."
|
||||
response = self.client.get("/1623-115")
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'") # 200 & Page Not Found
|
||||
|
||||
def test_fix_caves284(self):
|
||||
response = self.client.get("/caves")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"284 Seetrichter"
|
||||
phmatch = re.search(ph, content)
|
||||
# with open('_cave_fix_caves.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# Although the Cave object exists, it looks like we get a bad slug error when trying to get a QM page.
|
||||
|
||||
# def test_fix_qms(self):
|
||||
# response = self.client.get("/cave/qms/1623-284")
|
||||
# self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# content = response.content.decode()
|
||||
# ph = r"Question marks for 284 - Seetrichter"
|
||||
# phmatch = re.search(ph, content)
|
||||
# with open('_cave-fixqms.html', 'w') as f:
|
||||
# f.write(content)
|
||||
# self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# def test_fix_openqms(self):
|
||||
# response = self.client.get("/cave/openqms/1623-284")
|
||||
# self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# content = response.content.decode()
|
||||
# ph = r"Open Leads for 284 - Seetrichter"
|
||||
# phmatch = re.search(ph, content)
|
||||
# with open('_cave-fixopenqms.html', 'w') as f:
|
||||
# f.write(content)
|
||||
# self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
277
core/TESTS/test_imports.py
Normal file
277
core/TESTS/test_imports.py
Normal file
@@ -0,0 +1,277 @@
|
||||
"""
|
||||
We are using unittest for troggle.
|
||||
|
||||
Note that the database has not been parsed from the source files when these tests are run,
|
||||
so any path that relies on data being in the database will fail.
|
||||
|
||||
The simple redirections to files which exist, e.g. in
|
||||
/expoweb/
|
||||
/expofiles/
|
||||
/expofiles/documents/
|
||||
etc. using parameters in localsettings such as PHOTOS_ROOT will test fine.
|
||||
|
||||
But paths like this:
|
||||
/survey_scans/
|
||||
/caves/
|
||||
which rely on database resolution will fail unless a fixture has been set up for
|
||||
them.
|
||||
|
||||
https://docs.djangoproject.com/en/dev/topics/testing/tools/
|
||||
"""
|
||||
import re
|
||||
import subprocess
|
||||
import unittest
|
||||
|
||||
from django.test import Client, SimpleTestCase, TestCase
|
||||
|
||||
|
||||
class SimpleTest(SimpleTestCase):
|
||||
def test_test_setting(self):
|
||||
from django.conf import settings
|
||||
|
||||
self.assertEqual(settings.EMAIL_BACKEND, "django.core.mail.backends.locmem.EmailBackend")
|
||||
import troggle.settings as settings
|
||||
|
||||
def test_import_TroggleModel(self):
|
||||
from troggle.core.models.troggle import TroggleModel
|
||||
|
||||
def test_import_Cave(self):
|
||||
from troggle.core.models.caves import Cave
|
||||
|
||||
def test_import_parsers_surveys(self):
|
||||
# from PIL import Image
|
||||
from functools import reduce
|
||||
|
||||
from troggle.core.utils import save_carefully
|
||||
|
||||
def test_import_parsers_survex(self):
|
||||
import troggle.core.models.caves as models_caves
|
||||
import troggle.core.models.survex as models_survex
|
||||
import troggle.core.models.troggle as models
|
||||
import troggle.settings as settings
|
||||
from troggle.core.views import caves, drawings, other, scans, statistics, survex, uploads
|
||||
from troggle.core.views.caves import cavepage, ent
|
||||
from troggle.core.views.other import frontpage
|
||||
from troggle.parsers.people import GetPersonExpeditionNameLookup
|
||||
|
||||
def test_import_views_uploads(self):
|
||||
from troggle.core.views.uploads import dwgupload
|
||||
|
||||
def test_import_views_walletedit(self):
|
||||
from troggle.core.views.wallets_edit import walletedit
|
||||
|
||||
def test_import_parsers_QMs(self):
|
||||
from troggle.core.models.logbooks import QM
|
||||
|
||||
def test_import_parsers_people(self):
|
||||
from html import unescape
|
||||
|
||||
from unidecode import unidecode
|
||||
|
||||
def test_import_parsers_logbooks(self):
|
||||
from django.template.defaultfilters import slugify
|
||||
from django.utils.timezone import get_current_timezone, make_aware
|
||||
|
||||
from parsers.people import GetPersonExpeditionNameLookup
|
||||
from troggle.core.models.logbooks import CaveSlug, QM, LogbookEntry, PersonLogEntry
|
||||
from troggle.core.models.troggle import DataIssue, Expedition
|
||||
|
||||
def test_import_core_views_caves(self):
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.http import HttpResponse, HttpResponseRedirect
|
||||
from django.shortcuts import get_object_or_404, render
|
||||
|
||||
import troggle.core.views.expo
|
||||
from troggle.core.forms import CaveAndEntranceFormSet, CaveForm, EntranceForm, EntranceLetterForm
|
||||
from troggle.core.models.caves import Area, Cave, CaveAndEntrance, Entrance, SurvexStation #EntranceSlug,
|
||||
from troggle.core.models.troggle import Expedition
|
||||
from troggle.core.views.auth import login_required_if_public
|
||||
|
||||
def test_import_parsers_mix(self):
|
||||
import troggle.parsers.caves
|
||||
import troggle.parsers.drawings
|
||||
import troggle.parsers.logbooks
|
||||
import troggle.parsers.people
|
||||
import troggle.parsers.QMs
|
||||
import troggle.parsers.scans
|
||||
import troggle.parsers.survex
|
||||
import troggle.settings
|
||||
from troggle.parsers.logbooks import GetCaveLookup
|
||||
|
||||
def test_import_imports(self):
|
||||
from django.contrib.auth.models import User
|
||||
from django.core import management
|
||||
from django.db import close_old_connections, connection, connections
|
||||
from django.http import HttpResponse
|
||||
from django.urls import reverse
|
||||
|
||||
def test_import_urls(self):
|
||||
from django.conf import settings
|
||||
#from django.conf.urls import include, url
|
||||
from django.contrib import admin, auth
|
||||
from django.urls import resolve, reverse
|
||||
from django.views.generic.base import RedirectView
|
||||
from django.views.generic.edit import UpdateView
|
||||
from django.views.generic.list import ListView
|
||||
|
||||
from troggle.core.views import caves, other, statistics, survex
|
||||
from troggle.core.views.auth import expologin, expologout
|
||||
from troggle.core.views.caves import cavepage, ent
|
||||
from troggle.core.views.expo import (
|
||||
editexpopage,
|
||||
expofiles_redirect,
|
||||
expofilessingle,
|
||||
expopage,
|
||||
map,
|
||||
mapfile,
|
||||
mediapage,
|
||||
)
|
||||
from troggle.core.views.logbooks import (
|
||||
Expeditions_jsonListView,
|
||||
Expeditions_tsvListView,
|
||||
expedition,
|
||||
get_logbook_entries,
|
||||
get_people,
|
||||
logbookentry,
|
||||
notablepersons,
|
||||
person,
|
||||
personexpedition,
|
||||
)
|
||||
from troggle.core.views.other import controlpanel
|
||||
from troggle.core.views.prospect import prospecting, prospecting_image
|
||||
from troggle.core.views.statistics import dataissues, pathsreport, stats
|
||||
from troggle.core.views.survex import survexcavesingle, survexcaveslist, svx
|
||||
|
||||
class ImportTest(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
import troggle.settings as settings
|
||||
from troggle.parsers.logbooks import LOGBOOKS_DIR, DEFAULT_LOGBOOK_FILE
|
||||
|
||||
LOGBOOKS_PATH = settings.EXPOWEB / LOGBOOKS_DIR
|
||||
test_year = "1986"
|
||||
cls.test_logbook = LOGBOOKS_PATH / test_year / DEFAULT_LOGBOOK_FILE
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_logbook_exists(self):
|
||||
self.assertTrue(self.test_logbook.is_file())
|
||||
|
||||
|
||||
class SubprocessTest(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_utf8(self):
|
||||
"""Expects that utf8 is the default encoding when opening files"""
|
||||
import locale
|
||||
import sys
|
||||
|
||||
self.assertTrue(
|
||||
sys.getdefaultencoding() == "utf-8", f"{sys.getdefaultencoding()} - UTF8 error in getdefaultencoding"
|
||||
)
|
||||
self.assertTrue(
|
||||
sys.getfilesystemencoding() == "utf-8",
|
||||
f"{sys.getfilesystemencoding()} - UTF8 error in getfilesystemencoding",
|
||||
)
|
||||
self.assertTrue(
|
||||
locale.getdefaultlocale()[1] == "UTF-8",
|
||||
f"{locale.getdefaultlocale()} - UTF8 error in locale.getdefaultlocale",
|
||||
)
|
||||
self.assertTrue(
|
||||
locale.getpreferredencoding() == "UTF-8",
|
||||
f"{locale.getpreferredencoding()} - UTF8 error in locale.getpreferredencoding",
|
||||
)
|
||||
|
||||
def test_installs(self):
|
||||
"""Expects external software installed: cavern, survexport, git
|
||||
(but not whether it actually works)
|
||||
"""
|
||||
import troggle.settings as settings
|
||||
|
||||
for i in [settings.CAVERN, settings.SURVEXPORT, settings.GIT]:
|
||||
# Define command as string and then split() into list format
|
||||
cmd = f"which {i}".split()
|
||||
try:
|
||||
sp = subprocess.check_call(cmd, shell=False)
|
||||
except subprocess.CalledProcessError:
|
||||
self.assertTrue(False, f"no {i} installed")
|
||||
|
||||
def test_repos_git_status(self):
|
||||
"""Expects clean git repos with no added files and no merge failures"""
|
||||
from pathlib import Path
|
||||
|
||||
import troggle.settings as settings
|
||||
|
||||
TROGGLE_PATH = Path(settings.REPOS_ROOT_PATH) / "troggle"
|
||||
for cwd in [settings.SURVEX_DATA, settings.EXPOWEB, settings.DRAWINGS_DATA, TROGGLE_PATH]:
|
||||
sp = subprocess.run([settings.GIT, "status"], cwd=cwd, capture_output=True, text=True)
|
||||
out = str(sp.stdout)
|
||||
if len(out) > 160:
|
||||
out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
|
||||
print(f"git output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}")
|
||||
if sp.returncode != 0:
|
||||
print(f"git output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}")
|
||||
|
||||
self.assertTrue(sp.returncode == 0, f"{cwd} - git is unhappy")
|
||||
|
||||
content = sp.stdout
|
||||
ph = r"nothing to commit, working tree clean"
|
||||
phmatch = re.search(ph, content)
|
||||
msg = f'{cwd} - Failed to find expected git output: "{ph}"'
|
||||
self.assertIsNotNone(phmatch, msg)
|
||||
|
||||
# ph1 = r"no changes added to commit"
|
||||
# phmatch1 = re.search(ph1, content)
|
||||
# ph2 = r"nothing to commit"
|
||||
# phmatch2 = re.search(ph2, content)
|
||||
# phmatch = phmatch1 or phmatch2
|
||||
# msg = f'{cwd} - Failed to find expected git output: "{ph1}" or "{ph2}"'
|
||||
# self.assertIsNotNone(phmatch, msg)
|
||||
|
||||
def test_loser_survex_status(self):
|
||||
"""Expects no failures of survex files"""
|
||||
from pathlib import Path
|
||||
|
||||
import troggle.settings as settings
|
||||
|
||||
cwd = settings.SURVEX_DATA
|
||||
for survey in ["1623-and-1626-no-schoenberg-hs.svx"]:
|
||||
sp = subprocess.run([settings.CAVERN, survey], cwd=cwd, capture_output=True, text=True)
|
||||
out = str(sp.stdout)
|
||||
if len(out) > 160:
|
||||
out = out[:75] + "\n <Long output curtailed>\n" + out[-75:]
|
||||
# print(f'survex output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}')
|
||||
if sp.returncode != 0:
|
||||
print(
|
||||
f"survex output: {cwd}:\n # {sp.stderr=}\n # sp.stdout={out} \n # return code: {str(sp.returncode)}"
|
||||
)
|
||||
|
||||
self.assertTrue(sp.returncode == 0, f"{cwd} - survex is unhappy")
|
||||
|
||||
content = sp.stdout
|
||||
ph = r"Total length of survey legs"
|
||||
phmatch = re.search(ph, content)
|
||||
msg = f'{cwd} - Failed to find expected survex output: "{ph}"'
|
||||
self.assertIsNotNone(phmatch, msg)
|
||||
|
||||
ph1 = r"Time used"
|
||||
phmatch1 = re.search(ph1, content)
|
||||
ph2 = r"vertical length of survey le"
|
||||
phmatch2 = re.search(ph2, content)
|
||||
|
||||
phmatch = phmatch1 or phmatch2
|
||||
msg = f'{cwd} - Failed to find expected survex output: "{ph1}" or "{ph2}"'
|
||||
self.assertIsNotNone(phmatch, msg)
|
||||
451
core/TESTS/test_logins.py
Normal file
451
core/TESTS/test_logins.py
Normal file
@@ -0,0 +1,451 @@
|
||||
"""
|
||||
Originally written for CUYC
|
||||
Philip Sargent (Feb.2021)
|
||||
|
||||
Modified for Expo April 2021.
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
import re
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.test import Client, TestCase
|
||||
|
||||
import troggle.settings as settings
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.models.troggle import Expedition
|
||||
|
||||
|
||||
class DataTests(TestCase):
|
||||
"""These check that the NULL and NON-UNIQUE constraints are working in the database"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User()
|
||||
u.pk = 9000
|
||||
u.user_id = 8000
|
||||
u.username, u.password = "stinker", "secretword"
|
||||
u.email = "philip.sargent+SP@gmail.com"
|
||||
u.first_name, u.last_name = "Stinker", "Pinker"
|
||||
u.save()
|
||||
self.user = u
|
||||
|
||||
def tearDown(self):
|
||||
# self.member.delete() # must delete member before user
|
||||
# self.user.delete() # horrible crash, why?
|
||||
pass
|
||||
|
||||
|
||||
class FixturePageTests(TestCase):
|
||||
# The fixtures have a password hash which is compatible with plain-text password 'secretword'
|
||||
fixtures = ["auth_users"]
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
self.user = User.objects.get(username="expotest")
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_fix_admin_login_fail(self):
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
response = c.get("/admin/")
|
||||
content = response.content.decode()
|
||||
# with open('admin-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
t = re.search(r"Troggle administration", content)
|
||||
self.assertIsNone(t, "Logged in as '" + u.username + "' (not staff) but still managed to get the Admin page")
|
||||
|
||||
|
||||
class PostTests(TestCase):
|
||||
"""Tests walletedit form"""
|
||||
|
||||
fixtures = ["auth_users"]
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
self.user = User.objects.get(username="expotest")
|
||||
self.client = Client()
|
||||
|
||||
testyear = "2022"
|
||||
wname = f"{testyear}:00"
|
||||
self.testyear = testyear
|
||||
w = Wallet()
|
||||
w.pk = 9100
|
||||
w.fpath = str(pathlib.Path(settings.SCANS_ROOT, wname))
|
||||
w.walletname = wname
|
||||
w.save()
|
||||
self.wallet = w
|
||||
|
||||
e = Expedition()
|
||||
e.year = testyear
|
||||
e.save()
|
||||
self.expedition = e
|
||||
|
||||
def test_file_permissions(self):
|
||||
"""Expect to be allowed to write to SCANS_ROOT, DRAWINGS_DATA, SURVEX_DATA, EXPOWEB
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
testyear = self.testyear
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
for p in [settings.SCANS_ROOT,
|
||||
settings.DRAWINGS_DATA / "walletjson",
|
||||
settings.EXPOWEB / "documents",
|
||||
settings.SURVEX_DATA / "docs"
|
||||
]:
|
||||
|
||||
_test_file_path = pathlib.Path(p, "_created_by_test_suite.txt")
|
||||
self.assertEqual(_test_file_path.is_file(), False)
|
||||
|
||||
with open(_test_file_path, "w") as f:
|
||||
f.write("test string: can we write to this directory?")
|
||||
self.assertEqual(_test_file_path.is_file(), True)
|
||||
_test_file_path.unlink()
|
||||
|
||||
def test_scan_upload(self):
|
||||
"""Expect scan upload to wallet to work on any file
|
||||
Need to login first.
|
||||
|
||||
This upload form looks for the Cave and the Wallet, so the test fails if the database is not loaded with the cave
|
||||
identified in the wallet
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
testyear = self.testyear
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
with open("core/fixtures/test_upload_file.txt", "r") as testf:
|
||||
response = self.client.post(
|
||||
f"/walletedit/{testyear}:00", data={"name": "test_upload_file.txt", "uploadfiles": testf}
|
||||
)
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# with open("_test_response.html", "w") as f:
|
||||
# f.write(content)
|
||||
for ph in [
|
||||
r"test_upload_",
|
||||
rf"← {testyear}#00 →",
|
||||
r"description written",
|
||||
r"Plan not required",
|
||||
r"edit settings or upload a file",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
remove_file = pathlib.Path(settings.SCANS_ROOT) / f'{testyear}' / f'{testyear}#00'/ 'test_upload_file.txt'
|
||||
remove_file.unlink()
|
||||
|
||||
# Just uploading a file does NOT do any git commit.
|
||||
# You need to create or edit a contents.json file for that to happen.
|
||||
|
||||
def test_photo_upload(self):
|
||||
"""Expect photo upload to work on any file (contrary to msg on screen)
|
||||
Upload into current default year. settings.PHOTOS_YEAR
|
||||
Deletes file afterwards
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
with open("core/fixtures/test_upload_file.txt", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/photoupload/", data={"name": "test_upload_file.txt", "renameto": "", "uploadfiles": testf}
|
||||
)
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
for ph in [
|
||||
r"test_upload_",
|
||||
r"Upload photos into /photos/" + str(settings.PHOTOS_YEAR),
|
||||
r" you can create a new folder in your name",
|
||||
r"Create new Photographer folder",
|
||||
r"only photo image files are accepted",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
remove_file = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / "test_upload_file.txt"
|
||||
remove_file.unlink()
|
||||
|
||||
def test_photo_upload_rename(self):
|
||||
"""Expect photo upload to work on any file (contrary to msg on screen)
|
||||
Upload into current default year. settings.PHOTOS_YEAR
|
||||
Deletes file afterwards
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
rename = "RENAMED-FILE.JPG"
|
||||
with open("core/fixtures/test_upload_file.txt", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/photoupload/", data={"name": "test_upload_file.txt", "renameto": rename, "uploadfiles": testf}
|
||||
)
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
for ph in [rename]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
remove_file = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / rename
|
||||
remove_file.unlink()
|
||||
|
||||
def test_photo_folder_create(self):
|
||||
"""Create folder for new user
|
||||
Create in current default year. settings.PHOTOS_YEAR
|
||||
Deletes folder afterwards
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
response = self.client.post("/photoupload/", data={"photographer": "GussieFinkNottle"})
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
for ph in [r"/GussieFinkNottle/", r"Create new Photographer folder"]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
remove_dir = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / "GussieFinkNottle"
|
||||
remove_dir.rmdir()
|
||||
|
||||
def test_dwg_upload_txt(self):
|
||||
"""Expect .pdf file to be refused upload
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
with open("core/fixtures/test_upload_file.pdf", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/dwgupload/uploads", data={"name": "test_upload_file.txt", "uploadfiles": testf}
|
||||
)
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
t = re.search("Files refused:", content)
|
||||
self.assertIsNotNone(t, 'Logged in but failed to see "Files refused:"')
|
||||
|
||||
def test_dwg_upload_drawing(self):
|
||||
"""Expect no-suffix file to upload
|
||||
Note that this skips the git commit process. That would need a new test.
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
with open("core/fixtures/test_upload_nosuffix", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/dwguploadnogit/uploads", data={"name": "test_upload_nosuffix", "uploadfiles": testf}
|
||||
)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [
|
||||
r"test_upload_nosuffix",
|
||||
r"You cannot create folders here",
|
||||
r"Creating a folder is done by a nerd",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(
|
||||
phmatch, "Expect no-suffix file to upload OK. Failed to find expected text: '" + ph + "'"
|
||||
)
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
# UploadedFile.name see https://docs.djangoproject.com/en/4.1/ref/files/uploads/#django.core.files.uploadedfile.UploadedFile
|
||||
remove_file = pathlib.Path(settings.DRAWINGS_DATA) / "uploads" / "test_upload_nosuffix"
|
||||
remove_file.unlink()
|
||||
|
||||
|
||||
class ComplexLoginTests(TestCase):
|
||||
"""These test the login and capabilities of logged-in users, they do not use fixtures"""
|
||||
|
||||
def setUp(self):
|
||||
"""setUp runs once for each test in this class"""
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User()
|
||||
u.pk = 9000
|
||||
u.user_id = 8000
|
||||
u.username, u.password = "expotest", "secretword"
|
||||
u.email = "philip.sargent+ET@gmail.com"
|
||||
u.first_name, u.last_name = "ExpoTest", "Caver"
|
||||
u.is_staff = True
|
||||
u.is_superuser = True
|
||||
|
||||
u.set_password(u.password) # This creates a new salt and thus a new key for EACH test
|
||||
u.save() # vital that we save all this before attempting login
|
||||
# print ('\n',u.password)
|
||||
self.user = u
|
||||
|
||||
def tearDown(self):
|
||||
self.client.logout() # not needed as each test creates a new self.client
|
||||
# self.member.delete()
|
||||
##self.user.delete() # id attribute set to None !
|
||||
pass
|
||||
|
||||
# def test_login_redirect_for_non_logged_on_user(self): # need to fix this in real system
|
||||
# c = self.client
|
||||
# # Need to login first. Tests that we are redirected to login page if not logged in
|
||||
# response = c.get('noinfo/cave-number-index')
|
||||
# self.assertRedirects(response, "/login/?next=/committee/appointments/")
|
||||
|
||||
def test_ordinary_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
response = c.get("/accounts/login/") # defined by auth system
|
||||
content = response.content.decode()
|
||||
t = re.search(r"You are now logged in", content)
|
||||
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get 'Now you can' greeting")
|
||||
|
||||
def test_authentication_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
|
||||
# This is weird. I thought that the user had to login before she was in the authenticated state
|
||||
self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED before login")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED after login")
|
||||
|
||||
# c.logout() # This next test always means user is still authenticated after logout. Surely not?
|
||||
# self.assertFalse(u.is_authenticated, 'User \'' + u.username + '\' is STILL AUTHENTICATED after logout')
|
||||
|
||||
def test_admin_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
response = c.get("/admin/")
|
||||
content = response.content.decode()
|
||||
# with open('admin-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
t = re.search(r"Troggle database administration", content)
|
||||
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get the Troggle Admin page")
|
||||
|
||||
def test_noinfo_login(self):
|
||||
|
||||
c = self.client # inherited from TestCase
|
||||
u = self.user
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails if password=u.password !
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
response = c.get("/stats") # a page with the Troggle menus
|
||||
content = response.content.decode()
|
||||
t = re.search(r"User\:expotest", content)
|
||||
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get 'User:expotest' heading")
|
||||
|
||||
response = c.get("/noinfo/cave-number-index")
|
||||
content = response.content.decode()
|
||||
t = re.search(r"2001-07 Hoffnungschacht", content)
|
||||
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get /noinfo/ content")
|
||||
|
||||
def test_user_force(self):
|
||||
|
||||
c = self.client
|
||||
u = self.user
|
||||
|
||||
try:
|
||||
c.force_login(u)
|
||||
except:
|
||||
self.assertIsNotNone(
|
||||
None,
|
||||
"Unexpected exception trying to force_login as '"
|
||||
+ u.username
|
||||
+ "' but failed (Bad Django documentation?)",
|
||||
)
|
||||
|
||||
response = c.get("/stats") # a page with the Troggle menus
|
||||
content = response.content.decode()
|
||||
t = re.search(r"Log out", content)
|
||||
self.assertIsNotNone(t, "Forced logged in as '" + u.username + "' but failed to get Log out heading")
|
||||
|
||||
response = c.get("/accounts/login/")
|
||||
content = response.content.decode()
|
||||
t = re.search(r"You are now logged in", content)
|
||||
self.assertIsNotNone(t, "Forced logged in as '" + u.username + "' but failed to get /accounts/profile/ content")
|
||||
137
core/TESTS/test_parsers.py
Normal file
137
core/TESTS/test_parsers.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""
|
||||
We are using unittest for troggle.
|
||||
|
||||
Note that the database has not been parsed from the source files when these tests are run,
|
||||
so any path that relies on data being in the database will fail.
|
||||
|
||||
The simple redirections to files which exist, e.g. in
|
||||
/expoweb/
|
||||
/photos/
|
||||
etc. will test fine.
|
||||
|
||||
But paths like this:
|
||||
/survey_scans/
|
||||
/caves/
|
||||
which rely on database resolution will fail unless a fixture has been set up for
|
||||
them.
|
||||
|
||||
https://docs.djangoproject.com/en/dev/topics/testing/tools/
|
||||
"""
|
||||
import re
|
||||
import subprocess
|
||||
import unittest
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.test import Client, SimpleTestCase, TestCase
|
||||
|
||||
from troggle.core.models.troggle import Expedition, DataIssue, Person, PersonExpedition
|
||||
import troggle.parsers.logbooks as lbp
|
||||
|
||||
TEST_YEAR = "1986"
|
||||
lbp.ENTRIES[TEST_YEAR] = 4 # number of entries in the test logbook
|
||||
|
||||
class ImportTest(TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
def make_person(firstname, lastname, nickname=False, vfho=False, guest=False):
|
||||
fullname = f"{firstname} {lastname}"
|
||||
lookupAttribs = {"first_name": firstname, "last_name": (lastname or "")}
|
||||
nonLookupAttribs = {"is_vfho": vfho, "fullname": fullname, "nickname": nickname}
|
||||
person = Person.objects.create(**nonLookupAttribs, **lookupAttribs)
|
||||
|
||||
lookupAttribs = {"person": person, "expedition": cls.test_expo}
|
||||
nonLookupAttribs = {"is_guest": guest}
|
||||
pe = PersonExpedition.objects.create(**nonLookupAttribs, **lookupAttribs)
|
||||
|
||||
return person
|
||||
|
||||
import troggle.settings as settings
|
||||
|
||||
LOGBOOKS_PATH = settings.EXPOWEB / lbp.LOGBOOKS_DIR
|
||||
|
||||
cls.test_logbook = LOGBOOKS_PATH / TEST_YEAR / lbp.DEFAULT_LOGBOOK_FILE
|
||||
frontmatter_file = LOGBOOKS_PATH / TEST_YEAR / "frontmatter.html"
|
||||
if frontmatter_file.is_file():
|
||||
frontmatter_file.unlink() # delete if it exists
|
||||
|
||||
lookupAttribs = {"year": TEST_YEAR}
|
||||
nonLookupAttribs = {"name": f"CUCC expo-test {TEST_YEAR}"}
|
||||
cls.test_expo = Expedition.objects.create(**nonLookupAttribs, **lookupAttribs)
|
||||
|
||||
fred = make_person("Fred", "Smartarse", nickname="freddy")
|
||||
phil = make_person("Phil", "Tosser", nickname="tosspot")
|
||||
dave = make_person("David", "Smartarse", "")
|
||||
mike = make_person("Michael", "Wideboy", "WB", vfho=True)
|
||||
# NOT created Kurt, as the whole point is that he is a guest.
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_logbook_exists(self):
|
||||
self.assertTrue(self.test_logbook.is_file())
|
||||
|
||||
def test_logbook_parse(self):
|
||||
|
||||
lbp.LoadLogbook(self.test_expo)
|
||||
|
||||
issues = DataIssue.objects.all()
|
||||
messages = []
|
||||
for i in issues:
|
||||
if i.parser=="logbooks":
|
||||
# f"{self.parser} - {self.message}"
|
||||
messages.append(i.message)
|
||||
print(f"'{i.message}'")
|
||||
|
||||
expected = [
|
||||
" ! - 1986 No name match for: 'Kurt Keinnamen' in entry tid='1986_s02' for this expedition year.",
|
||||
]
|
||||
|
||||
not_expected = [
|
||||
" ! - 1986 No name match for: 'Dave Smartarse' in entry tid='1986_s01' for this expedition year.",
|
||||
" ! - 1986 Warning: logentry: surface - stupour - no expo member author for entry '1986_s03'",
|
||||
" ! - 1986 Warning: logentry: 123 - wave 2 - no expo member author for entry '1986_s02'",
|
||||
]
|
||||
|
||||
for e in expected:
|
||||
self.assertIn(e, messages)
|
||||
for e in not_expected:
|
||||
self.assertNotIn(e, messages)
|
||||
|
||||
def test_aliases(self):
|
||||
# Problem: '' empty string appears as valid alias for David Smartarse
|
||||
response = self.client.get(f"/aliases/{TEST_YEAR}")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = f"'fsmartarse'"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_survexfiles(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/survexfile/caves/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = f"Caves with subdirectories"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
def test_people(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/people")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = f"<td><a href=\"/personexpedition/FredSmartarse/{TEST_YEAR}\">{TEST_YEAR}</a></td>"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
162
core/TESTS/test_urls.py
Normal file
162
core/TESTS/test_urls.py
Normal file
@@ -0,0 +1,162 @@
|
||||
"""
|
||||
We are using unittest for troggle.
|
||||
|
||||
Note that the database has not been parsed from the source files when these tests are run,
|
||||
so any path that relies on data being in the database will fail.
|
||||
|
||||
https://docs.djangoproject.com/en/dev/topics/testing/tools/
|
||||
|
||||
We are not using
|
||||
https://github.com/FactoryBoy/factory_boy
|
||||
because we are trying to minimise the number of 3rd-party packages because they expose us to update hell,
|
||||
as experience in 2019-2020.
|
||||
However we could use
|
||||
https://docs.python.org/dev/library/unittest.mock.html
|
||||
as this is now part if python - if we can get our heads around it.
|
||||
|
||||
The tests in this file:
|
||||
|
||||
The code {% url THING %} or {% url THING PARAMETER %} appears a hundred times or more in the troggle/templates/ HTML template files.
|
||||
This is the template synstax for
|
||||
reverse('THING')
|
||||
or
|
||||
reverse('THING', args=[PARAMETER])
|
||||
|
||||
It is the URLS which take parameters which need understanding and testing. The reverse() which take no
|
||||
parameters should be fine as this is fundamental Django stuff which will have been tested to death.
|
||||
|
||||
But the reverse() function is purely syntactical, the PARAMETER is just a string which is applied to
|
||||
the url. So this is not testing anything important really. See the test_url_threed() below.
|
||||
|
||||
These url lines all come from templates/*.html
|
||||
|
||||
1. No tests: No parameters
|
||||
|
||||
{% url "caveindex" %}
|
||||
{% url "controlpanel" %}
|
||||
{% url "dataissues" %}
|
||||
{% url "dwgallfiles" %}
|
||||
{% url "dwgupload" %}
|
||||
{% url "eastings" %}
|
||||
{% url "exportlogbook" %}
|
||||
{% url "newcave" %}
|
||||
{% url "notablepersons" %}
|
||||
{% url "photoupload" %}
|
||||
{% url "walletedit" %}
|
||||
|
||||
Tests exist:
|
||||
{% url "stats" %}
|
||||
{% url "allscans" %}
|
||||
{% url "survexcaveslist" %}
|
||||
|
||||
2. With parameter
|
||||
|
||||
{% url "caveQMs" "1623-290" %}
|
||||
{% url "cave_openQMs" "1623-290" %}
|
||||
{% url "cavewallets" cave_id %}
|
||||
{% url "dwgfilesingle" drawing.dwgpath %}
|
||||
{% url "edit_cave" cave.url_parent cave.slug %}
|
||||
{% url "editentrance" cave.slug ent.entrance.slug %}
|
||||
{% url "editexpopage" path %}
|
||||
{% url "err" title %}
|
||||
{% url "expedition" 2022 %}
|
||||
{% url "newentrance" cave.slug %}
|
||||
{% url "survexcavessingle" cavedir %}
|
||||
{% url "survexcavessingle" cavefiles.0.1 %}
|
||||
{% url "svx" cavepath %}
|
||||
{% url "svx" survexfile.path %}
|
||||
{% url "svxlog" title %}
|
||||
{% url 'caveQMs' '1623-161' %}
|
||||
{% url 'image_selector' path %}
|
||||
{% url 'new_image_form' path %}
|
||||
|
||||
Tests exist:
|
||||
{% url "threed" title %}
|
||||
"""
|
||||
|
||||
|
||||
todo = """These just do {% url THING %} with no parameter, we also need tests which take a parameter
|
||||
|
||||
- Read all this https://developer.mozilla.org/en-US/docs/Learn/Server-side/Django/Testing
|
||||
|
||||
- Read all this https://realpython.com/testing-in-django-part-1-best-practices-and-examples/
|
||||
|
||||
- add 'coverage' to all tests
|
||||
|
||||
- statistics also needs test when we have put data into the database
|
||||
|
||||
"""
|
||||
|
||||
import re
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.test import Client, TestCase
|
||||
from django.urls import reverse, path
|
||||
|
||||
# class SimplePageTest(unittest.TestCase):
|
||||
class URLTests(TestCase):
|
||||
"""These tests may appear to be redundant, but in fact they exercise different bits of code. The urls.py
|
||||
dispatcher is sending these URLs view via different 'view' handlers, and they all need verifying.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Set up data for the whole TestCase
|
||||
# cls.foo = Foo.objects.create(bar="Test")
|
||||
# Some test using self.foo in tests below..
|
||||
# read in some SQL ?
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
# Every test needs a client.
|
||||
self.client = Client()
|
||||
|
||||
def test_statistics(self):
|
||||
response = self.client.get("/statistics")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"0 expeditions: 0 people, 0 caves and 0 logbook entries."
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_stats(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/stats")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = r"Total length: 0.0 km adding up the total for each year."
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_url_stats(self):
|
||||
"""Test the {% url "stats" %} reverse resolution
|
||||
path('statistics', statistics.stats, name="stats"),
|
||||
path('stats', statistics.stats, name="stats"),
|
||||
"""
|
||||
reversed_url = reverse('stats') # NB _ must be written as - if present in name
|
||||
self.assertEqual(reversed_url, "/stats")
|
||||
|
||||
def test_url_allscans(self):
|
||||
"""Test the {% url "allscans" %} reverse resolution
|
||||
path('survey_scans/', allscans, name="allscans"), # all the scans in all wallets
|
||||
"""
|
||||
reversed_url = reverse('allscans') # NB _ must be written as - if present in name
|
||||
self.assertEqual(reversed_url, "/survey_scans/")
|
||||
|
||||
def test_url_survexcaveslist(self):
|
||||
"""Test the {% url "allscans" %} reverse resolution
|
||||
path('survexfile/caves', survex.survexcaveslist, name="survexcaveslist"),
|
||||
path('survexfile/caves/', survex.survexcaveslist, name="survexcaveslist"), # auto slash not working
|
||||
"""
|
||||
reversed_url = reverse('survexcaveslist') # NB _ must be written as - if present in name
|
||||
self.assertEqual(reversed_url, "/survexfile/caves/")
|
||||
|
||||
def test_url_threed(self):
|
||||
"""Test the {% url "threed" %} reverse resolution
|
||||
path('survexfile/<path:survex_file>.3d', survex.threed, name="threed"),
|
||||
"""
|
||||
reversed_url = reverse('threed', args=['zilch']) # NB _ must be written as - if present in name
|
||||
self.assertEqual(reversed_url, "/survexfile/zilch.3d")
|
||||
|
||||
632
core/TESTS/tests-cuyc-ignored.py
Normal file
632
core/TESTS/tests-cuyc-ignored.py
Normal file
@@ -0,0 +1,632 @@
|
||||
"""
|
||||
IGNORED tests
|
||||
- all test files with hyphens in the filename are ignored
|
||||
- filnames with _ are OK
|
||||
|
||||
$ python manage.py test cuy.photologue --parallel
|
||||
only runs the photologue tests. Working.(well, it was working..)
|
||||
|
||||
$ python manage.py test cuy.mailman --parallel
|
||||
|
||||
$ python manage.py test paypal.standard --parallel
|
||||
needs work: a very large test suite
|
||||
|
||||
$ python manage.py test tagging --parallel
|
||||
a huge suite - needs a lot of work to with Django 1.11 & python3
|
||||
|
||||
$ python manage.py test cuy.club --parallel
|
||||
Runs the tests in this file only
|
||||
"""
|
||||
|
||||
import re
|
||||
import unittest
|
||||
|
||||
from django.test import Client, SimpleTestCase, TestCase, TransactionTestCase
|
||||
|
||||
|
||||
class ImportTest(TestCase):
|
||||
def test_import_imports(self):
|
||||
#ed to go through all modules and copy all imports here
|
||||
from io import StringIO
|
||||
|
||||
from cuy.club.models import (Article, Event, Member, Webpage,
|
||||
WebpageCategory)
|
||||
from cuy.website.views.generic import PUBLIC_LOGIN
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.decorators import login_required
|
||||
from django.contrib.auth.models import User
|
||||
from django.core import management
|
||||
from django.db import connection, connections
|
||||
from django.db.utils import IntegrityError
|
||||
from django.http import HttpResponse, HttpResponseRedirect
|
||||
from django.shortcuts import get_object_or_404, render
|
||||
from django.template.defaultfilters import slugify
|
||||
from django.utils.timezone import get_current_timezone, make_aware
|
||||
|
||||
|
||||
class SimpleTest(SimpleTestCase):
|
||||
def test_arith_mult(self):
|
||||
"""
|
||||
Tests that 10 x 10 always equals 100.
|
||||
"""
|
||||
self.assertEqual(10*10, 100)
|
||||
|
||||
|
||||
class DataTests(TestCase ):
|
||||
'''These check that the NULL and NON-UNIQUE constraints are working in the database '''
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
from cuy.club.models import Member
|
||||
from django.contrib.auth.models import User
|
||||
m = Member()
|
||||
m.pk=8000
|
||||
m.user_id = 9000 # not NULL constraint
|
||||
m.save()
|
||||
self.member = m
|
||||
|
||||
u = User()
|
||||
u.pk = 9000
|
||||
u.user_id = 8000
|
||||
u.username, u.password ='stinker', 'secretword'
|
||||
u.email='philip.sargent+SP@gmail.com'
|
||||
u.first_name, u.last_name ='Stinker', 'Pinker'
|
||||
u.save()
|
||||
self.user = u
|
||||
|
||||
def tearDown(self):
|
||||
#self.member.delete() # must delete member before user
|
||||
#self.user.delete() # horrible crash, why?
|
||||
pass
|
||||
|
||||
def test_member_not_null_field(self):
|
||||
from cuy.club.models import Member
|
||||
from django.db.utils import IntegrityError
|
||||
n = Member()
|
||||
try:
|
||||
n.save()
|
||||
except IntegrityError as ex:
|
||||
t = re.search(r'NOT NULL constraint failed: club_member.user_id', str(ex))
|
||||
self.assertIsNotNone(t, "Exception is not the expected 'NOT NULL constraint failed'")
|
||||
n.user_id = 1000
|
||||
try:
|
||||
n.save
|
||||
except:
|
||||
return self.assertIsNotNone(None, "Failed to save valid Member to database")
|
||||
|
||||
def test_member_not_unique_field(self):
|
||||
from cuy.club.models import Member
|
||||
from django.db.utils import IntegrityError
|
||||
m1 = Member()
|
||||
m2 = Member()
|
||||
m1.user_id = 1000
|
||||
m2.user_id = m1.user_id
|
||||
m1.save()
|
||||
try:
|
||||
m2.save()
|
||||
except IntegrityError as ex:
|
||||
t = re.search(r'UNIQUE constraint failed: club_member.user_id', str(ex))
|
||||
return self.assertIsNotNone(t, "IntegrityError as expected but message is not the expected 'UNIQUE constraint failed'" )
|
||||
self.assertIsNotNone(None, "Failed to enforce 'UNIQUE constraint' on saving two Member objects with same user_id")
|
||||
|
||||
def test_article_invalid_date(self):
|
||||
from cuy.club.models import Article, Member
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db.utils import IntegrityError
|
||||
|
||||
a = Article()
|
||||
m = self.member
|
||||
a.author_id = m.user_id
|
||||
|
||||
a.publish="not a valid datetime"
|
||||
try:
|
||||
a.save()
|
||||
except ValidationError as ex:
|
||||
t = re.search(r'value has an invalid format. It must be in YYYY-MM-DD HH:MM', str(ex))
|
||||
self.assertIsNotNone(t, "Exception is not the expected 'invalid format'")
|
||||
|
||||
def test_article_and_author_not_null(self):
|
||||
from cuy.club.models import Article, Member
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db.utils import IntegrityError
|
||||
|
||||
a2 = Article()
|
||||
a2.publish ="2021-02-17 17:25"
|
||||
a2.author_id = None
|
||||
|
||||
try:
|
||||
a2.save()
|
||||
except IntegrityError as ex:
|
||||
t = re.search(r'NOT NULL constraint failed: club_article.author_id', str(ex))
|
||||
self.assertIsNotNone(t, "Exception is not the expected 'NOT NULL constraint failed'")
|
||||
except:
|
||||
self.assertIsNotNone(None, "Exception is not the expected 'NOT NULL constraint failed' IntegrityError")
|
||||
|
||||
def test_article_and_author_ok(self):
|
||||
from cuy.club.models import Article, Member
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db.utils import IntegrityError
|
||||
m = self.member
|
||||
|
||||
a3 = Article()
|
||||
a3.pk = 5000
|
||||
a3.publish ="2021-02-17 17:25"
|
||||
|
||||
a3.author_id = m.pk
|
||||
try:
|
||||
a3.save()
|
||||
except:
|
||||
return self.assertIsNotNone(None, "Failed to save valid Article to database")
|
||||
|
||||
|
||||
def test_member_and_user(self):
|
||||
u = self.user
|
||||
m = self.member
|
||||
|
||||
m.user = u
|
||||
self.assertEqual(m.user.last_name, 'Pinker')
|
||||
m.save()
|
||||
u.save()
|
||||
|
||||
class FixturePageTests(TestCase):
|
||||
fixtures = ['cuyc_basic_data.json', 'test_data.json', 'auth_user_gussie']
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
self.user = User.objects.get(username='gussie')
|
||||
self.member = self.user.profile
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_fix_event_loaded(self):
|
||||
from cuy.club.models import Event
|
||||
e = Event.objects.get(slug='spring-in-the-med')
|
||||
self.assertEqual(str(e.shore_contact.first_name()), 'Stiffy')
|
||||
self.assertEqual(str(e.organiser.last_name()), 'Fittleworth')
|
||||
|
||||
def test_fix_page_all_trips(self):
|
||||
response = self.client.get('/programme/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Spring in the Arctic', content)
|
||||
self.assertIsNotNone(t, "Failed to see Event loaded from fixture")
|
||||
t = re.search(r'High Summer in the Irish Sea', content)
|
||||
self.assertIsNotNone(t, "Failed to see Event loaded from fixture")
|
||||
|
||||
def test_fix_page_event(self):
|
||||
response = self.client.get('/programme/events/spring-in-the-arctic/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Spring in the Arctic', content)
|
||||
self.assertIsNotNone(t, "Failed to see Event loaded from fixture")
|
||||
|
||||
def test_fix_admin_login_fail(self):
|
||||
c = self.client
|
||||
from cuy.club.models import Member
|
||||
from django.contrib.auth.models import User
|
||||
m = Member.objects.get(pk=9002)
|
||||
u = User.objects.get(username='bingo')
|
||||
|
||||
self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')
|
||||
|
||||
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
|
||||
|
||||
response = c.get('/admin/')
|
||||
content = response.content.decode()
|
||||
with open('admin-op.html', 'w') as f:
|
||||
f.write(content)
|
||||
t = re.search(r'Site administration', content)
|
||||
self.assertIsNone(t, 'Logged in as \'' + u.username + '\' (not staff) but still managed to get the Admin page' )
|
||||
|
||||
|
||||
class ComplexLoginTests(TestCase):
|
||||
'''These test the login and capabilities of logged-in users'''
|
||||
def setUp(self):
|
||||
'''setUp runs once for each test in this class'''
|
||||
from cuy.club.models import AFFILIATION, MEMBER_TYPES, Member
|
||||
from django.contrib.auth.models import User
|
||||
m = Member()
|
||||
m.pk=8000
|
||||
m.user_id = 9000 # not NULL constraint
|
||||
m.email = "philip.sargent+HG@gmail.com"
|
||||
m.member_type = MEMBER_TYPES[1]
|
||||
m.affiliation = AFFILIATION[3]
|
||||
m.committee_email_prefix = 'honoria'
|
||||
|
||||
u = User()
|
||||
u.pk = 9000
|
||||
u.user_id = 8000
|
||||
u.username, u.password ='honoria', 'secretword'
|
||||
u.email='philip.sargent+HG@gmail.com'
|
||||
u.first_name, u.last_name ='Honoria', 'Glossop'
|
||||
u.is_staff = True
|
||||
u.is_superuser = True
|
||||
|
||||
u.set_password(u.password) # This creates a new salt and thus a new key for EACH test
|
||||
u.save() # vital that we save all this before attempting login
|
||||
#print ('\n',u.password)
|
||||
m.save()
|
||||
self.user = u
|
||||
self.member = m
|
||||
|
||||
from cuy.club.models import ClubRole, Elected
|
||||
cr = ClubRole()
|
||||
cr.id = 7000
|
||||
cr.title = 'Skipper'
|
||||
cr.short_description = 'Club skipper who can lead trips'
|
||||
cr.committee_position = True
|
||||
cr.rank = 8
|
||||
cr.save()
|
||||
self.clubrole = cr
|
||||
|
||||
e = Elected()
|
||||
e.member = m
|
||||
e.club_role = cr
|
||||
e.save()
|
||||
self.elected = e
|
||||
|
||||
def tearDown(self):
|
||||
self.client.logout() # not needed as each test creates a new self.client
|
||||
#self.member.delete()
|
||||
##self.user.delete() # id attribute set to None !
|
||||
pass
|
||||
|
||||
def test_login_redirect_for_non_logged_on_user(self):
|
||||
c = self.client
|
||||
# Need to login first. Tests that we are redirected to login page if not logged in
|
||||
response = c.get('/committee/appointments/')
|
||||
self.assertRedirects(response, "/login/?next=/committee/appointments/")
|
||||
|
||||
def test_ordinary_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
|
||||
self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')
|
||||
|
||||
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
|
||||
response = c.get('/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Hello Honoria', content)
|
||||
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get personal greeting' )
|
||||
|
||||
def test_authentication_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
|
||||
self.assertTrue(u.is_active, 'User \'' + u.username + '\' is INACTIVE')
|
||||
|
||||
# This is weird. I thought that the user had to login before she was in the authenticated state
|
||||
self.assertTrue(u.is_authenticated, 'User \'' + u.username + '\' is NOT AUTHENTICATED before login')
|
||||
|
||||
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
|
||||
|
||||
self.assertTrue(u.is_authenticated, 'User \'' + u.username + '\' is NOT AUTHENTICATED after login')
|
||||
|
||||
c.logout()
|
||||
self.assertFalse(u.is_authenticated, 'User \'' + u.username + '\' is STILL AUTHENTICATED after logout')
|
||||
|
||||
def test_admin_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
m = self.member
|
||||
|
||||
m.user = u
|
||||
|
||||
logged_in = c.login(username=u.username, password='secretword') # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
|
||||
|
||||
response = c.get('/admin/')
|
||||
content = response.content.decode()
|
||||
# with open('admin-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
t = re.search(r'Site administration', content)
|
||||
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get the Admin page' )
|
||||
|
||||
def test_user_account_login(self):
|
||||
# User must be associated with a Member for whom is_committee() is True
|
||||
c = self.client
|
||||
u = self.user
|
||||
m = self.member
|
||||
|
||||
m.user = u
|
||||
|
||||
logged_in = c.login(username=u.username, password='secretword') # fails if password=u.password !
|
||||
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
|
||||
|
||||
response = c.get('/accounts/profile/')
|
||||
content = response.content.decode()
|
||||
# with open('account-profile-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
t = re.search(r'CUYC Member Profile - Cambridge University Yacht Club', content)
|
||||
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get /accounts/profile/ content')
|
||||
|
||||
def test_committee_login(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
# User must be associated with a Member for whom is_committee() is True
|
||||
c = self.client # inherited from TestCase
|
||||
u = self.user
|
||||
m = self.member
|
||||
cr = self.clubrole
|
||||
e = self.elected
|
||||
|
||||
m.user = u
|
||||
|
||||
logged_in = c.login(username=u.username, password='secretword') # fails if password=u.password !
|
||||
self.assertTrue(logged_in, 'FAILED to login as \'' + u.username + '\'')
|
||||
response = c.get('/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Hello Honoria', content)
|
||||
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get personal greeting' )
|
||||
|
||||
response = c.get('/committee/appointments/')
|
||||
content = response.content.decode()
|
||||
# with open('cmttee-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
t = re.search(r'A word of warning...', content)
|
||||
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get /committee/ content')
|
||||
|
||||
def test_user_force(self):
|
||||
from django.conf import settings
|
||||
c = self.client
|
||||
u = self.user
|
||||
m = self.member
|
||||
|
||||
m.user = u
|
||||
|
||||
try:
|
||||
c.force_login(u)
|
||||
except:
|
||||
self.assertIsNotNone(None, 'Unexpected exception trying to force_login as \'' + u.username + '\' but failed (Bad Django documentation?)')
|
||||
|
||||
response = c.get('/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Hello Honoria', content)
|
||||
self.assertIsNotNone(t, 'Forced logged in as \'' + u.username + '\' but failed to get personal greeting' )
|
||||
|
||||
response = c.get('/accounts/profile/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'From here you can update your', content)
|
||||
self.assertIsNotNone(t, 'Forced logged in as \'' + u.username + '\' but failed to get /accounts/profile/ content')
|
||||
|
||||
|
||||
class DynamicPageTests(TestCase):
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_empty_yachts(self):
|
||||
# no page there initially
|
||||
response = self.client.get('/yachts/')
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_full_yachts(self):
|
||||
'''Creating a WebpageCategory and an index webpage creates a valid url
|
||||
'''
|
||||
from cuy.club.models import Webpage, WebpageCategory
|
||||
wc = WebpageCategory()
|
||||
wc.pk = 8000
|
||||
wc.id = 8000
|
||||
wc.name, wc.slug ='Yachts', 'yachts'
|
||||
wc.save()
|
||||
self.webcategory = wc
|
||||
|
||||
p = Webpage()
|
||||
p.pk = 9000
|
||||
p.id = 9000
|
||||
p.category_id = wc.id
|
||||
p.description = "Current Yacht"
|
||||
p.edited = 1
|
||||
p.event_id = None
|
||||
p.index = 1
|
||||
p.markup = "<h1>Skylark</h1>"
|
||||
p.ordering = 10
|
||||
p.slug = "yacht"
|
||||
p.title = "Skylark Yacht"
|
||||
p.save()
|
||||
self.webpage = p
|
||||
|
||||
response = self.client.get('/yachts/')
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
class PageTests(TestCase):
|
||||
def setUp(self):
|
||||
# Every test needs a client.
|
||||
# new in Django 1.5 no need to create self.client first
|
||||
# https://docs.djangoproject.com/en/dev/topics/testing/tools/#django.test.LiveServerTestCase
|
||||
#self.client = Client()
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_basic_admin(self):
|
||||
response = self.client.get('/admin/login/')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_basic_admindoc(self):
|
||||
# Need to login first. Tests that we are redirected
|
||||
response = self.client.get('/admin/doc/models/')
|
||||
self.assertRedirects(response, "/admin/login/?next=/admin/doc/models/")
|
||||
|
||||
def test_basic_programme(self):
|
||||
response = self.client.get('/programme/')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_basic_login (self):
|
||||
# Need to login first
|
||||
response = self.client.post('/login/', {'username': 'gussie', 'password': 'secretword'})
|
||||
if response.status_code == 302:
|
||||
print(response['location'])
|
||||
self.assertEqual(response.status_code, 200) # fails because user does not exist
|
||||
|
||||
def test_basic_committee(self):
|
||||
# Need to login first. Tests that we are redirected to login page
|
||||
response = self.client.get('/committee/')
|
||||
self.assertRedirects(response, "/login/?next=/committee/")
|
||||
|
||||
# --- Check non-logged-in users cannot see these
|
||||
def test_basic_gallery(self):
|
||||
response = self.client.get('/gallery/')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_basic_sitemap(self):
|
||||
response = self.client.get('/site-map/')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
|
||||
|
||||
# --- public club pages created by content in templates/*.html
|
||||
def test_basic_club(self):
|
||||
response = self.client.get('/club/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'offers opportunities for members of the university to sail yachts', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_basic_programme(self):
|
||||
response = self.client.get('/programme/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'If you would like to go on any of these events', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_basic_programme_onshore(self):
|
||||
response = self.client.get('/programme/on_shore/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'All Upcoming Shore Based Events', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_equal_opps(self):
|
||||
response = self.client.get('/club/equal-opps/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'commitment to a policy of equal opportunities', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_safety(self):
|
||||
response = self.client.get('/club/safety/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'endeavour to maintain the highest levels of safety', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_safety_risk(self):
|
||||
response = self.client.get('/club/safety/risk/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'rules for the use of safety lines to be described and monitored by the skipper.', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_safetypolicy(self):
|
||||
response = self.client.get('/club/safetypolicy/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'should be capable of swimming at least fifty meters in clothing and keeping afloat for at least five minutes', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_safety_rules(self):
|
||||
response = self.client.get('/club/safety/rules/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Safety Officer is responsible for the maintenance of safety records', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_regulations(self):
|
||||
response = self.client.get('/club/regulations/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Sanger Institute, the Babraham Institute, Wellcome and MRC Research Laboratories', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_constitution(self):
|
||||
response = self.client.get('/club/constitution/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'to provide a wide variety of safe and affordable yacht sailing', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_clubcommittee(self):
|
||||
response = self.client.get('/club/committee/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'CUYC elects new officers as needed, usually at the beginning of each term', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_damages(self):
|
||||
response = self.client.get('/club/damages/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'all crew participants may be required to contribute to the payment of damages', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_training(self):
|
||||
response = self.client.get('/training/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'members of the club are always happy to pass on informal training tips', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_racing(self):
|
||||
response = self.client.get('/racing/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'CUYC Racing Squad', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_blog(self):
|
||||
response = self.client.get('/blog/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Latest Posts', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_gallery(self):
|
||||
response = self.client.get('/gallery/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Photo Galleries', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_about_photos(self):
|
||||
response = self.client.get('/about_photos/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'have been supplied by members of CUYC', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_loginhelp(self):
|
||||
response = self.client.get('/login/help/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Existing CUYC Member, without an account?', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_loginregister(self):
|
||||
response = self.client.get('/login/register/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'If you are, or have ever been, a CUYC or CUCrC member', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
# --- These pages are not connected to top level public menus but are in fact public
|
||||
def test_page_club_tripinformation(self):
|
||||
response = self.client.get('/club/trip-information/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'organisers have a choice to add a sum to the trip fee quoted on the website to cover expenses', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_club_trippayment(self):
|
||||
response = self.client.get('/club/trip-information/payment/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'All payments to the club should be sent via Paypal', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_club_trip_typical_day(self):
|
||||
response = self.client.get('/club/trip-information/typical-day/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Skipper and first mate crawl out of their sleeping bags early', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_club_trip_faq(self):
|
||||
response = self.client.get('/club/trip-information/faq/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'Different people are seasick in different ways', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
def test_page_club_trip_kit(self):
|
||||
response = self.client.get('/club/trip-information/kit/')
|
||||
content = response.content.decode()
|
||||
t = re.search(r'appropriate quantity of base layer clothes to match the duration', content)
|
||||
self.assertIsNotNone(t)
|
||||
|
||||
564
core/TESTS/tests.py
Normal file
564
core/TESTS/tests.py
Normal file
@@ -0,0 +1,564 @@
|
||||
"""
|
||||
We are using unittest for troggle.
|
||||
|
||||
Note that the database has not been parsed from the source files when these tests are run,
|
||||
so any path that relies on data being in the database will fail.
|
||||
|
||||
The simple redirections to files which exist, e.g. in
|
||||
/expoweb/
|
||||
/photos/
|
||||
etc. will test fine.
|
||||
|
||||
But paths like this:
|
||||
/survey_scans/
|
||||
/caves/
|
||||
which rely on database resolution will fail unless a fixture has been set up for
|
||||
them.
|
||||
|
||||
https://docs.djangoproject.com/en/dev/topics/testing/tools/
|
||||
"""
|
||||
|
||||
|
||||
todo = """ADD TESTS when we are redirecting /expofiles/ to a remote file-delivering site
|
||||
|
||||
- Add test for running cavern to produce a .3d file
|
||||
"""
|
||||
|
||||
import re
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.test import Client, TestCase
|
||||
|
||||
|
||||
# class SimplePageTest(unittest.TestCase):
|
||||
class PageTests(TestCase):
|
||||
"""These tests may appear to be redundant, but in fact they exercise different bits of code. The urls.py
|
||||
dispatcher is sending these URLs view via different 'view' handlers, and they all need verifying.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Set up data for the whole TestCase
|
||||
# cls.foo = Foo.objects.create(bar="Test")
|
||||
# Some test using self.foo in tests below..
|
||||
# read in some SQL ?
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
# Every test needs a client.
|
||||
self.client = Client()
|
||||
|
||||
def test_expoweb_root(self):
|
||||
response = self.client.get("")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
ph = r"CUCC in Austria"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_root_slash(self):
|
||||
response = self.client.get("/")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
ph = r"CUCC in Austria"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_paths(self):
|
||||
response = self.client.get("/pathsreport")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"This report is generated from"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_dir(self):
|
||||
response = self.client.get("/handbook")
|
||||
response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND) # 302 directory, so redirects to /index.htm
|
||||
|
||||
def test_expoweb_dirslash(self):
|
||||
response = self.client.get("/handbook/")
|
||||
response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND) # 302 directory, so redirects to /index.htm
|
||||
|
||||
def test_expoweb_dir_no_index(self):
|
||||
response = self.client.get("/handbook/troggle")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
ph = r"Page not found handbook/troggle/index.html"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_dir_with_index_htm(self):
|
||||
response = self.client.get("/years/1999/index.htm")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK) # directory, so redirects to /index.htm
|
||||
ph = r"Passage descriptions for 1999"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_dir_with_index_html(self):
|
||||
response = self.client.get("/years/2015/index.html")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK) # directory, so redirects to /index.htm
|
||||
ph = r"Things left at top camp 2014"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_dir_with_index2(self):
|
||||
response = self.client.get("/handbook/index.htm")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
ph = r"Introduction to expo"
|
||||
phmatch = re.search(ph, content)
|
||||
# print("\n ! - test_expoweb_dir_with_index2\n{}\n{}".format(response.reason_phrase, content))
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_htm(self):
|
||||
response = self.client.get("/handbook/index.htm")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
ph = r"Introduction to expo"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_notfound(self):
|
||||
response = self.client.get("/handbook/_test_zyxxypqrqx.html")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
ph = r"<h1>Page not found"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_no_dir(self):
|
||||
# slash where there should not be one
|
||||
response = self.client.get("/handbook/_test_zyxxypqrqx/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<h1>Directory not found"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_troggle_default(self):
|
||||
# default page after logon
|
||||
response = self.client.get("/troggle")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"expeditions the club has undertaken"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_troggle_default_slash(self):
|
||||
response = self.client.get("/troggle/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<h1>Directory not found"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_expoweb_via_areaid(self):
|
||||
# the dispatcher takes a detour via the cave renering procedure for this
|
||||
response = self.client.get("/guidebook/t/via201.jpg")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(len(response.content), 6057)
|
||||
|
||||
def test_cave_kataster_not_found(self):
|
||||
# database not loaded, so no caves found; so looks for a generic expopage and fails
|
||||
response = self.client.get("/1623/115.htm")
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
content = response.content.decode()
|
||||
ph = r"Page not found 1623/115.htm"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_caves_page(self):
|
||||
response = self.client.get("/caves")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"Cave Number Index - kept updated"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_caves_page_kataster_not_found(self):
|
||||
response = self.client.get("/caves")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"115"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_ss(self):
|
||||
response = self.client.get("/survey_scans/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"All Survey scans folders "
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_admin(self):
|
||||
# see the login page
|
||||
response = self.client.get("/admin/login/")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
ph = r'<h1 id="site-name">Troggle database administration</h1>'
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_admindocs_exped(self):
|
||||
# Get redirected to login page
|
||||
response = self.client.get("/admin/doc/models/core.expedition/")
|
||||
response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND) # 302
|
||||
|
||||
def test_page_expofiles_root_dir(self):
|
||||
# Root expofiles - odd interaction with url parsing so needs testing
|
||||
response = self.client.get("/expofiles")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
|
||||
r'<a href="/expofiles/photos">/photos/',
|
||||
r'<a href="/expofiles/surveyscans">/surveyscans/',
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofiles_root_slash_dir(self):
|
||||
# Root expofiles - odd interaction with url parsing so needs testing
|
||||
response = self.client.get("/expofiles/")
|
||||
if response.status_code != HTTPStatus.OK: # 200
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND: # 302
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
|
||||
r'<a href="/expofiles/photos">/photos/',
|
||||
r'<a href="/expofiles/surveyscans">/surveyscans/',
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofiles_badness(self):
|
||||
# should display expofiles directory contents not its parent
|
||||
response = self.client.get("/expofiles/99badness99")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r'a href="/expofiles/geotiffsurveys">/geotiffsurveys/',
|
||||
r'<a href="/expofiles/photos">/photos/',
|
||||
r'<a href="/expofiles/surveyscans">/surveyscans/',
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofiles_docs_dir(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/expofiles/documents/")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r'a href="/expofiles/documents/bier-tent-instructions.pdf">bier-tent-instructions.pdf',
|
||||
r'a href="/expofiles/documents/boc.pdf">boc.pdf',
|
||||
r'a href="/expofiles/documents/idiots-guide-expo-git.pdf"',
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_survey_scans_dir(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/expofiles/surveyscans")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r'<a href="/expofiles/surveyscans/2004">/2004/',
|
||||
r'<a href="/expofiles/surveyscans/1989LUSS">/1989LUSS/',
|
||||
r'<a href="/expofiles/surveyscans/2018">/2018',
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_folk(self):
|
||||
# This page is separately generated, so it has the full data content
|
||||
response = self.client.get("/folk/index.htm")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [
|
||||
r"involves some active contribution",
|
||||
r"Naomi Griffiths",
|
||||
r"Gail Smith",
|
||||
r"Phil Wigglesworth",
|
||||
r"A more obscure record of longest gap between expos has",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofile_documents(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/expofiles/documents")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"notice_generale_cordes_courant"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofile_documents_slash(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/expofiles/documents/")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"notice_generale_cordes_courant"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofile_document_loeffler_pdf(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/expofiles/documents/surveying/tunnel-loefflerCP35-only.pdf")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(len(response.content), 2299270)
|
||||
|
||||
def test_page_expofile_document_rope_pdf(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/expofiles/documents/ropes/rope-age-agm-2019.pdf")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(len(response.content), 76197)
|
||||
|
||||
def test_page_expofile_document_png(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/expofiles/documents/callout-2012.png")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(len(response.content), 69921)
|
||||
|
||||
def test_page_expofile_writeup(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/expofiles/writeups/1982/logbook1982.pdf")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(len(response.content), 12915413)
|
||||
|
||||
def test_page_site_media_ok(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/site_media/surveyHover.gif")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(len(response.content), 39482) # need to check it is not just an error page
|
||||
|
||||
def test_page_site_media_css(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/site_media/css/trog3.css")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode() # need to check it is not just an error page
|
||||
ph = r"This text is used by the test system to determine that trog3.css loaded correctly"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_photos_ok(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/photos/2018/PhilipSargent/corin.jpg") # exists
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(len(response.content), 67487) # need to check it is not just an error page
|
||||
|
||||
def test_page_photos_not_ok(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/photos/2018/PhilipSargent/_corin.jpeg") # does not exist
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
content = response.content.decode()
|
||||
ph = r"<title>Page not found 2018/PhilipSargent/_corin.jpeg</title>"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_photos_dir(self):
|
||||
# Flat file tests.
|
||||
response = self.client.get("/photos/2018/PhilipSargent/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"Directory not displayed"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_survey_scans_empty(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/survey_scans/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"contains the scanned original in-cave survey notes and sketches"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_dwgdataraw_empty(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/dwgdataraw/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<h1>Directory not found"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_dwgallfiles_empty(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/dwgfiles")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r"All Tunnel and Therion files",
|
||||
r"<th>Wallets</th><th>Scan files in the wallets</th><th>Frames</th></tr>",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_dwgallfiles_empty_slash(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/dwgfiles/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r"All Tunnel and Therion files",
|
||||
r"<th>Wallets</th><th>Scan files in the wallets</th><th>Frames</th></tr>",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_slash_empty(self):
|
||||
# tslash where there should not be one
|
||||
response = self.client.get("/expedition/1979/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<h1>Directory not found"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_not_found_survexfile_cave(self):
|
||||
response = self.client.get("/survexfile/not_a_real_cave_number")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"Cave Identifier not found in database"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
def test_dataissues(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/dataissues")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"as well as these import/parsing issues"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_therionissues(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/therionissues")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"! Un-parsed image filename"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_surveximport(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/surveximport")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = r"The number at the left-hand margin is the depth"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_survexdebug(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/survexdebug")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"Running list of warnings during import"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_eastings(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/eastings")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<tr><th>Survex Station</th><th>x</th><th>y</th></tr>"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
# ADD TESTS when we are redirecting /expofiles/ to get the actual files using e.g.
|
||||
# import requests
|
||||
# page = requests.get("http://dataquestio.github.io/web-scraping-pages/simple.html")
|
||||
|
||||
# these need a fixture to load the datbase before they will pass
|
||||
# we also need tests for invalid queries to check that error pages are right
|
||||
|
||||
# def test_page_survey_scans_khplan2_png(self):
|
||||
# # this has an error as the database has not been loaded yet in the tests
|
||||
# response = self.client.get('/survey_scans/smkhs/khplan2.png')
|
||||
# if response.status_code != HTTPStatus.OK:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
# if response.status_code != HTTPStatus.FOUND:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# self.assertEqual(len(response.content), 823304) # fails, but is working manually!
|
||||
|
||||
# def test_page_dwgdataraw_107sketch_xml(self):
|
||||
# # this has an error as the database has not been loaded yet in the tests
|
||||
# response = self.client.get('/dwgdataraw/107/107sketch-v2.xml')
|
||||
# if response.status_code != HTTPStatus.OK:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
# if response.status_code != HTTPStatus.FOUND:
|
||||
# self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# content = response.content.decode()
|
||||
# for ph in [ r'tunneldate="2014-08-21 11:34:00"',
|
||||
# r'<sketchsubset subname="Caves of the Loser Plateau"/>',
|
||||
# r'sfsketch="ollyjen107drawings',
|
||||
# r'sfsketch="surveyscans/2014/2014#01',
|
||||
# r'aa-js-plan.png"' ]:
|
||||
# phmatch = re.search(ph, content)
|
||||
# self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph +"'")
|
||||
|
||||
|
||||
# database not loaded yet:
|
||||
# response = self.client.get('/survey_scans/1991surveybook/page0002.png')
|
||||
# response = self.client.get('/survey_scans/1991surveybook/')
|
||||
# content = response.content.decode()
|
||||
# print(content)
|
||||
# png93 = re.search(r'/page0093.png">page0093.png</a></td>', content)
|
||||
186
core/admin.py
186
core/admin.py
@@ -1,22 +1,41 @@
|
||||
from troggle.core.models import *
|
||||
from django.contrib import admin
|
||||
from django.forms import ModelForm
|
||||
import django.forms as forms
|
||||
from django.http import HttpResponse
|
||||
from django.core import serializers
|
||||
from troggle.core.views_other import downloadLogbook
|
||||
#from troggle.reversion.admin import VersionAdmin #django-reversion version control
|
||||
from django.http import HttpResponse
|
||||
|
||||
from troggle.core.models.caves import Area, Cave, CaveAndEntrance, Entrance
|
||||
from troggle.core.models.logbooks import QM, LogbookEntry, PersonLogEntry, CaveSlug
|
||||
from troggle.core.models.survex import (
|
||||
DrawingFile,
|
||||
SingleScan,
|
||||
SurvexBlock,
|
||||
SurvexDirectory,
|
||||
SurvexFile,
|
||||
SurvexPersonRole,
|
||||
SurvexStation,
|
||||
)
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
|
||||
|
||||
"""This code significantly adds to the capabilities of the Django Management control panel for Troggle data.
|
||||
In particular, it enables JSON export of any data with 'export_as_json'
|
||||
and configures the search fields to be used within the control panel.
|
||||
|
||||
What is the search path for the css and js inclusions in the Media subclasses though ?!
|
||||
|
||||
The page looks for /static/jquery/jquery.min.js
|
||||
"""
|
||||
|
||||
|
||||
class TroggleModelAdmin(admin.ModelAdmin):
|
||||
|
||||
def save_model(self, request, obj, form, change):
|
||||
"""overriding admin save to fill the new_since parsing_field"""
|
||||
obj.new_since_parsing=True
|
||||
"""overriding admin save to fill the new_since parsing_field
|
||||
|
||||
new_since_parsing is not currently used in troggle. It is a fossil."""
|
||||
obj.new_since_parsing = True
|
||||
obj.save()
|
||||
|
||||
|
||||
class Media:
|
||||
js = ('jquery/jquery.min.js','js/QM_helper.js')
|
||||
js = ("jquery/jquery.min.js", "js/QM_helper.js") # not currently available to troggle, see media/js/README
|
||||
|
||||
|
||||
class RoleInline(admin.TabularInline):
|
||||
@@ -28,58 +47,36 @@ class SurvexBlockAdmin(TroggleModelAdmin):
|
||||
inlines = (RoleInline,)
|
||||
|
||||
|
||||
class ScannedImageInline(admin.TabularInline):
|
||||
model = ScannedImage
|
||||
extra = 4
|
||||
# class QMsFoundInline(admin.TabularInline):
|
||||
# model = QM
|
||||
# fk_name = "found_by"
|
||||
# fields = ("number", "grade", "location_description", "comment") # need to add foreignkey to cave part
|
||||
# extra = 1
|
||||
|
||||
|
||||
class OtherCaveInline(admin.TabularInline):
|
||||
model = OtherCaveName
|
||||
class PersonLogEntryInline(admin.TabularInline):
|
||||
model = PersonLogEntry
|
||||
raw_id_fields = ("personexpedition",)
|
||||
extra = 1
|
||||
|
||||
|
||||
class SurveyAdmin(TroggleModelAdmin):
|
||||
inlines = (ScannedImageInline,)
|
||||
search_fields = ('expedition__year','wallet_number')
|
||||
|
||||
|
||||
class QMsFoundInline(admin.TabularInline):
|
||||
model=QM
|
||||
fk_name='found_by'
|
||||
fields=('number','grade','location_description','comment')#need to add foreignkey to cave part
|
||||
extra=1
|
||||
|
||||
|
||||
class PhotoInline(admin.TabularInline):
|
||||
model = DPhoto
|
||||
exclude = ['is_mugshot' ]
|
||||
extra = 1
|
||||
|
||||
|
||||
class PersonTripInline(admin.TabularInline):
|
||||
model = PersonTrip
|
||||
raw_id_fields = ('personexpedition',)
|
||||
extra = 1
|
||||
|
||||
|
||||
#class LogbookEntryAdmin(VersionAdmin):
|
||||
class LogbookEntryAdmin(TroggleModelAdmin):
|
||||
prepopulated_fields = {'slug':("title",)}
|
||||
search_fields = ('title','expedition__year')
|
||||
date_heirarchy = ('date')
|
||||
inlines = (PersonTripInline, PhotoInline, QMsFoundInline)
|
||||
prepopulated_fields = {"slug": ("title",)}
|
||||
search_fields = ("title", "expedition__year")
|
||||
date_heirarchy = "date"
|
||||
# inlines = (PersonLogEntryInline, QMsFoundInline)
|
||||
|
||||
class Media:
|
||||
css = {
|
||||
"all": ("css/troggleadmin.css",)
|
||||
}
|
||||
actions=('export_logbook_entries_as_html','export_logbook_entries_as_txt')
|
||||
|
||||
def export_logbook_entries_as_html(modeladmin, request, queryset):
|
||||
response=downloadLogbook(request=request, queryset=queryset, extension='html')
|
||||
css = {"all": ("css/troggleadmin.css",)} # this does not exist
|
||||
|
||||
actions = ("export_logbook_entries_as_html", "export_logbook_entries_as_txt")
|
||||
|
||||
def export_logbook_entries_as_html(self, modeladmin, request, queryset):
|
||||
response = downloadLogbook(request=request, queryset=queryset, extension="html") # fails, no queryset
|
||||
return response
|
||||
|
||||
def export_logbook_entries_as_txt(modeladmin, request, queryset):
|
||||
response=downloadLogbook(request=request, queryset=queryset, extension='txt')
|
||||
|
||||
def export_logbook_entries_as_txt(self, modeladmin, request, queryset):
|
||||
response = downloadLogbook(request=request, queryset=queryset, extension="txt") # fails, no queryset
|
||||
return response
|
||||
|
||||
|
||||
@@ -89,70 +86,89 @@ class PersonExpeditionInline(admin.TabularInline):
|
||||
|
||||
|
||||
class PersonAdmin(TroggleModelAdmin):
|
||||
search_fields = ('first_name','last_name')
|
||||
search_fields = ("first_name", "last_name")
|
||||
inlines = (PersonExpeditionInline,)
|
||||
|
||||
|
||||
class QMAdmin(TroggleModelAdmin):
|
||||
search_fields = ('found_by__cave__kataster_number','number','found_by__date')
|
||||
list_display = ('__unicode__','grade','found_by','ticked_off_by')
|
||||
list_display_links = ('__unicode__',)
|
||||
list_editable = ('found_by','ticked_off_by','grade')
|
||||
list_per_page = 20
|
||||
raw_id_fields=('found_by','ticked_off_by')
|
||||
search_fields = ("number", "expoyear")
|
||||
list_display = ("__str__", "grade")
|
||||
list_display_links = ("__str__",)
|
||||
# list_editable = ("comment", "page_ref", "grade")
|
||||
# list_per_page = 20
|
||||
# raw_id_fields = ("found_by", "ticked_off_by")
|
||||
|
||||
|
||||
class PersonExpeditionAdmin(TroggleModelAdmin):
|
||||
search_fields = ('person__first_name','expedition__year')
|
||||
search_fields = ("person__first_name", "expedition__year")
|
||||
|
||||
|
||||
class CaveAdmin(TroggleModelAdmin):
|
||||
search_fields = ('official_name','kataster_number','unofficial_number')
|
||||
inlines = (OtherCaveInline,)
|
||||
search_fields = ("official_name", "kataster_number", "unofficial_number")
|
||||
extra = 4
|
||||
|
||||
|
||||
class EntranceAdmin(TroggleModelAdmin):
|
||||
search_fields = ('caveandentrance__cave__kataster_number',)
|
||||
search_fields = ("caveandentrance__cave__kataster_number",)
|
||||
|
||||
|
||||
class SurvexStationAdmin(TroggleModelAdmin):
|
||||
search_fields = ("name",)
|
||||
|
||||
|
||||
class SurvexFileAdmin(TroggleModelAdmin):
|
||||
search_fields = ("path",)
|
||||
|
||||
|
||||
class SurvexDirectoryAdmin(TroggleModelAdmin):
|
||||
search_fields = (
|
||||
"path",
|
||||
"survexdirectory",
|
||||
)
|
||||
|
||||
|
||||
class DrawingFileAdmin(TroggleModelAdmin):
|
||||
search_fields = ("dwgname",)
|
||||
|
||||
|
||||
class WalletAdmin(TroggleModelAdmin):
|
||||
search_fields = ("fpath",)
|
||||
|
||||
|
||||
admin.site.register(DPhoto)
|
||||
admin.site.register(Cave, CaveAdmin)
|
||||
admin.site.register(Area)
|
||||
#admin.site.register(OtherCaveName)
|
||||
admin.site.register(CaveAndEntrance)
|
||||
admin.site.register(NewSubCave)
|
||||
admin.site.register(CaveDescription)
|
||||
admin.site.register(Entrance, EntranceAdmin)
|
||||
admin.site.register(CaveSlug)
|
||||
admin.site.register(SurvexBlock, SurvexBlockAdmin)
|
||||
admin.site.register(DrawingFile, DrawingFileAdmin)
|
||||
admin.site.register(Expedition)
|
||||
admin.site.register(Person,PersonAdmin)
|
||||
admin.site.register(Person, PersonAdmin)
|
||||
admin.site.register(SurvexPersonRole)
|
||||
admin.site.register(PersonExpedition,PersonExpeditionAdmin)
|
||||
admin.site.register(SurvexDirectory, SurvexDirectoryAdmin)
|
||||
admin.site.register(SurvexFile, SurvexFileAdmin)
|
||||
admin.site.register(SurvexStation, SurvexStationAdmin)
|
||||
admin.site.register(PersonExpedition, PersonExpeditionAdmin)
|
||||
admin.site.register(LogbookEntry, LogbookEntryAdmin)
|
||||
#admin.site.register(PersonTrip)
|
||||
admin.site.register(QM, QMAdmin)
|
||||
admin.site.register(Survey, SurveyAdmin)
|
||||
admin.site.register(ScannedImage)
|
||||
admin.site.register(SurvexStation)
|
||||
|
||||
admin.site.register(SurvexScansFolder)
|
||||
admin.site.register(SurvexScanSingle)
|
||||
admin.site.register(Wallet, WalletAdmin)
|
||||
admin.site.register(SingleScan)
|
||||
admin.site.register(DataIssue)
|
||||
|
||||
|
||||
def export_as_json(modeladmin, request, queryset):
|
||||
response = HttpResponse(mimetype="text/json")
|
||||
response['Content-Disposition'] = 'attachment; filename=troggle_output.json'
|
||||
response = HttpResponse(content_type="text/json")
|
||||
response["Content-Disposition"] = "attachment; filename=troggle_output.json"
|
||||
serializers.serialize("json", queryset, stream=response)
|
||||
return response
|
||||
|
||||
|
||||
def export_as_xml(modeladmin, request, queryset):
|
||||
response = HttpResponse(mimetype="text/xml")
|
||||
response['Content-Disposition'] = 'attachment; filename=troggle_output.xml'
|
||||
response = HttpResponse(content_type="text/xml")
|
||||
response["Content-Disposition"] = "attachment; filename=troggle_output.xml"
|
||||
serializers.serialize("xml", queryset, stream=response)
|
||||
return response
|
||||
|
||||
|
||||
#admin.site.add_action(export_as_xml)
|
||||
#admin.site.add_action(export_as_json)
|
||||
admin.site.add_action(export_as_xml)
|
||||
admin.site.add_action(export_as_json)
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
from django.conf import settings
|
||||
from troggle.core.models import Expedition
|
||||
|
||||
from troggle.core.models.troggle import Expedition
|
||||
|
||||
"""This is the only troggle-specific 'context processor' that troggle uses
|
||||
in the processing of Django templates
|
||||
|
||||
This seems to mean that every page produced has bundled in its context the complete 'settings' and
|
||||
the expedition class object, so all templates can do queries on Expedition.
|
||||
https://betterprogramming.pub/django-quick-tips-context-processors-da74f887f1fc
|
||||
|
||||
If it is commented out, the logbookentry page goes crazy and the screws up all the site_media resultions for CSS file s!
|
||||
Seems to be necessary to make {{settings.MEDIA_URL}} work. Which is obvious in retrospect.
|
||||
|
||||
It is VITAL that no database operations are done in any context processor, see
|
||||
https://adamj.eu/tech/2023/03/23/django-context-processors-database-queries/
|
||||
"""
|
||||
|
||||
|
||||
def troggle_context(request):
|
||||
return { 'settings':settings, 'Expedition':Expedition }
|
||||
return {"settings": settings}
|
||||
# return {"settings": settings, "Expedition": Expedition}
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
import troggle.settings as settings
|
||||
import os
|
||||
import urllib
|
||||
|
||||
def urljoin(x, y): return x + "/" + y
|
||||
|
||||
def listdir(*path):
|
||||
try:
|
||||
strippedpath = [p for p in path if p]
|
||||
root = os.path.join(settings.FILES, *strippedpath )
|
||||
l = ""
|
||||
#l = root + "\n"
|
||||
isdir = os.path.isdir(root) #This seems to be required for os.path.isdir to work...
|
||||
#l += str(isdir) + "\n"
|
||||
for p in os.listdir(root):
|
||||
if os.path.isdir(os.path.join(root, p)):
|
||||
l += p + "/\n"
|
||||
|
||||
elif os.path.isfile(os.path.join(root, p)):
|
||||
l += p + "\n"
|
||||
#Ignore non-files and non-directories
|
||||
return l
|
||||
except:
|
||||
if strippedpath:
|
||||
c = reduce(urljoin, strippedpath)
|
||||
else:
|
||||
c = ""
|
||||
c = c.replace("#", "%23")
|
||||
print("FILE: ", settings.FILES + "listdir/" + c)
|
||||
return urllib.urlopen(settings.FILES + "listdir/" + c).read()
|
||||
|
||||
def dirsAsList(*path):
|
||||
return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] == "/"]
|
||||
|
||||
def filesAsList(*path):
|
||||
return [d for d in listdir(*path).split("\n") if len(d) > 0 and d[-1] != "/"]
|
||||
|
||||
def readFile(*path):
|
||||
try:
|
||||
f = open(os.path.join(settings.FILES, *path))
|
||||
except:
|
||||
f = urllib.urlopen(settings.FILES + "download/" + reduce(urljoin, path))
|
||||
return f.read()
|
||||
39
core/fixtures/auth_user_gussie.json
Normal file
39
core/fixtures/auth_user_gussie.json
Normal file
@@ -0,0 +1,39 @@
|
||||
[
|
||||
{"pk": 9010, "model": "auth.user", "fields":
|
||||
{"username": "expotest", "first_name": "ExpoTest", "last_name": "Caver", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expo@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},
|
||||
|
||||
{"pk": 9011, "model": "auth.user", "fields":
|
||||
{"username": "expotestadmin", "first_name": "ExpoTest", "last_name": "Admin", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expoadmin@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},
|
||||
|
||||
{"model": "auth.user", "pk": 8999, "fields":
|
||||
{
|
||||
"email": "philip.sargent+GFN@gmail.com",
|
||||
"first_name": "Gussie",
|
||||
"last_name": "Fink-Nottle",
|
||||
"id": 8999,
|
||||
"is_active": true,
|
||||
"is_staff": true,
|
||||
"is_superuser": true,
|
||||
"last_login": "2021-01-01 00:00:01+0100",
|
||||
"password": "pbkdf2_sha256$150000$EbI1VetXC8tM$pHb5Y7af/TCsNeD6H0EwGx4DWB7qyZyq1bUWKytuiTA=",
|
||||
"username": "gussie",
|
||||
"date_joined": "2021-01-01 00:00:00+0100"
|
||||
}},
|
||||
|
||||
{"pk": 9000, "model": "auth.user", "fields":
|
||||
{"username": "oofy", "first_name": "Oofy", "last_name": "Prosser", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-01-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+oofy@gmail.com", "date_joined": "2021-01-01 00:00:00+0100"}},
|
||||
|
||||
{"pk": 9001, "model": "auth.user", "fields":
|
||||
{"username": "stiffy", "first_name": "Stiffy", "last_name": "Byng", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+stiffy@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},
|
||||
|
||||
{"pk": 9002, "model": "auth.user", "fields":
|
||||
{"username": "bingo", "first_name": "Bingo", "last_name": "Little", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+bingo@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},
|
||||
|
||||
{"pk": 9003, "model": "auth.user", "fields":
|
||||
{"username": "spode", "first_name": "Roderick", "last_name": "Spode", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+spode@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},
|
||||
|
||||
{"pk": 9004, "model": "auth.user", "fields":
|
||||
{"username": "boko", "first_name": "Boko", "last_name": "Fittleworth", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+boko@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}}
|
||||
|
||||
]
|
||||
|
||||
8
core/fixtures/auth_users.json
Normal file
8
core/fixtures/auth_users.json
Normal file
@@ -0,0 +1,8 @@
|
||||
[
|
||||
{"pk": 9010, "model": "auth.user", "fields":
|
||||
{"username": "expotest", "first_name": "ExpoTest", "last_name": "Caver", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expo@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},
|
||||
|
||||
{"pk": 9011, "model": "auth.user", "fields":
|
||||
{"username": "expotestadmin", "first_name": "ExpoTest", "last_name": "Admin", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expoadmin@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}}
|
||||
]
|
||||
|
||||
292
core/fixtures/cuyc_basic_data.json
Normal file
292
core/fixtures/cuyc_basic_data.json
Normal file
@@ -0,0 +1,292 @@
|
||||
[
|
||||
{"pk": 1, "model": "club.boat", "fields":
|
||||
{"name": "Skylark", "cuy_boat": true, "berths": 8, "boat_type": "Beneteau First 40.7", "length": "41ft", "notes": "We bought her in June 2016 when she was based in Izola, Slovenia, then brought her home over the course of the 2016 Summer Programme."}},
|
||||
|
||||
|
||||
{"pk": 1, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Head of the Section: Overall responsibility for all the activities of CUY - authorises all activities, finances and external communication on behalf of the Club; Committee Management: Organisation of CUY Committee Meetings and Elections; Yacht Charter: Liaises with yacht charter companies to arrange yacht bookings for trips; Development: Organisation of long-term development plans for the Club;", "multiple": false, "title": "Commodore", "rank": 1, "short_description": "Chief", "committee_position": true, "club_email": "commodore@cuy.org.uk", "slug": "commodore"}},
|
||||
|
||||
{"pk": 2, "model": "club.clubrole", "fields":
|
||||
{"html_description": "House Officer Support: Authorizes the activities of all house officers (Purser, Social, Webmaster, Publicity and Sponsorship) and ensures they have details of their responsibilities and that they are properly informed and supported in thier positions. Works with the Rear-Commodore House on legal issues and documentation (see below). Manages Club Shop orders.", "multiple": false, "title": "Vice-Commodore House", "rank": 2, "short_description": "Blah", "committee_position": true, "club_email": "vc-house@cuy.org.uk", "slug": "vice-commodore-house"}},
|
||||
|
||||
{"pk": 3, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Sailing Officer Support: Authorizes the activities of all sailing officers (Training and Racing) and ensures they have details of their responsibilities and that they are properly informed and supported in thier positions. Event Management: Manages the CUY program of trips and events by liaising with skippers, charterers and the commodore. Ensures a proper and accurate record is kept of trip and event information both before and after the trip or event. Liases with the Rear-Commodore Sailing about upcoming trips to ensure they are viable and sucessful.", "multiple": false, "title": "Vice-Commodore Sailing", "rank": 2, "short_description": "Blah", "committee_position": true, "club_email": "vc-sailing@cuy.org.uk", "slug": "vice-commodore-sailing"}},
|
||||
|
||||
{"pk": 5, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Legal: Ensures CUY obtains and sustains insurance policies appropriate to Club activities. Monitors details of charter agreements. Manages contractual disputes with charterers. Liases with Club legal contacts. Documentation: Ensures CUY Regulations; CUY Crew Register; Safety Policy; House Style; Skipper Manual; Agenda and Minutes Committee Meetings and any other key club documentation stay up-to-date.\r\n\r\n", "multiple": false, "title": "Rear-Commodore House", "rank": 3, "short_description": "Blah", "committee_position": true, "club_email": "rc-house@cuy.org.uk", "slug": "rear-commodore-house"}},
|
||||
|
||||
{"pk": 6, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Works with VC-Training to ensure a workable programme of practical and theory courses is made for each term. Responsible for liaising with instructors to ensure courses run smoothly.", "multiple": false, "title": "Rear-Commodore Training", "rank": 3, "short_description": "Blah", "committee_position": true, "club_email": "rc-training@cuy.org.uk", "slug": "rear-commodore-training"}},
|
||||
|
||||
{"pk": 7, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Skipper Managament: Ensures skippers of upcoming trips are aware of standard club procedures detailed in the CUY Manual and that they have the necessary information and equipment. Ensures that the crew have completed Crew Registers and paid Membership Fees before going on trips. Ensures records are taken of travel arrangements to and from trip or event locations. Upon completion of trip ensures expenses and defect reports are collated.", "multiple": false, "title": "Rear-Commodore Sailing", "rank": 3, "short_description": "Blah", "committee_position": true, "club_email": "rc-sailing@cuy.org.uk", "slug": "rear-commodore-sailing"}},
|
||||
|
||||
{"pk": 8, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Financial management; processing all payments and receipts for activities and permenent funds. Preparing the Financial Statement for termly audit and end of year Summary of Accounts. Membership; management of membership in liasion with Trip/Event organisers, Rear-Commodore Sailing, and the DB Admin. Grants applications; preparing funding applications for the Sports and Societies syndicates, and other funding source that may be available. Spending plans & strategy; preparing and presenting to the Committee financial forecasts and strategies for the investment and long term financial future of the Club", "multiple": false, "title": "Purser", "rank": 4, "short_description": "Blah", "committee_position": true, "club_email": "purser@cuy.org.uk", "slug": "purser"}},
|
||||
|
||||
{"pk": 9, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Social programme; submission of dates for socials to the Vice-Commodore Sailing, and planning of socials, including end of term dinner. New & potential members introduction; acting at socials to welcome new & potential members and inform them about club activities.", "multiple": false, "title": "Social Officer", "rank": 5, "short_description": "Blah", "committee_position": true, "club_email": "social@cuy.org.uk", "slug": "social-officer"}},
|
||||
|
||||
{"pk": 10, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Organising RYA Practical Training courses.", "multiple": false, "title": "Practical Training Officer", "rank": 5, "short_description": "Blah", "committee_position": true, "club_email": "practical@cuy.org.uk", "slug": "practical-training-officer"}},
|
||||
|
||||
{"pk": 11, "model": "club.clubrole", "fields":
|
||||
{"html_description": "CUY Racing Squad training and development; improving racing knowledge and skills. Race selection & entry management. Varsity Yacht Race; organising an annual race with Oxford as part of an RORC/JOG or similar offshore/coastal/inshore race.", "multiple": false, "title": "Racing Officer", "rank": 5, "short_description": "Blah", "committee_position": true, "club_email": "racing@cuy.org.uk", "slug": "racing-officer"}},
|
||||
|
||||
{"pk": 13, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Webmaster; control and maintenance of style, scripts and code validity. Liasion with SRCF host; ensuring compliance with regulations and maintenance of filespace. DB Admin; development and administration of CUY Database and associated e-mail lists. Maintenance of Photos section of the website.", "multiple": false, "title": "Webmaster and Database Admin", "rank": 6, "short_description": "Blah", "committee_position": true, "club_email": "webgeek@cuy.org.uk", "slug": "webmaster-and-database-admin"}},
|
||||
|
||||
{"pk": 14, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Boat management. Is responsible for the general upkeep of CUY yachts so that they are ready and safe to be sailed. Ensures that the correct equipment and information on its use is onboard and in the correct locations. Also liaises with VC-Sailing in order to create a workable trip plan.", "multiple": false, "title": "Bosun", "rank": 4, "short_description": "Blah", "committee_position": true, "club_email": "bosun@cuy.org.uk", "slug": "bosun"}},
|
||||
|
||||
{"pk": 4, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Management and delegation of tasks to the Practical Training Officer and Theory Training Officer. Development and Evaluation of the CUY Training Scheme and courses run within the scheme. Ensuring compliance with CUY standards as set out in the training section of the CUY Manual. Training Programme; ensuring submission of dates to the Vice-Commodore Sailing for all training activities, with regard to the advice given in the Training section of the CUY Manual. Overseeing the editing and expanding the website training section.", "multiple": false, "title": "Vice-Commodore Training", "rank": 2, "short_description": "Blah", "committee_position": true, "club_email": "vc-training@cuy.org.uk", "slug": "vice-commodore-training"}},
|
||||
|
||||
{"pk": 15, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Management and delegation of tasks to the Practical Training Officer and Theory Training Officer. Development and Evaluation of the CUY Training Scheme and courses run within the scheme. Ensuring compliance with RYA and CUY standards as set out in the training section of the CUY Manual. Training Programme; ensuring submission of dates to the Vice-Commodore Sailing for all training activities, with regard to the advice given in the Training section of the CUY Manual. Overseeing the editing and expanding the website training section. Management of the RYA Practical and Shorebased training centres.", "multiple": false, "title": "RYA Principal", "rank": 4, "short_description": "Blah", "committee_position": true, "club_email": "rya-principal@cuy.org.uk", "slug": "rya-principal"}},
|
||||
|
||||
{"pk": 12, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Publicity: Publicity articles & campaigns; organising Freshers' Fair and Squash as well as ongoing publicity throughout the year. College Reps scheme; implementation and administration of College Reps scheme as a route of dissemination for publicity material and attracting new members. Sponsorship & funding in co-ordination with the rest of the CUY Committee", "multiple": false, "title": "Publicity and Sponsorship Officer", "rank": 6, "short_description": "Blah", "committee_position": true, "club_email": "sponsorship@cuy.org.uk", "slug": "publicity-and-sponsorship-officer"}},
|
||||
|
||||
{"pk": 16, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Blah", "multiple": true, "title": "Skipper", "rank": 8, "short_description": "Blah", "committee_position": true, "club_email": "", "slug": "skipper"}},
|
||||
|
||||
{"pk": 17, "model": "club.clubrole", "fields":
|
||||
{"html_description": "Blah", "multiple": true, "title": "Instructor", "rank": 7, "short_description": "Blah", "committee_position": true, "club_email": "instructors@cuy.org.uk", "slug": "instructors"}},
|
||||
|
||||
|
||||
{"pk": 5, "model": "club.eventtype", "fields":
|
||||
{"name": "Other", "default_role": 4,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "shorebased"}},
|
||||
|
||||
{"pk": 4, "model": "club.eventtype", "fields":
|
||||
{"name": "Theory Training", "default_role": 5,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "shorebased"}},
|
||||
|
||||
{"pk": 3, "model": "club.eventtype", "fields":
|
||||
{"name": "Practical Training", "default_role": 5,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "atsea"}},
|
||||
|
||||
{"pk": 2, "model": "club.eventtype", "fields":
|
||||
{"name": "Race", "default_role": 4,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "atsea"}},
|
||||
|
||||
{"pk": 6, "model": "club.eventtype", "fields":
|
||||
{"name": "Social", "default_role": null,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "social"}},
|
||||
|
||||
{"pk": 1, "model": "club.eventtype", "fields":
|
||||
{"name": "Cruising", "default_role": 4,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "atsea"}},
|
||||
|
||||
{"pk": 7, "model": "club.eventtype", "fields":
|
||||
{"name": "Trip", "default_role": 4,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "atsea"}},
|
||||
|
||||
{"pk": 8, "model": "club.eventtype", "fields":
|
||||
{"name": "Adventurous", "default_role": 4,
|
||||
"default_thumbnail": "images/HappySailing_square.jpeg",
|
||||
"event_type": "atsea"}},
|
||||
|
||||
|
||||
{"pk": 1, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "rya-start-yachting", "title": "RYA Start Yachting"}},
|
||||
|
||||
{"pk": 2, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "rya-day-skipper-theory", "title": "RYA Day Skipper Theory"}},
|
||||
|
||||
{"pk": 3, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "rya-day-skipper-practical", "title": "RYA Day Skipper Practical"}},
|
||||
|
||||
{"pk": 4, "model": "club.qualification", "fields":
|
||||
{"rya": false, "qualification_type": "", "slug": "vhf-radio-licence", "title": "VHF SRC Radio Licence"}},
|
||||
|
||||
{"pk": 5, "model": "club.qualification", "fields":
|
||||
{"rya": false, "qualification_type": "", "slug": "first-aid-certificate", "title": "First Aid Certificate",
|
||||
"expires": true, "length": 3}},
|
||||
|
||||
{"pk": 6, "model": "club.qualification", "fields":
|
||||
{"rya": false, "qualification_type": "", "slug": "cuy-first-mate", "title": "CUYC First Mate"}},
|
||||
|
||||
{"pk": 7, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "rya-mca-costal-skipper-theory", "title": "RYA Costal Skipper/Yachtmaster Theory"}},
|
||||
|
||||
{"pk": 8, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "rya-compcrew", "title": "RYA Competent Crew"}},
|
||||
|
||||
{"pk": 9, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "rya-costal-skipper-practical-course", "title": "RYA Costal Skipper Practical Course"}},
|
||||
|
||||
{"pk": 10, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "rya-mca-costal-skipper-certificate-of-competence", "title": "RYA / MCA Yachtmaster Costal Certificate of Competence"}},
|
||||
|
||||
{"pk": 11, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "rya-mca-yachtmaster-offshore-certificate-of-compet", "title": "RYA / MCA Yachtmaster Offshore Certificate of Competence"}},
|
||||
|
||||
{"pk": 12, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "rya-mca-yachtmaster-ocean-certificate-of-competenc", "title": "RYA / MCA Yachtmaster Ocean Certificate of Competence"}},
|
||||
|
||||
{"pk": 13, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "rya-diesel-engine-course", "title": "RYA Diesel Engine Course"}},
|
||||
|
||||
{"pk": 14, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "rya-radar-course", "title": "RYA Radar Course"}},
|
||||
|
||||
{"pk": 15, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "rya-sea-survival-course", "title": "RYA Sea Survival Course"}},
|
||||
|
||||
{"pk": 16, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "rya-yachtmaster-ocean-theory", "title": "RYA Yachtmaster Ocean Theory"}},
|
||||
|
||||
{"pk": 17, "model": "club.qualification", "fields":
|
||||
{"rya": false, "qualification_type": "", "slug": "cuy-skipper", "title": "CUYC Skipper"}},
|
||||
|
||||
{"pk": 18, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "cuy-examiner-yacht", "title": "RYA Yachtmaster Examiner",
|
||||
"expires": false}},
|
||||
|
||||
{"pk": 19, "model": "club.qualification", "fields":
|
||||
{"rya": false, "qualification_type": "", "slug": "cuy-sail-trim", "title": "CUYC Sail Trim"}},
|
||||
|
||||
{"pk": 20, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-commercial", "title": "RYA Commercial Endorsement",
|
||||
"expires": true, "length": 5}},
|
||||
|
||||
{"pk": 21, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-ppr-course", "title": "RYA Professional Practices and Responsibilities"}},
|
||||
|
||||
{"pk": 22, "model": "club.qualification", "fields":
|
||||
{"rya": false, "qualification_type": "", "slug": "cuy-ml5", "title": "MCA ML5 Medical Certificate",
|
||||
"expires": true, "length": 5}},
|
||||
|
||||
{"pk": 23, "model": "club.qualification", "fields":
|
||||
{"rya": false, "qualification_type": "", "slug": "cuy-eng1", "title": "MCA ENG.1 Medical Certificate",
|
||||
"expires": true, "length": 2}},
|
||||
|
||||
{"pk": 24, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "cuy-instruct-cruise", "title": "RYA Cruising Instructor",
|
||||
"expires": true, "length": 5}},
|
||||
|
||||
{"pk": 25, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "Practical", "slug": "cuy-instruct-yacht", "title": "RYA Yachtmaster Instructor",
|
||||
"expires": true, "length": 5}},
|
||||
|
||||
{"pk": 26, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-instruct-shore", "title": "RYA Shorebased Instructor",
|
||||
"expires": false}},
|
||||
|
||||
{"pk": 27, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-instruct-diesel", "title": "RYA Diesel Engine Instructor",
|
||||
"expires": false}},
|
||||
|
||||
{"pk": 28, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-instruct-fistaid", "title": "RYA First Aid Instructor",
|
||||
"expires": false}},
|
||||
|
||||
{"pk": 29, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-instruct-survival", "title": "RYA Sea Survival Instructor",
|
||||
"expires": false}},
|
||||
|
||||
{"pk": 30, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-instruct-radar", "title": "RYA Radar Instructor",
|
||||
"expires": false}},
|
||||
|
||||
{"pk": 31, "model": "club.qualification", "fields":
|
||||
{"rya": true, "qualification_type": "", "slug": "cuy-instruct-vhf", "title": "RYA VHF Instructor",
|
||||
"expires": false}},
|
||||
|
||||
|
||||
{"pk": 1, "model": "club.role", "fields":
|
||||
{"event_types": [5, 3, 2, 1], "name": "Skipper", "description": "Skipper"}},
|
||||
|
||||
{"pk": 2, "model": "club.role", "fields":
|
||||
{"event_types": [5, 3, 2, 1], "name": "First Mate", "description": "First Mate"}},
|
||||
|
||||
{"pk": 3, "model": "club.role", "fields":
|
||||
{"event_types": [5, 3, 2, 1], "name": "Watch Leader", "description": "Watch leader"}},
|
||||
|
||||
{"pk": 4, "model": "club.role", "fields":
|
||||
{"event_types": [5, 3, 2, 1], "name": "Crew", "description": "crew"}},
|
||||
|
||||
{"pk": 5, "model": "club.role", "fields":
|
||||
{"event_types": [5, 4, 3, 2], "name": "Student", "description": "student"}},
|
||||
|
||||
{"pk": 6, "model": "club.role", "fields":
|
||||
{"event_types": [5, 4, 3, 2], "name": "Instructor", "description": "Instructor"}},
|
||||
|
||||
{"pk": 7, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2, 1], "name": "Helm", "description": "Helm"}},
|
||||
|
||||
{"pk": 8, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2], "name": "Bow", "description": "Bowman"}},
|
||||
|
||||
{"pk": 9, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2], "name": "Mast", "description": "Mastman"}},
|
||||
|
||||
{"pk": 10, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2], "name": "Pit", "description": "Pit."}},
|
||||
|
||||
{"pk": 11, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2], "name": "Trim", "description": "Trim"}},
|
||||
|
||||
{"pk": 12, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2], "name": "Main Trim", "description": "Main trim."}},
|
||||
|
||||
{"pk": 13, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2], "name": "Navigator", "description": "Navigator"}},
|
||||
|
||||
{"pk": 14, "model": "club.role", "fields":
|
||||
{"event_types": [5, 2], "name": "Tactics", "description": "Tactics"}},
|
||||
|
||||
{"pk": 15, "model": "club.role", "fields":
|
||||
{"event_types": [5, 3, 2, 1, 7, 8], "name": "Second Mate", "description": "Second Mate is usually third in charge, after the Skipper and the First Mate."}},
|
||||
|
||||
{"pk": 16, "model": "club.role", "fields":
|
||||
{"event_types": [6], "name": "Drinker", "description": "Someone who will drink."}},
|
||||
|
||||
{"pk": 17, "model": "club.role", "fields":
|
||||
{"event_types": [8, 1], "name": "Cook", "description": "Cooks food."}},
|
||||
|
||||
|
||||
{"pk": 3, "model": "club.samplewebpage", "fields":
|
||||
{"markup": "<h1>(% event_name %)</h1>\r\n\r\n\r\nBlah Practical Training trip example webpage", "slug": "practical-training-index", "description": "Default page for a practical training trip.", "title": "Practical Training index"}},
|
||||
|
||||
{"pk": 4, "model": "club.samplewebpage", "fields":
|
||||
{"markup": "<h1>(% event_name %)</h1>\r\n\r\nTheory trip\r\n example webpage", "slug": "theory-training-index", "description": "ehcr", "title": "Theory Training Index"}},
|
||||
|
||||
{"pk": 5, "model": "club.samplewebpage", "fields":
|
||||
{"markup": "<h1>(% event_name %)</h1>\r\n\r\n\r\nBlah Social example webpage", "slug": "social-index", "description": "Balh", "title": "Social Index"}},
|
||||
|
||||
{"pk": 6, "model": "club.samplewebpage", "fields":
|
||||
{"markup": "<h1> Kit Page</h1>\r\n example webpage", "slug": "kit", "description": "Kit template page", "title": "Kit"}},
|
||||
|
||||
{"pk": 7, "model": "club.samplewebpage", "fields":
|
||||
{"markup": "<h1>Crew!</h1>\r\n\r\n example webpage", "slug": "crew", "description": "Crew page", "title": "Crew"}},
|
||||
|
||||
{"pk": 2, "model": "club.samplewebpage", "fields":
|
||||
{"markup": "<h1>(% event_name %)</h1>\r\n\r\n\r\nBlah Racing trip example webpage", "slug": "racing-index", "description": "Default Race trip index page.", "title": "Racing Index"}},
|
||||
|
||||
{"pk": 1, "model": "club.samplewebpage", "fields":
|
||||
{"markup": "<h1>(% event_name%)<h1>\r\n\r\nBlah blah Cruising trip example webpage blah.", "slug": "cruising-index", "description": "Default cruising trip index page.", "title": "Cruising Index"}},
|
||||
|
||||
|
||||
{"pk": 1, "model": "photologue.photosize", "fields":
|
||||
{"name": "thumbnail", "watermark": null, "increment_count": false, "effect": null, "crop": true, "height": 75, "width": 75, "upscale": false, "pre_cache": true, "quality": 90}},
|
||||
|
||||
{"pk": 2, "model": "photologue.photosize", "fields":
|
||||
{"name": "small", "watermark": null, "increment_count": false, "effect": null, "crop": false, "height": 150, "width": 150, "upscale": false, "pre_cache": true, "quality": 90}},
|
||||
|
||||
{"pk": 3, "model": "photologue.photosize", "fields":
|
||||
{"name": "display", "watermark": null, "increment_count": true, "effect": null, "crop": false, "height": 500, "width": 500, "upscale": false, "pre_cache": false, "quality": 90}},
|
||||
|
||||
{"pk": 4, "model": "photologue.photosize", "fields":
|
||||
{"name": "large", "watermark": null, "increment_count": true, "effect": null, "crop": false, "height": 1000, "width": 1000, "upscale": false, "pre_cache": false, "quality": 90}}
|
||||
|
||||
]
|
||||
500
core/fixtures/cuyc_test_data.json
Normal file
500
core/fixtures/cuyc_test_data.json
Normal file
@@ -0,0 +1,500 @@
|
||||
[
|
||||
{"model": "club.boat", "pk": 8000, "fields":
|
||||
{
|
||||
"berths": 4,
|
||||
"boat_type": null,
|
||||
"cuy_boat": 0,
|
||||
"id": 8000,
|
||||
"length": "35",
|
||||
"name": "Goblin",
|
||||
"notes": "We Didn't Mean to Go to Sea is the seventh book in Arthur Ransome's Swallows and Amazons series of children's books.\r\n\r\nThe book features a small sailing cutter, the Goblin, which is almost identical to Ransome's own boat Nancy Blackett. Ransome sailed Nancy Blackett across to Flushing by the same route as part of his research for the book. The navigational detail and the geography are both correct for the period when the story is set, unlike other books in the series."
|
||||
}},
|
||||
{"model": "club.boat", "pk": 8001, "fields":
|
||||
{
|
||||
"berths": 0,
|
||||
"boat_type": "dinghy",
|
||||
"cuy_boat": 0,
|
||||
"id": 8001,
|
||||
"length": "13",
|
||||
"name": "Swallow",
|
||||
"notes": "Ransome and Ernest Altounyan bought two small dinghies called Swallow and Mavis. Ransome kept Swallow until he sold it a number of years later."
|
||||
}},
|
||||
{"model": "club.boat", "pk": 8002, "fields":
|
||||
{
|
||||
"berths": 0,
|
||||
"boat_type": "dinghy",
|
||||
"cuy_boat": 0,
|
||||
"id": 8002,
|
||||
"length": "13",
|
||||
"name": "Amazon",
|
||||
"notes": "the Blackett children (Nancy and Peggy), who sail a dinghy named Amazon. \r\n\r\nSwallows and Amazons contains no sorcery; its plot is plausible, its characters ordinary children. Therein lies its enduring magic. A celebration of friendship, imagination, fair play, and exploration, Swallows and Amazons inspires even the most landlocked kid to dream of messing about in boats, building fires, camping out and navigating by the stars"
|
||||
}},
|
||||
|
||||
{"model": "club.webpagecategory", "pk": 8000, "fields":
|
||||
{
|
||||
"id": 8000,
|
||||
"name": "Yachts",
|
||||
"slug": "yachts"
|
||||
}},
|
||||
|
||||
{"model": "club.webpagecategory", "pk": 8001, "fields":
|
||||
{
|
||||
"id": 8001,
|
||||
"name": "Club",
|
||||
"slug": "club"
|
||||
}},
|
||||
|
||||
{"model": "club.webpagecategory", "pk": 8002, "fields":
|
||||
{
|
||||
"id": 8002,
|
||||
"name": "Summer",
|
||||
"slug": "summer"
|
||||
}},
|
||||
|
||||
{"model": "club.webpagecategory", "pk": 8002, "fields":
|
||||
{
|
||||
"id": 8003,
|
||||
"name": "Sailing",
|
||||
"slug": "sailing"
|
||||
}},
|
||||
|
||||
{"model": "club.webpagecategory_photos", "pk": 8000, "fields":
|
||||
{
|
||||
"clubphoto_id": 7000,
|
||||
"id": 5000,
|
||||
"webpagecategory_id": 8000
|
||||
}},
|
||||
{"model": "club.clubphoto", "pk": 7000, "fields":
|
||||
{
|
||||
"id": 7000,
|
||||
"name": "IRPCS 4.4",
|
||||
"num_views": 0,
|
||||
"origional_image": "images/training/exams/IRPCS-4-4.png"
|
||||
}},
|
||||
{"model": "club.webpage", "pk": 9000, "fields":
|
||||
{
|
||||
"category_id": 8000,
|
||||
"description": "Current Yacht",
|
||||
"edited": 1,
|
||||
"event_id": null,
|
||||
"id": 9000,
|
||||
"index": 1,
|
||||
"markup": "<h1>Skylark</h1>\r\n<p><strong> \r\n<table border=\"0\">\r\n<tbody>\r\n<tr>\r\n<td>\r\n<p><strong>Skylark, a Beneteau First 40.7, is our main and largest club yacht. </strong>We bought her in June 2016 when she was based in Izola, Slovenia, then brought her home over the course of the 2016 Summer Programme. She's been to Croatia, Greece, Italy, Spain and France on the way home - along with countless other stops along the way.</p>\r\n<p>Since arriving in the UK, she's spent time on the East and South coasts, pottering round the Solent or across the Channel, while Summer Programmes have taken her to the Norwegian Fjords, round the West Coast of Ireland, and all the way up to the Faeroes and Shetland.</p><img src='/site-media/images/training/exams/IRPCS-4-4.png'>",
|
||||
"ordering": 10,
|
||||
"slug": "yacht1",
|
||||
"title": "Skylark Yacht"
|
||||
}},
|
||||
|
||||
{"model": "club.webpage", "pk": 9001, "fields":
|
||||
{ "category_id": 8001,
|
||||
"description": "Safeguarding Policy",
|
||||
"edited": 1,
|
||||
"event_id": null,
|
||||
"id": 9001,
|
||||
"index": 1,
|
||||
"markup": "<h1>Safeguarding Policy</h1><p>Content is here in the main backup database</p>",
|
||||
"ordering": 10,
|
||||
"slug": "safeguarding-policy",
|
||||
"title": "Safeguarding Policy"
|
||||
}},
|
||||
{"model": "club.webpage", "pk": 9002, "fields":
|
||||
{ "category_id": 8001,
|
||||
"description": "Complaints",
|
||||
"edited": 1,
|
||||
"event_id": null,
|
||||
"id": 9002,
|
||||
"index": 1,
|
||||
"markup": "<h1>Complaints</h1><p>Content is here in the main backup database</p>",
|
||||
"ordering": 10,
|
||||
"slug": "complaints",
|
||||
"title": "Complaints"
|
||||
}},
|
||||
{"model": "club.webpage", "pk": 9003, "fields":
|
||||
{ "category_id": 8001,
|
||||
"description": "Other Sailing Opportunities in Cambridge",
|
||||
"edited": 1,
|
||||
"event_id": null,
|
||||
"id": 9003,
|
||||
"index": 1,
|
||||
"markup": "<h1>Other Sailing Opportunities in Cambridge</h1><p>Content is here in the main backup database</p>",
|
||||
"ordering": 10,
|
||||
"slug": "other-sailing-in-camb",
|
||||
"title": "Other Sailing Opportunities in Cambridge"
|
||||
}},
|
||||
{"model": "club.webpage", "pk": 9004, "fields":
|
||||
{ "category_id": 8001,
|
||||
"description": "CUYC Privacy Notice",
|
||||
"edited": 1,
|
||||
"event_id": null,
|
||||
"id": 9004,
|
||||
"index": 1,
|
||||
"markup": "<h1>CUYC Privacy Notice</h1><p>Content is here in the main backup database</p>",
|
||||
"ordering": 10,
|
||||
"slug": "privacy-notice",
|
||||
"title": "CUYC Privacy Notice"
|
||||
}},
|
||||
{"model": "club.webpage", "pk": 9005, "fields":
|
||||
{ "category_id": 8003,
|
||||
"description": "FAQ",
|
||||
"edited": 1,
|
||||
"event_id": null,
|
||||
"id": 9005,
|
||||
"index": 0,
|
||||
"markup": "<h1>FAQ</h1><p>Content is here in the main backup database</p>",
|
||||
"ordering": 10,
|
||||
"slug": "faq",
|
||||
"title": "FAQ" }},
|
||||
{"model": "club.webpage", "pk": 9006, "fields":
|
||||
{ "category_id": 8002,
|
||||
"description": "Summer",
|
||||
"edited": 1,
|
||||
"event_id": null,
|
||||
"id": 9006,
|
||||
"index": 1,
|
||||
"markup": "<h1>Summer</h1><p>Content is here in the main backup database</p>",
|
||||
"ordering": 10,
|
||||
"slug": "summer",
|
||||
"title": "Summer"
|
||||
}},
|
||||
|
||||
|
||||
{"pk": 9000, "model": "auth.user", "fields":
|
||||
{"username": "oofy", "first_name": "Oofy", "last_name": "Prosser", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-01-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+oofy@gmail.com", "date_joined": "2021-01-01 00:00:00"}},
|
||||
|
||||
{"pk": 9001, "model": "auth.user", "fields":
|
||||
{"username": "stiffy", "first_name": "Stiffy", "last_name": "Byng", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+stiffy@gmail.com", "date_joined": "2021-02-01 00:00:00"}},
|
||||
|
||||
{"pk": 9002, "model": "auth.user", "fields":
|
||||
{"username": "bingo", "first_name": "Bingo", "last_name": "Little", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+bingo@gmail.com", "date_joined": "2021-02-01 00:00:00"}},
|
||||
|
||||
{"pk": 9003, "model": "auth.user", "fields":
|
||||
{"username": "spode", "first_name": "Roderick", "last_name": "Spode", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+spode@gmail.com", "date_joined": "2021-02-01 00:00:00"}},
|
||||
|
||||
{"pk": 9004, "model": "auth.user", "fields":
|
||||
{"username": "boko", "first_name": "Boko", "last_name": "Fittleworth", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+boko@gmail.com", "date_joined": "2021-02-01 00:00:00"}},
|
||||
|
||||
|
||||
{"model": "club.member", "pk": 9000, "fields":
|
||||
{"user": 9000, "title": "Millionaire", "email": "philip.sargent+oofy@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "senior", "affiliation": "", "affiliation_other": null, "portrait": "", "committee_email_prefix": "oofy", "bio": "Alexander Charles 'Oofy' Prosser is the richest member of the Drones Club, he is also a friend of Bertie Wooster.", "credit_rating": "ok", "crsid": null}},
|
||||
|
||||
{"model": "club.member", "pk": 9001, "fields":
|
||||
{"user": 9001, "title": "Niece and ward of Sir Watkyn Bassett", "email": "philip.sargent+stiffy@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "affiliate", "affiliation": "student", "affiliation_other": null, "portrait": "", "committee_email_prefix": "stiffy", "bio": "Stephanie 'Stiffy' Byng is the niece and ward of Sir Watkyn Bassett, she initially lives with him in Totleigh Towers. She is short and has blue eyes. She wears a wind-swept hairstyle, and has an Aberdeen terrier named Bartholomew. Stiffy often gets bright ideas that end up making trouble for others, and she is not above using blackmail to induce Bertie Wooster to do errands for her.", "credit_rating": "good", "crsid": null}},
|
||||
|
||||
{"model": "club.member", "pk": 9002, "fields":
|
||||
{"user": 9002, "title": "Described as long and thin", "email": "philip.sargent+bingo@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "senior", "affiliation": "unknown", "affiliation_other": null, "portrait": "", "committee_email_prefix": "bingo", "bio": "Bingo, who has an impulsive and romantic nature, falls in love with numerous women in quick succession, generally pursuing an absurd scheme to woo his latest love interest and invariably causing problems for his pal Bertie", "credit_rating": "ok", "crsid": null}},
|
||||
|
||||
{"model": "club.member", "pk": 9003, "fields":
|
||||
{"user": 9003, "title": "Dictator", "email": "philip.sargent+spode@gmail.com", "member_state": "active", "nice": "bad", "nice_ref": "fascist tendences", "member_type": "unknown", "affiliation": "external", "affiliation_other": null, "portrait": "", "committee_email_prefix": "spode", "bio": "The leader of a fascist group in London called the Saviours of Britain, also known as the Black Shorts.", "credit_rating": "good", "crsid": null}},
|
||||
|
||||
{"model": "club.member", "pk": 9004, "fields":
|
||||
{"user": 9004, "title": "An author with a unique dress sense", "email": "philip.sargent+boko@gmail.com", "member_state": "active", "nice": "nice", "nice_ref": null, "member_type": "senior", "affiliation": "postdoc", "affiliation_other": null, "portrait": "", "committee_email_prefix": "boko", "bio": "According to Bertie, after Jeeves first saw him, Jeeves winced and tottered off to the kitchen, probably to pull himself together with cooking sherry. Boko is engaged to Zenobia 'Nobby' Hopwood", "credit_rating": "ok", "crsid": null}},
|
||||
|
||||
|
||||
{"model": "club.article", "pk": 9000, "fields":
|
||||
{"title": "Blood orange and Campari steamed pudding", "publish": "2021-02-01 00:00:00", "hide": false,
|
||||
"author": 9000, "thumbnail": "images/training/exams/IRPCS-3-6.png", "slug":"blood_orange_campari",
|
||||
"short_summary": "A recipe for a sharp and delicious pudding",
|
||||
"tease": "Put the orange segments and pomegranate seeds in a bowl with the golden syrup, Campari and gin",
|
||||
"body": "This updated take on the traditional steamed pudding stars blood oranges and Campari. It can even be cooked in the microwave for a quick and easy hack. Serve with proper custard."}},
|
||||
|
||||
{"model": "club.article", "pk": 9001, "fields":
|
||||
{"title": "Orange-scented brioche pudding", "publish": "2021-02-01 00:00:00", "hide": false,
|
||||
"author": 9001, "thumbnail": "images/training/exams/IRPCS-3-5.png", "slug":"orange_brioche",
|
||||
"short_summary": "A fragrant bread and butter pudding.",
|
||||
"tease": "Put the sultanas and Grand Marnier into a small saucepan, bring to the boil and simmer",
|
||||
"body": "An old-fashioned bread and butter pudding with a fragrant flourish. You can get ready-sliced long brioche loaves, which makes life simpler, but if you need to get out a bread knife yourself, just try to slice thinly. Any good unchunky marmalade would do. I think this is better warm rather than hot straight from the oven."}},
|
||||
|
||||
{"model": "club.article", "pk": 9002, "fields":
|
||||
{"title": "Upside-down orange pudding", "publish": "2021-02-01 00:00:00", "hide": true,
|
||||
"author": 9002, "thumbnail": "images/training/exams/IRPCS-3-5.png", "slug":"upside_orange",
|
||||
"short_summary": "Very yummy.",
|
||||
"tease": "Yum",
|
||||
"body": "If you find puddings a bit heavy, you'll love this light upside-down pudding. And it's easy to make too."}},
|
||||
|
||||
{"model": "club.article", "pk": 9003, "fields":
|
||||
{"title": "Hot Citrus Pudding", "publish": "2021-02-01 00:00:00", "hide": false,
|
||||
"author": 9001, "thumbnail": "images/training/exams/IRPCS-3-6.png", "slug":"hot_citrus",
|
||||
"short_summary": "Although this pudding is served hot, it is just as nice cold. ",
|
||||
"tease": "Mind you, I doubt if there will be any left over.",
|
||||
"body": "There are two main types of oranges: sweet oranges and bitter (Seville) oranges. The former can be thick- or thin- skinned, with or without seeds, and has sweet-tasting orange or red-flecked flesh. Bitter oranges have aromatic dimpled skin with very bitter pith and very sour, pale-orange flesh. They always contain seeds."}},
|
||||
|
||||
{"model": "club.article", "pk": 9004, "fields":
|
||||
{"title": "Self-saucing Jaffa pudding", "publish": "2021-02-01 00:00:00", "hide": false,
|
||||
"author": 9001, "thumbnail": "images/training/exams/IRPCS-4-1.png", "slug":"jaffa_saucing",
|
||||
"short_summary": "An intense chocolate orange sponge bake. ",
|
||||
"tease": "Yum. This intense chocolate orange sponge bake with thick sauce is about as indulgent as a good pudding gets.",
|
||||
"body": "Mix ½ pint boiling water with sugar and cocoa then pour this over the batter. Return the pot to the slow cooker base, cover and cook on High for 3 hours until firm and risen."}},
|
||||
|
||||
{"model": "club.article", "pk": 9005, "fields":
|
||||
{"title": "Terry's Chocolate Orange Melt In The Middle Pudding", "publish": "2021-02-01 00:00:00", "hide": false,
|
||||
"author": 9001, "thumbnail": "images/training/exams/IRPCS-4-2.png", "slug":"chocolate_orange",
|
||||
"short_summary": "If you are fan of Chocolate Orange this is the pud for you.",
|
||||
"tease": "Yum. a beautifully light chocolate sponge pudding.",
|
||||
"body": "This beautifully light chocolate sponge pudding is encased around a whole Terry's Chocolate Orange and when served straight from the oven will create a gooey melt in the middle chocolate centre. This pudding is a great alternative to the traditional Christmas pudding or a deliciously indulgent finale to a weekend roast with the family"}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9000, "fields":
|
||||
{"member": 9000, "claim_date": "2021-02-01 00:00:01", "claim": "alum", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9001, "fields":
|
||||
{"member": 9001, "claim_date": "2021-02-01 00:00:01", "claim": "affiliate", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9002, "fields":
|
||||
{"member": 9002, "claim_date": "2021-02-01 00:00:01", "claim": "senior", "confirmed": true, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9003, "fields":
|
||||
{"member": 9003, "claim_date": "2021-02-01 00:00:01", "claim": "unknown", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
{"model": "club.affiliationcheck", "pk": 9004, "fields":
|
||||
{"member": 9004, "claim_date": "2021-02-01 00:00:01", "claim": "senior", "confirmed": false, "confirmation_type": null, "confirmed_by": null, "confirmed_date": null}},
|
||||
|
||||
|
||||
{"model": "club.elected", "pk": 5000, "fields":
|
||||
{"member": 9000, "elected_until": "", "club_role": 9000
|
||||
}},
|
||||
|
||||
{"model": "club.elected", "pk": 5001, "fields":
|
||||
{"member": 9001, "elected_until": "", "club_role": 16
|
||||
}},
|
||||
|
||||
{"model": "club.elected", "pk": 5002, "fields":
|
||||
{"member": 9001, "elected_until": "", "club_role": 17
|
||||
}},
|
||||
|
||||
|
||||
{"model": "club.award", "pk": 6000, "fields":
|
||||
{"member": 9001, "award_date": "2000-01-01", "qualification": 11
|
||||
}},
|
||||
|
||||
{"model": "club.award", "pk": 6001, "fields":
|
||||
{"member": 9002, "award_date": "2000-01-01", "qualification": 11
|
||||
}},
|
||||
|
||||
{"model": "club.award", "pk": 6002, "fields":
|
||||
{"member": 9004, "award_date": "2000-01-01", "qualification": 3
|
||||
}},
|
||||
|
||||
{"model": "club.award", "pk": 6003, "fields":
|
||||
{"member": 9000, "award_date": "2019-03-10", "qualification": 5
|
||||
}},
|
||||
|
||||
{"model": "club.clubrole", "pk": 9000, "fields":
|
||||
{"title": "Drunken sailor", "slug": "drunk_sailor", "rank": 100, "multiple": true, "club_email": "", "short_description": "Traditional crew role", "html_description": "In the scuppers, early in the morning.", "committee_position": false, "division": null}},
|
||||
|
||||
|
||||
{"model": "club.crewregister", "pk": 10000, "fields":
|
||||
{"member": 9000,
|
||||
"encoded": true,
|
||||
"dob": "1920-02-01",
|
||||
"gender": "M",
|
||||
"cambridge_address": "The Drones Club, London",
|
||||
"vacation_landline": "01632 960374",
|
||||
"kin1_name": "Barmy Fotheringay-Phipps ",
|
||||
"kin1_address": "The Drones Club, London",
|
||||
"kin1_phone": "01632 960620",
|
||||
"log": 20,
|
||||
"days": 3,
|
||||
"seasickness": "severe",
|
||||
"can_swim": true,
|
||||
"accepted_conditions": true,
|
||||
"checked_up_to_date": true,
|
||||
"checked_date": "2021-02-01 00:00:02"
|
||||
}},
|
||||
|
||||
{"model": "club.crewregister", "pk": 10001, "fields":
|
||||
{"member": 9001,
|
||||
"encoded": true,
|
||||
"dob": "1920-02-01",
|
||||
"gender": "F",
|
||||
"cambridge_address": "Totleigh Towers",
|
||||
"vacation_landline": "01223 496 0551",
|
||||
"kin1_name": "Sir Watkyn Bassett",
|
||||
"kin1_address": "Totleigh Towers. (All this detail is because there a minimum set of fields to be completed.)",
|
||||
"kin1_phone": "01223 496 0551",
|
||||
"log": 450,
|
||||
"days": 45,
|
||||
"seasickness": "mild",
|
||||
"can_swim": true,
|
||||
"accepted_conditions": true,
|
||||
"checked_up_to_date": true,
|
||||
"checked_date": "2021-02-01 00:00:02"
|
||||
}},
|
||||
|
||||
|
||||
{"model": "club.event", "pk": 20000, "fields":
|
||||
{"name": "Spring in the Arctic",
|
||||
"slug": "spring-in-the-arctic",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9001, "shore_contact": 9002,
|
||||
"start_date": "2031-03-01 00:00:00",
|
||||
"end_date": "2031-03-03 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision1.png",
|
||||
"photos": [7000],
|
||||
"spaces": 10, "boats": [8001],
|
||||
"short_summary": "A wonderfully refreshing trip among the ice floes.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20000, "fields": {
|
||||
"event": 20000,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20001, "fields":
|
||||
{"name": "Spring in the Med",
|
||||
"slug": "spring-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-03-11 00:00:00",
|
||||
"end_date": "2031-03-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [7000],
|
||||
"spaces": 8, "boats": [8001],
|
||||
"short_summary": "A joyful celebration of spring flowers in the Cylades.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20001, "fields": {
|
||||
"event": 20001,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20002, "fields":
|
||||
{"name": "Spring in the Solent",
|
||||
"slug": "spring-in-the-solent",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-03-21 00:00:00",
|
||||
"end_date": "2031-03-23 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision3.png",
|
||||
"photos": [7000],
|
||||
"spaces": 8, "boats": [8001],
|
||||
"short_summary": "A rainy and blustery wet week discovering how to do tidal calculations at night.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20002, "fields": {
|
||||
"event": 20002,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20003, "fields":
|
||||
{"name": "Early Summer in the Med",
|
||||
"slug": "early-summer-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-05-11 00:00:00",
|
||||
"end_date": "2031-06-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision1.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "Sheer hedonism in the Cylades.",
|
||||
"summary": "This is going to be the most amazing trip: a flotilla of joyfulness."}},
|
||||
{"model": "club.eventsettings", "pk": 20003, "fields": {
|
||||
"event": 20003,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20004, "fields":
|
||||
{"name": "Summer in the Med",
|
||||
"slug": "summer-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-06-11 00:00:00",
|
||||
"end_date": "2031-07-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "The Dodecanese is spectacularly beautiful at this time of year.",
|
||||
"summary": "This is going to be the most amazing trip."}},
|
||||
{"model": "club.eventsettings", "pk": 20004, "fields": {
|
||||
"event": 20004,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20005, "fields":
|
||||
{"name": "High Summer in the Med",
|
||||
"slug": "high-summer-in-the-med",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-07-11 00:00:00",
|
||||
"end_date": "2031-08-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision3.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "The Saronic Gulf is busy and packed at this time of year.",
|
||||
"summary": "This is going to be the most amazing trip. Party, party, party!"}},
|
||||
{"model": "club.eventsettings", "pk": 20005, "fields": {
|
||||
"event": 20005,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20006, "fields":
|
||||
{"name": "High Summer in the Irish Sea",
|
||||
"slug": "high-summer-in-the-irish",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2031-07-11 00:00:00",
|
||||
"end_date": "2031-08-13 00:00:00",
|
||||
"added_date": "2021-02-01 12:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [7000],
|
||||
"spaces": 18, "boats": [8001],
|
||||
"short_summary": "The Irish Sea is is wonderful at this time of year.",
|
||||
"summary": "Welsh and Irush coasts, Manx beer."}},
|
||||
{"model": "club.eventsettings", "pk": 20006, "fields": {
|
||||
"event": 20006,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
{"model": "club.event", "pk": 20007, "fields":
|
||||
{"name": "RYA First Aid course",
|
||||
"slug": "rya-first-aid-2019",
|
||||
"state": "public",
|
||||
"event_type": 1,
|
||||
"organiser": 9004, "shore_contact": 9001,
|
||||
"start_date": "2019-03-10 00:00:00",
|
||||
"end_date": "2019-03-10 00:00:00",
|
||||
"added_date": "2019-03-10 00:00:00",
|
||||
"modified_date": "2021-02-01 13:00:00",
|
||||
"thumbnail": "images/training/exams/collision2.png",
|
||||
"photos": [],
|
||||
"spaces": 12, "boats": [],
|
||||
"short_summary": "A one-day RYA First Aid Course",
|
||||
"summary": "A First Aid certificate is a requirement for candidates for the RYA Yachtmaster Exams."}},
|
||||
|
||||
{"model": "club.eventsettings", "pk": 20006, "fields": {
|
||||
"event": 20006,
|
||||
"show_event_in_progress": true
|
||||
}},
|
||||
|
||||
|
||||
{"model": "club.participate", "pk": 30000, "fields":
|
||||
{"person": 9001,
|
||||
"event": 20000,
|
||||
"state": "confirmed",
|
||||
"date_added": "2021-02-01 12:00:00",
|
||||
"role": 1}},
|
||||
|
||||
{"model": "club.participate", "pk": 30001, "fields":
|
||||
{"person": 9000,
|
||||
"event": 20007,
|
||||
"state": "confirmed",
|
||||
"date_added": "2019-03-10 00:00:00",
|
||||
"paid": true,
|
||||
"role": 5}}
|
||||
]
|
||||
|
||||
52
core/fixtures/expo_areas.json
Normal file
52
core/fixtures/expo_areas.json
Normal file
@@ -0,0 +1,52 @@
|
||||
[
|
||||
{"model": "core.area", "pk": 25, "fields":
|
||||
{"short_name": "1626 or 6 (borderline)", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 24, "fields":
|
||||
{"short_name": "8a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 23, "fields":
|
||||
{"short_name": "2b or 4 (unclear)", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 22, "fields":
|
||||
{"short_name": "11", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 21, "fields":
|
||||
{"short_name": "3", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 20, "fields":
|
||||
{"short_name": "4", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 19, "fields":
|
||||
{"short_name": "1b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 18, "fields":
|
||||
{"short_name": "8b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 17, "fields":
|
||||
{"short_name": "2d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 16, "fields":
|
||||
{"short_name": "7", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 15, "fields":
|
||||
{"short_name": "2b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 14, "fields":
|
||||
{"short_name": "8c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 13, "fields":
|
||||
{"short_name": "2c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 12, "fields":
|
||||
{"short_name": "8d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 11, "fields":
|
||||
{"short_name": "", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 10, "fields":
|
||||
{"short_name": "5", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 9, "fields":
|
||||
{"short_name": "6", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 8, "fields":
|
||||
{"short_name": "2a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 7, "fields":
|
||||
{"short_name": "1c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 6, "fields":
|
||||
{"short_name": "1d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 5, "fields":
|
||||
{"short_name": "1a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 4, "fields":
|
||||
{"short_name": "9", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 3, "fields":
|
||||
{"short_name": "10", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 2, "fields":
|
||||
{"short_name": "1626", "name": null, "description": null, "super": null, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 1, "fields":
|
||||
{"short_name": "1623", "name": null, "description": null, "super": null, "new_since_parsing": false, "non_public": false}}
|
||||
]
|
||||
40
core/fixtures/expo_caves.json
Normal file
40
core/fixtures/expo_caves.json
Normal file
@@ -0,0 +1,40 @@
|
||||
[{"model": "core.cave", "pk": 43, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
"official_name": "Schnellzughöhle",
|
||||
"kataster_code": "6/t/S/W x",
|
||||
"kataster_number": "115",
|
||||
"unofficial_number": "40m",
|
||||
"explorers": "CUCC 1980-1985",
|
||||
"underground_description": "This is the main entrance through which the majority of the <a href=\"41.htm\">Stellerweghöhle</a> system was explored. See the separate <a href=\"41/115.htm#ent115\">full guidebook description</a> for details, just an overview is given here.</p><p>The entrance leads to a non-obvious way on to the head of the short <b>Bell Pitch</b>, from where very awkward going leads out to a bigger passage to reach <b>The Ramp</b> a series of off-vertical pitches. The damper but technically easier <b>Inlet Pitches</b> drop to a Big Chamber, from where <b>Pete's Purgatory</b> starts, and leads in 800m of tortuous going to <b>The Confluence</b> and the larger streamway leading to the deepest point.</p><p>Better is the <b>Purgatory Bypass</b> which starts as dry fossil tubes, with a choice of routes to reach <b>Junction Chamber</b> where the <b>Big Rift</b> of <a href=\"41.htm\">Stellerweghöhle</a> enters. Opposite, the huge fossil tube of <b>Dartford Tunnel</b> makes for easy progress to the Confluence, about halfway down the system. The continuing main streamway is interrupted by a bypassable sump and numerous pitches before a low airspace duck at the end of an unpromising canal leads to the spectacular <b>Orgasm Chasm</b>. Careful rigging avoids the water in this 140m shaft, ending in muddy passage and another short drop to a deep and terminal sump. ",
|
||||
"equipment": "",
|
||||
"references": "",
|
||||
"survey": "CUCC's parts surveyed to Grade 5 but not all drawn up - see <a href=\"41/survey.htm\">here</a>",
|
||||
"kataster_status": "",
|
||||
"underground_centre_line": "In dataset",
|
||||
"notes": "The Austrian Kataster has adopted a very perverse way of numbering things. Their numbers are as follows:</p><ul> <li>115a Stellerweghöhle entrance 41a</li> <li>115b Stellerweghöhle entrance 41b</li> <li>115c Stellerweghöhle entrance 41c ( where ? )</li> <li>115d Schnellzughöhle entrance 115</li> <li>115e unnamed entrance 142</li></ul><p>", "length": "SMK system total 54000m", "depth": "from entrance; SMK system total 1032m", "extent": "SMK system total 2812m",
|
||||
"survex_file": "smk-system.svx",
|
||||
"description_file": "1623/115.htm",
|
||||
"url": "1623/115.url",
|
||||
"filename": "1623-115.html",
|
||||
"area": [1, 8]}},
|
||||
|
||||
{"model": "core.cave", "pk": 350, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
"official_name": "Seetrichter",
|
||||
"kataster_code": "",
|
||||
"kataster_number": "284",
|
||||
"unofficial_number": "",
|
||||
"explorers": "<p></p>",
|
||||
"underground_description": "",
|
||||
"equipment": "<p></p>",
|
||||
"references": "<p>",
|
||||
"survey": "<p></p>",
|
||||
"kataster_status": "",
|
||||
"underground_centre_line": "",
|
||||
"notes": "A 25m long (22m deep) resurgence in Altausee. At the bottom, at a depth of 72m, there are large round blocks.", "length": "", "depth": "", "extent": "",
|
||||
"survex_file": "",
|
||||
"description_file": "",
|
||||
"url": "1623/284/284.html",
|
||||
"filename": "1623-284.html",
|
||||
"area": [1, 11]}}
|
||||
]
|
||||
17
core/fixtures/expo_exped.json
Normal file
17
core/fixtures/expo_exped.json
Normal file
@@ -0,0 +1,17 @@
|
||||
[{"model": "core.expedition", "pk": 44, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
"year": "2019", "name": "CUCC expo 2019"}},
|
||||
|
||||
{"model": "core.personexpedition", "pk": 681, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
"expedition": 44,
|
||||
"person": 250, "slugfield": null, "is_guest": false
|
||||
}},
|
||||
|
||||
{"model": "core.person", "pk": 250, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
"first_name": "Michael",
|
||||
"last_name": "Sargent",
|
||||
"fullname": "Michael Sargent", "is_vfho": false, "mug_shot": null,
|
||||
"blurb": "\n\n\n\n\n\n<p><img class=\"onleft\" src=\"/folk/i/mikey0.jpg\">\n<img class=\"onright\" src=\"/folk/i/mikey1.jpg\" height=\"400\"\nalt=\"\" />\n<b>Michael Sargent</b> CUCC<br />\nExpeditions 2014, 15, 16, 17, 18, 19.\n<p>The first second-generation expo caver in 2014, later members of this exclusive group were Dan Lenartowicz and Sarah Connolly.\n\n\n<img class=\"onleft\" src=\"/folk/i/michaelsargent.jpg\">\n<im\n\n<hr style=\"clear: both\" /><p class=\"caption\">Pre-expo (pre-student) photos from President's Invite (OUCC) \nand first abseiling instruction (Cambridge).</p>\n", "orderref": ""}}
|
||||
]
|
||||
58
core/fixtures/how-to-load-fixtures.txt
Normal file
58
core/fixtures/how-to-load-fixtures.txt
Normal file
@@ -0,0 +1,58 @@
|
||||
This folder is used by manage.py to load fixtures, as are all the folders
|
||||
called /fixtures/ in any Django app here.
|
||||
|
||||
e.g. a list of files which are in the /fixtures/ folders:
|
||||
$ python manage.py loaddata cuyc_basic_data test_data_1 test_data_1.1 test_data_2
|
||||
|
||||
|
||||
|
||||
$ python manage.py help migration
|
||||
usage: manage.py migrate [-h] [--noinput] [--database DATABASE] [--fake]
|
||||
[--fake-initial] [--plan] [--run-syncdb] [--version]
|
||||
[-v {0,1,2,3}] [--settings SETTINGS]
|
||||
[--pythonpath PYTHONPATH] [--traceback] [--no-color]
|
||||
[--force-color]
|
||||
[app_label] [migration_name]
|
||||
|
||||
Updates database schema. Manages both apps with migrations and those without.
|
||||
|
||||
positional arguments:
|
||||
app_label App label of an application to synchronize the state.
|
||||
migration_name Database state will be brought to the state after that
|
||||
migration. Use the name "zero" to unapply all
|
||||
migrations.
|
||||
optional arguments:
|
||||
--noinput, --no-input
|
||||
Tells Django to NOT prompt the user for input of any
|
||||
kind.
|
||||
--database DATABASE Nominates a database to synchronize. Defaults to the
|
||||
"default" database.
|
||||
--fake Mark migrations as run without actually running them.
|
||||
--fake-initial Detect if tables already exist and fake-apply initial
|
||||
migrations if so. Make sure that the current database
|
||||
schema matches your initial migration before using
|
||||
this flag. Django will only check for an existing
|
||||
table name.
|
||||
--plan Shows a list of the migration actions that will be
|
||||
performed.
|
||||
--run-syncdb Creates tables for apps without migrations.
|
||||
|
||||
|
||||
$ python manage.py help loaddata
|
||||
usage: manage.py loaddata [-h] [--database DATABASE] [--app APP_LABEL]
|
||||
[--ignorenonexistent] [-e EXCLUDE] [--format FORMAT]
|
||||
[--version] [-v {0,1,2,3}] [--settings SETTINGS]
|
||||
[--pythonpath PYTHONPATH] [--traceback] [--no-color]
|
||||
[--force-color]
|
||||
fixture [fixture ...]
|
||||
|
||||
Installs the named fixture(s) in the database.
|
||||
optional arguments:
|
||||
--app APP_LABEL Only look for fixtures in the specified app.
|
||||
--ignorenonexistent, -i
|
||||
Ignores entries in the serialized data for fields that
|
||||
do not currently exist on the model.
|
||||
positional arguments:
|
||||
fixture Fixture labels.
|
||||
|
||||
|
||||
5
core/fixtures/test_upload_file.pdf
Normal file
5
core/fixtures/test_upload_file.pdf
Normal file
@@ -0,0 +1,5 @@
|
||||
This file is uploaded by the integration test suite as part of the tests.
|
||||
|
||||
It, and any other with similar names, e.g test_upload_GPev9qN.txt can be safely deleted,
|
||||
EXCEPT for the original copy which lives in troggle/core/fixtures/
|
||||
|
||||
5
core/fixtures/test_upload_file.txt
Normal file
5
core/fixtures/test_upload_file.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
This file is uploaded by the integration test suite as part of the tests.
|
||||
|
||||
It, and any other with similar names, e.g test_upload_GPev9qN.txt can be safely deleted,
|
||||
EXCEPT for the original copy which lives in troggle/core/fixtures/
|
||||
|
||||
7
core/fixtures/test_upload_nosuffix
Normal file
7
core/fixtures/test_upload_nosuffix
Normal file
@@ -0,0 +1,7 @@
|
||||
This file is uploaded by the integration test suite as part of the tests.
|
||||
|
||||
This has no suffix so it is pretending to be a Therion config file.
|
||||
|
||||
It, and any other with similar names, e.g test_upload_GPev9qN.txt can be safely deleted,
|
||||
EXCEPT for the original copy which lives in troggle/core/fixtures/
|
||||
|
||||
1
core/fixtures/troggle_output (5).json
Normal file
1
core/fixtures/troggle_output (5).json
Normal file
@@ -0,0 +1 @@
|
||||
[{"model": "core.logbookentry", "pk": 7, "fields": {"new_since_parsing": false, "non_public": false, "date": "2019-07-11", "expeditionday": null, "expedition": 44, "title": "base camp - CUCC Austria Expedition 2019 Blog", "cave_slug": "None", "place": "base camp", "text": "<a href=\"https://ukcaving.com/board/index.php?topic=25249.msg311372#msg311372\">blog post</a> </br></br> At the time of writing, I am sat in the Tatty Hut at Base Camp in Bad Aussee. It is day five of expo and a lot has happened. We discovered on Sunday (day one - 07/07/2019) that our Top Camp, Steinbrueken, was full of snow: Meanwhile, Base Camp preparations were well underway: he beer tent was being hoisted (above) and the new rope (thanks to UK Caving and Spanset for the sponsorship!) was being soaked, coiled, and cut into usable lengths ready for caving. </br></br> The next few days consisted of Expo members undertaking multitudes of carrying trips up to top camp, and a few hardy folk doing their best to fettle the bivvy for habitability. Tuesday (09/07/2019) night saw the first people sleeping in Steinbrueken. Mostly, they described the experience as \"chilly\" but one person went as far as to claim he had been warmer there than at Base Camp. </br></br> Also on Tuesday (09/07/2019), a new route was devised and cairned directly from Heimkommen Hoehle to the tourist path on the col. The idea being that Homecoming could be close enough to push from Base Camp rather than Steinbrueken. This came with the discovery that Fischgesicht Hoehle's entrance was under two to three metres of snow: </br></br> On Wednesday (10/07/2019), Expo split into three groups. The majority went to Steinbrueken to commence the final push towards habitability while some went to investigate Balkonhoehle. Three of us (Dickon Morris, Daniel Heins, and myself) went to Heimkommen to rig to the pushing front (the decision to concentrate on Heimkommen and Balkon having been made for us by the plateau). 
</br></br> That's all for now, </br></br> Tom Crossley (11/07/2019)", "slug": "base-camp-cucc-austria-expedition-2019-blog", "filename": null, "entry_type": "html"}}]
|
||||
370
core/forms.py
370
core/forms.py
@@ -1,179 +1,239 @@
|
||||
from django.forms import ModelForm
|
||||
from models import Cave, Person, PersonExpedition, LogbookEntry, QM, Expedition, Entrance, CaveAndEntrance
|
||||
|
||||
import django.forms as forms
|
||||
from django.forms import ModelForm
|
||||
from django.forms.models import modelformset_factory
|
||||
from django.contrib.admin.widgets import AdminDateWidget
|
||||
import string
|
||||
from datetime import date
|
||||
from tinymce.widgets import TinyMCE
|
||||
|
||||
from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance
|
||||
from troggle.core.views.editor_helpers import HTMLarea
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
# from tinymce.widgets import TinyMCE
|
||||
import re
|
||||
|
||||
|
||||
"""These are all the class-based Forms used by troggle.
|
||||
There are other, simpler, upload forms in view/uploads.py
|
||||
|
||||
class-based forms are quicker to set up (for Django experts) but
|
||||
are more difficult to maintain by non-Django experts.
|
||||
"""
|
||||
|
||||
todo = """
|
||||
"""
|
||||
|
||||
|
||||
class CaveForm(ModelForm):
|
||||
underground_description = forms.CharField(required = False, widget=forms.Textarea())
|
||||
explorers = forms.CharField(required = False, widget=forms.Textarea())
|
||||
equipment = forms.CharField(required = False, widget=forms.Textarea())
|
||||
survey = forms.CharField(required = False, widget=forms.Textarea())
|
||||
kataster_status = forms.CharField(required = False, widget=forms.Textarea())
|
||||
underground_centre_line = forms.CharField(required = False, widget=forms.Textarea())
|
||||
notes = forms.CharField(required = False, widget=forms.Textarea())
|
||||
references = forms.CharField(required = False, widget=forms.Textarea())
|
||||
url = forms.CharField(required = True)
|
||||
"""Only those fields for which we want to override defaults are listed here
|
||||
the other fields are present on the form, but use the default presentation style
|
||||
"""
|
||||
|
||||
official_name = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
|
||||
underground_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
explorers = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
equipment = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
survey = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
# survey = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
kataster_status = forms.CharField(required=False)
|
||||
underground_centre_line = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
notes = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
references = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
description_file = forms.CharField(required=False, label="Path of top-level description file for this cave, when a separate file is used. Otherwise blank.", widget=forms.TextInput(attrs={"size": "45"}), help_text="")
|
||||
survex_file = forms.CharField(
|
||||
required=False, label="Survex file eg. caves-1623/000/000.svx", widget=forms.TextInput(attrs={"size": "45"})
|
||||
)
|
||||
#url = forms.CharField(required=True, label="URL eg. 1623/000/000 (no .html)", widget=forms.TextInput(attrs={"size": "45"}))
|
||||
length = forms.CharField(required=False, label="Length (m)")
|
||||
depth = forms.CharField(required=False, label="Depth (m)")
|
||||
extent = forms.CharField(required=False, label="Extent (m)")
|
||||
|
||||
#cave_slug = forms.CharField()
|
||||
|
||||
class Meta:
|
||||
model = Cave
|
||||
exclude = ("filename",)
|
||||
|
||||
|
||||
|
||||
field_order = ['area', 'unofficial_number', 'kataster_number', 'official_name', 'underground_description', 'explorers', 'equipment', 'survey', 'kataster_status', 'underground_centre_line', 'notes', 'references', 'description_file', 'survex_file', 'url', 'length', 'depth', 'extent']
|
||||
|
||||
def get_area(self):
|
||||
for a in self.cleaned_data["area"]:
|
||||
if a.kat_area():
|
||||
return a.kat_area()
|
||||
|
||||
def clean_cave_slug(self):
|
||||
if self.cleaned_data["cave_slug"] == "":
|
||||
myArea = ""
|
||||
for a in self.cleaned_data["area"]:
|
||||
if a.kat_area():
|
||||
myArea = a.kat_area()
|
||||
if self.data["kataster_number"]:
|
||||
cave_slug = f"{myArea}-{self.cleaned_data['kataster_number']}"
|
||||
else:
|
||||
cave_slug = f"{myArea}-{self.cleaned_data['unofficial_number']}"
|
||||
else:
|
||||
cave_slug = self.cleaned_data["cave_slug"]
|
||||
# Converting a PENDING cave to a real cave by saving this form
|
||||
print("EEE", cave_slug.replace("-PENDING-", "-"))
|
||||
return cave_slug.replace("-PENDING-", "-")
|
||||
|
||||
# def clean_url(self):
|
||||
# data = self.cleaned_data["url"]
|
||||
# if not re.match("\d\d\d\d/.", data):
|
||||
# raise ValidationError("URL must start with a four digit Kataster area.")
|
||||
# return data
|
||||
|
||||
|
||||
def clean(self):
|
||||
if self.cleaned_data.get("kataster_number") == "" and self.cleaned_data.get("unofficial_number") == "":
|
||||
self._errors["unofficial_number"] = self.error_class(["Either the kataster or unoffical number is required."])
|
||||
if self.cleaned_data.get("kataster_number") != "" and self.cleaned_data.get("official_name") == "":
|
||||
self._errors["official_name"] = self.error_class(["This field is required when there is a kataster number."])
|
||||
if self.cleaned_data.get("area") == []:
|
||||
cleaned_data = super(CaveForm, self).clean()
|
||||
if self.data.get("kataster_number") == "" and self.data.get("unofficial_number") == "":
|
||||
self._errors["unofficial_number"] = self.error_class(
|
||||
["Either the kataster or unoffical number is required."]
|
||||
)
|
||||
# if self.cleaned_data.get("kataster_number") != "" and self.cleaned_data.get("official_name") == "":
|
||||
# self._errors["official_name"] = self.error_class(["This field is required when there is a kataster number."])
|
||||
if cleaned_data.get("area") == []:
|
||||
self._errors["area"] = self.error_class(["This field is required."])
|
||||
if self.cleaned_data.get("url") and self.cleaned_data.get("url").startswith("/"):
|
||||
self._errors["url"] = self.error_class(["This field can not start with a /."])
|
||||
return self.cleaned_data
|
||||
|
||||
class VersionControlCommentForm(forms.Form):
|
||||
description_of_change = forms.CharField(required = True, widget=forms.Textarea())
|
||||
if cleaned_data.get("url") and cleaned_data.get("url").startswith("/"):
|
||||
self._errors["url"] = self.error_class(["This field cannot start with a /."])
|
||||
return cleaned_data
|
||||
|
||||
|
||||
class EntranceForm(ModelForm):
|
||||
#underground_description = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
|
||||
#explorers = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
#equipment = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
#survey = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
#kataster_status = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
#underground_centre_line = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
#notes = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
#references = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
other_station = forms.CharField(required=False) # Trying to change this to a singl;e line entry
|
||||
tag_station = forms.CharField(required=False) # Trying to change this to a singl;e line entry
|
||||
exact_station = forms.CharField(required=False) # Trying to change this to a singl;e line entry
|
||||
northing = forms.CharField(required=False) # Trying to change this to a singl;e line entry
|
||||
easting = forms.CharField(required=False) # Trying to change this to a singl;e line entry
|
||||
alt = forms.CharField(required=False) # Trying to change this to a singl;e line entry
|
||||
"""Only those fields for which we want to override defaults are listed here
|
||||
the other fields are present on the form, but use the default presentation style
|
||||
"""
|
||||
|
||||
name = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
|
||||
entrance_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
explorers = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
|
||||
# explorers = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
map_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
location_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
lastvisit = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Date of last visit, e.g. 2023-07-11"
|
||||
)
|
||||
approach = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
underground_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
photo = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
marking_comment = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
findability_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
other_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
bearings = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
tag_station = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(attrs={"size": "50"}), label="Tag station: Survex station id, e.g. 1623.p2023-xx-01"
|
||||
)
|
||||
exact_station = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(attrs={"size": "50"}), label="Exact station: Survex station id, e.g. 1623.2023-xx-01.2"
|
||||
)
|
||||
other_station = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(attrs={"size": "50"}), label="Other station: Survex station id, e.g. 1623.2023-xx-01.33"
|
||||
)
|
||||
northing = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Northing (UTM) - from survex data"
|
||||
)
|
||||
easting = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Easting (UTM) - from survex data"
|
||||
)
|
||||
lat_wgs84 = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Latitude (WSG84) - if no other location"
|
||||
)
|
||||
long_wgs84 = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Longitude (WSG84) - if no other location"
|
||||
)
|
||||
alt = forms.CharField(required=False, label="Altitude (m)")
|
||||
url = forms.CharField(required=False, label="URL [usually blank]", widget=forms.TextInput(attrs={"size": "45"}))
|
||||
|
||||
field_order = ['name', 'entrance_description', 'explorers', 'map_description', 'location_description', 'lastvisit', 'approach', 'underground_description', 'photo', 'marking_comment', 'findability_description', 'other_description', 'bearings', 'tag_station', 'exact_station', 'other_station', 'northing', 'easting', 'lat_wgs84', 'long_wgs84', 'alt', 'url']
|
||||
|
||||
class Meta:
|
||||
model = Entrance
|
||||
exclude = ("cached_primary_slug", "filename",)
|
||||
exclude = (
|
||||
"cached_primary_slug",
|
||||
"filename",
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
if self.cleaned_data.get("url").startswith("/"):
|
||||
self._errors["url"] = self.error_class(["This field can not start with a /."])
|
||||
self._errors["url"] = self.error_class(["This field cannot start with a /."])
|
||||
return self.cleaned_data
|
||||
|
||||
|
||||
|
||||
CaveAndEntranceFormSet = modelformset_factory(CaveAndEntrance, exclude=('cave',))
|
||||
# This next line is called from the templates/edit_cave.html template.
|
||||
# This is sufficient to create an entire entry for for the cave fields automatically
|
||||
# http://localhost:8000/cave/new/
|
||||
# using django built-in Deep Magic. https://docs.djangoproject.com/en/dev/topics/forms/modelforms/
|
||||
# for forms which map directly onto a Django Model
|
||||
CaveAndEntranceFormSet = modelformset_factory(CaveAndEntrance, exclude=("cave",))
|
||||
# This is used only in edit_entrance() in views/caves.py
|
||||
|
||||
class EntranceLetterForm(ModelForm):
|
||||
"""Form to link entrances to caves, along with an entrance number.
|
||||
|
||||
Nb. The relationship between caves and entrances has historically been a many to many relationship.
|
||||
With entrances gaining new caves and letters when caves are joined.
|
||||
"""
|
||||
|
||||
class Meta:
|
||||
model = CaveAndEntrance
|
||||
exclude = ('cave', 'entrance')
|
||||
|
||||
#class PersonForm(ModelForm):
|
||||
# class Meta:
|
||||
# model = Person
|
||||
|
||||
#class LogbookEntryForm(ModelForm):
|
||||
# class Meta:
|
||||
# model = LogbookEntry#
|
||||
|
||||
# def wikiLinkHints(LogbookEntry=None):
|
||||
# """
|
||||
# This function returns html-formatted paragraphs for each of the
|
||||
# wikilink types that are related to this logbookentry. Each paragraph
|
||||
# contains a list of all of the related wikilinks.
|
||||
#
|
||||
# Perhaps an admin javascript solution would be better.
|
||||
# """
|
||||
# res = ["Please use the following wikilinks, which are related to this logbook entry:"]
|
||||
#
|
||||
# res.append(r'</p><p style="float: left;"><b>QMs found:</b>')
|
||||
# for QM in LogbookEntry.instance.QMs_found.all():
|
||||
# res.append(QM.wiki_link())
|
||||
|
||||
# res.append(r'</p><p style="float: left;"><b>QMs ticked off:</b>')
|
||||
# for QM in LogbookEntry.instance.QMs_ticked_off.all():
|
||||
# res.append(QM.wiki_link())
|
||||
|
||||
# res.append(r'</p><p style="float: left; "><b>People</b>')
|
||||
# for persontrip in LogbookEntry.instance.persontrip_set.all():
|
||||
# res.append(persontrip.wiki_link())
|
||||
# res.append(r'</p>')
|
||||
|
||||
# return string.join(res, r'<br />')
|
||||
|
||||
# def __init__(self, *args, **kwargs):
|
||||
# super(LogbookEntryForm, self).__init__(*args, **kwargs)
|
||||
# self.fields['text'].help_text=self.wikiLinkHints()#
|
||||
|
||||
#class CaveForm(forms.Form):
|
||||
# html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
|
||||
|
||||
def getTripForm(expedition):
|
||||
|
||||
class TripForm(forms.Form):
|
||||
date = forms.DateField()
|
||||
title = forms.CharField(max_length=200)
|
||||
caves = [cave.reference() for cave in Cave.objects.all()]
|
||||
caves.sort()
|
||||
caves = ["-----"] + caves
|
||||
cave = forms.ChoiceField([(c, c) for c in caves], required=False)
|
||||
location = forms.CharField(max_length=200, required=False)
|
||||
caveOrLocation = forms.ChoiceField([("cave", "Cave"), ("location", "Location")], widget = forms.widgets.RadioSelect())
|
||||
html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
|
||||
|
||||
def clean(self):
|
||||
print dir(self)
|
||||
if self.cleaned_data.get("caveOrLocation") == "cave" and not self.cleaned_data.get("cave"):
|
||||
self._errors["cave"] = self.error_class(["This field is required"])
|
||||
if self.cleaned_data.get("caveOrLocation") == "location" and not self.cleaned_data.get("location"):
|
||||
self._errors["location"] = self.error_class(["This field is required"])
|
||||
return self.cleaned_data
|
||||
|
||||
class PersonTripForm(forms.Form):
|
||||
names = [get_name(pe) for pe in PersonExpedition.objects.filter(expedition = expedition)]
|
||||
names.sort()
|
||||
names = ["-----"] + names
|
||||
name = forms.ChoiceField([(n, n) for n in names])
|
||||
TU = forms.FloatField(required=False)
|
||||
author = forms.BooleanField(required=False, default=False)
|
||||
|
||||
PersonTripFormSet = formset_factory(PersonTripForm, extra=1)
|
||||
|
||||
return PersonTripFormSet, TripForm
|
||||
|
||||
def get_name(pe):
|
||||
if pe.nickname:
|
||||
return pe.nickname
|
||||
else:
|
||||
return pe.person.first_name
|
||||
|
||||
#class UploadFileForm(forms.Form):
|
||||
# title = forms.CharField(max_length=50)
|
||||
# file = forms.FileField()
|
||||
# html = forms.CharField(widget=TinyMCE(attrs={'cols': 80, 'rows': 30}))
|
||||
# lon_utm = forms.FloatField(required=False)
|
||||
# lat_utm = forms.FloatField(required=False)
|
||||
# slug = forms.CharField(max_length=50)
|
||||
# date = forms.DateField(required=False)
|
||||
|
||||
# caves = [cave.slug for cave in Cave.objects.all()]
|
||||
# caves.sort()
|
||||
# caves = ["-----"] + caves
|
||||
# cave = forms.ChoiceField([(c, c) for c in caves], required=False)
|
||||
|
||||
# entrance = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
|
||||
# qm = forms.ChoiceField([("-----", "Please select a cave"), ], required=False)
|
||||
|
||||
# expeditions = [e.year for e in Expedition.objects.all()]
|
||||
# expeditions.sort()
|
||||
# expeditions = ["-----"] + expeditions
|
||||
# expedition = forms.ChoiceField([(e, e) for e in expeditions], required=False)
|
||||
|
||||
# logbookentry = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
|
||||
|
||||
# person = forms.ChoiceField([("-----", "Please select an expedition"), ], required=False)
|
||||
|
||||
# survey_point = forms.CharField()
|
||||
|
||||
exclude = ("cave", "entrance")
|
||||
|
||||
def full_clean(self):
|
||||
super(EntranceLetterForm, self).full_clean()
|
||||
try:
|
||||
self.instance.validate_unique()
|
||||
except forms.ValidationError as e:
|
||||
self._update_errors(e)
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
from imagekit.specs import ImageSpec
|
||||
from imagekit import processors
|
||||
|
||||
class ResizeThumb(processors.Resize):
|
||||
width = 100
|
||||
crop = False
|
||||
|
||||
class ResizeDisplay(processors.Resize):
|
||||
width = 600
|
||||
|
||||
#class EnhanceThumb(processors.Adjustment):
|
||||
#contrast = 1.2
|
||||
#sharpness = 2
|
||||
|
||||
class Thumbnail(ImageSpec):
|
||||
access_as = 'thumbnail_image'
|
||||
pre_cache = True
|
||||
processors = [ResizeThumb]
|
||||
|
||||
class Display(ImageSpec):
|
||||
increment_count = True
|
||||
processors = [ResizeDisplay]
|
||||
Binary file not shown.
36
core/management/commands/dummycmd.py
Normal file
36
core/management/commands/dummycmd.py
Normal file
@@ -0,0 +1,36 @@
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
"""this is now replaced by databaseRest.py
|
||||
|
||||
This is an example of how to create our own bespoke commandline
|
||||
commands.
|
||||
|
||||
Good articles on creating Django commands at
|
||||
https://www.mattlayman.com/understand-django/command-apps/
|
||||
https://www.geeksforgeeks.org/custom-django-management-commands/
|
||||
|
||||
Django docs:
|
||||
https://docs.djangoproject.com/en/dev/howto/custom-management-commands/
|
||||
|
||||
We might use this mechanism to replace/enhance the
|
||||
folk, wallets and any cron jobs or other standalone scripts.
|
||||
"""
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
def add_arguments(self, parser):
|
||||
# Positional arguments
|
||||
parser.add_argument("posargs", nargs="+", type=int)
|
||||
|
||||
# Named (optional) arguments
|
||||
parser.add_argument(
|
||||
"--delete",
|
||||
action="store_true",
|
||||
help="Removed as redundant - use databaseReset.py",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
print(args)
|
||||
print(options)
|
||||
@@ -1,182 +0,0 @@
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from optparse import make_option
|
||||
from troggle.core.models import Cave
|
||||
import settings
|
||||
|
||||
databasename=settings.DATABASES['default']['NAME']
|
||||
expouser=settings.EXPOUSER
|
||||
expouserpass=settings.EXPOUSERPASS
|
||||
expouseremail=settings.EXPOUSER_EMAIL
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'This is normal usage, clear database and reread everything'
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--foo',
|
||||
action='store_true',
|
||||
dest='foo',
|
||||
default=False,
|
||||
help='test'),
|
||||
)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
|
||||
parser.add_argument(
|
||||
'--foo',
|
||||
action='store_true',
|
||||
dest='foo',
|
||||
help='Help text',
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
print(args)
|
||||
print(options)
|
||||
if "desc" in args:
|
||||
self.resetdesc()
|
||||
elif "scans" in args:
|
||||
self.import_surveyscans()
|
||||
elif "caves" in args:
|
||||
self.reload_db()
|
||||
self.make_dirs()
|
||||
self.pageredirects()
|
||||
self.import_caves()
|
||||
elif "people" in args:
|
||||
self.import_people()
|
||||
elif "QMs" in args:
|
||||
self.import_QMs()
|
||||
elif "tunnel" in args:
|
||||
self.import_tunnelfiles()
|
||||
elif "reset" in args:
|
||||
self.reset()
|
||||
elif "survex" in args:
|
||||
self.import_survex()
|
||||
elif "survexpos" in args:
|
||||
import parsers.survex
|
||||
parsers.survex.LoadPos()
|
||||
elif "logbooks" in args:
|
||||
self.import_logbooks()
|
||||
elif "autologbooks" in args:
|
||||
self.import_auto_logbooks()
|
||||
elif "dumplogbooks" in args:
|
||||
self.dumplogbooks()
|
||||
elif "writeCaves" in args:
|
||||
self.writeCaves()
|
||||
elif "foo" in args:
|
||||
self.stdout.write('Tesing....')
|
||||
else:
|
||||
self.stdout.write("%s not recognised" % args)
|
||||
self.usage(options)
|
||||
|
||||
def reload_db():
|
||||
if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3':
|
||||
try:
|
||||
os.remove(databasename)
|
||||
except OSError:
|
||||
pass
|
||||
else:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute("DROP DATABASE %s" % databasename)
|
||||
cursor.execute("CREATE DATABASE %s" % databasename)
|
||||
cursor.execute("ALTER DATABASE %s CHARACTER SET=utf8" % databasename)
|
||||
cursor.execute("USE %s" % databasename)
|
||||
management.call_command('migrate', interactive=False)
|
||||
# management.call_command('syncdb', interactive=False)
|
||||
user = User.objects.create_user(expouser, expouseremail, expouserpass)
|
||||
user.is_staff = True
|
||||
user.is_superuser = True
|
||||
user.save()
|
||||
|
||||
def make_dirs():
|
||||
"""Make directories that troggle requires"""
|
||||
# should also deal with permissions here.
|
||||
if not os.path.isdir(settings.PHOTOS_ROOT):
|
||||
os.mkdir(settings.PHOTOS_ROOT)
|
||||
|
||||
def import_caves():
|
||||
import parsers.caves
|
||||
print("importing caves")
|
||||
parsers.caves.readcaves()
|
||||
|
||||
def import_people():
|
||||
import parsers.people
|
||||
parsers.people.LoadPersonsExpos()
|
||||
|
||||
def import_logbooks():
|
||||
# The below line was causing errors I didn't understand (it said LOGFILE was a string), and I couldn't be bothered to figure
|
||||
# what was going on so I just catch the error with a try. - AC 21 May
|
||||
try:
|
||||
settings.LOGFILE.write('\nBegun importing logbooks at ' + time.asctime() + '\n' + '-' * 60)
|
||||
except:
|
||||
pass
|
||||
|
||||
import parsers.logbooks
|
||||
parsers.logbooks.LoadLogbooks()
|
||||
|
||||
def import_survex():
|
||||
import parsers.survex
|
||||
parsers.survex.LoadAllSurvexBlocks()
|
||||
parsers.survex.LoadPos()
|
||||
|
||||
def import_QMs():
|
||||
import parsers.QMs
|
||||
|
||||
def import_surveys():
|
||||
import parsers.surveys
|
||||
parsers.surveys.parseSurveys(logfile=settings.LOGFILE)
|
||||
|
||||
def import_surveyscans():
|
||||
import parsers.surveys
|
||||
parsers.surveys.LoadListScans()
|
||||
|
||||
def import_tunnelfiles():
|
||||
import parsers.surveys
|
||||
parsers.surveys.LoadTunnelFiles()
|
||||
|
||||
def reset():
|
||||
""" Wipe the troggle database and import everything from legacy data
|
||||
"""
|
||||
reload_db()
|
||||
make_dirs()
|
||||
pageredirects()
|
||||
import_caves()
|
||||
import_people()
|
||||
import_surveyscans()
|
||||
import_survex()
|
||||
import_logbooks()
|
||||
import_QMs()
|
||||
try:
|
||||
import_tunnelfiles()
|
||||
except:
|
||||
print("Tunnel files parser broken.")
|
||||
|
||||
import_surveys()
|
||||
|
||||
def pageredirects():
|
||||
for oldURL, newURL in [("indxal.htm", reverse("caveindex"))]:
|
||||
f = troggle.flatpages.models.Redirect(originalURL=oldURL, newURL=newURL)
|
||||
f.save()
|
||||
|
||||
def writeCaves():
|
||||
for cave in Cave.objects.all():
|
||||
cave.writeDataFile()
|
||||
for entrance in Entrance.objects.all():
|
||||
entrance.writeDataFile()
|
||||
|
||||
def usage(self, parser):
|
||||
print("""Usage is 'manage.py reset_db <command>'
|
||||
where command is:
|
||||
reset - this is normal usage, clear database and reread everything
|
||||
desc
|
||||
caves - read in the caves
|
||||
logbooks - read in the logbooks
|
||||
autologbooks
|
||||
dumplogbooks
|
||||
people
|
||||
QMs - read in the QM files
|
||||
resetend
|
||||
scans - read in the scanned surveynotes
|
||||
survex - read in the survex files
|
||||
survexpos
|
||||
tunnel - read in the Tunnel files
|
||||
writeCaves
|
||||
""")
|
||||
77
core/middleware.py
Normal file
77
core/middleware.py
Normal file
@@ -0,0 +1,77 @@
|
||||
from django import http
|
||||
from django.conf import settings
|
||||
from django.urls import Resolver404, resolve
|
||||
|
||||
"""Non-standard django middleware is loaded from this file.
|
||||
|
||||
"""
|
||||
todo = """SmartAppendSlashMiddleware(object) Not Working.
|
||||
It needs re-writing to be compatible with Django v2.0 and later
|
||||
"""
|
||||
|
||||
|
||||
class SmartAppendSlashMiddleware(object):
|
||||
"""
|
||||
"SmartAppendSlash" middleware for taking care of URL rewriting.
|
||||
|
||||
This middleware appends a missing slash, if:
|
||||
* the SMART_APPEND_SLASH setting is True
|
||||
* the URL without the slash does not exist
|
||||
* the URL with an appended slash does exist.
|
||||
Otherwise it won't touch the URL.
|
||||
"""
|
||||
|
||||
def process_request(self, request):
|
||||
"""Called for every url so return as quickly as possible
|
||||
Append a slash if SMART_APPEND_SLASH is set, the resulting URL resolves and it doesn't without the /
|
||||
"""
|
||||
if not settings.SMART_APPEND_SLASH:
|
||||
return None
|
||||
|
||||
if request.path.endswith("/"):
|
||||
return None
|
||||
|
||||
if request.path.endswith("_edit"):
|
||||
return None
|
||||
|
||||
host = http.HttpRequest.get_host(request)
|
||||
old_url = [host, request.path]
|
||||
if _resolves(old_url[1]):
|
||||
return None
|
||||
|
||||
# So: it does not resolve according to our criteria, i.e. _edit doesn't count
|
||||
new_url = old_url[:]
|
||||
new_url[1] = new_url[1] + "/"
|
||||
if not _resolves(new_url[1]):
|
||||
return None
|
||||
else:
|
||||
if settings.DEBUG and request.method == "POST":
|
||||
# replace this exception with a redirect to an error page
|
||||
raise RuntimeError(
|
||||
f"You called this URL via POST, but the URL doesn't end in a slash and you have SMART_APPEND_SLASH set. Django can't redirect to the slash URL while maintaining POST data. Change your form to point to {new_url[0]}{new_url[1]} (note the trailing slash), or set SMART_APPEND_SLASH=False in your Django settings."
|
||||
)
|
||||
if new_url != old_url:
|
||||
# Redirect
|
||||
if new_url[0]:
|
||||
newurl = f"{request.is_secure() and 'https' or 'http'}://{new_url[0]}{new_url[1]}"
|
||||
else:
|
||||
newurl = new_url[1]
|
||||
if request.GET:
|
||||
newurl += "?" + request.GET.urlencode()
|
||||
return http.HttpResponsePermanentRedirect(newurl)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _resolves(url):
    """Return True if *url* resolves to a view, False on Resolver404.

    Any other exception is printed (to identify the offending URL) and
    re-raised. If the URL does not resolve, resolve() raises Resolver404,
    a subclass of Http404.
    """
    try:
        resolve(url)
    except Resolver404:
        return False
    except:
        print(url)
        raise
    # NOTE(review): expopages will resolve almost anything because it serves
    # its own page-not-found, so handle that in expopages, not in middleware.
    return True
|
||||
0
core/migrations/__init__.py
Normal file
0
core/migrations/__init__.py
Normal file
863
core/models.py
863
core/models.py
@@ -1,863 +0,0 @@
|
||||
import urllib, urlparse, string, os, datetime, logging, re
|
||||
import subprocess
|
||||
from django.forms import ModelForm
|
||||
from django.db import models
|
||||
from django.contrib import admin
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Min, Max
|
||||
from django.conf import settings
|
||||
from decimal import Decimal, getcontext
|
||||
from django.core.urlresolvers import reverse
|
||||
from imagekit.models import ImageModel
|
||||
from django.template import Context, loader
|
||||
import settings
|
||||
getcontext().prec=2 #use 2 significant figures for decimal calculations
|
||||
|
||||
from troggle.core.models_survex import *
|
||||
|
||||
|
||||
def get_related_by_wikilinks(wiki_text):
    """Scan *wiki_text* for QM wikilinks and return the matching QM objects.

    Links that match settings.QM_PATTERN but have no corresponding QM in the
    database are reported on stdout and skipped.
    """
    matches = re.findall(settings.QM_PATTERN, wiki_text)
    qms = []
    for wikilink in matches:
        # Pattern groups (presumably): [1] = year, [2] = cave kataster
        # number, [3] = QM number -- confirm against settings.QM_PATTERN.
        year = wikilink[1]
        cave_number = wikilink[2]
        qm_number = wikilink[3]
        try:
            cave_slugs = CaveSlug.objects.filter(cave__kataster_number=cave_number)
            qm = QM.objects.get(found_by__cave_slug__in=cave_slugs,
                                found_by__date__year=year,
                                number=qm_number)
        except QM.DoesNotExist:
            print('fail on '+str(wikilink))
        else:
            qms.append(qm)
    return qms
|
||||
|
||||
# Configure DEBUG-level logging into settings.LOGFILE. If the logfile cannot
# be opened (typically a permissions problem on the server), run the
# FIX_PERMISSIONS command once and retry.
try:
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
except:
    subprocess.call(settings.FIX_PERMISSIONS)
    logging.basicConfig(level=logging.DEBUG,
                        filename=settings.LOGFILE,
                        filemode='w')
|
||||
|
||||
#This class is for adding fields and methods which all of our models will have.
|
||||
class TroggleModel(models.Model):
    """Abstract base adding fields and methods which all of our models have."""
    # Set by the parsers to mark objects newer than the parsed source files.
    new_since_parsing = models.BooleanField(default=False, editable=False)
    non_public = models.BooleanField(default=False)

    class Meta:
        abstract = True

    def object_name(self):
        """Return the model's class name, e.g. "Cave"."""
        return self._meta.object_name

    def get_admin_url(self):
        """Return the URL of this object's page in the Django admin."""
        admin_path = "/admin/core/" + self.object_name().lower() + "/" + str(self.pk)
        return urlparse.urljoin(settings.URL_ROOT, admin_path)
|
||||
|
||||
class TroggleImageModel(ImageModel):
    """Abstract base for imagekit-backed image models, mirroring TroggleModel."""
    # Set by the parsers to mark objects newer than the parsed source files.
    new_since_parsing = models.BooleanField(default=False, editable=False)

    class Meta:
        abstract = True

    def object_name(self):
        """Return the model's class name."""
        return self._meta.object_name

    def get_admin_url(self):
        """Return the URL of this object's page in the Django admin."""
        admin_path = "/admin/core/" + self.object_name().lower() + "/" + str(self.pk)
        return urlparse.urljoin(settings.URL_ROOT, admin_path)
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
    """A single expedition, usually seen by year."""
    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)

    class Meta:
        ordering = ('-year',)
        get_latest_by = 'year'

    def __unicode__(self):
        return self.year

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('expedition', args=[self.year]))

    # construction function. should be moved out
    def get_expedition_day(self, date):
        """Return the ExpeditionDay for *date*, creating and saving it if absent."""
        existing = self.expeditionday_set.filter(date=date)
        if existing:
            assert len(existing) == 1
            return existing[0]
        day = ExpeditionDay(expedition=self, date=date)
        day.save()
        return day

    def day_min(self):
        """First ExpeditionDay of the expedition, or None if there are none."""
        days = self.expeditionday_set.all()
        return days and days[0] or None

    def day_max(self):
        """Last ExpeditionDay of the expedition, or None if there are none."""
        days = self.expeditionday_set.all()
        return days and days[len(days) - 1] or None
|
||||
|
||||
|
||||
|
||||
class ExpeditionDay(TroggleModel):
    """One calendar day of an Expedition."""
    expedition = models.ForeignKey("Expedition")
    date = models.DateField()

    class Meta:
        ordering = ('date',)

    def GetPersonTrip(self, personexpedition):
        # NOTE(review): the personexpedition argument is never used; the query
        # only narrows by this day, so this returns the first PersonTrip of
        # the day regardless of person -- confirm intended behaviour.
        trips = self.persontrip_set.filter(expeditionday=self)
        return trips and trips[0] or None
|
||||
|
||||
|
||||
#
|
||||
# single Person, can go on many years
|
||||
#
|
||||
class Person(TroggleModel):
    """A single person, who can go on many expedition years."""
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    is_vfho = models.BooleanField(help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.", default=False)
    mug_shot = models.CharField(max_length=100, blank=True,null=True)
    blurb = models.TextField(blank=True,null=True)

    orderref = models.CharField(max_length=200) # for alphabetic

    # notability/bisnotable used to be stored fields; they are methods below.
    user = models.OneToOneField(User, null=True, blank=True)

    class Meta:
        verbose_name_plural = "People"
        ordering = ('orderref',)  # "Wookey" makes too complex for: ('last_name', 'first_name')

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('person', kwargs={'first_name': self.first_name, 'last_name': self.last_name}))

    def __unicode__(self):
        if not self.last_name:
            return self.first_name
        return "%s %s" % (self.first_name, self.last_name)

    def notability(self):
        """Score for listing the most notable people; recent years weigh more."""
        score = Decimal(0)
        for pe in self.personexpedition_set.all():
            if pe.is_guest:
                continue
            # NOTE(review): 2012 looks like a hard-coded "current year" -- confirm.
            score += Decimal(1) / (2012 - int(pe.expedition.year))
        return score

    def bisnotable(self):
        """True when the notability score exceeds 1/3."""
        return self.notability() > Decimal(1) / Decimal(3)

    def surveyedleglength(self):
        """Total surveyed leg length over all this person's expeditions."""
        return sum([pe.surveyedleglength() for pe in self.personexpedition_set.all()])

    def first(self):
        # NOTE(review): '-expedition' sorts newest first, so "first" returns
        # the most recent PersonExpedition (and "last" the earliest); the
        # names look swapped -- confirm against the templates before changing.
        return self.personexpedition_set.order_by('-expedition')[0]

    def last(self):
        return self.personexpedition_set.order_by('expedition')[0]
|
||||
|
||||
|
||||
#
|
||||
# Person's attenance to one Expo
|
||||
#
|
||||
class PersonExpedition(TroggleModel):
    """A Person's attendance at one Expedition."""
    expedition = models.ForeignKey(Expedition)
    person = models.ForeignKey(Person)
    slugfield = models.SlugField(max_length=50,blank=True,null=True)

    is_guest = models.BooleanField(default=False)
    COMMITTEE_CHOICES = (
        ('leader','Expo leader'),
        ('medical','Expo medical officer'),
        ('treasurer','Expo treasurer'),
        ('sponsorship','Expo sponsorship coordinator'),
        ('research','Expo research coordinator'),
        )
    expo_committee_position = models.CharField(blank=True,null=True,choices=COMMITTEE_CHOICES,max_length=200)
    nickname = models.CharField(max_length=100,blank=True,null=True)

    class Meta:
        ordering = ('-expedition',)

    def GetPersonroles(self):
        """Return [{date, survexpath, roles}], merging consecutive roles on
        the same survex block path into one comma-separated entry."""
        rows = []
        for personrole in self.personrole_set.order_by('survexblock'):
            block = personrole.survexblock
            if rows and rows[-1]['survexpath'] == block.survexpath:
                rows[-1]['roles'] += ", " + str(personrole.role)
            else:
                rows.append({'date': block.date,
                             'survexpath': block.survexpath,
                             'roles': str(personrole.role)})
        return rows

    def __unicode__(self):
        return "%s: (%s)" % (self.person, self.expedition)

    # why is the below a function in personexpedition, rather than in person? - AC 14 Feb 09
    def name(self):
        """Display name, with the nickname inserted when there is one."""
        first = self.person.first_name
        last = self.person.last_name
        if self.nickname:
            return "%s (%s) %s" % (first, self.nickname, last)
        if last:
            return "%s %s" % (first, last)
        return first

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('personexpedition', kwargs={'first_name': self.person.first_name, 'last_name': self.person.last_name, 'year': self.expedition.year}))

    def surveyedleglength(self):
        """Total leg length over the distinct survex blocks this person worked on."""
        blocks = set(personrole.survexblock for personrole in self.personrole_set.all())
        return sum([block.totalleglength for block in blocks])

    # would prefer to return actual person trips so we could link to first and last ones
    def day_min(self):
        return self.persontrip_set.aggregate(day_min=Min("expeditionday__date"))["day_min"]

    def day_max(self):
        return self.persontrip_set.all().aggregate(day_max=Max("expeditionday__date"))["day_max"]
|
||||
|
||||
#
|
||||
# Single parsed entry from Logbook
|
||||
#
|
||||
class LogbookEntry(TroggleModel):
    """A single parsed entry from an expedition logbook."""
    date = models.DateField()#MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)#MJG wants to KILL THIS (redundant information)
    expedition = models.ForeignKey(Expedition,blank=True,null=True) # yes this is double-
    # An "author" FK was removed: it is typically redundant with
    # PersonTrip.is_logbook_entry_author (and should have been "typist" anyway).
    title = models.CharField(max_length=settings.MAX_LOGBOOK_ENTRY_TITLE_LENGTH)
    cave_slug = models.SlugField(max_length=50)
    place = models.CharField(max_length=100,blank=True,null=True,help_text="Only use this if you haven't chosen a cave")
    text = models.TextField()
    slug = models.SlugField(max_length=50)
    filename = models.CharField(max_length=200,null=True)

    class Meta:
        verbose_name_plural = "Logbook Entries"
        # several PersonTrips point in to this object
        ordering = ('-date',)

    def __getattribute__(self, item):
        # Allow a logbookentry's cave to be directly accessed despite not
        # having a proper foreign key: ".cave" is resolved via cave_slug.
        if item == "cave":
            return CaveSlug.objects.get(slug = self.cave_slug).cave
        return super(LogbookEntry, self).__getattribute__(item)

    def __init__(self, *args, **kwargs):
        # Accept a "cave" keyword and translate it to the cave's primary slug.
        if "cave" in kwargs.keys():
            if kwargs["cave"] is not None:
                kwargs["cave_slug"] = CaveSlug.objects.get(cave=kwargs["cave"], primary=True).slug
            kwargs.pop("cave")
        return super(LogbookEntry, self).__init__(*args, **kwargs)

    def isLogbookEntry(self): # Function used in templates
        return True

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('logbookentry',kwargs={'date':self.date,'slug':self.slug}))

    def __unicode__(self):
        return "%s: (%s)" % (self.date, self.title)

    def get_next_by_id(self):
        # BUG FIX: the looked-up object was fetched and discarded; return it
        # (matching QM.get_next_by_id).
        return LogbookEntry.objects.get(id=self.id+1)

    def get_previous_by_id(self):
        # BUG FIX: the looked-up object was fetched and discarded; return it.
        return LogbookEntry.objects.get(id=self.id-1)

    def new_QM_number(self):
        """Return the next QM number for this entry's cave and year,
        or None if the entry has no cave."""
        if self.cave:
            nextQMnumber=self.cave.new_QM_number(self.date.year)
        else:
            # BUG FIX: was "return none" (lowercase), a NameError at runtime.
            return None
        return nextQMnumber

    def new_QM_found_link(self):
        """Produces a link to a new QM with the next number filled in and this LogbookEntry set as 'found by' """
        return settings.URL_ROOT + r'/admin/core/qm/add/?' + r'found_by=' + str(self.pk) +'&number=' + str(self.new_QM_number())

    def DayIndex(self):
        """Position of this entry among the day's entries (0-based)."""
        return list(self.expeditionday.logbookentry_set.all()).index(self)
|
||||
|
||||
#
|
||||
# Single Person going on a trip, which may or may not be written up (accounts for different T/U for people in same logbook entry)
|
||||
#
|
||||
class PersonTrip(TroggleModel):
    """One person on one trip; accounts for different T/U for people
    sharing the same logbook entry, whether or not written up."""
    personexpedition = models.ForeignKey("PersonExpedition",null=True)

    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry)
    is_logbook_entry_author = models.BooleanField(default=False)

    # sequencing by person (difficult to solve locally) -- the stored
    # next/prev FKs were replaced by the query methods below.

    def persontrip_next(self):
        """The same person's next trip by logbook date, or None."""
        later = PersonTrip.objects.filter(personexpedition=self.personexpedition,
                                          logbook_entry__date__gt=self.logbook_entry.date).order_by('logbook_entry__date').all()
        if len(later) > 0:
            return later[0]
        return None

    def persontrip_prev(self):
        """The same person's previous trip by logbook date, or None."""
        earlier = PersonTrip.objects.filter(personexpedition=self.personexpedition,
                                            logbook_entry__date__lt=self.logbook_entry.date).order_by('-logbook_entry__date').all()
        if len(earlier) > 0:
            return earlier[0]
        return None

    def place(self):
        """The trip's cave if the logbook entry has one, else its free-text place."""
        entry = self.logbook_entry
        return entry.cave and entry.cave or entry.place

    def __unicode__(self):
        return "%s (%s)" % (self.personexpedition, self.logbook_entry.date)
|
||||
|
||||
|
||||
|
||||
##########################################
|
||||
# move following classes into models_cave
|
||||
##########################################
|
||||
|
||||
class Area(TroggleModel):
    """A cave area, possibly nested under a parent area."""
    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True,null=True)
    parent = models.ForeignKey('Area', blank=True, null=True)

    def __unicode__(self):
        if not self.parent:
            return unicode(self.short_name)
        return unicode(self.parent) + u" - " + unicode(self.short_name)

    def kat_area(self):
        """Return the kataster area code ("1623" or "1626"), walking up
        parents; implicitly None for areas outside both."""
        if self.short_name in ["1623", "1626"]:
            return self.short_name
        if self.parent:
            return self.parent.kat_area()
|
||||
|
||||
class CaveAndEntrance(models.Model):
    """Through-model linking a Cave to an Entrance, with a letter such as 'b'."""
    cave = models.ForeignKey('Cave')
    entrance = models.ForeignKey('Entrance')
    entrance_letter = models.CharField(max_length=20,blank=True,null=True)

    def __unicode__(self):
        # e.g. "204" + "b" -> "204b"
        return unicode(self.cave) + unicode(self.entrance_letter)
|
||||
|
||||
class CaveSlug(models.Model):
    """A URL slug for a Cave; one slug per cave is flagged primary."""
    cave = models.ForeignKey('Cave')
    slug = models.SlugField(max_length=50, unique = True)
    primary = models.BooleanField(default=False)
|
||||
|
||||
|
||||
class Cave(TroggleModel):
    """A cave, identified by kataster or unofficial number, with entrances,
    descriptions and survey metadata."""
    # too much here perhaps,
    official_name = models.CharField(max_length=160)
    area = models.ManyToManyField(Area, blank=True, null=True)
    kataster_code = models.CharField(max_length=20,blank=True,null=True)
    kataster_number = models.CharField(max_length=10,blank=True, null=True)
    unofficial_number = models.CharField(max_length=60,blank=True, null=True)
    # NOTE(review): this M2M field is shadowed by the entrances() method
    # below; renaming either would break existing callers/templates.
    entrances = models.ManyToManyField('Entrance', through='CaveAndEntrance')
    explorers = models.TextField(blank=True,null=True)
    underground_description = models.TextField(blank=True,null=True)
    equipment = models.TextField(blank=True,null=True)
    references = models.TextField(blank=True,null=True)
    survey = models.TextField(blank=True,null=True)
    kataster_status = models.TextField(blank=True,null=True)
    underground_centre_line = models.TextField(blank=True,null=True)
    notes = models.TextField(blank=True,null=True)
    length = models.CharField(max_length=100,blank=True,null=True)
    depth = models.CharField(max_length=100,blank=True,null=True)
    extent = models.CharField(max_length=100,blank=True,null=True)
    survex_file = models.CharField(max_length=100,blank=True,null=True)
    description_file = models.CharField(max_length=200,blank=True,null=True)
    url = models.CharField(max_length=200,blank=True,null=True)
    filename = models.CharField(max_length=200)

    #class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need seperating

    class Meta:
        ordering = ('kataster_code', 'unofficial_number')

    def hassurvey(self):
        """"Yes"/"Missing"/"No": does the survey text embed an image or link?"""
        if not self.underground_centre_line:
            return "No"
        if (self.survey.find("<img") > -1 or self.survey.find("<a") > -1 or self.survey.find("<IMG") > -1 or self.survey.find("<A") > -1):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        """"Yes"/"Missing"/"No": is there a survex file for the centre line?"""
        if not self.underground_centre_line:
            return "No"
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        """The primary slug if one exists, else any slug, else None."""
        primarySlugs = self.caveslug_set.filter(primary = True)
        if primarySlugs:
            return primarySlugs[0].slug
        slugs = self.caveslug_set.filter()
        if slugs:
            return slugs[0].slug

    def ours(self):
        """True when CUCC appears in the explorers text."""
        return bool(re.search(r'CUCC', self.explorers))

    def reference(self):
        """"area-number" string, preferring the kataster number."""
        if self.kataster_number:
            return "%s-%s" % (self.kat_area(), self.kataster_number)
        return "%s-%s" % (self.kat_area(), self.unofficial_number)

    def get_absolute_url(self):
        if self.kataster_number:
            href = self.kataster_number
        elif self.unofficial_number:
            href = self.unofficial_number
        else:
            # BUG FIX: was "official_name.lower()" without "self.", a NameError.
            href = self.official_name.lower()
        return urlparse.urljoin(settings.URL_ROOT, reverse('cave',kwargs={'cave_id':href,}))

    def __unicode__(self, sep = u": "):
        return unicode(self.slug())

    def get_QMs(self):
        """All QMs found in logbook entries attached to any of this cave's slugs."""
        # BUG FIX: was "found_by__cave_slug=<queryset>" (an exact lookup
        # against a queryset); use __in, as get_related_by_wikilinks does.
        return QM.objects.filter(found_by__cave_slug__in=self.caveslug_set.all())

    def new_QM_number(self, year=None):
        """Given a cave and the current year, returns the next QM number."""
        # BUG FIX: the default was "year=datetime.date.today().year",
        # evaluated once at import time; compute it per call instead.
        if year is None:
            year = datetime.date.today().year
        try:
            res=QM.objects.filter(found_by__date__year=year, found_by__cave=self).order_by('-number')[0]
        except IndexError:
            return 1
        return res.number+1

    def kat_area(self):
        """The kataster area of the first area that has one."""
        for a in self.area.all():
            if a.kat_area():
                return a.kat_area()

    def entrances(self):
        """The CaveAndEntrance rows for this cave (shadows the M2M field)."""
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        """True when the cave has exactly one entrance."""
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        """Compact listing of entrance letters, e.g. "a, c" or "a–d"."""
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            rs.append(e.entrance_letter)
        rs.sort()
        prevR = None   # last letter emitted or extended
        n = 0          # length of the current consecutive run
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1 ) == r:
                    # consecutive letter: extend the current run
                    prevR = r
                    n += 1
                else:
                    # run broken: flush it, as a single letter or a range
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                prevR = r
                n = 0
                res += r
        # flush the final run
        if n == 0:
            res += ", " + prevR
        else:
            res += "–" + prevR
        return res

    def writeDataFile(self):
        """Render this cave to its XML data file; fix permissions and retry on failure."""
        try:
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.CAVEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/cave.xml')
        c = Context({'cave': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()

    def getArea(self):
        """The most specific of this cave's areas (one whose parent is not
        also attached to the cave)."""
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.parent in areas:
                try:
                    lowestareas.remove(area.parent)
                except:
                    pass
        return lowestareas[0]
|
||||
|
||||
def getCaveByReference(reference):
    """Look up the single cave matching an "area-code" reference like "1623-204".

    The code part is matched against both kataster_number and
    unofficial_number; asserts exactly one cave matches.
    """
    areaname, code = reference.split("-", 1)
    print(areaname, code)
    area = Area.objects.get(short_name = areaname)
    print(area)
    by_kataster = list(Cave.objects.filter(area = area, kataster_number = code).all())
    by_unofficial = list(Cave.objects.filter(area = area, unofficial_number = code).all())
    foundCaves = by_kataster + by_unofficial
    print(list(foundCaves))
    assert len(foundCaves) == 1
    return foundCaves[0]
|
||||
|
||||
class OtherCaveName(TroggleModel):
    """An alternative name for a cave."""
    name = models.CharField(max_length=160)
    cave = models.ForeignKey(Cave)

    def __unicode__(self):
        return unicode(self.name)
|
||||
|
||||
class EntranceSlug(models.Model):
    """A URL slug for an Entrance; one slug per entrance is flagged primary."""
    entrance = models.ForeignKey('Entrance')
    slug = models.SlugField(max_length=50, unique = True)
    primary = models.BooleanField(default=False)
|
||||
|
||||
class Entrance(TroggleModel):
    """A cave entrance: descriptions, marking, and how findable it is."""
    name = models.CharField(max_length=100, blank=True,null=True)
    entrance_description = models.TextField(blank=True,null=True)
    explorers = models.TextField(blank=True,null=True)
    map_description = models.TextField(blank=True,null=True)
    location_description = models.TextField(blank=True,null=True)
    approach = models.TextField(blank=True,null=True)
    underground_description = models.TextField(blank=True,null=True)
    photo = models.TextField(blank=True,null=True)
    MARKING_CHOICES = (
        ('P', 'Paint'),
        ('P?', 'Paint (?)'),
        ('T', 'Tag'),
        ('T?', 'Tag (?)'),
        ('R', 'Needs Retag'),
        ('S', 'Spit'),
        ('S?', 'Spit (?)'),
        ('U', 'Unmarked'),
        ('?', 'Unknown'))
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True,null=True)
    FINDABLE_CHOICES = (
        ('?', 'To be confirmed ...'),
        ('S', 'Coordinates'),
        ('L', 'Lost'),
        ('R', 'Refindable'))
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True,null=True)
    alt = models.TextField(blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    tag_station = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True,null=True)
    bearings = models.TextField(blank=True,null=True)
    url = models.CharField(max_length=200,blank=True,null=True)
    filename = models.CharField(max_length=200)
    cached_primary_slug = models.CharField(max_length=200,blank=True,null=True)

    def __unicode__(self):
        return unicode(self.slug())

    def exact_location(self):
        return SurvexStation.objects.lookup(self.exact_station)

    def other_location(self):
        return SurvexStation.objects.lookup(self.other_station)

    def find_location(self):
        """Human-readable location: a findability prefix plus coordinates from
        the best available survex station (tag, then exact, then other)."""
        r = {'': 'To be entered ',
             '?': 'To be confirmed:',
             'S': '',
             'L': 'Lost:',
             'R': 'Refindable:'}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                return r + "%s Tag Station not in dataset" % self.tag_station
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + "%0.0fE %0.0fN %0.0fAlt" % (s.x, s.y, s.z)
            except:
                # BUG FIX: the message previously reported self.tag_station.
                return r + "%s Exact Station not in dataset" % self.exact_station
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + "%0.0fE %0.0fN %0.0fAlt %s" % (s.x, s.y, s.z, self.other_description)
            except:
                # BUG FIX: the message previously reported self.tag_station.
                return r + "%s Other Station not in dataset" % self.other_station
        # BUG FIX: was "if self.FINDABLE_CHOICES == 'S'", comparing the
        # choices tuple to a string (always False); the field was intended.
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """The most precise station name available, or None if there is none."""
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        """"Yes"/"Missing"/"No": does the photo text embed an image or link?"""
        if not self.photo:
            return "No"
        if (self.photo.find("<img") > -1 or self.photo.find("<a") > -1 or self.photo.find("<IMG") > -1 or self.photo.find("<A") > -1):
            return "Yes"
        return "Missing"

    def marking_val(self):
        """Display label for the current marking code."""
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        """Display label for the current findability code."""
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return SurvexStation.objects.lookup(self.tag_station)

    def needs_surface_work(self):
        # NOTE(review): "not self.has_photo" tests the bound method object,
        # which is always truthy, so that term is always False; probably
        # "self.has_photo() != 'Yes'" was intended -- confirm before changing.
        return self.findability != "S" or not self.has_photo or self.marking != "T"

    def get_absolute_url(self):
        # NOTE(review): get_ancestors/get_root/title do not exist on this
        # model in the visible code; this looks copied from a tree model and
        # would raise AttributeError if called -- confirm before relying on it.
        ancestor_titles='/'.join([subcave.title for subcave in self.get_ancestors()])
        if ancestor_titles:
            res = '/'.join((self.get_root().cave.get_absolute_url(), ancestor_titles, self.title))
        else:
            res = '/'.join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def slug(self):
        """The primary (or failing that, any) slug, cached on the model row."""
        if not self.cached_primary_slug:
            primarySlugs = self.entranceslug_set.filter(primary = True)
            if primarySlugs:
                self.cached_primary_slug = primarySlugs[0].slug
                self.save()
            else:
                slugs = self.entranceslug_set.filter()
                if slugs:
                    self.cached_primary_slug = slugs[0].slug
                    self.save()
        return self.cached_primary_slug

    def writeDataFile(self):
        """Render this entrance to its XML data file; fix permissions and retry on failure."""
        try:
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        except:
            subprocess.call(settings.FIX_PERMISSIONS)
            f = open(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename), "w")
        t = loader.get_template('dataformat/entrance.xml')
        c = Context({'entrance': self})
        u = t.render(c)
        u8 = u.encode("utf-8")
        f.write(u8)
        f.close()
|
||||
|
||||
class CaveDescription(TroggleModel):
    """A free-standing cave description linked to subcaves, entrances and QMs."""
    short_name = models.CharField(max_length=50, unique = True)
    long_name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True,null=True)
    linked_subcaves = models.ManyToManyField("NewSubCave", blank=True,null=True)
    linked_entrances = models.ManyToManyField("Entrance", blank=True,null=True)
    linked_qms = models.ManyToManyField("QM", blank=True,null=True)

    def __unicode__(self):
        return unicode(self.long_name) if self.long_name else unicode(self.short_name)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('cavedescription', args=(self.short_name,)))

    def save(self):
        """
        Overridden save method which stores wikilinks in text as links in database.
        """
        # First save so the row exists, then attach the QMs referenced by
        # wikilinks in the description, then save again.
        super(CaveDescription, self).save()
        for qm in get_related_by_wikilinks(self.description):
            self.linked_qms.add(qm)
        super(CaveDescription, self).save()
|
||||
|
||||
class NewSubCave(TroggleModel):
    """A named subcave (section of a cave)."""
    name = models.CharField(max_length=200, unique = True)

    def __unicode__(self):
        return unicode(self.name)
|
||||
|
||||
class QM(TroggleModel):
    """A "question mark": an open lead noted during surveying.

    Based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    "Number","Grade","Area","Description","Page reference",
    "Nearest station","Completion description","Comment"
    """
    found_by = models.ForeignKey(LogbookEntry, related_name='QMs_found',blank=True, null=True )
    ticked_off_by = models.ForeignKey(LogbookEntry, related_name='QMs_ticked_off',null=True,blank=True)

    number = models.IntegerField(help_text="this is the sequential number in the year", )
    GRADE_CHOICES=(
        ('A', 'A: Large obvious lead'),
        ('B', 'B: Average lead'),
        ('C', 'C: Tight unpromising lead'),
        ('D', 'D: Dig'),
        ('X', 'X: Unclimbable aven')
    )
    grade = models.CharField(max_length=1, choices=GRADE_CHOICES)
    location_description = models.TextField(blank=True)
    #should be a foreignkey to surveystation
    nearest_station_description = models.CharField(max_length=400,null=True,blank=True)
    nearest_station = models.CharField(max_length=200,blank=True,null=True)
    area = models.CharField(max_length=100,blank=True,null=True)
    completion_description = models.TextField(blank=True,null=True)
    comment=models.TextField(blank=True,null=True)

    def __unicode__(self):
        return u"%s %s" % (self.code(), self.grade)

    def code(self):
        """Canonical "cave-year-number" code for this QM."""
        # [6:] drops the first six characters of the cave's unicode form
        # (presumably a "1623-"-style area prefix) -- confirm.
        return u"%s-%s-%s" % (unicode(self.found_by.cave)[6:], self.found_by.date.year, self.number)

    def get_absolute_url(self):
        return urlparse.urljoin(settings.URL_ROOT, reverse('qm', kwargs={
            'cave_id': self.found_by.cave.kataster_number,
            'year': self.found_by.date.year,
            'qm_id': self.number,
            'grade': self.grade}))

    def get_next_by_id(self):
        return QM.objects.get(id=self.id+1)

    def get_previous_by_id(self):
        return QM.objects.get(id=self.id-1)

    def wiki_link(self):
        """Wiki markup linking to this QM, e.g. [[QM:204-1999-12]]."""
        return u"%s%s%s" % ('[[QM:',self.code(),']]')
|
||||
|
||||
# Storage for photographs, served from PHOTOS_URL.
photoFileStorage = FileSystemStorage(location=settings.PHOTOS_ROOT, base_url=settings.PHOTOS_URL)

class DPhoto(TroggleImageModel):
    """A photograph, optionally linked to the things and people it depicts."""
    caption = models.CharField(max_length=1000,blank=True,null=True)
    contains_logbookentry = models.ForeignKey(LogbookEntry,blank=True,null=True)
    contains_person = models.ManyToManyField(Person,blank=True,null=True)
    file = models.ImageField(storage=photoFileStorage, upload_to='.',)
    is_mugshot = models.BooleanField(default=False)
    contains_cave = models.ForeignKey(Cave,blank=True,null=True)
    contains_entrance = models.ForeignKey(Entrance, related_name="photo_file",blank=True,null=True)
    nearest_QM = models.ForeignKey(QM,blank=True,null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    class IKOptions:
        # imagekit configuration: where specs live and where thumbs are cached
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    def __unicode__(self):
        return self.caption
|
||||
|
||||
# Storage backend for scanned survey notes: files live under SURVEY_SCANS
# and are served from SURVEYS_URL.
scansFileStorage = FileSystemStorage(location=settings.SURVEY_SCANS, base_url=settings.SURVEYS_URL)
|
||||
def get_scan_path(instance, filename):
    """Build the relative storage path for a scanned survey image.

    Layout is ./<year>/<year>#<wallet>/<contents><number_in_wallet>.jpg,
    following the wallet naming convention 2009#01 or 2009#X01.

    instance -- a ScannedImage (needs .survey, .contents, .number_in_wallet)
    filename -- the uploaded filename; ignored, the path is fully derived.
    """
    survey = instance.survey
    year = survey.expedition.year
    #print("WN: ", type(instance.survey.wallet_number), instance.survey.wallet_number, instance.survey.wallet_letter)
    number = str(survey.wallet_number)
    # Prefix the wallet letter when there is one.
    # (Was: str(wallet_letter) != "None" -- a stringly-typed None test;
    # an identity check says what is meant.)
    if survey.wallet_letter is not None:
        number = str(survey.wallet_letter) + number
    wallet = year + '#' + number
    return os.path.join('./', year, wallet, str(instance.contents) + str(instance.number_in_wallet) + '.jpg')
|
||||
|
||||
class ScannedImage(TroggleImageModel):
    """One scanned page from a survey wallet: notes, plan sketch or
    elevation sketch, stored under the path built by get_scan_path().
    """
    file = models.ImageField(storage=scansFileStorage, upload_to=get_scan_path)
    scanned_by = models.ForeignKey(Person,blank=True, null=True)
    scanned_on = models.DateField(null=True)
    survey = models.ForeignKey('Survey')
    contents = models.CharField(max_length=20,choices=(('notes','notes'),('plan','plan_sketch'),('elevation','elevation_sketch')))
    number_in_wallet = models.IntegerField(null=True)
    lon_utm = models.FloatField(blank=True,null=True)
    lat_utm = models.FloatField(blank=True,null=True)

    # django-imagekit configuration: thumbnail specs come from
    # core.imagekit_specs and are cached in 'thumbs'.
    class IKOptions:
        spec_module = 'core.imagekit_specs'
        cache_dir = 'thumbs'
        image_field = 'file'

    #content_type = models.ForeignKey(ContentType)
    #object_id = models.PositiveIntegerField()
    #location = generic.GenericForeignKey('content_type', 'object_id')

    #This is an ugly hack to deal with the #s in our survey scan paths. The correct thing is to write a custom file storage backend which calls urlencode on the name for making file.url but not file.path.
    def correctURL(self):
        """Return file.url with '#' percent-encoded so browsers do not
        treat it as a fragment separator.

        BUG FIX: string.replace() is a Python-2-only module function
        (removed in Python 3); the str method behaves identically on both.
        """
        return self.file.url.replace(r'#', r'%23')

    def __unicode__(self):
        # Display the derived wallet path rather than the raw file name.
        return get_scan_path(self, '')
|
||||
|
||||
class Survey(TroggleModel):
    """One survey trip's paperwork: the wallet (number/letter), its survex
    block, tunnel sketch files and printing/integration bookkeeping.
    """
    expedition = models.ForeignKey('Expedition') #REDUNDANT (logbook_entry)
    wallet_number = models.IntegerField(blank=True,null=True)
    wallet_letter = models.CharField(max_length=1,blank=True,null=True)
    comments = models.TextField(blank=True,null=True)
    location = models.CharField(max_length=400,blank=True,null=True) #REDUNDANT
    subcave = models.ForeignKey('NewSubCave', blank=True, null=True)
    #notes_scan = models.ForeignKey('ScannedImage',related_name='notes_scan',blank=True, null=True) #Replaced by contents field of ScannedImage model
    survex_block = models.OneToOneField('SurvexBlock',blank=True, null=True)
    logbook_entry = models.ForeignKey('LogbookEntry')
    centreline_printed_on = models.DateField(blank=True, null=True)
    centreline_printed_by = models.ForeignKey('Person',related_name='centreline_printed_by',blank=True,null=True)
    #sketch_scan = models.ForeignKey(ScannedImage,blank=True, null=True) #Replaced by contents field of ScannedImage model
    tunnel_file = models.FileField(upload_to='surveyXMLfiles',blank=True, null=True)
    tunnel_main_sketch = models.ForeignKey('Survey',blank=True,null=True)
    integrated_into_main_sketch_on = models.DateField(blank=True,null=True)
    integrated_into_main_sketch_by = models.ForeignKey('Person' ,related_name='integrated_into_main_sketch_by', blank=True,null=True)
    rendered_image = models.ImageField(upload_to='renderedSurveys',blank=True,null=True)

    def __unicode__(self):
        # Wallet name, e.g. "2009#01".
        # NOTE(review): wallet_number is nullable; int(None) would raise
        # TypeError here -- confirm every Survey has a wallet_number.
        return self.expedition.year+"#"+"%02d" % int(self.wallet_number)

    def notes(self):
        """QuerySet of this survey's scanned notes pages."""
        return self.scannedimage_set.filter(contents='notes')

    def plans(self):
        """QuerySet of this survey's scanned plan sketches."""
        return self.scannedimage_set.filter(contents='plan')

    def elevations(self):
        """QuerySet of this survey's scanned elevation sketches."""
        return self.scannedimage_set.filter(contents='elevation')
|
||||
0
core/models/__init__.py
Normal file
0
core/models/__init__.py
Normal file
693
core/models/caves.py
Normal file
693
core/models/caves.py
Normal file
@@ -0,0 +1,693 @@
|
||||
import os
|
||||
import os
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
from django.db import models
|
||||
from django.template import loader
|
||||
|
||||
import settings
|
||||
from troggle.core.models.logbooks import QM
|
||||
from troggle.core.models.survex import SurvexStation
|
||||
from troggle.core.models.troggle import DataIssue, TroggleModel
|
||||
from troggle.core.utils import TROG, writetrogglefile
|
||||
|
||||
# Use the TROG global object to cache the cave lookup list. No good for multi-user..
Gcavelookup = TROG["caves"]["gcavelookup"]
Gcave_count = TROG["caves"]["gcavecount"]

# NOTE(review): the two assignments below immediately discard the TROG
# values read above, so the TROG cache is effectively dead here and the
# lookup is rebuilt lazily by GetCaveLookup() -- confirm intent.
Gcavelookup = None
Gcave_count = None
|
||||
|
||||
"""The model declarations for Areas, Caves and Entrances
|
||||
"""
|
||||
|
||||
todo = """
|
||||
- Find out why we have separate objects CaveSlug and why
|
||||
these are not just a single field on the Model. Do we ever need more
|
||||
than one slug per cave or entrance? Surely that would break everything??
|
||||
|
||||
- Can we rewrite things to eliminate the CaveSlug and objects? Surely
|
||||
foreign keys work fine ?!
|
||||
|
||||
- Why do we have CaveAndEntrance objects ? Surely entranceletter belong son the Entrance object?
|
||||
|
||||
- move the aliases list from the code and put into an editable file
|
||||
|
||||
- Restore constraint: unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
|
||||
"""
|
||||
|
||||
|
||||
class Area(TroggleModel):
    """A kataster area (e.g. "1623") or a CUCC-defined sub-area, arranged
    in a tree via the self-referential 'super' link.
    """

    short_name = models.CharField(max_length=100)
    name = models.CharField(max_length=200, blank=True, null=True)
    description = models.TextField(blank=True, null=True)
    super = models.ForeignKey("Area", blank=True, null=True, on_delete=models.SET_NULL)

    def __str__(self):
        label = str(self.short_name)
        if self.super:
            label = f"{self.super} - {label}"
        return label

    def kat_area(self):
        """Walk up the tree and return the enclosing kataster area name,
        or None if no ancestor is one of the known kataster areas."""
        if self.short_name in ("1623", "1626", "1624", "1627"):
            return self.short_name
        if self.super:
            return self.super.kat_area()
|
||||
|
||||
|
||||
class CaveAndEntrance(models.Model):
    """Join row tying one Entrance to one Cave, carrying the entrance
    letter ("a", "b", ...) used when a cave has several entrances.

    Exists mainly so a FormSet can edit a cave and all its entrances in
    one form. CASCADE: deleting either the cave or the entrance deletes
    this row as well.
    """

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    entrance = models.ForeignKey("Entrance", on_delete=models.CASCADE)
    entrance_letter = models.CharField(max_length=20, blank=True, null=True)

    class Meta:
        unique_together = [["cave", "entrance"], ["cave", "entrance_letter"]]
        ordering = ["entrance_letter"]

    def __str__(self):
        return f"{self.cave}{self.entrance_letter}"
|
||||
|
||||
# class CaveSlug(models.Model):
|
||||
# moved to models/logbooks.py to avoid cyclic import problem
|
||||
|
||||
class Cave(TroggleModel):
    """A cave: kataster identity, description metadata, and links to its
    entrances (through CaveAndEntrance) and its QMs.

    The authoritative description lives in an XML file (self.filename)
    under settings.CAVEDESCRIPTIONS; writeDataFile()/file_output()
    regenerate it from the database.
    """

    # too much here perhaps,
    area = models.ManyToManyField(Area, blank=False)
    depth = models.CharField(max_length=100, blank=True, null=True)
    description_file = models.CharField(max_length=200, blank=True, null=True)
    # NOTE(review): this field is shadowed by the entrances() method below;
    # use the through-model queries instead of the descriptor.
    entrances = models.ManyToManyField("Entrance", through="CaveAndEntrance")
    equipment = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    extent = models.CharField(max_length=100, blank=True, null=True)
    filename = models.CharField(max_length=200)
    kataster_code = models.CharField(max_length=20, blank=True, null=True)
    kataster_number = models.CharField(max_length=10, blank=True, null=True)
    kataster_status = models.TextField(blank=True, null=True)
    length = models.CharField(max_length=100, blank=True, null=True)
    notes = models.TextField(blank=True, null=True)
    official_name = models.CharField(max_length=160)
    references = models.TextField(blank=True, null=True)
    survex_file = models.CharField(max_length=100, blank=True, null=True)  # should be a foreign key
    survey = models.TextField(blank=True, null=True)
    underground_centre_line = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    unofficial_number = models.CharField(max_length=60, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True, unique = True)

    # class Meta:
    #    unique_together = (("area", "kataster_number"), ("area", "unofficial_number"))
    # FIXME Kataster Areas and CUCC defined sub areas need seperating

    # href = models.CharField(max_length=100)

    class Meta:
        ordering = ("kataster_code", "unofficial_number")

    def hassurvey(self):
        """This is almost certainly a fossil - needs checking..."""
        if not self.underground_centre_line:
            return "No"
        # NOTE(review): self.survey is nullable; .find() on None would
        # raise AttributeError -- confirm callers reach here with a string.
        if (
            self.survey.find("<img") > -1
            or self.survey.find("<a") > -1
            or self.survey.find("<IMG") > -1
            or self.survey.find("<A") > -1
        ):
            return "Yes"
        return "Missing"

    def hassurveydata(self):
        """Return "Yes"/"Missing"/"No" depending on centreline/survex file.

        BUG FIX: the attribute was misspelled 'survex_filcavee', which
        raised AttributeError whenever a centreline was present.
        """
        if not self.underground_centre_line:
            return "No"
        if self.survex_file:
            return "Yes"
        return "Missing"

    def slug(self):
        """Return the primary CaveSlug string, falling back to any slug;
        None if the cave has no slugs at all."""
        primarySlugs = self.caveslug_set.filter(primary=True)
        if primarySlugs:
            return primarySlugs[0].slug
        else:
            slugs = self.caveslug_set.filter()
            if slugs:
                return slugs[0].slug

    def ours(self):
        """True if CUCC appears in the explorers text."""
        return bool(re.search(r"CUCC", self.explorers))

    def number(self):
        """Kataster number if assigned, otherwise the unofficial number."""
        if self.kataster_number:
            return self.kataster_number
        else:
            return self.unofficial_number

    def reference(self):
        """Return "<kat_area>-<number>", e.g. "1623-204"."""
        return f"{self.kat_area()}-{self.number()}"

    def get_absolute_url(self):
        # NOTE(review): the branches below are dead code (pass / discarded
        # expression) -- kept pending confirmation of the original intent.
        if self.kataster_number:
            pass
        elif self.unofficial_number:
            pass
        else:
            self.official_name.lower()
        return Path(settings.URL_ROOT) / self.url  # not good Django style.. NEEDS actual URL

    def url_parent(self):
        """The URL one level up from this cave's page."""
        return self.url.rsplit("/", 1)[0]

    def __str__(self, sep=": "):
        return str(self.slug())

    def get_open_QMs(self):
        """Searches for all QMs that reference this cave."""
        # qms = self.qm_set.all().order_by('expoyear', 'block__date')
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )  # a QuerySet, see https://docs.djangoproject.com/en/dev/ref/models/querysets/#order-by
        qmsopen = qms.filter(ticked=False)
        return qmsopen  # a QuerySet

    def get_ticked_QMs(self):
        """Searches for all QMs that reference this cave."""
        qms = QM.objects.filter(cave=self).order_by(
            "expoyear", "block__date"
        )
        qmticked = qms.filter(ticked=True)
        return qmticked  # a QuerySet

    def get_QMs(self):
        """All QMs for this cave, open and ticked."""
        qms = self.get_open_QMs() | self.get_ticked_QMs()  # set union operation
        return qms  # a QuerySet

    def kat_area(self):
        """Kataster area name from the first area that has one; "" on error."""
        try:
            for a in self.area.all():
                if a.kat_area():
                    return a.kat_area()
        except:
            return ""

    def entrances(self):
        # NOTE(review): shadows the ManyToManyField of the same name above.
        return CaveAndEntrance.objects.filter(cave=self)

    def singleentrance(self):
        """True when the cave has exactly one entrance."""
        return len(CaveAndEntrance.objects.filter(cave=self)) == 1

    def entrancelist(self):
        """Compact, sorted list of entrance letters, collapsing runs of
        consecutive letters into "a–c" style ranges.

        NOTE(review): structure reconstructed from indentation-stripped
        source; the run-collapsing logic looks fragile (prevR is not reset
        after a break in the sequence) -- verify against the live file.
        """
        rs = []
        res = ""
        for e in CaveAndEntrance.objects.filter(cave=self):
            if e.entrance_letter:
                rs.append(e.entrance_letter)
        rs.sort()
        prevR = ""
        n = 0
        for r in rs:
            if prevR:
                if chr(ord(prevR) + 1) == r:
                    prevR = r
                    n += 1
                else:
                    if n == 0:
                        res += ", " + prevR
                    else:
                        res += "–" + prevR
            else:
                prevR = r
                n = 0
                res += r
        if n == 0:
            if res:
                res += ", " + prevR
            else:
                res += "–" + prevR
        return res

    def writeDataFile(self):
        """Render this cave through dataformat/cave.xml and write the file
        (with version-control commit) via writetrogglefile()."""
        filepath = os.path.join(settings.CAVEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        print(now)
        c = dict({"cave": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def file_output(self):
        """Return (filepath, rendered-XML, encoding) without writing anything."""
        filepath = Path(os.path.join(settings.CAVEDESCRIPTIONS, self.filename))

        t = loader.get_template("dataformat/cave.xml")
        now = datetime.now(timezone.utc)
        c = dict({"cave": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def getArea(self):
        """Return the most specific (lowest) Area this cave belongs to."""
        areas = self.area.all()
        lowestareas = list(areas)
        for area in areas:
            if area.super in areas:
                try:
                    lowestareas.remove(area.super)
                except:
                    pass
        return lowestareas[0]
|
||||
|
||||
class Entrance(TroggleModel):
    """An entrance to one (or more) caves, with its location, marking and
    findability metadata. Linked to caves through CaveAndEntrance; the
    description lives in an XML file under settings.ENTRANCEDESCRIPTIONS.
    """

    MARKING_CHOICES = (
        ("P", "Paint"),
        ("P?", "Paint (?)"),
        ("T", "Tag"),
        ("T?", "Tag (?)"),
        ("R", "Needs Retag"),
        ("S", "Spit"),
        ("S?", "Spit (?)"),
        ("U", "Unmarked"),
        ("?", "Unknown"),
    )
    FINDABLE_CHOICES = (("?", "To be confirmed ..."), ("S", "Coordinates"), ("L", "Lost"), ("R", "Refindable"))
    alt = models.TextField(blank=True, null=True)
    approach = models.TextField(blank=True, null=True)
    bearings = models.TextField(blank=True, null=True)
    easting = models.TextField(blank=True, null=True)
    entrance_description = models.TextField(blank=True, null=True)
    exact_station = models.TextField(blank=True, null=True)
    explorers = models.TextField(blank=True, null=True)
    filename = models.CharField(max_length=200)
    findability = models.CharField(max_length=1, choices=FINDABLE_CHOICES, blank=True, null=True)
    findability_description = models.TextField(blank=True, null=True)
    lastvisit = models.TextField(blank=True, null=True)
    lat_wgs84 = models.TextField(blank=True, null=True)
    location_description = models.TextField(blank=True, null=True)
    long_wgs84 = models.TextField(blank=True, null=True)
    map_description = models.TextField(blank=True, null=True)
    marking = models.CharField(max_length=2, choices=MARKING_CHOICES)
    marking_comment = models.TextField(blank=True, null=True)
    name = models.CharField(max_length=100, blank=True, null=True)
    northing = models.TextField(blank=True, null=True)
    other_description = models.TextField(blank=True, null=True)
    other_station = models.TextField(blank=True, null=True)
    photo = models.TextField(blank=True, null=True)
    slug = models.SlugField(max_length=50, unique=True, default="default_slug_id")
    tag_station = models.TextField(blank=True, null=True)
    underground_description = models.TextField(blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True)

    class Meta:
        ordering = ["caveandentrance__entrance_letter"]

    def __str__(self):
        return str(self.slug)

    def single(self, station):
        """Return the single SurvexStation named 'station', the first of
        several if the name is ambiguous, or None if none match."""
        try:
            single = SurvexStation.objects.get(name = station)
            return single
        except:
            stations = SurvexStation.objects.filter(name = station)
            print(f" # MULTIPLE stations found with same name '{station}' in Entrance {self}:")
            if len(stations) > 1:
                for s in stations:
                    print(f" # {s.id=} - {s.name} {s.latlong()}")  # .id is Django internal field, not one of ours
                return stations[0]
            else:
                return None

    def exact_location(self):
        return self.single(self.exact_station)

    def other_location(self):
        return self.single(self.other_station)

    def find_location(self):
        """Human-readable location: findability prefix plus the best
        available station coordinates (tag, then exact, then other).

        BUG FIXES: the final check compared the FINDABLE_CHOICES tuple to
        "S" (always False) instead of self.findability; and the exact/other
        'not in dataset' messages reported tag_station instead of the
        station actually looked up.
        """
        r = {"": "To be entered ", "?": "To be confirmed:", "S": "", "L": "Lost:", "R": "Refindable:"}[self.findability]
        if self.tag_station:
            try:
                s = SurvexStation.objects.lookup(self.tag_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except:
                return r + f"{self.tag_station} Tag Station not in dataset"
        if self.exact_station:
            try:
                s = SurvexStation.objects.lookup(self.exact_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt"
            except:
                return r + f"{self.exact_station} Exact Station not in dataset"
        if self.other_station:
            try:
                s = SurvexStation.objects.lookup(self.other_station)
                return r + f"{s.x:0.0f}E {s.y:0.0f}N {s.z:0.0f}Alt {self.other_description}"
            except:
                return r + f"{self.other_station} Other Station not in dataset"
        if self.findability == "S":
            r += "ERROR, Entrance has been surveyed but has no survex point"
        if self.bearings:
            return r + self.bearings
        return r

    def best_station(self):
        """The most precise station name available, or None."""
        if self.tag_station:
            return self.tag_station
        if self.exact_station:
            return self.exact_station
        if self.other_station:
            return self.other_station

    def has_photo(self):
        """"Yes" if the photo field contains an img/anchor tag, "Missing"
        if it has text without one, "No" if empty."""
        if self.photo:
            if (
                self.photo.find("<img") > -1
                or self.photo.find("<a") > -1
                or self.photo.find("<IMG") > -1
                or self.photo.find("<A") > -1
            ):
                return "Yes"
            else:
                return "Missing"
        else:
            return "No"

    def marking_val(self):
        """Display label for the marking code."""
        for m in self.MARKING_CHOICES:
            if m[0] == self.marking:
                return m[1]

    def findability_val(self):
        """Display label for the findability code."""
        for f in self.FINDABLE_CHOICES:
            if f[0] == self.findability:
                return f[1]

    def tag(self):
        return self.single(self.tag_station)

    def needs_surface_work(self):
        # NOTE(review): self.has_photo is a bound-method reference (always
        # truthy) and even self.has_photo() returns only truthy strings --
        # the photo clause can never trigger; intent needs confirming.
        return self.findability != "S" or not self.has_photo or self.marking != "T"

    def get_absolute_url(self):
        # NOTE(review): get_root() and title are not defined on this class
        # in this file -- presumably provided elsewhere; verify.
        res = "/".join((self.get_root().cave.get_absolute_url(), self.title))
        return res

    def cavelist(self):
        """All caves this entrance belongs to."""
        rs = []
        for e in CaveAndEntrance.objects.filter(entrance=self):
            if e.cave:
                rs.append(e.cave)
        return rs

    def get_file_path(self):
        return Path(settings.ENTRANCEDESCRIPTIONS, self.filename)

    def file_output(self):
        """Return (filepath, rendered-XML, encoding) without writing anything."""
        filepath = Path(os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename))

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        content = t.render(c)
        return (filepath, content, "utf8")

    def writeDataFile(self):
        """Render this entrance through dataformat/entrance.xml and write
        the file (with version-control commit) via writetrogglefile()."""
        filepath = os.path.join(settings.ENTRANCEDESCRIPTIONS, self.filename)

        t = loader.get_template("dataformat/entrance.xml")
        now = datetime.now(timezone.utc)
        c = dict({"entrance": self, "date": now})
        u = t.render(c)
        writetrogglefile(filepath, u)
        return

    def url_parent(self):
        """URL one level up: own URL if set, else the single owning cave's
        parent URL, else ""."""
        if self.url:
            return self.url.rsplit("/", 1)[0]
        else:
            cavelist = self.cavelist()
            # (was: len(self.cavelist()) -- a redundant second query)
            if len(cavelist) == 1:
                return cavelist[0].url_parent()
            else:
                return ""

    def latlong(self):
        """Lat/long of the best station, preferring exact over tag over
        other; None if no station resolves."""
        station = None
        if self.other_station:
            try:
                station = SurvexStation.objects.get(name = self.other_station)
            except:
                pass
        if self.tag_station:
            try:
                station = SurvexStation.objects.get(name = self.tag_station)
            except:
                pass
        if self.exact_station:
            try:
                station = SurvexStation.objects.get(name = self.exact_station)
            except:
                pass
        if station:
            return station.latlong()
|
||||
|
||||
|
||||
def GetCaveLookup():
    """A very relaxed way of finding probably the right cave given almost any string which might serve to identify it

    lookup function modelled on GetPersonExpeditionNameLookup
    repeated assignment each call, needs refactoring

    Used when parsing wallets contents.json file too in views/uploads.py

    Does NOT detect duplicates! Needs fixing.
    Needs to be a proper funciton that raises an exception if there is a duplicate.
    OR we could set it to return None if there are duplicates, and require the caller to
    fall back on doing the actual database query it wants rather thna using this cache shortcut
    """
    # NOTE(review): indentation was reconstructed from an
    # indentation-stripped copy -- verify nesting against the live file.

    # ids that were claimed by more than one distinct cave; purged at the end
    duplicates = {}

    def checkcaveid(cave, id):
        # Register 'id' as a lookup key for 'cave'; record a duplicate when
        # the same id already maps to a DIFFERENT cave.
        global Gcavelookup
        if id not in Gcavelookup:
            Gcavelookup[id] = cave
            Gcave_count[id] += 1
        else:
            if cave == Gcavelookup[id]:
                pass  # same id, same cave
            else:  # same id but different cave
                # message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
                # print(message)
                # DataIssue.objects.create(parser="aliases", message=message)
                duplicates[id] = 1

    global Gcavelookup
    # Lazily-built module-level cache: once populated, return it untouched.
    if Gcavelookup:
        return Gcavelookup
    Gcavelookup = {"NONEPLACEHOLDER": None}
    global Gcave_count
    Gcave_count = defaultdict(int)  # sets default value to int(0)

    DataIssue.objects.filter(parser="aliases").delete()
    DataIssue.objects.filter(parser="aliases ok").delete()

    for cave in Cave.objects.all():
        key = cave.official_name.lower()
        if key != "" and key != "unamed" and key != "unnamed":
            if Gcave_count[key] > 0:
                # message = f" - Warning: ignoring alias id '{id:3}'. Caves '{Gcavelookup[id]}' and '{cave}'. "
                # print(message)
                # DataIssue.objects.create(parser="aliases", message=message)
                duplicates[key] = 1
            else:
                Gcavelookup[key] = cave
                Gcave_count[key] += 1
        if cave.kataster_number:
            checkcaveid(cave, cave.kataster_number)  # we do expect 1623/55 and 1626/55 to cause a warning message

        # the rest of these are 'nice to have' but may validly already be set
        if cave.unofficial_number:
            unoffn = cave.unofficial_number.lower()
            checkcaveid(cave, unoffn)

        if cave.filename:
            # this is the slug - usually.. but usually done as as f'{cave.area}-{cave.kataster_number}'
            fn = cave.filename.replace(".html", "").lower()
            checkcaveid(cave, fn)

        if cave.slug():
            # also possibly done already
            slug = cave.slug().lower()
            checkcaveid(cave, slug)

    # These might alse create more duplicate entries
    # Yes, this should be set in, and imported from, settings.py
    # NOTE(review): "2011-01" appears twice below (→ "292" and → "190");
    # the second assignment wins in dict terms -- flag for cleanup.
    aliases = [
        ("1987-02", "267"),
        ("1990-01", "171"),
        ("1990-02", "172"),
        ("1990-03", "173"),
        ("1990-04", "174"),
        ("1990-05", "175"),
        ("1990-06", "176"),
        ("1990-07", "177"),
        ("1990-08", "178"),
        ("1990-09", "179"),
        ("1990-10", "180"),
        ("1990-11", "181"),
        ("1990-12", "182"),
        ("1990-13", "183"),
        ("1990-14", "184"),
        ("1990-18", "188"),
        ("1990-adam", "225"),
        ("1993-01", "200"),
        ("1996-02", "224"),
        ("1996-03", "223"),
        ("1996-04", "222"),
        ("1996wk2", "207"),
        ("1996wk3", "208"),
        ("1996wk5", "219"),
        ("1996wk6", "218"),
        ("1996wk8", "209"),
        ("1996wk11", "268"),
        ("96wk11", "268"),
        ("1998-01", "201"),
        ("1998-03", "210"),
        ("1999-03", "204"),
        ("1999-04", "230"),
        ("1999-10", "162"),
        ("1999-bo-01", "205"),
        ("1999-ob-03", "226"),
        ("1999-ob-04", "227"),
        ("2000-01", "231"),
        ("2000-03", "214"),
        ("2000-04", "220"),
        ("2000-05", "215"),
        ("2000-06", "216"),
        ("2000-07", "217"),
        ("2000-09", "234"),
        ("2000-aa-01", "250"),
        ("2001-04", "239"),
        ("2001-05", "243"),
        ("2002-01", "249"),
        ("2002-02", "234"),
        ("2002-04", "242"),
        ("2002-05", "294"),
        ("2003-01", "256"),
        ("2003-02", "248"),
        ("2003-03", "247"),
        ("2003-04", "241"),
        ("2003-05", "246"),
        ("2003-06", "161"),
        ("2003-08", "240"),
        ("2003-09", "245"),
        ("2003-10", "244"),
        ("2004-01", "269"),
        ("2004-03", "270"),
        ("2004-11", "251"),
        ("2004-12", "161"),
        ("2004-15", "253"),
        ("2004-19", "254"),
        ("2004-20", "255"),
        ("2005-04", "204"),
        ("2005-05", "264"),
        ("2005-07", "257"),
        ("2006-08", "285"),
        ("2006-09", "298"),
        ("2007-71", "271"),
        ("2010-01", "263"),
        ("2010-03", "293"),
        ("2011-01", "292"),
        ("2012-dd-05", "286"),
        ("2012-ns-13", "292"),
        ("2014-neo-01", "273"),
        ("2014-sd-01", "274"),
        ("2014-ms-14", "287"),
        ("2015-mf-06", "288"),
        ("2016-jb-01", "289"),
        ("2017-pw-01", "277"),
        ("2018-dm-07", "359"),  # NB this is 1626
        ("2017_cucc_24", "291"),  # note _ not -
        ("2017_cucc_23", "295"),  # note _ not -
        ("2017_cucc_28", "290"),  # note _ not -
        ("bs17", "283"),
        ("1976/b11", "198"),
        ("1976/b8", "197"),
        ("1976/b9", "190"),
        ("b11", "1976/b11"),
        ("b8", "1976/b8"),
        ("b9", "1976/b9"),
        ("2011-01-bs30", "190"),
        ("bs30", "190"),
        ("2011-01", "190"),
        ("quarriesd", "2002-08"),
        ("2002-x11", "2005-08"),
        ("2002-x12", "2005-07"),
        ("2002-x13", "2005-06"),
        ("2002-x14", "2005-05"),
        ("kh", "161"),
        ("161-kh", "161"),
        ("204-steinBH", "204"),
        ("stonebridge", "204"),
        ("hauchhole", "234"),
        ("hauch", "234"),
        ("234-hauch", "234"),
        ("tunnocks", "258"),
        ("balcony", "264"),
        ("balkon", "264"),
        ("fgh", "290"),
        ("gsh", "291"),
        ("homecoming", "2018-dm-07"),
        ("heimkommen", "2018-dm-07"),
        ("Heimkehr", "2018-dm-07"),
        ("99ob02", "1999-ob-02"),
    ]

    for i in aliases:
        if i[1] in Gcavelookup:
            if i[0] in Gcavelookup:
                # already set by a different method, but is it the same cave?
                if Gcavelookup[i[0]] == Gcavelookup[i[1]]:
                    pass
                else:
                    Gcave_count[i[0]] += 1
            Gcavelookup[i[0]] = Gcavelookup[i[1]]
        else:
            message = f" * Coding or cave existence mistake, cave for id '{i[1]}' does not exist. Expecting to set alias '{i[0]}' to it"
            # print(message)
            DataIssue.objects.create(parser="aliases", message=message)

    # Add hyphen/underscore and upper-case variants of every key.
    addmore = {}
    for id in Gcavelookup:
        addmore[id.replace("-", "_")] = Gcavelookup[id]
        addmore[id.replace("_", "-")] = Gcavelookup[id]
        addmore[id.upper()] = Gcavelookup[id]
    Gcavelookup = {**addmore, **Gcavelookup}

    addmore = {}

    # Remove every id that was claimed by more than one cave.
    ldup = []
    for d in duplicates:
        Gcavelookup.pop(d)
        Gcave_count.pop(d)
        ldup.append(d)
    if ldup:
        message = f" - Ambiguous aliases removed: {ldup}"
        print(message)
        DataIssue.objects.create(parser="aliases ok", message=message)

    for c in Gcave_count:
        if Gcave_count[c] > 1:
            message = f" ** Duplicate cave id count={Gcave_count[c]} id:'{Gcavelookup[c]}' cave __str__:'{c}'"
            print(message)
            DataIssue.objects.create(parser="aliases", message=message)

    return Gcavelookup
|
||||
226
core/models/logbooks.py
Normal file
226
core/models/logbooks.py
Normal file
@@ -0,0 +1,226 @@
|
||||
from pathlib import Path
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
|
||||
import settings
|
||||
from troggle.core.models.troggle import Expedition, TroggleModel
|
||||
|
||||
"""The model declarations LogBookEntry, PersonLogEntry, QM
|
||||
"""
|
||||
|
||||
todo = """
|
||||
- Can we rewrite things to eliminate the CaveSlug and objects? No
|
||||
Surely foreign keys work fine ?! No
|
||||
|
||||
Foreign keys do not allow for there being multiple ways to refer to a cave, eg 1623-1999-03 aka 1623-204
|
||||
Having slugs allows for much more loose coupling to caves, which removes alot of the need to reset the database, which interupts work flow.
|
||||
It also means we do not have to be creating temporary cave objects in the database, where we do not have the underlying file in cave_data.
|
||||
|
||||
To Do move Cave Slug back to troggle.core.models
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class CaveSlug(models.Model):
    """A URL-safe alias for a Cave; a cave may have several, with at most
    one marked primary.

    Lives in this module (not caves.py) to avoid a cyclic import.
    CASCADE: deleting the Cave deletes its slugs too.
    """

    cave = models.ForeignKey("Cave", on_delete=models.CASCADE)
    slug = models.SlugField(max_length=50, unique=True)
    primary = models.BooleanField(default=False)

    def __str__(self):
        return ": ".join((str(self.slug), str(self.cave)))
|
||||
|
||||
|
||||
class LogbookEntry(TroggleModel):
    """Single parsed entry from Logbook
    Gets deleted if the Expedition gets deleted"""

    date = (
        models.DateField()
    )  # MJG wants to turn this into a datetime such that multiple Logbook entries on the same day can be ordered.ld()
    expedition = models.ForeignKey(Expedition, blank=True, null=True, on_delete=models.CASCADE)  # yes this is double-
    title = models.CharField(max_length=200)
    cave_slug = models.SlugField(max_length=50, blank=True, null=True)
    place = models.CharField(
        max_length=100, blank=True, null=True, help_text="Only use this if you haven't chosen a cave"
    )
    text = models.TextField()
    slug = models.SlugField(max_length=50)
    time_underground = models.FloatField(null=True, help_text="In decimal hours")

    class Meta:
        verbose_name_plural = "Logbook Entries"
        # several PersonLogEntrys point in to this object
        ordering = ("-date",)

    def cave(self):  # Why didn't he just make this a foreign key to Cave ?
        """Resolve cave_slug to the Cave via its primary CaveSlug."""
        c = CaveSlug.objects.get(slug=self.cave_slug, primary=True).cave
        return c

    def isLogbookEntry(self):  # Function used in templates
        return True

    def get_absolute_url(self):
        return urljoin(settings.URL_ROOT, reverse("logbookentry", kwargs={"date": self.date, "slug": self.slug}))

    def __str__(self):
        return f"{self.date}: {self.title}"

    def get_next_by_id(self):
        """Entry with the next database id.

        BUG FIX: the result was computed but not returned (the method
        always yielded None); now returns it, matching QM's equivalent.
        """
        return LogbookEntry.objects.get(id=self.id + 1)

    def get_previous_by_id(self):
        """Entry with the previous database id.

        BUG FIX: missing return, as above.
        """
        return LogbookEntry.objects.get(id=self.id - 1)

    def DayIndex(self):
        """This is used to set different colours for the different trips on
        the calendar view of the expedition"""
        mx = 10
        todays = list(LogbookEntry.objects.filter(date=self.date))
        if self in todays:
            index = todays.index(self)
        else:
            print(f"DayIndex: Synchronization error in logbook entries. Restart server or do full reset. {self}")
            index = 0

        if index not in range(0, mx):
            print(f"DayIndex: More than {mx-1} LogbookEntry items on one day '{index}' {self}, restarting colour sequence.")
            index = index % mx
        return index
|
||||
|
||||
|
||||
class PersonLogEntry(TroggleModel):
    """Single Person going on a trip, which may or may not be written up.
    It could account for different T/U for people in same logbook entry.

    CASCADE means that if the personexpedition or the logbookentry is deleted,
    then this PersonLogEntry is deleted too
    """

    personexpedition = models.ForeignKey("PersonExpedition", null=True, on_delete=models.CASCADE)
    time_underground = models.FloatField(help_text="In decimal hours")
    logbook_entry = models.ForeignKey(LogbookEntry, on_delete=models.CASCADE)
    is_logbook_entry_author = models.BooleanField(default=False)

    class Meta:
        ordering = ("-personexpedition",)
        # order_with_respect_to = 'personexpedition'

    def next_personlog(self):
        """Next PersonLogEntry (by logbook date) for the same person on the
        same expedition, or None if this is the last one."""
        futurePTs = (
            PersonLogEntry.objects.filter(
                personexpedition=self.personexpedition, logbook_entry__date__gt=self.logbook_entry.date
            )
            .order_by("logbook_entry__date")
            .all()
        )
        if len(futurePTs) > 0:
            return futurePTs[0]
        else:
            return None

    def prev_personlog(self):
        """Previous PersonLogEntry (by logbook date) for the same person on the
        same expedition, or None if this is the first one."""
        pastPTs = (
            PersonLogEntry.objects.filter(
                personexpedition=self.personexpedition, logbook_entry__date__lt=self.logbook_entry.date
            )
            .order_by("-logbook_entry__date")
            .all()
        )
        if len(pastPTs) > 0:
            return pastPTs[0]
        else:
            return None

    def place(self):
        """The cave the trip went to if one resolves, else the free-text place.

        BUGFIX: the original evaluated `self.logbook_entry.cave` without calling
        it; LogbookEntry.cave is a method, and a bound method is always truthy,
        so the `.place` fallback was dead code and the method object itself was
        returned. Now the lookup is actually performed, falling back to .place
        when it fails (e.g. no primary CaveSlug) or yields nothing.
        """
        try:
            c = self.logbook_entry.cave()
        except Exception:
            c = None
        return c if c else self.logbook_entry.place

    def __str__(self):
        return f"{self.personexpedition} ({self.logbook_entry.date})"
|
||||
|
||||
|
||||
class QM(TroggleModel):
    """This is based on qm.csv in trunk/expoweb/1623/204 which has the fields:
    "Number","Grade","Area","Description","Page reference","Nearest station","Completion description","Comment"

    All the stuff handling TICK QMs is INCOMPLETE
    """

    number = models.IntegerField(
        help_text="this is the sequential number in the year, only unique for CSV imports",
    )
    grade = models.CharField(max_length=1, blank=True, null=True, help_text="A/B/C/D/X")
    cave = models.ForeignKey("Cave", related_name="QMs", blank=True, null=True, on_delete=models.SET_NULL)
    block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL)  # only for QMs from survex files
    blockname = models.TextField(blank=True, null=True)  # NB truncated copy of survexblock name with last char added
    expoyear = models.CharField(max_length=4, blank=True, null=True)
    ticked = models.BooleanField(default=False)
    location_description = models.TextField(blank=True, null=True)
    completion_description = models.TextField(blank=True, null=True)
    completion_date = models.DateField(blank=True, null=True)
    nearest_station_name = models.CharField(max_length=200, blank=True, null=True)
    resolution_station_name = models.CharField(max_length=200, blank=True, null=True)
    area = models.CharField(max_length=100, blank=True, null=True)
    page_ref = models.TextField(blank=True, null=True)
    comment = models.TextField(blank=True, null=True)

    def __str__(self):
        return f"{self.code()}"

    def code(self):
        """Compact human-readable QM code: cave-year-numbergrade[-blockname]."""
        if self.cave:
            cavestr = str(self.cave.slug())[5:]
        else:
            cavestr = ""
        if self.expoyear:
            expoyearstr = str(self.expoyear)
        elif self.cave:
            expoyearstr = str(self.cave.slug())[5:9]
        else:
            # ROBUSTNESS: previously this fell through to self.cave.slug() and
            # raised AttributeError when neither expoyear nor cave was set.
            expoyearstr = ""
        if self.blockname:
            blocknamestr = "-" + str(self.blockname)
        else:
            blocknamestr = ""
        return f"{cavestr}-{expoyearstr}-{self.number}{self.grade}{blocknamestr}"

    # def get_completion_url(self):
    #     """assumes html file named is in same folder as cave description file
    #     WRONG - needs rewriting!"""
    #     cd = None
    #     if self.completion_description:
    #         try:
    #             dir = Path(self.cave.url).parent
    #             cd = dir / self.completion_description
    #         except:
    #             cd = None
    #     return cd

    def newslug(self):
        """Slug built from cave, year, blockname, number and grade."""
        qmslug = f"{str(self.cave)}-{self.expoyear}-{self.blockname}{self.number}{self.grade}"
        return qmslug

    def get_absolute_url(self):
        # This reverse resolution stuff is pure magic. Just change the regex in urls.py and everything changes to suit. Whacky.
        return urljoin(
            settings.URL_ROOT,
            reverse(
                "qm",
                kwargs={
                    "cave_id": self.cave.slug(),
                    "year": self.expoyear,
                    "blockname": self.blockname,
                    "qm_id": self.number,
                    "grade": self.grade,
                },
            ),
        )

    def get_next_by_id(self):  # called in template
        # NOTE(review): raises QM.DoesNotExist on id gaps — confirm templates tolerate this.
        return QM.objects.get(id=self.id + 1)

    def get_previous_by_id(self):  # called in template
        return QM.objects.get(id=self.id - 1)
|
||||
298
core/models/survex.py
Normal file
298
core/models/survex.py
Normal file
@@ -0,0 +1,298 @@
|
||||
import os
|
||||
import re
|
||||
from urllib.parse import urljoin
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
|
||||
|
||||
# from troggle.core.models.troggle import DataIssue # circular import. Hmm
|
||||
|
||||
|
||||
class SurvexDirectory(models.Model):
    """Relates a Cave to the primary SurvexFile at the 'head' of that cave's
    survex tree. Not simply a property of Cave because several subdirectories
    can all relate to the same Cave.
    """

    path = models.CharField(max_length=200)
    cave = models.ForeignKey("Cave", blank=True, null=True, on_delete=models.SET_NULL)
    primarysurvexfile = models.ForeignKey(
        "SurvexFile", related_name="primarysurvexfile", blank=True, null=True, on_delete=models.SET_NULL
    )
    # could also include files in directory but not referenced

    class Meta:
        ordering = ("id",)
        verbose_name_plural = "Survex directories"

    def contents(self):
        """Debug summary naming this directory and its primary .svx file."""
        return f"[SvxDir:{self.path} | Primary svx:{self.primarysurvexfile.path}.svx ]"

    def __str__(self):
        return f"[SvxDir:{self.path}]"
|
||||
|
||||
|
||||
class SurvexFile(models.Model):
    """A single .svx file under SURVEX_DATA, optionally linked to its directory and cave."""

    path = models.CharField(max_length=200)
    survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True, on_delete=models.SET_NULL)
    cave = models.ForeignKey("Cave", blank=True, null=True, on_delete=models.SET_NULL)

    class Meta:
        ordering = ("id",)

    # Don't change __str__ from the bare path: that would break troggle
    # webpages and internal referencing!

    def exists(self):
        """This is only used within the Django templates: does the .svx file
        actually exist on disk?"""
        return Path(settings.SURVEX_DATA, self.path + ".svx").is_file()

    def SetDirectory(self):
        """Attach (creating on demand) the SurvexDirectory for this file's folder."""
        dirpath = os.path.split(self.path)[0]
        # pointless search every time we import a survex file if we know there are no duplicates..
        # don't use this for initial import.
        existing = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath).first()
        if existing is not None:
            self.survexdirectory = existing
        else:
            newdir = SurvexDirectory(path=dirpath, cave=self.cave, primarysurvexfile=self)
            newdir.save()
            self.survexdirectory = newdir
        self.save()

    def __str__(self):
        return self.path
|
||||
|
||||
|
||||
class SurvexStationLookUpManager(models.Manager):
    """Custom manager (https://docs.djangoproject.com/en/dev/topics/db/managers/)
    replacing .objects so that SurvexStation.objects.lookup() resolves a dotted
    block.station name case-insensitively (name__iexact).
    """

    def lookup(self, name):
        blocknames, _, stationname = name.rpartition(".")
        parent_block = SurvexBlock.objects.lookup(blocknames)
        return self.get(block=parent_block, name__iexact=stationname)
|
||||
|
||||
|
||||
class SurvexStation(models.Model):
    """A named survey station with (x, y, z) coordinates taken from the .3d file;
    only used for entrance locations since 2020."""

    name = models.CharField(max_length=100)
    # block = models.ForeignKey("SurvexBlock", null=True, on_delete=models.SET_NULL)
    # block not used since 2020. survex stations objects are only used for entrnce locations and all taken from the .3d file
    objects = SurvexStationLookUpManager()  # overwrites SurvexStation.objects and enables lookup()
    x = models.FloatField(blank=True, null=True)
    y = models.FloatField(blank=True, null=True)
    z = models.FloatField(blank=True, null=True)

    class Meta:
        ordering = ("id",)

    def __str__(self):
        return str(self.name) if self.name else "no name"

    def latlong(self):
        """Convert the stored UTM zone-33 northern-hemisphere coords to (lat, long)."""
        return utmToLatLng(33, self.x, self.y, northernHemisphere=True)
|
||||
|
||||
import math
|
||||
|
||||
def utmToLatLng(zone, easting, northing, northernHemisphere=True):
    """Convert UTM coordinates to a (latitude, longitude) tuple in degrees.

    zone is the UTM longitude zone number (33 for the expo area); easting and
    northing are in metres. WGS84 ellipsoid constants are hard-coded.
    """
    if not northernHemisphere:
        northing = 10000000 - northing

    # WGS84 ellipsoid constants
    a = 6378137  # semi-major axis (m)
    e = 0.081819191  # first eccentricity
    e1sq = 0.006739497  # second eccentricity squared
    k0 = 0.9996  # UTM central-meridian scale factor

    # Footpoint latitude from the meridian arc length
    arc = northing / k0
    mu = arc / (a * (1 - math.pow(e, 2) / 4.0 - 3 * math.pow(e, 4) / 64.0 - 5 * math.pow(e, 6) / 256.0))

    ei = (1 - math.pow((1 - e * e), (1 / 2.0))) / (1 + math.pow((1 - e * e), (1 / 2.0)))

    ca = 3 * ei / 2 - 27 * math.pow(ei, 3) / 32.0
    cb = 21 * math.pow(ei, 2) / 16 - 55 * math.pow(ei, 4) / 32
    cc = 151 * math.pow(ei, 3) / 96
    cd = 1097 * math.pow(ei, 4) / 512
    phi1 = mu + ca * math.sin(2 * mu) + cb * math.sin(4 * mu) + cc * math.sin(6 * mu) + cd * math.sin(8 * mu)

    n0 = a / math.pow((1 - math.pow((e * math.sin(phi1)), 2)), (1 / 2.0))

    r0 = a * (1 - e * e) / math.pow((1 - math.pow((e * math.sin(phi1)), 2)), (3 / 2.0))
    fact1 = n0 * math.tan(phi1) / r0

    _a1 = 500000 - easting  # offset from the 500 km false easting
    dd0 = _a1 / (n0 * k0)
    fact2 = dd0 * dd0 / 2

    t0 = math.pow(math.tan(phi1), 2)
    Q0 = e1sq * math.pow(math.cos(phi1), 2)
    fact3 = (5 + 3 * t0 + 10 * Q0 - 4 * Q0 * Q0 - 9 * e1sq) * math.pow(dd0, 4) / 24

    fact4 = (61 + 90 * t0 + 298 * Q0 + 45 * t0 * t0 - 252 * e1sq - 3 * Q0 * Q0) * math.pow(dd0, 6) / 720

    lof1 = _a1 / (n0 * k0)
    lof2 = (1 + 2 * t0 + Q0) * math.pow(dd0, 3) / 6.0
    lof3 = (5 - 2 * Q0 + 28 * t0 - 3 * math.pow(Q0, 2) + 8 * e1sq + 24 * math.pow(t0, 2)) * math.pow(dd0, 5) / 120
    _a2 = (lof1 - lof2 + lof3) / math.cos(phi1)
    _a3 = _a2 * 180 / math.pi

    latitude = 180 * (phi1 - fact1 * (fact2 + fact3 + fact4)) / math.pi

    if not northernHemisphere:
        latitude = -latitude

    # BUGFIX: the original `(zone > 0) and (6 * zone - 183.0) or 3.0` is the
    # broken and/or ternary idiom: it yields 3.0 whenever the central-meridian
    # expression itself is falsy (0.0). Use an explicit conditional instead.
    central_meridian = (6 * zone - 183.0) if zone > 0 else 3.0
    longitude = central_meridian - _a3

    return (latitude, longitude)
|
||||
|
||||
#
|
||||
# Single SurvexBlock
|
||||
#
|
||||
class SurvexBlockLookUpManager(models.Manager):
    """Custom manager (https://docs.djangoproject.com/en/dev/topics/db/managers/)
    adding lookup(): a case-insensitive (name__iexact) walk down the block tree
    starting at the top survex file.
    Used by SurvexStationLookUpManager.lookup(), which is used in
    Entrance().other_location(), which is used in the Cave webpage.
    """

    def lookup(self, name):
        blocknames = name.split(".") if name != "" else []
        block = SurvexBlock.objects.get(parent=None, survexfile__path=settings.SURVEX_TOPNAME)
        for blockname in blocknames:
            block = SurvexBlock.objects.get(parent=block, name__iexact=blockname)
        return block
|
||||
|
||||
|
||||
class SurvexBlock(models.Model):
    """One begin..end block within a survex file: the basic element of a survey trip.
    Multiple anonymous survex blocks are possible within the same survex file,
    and blocks can span several *included survex files.
    """

    objects = SurvexBlockLookUpManager()  # overwrites SurvexBlock.objects and enables lookup()
    name = models.CharField(max_length=100)
    title = models.CharField(max_length=200)
    parent = models.ForeignKey("SurvexBlock", blank=True, null=True, on_delete=models.SET_NULL)

    date = models.DateField(blank=True, null=True)
    expedition = models.ForeignKey("Expedition", blank=True, null=True, on_delete=models.SET_NULL)
    # if the survexfile object is deleted, then all the survex-blocks in it should be too,
    # though a block can span more than one file...
    survexfile = models.ForeignKey("SurvexFile", blank=True, null=True, on_delete=models.CASCADE)
    survexpath = models.CharField(max_length=200)  # the path for the survex stations

    scanswallet = models.ForeignKey(
        "Wallet", null=True, on_delete=models.SET_NULL
    )  # only ONE wallet per block. The most recent seen overwrites.. ugh.

    legsall = models.IntegerField(null=True)  # summary data for this block
    legslength = models.FloatField(null=True)

    class Meta:
        ordering = ("id",)

    def __str__(self):
        return str(self.name) if self.name else "no_name-#" + str(self.id)

    def isSurvexBlock(self):  # Function used in templates
        return True

    def DayIndex(self):
        """Used to pick different colours for the different trips shown on the
        calendar view of the expedition."""
        mx = 10  # number of distinct colours available
        same_day = list(SurvexBlock.objects.filter(date=self.date))
        try:
            index = same_day.index(self)
        except ValueError:
            print(f"DayIndex: Synchronization error in survex blocks. Restart server or do full reset. {self}")
            index = 0
        if index not in range(0, mx):
            print(f"DayIndex: More than {mx-1} SurvexBlock items on one day '{index}' {self}, restarting colour sequence.")
            index = index % mx
        return index
|
||||
|
||||
|
||||
class SurvexPersonRole(models.Model):
    """One person's membership of a survex block's team.

    The CASCADE means that if a SurvexBlock or a Person is deleted, then the
    SurvexPersonRole is deleted too.
    """

    survexblock = models.ForeignKey("SurvexBlock", on_delete=models.CASCADE)
    # increasing levels of precision, Surely we only need survexblock and person now that we have no link to a logbook entry?
    personname = models.CharField(max_length=100)
    person = models.ForeignKey("Person", blank=True, null=True, on_delete=models.CASCADE)  # not needed
    personexpedition = models.ForeignKey("PersonExpedition", blank=True, null=True, on_delete=models.SET_NULL)

    def __str__(self):
        return f"{self.personname} - {self.survexblock}"
|
||||
|
||||
|
||||
class SingleScan(models.Model):
    """A single file holding an image. Could be raw notes, an elevation plot or whatever."""

    ffile = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    wallet = models.ForeignKey("Wallet", null=True, on_delete=models.SET_NULL)

    class Meta:
        ordering = ("name",)

    def get_absolute_url(self):
        # '#' in wallet names must be URL-escaped as %23
        escaped = re.sub("#", "%23", self.wallet.walletname)
        rel = reverse("scansingle", kwargs={"path": escaped, "file": self.name})
        return urljoin(settings.URL_ROOT, rel)

    def __str__(self):
        return f"Scan Image: {self.name} in {self.wallet}"
|
||||
|
||||
|
||||
class DrawingFile(models.Model):
    """A file holding a Therion (several types) or a Tunnel drawing.
    Most of the implied capabilities are not implemented yet."""

    dwgpath = models.CharField(max_length=200)
    dwgname = models.CharField(max_length=200)
    dwgwallets = models.ManyToManyField("Wallet")  # implicitly links via folders to scans to SVX files
    scans = models.ManyToManyField("SingleScan")  # implicitly links via scans to SVX files
    dwgcontains = models.ManyToManyField("DrawingFile")  # case when its a frame type
    filesize = models.IntegerField(default=0)
    npaths = models.IntegerField(default=0)
    survexfiles = models.ManyToManyField("SurvexFile")  # direct link to SVX files - not populated yet

    class Meta:
        ordering = ("dwgpath",)

    def __str__(self):
        return f"Drawing File: {self.dwgname} ({self.filesize} bytes)"
|
||||
187
core/models/troggle.py
Normal file
187
core/models/troggle.py
Normal file
@@ -0,0 +1,187 @@
|
||||
from decimal import Decimal, getcontext
|
||||
from urllib.parse import urljoin
|
||||
|
||||
getcontext().prec = 2 # use 2 significant figures for decimal calculations
|
||||
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
|
||||
import settings
|
||||
|
||||
"""This file declares TroggleModel which inherits from django.db.models.Model
|
||||
All TroggleModel and models.Model subclasses inherit persistence in the django relational database. This is known as
|
||||
the django Object Relational Mapping (ORM).
|
||||
There are more subclasses defined in models/caves.py models/survex.py etc.
|
||||
"""
|
||||
|
||||
|
||||
class TroggleModel(models.Model):
    """Abstract base adding fields and methods which all of our models will have."""

    new_since_parsing = models.BooleanField(default=False, editable=False)
    non_public = models.BooleanField(default=False)

    def object_name(self):
        """The model's class name, as used in admin URLs."""
        return self._meta.object_name

    def get_admin_url(self):
        """URL of this object's page in the django admin site."""
        path = f"/admin/core/{self.object_name().lower()}/{self.pk}"
        return urljoin(settings.URL_ROOT, path)

    class Meta:
        abstract = True
|
||||
|
||||
|
||||
class DataIssue(TroggleModel):
    """When importing cave data any validation problems produce a message which is
    recorded as a DataIssue. The django admin system automatically produces a page listing
    these at /admin/core/dataissue/
    This is a use of the NOTIFICATION pattern:
    https://martinfowler.com/eaaDev/Notification.html

    We have replaced all assertions in the code with messages and local fix-ups or skips:
    https://martinfowler.com/articles/replaceThrowWithNotification.html

    See also the use of stash_data_issue() & store_data_issues() in parsers/survex.py which defer writing to the database until the end of the import.
    """

    # Timestamp set automatically when the issue row is created.
    date = models.DateTimeField(auto_now_add=True, blank=True)
    # Which parser reported the issue (e.g. "wallets" as used in Wallet.get_json).
    parser = models.CharField(max_length=50, blank=True, null=True)
    message = models.CharField(max_length=800, blank=True, null=True)
    url = models.CharField(max_length=300, blank=True, null=True)  # link to offending object

    class Meta:
        ordering = ["date"]

    def __str__(self):
        return f"{self.parser} - {self.message}"
|
||||
|
||||
#
|
||||
# single Expedition, usually seen by year
|
||||
#
|
||||
class Expedition(TroggleModel):
    """A single Expedition, usually identified by its year."""

    year = models.CharField(max_length=20, unique=True)
    name = models.CharField(max_length=100)
    logbookfile = models.CharField(max_length=100, blank=True, null=True)

    class Meta:
        ordering = ("-year",)
        get_latest_by = "year"

    def __str__(self):
        return self.year

    def get_absolute_url(self):
        return urljoin(settings.URL_ROOT, reverse("expedition", args=[self.year]))
|
||||
|
||||
|
||||
# class ExpeditionDay(TroggleModel):
|
||||
# """Exists only on Expedition now. Removed links from logbookentry, personlogentry, survex stuff etc.
|
||||
# """
|
||||
# expedition = models.ForeignKey("Expedition",on_delete=models.CASCADE)
|
||||
# date = models.DateField()
|
||||
|
||||
# class Meta:
|
||||
# ordering = ('date',)
|
||||
|
||||
|
||||
class Person(TroggleModel):
    """A single Person, who can go on many years' expeditions."""

    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    fullname = models.CharField(max_length=200)
    nickname = models.CharField(max_length=200)
    is_vfho = models.BooleanField(
        help_text="VFHO is the Vereines für Höhlenkunde in Obersteier, a nearby Austrian caving club.",
        default=False,
    )
    mug_shot = models.CharField(max_length=100, blank=True, null=True)
    blurb = models.TextField(blank=True, null=True)
    orderref = models.CharField(max_length=200)  # for alphabetic

    def get_absolute_url(self):
        return urljoin(
            settings.URL_ROOT, reverse("person", kwargs={"first_name": self.first_name, "last_name": self.last_name})
        )

    class Meta:
        verbose_name_plural = "People"
        ordering = ("orderref",)  # "Wookey" makes too complex for: ('last_name', 'first_name')

    def __str__(self):
        if self.last_name:
            return f"{self.first_name} {self.last_name}"
        return self.first_name

    def notability(self):
        """This is actually recency: all recent cavers, weighted by number of expos.

        Each non-guest expedition contributes 1/(years since that expo + 1).
        (Removed the dead `max_expo_val = 0` store that was unconditionally
        overwritten below.)
        """
        notability = Decimal(0)
        max_expo_year = Expedition.objects.all().aggregate(models.Max("year"))
        max_expo_val = int(max_expo_year["year__max"]) + 1

        for personexpedition in self.personexpedition_set.all():
            if not personexpedition.is_guest:
                notability += Decimal(1) / (max_expo_val - int(personexpedition.expedition.year))
        return notability

    def bisnotable(self):
        """Boolean: is this person notable (recent/frequent enough)?"""
        return self.notability() > Decimal(1) / Decimal(3)

    def surveyedleglength(self):
        """Total surveyed leg length over all this person's expeditions."""
        # generator expression: no need to materialize the intermediate list
        return sum(pe.surveyedleglength() for pe in self.personexpedition_set.all())

    def first(self):
        # NOTE(review): order_by("-expedition") puts the most recent first, so
        # first()/last() look name-swapped — confirm against callers before renaming.
        return self.personexpedition_set.order_by("-expedition")[0]

    def last(self):
        return self.personexpedition_set.order_by("expedition")[0]

    # moved from personexpedition
    def name(self):
        if self.nickname:
            return f"{self.first_name} ({self.nickname}) {self.last_name}"
        if self.last_name:
            return f"{self.first_name} {self.last_name}"
        return self.first_name
|
||||
|
||||
|
||||
class PersonExpedition(TroggleModel):
    """One Person's attendance at one Expo.

    CASCADE means that if an expedition or a person is deleted, the
    PersonExpedition is deleted too.
    """

    expedition = models.ForeignKey(Expedition, on_delete=models.CASCADE)
    person = models.ForeignKey(Person, on_delete=models.CASCADE)
    slugfield = models.SlugField(max_length=50, blank=True, null=True)  # 2022 to be used in future

    is_guest = models.BooleanField(default=False)

    class Meta:
        ordering = ("-expedition",)
        # order_with_respect_to = 'expedition'

    def __str__(self):
        return f"{self.person}: ({self.expedition})"

    def get_absolute_url(self):
        url_kwargs = {
            "first_name": self.person.first_name,
            "last_name": self.person.last_name,
            "year": self.expedition.year,
        }
        return urljoin(settings.URL_ROOT, reverse("personexpedition", kwargs=url_kwargs))

    def surveyedleglength(self):
        """Survey length for this person on all survex trips on this expedition."""
        distinct_blocks = {personrole.survexblock for personrole in self.survexpersonrole_set.all()}
        return sum(block.legslength for block in distinct_blocks)
|
||||
339
core/models/wallets.py
Normal file
339
core/models/wallets.py
Normal file
@@ -0,0 +1,339 @@
|
||||
import datetime
|
||||
import json
|
||||
import operator
|
||||
import re
|
||||
from functools import reduce
|
||||
from pathlib import Path
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
|
||||
# from troggle.core.models.survex import SurvexBlock
|
||||
# from troggle.core.models.troggle import DataIssue # circular import. Hmm
|
||||
|
||||
YEAR_RANGE = (1975, 2050)
|
||||
|
||||
def make_valid_date(date):
    """Take whatever garbage some fool has typed in and try to make it into a
    valid ISO-format date.

    Accepts ISO dates with '.' or '-' separators, euro format dd/mm/yyyy
    (2-digit years are assumed to be 2000+), and yyyy-m-d with 1-digit
    month/day. Returns a datetime.date, or None if nothing could be salvaged.
    """
    datestr = date.replace(".", "-")
    try:
        return datetime.date.fromisoformat(datestr)
    except ValueError:
        pass  # not ISO; try the sloppier formats below

    # Could be in std euro format e.g. 14/07/2023
    match = re.search(r"(\d{1,2})/(\d{1,2})/(\d{2,4})", datestr)
    if match:
        d = int(match.group(1))
        m = int(match.group(2))
        y = int(match.group(3))
        if y < 2000:  # 2-digit year: assume this century
            y = y + 2000
        try:
            samedate = datetime.date(y, m, d)
            print(f"- - Warning, not in ISO format. '{datestr=}' but we coped: {samedate.isoformat()} ")
            return samedate
        except ValueError:  # narrowed from a bare except: only invalid day/month is expected here
            print(f"! - Fail, tried to decompose date in dd/mm/yyyy format but failed: {datestr=} ")
            return None

    # probably a single digit day number or month number
    match = re.search(r"(\d{4})-(\d{1,2})-(\d{1,2})", datestr)
    if match:
        y = int(match.group(1))
        m = int(match.group(2))
        d = int(match.group(3))
        try:
            samedate = datetime.date(y, m, d)
            print(f"- - Warning, 1 digit only for month or day '{datestr=}' but we coped: {samedate.isoformat()} ")
            return samedate
        except ValueError:  # narrowed from a bare except
            print(f"! - Fail, tried to decompose date in yyyy-mm-d or yyy-m-dd format but failed: {datestr=} ")
            return None

    print(f"! - Failed to understand date, none of our tricks worked {datestr=} ")
    return None
|
||||
|
||||
class Wallet(models.Model):
|
||||
"""We do not keep the JSON values in the database, we query them afresh each time,
|
||||
but we will change this when we need to do a Django query on e.g. personame
|
||||
"""
|
||||
|
||||
    # Filesystem path to the wallet folder, e.g. .../surveyscans/1999/1999#02
    fpath = models.CharField(max_length=200)
    # Wallet name, e.g. "1999#02" — '#' at position 4 separates year from number.
    walletname = models.CharField(max_length=200)
    # Date parsed from the wallet's contents.json, cached here once read.
    walletdate = models.DateField(blank=True, null=True)
    # Jan 1st of the wallet's year, derived syntactically from walletname.
    walletyear = models.DateField(blank=True, null=True)

    class Meta:
        ordering = ("walletname",)
|
||||
|
||||
    def get_absolute_url(self):
        """URL of the single-wallet page; '#' in wallet names must be escaped as %23."""
        return urljoin(settings.URL_ROOT, reverse("singlewallet", kwargs={"path": re.sub("#", "%23", self.walletname)}))
|
||||
|
||||
def get_json(self):
|
||||
"""Read the JSON file for the wallet and do stuff
|
||||
Do it every time it is queried, to be sure the result is fresh
|
||||
|
||||
import DataIssue locally to prevent import cycle problem"""
|
||||
# jsonfile = Path(self.fpath, 'contents.json')
|
||||
|
||||
# Get from git repo instead
|
||||
# :drawings: walletjson/2022/2022#01/contents.json
|
||||
# fpath = /mnt/d/EXPO/expofiles/surveyscans/1999/1999#02
|
||||
fp = Path(self.fpath)
|
||||
wname = fp.name
|
||||
wyear = fp.parent.name
|
||||
wurl = f"/walletedit/{self.walletname}".replace('#', ':')
|
||||
|
||||
if len(wyear) != 4 or len(wname) !=6:
|
||||
# no contents.json for old-style wallets
|
||||
# but this ruined all the tick-list displays.. why?!
|
||||
# return None
|
||||
pass
|
||||
|
||||
jsonfile = Path(settings.DRAWINGS_DATA, "walletjson") / wyear / wname / "contents.json"
|
||||
if not Path(jsonfile).is_file():
|
||||
message = f"! {jsonfile} is not a file {wyear=} {wname=} "
|
||||
from troggle.core.models.troggle import DataIssue
|
||||
print(message)
|
||||
DataIssue.objects.update_or_create(parser="wallets", message=message, url=wurl)
|
||||
return None
|
||||
else:
|
||||
with open(jsonfile) as json_f:
|
||||
try:
|
||||
waldata = json.load(json_f)
|
||||
except:
|
||||
message = f"! {str(self.walletname)} Failed to load {jsonfile} JSON file"
|
||||
print(message)
|
||||
DataIssue.objects.update_or_create(parser="wallets", message=message, url=wurl)
|
||||
return None
|
||||
|
||||
if waldata["date"]:
|
||||
thisdate = make_valid_date(waldata["date"])
|
||||
if thisdate:
|
||||
self.walletdate = thisdate
|
||||
self.save()
|
||||
waldata["date"] = thisdate.isoformat()
|
||||
else:
|
||||
message = f"! {str(self.walletname)} Date format not ISO {waldata['date']}. Failed to load from {jsonfile} JSON file"
|
||||
from troggle.core.models.troggle import DataIssue
|
||||
DataIssue.objects.update_or_create(parser="wallets", message=message, url=wurl)
|
||||
return waldata
|
||||
|
||||
def year(self):
|
||||
"""This gets the year syntactically without opening and reading the JSON"""
|
||||
if len(self.walletname) < 5:
|
||||
return None
|
||||
if self.walletname[4] != "#":
|
||||
return None
|
||||
year = int(self.walletname[0:4])
|
||||
ymin, ymax = YEAR_RANGE
|
||||
if year < ymin or year > ymax:
|
||||
return None
|
||||
else:
|
||||
self.walletyear = datetime.date(year, 1, 1)
|
||||
self.save()
|
||||
return str(year)
|
||||
|
||||
# Yes this is horribly, horribly inefficient, esp. for a page that have date, people and cave in it
|
||||
def date(self):
|
||||
"""Reads all the JSON data just to get the JSON date."""
|
||||
if self.walletdate:
|
||||
return self.walletdate
|
||||
if not (jsondata := self.get_json()): # WALRUS
|
||||
return None
|
||||
|
||||
datestr = jsondata["date"]
|
||||
if not datestr:
|
||||
return None
|
||||
else:
|
||||
datestr = datestr.replace(".", "-")
|
||||
try:
|
||||
samedate = datetime.date.fromisoformat(datestr)
|
||||
self.walletdate = samedate.isoformat()
|
||||
except:
|
||||
try:
|
||||
samedate = datetime.date.fromisoformat(datestr[:10])
|
||||
self.walletdate = samedate.isoformat()
|
||||
except:
|
||||
samedate = None
|
||||
self.save()
|
||||
return self.walletdate
|
||||
|
||||
def people(self):
|
||||
if not self.get_json():
|
||||
return None
|
||||
jsondata = self.get_json()
|
||||
return jsondata["people"]
|
||||
|
||||
def cave(self):
|
||||
if not self.get_json():
|
||||
return None
|
||||
jsondata = self.get_json()
|
||||
return jsondata["cave"]
|
||||
|
||||
def name(self):
    """Return the "name" entry from the wallet JSON, or None if there is no JSON."""
    # WALRUS: parse the JSON once instead of twice (get_json was called
    # once for the check and again for the data).
    if not (jsondata := self.get_json()):
        return None
    return jsondata["name"]
|
||||
|
||||
def get_fnames(self):
    '''Filenames without the suffix, i.e. without the ".jpg".

    Directory entries that are not plain files are listed as "-name-".
    On a bad or missing path, the returned list contains a single
    human-readable error string instead of filenames.
    '''
    files = []
    # Validate fpath before building any path from it.
    if not self.fpath:
        files.append(f"Incorrect path to wallet contents: '{self.fpath}'")
        return files
    dirpath = Path(settings.SCANS_ROOT, self.fpath)  # does nowt as fpath is a rooted path already
    if not dirpath.is_dir():
        files.append(f"Incorrect path to wallet contents: '{self.fpath}'")
        return files
    try:
        for f in dirpath.iterdir():
            if f.is_file():
                files.append(Path(f.name).stem)
            else:
                # non-files (subdirectories etc.) are marked with dashes
                files.append(f"-{Path(f.name).stem}-")
    except FileNotFoundError:
        # race: the directory vanished between is_dir() and iterdir()
        files.append("FileNotFoundError")
    return files
|
||||
|
||||
def fixsurvextick(self, tick):
    """Return "seagreen" when any survex block attached to this wallet has a
    survex file recorded in the db, otherwise return the given tick colour."""
    # blocks = SurvexBlock.objects.filter(scanswallet = self)
    attached_blocks = self.survexblock_set.all()
    # if any exist in db, no check for validity or a real file. Refactor.
    has_survexfile = any(blk.survexfile for blk in attached_blocks)
    # "seagreen" is a slightly different shade of green
    return "seagreen" if has_survexfile else tick
|
||||
|
||||
def get_ticks(self):
    """Reads all the JSON data and sets the colour of the completion tick for each condition.

    Tick keys: S survex data, C cave description, Q QMs, N notes scanned,
    P plan drawn, E elevation drawn, T tunnel/therion drawing, W website.
    Colours: green = done, red = required and missing, orange/purple =
    partial states for survex files, darkgrey = unknowable, lightgrey =
    not expected for that year.
    """
    waldata = self.get_json()
    if not waldata:
        # No JSON at all: every condition is unknowable.
        return dict.fromkeys("SCQNPETW", "darkgrey")

    ticks = {}

    # Initially, are there any required survex files present ?
    # Note that we can't set the survexblock here on the wallet as that info is only available while parsing the survex file
    survexok = "red"
    ticks["S"] = "red"
    if waldata["survex not required"]:
        survexok = "green"
        ticks["S"] = "green"
    else:
        if waldata["survex file"]:
            # a string also is a sequence type, so test for list explicitly
            if not isinstance(waldata["survex file"], list):
                waldata["survex file"] = [waldata["survex file"]]
            ngood = 0
            nbad = 0
            ticks["S"] = "purple"
            for sx in waldata["survex file"]:
                # this logic appears in several places, inc uploads.py). Refactor.
                if sx != "":
                    if Path(sx).suffix.lower() != ".svx":
                        sx = sx + ".svx"
                    if (Path(settings.SURVEX_DATA) / sx).is_file():
                        ngood += 1
                    else:
                        nbad += 1
            if nbad == 0 and ngood >= 1:  # all valid
                ticks["S"] = "green"
            elif nbad >= 1 and ngood >= 1:  # some valid, some invalid
                ticks["S"] = "orange"
            elif nbad >= 1 and ngood == 0:  # all bad
                ticks["S"] = "red"
            elif nbad == 0 and ngood == 0:  # list of blank strings
                ticks["S"] = "red"
            else:
                ticks["S"] = "fuchsia"  # have fun working out what this means

    # Cave Description
    ticks["C"] = "green" if waldata["description written"] else survexok
    # QMs
    ticks["Q"] = "green" if waldata["qms written"] else survexok
    # year() is called once and reused: each successful call saves the model.
    wyear = self.year()
    if not wyear:
        ticks["Q"] = "darkgrey"
    elif int(wyear) < 2015:
        ticks["Q"] = "lightgrey"

    # Older wallet JSON may predate this key.
    waldata.setdefault("notes not required", False)

    # Notes, Plan, Elevation
    files = self.get_fnames()

    # Notes required
    notes_scanned = any(f.startswith("note") for f in files) or any(f.endswith("notes") for f in files)
    notes_required = not (notes_scanned or waldata["notes not required"])
    ticks["N"] = "red" if notes_required else "green"

    # Plan drawing required
    plan_scanned = any(f.startswith("plan") for f in files) or any(f.endswith("plan") for f in files)
    plan_drawing_required = not (plan_scanned or waldata["plan drawn"] or waldata["plan not required"])
    ticks["P"] = "red" if plan_drawing_required else "green"

    # Elev drawing required
    elev_scanned = (
        any(f.startswith("elev") for f in files)
        or any(f.endswith("elev") for f in files)
        or any(f.endswith("elevation") for f in files)
    )
    elev_drawing_required = not (elev_scanned or waldata["elev drawn"] or waldata["elev not required"])
    ticks["E"] = "red" if elev_drawing_required else "green"

    # if electronic, don't require P or E
    if waldata["electronic survey"]:
        ticks["P"] = "green"
        ticks["E"] = "green"
        # ticks["T"] = "green" # No, this does not mean it has been 'tunneled' properly

    # Tunnel / Therion
    ticks["T"] = "red" if (elev_drawing_required or plan_drawing_required) else "green"

    # Website
    ticks["W"] = "green" if waldata["website updated"] else "red"

    return ticks
|
||||
|
||||
def __str__(self):
    """Human-readable identifier, e.g. "[2020#27 (Wallet)]"."""
    return f"[{self.walletname} (Wallet)]"
|
||||
@@ -1,228 +0,0 @@
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
import os
|
||||
import urlparse
|
||||
import re
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
###########################################################
|
||||
# These will allow browsing and editing of the survex data
|
||||
###########################################################
|
||||
# Needs to add:
|
||||
# Equates
|
||||
# reloading
|
||||
|
||||
class SurvexDirectory(models.Model):
    """A directory in the survex data tree, linked to its cave and to the
    primary survex file inside it. (Legacy Python-2-era model, from a
    deleted file in this diff.)"""
    path = models.CharField(max_length=200)
    cave = models.ForeignKey('Cave', blank=True, null=True)
    primarysurvexfile = models.ForeignKey('SurvexFile', related_name='primarysurvexfile', blank=True, null=True)
    # could also include files in directory but not referenced

    class Meta:
        ordering = ('id',)
|
||||
|
||||
class SurvexFile(models.Model):
    """A single survex source file, identified by its path relative to
    settings.SURVEX_DATA, stored without the ".svx" suffix."""
    path = models.CharField(max_length=200)
    survexdirectory = models.ForeignKey("SurvexDirectory", blank=True, null=True)
    cave = models.ForeignKey('Cave', blank=True, null=True)

    class Meta:
        ordering = ('id',)

    def exists(self):
        """Return True if the corresponding .svx file is present on disk."""
        fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
        return os.path.isfile(fname)

    def OpenFile(self):
        """Open the .svx file for reading. NOTE(review): the caller is
        responsible for closing the returned file object."""
        fname = os.path.join(settings.SURVEX_DATA, self.path + ".svx")
        return open(fname)

    def SetDirectory(self):
        """Attach this file to the SurvexDirectory record for its parent
        path, creating that record (with this file as its primary) when it
        does not exist yet, then save this file."""
        dirpath = os.path.split(self.path)[0]
        survexdirectorylist = SurvexDirectory.objects.filter(cave=self.cave, path=dirpath)
        if survexdirectorylist:
            self.survexdirectory = survexdirectorylist[0]
        else:
            survexdirectory = SurvexDirectory(path=dirpath, cave=self.cave, primarysurvexfile=self)
            survexdirectory.save()
            self.survexdirectory = survexdirectory
        self.save()
|
||||
|
||||
class SurvexEquate(models.Model):
    """Groups SurvexStations that are asserted equal (see the `equate`
    foreign key on SurvexStation)."""
    cave = models.ForeignKey('Cave', blank=True, null=True)
|
||||
|
||||
class SurvexStationLookUpManager(models.Manager):
    """Manager that resolves a station from its full dotted survex name."""

    def lookup(self, name):
        """Split "block.path.station" at the last dot, resolve the block
        path via SurvexBlock.objects.lookup, then match the station name
        case-insensitively within that block."""
        blocknames, sep, stationname = name.rpartition(".")
        return self.get(block = SurvexBlock.objects.lookup(blocknames),
                        name__iexact = stationname)
|
||||
|
||||
class SurvexStation(models.Model):
    """A named survey station belonging to a SurvexBlock, with optional
    coordinates and an optional equate group."""
    name = models.CharField(max_length=100)
    block = models.ForeignKey('SurvexBlock')
    equate = models.ForeignKey('SurvexEquate', blank=True, null=True)
    objects = SurvexStationLookUpManager()
    # coordinates, when known (units/datum not stated here - see the survex
    # import code)
    x = models.FloatField(blank=True, null=True)
    y = models.FloatField(blank=True, null=True)
    z = models.FloatField(blank=True, null=True)

    def path(self):
        """Return the full dotted name of this station, built by walking up
        the chain of parent blocks; unnamed blocks contribute nothing.
        Terminates at the first block with no parent."""
        r = self.name
        b = self.block
        while True:
            if b.name:
                r = b.name + "." + r
            if b.parent:
                b = b.parent
            else:
                return r
|
||||
|
||||
class SurvexLeg(models.Model):
    """One measured survey leg between two stations, holding the survex
    tape, compass and clino readings."""
    block = models.ForeignKey('SurvexBlock')
    #title = models.ForeignKey('SurvexTitle')
    stationfrom = models.ForeignKey('SurvexStation', related_name='stationfrom')
    stationto = models.ForeignKey('SurvexStation', related_name='stationto')
    tape = models.FloatField()
    compass = models.FloatField()
    clino = models.FloatField()
|
||||
|
||||
|
||||
#
|
||||
# Single SurvexBlock
|
||||
#
|
||||
class SurvexBlockLookUpManager(models.Manager):
    """Manager that resolves a SurvexBlock from its dotted survex path."""

    def lookup(self, name):
        """Resolve "a.b.c" by starting at the root block (the one with no
        parent, in the survex file "all") and walking down through
        case-insensitively matched child names. An empty name returns the
        root block itself."""
        if name == "":
            blocknames = []
        else:
            blocknames = name.split(".")
        block = SurvexBlock.objects.get(parent=None, survexfile__path="all")
        for blockname in blocknames:
            block = SurvexBlock.objects.get(parent=block, name__iexact=blockname)
        return block
|
||||
|
||||
class SurvexBlock(models.Model):
    """A single survex block (*begin/*end section): one survey trip, with
    its date, expedition, people, stations and total leg length."""
    objects = SurvexBlockLookUpManager()
    name = models.CharField(max_length=100)
    parent = models.ForeignKey('SurvexBlock', blank=True, null=True)
    text = models.TextField()
    cave = models.ForeignKey('Cave', blank=True, null=True)

    date = models.DateField(blank=True, null=True)
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)
    expedition = models.ForeignKey('Expedition', blank=True, null=True)

    survexfile = models.ForeignKey("SurvexFile", blank=True, null=True)
    begin_char = models.IntegerField()  # code for where in the survex data files this block sits
    survexpath = models.CharField(max_length=200)  # the path for the survex stations

    survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)
    #refscandir = models.CharField(max_length=100)

    totalleglength = models.FloatField()

    class Meta:
        ordering = ('id',)

    def isSurvexBlock(self):  # Function used in templates
        """Always True; lets templates tell blocks apart from other objects."""
        return True

    def __unicode__(self):
        """Display name, or 'no name' for anonymous blocks (Python 2)."""
        return self.name and unicode(self.name) or 'no name'

    def GetPersonroles(self):
        """Return a list of dicts {person, expeditionyear, roles}, merging
        the roles of consecutive rows for the same person into one entry."""
        res = [ ]
        for personrole in self.personrole_set.order_by('personexpedition'):
            if res and res[-1]['person'] == personrole.personexpedition.person:
                res[-1]['roles'] += ", " + str(personrole.role)
            else:
                res.append({'person':personrole.personexpedition.person, 'expeditionyear':personrole.personexpedition.expedition.year, 'roles':str(personrole.role)})
        return res

    def MakeSurvexStation(self, name):
        """Return the existing station of this name in this block, or create
        and save a new one. Asserts the name is unique within the block."""
        ssl = self.survexstation_set.filter(name=name)
        if ssl:
            assert len(ssl) == 1
            return ssl[0]
        #print name
        ss = SurvexStation(name=name, block=self)
        ss.save()
        return ss

    def DayIndex(self):
        """Position of this block among all blocks of the same expedition day."""
        return list(self.expeditionday.survexblock_set.all()).index(self)
|
||||
|
||||
|
||||
class SurvexTitle(models.Model):
    """A title string attached to a survex block, optionally linked to a cave."""
    survexblock = models.ForeignKey('SurvexBlock')
    title = models.CharField(max_length=200)
    cave = models.ForeignKey('Cave', blank=True, null=True)
|
||||
|
||||
#
|
||||
# member of a SurvexBlock
|
||||
#
|
||||
ROLE_CHOICES = (
|
||||
('insts','Instruments'),
|
||||
('dog','Other'),
|
||||
('notes','Notes'),
|
||||
('pics','Pictures'),
|
||||
('tape','Tape measure'),
|
||||
('useless','Useless'),
|
||||
('helper','Helper'),
|
||||
('disto','Disto'),
|
||||
('consultant','Consultant'),
|
||||
)
|
||||
|
||||
class SurvexPersonRole(models.Model):
    """One person's role on one survex block, recorded at increasing levels
    of identification precision (raw name string through to person-trip)."""
    survexblock = models.ForeignKey('SurvexBlock')
    nrole = models.CharField(choices=ROLE_CHOICES, max_length=200, blank=True, null=True)
    # increasing levels of precision
    personname = models.CharField(max_length=100)  # free-text name as written (least precise)
    person = models.ForeignKey('Person', blank=True, null=True)
    personexpedition = models.ForeignKey('PersonExpedition', blank=True, null=True)
    persontrip = models.ForeignKey('PersonTrip', blank=True, null=True)
    expeditionday = models.ForeignKey("ExpeditionDay", null=True)

    def __unicode__(self):
        """"person - block - role" display string (Python 2)."""
        return unicode(self.person) + " - " + unicode(self.survexblock) + " - " + unicode(self.nrole)
|
||||
|
||||
|
||||
class SurvexScansFolder(models.Model):
    """A wallet (folder) of scanned survey notes on disk."""
    fpath = models.CharField(max_length=200)
    walletname = models.CharField(max_length=200)

    class Meta:
        ordering = ('walletname',)

    def get_absolute_url(self):
        # '#' in wallet names is escaped by hand so it is not read as a URL fragment
        return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansfolder', kwargs={"path":re.sub("#", "%23", self.walletname)}))
|
||||
|
||||
class SurvexScanSingle(models.Model):
    """A single scanned image file inside a SurvexScansFolder."""
    ffile = models.CharField(max_length=200)
    name = models.CharField(max_length=200)
    survexscansfolder = models.ForeignKey("SurvexScansFolder", null=True)

    class Meta:
        ordering = ('name',)

    def get_absolute_url(self):
        # '#' in wallet names is escaped by hand so it is not read as a URL fragment
        return urlparse.urljoin(settings.URL_ROOT, reverse('surveyscansingle', kwargs={"path":re.sub("#", "%23", self.survexscansfolder.walletname), "file":self.name}))
|
||||
|
||||
|
||||
class TunnelFile(models.Model):
    """A tunnel drawing file, with many-to-many links to the scan folders,
    individual scans, survex blocks and titles it references."""
    tunnelpath = models.CharField(max_length=200)
    tunnelname = models.CharField(max_length=200)
    bfontcolours = models.BooleanField(default=False)
    survexscansfolders = models.ManyToManyField("SurvexScansFolder")
    survexscans = models.ManyToManyField("SurvexScanSingle")
    survexblocks = models.ManyToManyField("SurvexBlock")
    tunnelcontains = models.ManyToManyField("TunnelFile")  # case when its a frame type
    filesize = models.IntegerField(default=0)
    npaths = models.IntegerField(default=0)
    survextitles = models.ManyToManyField("SurvexTitle")

    class Meta:
        ordering = ('tunnelpath',)
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
import django
|
||||
from django import template
|
||||
|
||||
register = template.Library()
# Only define a no-op {% csrf_token %} fallback on old Django versions -
# presumably because newer Django (>= 1.2 by this test) supplies the real
# tag itself. NOTE(review): confirm the version cut-off is intentional.
if django.VERSION[0] >=1 and django.VERSION[1] > 1:
    pass
else:

    @register.simple_tag
    def csrf_token(): return ""
|
||||
@@ -3,7 +3,7 @@ from django.utils.safestring import mark_safe
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
||||
@register.filter()
def link(value):
    """Render *value* as an HTML anchor to its absolute URL, using its str()
    form as the link text. The result is marked safe for template output."""
    # Merge/diff artifact removed: an old Python-2 version of this return
    # statement (using unicode()) preceded this one and made it unreachable.
    return mark_safe(f"<a href='{value.get_absolute_url()}'>" + str(value) + "</a>")
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
from django import template
|
||||
from django.utils.html import conditional_escape
|
||||
from django.template.defaultfilters import stringfilter
|
||||
from django.utils.safestring import mark_safe
|
||||
import re
|
||||
|
||||
register = template.Library()
|
||||
|
||||
# Ordered (compiled regex, replacement) pairs applied in sequence by
# survex_to_html below to mark up survex source text as HTML.
# seems to add extra lines between the commented lines, which isn't so great.
regexes = []
# ";..." comments, to end of line
regexes.append((re.compile(r"(;.*)$", re.IGNORECASE|re.MULTILINE),
                r'<span class = "comment">\1</span>\n'))
# *include with an explicit .svx suffix: link to the included file's index
regexes.append((re.compile(r"^(\s*)(\*include)(\s+)([^\s]*)(.svx)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3<a href="\4.index">\4\5</a>'))
# *include without a suffix
regexes.append((re.compile(r"^(\s*)(\*include)(\s+)([^\s]*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3<a href="\4.index">\4</a>'))
# *team with a recognised role
regexes.append((re.compile(r"^(\s*)(\*team\s+(?:notes|tape|insts|pics))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
# simple one-argument commands
regexes.append((re.compile(r"^(\s*)(\*(?:begin|end|copyright|date|entrance|equate|export|fix|prefix|require|SOLVE|title|truncate))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*calibrate\s+(?:TAPE|COMPASS|CLINO|COUNTER|DEPTH|DECLINATION|X|Y|Z)+)(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*data\s+(?:DEFAULT|NORMAL|DIVING|CARTESIAN|TOPOFIL|CYLPOLAR|NOSURVEY|passage)(?:\s+station|\s+from|\s+to|\s+FROMDEPTH|\s+TODEPTH|\s+DEPTHCHANGE|\s+newline|\s+direction|\s+tape|\s+compass|\s+clino|\s+northing|\s+easting|\s+altitude|\s+length|\s+bearing|\s+gradient|\s+ignoreall|\sleft|\sright|\sup|\sdown)*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>'))
regexes.append((re.compile(r"^(\s*)(\*default\s+(?:CALIBRATE|DATA|UNITS)+)(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*flags\s+(?:DUPLICATE|SPLAY|SURFACE|not DUPLICATE|not SPLAY|not SURFACE))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*infer\s+(?:plumbs|equates|exports))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*instrument\s+(?:compass|clino|tape))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
# NOTE(review): this *instrument entry is an exact duplicate of the one
# above - harmless (the second pass matches nothing new) but redundant.
regexes.append((re.compile(r"^(\s*)(\*instrument\s+(?:compass|clino|tape))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*sd\s+(?:TAPE|COMPASS|CLINO|COUNTER|DEPTH|DECLINATION|DX|DY|DZ))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*set\s+(?:BLANK|COMMENT|DECIMAL|EOL|KEYWORD|MINUS|NAMES|OMIT|PLUS|ROOT|SEPARATOR))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
regexes.append((re.compile(r"^(\s*)(\*units\s+(?:TAPE|LENGTH|COMPASS|BEARING|CLINO|GRADIENT|COUNTER|DEPTH|DECLINATION|X|Y|Z))(\s+)(.*)$", re.IGNORECASE|re.MULTILINE),
                r'\1<span class = "command">\2</span>\3\4'))
# final catch-all: wrap every line in a <div>
regexes.append((re.compile(r"^(.*)$", re.IGNORECASE|re.MULTILINE),
                r'<div>\1 </div>\n'))
|
||||
|
||||
@register.filter()
@stringfilter
def survex_to_html(value, autoescape=None):
    """Template filter: render survex source text as HTML by applying each
    (regex, replacement) pair in `regexes` in order. The result is marked
    safe for template output."""
    if autoescape:
        value = conditional_escape(value)
    for regex, sub in regexes:
        # Removed a stray Python-2 debug statement ("print sub") that was
        # left in here - it also made the module unparseable under Python 3.
        value = regex.sub(sub, value)
    return mark_safe(value)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user