forked from expo/troggle
Compare commits
1095 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 7b9b88978a | |||
| 88e5f0aa81 | |||
| 5efac62ecb | |||
| 211465247d | |||
| 69fbf7fd58 | |||
| 7a80c3d9f6 | |||
| f6770187ef | |||
| 200e31c7fa | |||
| 8a227e1446 | |||
| cdeb2d0a3f | |||
| 9887bf6696 | |||
| f4367d0fcf | |||
| 8e65dc0892 | |||
| 02bf9608b2 | |||
| 267bda67f9 | |||
| b91daafebd | |||
| 672c84e426 | |||
| d1c5f802ee | |||
| dc48a55d7e | |||
| 93b8093c0a | |||
| 8cf5a5705c | |||
| d91e22c656 | |||
| 9bd8b25a45 | |||
| d173ed2a64 | |||
| 873034b8b8 | |||
| 3ff8eeaf24 | |||
| 8c0a2ffaa7 | |||
| 16a9e00e48 | |||
| dd997824e4 | |||
| 291d2b9e73 | |||
| b951b1e152 | |||
| b3fb8286d5 | |||
| b0dc464796 | |||
| dbb9f35d9b | |||
| 900d46fc3a | |||
| 4659519791 | |||
| 55c3eb4e25 | |||
| 9f2874f86c | |||
| f563d7253b | |||
| 3c40720a95 | |||
| 4d5b21dfce | |||
| b73ffdee50 | |||
| 177047127c | |||
| 9a47648e4a | |||
| 10e270b8d6 | |||
| 4b4098f782 | |||
| 457b5ef6bd | |||
| e49375419d | |||
| f475996a6b | |||
| 78c7d580fc | |||
| 640b6d8059 | |||
| 0647451a02 | |||
| 3b93507770 | |||
| 23619f3175 | |||
| b5d20373b0 | |||
| a153905e15 | |||
| b4221e01e8 | |||
| 5b7bf62826 | |||
| 26c70cc8fa | |||
| fe06448d38 | |||
| 883b24e0d7 | |||
| 8dea7e304f | |||
| 17ec91acce | |||
| 03e1f61a9f | |||
| 1a2dab89c9 | |||
| 0abbf06b9c | |||
| e7ecae6295 | |||
| 8204a40667 | |||
| fd2f920745 | |||
| c899c0749c | |||
| 8a0de50793 | |||
| e04554e0db | |||
| d41ee79606 | |||
| b3d2b68d56 | |||
| a04652af75 | |||
| c3eecdb880 | |||
| c4e2ac66b5 | |||
| 3939668e0d | |||
| 831e0a3897 | |||
| f815b367f3 | |||
| a997b16c94 | |||
| 0553bf72fe | |||
| 83770824a5 | |||
| c5c9b91374 | |||
| bbaf52b8b7 | |||
| cec59ccaa3 | |||
| 0e19353f8f | |||
| e4c5009d0a | |||
| 34ed30ce55 | |||
| c7ddd79978 | |||
| 767e3d568c | |||
| a72950b546 | |||
| 8b1e754ded | |||
| 7c5bd1d9cc | |||
| 5a749ee039 | |||
| 6b012414c8 | |||
| 5b91de1d13 | |||
| 2141dc1eba | |||
| 3e203f992e | |||
| e5e960ac3f | |||
| 89bc27b1f8 | |||
| 5152802c6b | |||
| 1b7798e2fc | |||
| 7a779555ac | |||
| 647e8f5698 | |||
| c2818b4c97 | |||
| 8f4ad8e72f | |||
| 8455f39809 | |||
| 3783b49162 | |||
| b31c404d2b | |||
| 1a7e8c94cb | |||
| 0d6d1fd4b6 | |||
| 8f576c0a94 | |||
| 76354a4c8d | |||
| b780006dfb | |||
| 5a1c0a2bd6 | |||
| 86ba377bd8 | |||
| 5c451610ba | |||
| 7b6a066b91 | |||
| 95352a0d18 | |||
| d19c38ef46 | |||
| fa598c61ea | |||
| 97c6b5459a | |||
| 5ffdf0c055 | |||
| c898055cc6 | |||
| 8f2bc70387 | |||
| 1636fd7063 | |||
| 441dd557eb | |||
| 136f9bf935 | |||
| 8706991888 | |||
| 6fa8f24ba5 | |||
| 3a93d10d4f | |||
| c58857d1e1 | |||
| 243796ede6 | |||
| 9870c00c91 | |||
| 484698e565 | |||
| 9392d4a2d9 | |||
| 09a9b6e18f | |||
| 31a7b62ca9 | |||
| 67e154ab83 | |||
| 677bf92565 | |||
| 70d1db41b1 | |||
| 0a2386f179 | |||
| d17077aade | |||
| bd474c1d59 | |||
| 120e632bcb | |||
| 7789590796 | |||
| 2af8e19d4f | |||
| 0866321ddc | |||
| eefbdb59bb | |||
| 542e9c778a | |||
| 4d52cdbf18 | |||
| 39be4a2886 | |||
| e23cab50ed | |||
| d6b2811457 | |||
| f3b46856ee | |||
| dd2bfe8fe3 | |||
| 3d07cec143 | |||
| e1a49c8727 | |||
| ba2fa3caf5 | |||
| 7669f85984 | |||
| c136360060 | |||
| 551711fb34 | |||
| e40a798974 | |||
| ab110f9f04 | |||
| 762d11b720 | |||
| 18a9bcc258 | |||
| 31d7c4a718 | |||
| 137b6bce7d | |||
| ced9a7b024 | |||
| 0e6a3e457d | |||
| 88833941b5 | |||
| e2c1bc3516 | |||
| 2807ed5c21 | |||
| 0a16a0bd9d | |||
| 76fe0c0379 | |||
| 7889162420 | |||
| f52eab842f | |||
| 8b00c441f5 | |||
| d4ec144434 | |||
| f378406893 | |||
| 90ae23fc05 | |||
| 8d5fa586ca | |||
| 7e5201edc6 | |||
| 7e599afd40 | |||
| 66834caffb | |||
| 7bbc413902 | |||
| c6647907e5 | |||
| 5b53264316 | |||
| e39037b333 | |||
| 0a5e779dfd | |||
| f573838e89 | |||
| 061738d56d | |||
| b2104e191b | |||
| 3fb310ed97 | |||
| f5c1c6a90e | |||
| c08d06d81f | |||
| 78699b934d | |||
| 28914916b6 | |||
| 179ba32c5a | |||
| 1968c099fe | |||
| d9953d8bbe | |||
| 6e5b893646 | |||
| 993005761c | |||
| 43b3d712a7 | |||
| ac50d28d36 | |||
| 7daef9f253 | |||
| 5e4ab5006f | |||
| 79ef45a99f | |||
| ed43ab4bcf | |||
| 4bbbdba571 | |||
| 6ccd16a508 | |||
| d25a2154af | |||
| e842ae1ca3 | |||
| 905d9af978 | |||
| 9806212395 | |||
| 76854a84d7 | |||
| 870dfb5129 | |||
| bba585acf1 | |||
| 305cb8a392 | |||
| 90141c4263 | |||
| deead77c08 | |||
| 4d3821f572 | |||
| ff739b9e24 | |||
| d455b9aee7 | |||
| 4138dd0671 | |||
| 400fd8430e | |||
| 1370524479 | |||
| 6025f0b1dc | |||
| 68621a4a92 | |||
| bc154c9016 | |||
| b1e518b3aa | |||
| a7ec46cb70 | |||
| 9df466de2f | |||
| ca9fd8ec55 | |||
| 64419ffb7c | |||
| 5b129fee8f | |||
| a32bee3c8c | |||
| 02fe1f9750 | |||
| 544cf0522a | |||
| 810f4a8af8 | |||
| a7152b591d | |||
| 19c3f02c2d | |||
| cc3203a31f | |||
| 4933515be6 | |||
| bab1736636 | |||
| b982a0a3ca | |||
| d24c992482 | |||
| eec9760b6c | |||
| 26332d86e7 | |||
| 1b82113f11 | |||
| 87fd355985 | |||
| 2a0001d994 | |||
| 8e147986ae | |||
| afe9190c97 | |||
| 0cf76e8c6b | |||
| 53e03d5462 | |||
| 8379f7b154 | |||
| ebfdc6c34c | |||
| ae88d279cb | |||
| e70dfec759 | |||
| 4dece336f2 | |||
| 9d235ff10a | |||
| b4f719d7bb | |||
| 6123c03ef4 | |||
| 579389589b | |||
| 0bbb0a5173 | |||
| 6ee06f0879 | |||
| 962263beb0 | |||
| 08e545f03a | |||
| c2c70e6a95 | |||
| f76fd8ab65 | |||
| 4075f43c5f | |||
| 7a9bcd02f7 | |||
| 959c358c09 | |||
| 7564ce4d27 | |||
| 5fa4e3f72a | |||
| cf566d4bc7 | |||
| 08577deeb5 | |||
| 2e26532837 | |||
| 9b0da4ef2f | |||
| 51c9fe8839 | |||
| 5f2791816d | |||
| 89e38397b9 | |||
| c82ed82aba | |||
| 1bae6c6cd1 | |||
| 57fb3a0078 | |||
| f0c5168067 | |||
| df0ea7ab30 | |||
| 786261fcfd | |||
| 751f677c44 | |||
| bbb769faab | |||
| 76f7b10777 | |||
| d6dcd7a39a | |||
| e3df02076c | |||
| bf09ee0054 | |||
| 4fa767585c | |||
| 5204e3cc68 | |||
| 29eae4e9b2 | |||
| 3032386852 | |||
| 07e702c458 | |||
| 2b5d3ecb01 | |||
| 6ffcae16a2 | |||
| ec4feaee8b | |||
| 6a6ec66a88 | |||
| 0135d47536 | |||
| 2f7354d556 | |||
| c1bf6e94aa | |||
| 8e9ae0dd92 | |||
| 9e9122da81 | |||
| 8cf85ca5ef | |||
| bb74b95aaf | |||
| 1ca7fd5a2c | |||
| 57b68fc7db | |||
| f9d8a2bea3 | |||
| 1d7603a7fe | |||
| b79523e4e4 | |||
| 226b13b136 | |||
| 67b366a2d0 | |||
| e983ac2f00 | |||
| a319863654 | |||
| 9409e27e1d | |||
| 833aeaeeba | |||
| 9a3651ed8b | |||
| b5540fd543 | |||
| 2039501672 | |||
| 1cfcbccf76 | |||
| 083de7fdd5 | |||
| 29840aabff | |||
| 5770a9b2e7 | |||
| 24079ab5fe | |||
| af22823e7a | |||
| 25e5f5ccde | |||
| 543d30c84a | |||
| 2aee915abc | |||
| b206d08b39 | |||
| d7bfe83039 | |||
| db5be84d77 | |||
| 17347850b3 | |||
| 93172f3b70 | |||
| a2356d9559 | |||
| 14222dd3fc | |||
| b53f09bb8f | |||
| 5ea359561a | |||
| 7149b5d379 | |||
| 0c97c11d82 | |||
| ae6e8205a1 | |||
| 7d0155fea8 | |||
| f12f788248 | |||
| 51d8a50d90 | |||
| 15156aa79c | |||
| ea0a5b153f | |||
| 6cf7bbb7d9 | |||
| c2292038c4 | |||
| d19e60976e | |||
| 38744a1781 | |||
| 5f1b01bea7 | |||
| dbec098bba | |||
| 468634527f | |||
| a9e687c7ce | |||
| 4b4ab973f8 | |||
| 6c962c0132 | |||
| 8d901efdfc | |||
| 823ef5b7e7 | |||
| 501692586d | |||
| fc427993e7 | |||
| c89e8cea90 | |||
| c29548db01 | |||
| a7966e714d | |||
| 19844cd94a | |||
| 3d12397d9a | |||
| 576e4a20f0 | |||
| 6e3279e585 | |||
| fdc656dbba | |||
| 7a95c6478c | |||
| 40fb066e2b | |||
| d8cdf7bc5a | |||
| e1d7b35015 | |||
| 40aeee9bf9 | |||
| 1bb950ec45 | |||
| c7df859d80 | |||
| 955baeb238 | |||
| 9ae9ca9ec3 | |||
| 06f9de360a | |||
| d06526846f | |||
| e0c87b167b | |||
| 63ff01700f | |||
| 85185c23b4 | |||
| da28aa7b22 | |||
| 38130c876b | |||
| 31c77fd89c | |||
| 2b010384f1 | |||
| 9a4916cc58 | |||
| ec3f513308 | |||
| ab8d527163 | |||
| 3bdf73ccad | |||
| 7fb42c926f | |||
| 06861144c3 | |||
| 71c3fb12ab | |||
| 4e9c766493 | |||
| 2f098b7333 | |||
| b1d10a41d5 | |||
| 54717a436c | |||
| c98f386c13 | |||
| 73812b41b1 | |||
| ddb90b3a39 | |||
| 3118b717a0 | |||
| e95cff5818 | |||
| 8a95b0609d | |||
| 81fba01d1e | |||
| 731095e2d9 | |||
| 6586c00a37 | |||
| 0ab0750511 | |||
| aeaf7cf57f | |||
| a950cc60d9 | |||
| cc06e2e1f4 | |||
| 95190324fb | |||
| c23e93d7a7 | |||
| 61c0f91088 | |||
| 6c30a9ffcb | |||
| 95878e630d | |||
| 6a5513e52b | |||
| dc83ae1bc6 | |||
| 9fd86dc0c4 | |||
| ae36f1a9ce | |||
| a976497b3a | |||
| 3fb99eb4be | |||
| 3a3e5765f9 | |||
| d05b6b9b5f | |||
| 096c3be4e5 | |||
| b8b2d52866 | |||
| c099bf8071 | |||
| 89afdded38 | |||
| 91d8d33e95 | |||
| f4afa0c672 | |||
| 1c07dc5a94 | |||
| 54c2e3c9c3 | |||
| 10352f2ccb | |||
| 0e2ccee678 | |||
| 79b2280cde | |||
| 60fcb831d1 | |||
| 441049b0f1 | |||
| c4f990f002 | |||
| 875145fc38 | |||
| a357f4cf5b | |||
| 06b81ea825 | |||
| 28d24e48de | |||
| 7c8253dcfc | |||
| c3eed61080 | |||
| ea77d4f3e4 | |||
| f757d7632c | |||
| 7b462a883b | |||
| f39e2c9bb5 | |||
| 3003b3c3c3 | |||
| a0243c030e | |||
| 859a1dd4a8 | |||
| e9e0050162 | |||
| 59599ecdbf | |||
| afa3727757 | |||
| 062ab3e5f8 | |||
| 43634fb475 | |||
| 49ea30cf06 | |||
| 4a0496b3b4 | |||
| e080610010 | |||
| 6d5c6c0d0b | |||
| 8073aca5d2 | |||
| d275211aa7 | |||
| 0084412258 | |||
| 3d1dbf2e0d | |||
| 2681127037 | |||
| 53865e64d8 | |||
| bb4df7d07f | |||
| cf89b1b67f | |||
| e39760590f | |||
| 798ffc248e | |||
| db1c1fec6c | |||
| 6e7f0d39cc | |||
| 1f0a4806a2 | |||
| 11cf61b0a3 | |||
| bac65b5897 | |||
| 98594a07e2 | |||
| 1825ed55fc | |||
| 6d25f70491 | |||
| f1682367ee | |||
| aef0de715d | |||
| ce508b0eb2 | |||
| 7fab42fa9e | |||
| 5a678dcb0e | |||
| 288ce80e07 | |||
| 3ad75d4439 | |||
| 4f1d808720 | |||
| 622dfc34e3 | |||
| d78345e335 | |||
| 1c3d2f7b73 | |||
| 2446758e6e | |||
| 95ea87233f | |||
| 0d105d40da | |||
| c479345b6c | |||
| 1e3a684c4b | |||
| a5bce14226 | |||
| 9c3a40dd98 | |||
| 8af4fc5b90 | |||
| 1502ffb0c4 | |||
| 27cc23fe3b | |||
| fedcc6d201 | |||
| 61722fd6c0 | |||
| 27a14d0a0f | |||
| 072a04487d | |||
| 2daad646e3 | |||
| a5d0ad3e4f | |||
| f842dab12a | |||
| 173ee2348f | |||
| a5341c4eb2 | |||
| 18517f5ec7 | |||
| c602587990 | |||
| c5357cab3d | |||
| a784ca8641 | |||
| d19b1e79ab | |||
| 157f1fcf27 | |||
| bb66c09498 | |||
| e632a0d918 | |||
| fd4a3bc015 | |||
| 8ac3791fcf | |||
| b59ab47bc2 | |||
| 65cdeb4994 | |||
| 01afb09a6e | |||
| b34ad3a82b | |||
| ba5987b674 | |||
| d79ffd8354 | |||
| 8d8bc47e79 | |||
| 650cee4b0e | |||
| 7769fa868e | |||
| 1fcb2c5203 | |||
| 960958c922 | |||
| e37165f136 | |||
| df52ee9251 | |||
| 4f9468ec66 | |||
| 0201281e38 | |||
| b9f9cba672 | |||
| 3351d949d9 | |||
| 4d49eefccb | |||
| 79cf342d33 | |||
| a4957df557 | |||
| 551e849a0f | |||
| d903f883eb | |||
| ba4851fe80 | |||
| 1678dedf08 | |||
| b94ea8eb22 | |||
| 54566ef092 | |||
| c4eb14148a | |||
| d21278c5de | |||
| d49d2eecee | |||
| 05c687b253 | |||
| 919e02cee9 | |||
| 6cfb6f3373 | |||
| 8ce5db9175 | |||
| 219cc2870a | |||
| 1a044759c6 | |||
| a6a0db3c8f | |||
| 0c7fafd07f | |||
| 999d3fd02b | |||
| abb32e6b5d | |||
| fe9589039e | |||
| ea2f4526e8 | |||
| 9331d7e1db | |||
| d1ca37f00e | |||
| 4116b32c33 | |||
| 3353b4e1fc | |||
| 12499f423d | |||
| afd2b7cc18 | |||
| d2fade5a3d | |||
| eb22047c08 | |||
| f4fe681524 | |||
| 3df9b1a3d6 | |||
| 8db340fd62 | |||
| c5b08ce80f | |||
| f3bd9024cf | |||
| 21d69994e7 | |||
| d603bef64e | |||
| d31777dc8d | |||
| 49c0c0fe3a | |||
| 486a50f876 | |||
| 219b8b792e | |||
| 5b97cd83dd | |||
| 4435836313 | |||
| f5441e4c6b | |||
| f73f1b50fe | |||
| 1ddd4da27b | |||
| 2623af92c4 | |||
| 52600df2f2 | |||
| f2a43558f9 | |||
| e3a9498656 | |||
| a3c7f165b9 | |||
| ace2325773 | |||
| 6d2484376a | |||
| 13c5d14a9f | |||
| dac3e6e288 | |||
| e7444d20a4 | |||
| ecd187e88e | |||
| 09a16fed3b | |||
| 9274600f1e | |||
| 6cdf883ad7 | |||
| b71bb05fcd | |||
| 4652d83b49 | |||
| 8dbad16ece | |||
| 60d24dc48e | |||
| dccd465354 | |||
| 2e62f3c7d4 | |||
| 2b97c8f783 | |||
| 5ee26af02a | |||
| 42b978197c | |||
| 90ecdda407 | |||
| 1b6333fdef | |||
| 011e6777c9 | |||
| 19bbb00dcc | |||
| 151cba336b | |||
| d8ab17217f | |||
| f560cce348 | |||
| 407e97f91c | |||
| e532b15c1d | |||
| a836dd2619 | |||
| b541e258ec | |||
| 4677ac8b2c | |||
| 30760654b7 | |||
| eb74940ca8 | |||
| 5475a0b853 | |||
| 492f2b00b0 | |||
| 8eb248bb13 | |||
| 3b5ead22f2 | |||
| 36bb964845 | |||
| f04aa7d067 | |||
| 1d3b8d44b4 | |||
| cfc7a4b9f6 | |||
| 7730b2535f | |||
| 321f912083 | |||
| 58c9dd7d09 | |||
| bbdd2cc58f | |||
| 1ffc971285 | |||
| c401af7bfc | |||
| f3235f8b76 | |||
| 8b6a8be2c8 | |||
| eb07a36eaf | |||
| 54a069df84 | |||
| 2bebc781a1 | |||
| 8c91d2f47d | |||
| 555cb63be3 | |||
| 8c56a45e7c | |||
| 700512c008 | |||
| 26eff0172b | |||
| aa164e9d81 | |||
| cb9bfbf5d2 | |||
| d3042264b3 | |||
| 2598126879 | |||
| 29a7b984da | |||
| 7e92ee2084 | |||
| e6601e1bf7 | |||
| 2787b6c4ec | |||
| b6fc5f3d32 | |||
| fd189c68b3 | |||
| c7f0061605 | |||
| 005e1c132a | |||
| 9063e54e6c | |||
| 3e198fc410 | |||
| fc145648e6 | |||
| 6e1c83ec6e | |||
| d57e49b5b2 | |||
| aaabcafcfe | |||
| 74ce8894d2 | |||
| e64cb212c1 | |||
| 581f0259e2 | |||
| f99141b0cb | |||
| 89ef1afbe8 | |||
| 1203b51530 | |||
| 517d27458c | |||
| 7dbaea5d24 | |||
| 859d9a5b22 | |||
| beb83e93f4 | |||
| e97d60a1c4 | |||
| 3bfb082e83 | |||
| 5e6aa0a175 | |||
| 490fb61571 | |||
| c1f9a26f5b | |||
| 62320e9f7e | |||
| 526d0ad904 | |||
| 9166b6ed74 | |||
| 245486666c | |||
| 6ec90b4665 | |||
| 7c36fd5d2b | |||
| f590d0c12c | |||
| 219637e215 | |||
| 957f78b1a3 | |||
| ca103fde22 | |||
| a2f4591f72 | |||
| 07fc372b41 | |||
| feaf38aa39 | |||
| c01f0e1dff | |||
| 48f2407a06 | |||
| 2de4ad2a2f | |||
| baf58a5433 | |||
| 41fff1f3dd | |||
| bca1123d4d | |||
| 83e22f828c | |||
| c95d861516 | |||
| afa8ff4f66 | |||
| c25a4b47de | |||
| 78bc7323e9 | |||
| 210174b901 | |||
| c27a4f0ddc | |||
| 5740a6b0d6 | |||
| ff08e05485 | |||
| 0ce2ae4d1c | |||
| 5a4d874b9f | |||
| 41893b81e8 | |||
| 3ba1454f15 | |||
| 12fa55f537 | |||
| 6729698d7b | |||
| 00f5ffa1b6 | |||
| a6ca40becd | |||
| 5b23b2df8a | |||
| ceb20f8565 | |||
| 1365190330 | |||
| b907dcc993 | |||
| 2cb665e141 | |||
| 11756fa0bb | |||
| ee5b36a33c | |||
| 35b04d096e | |||
| f4c25ba163 | |||
| 1a5e6167fd | |||
| f874bdc55e | |||
| cef872d038 | |||
| fca95ce539 | |||
| 24c5ba9711 | |||
| a4783d2332 | |||
| 7908257d63 | |||
| f5f3adf7da | |||
| 63ef8c9fb1 | |||
| a8c9f8b899 | |||
| a68ef27284 | |||
| 35e6bbe1ac | |||
| cb854696c9 | |||
| 5ffe8230b1 | |||
| 8b2ec54676 | |||
| f36f10d903 | |||
| 69ce1d91f3 | |||
| 57bab53cec | |||
| b28b590b60 | |||
| 0ae1315f59 | |||
| 3ba9f9d5c7 | |||
| 8732641cfd | |||
| 1c8c36c82f | |||
| b6ffcb63bf | |||
| cb81a066db | |||
| 704ff8335d | |||
| c58f2716b0 | |||
| e4d3e33dec | |||
| 208c0ffa1f | |||
| 0f754694ed | |||
| c3b08c85ef | |||
| a6730b1b3a | |||
| c51f76ed83 | |||
| e432098f6a | |||
| 8e1cf1021d | |||
| 6789d4f627 | |||
| 24a97b9dbd | |||
| 897cdf9aee | |||
| 313d4bde30 | |||
| d1e853ef47 | |||
| b93ff5552b | |||
| a6c47b2654 | |||
| 87d67f5820 | |||
| 6d16f8f7ca | |||
| 09dbe5b14b | |||
| 70bdb50ae7 | |||
| 03fa36576b | |||
| 5fff060487 | |||
| c18ebeb992 | |||
| 36ef9f961d | |||
| f156b89f32 | |||
| efbb5b1b97 | |||
| 76f5ca3f45 | |||
| 1874a755a9 | |||
| b8d5ce2542 | |||
| 77fafc378e | |||
| af414396d6 | |||
| dfdf21459a | |||
| b73ac24a3b | |||
| 65f0c1e29f | |||
| 2743be281e | |||
| 8bafe9be98 | |||
| a0ef426cdb | |||
| 5dfaa893ad | |||
| 5523c7a484 | |||
| e2d7ab4416 | |||
| dbfe72071e | |||
| d970942f04 | |||
| e1eb85969a | |||
| 4c8a88d20c | |||
| b3490aa52d | |||
| 7325d934f9 | |||
| 83920e4d83 | |||
| e9255a2cb5 | |||
| abfa5b2757 | |||
| 5948db2555 | |||
| a61751e1fb | |||
| 422d2324e8 | |||
| 957002507c | |||
| 18a8fcbfb5 | |||
| 478f8b9ea1 | |||
| 974b91d939 | |||
| e46cbe5d41 | |||
| f26c9d1ff2 | |||
| c518fbc5af | |||
| 1b44bc2867 | |||
| 00ce42c1cf | |||
| c72efe1326 | |||
| 1e00e56522 | |||
| 1c19211c33 | |||
| 96b034d026 | |||
| ebfab4da45 | |||
| 8fd09bef92 | |||
| 521fd20e37 | |||
| 480b98a704 | |||
| 3158bfb746 | |||
| 857b4c5809 | |||
| 32e487eca6 | |||
| a63f04d194 | |||
| 80c19864ad | |||
| c0e35e964b | |||
| e1945feef5 | |||
| b191678570 | |||
| ab1212dd8a | |||
| bd81bd0b01 | |||
| 740a5a4fa8 | |||
| 7c7c0812e8 | |||
| 7a61bc47ea | |||
| 76688695b8 | |||
| 688a1795e7 | |||
| 026d959aeb | |||
| d333ebe88e | |||
| ddf88d5394 | |||
| 41e64a217c | |||
| 5961175193 | |||
| e1c9de14ca | |||
| 7b67eb52dc | |||
| 680477d2d5 | |||
| 6bc0884d43 | |||
| d2ee32d3e6 | |||
| 699c19245a | |||
| 7fb0f0734d | |||
| 2761fb5867 | |||
| 0c1601e1b0 | |||
| 08c56644eb | |||
| ca289afe68 | |||
| 717ef2fad9 | |||
| d5900b6b7a | |||
| 29e16b66b7 | |||
| 51cf09ece7 | |||
| dec7acaf14 | |||
| 9bc94f59b5 | |||
| 2215464cfa | |||
| b5cc66a576 | |||
| 1e8a5bea6e | |||
| 1ba37665b5 | |||
| 3f94955883 | |||
| 5652b9b66a | |||
| 07d9365747 | |||
| bd0a9332df | |||
| df79bdb711 | |||
| c0687615a4 | |||
| 2c67351424 | |||
| d524f94c47 | |||
| 3c62defdfd | |||
| a63275f913 | |||
| 55bbccb4ca | |||
| 70c629f455 | |||
| 6630877b02 | |||
| 99b87f808c | |||
| f78c719fd7 | |||
| fc60bde3ec | |||
| c7cb8ece2e | |||
| 685131a4c1 | |||
| c95e6e8097 | |||
| cc4a7f04da | |||
| 742ccb5f0b | |||
| 62de8095d8 | |||
| 84e0b4fe3f | |||
| f706bd814e | |||
| 97bb1a3ad1 | |||
| 788de853dc | |||
| 7672de2dd1 | |||
| 87d9804864 | |||
| f14bd984f8 | |||
| 0843a27966 | |||
| 33477f2b40 | |||
| b522899216 | |||
| c8c21fbe04 | |||
| 1694d01536 | |||
| 2b96086535 | |||
| 54ffab3e93 | |||
| 8f87e4f77a | |||
| 1641dfe5f1 | |||
| e7a0c57330 | |||
| 24029be7d3 | |||
| 850e87b97a | |||
| dfd8f4a453 | |||
| 8c7d4f8ebb | |||
| 423347dbe5 | |||
| a823a7b2d8 | |||
| 0874ca38f4 | |||
| c4da7f6fa7 | |||
| 9b96c4c745 | |||
| c1d14ea49d | |||
| c0e4bee26b | |||
| 1cbbdad1b3 | |||
| de298748e3 | |||
| 0762d53c18 | |||
| bf8bf7b3d2 | |||
| 6b94829315 | |||
| d88ae2f78c | |||
| a786f4813f | |||
| c51c2326fe | |||
| 5d383e0445 | |||
| 5cfc237448 | |||
| bf8d881770 | |||
| 246fb29193 | |||
| d3a7a9823a | |||
| 3b1fd56fe4 | |||
| a2bddaeb89 | |||
| 0cd527761e | |||
| 7e1c8797b8 | |||
| 2452fe5752 | |||
| d34c2c50d7 | |||
| d6a3006444 | |||
| 973f9bedd5 | |||
| 12293e4513 | |||
| 67136e9881 | |||
| eed031cf71 | |||
| 0151626c0b | |||
| 31461531fc | |||
| 152156227d | |||
| 7c0187c75b | |||
| 5f67af35f0 | |||
| c3642f1ae4 | |||
| 2f3827d60e | |||
| dfdbba6550 | |||
| 0a72e50c90 | |||
| 207513b8b5 | |||
| ed993761a1 | |||
| fcfa59cdf7 | |||
| eae919e5b2 | |||
| 6950fc1d1d | |||
| adbc8f7955 | |||
| da10605d75 | |||
| bc621efc36 | |||
| 9f4306e367 | |||
| 21e0700b8d | |||
| ad2d25ed00 | |||
| f161ed3cf4 | |||
| d8aad0ba2b | |||
| 7b8703dadc | |||
| 16d3ee9f92 | |||
| fd94909ee7 | |||
| c80db1a55f | |||
| c884156889 | |||
| 8dcae6437e | |||
| 904b6309cf | |||
| a67a97b9fc | |||
| e98c63f51c | |||
| 7f9f598f11 | |||
| 421b47c137 | |||
| 0d5297f08c | |||
| 9aad95bfd0 | |||
| 505eb6475e | |||
| 51dbf5e9e6 | |||
| 98e7cf6d1b | |||
| d6eadeffd6 | |||
| cf6ba7568f | |||
| d4cf76da40 | |||
| f10db6d73d | |||
| 18e1775cff | |||
| 85dba41601 | |||
| 9c05cb6af0 | |||
| 017f916ef9 | |||
| a85f859f88 | |||
| 81b5198cd1 | |||
| 9db0b2a191 | |||
| a8d4b05617 | |||
| 3390f17aa4 | |||
| 29879fdde2 | |||
| 7ddf7fe373 | |||
| 1effc10e3f | |||
| 85487efee3 | |||
| db3d1ff4ef | |||
| 1085a14b92 | |||
| 0295fce110 | |||
| 724989f985 | |||
| 70dd61b2ba | |||
| 33a485d207 | |||
| af136d3432 | |||
| d323ff2700 | |||
| 47db19f1a2 | |||
| 9d6552ea22 | |||
| 2f6017d6d3 | |||
| ab79a43afa | |||
| ad272fab3b | |||
| 327b1923b0 | |||
| 54136721b8 | |||
| 5a191ee554 | |||
| 7779544c0c | |||
| 28d1092956 | |||
| 5fc5c1285a | |||
| e1aabc84e8 | |||
| 8b76cad15a | |||
| 1631111a7f | |||
| 335ffb360d | |||
| 6d5cee7f6a | |||
| 6b64149131 | |||
| 2c5ebde308 | |||
| 69340db438 | |||
| 1ddb8248df | |||
| 0dc0e27519 | |||
| 3c6cae20ed | |||
| 83d058221d | |||
| 38d0e855c9 | |||
| cc9f425fb5 | |||
| 8c721e905a | |||
| 8e2990ca7a | |||
| 220e1327d7 | |||
| 0ea8fadaeb | |||
| f3d5a389eb | |||
| 2506517e17 | |||
| ac128705f6 | |||
| d3ac321d38 | |||
| a5dc78b04d | |||
| a2e30b29ef | |||
| f53b551d5a | |||
| 3a04a8490e | |||
| 2c89cdc1b9 | |||
| 101910a957 | |||
| eb7a1efea5 | |||
| a4f676fd32 | |||
| 1a8bc17f80 | |||
| c9729c046c | |||
| 34064972e9 | |||
| 5a7eda0968 | |||
| 98bc6007a5 | |||
| 33a08bed4f | |||
| 98412c140d | |||
| 1cf02afec9 | |||
| 9380f751b2 | |||
| e209a9bb37 | |||
| 2dc8faee35 | |||
| c29c12ea76 | |||
| bbb821e2f9 | |||
| 069a1d57c9 | |||
| df86103407 | |||
| d48163f167 | |||
| f23ea0ce4b | |||
| d08a6aa204 | |||
| 93397a774f | |||
| f2a76da7e2 | |||
| 3d51588cc1 | |||
| c44dea4d2b | |||
| 24b5018c8d | |||
| d3c1736119 | |||
| da8e22c856 | |||
| 741754e676 | |||
| c2ae586e5b | |||
| a4f84f0812 | |||
| 9e9c24ad44 | |||
| b1a5251768 | |||
| 0b7a9cf03e | |||
| eec0426612 | |||
| 65936bdce5 | |||
| f949d3732e | |||
| 9269d1ec52 | |||
| 1baf528d0a | |||
| 99ff7c741b | |||
| 12aa6ced86 | |||
| ee5a2c9ef8 | |||
| 126aaa0633 | |||
| 68409c70fd | |||
| 33026ca9ea | |||
| 7df2d44748 | |||
| b709dd2f1b | |||
| 51298786c3 | |||
| 1375b34cbe | |||
| 2c74a7317d | |||
| c8430d1ff7 | |||
| 3b9a2b990d | |||
| 2c509d085e | |||
| bed663a031 | |||
| e101f4ed2f | |||
| 0dfe9d94b2 | |||
| 0efdfe66d5 |
+15
-20
@@ -1,8 +1,16 @@
|
||||
# use glob syntax
|
||||
syntax: glob
|
||||
# Virtual environments
|
||||
.venv
|
||||
|
||||
__pycache__/
|
||||
*.py[oc]
|
||||
build/
|
||||
dist/
|
||||
wheels/
|
||||
*.egg-info
|
||||
|
||||
*.orig
|
||||
*.pyc
|
||||
*.sql
|
||||
*.sqlite
|
||||
*.prof
|
||||
@@ -15,6 +23,8 @@ _1623.err
|
||||
_1623.pos
|
||||
_1623.svx
|
||||
_16230.svx
|
||||
_troggle_import_root.svx
|
||||
troggle_import_root.log
|
||||
cave-lookup.json
|
||||
core/migrations/*
|
||||
db*
|
||||
@@ -28,7 +38,6 @@ loadlogbk.log
|
||||
loadsurvexblks.log
|
||||
logbktrips.shelve
|
||||
memdump.sql
|
||||
my_project.dot
|
||||
parsing_log.txt
|
||||
svxblks.log
|
||||
svxlinear.log
|
||||
@@ -40,35 +49,21 @@ troggle.sqlite
|
||||
troggle.sqlite-journal
|
||||
troggle_log.txt
|
||||
tunnel-import.log
|
||||
logbktrips.shelve.db
|
||||
|
||||
credentials.py
|
||||
secret_credentials.py
|
||||
localsettings.py
|
||||
localsettings-expo-live.py
|
||||
_deploy/old/localsettings-expo-live.py
|
||||
_deploy/old/localsettings.py
|
||||
debian/localsettings.py
|
||||
debian/credentials.py
|
||||
wsl/localsettings.py
|
||||
wsl/credentials.py
|
||||
|
||||
media/jslib/*
|
||||
!media/jslib/readme.txt
|
||||
|
||||
_test_response.html
|
||||
_deploy/wsl/localsettingsWSL.py.bak
|
||||
therionrefs.log
|
||||
_1623-and-1626.svx
|
||||
_1623-and-1626-no-schoenberg-hs.svx
|
||||
localsettings-oldMuscogee.py
|
||||
troggle.sqlite-journal - Shortcut.lnk
|
||||
troggle.sqlite - Shortcut.lnk
|
||||
|
||||
_deploy/debian/localsettings-jan.py
|
||||
_deploy/debian/localsettings-nw.py
|
||||
py310d32
|
||||
_deploy/debian/localsettingsserver2023-01-secret.py
|
||||
_deploy/debian/localsettings2023-04-05-secret.py
|
||||
pydebianbullseye
|
||||
|
||||
javascript
|
||||
|
||||
|
||||
mvscript.sh
|
||||
|
||||
@@ -0,0 +1,807 @@
|
||||
cd /home/philip/expo/expoweb/1623
|
||||
mkdir -p 2023-ASH-06/i
|
||||
mkdir -p 2023-ASH-06/l
|
||||
mkdir -p 2023-ASH-06/t
|
||||
mkdir -p 307/i
|
||||
mkdir -p 307/l
|
||||
mkdir -p 307/t
|
||||
mkdir -p 2023-ASH-13/i
|
||||
mkdir -p 2023-ASH-13/l
|
||||
mkdir -p 2023-ASH-13/t
|
||||
mkdir -p 2018-pf-03/i
|
||||
mkdir -p 2018-pf-03/l
|
||||
mkdir -p 2018-pf-03/t
|
||||
mkdir -p 2023-ASH-07/i
|
||||
mkdir -p 2023-ASH-07/l
|
||||
mkdir -p 2023-ASH-07/t
|
||||
mkdir -p 2013-BL-01/i
|
||||
mkdir -p 2013-BL-01/l
|
||||
mkdir -p 2013-BL-01/t
|
||||
mkdir -p 2023-ASH-03/i
|
||||
mkdir -p 2023-ASH-03/l
|
||||
mkdir -p 2023-ASH-03/t
|
||||
mkdir -p 2017-AA-01/i
|
||||
mkdir -p 2017-AA-01/l
|
||||
mkdir -p 2017-AA-01/t
|
||||
mkdir -p 2023-ASH-02/i
|
||||
mkdir -p 2023-ASH-02/l
|
||||
mkdir -p 2023-ASH-02/t
|
||||
mkdir -p 306/i
|
||||
mkdir -p 306/l
|
||||
mkdir -p 306/t
|
||||
mkdir -p 2012-sw-01/i
|
||||
mkdir -p 2012-sw-01/l
|
||||
mkdir -p 2012-sw-01/t
|
||||
mkdir -p 315/i
|
||||
mkdir -p 315/l
|
||||
mkdir -p 315/t
|
||||
mkdir -p 2018-NTU-01/i
|
||||
mkdir -p 2018-NTU-01/l
|
||||
mkdir -p 2018-NTU-01/t
|
||||
mkdir -p 303/i
|
||||
mkdir -p 303/l
|
||||
mkdir -p 303/t
|
||||
mkdir -p 2023-ASH-12/i
|
||||
mkdir -p 2023-ASH-12/l
|
||||
mkdir -p 2023-ASH-12/t
|
||||
mkdir -p 2023-ASH-11/i
|
||||
mkdir -p 2023-ASH-11/l
|
||||
mkdir -p 2023-ASH-11/t
|
||||
mkdir -p 311/i
|
||||
mkdir -p 311/l
|
||||
mkdir -p 311/t
|
||||
mkdir -p 2017-AMS-02/i
|
||||
mkdir -p 2017-AMS-02/l
|
||||
mkdir -p 2017-AMS-02/t
|
||||
mkdir -p 2013-06/i
|
||||
mkdir -p 2013-06/l
|
||||
mkdir -p 2013-06/t
|
||||
mkdir -p 2023-ASH-01/i
|
||||
mkdir -p 2023-ASH-01/l
|
||||
mkdir -p 2023-ASH-01/t
|
||||
mkdir -p 2017-NR-01/i
|
||||
mkdir -p 2017-NR-01/l
|
||||
mkdir -p 2017-NR-01/t
|
||||
mkdir -p 308/i
|
||||
mkdir -p 308/l
|
||||
mkdir -p 308/t
|
||||
mkdir -p 2012-sw-03/i
|
||||
mkdir -p 2012-sw-03/l
|
||||
mkdir -p 2012-sw-03/t
|
||||
mkdir -p 2023-ASH-04/i
|
||||
mkdir -p 2023-ASH-04/l
|
||||
mkdir -p 2023-ASH-04/t
|
||||
mkdir -p 2023-BL-11/i
|
||||
mkdir -p 2023-BL-11/l
|
||||
mkdir -p 2023-BL-11/t
|
||||
mkdir -p 2023-ASH-14/i
|
||||
mkdir -p 2023-ASH-14/l
|
||||
mkdir -p 2023-ASH-14/t
|
||||
mkdir -p 309/i
|
||||
mkdir -p 309/l
|
||||
mkdir -p 309/t
|
||||
mkdir -p 2023-ASH-10/i
|
||||
mkdir -p 2023-ASH-10/l
|
||||
mkdir -p 2023-ASH-10/t
|
||||
cd /home/philip/expo/expoweb/1623
|
||||
mv l/"2012-sw-01_i1.html" 2012-sw-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2012-sw-01/i/|g' 2012-sw-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2012-sw-01/i/|g" 2012-sw-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2012-sw-01/i/|g' ../entrance_data/'1623-2012-sw-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2012-sw-01/i/|g" ../entrance_data/'1623-2012-sw-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2012-sw-01/l/|g' ../entrance_data/'1623-2012-sw-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2012-sw-01/l/|g" ../entrance_data/'1623-2012-sw-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2012-sw-01/t/|g' ../entrance_data/'1623-2012-sw-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2012-sw-01/t/|g" ../entrance_data/'1623-2012-sw-01.html'
|
||||
mv t/"2012-sw-01_i1.jpg" 2012-sw-01/t
|
||||
mv i/"2012-sw-01_i1.jpg" 2012-sw-01/i
|
||||
mv l/"2012-sw-03_i2.html" 2012-sw-03/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' 2012-sw-03/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" 2012-sw-03/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2012-sw-03/l/|g' ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2012-sw-03/l/|g" ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2012-sw-03/t/|g' ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2012-sw-03/t/|g" ../entrance_data/'1623-2012-sw-03.html'
|
||||
mv l/"2012-sw-03_i1.html" 2012-sw-03/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' 2012-sw-03/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" 2012-sw-03/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2012-sw-03/i/|g' ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2012-sw-03/i/|g" ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2012-sw-03/l/|g' ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2012-sw-03/l/|g" ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2012-sw-03/t/|g' ../entrance_data/'1623-2012-sw-03.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2012-sw-03/t/|g" ../entrance_data/'1623-2012-sw-03.html'
|
||||
mv t/"2012-sw-03_i1.jpg" 2012-sw-03/t
|
||||
mv i/"2012-sw-03_i1.jpg" 2012-sw-03/i
|
||||
mv t/"2012-sw-03_i2.jpg" 2012-sw-03/t
|
||||
mv i/"2012-sw-03_i2.jpg" 2012-sw-03/i
|
||||
mv i/"2013wallet23.jpg" 2013-06/i
|
||||
mv l/"2013wallet23.html" 2013-06/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2013-06/i/|g' 2013-06/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2013-06/i/|g" 2013-06/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2013-06/i/|g' ../cave_data/'1623-2013-06.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2013-06/i/|g" ../cave_data/'1623-2013-06.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2013-06/l/|g' ../cave_data/'1623-2013-06.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2013-06/l/|g" ../cave_data/'1623-2013-06.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2013-06/t/|g' ../cave_data/'1623-2013-06.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2013-06/t/|g" ../cave_data/'1623-2013-06.html'
|
||||
mv l/"2013-BL-01.html" 2013-BL-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2013-BL-01/i/|g' 2013-BL-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2013-BL-01/i/|g" 2013-BL-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2013-BL-01/i/|g' ../cave_data/'1623-2013-BL-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2013-BL-01/i/|g" ../cave_data/'1623-2013-BL-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2013-BL-01/l/|g' ../cave_data/'1623-2013-BL-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2013-BL-01/l/|g" ../cave_data/'1623-2013-BL-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2013-BL-01/t/|g' ../cave_data/'1623-2013-BL-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2013-BL-01/t/|g" ../cave_data/'1623-2013-BL-01.html'
|
||||
mv t/"2013-BL-01.jpg" 2013-BL-01/t
|
||||
mv i/"2013-BL-01.jpg" 2013-BL-01/i
|
||||
mv l/"aa-1-2017_with-tag.html" 2017-AA-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' 2017-AA-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" 2017-AA-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2017-AA-01/l/|g' ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2017-AA-01/l/|g" ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2017-AA-01/t/|g' ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2017-AA-01/t/|g" ../entrance_data/'1623-2017-AA-01.html'
|
||||
mv t/"aa-1-2017_with-tag.jpg" 2017-AA-01/t
|
||||
mv i/"aa-1-2017_with-tag.jpg" 2017-AA-01/i
|
||||
mv l/"aa-1-2017_looking-down.html" 2017-AA-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' 2017-AA-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" 2017-AA-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AA-01/i/|g' ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AA-01/i/|g" ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2017-AA-01/l/|g' ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2017-AA-01/l/|g" ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2017-AA-01/t/|g' ../entrance_data/'1623-2017-AA-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2017-AA-01/t/|g" ../entrance_data/'1623-2017-AA-01.html'
|
||||
mv t/"aa-1-2017_looking-down.jpg" 2017-AA-01/t
|
||||
mv i/"aa-1-2017_looking-down.jpg" 2017-AA-01/i
|
||||
mv t/"ent081-20170807.jpg" 2017-AMS-02/t
|
||||
mv i/"ent081-20170807.jpg" 2017-AMS-02/i
|
||||
mv l/"near-ent-2017-ams-02.html" 2017-AMS-02/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' 2017-AMS-02/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" 2017-AMS-02/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2017-AMS-02/l/|g' ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2017-AMS-02/l/|g" ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2017-AMS-02/t/|g' ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2017-AMS-02/t/|g" ../entrance_data/'1623-2017-AMS-02.html'
|
||||
mv t/"near-ent-2017-ams-02.jpg" 2017-AMS-02/t
|
||||
mv i/"near-ent-2017-ams-02.jpg" 2017-AMS-02/i
|
||||
mv l/"ent081-20170807.html" 2017-AMS-02/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' 2017-AMS-02/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" 2017-AMS-02/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-AMS-02/i/|g' ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-AMS-02/i/|g" ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2017-AMS-02/l/|g' ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2017-AMS-02/l/|g" ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2017-AMS-02/t/|g' ../entrance_data/'1623-2017-AMS-02.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2017-AMS-02/t/|g" ../entrance_data/'1623-2017-AMS-02.html'
|
||||
mv l/"2017-NR-01_03.html" 2017-NR-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' 2017-NR-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" 2017-NR-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2017-NR-01/l/|g' ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2017-NR-01/l/|g" ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2017-NR-01/t/|g' ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2017-NR-01/t/|g" ../entrance_data/'1623-2017-NR-01.html'
|
||||
mv t/"2017-NR-01_03.jpg" 2017-NR-01/t
|
||||
mv i/"2017-NR-01_03.jpg" 2017-NR-01/i
|
||||
mv t/"2017-NR-01_04.jpg" 2017-NR-01/t
|
||||
mv i/"2017-NR-01_04.jpg" 2017-NR-01/i
|
||||
mv l/"2017-NR-01_04.html" 2017-NR-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' 2017-NR-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" 2017-NR-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2017-NR-01/i/|g' ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2017-NR-01/i/|g" ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2017-NR-01/l/|g' ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2017-NR-01/l/|g" ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2017-NR-01/t/|g' ../entrance_data/'1623-2017-NR-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2017-NR-01/t/|g" ../entrance_data/'1623-2017-NR-01.html'
|
||||
mv t/"2018-ntu-01_looking_down_shaft-and-rift.jpg" 2018-NTU-01/t
|
||||
mv i/"2018-ntu-01_looking_down_shaft-and-rift.jpg" 2018-NTU-01/i
|
||||
mv t/"2018-ntu-01_tag_arrows.jpg" 2018-NTU-01/t
|
||||
mv i/"2018-ntu-01_tag_arrows.jpg" 2018-NTU-01/i
|
||||
mv t/"2018-ntu-01_neil_view_west.jpg" 2018-NTU-01/t
|
||||
mv i/"2018-ntu-01_neil_view_west.jpg" 2018-NTU-01/i
|
||||
mv l/"2018-ntu-01_looking_down_shaft-and-rift.html" 2018-NTU-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' 2018-NTU-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" 2018-NTU-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2018-NTU-01/l/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2018-NTU-01/l/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2018-NTU-01/t/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2018-NTU-01/t/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
mv l/"2018-ntu-01_tag_arrows.html" 2018-NTU-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' 2018-NTU-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" 2018-NTU-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2018-NTU-01/l/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2018-NTU-01/l/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2018-NTU-01/t/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2018-NTU-01/t/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
mv l/"2018-ntu-01_neil_view_west.html" 2018-NTU-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' 2018-NTU-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" 2018-NTU-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-NTU-01/i/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-NTU-01/i/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2018-NTU-01/l/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2018-NTU-01/l/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2018-NTU-01/t/|g' ../entrance_data/'1623-2018-NTU-01.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2018-NTU-01/t/|g" ../entrance_data/'1623-2018-NTU-01.html'
|
||||
mv t/"2018-pf-03_and_pf-02_arrows.jpg" 2018-pf-03/t
|
||||
mv i/"2018-pf-03_and_pf-02_arrows.jpg" 2018-pf-03/i
|
||||
mv l/"2018-pf-03_and_pf-02_arrows.html" 2018-pf-03/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-pf-03/i/|g' 2018-pf-03/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-pf-03/i/|g" 2018-pf-03/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2018-pf-03/i/|g' ../entrance_data/'1623-2018-pf-03.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2018-pf-03/i/|g" ../entrance_data/'1623-2018-pf-03.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2018-pf-03/l/|g' ../entrance_data/'1623-2018-pf-03.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2018-pf-03/l/|g" ../entrance_data/'1623-2018-pf-03.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2018-pf-03/t/|g' ../entrance_data/'1623-2018-pf-03.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2018-pf-03/t/|g" ../entrance_data/'1623-2018-pf-03.html'
|
||||
mv l/"2023-ASH-01-entrance1.html" 2023-ASH-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' 2023-ASH-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" 2023-ASH-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-01/l/|g' ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-01/l/|g" ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-01/t/|g' ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-01/t/|g" ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
mv l/"2023-ASH-01-entrance2.html" 2023-ASH-01/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' 2023-ASH-01/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" 2023-ASH-01/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-01/i/|g' ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-01/i/|g" ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-01/l/|g' ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-01/l/|g" ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-01/t/|g' ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-01/t/|g" ../entrance_data/'1623-2023-ASH-01a.html'
|
||||
mv t/"2023-ASH-01-entrance2.jpg" 2023-ASH-01/t
|
||||
mv i/"2023-ASH-01-entrance2.jpg" 2023-ASH-01/i
|
||||
mv t/"2023-ASH-01-entrance1.jpg" 2023-ASH-01/t
|
||||
mv i/"2023-ASH-01-entrance1.jpg" 2023-ASH-01/i
|
||||
mv l/"2023-ASH-02-entrance1.html" 2023-ASH-02/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' 2023-ASH-02/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" 2023-ASH-02/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-02/l/|g' ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-02/l/|g" ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-02/t/|g' ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-02/t/|g" ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
mv t/"2023-ASH-02-entrance1.jpg" 2023-ASH-02/t
|
||||
mv i/"2023-ASH-02-entrance1.jpg" 2023-ASH-02/i
|
||||
mv l/"2023-ASH-02-entrance2.html" 2023-ASH-02/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' 2023-ASH-02/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" 2023-ASH-02/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-02/i/|g' ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-02/i/|g" ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-02/l/|g' ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-02/l/|g" ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-02/t/|g' ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-02/t/|g" ../entrance_data/'1623-2023-ASH-02a.html'
|
||||
mv t/"2023-ASH-02-entrance2.jpg" 2023-ASH-02/t
|
||||
mv i/"2023-ASH-02-entrance2.jpg" 2023-ASH-02/i
|
||||
mv l/"2023-ASH-03-entrance2.html" 2023-ASH-03/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' 2023-ASH-03/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" 2023-ASH-03/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-03/l/|g' ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-03/l/|g" ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-03/t/|g' ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-03/t/|g" ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
mv t/"2023-ASH-03-entrance2.jpg" 2023-ASH-03/t
|
||||
mv i/"2023-ASH-03-entrance2.jpg" 2023-ASH-03/i
|
||||
mv l/"2023-ASH-03-entrance1.html" 2023-ASH-03/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' 2023-ASH-03/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" 2023-ASH-03/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-03/i/|g' ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-03/i/|g" ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-03/l/|g' ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-03/l/|g" ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-03/t/|g' ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-03/t/|g" ../entrance_data/'1623-2023-ASH-03a.html'
|
||||
mv t/"2023-ASH-03-entrance1.jpg" 2023-ASH-03/t
|
||||
mv i/"2023-ASH-03-entrance1.jpg" 2023-ASH-03/i
|
||||
mv t/"2023-ASH-04-entrance2.jpg" 2023-ASH-04/t
|
||||
mv i/"2023-ASH-04-entrance2.jpg" 2023-ASH-04/i
|
||||
mv t/"2023-ASH-04-entrance1.jpg" 2023-ASH-04/t
|
||||
mv i/"2023-ASH-04-entrance1.jpg" 2023-ASH-04/i
|
||||
mv l/"2023-ASH-04-entrance1.html" 2023-ASH-04/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' 2023-ASH-04/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" 2023-ASH-04/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-04/l/|g' ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-04/l/|g" ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-04/t/|g' ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-04/t/|g" ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
mv l/"2023-ASH-04-entrance2.html" 2023-ASH-04/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' 2023-ASH-04/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" 2023-ASH-04/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-04/i/|g' ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-04/i/|g" ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-04/l/|g' ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-04/l/|g" ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-04/t/|g' ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-04/t/|g" ../entrance_data/'1623-2023-ASH-04a.html'
|
||||
mv t/"2023-ASH-06-entrance2.jpg" 2023-ASH-06/t
|
||||
mv i/"2023-ASH-06-entrance2.jpg" 2023-ASH-06/i
|
||||
mv l/"2023-ASH-06-entrance1.html" 2023-ASH-06/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' 2023-ASH-06/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" 2023-ASH-06/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-06/l/|g' ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-06/l/|g" ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-06/t/|g' ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-06/t/|g" ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
mv l/"2023-ASH-06-entrance2.html" 2023-ASH-06/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' 2023-ASH-06/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" 2023-ASH-06/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-06/i/|g' ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-06/i/|g" ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-06/l/|g' ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-06/l/|g" ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-06/t/|g' ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-06/t/|g" ../entrance_data/'1623-2023-ASH-06a.html'
|
||||
mv t/"2023-ASH-06-entrance1.jpg" 2023-ASH-06/t
|
||||
mv i/"2023-ASH-06-entrance1.jpg" 2023-ASH-06/i
|
||||
mv l/"2023-ASH-07-entrance1.html" 2023-ASH-07/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' 2023-ASH-07/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" 2023-ASH-07/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-07/l/|g' ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-07/l/|g" ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-07/t/|g' ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-07/t/|g" ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
mv t/"2023-ASH-07-entrance1.jpg" 2023-ASH-07/t
|
||||
mv i/"2023-ASH-07-entrance1.jpg" 2023-ASH-07/i
|
||||
mv t/"2023-ASH-07-entrance2.jpg" 2023-ASH-07/t
|
||||
mv i/"2023-ASH-07-entrance2.jpg" 2023-ASH-07/i
|
||||
mv l/"2023-ASH-07-entrance2.html" 2023-ASH-07/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' 2023-ASH-07/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" 2023-ASH-07/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-07/i/|g' ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-07/i/|g" ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-07/l/|g' ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-07/l/|g" ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-07/t/|g' ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-07/t/|g" ../entrance_data/'1623-2023-ASH-07a.html'
|
||||
mv l/"2023-ASH-10-entrance2.html" 2023-ASH-10/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' 2023-ASH-10/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" 2023-ASH-10/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-10/l/|g' ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-10/l/|g" ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-10/t/|g' ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-10/t/|g" ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
mv t/"2023-ASH-10-entrance1.jpg" 2023-ASH-10/t
|
||||
mv i/"2023-ASH-10-entrance1.jpg" 2023-ASH-10/i
|
||||
mv t/"2023-ASH-10-entrance2.jpg" 2023-ASH-10/t
|
||||
mv i/"2023-ASH-10-entrance2.jpg" 2023-ASH-10/i
|
||||
mv l/"2023-ASH-10-entrance1.html" 2023-ASH-10/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' 2023-ASH-10/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" 2023-ASH-10/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-10/i/|g' ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-10/i/|g" ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-10/l/|g' ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-10/l/|g" ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-10/t/|g' ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-10/t/|g" ../entrance_data/'1623-2023-ASH-10a.html'
|
||||
mv t/"2023-ASH-11-entrance2.jpg" 2023-ASH-11/t
|
||||
mv i/"2023-ASH-11-entrance2.jpg" 2023-ASH-11/i
|
||||
mv l/"2023-ASH-11-entrance2.html" 2023-ASH-11/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
mv l/"2023-ASH-11-entrance4.html" 2023-ASH-11/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
mv l/"2023-ASH-11-entrance3.html" 2023-ASH-11/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
mv l/"2023-ASH-11-entrance1.html" 2023-ASH-11/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' 2023-ASH-11/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" 2023-ASH-11/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-11/i/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-11/i/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-11/l/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-11/l/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-11/t/|g' ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-11/t/|g" ../entrance_data/'1623-2023-ASH-11a.html'
|
||||
mv t/"2023-ASH-11-entrance1.jpg" 2023-ASH-11/t
|
||||
mv i/"2023-ASH-11-entrance1.jpg" 2023-ASH-11/i
|
||||
mv t/"2023-ASH-11-entrance3.jpg" 2023-ASH-11/t
|
||||
mv i/"2023-ASH-11-entrance3.jpg" 2023-ASH-11/i
|
||||
mv t/"2023-ASH-11-entrance4.jpg" 2023-ASH-11/t
|
||||
mv i/"2023-ASH-11-entrance4.jpg" 2023-ASH-11/i
|
||||
mv l/"2023-ASH-12-entrance1.html" 2023-ASH-12/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' 2023-ASH-12/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" 2023-ASH-12/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-12/l/|g' ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-12/l/|g" ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-12/t/|g' ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-12/t/|g" ../entrance_data/'1623-2023-ASH-12.html'
|
||||
mv t/"2023-ASH-12-entrance1.jpg" 2023-ASH-12/t
|
||||
mv i/"2023-ASH-12-entrance1.jpg" 2023-ASH-12/i
|
||||
mv t/"2023-ASH-12-entrance2.jpg" 2023-ASH-12/t
|
||||
mv i/"2023-ASH-12-entrance2.jpg" 2023-ASH-12/i
|
||||
mv l/"2023-ASH-12-entrance2.html" 2023-ASH-12/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' 2023-ASH-12/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" 2023-ASH-12/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-12/i/|g' ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-12/i/|g" ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-12/l/|g' ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-12/l/|g" ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-12/t/|g' ../entrance_data/'1623-2023-ASH-12.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-12/t/|g" ../entrance_data/'1623-2023-ASH-12.html'
|
||||
mv l/"2023-ASH-13-entrance1.html" 2023-ASH-13/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' 2023-ASH-13/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" 2023-ASH-13/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-13/l/|g' ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-13/l/|g" ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-13/t/|g' ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-13/t/|g" ../entrance_data/'1623-2023-ASH-13.html'
|
||||
mv l/"2023-ASH-13-entrance2.html" 2023-ASH-13/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' 2023-ASH-13/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" 2023-ASH-13/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-13/i/|g' ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-13/i/|g" ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-13/l/|g' ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-13/l/|g" ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-13/t/|g' ../entrance_data/'1623-2023-ASH-13.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-13/t/|g" ../entrance_data/'1623-2023-ASH-13.html'
|
||||
mv t/"2023-ASH-13-entrance2.jpg" 2023-ASH-13/t
|
||||
mv i/"2023-ASH-13-entrance2.jpg" 2023-ASH-13/i
|
||||
mv t/"2023-ASH-13-entrance1.jpg" 2023-ASH-13/t
|
||||
mv i/"2023-ASH-13-entrance1.jpg" 2023-ASH-13/i
|
||||
mv l/"2023-ASH-13and14-entrances.html" 2023-ASH-14/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' 2023-ASH-14/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" 2023-ASH-14/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-14/l/|g' ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-14/l/|g" ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-14/t/|g' ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-14/t/|g" ../entrance_data/'1623-2023-ASH-14.html'
|
||||
mv t/"2023-ASH-14-entrance1.jpg" 2023-ASH-14/t
|
||||
mv i/"2023-ASH-14-entrance1.jpg" 2023-ASH-14/i
|
||||
mv l/"2023-ASH-14-entrance1.html" 2023-ASH-14/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' 2023-ASH-14/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" 2023-ASH-14/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-ASH-14/i/|g' ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-ASH-14/i/|g" ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-ASH-14/l/|g' ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-ASH-14/l/|g" ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-ASH-14/t/|g' ../entrance_data/'1623-2023-ASH-14.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-ASH-14/t/|g" ../entrance_data/'1623-2023-ASH-14.html'
|
||||
mv t/"2023-ASH-13and14-entrances.jpg" 2023-ASH-14/t
|
||||
mv i/"2023-ASH-13and14-entrances.jpg" 2023-ASH-14/i
|
||||
mv t/"2023-BL-11-sketch.jpg" 2023-BL-11/t
|
||||
mv i/"2023-BL-11-sketch.jpg" 2023-BL-11/i
|
||||
mv t/"20230802_165708.jpg" 2023-BL-11/t
|
||||
mv i/"20230802_165708.jpg" 2023-BL-11/i
|
||||
mv t/"20230802_165823.jpg" 2023-BL-11/t
|
||||
mv i/"20230802_165823.jpg" 2023-BL-11/i
|
||||
mv l/"20230802_165708.html" 2023-BL-11/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' 2023-BL-11/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" 2023-BL-11/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-BL-11/l/|g' ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-BL-11/l/|g" ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-BL-11/t/|g' ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-BL-11/t/|g" ../entrance_data/'1623-2023-BL-11.html'
|
||||
mv l/"2023-BL-11-sketch.html" 2023-BL-11/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' 2023-BL-11/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" 2023-BL-11/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' ../cave_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" ../cave_data/'1623-2023-BL-11.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-BL-11/l/|g' ../cave_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-BL-11/l/|g" ../cave_data/'1623-2023-BL-11.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-BL-11/t/|g' ../cave_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-BL-11/t/|g" ../cave_data/'1623-2023-BL-11.html'
|
||||
mv l/"20230802_165823.html" 2023-BL-11/l
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' 2023-BL-11/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" 2023-BL-11/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/2023-BL-11/i/|g' ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/2023-BL-11/i/|g" ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/2023-BL-11/l/|g' ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/2023-BL-11/l/|g" ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/2023-BL-11/t/|g' ../entrance_data/'1623-2023-BL-11.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/2023-BL-11/t/|g" ../entrance_data/'1623-2023-BL-11.html'
|
||||
mv t/"2023-ASH-15-entrance3.jpg" 303/t
|
||||
mv i/"2023-ASH-15-entrance3.jpg" 303/i
|
||||
mv l/"2023-ASH-15-entrance3.html" 303/l
|
||||
sed -i 's|\/1623\/i\/|/1623/303/i/|g' 303/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/303/i/|g" 303/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/303/i/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/303/i/|g" ../entrance_data/'1623-303.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/303/l/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/303/l/|g" ../entrance_data/'1623-303.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/303/t/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/303/t/|g" ../entrance_data/'1623-303.html'
|
||||
mv t/"2023-ASH-15-entrance1.jpg" 303/t
|
||||
mv i/"2023-ASH-15-entrance1.jpg" 303/i
|
||||
mv l/"2023-ASH-15-entrance2.html" 303/l
|
||||
sed -i 's|\/1623\/i\/|/1623/303/i/|g' 303/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/303/i/|g" 303/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/303/i/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/303/i/|g" ../entrance_data/'1623-303.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/303/l/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/303/l/|g" ../entrance_data/'1623-303.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/303/t/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/303/t/|g" ../entrance_data/'1623-303.html'
|
||||
mv l/"2023-ASH-15-entrance1.html" 303/l
|
||||
sed -i 's|\/1623\/i\/|/1623/303/i/|g' 303/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/303/i/|g" 303/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/303/i/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/303/i/|g" ../entrance_data/'1623-303.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/303/l/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/303/l/|g" ../entrance_data/'1623-303.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/303/t/|g' ../entrance_data/'1623-303.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/303/t/|g" ../entrance_data/'1623-303.html'
|
||||
mv t/"2023-ASH-15-entrance2.jpg" 303/t
|
||||
mv i/"2023-ASH-15-entrance2.jpg" 303/i
|
||||
mv l/"2023-ASH-17-bothentrances.html" 306/l
|
||||
sed -i 's|\/1623\/i\/|/1623/306/i/|g' 306/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/306/i/|g" 306/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/306/i/|g' ../entrance_data/'1623-306b.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/306/i/|g" ../entrance_data/'1623-306b.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/306/l/|g' ../entrance_data/'1623-306b.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/306/l/|g" ../entrance_data/'1623-306b.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/306/t/|g' ../entrance_data/'1623-306b.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/306/t/|g" ../entrance_data/'1623-306b.html'
|
||||
mv l/"2023-ASH-17-bothentrances.html" 306/l
|
||||
sed -i 's|\/1623\/i\/|/1623/306/i/|g' 306/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/306/i/|g" 306/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/306/i/|g' ../entrance_data/'1623-306a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/306/i/|g" ../entrance_data/'1623-306a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/306/l/|g' ../entrance_data/'1623-306a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/306/l/|g" ../entrance_data/'1623-306a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/306/t/|g' ../entrance_data/'1623-306a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/306/t/|g" ../entrance_data/'1623-306a.html'
|
||||
mv i/"2023-ASH-17-sketch.jpg" 306/i
|
||||
mv l/"2023-ASH-17-sketch.html" 306/l
|
||||
sed -i 's|\/1623\/i\/|/1623/306/i/|g' 306/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/306/i/|g" 306/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/306/i/|g' ../cave_data/'1623-306.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/306/i/|g" ../cave_data/'1623-306.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/306/l/|g' ../cave_data/'1623-306.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/306/l/|g" ../cave_data/'1623-306.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/306/t/|g' ../cave_data/'1623-306.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/306/t/|g" ../cave_data/'1623-306.html'
|
||||
mv t/"2023-ASH-17-bothentrances.jpg" 306/t
|
||||
mv i/"2023-ASH-17-bothentrances.jpg" 306/i
|
||||
mv t/"2023-ASH-17-bothentrances.jpg" 306/t
|
||||
mv i/"2023-ASH-17-bothentrances.jpg" 306/i
|
||||
mv l/"2023-ASH-05-entrance-b1.html" 307/l
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../entrance_data/'1623-307b.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../entrance_data/'1623-307b.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../entrance_data/'1623-307b.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../entrance_data/'1623-307b.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../entrance_data/'1623-307b.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../entrance_data/'1623-307b.html'
|
||||
mv l/"2023-ASH-05-entrance-a1.html" 307/l
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../entrance_data/'1623-307a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../entrance_data/'1623-307a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../entrance_data/'1623-307a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../entrance_data/'1623-307a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../entrance_data/'1623-307a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../entrance_data/'1623-307a.html'
|
||||
mv t/"2023-ASH-05-entrance-a3.jpg" 307/t
|
||||
mv i/"2023-ASH-05-entrance-a3.jpg" 307/i
|
||||
mv l/"2023-ASH-05-entrance-a2.html" 307/l
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../entrance_data/'1623-307a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../entrance_data/'1623-307a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../entrance_data/'1623-307a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../entrance_data/'1623-307a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../entrance_data/'1623-307a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../entrance_data/'1623-307a.html'
|
||||
mv t/"2023-ASH-05-entrance-a2.jpg" 307/t
|
||||
mv i/"2023-ASH-05-entrance-a2.jpg" 307/i
|
||||
mv i/"2023-ASH-05-sketch.jpg" 307/i
|
||||
mv t/"2023-ASH-05-entrance-a1.jpg" 307/t
|
||||
mv i/"2023-ASH-05-entrance-a1.jpg" 307/i
|
||||
mv l/"2023-ASH-05-sketch.html" 307/l
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../cave_data/'1623-307.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../cave_data/'1623-307.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../cave_data/'1623-307.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../cave_data/'1623-307.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../cave_data/'1623-307.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../cave_data/'1623-307.html'
|
||||
mv l/"2023-ASH-05-entrance-a3.html" 307/l
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' 307/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" 307/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/307/i/|g' ../cave_data/'1623-307.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/307/i/|g" ../cave_data/'1623-307.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/307/l/|g' ../cave_data/'1623-307.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/307/l/|g" ../cave_data/'1623-307.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/307/t/|g' ../cave_data/'1623-307.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/307/t/|g" ../cave_data/'1623-307.html'
|
||||
mv t/"2023-ASH-05-entrance-b1.jpg" 307/t
|
||||
mv i/"2023-ASH-05-entrance-b1.jpg" 307/i
|
||||
mv l/"2023-ASH-08-entrance-a3.html" 308/l
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../cave_data/'1623-308.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../cave_data/'1623-308.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../cave_data/'1623-308.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../cave_data/'1623-308.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../cave_data/'1623-308.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../cave_data/'1623-308.html'
|
||||
mv l/"2023-ASH-08-sketch.html" 308/l
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../cave_data/'1623-308.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../cave_data/'1623-308.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../cave_data/'1623-308.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../cave_data/'1623-308.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../cave_data/'1623-308.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../cave_data/'1623-308.html'
|
||||
mv l/"2023-ASH-08-entrance-a1.html" 308/l
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../entrance_data/'1623-308a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../entrance_data/'1623-308a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../entrance_data/'1623-308a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../entrance_data/'1623-308a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../entrance_data/'1623-308a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../entrance_data/'1623-308a.html'
|
||||
mv t/"2023-ASH-08-entrance-b1.jpg" 308/t
|
||||
mv i/"2023-ASH-08-entrance-b1.jpg" 308/i
|
||||
mv l/"2023-ASH-08-entrance-b1.html" 308/l
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../entrance_data/'1623-308b.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../entrance_data/'1623-308b.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../entrance_data/'1623-308b.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../entrance_data/'1623-308b.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../entrance_data/'1623-308b.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../entrance_data/'1623-308b.html'
|
||||
mv t/"2023-ASH-08-entrance-a1.jpg" 308/t
|
||||
mv i/"2023-ASH-08-entrance-a1.jpg" 308/i
|
||||
mv t/"2023-ASH-08-entrance-a2.jpg" 308/t
|
||||
mv i/"2023-ASH-08-entrance-a2.jpg" 308/i
|
||||
mv i/"2023-ASH-08-sketch.jpg" 308/i
|
||||
mv l/"2023-ASH-08-entrance-a2.html" 308/l
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' 308/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" 308/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/308/i/|g' ../entrance_data/'1623-308a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/308/i/|g" ../entrance_data/'1623-308a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/308/l/|g' ../entrance_data/'1623-308a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/308/l/|g" ../entrance_data/'1623-308a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/308/t/|g' ../entrance_data/'1623-308a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/308/t/|g" ../entrance_data/'1623-308a.html'
|
||||
mv t/"2023-ASH-08-entrance-a3.jpg" 308/t
|
||||
mv i/"2023-ASH-08-entrance-a3.jpg" 308/i
|
||||
mv l/"2023-ASH-09-plan.html" 309/l
|
||||
sed -i 's|\/1623\/i\/|/1623/309/i/|g' 309/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/309/i/|g" 309/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/309/i/|g' ../cave_data/'1623-309.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/309/i/|g" ../cave_data/'1623-309.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/309/l/|g' ../cave_data/'1623-309.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/309/l/|g" ../cave_data/'1623-309.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/309/t/|g' ../cave_data/'1623-309.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/309/t/|g" ../cave_data/'1623-309.html'
|
||||
mv l/"2023-ASH-09-entrance2.html" 309/l
|
||||
sed -i 's|\/1623\/i\/|/1623/309/i/|g' 309/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/309/i/|g" 309/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/309/i/|g' ../entrance_data/'1623-309a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/309/i/|g" ../entrance_data/'1623-309a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/309/l/|g' ../entrance_data/'1623-309a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/309/l/|g" ../entrance_data/'1623-309a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/309/t/|g' ../entrance_data/'1623-309a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/309/t/|g" ../entrance_data/'1623-309a.html'
|
||||
mv i/"2023-ASH-09-plan.jpg" 309/i
|
||||
mv t/"2023-ASH-09-entrance1.jpg" 309/t
|
||||
mv i/"2023-ASH-09-entrance1.jpg" 309/i
|
||||
mv t/"2023-ASH-09-entrance2.jpg" 309/t
|
||||
mv i/"2023-ASH-09-entrance2.jpg" 309/i
|
||||
mv l/"2023-ASH-09-entrance1.html" 309/l
|
||||
sed -i 's|\/1623\/i\/|/1623/309/i/|g' 309/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/309/i/|g" 309/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/309/i/|g' ../entrance_data/'1623-309a.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/309/i/|g" ../entrance_data/'1623-309a.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/309/l/|g' ../entrance_data/'1623-309a.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/309/l/|g" ../entrance_data/'1623-309a.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/309/t/|g' ../entrance_data/'1623-309a.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/309/t/|g" ../entrance_data/'1623-309a.html'
|
||||
mv l/"2023-ASH-16-entrance2.html" 311/l
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../entrance_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../entrance_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../entrance_data/'1623-311.html'
|
||||
mv t/"2023-ASH-16-entrance3.jpg" 311/t
|
||||
mv i/"2023-ASH-16-entrance3.jpg" 311/i
|
||||
mv l/"2023-ASH-16-entrance3.html" 311/l
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../entrance_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../entrance_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../entrance_data/'1623-311.html'
|
||||
mv t/"2023-ASH-16-entrance2.jpg" 311/t
|
||||
mv i/"2023-ASH-16-entrance2.jpg" 311/i
|
||||
mv l/"2023-ASH-16-entrance1.html" 311/l
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../entrance_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../entrance_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../entrance_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../entrance_data/'1623-311.html'
|
||||
mv i/"2023-ASH-16-sketch.jpg" 311/i
|
||||
mv l/"2023-ASH-16-sketch.html" 311/l
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' 311/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" 311/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/311/i/|g' ../cave_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/311/i/|g" ../cave_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/311/l/|g' ../cave_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/311/l/|g" ../cave_data/'1623-311.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/311/t/|g' ../cave_data/'1623-311.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/311/t/|g" ../cave_data/'1623-311.html'
|
||||
mv t/"2023-ASH-16-entrance1.jpg" 311/t
|
||||
mv i/"2023-ASH-16-entrance1.jpg" 311/i
|
||||
mv t/"photo_2023-10-30_22-07-43.jpg" 315/t
|
||||
mv i/"photo_2023-10-30_22-07-43.jpg" 315/i
|
||||
mv l/"photo_2023-10-30_22-07-43.html" 315/l
|
||||
sed -i 's|\/1623\/i\/|/1623/315/i/|g' 315/l/*.html
|
||||
sed -i "s|\/1623\/i\/|/1623/315/i/|g" 315/l/*.html
|
||||
sed -i 's|\/1623\/i\/|/1623/315/i/|g' ../cave_data/'1623-315.html'
|
||||
sed -i "s|\/1623\/i\/|/1623/315/i/|g" ../cave_data/'1623-315.html'
|
||||
sed -i 's|\/1623\/l\/|/1623/315/l/|g' ../cave_data/'1623-315.html'
|
||||
sed -i "s|\/1623\/l\/|/1623/315/l/|g" ../cave_data/'1623-315.html'
|
||||
sed -i 's|\/1623\/t\/|/1623/315/t/|g' ../cave_data/'1623-315.html'
|
||||
sed -i "s|\/1623\/t\/|/1623/315/t/|g" ../cave_data/'1623-315.html'
|
||||
|
||||
@@ -0,0 +1,141 @@
|
||||
range(303, 316) 312
|
||||
2023-RAWDB-02 312
|
||||
range(303, 316) 313
|
||||
2023-RAWDB-01 313
|
||||
range(303, 316) 314
|
||||
2023-KT-02 314
|
||||
range(303, 316) 315
|
||||
2023-JSS-01 315
|
||||
('c', 'i', '2013-06', '1623-2013-06.html', '2013wallet23.jpg')
|
||||
('c', 'l', '2013-06', '1623-2013-06.html', '2013wallet23.html')
|
||||
('c', 'l', '2013-BL-01', '1623-2013-BL-01.html', '2013-BL-01.html')
|
||||
('c', 't', '2013-BL-01', '1623-2013-BL-01.html', '2013-BL-01.jpg')
|
||||
('c', 't', '2023-BL-11', '1623-2023-BL-11.html', '2023-BL-11-sketch.jpg')
|
||||
('c', 'l', '2023-BL-11', '1623-2023-BL-11.html', '2023-BL-11-sketch.html')
|
||||
('c', 'i', '306', '1623-306.html', '2023-ASH-17-sketch.jpg')
|
||||
('c', 'l', '306', '1623-306.html', '2023-ASH-17-sketch.html')
|
||||
('c', 't', '307', '1623-307.html', '2023-ASH-05-entrance-a3.jpg')
|
||||
('c', 'i', '307', '1623-307.html', '2023-ASH-05-sketch.jpg')
|
||||
('c', 'l', '307', '1623-307.html', '2023-ASH-05-sketch.html')
|
||||
('c', 'l', '307', '1623-307.html', '2023-ASH-05-entrance-a3.html')
|
||||
('c', 'l', '308', '1623-308.html', '2023-ASH-08-entrance-a3.html')
|
||||
('c', 'l', '308', '1623-308.html', '2023-ASH-08-sketch.html')
|
||||
('c', 'i', '308', '1623-308.html', '2023-ASH-08-sketch.jpg')
|
||||
('c', 't', '308', '1623-308.html', '2023-ASH-08-entrance-a3.jpg')
|
||||
('c', 'l', '309', '1623-309.html', '2023-ASH-09-plan.html')
|
||||
('c', 'i', '309', '1623-309.html', '2023-ASH-09-plan.jpg')
|
||||
('c', 'i', '311', '1623-311.html', '2023-ASH-16-sketch.jpg')
|
||||
('c', 'l', '311', '1623-311.html', '2023-ASH-16-sketch.html')
|
||||
('c', 't', '315', '1623-315.html', 'photo_2023-10-30_22-07-43.jpg')
|
||||
('c', 'l', '315', '1623-315.html', 'photo_2023-10-30_22-07-43.html')
|
||||
|
||||
('e', 'l', '2012-sw-01', '1623-2012-sw-01.html', '2012-sw-01_i1.html')
|
||||
('e', 't', '2012-sw-01', '1623-2012-sw-01.html', '2012-sw-01_i1.jpg')
|
||||
('e', 'l', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i2.html')
|
||||
('e', 'l', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i1.html')
|
||||
('e', 't', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i1.jpg')
|
||||
('e', 't', '2012-sw-03', '1623-2012-sw-03.html', '2012-sw-03_i2.jpg')
|
||||
('e', 'l', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_with-tag.html')
|
||||
('e', 't', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_with-tag.jpg')
|
||||
('e', 'l', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_looking-down.html')
|
||||
('e', 't', '2017-AA-01', '1623-2017-AA-01.html', 'aa-1-2017_looking-down.jpg')
|
||||
('e', 't', '2017-AMS-02', '1623-2017-AMS-02.html', 'ent081-20170807.jpg')
|
||||
('e', 'l', '2017-AMS-02', '1623-2017-AMS-02.html', 'near-ent-2017-ams-02.html')
|
||||
('e', 't', '2017-AMS-02', '1623-2017-AMS-02.html', 'near-ent-2017-ams-02.jpg')
|
||||
('e', 'l', '2017-AMS-02', '1623-2017-AMS-02.html', 'ent081-20170807.html')
|
||||
('e', 'l', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_03.html')
|
||||
('e', 't', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_03.jpg')
|
||||
('e', 't', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_04.jpg')
|
||||
('e', 'l', '2017-NR-01', '1623-2017-NR-01.html', '2017-NR-01_04.html')
|
||||
('e', 't', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_looking_down_shaft-and-rift.jpg')
|
||||
('e', 't', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_tag_arrows.jpg')
|
||||
('e', 't', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_neil_view_west.jpg')
|
||||
('e', 'l', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_looking_down_shaft-and-rift.html')
|
||||
('e', 'l', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_tag_arrows.html')
|
||||
('e', 'l', '2018-NTU-01', '1623-2018-NTU-01.html', '2018-ntu-01_neil_view_west.html')
|
||||
('e', 't', '2018-pf-03', '1623-2018-pf-03.html', '2018-pf-03_and_pf-02_arrows.jpg')
|
||||
('e', 'l', '2018-pf-03', '1623-2018-pf-03.html', '2018-pf-03_and_pf-02_arrows.html')
|
||||
('e', 'l', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance1.html')
|
||||
('e', 'l', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance2.html')
|
||||
('e', 't', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance2.jpg')
|
||||
('e', 't', '2023-ASH-01', '1623-2023-ASH-01a.html', '2023-ASH-01-entrance1.jpg')
|
||||
('e', 'l', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance1.html')
|
||||
('e', 't', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance1.jpg')
|
||||
('e', 'l', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance2.html')
|
||||
('e', 't', '2023-ASH-02', '1623-2023-ASH-02a.html', '2023-ASH-02-entrance2.jpg')
|
||||
('e', 'l', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance2.html')
|
||||
('e', 't', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance2.jpg')
|
||||
('e', 'l', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance1.html')
|
||||
('e', 't', '2023-ASH-03', '1623-2023-ASH-03a.html', '2023-ASH-03-entrance1.jpg')
|
||||
('e', 't', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance2.jpg')
|
||||
('e', 't', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance1.jpg')
|
||||
('e', 'l', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance1.html')
|
||||
('e', 'l', '2023-ASH-04', '1623-2023-ASH-04a.html', '2023-ASH-04-entrance2.html')
|
||||
('e', 't', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance2.jpg')
|
||||
('e', 'l', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance1.html')
|
||||
('e', 'l', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance2.html')
|
||||
('e', 't', '2023-ASH-06', '1623-2023-ASH-06a.html', '2023-ASH-06-entrance1.jpg')
|
||||
('e', 'l', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance1.html')
|
||||
('e', 't', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance1.jpg')
|
||||
('e', 't', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance2.jpg')
|
||||
('e', 'l', '2023-ASH-07', '1623-2023-ASH-07a.html', '2023-ASH-07-entrance2.html')
|
||||
('e', 'l', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance2.html')
|
||||
('e', 't', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance1.jpg')
|
||||
('e', 't', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance2.jpg')
|
||||
('e', 'l', '2023-ASH-10', '1623-2023-ASH-10a.html', '2023-ASH-10-entrance1.html')
|
||||
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance2.jpg')
|
||||
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance2.html')
|
||||
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance4.html')
|
||||
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance3.html')
|
||||
('e', 'l', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance1.html')
|
||||
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance1.jpg')
|
||||
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance3.jpg')
|
||||
('e', 't', '2023-ASH-11', '1623-2023-ASH-11a.html', '2023-ASH-11-entrance4.jpg')
|
||||
('e', 'l', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance1.html')
|
||||
('e', 't', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance1.jpg')
|
||||
('e', 't', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance2.jpg')
|
||||
('e', 'l', '2023-ASH-12', '1623-2023-ASH-12.html', '2023-ASH-12-entrance2.html')
|
||||
('e', 'l', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance1.html')
|
||||
('e', 'l', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance2.html')
|
||||
('e', 't', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance2.jpg')
|
||||
('e', 't', '2023-ASH-13', '1623-2023-ASH-13.html', '2023-ASH-13-entrance1.jpg')
|
||||
('e', 'l', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-13and14-entrances.html')
|
||||
('e', 't', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-14-entrance1.jpg')
|
||||
('e', 'l', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-14-entrance1.html')
|
||||
('e', 't', '2023-ASH-14', '1623-2023-ASH-14.html', '2023-ASH-13and14-entrances.jpg')
|
||||
('e', 't', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165708.jpg')
|
||||
('e', 't', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165823.jpg')
|
||||
('e', 'l', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165708.html')
|
||||
('e', 'l', '2023-BL-11', '1623-2023-BL-11.html', '20230802_165823.html')
|
||||
('e', 't', '303', '1623-303.html', '2023-ASH-15-entrance3.jpg')
|
||||
('e', 'l', '303', '1623-303.html', '2023-ASH-15-entrance3.html')
|
||||
('e', 't', '303', '1623-303.html', '2023-ASH-15-entrance1.jpg')
|
||||
('e', 'l', '303', '1623-303.html', '2023-ASH-15-entrance2.html')
|
||||
('e', 'l', '303', '1623-303.html', '2023-ASH-15-entrance1.html')
|
||||
('e', 't', '303', '1623-303.html', '2023-ASH-15-entrance2.jpg')
|
||||
('e', 'l', '306', '1623-306b.html', '2023-ASH-17-bothentrances.html')
|
||||
('e', 'l', '306', '1623-306a.html', '2023-ASH-17-bothentrances.html')
|
||||
('e', 't', '306', '1623-306a.html', '2023-ASH-17-bothentrances.jpg')
|
||||
('e', 't', '306', '1623-306b.html', '2023-ASH-17-bothentrances.jpg')
|
||||
('e', 'l', '307', '1623-307b.html', '2023-ASH-05-entrance-b1.html')
|
||||
('e', 'l', '307', '1623-307a.html', '2023-ASH-05-entrance-a1.html')
|
||||
('e', 'l', '307', '1623-307a.html', '2023-ASH-05-entrance-a2.html')
|
||||
('e', 't', '307', '1623-307a.html', '2023-ASH-05-entrance-a2.jpg')
|
||||
('e', 't', '307', '1623-307a.html', '2023-ASH-05-entrance-a1.jpg')
|
||||
('e', 't', '307', '1623-307b.html', '2023-ASH-05-entrance-b1.jpg')
|
||||
('e', 'l', '308', '1623-308a.html', '2023-ASH-08-entrance-a1.html')
|
||||
('e', 't', '308', '1623-308b.html', '2023-ASH-08-entrance-b1.jpg')
|
||||
('e', 'l', '308', '1623-308b.html', '2023-ASH-08-entrance-b1.html')
|
||||
('e', 't', '308', '1623-308a.html', '2023-ASH-08-entrance-a1.jpg')
|
||||
('e', 't', '308', '1623-308a.html', '2023-ASH-08-entrance-a2.jpg')
|
||||
('e', 'l', '308', '1623-308a.html', '2023-ASH-08-entrance-a2.html')
|
||||
('e', 'l', '309', '1623-309a.html', '2023-ASH-09-entrance2.html')
|
||||
('e', 't', '309', '1623-309a.html', '2023-ASH-09-entrance1.jpg')
|
||||
('e', 't', '309', '1623-309a.html', '2023-ASH-09-entrance2.jpg')
|
||||
('e', 'l', '309', '1623-309a.html', '2023-ASH-09-entrance1.html')
|
||||
('e', 'l', '311', '1623-311.html', '2023-ASH-16-entrance2.html')
|
||||
('e', 't', '311', '1623-311.html', '2023-ASH-16-entrance3.jpg')
|
||||
('e', 'l', '311', '1623-311.html', '2023-ASH-16-entrance3.html')
|
||||
('e', 't', '311', '1623-311.html', '2023-ASH-16-entrance2.jpg')
|
||||
('e', 'l', '311', '1623-311.html', '2023-ASH-16-entrance1.html')
|
||||
('e', 't', '311', '1623-311.html', '2023-ASH-16-entrance1.jpg')
|
||||
@@ -0,0 +1,84 @@
|
||||
---------- 1626 fixing i/l/t folders for these caves and entrances
|
||||
1626-2018-AD-02
|
||||
1626-2018-AD-02 1626-2018-AD-02.html
|
||||
1626-2018-DM-01
|
||||
1626-2018-DM-01 1626-2018-DM-01.html
|
||||
1626-2018-DM-02
|
||||
1626-2018-DM-02 1626-2018-DM-02.html
|
||||
1626-2018-DM-03
|
||||
1626-2018-DM-03 1626-2018-DM-03.html
|
||||
1626-2018-DM-05
|
||||
1626-2018-DM-05 1626-2018-DM-05.html
|
||||
1626-2018-DM-06
|
||||
1626-2018-DM-06 1626-2018-DM-06.html
|
||||
1626-2018-ms-02
|
||||
1626-2018-ms-02 1626-2018-ms-02.html
|
||||
1626-2018-ms-03
|
||||
1626-2018-ms-03 1626-2018-ms-03.html
|
||||
1626-2018-ms-04
|
||||
1626-2018-ms-04 1626-2018-ms-04.html
|
||||
1626-2018-pw-01
|
||||
1626-2018-pw-01 1626-2018-pw-01.html
|
||||
1626-2018-pw-02
|
||||
1626-2018-pw-02 1626-2018-pw-02.html
|
||||
1626-2018-pw-03
|
||||
1626-2018-pw-03 1626-2018-pw-03.html
|
||||
1626-2018-tk-01
|
||||
1626-2018-tk-01 1626-2018-tk-01.html
|
||||
1626-2018-tk-02
|
||||
1626-2018-tk-02 1626-2018-tk-02.html
|
||||
1626-2019-HT-01
|
||||
1626-2019-HT-01 1626-2019-HT-01.html
|
||||
1626-2019-HT-02
|
||||
1626-2019-HT-02 1626-2019-HT-02.html
|
||||
1626-2019-rh-01
|
||||
1626-2019-rh-01 1626-2019-rh-01.html
|
||||
1626-2019-rh-02
|
||||
1626-2019-rh-02 1626-2019-rh-02.html
|
||||
1626-2023-BL-01
|
||||
1626-2023-BL-01 1626-2023-BL-01.html
|
||||
1626-2023-BL-03
|
||||
1626-2023-BL-03 1626-2023-BL-03.html
|
||||
1626-2023-BL-04
|
||||
1626-2023-BL-04a 1626-2023-BL-04a.html
|
||||
1626-2023-BL-04b 1626-2023-BL-04b.html
|
||||
1626-2023-BL-05
|
||||
1626-2023-BL-05 1626-2023-BL-05.html
|
||||
1626-2023-BL-06
|
||||
1626-2023-BL-06 1626-2023-BL-06.html
|
||||
1626-2023-BL-07
|
||||
1626-2023-BL-07 1626-2023-BL-07.html
|
||||
1626-2023-BL-09
|
||||
1626-2023-BL-09 1626-2023-BL-09.html
|
||||
1626-2023-BZ-01
|
||||
1626-2023-BZ-01 1626-2023-BZ-01.html
|
||||
1626-2023-ww-01
|
||||
1626-2023-ww-01 1626-2023-ww-01.html
|
||||
1626-2024-pb-01
|
||||
1626-2024-pb-01 1626-2024-pb-01.html
|
||||
1626-361
|
||||
1626-361 1626-361.html
|
||||
1626-E02
|
||||
1626-E02 1626-E02.html
|
||||
1626-E09
|
||||
1626-E09 1626-E09.html
|
||||
1626-E16
|
||||
1626-E16 1626-E16.html
|
||||
1626-E28
|
||||
1626-E28 1626-E28.html
|
||||
1626-casino-01
|
||||
1626-casino-01 1626-casino-01.html
|
||||
1626-loutoti-01
|
||||
1626-loutoti-01 1626-loutoti-01.html
|
||||
1626-upside-down-01
|
||||
1626-upside-down-01 1626-upside-down-01.html
|
||||
1626-2018-ad-03
|
||||
1626-2018-ad-03 1626-2018-ad-03.html
|
||||
1626-2023-BL-08
|
||||
1626-2023-BL-08 1626-2023-BL-08.html
|
||||
1626-LA11
|
||||
1626-LA11 1626-LA11.html
|
||||
1626-LA34
|
||||
1626-LA34 1626-LA34.html
|
||||
1626-LA12
|
||||
1626-LA12 1626-LA12.html
|
||||
@@ -0,0 +1,77 @@
|
||||
|
||||
# create an oci container image with
|
||||
# cd /home/expo && podman build -t expo:dev --rm -f troggle/Containerfile
|
||||
|
||||
#
|
||||
FROM docker.io/library/debian:bookworm
|
||||
|
||||
WORKDIR /home/expo2
|
||||
|
||||
RUN apt update && \
|
||||
apt install -y postgresql apache2 survex rsync git cgit proftpd \
|
||||
python3 python3-django python3-pil python3-piexif \
|
||||
python3-bs4 python3-unidecode python3-cryptography \
|
||||
libjs-codemirror
|
||||
|
||||
# do we need libjs-leaflet? libjs-sizzle? libjs-mgrs?
|
||||
|
||||
# Install non-packaged dependencies
|
||||
# apt install CaveView bins libjs-proj4 ufraw from local repo/backports?
|
||||
# bins - photo processing
|
||||
# ufraw - raw inages in photo collection do we need ufraw-batch? bring sin libtiff5 libgtkimageview0 libexiv2-14 libwebp6
|
||||
# kanboard - kanboard organiser
|
||||
# caveview.js - rotating cave on each page. brings in libjs-proj4 which brings in linjs-mgrs . We should update.
|
||||
# tonymce (html editor)
|
||||
# we also have host and rssh. probably not needed?
|
||||
# Copy only the dependency files first
|
||||
#wget troggle/pyproject.toml troggle/uv.lock
|
||||
#RUN wget troggle/pyproject.toml && uv sync --frozen
|
||||
|
||||
RUN useradd -m expo -G sudo -s /bin/bash
|
||||
|
||||
# Optional:install and configure BoE
|
||||
|
||||
#add apache config, enable modules
|
||||
#configure postgres
|
||||
|
||||
|
||||
#Start up services for apache, proftpd, postgresql, cron?
|
||||
|
||||
#end of system stage
|
||||
|
||||
|
||||
# User files - separate layer?
|
||||
RUN chown expo:expo .
|
||||
USER expo
|
||||
|
||||
RUN mkdir -p repositories/git && cd repositories/git && \
|
||||
git clone http://expo.survex.com/repositories/troggle/.git && \
|
||||
git clone http://expo.survex.com/repositories/expoweb/.git && \
|
||||
git clone http://expo.survex.com/repositories/loser/.git && \
|
||||
git clone http://expo.survex.com/repositories/drawings/.git
|
||||
RUN ln -s repositories/git/troggle troggle && \
|
||||
ln -s repositories/git/troggle expoweb && \
|
||||
ln -s repositories/git/troggle loser && \
|
||||
ln -s repositories/git/troggle drawings
|
||||
|
||||
RUN git config --global user.email "expo@potato.hut"
|
||||
RUN git config --global user.name "expo"
|
||||
RUN git config --global pull.rebase true
|
||||
|
||||
#rsync -az expo.survex.com:expofiles expofiles
|
||||
#demo short version
|
||||
#rsync -az expo.survex.com:expofiles/surveyscans/2018 expofiles/surveyscans/2018
|
||||
#rsync -az expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
|
||||
|
||||
#/bin/sh is missing at this point - why?
|
||||
RUN cd troggle && run databaseReset.py reset INIT
|
||||
|
||||
EXPOSE 8080
|
||||
#Run postres process
|
||||
CMD ["uv", "run", "python", "troggle/manage.py", "runserver", "0.0.0.0:8080"]
|
||||
CMD ["bash"]
|
||||
|
||||
# move this file to the directory above troggle, loser etc before running the podman image build command.
|
||||
|
||||
# used image with:
|
||||
# podman run -it --network=host --rm expo:dev
|
||||
@@ -8,6 +8,9 @@ Troggle has been forked into two projects. The original one is maintained by Aar
|
||||
and was used for Erebus caves in Antarctica.
|
||||
The CUCC variant uses files as the definitive data, not the database, and lives at http://expo.survex.com/repositories/troggle/.git/
|
||||
|
||||
The versions have diverged markedly, not just in the software but also in the implicit convnetions of how the directory structures of the survex files, the drawings and
|
||||
the scans are arranged.
|
||||
|
||||
For the server setup, see /_deploy/debian/wookey-exposerver-recipe.txt
|
||||
and see http://expo.survex.com/handbook/troggle/serverconfig.html
|
||||
|
||||
|
||||
@@ -0,0 +1,200 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
|
||||
# Shared login accounts used on the expo server.
EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
# Real secrets (passwords, SECRET_KEY, MARIADB_SERVER_PASSWORD, ...) are
# star-imported from a file that must be listed in .gitignore.
from secret_credentials import *

# Outgoing mail: authenticated SMTP over TLS on the submission port (587).
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
# THINK before you push this to a repo
# - have you checked that secret_credentials.py is in .gitignore ?
# - we don't want to have to change the expo system password !
# -----------------------------------------------------------------
# default values, real secrets will be imported from credentials.py in future

# Path to the SQLite database file used when DBSWITCH == "sqlite".
SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'

PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000" # not needed as it is the default

# Recipients of Django error reports when DEBUG is False.
ADMINS = (
    ('Philip', 'philip.sargent@klebos.eu'),
)

# e.g. "python3.12" - used below to build the per-version lib directory name.
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
# Directory two levels above this file: holds the troggle, expoweb, loser,
# drawings and expofiles checkouts side by side.
REPOS_ROOT_PATH = Path(__file__).parent.parent
LIBDIR = REPOS_ROOT_PATH / "lib" / PV

# Directory containing this settings file, i.e. the troggle checkout.
TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility

# FILES = Path('/mnt/d/expofiles/')
EXPOFILES = REPOS_ROOT_PATH / "expofiles"

SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"
PHOTOS_YEAR = "2023"
# Cave ids highlighted on the front page.
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]


PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
LOGFILE = PYTHON_PATH / "troggle.log"
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# NOTE: a dead duplicate assignment MEDIA_URL = "/site-media/" (hyphen)
# used to sit here; it was immediately overwritten by the "/site_media/"
# (underscore) form below, so it has been removed.

DIR_ROOT = Path("")  # this should end in / if a value is given
URL_ROOT = "/"
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'

# Note that these constants are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
# (Path discards the first argument when the second is absolute, and drops the
# trailing slash; the sanitisation step later re-appends "/" via str().)
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")


STATIC_URL = Path(URL_ROOT, "/static/")  # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/")  # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------

PUBLIC_SITE = True
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True # experimental page cache for a handful of page types

# external executables (expected to be on PATH):
CAVERN = "cavern" # for parsing .svx files and producing .3d files
SURVEXPORT = "survexport" # for parsing .3d files and producing .pos files
|
||||
|
||||
# Two alternative DATABASES configurations; DBSWITCH below selects one.
DBSQLITE = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        "NAME": SQLITEFILE,
        # 'NAME' : ':memory:',
        "USER": "expo", # Not used with sqlite3.
        "PASSWORD": "sekrit", # Not used with sqlite3.
        "HOST": "", # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "", # Set to empty string for default. Not used with sqlite3.
    }
}
# MARIADB_SERVER_PASSWORD comes from the secret_credentials star-import above.
DBMARIADB = {
    "default": {
        "ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        "OPTIONS": {
            "charset": "utf8mb4",
        },
        "NAME": "troggle", # Or path to database file if using sqlite3.
        "USER": "expo",
        "PASSWORD": MARIADB_SERVER_PASSWORD,
        "HOST": "", # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "", # Set to empty string for default. Not used with sqlite3.
    }
}
|
||||
|
||||
# default database for me is sqlite
DBSWITCH = "sqlite"

# Select the active configuration; edit DBSWITCH above to change engines.
if DBSWITCH == "sqlite":
    DATABASES = DBSQLITE
if DBSWITCH == "mariadb":
    DATABASES = DBMARIADB
|
||||
|
||||
|
||||
|
||||
# Django template engine configuration. DIRS points at troggle/templates.
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [TEMPLATE_PATH],
        "OPTIONS": {
            # NOTE(review): "DEBUG" is a literal string here (always truthy)
            # - probably the DEBUG variable was intended; confirm.
            "debug": "DEBUG",
            "context_processors": [
                # django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
                "django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
                "core.context.troggle_context", # in core/context.py - only used in expedition.html
                "django.template.context_processors.debug",
                "django.template.context_processors.i18n",
                "django.template.context_processors.media", # includes a variable MEDIA_URL
                "django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
                "django.template.context_processors.tz",
                "django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
                "django.contrib.messages.context_processors.messages",
            ],
            "loaders": [
                "django.template.loaders.filesystem.Loader", # default location is troggle/templates/
                "django.template.loaders.app_directories.Loader", # needed for admin 'app'
            ],
        },
    },
]
|
||||
|
||||
|
||||
|
||||
# Sibling repository checkouts holding the definitive survey data.
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"

# EXPOWEB_URL = "" # defunct, removed.
# SCANS_URL = '/survey_scans/' # defunct, removed.

# Make the repo root and the troggle package importable.
sys.path.append(str(REPOS_ROOT_PATH))
sys.path.append(str(PYTHON_PATH))

# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
#     'plugins': "table,spellchecker,paste,searchreplace",
#     'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed


# TEST_RUNNER = "django.test.runner.DiscoverRunner"

# Marks successful completion of this module's import (see the matching
# message printed at the top of the file).
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
# Run this in a terminal : 'bash os-survey.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12


# Install the cave-survey tool chain (large downloads).
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install survex-aven -y
sudo apt install gpsprune qgis -y


# Mirror expofiles from the server, skipping the bulky media directories.
cd ~/expo
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
# sudo apt install python-is-python3 -y
# Show the interpreter version; the prose after '--version' used to be
# unquoted and was passed to python as (bogus) arguments.
python --version  # ensure python is an alias for python3 not python2.7
ssh -V
# Bring the base system fully up to date before installing anything.
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
|
||||
|
||||
|
||||
# Already in Ubuntu 24.04 on WSL:
|
||||
# sudo apt install git -y
|
||||
# sudo apt install wget gpg
|
||||
# sudo apt install sftp -y
|
||||
# sudo apt install openssh-client -y
|
||||
# sudo apt install rsync
|
||||
|
||||
# Now using uv not pip:
|
||||
# sudo apt install python3-pip -y
|
||||
|
||||
sudo apt install sqlite3 -y
|
||||
sudo apt install gedit -y
|
||||
sudo apt install tig gitg meld -y
|
||||
|
||||
# python formatting https://docs.astral.sh/ruff/
|
||||
sudo snap install ruff
|
||||
|
||||
# # do not actually use this any more
|
||||
# sudo useradd expo
|
||||
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
|
||||
|
||||
# as debian does not install everything that ubuntu does, you need:
|
||||
sudo apt install python3-venv -y
|
||||
sudo apt install python3-dev -y
|
||||
# sudo apt install python3-distutils -y
|
||||
|
||||
# install uv
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
sudo apt install mariadb-server -y
|
||||
sudo apt install libmariadb-dev -y
|
||||
|
||||
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
|
||||
# sudo service mysql start
|
||||
|
||||
|
||||
# We don't install the later version or the earlier versions of python - for dev and "server mimic" environments
|
||||
# we leave that to uv to install now.
|
||||
|
||||
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
|
||||
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
|
||||
|
||||
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
|
||||
# sudo apt install software-properties-common apt-transport-https
|
||||
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
|
||||
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
|
||||
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
|
||||
# sudo apt update
|
||||
# sudo apt install code
|
||||
|
||||
|
||||
mkdir ~/expo
|
||||
cd ~/expo
|
||||
|
||||
# Remind the user to set their own git identity; double quotes so the
# apostrophe in "can't" does not terminate the string (the old single-quoted
# form silently dropped the apostrophe from the output).
echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
echo "### because you can't clone the repos without a key"
|
||||
|
||||
git config --global user.email "philip.sargent@gmail.com"
|
||||
git config --global user.name "Philip Sargent"
|
||||
git config --global pull.rebase true
|
||||
|
||||
#Change this to clone using https?? at least for troggle?
|
||||
git clone ssh://expo@expo.survex.com/home/expo/troggle
|
||||
git clone ssh://expo@expo.survex.com/home/expo/loser
|
||||
git clone ssh://expo@expo.survex.com/home/expo/expoweb
|
||||
git clone ssh://expo@expo.survex.com/home/expo/drawings
|
||||
|
||||
mkdir expofiles
|
||||
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
|
||||
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
|
||||
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
Executable
+67
@@ -0,0 +1,67 @@
|
||||
#! /bin/bash
|
||||
# create and sanitise files for pushing to repo
|
||||
# catastrophically forgot to sanitize localsettingsWSL.py - oops.
|
||||
|
||||
#Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
|
||||
# Philip Sargent 2022/04/12
|
||||
|
||||
HOSTNAME=`hostname`
|
||||
echo "** This copies file to _deploy/${HOSTNAME}/ !"
|
||||
cd ..
|
||||
|
||||
cd troggle
|
||||
echo `pwd`
|
||||
echo deprecations.
|
||||
|
||||
PYTHON="uv run"
|
||||
|
||||
source .venv/bin/activate
|
||||
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
|
||||
deactivate
|
||||
echo diffsettings.
|
||||
rm diffsettings.txt
|
||||
if test -f "diffsettings.txt"; then
|
||||
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
|
||||
exit
|
||||
fi
|
||||
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
|
||||
|
||||
echo inspectdb.
|
||||
# this next line requires database setting to be troggle.sqlite:
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
#egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo remove passwords.
|
||||
cp localsettings.py localsettings-${HOSTNAME}.py
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
# Scrub the admin password from both generated files; the confirmation
# message used to wrongly say EXPOUSERPASS.
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOADMINUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
|
||||
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
|
||||
|
||||
mkdir -p _deploy/${HOSTNAME}
|
||||
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
|
||||
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
|
||||
cp uv.lock _deploy/${HOSTNAME}
|
||||
cp *.sh _deploy/${HOSTNAME}
|
||||
|
||||
ls -tlr *.toml
|
||||
uv tree
|
||||
|
||||
#
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# in ./pre-run.sh
|
||||
# $PYTHON reset-django.py
|
||||
# $PYTHON manage.py makemigrations
|
||||
# $PYTHON manage.py test
|
||||
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -i "unable|error" troggle-inspectdb.py
|
||||
Executable
+36
@@ -0,0 +1,36 @@
|
||||
#! /bin/bash
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# Changed to use uv not pip, requires manage.py to have uv structured uv comment in it.
|
||||
PYTHON="uv run"
|
||||
|
||||
echo "** Run inspectdb:"
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo ""
|
||||
# count non-blank lines of python and template HTML code
|
||||
# includes all variants of settings.py files
|
||||
|
||||
# fix this as core/utils.py has 28,000 lines of numbers.
|
||||
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
|
||||
|
||||
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
|
||||
|
||||
echo "** Run reset-django.py - which deletes the database"
|
||||
# This deletes the database so must run after generating troggle-inspectdb.py
|
||||
$PYTHON reset-django.py
|
||||
echo "** After cleanup deletion, remake all migrations."
|
||||
$PYTHON manage.py makemigrations >/dev/null
|
||||
$PYTHON manage.py migrate
|
||||
|
||||
echo "** Now running self check"
|
||||
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
|
||||
$PYTHON manage.py check -v 3 --deploy
|
||||
|
||||
echo "** Now running test suite"
|
||||
# $PYTHON manage.py test -v 1
|
||||
|
||||
echo ""
|
||||
echo `tail -1 lines-of-python.txt` non-comment lines of python.
|
||||
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
|
||||
|
||||
echo '** If you have an error running manage.py, maybe you are not in an activated venv ? or your manage.py is not managed by uv properly ?'
|
||||
Executable
+3
@@ -0,0 +1,3 @@
|
||||
#! /bin/sh

# Summarise starstatement_timing.log per key (second ':'-field of column 1):
# total seconds, sample count and average, sorted descending by total.
awk '{split($1,a,":"); sum[a[2]]+=$2; count[a[2]]++} END {for (k in sum) printf "%s: total %.6f s, count %d, avg %.6f s\n", k, sum[k], count[k], sum[k]/count[k]}' starstatement_timing.log | sort -nr --key=3
|
||||
Generated
+238
@@ -0,0 +1,238 @@
|
||||
version = 1
|
||||
requires-python = ">=3.13"
|
||||
|
||||
[[package]]
|
||||
name = "asgiref"
|
||||
version = "3.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "beautifulsoup4"
|
||||
version = "4.14.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "soupsieve" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.10.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/61/83/153f54356c7c200013a752ce1ed5448573dca546ce125801afca9e1ac1a4/coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", size = 821662 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/08/4166ecfb60ba011444f38a5a6107814b80c34c717bc7a23be0d22e92ca09/coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", size = 217106 },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/d7/b71022408adbf040a680b8c64bf6ead3be37b553e5844f7465643979f7ca/coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", size = 217353 },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/68/21e0d254dbf8972bb8dd95e3fe7038f4be037ff04ba47d6d1b12b37510ba/coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", size = 248350 },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/65/28752c3a896566ec93e0219fc4f47ff71bd2b745f51554c93e8dcb659796/coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", size = 250955 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/eb/ca6b7967f57f6fef31da8749ea20417790bb6723593c8cd98a987be20423/coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", size = 252230 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/29/17a411b2a2a18f8b8c952aa01c00f9284a1fbc677c68a0003b772ea89104/coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", size = 250387 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/89/97a9e271188c2fbb3db82235c33980bcbc733da7da6065afbaa1d685a169/coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", size = 248280 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/c6/0ad7d0137257553eb4706b4ad6180bec0a1b6a648b092c5bbda48d0e5b2c/coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", size = 249894 },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/56/fb3aba936addb4c9e5ea14f5979393f1c2466b4c89d10591fd05f2d6b2aa/coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", size = 219536 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/54/baacb8f2f74431e3b175a9a2881feaa8feb6e2f187a0e7e3046f3c7742b2/coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", size = 220330 },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/8a/82a3788f8e31dee51d350835b23d480548ea8621f3effd7c3ba3f7e5c006/coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", size = 218961 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/a1/590154e6eae07beee3b111cc1f907c30da6fc8ce0a83ef756c72f3c7c748/coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", size = 217819 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/ff/436ffa3cfc7741f0973c5c89405307fe39b78dcf201565b934e6616fc4ad/coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", size = 218040 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/ca/5787fb3d7820e66273913affe8209c534ca11241eb34ee8c4fd2aaa9dd87/coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", size = 259374 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/89/21af956843896adc2e64fc075eae3c1cadb97ee0a6960733e65e696f32dd/coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", size = 261551 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/96/390a69244ab837e0ac137989277879a084c786cf036c3c4a3b9637d43a89/coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", size = 263776 },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/32/cfd6ae1da0a521723349f3129b2455832fc27d3f8882c07e5b6fefdd0da2/coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", size = 261326 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/c4/bf8d459fb4ce2201e9243ce6c015936ad283a668774430a3755f467b39d1/coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", size = 259090 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/5d/a234f7409896468e5539d42234016045e4015e857488b0b5b5f3f3fa5f2b/coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", size = 260217 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/ad/87560f036099f46c2ddd235be6476dd5c1d6be6bb57569a9348d43eeecea/coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", size = 220194 },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/a8/04a482594fdd83dc677d4a6c7e2d62135fff5a1573059806b8383fad9071/coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", size = 221258 },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/ad/7da28594ab66fe2bc720f1bc9b131e62e9b4c6e39f044d9a48d18429cc21/coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", size = 219521 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/7f/c8b6e4e664b8a95254c35a6c8dd0bf4db201ec681c169aae2f1256e05c85/coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", size = 217090 },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/74/3ee14ede30a6e10a94a104d1d0522d5fb909a7c7cac2643d2a79891ff3b9/coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", size = 217365 },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/5f/06ac21bf87dfb7620d1f870dfa3c2cae1186ccbcdc50b8b36e27a0d52f50/coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", size = 248413 },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/bc/cc5bed6e985d3a14228539631573f3863be6a2587381e8bc5fdf786377a1/coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", size = 250943 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/43/6a9fc323c2c75cd80b18d58db4a25dc8487f86dd9070f9592e43e3967363/coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", size = 252301 },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/7c/3e791b8845f4cd515275743e3775adb86273576596dc9f02dca37357b4f2/coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", size = 250302 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/bc/5099c1e1cb0c9ac6491b281babea6ebbf999d949bf4aa8cdf4f2b53505e8/coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", size = 248237 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/51/d346eb750a0b2f1e77f391498b753ea906fde69cc11e4b38dca28c10c88c/coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", size = 249726 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/85/eebcaa0edafe427e93286b94f56ea7e1280f2c49da0a776a6f37e04481f9/coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", size = 219825 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/f7/6d43e037820742603f1e855feb23463979bf40bd27d0cde1f761dcc66a3e/coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", size = 220618 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/b0/ed9432e41424c51509d1da603b0393404b828906236fb87e2c8482a93468/coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", size = 219199 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/54/5a7ecfa77910f22b659c820f67c16fc1e149ed132ad7117f0364679a8fa9/coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", size = 217833 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/0e/25672d917cc57857d40edf38f0b867fb9627115294e4f92c8fcbbc18598d/coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", size = 218048 },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/7c/0b2b4f1c6f71885d4d4b2b8608dcfc79057adb7da4143eb17d6260389e42/coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", size = 259549 },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/73/abb8dab1609abec7308d83c6aec547944070526578ee6c833d2da9a0ad42/coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", size = 261715 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/d1/abf31de21ec92731445606b8d5e6fa5144653c2788758fcf1f47adb7159a/coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", size = 263969 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/b3/ef274927f4ebede96056173b620db649cc9cb746c61ffc467946b9d0bc67/coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", size = 261408 },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/fc/83ca2812be616d69b4cdd4e0c62a7bc526d56875e68fd0f79d47c7923584/coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", size = 259168 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/4f/e0779e5716f72d5c9962e709d09815d02b3b54724e38567308304c3fc9df/coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", size = 260317 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/fe/4247e732f2234bb5eb9984a0888a70980d681f03cbf433ba7b48f08ca5d5/coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", size = 220600 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/a0/f294cff6d1034b87839987e5b6ac7385bec599c44d08e0857ac7f164ad0c/coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", size = 221714 },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/18/fa1afdc60b5528d17416df440bcbd8fd12da12bfea9da5b6ae0f7a37d0f7/coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", size = 219735 },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/b6/fff6609354deba9aeec466e4bcaeb9d1ed3e5d60b14b57df2a36fb2273f2/coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", size = 208736 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "django"
|
||||
version = "6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "asgiref" },
|
||||
{ name = "sqlparse" },
|
||||
{ name = "tzdata", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/15/75/19762bfc4ea556c303d9af8e36f0cd910ab17dff6c8774644314427a2120/django-6.0.tar.gz", hash = "sha256:7b0c1f50c0759bbe6331c6a39c89ae022a84672674aeda908784617ef47d8e26", size = 10932418 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/ae/f19e24789a5ad852670d6885f5480f5e5895576945fcc01817dfd9bc002a/django-6.0-py3-none-any.whl", hash = "sha256:1cc2c7344303bbfb7ba5070487c17f7fc0b7174bbb0a38cebf03c675f5f19b6d", size = 8339181 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "piexif"
|
||||
version = "1.1.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fa/84/a3f25cec7d0922bf60be8000c9739d28d24b6896717f44cc4cfb843b1487/piexif-1.1.3.zip", hash = "sha256:83cb35c606bf3a1ea1a8f0a25cb42cf17e24353fd82e87ae3884e74a302a5f1b", size = 1011134 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/d8/6f63147dd73373d051c5eb049ecd841207f898f50a5a1d4378594178f6cf/piexif-1.1.3-py2.py3-none-any.whl", hash = "sha256:3bc435d171720150b81b15d27e05e54b8abbde7b4242cddd81ef160d283108b6", size = 20691 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pillow"
|
||||
version = "12.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132 },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804 },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553 },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917 },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391 },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477 },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406 },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218 },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564 },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260 },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248 },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043 },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998 },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165 },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834 },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531 },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186 },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482 },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092 },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158 },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001 },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146 },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864 },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045 },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyaes"
|
||||
version = "1.6.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/44/66/2c17bae31c906613795711fc78045c285048168919ace2220daa372c7d72/pyaes-1.6.1.tar.gz", hash = "sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f", size = 28536 }
|
||||
|
||||
[[package]]
|
||||
name = "soupsieve"
|
||||
version = "2.8"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlparse"
|
||||
version = "0.5.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "troggle"
|
||||
version = "2025.9.26"
|
||||
source = { virtual = "." }
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "beautifulsoup4" },
|
||||
{ name = "coverage" },
|
||||
{ name = "django" },
|
||||
{ name = "piexif" },
|
||||
{ name = "pillow" },
|
||||
{ name = "pyaes" },
|
||||
{ name = "unidecode" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "beautifulsoup4", specifier = ">=4.12.3" },
|
||||
{ name = "coverage", specifier = ">=7.6.9" },
|
||||
{ name = "django", specifier = ">=5.2.3" },
|
||||
{ name = "piexif", specifier = ">=1.1.3" },
|
||||
{ name = "pillow", specifier = ">=11.0.0" },
|
||||
{ name = "pyaes", specifier = ">=1.6.1" },
|
||||
{ name = "unidecode", specifier = ">=1.3.8" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.15.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tzdata"
|
||||
version = "2025.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unidecode"
|
||||
version = "1.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/94/7d/a8a765761bbc0c836e397a2e48d498305a865b70a8600fd7a942e85dcf63/Unidecode-1.4.0.tar.gz", hash = "sha256:ce35985008338b676573023acc382d62c264f307c8f7963733405add37ea2b23", size = 200149 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/b7/559f59d57d18b44c6d1250d2eeaa676e028b9c527431f5d0736478a73ba1/Unidecode-1.4.0-py3-none-any.whl", hash = "sha256:c3c7606c27503ad8d501270406e345ddb480a7b5f38827eafe4fa82a137f0021", size = 235837 },
|
||||
]
|
||||
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
# Set up the troggle development environment. Now using uv, unbelievably simpler.
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'

# Expects an Ubuntu 24.04 with all the gubbins already installed
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST:
# use the script os-trog24.04.sh, running it in /home/username/
python3 --version
cd ~/expo/troggle
echo "-- EXPO folder [current directory]: $(pwd)"
# Quote "$0" and the substitution so the script still works from a path containing spaces.
TROGDIR=$(cd "$(dirname "$0")" && pwd)
echo "-- Troggle folder: ${TROGDIR}"

cp dev.toml pyproject.toml
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py

uv self update
uv sync

# fudge for philip's laptop prior to M2 SSD upgrade
if [ ! -d /mnt/d/EXPO ]; then
    # -p: do not fail when /mnt/d already exists but /mnt/d/EXPO does not
    sudo mkdir -p /mnt/d
    sudo mount -t drvfs D: /mnt/d
fi

uv pip list

echo "Django version:$(uv run django-admin --version)"

echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/expo/troggle'
'uv run django-admin'
'uv run manage.py check'
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'

## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine

'uv run manage.py test -v 2'
'./pre-run.sh' (runs the tests again)

'uv run databaseReset.py reset INIT'
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
#  echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi
|
||||
@@ -0,0 +1,179 @@
|
||||
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
import sys
from pathlib import Path

print(" * importing troggle/localsettings.py")


EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'

# Real secrets (e.g. MARIADB_SERVER_PASSWORD) are provided by secret_credentials.py.
from secret_credentials import *

# NOTE(review): the placeholder assignments below run AFTER the wildcard import,
# so they overwrite any same-named values imported above.  On a deployed machine
# the sanitising deploy script rewrites these lines with the real values --
# confirm before relying on secret_credentials.py for these four settings.
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
# MARIADB_SERVER_PASSWORD =

EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu"  # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"

#SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
SQLITEFILE = str(Path(__file__).parent.parent / "troggle.sqlite")  # can be ':memory:'

PHOTOSREMOTE = False  # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True  # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000"  # not needed as it is the default

ADMINS = (
    ('Philip', 'philip.sargent@klebos.eu'),  # only on dev
)

# e.g. "python3.13" -- used to build the per-version lib directory path below
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)

# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/

# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent  # folder above troggle, expoweb, drawings, loser
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
sys.path.append(str(REPOS_ROOT_PATH))

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib"  # used for CaveViewer JS utility

EXPOFILES = REPOS_ROOT_PATH / "expofiles"  # sometimes on a different filesystem

SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"  # sometimes on a different filesystem
PHOTOS_YEAR = "2025"

KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons"  # Google Earth export in /caves/

# URL that handles the media served from MEDIA_ROOT.
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
URL_ROOT = "/"
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")

STATIC_URL = Path(URL_ROOT, "/static/")  # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/")  # used for CaveViewer JS utility

# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------

PUBLIC_SITE = True
DEBUG = True  # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True  # experimental page cache for a handful of page types

DBSQLITE = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        "NAME": SQLITEFILE,
        # 'NAME' : ':memory:',
        "USER": "expo",  # Not used with sqlite3.
        "PASSWORD": "sekrit",  # Not used with sqlite3.
        "HOST": "",  # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "",  # Set to empty string for default. Not used with sqlite3.
    }
}
DBMARIADB = {
    "default": {
        "ENGINE": "django.db.backends.mysql",  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        "OPTIONS": {
            "charset": "utf8mb4",
        },
        "NAME": "troggle",  # Or path to database file if using sqlite3.
        "USER": "expo",
        "PASSWORD": MARIADB_SERVER_PASSWORD,
        "HOST": "",  # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "",  # Set to empty string for default. Not used with sqlite3.
    }
}

# default database for me is sqlite
DBSWITCH = "sqlite"

if DBSWITCH == "sqlite":
    DATABASES = DBSQLITE
elif DBSWITCH == "mariadb":
    DATABASES = DBMARIADB

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [TEMPLATE_PATH],
        "OPTIONS": {
            # BUGFIX: was the literal string "DEBUG" (always truthy); pass the setting itself.
            "debug": DEBUG,
            "context_processors": [
                # django.template.context_processors.csrf,  # is always enabled and cannot be removed, sets csrf_token
                "django.contrib.auth.context_processors.auth",  # knowledge of logged-on user & permissions
                "core.context.troggle_context",  # in core/context.py - only used in expedition.html
                "django.template.context_processors.debug",
                "django.template.context_processors.i18n",
                "django.template.context_processors.media",  # includes a variable MEDIA_URL
                "django.template.context_processors.static",  # includes a variable STATIC_URL used by admin pages
                "django.template.context_processors.tz",
                "django.template.context_processors.request",  # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
                "django.contrib.messages.context_processors.messages",
            ],
            "loaders": [
                "django.template.loaders.filesystem.Loader",  # default location is troggle/templates/
                "django.template.loaders.app_directories.Loader",  # needed for admin 'app'
            ],
        },
    },
]

SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"

# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"

# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
#     'plugins': "table,spellchecker,paste,searchreplace",
#     'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'  # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/'  # not needed while TinyMCE not installed

print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,179 @@
|
||||
"""Settings for a troggle installation which may vary among different
installations: for development or deployment, in a docker image or
python virtual environment (venv), on ubuntu, debian or in Windows
System for Linux (WSL), on the main server or in the potato hut,
using SQLite or mariaDB.

It sets the directory locations for the major parts of the system so
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
a system-wide location rather than just a local directory.

This file is included at the end of the main troggle/settings.py file so that
it overwrites defaults in that file.

Read https://realpython.com/python-pathlib/
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
"""
import sys
from pathlib import Path

print(" * importing troggle/localsettings.py")


EXPOUSER = 'expo'
EXPOADMINUSER = 'expoadmin'
EXPOUSER_EMAIL = 'wookey@wookware.org'
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'

# Real secrets (e.g. MARIADB_SERVER_PASSWORD) are provided by secret_credentials.py.
from secret_credentials import *

# NOTE(review): the placeholder assignments below run AFTER the wildcard import,
# so they overwrite any same-named values imported above.  On a deployed machine
# the sanitising deploy script rewrites these lines with the real values --
# confirm before relying on secret_credentials.py for these four settings.
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
# MARIADB_SERVER_PASSWORD =

EMAIL_HOST = "smtp-auth.mythic-beasts.com"
EMAIL_HOST_USER = "django-test@klebos.eu"  # Philip Sargent really
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"

#SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
SQLITEFILE = str(Path(__file__).parent.parent / "troggle.sqlite")  # can be ':memory:'

PHOTOSREMOTE = False  # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
EXPOFILESREMOTE = False  # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
# SECURE_SSL_REDIRECT = True  # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
SERVERPORT = "8000"  # not needed as it is the default

ADMINS = (
    ('Philip', 'philip.sargent@klebos.eu'),  # only on dev
)

# e.g. "python3.13" -- used to build the per-version lib directory path below
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)

# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
# so we use pathlib which has been standard since python 3.4
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/

# --------------------- MEDIA redirections BEGIN ---------------------
REPOS_ROOT_PATH = Path(__file__).parent.parent  # folder above troggle, expoweb, drawings, loser
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
sys.path.append(str(REPOS_ROOT_PATH))

TROGGLE_PATH = Path(__file__).parent
TEMPLATE_PATH = TROGGLE_PATH / "templates"
MEDIA_ROOT = TROGGLE_PATH / "media"
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib"  # used for CaveViewer JS utility

EXPOFILES = REPOS_ROOT_PATH / "expofiles"  # sometimes on a different filesystem

SCANS_ROOT = EXPOFILES / "surveyscans"
PHOTOS_ROOT = EXPOFILES / "photos"  # sometimes on a different filesystem
PHOTOS_YEAR = "2025"

KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons"  # Google Earth export in /caves/

# URL that handles the media served from MEDIA_ROOT.
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
# and they all need to end with / so using 'Path' doesn't work..
URL_ROOT = "/"
MEDIA_URL = Path(URL_ROOT, "/site_media/")
PHOTOS_URL = Path(URL_ROOT, "/photos/")

STATIC_URL = Path(URL_ROOT, "/static/")  # used by Django admin pages. Do not delete.
JSLIB_URL = Path(URL_ROOT, "/javascript/")  # used for CaveViewer JS utility

# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
# --------------------- MEDIA redirections END ---------------------

PUBLIC_SITE = True
DEBUG = True  # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
CACHEDPAGES = True  # experimental page cache for a handful of page types

DBSQLITE = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        "NAME": SQLITEFILE,
        # 'NAME' : ':memory:',
        "USER": "expo",  # Not used with sqlite3.
        "PASSWORD": "sekrit",  # Not used with sqlite3.
        "HOST": "",  # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "",  # Set to empty string for default. Not used with sqlite3.
    }
}
DBMARIADB = {
    "default": {
        "ENGINE": "django.db.backends.mysql",  # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        "OPTIONS": {
            "charset": "utf8mb4",
        },
        "NAME": "troggle",  # Or path to database file if using sqlite3.
        "USER": "expo",
        "PASSWORD": MARIADB_SERVER_PASSWORD,
        "HOST": "",  # Set to empty string for localhost. Not used with sqlite3.
        "PORT": "",  # Set to empty string for default. Not used with sqlite3.
    }
}

# default database for me is sqlite
DBSWITCH = "sqlite"

if DBSWITCH == "sqlite":
    DATABASES = DBSQLITE
elif DBSWITCH == "mariadb":
    DATABASES = DBMARIADB

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [TEMPLATE_PATH],
        "OPTIONS": {
            # BUGFIX: was the literal string "DEBUG" (always truthy); pass the setting itself.
            "debug": DEBUG,
            "context_processors": [
                # django.template.context_processors.csrf,  # is always enabled and cannot be removed, sets csrf_token
                "django.contrib.auth.context_processors.auth",  # knowledge of logged-on user & permissions
                "core.context.troggle_context",  # in core/context.py - only used in expedition.html
                "django.template.context_processors.debug",
                "django.template.context_processors.i18n",
                "django.template.context_processors.media",  # includes a variable MEDIA_URL
                "django.template.context_processors.static",  # includes a variable STATIC_URL used by admin pages
                "django.template.context_processors.tz",
                "django.template.context_processors.request",  # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
                "django.contrib.messages.context_processors.messages",
            ],
            "loaders": [
                "django.template.loaders.filesystem.Loader",  # default location is troggle/templates/
                "django.template.loaders.app_directories.Loader",  # needed for admin 'app'
            ],
        },
    },
]

SURVEX_DATA = REPOS_ROOT_PATH / "loser"
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"

# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
STATIC_URL = str(STATIC_URL) + "/"
MEDIA_URL = str(MEDIA_URL) + "/"

# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
# TINYMCE_DEFAULT_CONFIG = {
#     'plugins': "table,spellchecker,paste,searchreplace",
#     'theme': "advanced",
# }
# TINYMCE_SPELLCHECKER = False
# TINYMCE_COMPRESSOR = True
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/'  # not needed while TinyMCE not installed
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/'  # not needed while TinyMCE not installed

print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
# Install the survey-drawing toolchain and mirror expofiles.
# Run this in a terminal : 'bash os-survey.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
# BUGFIX: the message below previously told the user to run "os-trog.sh" (wrong script).
echo 'Run this in a terminal in your home directory: "bash os-survey.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12

echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
sudo apt install tunnelx therion -y
sudo apt install survex-aven -y
sudo apt install gpsprune qgis -y

cd ~/expo
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
# Prepare a clean Ubuntu/WSL machine for troggle development: OS packages,
# uv, mariadb, git config and initial repo clones.
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
# 'Open Linux shell here'
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 24.04 relatively clean install.
# 24.04 has python 3.12

# sudo apt install python-is-python3 -y
# BUGFIX: the explanatory text was previously passed to python as arguments; it is a comment.
python --version  # ensure python is an alias for python3 not python2.7
ssh -V
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y

# Already in Ubuntu 24.04 on WSL:
# sudo apt install git -y
# sudo apt install wget gpg
# sudo apt install sftp -y
# sudo apt install openssh-client -y
# sudo apt install rsync

# Now using uv not pip:
# sudo apt install python3-pip -y

sudo apt install sqlite3 -y
sudo apt install gedit -y
sudo apt install tig gitg meld -y

# python formatting https://docs.astral.sh/ruff/
sudo snap install ruff

# # do not actually use this any more
# sudo useradd expo
# sudo usermod -a -G sudo expo  # to put expo in sudoers group, re-login required

# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y
# sudo apt install python3-distutils -y

# install uv
curl -LsSf https://astral.sh/uv/install.sh | sh

sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y

# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start

# We don't install the later version or the earlier versions of python - for dev and "server mimic" environments
# we leave that to uv to install now.

# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh

# install VS code - but ONLY on a native ubuntu install, NOT in WSL
# sudo apt install software-properties-common apt-transport-https
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
# sudo apt update
# sudo apt install code

mkdir -p ~/expo   # -p: do not fail on a re-run when the folder already exists
cd ~/expo

echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
# BUGFIX: the apostrophe in "can't" terminated the original single-quoted string,
# mangling the printed message; use double quotes instead.
echo "### because you can't clone the repos without a key"

git config --global user.email "philip.sargent@gmail.com"
git config --global user.name "Philip Sargent"
git config --global pull.rebase true

#Change this to clone using https?? at least for troggle?
git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings

mkdir -p expofiles
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent

rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
Executable
+67
@@ -0,0 +1,67 @@
|
||||
#! /bin/bash
# create and sanitise files for pushing to repo
# catastrophically forgot to sanitize localsettingsWSL.py once - oops.

# Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
# Philip Sargent 2022/04/12

HOSTNAME=`hostname`
echo "** This copies file to _deploy/${HOSTNAME}/ !"
cd ..

cd troggle
echo `pwd`
echo deprecations.

PYTHON="uv run"

source .venv/bin/activate
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
deactivate
echo diffsettings.
# -f: do not print a spurious error on the first run when the file does not exist yet;
# a genuine permissions failure still leaves the file behind for the test below.
rm -f diffsettings.txt
if test -f "diffsettings.txt"; then
  echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
  exit
fi
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt

echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
$PYTHON manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettings-${HOSTNAME}.py
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"

sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
# BUGFIX: this message previously reported EXPOUSERPASS although it is the admin password being reset.
echo " reset: EXPOADMINUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"

sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""

sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""

mkdir -p _deploy/${HOSTNAME}
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
cp uv.lock _deploy/${HOSTNAME}
cp *.sh _deploy/${HOSTNAME}

ls -tlr *.toml
uv tree

#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
#  $PYTHON reset-django.py
#  $PYTHON manage.py makemigrations
#  $PYTHON manage.py test
#  $PYTHON manage.py inspectdb > troggle-inspectdb.py
#  egrep -i "unable|error" troggle-inspectdb.py
|
||||
Executable
+36
@@ -0,0 +1,36 @@
|
||||
#! /bin/bash
# Pre-push checks: regenerate the introspected model file, count the lines of
# code, rebuild migrations from a clean slate, and run Django's self checks.
# Do these before final testing, *not* just before pushing:
# Changed to use uv not pip, requires manage.py to have uv structured uv comment in it.
PYTHON="uv run"

echo "** Run inspectdb:"
$PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -in "unable|error" troggle-inspectdb.py
echo ""

# Count non-blank, non-comment lines of python and of template HTML.
# Includes all variants of settings.py files.
# FIXME: core/utils.py has 28,000 lines of numbers which inflate the python count.
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt

find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt

echo "** Run reset-django.py - which deletes the database"
# Must come after troggle-inspectdb.py has been generated, since it deletes the database.
$PYTHON reset-django.py
echo "** After cleanup deletion, remake all migrations."
$PYTHON manage.py makemigrations >/dev/null
$PYTHON manage.py migrate

echo "** Now running self check"
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
$PYTHON manage.py check -v 3 --deploy

echo "** Now running test suite"
# $PYTHON manage.py test -v 1

echo ""
# The last line of each count file is the awk END total.
echo `tail -1 lines-of-python.txt` non-comment lines of python.
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.

echo '** If you have an error running manage.py, maybe you are not in an activated venv ? or your manage.py is not managed by uv properly ?'
|
||||
Generated
+235
@@ -0,0 +1,235 @@
|
||||
version = 1
|
||||
requires-python = ">=3.13"
|
||||
|
||||
[[package]]
|
||||
name = "asgiref"
|
||||
version = "3.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "beautifulsoup4"
|
||||
version = "4.13.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "soupsieve" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/85/2e/3e5079847e653b1f6dc647aa24549d68c6addb4c595cc0d902d1b19308ad/beautifulsoup4-4.13.5.tar.gz", hash = "sha256:5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695", size = 622954 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", size = 105113 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.10.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/61/83/153f54356c7c200013a752ce1ed5448573dca546ce125801afca9e1ac1a4/coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", size = 821662 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/08/4166ecfb60ba011444f38a5a6107814b80c34c717bc7a23be0d22e92ca09/coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", size = 217106 },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/d7/b71022408adbf040a680b8c64bf6ead3be37b553e5844f7465643979f7ca/coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", size = 217353 },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/68/21e0d254dbf8972bb8dd95e3fe7038f4be037ff04ba47d6d1b12b37510ba/coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", size = 248350 },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/65/28752c3a896566ec93e0219fc4f47ff71bd2b745f51554c93e8dcb659796/coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", size = 250955 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/eb/ca6b7967f57f6fef31da8749ea20417790bb6723593c8cd98a987be20423/coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", size = 252230 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/29/17a411b2a2a18f8b8c952aa01c00f9284a1fbc677c68a0003b772ea89104/coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", size = 250387 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/89/97a9e271188c2fbb3db82235c33980bcbc733da7da6065afbaa1d685a169/coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", size = 248280 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/c6/0ad7d0137257553eb4706b4ad6180bec0a1b6a648b092c5bbda48d0e5b2c/coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", size = 249894 },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/56/fb3aba936addb4c9e5ea14f5979393f1c2466b4c89d10591fd05f2d6b2aa/coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", size = 219536 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/54/baacb8f2f74431e3b175a9a2881feaa8feb6e2f187a0e7e3046f3c7742b2/coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", size = 220330 },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/8a/82a3788f8e31dee51d350835b23d480548ea8621f3effd7c3ba3f7e5c006/coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", size = 218961 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/a1/590154e6eae07beee3b111cc1f907c30da6fc8ce0a83ef756c72f3c7c748/coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", size = 217819 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/ff/436ffa3cfc7741f0973c5c89405307fe39b78dcf201565b934e6616fc4ad/coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", size = 218040 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/ca/5787fb3d7820e66273913affe8209c534ca11241eb34ee8c4fd2aaa9dd87/coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", size = 259374 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/89/21af956843896adc2e64fc075eae3c1cadb97ee0a6960733e65e696f32dd/coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", size = 261551 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/96/390a69244ab837e0ac137989277879a084c786cf036c3c4a3b9637d43a89/coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", size = 263776 },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/32/cfd6ae1da0a521723349f3129b2455832fc27d3f8882c07e5b6fefdd0da2/coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", size = 261326 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/c4/bf8d459fb4ce2201e9243ce6c015936ad283a668774430a3755f467b39d1/coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", size = 259090 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/5d/a234f7409896468e5539d42234016045e4015e857488b0b5b5f3f3fa5f2b/coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", size = 260217 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/ad/87560f036099f46c2ddd235be6476dd5c1d6be6bb57569a9348d43eeecea/coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", size = 220194 },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/a8/04a482594fdd83dc677d4a6c7e2d62135fff5a1573059806b8383fad9071/coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", size = 221258 },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/ad/7da28594ab66fe2bc720f1bc9b131e62e9b4c6e39f044d9a48d18429cc21/coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", size = 219521 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/7f/c8b6e4e664b8a95254c35a6c8dd0bf4db201ec681c169aae2f1256e05c85/coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", size = 217090 },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/74/3ee14ede30a6e10a94a104d1d0522d5fb909a7c7cac2643d2a79891ff3b9/coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", size = 217365 },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/5f/06ac21bf87dfb7620d1f870dfa3c2cae1186ccbcdc50b8b36e27a0d52f50/coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", size = 248413 },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/bc/cc5bed6e985d3a14228539631573f3863be6a2587381e8bc5fdf786377a1/coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", size = 250943 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/43/6a9fc323c2c75cd80b18d58db4a25dc8487f86dd9070f9592e43e3967363/coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", size = 252301 },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/7c/3e791b8845f4cd515275743e3775adb86273576596dc9f02dca37357b4f2/coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", size = 250302 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/bc/5099c1e1cb0c9ac6491b281babea6ebbf999d949bf4aa8cdf4f2b53505e8/coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", size = 248237 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/51/d346eb750a0b2f1e77f391498b753ea906fde69cc11e4b38dca28c10c88c/coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", size = 249726 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/85/eebcaa0edafe427e93286b94f56ea7e1280f2c49da0a776a6f37e04481f9/coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", size = 219825 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/f7/6d43e037820742603f1e855feb23463979bf40bd27d0cde1f761dcc66a3e/coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", size = 220618 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/b0/ed9432e41424c51509d1da603b0393404b828906236fb87e2c8482a93468/coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", size = 219199 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/54/5a7ecfa77910f22b659c820f67c16fc1e149ed132ad7117f0364679a8fa9/coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", size = 217833 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/0e/25672d917cc57857d40edf38f0b867fb9627115294e4f92c8fcbbc18598d/coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", size = 218048 },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/7c/0b2b4f1c6f71885d4d4b2b8608dcfc79057adb7da4143eb17d6260389e42/coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", size = 259549 },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/73/abb8dab1609abec7308d83c6aec547944070526578ee6c833d2da9a0ad42/coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", size = 261715 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/d1/abf31de21ec92731445606b8d5e6fa5144653c2788758fcf1f47adb7159a/coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", size = 263969 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/b3/ef274927f4ebede96056173b620db649cc9cb746c61ffc467946b9d0bc67/coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", size = 261408 },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/fc/83ca2812be616d69b4cdd4e0c62a7bc526d56875e68fd0f79d47c7923584/coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", size = 259168 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/4f/e0779e5716f72d5c9962e709d09815d02b3b54724e38567308304c3fc9df/coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", size = 260317 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/fe/4247e732f2234bb5eb9984a0888a70980d681f03cbf433ba7b48f08ca5d5/coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", size = 220600 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/a0/f294cff6d1034b87839987e5b6ac7385bec599c44d08e0857ac7f164ad0c/coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", size = 221714 },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/18/fa1afdc60b5528d17416df440bcbd8fd12da12bfea9da5b6ae0f7a37d0f7/coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", size = 219735 },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/b6/fff6609354deba9aeec466e4bcaeb9d1ed3e5d60b14b57df2a36fb2273f2/coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", size = 208736 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "django"
|
||||
version = "5.2.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "asgiref" },
|
||||
{ name = "sqlparse" },
|
||||
{ name = "tzdata", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4c/8c/2a21594337250a171d45dda926caa96309d5136becd1f48017247f9cdea0/django-5.2.6.tar.gz", hash = "sha256:da5e00372763193d73cecbf71084a3848458cecf4cee36b9a1e8d318d114a87b", size = 10858861 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/af/6593f6d21404e842007b40fdeb81e73c20b6649b82d020bb0801b270174c/django-5.2.6-py3-none-any.whl", hash = "sha256:60549579b1174a304b77e24a93d8d9fafe6b6c03ac16311f3e25918ea5a20058", size = 8303111 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "piexif"
|
||||
version = "1.1.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fa/84/a3f25cec7d0922bf60be8000c9739d28d24b6896717f44cc4cfb843b1487/piexif-1.1.3.zip", hash = "sha256:83cb35c606bf3a1ea1a8f0a25cb42cf17e24353fd82e87ae3884e74a302a5f1b", size = 1011134 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/d8/6f63147dd73373d051c5eb049ecd841207f898f50a5a1d4378594178f6cf/piexif-1.1.3-py2.py3-none-any.whl", hash = "sha256:3bc435d171720150b81b15d27e05e54b8abbde7b4242cddd81ef160d283108b6", size = 20691 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pillow"
|
||||
version = "11.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652 },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443 },
|
||||
{ url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574 },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407 },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841 },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450 },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055 },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110 },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547 },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554 },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132 },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001 },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124 },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186 },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803 },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520 },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116 },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246 },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699 },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789 },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385 },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129 },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580 },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694 },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888 },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500 },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyaes"
|
||||
version = "1.6.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/44/66/2c17bae31c906613795711fc78045c285048168919ace2220daa372c7d72/pyaes-1.6.1.tar.gz", hash = "sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f", size = 28536 }
|
||||
|
||||
[[package]]
|
||||
name = "soupsieve"
|
||||
version = "2.8"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlparse"
|
||||
version = "0.5.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "troggle"
|
||||
version = "2025.9.26"
|
||||
source = { virtual = "." }
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "beautifulsoup4" },
|
||||
{ name = "coverage" },
|
||||
{ name = "django" },
|
||||
{ name = "piexif" },
|
||||
{ name = "pillow" },
|
||||
{ name = "pyaes" },
|
||||
{ name = "unidecode" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "beautifulsoup4", specifier = ">=4.12.3" },
|
||||
{ name = "coverage", specifier = ">=7.6.9" },
|
||||
{ name = "django", specifier = ">=5.2.3" },
|
||||
{ name = "piexif", specifier = ">=1.1.3" },
|
||||
{ name = "pillow", specifier = ">=11.0.0" },
|
||||
{ name = "pyaes", specifier = ">=1.6.1" },
|
||||
{ name = "unidecode", specifier = ">=1.3.8" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.15.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tzdata"
|
||||
version = "2025.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unidecode"
|
||||
version = "1.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/94/7d/a8a765761bbc0c836e397a2e48d498305a865b70a8600fd7a942e85dcf63/Unidecode-1.4.0.tar.gz", hash = "sha256:ce35985008338b676573023acc382d62c264f307c8f7963733405add37ea2b23", size = 200149 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/b7/559f59d57d18b44c6d1250d2eeaa676e028b9c527431f5d0736478a73ba1/Unidecode-1.4.0-py3-none-any.whl", hash = "sha256:c3c7606c27503ad8d501270406e345ddb480a7b5f38827eafe4fa82a137f0021", size = 235837 },
|
||||
]
|
||||
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
# now using uv, unbelieveably simpler.
|
||||
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 24.04 with all the gubbins already installed
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog24.04.sh runniing it in /home/username/
|
||||
python3 --version
|
||||
cd ~/expo/troggle
|
||||
echo "-- EXPO folder [current directory]: `pwd`"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder: ${TROGDIR}"
|
||||
|
||||
cp dev.toml pyproject.toml
|
||||
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
|
||||
|
||||
uv self update
|
||||
uv sync
|
||||
|
||||
|
||||
# fudge for philip's laptop prior to M2 SSD upgrade
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
uv pip list
|
||||
|
||||
echo "Django version:`uv run django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/expo/troggle'
|
||||
'uv run django-admin'
|
||||
'uv run manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
|
||||
'uv run manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'uv run databaseReset.py reset INIT'
|
||||
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
@@ -0,0 +1,200 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
from secret_credentials import *
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that secret_credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets will be imported from credentials.py in future
|
||||
|
||||
SQLITEFILE = "/home/expo/troggle.sqlite" # can be ':memory:'
|
||||
|
||||
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
|
||||
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
SERVERPORT = "8000" # not needed as it is the default
|
||||
|
||||
ADMINS = (
|
||||
('Philip', 'philip.sargent@klebos.eu'),
|
||||
)
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent
|
||||
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / "templates"
|
||||
MEDIA_ROOT = TROGGLE_PATH / "media"
|
||||
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
|
||||
|
||||
# FILES = Path('/mnt/d/expofiles/')
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos"
|
||||
PHOTOS_YEAR = "2025"
|
||||
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
|
||||
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
|
||||
LOGFILE = PYTHON_PATH / "troggle.log"
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||
# trailing slash if there is a path component (optional in other cases).
|
||||
MEDIA_URL = "/site-media/"
|
||||
|
||||
DIR_ROOT = Path("") # this should end in / if a value is given
|
||||
URL_ROOT = "/"
|
||||
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
|
||||
|
||||
# Note that these constants are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
# executables:
|
||||
CAVERN = "cavern" # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = "survexport" # for parsing .3d files and producing .pos files
|
||||
|
||||
DBSQLITE = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"NAME": SQLITEFILE,
|
||||
# 'NAME' : ':memory:',
|
||||
"USER": "expo", # Not used with sqlite3.
|
||||
"PASSWORD": "sekrit", # Not used with sqlite3.
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4",
|
||||
},
|
||||
"NAME": "troggle", # Or path to database file if using sqlite3.
|
||||
"USER": "expo",
|
||||
"PASSWORD": MARIADB_SERVER_PASSWORD,
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [TEMPLATE_PATH],
|
||||
"OPTIONS": {
|
||||
"debug": "DEBUG",
|
||||
"context_processors": [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/context.py - only used in expedition.html
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.i18n",
|
||||
"django.template.context_processors.media", # includes a variable MEDIA_URL
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.tz",
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
"loaders": [
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
# EXPOWEB_URL = "" # defunct, removed.
|
||||
# SCANS_URL = '/survey_scans/' # defunct, removed.
|
||||
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
sys.path.append(str(PYTHON_PATH))
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
|
||||
# TEST_RUNNER = "django.test.runner.DiscoverRunner"
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal : 'bash os-survey.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
|
||||
echo '###'
|
||||
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
|
||||
echo '###'
|
||||
sudo apt install tunnelx therion -y
|
||||
sudo apt install survex-aven -y
|
||||
sudo apt install gpsprune qgis -y
|
||||
|
||||
|
||||
cd ~/expo
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
# sudo apt install python-is-python3 -y
|
||||
python --version : ensure python is an alias for python3 not python2.7
|
||||
ssh -V
|
||||
sudo apt update -y
|
||||
sudo apt dist-upgrade -y
|
||||
sudo apt autoremove -y
|
||||
|
||||
|
||||
# Already in Ubuntu 24.04 on WSL:
|
||||
# sudo apt install git -y
|
||||
# sudo apt install wget gpg
|
||||
# sudo apt install sftp -y
|
||||
# sudo apt install openssh-client -y
|
||||
# sudo apt install rsync
|
||||
|
||||
# Now using uv not pip:
|
||||
# sudo apt install python3-pip -y
|
||||
|
||||
sudo apt install sqlite3 -y
|
||||
sudo apt install gedit -y
|
||||
sudo apt install tig gitg meld -y
|
||||
|
||||
# python formatting https://docs.astral.sh/ruff/
|
||||
sudo snap install ruff
|
||||
|
||||
# # do not actually use this any more
|
||||
# sudo useradd expo
|
||||
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
|
||||
|
||||
# as debian does not install everything that ubuntu does, you need:
|
||||
sudo apt install python3-venv -y
|
||||
sudo apt install python3-dev -y
|
||||
# sudo apt install python3-distutils -y
|
||||
|
||||
# install uv
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
sudo apt install mariadb-server -y
|
||||
sudo apt install libmariadb-dev -y
|
||||
|
||||
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
|
||||
# sudo service mysql start
|
||||
|
||||
|
||||
# We don't install the later version or the earlier versions of python - for dev and "sever mimic" environments
|
||||
# we leave that to uv to install now.
|
||||
|
||||
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
|
||||
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
|
||||
|
||||
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
|
||||
# sudo apt install software-properties-common apt-transport-https
|
||||
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
|
||||
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
|
||||
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
|
||||
# sudo apt update
|
||||
# sudo apt install code
|
||||
|
||||
|
||||
mkdir ~/expo
|
||||
cd ~/expo
|
||||
|
||||
echo '###'
|
||||
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
|
||||
echo '### because you can't clone the repos without a key
|
||||
|
||||
git config --global user.email "philip.sargent@gmail.com"
|
||||
git config --global user.name "Philip Sargent"
|
||||
git config --global pull.rebase true
|
||||
|
||||
#Change this to clone using https?? at least for troggle?
|
||||
git clone ssh://expo@expo.survex.com/home/expo/troggle
|
||||
git clone ssh://expo@expo.survex.com/home/expo/loser
|
||||
git clone ssh://expo@expo.survex.com/home/expo/expoweb
|
||||
git clone ssh://expo@expo.survex.com/home/expo/drawings
|
||||
|
||||
mkdir expofiles
|
||||
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
|
||||
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
|
||||
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
Executable
+63
@@ -0,0 +1,63 @@
|
||||
#! /bin/bash
|
||||
# create and sanitise files for pushing to repo
|
||||
# catatrophically forgot to sanitize localsettingsWSL.py - oops.
|
||||
|
||||
#Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
|
||||
# Philip Sargent 2022/04/12
|
||||
|
||||
HOSTNAME=`hostname`
|
||||
echo "** This copies file to _deploy/${HOSTNAME}/ !"
|
||||
cd ..
|
||||
|
||||
cd troggle
|
||||
echo `pwd`
|
||||
echo deprecations.
|
||||
|
||||
PYTHON="uv run"
|
||||
|
||||
source .venv/bin/activate
|
||||
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
|
||||
deactivate
|
||||
echo diffsettings.
|
||||
rm diffsettings.txt
|
||||
if test -f "diffsettings.txt"; then
|
||||
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
|
||||
exit
|
||||
fi
|
||||
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
|
||||
|
||||
echo inspectdb.
|
||||
# this next line requires database setting to be troggle.sqlite:
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
#egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo remove passwords.
|
||||
cp localsettings.py localsettings-${HOSTNAME}.py
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
|
||||
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
|
||||
|
||||
mkdir -p _deploy/${HOSTNAME}
|
||||
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
|
||||
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
|
||||
cp *.sh _deploy/${HOSTNAME}
|
||||
|
||||
#
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# in ./pre-run.sh
|
||||
# $PYTHON reset-django.py
|
||||
# $PYTHON manage.py makemigrations
|
||||
# $PYTHON manage.py test
|
||||
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -i "unable|error" troggle-inspectdb.py
|
||||
Executable
+36
@@ -0,0 +1,36 @@
|
||||
#! /bin/bash
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# Changed to use uv not pip, requires manage.py to have uv structured uv comment in it.
|
||||
PYTHON="uv run"
|
||||
|
||||
echo "** Run inspectdb:"
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo ""
|
||||
# count non-blank lines of python and template HTML code
|
||||
# includes all variants of settings.py files
|
||||
|
||||
# fix this as core/utils.py has 28,000 lines of numbers.
|
||||
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
|
||||
|
||||
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
|
||||
|
||||
echo "** Run reset-django.py - which deletes the database"
|
||||
# This deletes the database so must run after generating troggle-inspectdb.py
|
||||
$PYTHON reset-django.py
|
||||
echo "** After cleanup deletion, remake all migrations."
|
||||
$PYTHON manage.py makemigrations >/dev/null
|
||||
$PYTHON manage.py migrate
|
||||
|
||||
echo "** Now running self check"
|
||||
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
|
||||
$PYTHON manage.py check -v 3 --deploy
|
||||
|
||||
echo "** Now running test suite"
|
||||
# $PYTHON manage.py test -v 1
|
||||
|
||||
echo ""
|
||||
echo `tail -1 lines-of-python.txt` non-comment lines of python.
|
||||
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
|
||||
|
||||
echo '** If you have an error running manage.py, maybe you are not in an activated venv ? or your manage.py is not managed by uv properly ?'
|
||||
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
# now using uv, unbelieveably simpler.
|
||||
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 24.04 with all the gubbins already installed
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog24.04.sh runniing it in /home/username/
|
||||
python3 --version
|
||||
cd ~/expo/troggle
|
||||
echo "-- EXPO folder [current directory]: `pwd`"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder: ${TROGDIR}"
|
||||
|
||||
cp dev.toml pyproject.toml
|
||||
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
|
||||
|
||||
uv self update
|
||||
uv sync
|
||||
|
||||
|
||||
# fudge for philip's laptop prior to M2 SSD upgrade
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
uv pip list
|
||||
|
||||
echo "Django version:`uv run django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/expo/troggle'
|
||||
'uv run django-admin'
|
||||
'uv run manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
|
||||
'uv run manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'uv run databaseReset.py reset INIT'
|
||||
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
@@ -40,9 +40,9 @@ sudo apt install python3-dev -y
|
||||
|
||||
# default since 22.04
|
||||
# sudo apt install python3.10
|
||||
sudo apt install python3.10-venv -y
|
||||
sudo apt install python3.10-dev -y
|
||||
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.10 1
|
||||
sudo apt install python3.11-venv -y
|
||||
sudo apt install python3.11-dev -y
|
||||
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1
|
||||
|
||||
sudo apt install mariadb-server -y
|
||||
sudo apt install libmariadb-dev -y
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
from secret_credentials import *
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4", # To permit emojis in logbook entries and elsewhere
|
||||
}, 'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : MARIADB_SERVER_PASSWORD, # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
+28
-21
@@ -1,6 +1,4 @@
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
@@ -15,15 +13,26 @@ that e.g. expofiles can be on a different filesystem.
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
NOTE this file is vastly out of sync with troggle/_deploy/wsl/localsettings.py
|
||||
NOTE this file is out of sync with troggle/_deploy/wsl/localsettings.py
|
||||
which is the most recent version used in active maintenance. There should be
|
||||
essential differences, but there and many, many non-essential differences which
|
||||
should be eliminated for clarity and to use modern idioms. 8 March 2023.
|
||||
should be eliminated for clarity and to use modern idioms.
|
||||
Edited 31/12/2024
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# DO NOT check this file into the git repo - it contains real passwords.
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
from secret_credentials import *
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "django-test@klebos.net"
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
@@ -31,21 +40,18 @@ EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote se
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4", # To permit emojis in logbook entries and elsewhere
|
||||
}, 'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : 'uFqP56B4XleeyIW', # Not used with sqlite3.
|
||||
'PASSWORD' : MARIADB_SERVER_PASSWORD, # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = '161:gosser'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOADMINUSERPASS = 'gosser:161'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
@@ -54,7 +60,7 @@ sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
|
||||
PHOTOS_YEAR = "2023"
|
||||
PHOTOS_YEAR = "2024"
|
||||
# add in 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
|
||||
|
||||
@@ -90,12 +96,12 @@ TEMPLATES = [
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = False
|
||||
DEBUG = True
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .3d files
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .2d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
@@ -137,14 +143,15 @@ SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos"
|
||||
|
||||
#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
#PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
@@ -154,7 +161,7 @@ LOGFILE = '/var/log/troggle/troggle.log'
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log'
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
#STATIC_URL = str(STATIC_URL) + "/"
|
||||
#MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,179 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
NOTE this file is out of sync with troggle/_deploy/wsl/localsettings.py
|
||||
which is the most recent version used in active maintenance. There should be
|
||||
essential differences, but there and many, many non-essential differences which
|
||||
should be eliminated for clarity and to use modern idioms.
|
||||
Edited 31/12/2024
|
||||
"""
|
||||
|
||||
# print(" * importing troggle/localsettings.py")
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
from secret_credentials import *
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that secret_credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever
|
||||
#SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4", # To permit emojis in logbook entries and elsewhere
|
||||
}, 'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : MARIADB_SERVER_PASSWORD, # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
# Define the path to the django app (troggle in this case)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
|
||||
PHOTOS_YEAR = "2024"
|
||||
# add in 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "290", "291", "359", "264", "258", "204", "76", "107"]
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
PYTHON_PATH + "templates"
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
'django.contrib.auth.context_processors.auth', # knowledge of logged-on user & permissions
|
||||
'core.context.troggle_context', # in core/troggle.py
|
||||
'django.template.context_processors.debug',
|
||||
#'django.template.context_processors.request', # copy of current request, added in trying to make csrf work
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media', # includes a variable MEDIA_URL
|
||||
'django.template.context_processors.static', # includes a variable STATIC_URL
|
||||
'django.template.context_processors.tz',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader', #For each app, inc admin, in INSTALLED_APPS, loader looks for /templates
|
||||
# insert your own TEMPLATE_LOADERS here
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = True
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
|
||||
# executables:
|
||||
CAVERN = 'cavern' # for parsing .svx files and producing .2d files
|
||||
SURVEXPORT = 'survexport' # for parsing .3d files and producing .pos files
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
LIBDIR = Path(REPOS_ROOT_PATH) / 'lib' / PV
|
||||
|
||||
EXPOWEB = Path(REPOS_ROOT_PATH + 'expoweb/')
|
||||
SURVEYS = REPOS_ROOT_PATH
|
||||
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
PHOTOS_ROOT = REPOS_ROOT_PATH + 'expofiles/photos/'
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / 'templates'
|
||||
MEDIA_ROOT = TROGGLE_PATH / 'media'
|
||||
JSLIB_ROOT = TROGGLE_PATH / 'media' / 'jslib' # used for CaveViewer JS utility
|
||||
|
||||
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
|
||||
#URL_ROOT = 'http://expo.survex.com/'
|
||||
URL_ROOT = '/'
|
||||
DIR_ROOT = Path("") #this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SURVEYS_URL = '/survey_scans/'
|
||||
|
||||
REPOS_ROOT_PATH = Path(REPOS_ROOT_PATH)
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos"
|
||||
|
||||
#EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
#PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views_surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
LOGFILE = '/var/log/troggle/troggle.log' # hmm. Not used since 2022
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log' # hmm. Not used since 2022
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# print(" + finished importing troggle/localsettings.py")
|
||||
@@ -1,121 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import urllib.parse
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# DO NOT check this file into the git repo - it contains real passwords. [not this copy]
|
||||
SECRET_KEY = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"
|
||||
EXPOUSERPASS = "nope"
|
||||
EXPOADMINUSERPASS = "nope"
|
||||
EMAIL_HOST_PASSWORD = "nope"
|
||||
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
'NAME' : 'troggle', # Or path to database file if using sqlite3.
|
||||
'USER' : 'expo', # Not used with sqlite3.
|
||||
'PASSWORD' : 'not a real password', # Not used with sqlite3.
|
||||
'HOST' : '', # Set to empty string for localhost. Not used with sqlite3.
|
||||
'PORT' : '', # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOUSERPASS = "nnn:gggggg"
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
REPOS_ROOT_PATH = '/home/expo/'
|
||||
sys.path.append(REPOS_ROOT_PATH)
|
||||
sys.path.append(REPOS_ROOT_PATH + 'troggle')
|
||||
# Define the path to the django app (troggle in this case)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [
|
||||
PYTHON_PATH + "templates"
|
||||
],
|
||||
'OPTIONS': {
|
||||
'debug': 'DEBUG',
|
||||
'context_processors': [
|
||||
'django.contrib.auth.context_processors.auth',
|
||||
'core.context.troggle_context',
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.i18n',
|
||||
'django.template.context_processors.media',
|
||||
'django.template.context_processors.static',
|
||||
'django.template.context_processors.tz',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
'loaders': [
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader',
|
||||
# insert your TEMPLATE_LOADERS here
|
||||
]
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
PUBLIC_SITE = True
|
||||
|
||||
# This should be False for normal running
|
||||
DEBUG = True
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH + 'loser/'
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH + 'drawings/'
|
||||
|
||||
CAVERN = 'cavern'
|
||||
THREEDTOPOS = 'survexport'
|
||||
EXPOWEB = REPOS_ROOT_PATH + 'expoweb/'
|
||||
SURVEYS = REPOS_ROOT_PATH
|
||||
SURVEY_SCANS = REPOS_ROOT_PATH + 'expofiles/surveyscans/'
|
||||
FILES = REPOS_ROOT_PATH + 'expofiles'
|
||||
CAVEDESCRIPTIONS = os.path.join(EXPOWEB, "cave_data")
|
||||
ENTRANCEDESCRIPTIONS = os.path.join(EXPOWEB, "entrance_data")
|
||||
|
||||
CACHEDIR = REPOS_ROOT_PATH + 'expowebcache/'
|
||||
THREEDCACHEDIR = CACHEDIR + '3d/'
|
||||
THUMBNAILCACHE = CACHEDIR + 'thumbs'
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH + 'troggle/'
|
||||
|
||||
URL_ROOT = 'http://expo.survex.com/'
|
||||
DIR_ROOT = ''#this should end in / if a value is given
|
||||
EXPOWEB_URL = '/'
|
||||
SURVEYS_URL = '/survey_scans/'
|
||||
EXPOFILES = urllib.parse.urljoin(REPOS_ROOT_PATH, 'expofiles/')
|
||||
PHOTOS_URL = urllib.parse.urljoin(URL_ROOT, '/photos/')
|
||||
|
||||
# MEDIA_URL is used by urls.py in a regex. See urls.py & core/views/surveys.py
|
||||
MEDIA_URL = '/site_media/'
|
||||
|
||||
MEDIA_ROOT = REPOS_ROOT_PATH + '/troggle/media/'
|
||||
|
||||
STATIC_URL = urllib.parse.urljoin(URL_ROOT , '/static/') # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = urllib.parse.urljoin(URL_ROOT , '/javascript/') # always fails, try to revive it ?
|
||||
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
LOGFILE = '/var/log/troggle/troggle.log'
|
||||
IMPORTLOGFILE = '/var/log/troggle/import.log'
|
||||
|
||||
# add in 290, 291, 358 when they don't make it crash horribly
|
||||
NOTABLECAVESHREFS = [ "264", "258", "204", "76", "107"]
|
||||
@@ -1,23 +0,0 @@
|
||||
#This requirements txt matches the libaries as of 2023-07-09 on expo.survex.com <Debian GNU/Linux 11 (bullseye)>
|
||||
|
||||
#Nb on the server asgiref==3.3.0, however this conflicts with the Django==3.2.12 requirement
|
||||
asgiref==3.3.2
|
||||
Django==3.2.12
|
||||
docutils==0.16
|
||||
packaging==20.9
|
||||
Pillow==8.1.2
|
||||
pytz==2021.1
|
||||
sqlparse==0.4.1
|
||||
Unidecode==1.2.0
|
||||
beautifulsoup4==4.9.3
|
||||
piexif==1.1.3
|
||||
|
||||
#Not installed on expo.survex.com
|
||||
#black==23.3
|
||||
#click==8.1.3
|
||||
#coverage==7.2
|
||||
#isort==5.12.0
|
||||
#mypy-extensions==1.0.0
|
||||
#pathspec==0.11
|
||||
#platformdirs==3.8
|
||||
#ruff==0.0.245
|
||||
@@ -1,15 +1,112 @@
|
||||
Instructions for setting up new expo debian server/VM
|
||||
For Debian Stretch, June 2019.
|
||||
|
||||
[Note added March 2021:
|
||||
W says: Tue, Apr 23, 2024
|
||||
Javascript gets installed in /usr/share/javascript. You can find that out by asking dpkg: dpkg -S openlayers (or reading debian wiki for javascript packaging)
|
||||
If you use npm it just puts packages 'here' (i.e. in a node_packages dir in the current directory). I've been avoiding that so far.
|
||||
openlayers wasn't in the old prospecting map - that was just JPEGs. It was in the slippy map I never really got working properly. It's also in martin's map-app.
|
||||
But they were just examples of javascript packages.
|
||||
|
||||
|
||||
|
||||
=======
|
||||
See also http://expo.survex.com/handbook/troggle/serverconfig.html
|
||||
and troggle/README.txt
|
||||
]
|
||||
|
||||
For Debian Bullseye (Debian 11) June 2022
|
||||
adduser expo
|
||||
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
|
||||
apt install python-django apache2 mysql-server survex make rsync
|
||||
apt install libjs-openlayers make
|
||||
apt install python-django apache2 certbot mysql-server survex make rsync
|
||||
apt install libjs-openlayers
|
||||
apt install git mercurial
|
||||
(certbot does https certs)
|
||||
|
||||
for boe:
|
||||
apt install libcgi-session-perl libcrypt-passwdmd5-perl libfile-slurp-perl libgit-wrapper-perl libhtml-template-perl libhtml-template-pro-perl libmime-lite-perl libtext-password-pronounceable-perl libtime-parsedate-perl libuuid-tiny-perl libcrypt-cracklib-perl
|
||||
|
||||
setup apache configs for cucc and expo
|
||||
#disable default website
|
||||
a2dissite 000-default
|
||||
a2ensite cucc-ssl
|
||||
a2ensite expo-ssl
|
||||
#a2enmod cgid
|
||||
|
||||
Boe config:
|
||||
Alias /boe /home/expo/boe/boc/boc.pl
|
||||
<Directory /home/expo/boe/boc>
|
||||
AddHandler cgi-script .pl
|
||||
SetHandler cgi-script
|
||||
Options +ExecCGI
|
||||
Require all granted
|
||||
</Directory>
|
||||
And remember to set both program and data dir to be
|
||||
www-data:www-data
|
||||
(optionally make file group read/write by treasurer account)
|
||||
create empty repo by clicking create in boe interface
|
||||
then set names in 'settings'
|
||||
|
||||
Set up mysql (as root)
|
||||
mysql -p
|
||||
CREATE DATABASE troggle;
|
||||
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
|
||||
|
||||
install django:
|
||||
sudo apt install python3-django python3-django-registration python3-django-imagekit python3-django-tinymce fonts-freefont-ttf libapache2-mod-wsgi-py3
|
||||
|
||||
python-django-imagekit comes from https://salsa.debian.org/python-team/modules/python-django-imagekit
|
||||
python-django-tinymce comes from https://salsa.debian.org/python-team/modules/python-django-tinymce
|
||||
(both modified for stretch/python2). packages under /home/wookey/packages/
|
||||
|
||||
need fonts-freefont-ttf (to have truetype freesans available for troggle via PIL)
|
||||
need libapache2-mod-wsgi for apache wsgi support.
|
||||
|
||||
To keep bots from overloading server use mpm_event intead of mpm_worker
|
||||
sudo a2dismod mpm_prefork
|
||||
sudo a2enmod mpm_event
|
||||
|
||||
Also adjust the numbers in the config file (~/config/apache/mods-available/mpm_event.conf)
|
||||
for our tiddly server:
|
||||
StartServers 1
|
||||
MinSpareThreads 2
|
||||
MaxSpareThreads 15
|
||||
ThreadLimit 25
|
||||
ThreadsPerChild 5
|
||||
MaxRequestWorkers 25
|
||||
MaxConnectionsPerChild 500
|
||||
|
||||
Kanboard:
|
||||
debian python3-kanboard is a bit too simple, so use http://kanboard.org/ instead
|
||||
unpack release files into /home/expo/kanboard
|
||||
add this stanza to ~/config/apache/expo.conf
|
||||
Alias /kanboard /home/expo/kanboard
|
||||
<Directory /home/expo/kanboard>
|
||||
AllowOverride All
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
Dependencies are php and php-bcmath
|
||||
php with mpm_worker and cgi is simple, but we are not using
|
||||
mpm_worker any more so this is not possible anyway.
|
||||
|
||||
php with mpm_event needs fpm mechanism and apache proxy_fcgi enabled
|
||||
This mechanism is a lot more efficient on the server.
|
||||
Good docs here: https://www.digitalocean.com/community/tutorials/how-to-configure-apache-http-with-mpm-event-and-php-fpm-on-ubuntu-18-04
|
||||
|
||||
apt install php-fpm libapache2-mod-fcgid
|
||||
sudo a2dismod php7.4 (this normal config works via mpm_worker)
|
||||
sudo a2enconf php7.4-fpm (this one works with mpm_event via proxy magic)
|
||||
sudo a2enmod proxy
|
||||
sudo a2enmod proxy_fcgi
|
||||
|
||||
apt install php-bcmath (for kanboard)
|
||||
|
||||
|
||||
------------------------------
|
||||
For Debian Stretch, June 2019.
|
||||
-----------------------------
|
||||
adduser expo
|
||||
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
|
||||
apt install python-django apache2 mysql-server survex make rsync
|
||||
apt install libjs-openlayers make
|
||||
apt install git mercurial mercurial-server?
|
||||
|
||||
for boe:
|
||||
@@ -45,10 +142,10 @@ a2enmod cgid
|
||||
Boe config:
|
||||
Alias /boe /home/expo/boe/boc/boc.pl
|
||||
<Directory /home/expo/boe/boc>
|
||||
AddHandler cgi-script .pl
|
||||
SetHandler cgi-script
|
||||
Options +ExecCGI
|
||||
Require all granted
|
||||
AddHandler cgi-script .pl
|
||||
SetHandler cgi-script
|
||||
Options +ExecCGI
|
||||
Require all granted
|
||||
</Directory>
|
||||
And remember to set both program and data dir to be
|
||||
www-data:www-data
|
||||
@@ -59,7 +156,7 @@ then set names in 'settings'
|
||||
Set up mysql (as root)
|
||||
mysql -p
|
||||
CREATE DATABASE troggle;
|
||||
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
|
||||
GRANT ALL PRIVILEGES ON troggle.* TO 'expo'@'localhost' IDENTIFIED BY 'somepassword';
|
||||
|
||||
install django:
|
||||
NO!
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
see .gitignore for those files which DO contain the secret passwords
|
||||
@@ -1,3 +1,9 @@
|
||||
W says: Tue, Apr 23, 2024
|
||||
Javascript gets installed in /usr/share/javascript. You can find that out by asking dpkg: dpkg -S openlayers (or reading debian wiki for javascript packaging)
|
||||
If you use npm it just puts packages 'here' (i.e. in a node_packages dir in the current directory). I've been avoiding that so far.
|
||||
openlayers wasn't in the old prospecting map - that was just JPEGs. It was in the slippy map I never really got working properly. It's also in martin's map-app.
|
||||
But they were just examples of javascript packages.
|
||||
|
||||
adduser expo
|
||||
apt install openssh-server mosh tmux mc zile emacs-nox mc most ncdu
|
||||
apt install python-django apache2 mysql-server survex make rsync
|
||||
|
||||
@@ -0,0 +1,200 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
from secret_credentials import *
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that secret_credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets will be imported from credentials.py in future
|
||||
|
||||
SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
|
||||
|
||||
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
|
||||
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
SERVERPORT = "8000" # not needed as it is the default
|
||||
|
||||
ADMINS = (
|
||||
('Philip', 'philip.sargent@klebos.eu'),
|
||||
)
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent
|
||||
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / "templates"
|
||||
MEDIA_ROOT = TROGGLE_PATH / "media"
|
||||
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
|
||||
|
||||
# FILES = Path('/mnt/d/expofiles/')
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos"
|
||||
PHOTOS_YEAR = "2023"
|
||||
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
|
||||
|
||||
|
||||
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
|
||||
LOGFILE = PYTHON_PATH / "troggle.log"
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||
# trailing slash if there is a path component (optional in other cases).
|
||||
MEDIA_URL = "/site-media/"
|
||||
|
||||
DIR_ROOT = Path("") # this should end in / if a value is given
|
||||
URL_ROOT = "/"
|
||||
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
|
||||
|
||||
# Note that these constants are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
# executables:
|
||||
CAVERN = "cavern" # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = "survexport" # for parsing .3d files and producing .pos files
|
||||
|
||||
DBSQLITE = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"NAME": SQLITEFILE,
|
||||
# 'NAME' : ':memory:',
|
||||
"USER": "expo", # Not used with sqlite3.
|
||||
"PASSWORD": "sekrit", # Not used with sqlite3.
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4",
|
||||
},
|
||||
"NAME": "troggle", # Or path to database file if using sqlite3.
|
||||
"USER": "expo",
|
||||
"PASSWORD": MARIADB_SERVER_PASSWORD,
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [TEMPLATE_PATH],
|
||||
"OPTIONS": {
|
||||
"debug": "DEBUG",
|
||||
"context_processors": [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/context.py - only used in expedition.html
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.i18n",
|
||||
"django.template.context_processors.media", # includes a variable MEDIA_URL
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.tz",
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
"loaders": [
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
# EXPOWEB_URL = "" # defunct, removed.
|
||||
# SCANS_URL = '/survey_scans/' # defunct, removed.
|
||||
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
sys.path.append(str(PYTHON_PATH))
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
|
||||
# TEST_RUNNER = "django.test.runner.DiscoverRunner"
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal : 'bash os-survey.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
|
||||
echo '###'
|
||||
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
|
||||
echo '###'
|
||||
sudo apt install tunnelx therion -y
|
||||
sudo apt install survex-aven -y
|
||||
sudo apt install gpsprune qgis -y
|
||||
|
||||
|
||||
cd ~/expo
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
# sudo apt install python-is-python3 -y
|
||||
python --version : ensure python is an alias for python3 not python2.7
|
||||
ssh -V
|
||||
sudo apt update -y
|
||||
sudo apt dist-upgrade -y
|
||||
sudo apt autoremove -y
|
||||
|
||||
|
||||
# Already in Ubuntu 24.04 on WSL:
|
||||
# sudo apt install git -y
|
||||
# sudo apt install wget gpg
|
||||
# sudo apt install sftp -y
|
||||
# sudo apt install openssh-client -y
|
||||
# sudo apt install rsync
|
||||
|
||||
# Now using uv not pip:
|
||||
# sudo apt install python3-pip -y
|
||||
|
||||
sudo apt install sqlite3 -y
|
||||
sudo apt install gedit -y
|
||||
sudo apt install tig gitg meld -y
|
||||
|
||||
# python formatting https://docs.astral.sh/ruff/
|
||||
sudo snap install ruff
|
||||
|
||||
# # do not actually use this any more
|
||||
# sudo useradd expo
|
||||
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
|
||||
|
||||
# as debian does not install everything that ubuntu does, you need:
|
||||
sudo apt install python3-venv -y
|
||||
sudo apt install python3-dev -y
|
||||
# sudo apt install python3-distutils -y
|
||||
|
||||
# install uv
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
sudo apt install mariadb-server -y
|
||||
sudo apt install libmariadb-dev -y
|
||||
|
||||
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
|
||||
# sudo service mysql start
|
||||
|
||||
|
||||
# We don't install the later version or the earlier versions of python - for dev and "sever mimic" environments
|
||||
# we leave that to uv to install now.
|
||||
|
||||
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
|
||||
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
|
||||
|
||||
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
|
||||
# sudo apt install software-properties-common apt-transport-https
|
||||
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
|
||||
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
|
||||
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
|
||||
# sudo apt update
|
||||
# sudo apt install code
|
||||
|
||||
|
||||
mkdir ~/expo
|
||||
cd ~/expo
|
||||
|
||||
echo '###'
|
||||
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
|
||||
echo '### because you can't clone the repos without a key
|
||||
|
||||
git config --global user.email "philip.sargent@gmail.com"
|
||||
git config --global user.name "Philip Sargent"
|
||||
git config --global pull.rebase true
|
||||
|
||||
#Change this to clone using https?? at least for troggle?
|
||||
git clone ssh://expo@expo.survex.com/home/expo/troggle
|
||||
git clone ssh://expo@expo.survex.com/home/expo/loser
|
||||
git clone ssh://expo@expo.survex.com/home/expo/expoweb
|
||||
git clone ssh://expo@expo.survex.com/home/expo/drawings
|
||||
|
||||
mkdir expofiles
|
||||
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
|
||||
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
|
||||
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
Executable
+63
@@ -0,0 +1,63 @@
|
||||
#! /bin/bash
|
||||
# create and sanitise files for pushing to repo
|
||||
# catatrophically forgot to sanitize localsettingsWSL.py - oops.
|
||||
|
||||
#Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
|
||||
# Philip Sargent 2022/04/12
|
||||
|
||||
HOSTNAME=`hostname`
|
||||
echo "** This copies file to _deploy/${HOSTNAME}/ !"
|
||||
cd ..
|
||||
|
||||
cd troggle
|
||||
echo `pwd`
|
||||
echo deprecations.
|
||||
|
||||
PYTHON="uv run"
|
||||
|
||||
source .venv/bin/activate
|
||||
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
|
||||
deactivate
|
||||
echo diffsettings.
|
||||
rm diffsettings.txt
|
||||
if test -f "diffsettings.txt"; then
|
||||
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
|
||||
exit
|
||||
fi
|
||||
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
|
||||
|
||||
echo inspectdb.
|
||||
# this next line requires database setting to be troggle.sqlite:
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
#egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo remove passwords.
|
||||
cp localsettings.py localsettings-${HOSTNAME}.py
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
|
||||
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
|
||||
|
||||
mkdir -p _deploy/${HOSTNAME}
|
||||
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
|
||||
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
|
||||
cp *.sh _deploy/${HOSTNAME}
|
||||
|
||||
#
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# in ./pre-run.sh
|
||||
# $PYTHON reset-django.py
|
||||
# $PYTHON manage.py makemigrations
|
||||
# $PYTHON manage.py test
|
||||
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -i "unable|error" troggle-inspectdb.py
|
||||
Executable
+36
@@ -0,0 +1,36 @@
|
||||
#! /bin/bash
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# Need to be in an ALREADY activated venv
|
||||
PYTHON="python"
|
||||
|
||||
echo "** Run inspectdb:"
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo ""
|
||||
# count non-blank lines of python and template HTML code
|
||||
# includes all variants of settings.py files
|
||||
|
||||
# fix this as core/utils.py has 28,000 lines of numbers.
|
||||
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
|
||||
|
||||
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
|
||||
|
||||
echo "** Run reset-django.py - which deletes the database"
|
||||
# This deletes the database so must run after generating troggle-inspectdb.py
|
||||
$PYTHON reset-django.py
|
||||
echo "** After cleanup deletion, remake all migrations."
|
||||
$PYTHON manage.py makemigrations >/dev/null
|
||||
$PYTHON manage.py migrate
|
||||
|
||||
echo "** Now running self check"
|
||||
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
|
||||
$PYTHON manage.py check -v 3 --deploy
|
||||
|
||||
echo "** Now running test suite"
|
||||
# $PYTHON manage.py test -v 1
|
||||
|
||||
echo ""
|
||||
echo `tail -1 lines-of-python.txt` non-comment lines of python. But core/utils.py has 28,000 lines of numbers.
|
||||
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
|
||||
|
||||
echo '** If you have an error running manage.py, maybe you are not in an activated venv ?'
|
||||
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
# now using uv, unbelieveably simpler.
|
||||
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 24.04 with all the gubbins already installed
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog24.04.sh runniing it in /home/username/
|
||||
python3 --version
|
||||
cd ~/expo/troggle
|
||||
echo "-- EXPO folder [current directory]: `pwd`"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder: ${TROGDIR}"
|
||||
|
||||
cp dev.toml pyproject.toml
|
||||
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
|
||||
|
||||
uv self update
|
||||
uv sync
|
||||
|
||||
|
||||
# fudge for philip's laptop prior to M2 SSD upgrade
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
uv pip list
|
||||
|
||||
echo "Django version:`uv run django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/expo/troggle'
|
||||
'uv run django-admin'
|
||||
'uv run manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
|
||||
'uv run manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'uv run databaseReset.py reset INIT'
|
||||
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
@@ -0,0 +1,183 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
from secret_credentials import *
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
# MARIADB_SERVER_PASSWORD =
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that secret_credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets will be imported from credentials.py in future
|
||||
|
||||
SQLITEFILE = str(Path(__file__).parent.parent / "troggle.sqlite") # can be ':memory:'
|
||||
print(f"SQLITEFILE is {SQLITEFILE}")
|
||||
|
||||
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
|
||||
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
SERVERPORT = "8000" # not needed as it is the default
|
||||
|
||||
ADMINS = (
|
||||
('Philip', 'philip.sargent@klebos.eu'), # only on dev
|
||||
)
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
|
||||
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / "templates"
|
||||
MEDIA_ROOT = TROGGLE_PATH / "media"
|
||||
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
|
||||
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
|
||||
PHOTOS_YEAR = "2025"
|
||||
|
||||
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT.
|
||||
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
URL_ROOT = "/"
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
DBSQLITE = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"NAME": SQLITEFILE,
|
||||
# 'NAME' : ':memory:',
|
||||
"USER": "expo", # Not used with sqlite3.
|
||||
"PASSWORD": "sekrit", # Not used with sqlite3.
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4",
|
||||
},
|
||||
"NAME": "troggle", # Or path to database file if using sqlite3.
|
||||
"USER": "expo",
|
||||
"PASSWORD": MARIADB_SERVER_PASSWORD,
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [TEMPLATE_PATH],
|
||||
"OPTIONS": {
|
||||
"debug": "DEBUG",
|
||||
"context_processors": [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/context.py - only used in expedition.html
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.i18n",
|
||||
"django.template.context_processors.media", # includes a variable MEDIA_URL
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.tz",
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
"loaders": [
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,183 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
from secret_credentials import *
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
# MARIADB_SERVER_PASSWORD =
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that secret_credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets will be imported from credentials.py in future
|
||||
|
||||
SQLITEFILE = str(Path(__file__).parent.parent / "troggle.sqlite") # can be ':memory:'
|
||||
print(f"SQLITEFILE is {SQLITEFILE}")
|
||||
|
||||
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
|
||||
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
SERVERPORT = "8000" # not needed as it is the default
|
||||
|
||||
ADMINS = (
|
||||
('Philip', 'philip.sargent@klebos.eu'), # only on dev
|
||||
)
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
|
||||
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / "templates"
|
||||
MEDIA_ROOT = TROGGLE_PATH / "media"
|
||||
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
|
||||
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
|
||||
PHOTOS_YEAR = "2025"
|
||||
|
||||
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT.
|
||||
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
URL_ROOT = "/"
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
DBSQLITE = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"NAME": SQLITEFILE,
|
||||
# 'NAME' : ':memory:',
|
||||
"USER": "expo", # Not used with sqlite3.
|
||||
"PASSWORD": "sekrit", # Not used with sqlite3.
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4",
|
||||
},
|
||||
"NAME": "troggle", # Or path to database file if using sqlite3.
|
||||
"USER": "expo",
|
||||
"PASSWORD": MARIADB_SERVER_PASSWORD,
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [TEMPLATE_PATH],
|
||||
"OPTIONS": {
|
||||
"debug": "DEBUG",
|
||||
"context_processors": [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/context.py - only used in expedition.html
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.i18n",
|
||||
"django.template.context_processors.media", # includes a variable MEDIA_URL
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.tz",
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
"loaders": [
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal : 'bash os-survey.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
|
||||
echo '###'
|
||||
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
|
||||
echo '###'
|
||||
sudo apt install tunnelx therion -y
|
||||
sudo apt install survex-aven -y
|
||||
sudo apt install gpsprune qgis -y
|
||||
|
||||
|
||||
cd ~/expo
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
# sudo apt install python-is-python3 -y
|
||||
python --version : ensure python is an alias for python3 not python2.7
|
||||
ssh -V
|
||||
sudo apt update -y
|
||||
sudo apt dist-upgrade -y
|
||||
sudo apt autoremove -y
|
||||
|
||||
|
||||
# Already in Ubuntu 24.04 on WSL:
|
||||
# sudo apt install git -y
|
||||
# sudo apt install wget gpg
|
||||
# sudo apt install sftp -y
|
||||
# sudo apt install openssh-client -y
|
||||
# sudo apt install rsync
|
||||
|
||||
# Now using uv not pip:
|
||||
# sudo apt install python3-pip -y
|
||||
|
||||
sudo apt install sqlite3 -y
|
||||
sudo apt install gedit -y
|
||||
sudo apt install tig gitg meld -y
|
||||
|
||||
# python formatting https://docs.astral.sh/ruff/
|
||||
sudo snap install ruff
|
||||
|
||||
# # do not actually use this any more
|
||||
# sudo useradd expo
|
||||
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
|
||||
|
||||
# as debian does not install everything that ubuntu does, you need:
|
||||
sudo apt install python3-venv -y
|
||||
sudo apt install python3-dev -y
|
||||
# sudo apt install python3-distutils -y
|
||||
|
||||
# install uv
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
sudo apt install mariadb-server -y
|
||||
sudo apt install libmariadb-dev -y
|
||||
|
||||
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
|
||||
# sudo service mysql start
|
||||
|
||||
|
||||
# We don't install the later version or the earlier versions of python - for dev and "sever mimic" environments
|
||||
# we leave that to uv to install now.
|
||||
|
||||
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
|
||||
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
|
||||
|
||||
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
|
||||
# sudo apt install software-properties-common apt-transport-https
|
||||
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
|
||||
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
|
||||
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
|
||||
# sudo apt update
|
||||
# sudo apt install code
|
||||
|
||||
|
||||
mkdir ~/expo
|
||||
cd ~/expo
|
||||
|
||||
echo '###'
|
||||
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
|
||||
echo '### because you can't clone the repos without a key
|
||||
|
||||
git config --global user.email "philip.sargent@gmail.com"
|
||||
git config --global user.name "Philip Sargent"
|
||||
git config --global pull.rebase true
|
||||
|
||||
#Change this to clone using https?? at least for troggle?
|
||||
git clone ssh://expo@expo.survex.com/home/expo/troggle
|
||||
git clone ssh://expo@expo.survex.com/home/expo/loser
|
||||
git clone ssh://expo@expo.survex.com/home/expo/expoweb
|
||||
git clone ssh://expo@expo.survex.com/home/expo/drawings
|
||||
|
||||
mkdir expofiles
|
||||
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
|
||||
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
|
||||
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
Executable
+63
@@ -0,0 +1,63 @@
|
||||
#! /bin/bash
|
||||
# create and sanitise files for pushing to repo
|
||||
# catatrophically forgot to sanitize localsettingsWSL.py - oops.
|
||||
|
||||
#Make sure you have the WSL permissions system working, or you will push unsanitized files as this will fail
|
||||
# Philip Sargent 2022/04/12
|
||||
|
||||
HOSTNAME=`hostname`
|
||||
echo "** This copies file to _deploy/${HOSTNAME}/ !"
|
||||
cd ..
|
||||
|
||||
cd troggle
|
||||
echo `pwd`
|
||||
echo deprecations.
|
||||
|
||||
PYTHON="uv run"
|
||||
|
||||
source .venv/bin/activate
|
||||
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
|
||||
deactivate
|
||||
echo diffsettings.
|
||||
rm diffsettings.txt
|
||||
if test -f "diffsettings.txt"; then
|
||||
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
|
||||
exit
|
||||
fi
|
||||
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt
|
||||
|
||||
echo inspectdb.
|
||||
# this next line requires database setting to be troggle.sqlite:
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
#egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo remove passwords.
|
||||
cp localsettings.py localsettings-${HOSTNAME}.py
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EXPOUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"
|
||||
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""
|
||||
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
|
||||
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettings-${HOSTNAME}.py
|
||||
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""
|
||||
|
||||
mkdir -p _deploy/${HOSTNAME}
|
||||
mv _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}/localsettings-${HOSTNAME}.py.bak
|
||||
mv localsettings-${HOSTNAME}.py _deploy/${HOSTNAME}
|
||||
cp *.sh _deploy/${HOSTNAME}
|
||||
|
||||
#
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# in ./pre-run.sh
|
||||
# $PYTHON reset-django.py
|
||||
# $PYTHON manage.py makemigrations
|
||||
# $PYTHON manage.py test
|
||||
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -i "unable|error" troggle-inspectdb.py
|
||||
Executable
+36
@@ -0,0 +1,36 @@
|
||||
#! /bin/bash
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# Changed to use uv not pip, requires manage.py to have uv structured uv comment in it.
|
||||
PYTHON="uv run"
|
||||
|
||||
echo "** Run inspectdb:"
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo ""
|
||||
# count non-blank lines of python and template HTML code
|
||||
# includes all variants of settings.py files
|
||||
|
||||
# fix this as core/utils.py has 28,000 lines of numbers.
|
||||
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
|
||||
|
||||
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
|
||||
|
||||
echo "** Run reset-django.py - which deletes the database"
|
||||
# This deletes the database so must run after generating troggle-inspectdb.py
|
||||
$PYTHON reset-django.py
|
||||
echo "** After cleanup deletion, remake all migrations."
|
||||
$PYTHON manage.py makemigrations >/dev/null
|
||||
$PYTHON manage.py migrate
|
||||
|
||||
echo "** Now running self check"
|
||||
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
|
||||
$PYTHON manage.py check -v 3 --deploy
|
||||
|
||||
echo "** Now running test suite"
|
||||
# $PYTHON manage.py test -v 1
|
||||
|
||||
echo ""
|
||||
echo `tail -1 lines-of-python.txt` non-comment lines of python.
|
||||
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
|
||||
|
||||
echo '** If you have an error running manage.py, maybe you are not in an activated venv ? or your manage.py is not managed by uv properly ?'
|
||||
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
# now using uv, unbelieveably simpler.
|
||||
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 24.04 with all the gubbins already installed
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog24.04.sh runniing it in /home/username/
|
||||
python3 --version
|
||||
cd ~/expo/troggle
|
||||
echo "-- EXPO folder [current directory]: `pwd`"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder: ${TROGDIR}"
|
||||
|
||||
cp dev.toml pyproject.toml
|
||||
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
|
||||
|
||||
uv self update
|
||||
uv sync
|
||||
|
||||
|
||||
# fudge for philip's laptop prior to M2 SSD upgrade
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
uv pip list
|
||||
|
||||
echo "Django version:`uv run django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/expo/troggle'
|
||||
'uv run django-admin'
|
||||
'uv run manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
|
||||
'uv run manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'uv run databaseReset.py reset INIT'
|
||||
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
+11
-5
@@ -1,3 +1,14 @@
|
||||
2024-12-15 Philip Sargent
|
||||
You will need your own localsettings.py but they are all out of date in these
|
||||
subdirectories except for /wsl/ . So copy that to /troggle/ and make your own
|
||||
edits to make it work with your own machine and file whereabouts.
|
||||
|
||||
- settings.py
|
||||
is common to all configurations,
|
||||
but these are different:
|
||||
- localsettings.py
|
||||
- dev.toml
|
||||
|
||||
2023-07-17 Philip Sargent
|
||||
|
||||
Trying to sort out configurations as we got into a bit of a mess on
|
||||
@@ -11,8 +22,3 @@ recently had been done on Philip's two other machines, desktop and PC,
|
||||
both running Ubuntu on WSL on Windows and both using venv environments,
|
||||
which Crowley also does.
|
||||
|
||||
- settings.py
|
||||
is common to all configurations,
|
||||
but these are all different:
|
||||
- localsettings.py
|
||||
- requirements.txt
|
||||
Executable
+18
@@ -0,0 +1,18 @@
|
||||
#! /bin/bash
|
||||
echo troggle
|
||||
cd ~/expo/troggle
|
||||
git pull
|
||||
|
||||
echo expoweb
|
||||
cd ../expoweb
|
||||
git pull
|
||||
|
||||
echo drawings
|
||||
cd ../drawings
|
||||
git pull
|
||||
|
||||
cd ../loser
|
||||
echo loser
|
||||
git pull
|
||||
|
||||
cd ../troggle
|
||||
@@ -20,22 +20,42 @@ Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets imported from credentials.py
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
from secret_credentials import *
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
# MARIADB_SERVER_PASSWORD =
|
||||
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote sever. Tests are then less accurate.
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that secret_credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets will be imported from credentials.py in future
|
||||
|
||||
SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
|
||||
|
||||
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
|
||||
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
SERVERPORT = "8000" # not needed as it is the default
|
||||
|
||||
SERVERPORT = "8000" # not needed
|
||||
ADMINS = (
|
||||
('Philip', 'philip.sargent@klebos.eu'), # only on dev
|
||||
)
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
@@ -44,41 +64,27 @@ PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
|
||||
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
|
||||
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / "templates"
|
||||
MEDIA_ROOT = TROGGLE_PATH / "media"
|
||||
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles"
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
|
||||
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
# PHOTOS_ROOT = EXPOFILES / 'photos'
|
||||
PHOTOS_ROOT = Path("/mnt/d/EXPO/PHOTOS")
|
||||
PHOTOS_YEAR = "2023"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
|
||||
PHOTOS_YEAR = "2025"
|
||||
|
||||
NOTABLECAVESHREFS = ["290", "291", "264", "258", "204", "359", "76", "107"]
|
||||
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
|
||||
|
||||
# PYTHON_PATH = os.fspath(PYTHON_PATH)
|
||||
PYTHON_PATH = REPOS_ROOT_PATH / "troggle"
|
||||
LOGFILE = PYTHON_PATH / "troggle.log"
|
||||
SQLITEDB = PYTHON_PATH / "troggle.sqlite"
|
||||
KMZ_ICONS_PATH = PYTHON_PATH / "kmz_icons"
|
||||
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
|
||||
# trailing slash if there is a path component (optional in other cases).
|
||||
MEDIA_URL = "/site-media/"
|
||||
|
||||
DIR_ROOT = Path("") # this should end in / if a value is given
|
||||
URL_ROOT = "/"
|
||||
# URL_ROOT = 'http://localhost:'+ SERVERPORT +'/'
|
||||
|
||||
# Note that these constants are not actually used in urls.py, they should be..
|
||||
# URL that handles the media served from MEDIA_ROOT.
|
||||
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
URL_ROOT = "/"
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
@@ -89,18 +95,14 @@ JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
# executables:
|
||||
CAVERN = "cavern" # for parsing .svx files and producing .3d files
|
||||
SURVEXPORT = "survexport" # for parsing .3d files and producing .pos files
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
DBSQLITE = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
#'NAME' : 'troggle.sqlite',
|
||||
"NAME": str(SQLITEDB),
|
||||
"NAME": SQLITEFILE,
|
||||
# 'NAME' : ':memory:',
|
||||
"USER": "expo", # Not used with sqlite3.
|
||||
"PASSWORD": "sekrit", # Not used with sqlite3.
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
@@ -110,9 +112,12 @@ DBSQLITE = {
|
||||
DBMARIADB = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4",
|
||||
},
|
||||
"NAME": "troggle", # Or path to database file if using sqlite3.
|
||||
"USER": "expo",
|
||||
"PASSWORD": "my-secret-password-schwatzmooskogel",
|
||||
"PASSWORD": MARIADB_SERVER_PASSWORD,
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
@@ -126,8 +131,6 @@ if DBSWITCH == "sqlite":
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
@@ -136,53 +139,44 @@ TEMPLATES = [
|
||||
"debug": "DEBUG",
|
||||
"context_processors": [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/troggle.py - only used in expedition.html
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/context.py - only used in expedition.html
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.i18n",
|
||||
"django.template.context_processors.media", # includes a variable MEDIA_URL
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.tz",
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
"loaders": [
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
EXPOUSER = "expo"
|
||||
EXPOUSER_EMAIL = "philip.sargent@gmail.com"
|
||||
EXPOADMINUSER = "expoadmin"
|
||||
EXPOADMINUSER_EMAIL = "philip.sargent@gmail.com"
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.net" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "django-test@klebos.net"
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
EXPOWEB_URL = ""
|
||||
# SCANS_URL = '/survey_scans/' # defunct, removed.
|
||||
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
sys.path.append(str(PYTHON_PATH))
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
# and we have not made the change to pathlib Path type in the other localsettings-* variants yet.
|
||||
CAVEDESCRIPTIONS = os.fspath(CAVEDESCRIPTIONS)
|
||||
ENTRANCEDESCRIPTIONS = os.fspath(ENTRANCEDESCRIPTIONS)
|
||||
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,182 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
"""Settings for a troggle installation which may vary among different
|
||||
installations: for development or deployment, in a docker image or
|
||||
python virtual environment (venv), on ubuntu, debian or in Windows
|
||||
System for Linux (WSL), on the main server or in the potato hut,
|
||||
using SQLite or mariaDB.
|
||||
|
||||
It sets the directory locations for the major parts of the system so
|
||||
that e.g. expofiles can be on a different filesystem, or /javascript/ can be in
|
||||
a system-wide location rather than just a local directory.
|
||||
|
||||
This file is included at the end of the main troggle/settings.py file so that
|
||||
it overwrites defaults in that file.
|
||||
|
||||
Read https://realpython.com/python-pathlib/
|
||||
Read https://adamj.eu/tech/2020/03/16/use-pathlib-in-your-django-project/
|
||||
"""
|
||||
|
||||
print(" * importing troggle/localsettings.py")
|
||||
|
||||
|
||||
EXPOUSER = 'expo'
|
||||
EXPOADMINUSER = 'expoadmin'
|
||||
EXPOUSER_EMAIL = 'wookey@wookware.org'
|
||||
EXPOADMINUSER_EMAIL = 'wookey@wookware.org'
|
||||
|
||||
from secret_credentials import *
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"
|
||||
EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"
|
||||
EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"
|
||||
EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"
|
||||
# MARIADB_SERVER_PASSWORD =
|
||||
|
||||
EMAIL_HOST = "smtp-auth.mythic-beasts.com"
|
||||
EMAIL_HOST_USER = "django-test@klebos.eu" # Philip Sargent really
|
||||
EMAIL_PORT = 587
|
||||
EMAIL_USE_TLS = True
|
||||
DEFAULT_FROM_EMAIL = "EXPO SERVER AUTOMATIC <django-test@klebos.eu>"
|
||||
# -----------------------------------------------------------------
|
||||
# THINK before you push this to a repo
|
||||
# - have you checked that secret_credentials.py is in .gitignore ?
|
||||
# - we don't want to have to change the expo system password !
|
||||
# -----------------------------------------------------------------
|
||||
# default values, real secrets will be imported from credentials.py in future
|
||||
|
||||
SQLITEFILE = "/home/philip/expo/troggle.sqlite" # can be ':memory:'
|
||||
|
||||
PHOTOSREMOTE = False # if True, then re-routes urls in expofiles/photos to remote server. Not implemented yet
|
||||
EXPOFILESREMOTE = False # if True, then re-routes urls in expofiles to remote server. Tests are then less accurate.
|
||||
# SECURE_SSL_REDIRECT = True # breaks 7 tests in test suite 301 not 200 (or 302) and runserver fails completely
|
||||
SERVERPORT = "8000" # not needed as it is the default
|
||||
|
||||
ADMINS = (
|
||||
('Philip', 'philip.sargent@klebos.eu'), # only on dev
|
||||
)
|
||||
|
||||
PV = "python" + str(sys.version_info.major) + "." + str(sys.version_info.minor)
|
||||
|
||||
# Troggle does a lot of file-handling. This is very error-prone when using primitive methods,
|
||||
# so we use pathlib which has been standard since python 3.4
|
||||
# If pathlib is new to you, you will need to read https://realpython.com/python-pathlib/
|
||||
|
||||
# --------------------- MEDIA redirections BEGIN ---------------------
|
||||
REPOS_ROOT_PATH = Path(__file__).parent.parent # folder above troggle, expoweb, drawings, loser
|
||||
LIBDIR = REPOS_ROOT_PATH / "lib" / PV
|
||||
sys.path.append(str(REPOS_ROOT_PATH))
|
||||
|
||||
TROGGLE_PATH = Path(__file__).parent
|
||||
TEMPLATE_PATH = TROGGLE_PATH / "templates"
|
||||
MEDIA_ROOT = TROGGLE_PATH / "media"
|
||||
JSLIB_ROOT = TROGGLE_PATH / "media" / "jslib" # used for CaveViewer JS utility
|
||||
|
||||
EXPOFILES = REPOS_ROOT_PATH / "expofiles" # sometimes on a different filesystem
|
||||
|
||||
SCANS_ROOT = EXPOFILES / "surveyscans"
|
||||
PHOTOS_ROOT = EXPOFILES / "photos" # sometimes on a different filesystem
|
||||
PHOTOS_YEAR = "2025"
|
||||
|
||||
KMZ_ICONS_PATH = REPOS_ROOT_PATH / "troggle" / "kmz_icons" # Google Earth export in /caves/
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT.
|
||||
# Note that MEDIA_URL and PHOTOS_URL are not actually used in urls.py, they should be..
|
||||
# and they all need to end with / so using 'Path' doesn't work..
|
||||
URL_ROOT = "/"
|
||||
MEDIA_URL = Path(URL_ROOT, "/site_media/")
|
||||
PHOTOS_URL = Path(URL_ROOT, "/photos/")
|
||||
|
||||
STATIC_URL = Path(URL_ROOT, "/static/") # used by Django admin pages. Do not delete.
|
||||
JSLIB_URL = Path(URL_ROOT, "/javascript/") # used for CaveViewer JS utility
|
||||
|
||||
# STATIC_ROOT removed after merging content into MEDIA_ROOT. See urls.py & core/views/surveys.py
|
||||
# --------------------- MEDIA redirections END ---------------------
|
||||
|
||||
PUBLIC_SITE = True
|
||||
DEBUG = True # Always keep this True, even when on public server. Otherwise NO USEFUL ERROR MESSAGES !
|
||||
CACHEDPAGES = True # experimental page cache for a handful of page types
|
||||
|
||||
DBSQLITE = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.sqlite3", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"NAME": SQLITEFILE,
|
||||
# 'NAME' : ':memory:',
|
||||
"USER": "expo", # Not used with sqlite3.
|
||||
"PASSWORD": "sekrit", # Not used with sqlite3.
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
DBMARIADB = {
|
||||
"default": {
|
||||
"ENGINE": "django.db.backends.mysql", # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
|
||||
"OPTIONS": {
|
||||
"charset": "utf8mb4",
|
||||
},
|
||||
"NAME": "troggle", # Or path to database file if using sqlite3.
|
||||
"USER": "expo",
|
||||
"PASSWORD": MARIADB_SERVER_PASSWORD,
|
||||
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
|
||||
"PORT": "", # Set to empty string for default. Not used with sqlite3.
|
||||
}
|
||||
}
|
||||
|
||||
# default database for me is squlite
|
||||
DBSWITCH = "sqlite"
|
||||
|
||||
if DBSWITCH == "sqlite":
|
||||
DATABASES = DBSQLITE
|
||||
if DBSWITCH == "mariadb":
|
||||
DATABASES = DBMARIADB
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [TEMPLATE_PATH],
|
||||
"OPTIONS": {
|
||||
"debug": "DEBUG",
|
||||
"context_processors": [
|
||||
# django.template.context_processors.csrf, # is always enabled and cannot be removed, sets csrf_token
|
||||
"django.contrib.auth.context_processors.auth", # knowledge of logged-on user & permissions
|
||||
"core.context.troggle_context", # in core/context.py - only used in expedition.html
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.i18n",
|
||||
"django.template.context_processors.media", # includes a variable MEDIA_URL
|
||||
"django.template.context_processors.static", # includes a variable STATIC_URL used by admin pages
|
||||
"django.template.context_processors.tz",
|
||||
"django.template.context_processors.request", # must be enabled in DjangoTemplates (TEMPLATES) in order to use the admin navigation sidebar.
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
"loaders": [
|
||||
"django.template.loaders.filesystem.Loader", # default lcation is troggle/templates/
|
||||
"django.template.loaders.app_directories.Loader", # needed for admin 'app'
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
|
||||
SURVEX_DATA = REPOS_ROOT_PATH / "loser"
|
||||
DRAWINGS_DATA = REPOS_ROOT_PATH / "drawings"
|
||||
EXPOWEB = REPOS_ROOT_PATH / "expoweb"
|
||||
CAVEDESCRIPTIONS = EXPOWEB / "cave_data"
|
||||
ENTRANCEDESCRIPTIONS = EXPOWEB / "entrance_data"
|
||||
|
||||
# Sanitise these to be strings as Django seems to be particularly sensitive to crashing if they aren't
|
||||
STATIC_URL = str(STATIC_URL) + "/"
|
||||
MEDIA_URL = str(MEDIA_URL) + "/"
|
||||
|
||||
# Re-enable TinyMCE when Dj upgraded to v3. Also templates/editexpopage.html
|
||||
# TINYMCE_DEFAULT_CONFIG = {
|
||||
# 'plugins': "table,spellchecker,paste,searchreplace",
|
||||
# 'theme': "advanced",
|
||||
# }
|
||||
# TINYMCE_SPELLCHECKER = False
|
||||
# TINYMCE_COMPRESSOR = True
|
||||
#TINY_MCE_MEDIA_ROOT = STATIC_ROOT + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
#TINY_MCE_MEDIA_URL = STATIC_URL + '/tiny_mce/' # not needed while TinyMCE not installed
|
||||
|
||||
print(" + finished importing troggle/localsettings.py")
|
||||
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal : 'bash os-survey.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
|
||||
echo '###'
|
||||
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
|
||||
echo '###'
|
||||
sudo apt install tunnelx therion -y
|
||||
sudo apt install survex-aven -y
|
||||
sudo apt install gpsprune qgis -y
|
||||
|
||||
|
||||
cd ~/expo
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
Executable
+92
@@ -0,0 +1,92 @@
|
||||
#!/bin/bash
|
||||
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
|
||||
# On WSL, do Shift-click in the file explorer on the troggle folder to open a Linux command line
|
||||
# 'Open Linux shell here'
|
||||
echo 'Run this in a terminal in your home directory: "bash os-trog.sh"'
|
||||
cat /etc/os-release
|
||||
# Expects an Ubuntu 24.04 relatively clean install.
|
||||
# 24.04 has python 3.12
|
||||
|
||||
# sudo apt install python-is-python3 -y
|
||||
python --version : ensure python is an alias for python3 not python2.7
|
||||
ssh -V
|
||||
sudo apt update -y
|
||||
sudo apt dist-upgrade -y
|
||||
sudo apt autoremove -y
|
||||
|
||||
|
||||
# Already in Ubuntu 24.04 on WSL:
|
||||
# sudo apt install git -y
|
||||
# sudo apt install wget gpg
|
||||
# sudo apt install sftp -y
|
||||
# sudo apt install openssh-client -y
|
||||
# sudo apt install rsync
|
||||
|
||||
# Now using uv not pip:
|
||||
# sudo apt install python3-pip -y
|
||||
|
||||
sudo apt install sqlite3 -y
|
||||
sudo apt install gedit -y
|
||||
sudo apt install tig gitg meld -y
|
||||
|
||||
# python formatting https://docs.astral.sh/ruff/
|
||||
sudo snap install ruff
|
||||
|
||||
# # do not actually use this any more
|
||||
# sudo useradd expo
|
||||
# sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required
|
||||
|
||||
# as debian does not install everything that ubuntu does, you need:
|
||||
sudo apt install python3-venv -y
|
||||
sudo apt install python3-dev -y
|
||||
# sudo apt install python3-distutils -y
|
||||
|
||||
# install uv
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
sudo apt install mariadb-server -y
|
||||
sudo apt install libmariadb-dev -y
|
||||
|
||||
# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
|
||||
# sudo service mysql start
|
||||
|
||||
|
||||
# We don't install the later version or the earlier versions of python - for dev and "sever mimic" environments
|
||||
# we leave that to uv to install now.
|
||||
|
||||
# In Dec.2024, the server is running 3.11 but dev work will be using 3.13
|
||||
# The setup of the virtual environment is done by troggle/_deploy/wsl/venv-trog.sh
|
||||
|
||||
# install VS code - but ONLY on a native ubuntu install, NOT in WSL
|
||||
# sudo apt install software-properties-common apt-transport-https
|
||||
# wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
|
||||
# sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
|
||||
# sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
|
||||
# sudo apt update
|
||||
# sudo apt install code
|
||||
|
||||
|
||||
mkdir ~/expo
|
||||
cd ~/expo
|
||||
|
||||
echo '###'
|
||||
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
|
||||
echo '### because you can't clone the repos without a key
|
||||
|
||||
git config --global user.email "philip.sargent@gmail.com"
|
||||
git config --global user.name "Philip Sargent"
|
||||
git config --global pull.rebase true
|
||||
|
||||
#Change this to clone using https?? at least for troggle?
|
||||
git clone ssh://expo@expo.survex.com/home/expo/troggle
|
||||
git clone ssh://expo@expo.survex.com/home/expo/loser
|
||||
git clone ssh://expo@expo.survex.com/home/expo/expoweb
|
||||
git clone ssh://expo@expo.survex.com/home/expo/drawings
|
||||
|
||||
mkdir expofiles
|
||||
rsync -azv --delete-after --prune-empty-dirs expo@expo.survex.com:expofiles/surveyscans/ expofiles/surveyscans
|
||||
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/2018/PhilipSargent/ expofiles/photos/2018/PhilipSargent
|
||||
|
||||
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" --exclude="mapapp" expo@expo.survex.com:expofiles/ expofiles
|
||||
# rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
#! /bin/bash
# Create and sanitise files for pushing to the repo.
# Copies localsettings.py to _deploy/wsl/ with all secrets redacted, and
# regenerates deprecations.txt, diffsettings.txt and troggle-inspectdb.py.
# (We once catastrophically forgot to sanitize localsettingsWSL.py - oops.)

# Make sure you have the WSL permissions system working, or you will push
# unsanitized files as the redaction steps below will silently fail.
# Philip Sargent 2022/04/12
echo "** This copies file to _deploy/wsl/ !"
HOSTNAME=`hostname`
echo "** This copies file to _deploy/${HOSTNAME}/ !"
cd ..

# assumes this script is run from inside the troggle checkout — TODO confirm layout
cd troggle
echo `pwd`
echo deprecations.

PYTHON="uv run"

# run the deprecation check with the venv's python directly;
# stderr (the deprecation warnings) is captured in deprecations.txt
source .venv/bin/activate
python3 -Wall manage.py check -v 3 2>deprecations.txt >/dev/null
deactivate
echo diffsettings.
# -f: do not error if the file is already absent; the existence test
# below is what detects a real permissions problem
rm -f diffsettings.txt
if test -f "diffsettings.txt"; then
echo "diffsettings.txt not deleted. You have a serious permissions problem. Aborting.."
exit 1
fi
$PYTHON manage.py diffsettings | grep "###" > diffsettings.txt

echo inspectdb.
# this next line requires database setting to be troggle.sqlite:
$PYTHON manage.py inspectdb > troggle-inspectdb.py
#egrep -in "unable|error" troggle-inspectdb.py
echo remove passwords.
cp localsettings.py localsettingsWSL.py
# redact each secret in both the generated diffsettings.txt and the
# localsettingsWSL.py copy that is about to be pushed
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOUSERPASS/ s/^.*$/EXPOUSERPASS = "nnn:gggggg - real-expo-password---imported-from-localsettings.py"/' localsettingsWSL.py
echo " reset: EXPOUSERPASS = \"nnn:gggggg\" - real-expo-password---imported-from-localsettings.py"

sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EXPOADMINUSERPASS/ s/^.*$/EXPOADMINUSERPASS = "gggggg:nnn - real-expo-password---imported-from-localsettings.py"/' localsettingsWSL.py
# bug fix: this status line previously said EXPOUSERPASS although the
# variable actually reset above is EXPOADMINUSERPASS
echo " reset: EXPOADMINUSERPASS = \"gggggg:nnn\" - real-expo-password---imported-from-localsettings.py"

sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' diffsettings.txt
sed -i '/EMAIL_HOST_PASSWORD/ s/^.*$/EMAIL_HOST_PASSWORD = "real-email-password---imported-from-localsettings.py"/' localsettingsWSL.py
echo " reset: EMAIL_HOST_PASSWORD = \"real-email-password--imported-from-localsettings.py\""

sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' diffsettings.txt
sed -i '/SECRET_KEY/ s/^.*$/SECRET_KEY = "real-SECRET_KEY--imported-from-localsettings.py"/' localsettingsWSL.py
echo " reset: SECRET_KEY = \"real-SECRET_KEY--imported-from-localsettings.py\""

# keep a backup of the previously-deployed sanitised settings before overwriting
mv _deploy/wsl/localsettingsWSL.py _deploy/wsl/localsettingsWSL.py.bak
mv localsettingsWSL.py _deploy/wsl
cp *.sh _deploy/wsl

#
# Do these before final testing, *not* just before pushing:
# in ./pre-run.sh
# $PYTHON reset-django.py
# $PYTHON manage.py makemigrations
# $PYTHON manage.py test
# $PYTHON manage.py inspectdb > troggle-inspectdb.py
# egrep -i "unable|error" troggle-inspectdb.py
|
||||
@@ -0,0 +1,36 @@
|
||||
#! /bin/bash
|
||||
# Do these before final testing, *not* just before pushing:
|
||||
# Need to be in an ALREADY activated venv
|
||||
PYTHON="python"
|
||||
|
||||
echo "** Run inspectdb:"
|
||||
$PYTHON manage.py inspectdb > troggle-inspectdb.py
|
||||
# egrep -in "unable|error" troggle-inspectdb.py
|
||||
echo ""
|
||||
# count non-blank lines of python and template HTML code
|
||||
# includes all variants of settings.py files
|
||||
|
||||
# fix this as core/utils.py has 28,000 lines of numbers.
|
||||
find . -name \*.html -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-templates.txt
|
||||
|
||||
find . -name \*.py -print0 | xargs -0 egrep -vc "#|^\s*$" | grep -v ":0$" | grep -v ".venv" | grep -v "/migrations/" |grep -v "troggle-inspectdb.py"| awk -F ":" '{ sum +=$2; print $2, $1; } END {print sum}'| sort -n > lines-of-python.txt
|
||||
|
||||
echo "** Run reset-django.py - which deletes the database"
|
||||
# This deletes the database so must run after generating troggle-inspectdb.py
|
||||
$PYTHON reset-django.py
|
||||
echo "** After cleanup deletion, remake all migrations."
|
||||
$PYTHON manage.py makemigrations >/dev/null
|
||||
$PYTHON manage.py migrate
|
||||
|
||||
echo "** Now running self check"
|
||||
$PYTHON manage.py check -v 3 --deploy 2>security-warnings.txt >/dev/null
|
||||
$PYTHON manage.py check -v 3 --deploy
|
||||
|
||||
echo "** Now running test suite"
|
||||
# $PYTHON manage.py test -v 1
|
||||
|
||||
echo ""
|
||||
echo `tail -1 lines-of-python.txt` non-comment lines of python. But core/utils.py has 28,000 lines of numbers.
|
||||
echo `tail -1 lines-of-templates.txt` non-comment lines of HTML templates.
|
||||
|
||||
echo '** If you have an error running manage.py, maybe you are not in an activated venv ?'
|
||||
@@ -1,9 +0,0 @@
|
||||
asgiref==3.5.2
|
||||
coverage==6.5.0
|
||||
Django==3.2.16
|
||||
docutils==0.19
|
||||
Pillow==9.3.0
|
||||
pytz==2022.6
|
||||
sqlparse==0.4.3
|
||||
typing_extensions==4.4.0
|
||||
Unidecode==1.3.6
|
||||
Executable
+44
@@ -0,0 +1,44 @@
|
||||
For debian bullseye (10)
|
||||
python3-django (3.2.12)
|
||||
tinymce (from where?)
|
||||
mariadb-server
|
||||
apache2
|
||||
libapache2-mod-wsgi-py3
|
||||
|
||||
|
||||
python3-django-registration ?
|
||||
Django==1.7
|
||||
django-extensions==2.2.9
|
||||
django-registration==2.0
|
||||
django-tinymce==2.0.1
|
||||
six==1.14.0
|
||||
Unidecode==1.1.1 python3-unidecode
|
||||
Pillow==7.1.2 python3-willow
|
||||
|
||||
asgiref==3.7.2
|
||||
attrs==22.2.0
|
||||
beautifulsoup4==4.12.2
|
||||
black==23.11.0
|
||||
bs4==0.0.1
|
||||
click==8.1.3
|
||||
colorama==0.4.6
|
||||
coverage==6.5.0
|
||||
Django==5.0
|
||||
docutils==0.20
|
||||
interrogate==1.5.0
|
||||
isort==5.11.4
|
||||
mypy-extensions==0.4.3
|
||||
packaging==23.2
|
||||
pathspec==0.10.3
|
||||
piexif==1.1.3
|
||||
Pillow==10.1.0
|
||||
platformdirs==2.6.2
|
||||
py==1.11.0
|
||||
pytz==2022.6
|
||||
ruff==0.1.0
|
||||
soupsieve==2.5
|
||||
sqlparse==0.4.0
|
||||
tabulate==0.9.0
|
||||
toml==0.10.2
|
||||
typing_extensions==4.4.0
|
||||
Unidecode==1.3.6
|
||||
@@ -0,0 +1,3 @@
|
||||
asgiref==3.8.1
|
||||
Django==5.1b1
|
||||
sqlparse==0.5.1
|
||||
@@ -1,16 +1,21 @@
|
||||
asgiref==3.6.0
|
||||
beautifulsoup4==4.12.0
|
||||
black==23.1.0
|
||||
click==8.1.3
|
||||
chardet==5.1.0
|
||||
click==8.1.0
|
||||
coverage==7.1.0
|
||||
deptry==0.12.0
|
||||
Django==4.2
|
||||
docutils==0.19
|
||||
isort==5.12.0
|
||||
mypy-extensions==1.0.0
|
||||
packaging==23.0
|
||||
pathspec==0.11.0
|
||||
piexif==1.1.3
|
||||
Pillow==9.4.0
|
||||
platformdirs==3.0.0
|
||||
pytz==2022.7
|
||||
ruff==0.0.245
|
||||
sqlparse==0.4.3
|
||||
Unidecode==1.3.6
|
||||
soupsieve==2.5
|
||||
sqlparse==0.4.0
|
||||
Unidecode==1.3.0
|
||||
@@ -0,0 +1,21 @@
|
||||
# to be used with pre-release Django install which installs other stuff too
|
||||
beautifulsoup4==4.12
|
||||
piexif==1.1
|
||||
black==23.1
|
||||
chardet==5.1
|
||||
click==8.1
|
||||
coverage==7.1
|
||||
deptry==0.12
|
||||
docutils==0.19
|
||||
isort==5.12
|
||||
mypy-extensions==1.0
|
||||
packaging==23.0
|
||||
pathspec==0.11
|
||||
Pillow==9.4.0
|
||||
platformdirs==3.0
|
||||
pytz==2022.7
|
||||
ruff==0.0.245
|
||||
setuptools==67.7
|
||||
soupsieve==2.5
|
||||
Unidecode==1.3
|
||||
piexif==1.1
|
||||
Executable
+20
@@ -0,0 +1,20 @@
|
||||
Pillow==10.2
|
||||
Unidecode==1.3.8
|
||||
asgiref==3.6
|
||||
beautifulsoup4==4.12
|
||||
black==24.2
|
||||
chardet==5.2
|
||||
click==8.1
|
||||
coverage==7
|
||||
deptry==0.12.0
|
||||
docutils==0.20
|
||||
isort==5
|
||||
mypy-extensions==1.0
|
||||
packaging==23
|
||||
pathspec==0.12
|
||||
platformdirs==4
|
||||
pytz==2024.1
|
||||
ruff==0.2
|
||||
soupsieve==2.5
|
||||
sqlparse
|
||||
piexif
|
||||
@@ -0,0 +1,184 @@
|
||||
#!/bin/bash
|
||||
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
|
||||
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog.sh
|
||||
|
||||
# If you are using Debian, then stick with the default version of python
|
||||
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
|
||||
|
||||
# NOW we set up troggle
|
||||
PYTHON=python3.11
|
||||
VENAME=p11d32 # python3.x and django 4.2
|
||||
echo "** You are logged in as `id -u -n`"
|
||||
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
|
||||
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder (this script location): ${TROGDIR}"
|
||||
|
||||
REQUIRE=requirements-$VENAME.txt
|
||||
|
||||
# Guard: abort when the requirements file is missing.
# Bug fix: the original tested `-d` (directory exists) with no negation, which
# is never true for a requirements *file*, so this guard could never fire.
if [ ! -f "$REQUIRE" ]; then
echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
# quote the arguments: an unquoted word starting with '#' begins a shell
# comment, so the original `echo ## ...` lines printed only blank lines
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
|
||||
# NOTE that when using a later or earlier version of python, you MUST also
|
||||
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
|
||||
|
||||
# NOW set up link from expo user folder
|
||||
# needed for WSL2
|
||||
echo Creating links from Linux filesystem user
|
||||
# These links only need making once, for many venv
|
||||
cd ~
|
||||
|
||||
if [ ! -d $VENAME ]; then
|
||||
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
|
||||
$PYTHON -m venv $VENAME
|
||||
else
|
||||
echo "## /$VENAME/ already exists ! Delete it first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Activate the virtual env and see what the default packages are
|
||||
echo "### Activating $VENAME"
|
||||
|
||||
cd $VENAME
|
||||
echo "-- now in: ${PWD}"
|
||||
ls -tlarg
|
||||
source bin/activate
|
||||
echo $VIRTUAL_ENV
|
||||
if [ -d ~/$VENAME/bin ]; then
|
||||
echo "### Activating."
|
||||
else
|
||||
echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
|
||||
exit 1
|
||||
fi
|
||||
# update local version of pip, more recent than OS version
|
||||
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
|
||||
|
||||
# update local version of setuptools, more recent than OS version, needed for packages without wheels
|
||||
|
||||
echo "### installing later version of pip inside $VENAME"
|
||||
$PYTHON -m pip install --upgrade pip
|
||||
$PYTHON -m pip install --upgrade setuptools
|
||||
|
||||
PIP=pip
|
||||
|
||||
$PIP list > original-pip.list
|
||||
$PIP freeze >original.txt
|
||||
|
||||
# we are in /home/$USER/$VENAME/
|
||||
ln -s ${TROGDIR} troggle
|
||||
ln -s ${TROGDIR}/../expoweb expoweb
|
||||
ln -s ${TROGDIR}/../loser loser
|
||||
ln -s ${TROGDIR}/../drawings drawings
|
||||
#ln -s ${TROGDIR}/../expofiles expofiles
|
||||
|
||||
# fudge for philip's machine
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
if [ -d ${TROGDIR}/../expofiles ]; then
|
||||
ln -s ${TROGDIR}/../expofiles expofiles
|
||||
else
|
||||
ln -s /mnt/d/EXPO/expofiles expofiles
|
||||
fi
|
||||
|
||||
echo "### Setting file permissions.. may take a while.."
|
||||
git config --global --add safe.directory '*'
|
||||
sudo chmod -R 777 *
|
||||
|
||||
echo "### links to expoweb, troggle etc. complete:"
|
||||
ls -tla
|
||||
echo "###"
|
||||
echo "### now installing ${TROGDIR}/${REQUIRE}"
|
||||
echo "###"
|
||||
cat ${TROGDIR}/${REQUIRE}
|
||||
|
||||
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
|
||||
# seen on wsl2 as well as wsl1
|
||||
# which ALSO ruins EXISTING permissions !
|
||||
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
|
||||
|
||||
read -p "Press any key to resume ..."
|
||||
$PIP install -r ${TROGDIR}/${REQUIRE}
|
||||
echo "### install from ${TROGDIR}/${REQUIRE} completed."
|
||||
echo '### '
|
||||
|
||||
$PIP install --pre django
|
||||
|
||||
$PIP freeze > $REQUIRE
|
||||
# so that we can track requirements more easily with git
|
||||
# because we do not install these with pip, but they are listed by the freeze command
|
||||
# Now find out what we actually installed by subtracting the stuff venv installed anyway
|
||||
sort original.txt > 1
|
||||
sort $REQUIRE >2
|
||||
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
|
||||
rm 1
|
||||
rm 2
|
||||
|
||||
# cp $REQUIRE requirements-$VENAME.txt
|
||||
cp $REQUIRE troggle/$REQUIRE
|
||||
|
||||
$PIP list > installed-pip.list
|
||||
$PIP list -o > installed-pip-o.list
|
||||
|
||||
REQ=installation-record
|
||||
mkdir $REQ
|
||||
|
||||
mv original.txt $REQ
|
||||
mv $REQUIRE $REQ
|
||||
mv original-pip.list $REQ
|
||||
mv installed-pip.list $REQ
|
||||
mv installed-pip-o.list $REQ
|
||||
cp fresh-$REQUIRE ../$REQUIRE
|
||||
mv fresh-$REQUIRE $REQ
|
||||
cp troggle/`basename "$0"` $REQ
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
python --version
|
||||
echo "Django version:`django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/$VENAME'
|
||||
'source bin/activate'
|
||||
'cd troggle'
|
||||
'django-admin'
|
||||
'python manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
## the tests may ALSO fail because of ssh and permissions errors
|
||||
|
||||
## So you will need to run
|
||||
$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
|
||||
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
|
||||
# because this chmod only takes effect then.
|
||||
|
||||
'python manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'python databaseReset.py reset $VENAME'
|
||||
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
@@ -0,0 +1,184 @@
|
||||
#!/bin/bash
|
||||
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
|
||||
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog.sh
|
||||
|
||||
# If you are using Debian, then stick with the default version of python
|
||||
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
|
||||
|
||||
# NOW we set up troggle
|
||||
PYTHON=python3.11
|
||||
VENAME=p11d42 # python3.x and django 4.2
|
||||
echo "** You are logged in as `id -u -n`"
|
||||
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
|
||||
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder (this script location): ${TROGDIR}"
|
||||
|
||||
REQUIRE=requirements-$VENAME.txt
|
||||
|
||||
# Guard: abort when the requirements file is missing.
# Bug fix: the original tested `-d` (directory exists) with no negation, which
# is never true for a requirements *file*, so this guard could never fire.
if [ ! -f "$REQUIRE" ]; then
echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
# quote the arguments: an unquoted word starting with '#' begins a shell
# comment, so the original `echo ## ...` lines printed only blank lines
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
|
||||
# NOTE that when using a later or earlier version of python, you MUST also
|
||||
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
|
||||
|
||||
# NOW set up link from expo user folder
|
||||
# needed for WSL2
|
||||
echo Creating links from Linux filesystem user
|
||||
# These links only need making once, for many venv
|
||||
cd ~
|
||||
|
||||
if [ ! -d $VENAME ]; then
|
||||
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
|
||||
$PYTHON -m venv $VENAME
|
||||
else
|
||||
echo "## /$VENAME/ already exists ! Delete it first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Activate the virtual env and see what the default packages are
|
||||
echo "### Activating $VENAME"
|
||||
|
||||
cd $VENAME
|
||||
echo "-- now in: ${PWD}"
|
||||
ls -tlarg
|
||||
source bin/activate
|
||||
echo $VIRTUAL_ENV
|
||||
if [ -d ~/$VENAME/bin ]; then
|
||||
echo "### Activating."
|
||||
else
|
||||
echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
|
||||
exit 1
|
||||
fi
|
||||
# update local version of pip, more recent than OS version
|
||||
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
|
||||
|
||||
# update local version of setuptools, more recent than OS version, needed for packages without wheels
|
||||
|
||||
echo "### installing later version of pip inside $VENAME"
|
||||
$PYTHON -m pip install --upgrade pip
|
||||
$PYTHON -m pip install --upgrade setuptools
|
||||
|
||||
PIP=pip
|
||||
|
||||
$PIP list > original-pip.list
|
||||
$PIP freeze >original.txt
|
||||
|
||||
# we are in /home/$USER/$VENAME/
|
||||
ln -s ${TROGDIR} troggle
|
||||
ln -s ${TROGDIR}/../expoweb expoweb
|
||||
ln -s ${TROGDIR}/../loser loser
|
||||
ln -s ${TROGDIR}/../drawings drawings
|
||||
#ln -s ${TROGDIR}/../expofiles expofiles
|
||||
|
||||
# fudge for philip's machine
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
if [ -d ${TROGDIR}/../expofiles ]; then
|
||||
ln -s ${TROGDIR}/../expofiles expofiles
|
||||
else
|
||||
ln -s /mnt/d/EXPO/expofiles expofiles
|
||||
fi
|
||||
|
||||
echo "### Setting file permissions.. may take a while.."
|
||||
git config --global --add safe.directory '*'
|
||||
sudo chmod -R 777 *
|
||||
|
||||
echo "### links to expoweb, troggle etc. complete:"
|
||||
ls -tla
|
||||
echo "###"
|
||||
echo "### now installing ${TROGDIR}/${REQUIRE}"
|
||||
echo "###"
|
||||
cat ${TROGDIR}/${REQUIRE}
|
||||
|
||||
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
|
||||
# seen on wsl2 as well as wsl1
|
||||
# which ALSO ruins EXISTING permissions !
|
||||
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
|
||||
|
||||
read -p "Press any key to resume ..."
|
||||
$PIP install -r ${TROGDIR}/${REQUIRE}
|
||||
echo "### install from ${TROGDIR}/${REQUIRE} completed."
|
||||
echo '### '
|
||||
|
||||
$PIP install --pre django
|
||||
|
||||
$PIP freeze > $REQUIRE
|
||||
# so that we can track requirements more easily with git
|
||||
# because we do not install these with pip, but they are listed by the freeze command
|
||||
# Now find out what we actually installed by subtracting the stuff venv installed anyway
|
||||
sort original.txt > 1
|
||||
sort $REQUIRE >2
|
||||
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
|
||||
rm 1
|
||||
rm 2
|
||||
|
||||
# cp $REQUIRE requirements-$VENAME.txt
|
||||
cp $REQUIRE troggle/$REQUIRE
|
||||
|
||||
$PIP list > installed-pip.list
|
||||
$PIP list -o > installed-pip-o.list
|
||||
|
||||
REQ=installation-record
|
||||
mkdir $REQ
|
||||
|
||||
mv original.txt $REQ
|
||||
mv $REQUIRE $REQ
|
||||
mv original-pip.list $REQ
|
||||
mv installed-pip.list $REQ
|
||||
mv installed-pip-o.list $REQ
|
||||
cp fresh-$REQUIRE ../$REQUIRE
|
||||
mv fresh-$REQUIRE $REQ
|
||||
cp troggle/`basename "$0"` $REQ
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
python --version
|
||||
echo "Django version:`django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/$VENAME'
|
||||
'source bin/activate'
|
||||
'cd troggle'
|
||||
'django-admin'
|
||||
'python manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
## the tests may ALSO fail because of ssh and permissions errors
|
||||
|
||||
## So you will need to run
|
||||
$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
|
||||
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
|
||||
# because this chmod only takes effect then.
|
||||
|
||||
'python manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'python databaseReset.py reset $VENAME'
|
||||
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
Executable
+190
@@ -0,0 +1,190 @@
|
||||
#!/bin/bash
|
||||
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
|
||||
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog.sh
|
||||
|
||||
# If you are using Debian, then stick with the default version of python
|
||||
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.12
|
||||
|
||||
# NOW we set up troggle
|
||||
PYTHON=python3.12
|
||||
VENAME=p12d5 # python3.x and django version
|
||||
echo "** You are logged in as `id -u -n`"
|
||||
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
|
||||
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder (this script location): ${TROGDIR}"
|
||||
|
||||
if [[ "${VENAME:(-1)}" == 5 ]]; then
|
||||
echo "The variable '$VENAME' ends in 5."
|
||||
else
|
||||
echo "The variable '$VENAME' does not end in 5."
|
||||
fi
|
||||
|
||||
REQUIRE=requirements-$VENAME.txt
|
||||
|
||||
# Guard: abort when the requirements file is missing.
# Bug fix: the original tested `-d` (directory exists) with no negation, which
# is never true for a requirements *file*, so this guard could never fire.
if [ ! -f "$REQUIRE" ]; then
echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
# quote the arguments: an unquoted word starting with '#' begins a shell
# comment, so the original `echo ## ...` lines printed only blank lines
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
|
||||
# NOTE that when using a later or earlier version of python, you MUST also
|
||||
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
|
||||
|
||||
# NOW set up link from expo user folder
|
||||
# needed for WSL2
|
||||
echo Creating links from Linux filesystem user
|
||||
# These links only need making once, for many venv
|
||||
cd ~
|
||||
|
||||
if [ ! -d $VENAME ]; then
|
||||
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.12-venv installed and/or use a Ubuntu window)"
|
||||
$PYTHON -m venv $VENAME
|
||||
else
|
||||
echo "## /$VENAME/ already exists ! Delete it first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Activate the virtual env and see what the default packages are
|
||||
echo "### Activating $VENAME"
|
||||
|
||||
cd $VENAME
|
||||
echo "-- now in: ${PWD}"
|
||||
ls -tlarg
|
||||
source bin/activate
|
||||
echo $VIRTUAL_ENV
|
||||
if [ -d ~/$VENAME/bin ]; then
|
||||
echo "### Activating."
|
||||
else
|
||||
echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
|
||||
exit 1
|
||||
fi
|
||||
# update local version of pip, more recent than OS version
|
||||
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
|
||||
|
||||
# update local version of setuptools, more recent than OS version, needed for packages without wheels
|
||||
|
||||
echo "### installing later version of pip inside $VENAME"
|
||||
$PYTHON -m pip install --upgrade pip
|
||||
$PYTHON -m pip install --upgrade setuptools
|
||||
|
||||
$PYTHON -m pip list > original-pip.list
|
||||
$PYTHON -m pip freeze >original.txt
|
||||
|
||||
# we are in /home/$USER/$VENAME/
|
||||
ln -s ${TROGDIR} troggle
|
||||
ln -s ${TROGDIR}/../expoweb expoweb
|
||||
ln -s ${TROGDIR}/../loser loser
|
||||
ln -s ${TROGDIR}/../drawings drawings
|
||||
#ln -s ${TROGDIR}/../expofiles expofiles
|
||||
|
||||
# fudge for philip's machine
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
if [ -d ${TROGDIR}/../expofiles ]; then
|
||||
ln -s ${TROGDIR}/../expofiles expofiles
|
||||
else
|
||||
ln -s /mnt/d/EXPO/expofiles expofiles
|
||||
fi
|
||||
|
||||
echo "### Setting file permissions.. may take a while.."
|
||||
git config --global --add safe.directory '*'
|
||||
sudo chmod -R 777 *
|
||||
|
||||
echo "### links to expoweb, troggle etc. complete:"
|
||||
ls -tla
|
||||
echo "###"
|
||||
echo "### now installing ${TROGDIR}/${REQUIRE}"
|
||||
echo "###"
|
||||
cat ${TROGDIR}/${REQUIRE}
|
||||
cp -f ${TROGDIR}/${REQUIRE} ${TROGDIR}/${REQUIRE}.orig
|
||||
|
||||
read -p "Press any key to resume ..."
|
||||
$PYTHON -m pip install -r ${TROGDIR}/${REQUIRE}
|
||||
echo "### install from ${TROGDIR}/${REQUIRE} completed."
|
||||
echo '### '
|
||||
|
||||
# this installs pre-release django 5.0
|
||||
if [[ "${VENAME:(-1)}" == 5 ]]; then
|
||||
echo "### Installing pre-release version of Django"
|
||||
$PYTHON -m pip install --pre django
|
||||
fi
|
||||
|
||||
$PYTHON -m pip freeze > $REQUIRE.freeze
|
||||
# so that we can track requirements more easily with git
|
||||
# because we do not install these with pip, but they are listed by the freeze command
|
||||
# Now find out what we actually installed by subtracting the stuff venv installed anyway
|
||||
sort original.txt > 1
|
||||
sort $REQUIRE.freeze >2
|
||||
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
|
||||
rm 1
|
||||
rm 2
|
||||
|
||||
# cp $REQUIRE requirements-$VENAME.txt
|
||||
cp $REQUIRE troggle/$REQUIRE
|
||||
|
||||
$PYTHON -m pip list > installed-pip.list
|
||||
$PYTHON -m pip list -o > installed-pip-o.list
|
||||
|
||||
REQ=installation-record
|
||||
mkdir $REQ
|
||||
|
||||
mv $REQUIRE.freeze $REQ
|
||||
mv original.txt $REQ
|
||||
mv $REQUIRE $REQ
|
||||
mv original-pip.list $REQ
|
||||
mv installed-pip.list $REQ
|
||||
mv installed-pip-o.list $REQ
|
||||
# cp fresh-$REQUIRE ../$REQUIRE
|
||||
mv fresh-$REQUIRE $REQ
|
||||
cp troggle/`basename "$0"` $REQ
|
||||
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
python --version
|
||||
echo "Django version:`django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/$VENAME'
|
||||
'source bin/activate'
|
||||
'cd troggle'
|
||||
'django-admin'
|
||||
'python manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
## the tests may ALSO fail because of ssh and permissions errors
|
||||
|
||||
## So you will need to run
|
||||
$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
|
||||
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
|
||||
# because this chmod only takes effect then.
|
||||
|
||||
'python manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'python databaseReset.py reset $VENAME'
|
||||
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
+26
-144
@@ -1,171 +1,53 @@
|
||||
#!/bin/bash
|
||||
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
|
||||
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'
|
||||
# now using uv, unbelievably simpler.
|
||||
# Run this in a terminal in ~/expo above the troggle directory: 'bash ~/expo/venv-trog.sh'
|
||||
echo '-- Run this in a terminal in the directory above the troggle directory: "bash ~/expo/venv-trog.sh"'
|
||||
|
||||
# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
|
||||
# Expects an Ubuntu 24.04 with all the gubbins already installed
|
||||
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
|
||||
# use the script os-trog.sh
|
||||
|
||||
# If you are using Debian, then stick with the default version of python
|
||||
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11
|
||||
|
||||
# NOW we set up troggle
|
||||
PYTHON=python3.11
|
||||
VENAME=p11d4 # python3.x and django 4.2
|
||||
echo "** You are logged in as `id -u -n`"
|
||||
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
|
||||
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
|
||||
# use the script os-trog24.04.sh, running it in /home/username/
|
||||
python3 --version
|
||||
cd ~/expo/troggle
|
||||
echo "-- EXPO folder [current directory]: `pwd`"
|
||||
TROGDIR=$(cd $(dirname $0) && pwd)
|
||||
echo "-- Troggle folder (this script location): ${TROGDIR}"
|
||||
echo "-- Troggle folder: ${TROGDIR}"
|
||||
|
||||
# Guard: abort when requirements.txt is missing.
# Bug fix: the original tested `-d` (directory exists) with no negation, which
# is never true for a requirements *file*, so this guard could never fire.
if [ ! -f requirements.txt ]; then
echo "-- No requirements.txt found. You should be in the /troggle/ folder. Copy it from your most recent installation."
exit 1
fi
# quote the arguments: an unquoted word starting with '#' begins a shell
# comment, so the original `echo ## ...` lines printed only blank lines
echo "## Using requirements.txt :"
cat requirements.txt
echo "##"
|
||||
cp dev.toml pyproject.toml
|
||||
cp ~/expo/troggle/_deploy/wsl/localsettingsWSL.py ~/expo/troggle/localsettings.py
|
||||
|
||||
uv self update
|
||||
uv sync
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
|
||||
# NOTE that when using a later or earlier version of python, you MUST also
|
||||
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html
|
||||
|
||||
# NOW set up link from expo user folder
|
||||
# needed for WSL2
|
||||
echo Creating links from Linux filesystem user
|
||||
# These links only need making once, for many venv
|
||||
cd ~
|
||||
|
||||
if [ ! -d $VENAME ]; then
|
||||
echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
|
||||
$PYTHON -m venv $VENAME
|
||||
else
|
||||
echo "## /$VENAME/ already exists ! Delete it first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Activate the virtual env and see what the default packages are
|
||||
echo "### Activating $VENAME"
|
||||
|
||||
cd $VENAME
|
||||
echo "-- now in: ${PWD}"
|
||||
source bin/activate
|
||||
echo "### Activated."
|
||||
# update local version of pip, more recent than OS version
|
||||
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3
|
||||
|
||||
# update local version of setuptools, more recent than OS version, needed for packages without wheels
|
||||
|
||||
echo "### installing later version of pip inside $VENAME"
|
||||
$PYTHON -m pip install --upgrade pip
|
||||
$PYTHON -m pip install --upgrade setuptools
|
||||
|
||||
PIP=pip
|
||||
|
||||
$PIP list > original-pip.list
|
||||
$PIP freeze >original.txt
|
||||
|
||||
# we are in /home/$USER/$VENAME/
|
||||
ln -s ${TROGDIR} troggle
|
||||
ln -s ${TROGDIR}/../expoweb expoweb
|
||||
ln -s ${TROGDIR}/../loser loser
|
||||
ln -s ${TROGDIR}/../drawings drawings
|
||||
#ln -s ${TROGDIR}/../expofiles expofiles
|
||||
|
||||
# fudge for philip's machine
|
||||
# fudge for philip's laptop prior to M2 SSD upgrade
|
||||
if [ ! -d /mnt/d/EXPO ]; then
|
||||
sudo mkdir /mnt/d
|
||||
sudo mount -t drvfs D: /mnt/d
|
||||
fi
|
||||
|
||||
if [ -d ${TROGDIR}/../expofiles ]; then
|
||||
ln -s ${TROGDIR}/../expofiles expofiles
|
||||
else
|
||||
ln -s /mnt/d/EXPO/expofiles expofiles
|
||||
fi
|
||||
uv pip list
|
||||
|
||||
echo "### Setting file permissions.. may take a while.."
|
||||
git config --global --add safe.directory '*'
|
||||
sudo chmod -R 777 *
|
||||
|
||||
echo "### links to expoweb, troggle etc. complete:"
|
||||
ls -tla
|
||||
echo "###"
|
||||
echo "### now installing ${TROGDIR}/requirements.txt"
|
||||
echo "###"
|
||||
|
||||
# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
|
||||
# seen on wsl2 as well as wsl1
|
||||
# which ALSO ruins EXISTING permissions !
|
||||
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04
|
||||
|
||||
$PIP install -r ${TROGDIR}/requirements.txt
|
||||
echo '### install from requirements.txt completed.'
|
||||
echo '### '
|
||||
|
||||
$PIP freeze > requirements.txt
|
||||
# so that we can track requirements more easily with git
|
||||
# because we do not install these with pip, but they are listed by the freeze command
|
||||
# Now find out what we actually installed by subtracting the stuff venv installed anyway
|
||||
sort original.txt > 1
|
||||
sort requirements.txt >2
|
||||
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-requirements.txt
|
||||
rm 1
|
||||
rm 2
|
||||
|
||||
cp requirements.txt requirements-$VENAME.txt
|
||||
cp requirements-$VENAME.txt troggle/requirements-$VENAME.txt
|
||||
|
||||
$PIP list > installed-pip.list
|
||||
$PIP list -o > installed-pip-o.list
|
||||
|
||||
REQ=installation-record
|
||||
mkdir $REQ
|
||||
mv requirements-$VENAME.txt $REQ
|
||||
mv original.txt $REQ
|
||||
mv requirements.txt $REQ
|
||||
mv original-pip.list $REQ
|
||||
mv installed-pip.list $REQ
|
||||
mv installed-pip-o.list $REQ
|
||||
cp fresh-requirements.txt ../requirements.txt
|
||||
mv fresh-requirements.txt $REQ
|
||||
cp troggle/`basename "$0"` $REQ
|
||||
|
||||
|
||||
$PYTHON --version
|
||||
python --version
|
||||
echo "Django version:`django-admin --version`"
|
||||
echo "Django version:`uv run django-admin --version`"
|
||||
|
||||
echo "### Now do
|
||||
'[sudo service mysql start]'
|
||||
'[sudo service mariadb restart]'
|
||||
'[sudo mysql_secure_installation]'
|
||||
'cd ~/$VENAME'
|
||||
'source bin/activate'
|
||||
'cd troggle'
|
||||
'django-admin'
|
||||
'python manage.py check'
|
||||
'cd ~/expo/troggle'
|
||||
'uv run django-admin'
|
||||
'uv run manage.py check'
|
||||
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
|
||||
## you need to follow the Linux instructions.
|
||||
'ssh expo@expo.survex.com'
|
||||
|
||||
## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
|
||||
## the tests may ALSO fail because of ssh and permissions errors
|
||||
|
||||
## So you will need to run
|
||||
$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
|
||||
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
|
||||
# because this chmod only takes effect then.
|
||||
|
||||
'python manage.py test -v 2'
|
||||
'uv run manage.py test -v 2'
|
||||
'./pre-run.sh' (runs the tests again)
|
||||
|
||||
'python databaseReset.py reset $VENAME'
|
||||
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
'uv run databaseReset.py reset INIT'
|
||||
'uv run manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
|
||||
"
|
||||
if [ ! -d /mnt/d/expofiles ]; then
|
||||
echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
fi
|
||||
# if [ ! -d /mnt/d/expofiles ]; then
|
||||
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
|
||||
# fi
|
||||
Executable
+112
@@ -0,0 +1,112 @@
|
||||
#!/bin/bash
# os-trog.sh - OS-level package setup for a troggle development machine.
# Run this in a terminal in the troggle directory: 'bash os-trog.sh'
# On WSL, Shift-click in the file explorer on the troggle folder and choose
# 'Open Linux shell here' to get a Linux command line.
# FIX: the message previously named the wrong script (venv-trog.sh).
echo 'Run this in a terminal in the troggle directory: "bash os-trog.sh"'
cat /etc/os-release
# Expects an Ubuntu 22.04 relatively clean install.

sudo apt install python-is-python3 -y
# FIX: the trailing note was previously bare text passed as arguments to python;
# it belongs in a comment.
python --version   # ensure python is an alias for python3, not python2.7
sudo apt update -y
sudo apt dist-upgrade -y
sudo apt autoremove -y
sudo apt install sqlite3 -y
sudo apt install python3-pip -y

# survex-aven installs a shed-load of other stuff: binutils etc.
# NOTE(review): in the original this install was fused into the comment above
# and never ran - confirm whether it is wanted:
# sudo apt install survex-aven -y
sudo apt install git openssh-client -y
|
||||
# On a clean debian 11 (bullseye) installation with Xfce & ssh,

# on ubuntu 20.04:
# Package sftp is not available, but is referred to by another package.
# This may mean that the package is missing, has been obsoleted, or
# is only available from another source
# E: Package 'sftp' has no installation candidate


# On Ubuntu 20.04, with python10, the pip install fails.
# So you need to get the pip from source
# sudo curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
# but really you should be using 22.04
# and also, if using debian,
# sudo python3.10 -m pip install -U virtualenv

# do not actually use this any more
# NOTE(review): the comment above says this is unused - confirm whether the
# 'expo' account creation below should be removed.
sudo useradd expo
sudo usermod -a -G sudo expo # to put expo in sudoers group, re-login required

# as debian does not install everything that ubuntu does, you need:
sudo apt install python3-venv -y
sudo apt install python3-dev -y

# default since 22.04
# sudo apt install python3.10
sudo apt install python3.11-venv -y
sudo apt install python3.11-dev -y
# register python3.11 as 'python' at low priority 1 (python3.12 is registered
# later at priority 7, so it will win)
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1

# database server plus the client headers needed to build Python DB bindings
sudo apt install mariadb-server -y
sudo apt install libmariadb-dev -y

sudo python -m pip install --upgrade pip

# NOTE(review): per the error text quoted above, 'sftp' has no installation
# candidate on Ubuntu 20.04, so this line may fail harmlessly.
sudo apt install sftp -y
|
||||
echo '###'
echo '### NOW INSTALLING tunnel and therion, go and have a cup of tea. Or a 3-course meal.'
echo '###'
# cave-survey drawing tools (they pull in large dependency sets, hence the tea)
sudo apt install tunnelx therion -y
sudo apt install gedit

# Go to https://expo.survex.com/handbook/troggle/troglaptop.html#dbtools
# sudo service mysql start

echo "### python 3.12"
# the deadsnakes PPA provides newer pythons than stock Ubuntu ships
sudo add-apt-repository ppa:deadsnakes/ppa -y
sudo apt update
sudo apt install python3.12-full -y
sudo apt install python3.12-distutils -y
sudo apt install python3.12-venv -y

# register 3.12 at priority 7 (beats the 3.11 entry at priority 1), then let
# the operator confirm the selection interactively
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.12 7
sudo update-alternatives --config python
# WARNING: repointing /usr/bin/python3 at the alternatives link affects every
# system tool that runs python3 - deliberate here, but risky.
sudo rm /usr/bin/python3
sudo ln -s /etc/alternatives/python /usr/bin/python3
sudo apt dist-upgrade


# install VS code - but ONLY on a native ubuntu install, NOT in WSL
sudo apt install software-properties-common apt-transport-https wget gpg
wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
sudo apt update
sudo apt install code

#this next crashes, why?
#sudo python -m pip install --upgrade pip
|
||||
|
||||
# desktop / GIS / git helper tools
sudo apt install gpsprune qgis gedit tig gitg meld rsync

# placeholder identity - each developer must replace these with their own
git config --global user.email "you@example.com"
git config --global user.name "Your Name"
git config --global pull.rebase true

echo '###'
echo '### Currently set version of python'
python --version

echo '###'
echo '### Now YOU have to configure the git settings for YOURSELF (not "expo")'
# FIX: the apostrophe in "can't" previously terminated the single-quoted
# string and mangled the printed message; use double quotes instead.
echo "### because you can't clone the repos without a key"
# cd ~/expo

git clone ssh://expo@expo.survex.com/home/expo/troggle
git clone ssh://expo@expo.survex.com/home/expo/loser
git clone ssh://expo@expo.survex.com/home/expo/expoweb
git clone ssh://expo@expo.survex.com/home/expo/drawings

mkdir expofiles
# mirror expofiles, skipping the huge photo/video trees on the first pass
rsync -azv --delete-after --prune-empty-dirs --exclude="photos" --exclude="video" expo@expo.survex.com:expofiles/ expofiles
# FIX: the hostname contained a pasted control character ("expo.s^Cvex.com");
# restored to expo.survex.com.
rsync -azv --exclude="*.jpg.xml" --exclude="*.jpeg.xml" --exclude="*.JPG.xml" expo@expo.survex.com:expofiles/photos/ expofiles/photos
|
||||
|
||||
Executable
+192
@@ -0,0 +1,192 @@
|
||||
#!/bin/bash
# venv-trog.sh - create and populate a Python virtualenv for troggle development.
# footled lots to make this work with python 3.10 & 3.11 and WSL1 and WSL2 on Ubuntu 22.04
# Run this in a terminal in the troggle directory: 'bash venv-trog.sh'
echo '-- Run this in a terminal in the real troggle directory: "bash venv-trog.sh"'

# Expects an Ubuntu 22.04 (or 20.04) relatively clean install.
# If you have not already installed these on your clean Ubuntu install DO THIS FIRST
# use the script os-trog.sh

# If you are using Debian, then stick with the default version of python
# If you are using Ubuntu, then it is easy to use a later version of python, e.g. 3.11

# NOW we set up troggle
PYTHON=python3.12
VENAME=p12d5 # python3.x and django 4.2
echo "** You are logged in as `id -u -n`"
echo "The 50MB pip cache will be in /home/`id -u -n`/.cache/"
echo "The 150MB venv will created in /home/`id -u -n`/$VENAME/"
TROGDIR=$(cd $(dirname $0) && pwd)
echo "-- Troggle folder (this script location): ${TROGDIR}"

REQUIRE=requirements-$VENAME.txt

# FIX: the original tested '[ -d $REQUIRE ]' (is it a directory?), so the
# "file missing" guard could never work; test for a missing regular file.
if [ ! -f "$REQUIRE" ]; then
    echo "-- No ${REQUIRE} found. You should be in the /troggle/ folder. Copy it from your most recent installation."
    exit 1
fi
# FIX: unquoted '##' starts a shell comment, so the original
# 'echo ## Using ...' printed only a blank line; quote the strings.
echo "## Using $REQUIRE :"
cat $REQUIRE
echo "##"
|
||||
|
||||
|
||||
$PYTHON --version

# NOTE that when using a later or earlier verison of python, you MUST also
# use the allowed version of Pillow, see https://pillow.readthedocs.io/en/latest/installation.html

# NOW set up link from expo user folder
# needed for WSL2
echo Creating links from Linux filesystem user
# These links only need making once, for many venv
cd ~

# refuse to clobber an existing venv of the same name
if [ ! -d $VENAME ]; then
    echo "## Creating venv $VENAME. (If this fails with a pip error, you need to ensure you have python3.11-venv installed and/or use a Ubuntu window)"
    $PYTHON -m venv $VENAME
else
    echo "## /$VENAME/ already exists ! Delete it first."
    exit 1
fi

# Activate the virtual env and see what the default packages are
echo "### Activating $VENAME"

cd $VENAME
echo "-- now in: ${PWD}"
ls -tlarg
source bin/activate
echo $VIRTUAL_ENV
# sanity-check that venv creation actually produced bin/ before continuing
if [ -d ~/$VENAME/bin ]; then
    echo "### Activating."
else
    echo "## ~/$VENAME/bin does not exist. FAILed to create venv properly."
    exit 1
fi
|
||||
# update local version of pip, more recent than OS version
# debian bullseye installs pip 20.3.4 which barfs, we want >22.0.3

# update local version of setuptools, more recent than OS version, needed for packages without wheels

echo "### installing later version of pip inside $VENAME"
$PYTHON -m pip install --upgrade pip
$PYTHON -m pip install --upgrade setuptools

# inside the activated venv, plain 'pip' resolves to the venv's pip
PIP=pip

# record the pristine package set so we can diff against it after installing
$PIP list > original-pip.list
$PIP freeze >original.txt

# we are in /home/$USER/$VENAME/
ln -s ${TROGDIR} troggle
ln -s ${TROGDIR}/../expoweb expoweb
ln -s ${TROGDIR}/../loser loser
ln -s ${TROGDIR}/../drawings drawings
#ln -s ${TROGDIR}/../expofiles expofiles

# fudge for philip's machine: mount the Windows D: drive under WSL if absent
if [ ! -d /mnt/d/EXPO ]; then
    sudo mkdir /mnt/d
    sudo mount -t drvfs D: /mnt/d
fi

# prefer a sibling expofiles checkout; fall back to the mounted Windows drive
if [ -d ${TROGDIR}/../expofiles ]; then
    ln -s ${TROGDIR}/../expofiles expofiles
else
    ln -s /mnt/d/EXPO/expofiles expofiles
fi
|
||||
|
||||
echo "### Setting file permissions.. may take a while.."
git config --global --add safe.directory '*'
# NOTE(review): world-writable permissions are deliberate (see the permissions
# failure discussed below) but are a blunt instrument.
sudo chmod -R 777 *

echo "### links to expoweb, troggle etc. complete:"
ls -tla
echo "###"
echo "### now installing ${TROGDIR}/${REQUIRE}"
echo "###"
cat ${TROGDIR}/${REQUIRE}

# NOW THERE IS A PERMISSIONS FAILURE THAT DIDN'T HAPPEN BEFORE
# seen on wsl2 as well as wsl1
# which ALSO ruins EXISTING permissions !
# Guessing it is to do with pip not liking non-standard py 3.11 installation on Ubuntu 22.04

# pause so the operator can inspect state before the big install
read -p "Press any key to resume ..."
$PIP install -r ${TROGDIR}/${REQUIRE}
echo "### install from ${TROGDIR}/${REQUIRE} completed."
echo '### '

# this installs pre-release django 5.0
$PIP install --pre django
|
||||
|
||||
# snapshot the final package set so it can be tracked in git
$PIP freeze > $REQUIRE
# so that we can track requirements more easily with git
# because we do not install these with pip, but they are listed by the freeze command
# Now find out what we actually installed by subtracting the stuff venv installed anyway
sort original.txt > 1
sort $REQUIRE >2
# lines unique to either file = packages added by this installation run
comm -3 1 2 --check-order | awk '{ print $1}'>fresh-$REQUIRE
rm 1
rm 2

# cp $REQUIRE requirements-$VENAME.txt
cp $REQUIRE troggle/$REQUIRE

$PIP list > installed-pip.list
$PIP list -o > installed-pip-o.list

# archive all the bookkeeping files in one folder
REQ=installation-record
mkdir $REQ

mv original.txt $REQ
mv $REQUIRE $REQ
mv original-pip.list $REQ
mv installed-pip.list $REQ
mv installed-pip-o.list $REQ
cp fresh-$REQUIRE ../$REQUIRE
mv fresh-$REQUIRE $REQ
# keep a copy of this very script alongside the record of what it installed
cp troggle/`basename "$0"` $REQ
|
||||
|
||||
# install VS code
sudo apt install software-properties-common apt-transport-https wget gpg
wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
sudo install -D -o root -g root -m 644 packages.microsoft.gpg /etc/apt/keyrings/packages.microsoft.gpg
sudo sh -c 'echo "deb [arch=amd64,arm64,armhf signed-by=/etc/apt/keyrings/packages.microsoft.gpg] https://packages.microsoft.com/repos/code stable main" > /etc/apt/sources.list.d/vscode.list'
sudo apt update
sudo apt install code

# report the interpreter and Django versions actually visible in the venv
$PYTHON --version
python --version
echo "Django version:`django-admin --version`"

# multi-line reminder of the manual follow-up steps; $VENAME expands here,
# the quoted 'commands' are display-only text
echo "### Now do
'[sudo service mysql start]'
'[sudo service mariadb restart]'
'[sudo mysql_secure_installation]'
'cd ~/$VENAME'
'source bin/activate'
'cd troggle'
'django-admin'
'python manage.py check'
## this tests if you have set up ssh correcting. Refer to documentation https://expo.survex.com/handbook/computing/keyexchange.html
## you need to follow the Linux instructions.
'ssh expo@expo.survex.com'

## the next tests will fail unless ~/expofiles is set correctly to a folder on your machine
## the tests may ALSO fail because of ssh and permissions errors

## So you will need to run
$sudo chown -Rhv philip:philip ~/$VENAME (if your username is philip)
# and then REBOOT (or at least, exit WSL and terminate and restart WSL)
# because this chmod only takes effect then.

'python manage.py test -v 2'
'./pre-run.sh' (runs the tests again)

'python databaseReset.py reset $VENAME'
'python manage.py runserver 0.0.0.0:8000 (and allow access when the firewall window pops up)'
"
# if [ ! -d /mnt/d/expofiles ]; then
# echo '### No valid expofiles directory on /mnt/d . Fix this before any tests will work.'
# fi
|
||||
@@ -0,0 +1,63 @@
|
||||
<!-- expobase.html - this text visible because this template has been included -->
|
||||
|
||||
<html lang="en">
|
||||
<head>
|
||||
<script>document.interestCohort = null;</script> <!-- Turn off Google FLoC -->
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<title>Directory not found photoupload/</title>
|
||||
<link rel="stylesheet" type="text/css" href="/css/main2.css" />
|
||||
|
||||
</head>
|
||||
|
||||
|
||||
|
||||
<body >
|
||||
|
||||
<h1>Directory not found 'photoupload/'</h1>
|
||||
<h3>Click here: <a href="/photoupload">/photoupload</a> </h3>
|
||||
<p>i.e. without the final '/'</p>
|
||||
|
||||
|
||||
<!-- the year now -->
|
||||
|
||||
|
||||
<div id="menu">
|
||||
<ul id="menulinks">
|
||||
<li><a href="/index.htm">Home</a></li>
|
||||
<li><a href="/handbook/index.htm">Handbook</a>
|
||||
</li>
|
||||
<li><a href="/handbook/computing/onlinesystems.html">Online systems</a></li>
|
||||
<li><a href="/handbook/logbooks.html#form">Make Logbook Entry</a></li>
|
||||
|
||||
|
||||
<li><a href="/caves">Caves</a>
|
||||
</li>
|
||||
<li><a href="/infodx.htm">Site index</a></li>
|
||||
<li><a href="/pubs.htm">Reports</a></li>
|
||||
|
||||
|
||||
<li><a href="https://expo.survex.com/kanboard/board/2">Kanboard</a></li>
|
||||
|
||||
<li><a href="/handbook/troggle/training/trogbegin.html">Troggle</a></li>
|
||||
|
||||
<li><form name=P method=get
|
||||
|
||||
action="https://expo.survex.com/search"
|
||||
|
||||
|
||||
target="_top">
|
||||
<input id="omega-autofocus" type=search name=P size=8 autofocus>
|
||||
<input type=submit value="Search"></form></li>
|
||||
|
||||
|
||||
|
||||
<li> <b style="color:red">RUNNING ON LOCALSERVER</b> <br>slug:<br>newslug:<br>url:
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -0,0 +1,147 @@
|
||||
<?xml version="1.0" encoding="windows-1252"?>
|
||||
<gpx version="1.0" creator="survex 1.4.1 (aven) - https://survex.com/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/0" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">
|
||||
<name>1626/3-as-trk</name>
|
||||
<desc>just the 1623/6 border</desc>
|
||||
<trk>
|
||||
<name>1626/3-as-trk</name>
|
||||
<number>1</number>
|
||||
<trkseg>
|
||||
|
||||
|
||||
</trkseg>
|
||||
<trkpt lon="13.72476763" lat="47.69048595"><name>1626-22</name></trkpt>
|
||||
<trkpt lon="13.72501998" lat="47.69039518"><name>1626-21</name></trkpt>
|
||||
<trkpt lon="13.72590820" lat="47.68982018"><name>1626-20</name></trkpt>
|
||||
<trkpt lon="13.72677225" lat="47.68921909"><name>1626-19</name></trkpt>
|
||||
<trkpt lon="13.73015049" lat="47.68801941"><name>1626-18</name></trkpt>
|
||||
<trkpt lon="13.73155441" lat="47.68747860"><name>1626-17</name></trkpt>
|
||||
<trkpt lon="13.73268210" lat="47.68670564"><name>1626-16</name></trkpt>
|
||||
<trkpt lon="13.73337630" lat="47.68604764"><name>1626-15</name></trkpt>
|
||||
<trkpt lon="13.73426370" lat="47.68554896"><name>1626-14</name></trkpt>
|
||||
<trkpt lon="13.73512238" lat="47.68532296"><name>1626-13</name></trkpt>
|
||||
<trkpt lon="13.73600521" lat="47.68529913"><name>1626-12</name></trkpt>
|
||||
<trkpt lon="13.73748868" lat="47.68559800"><name>1626-11</name></trkpt>
|
||||
<trkpt lon="13.73898788" lat="47.68569259"><name>1626-10</name></trkpt>
|
||||
<trkpt lon="13.74023471" lat="47.68536616"><name>1626-9</name></trkpt>
|
||||
<trkpt lon="13.74123284" lat="47.68474835"><name>1626-8</name></trkpt>
|
||||
<trkpt lon="13.74225034" lat="47.68387240"><name>1626-7</name></trkpt>
|
||||
<trkpt lon="13.74347406" lat="47.68315752"><name>1626-6</name></trkpt>
|
||||
<trkpt lon="13.74703874" lat="47.68220764"><name>1626-5</name></trkpt>
|
||||
<trkpt lon="13.74868173" lat="47.68196967"><name>1626-4</name></trkpt>
|
||||
<trkpt lon="13.75033548" lat="47.68175412"><name>1626-3</name></trkpt>
|
||||
<trkpt lon="13.75123753" lat="47.68162591"><name>1626-2</name></trkpt>
|
||||
<trkpt lon="13.75207758" lat="47.68141505"><name>1626-1</name></trkpt>
|
||||
<trkpt lon="13.75343505" lat="47.68061505"><name>1626-323</name></trkpt>
|
||||
<trkpt lon="13.75465350" lat="47.67959704"><name>1626-322</name></trkpt>
|
||||
<trkpt lon="13.75557245" lat="47.67959630"><name>1626-321</name></trkpt>
|
||||
<trkpt lon="13.75654897" lat="47.67963415"><name>1626-320</name></trkpt>
|
||||
<trkpt lon="13.75823156" lat="47.67969809"><name>1626-319</name></trkpt>
|
||||
<trkpt lon="13.75990247" lat="47.67964786"><name>1626-318</name></trkpt>
|
||||
<trkpt lon="13.76114115" lat="47.67949668"><name>1626-317</name></trkpt>
|
||||
<trkpt lon="13.76230278" lat="47.67920634"><name>1626-316</name></trkpt>
|
||||
<trkpt lon="13.76356009" lat="47.67856495"><name>1626-315</name></trkpt>
|
||||
<trkpt lon="13.76450809" lat="47.67772225"><name>1626-314</name></trkpt>
|
||||
<trkpt lon="13.76486443" lat="47.67683862"><name>1626-313</name></trkpt>
|
||||
<trkpt lon="13.76527297" lat="47.67598354"><name>1626-312</name></trkpt>
|
||||
<trkpt lon="13.76599945" lat="47.67558241"><name>1626-311</name></trkpt>
|
||||
<trkpt lon="13.76642282" lat="47.67546486"><name>1626-310</name></trkpt>
|
||||
<trkpt lon="13.76818947" lat="47.67486721"><name>1626-309</name></trkpt>
|
||||
<trkpt lon="13.76998479" lat="47.67441976"><name>1626-308</name></trkpt>
|
||||
<trkpt lon="13.77091587" lat="47.67412570"><name>1626-307</name></trkpt>
|
||||
<trkpt lon="13.77186645" lat="47.67408972"><name>1626-306</name></trkpt>
|
||||
<trkpt lon="13.77271376" lat="47.67446428"><name>1626-305</name></trkpt>
|
||||
<trkpt lon="13.77335365" lat="47.67502035"><name>1626-304</name></trkpt>
|
||||
<trkpt lon="13.77422818" lat="47.67720260"><name>1626-303</name></trkpt>
|
||||
<trkpt lon="13.77422109" lat="47.67810825"><name>1623-77</name></trkpt>
|
||||
<trkpt lon="13.77446007" lat="47.67897442"><name>1623-78</name></trkpt>
|
||||
<trkpt lon="13.77545908" lat="47.68010650"><name>1623-79</name></trkpt>
|
||||
<trkpt lon="13.77660561" lat="47.68118866"><name>1623-80</name></trkpt>
|
||||
<trkpt lon="13.77660561" lat="47.68118866"><name>1626-299</name></trkpt>
|
||||
<trkpt lon="13.77914299" lat="47.68294347"><name>1623-81</name></trkpt>
|
||||
<trkpt lon="13.77914299" lat="47.68294347"><name>1626-298</name></trkpt>
|
||||
<trkpt lon="13.78220453" lat="47.68446155"><name>1623-82</name></trkpt>
|
||||
<trkpt lon="13.78220453" lat="47.68446155"><name>1626-297</name></trkpt>
|
||||
<trkpt lon="13.78519802" lat="47.68608852"><name>1623-83</name></trkpt>
|
||||
<trkpt lon="13.78519802" lat="47.68608852"><name>1626-296</name></trkpt>
|
||||
<trkpt lon="13.78804022" lat="47.68759918"><name>1623-84</name></trkpt>
|
||||
<trkpt lon="13.78804022" lat="47.68759918"><name>1626-295</name></trkpt>
|
||||
<trkpt lon="13.79091566" lat="47.68909077"><name>1623-85</name></trkpt>
|
||||
<trkpt lon="13.79091566" lat="47.68909077"><name>1626-294</name></trkpt>
|
||||
<trkpt lon="13.79397916" lat="47.69043850"><name>1623-86</name></trkpt>
|
||||
<trkpt lon="13.79397916" lat="47.69043850"><name>1626-293</name></trkpt>
|
||||
<trkpt lon="13.79770284" lat="47.69140571"><name>1623-87</name></trkpt>
|
||||
<trkpt lon="13.79770284" lat="47.69140571"><name>1626-292</name></trkpt>
|
||||
<trkpt lon="13.80090280" lat="47.69218602"><name>1623-88</name></trkpt>
|
||||
<trkpt lon="13.80090280" lat="47.69218602"><name>1626-291</name></trkpt>
|
||||
<trkpt lon="13.80408569" lat="47.69312830"><name>1623-89</name></trkpt>
|
||||
<trkpt lon="13.80408569" lat="47.69312830"><name>1626-290</name></trkpt>
|
||||
<trkpt lon="13.80767474" lat="47.69419465"><name>1623-90</name></trkpt>
|
||||
<trkpt lon="13.80767474" lat="47.69419465"><name>1626-289</name></trkpt>
|
||||
<trkpt lon="13.81114611" lat="47.69504470"><name>1623-91</name></trkpt>
|
||||
<trkpt lon="13.81114611" lat="47.69504470"><name>1626-288</name></trkpt>
|
||||
<trkpt lon="13.81454852" lat="47.69606670"><name>1623-92</name></trkpt>
|
||||
<trkpt lon="13.81454852" lat="47.69606670"><name>1626-287</name></trkpt>
|
||||
<trkpt lon="13.81577650" lat="47.69675143"><name>1623-93</name></trkpt>
|
||||
<trkpt lon="13.81577650" lat="47.69675143"><name>1626-286</name></trkpt>
|
||||
<trkpt lon="13.81676629" lat="47.69761518"><name>1623-94</name></trkpt>
|
||||
<trkpt lon="13.81676629" lat="47.69761518"><name>1626-285</name></trkpt>
|
||||
<trkpt lon="13.81854114" lat="47.69981221"><name>1623-95</name></trkpt>
|
||||
<trkpt lon="13.81854114" lat="47.69981221"><name>1626-284</name></trkpt>
|
||||
<trkpt lon="13.81950232" lat="47.70218349"><name>1623-96</name></trkpt>
|
||||
<trkpt lon="13.81950232" lat="47.70218349"><name>1626-283</name></trkpt>
|
||||
<trkpt lon="13.81982069" lat="47.70317869"><name>1623-97</name></trkpt>
|
||||
<trkpt lon="13.81982069" lat="47.70317869"><name>1626-282</name></trkpt>
|
||||
<trkpt lon="13.82043141" lat="47.70409618"><name>1623-98</name></trkpt>
|
||||
<trkpt lon="13.82043141" lat="47.70409618"><name>1626-281</name></trkpt>
|
||||
<trkpt lon="13.82122594" lat="47.70471970"><name>1623-99</name></trkpt>
|
||||
<trkpt lon="13.82122594" lat="47.70471970"><name>1626-280</name></trkpt>
|
||||
<trkpt lon="13.82224271" lat="47.70515970"><name>1623-100</name></trkpt>
|
||||
<trkpt lon="13.82224271" lat="47.70515970"><name>1626-279</name></trkpt>
|
||||
<trkpt lon="13.82402883" lat="47.70536617"><name>1623-101</name></trkpt>
|
||||
<trkpt lon="13.82402883" lat="47.70536617"><name>1626-278</name></trkpt>
|
||||
<trkpt lon="13.82586781" lat="47.70524409"><name>1623-102</name></trkpt>
|
||||
<trkpt lon="13.82586781" lat="47.70524409"><name>1626-277</name></trkpt>
|
||||
<trkpt lon="13.82920653" lat="47.70492586"><name>1623-103</name></trkpt>
|
||||
<trkpt lon="13.82920653" lat="47.70492586"><name>1626-276</name></trkpt>
|
||||
<trkpt lon="13.83102900" lat="47.70489099"><name>1623-104</name></trkpt>
|
||||
<trkpt lon="13.83102900" lat="47.70489099"><name>1626-275</name></trkpt>
|
||||
<trkpt lon="13.83240838" lat="47.70463501"><name>1623-105</name></trkpt>
|
||||
<trkpt lon="13.83240838" lat="47.70463501"><name>1626-274</name></trkpt>
|
||||
<trkpt lon="13.83327985" lat="47.70436234"><name>1623-106</name></trkpt>
|
||||
<trkpt lon="13.83327985" lat="47.70436234"><name>1626-273</name></trkpt>
|
||||
<trkpt lon="13.83412633" lat="47.70441440"><name>1623-107</name></trkpt>
|
||||
<trkpt lon="13.83412633" lat="47.70441440"><name>1626-272</name></trkpt>
|
||||
<trkpt lon="13.83813615" lat="47.70460471"><name>1623-108</name></trkpt>
|
||||
<trkpt lon="13.83813615" lat="47.70460471"><name>1626-271</name></trkpt>
|
||||
<trkpt lon="13.84151391" lat="47.70418600"><name>1623-109</name></trkpt>
|
||||
<trkpt lon="13.84151391" lat="47.70418600"><name>1626-270</name></trkpt>
|
||||
<trkpt lon="13.84313423" lat="47.70406198"><name>1623-110</name></trkpt>
|
||||
<trkpt lon="13.84313423" lat="47.70406198"><name>1626-269</name></trkpt>
|
||||
<trkpt lon="13.84477407" lat="47.70401966"><name>1623-111</name></trkpt>
|
||||
<trkpt lon="13.84477407" lat="47.70401966"><name>1626-268</name></trkpt>
|
||||
<trkpt lon="13.84557828" lat="47.70383169"><name>1623-112</name></trkpt>
|
||||
<trkpt lon="13.84557828" lat="47.70383169"><name>1626-267</name></trkpt>
|
||||
<trkpt lon="13.84638546" lat="47.70367164"><name>1623-113</name></trkpt>
|
||||
<trkpt lon="13.84638546" lat="47.70367164"><name>1626-266</name></trkpt>
|
||||
<trkpt lon="13.84734801" lat="47.70371947"><name>1623-114</name></trkpt>
|
||||
<trkpt lon="13.84734801" lat="47.70371947"><name>1626-265</name></trkpt>
|
||||
<trkpt lon="13.84830585" lat="47.70370893"><name>1623-115</name></trkpt>
|
||||
<trkpt lon="13.84830585" lat="47.70370893"><name>1626-264</name></trkpt>
|
||||
<trkpt lon="13.84978004" lat="47.70334887"><name>1623-116</name></trkpt>
|
||||
<trkpt lon="13.84978004" lat="47.70334887"><name>1626-263</name></trkpt>
|
||||
<trkpt lon="13.85115245" lat="47.70281564"><name>1623-117</name></trkpt>
|
||||
<trkpt lon="13.85115245" lat="47.70281564"><name>1626-262</name></trkpt>
|
||||
<trkpt lon="13.85459377" lat="47.70171281"><name>1623-118</name></trkpt>
|
||||
<trkpt lon="13.85459377" lat="47.70171281"><name>1626-261</name></trkpt>
|
||||
<trkpt lon="13.85613331" lat="47.70138292"><name>1623-119</name></trkpt>
|
||||
<trkpt lon="13.85613331" lat="47.70138292"><name>1626-260</name></trkpt>
|
||||
<trkpt lon="13.85761344" lat="47.70097144"><name>1623-120</name></trkpt>
|
||||
<trkpt lon="13.85761344" lat="47.70097144"><name>1626-259</name></trkpt>
|
||||
<trkpt lon="13.85887862" lat="47.70064823"><name>1623-121</name></trkpt>
|
||||
<trkpt lon="13.85887862" lat="47.70064823"><name>1626-258</name></trkpt>
|
||||
<trkpt lon="13.85975787" lat="47.70038236"><name>1623-122</name></trkpt>
|
||||
<trkpt lon="13.85975787" lat="47.70038236"><name>1626-257</name></trkpt>
|
||||
<trkpt lon="13.86031535" lat="47.70002466"><name>1623-123</name></trkpt>
|
||||
<trkpt lon="13.86031535" lat="47.70002466"><name>1626-256</name></trkpt>
|
||||
</trk>
|
||||
</gpx>
|
||||
@@ -0,0 +1,156 @@
|
||||
"""Simple test factories used to replace fixtures in tests.
|
||||
|
||||
These avoid adding external dependencies like factory_boy and provide small
|
||||
helpers to create models with predictable defaults and optional PKs so tests
|
||||
that relied on fixture PKs continue to work.
|
||||
"""
|
||||
from troggle.core.models.troggle import Expedition, Person, PersonExpedition
|
||||
from troggle.core.models.caves import Cave
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
|
||||
def create_user(username, first_name=None, last_name="Caver", is_superuser=False):
    """Create and save a Django ``User`` with predictable test defaults.

    The email address is derived from *username* in the reserved
    ``example.test`` domain and the password is always ``"secretword"``.
    """
    user = User()
    user.username = username
    user.email = f"{username}@example.test"
    # falls back to the username for any falsy first_name (fixture behaviour)
    user.first_name = first_name or username
    user.last_name = last_name
    user.is_superuser = is_superuser
    user.set_password("secretword")
    user.save()
    return user
|
||||
|
||||
|
||||
def create_expedition(pk=None, year="2019", name="CUCC expo 2019"):
    """Create and save an ``Expedition``, optionally forcing its primary key.

    Forcing ``pk`` lets tests that relied on fixture PKs keep working.
    """
    expo = Expedition()
    if pk is not None:
        expo.pk = pk
    expo.name = name
    expo.year = year
    expo.save()
    return expo
|
||||
|
||||
|
||||
def create_person(pk=None, first_name="Michael", last_name="Sargent", fullname="Michael Sargent", slug="michael-sargent", blurb=None):
    """Create and save a ``Person`` with fixture-like defaults.

    ``pk`` may be forced so tests that relied on fixture primary keys keep
    working.  When *blurb* is ``None`` the historical Michael Sargent blurb
    HTML (copied from the old fixture row) is used so that person pages have
    something to render.
    """
    # default blurb HTML, reproduced verbatim from the old fixture
    ms_blurb = """\n\n\n\n\n\n<p><img class=\"onleft\" src=\"/folk/i/mikey0.jpg\">\n
<img class=\"onright\" src=\"/folk/i/mikey1.jpg\" height=\"400\"\nalt=\"\" />\n
<b>Michael Sargent</b> CUCC<br />\nExpeditions 2014, 15, 16, 17, 18, 19.\n
<p>The first second-generation expo caver in 2014, later members of this exclusive group
were Dan Lenartowicz and Sarah Connolly.\n\n\n
<img class=\"onleft\" src=\"/folk/i/michaelsargent.jpg\">\n<im\n\n
<hr style=\"clear: both\" /><p class=\"caption\">Pre-expo (pre-student)
photos from President's Invite (OUCC) \nand first abseiling instruction (Cambridge).</p>\n
"""
    p = Person()
    if pk is not None:
        # honour an explicit primary key (fixture compatibility)
        p.pk = pk
    p.first_name = first_name
    p.last_name = last_name
    p.fullname = fullname
    p.slug = slug
    # provide a small default blurb consistent with fixtures for pages
    p.blurb = blurb if blurb is not None else ms_blurb
    p.save()
    return p
|
||||
|
||||
|
||||
def create_personexpedition(pk=None, expedition=None, person=None):
|
||||
pe = PersonExpedition()
|
||||
if pk is not None:
|
||||
pe.pk = pk
|
||||
pe.expedition = expedition
|
||||
pe.person = person
|
||||
pe.save()
|
||||
return pe
|
||||
|
||||
|
||||
def create_cave(
|
||||
pk=None,
|
||||
areacode="1623",
|
||||
kataster_number="115",
|
||||
filename="1623-115.html",
|
||||
description_file="1623/115.htm",
|
||||
underground_description="",
|
||||
notes="",
|
||||
official_name="",
|
||||
non_public=False,
|
||||
kataster_code="",
|
||||
unofficial_number="",
|
||||
explorers="",
|
||||
equipment="",
|
||||
references="",
|
||||
survey="",
|
||||
length="",
|
||||
depth="",
|
||||
extent="",
|
||||
survex_file="",
|
||||
):
|
||||
c = Cave()
|
||||
if pk is not None:
|
||||
c.pk = pk
|
||||
c.areacode = areacode
|
||||
c.non_public = non_public
|
||||
c.kataster_code = kataster_code
|
||||
c.kataster_number = kataster_number
|
||||
c.unofficial_number = unofficial_number
|
||||
c.explorers = explorers
|
||||
c.official_name = official_name
|
||||
c.filename = filename
|
||||
c.description_file = description_file
|
||||
c.underground_description = underground_description
|
||||
c.notes = notes
|
||||
c.equipment = equipment
|
||||
c.references = references
|
||||
c.survey = survey
|
||||
c.length = length
|
||||
c.depth = depth
|
||||
c.extent = extent
|
||||
c.survex_file = survex_file
|
||||
|
||||
c.save()
|
||||
return c
|
||||
|
||||
|
||||
def create_expo_caves():
|
||||
"""Create the two cave fixtures used historically by the test-suite (115 and 284).
|
||||
|
||||
This mirrors the content of `core/fixtures/expo_caves.json` so tests that
|
||||
relied on those fixture rows can use this factory instead.
|
||||
"""
|
||||
# Cave 115 (Schnellzughöhle) - includes an underground_description fragment
|
||||
und_desc_115 = (
|
||||
"This is the main entrance through which the majority of the "
|
||||
"<a href=\"41.htm\">Stellerweghöhle</a> system was explored. See the separate "
|
||||
"<a href=\"41/115.htm#ent115\">full guidebook description</a> for details, just an overview is given here.</p>"
|
||||
"<p>The entrance leads to a non-obvious way on to the head of the short <b>Bell Pitch</b>, from where very awkward going leads out to a bigger passage to reach <b>The Ramp</b> a series of off-vertical pitches. The damper but technically easier <b>Inlet Pitches</b> drop to a Big Chamber, from where <b>Pete's Purgatory</b> starts, and leads in 800m of tortuous going to <b>The Confluence</b> and the larger streamway leading to the deepest point.</p>"
|
||||
)
|
||||
|
||||
create_cave(
|
||||
pk=43,
|
||||
areacode="1623",
|
||||
kataster_number="115",
|
||||
filename="1623-115.html",
|
||||
description_file="1623/115.htm",
|
||||
underground_description=und_desc_115,
|
||||
official_name="Schnellzughöhle",
|
||||
notes=(
|
||||
"The Austrian Kataster has adopted a very perverse way of numbering things. "
|
||||
"Their numbers are as follows: 115a Stellerweghöhle entrance 41a etc."
|
||||
),
|
||||
)
|
||||
|
||||
# Cave 284 (Seetrichter)
|
||||
create_cave(
|
||||
pk=350,
|
||||
areacode="1623",
|
||||
kataster_number="284",
|
||||
filename="1623-284.html",
|
||||
description_file="",
|
||||
official_name="Seetrichter (Lake bottom)",
|
||||
notes=(
|
||||
"A 25m long (22m deep) resurgence in Altausee. At the bottom, at a depth of 72m, "
|
||||
"there are large round blocks."
|
||||
),
|
||||
)
|
||||
|
||||
return Cave.objects.filter(pk__in=[43, 350])
|
||||
+111
-67
@@ -5,49 +5,105 @@ Modified for Expo April 2021.
|
||||
import re
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import Client, TestCase
|
||||
|
||||
from troggle.core.models.caves import Area, Cave
|
||||
from troggle.core.models.troggle import Person, PersonExpedition
|
||||
|
||||
import settings
|
||||
from troggle.core.models.caves import Cave
|
||||
from troggle.core.models.troggle import Expedition, Person, PersonExpedition
|
||||
from troggle.core.utils import current_expo
|
||||
from .factories import create_cave, create_expo_caves
|
||||
from .factories import create_expedition, create_person, create_personexpedition
|
||||
|
||||
|
||||
current_year = current_expo()
|
||||
|
||||
|
||||
def create_user(name=None, last_name="Caver", is_superuser=False):
|
||||
u = User()
|
||||
u.username = name
|
||||
u.email = f"philip.sargent+{name}@gmail.com"
|
||||
u.first_name, u.last_name = name, last_name
|
||||
u.set_password("secretword") # all test users have same password
|
||||
u.save()
|
||||
return u
|
||||
|
||||
def create_cave(areacode="1623", kataster_number="000", official_name=""):
|
||||
c = Cave(areacode=areacode, kataster_number=kataster_number, official_name=official_name)
|
||||
c.save()
|
||||
return c
|
||||
|
||||
# import troggle.settings as settings
|
||||
# FIXTURE_DIRS = settings.PYTHON_PATH / "core" /"fixtures"
|
||||
|
||||
class FixtureTests(TestCase):
|
||||
"""These just hit the database.
|
||||
They do not exercise the GET and url functions
|
||||
New: uses factories instead of fixtures so tests are self-contained.
|
||||
"""
|
||||
|
||||
fixtures = ["auth_users", "expo_areas", "expo_caves", "expo_exped"]
|
||||
ph = r"and leads in 800m of tortuous going to"
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# replicate the minimal data formerly provided by fixtures
|
||||
from .factories import create_expedition, create_person, create_personexpedition, create_cave, create_expo_caves
|
||||
|
||||
exp = create_expedition(pk=44, year="2019", name="CUCC expo 2019")
|
||||
person = create_person(pk=250, first_name="Michael", last_name="Sargent", fullname="Michael Sargent", slug="michael-sargent")
|
||||
create_personexpedition(pk=681, expedition=exp, person=person)
|
||||
|
||||
# two notable caves used by tests
|
||||
create_expo_caves()
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
create_user(name="expo") # needed for current_year()
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
User.objects.all().delete()
|
||||
|
||||
def test_fix_person_loaded(self):
|
||||
def test_fix_person_loaded_byname(self):
|
||||
p = Person.objects.get(fullname="Michael Sargent")
|
||||
self.assertEqual(str(p.first_name), "Michael")
|
||||
|
||||
def test_fix_person_loaded(self):
|
||||
def test_fix_personexped_loaded_bypk(self):
|
||||
pe = PersonExpedition.objects.get(pk="681")
|
||||
self.assertEqual(str(pe.person.fullname), "Michael Sargent")
|
||||
self.assertEqual(str(pe.expedition.year), "2019")
|
||||
|
||||
def test_fix_area_loaded(self):
|
||||
a = Area.objects.get(short_name="1623")
|
||||
self.assertEqual(str(a.short_name), "1623")
|
||||
def test_fix_expedition_loaded(self):
|
||||
e = Expedition.objects.get(pk="44")
|
||||
self.assertEqual(str(e.year), "2019")
|
||||
|
||||
def test_page_person(self):
|
||||
response = self.client.get("/person/michael-sargent")
|
||||
content = response.content.decode()
|
||||
# with open('testresponseperson.html','w') as tr:
|
||||
# tr.writelines(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [r"Michael Sargent", r"has been on expo in the following years"]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
def test_page_personexpedition(self):
|
||||
# Not working despite all components present and correct
|
||||
response = self.client.get("/personexpedition/michael-sargent/2019")
|
||||
content = response.content.decode()
|
||||
# with open('testresponse.html','w') as tr:
|
||||
# tr.writelines(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [r"Michael Sargent", r"Table of all trips and surveys aligned by date"]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# Need to add a fixture so that this actually has a logbook entry and a trip/svx in it.
|
||||
|
||||
def test_fix_cave_loaded115(self):
|
||||
c = Cave.objects.get(kataster_number="115")
|
||||
self.assertEqual(str(c.description_file), "1623/115.htm")
|
||||
self.assertEqual(str(c.url), "1623/115.url") # intentional
|
||||
self.assertEqual(str(c.filename), "1623-115.html")
|
||||
|
||||
# c.area is a 'ManyRelatedManager' object and not iterable
|
||||
# self.assertEqual(str(c.[0].short_name), "1623")
|
||||
self.assertEqual(str(c.areacode), "1623")
|
||||
|
||||
ph = self.ph
|
||||
phmatch = re.search(ph, c.underground_description)
|
||||
@@ -56,41 +112,40 @@ class FixtureTests(TestCase):
|
||||
def test_fix_cave_loaded284(self):
|
||||
c = Cave.objects.get(kataster_number="284")
|
||||
self.assertEqual(str(c.description_file), "")
|
||||
self.assertEqual(str(c.url), "1623/284/284.html")
|
||||
self.assertEqual(str(c.filename), "1623-284.html")
|
||||
|
||||
ph = r"at a depth of 72m, there are large round blocks"
|
||||
phmatch = re.search(ph, c.notes)
|
||||
self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_personexpedition(self):
|
||||
response = self.client.get("/personexpedition/MichaelSargent/2019")
|
||||
content = response.content.decode()
|
||||
# with open('testresponse.html','w') as tr:
|
||||
# tr.writelines(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [r"Michael Sargent", r"Table of all trips and surveys aligned by date"]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
# Need to add a fixture so that this actually has a logbook entry and a trip/svx in it.
|
||||
|
||||
|
||||
class FixturePageTests(TestCase):
|
||||
"""Currently nothing that runs troggle works - all do 404. Must be something in a template rendering crash?
|
||||
ordinary pages are OK, and expopages and expofiles are OK, even though they come through troggle. And the
|
||||
fixtures are certainly loaded into the db as the other tests show.
|
||||
"""The fixtures have a password hash which is compatible with plain-text password 'secretword'
|
||||
The hash CHANGES whenever Django upgrades the encryption key length. Better to create the test uses
|
||||
algorithmically and not via a fixture.
|
||||
Uses factories to create the small amount of data required for these page tests.
|
||||
"""
|
||||
|
||||
# The fixtures have a password hash which is compatible with plain-text password 'secretword'
|
||||
fixtures = ["auth_users", "expo_areas", "expo_caves", "expo_exped"]
|
||||
ph = r"and leads in 800m of tortuous going to"
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
create_expo_caves()
|
||||
# also create expedition/person data used by page rendering
|
||||
|
||||
exp = create_expedition(pk=44, year="2019", name="CUCC expo 2019")
|
||||
person = create_person(pk=250, first_name="Michael", last_name="Sargent", fullname="Michael Sargent", slug="michael-sargent")
|
||||
create_personexpedition(pk=681, expedition=exp, person=person)
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
for kataster_number in settings.NOTABLECAVES1623:
|
||||
create_cave(areacode="1623", kataster_number=kataster_number)
|
||||
for kataster_number in settings.NOTABLECAVES1626:
|
||||
create_cave(areacode="1626", kataster_number=kataster_number)
|
||||
|
||||
create_user(name="expo")
|
||||
create_user(name="expotest")
|
||||
create_user(name="expotestadmin", is_superuser = True)
|
||||
|
||||
self.user = User.objects.get(username="expotest")
|
||||
|
||||
@@ -98,7 +153,8 @@ class FixturePageTests(TestCase):
|
||||
self.client = Client()
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
User.objects.all().delete()
|
||||
Cave.objects.all().delete()
|
||||
|
||||
def test_fix_expedition(self):
|
||||
response = self.client.get("/expedition/2019")
|
||||
@@ -113,7 +169,7 @@ class FixturePageTests(TestCase):
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_personexped(self):
|
||||
response = self.client.get("/personexpedition/MichaelSargent/2019")
|
||||
response = self.client.get("/personexpedition/michael-sargent/2019")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"Table of all trips and surveys aligned by date"
|
||||
@@ -125,7 +181,7 @@ class FixturePageTests(TestCase):
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_person(self):
|
||||
response = self.client.get("/person/MichaelSargent")
|
||||
response = self.client.get("/person/michael-sargent")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"second-generation expo caver "
|
||||
@@ -136,32 +192,16 @@ class FixturePageTests(TestCase):
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_cave_url115(self):
|
||||
ph = self.ph
|
||||
response = self.client.get("/1623/115.url") # yes this is intentional, see the inserted data above & fixture
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_cave_url284(self):
|
||||
response = self.client.get("/1623/284/284.html")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"at a depth of 72m, there are large round blocks"
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
# with open('cave-url284.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_fix_cave_bare_url115(self):
|
||||
"""Expect to get Page Not Found and status 404"""
|
||||
ph = self.ph
|
||||
ph = "Probably a mistake."
|
||||
response = self.client.get("/1623/115")
|
||||
response = self.client.get("/1623/115/115")
|
||||
|
||||
# content = response.content.decode()
|
||||
# with open('_test_bare_url115.html', 'w') as f:
|
||||
# f.write(content)
|
||||
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
|
||||
content = response.content.decode()
|
||||
@@ -169,25 +209,29 @@ class FixturePageTests(TestCase):
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'") # 200 & Page Not Found
|
||||
|
||||
def test_fix_cave_slug115(self):
|
||||
"""Expect to get Page Not Found and status 404"""
|
||||
"""Expect to get Page Not Found and status 404
|
||||
UPDATE THIS BACK to 1623-115 when the data is fixed so that we don't have the
|
||||
internal redirections for cave ids"""
|
||||
ph = self.ph
|
||||
ph = "Probably a mistake."
|
||||
response = self.client.get("/1623-115")
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
# response = self.client.get("/1623-115")
|
||||
response = self.client.get("/1234-123")
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
|
||||
content = response.content.decode()
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'") # 200 & Page Not Found
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'") # 302 & Page Not Found
|
||||
|
||||
def test_fix_caves284(self):
|
||||
response = self.client.get("/caves")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"284 Seetrichter"
|
||||
phmatch = re.search(ph, content)
|
||||
# with open('_cave_fix_caves.html', 'w') as f:
|
||||
ph = r"Seetrichter"
|
||||
ph_alt = r"1623-284"
|
||||
phmatch = re.search(ph, content) or re.search(ph_alt, content)
|
||||
# with open('_cave_caves284.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "' or '" + ph_alt + "'")
|
||||
|
||||
# Although the Cave object exists, it looks like we get a bad slug error when trying to get a QM page.
|
||||
|
||||
|
||||
@@ -0,0 +1,183 @@
|
||||
import os
|
||||
import pathlib
|
||||
import tempfile
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
import settings
|
||||
from troggle.parsers import drawings
|
||||
from troggle.core.models.survex import DrawingFile
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.models.survex import SingleScan
|
||||
from troggle.core.models.troggle import DataIssue
|
||||
|
||||
|
||||
class DrawingsPathlibTests(TestCase):
|
||||
def test_load_drawings_creates_expected_entries(self):
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
# create a small tree
|
||||
p = pathlib.Path(td)
|
||||
(p / 'one.pdf').write_text('pdf')
|
||||
(p / 'two.txt').write_text('txt')
|
||||
sub = p / 'dir'
|
||||
sub.mkdir()
|
||||
(sub / 'three.png').write_text('png')
|
||||
sub2 = p / 'dir2'
|
||||
sub2.mkdir()
|
||||
(sub2 / 'abc.th2').write_text('th2')
|
||||
(sub2 / 'abc.th').write_text('th')
|
||||
|
||||
# point the module at our tempdir using a temporary setting on the
|
||||
# local settings module (the parsers import `settings` directly)
|
||||
with patch.object(settings, "DRAWINGS_DATA", td):
|
||||
drawings.load_drawings_files()
|
||||
|
||||
# all files should be present
|
||||
self.assertTrue(DrawingFile.objects.filter(dwgpath='one.pdf').exists())
|
||||
self.assertTrue(DrawingFile.objects.filter(dwgpath='two.txt').exists())
|
||||
self.assertTrue(DrawingFile.objects.filter(dwgpath='dir/three.png').exists())
|
||||
self.assertTrue(DrawingFile.objects.filter(dwgpath='dir2/abc.th2').exists())
|
||||
self.assertTrue(DrawingFile.objects.filter(dwgpath='dir2/abc.th').exists())
|
||||
|
||||
def test_hidden_and_backup_skipped(self):
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
p = pathlib.Path(td)
|
||||
(p / '.hidden').write_text('hid')
|
||||
(p / 'file~').write_text('bak')
|
||||
with patch.object(settings, "DRAWINGS_DATA", td):
|
||||
drawings.load_drawings_files()
|
||||
|
||||
# Should not import hidden or backup files
|
||||
self.assertFalse(DrawingFile.objects.filter(dwgpath='.hidden').exists())
|
||||
self.assertFalse(DrawingFile.objects.filter(dwgpath='file~').exists())
|
||||
|
||||
def test_no_extension_file(self):
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
p = pathlib.Path(td)
|
||||
(p / 'noext').write_text('data')
|
||||
with patch.object(settings, "DRAWINGS_DATA", td):
|
||||
drawings.load_drawings_files()
|
||||
|
||||
self.assertTrue(DrawingFile.objects.filter(dwgpath='noext').exists())
|
||||
|
||||
def test_git_dir_skipped(self):
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
p = pathlib.Path(td)
|
||||
g = p / '.git'
|
||||
g.mkdir()
|
||||
(g / 'secret.txt').write_text('top secret')
|
||||
with patch.object(settings, "DRAWINGS_DATA", td):
|
||||
drawings.load_drawings_files()
|
||||
|
||||
self.assertFalse(DrawingFile.objects.filter(dwgpath='.git/secret.txt').exists())
|
||||
|
||||
def test_bulk_create_chunks(self):
|
||||
# Create more than chunk size files to ensure bulk_create is called in multiple chunks
|
||||
count = 800
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
p = pathlib.Path(td)
|
||||
for i in range(count):
|
||||
(p / f'file{i}.txt').write_text('x')
|
||||
with patch.object(settings, "DRAWINGS_DATA", td):
|
||||
drawings.load_drawings_files()
|
||||
|
||||
self.assertEqual(DrawingFile.objects.count(), count)
|
||||
|
||||
def test_parse_tunnel_links_wallet_and_scan(self):
|
||||
# Create a wallet and a singlescan, then ensure parse_tnl_file links them
|
||||
w = Wallet.objects.create(fpath='x', walletname='2025#20')
|
||||
ss = SingleScan.objects.create(ffile='x', name='notes.jpg', wallet=w)
|
||||
df = DrawingFile.objects.create(dwgpath='tst.th', dwgname='tst')
|
||||
|
||||
drawings.parse_tnl_file(df, '2025#20/notes.jpg')
|
||||
|
||||
self.assertIn(w, df.dwgwallets.all())
|
||||
self.assertIn(ss, df.scans.all())
|
||||
|
||||
def test_drawing_reference_multiple_creates_dataissue(self):
|
||||
df1 = DrawingFile.objects.create(dwgpath='ref1', dwgname='shared')
|
||||
df2 = DrawingFile.objects.create(dwgpath='ref2', dwgname='shared')
|
||||
dfmain = DrawingFile.objects.create(dwgpath='main', dwgname='main')
|
||||
|
||||
drawings.parse_tnl_file(dfmain, 'shared')
|
||||
|
||||
di = DataIssue.objects.filter(parser='Tunnel', message__contains="files named 'shared'")
|
||||
self.assertTrue(di.exists())
|
||||
|
||||
def test_drawing_reference_single_no_dataissue(self):
|
||||
DrawingFile.objects.create(dwgpath='ref3', dwgname='unique')
|
||||
dfmain = DrawingFile.objects.create(dwgpath='main2', dwgname='main2')
|
||||
|
||||
drawings.parse_tnl_file(dfmain, 'unique')
|
||||
|
||||
di = DataIssue.objects.filter(parser='Tunnel', message__contains="files named 'unique'")
|
||||
self.assertFalse(di.exists())
|
||||
|
||||
def test_extension_helpers_and_constants(self):
|
||||
# Helpers should recognise supported/image suffixes (case-insensitive)
|
||||
self.assertTrue(drawings._is_supported_suffix('.png'))
|
||||
self.assertTrue(drawings._is_supported_suffix('.xml'))
|
||||
self.assertTrue(drawings._is_supported_suffix('.TH'))
|
||||
self.assertFalse(drawings._is_supported_suffix(''))
|
||||
self.assertFalse(drawings._is_supported_suffix('.exe'))
|
||||
|
||||
self.assertTrue(drawings._is_image_suffix('.png'))
|
||||
self.assertTrue(drawings._is_image_suffix('.JPEG'))
|
||||
self.assertFalse(drawings._is_image_suffix('.xml'))
|
||||
self.assertFalse(drawings._is_image_suffix(''))
|
||||
|
||||
# Constants should include expected values and be consistent
|
||||
self.assertIn('.png', drawings.IMAGE_EXTS)
|
||||
self.assertEqual(set(drawings.IMAGE_LIKE_EXTS), set(drawings.IMAGE_EXTS))
|
||||
self.assertIn('.th', drawings.SUPPORTED_EXTENSIONS)
|
||||
self.assertIn('.png', drawings.SUPPORTED_EXTENSIONS)
|
||||
|
||||
def test_fetch_drawingfiles_by_paths_chunks(self):
|
||||
# Create more items than typical SQLite parameter limit to ensure chunking
|
||||
count = 1200
|
||||
rel_paths = []
|
||||
objs = []
|
||||
for i in range(count):
|
||||
rel = f'bigdir/file{i}.txt'
|
||||
rel_paths.append(rel)
|
||||
objs.append(DrawingFile(dwgpath=rel, dwgname=f'name{i}'))
|
||||
|
||||
# Bulk create them efficiently
|
||||
DrawingFile.objects.bulk_create(objs)
|
||||
|
||||
mapping = drawings.fetch_drawingfiles_by_paths(rel_paths, chunk_size=500)
|
||||
self.assertEqual(len(mapping), count)
|
||||
# Spot-check a few entries
|
||||
self.assertIn('bigdir/file0.txt', mapping)
|
||||
self.assertIn(f'bigdir/file{count-1}.txt', mapping)
|
||||
|
||||
def test_assign_wallets_for_model_assigns_and_returns_wallets(self):
|
||||
w = Wallet.objects.create(fpath='x', walletname='2025#20')
|
||||
df = DrawingFile.objects.create(dwgpath='assign.th', dwgname='assign')
|
||||
|
||||
res = drawings._assign_wallets_for_model(df, '2025#20', parser_label='AssignTest')
|
||||
|
||||
self.assertTrue(res)
|
||||
self.assertIn(w, df.dwgwallets.all())
|
||||
|
||||
def test_assign_wallets_for_model_creates_dataissue_on_missing(self):
|
||||
df = DrawingFile.objects.create(dwgpath='missing.th', dwgname='missing')
|
||||
|
||||
drawings._assign_wallets_for_model(df, 'NONEXISTENT', parser_label='AssignMissing')
|
||||
|
||||
di = DataIssue.objects.filter(parser='AssignMissing', message__contains='not found')
|
||||
self.assertTrue(di.exists())
|
||||
|
||||
def test_assign_wallets_for_model_records_dataissue_on_exception(self):
|
||||
# Patch Wallet.objects.filter to raise an exception
|
||||
from unittest.mock import patch
|
||||
|
||||
df = DrawingFile.objects.create(dwgpath='err.th', dwgname='err')
|
||||
|
||||
with patch('troggle.core.models.wallets.Wallet.objects.filter') as mock_filter:
|
||||
mock_filter.side_effect = RuntimeError('boom')
|
||||
drawings._assign_wallets_for_model(df, 'WHATEVER', parser_label='AssignError')
|
||||
|
||||
di = DataIssue.objects.filter(parser='AssignError', message__contains='Exception')
|
||||
self.assertTrue(di.exists())
|
||||
+25
-12
@@ -42,8 +42,7 @@ class SimpleTest(SimpleTestCase):
|
||||
# from PIL import Image
|
||||
from functools import reduce
|
||||
|
||||
from troggle.core.utils import save_carefully
|
||||
|
||||
|
||||
def test_import_parsers_survex(self):
|
||||
import troggle.core.models.caves as models_caves
|
||||
import troggle.core.models.survex as models_survex
|
||||
@@ -55,6 +54,7 @@ class SimpleTest(SimpleTestCase):
|
||||
from troggle.parsers.people import GetPersonExpeditionNameLookup
|
||||
|
||||
def test_import_views_uploads(self):
|
||||
from troggle.core.views.logbook_edit import logbookedit
|
||||
from troggle.core.views.uploads import dwgupload
|
||||
|
||||
def test_import_views_walletedit(self):
|
||||
@@ -73,7 +73,7 @@ class SimpleTest(SimpleTestCase):
|
||||
from django.utils.timezone import get_current_timezone, make_aware
|
||||
|
||||
from parsers.people import GetPersonExpeditionNameLookup
|
||||
from troggle.core.models.logbooks import CaveSlug, QM, LogbookEntry, PersonLogEntry
|
||||
from troggle.core.models.logbooks import QM, LogbookEntry, PersonLogEntry
|
||||
from troggle.core.models.troggle import DataIssue, Expedition
|
||||
|
||||
def test_import_core_views_caves(self):
|
||||
@@ -83,8 +83,8 @@ class SimpleTest(SimpleTestCase):
|
||||
from django.shortcuts import get_object_or_404, render
|
||||
|
||||
import troggle.core.views.expo
|
||||
from troggle.core.forms import CaveAndEntranceFormSet, CaveForm, EntranceForm, EntranceLetterForm
|
||||
from troggle.core.models.caves import Area, Cave, CaveAndEntrance, Entrance, SurvexStation #EntranceSlug,
|
||||
from troggle.core.forms import CaveForm, EntranceForm, EntranceLetterForm
|
||||
from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance, SurvexStation #EntranceSlug,
|
||||
from troggle.core.models.troggle import Expedition
|
||||
from troggle.core.views.auth import login_required_if_public
|
||||
|
||||
@@ -96,6 +96,7 @@ class SimpleTest(SimpleTestCase):
|
||||
import troggle.parsers.QMs
|
||||
import troggle.parsers.scans
|
||||
import troggle.parsers.survex
|
||||
import troggle.parsers.users
|
||||
import troggle.settings
|
||||
from troggle.parsers.logbooks import GetCaveLookup
|
||||
|
||||
@@ -106,8 +107,17 @@ class SimpleTest(SimpleTestCase):
|
||||
from django.http import HttpResponse
|
||||
from django.urls import reverse
|
||||
|
||||
def test_import_users_urls(self):
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
from cryptography.fernet import Fernet
|
||||
from pathlib import Path
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
def test_import_urls(self):
|
||||
from django.conf import settings
|
||||
|
||||
#from django.conf.urls import include, url
|
||||
from django.contrib import admin, auth
|
||||
from django.urls import resolve, reverse
|
||||
@@ -141,13 +151,13 @@ class SimpleTest(SimpleTestCase):
|
||||
from troggle.core.views.other import controlpanel
|
||||
from troggle.core.views.prospect import prospecting, prospecting_image
|
||||
from troggle.core.views.statistics import dataissues, pathsreport, stats
|
||||
from troggle.core.views.survex import survexcavesingle, survexcaveslist, svx
|
||||
from troggle.core.views.survex import survexcaveslist, svx
|
||||
|
||||
class ImportTest(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
import troggle.settings as settings
|
||||
from troggle.parsers.logbooks import LOGBOOKS_DIR, DEFAULT_LOGBOOK_FILE
|
||||
from troggle.parsers.logbooks import DEFAULT_LOGBOOK_FILE, LOGBOOKS_DIR
|
||||
|
||||
LOGBOOKS_PATH = settings.EXPOWEB / LOGBOOKS_DIR
|
||||
test_year = "1986"
|
||||
@@ -228,10 +238,13 @@ class SubprocessTest(TestCase):
|
||||
self.assertTrue(sp.returncode == 0, f"{cwd} - git is unhappy")
|
||||
|
||||
content = sp.stdout
|
||||
ph = r"nothing to commit, working tree clean"
|
||||
phmatch = re.search(ph, content)
|
||||
msg = f'{cwd} - Failed to find expected git output: "{ph}"'
|
||||
self.assertIsNotNone(phmatch, msg)
|
||||
phs = [r"Your branch is up to date", r"nothing to commit, working tree clean", r"Your branch is ahead"]
|
||||
ok = None
|
||||
for ph in phs:
|
||||
if phmatch := re.search(ph, content): # WALRUS
|
||||
ok = True
|
||||
msg = f'{cwd} - Failed to find any nice git output: "{phs}"'
|
||||
self.assertIsNotNone(ok, msg)
|
||||
|
||||
# ph1 = r"no changes added to commit"
|
||||
# phmatch1 = re.search(ph1, content)
|
||||
@@ -262,7 +275,7 @@ class SubprocessTest(TestCase):
|
||||
self.assertTrue(sp.returncode == 0, f"{cwd} - survex is unhappy")
|
||||
|
||||
content = sp.stdout
|
||||
ph = r"Total length of survey legs"
|
||||
ph = r"Total plan length of survey"
|
||||
phmatch = re.search(ph, content)
|
||||
msg = f'{cwd} - Failed to find expected survex output: "{ph}"'
|
||||
self.assertIsNotNone(phmatch, msg)
|
||||
|
||||
+105
-110
@@ -3,67 +3,68 @@ Originally written for CUYC
|
||||
Philip Sargent (Feb.2021)
|
||||
|
||||
Modified for Expo April 2021.
|
||||
|
||||
To run just these, do
|
||||
uv run manage.py test -v 3 troggle.core.TESTS.test_logins
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
import re
|
||||
import tempfile
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import Client, TestCase
|
||||
|
||||
import troggle.settings as settings
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.models.troggle import Expedition
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.utils import current_expo
|
||||
|
||||
current_year = current_expo()
|
||||
|
||||
|
||||
def create_user(name=None, last_name="Caver", is_superuser=False):
|
||||
u = User()
|
||||
u.username = name
|
||||
u.email = f"philip.sargent+{name}@gmail.com"
|
||||
u.first_name, u.last_name = name, last_name
|
||||
u.set_password("secretword") # all test users have same password
|
||||
u.save()
|
||||
return u
|
||||
|
||||
|
||||
class DataTests(TestCase):
|
||||
"""These check that the NULL and NON-UNIQUE constraints are working in the database"""
|
||||
"""These check that the NULL and NON-UNIQUE constraints are working in the database
|
||||
|
||||
no tests here... !"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User()
|
||||
u.pk = 9000
|
||||
u.user_id = 8000
|
||||
u.username, u.password = "stinker", "secretword"
|
||||
u.email = "philip.sargent+SP@gmail.com"
|
||||
u.first_name, u.last_name = "Stinker", "Pinker"
|
||||
u.save()
|
||||
self.user = u
|
||||
|
||||
create_user(name="expo")
|
||||
|
||||
def tearDown(self):
|
||||
# self.member.delete() # must delete member before user
|
||||
# self.user.delete() # horrible crash, why?
|
||||
pass
|
||||
Users.objects.all().delete()
|
||||
|
||||
|
||||
class FixturePageTests(TestCase):
|
||||
# The fixtures have a password hash which is compatible with plain-text password 'secretword'
|
||||
fixtures = ["auth_users"]
|
||||
class LoginTests(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
self.user = User.objects.get(username="expotest")
|
||||
create_user(name="expo")
|
||||
create_user(name="expotest")
|
||||
create_user(name="expotestadmin", is_superuser = True)
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
User.objects.all().delete()
|
||||
|
||||
def test_fix_admin_login_fail(self):
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
|
||||
response = c.get("/admin/")
|
||||
content = response.content.decode()
|
||||
# with open('admin-op.html', 'w') as f:
|
||||
@@ -75,17 +76,16 @@ class FixturePageTests(TestCase):
|
||||
class PostTests(TestCase):
|
||||
"""Tests walletedit form"""
|
||||
|
||||
fixtures = ["auth_users"]
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
self.user = User.objects.get(username="expotest")
|
||||
self.client = Client()
|
||||
create_user(name="expo")
|
||||
create_user(name="expotestadmin", is_superuser = True)
|
||||
self.user = create_user(name="expotest")
|
||||
|
||||
c = self.client
|
||||
|
||||
testyear = "2022"
|
||||
wname = f"{testyear}:00"
|
||||
@@ -102,14 +102,17 @@ class PostTests(TestCase):
|
||||
e.save()
|
||||
self.expedition = e
|
||||
|
||||
def tearDown(self):
|
||||
User.objects.all().delete()
|
||||
Wallet.objects.all().delete()
|
||||
Expedition.objects.all().delete()
|
||||
|
||||
def test_file_permissions(self):
|
||||
"""Expect to be allowed to write to SCANS_ROOT, DRAWINGS_DATA, SURVEX_DATA, EXPOWEB
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
u = self.user
|
||||
testyear = self.testyear
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
@@ -144,14 +147,20 @@ class PostTests(TestCase):
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
# Just uploading a file does NOT do any git commit.
|
||||
# You need to create or edit a contents.json file for that to happen.
|
||||
# But this does now seem to happen.
|
||||
|
||||
# This is crashing *sometimes* in the git diff part of the utils. git_add() function
|
||||
# because it is not in a git folder when run in a test.
|
||||
# when run just as test_logins it is fine, when run with all teh other tests it crashes.
|
||||
with open("core/fixtures/test_upload_file.txt", "r") as testf:
|
||||
response = self.client.post(
|
||||
f"/walletedit/{testyear}:00", data={"name": "test_upload_file.txt", "uploadfiles": testf}
|
||||
f"/walletedit/{testyear}:00", data={"name": "test_upload_file.txt", "uploadfiles": testf, "who_are_you": "Gumby <gumby@tent.expo>"}
|
||||
)
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# with open("_test_response.html", "w") as f:
|
||||
# with open('_test_response__scan_upload.html', 'w') as f:
|
||||
# f.write(content)
|
||||
for ph in [
|
||||
r"test_upload_",
|
||||
@@ -168,35 +177,31 @@ class PostTests(TestCase):
|
||||
remove_file = pathlib.Path(settings.SCANS_ROOT) / f'{testyear}' / f'{testyear}#00'/ 'test_upload_file.txt'
|
||||
remove_file.unlink()
|
||||
|
||||
# Just uploading a file does NOT do any git commit.
|
||||
# You need to create or edit a contents.json file for that to happen.
|
||||
|
||||
|
||||
def test_photo_upload(self):
|
||||
"""Expect photo upload to work on any file (contrary to msg on screen)
|
||||
Upload into current default year. settings.PHOTOS_YEAR
|
||||
Upload into current default year.
|
||||
Deletes file afterwards
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
u = self.user
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
with open("core/fixtures/test_upload_file.txt", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/photoupload/", data={"name": "test_upload_file.txt", "renameto": "", "uploadfiles": testf}
|
||||
"/photoupload", data={"name": "test_upload_file.txt", "renameto": "", "uploadfiles": testf}
|
||||
)
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
# with open('_test_response_photo_upload.html', 'w') as f:
|
||||
# f.write(content)
|
||||
for ph in [
|
||||
r"test_upload_",
|
||||
r"Upload photos into /photos/" + str(settings.PHOTOS_YEAR),
|
||||
r"Upload photos into /photos/" + str(current_year),
|
||||
r" you can create a new folder in your name",
|
||||
r"Create new Photographer folder",
|
||||
r"only photo image files are accepted",
|
||||
@@ -206,19 +211,17 @@ class PostTests(TestCase):
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
remove_file = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / "test_upload_file.txt"
|
||||
remove_file = pathlib.Path(settings.PHOTOS_ROOT, current_year) / "test_upload_file.txt"
|
||||
remove_file.unlink()
|
||||
|
||||
def test_photo_upload_rename(self):
|
||||
"""Expect photo upload to work on any file (contrary to msg on screen)
|
||||
Upload into current default year. settings.PHOTOS_YEAR
|
||||
Upload into current default year.
|
||||
Deletes file afterwards
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
u = self.user
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
@@ -226,7 +229,7 @@ class PostTests(TestCase):
|
||||
rename = "RENAMED-FILE.JPG"
|
||||
with open("core/fixtures/test_upload_file.txt", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/photoupload/", data={"name": "test_upload_file.txt", "renameto": rename, "uploadfiles": testf}
|
||||
"/photoupload", data={"name": "test_upload_file.txt", "renameto": rename, "uploadfiles": testf}
|
||||
)
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
@@ -239,79 +242,81 @@ class PostTests(TestCase):
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
remove_file = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / rename
|
||||
remove_file.unlink()
|
||||
remove_file = pathlib.Path(settings.PHOTOS_ROOT, current_year) / rename
|
||||
if remove_file.is_file():
|
||||
remove_file.unlink()
|
||||
|
||||
|
||||
def test_photo_folder_create(self):
|
||||
"""Create folder for new user
|
||||
Create in current default year. settings.PHOTOS_YEAR
|
||||
Create in current year.
|
||||
Deletes folder afterwards
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
u = self.user
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
response = self.client.post("/photoupload/", data={"photographer": "GussieFinkNottle"})
|
||||
response = self.client.post("/photoupload", data={"photographer": "GussieFinkNottle"})
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
for ph in [r"/GussieFinkNottle/", r"Create new Photographer folder"]:
|
||||
with open('_test_response.html', 'w') as f:
|
||||
f.write(content)
|
||||
for ph in [r"Create new Photographer folder", r"/GussieFinkNottle/"]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
# Does not use the filename Django actually uses, assumes it is unchanged. Bug: accumulates one file with random name
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
remove_dir = pathlib.Path(settings.PHOTOS_ROOT, settings.PHOTOS_YEAR) / "GussieFinkNottle"
|
||||
remove_dir.rmdir()
|
||||
remove_dir = pathlib.Path(settings.PHOTOS_ROOT, current_year) / "GussieFinkNottle"
|
||||
if remove_dir.is_dir():
|
||||
print(f"{remove_dir} was created, now removing it.")
|
||||
remove_dir.rmdir()
|
||||
|
||||
def test_dwg_upload_txt(self):
|
||||
"""Expect .pdf file to be refused upload
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
u = self.user
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
with open("core/fixtures/test_upload_file.pdf", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/dwgupload/uploads", data={"name": "test_upload_file.txt", "uploadfiles": testf}
|
||||
"/dwgupload/uploads", data={"name": "test_upload_file.txt", "uploadfiles": testf, "who_are_you": "Gumby <gumby@tent.expo>"}
|
||||
)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response_dwg_upload_txt.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
t = re.search("Files refused:", content)
|
||||
self.assertIsNotNone(t, 'Logged in but failed to see "Files refused:"')
|
||||
|
||||
|
||||
def test_dwg_upload_drawing(self):
|
||||
"""Expect no-suffix file to upload
|
||||
Note that this skips the git commit process. That would need a new test.
|
||||
Need to login first.
|
||||
"""
|
||||
c = self.client
|
||||
from django.contrib.auth.models import User
|
||||
u = self.user
|
||||
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
with open("core/fixtures/test_upload_nosuffix", "r") as testf:
|
||||
response = self.client.post(
|
||||
"/dwguploadnogit/uploads", data={"name": "test_upload_nosuffix", "uploadfiles": testf}
|
||||
"/dwguploadnogit/uploads", data={"name": "test_upload_nosuffix", "uploadfiles": testf, "who_are_you": "Gumby <gumby@tent.expo>"}
|
||||
)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
# with open('_test_response_dwg_upload_drawing.html', 'w') as f:
|
||||
# f.write(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [
|
||||
r"test_upload_nosuffix",
|
||||
@@ -327,7 +332,8 @@ class PostTests(TestCase):
|
||||
# added each time it is run. The name of the uploaded file is only available within the code where it happens
|
||||
# UploadedFile.name see https://docs.djangoproject.com/en/4.1/ref/files/uploads/#django.core.files.uploadedfile.UploadedFile
|
||||
remove_file = pathlib.Path(settings.DRAWINGS_DATA) / "uploads" / "test_upload_nosuffix"
|
||||
remove_file.unlink()
|
||||
if remove_file.is_file():
|
||||
remove_file.unlink()
|
||||
|
||||
|
||||
class ComplexLoginTests(TestCase):
|
||||
@@ -335,27 +341,14 @@ class ComplexLoginTests(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
"""setUp runs once for each test in this class"""
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
u = User()
|
||||
u.pk = 9000
|
||||
u.user_id = 8000
|
||||
u.username, u.password = "expotest", "secretword"
|
||||
u.email = "philip.sargent+ET@gmail.com"
|
||||
u.first_name, u.last_name = "ExpoTest", "Caver"
|
||||
u.is_staff = True
|
||||
u.is_superuser = True
|
||||
|
||||
u.set_password(u.password) # This creates a new salt and thus a new key for EACH test
|
||||
u.save() # vital that we save all this before attempting login
|
||||
# print ('\n',u.password)
|
||||
self.user = u
|
||||
create_user(name="expo")
|
||||
create_user(name="expotest")
|
||||
create_user(name="expotestadmin", is_superuser = True)
|
||||
|
||||
def tearDown(self):
|
||||
self.client.logout() # not needed as each test creates a new self.client
|
||||
# self.member.delete()
|
||||
##self.user.delete() # id attribute set to None !
|
||||
pass
|
||||
User.objects.all().delete()
|
||||
|
||||
|
||||
# def test_login_redirect_for_non_logged_on_user(self): # need to fix this in real system
|
||||
# c = self.client
|
||||
@@ -365,11 +358,11 @@ class ComplexLoginTests(TestCase):
|
||||
|
||||
def test_ordinary_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
logged_in = c.login(username=u.username, password="secretword")
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
response = c.get("/accounts/login/") # defined by auth system
|
||||
@@ -379,14 +372,14 @@ class ComplexLoginTests(TestCase):
|
||||
|
||||
def test_authentication_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
self.assertTrue(u.is_active, "User '" + u.username + "' is INACTIVE")
|
||||
|
||||
# This is weird. I thought that the user had to login before she was in the authenticated state
|
||||
self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED before login")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
logged_in = c.login(username=u.username, password="secretword")
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
self.assertTrue(u.is_authenticated, "User '" + u.username + "' is NOT AUTHENTICATED after login")
|
||||
@@ -396,24 +389,26 @@ class ComplexLoginTests(TestCase):
|
||||
|
||||
def test_admin_login(self):
|
||||
c = self.client
|
||||
u = self.user
|
||||
u = User.objects.get(username="expotestadmin")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails to work if password=u.password !
|
||||
logged_in = c.login(username=u.username, password="secretword")
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
|
||||
response = c.get("/admin/")
|
||||
response = c.get("/admin/login/")
|
||||
content = response.content.decode()
|
||||
# with open('admin-op.html', 'w') as f:
|
||||
# f.write(content)
|
||||
# fn='admin-op.html'
|
||||
# print(f"Writing {fn}")
|
||||
# with open(fn, 'w') as f:
|
||||
# f.write(content)
|
||||
t = re.search(r"Troggle database administration", content)
|
||||
self.assertIsNotNone(t, "Logged in as '" + u.username + "' but failed to get the Troggle Admin page")
|
||||
|
||||
def test_noinfo_login(self):
|
||||
|
||||
c = self.client # inherited from TestCase
|
||||
u = self.user
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
logged_in = c.login(username=u.username, password="secretword") # fails if password=u.password !
|
||||
logged_in = c.login(username=u.username, password="secretword")
|
||||
self.assertTrue(logged_in, "FAILED to login as '" + u.username + "'")
|
||||
response = c.get("/stats") # a page with the Troggle menus
|
||||
content = response.content.decode()
|
||||
@@ -428,7 +423,7 @@ class ComplexLoginTests(TestCase):
|
||||
def test_user_force(self):
|
||||
|
||||
c = self.client
|
||||
u = self.user
|
||||
u = User.objects.get(username="expotest")
|
||||
|
||||
try:
|
||||
c.force_login(u)
|
||||
|
||||
+129
-38
@@ -22,30 +22,46 @@ import subprocess
|
||||
import unittest
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import Client, SimpleTestCase, TestCase
|
||||
|
||||
from troggle.core.models.troggle import Expedition, DataIssue, Person, PersonExpedition
|
||||
import troggle.parsers.logbooks as lbp
|
||||
from troggle.core.models.logbooks import LogbookEntry
|
||||
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
|
||||
from troggle.core.utils import current_expo
|
||||
|
||||
current_year = current_expo()
|
||||
|
||||
|
||||
def create_user(name=None, last_name="Caver", is_superuser=False):
|
||||
u = User()
|
||||
u.username = name
|
||||
u.email = f"philip.sargent+{name}@gmail.com"
|
||||
u.first_name, u.last_name = name, last_name
|
||||
u.set_password("secretword") # all test users have same password
|
||||
u.save()
|
||||
return u
|
||||
|
||||
def create_person(firstname, lastname, nickname=False, vfho=False, exped=None):
|
||||
fullname = f"{firstname} {lastname}"
|
||||
slug=f"{firstname.lower()}-{lastname.lower()}"
|
||||
coUniqueAttribs = {"first_name": firstname, "last_name": (lastname or ""), "slug": slug,}
|
||||
otherAttribs = {"is_vfho": vfho, "fullname": fullname, "nickname": nickname}
|
||||
person = Person.objects.create(**otherAttribs, **coUniqueAttribs)
|
||||
|
||||
coUniqueAttribs = {"person": person, "expedition": exped}
|
||||
otherAttribs = {}
|
||||
pe = PersonExpedition.objects.create(**otherAttribs, **coUniqueAttribs)
|
||||
return person
|
||||
|
||||
TEST_YEAR = "1986"
|
||||
lbp.ENTRIES[TEST_YEAR] = 4 # number of entries in the test logbook
|
||||
|
||||
class ImportTest(TestCase):
|
||||
# see test_logins.py for the tests to check that logged-in users work
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
def make_person(firstname, lastname, nickname=False, vfho=False, guest=False):
|
||||
fullname = f"{firstname} {lastname}"
|
||||
lookupAttribs = {"first_name": firstname, "last_name": (lastname or "")}
|
||||
nonLookupAttribs = {"is_vfho": vfho, "fullname": fullname, "nickname": nickname}
|
||||
person = Person.objects.create(**nonLookupAttribs, **lookupAttribs)
|
||||
|
||||
lookupAttribs = {"person": person, "expedition": cls.test_expo}
|
||||
nonLookupAttribs = {"is_guest": guest}
|
||||
pe = PersonExpedition.objects.create(**nonLookupAttribs, **lookupAttribs)
|
||||
|
||||
return person
|
||||
|
||||
def setUpTestData(cls):
|
||||
import troggle.settings as settings
|
||||
|
||||
LOGBOOKS_PATH = settings.EXPOWEB / lbp.LOGBOOKS_DIR
|
||||
@@ -55,28 +71,35 @@ class ImportTest(TestCase):
|
||||
if frontmatter_file.is_file():
|
||||
frontmatter_file.unlink() # delete if it exists
|
||||
|
||||
lookupAttribs = {"year": TEST_YEAR}
|
||||
nonLookupAttribs = {"name": f"CUCC expo-test {TEST_YEAR}"}
|
||||
cls.test_expo = Expedition.objects.create(**nonLookupAttribs, **lookupAttribs)
|
||||
coUniqueAttribs = {"year": TEST_YEAR}
|
||||
otherAttribs = {"name": f"CUCC expo-test {TEST_YEAR}"}
|
||||
cls.test_expo = Expedition.objects.create(**otherAttribs, **coUniqueAttribs)
|
||||
|
||||
fred = make_person("Fred", "Smartarse", nickname="freddy")
|
||||
phil = make_person("Phil", "Tosser", nickname="tosspot")
|
||||
dave = make_person("David", "Smartarse", "")
|
||||
mike = make_person("Michael", "Wideboy", "WB", vfho=True)
|
||||
fred = create_person("Fred", "Smartarse", nickname="freddy", exped=cls.test_expo)
|
||||
phil = create_person("Phil", "Tosser", nickname="tosspot", exped=cls.test_expo)
|
||||
dave = create_person("David", "Smartarse", "", exped=cls.test_expo)
|
||||
mike = create_person("Michael", "Wideboy", "WB", vfho=True, exped=cls.test_expo)
|
||||
# NOT created Kurt, as the whole point is that he is a guest.
|
||||
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
create_user(name="expo") # needed for current_year()
|
||||
self.user = create_user(name="expotest")
|
||||
self.client = Client()
|
||||
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
User.objects.all().delete()
|
||||
Person.objects.all().delete()
|
||||
PersonExpedition.objects.all().delete()
|
||||
Expedition.objects.all().delete()
|
||||
|
||||
def test_logbook_exists(self):
|
||||
self.assertTrue(self.test_logbook.is_file())
|
||||
|
||||
def test_logbook_parse(self):
|
||||
|
||||
lbp.LoadLogbook(self.test_expo)
|
||||
def test_logbook_parse_issues(self):
|
||||
"""This is just testing the db not the web page
|
||||
"""
|
||||
lbp.LoadLogbook(self.test_expo) # i.e. load the 1986 logbook
|
||||
|
||||
issues = DataIssue.objects.all()
|
||||
messages = []
|
||||
@@ -87,26 +110,95 @@ class ImportTest(TestCase):
|
||||
print(f"'{i.message}'")
|
||||
|
||||
expected = [
|
||||
" ! - 1986 No name match for: 'Kurt Keinnamen' in entry tid='1986_s02' for this expedition year.",
|
||||
"! - 1986 No name match for: 'Kurt Keinnamen' in entry",
|
||||
]
|
||||
|
||||
not_expected = [
|
||||
" ! - 1986 No name match for: 'Dave Smartarse' in entry tid='1986_s01' for this expedition year.",
|
||||
" ! - 1986 Warning: logentry: surface - stupour - no expo member author for entry '1986_s03'",
|
||||
" ! - 1986 Warning: logentry: 123 - wave 2 - no expo member author for entry '1986_s02'",
|
||||
" ! - 1986 EXCEPTION:: 'Dave Smartarse' (Dave Smartarse) in entry tid='1986-07-27a' for this year.",
|
||||
" ! - 1986 Warning: logentry: surface - stupour - no expo member author for entry '1986-07-31a'",
|
||||
" ! - 1986 Warning: logentry: 123 - wave 2 - no expo member author for entry '1986-08-01a'",
|
||||
]
|
||||
|
||||
# with open('_test_response.txt', 'w') as f:
|
||||
# for m in messages:
|
||||
# f.write(m)
|
||||
messages_text = ", ".join(messages)
|
||||
for e in expected:
|
||||
self.assertIn(e, messages)
|
||||
phmatch = re.search(e, messages_text)
|
||||
self.assertIsNotNone(phmatch, f"Failed to find expected text: '{e}' in\n{messages_text}")
|
||||
for e in not_expected:
|
||||
self.assertNotIn(e, messages)
|
||||
phmatch = re.search(e, messages_text)
|
||||
self.assertIsNone(phmatch, f"Found unexpected text: '{e}' in\n{messages_text}")
|
||||
|
||||
def test_lbe(self):
|
||||
lbp.LoadLogbook(self.test_expo) # i.e. load the 1986 logbook, which has this logbook entry
|
||||
|
||||
response = self.client.get(f"/logbookentry/1986-07-27/1986-07-27a")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response_1986-07-27a.html', 'w') as f:
|
||||
# f.write(content)
|
||||
expected = [
|
||||
"<title>Logbook CUCC expo-test 1986 123 - 123 Wave 1</title>",
|
||||
"Smartarse rig first section of new pitches. Second wave arrives and takes over rigging.",
|
||||
]
|
||||
for ph in expected:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_lbe_new(self):
|
||||
"""This page requires the user to be logged in first, hence the extra shenanigans
|
||||
"""
|
||||
c = self.client
|
||||
u = self.user
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
response = self.client.get(f"/logbookedit/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
expected = [
|
||||
"New Logbook Entry in ",
|
||||
"Everyone else involved",
|
||||
"Place: cave name, or 'plateau', 'topcamp' etc.",
|
||||
]
|
||||
for ph in expected:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, f"({response.status_code}) Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
def test_lbe_edit(self):
|
||||
"""This page requires the user to be logged in first, hence the extra shenanigans
|
||||
"""
|
||||
c = self.client
|
||||
u = self.user
|
||||
c.login(username=u.username, password="secretword")
|
||||
|
||||
lbp.LoadLogbook(self.test_expo) # i.e. load the 1986 logbook, which has this logbook entry
|
||||
# muliple loads are overwriting the lbes and incrementing the a, b, c etc, so get one that works
|
||||
lbe = LogbookEntry.objects.get(date="1986-07-31") # only one on this date in fixture
|
||||
|
||||
response = self.client.get(f"/logbookedit/{lbe.slug}")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response_edit.html', 'w') as f:
|
||||
# f.write(content)
|
||||
expected = [
|
||||
"Edit Existing Logbook Entry on 1986-07-31",
|
||||
r"Other names \(comma separated\)", # regex match so slashes need to be espcaped
|
||||
"Place: cave name, or 'plateau', 'topcamp' etc.",
|
||||
]
|
||||
for ph in expected:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, f"({response.status_code}) Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_aliases(self):
|
||||
# FIX THIS
|
||||
# Problem: '' empty string appears as valid alias for David Smartarse
|
||||
response = self.client.get(f"/aliases/{TEST_YEAR}")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# with open('_test_responsealiases.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = f"'fsmartarse'"
|
||||
phmatch = re.search(ph, content)
|
||||
@@ -114,7 +206,7 @@ class ImportTest(TestCase):
|
||||
|
||||
def test_survexfiles(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/survexfile/caves/")
|
||||
response = self.client.get("/survexfile/caves")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
@@ -122,7 +214,6 @@ class ImportTest(TestCase):
|
||||
ph = f"Caves with subdirectories"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
def test_people(self):
|
||||
# Needs another test with test data
|
||||
@@ -131,7 +222,7 @@ class ImportTest(TestCase):
|
||||
content = response.content.decode()
|
||||
# with open('_test_response.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = f"<td><a href=\"/personexpedition/FredSmartarse/{TEST_YEAR}\">{TEST_YEAR}</a></td>"
|
||||
ph = f"<td><a href=\"/personexpedition/fred-smartarse/{TEST_YEAR}\">{TEST_YEAR}</a></td>"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
@@ -37,7 +37,7 @@ These url lines all come from templates/*.html
|
||||
{% url "dataissues" %}
|
||||
{% url "dwgallfiles" %}
|
||||
{% url "dwgupload" %}
|
||||
{% url "eastings" %}
|
||||
{% url "stations" %}
|
||||
{% url "exportlogbook" %}
|
||||
{% url "newcave" %}
|
||||
{% url "notablepersons" %}
|
||||
@@ -91,7 +91,8 @@ import re
|
||||
from http import HTTPStatus
|
||||
|
||||
from django.test import Client, TestCase
|
||||
from django.urls import reverse, path
|
||||
from django.urls import path, reverse
|
||||
|
||||
|
||||
# class SimplePageTest(unittest.TestCase):
|
||||
class URLTests(TestCase):
|
||||
@@ -115,7 +116,9 @@ class URLTests(TestCase):
|
||||
response = self.client.get("/statistics")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"0 expeditions: 0 people, 0 caves and 0 logbook entries."
|
||||
# with open('_test_response_statistics.html', 'w') as f:
|
||||
# f.write(content)
|
||||
ph = r"0 people, 0 caves, 0 wallets and 0 logbook entries"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
@@ -140,18 +143,17 @@ class URLTests(TestCase):
|
||||
|
||||
def test_url_allscans(self):
|
||||
"""Test the {% url "allscans" %} reverse resolution
|
||||
path('survey_scans/', allscans, name="allscans"), # all the scans in all wallets
|
||||
path('survey_scans', allscans, name="allscans"), # all the scans in all wallets
|
||||
"""
|
||||
reversed_url = reverse('allscans') # NB _ must be written as - if present in name
|
||||
self.assertEqual(reversed_url, "/survey_scans/")
|
||||
self.assertEqual(reversed_url, "/survey_scans")
|
||||
|
||||
def test_url_survexcaveslist(self):
|
||||
"""Test the {% url "allscans" %} reverse resolution
|
||||
path('survexfile/caves', survex.survexcaveslist, name="survexcaveslist"),
|
||||
path('survexfile/caves/', survex.survexcaveslist, name="survexcaveslist"), # auto slash not working
|
||||
"""
|
||||
reversed_url = reverse('survexcaveslist') # NB _ must be written as - if present in name
|
||||
self.assertEqual(reversed_url, "/survexfile/caves/")
|
||||
self.assertEqual(reversed_url, "/survexfile/caves")
|
||||
|
||||
def test_url_threed(self):
|
||||
"""Test the {% url "threed" %} reverse resolution
|
||||
|
||||
@@ -29,8 +29,7 @@ class ImportTest(TestCase):
|
||||
#ed to go through all modules and copy all imports here
|
||||
from io import StringIO
|
||||
|
||||
from cuy.club.models import (Article, Event, Member, Webpage,
|
||||
WebpageCategory)
|
||||
from cuy.club.models import Article, Event, Member, Webpage, WebpageCategory
|
||||
from cuy.website.views.generic import PUBLIC_LOGIN
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.decorators import login_required
|
||||
@@ -371,7 +370,7 @@ class ComplexLoginTests(TestCase):
|
||||
self.assertIsNotNone(t, 'Logged in as \'' + u.username + '\' but failed to get /committee/ content')
|
||||
|
||||
def test_user_force(self):
|
||||
from django.conf import settings
|
||||
from django.conf import settings
|
||||
c = self.client
|
||||
u = self.user
|
||||
m = self.member
|
||||
|
||||
+29
-29
@@ -19,9 +19,13 @@ https://docs.djangoproject.com/en/dev/topics/testing/tools/
|
||||
"""
|
||||
|
||||
|
||||
todo = """ADD TESTS when we are redirecting /expofiles/ to a remote file-delivering site
|
||||
todo = """ - ADD TESTS when we are redirecting /expofiles/ to a remote file-delivering site
|
||||
|
||||
- Add test for running cavern to produce a .3d file
|
||||
|
||||
- Add tests for editing the TXT files
|
||||
|
||||
- add "author" tests for the git add and commit stuff for uploaded files, inc on DEVSERVER or not
|
||||
"""
|
||||
|
||||
import re
|
||||
@@ -85,8 +89,10 @@ class PageTests(TestCase):
|
||||
def test_expoweb_dir_no_index(self):
|
||||
response = self.client.get("/handbook/troggle")
|
||||
content = response.content.decode()
|
||||
# with open('testresponse.html','w') as tr:
|
||||
# tr.writelines(content)
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
ph = r"Page not found handbook/troggle/index.html"
|
||||
ph = r"Page not found 'handbook/troggle/index.html'"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
@@ -165,14 +171,18 @@ class PageTests(TestCase):
|
||||
|
||||
def test_cave_kataster_not_found(self):
|
||||
# database not loaded, so no caves found; so looks for a generic expopage and fails
|
||||
response = self.client.get("/1623/115.htm")
|
||||
# NEEDS TO BE REDONE AFETR cave id rewriting removed after the data is fixed in all teh cave description
|
||||
# pages that link to photos
|
||||
response = self.client.get("/1234/115.htm")
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
content = response.content.decode()
|
||||
ph = r"Page not found 1623/115.htm"
|
||||
ph = r"Page not found '1234/115.htm'"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_caves_page(self):
|
||||
# Throws up lots of cave error msgs because it is looking at something which is not loaded for the tests
|
||||
# but the test itself does not fail
|
||||
response = self.client.get("/caves")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
@@ -186,10 +196,10 @@ class PageTests(TestCase):
|
||||
content = response.content.decode()
|
||||
ph = r"115"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
self.assertIsNone(phmatch, "Unexpectedly found unexpected text: '" + ph + "'")
|
||||
|
||||
def test_page_ss(self):
|
||||
response = self.client.get("/survey_scans/")
|
||||
response = self.client.get("/survey_scans")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
|
||||
ph = r"All Survey scans folders "
|
||||
@@ -294,7 +304,7 @@ class PageTests(TestCase):
|
||||
|
||||
def test_page_folk(self):
|
||||
# This page is separately generated, so it has the full data content
|
||||
response = self.client.get("/folk/index.htm")
|
||||
response = self.client.get("/folk/index.html")
|
||||
content = response.content.decode()
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
for ph in [
|
||||
@@ -308,8 +318,8 @@ class PageTests(TestCase):
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofile_documents(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/expofiles/documents")
|
||||
# this gets a real page as it is looking at the filesystem
|
||||
response = self.client.get("/expofiles/documents/ropes")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
@@ -320,8 +330,8 @@ class PageTests(TestCase):
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_expofile_documents_slash(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/expofiles/documents/")
|
||||
# this gets a real page as it is looking at the filesystem
|
||||
response = self.client.get("/expofiles/documents/ropes/")
|
||||
if response.status_code != HTTPStatus.OK:
|
||||
self.assertEqual(response.status_code, HTTPStatus.FOUND)
|
||||
if response.status_code != HTTPStatus.FOUND:
|
||||
@@ -402,7 +412,7 @@ class PageTests(TestCase):
|
||||
response = self.client.get("/photos/2018/PhilipSargent/_corin.jpeg") # does not exist
|
||||
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
|
||||
content = response.content.decode()
|
||||
ph = r"<title>Page not found 2018/PhilipSargent/_corin.jpeg</title>"
|
||||
ph = r"<title>Page not found '2018/PhilipSargent/_corin.jpeg'</title>"
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
@@ -417,7 +427,7 @@ class PageTests(TestCase):
|
||||
|
||||
def test_page_survey_scans_empty(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/survey_scans/")
|
||||
response = self.client.get("/survey_scans")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"contains the scanned original in-cave survey notes and sketches"
|
||||
@@ -438,21 +448,11 @@ class PageTests(TestCase):
|
||||
response = self.client.get("/dwgfiles")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
# with open('_test_response_dwgallfiles.html', 'w') as f:
|
||||
# f.write(content)
|
||||
for ph in [
|
||||
r"All Tunnel and Therion files",
|
||||
r"<th>Wallets</th><th>Scan files in the wallets</th><th>Frames</th></tr>",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_page_dwgallfiles_empty_slash(self):
|
||||
# this gets an empty page as the database has not been loaded
|
||||
response = self.client.get("/dwgfiles/")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
for ph in [
|
||||
r"All Tunnel and Therion files",
|
||||
r"<th>Wallets</th><th>Scan files in the wallets</th><th>Frames</th></tr>",
|
||||
r"<th>Wallets</th><th>Scan files when the drawing was created</th><th>Frames</th></tr>",
|
||||
]:
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
@@ -513,12 +513,12 @@ class PageTests(TestCase):
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
def test_eastings(self):
|
||||
def test_stations(self):
|
||||
# Needs another test with test data
|
||||
response = self.client.get("/eastings")
|
||||
response = self.client.get("/stations")
|
||||
self.assertEqual(response.status_code, HTTPStatus.OK)
|
||||
content = response.content.decode()
|
||||
ph = r"<tr><th>Survex Station</th><th>x</th><th>y</th></tr>"
|
||||
ph = "<tr><th>Survex Station</th><th>x "
|
||||
phmatch = re.search(ph, content)
|
||||
self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")
|
||||
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
# Copilot: Sure, based on your `urls.py` file, here's how you might write end-to-end tests for the `survey_scans` endpoints:
|
||||
|
||||
from django.test import Client, TestCase
|
||||
from django.urls import reverse
|
||||
|
||||
# Copilot does not know that there is no data in the database, so that the tests need to pre-populate with
|
||||
# objects before 3 of these tests will work. These 3 are now commented out, pending proper integration.
|
||||
# Philip, 2023-12-31
|
||||
|
||||
class TestSurveyScansView(TestCase):
|
||||
def setUp(self):
|
||||
self.client = Client()
|
||||
|
||||
def test_allscans_view(self):
|
||||
response = self.client.get(reverse('allscans'))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
# Add more assertions here based on the expected HTML content
|
||||
|
||||
# def test_cavewallets_view(self):
|
||||
# response = self.client.get(reverse('cavewallets'))
|
||||
# self.assertEqual(response.status_code, 200)
|
||||
# # Add more assertions here based on the expected HTML content
|
||||
|
||||
# def test_scansingle_view(self):
|
||||
# # You'll need to replace 'scan_id' with an actual scan ID from your database
|
||||
# response = self.client.get(reverse('scansingle', kwargs={'scan_id': 1}))
|
||||
# self.assertEqual(response.status_code, 200)
|
||||
# # Add more assertions here based on the expected HTML content
|
||||
|
||||
# def test_walletslistperson_view(self):
|
||||
# # You'll need to replace 'person_id' with an actual person ID from your database
|
||||
# response = self.client.get(reverse('walletslistperson', kwargs={'person_id': 1}))
|
||||
# self.assertEqual(response.status_code, 200)
|
||||
# # Add more assertions here based on the expected HTML content
|
||||
|
||||
def test_walletslistyear_view(self):
|
||||
# You'll need to replace 'year' with an actual year from your database
|
||||
response = self.client.get(reverse('walletslistyear', kwargs={'year': 2020}))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
# Add more assertions here based on the expected HTML content
|
||||
"""
|
||||
|
||||
In this example, replace `'allscans'`, `'cavewallets'`, `'scansingle'`, `'walletslistperson'`, and `'walletslistyear'` with the actual names of your endpoints in your `urls.py` file. The `reverse` function is used to avoid hardcoding the URL. Instead, it generates the URL by looking up the URL pattern by name.
|
||||
|
||||
For each of your views, you'll want to create a new test method (like `test_allscans_view` above) where you use the client to make a request to your view and then assert that the response has the expected status code and content.
|
||||
|
||||
If you can provide more details about what each view is supposed to do, I can help you write more specific tests. For example, if your view is supposed to render a form, you might want to check that the form is present in the response content. If your view is supposed to handle a form submission, you might want to test that submitting the form results in the expected changes in the database. If your view requires a logged-in user, you might want to test both the logged-in and logged-out cases. And so on. Happy testing! 🚀
|
||||
"""
|
||||
+25
-51
@@ -2,19 +2,18 @@ from django.contrib import admin
|
||||
from django.core import serializers
|
||||
from django.http import HttpResponse
|
||||
|
||||
from troggle.core.models.caves import Area, Cave, CaveAndEntrance, Entrance
|
||||
from troggle.core.models.logbooks import QM, LogbookEntry, PersonLogEntry, CaveSlug
|
||||
from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance
|
||||
from troggle.core.models.logbooks import QM, LogbookEntry, PersonLogEntry
|
||||
from troggle.core.models.survex import (
|
||||
DrawingFile,
|
||||
SingleScan,
|
||||
SurvexBlock,
|
||||
SurvexDirectory,
|
||||
SurvexFile,
|
||||
SurvexPersonRole,
|
||||
SurvexPersonTeam,
|
||||
SurvexStation,
|
||||
)
|
||||
from troggle.core.models.wallets import Wallet
|
||||
from troggle.core.models.troggle import DataIssue, Expedition, Person, PersonExpedition
|
||||
from troggle.core.models.wallets import Wallet
|
||||
|
||||
"""This code significantly adds to the capabilities of the Django Management control panel for Troggle data.
|
||||
In particular, it enables JSON export of any data with 'export_as_json'
|
||||
@@ -31,27 +30,16 @@ class TroggleModelAdmin(admin.ModelAdmin):
|
||||
"""overriding admin save to fill the new_since parsing_field
|
||||
|
||||
new_since_parsing is not currently used in troggle. It is a fossil."""
|
||||
obj.new_since_parsing = True
|
||||
# obj.new_since_parsing = True
|
||||
obj.save()
|
||||
|
||||
class Media:
|
||||
js = ("jquery/jquery.min.js", "js/QM_helper.js") # not currently available to troggle, see media/js/README
|
||||
|
||||
|
||||
class RoleInline(admin.TabularInline):
|
||||
model = SurvexPersonRole
|
||||
extra = 4
|
||||
|
||||
|
||||
class SurvexBlockAdmin(TroggleModelAdmin):
|
||||
inlines = (RoleInline,)
|
||||
|
||||
|
||||
# class QMsFoundInline(admin.TabularInline):
|
||||
# model = QM
|
||||
# fk_name = "found_by"
|
||||
# fields = ("number", "grade", "location_description", "comment") # need to add foreignkey to cave part
|
||||
# extra = 1
|
||||
# class RoleInline(admin.TabularInline):
|
||||
# model = SurvexPersonTeam
|
||||
# extra = 4
|
||||
|
||||
|
||||
class PersonLogEntryInline(admin.TabularInline):
|
||||
@@ -59,12 +47,11 @@ class PersonLogEntryInline(admin.TabularInline):
|
||||
raw_id_fields = ("personexpedition",)
|
||||
extra = 1
|
||||
|
||||
|
||||
class LogbookEntryAdmin(TroggleModelAdmin):
|
||||
prepopulated_fields = {"slug": ("title",)}
|
||||
search_fields = ("title", "expedition__year")
|
||||
date_heirarchy = "date"
|
||||
# inlines = (PersonLogEntryInline, QMsFoundInline)
|
||||
#inlines = (PersonLogEntryInline, QMsFoundInline)
|
||||
|
||||
class Media:
|
||||
css = {"all": ("css/troggleadmin.css",)} # this does not exist
|
||||
@@ -79,74 +66,61 @@ class LogbookEntryAdmin(TroggleModelAdmin):
|
||||
response = downloadLogbook(request=request, queryset=queryset, extension="txt") # fails, no queryset
|
||||
return response
|
||||
|
||||
|
||||
class PersonExpeditionInline(admin.TabularInline):
|
||||
model = PersonExpedition
|
||||
extra = 1
|
||||
|
||||
|
||||
class PersonAdmin(TroggleModelAdmin):
|
||||
search_fields = ("first_name", "last_name")
|
||||
search_fields = ("first_name", "last_name", "slug")
|
||||
list_display = ["slug", "first_name", "last_name" ]
|
||||
inlines = (PersonExpeditionInline,)
|
||||
|
||||
|
||||
class QMAdmin(TroggleModelAdmin):
|
||||
search_fields = ("number", "expoyear")
|
||||
search_fields = ("number", "cave", "expoyear")
|
||||
list_display = ("__str__", "grade")
|
||||
list_display_links = ("__str__",)
|
||||
# list_editable = ("comment", "page_ref", "grade")
|
||||
# list_per_page = 20
|
||||
# raw_id_fields = ("found_by", "ticked_off_by")
|
||||
|
||||
list_filter = ('grade', 'cave', 'expoyear')
|
||||
|
||||
class PersonExpeditionAdmin(TroggleModelAdmin):
|
||||
search_fields = ("person__first_name", "expedition__year")
|
||||
|
||||
search_fields = ("person__first_name", "person__slug", "expedition__year")
|
||||
|
||||
class CaveAdmin(TroggleModelAdmin):
|
||||
search_fields = ("official_name", "kataster_number", "unofficial_number")
|
||||
search_fields = ("areacode", "official_name", "kataster_number", "unofficial_number")
|
||||
list_display = ["areacode", "official_name"]
|
||||
extra = 4
|
||||
|
||||
|
||||
class EntranceAdmin(TroggleModelAdmin):
|
||||
search_fields = ("caveandentrance__cave__kataster_number",)
|
||||
|
||||
|
||||
class SurvexStationAdmin(TroggleModelAdmin):
|
||||
search_fields = ("name",)
|
||||
|
||||
|
||||
class SurvexFileAdmin(TroggleModelAdmin):
|
||||
search_fields = ("path",)
|
||||
|
||||
|
||||
class SurvexDirectoryAdmin(TroggleModelAdmin):
|
||||
search_fields = (
|
||||
"path",
|
||||
"survexdirectory",
|
||||
)
|
||||
|
||||
# class SurvexBlockAdmin(TroggleModelAdmin):
|
||||
# inlines = (RoleInline,)
|
||||
|
||||
class SurvexBlockAdmin(TroggleModelAdmin):
|
||||
search_fields = ("name", "title")
|
||||
list_display = ["survexfile", "name", "title", "scanswallet", "ref_text"]
|
||||
|
||||
class DrawingFileAdmin(TroggleModelAdmin):
|
||||
search_fields = ("dwgname",)
|
||||
|
||||
|
||||
class WalletAdmin(TroggleModelAdmin):
|
||||
search_fields = ("fpath",)
|
||||
search_fields = ("fpath", "walletname", "walletyear")
|
||||
|
||||
|
||||
admin.site.register(Cave, CaveAdmin)
|
||||
admin.site.register(Area)
|
||||
admin.site.register(CaveAndEntrance)
|
||||
admin.site.register(Entrance, EntranceAdmin)
|
||||
admin.site.register(CaveSlug)
|
||||
admin.site.register(SurvexBlock, SurvexBlockAdmin)
|
||||
admin.site.register(DrawingFile, DrawingFileAdmin)
|
||||
admin.site.register(Expedition)
|
||||
admin.site.register(Person, PersonAdmin)
|
||||
admin.site.register(SurvexPersonRole)
|
||||
admin.site.register(SurvexDirectory, SurvexDirectoryAdmin)
|
||||
admin.site.register(SurvexPersonTeam)
|
||||
admin.site.register(SurvexFile, SurvexFileAdmin)
|
||||
admin.site.register(SurvexBlock, SurvexBlockAdmin)
|
||||
admin.site.register(SurvexStation, SurvexStationAdmin)
|
||||
admin.site.register(PersonExpedition, PersonExpeditionAdmin)
|
||||
admin.site.register(LogbookEntry, LogbookEntryAdmin)
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
<?xml version='1.0' encoding='windows-1252'?>
|
||||
<gpx version="1.1" creator="CaveAreaProcessor" xmlns="http://www.topografix.com/GPX/1/1">
|
||||
<trk>
|
||||
<name>Cleaned Border 1626-1623</name>
|
||||
<trkseg />
|
||||
</trk>
|
||||
</gpx>
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
from django.conf import settings
|
||||
|
||||
from troggle.core.models.troggle import Expedition
|
||||
# from troggle.core.models.troggle import Expedition
|
||||
|
||||
"""This is the only troggle-specific 'context processor' that troggle uses
|
||||
in the processing of Django templates
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
[
|
||||
{"pk": 9010, "model": "auth.user", "fields":
|
||||
{"username": "expotest", "first_name": "ExpoTest", "last_name": "Caver", "is_active": true, "is_superuser": false, "is_staff": false, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expo@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}},
|
||||
|
||||
{"pk": 9011, "model": "auth.user", "fields":
|
||||
{"username": "expotestadmin", "first_name": "ExpoTest", "last_name": "Admin", "is_active": true, "is_superuser": true, "is_staff": true, "last_login": "2021-02-01 00:00:00+0100", "groups": [], "user_permissions": [], "password": "pbkdf2_sha256$150000$I9wNXhHCAaHo$0ncTIJ7G+3bSaKHg7RD3ZG2a/4v7cG1bjovq9BiCyA4=", "email": "philip.sargent+expoadmin@gmail.com", "date_joined": "2021-02-01 00:00:00+0100"}}
|
||||
]
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
[
|
||||
{"model": "core.area", "pk": 25, "fields":
|
||||
{"short_name": "1626 or 6 (borderline)", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 24, "fields":
|
||||
{"short_name": "8a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 23, "fields":
|
||||
{"short_name": "2b or 4 (unclear)", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 22, "fields":
|
||||
{"short_name": "11", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 21, "fields":
|
||||
{"short_name": "3", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 20, "fields":
|
||||
{"short_name": "4", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 19, "fields":
|
||||
{"short_name": "1b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 18, "fields":
|
||||
{"short_name": "8b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 17, "fields":
|
||||
{"short_name": "2d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 16, "fields":
|
||||
{"short_name": "7", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 15, "fields":
|
||||
{"short_name": "2b", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 14, "fields":
|
||||
{"short_name": "8c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 13, "fields":
|
||||
{"short_name": "2c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 12, "fields":
|
||||
{"short_name": "8d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 11, "fields":
|
||||
{"short_name": "", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 10, "fields":
|
||||
{"short_name": "5", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 9, "fields":
|
||||
{"short_name": "6", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 8, "fields":
|
||||
{"short_name": "2a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 7, "fields":
|
||||
{"short_name": "1c", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 6, "fields":
|
||||
{"short_name": "1d", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 5, "fields":
|
||||
{"short_name": "1a", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 4, "fields":
|
||||
{"short_name": "9", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 3, "fields":
|
||||
{"short_name": "10", "name": null, "description": null, "super": 1, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 2, "fields":
|
||||
{"short_name": "1626", "name": null, "description": null, "super": null, "new_since_parsing": false, "non_public": false}},
|
||||
{"model": "core.area", "pk": 1, "fields":
|
||||
{"short_name": "1623", "name": null, "description": null, "super": null, "new_since_parsing": false, "non_public": false}}
|
||||
]
|
||||
@@ -1,40 +0,0 @@
|
||||
[{"model": "core.cave", "pk": 43, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
"official_name": "Schnellzughöhle",
|
||||
"kataster_code": "6/t/S/W x",
|
||||
"kataster_number": "115",
|
||||
"unofficial_number": "40m",
|
||||
"explorers": "CUCC 1980-1985",
|
||||
"underground_description": "This is the main entrance through which the majority of the <a href=\"41.htm\">Stellerweghöhle</a> system was explored. See the separate <a href=\"41/115.htm#ent115\">full guidebook description</a> for details, just an overview is given here.</p><p>The entrance leads to a non-obvious way on to the head of the short <b>Bell Pitch</b>, from where very awkward going leads out to a bigger passage to reach <b>The Ramp</b> a series of off-vertical pitches. The damper but technically easier <b>Inlet Pitches</b> drop to a Big Chamber, from where <b>Pete's Purgatory</b> starts, and leads in 800m of tortuous going to <b>The Confluence</b> and the larger streamway leading to the deepest point.</p><p>Better is the <b>Purgatory Bypass</b> which starts as dry fossil tubes, with a choice of routes to reach <b>Junction Chamber</b> where the <b>Big Rift</b> of <a href=\"41.htm\">Stellerweghöhle</a> enters. Opposite, the huge fossil tube of <b>Dartford Tunnel</b> makes for easy progress to the Confluence, about halfway down the system. The continuing main streamway is interrupted by a bypassable sump and numerous pitches before a low airspace duck at the end of an unpromising canal leads to the spectacular <b>Orgasm Chasm</b>. Careful rigging avoids the water in this 140m shaft, ending in muddy passage and another short drop to a deep and terminal sump. ",
|
||||
"equipment": "",
|
||||
"references": "",
|
||||
"survey": "CUCC's parts surveyed to Grade 5 but not all drawn up - see <a href=\"41/survey.htm\">here</a>",
|
||||
"kataster_status": "",
|
||||
"underground_centre_line": "In dataset",
|
||||
"notes": "The Austrian Kataster has adopted a very perverse way of numbering things. Their numbers are as follows:</p><ul> <li>115a Stellerweghöhle entrance 41a</li> <li>115b Stellerweghöhle entrance 41b</li> <li>115c Stellerweghöhle entrance 41c ( where ? )</li> <li>115d Schnellzughöhle entrance 115</li> <li>115e unnamed entrance 142</li></ul><p>", "length": "SMK system total 54000m", "depth": "from entrance; SMK system total 1032m", "extent": "SMK system total 2812m",
|
||||
"survex_file": "smk-system.svx",
|
||||
"description_file": "1623/115.htm",
|
||||
"url": "1623/115.url",
|
||||
"filename": "1623-115.html",
|
||||
"area": [1, 8]}},
|
||||
|
||||
{"model": "core.cave", "pk": 350, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
"official_name": "Seetrichter",
|
||||
"kataster_code": "",
|
||||
"kataster_number": "284",
|
||||
"unofficial_number": "",
|
||||
"explorers": "<p></p>",
|
||||
"underground_description": "",
|
||||
"equipment": "<p></p>",
|
||||
"references": "<p>",
|
||||
"survey": "<p></p>",
|
||||
"kataster_status": "",
|
||||
"underground_centre_line": "",
|
||||
"notes": "A 25m long (22m deep) resurgence in Altausee. At the bottom, at a depth of 72m, there are large round blocks.", "length": "", "depth": "", "extent": "",
|
||||
"survex_file": "",
|
||||
"description_file": "",
|
||||
"url": "1623/284/284.html",
|
||||
"filename": "1623-284.html",
|
||||
"area": [1, 11]}}
|
||||
]
|
||||
@@ -1,17 +1,18 @@
|
||||
[{"model": "core.expedition", "pk": 44, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
{"non_public": false,
|
||||
"year": "2019", "name": "CUCC expo 2019"}},
|
||||
|
||||
{"model": "core.personexpedition", "pk": 681, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
{"non_public": false,
|
||||
"expedition": 44,
|
||||
"person": 250, "slugfield": null, "is_guest": false
|
||||
"person": 250
|
||||
}},
|
||||
|
||||
{"model": "core.person", "pk": 250, "fields":
|
||||
{"new_since_parsing": false, "non_public": false,
|
||||
{"non_public": false,
|
||||
"first_name": "Michael",
|
||||
"last_name": "Sargent",
|
||||
"fullname": "Michael Sargent", "is_vfho": false, "mug_shot": null,
|
||||
"last_name": "Sargent",
|
||||
"slug": "michael-sargent",
|
||||
"fullname": "Michael Sargent", "is_vfho": false, "is_guest": false, "mugshot": null,
|
||||
"blurb": "\n\n\n\n\n\n<p><img class=\"onleft\" src=\"/folk/i/mikey0.jpg\">\n<img class=\"onright\" src=\"/folk/i/mikey1.jpg\" height=\"400\"\nalt=\"\" />\n<b>Michael Sargent</b> CUCC<br />\nExpeditions 2014, 15, 16, 17, 18, 19.\n<p>The first second-generation expo caver in 2014, later members of this exclusive group were Dan Lenartowicz and Sarah Connolly.\n\n\n<img class=\"onleft\" src=\"/folk/i/michaelsargent.jpg\">\n<im\n\n<hr style=\"clear: both\" /><p class=\"caption\">Pre-expo (pre-student) photos from President's Invite (OUCC) \nand first abseiling instruction (Cambridge).</p>\n", "orderref": ""}}
|
||||
]
|
||||
+144
-95
@@ -1,41 +1,70 @@
|
||||
# from tinymce.widgets import TinyMCE
|
||||
import re
|
||||
|
||||
import django.forms as forms
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.forms import ModelForm
|
||||
from django.forms.models import modelformset_factory
|
||||
|
||||
from troggle.core.models.caves import Cave, CaveAndEntrance, Entrance
|
||||
from troggle.core.views.editor_helpers import HTMLarea
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
# from tinymce.widgets import TinyMCE
|
||||
import re
|
||||
|
||||
|
||||
"""These are all the class-based Forms used by troggle.
|
||||
There are other, simpler, upload forms in view/uploads.py
|
||||
|
||||
class-based forms are quicker to set up (for Django experts) but
|
||||
are more difficult to maintain by non-Django experts.
|
||||
are more difficult to maintain (or even begin to understand) by non-Django experts.
|
||||
|
||||
Notes to self, as I try to work out what the hell is going on:
|
||||
|
||||
Note that HTMLarea invokes a widget which sets a CSS class which calls javascript in
|
||||
templates/html_editor_scripts_css.html - which imports jquery and codemirror directly, without
|
||||
declaring it anywhere or locally installing it. (!)
|
||||
|
||||
Django handles three distinct parts of the work involved in forms:
|
||||
- preparing and restructuring data to make it ready for rendering
|
||||
- creating HTML forms for the data
|
||||
- receiving and processing submitted forms and data from the client
|
||||
It is possible to write code that does all of this manually, but Django can take care of it all for you.
|
||||
|
||||
READ https://docs.djangoproject.com/en/5.1/topics/forms/ and thoroughly digest it, also:
|
||||
https://pythontimes.com/django-forms-deep-dive-advanced-techniques-for-form-handling/
|
||||
https://docs.djangoproject.com/en/5.1/ref/forms/models/
|
||||
https://stackoverflow.com/questions/53035151/django-formset-factory-vs-modelformset-factory-vs-inlineformset-factory
|
||||
https://micropyramid.com/blog/understanding-djangos-model-formsets-in-detail-and-their-advanced-usage
|
||||
https://www.geeksforgeeks.org/django-modelformsets/
|
||||
https://www.codeunderscored.com/model-formsets-in-django/
|
||||
https://django-formset.fly.dev/styling/
|
||||
"""
|
||||
|
||||
todo = """
|
||||
todo = """ - Finish rewriting NewProspectForm to include all validation in the form class itself.
|
||||
"""
|
||||
|
||||
|
||||
class CaveForm(ModelForm):
|
||||
"""Only those fields for which we want to override defaults are listed here
|
||||
the other fields are present on the form, but use the default presentation style
|
||||
the other fields of the class Cave are present on the form, but use the default presentation style
|
||||
Extra fields, not in the model Cave, are also created here, e.g. who_are_you
|
||||
|
||||
see https://docs.djangoproject.com/en/5.1/topics/forms/modelforms/
|
||||
"""
|
||||
|
||||
official_name = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
|
||||
unofficial_number= forms.CharField(required=False,
|
||||
label="Unofficial Number used to construct internal identifiers",
|
||||
widget=forms.TextInput(
|
||||
attrs={"size": "45", "placeholder": "2035-ZB-03"}))
|
||||
|
||||
official_name = forms.CharField(required=False,
|
||||
label="Name:",widget=forms.TextInput(
|
||||
attrs={"size": "45", "placeholder": "ideally official name in German, but any name is OK"}))
|
||||
|
||||
underground_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
explorers = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
required=False, disabled = True,
|
||||
label = "Explorers (CUCC expo or another club) - historic and disabled. This is now recorded in Survex files.",
|
||||
widget=forms.TextInput(attrs={"height": "80%", "size": 80}),
|
||||
)
|
||||
equipment = forms.CharField(
|
||||
required=False,
|
||||
@@ -45,48 +74,48 @@ class CaveForm(ModelForm):
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
# survey = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
kataster_status = forms.CharField(required=False)
|
||||
underground_centre_line = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
kataster_code = forms.CharField(required=False,
|
||||
label = "Exploration code, see explanation at bottom of page",
|
||||
widget=forms.TextInput(attrs={"placeholder": "e.g. 2/S= See below"})
|
||||
)
|
||||
notes = forms.CharField(
|
||||
required=False,
|
||||
label = "Notes, e.g. progress on issuing kataster no.",
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
references = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter page content (using HTML)"}),
|
||||
)
|
||||
description_file = forms.CharField(required=False, label="Path of top-level description file for this cave, when a separate file is used. Otherwise blank.", widget=forms.TextInput(attrs={"size": "45"}), help_text="")
|
||||
description_file = forms.CharField(required=False, label="Path of top-level description file for this cave, when a separate file is used. Otherwise blank.", widget=forms.TextInput(attrs={"size": "45","placeholder": "usually blank"}), help_text="")
|
||||
survex_file = forms.CharField(
|
||||
required=False, label="Survex file eg. caves-1623/000/000.svx", widget=forms.TextInput(attrs={"size": "45"})
|
||||
)
|
||||
#url = forms.CharField(required=True, label="URL eg. 1623/000/000 (no .html)", widget=forms.TextInput(attrs={"size": "45"}))
|
||||
length = forms.CharField(required=False, label="Length (m)")
|
||||
depth = forms.CharField(required=False, label="Depth (m)")
|
||||
extent = forms.CharField(required=False, label="Extent (m)")
|
||||
|
||||
#cave_slug = forms.CharField()
|
||||
|
||||
length = forms.CharField(required=False, label="Length (m)", widget=forms.TextInput(attrs={"placeholder": "usually blank"}))
|
||||
depth = forms.CharField(required=False, label="Depth (m)", widget=forms.TextInput(attrs={"placeholder": "usually blank"}))
|
||||
extent = forms.CharField(required=False, label="Extent (m)", widget=forms.TextInput(attrs={"placeholder": "usually blank"}))
|
||||
areacode = forms.CharField(required=False, label="Area code", widget=forms.TextInput(attrs={"placeholder": "e.g. 1623"})) # should be required=True?
|
||||
subarea = forms.CharField(required=False, label="Subarea (do not use for new caves)", widget=forms.TextInput(attrs={"placeholder": "usually blank, archaic"}))
|
||||
|
||||
identified_login = forms.BooleanField(required=False,widget=forms.CheckboxInput(attrs={"onclick":"return false"})) # makes it readonly
|
||||
who_are_you = forms.CharField(
|
||||
widget=forms.TextInput(
|
||||
attrs={"size": 100, "placeholder": "You are editing this page, who are you ? e.g. 'Becka' or 'Animal <mta@gasthof.expo>'",
|
||||
"style": "vertical-align: text-top;"}
|
||||
)
|
||||
)
|
||||
class Meta:
|
||||
model = Cave
|
||||
exclude = ("filename",)
|
||||
exclude = ("filename", "url", "kataster_status")
|
||||
|
||||
field_order = ['area', 'unofficial_number', 'kataster_number', 'official_name', 'underground_description', 'explorers', 'equipment', 'survey', 'kataster_status', 'underground_centre_line', 'notes', 'references', 'description_file', 'survex_file', 'url', 'length', 'depth', 'extent']
|
||||
|
||||
def get_area(self):
|
||||
for a in self.cleaned_data["area"]:
|
||||
if a.kat_area():
|
||||
return a.kat_area()
|
||||
|
||||
field_order = ['unofficial_number', 'kataster_number', 'official_name', 'unexplored', 'underground_description', 'survey',
|
||||
'equipment', 'notes', 'references', 'description_file', 'survex_file', 'explorers',
|
||||
'areacode', 'subarea', 'length', 'depth', 'extent',
|
||||
'kataster_code', 'kataster_status', 'fully_explored', 'non_public', 'identified_login', 'who_are_you']
|
||||
|
||||
def clean_cave_slug(self):
|
||||
if self.cleaned_data["cave_slug"] == "":
|
||||
myArea = ""
|
||||
for a in self.cleaned_data["area"]:
|
||||
if a.kat_area():
|
||||
myArea = a.kat_area()
|
||||
myArea = self.cleaned_data["areacode"]
|
||||
if self.data["kataster_number"]:
|
||||
cave_slug = f"{myArea}-{self.cleaned_data['kataster_number']}"
|
||||
else:
|
||||
@@ -96,51 +125,45 @@ class CaveForm(ModelForm):
|
||||
# Converting a PENDING cave to a real cave by saving this form
|
||||
print("EEE", cave_slug.replace("-PENDING-", "-"))
|
||||
return cave_slug.replace("-PENDING-", "-")
|
||||
|
||||
# def clean_url(self):
|
||||
# data = self.cleaned_data["url"]
|
||||
# if not re.match("\d\d\d\d/.", data):
|
||||
# raise ValidationError("URL must start with a four digit Kataster area.")
|
||||
# return data
|
||||
|
||||
|
||||
def clean(self):
|
||||
cleaned_data = super(CaveForm, self).clean()
|
||||
cleaned_data = super(CaveForm, self).clean() # where is this code hidden? How does this work??
|
||||
if self.data.get("kataster_number") == "" and self.data.get("unofficial_number") == "":
|
||||
self._errors["unofficial_number"] = self.error_class(
|
||||
["Either the kataster or unoffical number is required."]
|
||||
)
|
||||
# if self.cleaned_data.get("kataster_number") != "" and self.cleaned_data.get("official_name") == "":
|
||||
# self._errors["official_name"] = self.error_class(["This field is required when there is a kataster number."])
|
||||
if cleaned_data.get("area") == []:
|
||||
self._errors["area"] = self.error_class(["This field is required."])
|
||||
if cleaned_data.get("url") and cleaned_data.get("url").startswith("/"):
|
||||
self._errors["url"] = self.error_class(["This field cannot start with a /."])
|
||||
if self.data.get("areacode") not in ["1623", "1626", "1627", "1624"]:
|
||||
self._errors["areacode"] = self.error_class(
|
||||
["An Austrian kataster areacode, i.e. 1623, 1626, 1627, or 1624 is required."]
|
||||
)
|
||||
|
||||
return cleaned_data
|
||||
|
||||
|
||||
class EntranceForm(ModelForm):
|
||||
"""Only those fields for which we want to override defaults are listed here
|
||||
the other fields are present on the form, but use the default presentation style
|
||||
|
||||
see https://docs.djangoproject.com/en/5.1/topics/forms/modelforms/
|
||||
"""
|
||||
|
||||
name = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
|
||||
name = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45", "placeholder": "usually leave this blank"}))
|
||||
entrance_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
explorers = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "45"}))
|
||||
# explorers = forms.CharField(required = False, widget=TinyMCE(attrs={'cols': 80, 'rows': 10}))
|
||||
map_description = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
# explorers = forms.CharField(required=False,
|
||||
# label = "Explorers: CUCC expo or another club?",
|
||||
# widget=forms.TextInput(attrs={"size": "45"}))
|
||||
|
||||
location_description = forms.CharField(
|
||||
label="Location",
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
lastvisit = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Date of last visit, e.g. 2023-07-11"
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Last visit date, e.g. 2023-07-11"
|
||||
)
|
||||
approach = forms.CharField(
|
||||
required=False,
|
||||
@@ -151,81 +174,106 @@ class EntranceForm(ModelForm):
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
photo = forms.CharField(
|
||||
label="Photos (use 'image' button)",
|
||||
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Use button on right to add HTML link"}),
|
||||
)
|
||||
marking_comment = forms.CharField(
|
||||
label="Marking text",
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter exact tag text, e.g. 'CUCC 2035 ZB-03'"}),
|
||||
)
|
||||
findability_description = forms.CharField(
|
||||
required=False,
|
||||
label="How to find it",
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
other_description = forms.CharField(
|
||||
label="Other comments",
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
)
|
||||
bearings = forms.CharField(
|
||||
required=False,
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Enter text (using HTML)"}),
|
||||
widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Usually blank"}),
|
||||
)
|
||||
# bearings = forms.CharField(
|
||||
# label="Bearings (obsolete)",
|
||||
# required=False,
|
||||
# widget=HTMLarea(attrs={"height": "80%", "rows": 20, "placeholder": "Usually blank"}),
|
||||
# )
|
||||
tag_station = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(attrs={"size": "50"}), label="Tag station: Survex station id, e.g. 1623.p2023-xx-01"
|
||||
)
|
||||
exact_station = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(attrs={"size": "50"}), label="Exact station: Survex station id, e.g. 1623.2023-xx-01.2"
|
||||
widget=forms.TextInput(attrs={"size": "50","placeholder": "e.g. 1623.t2035-zb-03a"}),
|
||||
label="Tag station: Survex station id, e.g. 1623.p2023-aa-01 (must be lower case)"
|
||||
)
|
||||
other_station = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(attrs={"size": "50"}), label="Other station: Survex station id, e.g. 1623.2023-xx-01.33"
|
||||
widget=forms.TextInput(attrs={"size": "50","placeholder": "e.g. 1623.p2035-zb-03c"}),
|
||||
label="Other station: Survex station id, e.g. 1623.gps2018-aa-01 (must be lower case)"
|
||||
)
|
||||
northing = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Northing (UTM) - from survex data"
|
||||
)
|
||||
easting = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Easting (UTM) - from survex data"
|
||||
)
|
||||
|
||||
lat_wgs84 = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Latitude (WSG84) - if no other location"
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10","placeholder": "e.g. 47.123456"}),
|
||||
label="Latitude (WSG84) - if no other location"
|
||||
)
|
||||
long_wgs84 = forms.CharField(
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10"}), label="Longitude (WSG84) - if no other location"
|
||||
required=False, widget=forms.TextInput(attrs={"size": "10","placeholder": "e.g. 13.123456"}),
|
||||
label="Longitude (WSG84) - if no other location"
|
||||
)
|
||||
alt = forms.CharField(required=False, label="Altitude (m)")
|
||||
url = forms.CharField(required=False, label="URL [usually blank]", widget=forms.TextInput(attrs={"size": "45"}))
|
||||
|
||||
field_order = ['name', 'entrance_description', 'explorers', 'map_description', 'location_description', 'lastvisit', 'approach', 'underground_description', 'photo', 'marking_comment', 'findability_description', 'other_description', 'bearings', 'tag_station', 'exact_station', 'other_station', 'northing', 'easting', 'lat_wgs84', 'long_wgs84', 'alt', 'url']
|
||||
alt = forms.CharField(required=False, label="Altitude (m) - from GPS if you have it, but let it settle.")
|
||||
# url = forms.CharField(required=False, label="URL [usually blank]", widget=forms.TextInput(attrs={"size": "45"}))
|
||||
identified_login = forms.BooleanField(required=False,widget=forms.CheckboxInput(attrs={"onclick":"return false"})) # makes it readonly
|
||||
who_are_you = forms.CharField(
|
||||
widget=forms.TextInput(
|
||||
attrs={"size": 100, "placeholder": "You are editing this page, who are you ? e.g. 'Becka' or 'Animal <mta@gasthof.expo>'",
|
||||
"style": "vertical-align: text-top;"}
|
||||
)
|
||||
)
|
||||
|
||||
field_order = ['name', 'entrance_description', 'map_description', 'location_description', 'lastvisit', 'non_public',
|
||||
'findability', 'marking', 'approach', 'underground_description', 'photo', 'marking_comment', 'findability_description', 'other_description',
|
||||
'bearings', 'tag_station', 'other_station', 'easting', 'northing', 'lat_wgs84', 'long_wgs84', 'alt', 'identified_login', 'who_are_you']
|
||||
|
||||
class Meta:
|
||||
model = Entrance
|
||||
exclude = (
|
||||
"cached_primary_slug",
|
||||
"map_description", # No entrance has any data on this field, so it is being retired.
|
||||
"filename",
|
||||
"slug",
|
||||
"explorers", # archaic for Cave, redundant for Entrance
|
||||
"bearings"
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
if self.cleaned_data.get("url").startswith("/"):
|
||||
self._errors["url"] = self.error_class(["This field cannot start with a /."])
|
||||
return self.cleaned_data
|
||||
cleaned_data = super(EntranceForm, self).clean() # where is this code hidden? How does this work??
|
||||
for station in ["tag_station", "other_station"]:
|
||||
print(f"{station} -- {self.data.get(station)[:4]}")
|
||||
if self.data.get(station)[:5] not in ["1623.", "1626.", "1627.", "1624.", "", "speli"]: # spelix1623
|
||||
self._errors[station] = self.error_class(
|
||||
["An Austrian kataster areacode, i.e. 1623, 1626, 1627, or 1624 prefix is required."]
|
||||
)
|
||||
return self.cleaned_data
|
||||
|
||||
|
||||
# This next line is called from the templates/edit_cave.html template.
|
||||
# This is sufficient to create an entire entry for for the cave fields automatically
|
||||
# http://localhost:8000/cave/new/
|
||||
# using django built-in Deep Magic. https://docs.djangoproject.com/en/dev/topics/forms/modelforms/
|
||||
# for forms which map directly onto a Django Model
|
||||
CaveAndEntranceFormSet = modelformset_factory(CaveAndEntrance, exclude=("cave",))
|
||||
# This is used only in edit_entrance() in views/caves.py
|
||||
# # This next line is sufficient to create an entire entry for for the cave fields automatically
|
||||
# # for forms which map directly onto a Django Model
|
||||
# CaveAndEntranceFormSet = modelformset_factory(CaveAndEntrance, exclude=("cave",))
|
||||
# # This is used only in templates/editcave.html which is called only to edit caves in core/views/cave.py
|
||||
|
||||
class EntranceLetterForm(ModelForm):
|
||||
"""Form to link entrances to caves, along with an entrance number.
|
||||
|
||||
Nb. The relationship between caves and entrances has historically been a many to many relationship.
|
||||
NOTE. The relationship between caves and entrances was originally designed to be a many to many relationship.
|
||||
With entrances gaining new caves and letters when caves are joined.
|
||||
However, so far as I can see, this was never actually done in practice on Expo and each Entrance belongs
|
||||
to only one Cave.
|
||||
|
||||
see https://docs.djangoproject.com/en/5.1/topics/forms/modelforms/
|
||||
|
||||
To be re-written when we move the 'letter' field into Entrance
|
||||
"""
|
||||
|
||||
# This only needs to be required=True for the second and subsequent entrances, not the first. Tricky.
|
||||
entranceletter = forms.CharField(required=False, widget=forms.TextInput(attrs={"size": "2"}))
|
||||
|
||||
class Meta:
|
||||
model = CaveAndEntrance
|
||||
@@ -237,3 +285,4 @@ class EntranceLetterForm(ModelForm):
|
||||
self.instance.validate_unique()
|
||||
except forms.ValidationError as e:
|
||||
self._update_errors(e)
|
||||
|
||||
|
||||
+28446
File diff suppressed because it is too large
Load Diff
@@ -1,7 +1,6 @@
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
"""this is now replaced by databaseRest.py
|
||||
|
||||
This is an example of how to create our own bespoke commandline
|
||||
|
||||
@@ -0,0 +1,124 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.apps import apps
|
||||
from django.db.models import Q, CharField, TextField
|
||||
|
||||
"""Completely written by https://gemini.google.com/ 2.5
|
||||
"""
|
||||
|
||||
def get_all_project_models():
|
||||
"""
|
||||
Returns a list of all concrete model.Model classes in the Django project.
|
||||
It ignores abstract models and models from third-party apps that aren't
|
||||
part of the project's own apps.
|
||||
"""
|
||||
# You can customize this list to include/exclude specific apps
|
||||
# For example: installed_apps = settings.INSTALLED_APPS
|
||||
all_models = []
|
||||
# This gathers all models from all installed apps.
|
||||
for app_config in apps.get_app_configs():
|
||||
all_models.extend(list(app_config.get_models()))
|
||||
return all_models
|
||||
|
||||
|
||||
def find_empty_or_null_fields(model_class):
|
||||
"""
|
||||
Takes a model class and checks every field.
|
||||
|
||||
Returns a list of field names for which all database entries
|
||||
are either NULL or an empty string.
|
||||
"""
|
||||
completely_empty_fields = []
|
||||
|
||||
# Get the total number of records for this model.
|
||||
total_records = model_class.objects.count()
|
||||
|
||||
# If there are no records, all fields are technically "empty".
|
||||
# We return an empty list to avoid reporting every field from an empty table.
|
||||
if total_records == 0:
|
||||
return []
|
||||
|
||||
# Iterate over all concrete fields of the model.
|
||||
for field in model_class._meta.get_fields():
|
||||
# We only want to check actual database columns, not relations.
|
||||
if not hasattr(field, 'column'):
|
||||
continue
|
||||
|
||||
field_name = field.name
|
||||
|
||||
# Build a query to find records that have a meaningful value.
|
||||
# This is more efficient than counting all null/empty records.
|
||||
# We exclude records that are NULL.
|
||||
query_has_value = model_class.objects.exclude(Q(**{f'{field_name}__isnull': True}))
|
||||
|
||||
# For text-based fields, we also exclude empty strings.
|
||||
if isinstance(field, (CharField, TextField)):
|
||||
query_has_value = query_has_value.exclude(Q(**{f'{field_name}': ''}))
|
||||
|
||||
# If the query for records WITH a value returns FALSE (no such records exist),
|
||||
# it means all records for this field are empty or null.
|
||||
if not query_has_value.exists():
|
||||
completely_empty_fields.append(field_name)
|
||||
|
||||
return completely_empty_fields
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Scans all project models to find fields where all entries are empty or null."
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write(self.style.SUCCESS("🚀 Starting scan for empty or null fields across all models..."))
|
||||
|
||||
all_models = get_all_project_models()
|
||||
found_any = False
|
||||
|
||||
for model in all_models:
|
||||
total_records = model.objects.count()
|
||||
model_name = f"{model._meta.app_label}.{model._meta.object_name}"
|
||||
self.stdout.write(f"\n🔍 Checking model: {self.style.HTTP_INFO(model_name)} ({total_records} instances found)")
|
||||
|
||||
empty_fields = find_empty_or_null_fields(model)
|
||||
|
||||
if empty_fields:
|
||||
found_any = True
|
||||
for field_name in empty_fields:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(f" -> Field '{field_name}' is completely empty or null.")
|
||||
)
|
||||
else:
|
||||
self.stdout.write(self.style.SUCCESS(" ✅ All fields contain data."))
|
||||
|
||||
if not found_any:
|
||||
self.stdout.write(self.style.SUCCESS("\n🎉 Scan complete. No fields were found to be universally empty or null."))
|
||||
|
||||
|
||||
"""
|
||||
|
||||
### How to Run the Command
|
||||
|
||||
After saving the file, you can run your new command from your project's root directory (where `manage.py` is located):
|
||||
|
||||
bash
|
||||
uv run manage.py find_empty_fields
|
||||
|
||||
|
||||
### Example Output
|
||||
|
||||
The output in your terminal will look something like this:
|
||||
|
||||
```
|
||||
🚀 Starting scan for empty or null fields across all models...
|
||||
|
||||
🔍 Checking model: auth.Group
|
||||
✅ All fields contain data.
|
||||
|
||||
🔍 Checking model: auth.User
|
||||
-> Field 'last_name' is completely empty or null.
|
||||
-> Field 'first_name' is completely empty or null.
|
||||
|
||||
🔍 Checking model: products.Product
|
||||
-> Field 'notes' is completely empty or null.
|
||||
-> Field 'discount_code' is completely empty or null.
|
||||
|
||||
🔍 Checking model: products.Category
|
||||
✅ All fields contain data.
|
||||
"""
|
||||
+47
-9
@@ -1,31 +1,54 @@
|
||||
import pathlib
|
||||
from django import http
|
||||
from django.conf import settings
|
||||
from django.urls import Resolver404, resolve
|
||||
from django.utils.deprecation import MiddlewareMixin
|
||||
from troggle import settings
|
||||
|
||||
"""Non-standard django middleware is loaded from this file.
|
||||
|
||||
"""
|
||||
todo = """SmartAppendSlashMiddleware(object) Not Working.
|
||||
It needs re-writing to be compatible with Django v2.0 and later
|
||||
It needs re-writing. Can we make this work even though we have a catchall url rule ?
|
||||
"""
|
||||
|
||||
|
||||
class SmartAppendSlashMiddleware(object):
|
||||
class TroggleAppendSlashMiddleware(MiddlewareMixin):
|
||||
"""
|
||||
"SmartAppendSlash" middleware for taking care of URL rewriting.
|
||||
|
||||
This middleware appends a missing slash, if:
|
||||
* the SMART_APPEND_SLASH setting is True
|
||||
* the URL without the slash does not exist
|
||||
* the URL with an appended slash does exist.
|
||||
* the URL without the slash does not exist in urls.py
|
||||
* the URL with an appended slash does exist in urls.py
|
||||
Otherwise it won't touch the URL.
|
||||
|
||||
MODIFICATION
|
||||
Since we have a universal catchall url pattern in urls.py, the usual way this works
|
||||
won't ever trigger adding a slash. So we check for the existence of a file in expoweb,
|
||||
not the existence of a pattern in urls.py...
|
||||
|
||||
but site_media..
|
||||
but css etc....
|
||||
|
||||
CONCLUSION
|
||||
This technique "works" but would be a maintence nightmare, so DO NOT USE IT
|
||||
do NOT include
|
||||
troggle.core.middleware.TroggleAppendSlashMiddleware
|
||||
in settings.py
|
||||
|
||||
FURTHER WARNING
|
||||
If playing about with this, the 301 redirects that it creates will be cached INDEFINITELY by any browser you
|
||||
used to test it, e.g. /css/main.css with be permanetly redirected to /css/main2.css/ with dreadful
|
||||
consequences, similarly for any images visited. You have to go into your browser settings and delete all cached
|
||||
files to recover from this.
|
||||
"""
|
||||
|
||||
def process_request(self, request):
|
||||
"""Called for every url so return as quickly as possible
|
||||
Append a slash if SMART_APPEND_SLASH is set, the resulting URL resolves and it doesn't without the /
|
||||
Append a slash if TROGGLE_APPEND_SLASH is set, the resulting URL resolves and it doesn't without the /
|
||||
"""
|
||||
if not settings.SMART_APPEND_SLASH:
|
||||
if not settings.TROGGLE_APPEND_SLASH:
|
||||
return None
|
||||
|
||||
if request.path.endswith("/"):
|
||||
@@ -33,16 +56,31 @@ class SmartAppendSlashMiddleware(object):
|
||||
|
||||
if request.path.endswith("_edit"):
|
||||
return None
|
||||
|
||||
if request.path.startswith("/"):
|
||||
relative_path = request.path[1:]
|
||||
else:
|
||||
relative_path = request.path
|
||||
|
||||
for root in [settings.MEDIA_ROOT, settings.JSLIB_ROOT, settings.EXPOFILES, settings.SCANS_ROOT, settings.PHOTOS_ROOT]:
|
||||
full_path = root / relative_path
|
||||
print(f"+++++ MIDDLEWARE checking {root} / {relative_path} ")
|
||||
if full_path.is_file():
|
||||
print(f"+++++ MIDDLEWARE It IS a {root} file {full_path=} so use it as-is.")
|
||||
return None
|
||||
else:
|
||||
print(f"+++++ MIDDLEWARE NOT a {root}file {full_path=}")
|
||||
|
||||
host = http.HttpRequest.get_host(request)
|
||||
old_url = [host, request.path]
|
||||
if _resolves(old_url[1]):
|
||||
return None
|
||||
# if _resolves(old_url[1]):
|
||||
# return None
|
||||
|
||||
# So: it does not resolve according to our criteria, i.e. _edit doesn't count
|
||||
# So: it does not resolve according to our criteria, i.e. _edit doesn't count, and URL resolves doesn't count because of the catch all
|
||||
new_url = old_url[:]
|
||||
new_url[1] = new_url[1] + "/"
|
||||
if not _resolves(new_url[1]):
|
||||
print(f"+++++ MIDDLEWARE add SLASH and resolves {old_url=} => {new_url=}")
|
||||
return None
|
||||
else:
|
||||
if settings.DEBUG and request.method == "POST":
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user