"""
Modified for Expo April 2021.
"""

import re
from http import HTTPStatus

from django.contrib.auth.models import User
from django.test import Client, TestCase

import settings
from troggle.core.models.caves import Cave
from troggle.core.models.troggle import Expedition, Person, PersonExpedition
from troggle.core.utils import current_expo

current_year = current_expo()


def create_user(name=None, last_name="Caver", is_superuser=False):
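    """Create and save a test User directly via the ORM.

    Every test user gets the same password, 'secretword', so the tests do not
    depend on a password hash stored in a fixture (see the FixturePageTests
    docstring for why that matters).
    """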
    u = User()
    u.username = name
    u.email = f"philip.sargent+{name}@gmail.com"
    u.first_name, u.last_name = name, last_name
    u.set_password("secretword")  # all test users have same password
    u.save()
    return u


def create_cave(areacode="1623", kataster_number="000", official_name=""):
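    """Create and save a minimal Cave, identified by area code and kataster number."""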
    c = Cave(areacode=areacode, kataster_number=kataster_number, official_name=official_name)
    c.save()
    return c


# import troggle.settings as settings
# FIXTURE_DIRS = settings.PYTHON_PATH / "core" /"fixtures"


class FixtureTests(TestCase):
    """These just hit the database.
    They do not exercise the GET and url functions.
    """

    fixtures = ["expo_caves", "expo_exped"]
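    # 'ph' is a phrase expected (via the expo_caves fixture) in the underground
    # description of cave 1623/115; the cave-115 tests below search for it.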
    ph = r"and leads in 800m of tortuous going to"

    def setUp(self):
        create_user(name="expo")  # needed for current_year()

    def tearDown(self):
        User.objects.all().delete()

    def test_fix_person_loaded_byname(self):
        p = Person.objects.get(fullname="Michael Sargent")
        self.assertEqual(str(p.first_name), "Michael")

    def test_fix_personexped_loaded_bypk(self):
        pe = PersonExpedition.objects.get(pk="681")
        self.assertEqual(str(pe.person.fullname), "Michael Sargent")
        self.assertEqual(str(pe.expedition.year), "2019")

    def test_fix_expedition_loaded(self):
        e = Expedition.objects.get(pk="44")
        self.assertEqual(str(e.year), "2019")

    def test_page_person(self):
        response = self.client.get("/person/michael-sargent")
        content = response.content.decode()
        # with open('testresponseperson.html','w') as tr:
        #     tr.writelines(content)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        for ph in [r"Michael Sargent", r"has been on expo in the following years"]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_personexpedition(self):
        # Not working despite all components present and correct
        response = self.client.get("/personexpedition/michael-sargent/2019")
        content = response.content.decode()
        # with open('testresponse.html','w') as tr:
        #     tr.writelines(content)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        for ph in [r"Michael Sargent", r"Table of all trips and surveys aligned by date"]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

        # Need to add a fixture so that this actually has a logbook entry and a trip/svx in it.

    def test_fix_cave_loaded115(self):
        c = Cave.objects.get(kataster_number="115")
        self.assertEqual(str(c.description_file), "1623/115.htm")
        self.assertEqual(str(c.url), "1623/115.url")  # intentional
        self.assertEqual(str(c.filename), "1623-115.html")
        self.assertEqual(str(c.areacode), "1623")

        ph = self.ph
        phmatch = re.search(ph, c.underground_description)
        self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")

    def test_fix_cave_loaded284(self):
        c = Cave.objects.get(kataster_number="284")
        self.assertEqual(str(c.description_file), "")
        self.assertEqual(str(c.url), "1623/284/284.html")
        self.assertEqual(str(c.filename), "1623-284.html")

        ph = r"at a depth of 72m, there are large round blocks"
        phmatch = re.search(ph, c.notes)
        self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")


class FixturePageTests(TestCase):
    """The fixtures have a password hash which is compatible with the plain-text password 'secretword'.

    The hash CHANGES whenever Django upgrades the encryption key length, so it is better to create the
    test users algorithmically rather than via a fixture.
    """

    fixtures = ["expo_caves", "expo_exped"]
    ph = r"and leads in 800m of tortuous going to"

    @classmethod
    def setUpTestData(cls):
        pass

    def setUp(self):
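        # settings.NOTABLECAVES1623 / NOTABLECAVES1626 are lists of kataster
        # numbers (presumably the caves singled out on the caves index page);
        # create a stub Cave for each so that pages referring to them can render.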
        for kataster_number in settings.NOTABLECAVES1623:
            create_cave(areacode="1623", kataster_number=kataster_number)
        for kataster_number in settings.NOTABLECAVES1626:
            create_cave(areacode="1626", kataster_number=kataster_number)

        create_user(name="expo")
        create_user(name="expotest")
        create_user(name="expotestadmin", is_superuser=True)

        self.user = User.objects.get(username="expotest")

        # Every test needs a client.
        self.client = Client()

    def tearDown(self):
        User.objects.all().delete()
        Cave.objects.all().delete()

    def test_fix_expedition(self):
        response = self.client.get("/expedition/2019")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"Michael Sargent"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('exped-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_personexped(self):
        response = self.client.get("/personexpedition/michael-sargent/2019")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"Table of all trips and surveys aligned by date"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('persexped-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_person(self):
        response = self.client.get("/person/michael-sargent")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"second-generation expo caver "

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('person-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_url115(self):
        ph = self.ph
        response = self.client.get("/1623/115.url")  # yes this is intentional, see the inserted data above & fixture
        self.assertEqual(response.status_code, HTTPStatus.OK)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_url284(self):
        response = self.client.get("/1623/284/284.html")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"at a depth of 72m, there are large round blocks"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('cave-url284.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_bare_url115(self):
        """Expect to get Page Not Found and status 404"""
        ph = self.ph
        ph = "Probably a mistake."
        response = self.client.get("/1623/115/115")

        # content = response.content.decode()
        # with open('_test_bare_url115.html', 'w') as f:
        #     f.write(content)

        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")  # 200 & Page Not Found

    def test_fix_cave_slug115(self):
        """Expect to get Page Not Found and status 404

        UPDATE THIS BACK to 1623-115 when the data is fixed so that we don't have the
        internal redirections for cave ids"""
        ph = self.ph
        ph = "Probably a mistake."
        # response = self.client.get("/1623-115")
        response = self.client.get("/1234-123")
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")  # 302 & Page Not Found

    def test_fix_caves284(self):
        response = self.client.get("/caves")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"284 <em>Seetrichter</em>"
        phmatch = re.search(ph, content)
        # with open('_cave_fix_caves.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    # Although the Cave object exists, it looks like we get a bad slug error when trying to get a QM page.

    # def test_fix_qms(self):
    #     response = self.client.get("/cave/qms/1623-284")
    #     self.assertEqual(response.status_code, HTTPStatus.OK)
    #     content = response.content.decode()
    #     ph = r"Question marks for 284 - Seetrichter"
    #     phmatch = re.search(ph, content)
    #     # with open('_cave-fixqms.html', 'w') as f:
    #     #     f.write(content)
    #     self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    # def test_fix_openqms(self):
    #     response = self.client.get("/cave/openqms/1623-284")
    #     self.assertEqual(response.status_code, HTTPStatus.OK)
    #     content = response.content.decode()
    #     ph = r"Open Leads for 284 - Seetrichter"
    #     phmatch = re.search(ph, content)
    #     # with open('_cave-fixopenqms.html', 'w') as f:
    #     #     f.write(content)
    #     self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")