"""
|
|
Modified for Expo April 2021.
|
|
"""
|
|
|
|
import re
from http import HTTPStatus

from django.contrib.auth.models import User
from django.test import Client, TestCase

from troggle.core.models.caves import Cave
from troggle.core.models.troggle import Person, PersonExpedition, Expedition
from troggle.core.utils import current_expo

current_year = current_expo()


def create_user(name=None, last_name="Caver", is_superuser=False):
    u = User()
    u.username = name
    u.email = f"philip.sargent+{name}@gmail.com"
    u.first_name, u.last_name = name, last_name
    u.is_superuser = is_superuser  # honour the flag so that expotestadmin really is a superuser
    u.set_password("secretword")  # all test users have the same password
    u.save()
    return u


# import troggle.settings as settings
# FIXTURE_DIRS = settings.PYTHON_PATH / "core" / "fixtures"

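
# A minimal sketch of how a test could authenticate the Django test client as one of the
# users made by create_user(), relying on the shared plain-text password "secretword"
# (see the comment in FixturePageTests on why users are created in code rather than loaded
# from a fixture). This helper is an illustration only and is not called by the tests below;
# client.login() is the standard django.test.Client API.
def login_test_user(client, name):
    return client.login(username=name, password="secretword")
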
class FixtureTests(TestCase):
    """These tests just hit the database.
    They do not exercise the GET requests and url functions.
    """

    fixtures = ["expo_caves", "expo_exped"]
    ph = r"and leads in 800m of tortuous going to"

    def setUp(self):
        create_user(name="expo")  # needed for current_year()

    def tearDown(self):
        User.objects.all().delete()

    def test_fix_person_loaded_byname(self):
        p = Person.objects.get(fullname="Michael Sargent")
        self.assertEqual(str(p.first_name), "Michael")

    def test_fix_personexped_loaded_bypk(self):
        pe = PersonExpedition.objects.get(pk="681")
        self.assertEqual(str(pe.person.fullname), "Michael Sargent")
        self.assertEqual(str(pe.expedition.year), "2019")

    def test_fix_expedition_loaded(self):
        e = Expedition.objects.get(pk="44")
        self.assertEqual(str(e.year), "2019")

    def test_page_person(self):
        response = self.client.get("/person/michael-sargent")
        content = response.content.decode()
        # with open('testresponseperson.html','w') as tr:
        #     tr.writelines(content)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        for ph in [r"Michael Sargent", r"has been on expo in the following years"]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_page_personexpedition(self):
        # Not working despite all components present and correct
        response = self.client.get("/personexpedition/michael-sargent/2019")
        content = response.content.decode()
        # with open('testresponse.html','w') as tr:
        #     tr.writelines(content)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        for ph in [r"Michael Sargent", r"Table of all trips and surveys aligned by date"]:
            phmatch = re.search(ph, content)
            self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

        # Need to add a fixture so that this actually has a logbook entry and a trip/svx in it.

    def test_fix_cave_loaded115(self):
        c = Cave.objects.get(kataster_number="115")
        self.assertEqual(str(c.description_file), "1623/115.htm")
        self.assertEqual(str(c.url), "1623/115.url")  # intentional
        self.assertEqual(str(c.filename), "1623-115.html")
        self.assertEqual(str(c.areacode), "1623")

        ph = self.ph
        phmatch = re.search(ph, c.underground_description)
        self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")

    def test_fix_cave_loaded284(self):
        c = Cave.objects.get(kataster_number="284")
        self.assertEqual(str(c.description_file), "")
        self.assertEqual(str(c.url), "1623/284/284.html")
        self.assertEqual(str(c.filename), "1623-284.html")

        ph = r"at a depth of 72m, there are large round blocks"
        phmatch = re.search(ph, c.notes)
        self.assertIsNotNone(phmatch, "In fixture-loaded cave, failed to find expected text: '" + ph + "'")

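
# The page tests below all repeat the same "GET the page, check the status code, then
# re.search() for a phrase" steps. As a sketch of an alternative (an assumption, not used
# anywhere in this file), Django's built-in assertContains() folds the status check and the
# text check into a single call:
def assert_page_contains(testcase, url, text):
    response = testcase.client.get(url)
    testcase.assertContains(response, text, status_code=HTTPStatus.OK)
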
class FixturePageTests(TestCase):
    """Currently nothing that runs troggle works - all do 404. Must be something in a template rendering crash?
    Ordinary pages are OK, and expopages and expofiles are OK, even though they come through troggle. And the
    fixtures are certainly loaded into the db, as the other tests show.
    """

    # The fixtures have a password hash which is compatible with the plain-text password 'secretword'.
    # The hash CHANGES whenever Django upgrades the encryption key length, so it is better to create
    # the test users algorithmically and not via a fixture.
    fixtures = ["expo_caves", "expo_exped"]
    ph = r"and leads in 800m of tortuous going to"

    @classmethod
    def setUpTestData(cls):
        pass

    def setUp(self):
        create_user(name="expo")
        create_user(name="expotest")
        create_user(name="expotestadmin", is_superuser=True)

        self.user = User.objects.get(username="expotest")

        # Every test needs a client.
        self.client = Client()

    def tearDown(self):
        User.objects.all().delete()

    def test_fix_expedition(self):
        response = self.client.get("/expedition/2019")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"Michael Sargent"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('exped-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_personexped(self):
        response = self.client.get("/personexpedition/michael-sargent/2019")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"Table of all trips and surveys aligned by date"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('persexped-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_person(self):
        response = self.client.get("/person/michael-sargent")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"second-generation expo caver "

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('person-op.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_url115(self):
        ph = self.ph
        response = self.client.get("/1623/115.url")  # yes this is intentional, see the inserted data above & fixture
        self.assertEqual(response.status_code, HTTPStatus.OK)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_url284(self):
        response = self.client.get("/1623/284/284.html")
        self.assertEqual(response.status_code, HTTPStatus.OK)

        ph = r"at a depth of 72m, there are large round blocks"

        content = response.content.decode()
        phmatch = re.search(ph, content)
        # with open('cave-url284.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    def test_fix_cave_bare_url115(self):
        """Expect to get Page Not Found and status 404"""
        ph = self.ph
        ph = "Probably a mistake."
        response = self.client.get("/1623/115/115")

        # content = response.content.decode()
        # with open('_test_bare_url115.html', 'w') as f:
        #     f.write(content)

        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")  # 200 & Page Not Found

    def test_fix_cave_slug115(self):
        """Expect to get Page Not Found and status 404.
        UPDATE THIS BACK to 1623-115 when the data is fixed so that we don't have the
        internal redirections for cave ids."""
        ph = self.ph
        ph = "Probably a mistake."
        # response = self.client.get("/1623-115")
        response = self.client.get("/1234-123")
        self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)

        content = response.content.decode()
        phmatch = re.search(ph, content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")  # 302 & Page Not Found

    def test_fix_caves284(self):
        response = self.client.get("/caves")
        self.assertEqual(response.status_code, HTTPStatus.OK)
        content = response.content.decode()
        ph = r"284 <em>Seetrichter</em>"
        phmatch = re.search(ph, content)
        # with open('_cave_fix_caves.html', 'w') as f:
        #     f.write(content)
        self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    # Although the Cave object exists, it looks like we get a bad slug error when trying to get a QM page.

    # def test_fix_qms(self):
    #     response = self.client.get("/cave/qms/1623-284")
    #     self.assertEqual(response.status_code, HTTPStatus.OK)
    #     content = response.content.decode()
    #     ph = r"Question marks for 284 - Seetrichter"
    #     phmatch = re.search(ph, content)
    #     # with open('_cave-fixqms.html', 'w') as f:
    #     #     f.write(content)
    #     self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")

    # def test_fix_openqms(self):
    #     response = self.client.get("/cave/openqms/1623-284")
    #     self.assertEqual(response.status_code, HTTPStatus.OK)
    #     content = response.content.decode()
    #     ph = r"Open Leads for 284 - Seetrichter"
    #     phmatch = re.search(ph, content)
    #     # with open('_cave-fixopenqms.html', 'w') as f:
    #     #     f.write(content)
    #     self.assertIsNotNone(phmatch, "Failed to find expected text: '" + ph + "'")