Coverage for app / backend / src / tests / conftest.py: 98%
151 statements
« prev ^ index » next coverage.py v7.13.2, created at 2026-02-03 06:18 +0000
1import os
2import re
3from collections.abc import Generator
4from tempfile import TemporaryDirectory
5from unittest.mock import patch
7import pytest
8from sqlalchemy import Connection, Engine
9from sqlalchemy.sql import text
# Set up environment variables before any couchers imports (they trigger config loading)
# NOTE: the TemporaryDirectory object is bound at module scope on purpose —
# it removes its directory when garbage-collected, so the reference keeps the
# prometheus multiproc dir alive for the whole test session.
prometheus_multiproc_dir = TemporaryDirectory()
os.environ["PROMETHEUS_MULTIPROC_DIR"] = prometheus_multiproc_dir.name

# Default for running with a database from docker-compose.test.yml.
# Only applied when the caller hasn't already pointed the tests at a database.
if "DATABASE_CONNECTION_STRING" not in os.environ:  # pragma: no cover
    os.environ["DATABASE_CONNECTION_STRING"] = (
        "postgresql://postgres:06b3890acd2c235c41be0bbfe22f1b386a04bf02eedf8c977486355616be2aa1@localhost:6544/testdb"
    )
21from couchers.config import config # noqa: E402
22from couchers.models import Base # noqa: E402
23from tests.fixtures.db import ( # noqa: E402
24 autocommit_engine,
25 create_schema_from_models,
26 generate_user,
27 populate_testing_resources,
28)
29from tests.fixtures.misc import Moderator, PushCollector # noqa: E402
# Register the "pytest-split" plugin (vendored for pytest 9+ compatibility).
# pytest discovers this module-level name and loads the listed plugin modules.
pytest_plugins = ["tests.pytest_split.plugin"]
@pytest.fixture(scope="session")
def postgres_engine() -> Generator[Engine]:
    """
    Session-scoped SQLAlchemy engine connected to the maintenance "postgres"
    database, used to drop and recreate testdb.
    """
    dsn = config["DATABASE_CONNECTION_STRING"]
    # Refuse to run against anything but a database literally named "testdb" —
    # this fixture's consumers drop and recreate the target database.
    if not dsn.endswith("/testdb"):
        raise RuntimeError(f"DATABASE_CONNECTION_STRING must point to /testdb, but was {dsn}")

    # Swap the trailing database name: .../testdb -> .../postgres
    maintenance_dsn = dsn.removesuffix("/testdb") + "/postgres"

    with autocommit_engine(maintenance_dsn) as engine:
        yield engine
@pytest.fixture(scope="session")
def postgres_conn(postgres_engine: Engine) -> Generator[Connection]:
    """
    A single cached connection to the "postgres" database.

    Acquiring a connection takes time, so we cache it for the session.
    """
    conn = postgres_engine.connect()
    try:
        yield conn
    finally:
        conn.close()
@pytest.fixture(scope="session")
def testdb_engine() -> Generator[Engine]:
    """
    Session-scoped autocommit SQLAlchemy engine connected to the "testdb"
    database the tests actually run against.
    """
    with autocommit_engine(config["DATABASE_CONNECTION_STRING"]) as engine:
        yield engine
@pytest.fixture(scope="session")
def testdb_conn(testdb_engine: Engine) -> Generator[Connection]:
    """
    A single cached connection to testdb, used for truncating tables between
    tests.
    """
    conn = testdb_engine.connect()
    try:
        yield conn
    finally:
        conn.close()
# Static reference tables seeded once per session (see setup_testdb) that must
# survive the per-test truncation done in _truncate_non_static_tables.
STATIC_TABLES = frozenset({"languages", "timezone_areas", "regions"})
@pytest.fixture(scope="session")
def setup_testdb(postgres_conn: Connection, testdb_engine: Engine) -> None:
    """
    Creates the test database with all the extensions, tables,
    and static data (languages, regions, timezones). This is done only once
    per session. Between tests, we truncate all non-static tables.
    """
    # running in non-UTC catches some timezone errors
    os.environ["TZ"] = "America/New_York"

    # WITH (FORCE) terminates any lingering connections so the DROP cannot hang
    postgres_conn.execute(text("DROP DATABASE IF EXISTS testdb WITH (FORCE)"))
    postgres_conn.execute(text("CREATE DATABASE testdb"))

    with testdb_engine.connect() as conn:
        # Extensions the models rely on: postgis (geo types), pg_trgm (trigram
        # indexes), btree_gist (gist-based constraints); plus the logging schema.
        conn.execute(
            text(
                "CREATE SCHEMA logging;"
                "CREATE EXTENSION IF NOT EXISTS postgis;"
                "CREATE EXTENSION IF NOT EXISTS pg_trgm;"
                "CREATE EXTENSION IF NOT EXISTS btree_gist;"
            )
        )

        # Build all tables from the SQLAlchemy models, then seed the static
        # data that per-test truncation preserves (see STATIC_TABLES).
        create_schema_from_models(testdb_engine)
        populate_testing_resources(conn)
def _truncate_non_static_tables(conn: Connection) -> None:
    """
    Empties every application table, leaving the static reference tables
    (languages, timezone_areas, regions) intact.
    """

    def quoted(name: str) -> str:
        # Metadata keys may be schema-qualified, e.g. "logging.api_calls";
        # only the table part gets double-quoted.
        if "." in name:
            schema, table = name.split(".", 1)
            return f'{schema}."{table}"'
        return f'"{name}"'

    targets = [quoted(name) for name in Base.metadata.tables.keys() if name not in STATIC_TABLES]
    if targets:
        conn.execute(text(f"TRUNCATE {', '.join(targets)} RESTART IDENTITY CASCADE"))

    # Reset standalone sequences, not owned by any table column
    # (RESTART IDENTITY only resets sequences owned by truncated columns)
    conn.execute(text("ALTER SEQUENCE communities_seq RESTART WITH 1"))
    conn.execute(text("ALTER SEQUENCE moderation_seq RESTART WITH 2000000"))
@pytest.fixture
def db(setup_testdb: None, testdb_conn: Connection) -> None:
    """
    Truncates all non-static tables before each test.
    Static tables (languages, timezone_areas, regions) are preserved.
    """
    # Depending on setup_testdb guarantees the schema exists before truncating.
    _truncate_non_static_tables(testdb_conn)
@pytest.fixture(scope="class")
def db_class(setup_testdb: None, testdb_conn: Connection) -> None:
    """
    Class-scoped variant of the db fixture: truncates non-static tables once
    per test class instead of once per test. Used in test_communities.py.
    """
    _truncate_non_static_tables(testdb_conn)
@pytest.fixture(scope="class")
def testconfig() -> Generator[None]:
    """
    Overwrites the global couchers config with deterministic test values for
    the duration of the test class, restoring the original config on teardown.
    """
    # Snapshot the current config so the teardown below can restore it.
    prevconfig = config.copy()
    config.clear()
    config.update(prevconfig)

    config["IN_TEST"] = True

    config["DEV"] = True
    config["SECRET"] = bytes.fromhex("448697d3886aec65830a1ea1497cdf804981e0c260d2f812cf2787c4ed1a262b")
    config["VERSION"] = "testing_version"
    config["BASE_URL"] = "http://localhost:3000"
    config["BACKEND_BASE_URL"] = "http://localhost:8888"
    config["CONSOLE_BASE_URL"] = "http://localhost:8888"
    config["COOKIE_DOMAIN"] = "localhost"

    # External delivery channels are disabled so tests never send real traffic.
    config["ENABLE_SMS"] = False
    config["SMS_SENDER_ID"] = "invalid"

    config["ENABLE_EMAIL"] = False
    config["NOTIFICATION_EMAIL_SENDER"] = "Couchers.org"
    config["NOTIFICATION_EMAIL_ADDRESS"] = "notify@couchers.org.invalid"
    config["NOTIFICATION_PREFIX"] = "[TEST] "
    config["REPORTS_EMAIL_RECIPIENT"] = "reports@couchers.org.invalid"
    config["CONTRIBUTOR_FORM_EMAIL_RECIPIENT"] = "forms@couchers.org.invalid"
    config["MODS_EMAIL_RECIPIENT"] = "mods@couchers.org.invalid"

    config["ENABLE_DONATIONS"] = False
    config["STRIPE_API_KEY"] = ""
    config["STRIPE_WEBHOOK_SECRET"] = ""
    config["STRIPE_RECURRING_PRODUCT_ID"] = ""

    config["ENABLE_STRONG_VERIFICATION"] = False
    config["IRIS_ID_PUBKEY"] = ""
    config["IRIS_ID_SECRET"] = ""
    # corresponds to private key e6c2fbf3756b387bc09a458a7b85935718ef3eb1c2777ef41d335c9f6c0ab272
    config["VERIFICATION_DATA_PUBLIC_KEY"] = bytes.fromhex(
        "dd740a2b2a35bf05041a28257ea439b30f76f056f3698000b71e6470cd82275f"
    )

    config["ENABLE_POSTAL_VERIFICATION"] = False

    config["SMTP_HOST"] = "localhost"
    config["SMTP_PORT"] = 587
    config["SMTP_USERNAME"] = "username"
    config["SMTP_PASSWORD"] = "password"

    config["ENABLE_MEDIA"] = True
    config["MEDIA_SERVER_SECRET_KEY"] = bytes.fromhex(
        "91e29bbacc74fa7e23c5d5f34cca5015cb896e338a620003de94a502a461f4bc"
    )
    config["MEDIA_SERVER_BEARER_TOKEN"] = "c02d383897d3b82774ced09c9e17802164c37e7e105d8927553697bf4550e91e"
    config["MEDIA_SERVER_BASE_URL"] = "http://localhost:5001"
    config["MEDIA_SERVER_UPLOAD_BASE_URL"] = "http://localhost:5001"

    config["BUG_TOOL_ENABLED"] = False
    config["BUG_TOOL_GITHUB_REPO"] = "org/repo"
    config["BUG_TOOL_GITHUB_USERNAME"] = "user"
    config["BUG_TOOL_GITHUB_TOKEN"] = "token"

    config["LISTMONK_ENABLED"] = False
    config["LISTMONK_BASE_URL"] = "https://localhost"
    config["LISTMONK_API_USERNAME"] = "..."
    config["LISTMONK_API_KEY"] = "..."
    config["LISTMONK_LIST_ID"] = 3

    config["PUSH_NOTIFICATIONS_ENABLED"] = True
    config["PUSH_NOTIFICATIONS_VAPID_PRIVATE_KEY"] = "uI1DCR4G1AdlmMlPfRLemMxrz9f3h4kvjfnI8K9WsVI"
    config["PUSH_NOTIFICATIONS_VAPID_SUBJECT"] = "mailto:testing@couchers.org.invalid"

    config["ACTIVENESS_PROBES_ENABLED"] = True

    # NOTE(review): "RECAPTHCA" spelling presumably matches the key names
    # declared in couchers.config — do not "fix" the spelling here alone.
    config["RECAPTHCA_ENABLED"] = False
    config["RECAPTHCA_PROJECT_ID"] = "..."
    config["RECAPTHCA_API_KEY"] = "..."
    config["RECAPTHCA_SITE_KEY"] = "..."

    config["EXPERIMENTATION_ENABLED"] = False
    config["EXPERIMENTATION_PASS_ALL_GATES"] = True
    config["STATSIG_SERVER_SECRET_KEY"] = ""
    config["STATSIG_ENVIRONMENT"] = "testing"

    # Moderation auto-approval deadline - 0 disables, set in tests that need it
    config["MODERATION_AUTO_APPROVE_DEADLINE_SECONDS"] = 0
    # Bot user ID for automated moderation - will be set to a real user in tests that need it
    config["MODERATION_BOT_USER_ID"] = 1

    # Dev APIs disabled by default in tests
    config["ENABLE_DEV_APIS"] = False

    config["ENABLE_NOTIFICATION_TRANSLATIONS"] = False

    yield None

    # Teardown: restore whatever config was in place before this fixture ran.
    config.clear()
    config.update(prevconfig)
@pytest.fixture
def fast_passwords():
    """
    Replaces nacl's password hashing with a trivial reversible stand-in.

    Password hashing, by design, takes a lot of time, which slows down the
    tests; stubbing it out keeps auth-related tests fast.
    """

    def stub_hash(password: bytes) -> bytes:
        return b"fake hash:" + password

    def stub_verify(hashed: bytes, password: bytes) -> bool:
        return hashed == stub_hash(password)

    # Same patch order as nested with-blocks: verify first, then str.
    with (
        patch("couchers.crypto.nacl.pwhash.verify", stub_verify),
        patch("couchers.crypto.nacl.pwhash.str", stub_hash),
    ):
        yield
@pytest.fixture
def push_collector():
    """
    Intercepts outgoing push notifications and records them on a PushCollector.

    See test_SendTestPushNotification for an example on how to use this fixture
    """
    collector = PushCollector()

    # Route pushes into the collector instead of the real delivery function.
    with patch("couchers.notifications.push._push_to_user", collector.push_to_user):
        yield collector
@pytest.fixture
def moderator():
    """
    Creates a moderator (superuser) and provides methods to exercise the moderation API.

    Usage:
        def test_example(db, moderator):
            # ... create a host request ...
            moderator.approve_host_request(host_request_id)
    """
    superuser, superuser_token = generate_user(is_superuser=True)
    yield Moderator(superuser, superuser_token)