Coverage for app / backend / src / tests / conftest.py: 98%
154 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-03-19 14:14 +0000
1import os
2import re
3from collections.abc import Generator
4from tempfile import TemporaryDirectory
5from unittest.mock import patch
7import pytest
8from sqlalchemy import Connection, Engine
9from sqlalchemy.sql import text
# Set up environment variables before any couchers imports (they trigger config loading)
# Keep the TemporaryDirectory object bound at module scope: the directory is
# deleted when the object is garbage-collected, so it must live for the whole
# test session.
prometheus_multiproc_dir = TemporaryDirectory()
os.environ["PROMETHEUS_MULTIPROC_DIR"] = prometheus_multiproc_dir.name

# Default for running with a database from docker-compose.test.yml.
# Only applied when the caller hasn't provided their own connection string.
if "DATABASE_CONNECTION_STRING" not in os.environ:  # pragma: no cover
    os.environ["DATABASE_CONNECTION_STRING"] = (
        "postgresql://postgres:06b3890acd2c235c41be0bbfe22f1b386a04bf02eedf8c977486355616be2aa1@localhost:6544/testdb"
    )
21from couchers.config import config # noqa: E402
22from couchers.models import Base # noqa: E402
23from tests.fixtures.db import ( # noqa: E402
24 autocommit_engine,
25 create_schema_from_models,
26 generate_user,
27 populate_testing_resources,
28)
29from tests.fixtures.misc import Moderator, PushCollector # noqa: E402
@pytest.fixture(scope="session")
def postgres_engine() -> Generator[Engine]:
    """
    Session-scoped SQLAlchemy engine connected to the "postgres" database.

    Derived from DATABASE_CONNECTION_STRING by swapping the trailing
    "/testdb" for "/postgres"; refuses to run against any other database
    name as a safety net against clobbering a real database.
    """
    dsn = config["DATABASE_CONNECTION_STRING"]
    if not dsn.endswith("/testdb"):
        raise RuntimeError(f"DATABASE_CONNECTION_STRING must point to /testdb, but was {dsn}")

    postgres_dsn = dsn.removesuffix("/testdb") + "/postgres"

    with autocommit_engine(postgres_dsn) as engine:
        yield engine
@pytest.fixture(scope="session")
def postgres_conn(postgres_engine: Engine) -> Generator[Connection]:
    """
    Session-scoped connection to the "postgres" database.

    Acquiring a connection takes time, so one connection is cached for the
    whole session rather than opened per test.
    """
    with postgres_engine.connect() as connection:
        yield connection
@pytest.fixture(scope="session")
def testdb_engine() -> Generator[Engine]:
    """
    Session-scoped SQLAlchemy engine connected to the "testdb" database,
    using DATABASE_CONNECTION_STRING as-is.
    """
    testdb_dsn = config["DATABASE_CONNECTION_STRING"]
    with autocommit_engine(testdb_dsn) as engine:
        yield engine
@pytest.fixture(scope="session")
def testdb_conn(testdb_engine: Engine) -> Generator[Connection]:
    """
    Session-scoped connection to testdb, used for truncating tables
    between tests.
    """
    with testdb_engine.connect() as connection:
        yield connection
# Static reference-data tables that should not be truncated between tests;
# they are populated once per session and then left untouched.
STATIC_TABLES = frozenset({"languages", "timezone_areas", "regions"})
@pytest.fixture(scope="session")
def setup_testdb(postgres_conn: Connection, testdb_engine: Engine) -> None:
    """
    Builds the test database once per session: drops and recreates testdb,
    installs the required extensions, creates all tables from the models,
    and loads the static data (languages, regions, timezones). Between
    tests only the non-static tables are truncated.
    """
    # running in non-UTC catches some timezone errors
    os.environ["TZ"] = "America/New_York"

    # DROP/CREATE DATABASE must run outside a transaction, hence the
    # autocommit postgres connection.
    for statement in ("DROP DATABASE IF EXISTS testdb WITH (FORCE)", "CREATE DATABASE testdb"):
        postgres_conn.execute(text(statement))

    extensions_ddl = (
        "CREATE SCHEMA logging;"
        "CREATE EXTENSION IF NOT EXISTS postgis;"
        "CREATE EXTENSION IF NOT EXISTS pg_trgm;"
        "CREATE EXTENSION IF NOT EXISTS btree_gist;"
    )

    with testdb_engine.connect() as connection:
        connection.execute(text(extensions_ddl))
        create_schema_from_models(testdb_engine)
        populate_testing_resources(connection)
def _truncate_non_static_tables(conn: Connection) -> None:
    """
    Truncates all non-static tables in a single statement and resets the
    standalone sequences. Static tables (languages, timezone_areas,
    regions) are preserved.
    """

    def qualify(raw_name: str) -> str:
        # Schema-qualified names (e.g. "logging.api_calls") become logging."api_calls"
        if "." in raw_name:
            schema, table = raw_name.split(".", 1)
            return f'{schema}."{table}"'
        return f'"{raw_name}"'

    targets = [qualify(name) for name in Base.metadata.tables.keys() if name not in STATIC_TABLES]

    if targets:
        conn.execute(text(f"TRUNCATE {', '.join(targets)} RESTART IDENTITY CASCADE"))

    # Reset standalone sequences, not owned by any table column
    # (RESTART IDENTITY only resets sequences owned by truncated columns)
    conn.execute(text("ALTER SEQUENCE communities_seq RESTART WITH 1"))
    conn.execute(text("ALTER SEQUENCE moderation_seq RESTART WITH 2000000"))
@pytest.fixture
def db(setup_testdb: None, testdb_conn: Connection) -> None:
    """
    Gives each test a clean database by truncating all non-static tables
    before the test runs. Static tables (languages, timezone_areas,
    regions) are preserved.
    """
    _truncate_non_static_tables(testdb_conn)
@pytest.fixture(scope="class")
def db_class(setup_testdb: None, testdb_conn: Connection) -> None:
    """
    Class-scoped variant of the per-test database fixture: truncates all
    non-static tables once per test class. Used in test_communities.py.
    """
    _truncate_non_static_tables(testdb_conn)
@pytest.fixture(scope="class")
def testconfig():
    """
    Overwrites the global config with known test values for the duration of
    the test class, then restores the previous config afterwards.
    """
    # Snapshot the current config so it can be restored on teardown.
    # NOTE(review): clear() followed by update(prevconfig) restores the same
    # contents — looks like a no-op reset; confirm whether it is intentional.
    prevconfig = config.copy()
    config.clear()
    config.update(prevconfig)

    config["IN_TEST"] = True

    config["DEV"] = True
    config["SECRET"] = bytes.fromhex("448697d3886aec65830a1ea1497cdf804981e0c260d2f812cf2787c4ed1a262b")
    config["VERSION"] = "testing_version"
    config["BASE_URL"] = "http://localhost:3000"
    config["BACKEND_BASE_URL"] = "http://localhost:8888"
    config["CONSOLE_BASE_URL"] = "http://localhost:8888"
    config["COOKIE_DOMAIN"] = "localhost"

    config["ENABLE_SMS"] = False
    config["SMS_SENDER_ID"] = "invalid"

    config["ENABLE_EMAIL"] = False
    config["NOTIFICATION_EMAIL_SENDER"] = "Couchers.org"
    config["NOTIFICATION_EMAIL_ADDRESS"] = "notify@couchers.org.invalid"
    config["NOTIFICATION_PREFIX"] = "[TEST] "
    config["REPORTS_EMAIL_RECIPIENT"] = "reports@couchers.org.invalid"
    config["CONTRIBUTOR_FORM_EMAIL_RECIPIENT"] = "forms@couchers.org.invalid"
    config["MODS_EMAIL_RECIPIENT"] = "mods@couchers.org.invalid"

    config["ENABLE_DONATIONS"] = False
    config["STRIPE_API_KEY"] = ""
    config["STRIPE_WEBHOOK_SECRET"] = ""
    config["STRIPE_RECURRING_PRODUCT_ID"] = ""

    config["ENABLE_STRONG_VERIFICATION"] = False
    config["IRIS_ID_PUBKEY"] = ""
    config["IRIS_ID_SECRET"] = ""
    # corresponds to private key e6c2fbf3756b387bc09a458a7b85935718ef3eb1c2777ef41d335c9f6c0ab272
    config["VERIFICATION_DATA_PUBLIC_KEY"] = bytes.fromhex(
        "dd740a2b2a35bf05041a28257ea439b30f76f056f3698000b71e6470cd82275f"
    )

    config["ENABLE_POSTAL_VERIFICATION"] = False

    config["SMTP_HOST"] = "localhost"
    config["SMTP_PORT"] = 587
    config["SMTP_USERNAME"] = "username"
    config["SMTP_PASSWORD"] = "password"

    config["ENABLE_MEDIA"] = True
    config["MEDIA_SERVER_SECRET_KEY"] = bytes.fromhex(
        "91e29bbacc74fa7e23c5d5f34cca5015cb896e338a620003de94a502a461f4bc"
    )
    config["MEDIA_SERVER_BEARER_TOKEN"] = "c02d383897d3b82774ced09c9e17802164c37e7e105d8927553697bf4550e91e"
    config["MEDIA_SERVER_BASE_URL"] = "http://localhost:5001"
    config["MEDIA_SERVER_UPLOAD_BASE_URL"] = "http://localhost:5001"

    config["BUG_TOOL_ENABLED"] = False
    config["BUG_TOOL_GITHUB_REPO"] = "org/repo"
    config["BUG_TOOL_GITHUB_USERNAME"] = "user"
    config["BUG_TOOL_GITHUB_TOKEN"] = "token"

    config["LISTMONK_ENABLED"] = False
    config["LISTMONK_BASE_URL"] = "https://localhost"
    config["LISTMONK_API_USERNAME"] = "..."
    config["LISTMONK_API_KEY"] = "..."
    config["LISTMONK_LIST_ID"] = 3

    config["PUSH_NOTIFICATIONS_ENABLED"] = True
    config["PUSH_NOTIFICATIONS_VAPID_PRIVATE_KEY"] = "uI1DCR4G1AdlmMlPfRLemMxrz9f3h4kvjfnI8K9WsVI"
    config["PUSH_NOTIFICATIONS_VAPID_SUBJECT"] = "mailto:testing@couchers.org.invalid"

    config["ACTIVENESS_PROBES_ENABLED"] = True

    # NOTE(review): "RECAPTHCA" appears to be a misspelling of RECAPTCHA, but
    # these keys must match whatever reads them elsewhere — confirm before renaming.
    config["RECAPTHCA_ENABLED"] = False
    config["RECAPTHCA_PROJECT_ID"] = "..."
    config["RECAPTHCA_API_KEY"] = "..."
    config["RECAPTHCA_SITE_KEY"] = "..."

    config["EXPERIMENTATION_ENABLED"] = False
    config["EXPERIMENTATION_PASS_ALL_GATES"] = True
    config["STATSIG_SERVER_SECRET_KEY"] = ""
    config["STATSIG_ENVIRONMENT"] = "testing"

    # Moderation auto-approval deadline - 0 disables, set in tests that need it
    config["MODERATION_AUTO_APPROVE_DEADLINE_SECONDS"] = 0
    # Bot user ID for automated moderation - will be set to a real user in tests that need it
    config["MODERATION_BOT_USER_ID"] = 1

    # Dev APIs disabled by default in tests
    config["ENABLE_DEV_APIS"] = False

    # Slack notifications disabled by default in tests
    config["SLACK_ENABLED"] = False
    config["SLACK_BOT_TOKEN"] = ""
    config["SLACK_DONATIONS_CHANNEL"] = ""
    config["SLACK_MERCH_CHANNEL"] = ""

    config["ENABLE_NOTIFICATION_TRANSLATIONS"] = False

    yield None

    # Teardown: restore whatever config was in place before this class ran.
    config.clear()
    config.update(prevconfig)
@pytest.fixture
def fast_passwords():
    """
    Replaces the deliberately slow nacl password hashing with a trivial
    reversible stand-in so tests that create users run quickly.
    """

    def fake_hash(password: bytes) -> bytes:
        return b"fake hash:" + password

    def fake_verify(hashed: bytes, password: bytes) -> bool:
        return hashed == fake_hash(password)

    with (
        patch("couchers.crypto.nacl.pwhash.verify", fake_verify),
        patch("couchers.crypto.nacl.pwhash.str", fake_hash),
    ):
        yield
@pytest.fixture
def push_collector():
    """
    Captures push notifications sent during a test instead of delivering
    them. See test_SendTestPushNotification for an example of usage.
    """
    recorder = PushCollector()

    with patch("couchers.notifications.push._push_to_user", recorder.push_to_user):
        yield recorder
@pytest.fixture
def moderator():
    """
    Creates a moderator (superuser) and provides methods to exercise the moderation API.

    Usage:
        def test_example(db, moderator):
            # ... create a host request ...
            moderator.approve_host_request(host_request_id)
    """
    superuser, superuser_token = generate_user(is_superuser=True)
    yield Moderator(superuser, superuser_token)