Coverage for src/couchers/servicers/threads.py: 91%
116 statements
import logging

import grpc
import sqlalchemy.exc
from sqlalchemy.sql import func, select

from couchers import errors
from couchers.db import session_scope
from couchers.jobs.enqueue import queue_job
from couchers.models import Comment, Discussion, Event, EventOccurrence, Reply, Thread, User
from couchers.notifications.notify import notify
from couchers.servicers.api import user_model_to_pb
from couchers.servicers.blocking import are_blocked
from couchers.sql import couchers_select as select
from couchers.utils import Timestamp_from_datetime, make_user_context
from proto import notification_data_pb2, threads_pb2, threads_pb2_grpc
from proto.internal import jobs_pb2

logger = logging.getLogger(__name__)

# Since the API exposes a single ID space regardless of nesting level,
# we construct the API id by appending the nesting level to the
# database ID.


def pack_thread_id(database_id: int, depth: int) -> int:
    return database_id * 10 + depth


def unpack_thread_id(thread_id: int) -> (int, int):
    """Returns (database_id, depth) tuple."""
    return divmod(thread_id, 10)
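
# Illustrative example (not part of the original module): a Comment whose
# database id is 123, sitting at depth 1, packs to API thread_id
# 123 * 10 + 1 == 1231, and unpacking reverses this:
#
#   >>> pack_thread_id(123, 1)
#   1231
#   >>> unpack_thread_id(1231)
#   (123, 1)
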
def total_num_responses(session, database_id):
    """Return the total number of comments and replies to the thread with
    database id database_id.
    """
    return (
        session.execute(select(func.count()).select_from(Comment).where(Comment.thread_id == database_id)).scalar_one()
        + session.execute(
            select(func.count())
            .select_from(Reply)
            .join(Comment, Comment.id == Reply.comment_id)
            .where(Comment.thread_id == database_id)
        ).scalar_one()
    )
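
# Roughly the SQL that total_num_responses above issues (a sketch, assuming
# the default "comments" and "replies" table names from couchers.models):
#
#   SELECT count(*) FROM comments WHERE comments.thread_id = :database_id;
#
#   SELECT count(*) FROM replies
#   JOIN comments ON comments.id = replies.comment_id
#   WHERE comments.thread_id = :database_id;
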
def thread_to_pb(session, database_id):
    return threads_pb2.Thread(
        thread_id=pack_thread_id(database_id, 0),
        num_responses=total_num_responses(session, database_id),
    )


def generate_reply_notifications(payload: jobs_pb2.GenerateReplyNotificationsPayload):
    from couchers.servicers.discussions import discussion_to_pb
    from couchers.servicers.events import event_to_pb

    with session_scope() as session:
        database_id, depth = unpack_thread_id(payload.thread_id)
        if depth == 1:
            # this is a top-level Comment on a Thread attached to event, discussion, etc
            comment = session.execute(select(Comment).where(Comment.id == database_id)).scalar_one()
            thread = session.execute(select(Thread).where(Thread.id == comment.thread_id)).scalar_one()
            author_user = session.execute(select(User).where(User.id == comment.author_user_id)).scalar_one()
            # reply object for notif
            reply = threads_pb2.Reply(
                thread_id=payload.thread_id,
                content=comment.content,
                author_user_id=comment.author_user_id,
                created_time=Timestamp_from_datetime(comment.created),
                num_replies=0,
            )
            # figure out if the thread is related to an event or discussion
            event = session.execute(select(Event).where(Event.thread_id == thread.id)).scalar_one_or_none()
            discussion = session.execute(
                select(Discussion).where(Discussion.thread_id == thread.id)
            ).scalar_one_or_none()
            if event:
                # thread is an event thread
                occurrence = event.occurrences.order_by(EventOccurrence.id.desc()).limit(1).one()
                subscribed_user_ids = [user.id for user in event.subscribers]
                attending_user_ids = [user.user_id for user in occurrence.attendances]

                for user_id in set(subscribed_user_ids + attending_user_ids):
                    if are_blocked(session, user_id, comment.author_user_id):
                        continue
                    if user_id == comment.author_user_id:
                        continue
                    context = make_user_context(user_id=user_id)
                    notify(
                        session,
                        user_id=user_id,
                        topic_action="event:comment",
                        key=occurrence.id,
                        data=notification_data_pb2.EventComment(
                            reply=reply,
                            event=event_to_pb(session, occurrence, context),
                            author=user_model_to_pb(author_user, session, context),
                        ),
                    )
            elif discussion:
                # community discussion thread
                cluster = discussion.owner_cluster

                if not cluster.is_official_cluster:
                    raise NotImplementedError("Shouldn't have discussions under groups, only communities")

                for user_id in [discussion.creator_user_id]:
                    if are_blocked(session, user_id, comment.author_user_id):
                        continue
                    if user_id == comment.author_user_id:
                        continue

                    context = make_user_context(user_id=user_id)
                    notify(
                        session,
                        user_id=user_id,
                        topic_action="discussion:comment",
                        key=discussion.id,
                        data=notification_data_pb2.DiscussionComment(
                            reply=reply,
                            discussion=discussion_to_pb(session, discussion, context),
                            author=user_model_to_pb(author_user, session, context),
                        ),
                    )
            else:
                raise NotImplementedError("I can only do event and discussion threads for now")
        elif depth == 2:
            # this is a second-level reply to a comment
            reply = session.execute(select(Reply).where(Reply.id == database_id)).scalar_one()
            # the comment we're replying to
            parent_comment = session.execute(select(Comment).where(Comment.id == reply.comment_id)).scalar_one()

            author_user = session.execute(select(User).where(User.id == reply.author_user_id)).scalar_one()

            if are_blocked(session, parent_comment.author_user_id, reply.author_user_id):
                return

            if parent_comment.author_user_id == reply.author_user_id:
                return

            context = make_user_context(user_id=parent_comment.author_user_id)
            reply = threads_pb2.Reply(
                thread_id=payload.thread_id,
                content=reply.content,
                author_user_id=reply.author_user_id,
                created_time=Timestamp_from_datetime(reply.created),
                num_replies=0,
            )

            event = session.execute(
                select(Event).where(Event.thread_id == parent_comment.thread_id)
            ).scalar_one_or_none()
            discussion = session.execute(
                select(Discussion).where(Discussion.thread_id == parent_comment.thread_id)
            ).scalar_one_or_none()
            if event:
                # thread is an event thread
                occurrence = event.occurrences.order_by(EventOccurrence.id.desc()).limit(1).one()
                notify(
                    session,
                    user_id=parent_comment.author_user_id,
                    topic_action="thread:reply",
                    key=occurrence.id,
                    data=notification_data_pb2.ThreadReply(
                        reply=reply,
                        event=event_to_pb(session, occurrence, context),
                        author=user_model_to_pb(author_user, session, context),
                    ),
                )
            elif discussion:
                # community discussion thread
                notify(
                    session,
                    user_id=parent_comment.author_user_id,
                    topic_action="thread:reply",
                    key=discussion.id,
                    data=notification_data_pb2.ThreadReply(
                        reply=reply,
                        discussion=discussion_to_pb(session, discussion, context),
                        author=user_model_to_pb(author_user, session, context),
                    ),
                )
            else:
                raise NotImplementedError("I can only do event and discussion threads for now")
        else:
            raise Exception("Unknown depth")
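
# A minimal sketch of how this job gets its payload (illustrative; the real
# enqueue happens in PostReply below): posting a top-level comment on a
# thread produces a Comment row, and the queued payload carries that new
# Comment's id packed at depth 1, e.g.
#
#   payload = jobs_pb2.GenerateReplyNotificationsPayload(
#       thread_id=pack_thread_id(comment.id, 1),
#   )
#   generate_reply_notifications(payload)
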
class Threads(threads_pb2_grpc.ThreadsServicer):
    def GetThread(self, request, context, session):
        database_id, depth = unpack_thread_id(request.thread_id)
        page_size = request.page_size if 0 < request.page_size < 100000 else 1000
        page_start = unpack_thread_id(int(request.page_token))[0] if request.page_token else 2**50

        if depth == 0:
            if not session.execute(select(Thread).where(Thread.id == database_id)).scalar_one_or_none():
                context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)

            res = session.execute(
                select(Comment, func.count(Reply.id))
                .outerjoin(Reply, Reply.comment_id == Comment.id)
                .where(Comment.thread_id == database_id)
                .where(Comment.id < page_start)
                .group_by(Comment.id)
                .order_by(Comment.created.desc())
                .limit(page_size + 1)
            ).all()
            replies = [
                threads_pb2.Reply(
                    thread_id=pack_thread_id(r.id, 1),
                    content=r.content,
                    author_user_id=r.author_user_id,
                    created_time=Timestamp_from_datetime(r.created),
                    num_replies=n,
                )
                for r, n in res[:page_size]
            ]

        elif depth == 1:
            if not session.execute(select(Comment).where(Comment.id == database_id)).scalar_one_or_none():
                context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)

            res = (
                session.execute(
                    select(Reply)
                    .where(Reply.comment_id == database_id)
                    .where(Reply.id < page_start)
                    .order_by(Reply.created.desc())
                    .limit(page_size + 1)
                )
                .scalars()
                .all()
            )
            replies = [
                threads_pb2.Reply(
                    thread_id=pack_thread_id(r.id, 2),
                    content=r.content,
                    author_user_id=r.author_user_id,
                    created_time=Timestamp_from_datetime(r.created),
                    num_replies=0,
                )
                for r in res[:page_size]
            ]

        else:
            context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)

        if len(res) > page_size:
            # There's more!
            next_page_token = str(replies[-1].thread_id)
        else:
            next_page_token = ""

        return threads_pb2.GetThreadRes(replies=replies, next_page_token=next_page_token)
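
    # A client-side pagination sketch (illustrative; assumes a connected
    # `threads_stub` ThreadsStub and that GetThreadReq carries thread_id,
    # page_size and page_token fields):
    #
    #   page_token = ""
    #   while True:
    #       res = threads_stub.GetThread(
    #           threads_pb2.GetThreadReq(thread_id=thread_id, page_token=page_token)
    #       )
    #       for reply in res.replies:
    #           ...  # each top-level comment on the thread, newest first
    #       if not res.next_page_token:
    #           break
    #       page_token = res.next_page_token
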
    def PostReply(self, request, context, session):
        content = request.content.strip()

        if content == "":
            context.abort(grpc.StatusCode.INVALID_ARGUMENT, errors.INVALID_COMMENT)

        database_id, depth = unpack_thread_id(request.thread_id)
        if depth == 0:
            object_to_add = Comment(thread_id=database_id, author_user_id=context.user_id, content=content)
        elif depth == 1:
            object_to_add = Reply(comment_id=database_id, author_user_id=context.user_id, content=content)
        else:
            context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)
        session.add(object_to_add)
        try:
            session.flush()
        except sqlalchemy.exc.IntegrityError:
            context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)

        thread_id = pack_thread_id(object_to_add.id, depth + 1)

        queue_job(
            session,
            job_type="generate_reply_notifications",
            payload=jobs_pb2.GenerateReplyNotificationsPayload(
                thread_id=thread_id,
            ),
        )

        return threads_pb2.PostReplyRes(thread_id=thread_id)
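
# A minimal usage sketch (illustrative; assumes a connected `threads_stub`
# ThreadsStub and that PostReplyReq carries thread_id and content fields):
#
#   # comment on a top-level thread (depth 0), e.g. an event's or discussion's thread
#   res = threads_stub.PostReply(
#       threads_pb2.PostReplyReq(thread_id=thread.thread_id, content="Sounds great!")
#   )
#   # res.thread_id identifies the new Comment (packed at depth 1); it can be
#   # replied to with another PostReply or paged through with GetThread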