Coverage for src/couchers/servicers/threads.py: 82% (117 statements)
coverage.py v7.6.10, created at 2025-03-11 15:27 +0000
import logging
from types import SimpleNamespace

import grpc
import sqlalchemy.exc
from sqlalchemy.sql import func, select

from couchers import errors
from couchers.db import session_scope
from couchers.jobs.enqueue import queue_job
from couchers.models import Comment, Discussion, Event, EventOccurrence, Reply, Thread, User
from couchers.notifications.notify import notify
from couchers.servicers.api import user_model_to_pb
from couchers.servicers.blocking import are_blocked
from couchers.sql import couchers_select as select
from couchers.utils import Timestamp_from_datetime
from proto import notification_data_pb2, threads_pb2, threads_pb2_grpc
from proto.internal import jobs_pb2

logger = logging.getLogger(__name__)

# Since the API exposes a single ID space regardless of nesting level,
# we construct the API id by appending the nesting level to the
# database ID.
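# For example, database id 1234 at depth 1 packs to API thread_id 12341, and
# unpack_thread_id(12341) returns (1234, 1); the single decimal digit allows up
# to ten nesting levels, of which only depths 0-2 are used below.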


def pack_thread_id(database_id: int, depth: int) -> int:
    return database_id * 10 + depth


def unpack_thread_id(thread_id: int) -> tuple[int, int]:
    """Returns a (database_id, depth) tuple."""
    return divmod(thread_id, 10)


def total_num_responses(session, database_id):
    """Return the total number of comments and replies to the thread with
    database id database_id.
    """
    return (
        session.execute(select(func.count()).select_from(Comment).where(Comment.thread_id == database_id)).scalar_one()
        + session.execute(
            select(func.count())
            .select_from(Reply)
            .join(Comment, Comment.id == Reply.comment_id)
            .where(Comment.thread_id == database_id)
        ).scalar_one()
    )


def thread_to_pb(session, database_id):
    return threads_pb2.Thread(
        thread_id=pack_thread_id(database_id, 0),
        num_responses=total_num_responses(session, database_id),
    )


def generate_reply_notifications(payload: jobs_pb2.GenerateReplyNotificationsPayload):
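    # Runs as a background job queued from PostReply below; the servicer imports
    # are deferred here, presumably to avoid circular imports between servicer modules.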
    from couchers.servicers.discussions import discussion_to_pb
    from couchers.servicers.events import event_to_pb

    with session_scope() as session:
        database_id, depth = unpack_thread_id(payload.thread_id)
        if depth == 1:
            # this is a top-level Comment on a Thread attached to event, discussion, etc
            comment = session.execute(select(Comment).where(Comment.id == database_id)).scalar_one()
            thread = session.execute(select(Thread).where(Thread.id == comment.thread_id)).scalar_one()
            author_user = session.execute(select(User).where(User.id == comment.author_user_id)).scalar_one()
            # reply object for notif
            reply = threads_pb2.Reply(
                thread_id=payload.thread_id,
                content=comment.content,
                author_user_id=comment.author_user_id,
                created_time=Timestamp_from_datetime(comment.created),
                num_replies=0,
            )
            # figure out if the thread is related to an event or discussion
            event = session.execute(select(Event).where(Event.thread_id == thread.id)).scalar_one_or_none()
            discussion = session.execute(
                select(Discussion).where(Discussion.thread_id == thread.id)
            ).scalar_one_or_none()
            if event:
                # thread is an event thread
                occurrence = event.occurrences.order_by(EventOccurrence.id.desc()).first()
                subscribed_user_ids = [user.id for user in event.subscribers]
                attending_user_ids = [user.user_id for user in occurrence.attendances]

                for user_id in set(subscribed_user_ids + attending_user_ids):
                    if are_blocked(session, user_id, comment.author_user_id):
                        continue
                    if user_id == comment.author_user_id:
                        continue
                    context = SimpleNamespace(user_id=user_id)
                    notify(
                        session,
                        user_id=user_id,
                        topic_action="event:comment",
                        key=occurrence.id,
                        data=notification_data_pb2.EventComment(
                            reply=reply,
                            event=event_to_pb(session, occurrence, context),
                            author=user_model_to_pb(author_user, session, context),
                        ),
                    )
            elif discussion:
                # community discussion thread
                cluster = discussion.owner_cluster

                if not cluster.is_official_cluster:
                    raise NotImplementedError("Shouldn't have discussions under groups, only communities")

                for user_id in [discussion.creator_user_id]:
                    if are_blocked(session, user_id, comment.author_user_id):
                        continue
                    if user_id == comment.author_user_id:
                        continue

                    context = SimpleNamespace(user_id=user_id)
                    notify(
                        session,
                        user_id=user_id,
                        topic_action="discussion:comment",
                        key=discussion.id,
                        data=notification_data_pb2.DiscussionComment(
                            reply=reply,
                            discussion=discussion_to_pb(session, discussion, context),
                            author=user_model_to_pb(author_user, session, context),
                        ),
                    )
            else:
                raise NotImplementedError("I can only do event and discussion threads for now")
        elif depth == 2:
            # this is a second-level reply to a comment
            reply = session.execute(select(Reply).where(Reply.id == database_id)).scalar_one()
            # the comment we're replying to
            parent_comment = session.execute(select(Comment).where(Comment.id == reply.comment_id)).scalar_one()

            author_user = session.execute(select(User).where(User.id == reply.author_user_id)).scalar_one()

            if are_blocked(session, parent_comment.author_user_id, reply.author_user_id):
                return

            if parent_comment.author_user_id == reply.author_user_id:
                return

            context = SimpleNamespace(user_id=parent_comment.author_user_id)
            reply = threads_pb2.Reply(
                thread_id=payload.thread_id,
                content=reply.content,
                author_user_id=reply.author_user_id,
                created_time=Timestamp_from_datetime(reply.created),
                num_replies=0,
            )

            event = session.execute(
                select(Event).where(Event.thread_id == parent_comment.thread_id)
            ).scalar_one_or_none()
            discussion = session.execute(
                select(Discussion).where(Discussion.thread_id == parent_comment.thread_id)
            ).scalar_one_or_none()
            if event:
                # thread is an event thread
                occurrence = event.occurrences.order_by(EventOccurrence.id.desc()).first()
                notify(
                    session,
                    user_id=parent_comment.author_user_id,
                    topic_action="thread:reply",
                    key=occurrence.id,
                    data=notification_data_pb2.ThreadReply(
                        reply=reply,
                        event=event_to_pb(session, occurrence, context),
                        author=user_model_to_pb(author_user, session, context),
                    ),
                )
            elif discussion:
                # community discussion thread
                notify(
                    session,
                    user_id=parent_comment.author_user_id,
                    topic_action="thread:reply",
                    key=discussion.id,
                    data=notification_data_pb2.ThreadReply(
                        reply=reply,
                        discussion=discussion_to_pb(session, discussion, context),
                        author=user_model_to_pb(author_user, session, context),
                    ),
                )
            else:
                raise NotImplementedError("I can only do event and discussion threads for now")
        else:
            raise Exception("Unknown depth")


class Threads(threads_pb2_grpc.ThreadsServicer):
    def GetThread(self, request, context, session):
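        # depth 0: page through the comments on a thread; depth 1: page through the replies to a comment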
        database_id, depth = unpack_thread_id(request.thread_id)
        page_size = request.page_size if 0 < request.page_size < 100000 else 1000
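        # the page token is the packed thread_id of the last reply on the previous page;
        # 2**50 is effectively "no upper bound", i.e. start from the newest item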
        page_start = unpack_thread_id(int(request.page_token))[0] if request.page_token else 2**50
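
        # both branches fetch page_size + 1 rows so the check below can tell whether another page exists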
        if depth == 0:
            if not session.execute(select(Thread).where(Thread.id == database_id)).scalar_one_or_none():
                context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)

            res = session.execute(
                select(Comment, func.count(Reply.id))
                .outerjoin(Reply, Reply.comment_id == Comment.id)
                .where(Comment.thread_id == database_id)
                .where(Comment.id < page_start)
                .group_by(Comment.id)
                .order_by(Comment.created.desc())
                .limit(page_size + 1)
            ).all()
            replies = [
                threads_pb2.Reply(
                    thread_id=pack_thread_id(r.id, 1),
                    content=r.content,
                    author_user_id=r.author_user_id,
                    created_time=Timestamp_from_datetime(r.created),
                    num_replies=n,
                )
                for r, n in res[:page_size]
            ]

        elif depth == 1:
            if not session.execute(select(Comment).where(Comment.id == database_id)).scalar_one_or_none():
                context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)

            res = (
                session.execute(
                    select(Reply)
                    .where(Reply.comment_id == database_id)
                    .where(Reply.id < page_start)
                    .order_by(Reply.created.desc())
                    .limit(page_size + 1)
                )
                .scalars()
                .all()
            )
            replies = [
                threads_pb2.Reply(
                    thread_id=pack_thread_id(r.id, 2),
                    content=r.content,
                    author_user_id=r.author_user_id,
                    created_time=Timestamp_from_datetime(r.created),
                    num_replies=0,
                )
                for r in res[:page_size]
            ]

        else:
            context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)

        if len(res) > page_size:
            # There's more!
            next_page_token = str(replies[-1].thread_id)
        else:
            next_page_token = ""

        return threads_pb2.GetThreadRes(replies=replies, next_page_token=next_page_token)

    def PostReply(self, request, context, session):
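        # a reply to a thread (depth 0) becomes a Comment; a reply to a comment (depth 1) becomes a Reply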
        content = request.content.strip()

        if content == "":
            context.abort(grpc.StatusCode.INVALID_ARGUMENT, errors.INVALID_COMMENT)

        database_id, depth = unpack_thread_id(request.thread_id)
        if depth == 0:
            object_to_add = Comment(thread_id=database_id, author_user_id=context.user_id, content=content)
        elif depth == 1:
            object_to_add = Reply(comment_id=database_id, author_user_id=context.user_id, content=content)
        else:
            context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)
        session.add(object_to_add)
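        # a nonexistent parent id surfaces as an IntegrityError on flush (presumably a
        # foreign key violation), which is mapped to NOT_FOUND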
        try:
            session.flush()
        except sqlalchemy.exc.IntegrityError:
            context.abort(grpc.StatusCode.NOT_FOUND, errors.THREAD_NOT_FOUND)
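
        # the new object sits one level deeper than the thread/comment it was posted under, hence depth + 1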
        thread_id = pack_thread_id(object_to_add.id, depth + 1)

        queue_job(
            session,
            job_type="generate_reply_notifications",
            payload=jobs_pb2.GenerateReplyNotificationsPayload(
                thread_id=thread_id,
            ),
        )

        return threads_pb2.PostReplyRes(thread_id=thread_id)