2024-08-20 08:55:05 +12:00
from datetime import timedelta
from random import randint
2024-04-04 21:36:03 +13:00
2024-08-20 07:03:08 +12:00
from flask import request , current_app , abort , jsonify , json , g , url_for , redirect , make_response
2024-01-09 20:44:08 +13:00
from flask_login import current_user
2024-08-20 07:03:08 +12:00
from sqlalchemy import desc , or_
import werkzeug . exceptions
2023-12-30 13:23:12 +13:00
2023-12-24 13:28:41 +13:00
from app import db , constants , cache , celery
2023-08-05 21:24:10 +12:00
from app . activitypub import bp
2023-08-29 22:01:06 +12:00
2024-11-18 18:44:28 +00:00
from app . activitypub . signature import HttpSignature , post_request , VerificationError , default_context , LDSignature
2023-08-29 22:01:06 +12:00
from app . community . routes import show_community
2024-01-03 16:29:58 +13:00
from app . community . util import send_to_remote_instance
2023-12-09 22:14:16 +13:00
from app . post . routes import continue_discussion , show_post
2023-10-07 21:32:19 +13:00
from app . user . routes import show_profile
2024-11-18 16:53:32 +00:00
from app . constants import *
2023-09-10 20:20:53 +12:00
from app . models import User , Community , CommunityJoinRequest , CommunityMember , CommunityBan , ActivityPubLog , Post , \
2024-02-17 20:05:57 +13:00
PostReply , Instance , PostVote , PostReplyVote , File , AllowedInstances , BannedInstances , utcnow , Site , Notification , \
2024-05-31 22:06:34 +01:00
ChatMessage , Conversation , UserFollower , UserBlock , Poll , PollChoice
2023-08-10 21:13:37 +12:00
from app . activitypub . util import public_key , users_total , active_half_year , active_month , local_posts , local_comments , \
2024-12-31 13:55:24 +13:00
post_to_activity , find_actor_or_create , find_reply_parent , find_liked_object , \
2024-09-13 16:39:42 +12:00
lemmy_site_data , is_activitypub_request , delete_post_or_comment , community_members , \
2023-12-30 13:23:12 +13:00
user_removed_from_remote_server , create_post , create_post_reply , update_post_reply_from_activity , \
2024-04-06 16:29:47 +13:00
update_post_from_activity , undo_vote , undo_downvote , post_to_page , get_redis_connection , find_reported_object , \
2025-01-08 23:05:03 +00:00
process_report , ensure_domains_match , can_edit , can_delete , resolve_remote_post , \
2024-12-02 14:05:54 +01:00
inform_followers_of_post_update , comment_model_to_json , restore_post_or_comment , ban_user , unban_user , \
2025-01-06 19:13:23 +00:00
log_incoming_ap , find_community , site_ban_remove_data , community_ban_remove_data , verify_object_from_source
2024-09-13 16:39:42 +12:00
from app . utils import gibberish , get_setting , render_template , \
community_membership , ap_datetime , ip_address , can_downvote , \
2024-03-13 16:40:20 +13:00
can_upvote , can_create_post , awaken_dormant_instance , shorten_string , can_create_post_reply , sha256_digest , \
2024-12-31 13:55:24 +13:00
community_moderators , html_to_text , add_to_modlog_activitypub , instance_banned
2023-08-05 21:24:10 +12:00
2024-04-04 21:36:03 +13:00
@bp.route('/testredis')
def testredis_get():
    """Health-check endpoint: write a short-lived key to Redis and read it back."""
    redis_client = get_redis_connection()
    # ex=600 keeps the probe key from accumulating in Redis
    redis_client.set("cowbell", "1", ex=600)
    value = redis_client.get('cowbell')
    return "Redis: OK" if value is not None else "Redis: FAIL"
2023-08-05 21:24:10 +12:00
@bp.route('/.well-known/webfinger')
def webfinger():
    """Webfinger endpoint (RFC 7033): resolve an `acct:` or URL `resource` query
    to a local user or community, or to the special instance actor.

    Returns a JSON Resource Descriptor with profile-page and ActivityPub self
    links, or 404 when no `resource` parameter was supplied.
    """
    if request.args.get('resource'):
        query = request.args.get('resource')  # acct:alice@tada.club
        if 'acct:' in query:
            actor = query.split(':')[1].split('@')[0]  # alice
        elif 'https:' in query or 'http:' in query:
            actor = query.split('/')[-1]
        else:
            return 'Webfinger regex failed to match'

        # special case: instance actor
        if actor == current_app.config['SERVER_NAME']:
            webfinger_data = {
                "subject": f"acct:{actor}@{current_app.config['SERVER_NAME']}",
                "aliases": [f"https://{current_app.config['SERVER_NAME']}/actor"],
                "links": [
                    {
                        "rel": "http://webfinger.net/rel/profile-page",
                        "type": "text/html",
                        "href": f"https://{current_app.config['SERVER_NAME']}/about"
                    },
                    {
                        "rel": "self",
                        "type": "application/activity+json",
                        "href": f"https://{current_app.config['SERVER_NAME']}/actor",
                    }
                ]
            }
            resp = jsonify(webfinger_data)
            resp.headers.add_header('Access-Control-Allow-Origin', '*')
            return resp

        # Renamed from 'seperator'/'type' — fixes the typo and stops shadowing the
        # `type` builtin; 'u'/'Person' for users, 'c'/'Group' for communities.
        separator = 'u'
        actor_type = 'Person'
        # Only match local (ap_id is None), live accounts; alt_user_name covers anonymized alts
        user = User.query.filter(or_(User.user_name == actor.strip(), User.alt_user_name == actor.strip())).filter_by(deleted=False, banned=False, ap_id=None).first()
        if user is None:
            community = Community.query.filter_by(name=actor.strip(), ap_id=None).first()
            if community is None:
                return ''
            separator = 'c'
            actor_type = 'Group'
        webfinger_data = {
            "subject": f"acct:{actor}@{current_app.config['SERVER_NAME']}",
            "aliases": [f"https://{current_app.config['SERVER_NAME']}/{separator}/{actor}"],
            "links": [
                {
                    "rel": "http://webfinger.net/rel/profile-page",
                    "type": "text/html",
                    "href": f"https://{current_app.config['SERVER_NAME']}/{separator}/{actor}"
                },
                {
                    "rel": "self",
                    "type": "application/activity+json",
                    "href": f"https://{current_app.config['SERVER_NAME']}/{separator}/{actor}",
                    "properties": {
                        "https://www.w3.org/ns/activitystreams#type": actor_type
                    }
                }
            ]
        }
        resp = jsonify(webfinger_data)
        resp.headers.add_header('Access-Control-Allow-Origin', '*')
        return resp
    else:
        abort(404)
@bp.route('/.well-known/nodeinfo')
@cache.cached(timeout=600)
def nodeinfo():
    """Discovery document pointing at the NodeInfo 2.0 schema for this instance."""
    server = current_app.config['SERVER_NAME']
    links = [
        {"rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
         "href": f"https://{server}/nodeinfo/2.0"},
        {"rel": "https://www.w3.org/ns/activitystreams#Application",
         "href": f"https://{server}"},
    ]
    return jsonify({"links": links})
2024-02-14 10:16:49 +13:00
@bp.route('/.well-known/host-meta')
@cache.cached(timeout=600)
def host_meta():
    """XRD host-meta document advertising the webfinger lookup template."""
    # {uri} is a literal placeholder the webfinger client substitutes, so the
    # document is assembled by concatenation rather than an f-string.
    xrd_document = ('<?xml version="1.0" encoding="UTF-8"?>\n'
                    '<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">\n'
                    '<Link rel="lrdd" template="https://'
                    + current_app.config["SERVER_NAME"]
                    + '/.well-known/webfinger?resource={uri}"/>\n</XRD>')
    resp = make_response(xrd_document)
    resp.content_type = 'application/xrd+xml; charset=utf-8'
    return resp
2023-08-05 21:24:10 +12:00
@bp.route('/nodeinfo/2.0')
@bp.route('/nodeinfo/2.0.json')
@cache.cached(timeout=600)
def nodeinfo2():
    """NodeInfo 2.0 document: software identity, usage statistics and
    registration state, cached for ten minutes."""
    usage_stats = {
        "users": {
            "total": users_total(),
            "activeHalfyear": active_half_year(),
            "activeMonth": active_month(),
        },
        "localPosts": local_posts(),
        "localComments": local_comments(),
    }
    nodeinfo_data = {
        "version": "2.0",
        "software": {"name": "PieFed", "version": "0.1"},
        "protocols": ["activitypub"],
        "usage": usage_stats,
        "openRegistrations": g.site.registration_mode != 'Closed',
    }
    return jsonify(nodeinfo_data)
2024-04-03 16:35:26 +13:00
@bp.route('/api/v1/instance')
@cache.cached(timeout=600)
def api_v1_instance():
    """Mastodon-compatible v1 instance endpoint with basic site stats."""
    stats = {
        "user_count": users_total(),
        # Mastodon counts every status; here it's local posts plus comments
        "status_count": local_posts() + local_comments(),
        "domain_count": 1,
    }
    instance_info = {
        'title': g.site.name,
        'uri': current_app.config['SERVER_NAME'],
        'stats': stats,
        'registrations': g.site.registration_mode != 'Closed',
        'approval_required': g.site.registration_mode == 'RequireApplication',
    }
    return jsonify(instance_info)
2024-02-25 16:24:50 +13:00
@bp.route('/api/v1/instance/domain_blocks')
@cache.cached(timeout=600)
def domain_blocks():
    """Mastodon-compatible list of blocked domains.

    When the instance runs in allowlist mode there is no blocklist to
    publish, so an empty list is returned.
    """
    if get_setting('use_allowlist', False):
        return jsonify([])
    blocks = [
        {
            'domain': banned.domain,
            'digest': sha256_digest(banned.domain),
            'severity': 'suspend',
            'comment': banned.reason if banned.reason else '',
        }
        for banned in BannedInstances.query.all()
    ]
    return jsonify(blocks)
2023-11-23 15:10:44 +13:00
@bp.route('/api/v3/site')
@cache.cached(timeout=600)
def lemmy_site():
    # Lemmy-compatible site endpoint; the whole payload is assembled by
    # lemmy_site_data() (imported from app.activitypub.util) and cached for 10 minutes.
    return jsonify(lemmy_site_data())
@bp.route('/api/v3/federated_instances')
@cache.cached(timeout=600)
def lemmy_federated_instances():
    """Lemmy-compatible federation report: linked, allowed and blocked instances.

    'linked' lists every known remote instance (Instance.id 1 is presumably the
    local instance — TODO confirm) that is not on the blocklist.
    """
    instances = Instance.query.filter(Instance.id != 1).all()
    linked = []
    allowed = []
    blocked = []
    for instance in AllowedInstances.query.all():
        allowed.append({"id": instance.id, "domain": instance.domain, "published": utcnow(), "updated": utcnow()})
    for instance in BannedInstances.query.all():
        blocked.append({"id": instance.id, "domain": instance.domain, "published": utcnow(), "updated": utcnow()})
    # Build the blocked-domain set once: O(1) membership tests instead of the
    # previous any() scan over `blocked` for every known instance.
    blocked_domains = {entry['domain'] for entry in blocked}
    for instance in instances:
        instance_data = {"id": instance.id, "domain": instance.domain, "published": instance.created_at.isoformat(), "updated": instance.updated_at.isoformat()}
        if instance.software:
            instance_data['software'] = instance.software
        if instance.version:
            instance_data['version'] = instance.version
        if instance.domain not in blocked_domains:
            linked.append(instance_data)
    return jsonify({
        "federated_instances": {
            "linked": linked,
            "allowed": allowed,
            "blocked": blocked
        }
    })
2023-12-29 17:32:35 +13:00
@bp.route('/u/<actor>', methods=['GET', 'HEAD'])
def user_profile(actor):
    """ Requests to this endpoint can be for a JSON representation of the user, or a HTML rendering of their profile.
        The two types of requests are differentiated by the header """
    actor = actor.strip()

    # admins can view deleted accounts
    if current_user.is_authenticated and current_user.is_admin():
        if '@' in actor:
            # Remote-style handle: look up by full ActivityPub id (no deleted/banned filter for admins)
            user: User = User.query.filter_by(ap_id=actor.lower()).first()
        else:
            # Local name: match either the main user_name or the anonymized alt_user_name
            user: User = User.query.filter(or_(User.user_name == actor, User.alt_user_name == actor)).filter_by(ap_id=None).first()
            if user is None:
                # Fall back to matching the full local profile URL
                user = User.query.filter_by(ap_profile_id=f'https://{current_app.config["SERVER_NAME"]}/u/{actor.lower()}', deleted=False, ap_id=None).first()
    else:
        # Non-admins only see accounts that are neither deleted nor banned
        if '@' in actor:
            user: User = User.query.filter_by(ap_id=actor.lower(), deleted=False, banned=False).first()
        else:
            user: User = User.query.filter(or_(User.user_name == actor, User.alt_user_name == actor)).filter_by(deleted=False, ap_id=None).first()
            if user is None:
                user = User.query.filter_by(ap_profile_id=f'https://{current_app.config["SERVER_NAME"]}/u/{actor.lower()}', deleted=False, ap_id=None).first()

    if user is not None:
        # main_user_name == False means the request hit the anonymized alt account name
        main_user_name = True
        if user.alt_user_name == actor:
            main_user_name = False

        if request.method == 'HEAD':
            # HEAD gets an empty body; content type still signals ActivityPub when asked for
            if is_activitypub_request():
                resp = jsonify('')
                resp.content_type = 'application/activity+json'
                return resp
            else:
                return ''
        if is_activitypub_request():
            # Build the ActivityPub actor document for this user
            server = current_app.config['SERVER_NAME']
            actor_data = {"@context": default_context(),
                          "type": "Person" if not user.bot else "Service",
                          "id": user.public_url(main_user_name),
                          "preferredUsername": actor,
                          "name": user.title if user.title else user.user_name,
                          "inbox": f"{user.public_url(main_user_name)}/inbox",
                          "outbox": f"{user.public_url(main_user_name)}/outbox",
                          "discoverable": user.searchable,
                          "indexable": user.indexable,
                          "manuallyApprovesFollowers": False if not user.ap_manually_approves_followers else user.ap_manually_approves_followers,
                          "publicKey": {
                              "id": f"{user.public_url(main_user_name)}#main-key",
                              "owner": user.public_url(main_user_name),
                              "publicKeyPem": user.public_key
                          },
                          "endpoints": {
                              "sharedInbox": f"https://{server}/inbox"
                          },
                          "published": ap_datetime(user.created),
                          }
            if not main_user_name:
                # Anonymized alt account: hide the real name and jitter the creation
                # date by up to 30 days (2592000 minutes range) so it can't be correlated
                actor_data['name'] = 'Anonymous'
                actor_data['published'] = ap_datetime(user.created + timedelta(minutes=randint(-2592000, 0)))
                actor_data['summary'] = '<p>This is an anonymous alternative account of another account. It has been generated automatically for a Piefed user who chose to keep their interactions private. They cannot reply to your messages using this account, but only upvote (like) or downvote (dislike). For more information about Piefed and this feature see <a href="https://piefed.social/post/205362">https://piefed.social/post/205362</a>.</p>'
            # Avatar / banner / bio / matrix id are only exposed on the main identity
            if user.avatar_id is not None and main_user_name:
                actor_data["icon"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{user.avatar_image()}"
                }
            if user.cover_id is not None and main_user_name:
                actor_data["image"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{user.cover_image()}"
                }
            if user.about_html and main_user_name:
                actor_data['summary'] = user.about_html
                actor_data['source'] = {'content': user.about, 'mediaType': 'text/markdown'}
            if user.matrix_user_id and main_user_name:
                actor_data['matrixUserId'] = user.matrix_user_id
            if user.extra_fields.count() > 0:
                # Mastodon-style profile fields as PropertyValue attachments
                actor_data['attachment'] = []
                for field in user.extra_fields:
                    actor_data['attachment'].append({'type': 'PropertyValue',
                                                     'name': field.label,
                                                     'value': field.text})
            resp = jsonify(actor_data)
            resp.content_type = 'application/activity+json'
            # Advertise the HTML profile as an alternate representation
            resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/u/{actor}>; rel="alternate"; type="text/html"')
            return resp
        else:
            # Browser request: render HTML, but never render the alt identity's profile
            if main_user_name:
                return show_profile(user)
            else:
                return render_template('errors/alt_profile.html')
    else:
        abort(404)
2023-08-10 21:13:37 +12:00
2023-12-22 15:34:45 +13:00
@bp.route('/u/<actor>/outbox', methods=['GET'])
def user_outbox(actor):
    """Serve a user's ActivityPub outbox — always an empty OrderedCollection."""
    empty_collection = {
        "@context": default_context(),
        'type': 'OrderedCollection',
        'id': f"https://{current_app.config['SERVER_NAME']}/u/{actor}/outbox",
        'orderedItems': [],
        'totalItems': 0,
    }
    response = jsonify(empty_collection)
    response.content_type = 'application/activity+json'
    return response
2023-08-10 21:13:37 +12:00
@bp.route('/c/<actor>', methods=['GET'])
def community_profile(actor):
    """ Requests to this endpoint can be for a JSON representation of the community, or a HTML rendering of it.
        The two types of requests are differentiated by the header """
    actor = actor.strip()
    if '@' in actor:
        # don't provide activitypub info for remote communities
        if 'application/ld+json' in request.headers.get('Accept', '') or 'application/activity+json' in request.headers.get('Accept', ''):
            abort(400)
        community: Community = Community.query.filter_by(ap_id=actor.lower(), banned=False).first()
    else:
        # Local community: ap_id is None for locally-hosted actors
        community: Community = Community.query.filter_by(name=actor, ap_id=None).first()
    if community is not None:
        if is_activitypub_request():
            # Build the ActivityPub Group actor document for this community
            server = current_app.config['SERVER_NAME']
            actor_data = {"@context": default_context(),
                          "type": "Group",
                          "id": f"https://{server}/c/{actor}",
                          "name": community.title,
                          "sensitive": True if community.nsfw or community.nsfl else False,
                          "preferredUsername": actor,
                          "inbox": f"https://{server}/c/{actor}/inbox",
                          "outbox": f"https://{server}/c/{actor}/outbox",
                          "followers": f"https://{server}/c/{actor}/followers",
                          "moderators": f"https://{server}/c/{actor}/moderators",
                          "featured": f"https://{server}/c/{actor}/featured",
                          "attributedTo": f"https://{server}/c/{actor}/moderators",
                          # local_only communities also report as restricted so remotes don't post
                          "postingRestrictedToMods": community.restricted_to_mods or community.local_only,
                          "newModsWanted": community.new_mods_wanted,
                          "privateMods": community.private_mods,
                          "url": f"https://{server}/c/{actor}",
                          "publicKey": {
                              "id": f"https://{server}/c/{actor}#main-key",
                              "owner": f"https://{server}/c/{actor}",
                              "publicKeyPem": community.public_key
                          },
                          "endpoints": {
                              "sharedInbox": f"https://{server}/inbox"
                          },
                          "published": ap_datetime(community.created_at),
                          "updated": ap_datetime(community.last_active),
                          }
            if community.description_html:
                # HTML summary plus the markdown source, per the FEP-e232-style 'source' convention
                actor_data["summary"] = community.description_html
                actor_data['source'] = {'content': community.description, 'mediaType': 'text/markdown'}
            if community.icon_id is not None:
                actor_data["icon"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{community.icon_image()}"
                }
            if community.image_id is not None:
                actor_data["image"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{community.header_image()}"
                }
            resp = jsonify(actor_data)
            resp.content_type = 'application/activity+json'
            # Advertise the HTML view as an alternate representation
            resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/c/{actor}>; rel="alternate"; type="text/html"')
            return resp
        else:  # browser request - return html
            return show_community(community)
    else:
        abort(404)
2023-08-10 21:13:37 +12:00
2024-11-18 16:53:32 +00:00
@bp.route('/inbox', methods=['POST'])
def shared_inbox():
    """Shared ActivityPub inbox: validate, deduplicate and signature-check an
    incoming activity, then hand it to process_inbox_request() (inline in debug,
    via celery otherwise).

    Most rejections deliberately return HTTP 200 so remote servers don't keep
    retrying delivery; only signature failures return 400.
    """
    try:
        request_json = request.get_json(force=True)
    except werkzeug.exceptions.BadRequest as e:
        log_incoming_ap('', APLOG_NOTYPE, APLOG_FAILURE, None, 'Unable to parse json body: ' + e.description)
        return '', 200

    g.site = Site.query.get(1)  # g.site is not initialized by @app.before_request when request.path == '/inbox'
    store_ap_json = g.site.log_activitypub_json
    # Only attach the raw JSON to log entries when the site has logging enabled
    saved_json = request_json if store_ap_json else None

    if not 'id' in request_json or not 'type' in request_json or not 'actor' in request_json or not 'object' in request_json:
        log_incoming_ap('', APLOG_NOTYPE, APLOG_FAILURE, saved_json, 'Missing minimum expected fields in JSON')
        return '', 200

    id = request_json['id']
    missing_actor_in_announce_object = False # nodebb
    if request_json['type'] == 'Announce' and isinstance(request_json['object'], dict):
        object = request_json['object']
        # NodeBB omits 'actor' inside its Announce objects; remember and repair later
        if not 'actor' in object:
            missing_actor_in_announce_object = True
        if not 'id' in object or not 'type' in object or not 'object' in object:
            if 'type' in object and (object['type'] == 'Page' or object['type'] == 'Note'):
                log_incoming_ap(id, APLOG_ANNOUNCE, APLOG_IGNORED, saved_json, 'Intended for Mastodon')
            else:
                log_incoming_ap(id, APLOG_ANNOUNCE, APLOG_FAILURE, saved_json, 'Missing minimum expected fields in JSON Announce object')
            return '', 200

        # An Announce wrapping an activity by one of our own actors is content we already have
        if not missing_actor_in_announce_object and isinstance(object['actor'], str) and object['actor'].startswith('https://' + current_app.config['SERVER_NAME']):
            log_incoming_ap(id, APLOG_DUPLICATE, APLOG_IGNORED, saved_json, 'Activity about local content which is already present')
            return '', 200

    # id = object['id']

    redis_client = get_redis_connection()
    if redis_client.exists(id):  # Something is sending same activity multiple times
        log_incoming_ap(id, APLOG_DUPLICATE, APLOG_IGNORED, saved_json, 'Already aware of this activity')
        return '', 200
    redis_client.set(id, 1, ex=90)  # Save the activity ID into redis, to avoid duplicate activities

    # Ignore unutilised PeerTube activity
    if isinstance(request_json['actor'], str) and request_json['actor'].endswith('accounts/peertube'):
        log_incoming_ap(id, APLOG_PT_VIEW, APLOG_IGNORED, saved_json, 'PeerTube View or CacheFile activity')
        return ''

    # Ignore account deletion requests from users that do not already exist here
    account_deletion = False
    if (request_json['type'] == 'Delete' and
            'object' in request_json and isinstance(request_json['object'], str) and
            request_json['actor'] == request_json['object']):
        # Self-delete (actor == object): only honour it for users we already know,
        # and never fetch the (now deleted) remote actor
        account_deletion = True
        actor = User.query.filter_by(ap_profile_id=request_json['actor'].lower()).first()
        if not actor:
            log_incoming_ap(id, APLOG_DELETE, APLOG_IGNORED, saved_json, 'Does not exist here')
            return '', 200
    else:
        actor = find_actor_or_create(request_json['actor'])

    if not actor:
        actor_name = request_json['actor']
        log_incoming_ap(id, APLOG_NOTYPE, APLOG_FAILURE, saved_json, f'Actor could not be found 1: {actor_name}')
        return '', 200

    if actor.is_local():  # should be impossible (can be Announced back, but not sent without access to privkey)
        log_incoming_ap(id, APLOG_NOTYPE, APLOG_FAILURE, saved_json, 'ActivityPub activity from a local actor')
        return '', 200

    # 'bounced' means the HTTP signature failed (relayed activity); used below to
    # avoid recording the relay's IP against the original actor's instance
    bounced = False
    try:
        HttpSignature.verify_request(request, actor.public_key, skip_date=True)
    except VerificationError as e:
        bounced = True
        # HTTP sig will fail if a.gup.pe or PeerTube have bounced a request, so check LD sig instead
        if 'signature' in request_json:
            try:
                LDSignature.verify_signature(request_json, actor.public_key)
            except VerificationError as e:
                log_incoming_ap(id, APLOG_NOTYPE, APLOG_FAILURE, saved_json, 'Could not verify LD signature: ' + str(e))
                return '', 400
        # not HTTP sig, and no LD sig, so reduce the inner object to just its remote ID, and then fetch it and check it in process_inbox_request()
        elif ((request_json['type'] == 'Create' or request_json['type'] == 'Update') and
                isinstance(request_json['object'], dict) and 'id' in request_json['object'] and isinstance(request_json['object']['id'], str)):
            request_json['object'] = request_json['object']['id']
        else:
            log_incoming_ap(id, APLOG_NOTYPE, APLOG_FAILURE, saved_json, 'Could not verify HTTP signature: ' + str(e))
            return '', 400

    # A verified delivery proves the sending instance is alive; reset health flags
    actor.instance.last_seen = utcnow()
    actor.instance.dormant = False
    actor.instance.gone_forever = False
    actor.instance.failures = 0
    actor.instance.ip_address = ip_address() if not bounced else ''
    db.session.commit()

    # When a user is deleted, the only way to be fairly sure they get deleted everywhere is to tell the whole fediverse.
    # Earlier check means this is only for users that already exist, processing it here means that http signature will have been verified
    if account_deletion == True:
        if current_app.debug:
            process_delete_request(request_json, store_ap_json)
        else:
            process_delete_request.delay(request_json, store_ap_json)
        return ''

    if missing_actor_in_announce_object:
        # NodeBB workaround: copy the inner object's attributedTo up as the missing actor
        if ((request_json['object']['type'] == 'Create' or request_json['object']['type'] == 'Update') and
                'attributedTo' in request_json['object']['object'] and isinstance(request_json['object']['object']['attributedTo'], str)):
            log_incoming_ap(id, APLOG_ANNOUNCE, APLOG_MONITOR, request_json, 'nodebb: Actor is missing in the Create')
            request_json['object']['actor'] = request_json['object']['object']['attributedTo']

    if current_app.debug:
        process_inbox_request(request_json, store_ap_json)
    else:
        process_inbox_request.delay(request_json, store_ap_json)

    return ''
2024-11-24 21:49:08 +00:00
@bp.route('/site_inbox', methods=['POST'])
def site_inbox():
    # Instance-level inbox; all deliveries are funnelled through shared_inbox().
    return shared_inbox()
2024-11-24 21:49:08 +00:00
@bp.route('/u/<actor>/inbox', methods=['POST'])
def user_inbox(actor):
    # Per-user inbox; the actor path segment is ignored — shared_inbox() handles everything.
    return shared_inbox()
@bp.route('/c/<actor>/inbox', methods=['POST'])
def community_inbox(actor):
    # Per-community inbox; the actor path segment is ignored — shared_inbox() handles everything.
    return shared_inbox()
2024-11-24 22:01:11 +00:00
def replay_inbox_request(request_json):
    """Re-run a previously received activity through the same validation
    pipeline as shared_inbox(), but synchronously, without Redis deduplication
    and without HTTP/LD signature checks (log messages are prefixed 'REPLAY:').
    """
    if not 'id' in request_json or not 'type' in request_json or not 'actor' in request_json or not 'object' in request_json:
        log_incoming_ap('', APLOG_NOTYPE, APLOG_FAILURE, request_json, 'REPLAY: Missing minimum expected fields in JSON')
        return

    id = request_json['id']
    missing_actor_in_announce_object = False  # nodebb
    if request_json['type'] == 'Announce' and isinstance(request_json['object'], dict):
        object = request_json['object']
        # NodeBB omits 'actor' inside its Announce objects; remember and repair later
        if not 'actor' in object:
            missing_actor_in_announce_object = True
            log_incoming_ap(id, APLOG_ANNOUNCE, APLOG_MONITOR, request_json, 'REPLAY: Actor is missing in Announce object')
        if not 'id' in object or not 'type' in object or not 'object' in object:
            if 'type' in object and (object['type'] == 'Page' or object['type'] == 'Note'):
                log_incoming_ap(id, APLOG_ANNOUNCE, APLOG_IGNORED, request_json, 'REPLAY: Intended for Mastodon')
            else:
                log_incoming_ap(id, APLOG_ANNOUNCE, APLOG_FAILURE, request_json, 'REPLAY: Missing minimum expected fields in JSON Announce object')
            return

        # An Announce wrapping one of our own actors' activities is content we already have
        if not missing_actor_in_announce_object and isinstance(object['actor'], str) and object['actor'].startswith('https://' + current_app.config['SERVER_NAME']):
            log_incoming_ap(id, APLOG_DUPLICATE, APLOG_IGNORED, request_json, 'REPLAY: Activity about local content which is already present')
            return

    # Ignore unutilised PeerTube activity
    if isinstance(request_json['actor'], str) and request_json['actor'].endswith('accounts/peertube'):
        log_incoming_ap(id, APLOG_PT_VIEW, APLOG_IGNORED, request_json, 'REPLAY: PeerTube View or CacheFile activity')
        return

    # Ignore account deletion requests from users that do not already exist here
    account_deletion = False
    if (request_json['type'] == 'Delete' and
            'object' in request_json and isinstance(request_json['object'], str) and
            request_json['actor'] == request_json['object']):
        # Self-delete (actor == object): only look up known users, never fetch the remote actor
        account_deletion = True
        actor = User.query.filter_by(ap_profile_id=request_json['actor'].lower()).first()
        if not actor:
            log_incoming_ap(id, APLOG_DELETE, APLOG_IGNORED, request_json, 'REPLAY: Does not exist here')
            return
    else:
        actor = find_actor_or_create(request_json['actor'])
    if not actor:
        actor_name = request_json['actor']
        log_incoming_ap(id, APLOG_NOTYPE, APLOG_FAILURE, request_json, f'REPLAY: Actor could not be found 1: {actor_name}')
        return
    if actor.is_local():  # should be impossible (can be Announced back, but not sent back without access to privkey)
        log_incoming_ap(id, APLOG_NOTYPE, APLOG_FAILURE, request_json, 'REPLAY: ActivityPub activity from a local actor')
        return
    # When a user is deleted, the only way to be fairly sure they get deleted everywhere is to tell the whole fediverse.
    if account_deletion == True:
        process_delete_request(request_json, True)
        return

    if missing_actor_in_announce_object:
        # NodeBB workaround: copy the inner object's attributedTo up as the missing actor
        if ((request_json['object']['type'] == 'Create' or request_json['object']['type'] == 'Update') and
                'attributedTo' in request_json['object']['object'] and isinstance(request_json['object']['object']['attributedTo'], str)):
            request_json['object']['actor'] = request_json['object']['object']['attributedTo']

    # Replays always store the JSON with the log entry (store_ap_json=True)
    process_inbox_request(request_json, True)
    return
2023-12-24 13:28:41 +13:00
@celery.task
2024-11-18 21:10:12 +00:00
def process_inbox_request ( request_json , store_ap_json ) :
2023-12-24 13:28:41 +13:00
with current_app . app_context ( ) :
site = Site . query . get ( 1 ) # can't use g.site because celery doesn't use Flask's g variable
2024-11-18 21:10:12 +00:00
# For an Announce, Accept, or Reject, we have the community, and need to find the user
# For everything else, we have the user, and need to find the community
# Benefits of always using request_json['actor']:
# It's the actor who signed the request, and whose signature has been verified
# Because of the earlier check, we know that they already exist, and so don't need to check again
# Using actors from inner objects has a vulnerability to spoofing attacks (e.g. if 'attributedTo' doesn't match the 'Create' actor)
2025-01-08 22:48:57 +00:00
saved_json = request_json if store_ap_json else None
2024-11-29 22:46:59 +00:00
id = request_json [ ' id ' ]
2024-11-18 21:10:12 +00:00
if request_json [ ' type ' ] == ' Announce ' or request_json [ ' type ' ] == ' Accept ' or request_json [ ' type ' ] == ' Reject ' :
community_ap_id = request_json [ ' actor ' ]
community = find_actor_or_create ( community_ap_id , community_only = True , create_if_not_found = False )
if not community or not isinstance ( community , Community ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ANNOUNCE , APLOG_FAILURE , saved_json , ' Actor was not a community ' )
2024-11-18 21:10:12 +00:00
return
user_ap_id = None # found in 'if request_json['type'] == 'Announce', or it's a local user (for 'Accept'/'Reject')
else :
user_ap_id = request_json [ ' actor ' ]
user = find_actor_or_create ( user_ap_id , create_if_not_found = False )
if not user or not isinstance ( user , User ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_NOTYPE , APLOG_FAILURE , saved_json , ' Actor was not a user ' )
2024-11-18 21:10:12 +00:00
return
user . last_seen = site . last_active = utcnow ( )
db . session . commit ( )
community = None # found as needed
2025-01-07 13:53:52 +00:00
# Announce: take care of inner objects that are just a URL (PeerTube, a.gup.pe), or find the user if the inner object is a dict
if request_json [ ' type ' ] == ' Announce ' :
if isinstance ( request_json [ ' object ' ] , str ) :
if request_json [ ' object ' ] . startswith ( ' https:// ' + current_app . config [ ' SERVER_NAME ' ] ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_DUPLICATE , APLOG_IGNORED , saved_json , ' Activity about local content which is already present ' )
2025-01-07 13:53:52 +00:00
return
2025-01-19 16:29:16 +00:00
post = resolve_remote_post ( request_json [ ' object ' ] , community , id , store_ap_json )
2025-01-07 13:53:52 +00:00
if post :
log_incoming_ap ( id , APLOG_ANNOUNCE , APLOG_SUCCESS , request_json )
else :
log_incoming_ap ( id , APLOG_ANNOUNCE , APLOG_FAILURE , request_json , ' Could not resolve post ' )
return
user_ap_id = request_json [ ' object ' ] [ ' actor ' ]
user = find_actor_or_create ( user_ap_id )
if not user or not isinstance ( user , User ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ANNOUNCE , APLOG_FAILURE , saved_json , ' Blocked or unfound user for Announce object actor ' + user_ap_id )
2025-01-07 13:53:52 +00:00
return
user . last_seen = site . last_active = utcnow ( )
user . instance . last_seen = utcnow ( )
user . instance . dormant = False
user . instance . gone_forever = False
user . instance . failures = 0
db . session . commit ( )
# Now that we have the community and the user from an Announce, we can save repeating code by removing it
2025-01-08 21:53:26 +00:00
# core_activity is checked for its Type, but the original request_json is sometimes passed to any other functions
2025-01-07 13:53:52 +00:00
announced = True
core_activity = request_json [ ' object ' ]
else :
announced = False
core_activity = request_json
2024-11-18 21:10:12 +00:00
# Follow: remote user wants to join/follow one of our users or communities
2025-01-08 22:58:10 +00:00
if core_activity [ ' type ' ] == ' Follow ' :
target_ap_id = core_activity [ ' object ' ]
follow_id = core_activity [ ' id ' ]
2024-11-18 21:10:12 +00:00
target = find_actor_or_create ( target_ap_id , create_if_not_found = False )
if not target :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_FOLLOW , APLOG_FAILURE , saved_json , ' Could not find target of Follow ' )
2024-11-18 21:10:12 +00:00
return
if isinstance ( target , Community ) :
community = target
reject_follow = False
if community . local_only :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_FOLLOW , APLOG_FAILURE , saved_json , ' Local only cannot be followed by remote users ' )
2024-11-18 21:10:12 +00:00
reject_follow = True
else :
# check if user is banned from this community
user_banned = CommunityBan . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if user_banned :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_FOLLOW , APLOG_FAILURE , saved_json , ' Remote user has been banned ' )
2024-11-18 21:10:12 +00:00
reject_follow = True
if reject_follow :
# send reject message to deny the follow
reject = { " @context " : default_context ( ) , " actor " : community . public_url ( ) , " to " : [ user . public_url ( ) ] ,
" object " : { " actor " : user . public_url ( ) , " to " : None , " object " : community . public_url ( ) , " type " : " Follow " , " id " : follow_id } ,
" type " : " Reject " , " id " : f " https:// { current_app . config [ ' SERVER_NAME ' ] } /activities/reject/ " + gibberish ( 32 ) }
post_request ( user . ap_inbox_url , reject , community . private_key , f " { community . public_url ( ) } #main-key " )
else :
if community_membership ( user , community ) != SUBSCRIPTION_MEMBER :
member = CommunityMember ( user_id = user . id , community_id = community . id )
db . session . add ( member )
2025-01-17 23:22:32 +00:00
community . subscriptions_count + = 1
2024-11-18 21:10:12 +00:00
db . session . commit ( )
cache . delete_memoized ( community_membership , user , community )
# send accept message to acknowledge the follow
accept = { " @context " : default_context ( ) , " actor " : community . public_url ( ) , " to " : [ user . public_url ( ) ] ,
" object " : { " actor " : user . public_url ( ) , " to " : None , " object " : community . public_url ( ) , " type " : " Follow " , " id " : follow_id } ,
" type " : " Accept " , " id " : f " https:// { current_app . config [ ' SERVER_NAME ' ] } /activities/accept/ " + gibberish ( 32 ) }
post_request ( user . ap_inbox_url , accept , community . private_key , f " { community . public_url ( ) } #main-key " )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_FOLLOW , APLOG_SUCCESS , saved_json )
2024-11-18 21:10:12 +00:00
return
elif isinstance ( target , User ) :
local_user = target
remote_user = user
if not local_user . is_local ( ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_FOLLOW , APLOG_FAILURE , saved_json , ' Follow request for remote user received ' )
2024-11-18 21:10:12 +00:00
return
existing_follower = UserFollower . query . filter_by ( local_user_id = local_user . id , remote_user_id = remote_user . id ) . first ( )
if not existing_follower :
auto_accept = not local_user . ap_manually_approves_followers
new_follower = UserFollower ( local_user_id = local_user . id , remote_user_id = remote_user . id , is_accepted = auto_accept )
if not local_user . ap_followers_url :
local_user . ap_followers_url = local_user . public_url ( ) + ' /followers '
db . session . add ( new_follower )
db . session . commit ( )
accept = { " @context " : default_context ( ) , " actor " : local_user . public_url ( ) , " to " : [ remote_user . public_url ( ) ] ,
" object " : { " actor " : remote_user . public_url ( ) , " to " : None , " object " : local_user . public_url ( ) , " type " : " Follow " , " id " : follow_id } ,
" type " : " Accept " , " id " : f " https:// { current_app . config [ ' SERVER_NAME ' ] } /activities/accept/ " + gibberish ( 32 ) }
post_request ( remote_user . ap_inbox_url , accept , local_user . private_key , f " { local_user . public_url ( ) } #main-key " )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_FOLLOW , APLOG_SUCCESS , saved_json )
2024-11-18 21:10:12 +00:00
return
2024-11-18 21:19:42 +00:00
# Accept: remote server is accepting our previous follow request
2025-01-08 22:58:10 +00:00
if core_activity [ ' type ' ] == ' Accept ' :
2024-11-18 21:19:42 +00:00
user = None
2025-01-08 22:58:10 +00:00
if isinstance ( core_activity [ ' object ' ] , str ) : # a.gup.pe accepts using a string with the ID of the follow request
join_request_parts = core_activity [ ' object ' ] . split ( ' / ' )
2024-11-18 21:19:42 +00:00
join_request = CommunityJoinRequest . query . get ( join_request_parts [ - 1 ] )
if join_request :
user = User . query . get ( join_request . user_id )
2025-01-08 22:58:10 +00:00
elif core_activity [ ' object ' ] [ ' type ' ] == ' Follow ' :
user_ap_id = core_activity [ ' object ' ] [ ' actor ' ]
2024-11-18 21:19:42 +00:00
user = find_actor_or_create ( user_ap_id , create_if_not_found = False )
if not user :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ACCEPT , APLOG_FAILURE , saved_json , ' Could not find recipient of Accept ' )
2024-11-18 21:19:42 +00:00
return
join_request = CommunityJoinRequest . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if join_request :
existing_membership = CommunityMember . query . filter_by ( user_id = join_request . user_id , community_id = join_request . community_id ) . first ( )
if not existing_membership :
member = CommunityMember ( user_id = join_request . user_id , community_id = join_request . community_id )
db . session . add ( member )
community . subscriptions_count + = 1
db . session . commit ( )
cache . delete_memoized ( community_membership , user , community )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ACCEPT , APLOG_SUCCESS , saved_json )
2024-11-18 21:19:42 +00:00
return
2024-11-18 21:25:19 +00:00
# Reject: remote server is rejecting our previous follow request
2025-01-08 22:58:10 +00:00
if core_activity [ ' type ' ] == ' Reject ' :
if core_activity [ ' object ' ] [ ' type ' ] == ' Follow ' :
user_ap_id = core_activity [ ' object ' ] [ ' actor ' ]
2024-11-18 21:25:19 +00:00
user = find_actor_or_create ( user_ap_id , create_if_not_found = False )
if not user :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ACCEPT , APLOG_FAILURE , saved_json , ' Could not find recipient of Reject ' )
2024-11-18 21:25:19 +00:00
return
join_request = CommunityJoinRequest . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if join_request :
db . session . delete ( join_request )
existing_membership = CommunityMember . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if existing_membership :
db . session . delete ( existing_membership )
cache . delete_memoized ( community_membership , user , community )
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ACCEPT , APLOG_SUCCESS , saved_json )
2024-11-18 21:25:19 +00:00
return
2024-11-18 21:10:12 +00:00
2024-11-18 22:05:25 +00:00
# Create is new content. Update is often an edit, but Updates from Lemmy can also be new content
2025-01-08 18:57:50 +00:00
if core_activity [ ' type ' ] == ' Create ' or core_activity [ ' type ' ] == ' Update ' :
if isinstance ( core_activity [ ' object ' ] , str ) :
core_activity = verify_object_from_source ( core_activity ) # change core_activity['object'] from str to dict, then process normally
2025-01-23 05:07:54 +00:00
if not core_activity :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CREATE , APLOG_FAILURE , saved_json , ' Could not verify unsigned request from source ' )
2025-01-06 19:13:23 +00:00
return
2025-01-08 18:57:50 +00:00
if core_activity [ ' object ' ] [ ' type ' ] == ' ChatMessage ' :
2024-11-18 22:05:25 +00:00
sender = user
2025-01-08 18:57:50 +00:00
recipient_ap_id = core_activity [ ' object ' ] [ ' to ' ] [ 0 ]
2024-11-18 22:05:25 +00:00
recipient = find_actor_or_create ( recipient_ap_id , create_if_not_found = False )
if recipient and recipient . is_local ( ) :
if sender . created_recently ( ) or sender . reputation < = - 10 :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CHATMESSAGE , APLOG_FAILURE , saved_json , ' Sender not eligible to send ' )
2024-11-18 22:05:25 +00:00
return
elif recipient . has_blocked_user ( sender . id ) or recipient . has_blocked_instance ( sender . instance_id ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CHATMESSAGE , APLOG_FAILURE , saved_json , ' Sender blocked by recipient ' )
2024-11-18 22:05:25 +00:00
return
2024-02-17 20:05:57 +13:00
else :
2024-11-18 22:05:25 +00:00
# Find existing conversation to add to
existing_conversation = Conversation . find_existing_conversation ( recipient = recipient , sender = sender )
if not existing_conversation :
existing_conversation = Conversation ( user_id = sender . id )
existing_conversation . members . append ( recipient )
existing_conversation . members . append ( sender )
db . session . add ( existing_conversation )
2024-02-17 20:05:57 +13:00
db . session . commit ( )
2024-11-18 22:05:25 +00:00
# Save ChatMessage to DB
2025-01-08 22:58:10 +00:00
encrypted = core_activity [ ' object ' ] [ ' encrypted ' ] if ' encrypted ' in core_activity [ ' object ' ] else None
2024-11-18 22:05:25 +00:00
new_message = ChatMessage ( sender_id = sender . id , recipient_id = recipient . id , conversation_id = existing_conversation . id ,
2025-01-08 22:58:10 +00:00
body_html = core_activity [ ' object ' ] [ ' content ' ] ,
body = html_to_text ( core_activity [ ' object ' ] [ ' content ' ] ) ,
2024-11-18 22:05:25 +00:00
encrypted = encrypted )
db . session . add ( new_message )
existing_conversation . updated_at = utcnow ( )
db . session . commit ( )
# Notify recipient
notify = Notification ( title = shorten_string ( ' New message from ' + sender . display_name ( ) ) ,
url = f ' /chat/ { existing_conversation . id } #message_ { new_message } ' , user_id = recipient . id ,
author_id = sender . id )
db . session . add ( notify )
recipient . unread_notifications + = 1
existing_conversation . read = False
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CHATMESSAGE , APLOG_SUCCESS , saved_json )
2024-11-18 22:05:25 +00:00
return
# inner object of Create is not a ChatMessage
else :
2025-01-08 18:57:50 +00:00
if ( core_activity [ ' object ' ] [ ' type ' ] == ' Note ' and ' name ' in core_activity [ ' object ' ] and # Poll Votes
2025-01-21 06:29:31 +00:00
' inReplyTo ' in core_activity [ ' object ' ] and ' attributedTo ' in core_activity [ ' object ' ] and
not ' published ' in core_activity [ ' object ' ] ) :
2025-01-08 18:57:50 +00:00
post_being_replied_to = Post . query . filter_by ( ap_id = core_activity [ ' object ' ] [ ' inReplyTo ' ] ) . first ( )
2024-11-18 22:05:25 +00:00
if post_being_replied_to :
poll_data = Poll . query . get ( post_being_replied_to . id )
2025-01-08 22:58:10 +00:00
choice = PollChoice . query . filter_by ( post_id = post_being_replied_to . id , choice_text = core_activity [ ' object ' ] [ ' name ' ] ) . first ( )
2024-11-18 22:05:25 +00:00
if poll_data and choice :
poll_data . vote_for_choice ( choice . id , user . id )
2024-04-20 13:03:39 +01:00
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CREATE , APLOG_SUCCESS , saved_json )
2024-11-18 22:05:25 +00:00
if post_being_replied_to . author . is_local ( ) :
inform_followers_of_post_update ( post_being_replied_to . id , user . instance_id )
return
2025-01-08 18:57:50 +00:00
if not announced :
community = find_community ( request_json )
if not community :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CREATE , APLOG_FAILURE , saved_json , ' Blocked or unfound community ' )
2025-01-08 18:57:50 +00:00
return
2025-01-08 22:58:10 +00:00
if not ensure_domains_match ( core_activity [ ' object ' ] ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CREATE , APLOG_FAILURE , saved_json , ' Domains do not match ' )
2025-01-08 18:57:50 +00:00
return
if community . local_only :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CREATE , APLOG_FAILURE , saved_json , ' Remote Create in local_only community ' )
2025-01-08 18:57:50 +00:00
return
2024-11-18 22:05:25 +00:00
2025-01-08 18:57:50 +00:00
object_type = core_activity [ ' object ' ] [ ' type ' ]
2024-11-18 22:05:25 +00:00
new_content_types = [ ' Page ' , ' Article ' , ' Link ' , ' Note ' , ' Question ' ]
if object_type in new_content_types : # create or update a post
2025-01-08 18:57:50 +00:00
process_new_content ( user , community , store_ap_json , request_json , announced )
2024-11-18 22:05:25 +00:00
return
2025-01-08 18:57:50 +00:00
elif object_type == ' Video ' : # PeerTube: editing a video (mostly used to update post score)
2025-01-08 22:58:10 +00:00
post = Post . query . filter_by ( ap_id = core_activity [ ' object ' ] [ ' id ' ] ) . first ( )
2024-11-18 22:05:25 +00:00
if post :
if user . id == post . user_id :
update_post_from_activity ( post , request_json )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UPDATE , APLOG_SUCCESS , saved_json )
2024-04-20 13:03:39 +01:00
return
2023-12-24 13:28:41 +13:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UPDATE , APLOG_FAILURE , saved_json , ' Edit attempt denied ' )
2024-01-05 08:45:33 +13:00
return
2024-11-18 22:05:25 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UPDATE , APLOG_FAILURE , saved_json , ' PeerTube post not found ' )
2024-11-18 22:05:25 +00:00
return
2025-01-08 18:57:50 +00:00
elif announced and core_activity [ ' type ' ] == ' Update ' and core_activity [ ' object ' ] [ ' type ' ] == ' Group ' :
# force refresh next time community is heard from
community . ap_fetched_at = None
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UPDATE , APLOG_SUCCESS , saved_json )
2024-11-18 22:05:25 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_CREATE , APLOG_FAILURE , saved_json , ' Unacceptable type (create): ' + object_type )
2024-11-18 22:05:25 +00:00
return
2023-12-24 16:20:18 +13:00
2025-01-08 18:28:27 +00:00
if core_activity [ ' type ' ] == ' Delete ' :
if isinstance ( core_activity [ ' object ' ] , str ) :
ap_id = core_activity [ ' object ' ] # lemmy
2024-11-18 22:31:18 +00:00
else :
2025-01-08 18:28:27 +00:00
ap_id = core_activity [ ' object ' ] [ ' id ' ] # kbin
2024-11-18 22:31:18 +00:00
to_delete = find_liked_object ( ap_id ) # Just for Posts and Replies (User deletes go through process_delete_request())
if to_delete :
if to_delete . deleted :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_DELETE , APLOG_IGNORED , saved_json , ' Activity about local content which is already deleted ' )
2024-11-18 22:31:18 +00:00
else :
2025-01-08 18:28:27 +00:00
reason = core_activity [ ' summary ' ] if ' summary ' in core_activity else ' '
delete_post_or_comment ( user , to_delete , store_ap_json , request_json , reason )
if not announced :
announce_activity_to_followers ( to_delete . community , user , request_json )
2024-11-18 22:31:18 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_DELETE , APLOG_FAILURE , saved_json , ' Delete: cannot find ' + ap_id )
2024-11-18 22:31:18 +00:00
return
2025-01-07 13:53:52 +00:00
if core_activity [ ' type ' ] == ' Like ' or core_activity [ ' type ' ] == ' EmojiReact ' : # Upvote
process_upvote ( user , store_ap_json , request_json , announced )
2024-11-19 11:51:29 +00:00
return
2025-01-07 14:14:50 +00:00
if core_activity [ ' type ' ] == ' Dislike ' : # Downvote
2024-11-19 11:51:29 +00:00
if site . enable_downvotes is False :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_DISLIKE , APLOG_IGNORED , saved_json , ' Dislike ignored because of allow_dislike setting ' )
2024-11-19 11:51:29 +00:00
return
2025-01-07 14:14:50 +00:00
process_downvote ( user , store_ap_json , request_json , announced )
2024-11-19 11:51:29 +00:00
return
2025-01-07 15:12:28 +00:00
if core_activity [ ' type ' ] == ' Flag ' : # Reported content
reported = find_reported_object ( core_activity [ ' object ' ] )
2024-11-19 12:02:15 +00:00
if reported :
2025-01-07 15:12:28 +00:00
process_report ( user , reported , core_activity )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_REPORT , APLOG_SUCCESS , saved_json )
2024-11-19 12:02:15 +00:00
announce_activity_to_followers ( reported . community , user , request_json )
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_REPORT , APLOG_IGNORED , saved_json , ' Report ignored due to missing content ' )
2024-11-19 12:02:15 +00:00
return
2025-01-07 15:27:28 +00:00
if core_activity [ ' type ' ] == ' Lock ' : # Post lock
mod = user
post_id = core_activity [ ' object ' ]
post = Post . query . filter_by ( ap_id = post_id ) . first ( )
reason = core_activity [ ' summary ' ] if ' summary ' in core_activity else ' '
if post :
if post . community . is_moderator ( mod ) or post . community . is_instance_admin ( mod ) :
post . comments_enabled = False
db . session . commit ( )
add_to_modlog_activitypub ( ' lock_post ' , mod , community_id = post . community . id ,
link_text = shorten_string ( post . title ) , link = f ' post/ { post . id } ' ,
reason = reason )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_LOCK , APLOG_SUCCESS , saved_json )
2025-01-07 15:27:28 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_LOCK , APLOG_FAILURE , saved_json , ' Lock: Does not have permission ' )
2025-01-07 15:27:28 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_LOCK , APLOG_FAILURE , saved_json , ' Lock: post not found ' )
2025-01-07 15:27:28 +00:00
return
2025-01-07 16:23:29 +00:00
if core_activity [ ' type ' ] == ' Add ' : # Add mods, or sticky a post
2024-11-27 22:12:23 +00:00
mod = user
2025-01-07 16:23:29 +00:00
if not announced :
community = find_community ( core_activity )
2024-11-27 22:12:23 +00:00
if community :
if not community . is_moderator ( mod ) and not community . is_instance_admin ( mod ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Does not have permission ' )
2024-11-27 22:12:23 +00:00
return
2025-01-07 16:23:29 +00:00
target = core_activity [ ' target ' ]
featured_url = community . ap_featured_url
2024-11-27 22:12:23 +00:00
moderators_url = community . ap_moderators_url
2025-01-07 16:23:29 +00:00
if target == featured_url :
post = Post . query . filter_by ( ap_id = core_activity [ ' object ' ] ) . first ( )
if post :
post . sticky = True
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_SUCCESS , saved_json )
2025-01-07 16:23:29 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Cannot find: ' + core_activity [ ' object ' ] )
2025-01-07 16:23:29 +00:00
return
2024-11-27 22:12:23 +00:00
if target == moderators_url :
2025-01-07 16:23:29 +00:00
new_mod = find_actor_or_create ( core_activity [ ' object ' ] )
if new_mod :
2024-11-27 22:12:23 +00:00
existing_membership = CommunityMember . query . filter_by ( community_id = community . id , user_id = new_mod . id ) . first ( )
if existing_membership :
existing_membership . is_moderator = True
else :
new_membership = CommunityMember ( community_id = community . id , user_id = new_mod . id , is_moderator = True )
db . session . add ( new_membership )
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_SUCCESS , saved_json )
2024-11-27 22:12:23 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Cannot find: ' + core_activity [ ' object ' ] )
2024-11-27 22:12:23 +00:00
return
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Unknown target for Add ' )
2024-11-27 22:12:23 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Add: cannot find community ' )
2024-11-27 22:12:23 +00:00
return
2025-01-07 16:26:28 +00:00
if core_activity [ ' type ' ] == ' Remove ' : # Remove mods, or unsticky a post
2024-11-27 22:12:23 +00:00
mod = user
2025-01-07 16:26:28 +00:00
if not announced :
community = find_community ( core_activity )
2024-11-27 22:12:23 +00:00
if community :
if not community . is_moderator ( mod ) and not community . is_instance_admin ( mod ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Does not have permission ' )
2024-11-27 22:12:23 +00:00
return
2025-01-07 16:26:28 +00:00
target = core_activity [ ' target ' ]
featured_url = community . ap_featured_url
2024-11-27 22:12:23 +00:00
moderators_url = community . ap_moderators_url
2025-01-07 16:26:28 +00:00
if target == featured_url :
post = Post . query . filter_by ( ap_id = core_activity [ ' object ' ] ) . first ( )
if post :
post . sticky = False
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_REMOVE , APLOG_SUCCESS , saved_json )
2025-01-07 16:26:28 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_REMOVE , APLOG_FAILURE , saved_json , ' Cannot find: ' + core_activity [ ' object ' ] )
2025-01-07 16:26:28 +00:00
return
2024-11-27 22:12:23 +00:00
if target == moderators_url :
2025-01-07 16:26:28 +00:00
old_mod = find_actor_or_create ( core_activity [ ' object ' ] )
if old_mod :
2024-11-27 22:12:23 +00:00
existing_membership = CommunityMember . query . filter_by ( community_id = community . id , user_id = old_mod . id ) . first ( )
if existing_membership :
existing_membership . is_moderator = False
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_REMOVE , APLOG_SUCCESS , saved_json )
2024-11-27 22:12:23 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Cannot find: ' + core_activity [ ' object ' ] )
2024-11-27 22:12:23 +00:00
return
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Unknown target for Remove ' )
2024-11-27 22:12:23 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_ADD , APLOG_FAILURE , saved_json , ' Remove: cannot find community ' )
2024-11-27 22:12:23 +00:00
return
2025-01-08 16:34:25 +00:00
if core_activity [ ' type ' ] == ' Block ' : # User Ban
"""
Sent directly ( not Announced ) if a remote Admin is banning one of their own users from their site
( e . g . lemmy . ml is banning lemmy . ml / u / troll )
Also send directly if a remote Admin or Mod is banning one of our users from one of their communities
( e . g . lemmy . ml is banning piefed . social / u / troll from lemmy . ml / c / memes )
Is Announced if a remote Admin or Mod is banning a remote user from one of their communities ( a remote user could also be one of our local users )
( e . g . lemmy . ml is banning piefed . social / u / troll or lemmy . world / u / troll from lemmy . ml / c / memes )
2025-01-08 21:53:26 +00:00
Same activity can be sent direct and Announced , but one will be filtered out when shared_inbox ( ) checks for it as a duplicate
2025-01-08 16:34:25 +00:00
We currently don ' t receive a Block if a remote Admin is banning a user of a different instance from their site (it ' s hacked by all the relevant communities Announcing a community ban )
This may change in the future , so it ' s something to monitor
If / When this changes , the code below will need updating , and we ' ll have to do extra work
"""
if not announced and store_ap_json :
2025-01-08 22:58:10 +00:00
core_activity [ ' cc ' ] = [ ] # cut very long list of instances
2025-01-08 16:34:25 +00:00
2024-11-20 19:48:38 +00:00
blocker = user
2025-01-08 16:34:25 +00:00
blocked_ap_id = core_activity [ ' object ' ] . lower ( )
2024-11-20 19:48:38 +00:00
blocked = User . query . filter_by ( ap_profile_id = blocked_ap_id ) . first ( )
if not blocked :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_IGNORED , saved_json , ' Does not exist here ' )
2024-11-20 19:48:38 +00:00
return
2024-11-30 09:50:14 +13:00
if blocked . banned : # We may have already banned them - we don't want remote temp bans to over-ride our permanent bans
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_IGNORED , saved_json , ' Already banned ' )
2024-11-30 09:50:14 +13:00
return
2025-01-08 16:34:25 +00:00
remove_data = core_activity [ ' removeData ' ] if ' removeData ' in core_activity else False
target = core_activity [ ' target ' ]
if target . count ( ' / ' ) < 4 : # site ban
if not blocker . is_instance_admin ( ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_FAILURE , saved_json , ' Does not have permission ' )
2025-01-08 16:34:25 +00:00
return
if blocked . is_local ( ) :
log_incoming_ap ( id , APLOG_USERBAN , APLOG_MONITOR , request_json , ' Remote Admin in banning one of our users from their site ' )
current_app . logger . error ( ' Remote Admin in banning one of our users from their site: ' + str ( request_json ) )
return
if blocked . instance_id != blocker . instance_id :
log_incoming_ap ( id , APLOG_USERBAN , APLOG_MONITOR , request_json , ' Remote Admin is banning a user of a different instance from their site ' )
current_app . logger . error ( ' Remote Admin is banning a user of a different instance from their site: ' + str ( request_json ) )
return
2024-11-30 09:50:14 +13:00
2025-01-08 16:34:25 +00:00
blocked . banned = True
2025-01-08 22:58:10 +00:00
if ' expires ' in core_activity :
blocked . banned_until = core_activity [ ' expires ' ]
elif ' endTime ' in core_activity :
blocked . banned_until = core_activity [ ' endTime ' ]
2024-11-30 09:50:14 +13:00
db . session . commit ( )
2024-11-20 19:48:38 +00:00
2025-01-08 16:34:25 +00:00
if remove_data :
site_ban_remove_data ( blocker . id , blocked )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_SUCCESS , saved_json )
2025-01-08 16:34:25 +00:00
else : # community ban (community will already known if activity was Announced)
community = community if community else find_actor_or_create ( target , create_if_not_found = False , community_only = True )
if not community :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_IGNORED , saved_json , ' Blocked or unfound community ' )
2025-01-08 16:34:25 +00:00
return
if not community . is_moderator ( blocker ) and not community . is_instance_admin ( blocker ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_FAILURE , saved_json , ' Does not have permission ' )
2025-01-08 16:34:25 +00:00
return
2024-11-30 09:50:14 +13:00
2025-01-08 16:34:25 +00:00
if remove_data :
community_ban_remove_data ( blocker . id , community . id , blocked )
2025-01-08 21:53:26 +00:00
ban_user ( blocker , blocked , community , core_activity )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_SUCCESS , saved_json )
2024-11-20 19:48:38 +00:00
return
2025-01-08 19:23:47 +00:00
if core_activity [ ' type ' ] == ' Undo ' :
2025-01-08 22:58:10 +00:00
if core_activity [ ' object ' ] [ ' type ' ] == ' Follow ' : # Unsubscribe from a community or user
target_ap_id = core_activity [ ' object ' ] [ ' object ' ]
2024-11-22 02:07:22 +00:00
target = find_actor_or_create ( target_ap_id , create_if_not_found = False )
if isinstance ( target , Community ) :
community = target
member = CommunityMember . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
join_request = CommunityJoinRequest . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if member :
db . session . delete ( member )
community . subscriptions_count - = 1
if join_request :
db . session . delete ( join_request )
db . session . commit ( )
cache . delete_memoized ( community_membership , user , community )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UNDO_FOLLOW , APLOG_SUCCESS , saved_json )
2024-11-22 02:07:22 +00:00
return
if isinstance ( target , User ) :
local_user = target
remote_user = user
follower = UserFollower . query . filter_by ( local_user_id = local_user . id , remote_user_id = remote_user . id , is_accepted = True ) . first ( )
if follower :
db . session . delete ( follower )
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UNDO_FOLLOW , APLOG_SUCCESS , saved_json )
2024-11-22 02:07:22 +00:00
return
if not target :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UNDO_FOLLOW , APLOG_FAILURE , saved_json , ' Unfound target ' )
2024-11-22 02:07:22 +00:00
return
2025-01-08 19:23:47 +00:00
if core_activity [ ' object ' ] [ ' type ' ] == ' Delete ' : # Restore something previously deleted
if isinstance ( core_activity [ ' object ' ] [ ' object ' ] , str ) :
ap_id = core_activity [ ' object ' ] [ ' object ' ] # lemmy
2024-11-23 01:29:04 +00:00
else :
2025-01-08 19:23:47 +00:00
ap_id = core_activity [ ' object ' ] [ ' object ' ] [ ' id ' ] # kbin
2024-11-23 01:29:04 +00:00
restorer = user
to_restore = find_liked_object ( ap_id ) # a user or a mod/admin is undoing the delete of a post or reply
if to_restore :
if not to_restore . deleted :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UNDO_DELETE , APLOG_IGNORED , saved_json , ' Activity about local content which is already restored ' )
2023-12-26 21:39:52 +13:00
else :
2025-01-08 19:23:47 +00:00
reason = core_activity [ ' object ' ] [ ' summary ' ] if ' summary ' in core_activity [ ' object ' ] else ' '
restore_post_or_comment ( restorer , to_restore , store_ap_json , request_json , reason )
if not announced :
announce_activity_to_followers ( to_restore . community , user , request_json )
2024-11-23 01:29:04 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UNDO_DELETE , APLOG_FAILURE , saved_json , ' Undo delete: cannot find ' + ap_id )
2024-11-23 01:29:04 +00:00
return
2025-01-08 19:27:46 +00:00
if core_activity [ ' object ' ] [ ' type ' ] == ' Like ' or core_activity [ ' object ' ] [ ' type ' ] == ' Dislike ' : # Undoing an upvote or downvote
2024-11-23 01:36:31 +00:00
post = comment = None
2025-01-08 19:27:46 +00:00
target_ap_id = core_activity [ ' object ' ] [ ' object ' ]
2024-11-23 01:36:31 +00:00
post_or_comment = undo_vote ( comment , post , target_ap_id , user )
if post_or_comment :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UNDO_VOTE , APLOG_SUCCESS , saved_json )
2025-01-08 19:27:46 +00:00
if not announced :
announce_activity_to_followers ( post_or_comment . community , user , request_json )
2024-11-23 01:36:31 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_UNDO_VOTE , APLOG_FAILURE , saved_json , ' Unfound object ' + target_ap_id )
2024-11-23 01:36:31 +00:00
return
2025-01-08 19:49:41 +00:00
if core_activity [ ' object ' ] [ ' type ' ] == ' Lock ' : # Undo of post lock
mod = user
post_id = core_activity [ ' object ' ] [ ' object ' ]
post = Post . query . filter_by ( ap_id = post_id ) . first ( )
reason = core_activity [ ' summary ' ] if ' summary ' in core_activity else ' '
if post :
if post . community . is_moderator ( mod ) or post . community . is_instance_admin ( mod ) :
post . comments_enabled = True
db . session . commit ( )
add_to_modlog_activitypub ( ' unlock_post ' , mod , community_id = post . community . id ,
link_text = shorten_string ( post . title ) , link = f ' post/ { post . id } ' ,
reason = reason )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_LOCK , APLOG_SUCCESS , saved_json )
2025-01-08 19:49:41 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_LOCK , APLOG_FAILURE , saved_json , ' Lock: Does not have permission ' )
2025-01-08 19:49:41 +00:00
else :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_LOCK , APLOG_FAILURE , saved_json , ' Lock: post not found ' )
2025-01-08 19:49:41 +00:00
return
2025-01-08 20:20:18 +00:00
if core_activity [ ' object ' ] [ ' type ' ] == ' Block ' : # Undo of user ban
if announced and store_ap_json :
2025-01-08 22:58:10 +00:00
core_activity [ ' cc ' ] = [ ] # cut very long list of instances
core_activity [ ' object ' ] [ ' cc ' ] = [ ]
2025-01-08 20:20:18 +00:00
2024-11-23 01:39:47 +00:00
unblocker = user
2025-01-08 20:20:18 +00:00
unblocked_ap_id = core_activity [ ' object ' ] [ ' object ' ] . lower ( )
2024-11-23 01:39:47 +00:00
unblocked = User . query . filter_by ( ap_profile_id = unblocked_ap_id ) . first ( )
if not unblocked :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_IGNORED , saved_json , ' Does not exist here ' )
2024-11-23 01:39:47 +00:00
return
2025-01-08 20:20:18 +00:00
# in future, we'll need to know who banned a user, so this activity doesn't unban a user that was bannned by a local admin
2024-11-23 01:39:47 +00:00
# (no removeData field in an undo/ban - cannot restore without knowing if deletion was part of ban, or different moderator action)
2025-01-08 20:20:18 +00:00
target = core_activity [ ' object ' ] [ ' target ' ]
if target . count ( ' / ' ) < 4 : # undo of site ban
if not unblocker . is_instance_admin ( ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_FAILURE , saved_json , ' Does not have permission ' )
2025-01-08 20:20:18 +00:00
return
if unblocked . is_local ( ) :
log_incoming_ap ( id , APLOG_USERBAN , APLOG_MONITOR , request_json , ' Remote Admin in unbanning one of our users from their site ' )
current_app . logger . error ( ' Remote Admin in unbanning one of our users from their site: ' + str ( request_json ) )
return
if unblocked . instance_id != unblocker . instance_id :
log_incoming_ap ( id , APLOG_USERBAN , APLOG_MONITOR , request_json , ' Remote Admin is unbanning a user of a different instance from their site ' )
current_app . logger . error ( ' Remote Admin is unbanning a user of a different instance from their site: ' + str ( request_json ) )
return
unblocked . banned = False
unblocked . banned_until = None
db . session . commit ( )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_SUCCESS , saved_json )
2025-01-08 20:20:18 +00:00
else : # undo community ban (community will already known if activity was Announced)
community = community if community else find_actor_or_create ( target , create_if_not_found = False , community_only = True )
if not community :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_IGNORED , saved_json , ' Blocked or unfound community ' )
2025-01-08 20:20:18 +00:00
return
if not community . is_moderator ( unblocker ) and not community . is_instance_admin ( unblocker ) :
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_FAILURE , saved_json , ' Does not have permission ' )
2025-01-08 20:20:18 +00:00
return
2025-01-08 21:53:26 +00:00
unban_user ( unblocker , unblocked , community , core_activity )
2025-01-08 22:48:57 +00:00
log_incoming_ap ( id , APLOG_USERBAN , APLOG_SUCCESS , saved_json )
2024-11-23 01:39:47 +00:00
return
2024-12-04 16:37:42 +00:00
log_incoming_ap ( id , APLOG_MONITOR , APLOG_PROCESSING , request_json , ' Unmatched activity ' )
2023-12-22 14:05:39 +13:00
2023-12-29 17:32:35 +13:00
@celery.task
def process_delete_request(request_json, store_ap_json):
    """Process a remote user's self-delete activity (runs as a celery task).

    Verifies with the remote server that the account is really gone before
    soft-deleting it locally, to defend against spoofed Delete activities.
    """
    with current_app.app_context():
        # this function processes self-deletes (retain case here, as user_removed_from_remote_server() uses a JSON request)
        saved_json = request_json if store_ap_json else None
        id = request_json['id']
        user_ap_id = request_json['actor']
        user = User.query.filter_by(ap_profile_id=user_ap_id.lower()).first()
        if not user:
            return  # nobody local knows this actor; nothing to do
        # check that the user really has been deleted, to avoid spoofing attacks
        if not user_removed_from_remote_server(user_ap_id, is_piefed=user.instance.software == 'PieFed'):
            log_incoming_ap(id, APLOG_DELETE, APLOG_FAILURE, saved_json, 'User not actually deleted.')
            return
        # soft self-delete
        user.deleted = True
        user.deleted_by = user.id
        db.session.commit()
        log_incoming_ap(id, APLOG_DELETE, APLOG_SUCCESS, saved_json)
        # TODO: acknowledge 'removeData' field from Lemmy
        # TODO: hard-delete in 7 days (should purge avatar and cover images, but keep posts and replies unless already soft-deleted by removeData = True)
2023-12-29 17:32:35 +13:00
2024-01-03 16:29:58 +13:00
def announce_activity_to_followers(community, creator, activity):
    """Wrap `activity` in an Announce from `community` and send it to every
    instance that follows the community.

    Only local communities announce. `activity` is mutated: its '@context' is
    removed because the Announce wrapper supplies its own.
    """
    # avoid announcing activity sent to local users unless it is also in a local community
    if not community.is_local():
        return

    # remove context from what will be inner object.
    # pop() with a default instead of del: some senders omit '@context' on the
    # inner object, and `del` would raise KeyError and abort the announce.
    activity.pop("@context", None)

    announce_activity = {
        '@context': default_context(),
        "actor": community.public_url(),
        "to": [
            "https://www.w3.org/ns/activitystreams#Public"
        ],
        "object": activity,
        "cc": [
            f"{community.public_url()}/followers"
        ],
        "type": "Announce",
        "id": f"https://{current_app.config['SERVER_NAME']}/activities/announce/{gibberish(15)}"
    }
    for instance in community.following_instances(include_dormant=True):
        # awaken dormant instances if they've been sleeping for long enough to be worth trying again
        awaken_dormant_instance(instance)
        # All good? Send!
        if instance and instance.online() and not instance_banned(instance.inbox):
            if creator.instance_id != instance.id:  # don't send it to the instance that hosts the creator as presumably they already have the content
                send_to_remote_instance(instance.id, community.id, announce_activity)
2023-08-10 21:13:37 +12:00
@bp.route('/c/<actor>/outbox', methods=['GET'])
def community_outbox(actor):
    """ActivityPub outbox for a local community: up to 50 posts, sticky posts first.

    Returns 404 for unknown/banned/remote communities (previously fell through
    and returned None, which Flask turned into a 500).
    """
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is None:
        abort(404)
    sticky_posts = community.posts.filter(Post.sticky == True, Post.deleted == False).order_by(desc(Post.posted_at)).limit(50).all()
    remaining_limit = 50 - len(sticky_posts)
    remaining_posts = community.posts.filter(Post.sticky == False, Post.deleted == False).order_by(desc(Post.posted_at)).limit(remaining_limit).all()
    posts = sticky_posts + remaining_posts

    community_data = {
        "@context": default_context(),
        "type": "OrderedCollection",
        "id": f"https://{current_app.config['SERVER_NAME']}/c/{actor}/outbox",
        "totalItems": len(posts),
        "orderedItems": []
    }
    for post in posts:
        community_data['orderedItems'].append(post_to_activity(post, community))

    return jsonify(community_data)
2023-08-05 21:24:10 +12:00
2024-03-24 22:10:41 +00:00
@bp.route('/c/<actor>/featured', methods=['GET'])
def community_featured(actor):
    """ActivityPub 'featured' collection: the community's sticky (pinned) posts.

    Returns 404 for unknown/banned/remote communities (previously fell through
    and returned None, which Flask turned into a 500).
    """
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is None:
        abort(404)
    posts = Post.query.filter_by(community_id=community.id, sticky=True, deleted=False).all()

    community_data = {
        "@context": default_context(),
        "type": "OrderedCollection",
        "id": f"https://{current_app.config['SERVER_NAME']}/c/{actor}/featured",
        "totalItems": len(posts),
        "orderedItems": []
    }
    for post in posts:
        community_data['orderedItems'].append(post_to_page(post))
    return jsonify(community_data)
2023-11-26 23:20:51 +13:00
@bp.route('/c/<actor>/moderators', methods=['GET'])
def community_moderators_route(actor):
    """ActivityPub collection listing a community's moderators by profile ID.

    Returns 404 for unknown/banned/remote communities (previously fell through
    and returned None, which Flask turned into a 500).
    """
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is None:
        abort(404)
    moderator_ids = community_moderators(community.id)
    moderators = User.query.filter(User.id.in_([mod.user_id for mod in moderator_ids])).all()
    community_data = {
        "@context": default_context(),
        "type": "OrderedCollection",
        "id": f"https://{current_app.config['SERVER_NAME']}/c/{actor}/moderators",
        "totalItems": len(moderators),
        "orderedItems": []
    }
    for moderator in moderators:
        community_data['orderedItems'].append(moderator.ap_profile_id)
    return jsonify(community_data)
2023-12-27 14:38:41 +13:00
@bp.route('/c/<actor>/followers', methods=['GET'])
def community_followers(actor):
    """ActivityPub followers collection for a local community.

    Only the follower count is exposed; the items list is deliberately empty.
    """
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is None:
        abort(404)
    result = {
        "@context": default_context(),
        "id": f'https://{current_app.config["SERVER_NAME"]}/c/{actor}/followers',
        "type": "Collection",
        "totalItems": community_members(community.id),
        "items": []
    }
    resp = jsonify(result)
    resp.content_type = 'application/activity+json'
    return resp
2023-12-09 22:14:16 +13:00
2024-04-29 19:47:06 +01:00
@bp.route('/u/<actor>/followers', methods=['GET'])
def user_followers(actor):
    """ActivityPub followers collection for a local user.

    Followers that the user has blocked are excluded from the listing.
    """
    actor = actor.strip()
    user = User.query.filter_by(user_name=actor, banned=False, ap_id=None).first()
    if user is None or not user.ap_followers_url:
        abort(404)
    # Get all followers, except those that are blocked by user by doing an outer join
    followers = User.query.join(UserFollower, User.id == UserFollower.remote_user_id) \
        .outerjoin(UserBlock, (User.id == UserBlock.blocker_id) & (UserFollower.local_user_id == UserBlock.blocked_id)) \
        .filter((UserFollower.local_user_id == user.id) & (UserBlock.id == None)) \
        .all()
    items = [follower.ap_public_url for follower in followers]
    result = {
        "@context": default_context(),
        "id": user.ap_followers_url,
        "type": "Collection",
        "totalItems": len(items),
        "items": items
    }
    resp = jsonify(result)
    resp.content_type = 'application/activity+json'
    return resp
2024-11-18 10:09:57 +13:00
@bp.route('/comment/<int:comment_id>', methods=['GET', 'HEAD'])
def comment_ap(comment_id):
    """Serve a comment as ActivityPub JSON for AP clients, or HTML otherwise."""
    reply = PostReply.query.get_or_404(comment_id)
    if not is_activitypub_request():
        # ordinary browser: render the discussion thread around this comment
        return continue_discussion(reply.post.id, comment_id)
    # HEAD requests get an empty body; GET gets the full JSON representation
    reply_data = [] if request.method != 'GET' else comment_model_to_json(reply)
    resp = jsonify(reply_data)
    resp.content_type = 'application/activity+json'
    resp.headers.set('Vary', 'Accept')
    resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/comment/{reply.id}>; rel="alternate"; type="text/html"')
    return resp
2023-12-09 22:14:16 +13:00
2024-11-18 10:09:57 +13:00
@bp.route('/post/<int:post_id>/', methods=['GET', 'HEAD'])
def post_ap2(post_id):
    # Canonicalise the trailing-slash variant of a post URL by redirecting to post_ap.
    return redirect(url_for('activitypub.post_ap', post_id=post_id))
2024-11-18 10:09:57 +13:00
@bp.route('/post/<int:post_id>', methods=['GET', 'HEAD', 'POST'])
def post_ap(post_id):
    """Serve a post as ActivityPub JSON for AP requests; otherwise render HTML."""
    if request.method in ('GET', 'HEAD') and is_activitypub_request():
        post = Post.query.get_or_404(post_id)
        if request.method == 'HEAD':
            post_data = []  # HEAD: headers only, empty body
        else:
            post_data = post_to_page(post)
            post_data['@context'] = default_context()
        resp = jsonify(post_data)
        resp.content_type = 'application/activity+json'
        resp.headers.set('Vary', 'Accept')
        resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/post/{post.id}>; rel="alternate"; type="text/html"')
        return resp
    else:
        # POSTs and non-AP requests fall through to the normal HTML view
        return show_post(post_id)
2023-12-22 14:05:39 +13:00
@bp.route('/activities/<type>/<id>')
@cache.cached(timeout=600)
def activities_json(type, id):
    """Return the stored JSON of a previously logged activity, by type and id."""
    activity = ActivityPubLog.query.filter_by(activity_id=f"https://{current_app.config['SERVER_NAME']}/activities/{type}/{id}").first()
    if not activity:
        abort(404)
    # activity_json may be NULL in the log table; serve an empty object then
    payload = {} if activity.activity_json is None else json.loads(activity.activity_json)
    resp = jsonify(payload)
    resp.content_type = 'application/activity+json'
    return resp
2024-04-20 20:46:51 +12:00
# Other instances can query the result of their POST to the inbox by using this endpoint. The ID of the activity they
# sent (minus the https:// on the front) is the id parameter. e.g. https://piefed.ngrok.app/activity_result/piefed.ngrok.app/activities/announce/EfjyZ3BE5SzQK0C
@bp.route('/activity_result/<path:id>')
def activity_result(id):
    """Report the processing outcome of a previously received activity."""
    activity = ActivityPubLog.query.filter_by(activity_id=f'https://{id}').first()
    if activity is None:
        abort(404)
    if activity.result == 'success':
        return jsonify('Ok')
    return jsonify({'error': activity.result, 'message': activity.exception_message})
2024-11-18 22:05:25 +00:00
2025-01-08 18:57:50 +00:00
def process_new_content(user, community, store_ap_json, request_json, announced):
    """Handle an incoming Create/Update activity for a post or a reply.

    `announced` is True when the activity arrived wrapped in an Announce from a
    community, in which case the real activity sits one level deeper in
    request_json['object']. Whether the object is a post or a reply is decided
    by the presence of 'inReplyTo'. Every path logs its outcome via
    log_incoming_ap and, for locally-originated communities, re-announces the
    activity to followers when it was not itself an Announce.
    """
    saved_json = request_json if store_ap_json else None
    id = request_json['id']
    if not announced:
        in_reply_to = request_json['object']['inReplyTo'] if 'inReplyTo' in request_json['object'] else None
        ap_id = request_json['object']['id']
        announce_id = None
        activity_json = request_json
    else:
        in_reply_to = request_json['object']['object']['inReplyTo'] if 'inReplyTo' in request_json['object']['object'] else None
        ap_id = request_json['object']['object']['id']
        announce_id = shorten_string(request_json['id'], 100)
        activity_json = request_json['object']

    # announce / create IDs that are too long will crash the app. Not referred to again, so it shouldn't matter if they're truncated
    activity_json['id'] = shorten_string(activity_json['id'], 100)

    if not in_reply_to:  # Creating a new post
        post = Post.query.filter_by(ap_id=ap_id).first()
        if post:
            # object already exists locally: a Create is stale, an Update is an edit
            if activity_json['type'] == 'Create':
                log_incoming_ap(id, APLOG_CREATE, APLOG_FAILURE, saved_json, 'Create processed after Update')
                return
            if user.id == post.user_id:  # only the author may edit
                update_post_from_activity(post, activity_json)
                log_incoming_ap(id, APLOG_UPDATE, APLOG_SUCCESS, saved_json)
                if not announced:
                    announce_activity_to_followers(post.community, post.author, request_json)
                return
            else:
                log_incoming_ap(id, APLOG_UPDATE, APLOG_FAILURE, saved_json, 'Edit attempt denied')
                return
        else:
            if can_create_post(user, community):
                try:
                    post = create_post(store_ap_json, community, activity_json, user, announce_id=announce_id)
                    if post:
                        log_incoming_ap(id, APLOG_CREATE, APLOG_SUCCESS, saved_json)
                        if not announced:
                            announce_activity_to_followers(community, user, request_json)
                    return
                except TypeError as e:
                    # malformed remote JSON; keep the raw activity in the app log for diagnosis
                    current_app.logger.error('TypeError: ' + str(request_json))
                    log_incoming_ap(id, APLOG_CREATE, APLOG_FAILURE, saved_json, 'TypeError. See log file.')
                    return
            else:
                log_incoming_ap(id, APLOG_CREATE, APLOG_FAILURE, saved_json, 'User cannot create post in Community')
                return
    else:  # Creating a reply / comment
        reply = PostReply.query.filter_by(ap_id=ap_id).first()
        if reply:
            # object already exists locally: a Create is stale, an Update is an edit
            if activity_json['type'] == 'Create':
                log_incoming_ap(id, APLOG_CREATE, APLOG_FAILURE, saved_json, 'Create processed after Update')
                return
            if user.id == reply.user_id:  # only the author may edit
                update_post_reply_from_activity(reply, activity_json)
                log_incoming_ap(id, APLOG_UPDATE, APLOG_SUCCESS, saved_json)
                if not announced:
                    announce_activity_to_followers(reply.community, reply.author, request_json)
                return
            else:
                log_incoming_ap(id, APLOG_UPDATE, APLOG_FAILURE, saved_json, 'Edit attempt denied')
                return
        else:
            if can_create_post_reply(user, community):
                try:
                    reply = create_post_reply(store_ap_json, community, in_reply_to, activity_json, user, announce_id=announce_id)
                    if reply:
                        log_incoming_ap(id, APLOG_CREATE, APLOG_SUCCESS, saved_json)
                        if not announced:
                            announce_activity_to_followers(community, user, request_json)
                    return
                except TypeError as e:
                    # malformed remote JSON; keep the raw activity in the app log for diagnosis
                    current_app.logger.error('TypeError: ' + str(request_json))
                    log_incoming_ap(id, APLOG_CREATE, APLOG_FAILURE, saved_json, 'TypeError. See log file.')
                    return
            else:
                log_incoming_ap(id, APLOG_CREATE, APLOG_FAILURE, saved_json, 'User cannot create reply in Community')
                return
2024-11-19 11:51:29 +00:00
2025-01-07 13:53:52 +00:00
def process_upvote(user, store_ap_json, request_json, announced):
    """Apply an incoming Like activity as an upvote on a post or reply."""
    saved_json = request_json if store_ap_json else None
    id = request_json['id']
    # when Announced, the Like sits one level deeper in the JSON
    target = request_json['object']['object'] if announced else request_json['object']
    # some platforms send the whole object rather than just its id string
    if isinstance(target, dict) and 'id' in target:
        target = target['id']
    liked = find_liked_object(target)
    if liked is None:
        log_incoming_ap(id, APLOG_LIKE, APLOG_FAILURE, saved_json, 'Unfound object ' + target)
        return
    if not can_upvote(user, liked.community):
        log_incoming_ap(id, APLOG_LIKE, APLOG_IGNORED, saved_json, 'Cannot upvote this')
        return
    if isinstance(liked, (Post, PostReply)):
        liked.vote(user, 'upvote')
        log_incoming_ap(id, APLOG_LIKE, APLOG_SUCCESS, saved_json)
        if not announced:
            announce_activity_to_followers(liked.community, user, request_json)
2025-01-07 14:14:50 +00:00
def process_downvote(user, store_ap_json, request_json, announced):
    """Apply an incoming Dislike activity as a downvote on a post or reply.

    `announced` is True when the Dislike arrived wrapped in an Announce, in
    which case the voted-on object's ap_id sits one level deeper in the JSON.
    """
    saved_json = request_json if store_ap_json else None
    id = request_json['id']
    ap_id = request_json['object'] if not announced else request_json['object']['object']
    # some platforms send the whole object rather than just its id string
    if isinstance(ap_id, dict) and 'id' in ap_id:
        ap_id = ap_id['id']
    liked = find_liked_object(ap_id)
    if liked is None:
        log_incoming_ap(id, APLOG_DISLIKE, APLOG_FAILURE, saved_json, 'Unfound object ' + ap_id)
        return
    if can_downvote(user, liked.community):
        if isinstance(liked, (Post, PostReply)):
            liked.vote(user, 'downvote')
            log_incoming_ap(id, APLOG_DISLIKE, APLOG_SUCCESS, saved_json)
            if not announced:
                # relay locally-received activity to the community's followers
                announce_activity_to_followers(liked.community, user, request_json)
    else:
        log_incoming_ap(id, APLOG_DISLIKE, APLOG_IGNORED, saved_json, 'Cannot downvote this')