2024-08-20 08:55:05 +12:00
from datetime import timedelta
from random import randint
2024-04-04 21:36:03 +13:00
2024-08-20 07:03:08 +12:00
from flask import request , current_app , abort , jsonify , json , g , url_for , redirect , make_response
2024-01-09 20:44:08 +13:00
from flask_login import current_user
2024-08-20 07:03:08 +12:00
from sqlalchemy import desc , or_
import werkzeug . exceptions
2023-12-30 13:23:12 +13:00
2023-12-24 13:28:41 +13:00
from app import db , constants , cache , celery
2023-08-05 21:24:10 +12:00
from app . activitypub import bp
2023-08-29 22:01:06 +12:00
2024-11-18 18:44:28 +00:00
from app . activitypub . signature import HttpSignature , post_request , VerificationError , default_context , LDSignature
2023-08-29 22:01:06 +12:00
from app . community . routes import show_community
2024-01-03 16:29:58 +13:00
from app . community . util import send_to_remote_instance
2023-12-09 22:14:16 +13:00
from app . post . routes import continue_discussion , show_post
2023-10-07 21:32:19 +13:00
from app . user . routes import show_profile
2024-11-18 16:53:32 +00:00
from app . constants import *
2023-09-10 20:20:53 +12:00
from app . models import User , Community , CommunityJoinRequest , CommunityMember , CommunityBan , ActivityPubLog , Post , \
2024-02-17 20:05:57 +13:00
PostReply , Instance , PostVote , PostReplyVote , File , AllowedInstances , BannedInstances , utcnow , Site , Notification , \
2024-05-31 22:06:34 +01:00
ChatMessage , Conversation , UserFollower , UserBlock , Poll , PollChoice
2023-08-10 21:13:37 +12:00
from app . activitypub . util import public_key , users_total , active_half_year , active_month , local_posts , local_comments , \
2024-05-04 21:16:55 +01:00
post_to_activity , find_actor_or_create , instance_blocked , find_reply_parent , find_liked_object , \
2024-09-13 16:39:42 +12:00
lemmy_site_data , is_activitypub_request , delete_post_or_comment , community_members , \
2023-12-30 13:23:12 +13:00
user_removed_from_remote_server , create_post , create_post_reply , update_post_reply_from_activity , \
2024-04-06 16:29:47 +13:00
update_post_from_activity , undo_vote , undo_downvote , post_to_page , get_redis_connection , find_reported_object , \
2024-05-31 22:12:49 +01:00
process_report , ensure_domains_match , can_edit , can_delete , remove_data_from_banned_user , resolve_remote_post , \
2024-09-01 22:42:43 +00:00
inform_followers_of_post_update , comment_model_to_json , restore_post_or_comment , ban_local_user , unban_local_user , \
2024-11-24 21:01:34 +00:00
lock_post , log_incoming_ap , find_community_ap_id , site_ban_remove_data , community_ban_remove_data
2024-09-13 16:39:42 +12:00
from app . utils import gibberish , get_setting , render_template , \
community_membership , ap_datetime , ip_address , can_downvote , \
2024-03-13 16:40:20 +13:00
can_upvote , can_create_post , awaken_dormant_instance , shorten_string , can_create_post_reply , sha256_digest , \
2024-11-18 22:05:25 +00:00
community_moderators , markdown_to_html , html_to_text
2023-08-05 21:24:10 +12:00
2024-04-04 21:36:03 +13:00
@bp.route('/testredis')
def testredis_get():
    """Health-check endpoint: verify that Redis can round-trip a value."""
    client = get_redis_connection()
    client.set("cowbell", "1", ex=600)  # short TTL; key is only a probe
    return "Redis: OK" if client.get('cowbell') is not None else "Redis: FAIL"
2023-08-05 21:24:10 +12:00
@bp.route('/.well-known/webfinger')
def webfinger():
    """WebFinger endpoint (RFC 7033).

    Resolves the ``resource`` query parameter — either an ``acct:user@host``
    URI or a profile URL — to a local user (Person) or community (Group) and
    returns a JRD document. Responds 404 when no ``resource`` is supplied and
    an empty 200 body when nothing matches.
    """
    if request.args.get('resource'):
        query = request.args.get('resource')  # acct:alice@tada.club
        if 'acct:' in query:
            actor = query.split(':')[1].split('@')[0]  # alice
        elif 'https:' in query or 'http:' in query:
            actor = query.split('/')[-1]
        else:
            return 'Webfinger regex failed to match'

        # special case: instance actor
        if actor == current_app.config['SERVER_NAME']:
            webfinger_data = {
                "subject": f"acct:{actor}@{current_app.config['SERVER_NAME']}",
                "aliases": [f"https://{current_app.config['SERVER_NAME']}/actor"],
                "links": [
                    {
                        "rel": "http://webfinger.net/rel/profile-page",
                        "type": "text/html",
                        "href": f"https://{current_app.config['SERVER_NAME']}/about"
                    },
                    {
                        "rel": "self",
                        "type": "application/activity+json",
                        "href": f"https://{current_app.config['SERVER_NAME']}/actor",
                    }
                ]
            }
            resp = jsonify(webfinger_data)
            resp.headers.add_header('Access-Control-Allow-Origin', '*')
            return resp

        # Renamed locals: 'separator' (was misspelled 'seperator') and
        # 'actor_type' (was 'type', which shadowed the builtin).
        separator = 'u'
        actor_type = 'Person'

        # Alt (anonymous) user names resolve the same as the main name.
        user = User.query.filter(or_(User.user_name == actor.strip(), User.alt_user_name == actor.strip())).filter_by(deleted=False, banned=False, ap_id=None).first()
        if user is None:
            community = Community.query.filter_by(name=actor.strip(), ap_id=None).first()
            if community is None:
                return ''
            separator = 'c'
            actor_type = 'Group'
        webfinger_data = {
            "subject": f"acct:{actor}@{current_app.config['SERVER_NAME']}",
            "aliases": [f"https://{current_app.config['SERVER_NAME']}/{separator}/{actor}"],
            "links": [
                {
                    "rel": "http://webfinger.net/rel/profile-page",
                    "type": "text/html",
                    "href": f"https://{current_app.config['SERVER_NAME']}/{separator}/{actor}"
                },
                {
                    "rel": "self",
                    "type": "application/activity+json",
                    "href": f"https://{current_app.config['SERVER_NAME']}/{separator}/{actor}",
                    "properties": {
                        "https://www.w3.org/ns/activitystreams#type": actor_type
                    }
                }
            ]
        }
        resp = jsonify(webfinger_data)
        resp.headers.add_header('Access-Control-Allow-Origin', '*')
        return resp
    else:
        abort(404)
@bp.route('/.well-known/nodeinfo')
@cache.cached(timeout=600)
def nodeinfo():
    """NodeInfo discovery document pointing at the 2.0 schema endpoint."""
    server = current_app.config['SERVER_NAME']
    links = [
        {"rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
         "href": f"https://{server}/nodeinfo/2.0"},
        {"rel": "https://www.w3.org/ns/activitystreams#Application",
         "href": f"https://{server}"},
    ]
    return jsonify({"links": links})
2024-02-14 10:16:49 +13:00
@bp.route('/.well-known/host-meta')
@cache.cached(timeout=600)
def host_meta():
    """XRD host-meta document advertising the WebFinger lookup template."""
    server = current_app.config["SERVER_NAME"]
    xrd = (
        '<?xml version="1.0" encoding="UTF-8"?>\n'
        '<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">\n'
        f'<Link rel="lrdd" template="https://{server}/.well-known/webfinger?resource={{uri}}"/>\n'
        '</XRD>'
    )
    resp = make_response(xrd)
    resp.content_type = 'application/xrd+xml; charset=utf-8'
    return resp
2023-08-05 21:24:10 +12:00
@bp.route('/nodeinfo/2.0')
@bp.route('/nodeinfo/2.0.json')
@cache.cached(timeout=600)
def nodeinfo2():
    """NodeInfo 2.0 document: software, protocols and usage statistics."""
    usage = {
        "users": {
            "total": users_total(),
            "activeHalfyear": active_half_year(),
            "activeMonth": active_month()
        },
        "localPosts": local_posts(),
        "localComments": local_comments()
    }
    nodeinfo_data = {
        "version": "2.0",
        "software": {
            "name": "PieFed",
            "version": "0.1"
        },
        "protocols": ["activitypub"],
        "usage": usage,
        # Registrations count as open unless the site is fully closed.
        "openRegistrations": g.site.registration_mode != 'Closed'
    }
    return jsonify(nodeinfo_data)
2024-04-03 16:35:26 +13:00
@bp.route('/api/v1/instance')
@cache.cached(timeout=600)
def api_v1_instance():
    """Mastodon-compatible v1 instance metadata."""
    stats = {
        "user_count": users_total(),
        "status_count": local_posts() + local_comments(),
        "domain_count": 1
    }
    return jsonify({
        'title': g.site.name,
        'uri': current_app.config['SERVER_NAME'],
        'stats': stats,
        'registrations': g.site.registration_mode != 'Closed',
        'approval_required': g.site.registration_mode == 'RequireApplication'
    })
2024-02-25 16:24:50 +13:00
@bp.route('/api/v1/instance/domain_blocks')
@cache.cached(timeout=600)
def domain_blocks():
    """Mastodon-compatible list of blocked instances.

    Returns an empty list in allowlist mode (the block list is meaningless
    there); otherwise one entry per banned instance.
    """
    if get_setting('use_allowlist', False):
        return jsonify([])
    return jsonify([
        {
            'domain': banned.domain,
            'digest': sha256_digest(banned.domain),
            'severity': 'suspend',
            'comment': banned.reason if banned.reason else ''
        }
        for banned in BannedInstances.query.all()
    ])
2023-11-23 15:10:44 +13:00
@bp.route('/api/v3/site')
@cache.cached(timeout=600)
def lemmy_site():
    # Lemmy-compatible site metadata; the payload is assembled by
    # lemmy_site_data() in app.activitypub.util.
    return jsonify(lemmy_site_data())
@bp.route('/api/v3/federated_instances')
@cache.cached(timeout=600)
def lemmy_federated_instances():
    """Lemmy-compatible list of federated instances.

    Returns three lists: 'linked' (known instances that are not blocked),
    'allowed' and 'blocked'. Instance id 1 is this instance itself and is
    excluded from 'linked'.
    """
    instances = Instance.query.filter(Instance.id != 1).all()
    linked = []
    allowed = []
    blocked = []
    for instance in AllowedInstances.query.all():
        allowed.append({"id": instance.id, "domain": instance.domain, "published": utcnow(), "updated": utcnow()})
    for instance in BannedInstances.query.all():
        blocked.append({"id": instance.id, "domain": instance.domain, "published": utcnow(), "updated": utcnow()})
    # Build the blocked-domain set once, instead of scanning the blocked list
    # with any() for every known instance (was O(n*m)).
    blocked_domains = {entry["domain"] for entry in blocked}
    for instance in instances:
        instance_data = {"id": instance.id, "domain": instance.domain,
                         "published": instance.created_at.isoformat(),
                         "updated": instance.updated_at.isoformat()}
        if instance.software:
            instance_data['software'] = instance.software
        if instance.version:
            instance_data['version'] = instance.version
        if instance.domain not in blocked_domains:
            linked.append(instance_data)
    return jsonify({
        "federated_instances": {
            "linked": linked,
            "allowed": allowed,
            "blocked": blocked
        }
    })
2023-12-29 17:32:35 +13:00
@bp.route('/u/<actor>', methods=['GET', 'HEAD'])
def user_profile(actor):
    """ Requests to this endpoint can be for a JSON representation of the user, or a HTML rendering of their profile.
        The two types of requests are differentiated by the header """
    actor = actor.strip()

    # admins can view deleted accounts
    if current_user.is_authenticated and current_user.is_admin():
        if '@' in actor:
            # 'name@host' form -> remote user looked up by ap_id
            user: User = User.query.filter_by(ap_id=actor.lower()).first()
        else:
            # Local user: match main or anonymous alternative user name.
            user: User = User.query.filter(or_(User.user_name == actor, User.alt_user_name == actor)).filter_by(ap_id=None).first()
            if user is None:
                # Fall back to a case-insensitive match via the profile URL.
                user = User.query.filter_by(ap_profile_id=f'https://{current_app.config["SERVER_NAME"]}/u/{actor.lower()}', deleted=False, ap_id=None).first()
    else:
        # Non-admins never see deleted (or, for remote, banned) accounts.
        if '@' in actor:
            user: User = User.query.filter_by(ap_id=actor.lower(), deleted=False, banned=False).first()
        else:
            user: User = User.query.filter(or_(User.user_name == actor, User.alt_user_name == actor)).filter_by(deleted=False, ap_id=None).first()
            if user is None:
                user = User.query.filter_by(ap_profile_id=f'https://{current_app.config["SERVER_NAME"]}/u/{actor.lower()}', deleted=False, ap_id=None).first()

    if user is not None:
        # main_user_name is False when the request addressed the user's
        # anonymous alternative identity; details are masked below.
        main_user_name = True
        if user.alt_user_name == actor:
            main_user_name = False
        if request.method == 'HEAD':
            # HEAD: only the content type matters, no body.
            if is_activitypub_request():
                resp = jsonify('')
                resp.content_type = 'application/activity+json'
                return resp
            else:
                return ''
        if is_activitypub_request():
            # Build the ActivityPub actor document for this user.
            server = current_app.config['SERVER_NAME']
            actor_data = {"@context": default_context(),
                          "type": "Person" if not user.bot else "Service",
                          "id": user.public_url(main_user_name),
                          "preferredUsername": actor,
                          "name": user.title if user.title else user.user_name,
                          "inbox": f"{user.public_url(main_user_name)}/inbox",
                          "outbox": f"{user.public_url(main_user_name)}/outbox",
                          "discoverable": user.searchable,
                          "indexable": user.indexable,
                          "manuallyApprovesFollowers": False if not user.ap_manually_approves_followers else user.ap_manually_approves_followers,
                          "publicKey": {
                              "id": f"{user.public_url(main_user_name)}#main-key",
                              "owner": user.public_url(main_user_name),
                              "publicKeyPem": user.public_key
                          },
                          "endpoints": {
                              "sharedInbox": f"https://{server}/inbox"
                          },
                          "published": ap_datetime(user.created),
                          }
            if not main_user_name:
                # Anonymous alt account: hide the real name and fuzz the
                # creation date by up to 30 days (2592000 minutes range,
                # negative offset only) so it can't be correlated.
                actor_data['name'] = 'Anonymous'
                actor_data['published'] = ap_datetime(user.created + timedelta(minutes=randint(-2592000, 0)))
                actor_data['summary'] = '<p>This is an anonymous alternative account of another account. It has been generated automatically for a Piefed user who chose to keep their interactions private. They cannot reply to your messages using this account, but only upvote (like) or downvote (dislike). For more information about Piefed and this feature see <a href="https://piefed.social/post/205362">https://piefed.social/post/205362</a>.</p>'
            # Profile imagery and personal details only for the main identity.
            if user.avatar_id is not None and main_user_name:
                actor_data["icon"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{user.avatar_image()}"
                }
            if user.cover_id is not None and main_user_name:
                actor_data["image"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{user.cover_image()}"
                }
            if user.about_html and main_user_name:
                actor_data['summary'] = user.about_html
                actor_data['source'] = {'content': user.about, 'mediaType': 'text/markdown'}
            if user.matrix_user_id and main_user_name:
                actor_data['matrixUserId'] = user.matrix_user_id
            resp = jsonify(actor_data)
            resp.content_type = 'application/activity+json'
            # Advertise the HTML rendering as an alternate representation.
            resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/u/{actor}>; rel="alternate"; type="text/html"')
            return resp
        else:
            # Browser request: render HTML, but never the alt identity's page.
            if main_user_name:
                return show_profile(user)
            else:
                return render_template('errors/alt_profile.html')
    else:
        abort(404)
2023-08-10 21:13:37 +12:00
2023-12-22 15:34:45 +13:00
@bp.route('/u/<actor>/outbox', methods=['GET'])
def user_outbox(actor):
    """Return an (always empty) ActivityPub outbox collection for a user."""
    server = current_app.config['SERVER_NAME']
    empty_collection = {
        "@context": default_context(),
        'type': 'OrderedCollection',
        'id': f"https://{server}/u/{actor}/outbox",
        'orderedItems': [],
        'totalItems': 0
    }
    response = jsonify(empty_collection)
    response.content_type = 'application/activity+json'
    return response
2023-08-10 21:13:37 +12:00
@bp.route('/c/<actor>', methods=['GET'])
def community_profile(actor):
    """ Requests to this endpoint can be for a JSON representation of the community, or a HTML rendering of it.
        The two types of requests are differentiated by the header """
    actor = actor.strip()
    if '@' in actor:
        # don't provide activitypub info for remote communities
        if 'application/ld+json' in request.headers.get('Accept', '') or 'application/activity+json' in request.headers.get('Accept', ''):
            abort(400)
        community: Community = Community.query.filter_by(ap_id=actor.lower(), banned=False).first()
    else:
        # Local community looked up by name (ap_id is NULL for local rows).
        community: Community = Community.query.filter_by(name=actor, ap_id=None).first()
    if community is not None:
        if is_activitypub_request():
            # Build the ActivityPub Group actor document for this community.
            server = current_app.config['SERVER_NAME']
            actor_data = {"@context": default_context(),
                          "type": "Group",
                          "id": f"https://{server}/c/{actor}",
                          "name": community.title,
                          "sensitive": True if community.nsfw or community.nsfl else False,
                          "preferredUsername": actor,
                          "inbox": f"https://{server}/c/{actor}/inbox",
                          "outbox": f"https://{server}/c/{actor}/outbox",
                          "followers": f"https://{server}/c/{actor}/followers",
                          "moderators": f"https://{server}/c/{actor}/moderators",
                          "featured": f"https://{server}/c/{actor}/featured",
                          # Moderators collection doubles as the attribution target.
                          "attributedTo": f"https://{server}/c/{actor}/moderators",
                          "postingRestrictedToMods": community.restricted_to_mods or community.local_only,
                          "newModsWanted": community.new_mods_wanted,
                          "privateMods": community.private_mods,
                          "url": f"https://{server}/c/{actor}",
                          "publicKey": {
                              "id": f"https://{server}/c/{actor}#main-key",
                              "owner": f"https://{server}/c/{actor}",
                              "publicKeyPem": community.public_key
                          },
                          "endpoints": {
                              "sharedInbox": f"https://{server}/inbox"
                          },
                          "published": ap_datetime(community.created_at),
                          "updated": ap_datetime(community.last_active),
                          }
            if community.description_html:
                # HTML summary plus the markdown source, per FEP-c064 style.
                actor_data["summary"] = community.description_html
                actor_data['source'] = {'content': community.description, 'mediaType': 'text/markdown'}
            if community.icon_id is not None:
                actor_data["icon"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{community.icon_image()}"
                }
            if community.image_id is not None:
                actor_data["image"] = {
                    "type": "Image",
                    "url": f"https://{current_app.config['SERVER_NAME']}{community.header_image()}"
                }
            resp = jsonify(actor_data)
            resp.content_type = 'application/activity+json'
            # Advertise the HTML rendering as an alternate representation.
            resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/c/{actor}>; rel="alternate"; type="text/html"')
            return resp
        else:  # browser request - return html
            return show_community(community)
    else:
        abort(404)
2023-08-10 21:13:37 +12:00
2024-11-18 16:53:32 +00:00
@bp.route('/inbox', methods=['POST'])
def shared_inbox():
    """Shared ActivityPub inbox: validate, de-duplicate and signature-check an
    incoming activity, then hand it to a background task for processing.

    Most invalid/ignored activities return 200 with an empty body so the
    sender does not keep retrying; only signature failures return 400.
    """
    try:
        request_json = request.get_json(force=True)
    except werkzeug.exceptions.BadRequest as e:
        log_incoming_ap('', APLOG_NOTYPE, APLOG_FAILURE, None, 'Unable to parse json body: ' + e.description)
        return '', 200

    g.site = Site.query.get(1)  # g.site is not initialized by @app.before_request when request.path == '/inbox'
    store_ap_json = g.site.log_activitypub_json  # whether to keep raw JSON in the AP log

    if not 'id' in request_json or not 'type' in request_json or not 'actor' in request_json or not 'object' in request_json:
        log_incoming_ap('', APLOG_NOTYPE, APLOG_FAILURE, request_json if store_ap_json else None, 'Missing minimum expected fields in JSON')
        return '', 200

    id = request_json['id']
    if request_json['type'] == 'Announce' and isinstance(request_json['object'], dict):
        # Announce with an inline object: validate the inner activity too.
        object = request_json['object']
        if not 'id' in object or not 'type' in object or not 'actor' in object or not 'object' in object:
            if 'type' in object and (object['type'] == 'Page' or object['type'] == 'Note'):
                log_incoming_ap(request_json['id'], APLOG_ANNOUNCE, APLOG_IGNORED, request_json if store_ap_json else None, 'Intended for Mastodon')
            else:
                log_incoming_ap(request_json['id'], APLOG_ANNOUNCE, APLOG_FAILURE, request_json if store_ap_json else None, 'Missing minimum expected fields in JSON Announce object')
            return '', 200

        if object['actor'].startswith('https://' + current_app.config['SERVER_NAME']):
            # Announce of our own content bounced back to us — nothing to do.
            log_incoming_ap(object['id'], APLOG_DUPLICATE, APLOG_IGNORED, request_json if store_ap_json else None, 'Activity about local content which is already present')
            return '', 200

    redis_client = get_redis_connection()
    if redis_client.exists(id):  # Something is sending same activity multiple times
        log_incoming_ap(id, APLOG_DUPLICATE, APLOG_IGNORED, request_json if store_ap_json else None, 'Unnecessary retry attempt')
        return '', 200
    redis_client.set(id, 1, ex=90)  # Save the activity ID into redis, to avoid duplicate activities

    # Ignore unutilised PeerTube activity
    if request_json['actor'].endswith('accounts/peertube'):
        log_incoming_ap(request_json['id'], APLOG_PT_VIEW, APLOG_IGNORED, request_json if store_ap_json else None, 'PeerTube View or CacheFile activity')
        return ''

    # Ignore account deletion requests from users that do not already exist here
    account_deletion = False
    if (request_json['type'] == 'Delete' and
            'object' in request_json and isinstance(request_json['object'], str) and
            request_json['actor'] == request_json['object']):
        # Self-delete: actor == object. Only look up, never create.
        account_deletion = True
        actor = User.query.filter_by(ap_profile_id=request_json['actor'].lower()).first()
        if not actor:
            log_incoming_ap(request_json['id'], APLOG_DELETE, APLOG_IGNORED, request_json if store_ap_json else None, 'Does not exist here')
            return '', 200
    else:
        actor = find_actor_or_create(request_json['actor'])

    if not actor:
        actor_name = request_json['actor']
        log_incoming_ap(request_json['id'], APLOG_NOTYPE, APLOG_FAILURE, request_json if store_ap_json else None, f'Actor could not be found 1: {actor_name}')
        return '', 200

    if actor.is_local():  # should be impossible (can be Announced back, but not sent without access to privkey)
        log_incoming_ap(request_json['id'], APLOG_NOTYPE, APLOG_FAILURE, request_json if store_ap_json else None, 'ActivityPub activity from a local actor')
        return '', 200

    # Any inbound activity proves the sending instance is alive again.
    actor.instance.last_seen = utcnow()
    actor.instance.dormant = False
    actor.instance.gone_forever = False
    actor.instance.failures = 0
    actor.instance.ip_address = ip_address()
    db.session.commit()

    try:
        HttpSignature.verify_request(request, actor.public_key, skip_date=True)
    except VerificationError as e:
        if not 'signature' in request_json:
            log_incoming_ap(request_json['id'], APLOG_NOTYPE, APLOG_FAILURE, request_json if store_ap_json else None, 'Could not verify HTTP signature: ' + str(e))
            return '', 400
        # HTTP sig will fail if a.gup.pe or PeerTube have bounced a request, so check LD sig instead
        try:
            LDSignature.verify_signature(request_json, actor.public_key)
        except VerificationError as e:
            log_incoming_ap(request_json['id'], APLOG_NOTYPE, APLOG_FAILURE, request_json if store_ap_json else None, 'Could not verify LD signature: ' + str(e))
            return '', 400

    # When a user is deleted, the only way to be fairly sure they get deleted everywhere is to tell the whole fediverse.
    # Earlier check means this is only for users that already exist, processing it here means that http signature will have been verified
    if account_deletion == True:
        # In debug mode run synchronously; otherwise dispatch to celery.
        if current_app.debug:
            process_delete_request(request_json, store_ap_json)
        else:
            process_delete_request.delay(request_json, store_ap_json)
        return ''

    if current_app.debug:
        process_inbox_request(request_json, store_ap_json)
    else:
        process_inbox_request.delay(request_json, store_ap_json)

    return ''
2024-11-24 21:49:08 +00:00
@bp.route('/site_inbox', methods=['POST'])
def site_inbox():
    # Instance-actor inbox: handled identically to the shared inbox.
    return shared_inbox()
2024-11-24 21:49:08 +00:00
@bp.route('/u/<actor>/inbox', methods=['POST'])
def user_inbox(actor):
    # Per-user inbox: the actor in the URL is ignored; delivery is handled
    # identically to the shared inbox.
    return shared_inbox()
@bp.route('/c/<actor>/inbox', methods=['POST'])
def community_inbox(actor):
    # Per-community inbox: the actor in the URL is ignored; delivery is
    # handled identically to the shared inbox.
    return shared_inbox()
2024-11-24 22:01:11 +00:00
def replay_inbox_request(request_json):
    """Re-run a previously received inbox activity (e.g. from the AP log).

    Mirrors the validation in shared_inbox() but skips signature verification,
    redis de-duplication and instance bookkeeping, processes synchronously,
    and always stores the raw JSON when logging.
    """
    if not 'id' in request_json or not 'type' in request_json or not 'actor' in request_json or not 'object' in request_json:
        log_incoming_ap('', APLOG_NOTYPE, APLOG_FAILURE, request_json, 'REPLAY: Missing minimum expected fields in JSON')
        return

    if request_json['type'] == 'Announce' and isinstance(request_json['object'], dict):
        object = request_json['object']
        if not 'id' in object or not 'type' in object or not 'actor' in object or not 'object' in object:
            if 'type' in object and (object['type'] == 'Page' or object['type'] == 'Note'):
                log_incoming_ap(request_json['id'], APLOG_ANNOUNCE, APLOG_IGNORED, request_json, 'REPLAY: Intended for Mastodon')
            else:
                log_incoming_ap(request_json['id'], APLOG_ANNOUNCE, APLOG_FAILURE, request_json, 'REPLAY: Missing minimum expected fields in JSON Announce object')
            return

        if object['actor'].startswith('https://' + current_app.config['SERVER_NAME']):
            # Bug fix: the original referenced the undefined name
            # 'store_ap_json' here (a NameError copied from shared_inbox);
            # replay always stores the JSON, like every other log call in
            # this function. Also added the 'REPLAY:' prefix for consistency.
            log_incoming_ap(object['id'], APLOG_DUPLICATE, APLOG_IGNORED, request_json, 'REPLAY: Activity about local content which is already present')
            return

    # Ignore unutilised PeerTube activity
    if request_json['actor'].endswith('accounts/peertube'):
        log_incoming_ap(request_json['id'], APLOG_PT_VIEW, APLOG_IGNORED, request_json, 'REPLAY: PeerTube View or CacheFile activity')
        return

    # Ignore account deletion requests from users that do not already exist here
    account_deletion = False
    if (request_json['type'] == 'Delete' and
            'object' in request_json and isinstance(request_json['object'], str) and
            request_json['actor'] == request_json['object']):
        account_deletion = True
        actor = User.query.filter_by(ap_profile_id=request_json['actor'].lower()).first()
        if not actor:
            log_incoming_ap(request_json['id'], APLOG_DELETE, APLOG_IGNORED, request_json, 'REPLAY: Does not exist here')
            return
    else:
        actor = find_actor_or_create(request_json['actor'])

    if not actor:
        actor_name = request_json['actor']
        log_incoming_ap(request_json['id'], APLOG_NOTYPE, APLOG_FAILURE, request_json, f'REPLAY: Actor could not be found 1: {actor_name}')
        return
    if actor.is_local():  # should be impossible (can be Announced back, but not sent back without access to privkey)
        log_incoming_ap(request_json['id'], APLOG_NOTYPE, APLOG_FAILURE, request_json, 'REPLAY: ActivityPub activity from a local actor')
        return

    # When a user is deleted, the only way to be fairly sure they get deleted everywhere is to tell the whole fediverse.
    if account_deletion == True:
        process_delete_request(request_json, True)
        return

    process_inbox_request(request_json, True)
    return
2023-12-24 13:28:41 +13:00
@celery.task
2024-11-18 21:10:12 +00:00
def process_inbox_request ( request_json , store_ap_json ) :
2023-12-24 13:28:41 +13:00
with current_app . app_context ( ) :
site = Site . query . get ( 1 ) # can't use g.site because celery doesn't use Flask's g variable
2024-11-18 21:10:12 +00:00
# For an Announce, Accept, or Reject, we have the community, and need to find the user
# For everything else, we have the user, and need to find the community
# Benefits of always using request_json['actor']:
# It's the actor who signed the request, and whose signature has been verified
# Because of the earlier check, we know that they already exist, and so don't need to check again
# Using actors from inner objects has a vulnerability to spoofing attacks (e.g. if 'attributedTo' doesn't match the 'Create' actor)
2024-11-28 11:11:59 +13:00
announce_id = request_json [ ' id ' ]
2024-11-18 21:10:12 +00:00
if request_json [ ' type ' ] == ' Announce ' or request_json [ ' type ' ] == ' Accept ' or request_json [ ' type ' ] == ' Reject ' :
community_ap_id = request_json [ ' actor ' ]
community = find_actor_or_create ( community_ap_id , community_only = True , create_if_not_found = False )
if not community or not isinstance ( community , Community ) :
log_incoming_ap ( announce_id , APLOG_ANNOUNCE , APLOG_FAILURE , request_json , ' Actor was not a community ' )
return
user_ap_id = None # found in 'if request_json['type'] == 'Announce', or it's a local user (for 'Accept'/'Reject')
else :
user_ap_id = request_json [ ' actor ' ]
user = find_actor_or_create ( user_ap_id , create_if_not_found = False )
if not user or not isinstance ( user , User ) :
log_incoming_ap ( announce_id , APLOG_NOTYPE , APLOG_FAILURE , request_json , ' Actor was not a user ' )
return
user . last_seen = site . last_active = utcnow ( )
db . session . commit ( )
community = None # found as needed
# Follow: remote user wants to join/follow one of our users or communities
if request_json [ ' type ' ] == ' Follow ' :
target_ap_id = request_json [ ' object ' ]
follow_id = request_json [ ' id ' ]
target = find_actor_or_create ( target_ap_id , create_if_not_found = False )
if not target :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_FOLLOW , APLOG_FAILURE , request_json if store_ap_json else None , ' Could not find target of Follow ' )
2024-11-18 21:10:12 +00:00
return
if isinstance ( target , Community ) :
community = target
reject_follow = False
if community . local_only :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_FOLLOW , APLOG_FAILURE , request_json if store_ap_json else None , ' Local only cannot be followed by remote users ' )
2024-11-18 21:10:12 +00:00
reject_follow = True
else :
# check if user is banned from this community
user_banned = CommunityBan . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if user_banned :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_FOLLOW , APLOG_FAILURE , request_json if store_ap_json else None , ' Remote user has been banned ' )
2024-11-18 21:10:12 +00:00
reject_follow = True
if reject_follow :
# send reject message to deny the follow
reject = { " @context " : default_context ( ) , " actor " : community . public_url ( ) , " to " : [ user . public_url ( ) ] ,
" object " : { " actor " : user . public_url ( ) , " to " : None , " object " : community . public_url ( ) , " type " : " Follow " , " id " : follow_id } ,
" type " : " Reject " , " id " : f " https:// { current_app . config [ ' SERVER_NAME ' ] } /activities/reject/ " + gibberish ( 32 ) }
post_request ( user . ap_inbox_url , reject , community . private_key , f " { community . public_url ( ) } #main-key " )
else :
if community_membership ( user , community ) != SUBSCRIPTION_MEMBER :
member = CommunityMember ( user_id = user . id , community_id = community . id )
db . session . add ( member )
db . session . commit ( )
cache . delete_memoized ( community_membership , user , community )
# send accept message to acknowledge the follow
accept = { " @context " : default_context ( ) , " actor " : community . public_url ( ) , " to " : [ user . public_url ( ) ] ,
" object " : { " actor " : user . public_url ( ) , " to " : None , " object " : community . public_url ( ) , " type " : " Follow " , " id " : follow_id } ,
" type " : " Accept " , " id " : f " https:// { current_app . config [ ' SERVER_NAME ' ] } /activities/accept/ " + gibberish ( 32 ) }
post_request ( user . ap_inbox_url , accept , community . private_key , f " { community . public_url ( ) } #main-key " )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_FOLLOW , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-18 21:10:12 +00:00
return
elif isinstance ( target , User ) :
local_user = target
remote_user = user
if not local_user . is_local ( ) :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_FOLLOW , APLOG_FAILURE , request_json if store_ap_json else None , ' Follow request for remote user received ' )
2024-11-18 21:10:12 +00:00
return
existing_follower = UserFollower . query . filter_by ( local_user_id = local_user . id , remote_user_id = remote_user . id ) . first ( )
if not existing_follower :
auto_accept = not local_user . ap_manually_approves_followers
new_follower = UserFollower ( local_user_id = local_user . id , remote_user_id = remote_user . id , is_accepted = auto_accept )
if not local_user . ap_followers_url :
local_user . ap_followers_url = local_user . public_url ( ) + ' /followers '
db . session . add ( new_follower )
db . session . commit ( )
accept = { " @context " : default_context ( ) , " actor " : local_user . public_url ( ) , " to " : [ remote_user . public_url ( ) ] ,
" object " : { " actor " : remote_user . public_url ( ) , " to " : None , " object " : local_user . public_url ( ) , " type " : " Follow " , " id " : follow_id } ,
" type " : " Accept " , " id " : f " https:// { current_app . config [ ' SERVER_NAME ' ] } /activities/accept/ " + gibberish ( 32 ) }
post_request ( remote_user . ap_inbox_url , accept , local_user . private_key , f " { local_user . public_url ( ) } #main-key " )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_FOLLOW , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-18 21:10:12 +00:00
return
2024-11-18 21:19:42 +00:00
# Accept: remote server is accepting our previous follow request
if request_json [ ' type ' ] == ' Accept ' :
user = None
if isinstance ( request_json [ ' object ' ] , str ) : # a.gup.pe accepts using a string with the ID of the follow request
join_request_parts = request_json [ ' object ' ] . split ( ' / ' )
join_request = CommunityJoinRequest . query . get ( join_request_parts [ - 1 ] )
if join_request :
user = User . query . get ( join_request . user_id )
elif request_json [ ' object ' ] [ ' type ' ] == ' Follow ' :
user_ap_id = request_json [ ' object ' ] [ ' actor ' ]
user = find_actor_or_create ( user_ap_id , create_if_not_found = False )
if not user :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ACCEPT , APLOG_FAILURE , request_json if store_ap_json else None , ' Could not find recipient of Accept ' )
2024-11-18 21:19:42 +00:00
return
join_request = CommunityJoinRequest . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if join_request :
existing_membership = CommunityMember . query . filter_by ( user_id = join_request . user_id , community_id = join_request . community_id ) . first ( )
if not existing_membership :
member = CommunityMember ( user_id = join_request . user_id , community_id = join_request . community_id )
db . session . add ( member )
community . subscriptions_count + = 1
db . session . commit ( )
cache . delete_memoized ( community_membership , user , community )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ACCEPT , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-18 21:19:42 +00:00
return
2024-11-18 21:25:19 +00:00
# Reject: remote server is rejecting our previous follow request
if request_json [ ' type ' ] == ' Reject ' :
if request_json [ ' object ' ] [ ' type ' ] == ' Follow ' :
user_ap_id = request_json [ ' object ' ] [ ' actor ' ]
user = find_actor_or_create ( user_ap_id , create_if_not_found = False )
if not user :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ACCEPT , APLOG_FAILURE , request_json if store_ap_json else None , ' Could not find recipient of Reject ' )
2024-11-18 21:25:19 +00:00
return
join_request = CommunityJoinRequest . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if join_request :
db . session . delete ( join_request )
existing_membership = CommunityMember . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if existing_membership :
db . session . delete ( existing_membership )
cache . delete_memoized ( community_membership , user , community )
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ACCEPT , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-18 21:25:19 +00:00
return
2024-11-18 21:10:12 +00:00
2024-11-18 22:05:25 +00:00
# Create is new content. Update is often an edit, but Updates from Lemmy can also be new content
if request_json [ ' type ' ] == ' Create ' or request_json [ ' type ' ] == ' Update ' :
if request_json [ ' object ' ] [ ' type ' ] == ' ChatMessage ' :
sender = user
recipient_ap_id = request_json [ ' object ' ] [ ' to ' ] [ 0 ]
recipient = find_actor_or_create ( recipient_ap_id , create_if_not_found = False )
if recipient and recipient . is_local ( ) :
if sender . created_recently ( ) or sender . reputation < = - 10 :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CHATMESSAGE , APLOG_FAILURE , request_json if store_ap_json else None , ' Sender not eligible to send ' )
2024-11-18 22:05:25 +00:00
return
elif recipient . has_blocked_user ( sender . id ) or recipient . has_blocked_instance ( sender . instance_id ) :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CHATMESSAGE , APLOG_FAILURE , request_json if store_ap_json else None , ' Sender blocked by recipient ' )
2024-11-18 22:05:25 +00:00
return
2024-02-17 20:05:57 +13:00
else :
2024-11-18 22:05:25 +00:00
# Find existing conversation to add to
existing_conversation = Conversation . find_existing_conversation ( recipient = recipient , sender = sender )
if not existing_conversation :
existing_conversation = Conversation ( user_id = sender . id )
existing_conversation . members . append ( recipient )
existing_conversation . members . append ( sender )
db . session . add ( existing_conversation )
2024-02-17 20:05:57 +13:00
db . session . commit ( )
2024-11-18 22:05:25 +00:00
# Save ChatMessage to DB
encrypted = request_json [ ' object ' ] [ ' encrypted ' ] if ' encrypted ' in request_json [ ' object ' ] else None
new_message = ChatMessage ( sender_id = sender . id , recipient_id = recipient . id , conversation_id = existing_conversation . id ,
body_html = request_json [ ' object ' ] [ ' content ' ] ,
body = html_to_text ( request_json [ ' object ' ] [ ' content ' ] ) ,
encrypted = encrypted )
db . session . add ( new_message )
existing_conversation . updated_at = utcnow ( )
db . session . commit ( )
# Notify recipient
notify = Notification ( title = shorten_string ( ' New message from ' + sender . display_name ( ) ) ,
url = f ' /chat/ { existing_conversation . id } #message_ { new_message } ' , user_id = recipient . id ,
author_id = sender . id )
db . session . add ( notify )
recipient . unread_notifications + = 1
existing_conversation . read = False
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CHATMESSAGE , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-18 22:05:25 +00:00
return
# inner object of Create is not a ChatMessage
else :
if ( request_json [ ' object ' ] [ ' type ' ] == ' Note ' and ' name ' in request_json [ ' object ' ] and # Poll Votes
' inReplyTo ' in request_json [ ' object ' ] and ' attributedTo ' in request_json [ ' object ' ] ) :
post_being_replied_to = Post . query . filter_by ( ap_id = request_json [ ' object ' ] [ ' inReplyTo ' ] ) . first ( )
if post_being_replied_to :
poll_data = Poll . query . get ( post_being_replied_to . id )
choice = PollChoice . query . filter_by ( post_id = post_being_replied_to . id , choice_text = request_json [ ' object ' ] [ ' name ' ] ) . first ( )
if poll_data and choice :
poll_data . vote_for_choice ( choice . id , user . id )
2024-04-20 13:03:39 +01:00
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CREATE , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-18 22:05:25 +00:00
if post_being_replied_to . author . is_local ( ) :
inform_followers_of_post_update ( post_being_replied_to . id , user . instance_id )
return
community_ap_id = find_community_ap_id ( request_json )
if not ensure_domains_match ( request_json [ ' object ' ] ) :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CREATE , APLOG_FAILURE , request_json if store_ap_json else None , ' Domains do not match ' )
2024-11-18 22:05:25 +00:00
return
community = find_actor_or_create ( community_ap_id , community_only = True , create_if_not_found = False ) if community_ap_id else None
if community and community . local_only :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CREATE , APLOG_FAILURE , request_json if store_ap_json else None , ' Remote Create in local_only community ' )
2024-11-18 22:05:25 +00:00
return
if not community :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CREATE , APLOG_FAILURE , request_json if store_ap_json else None , ' Blocked or unfound community ' )
2024-11-18 22:05:25 +00:00
return
object_type = request_json [ ' object ' ] [ ' type ' ]
new_content_types = [ ' Page ' , ' Article ' , ' Link ' , ' Note ' , ' Question ' ]
if object_type in new_content_types : # create or update a post
process_new_content ( user , community , store_ap_json , request_json , announced = False )
return
elif object_type == ' Video ' : # PeerTube: editing a video (PT doesn't Announce these)
post = Post . query . filter_by ( ap_id = request_json [ ' object ' ] [ ' id ' ] ) . first ( )
if post :
if user . id == post . user_id :
update_post_from_activity ( post , request_json )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UPDATE , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-04-20 13:03:39 +01:00
return
2023-12-24 13:28:41 +13:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UPDATE , APLOG_FAILURE , request_json if store_ap_json else None , ' Edit attempt denied ' )
2024-01-05 08:45:33 +13:00
return
2024-11-18 22:05:25 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UPDATE , APLOG_FAILURE , request_json if store_ap_json else None , ' PeerTube post not found ' )
2024-11-18 22:05:25 +00:00
return
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CREATE , APLOG_FAILURE , request_json if store_ap_json else None , ' Unacceptable type (create): ' + object_type )
2024-11-18 22:05:25 +00:00
return
2023-12-24 16:20:18 +13:00
2024-11-18 22:31:18 +00:00
if request_json [ ' type ' ] == ' Delete ' :
if isinstance ( request_json [ ' object ' ] , str ) :
ap_id = request_json [ ' object ' ] # lemmy
else :
ap_id = request_json [ ' object ' ] [ ' id ' ] # kbin
to_delete = find_liked_object ( ap_id ) # Just for Posts and Replies (User deletes go through process_delete_request())
if to_delete :
if to_delete . deleted :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_DELETE , APLOG_IGNORED , request_json if store_ap_json else None , ' Activity about local content which is already deleted ' )
2024-11-18 22:31:18 +00:00
else :
delete_post_or_comment ( user , to_delete , store_ap_json , request_json )
announce_activity_to_followers ( to_delete . community , user , request_json )
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_DELETE , APLOG_FAILURE , request_json if store_ap_json else None , ' Delete: cannot find ' + ap_id )
2024-11-18 22:31:18 +00:00
return
2024-11-19 11:51:29 +00:00
if request_json [ ' type ' ] == ' Like ' or request_json [ ' type ' ] == ' EmojiReact ' : # Upvote
process_upvote ( user , store_ap_json , request_json , announced = False )
return
if request_json [ ' type ' ] == ' Dislike ' : # Downvote
if site . enable_downvotes is False :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_DISLIKE , APLOG_IGNORED , request_json if store_ap_json else None , ' Dislike ignored because of allow_dislike setting ' )
2024-11-19 11:51:29 +00:00
return
process_downvote ( user , store_ap_json , request_json , announced = False )
return
2024-11-19 12:02:15 +00:00
if request_json [ ' type ' ] == ' Flag ' : # Reported content
reported = find_reported_object ( request_json [ ' object ' ] )
if reported :
process_report ( user , reported , request_json )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REPORT , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-19 12:02:15 +00:00
announce_activity_to_followers ( reported . community , user , request_json )
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REPORT , APLOG_IGNORED , request_json if store_ap_json else None , ' Report ignored due to missing content ' )
2024-11-19 12:02:15 +00:00
return
2024-11-27 22:12:23 +00:00
if request_json [ ' type ' ] == ' Add ' : # remote site is adding a local user as a moderator, and is sending directly rather than announcing (happens if not subscribed)
mod = user
community_ap_id = find_community_ap_id ( request_json )
community = find_actor_or_create ( community_ap_id , community_only = True , create_if_not_found = False ) if community_ap_id else None
if community :
if not community . is_moderator ( mod ) and not community . is_instance_admin ( mod ) :
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Does not have permission ' )
return
target = request_json [ ' target ' ]
moderators_url = community . ap_moderators_url
if target == moderators_url :
new_mod = find_actor_or_create ( request_json [ ' object ' ] , create_if_not_found = False )
if new_mod and new_mod . is_local ( ) :
existing_membership = CommunityMember . query . filter_by ( community_id = community . id , user_id = new_mod . id ) . first ( )
if existing_membership :
existing_membership . is_moderator = True
else :
new_membership = CommunityMember ( community_id = community . id , user_id = new_mod . id , is_moderator = True )
db . session . add ( new_membership )
db . session . commit ( )
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_SUCCESS , request_json if store_ap_json else None )
else :
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Cannot find: ' + request_json [ ' object ' ] )
return
else :
# Lemmy might not send anything directly to sticky a post if no-one is subscribed (could not get it to generate the activity)
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Unknown target for Add ' )
else :
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Add: cannot find community ' )
return
if request_json [ ' type ' ] == ' Remove ' : # remote site is removing a local user as a moderator, and is sending directly rather than announcing (happens if not subscribed)
mod = user
community_ap_id = find_community_ap_id ( request_json )
community = find_actor_or_create ( community_ap_id , community_only = True , create_if_not_found = False ) if community_ap_id else None
if community :
if not community . is_moderator ( mod ) and not community . is_instance_admin ( mod ) :
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Does not have permission ' )
return
target = request_json [ ' target ' ]
moderators_url = community . ap_moderators_url
if target == moderators_url :
old_mod = find_actor_or_create ( request_json [ ' object ' ] , create_if_not_found = False )
if old_mod and old_mod . is_local ( ) :
existing_membership = CommunityMember . query . filter_by ( community_id = community . id , user_id = old_mod . id ) . first ( )
if existing_membership :
existing_membership . is_moderator = False
db . session . commit ( )
log_incoming_ap ( announce_id , APLOG_REMOVE , APLOG_SUCCESS , request_json if store_ap_json else None )
else :
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Cannot find: ' + request_json [ ' object ' ] )
return
else :
# Lemmy might not send anything directly to unsticky a post if no-one is subscribed (could not get it to generate the activity)
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Unknown target for Remove ' )
else :
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Remove: cannot find community ' )
return
2024-11-20 19:48:38 +00:00
if request_json [ ' type ' ] == ' Block ' : # remote site is banning one of their users
blocker = user
blocked_ap_id = request_json [ ' object ' ] . lower ( )
blocked = User . query . filter_by ( ap_profile_id = blocked_ap_id ) . first ( )
if store_ap_json :
request_json [ ' cc ' ] = [ ] # cut very long list of instances
if not blocked :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_IGNORED , request_json if store_ap_json else None , ' Does not exist here ' )
2024-11-20 19:48:38 +00:00
return
2024-11-30 09:50:14 +13:00
# target = request_json['target'] # target is supposed to determine the scope - whether it is an instance-wide ban or just one community. Lemmy doesn't use it right though
# community = find_actor_or_create(target, create_if_not_found=False, community_only=True)
2024-11-20 19:48:38 +00:00
remove_data = request_json [ ' removeData ' ] if ' removeData ' in request_json else False
# Lemmy currently only sends userbans for admins banning local users
# Banning remote users is hacked by banning them from every community of which they are a part
# There's plans to change this in the future though.
if not blocker . is_instance_admin ( ) and not blocked . instance_id == blocker . instance_id :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_FAILURE , request_json if store_ap_json else None , ' Does not have permission ' )
2024-11-20 19:48:38 +00:00
return
2024-11-30 09:50:14 +13:00
if blocked . banned : # We may have already banned them - we don't want remote temp bans to over-ride our permanent bans
return
if blocked . is_local ( ) : # Sanity check
current_app . logger . error ( ' Attempt to ban local user: ' + str ( request_json ) )
return
blocked . banned = True
db . session . commit ( )
if ' expires ' in request_json :
blocked . banned_until = request_json [ ' expires ' ]
elif ' endTime ' in request_json :
blocked . banned_until = request_json [ ' endTime ' ]
try :
db . session . commit ( )
except : # I don't know the format of expires or endTime so let's see how this goes
db . session . rollback ( )
current_app . logger . error ( ' could not save banned_until value: ' + str ( request_json ) )
2024-11-20 19:48:38 +00:00
2024-11-30 09:50:14 +13:00
if remove_data :
2024-11-20 19:48:38 +00:00
site_ban_remove_data ( blocker . id , blocked )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-20 19:48:38 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_IGNORED , request_json if store_ap_json else None , ' Banned, but content retained ' )
2024-11-30 09:50:14 +13:00
2024-11-20 19:48:38 +00:00
return
2024-11-22 02:07:22 +00:00
if request_json [ ' type ' ] == ' Undo ' :
if request_json [ ' object ' ] [ ' type ' ] == ' Follow ' : # Unsubscribe from a community or user
target_ap_id = request_json [ ' object ' ] [ ' object ' ]
target = find_actor_or_create ( target_ap_id , create_if_not_found = False )
if isinstance ( target , Community ) :
community = target
member = CommunityMember . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
join_request = CommunityJoinRequest . query . filter_by ( user_id = user . id , community_id = community . id ) . first ( )
if member :
db . session . delete ( member )
community . subscriptions_count - = 1
if join_request :
db . session . delete ( join_request )
db . session . commit ( )
cache . delete_memoized ( community_membership , user , community )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_FOLLOW , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-22 02:07:22 +00:00
return
if isinstance ( target , User ) :
local_user = target
remote_user = user
follower = UserFollower . query . filter_by ( local_user_id = local_user . id , remote_user_id = remote_user . id , is_accepted = True ) . first ( )
if follower :
db . session . delete ( follower )
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_FOLLOW , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-22 02:07:22 +00:00
return
if not target :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_FOLLOW , APLOG_FAILURE , request_json if store_ap_json else None , ' Unfound target ' )
2024-11-22 02:07:22 +00:00
return
2024-11-23 01:29:04 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Delete ' : # Restore something previously deleted
if isinstance ( request_json [ ' object ' ] [ ' object ' ] , str ) :
ap_id = request_json [ ' object ' ] [ ' object ' ] # lemmy
else :
ap_id = request_json [ ' object ' ] [ ' object ' ] [ ' id ' ] # kbin
restorer = user
to_restore = find_liked_object ( ap_id ) # a user or a mod/admin is undoing the delete of a post or reply
if to_restore :
if not to_restore . deleted :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_DELETE , APLOG_IGNORED , request_json if store_ap_json else None , ' Activity about local content which is already restored ' )
2023-12-26 21:39:52 +13:00
else :
2024-11-23 01:29:04 +00:00
restore_post_or_comment ( restorer , to_restore , store_ap_json , request_json )
announce_activity_to_followers ( to_restore . community , user , request_json )
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_DELETE , APLOG_FAILURE , request_json if store_ap_json else None , ' Undo delete: cannot find ' + ap_id )
2024-11-23 01:29:04 +00:00
return
2024-11-23 01:36:31 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Like ' or request_json [ ' object ' ] [ ' type ' ] == ' Dislike ' : # Undoing an upvote or downvote
post = comment = None
target_ap_id = request_json [ ' object ' ] [ ' object ' ]
post_or_comment = undo_vote ( comment , post , target_ap_id , user )
if post_or_comment :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_VOTE , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-23 01:36:31 +00:00
announce_activity_to_followers ( post_or_comment . community , user , request_json )
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_VOTE , APLOG_FAILURE , request_json if store_ap_json else None , ' Unfound object ' + target_ap_id )
2024-11-23 01:36:31 +00:00
return
2024-11-23 01:39:47 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Block ' : # remote site is unbanning one of their users
unblocker = user
unblocked_ap_id = request_json [ ' object ' ] [ ' object ' ] . lower ( )
unblocked = User . query . filter_by ( ap_profile_id = unblocked_ap_id ) . first ( )
if store_ap_json :
request_json [ ' cc ' ] = [ ] # cut very long list of instances
request_json [ ' object ' ] [ ' cc ' ] = [ ]
if not unblocked :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_IGNORED , request_json if store_ap_json else None , ' Does not exist here ' )
2024-11-23 01:39:47 +00:00
return
unblock_from_ap_id = request_json [ ' object ' ] [ ' target ' ]
2024-11-28 11:11:59 +13:00
if not unblocker . is_instance_admin ( ) and not unblocked . instance_id == unblocker . instance_id :
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_FAILURE , request_json if store_ap_json else None , ' Does not have permission ' )
2024-11-23 01:39:47 +00:00
return
# (no removeData field in an undo/ban - cannot restore without knowing if deletion was part of ban, or different moderator action)
#unblocked.banned = False # uncommented until there's a mechanism for processing ban expiry date
#db.session.commit()
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_USERBAN , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-23 01:39:47 +00:00
return
2024-11-23 19:26:25 +00:00
# Announce is new content and votes that happened on a remote server.
if request_json [ ' type ' ] == ' Announce ' :
if isinstance ( request_json [ ' object ' ] , str ) : # Mastodon, PeerTube, A.gup.pe
2024-11-27 19:09:10 +00:00
if request_json [ ' object ' ] . startswith ( ' https:// ' + current_app . config [ ' SERVER_NAME ' ] ) :
log_incoming_ap ( announce_id , APLOG_DUPLICATE , APLOG_IGNORED , request_json if store_ap_json else None , ' Activity about local content which is already present ' )
return
2024-11-23 19:26:25 +00:00
post = resolve_remote_post ( request_json [ ' object ' ] , community . id , announce_actor = community . ap_profile_id , store_ap_json = store_ap_json )
if post :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ANNOUNCE , APLOG_SUCCESS , request_json )
2024-11-23 19:26:25 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ANNOUNCE , APLOG_FAILURE , request_json , ' Could not resolve post ' )
2024-11-23 19:26:25 +00:00
return
user_ap_id = request_json [ ' object ' ] [ ' actor ' ]
user = find_actor_or_create ( user_ap_id )
if not user or not isinstance ( user , User ) :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ANNOUNCE , APLOG_FAILURE , request_json , ' Blocked or unfound user for Announce object actor ' + user_ap_id )
2024-11-23 19:26:25 +00:00
return
user . last_seen = site . last_active = utcnow ( )
user . instance . last_seen = utcnow ( )
user . instance . dormant = False
user . instance . gone_forever = False
user . instance . failures = 0
db . session . commit ( )
if request_json [ ' object ' ] [ ' type ' ] == ' Create ' or request_json [ ' object ' ] [ ' type ' ] == ' Update ' :
object_type = request_json [ ' object ' ] [ ' object ' ] [ ' type ' ]
new_content_types = [ ' Page ' , ' Article ' , ' Link ' , ' Note ' , ' Question ' ]
if object_type in new_content_types : # create or update a post
process_new_content ( user , community , store_ap_json , request_json )
elif request_json [ ' object ' ] [ ' type ' ] == ' Update ' and request_json [ ' object ' ] [ ' object ' ] [ ' type ' ] == ' Group ' :
# force refresh next time community is heard from
community . ap_fetched_at = None
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UPDATE , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-23 19:26:25 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_CREATE , APLOG_FAILURE , request_json if store_ap_json else None , ' Unacceptable type (create): ' + object_type )
2024-11-23 19:26:25 +00:00
return
2024-11-23 19:41:09 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Delete ' : # Announced Delete
if isinstance ( request_json [ ' object ' ] [ ' object ' ] , str ) :
ap_id = request_json [ ' object ' ] [ ' object ' ] # lemmy
else :
ap_id = request_json [ ' object ' ] [ ' object ' ] [ ' id ' ] # kbin
to_delete = find_liked_object ( ap_id ) # Just for Posts and Replies (User deletes aren't announced)
if to_delete :
if to_delete . deleted :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_DELETE , APLOG_IGNORED , request_json if store_ap_json else None , ' Activity about local content which is already deleted ' )
2023-12-24 13:28:41 +13:00
else :
2024-11-23 19:41:09 +00:00
delete_post_or_comment ( user , to_delete , store_ap_json , request_json )
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_DELETE , APLOG_FAILURE , request_json if store_ap_json else None , ' Delete: cannot find ' + ap_id )
2024-11-23 19:41:09 +00:00
return
2024-11-23 19:46:27 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Like ' or request_json [ ' object ' ] [ ' type ' ] == ' EmojiReact ' : # Announced Upvote
process_upvote ( user , store_ap_json , request_json )
return
if request_json [ ' object ' ] [ ' type ' ] == ' Dislike ' : # Announced Downvote
if site . enable_downvotes is False :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_DISLIKE , APLOG_IGNORED , request_json if store_ap_json else None , ' Dislike ignored because of allow_dislike setting ' )
2024-11-23 19:46:27 +00:00
return
process_downvote ( user , store_ap_json , request_json )
return
2024-11-23 20:04:39 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Flag ' : # Announce of reported content
reported = find_reported_object ( request_json [ ' object ' ] [ ' object ' ] )
if reported :
process_report ( user , reported , request_json [ ' object ' ] )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REPORT , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-23 20:04:39 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REPORT , APLOG_IGNORED , request_json if store_ap_json else None , ' Report ignored due to missing content ' )
2024-11-23 20:04:39 +00:00
return
2024-11-24 16:06:33 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Lock ' : # Announce of post lock
mod = user
post_id = request_json [ ' object ' ] [ ' object ' ]
post = Post . query . filter_by ( ap_id = post_id ) . first ( )
if post :
if post . community . is_moderator ( mod ) or post . community . is_instance_admin ( mod ) :
post . comments_enabled = False
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_LOCK , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-24 16:06:33 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_LOCK , APLOG_FAILURE , request_json if store_ap_json else None , ' Lock: Does not have permission ' )
2024-11-24 16:06:33 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_LOCK , APLOG_FAILURE , request_json if store_ap_json else None , ' Lock: post not found ' )
2024-11-24 16:06:33 +00:00
return
2024-11-24 16:31:20 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Add ' : # Announce of adding mods or stickying a post
target = request_json [ ' object ' ] [ ' target ' ]
featured_url = community . ap_featured_url
moderators_url = community . ap_moderators_url
if target == featured_url :
post = Post . query . filter_by ( ap_id = request_json [ ' object ' ] [ ' object ' ] ) . first ( )
2023-12-24 13:28:41 +13:00
if post :
2024-11-24 16:31:20 +00:00
post . sticky = True
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_SUCCESS , request_json if store_ap_json else None )
2023-09-16 19:09:04 +12:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Cannot find: ' + request_json [ ' object ' ] [ ' object ' ] )
2024-11-24 16:31:20 +00:00
return
if target == moderators_url :
user = find_actor_or_create ( request_json [ ' object ' ] [ ' object ' ] )
if user :
existing_membership = CommunityMember . query . filter_by ( community_id = community . id , user_id = user . id ) . first ( )
if existing_membership :
existing_membership . is_moderator = True
2024-02-25 15:31:16 +13:00
else :
2024-11-24 16:31:20 +00:00
new_membership = CommunityMember ( community_id = community . id , user_id = user . id , is_moderator = True )
db . session . add ( new_membership )
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-24 16:31:20 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Cannot find: ' + request_json [ ' object ' ] [ ' object ' ] )
2024-11-24 16:31:20 +00:00
return
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_ADD , APLOG_FAILURE , request_json if store_ap_json else None , ' Unknown target for Add ' )
2024-11-24 16:31:20 +00:00
return
2024-11-24 16:35:10 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Remove ' : # Announce of removing mods or unstickying a post
target = request_json [ ' object ' ] [ ' target ' ]
featured_url = community . ap_featured_url
moderators_url = community . ap_moderators_url
if target == featured_url :
post = Post . query . filter_by ( ap_id = request_json [ ' object ' ] [ ' object ' ] ) . first ( )
if post :
post . sticky = False
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REMOVE , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-24 16:35:10 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REMOVE , APLOG_FAILURE , request_json if store_ap_json else None , ' Cannot find: ' + target )
2024-11-24 16:35:10 +00:00
return
if target == moderators_url :
user = find_actor_or_create ( request_json [ ' object ' ] [ ' object ' ] , create_if_not_found = False )
if user :
existing_membership = CommunityMember . query . filter_by ( community_id = community . id , user_id = user . id ) . first ( )
if existing_membership :
existing_membership . is_moderator = False
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REMOVE , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-24 16:35:10 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REMOVE , APLOG_FAILURE , request_json if store_ap_json else None , ' Cannot find: ' + request_json [ ' object ' ] [ ' object ' ] )
2024-11-24 16:35:10 +00:00
return
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_REMOVE , APLOG_FAILURE , request_json if store_ap_json else None , ' Unknown target for Remove ' )
2024-11-24 16:35:10 +00:00
return
2024-11-24 21:01:34 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Block ' : # Announce of user ban. Mod is banning a user from a community,
blocker = user # or an admin is banning a user from all the site's communities as part of a site ban
blocked_ap_id = request_json [ ' object ' ] [ ' object ' ] . lower ( )
blocked = User . query . filter_by ( ap_profile_id = blocked_ap_id ) . first ( )
if not blocked :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_IGNORED , request_json if store_ap_json else None , ' Does not exist here ' )
2024-11-24 21:01:34 +00:00
return
remove_data = request_json [ ' object ' ] [ ' removeData ' ] if ' removeData ' in request_json [ ' object ' ] else False
if not community . is_moderator ( blocker ) and not community . is_instance_admin ( blocker ) :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_FAILURE , request_json if store_ap_json else None , ' Does not have permission ' )
2024-11-24 21:01:34 +00:00
return
if remove_data == True :
community_ban_remove_data ( blocker . id , community . id , blocked )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-24 21:01:34 +00:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_IGNORED , request_json if store_ap_json else None , ' Banned, but content retained ' )
2024-11-24 21:01:34 +00:00
if blocked . is_local ( ) :
ban_local_user ( blocker , blocked , community , request_json )
return
2024-11-24 21:10:16 +00:00
if request_json [ ' object ' ] [ ' type ' ] == ' Undo ' :
if request_json [ ' object ' ] [ ' object ' ] [ ' type ' ] == ' Delete ' : # Announce of undo of Delete
if isinstance ( request_json [ ' object ' ] [ ' object ' ] [ ' object ' ] , str ) :
ap_id = request_json [ ' object ' ] [ ' object ' ] [ ' object ' ] # lemmy
else :
ap_id = request_json [ ' object ' ] [ ' object ' ] [ ' object ' ] [ ' id ' ] # kbin
restorer = user
to_restore = find_liked_object ( ap_id ) # a user or a mod/admin is undoing the delete of a post or reply
if to_restore :
if not to_restore . deleted :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_DELETE , APLOG_IGNORED , request_json if store_ap_json else None , ' Content was not deleted ' )
2024-01-03 16:29:58 +13:00
else :
2024-11-24 21:10:16 +00:00
restore_post_or_comment ( restorer , to_restore , store_ap_json , request_json )
2024-01-02 19:41:00 +13:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_DELETE , APLOG_FAILURE , request_json if store_ap_json else None , ' Undo delete: cannot find ' + ap_id )
2024-11-24 21:10:16 +00:00
return
2024-11-24 21:19:07 +00:00
if request_json [ ' object ' ] [ ' object ' ] [ ' type ' ] == ' Like ' or request_json [ ' object ' ] [ ' object ' ] [ ' type ' ] == ' Dislike ' : # Announce of undo of upvote or downvote
post = comment = None
target_ap_id = request_json [ ' object ' ] [ ' object ' ] [ ' object ' ]
post_or_comment = undo_vote ( comment , post , target_ap_id , user )
if post_or_comment :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_VOTE , APLOG_SUCCESS , request_json if store_ap_json else None )
2023-12-24 13:28:41 +13:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_UNDO_VOTE , APLOG_FAILURE , request_json if store_ap_json else None , ' Unfound object ' + target_ap_id )
2024-11-24 21:19:07 +00:00
return
2024-11-24 21:30:41 +00:00
if request_json [ ' object ' ] [ ' object ' ] [ ' type ' ] == ' Lock ' : # Announce of undo of post lock
mod = user
post_id = request_json [ ' object ' ] [ ' object ' ] [ ' object ' ]
post = Post . query . filter_by ( ap_id = post_id ) . first ( )
if post :
if post . community . is_moderator ( mod ) or post . community . is_instance_admin ( mod ) :
post . comments_enabled = True
db . session . commit ( )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_LOCK , APLOG_SUCCESS , request_json if store_ap_json else None )
2023-12-24 13:28:41 +13:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_LOCK , APLOG_FAILURE , request_json if store_ap_json else None , ' Lock: Does not have permission ' )
2024-04-06 16:29:47 +13:00
else :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_LOCK , APLOG_FAILURE , request_json if store_ap_json else None , ' Lock: post not found ' )
2024-11-24 21:30:41 +00:00
return
2023-09-08 20:04:01 +12:00
2024-11-24 21:43:08 +00:00
if request_json [ ' object ' ] [ ' object ' ] [ ' type ' ] == ' Block ' : # Announce of undo of user ban. Mod is unbanning a user from a community,
blocker = user # or an admin is unbanning a user from all the site's communities as part of a site unban
blocked_ap_id = request_json [ ' object ' ] [ ' object ' ] [ ' object ' ] . lower ( )
blocked = User . query . filter_by ( ap_profile_id = blocked_ap_id ) . first ( )
if not blocked :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_IGNORED , request_json if store_ap_json else None , ' Does not exist here ' )
2024-11-24 21:43:08 +00:00
return
if not community . is_moderator ( blocker ) and not community . is_instance_admin ( blocker ) :
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_FAILURE , request_json if store_ap_json else None , ' Does not have permission ' )
2024-11-24 21:43:08 +00:00
return
if blocked . is_local ( ) :
unban_local_user ( blocker , blocked , community , request_json )
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_USERBAN , APLOG_SUCCESS , request_json if store_ap_json else None )
2024-11-24 21:43:08 +00:00
return
2024-11-28 11:11:59 +13:00
log_incoming_ap ( announce_id , APLOG_MONITOR , APLOG_PROCESSING , request_json if store_ap_json else None , ' Unmatched activity ' )
2023-12-22 14:05:39 +13:00
2023-12-29 17:32:35 +13:00
@celery.task
def process_delete_request(request_json, store_ap_json):
    """Handle a remote user's self-delete (Delete where actor deletes themselves).

    Runs as a Celery task. The incoming JSON is kept intact here (rather than
    trimmed) because user_removed_from_remote_server() performs a JSON request.
    """
    with current_app.app_context():
        actor_ap_id = request_json['actor']
        user = User.query.filter_by(ap_profile_id=actor_ap_id.lower()).first()
        if user is None:
            return
        # Confirm with the remote server that the account really is gone,
        # so a spoofed Delete cannot wipe the local copy of the account.
        if user_removed_from_remote_server(actor_ap_id, is_piefed=user.instance.software == 'PieFed'):
            # Soft self-delete: flag the account instead of purging rows.
            user.deleted = True
            user.deleted_by = user.id
            db.session.commit()
            log_incoming_ap(request_json['id'], APLOG_DELETE, APLOG_SUCCESS, request_json if store_ap_json else None)
        else:
            log_incoming_ap(request_json['id'], APLOG_DELETE, APLOG_FAILURE, request_json if store_ap_json else None, 'User not actually deleted.')
        # TODO: acknowledge 'removeData' field from Lemmy
        # TODO: hard-delete in 7 days (should purge avatar and cover images, but keep posts and replies unless already soft-deleted by removeData = True)
2023-12-29 17:32:35 +13:00
2024-01-03 16:29:58 +13:00
def announce_activity_to_followers(community, creator, activity):
    """Wrap `activity` in an Announce from `community` and push it to every
    instance that follows the community.

    Only local communities announce: activity addressed to a remote community
    is announced by that community's own instance, not by us.
    NOTE(review): `activity` is mutated in place — its @context is stripped
    before nesting it as the inner object.
    """
    if not community.is_local():
        return

    # Remove the JSON-LD context from what will become the inner object.
    # pop() instead of `del`: not every inner activity carries a @context,
    # and a missing key must not raise KeyError and abort the announce.
    activity.pop("@context", None)

    announce_activity = {
        '@context': default_context(),
        "actor": community.public_url(),
        "to": [
            "https://www.w3.org/ns/activitystreams#Public"
        ],
        "object": activity,
        "cc": [
            f"{community.public_url()}/followers"
        ],
        "type": "Announce",
        "id": f"https://{current_app.config['SERVER_NAME']}/activities/announce/{gibberish(15)}"
    }
    for instance in community.following_instances(include_dormant=True):
        # awaken dormant instances if they've been sleeping for long enough to be worth trying again
        awaken_dormant_instance(instance)
        # All good? Send!
        if instance and instance.online() and not instance_blocked(instance.inbox):
            # don't send it to the instance that hosts the creator as presumably they already have the content
            if creator.instance_id != instance.id:
                send_to_remote_instance(instance.id, community.id, announce_activity)
2023-08-10 21:13:37 +12:00
@bp.route('/c/<actor>/outbox', methods=['GET'])
def community_outbox(actor):
    """ActivityPub outbox for a local community: up to 50 posts, stickies first.

    Returns 404 for unknown, banned, or remote communities.
    """
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is not None:
        sticky_posts = community.posts.filter(Post.sticky == True, Post.deleted == False).order_by(desc(Post.posted_at)).limit(50).all()
        remaining_limit = 50 - len(sticky_posts)
        remaining_posts = community.posts.filter(Post.sticky == False, Post.deleted == False).order_by(desc(Post.posted_at)).limit(remaining_limit).all()
        posts = sticky_posts + remaining_posts

        community_data = {
            "@context": default_context(),
            "type": "OrderedCollection",
            "id": f"https://{current_app.config['SERVER_NAME']}/c/{actor}/outbox",
            "totalItems": len(posts),
            "orderedItems": []
        }
        for post in posts:
            community_data['orderedItems'].append(post_to_activity(post, community))

        return jsonify(community_data)
    else:
        # Previously the view fell off the end and returned None (a 500);
        # unknown communities should 404 like the other collection endpoints.
        abort(404)
2023-08-05 21:24:10 +12:00
2024-03-24 22:10:41 +00:00
@bp.route('/c/<actor>/featured', methods=['GET'])
def community_featured(actor):
    """ActivityPub featured (stickied posts) collection for a local community.

    Returns 404 for unknown, banned, or remote communities.
    """
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is not None:
        posts = Post.query.filter_by(community_id=community.id, sticky=True, deleted=False).all()
        community_data = {
            "@context": default_context(),
            "type": "OrderedCollection",
            "id": f"https://{current_app.config['SERVER_NAME']}/c/{actor}/featured",
            "totalItems": len(posts),
            "orderedItems": []
        }
        for post in posts:
            community_data['orderedItems'].append(post_to_page(post))
        return jsonify(community_data)
    else:
        # Previously the view fell off the end and returned None (a 500);
        # unknown communities should 404 like the other collection endpoints.
        abort(404)
2023-11-26 23:20:51 +13:00
@bp.route('/c/<actor>/moderators', methods=['GET'])
def community_moderators_route(actor):
    """ActivityPub moderators collection for a local community.

    Returns 404 for unknown, banned, or remote communities.
    """
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is not None:
        moderator_ids = community_moderators(community.id)
        moderators = User.query.filter(User.id.in_([mod.user_id for mod in moderator_ids])).all()
        community_data = {
            "@context": default_context(),
            "type": "OrderedCollection",
            "id": f"https://{current_app.config['SERVER_NAME']}/c/{actor}/moderators",
            "totalItems": len(moderators),
            "orderedItems": []
        }
        for moderator in moderators:
            community_data['orderedItems'].append(moderator.ap_profile_id)
        return jsonify(community_data)
    else:
        # Previously the view fell off the end and returned None (a 500);
        # unknown communities should 404 like the other collection endpoints.
        abort(404)
2024-05-03 06:27:25 +12:00
@celery.task
def process_user_follow_request(request_json, activitypublog_id, remote_user_id):
    """Process a Follow activity aimed at a local user.

    Records the follower relationship (auto-accepted unless the local user
    manually approves followers) and sends an Accept back to the remote user's
    inbox. Runs as a Celery task; the outcome is written to the ActivityPubLog
    row identified by activitypublog_id.
    """
    activity_log = ActivityPubLog.query.get(activitypublog_id)
    local_user_ap_id = request_json['object']   # the user being followed
    follow_id = request_json['id']              # echoed back inside the Accept's inner object
    local_user = find_actor_or_create(local_user_ap_id, create_if_not_found=False)
    remote_user = User.query.get(remote_user_id)
    if local_user and local_user.is_local() and not remote_user.is_local():
        existing_follower = UserFollower.query.filter_by(local_user_id=local_user.id, remote_user_id=remote_user.id).first()
        if not existing_follower:
            # Follows are accepted immediately unless the user opted into manual approval.
            auto_accept = not local_user.ap_manually_approves_followers
            new_follower = UserFollower(local_user_id=local_user.id, remote_user_id=remote_user.id, is_accepted=auto_accept)
            if not local_user.ap_followers_url:
                # Backfill the followers collection URL for accounts created before it existed.
                local_user.ap_followers_url = local_user.public_url() + '/followers'
            db.session.add(new_follower)
            accept = {
                "@context": default_context(),
                "actor": local_user.public_url(),
                "to": [
                    remote_user.public_url()
                ],
                "object": {
                    "actor": remote_user.public_url(),
                    "to": None,
                    "object": local_user.public_url(),
                    "type": "Follow",
                    "id": follow_id
                },
                "type": "Accept",
                "id": f"https://{current_app.config['SERVER_NAME']}/activities/accept/" + gibberish(32)
            }
            # Sign the Accept with the followed user's key and POST it to the follower's inbox.
            if post_request(remote_user.ap_inbox_url, accept, local_user.private_key, f"{local_user.public_url()}#main-key") is True:
                activity_log.result = 'success'
            else:
                activity_log.exception_message = 'Error sending Accept'
    else:
        activity_log.exception_message = 'Could not find local user'
        activity_log.result = 'failure'
    # Persists the UserFollower row (if added), any backfilled URL, and the log outcome.
    db.session.commit()
2024-04-29 17:13:15 +01:00
2023-12-27 14:38:41 +13:00
@bp.route('/c/<actor>/followers', methods=['GET'])
def community_followers(actor):
    """ActivityPub followers collection for a local community (count only)."""
    actor = actor.strip()
    community = Community.query.filter_by(name=actor, banned=False, ap_id=None).first()
    if community is None:
        abort(404)
    # Individual follower identities are not exposed; only the total.
    collection = {
        "@context": default_context(),
        "id": f'https://{current_app.config["SERVER_NAME"]}/c/{actor}/followers',
        "type": "Collection",
        "totalItems": community_members(community.id),
        "items": []
    }
    resp = jsonify(collection)
    resp.content_type = 'application/activity+json'
    return resp
2023-12-09 22:14:16 +13:00
2024-04-29 19:47:06 +01:00
@bp.route('/u/<actor>/followers', methods=['GET'])
def user_followers(actor):
    """ActivityPub followers collection for a local user."""
    actor = actor.strip()
    user = User.query.filter_by(user_name=actor, banned=False, ap_id=None).first()
    if user is None or not user.ap_followers_url:
        abort(404)
    # All followers except those the user has blocked: outer-join UserBlock
    # and keep only rows where no matching block exists.
    followers = User.query.join(UserFollower, User.id == UserFollower.remote_user_id) \
        .outerjoin(UserBlock, (User.id == UserBlock.blocker_id) & (UserFollower.local_user_id == UserBlock.blocked_id)) \
        .filter((UserFollower.local_user_id == user.id) & (UserBlock.id == None)) \
        .all()
    items = [follower.ap_public_url for follower in followers]
    collection = {
        "@context": default_context(),
        "id": user.ap_followers_url,
        "type": "Collection",
        "totalItems": len(items),
        "items": items
    }
    resp = jsonify(collection)
    resp.content_type = 'application/activity+json'
    return resp
2024-11-18 10:09:57 +13:00
@bp.route('/comment/<int:comment_id>', methods=['GET', 'HEAD'])
def comment_ap(comment_id):
    """Serve a comment as ActivityPub JSON, or fall back to the HTML discussion view."""
    reply = PostReply.query.get_or_404(comment_id)
    if not is_activitypub_request():
        return continue_discussion(reply.post.id, comment_id)
    if request.method == 'GET':
        reply_data = comment_model_to_json(reply)
    else:  # HEAD: headers only, empty body
        reply_data = []
    resp = jsonify(reply_data)
    resp.content_type = 'application/activity+json'
    resp.headers.set('Vary', 'Accept')
    resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/comment/{reply.id}>; rel="alternate"; type="text/html"')
    return resp
2024-11-18 10:09:57 +13:00
@bp.route('/post/<int:post_id>/', methods=['GET', 'HEAD'])
def post_ap2(post_id):
    """Canonicalise trailing-slash post URLs onto the main post route."""
    canonical = url_for('activitypub.post_ap', post_id=post_id)
    return redirect(canonical)
2024-11-18 10:09:57 +13:00
@bp.route('/post/<int:post_id>', methods=['GET', 'HEAD', 'POST'])
def post_ap(post_id):
    """Serve a post as ActivityPub JSON, or fall back to the HTML post view."""
    if request.method in ('GET', 'HEAD') and is_activitypub_request():
        post = Post.query.get_or_404(post_id)
        if request.method == 'HEAD':
            post_data = []  # headers only, empty body
        else:
            post_data = post_to_page(post)
            post_data['@context'] = default_context()
        resp = jsonify(post_data)
        resp.content_type = 'application/activity+json'
        resp.headers.set('Vary', 'Accept')
        resp.headers.set('Link', f'<https://{current_app.config["SERVER_NAME"]}/post/{post.id}>; rel="alternate"; type="text/html"')
        return resp
    else:
        return show_post(post_id)
2023-12-22 14:05:39 +13:00
@bp.route('/activities/<type>/<id>')
@cache.cached(timeout=600)
def activities_json(type, id):
    """Serve the logged JSON of a locally generated activity by type and id.

    Parameter names shadow builtins but are fixed by the route placeholders.
    """
    activity_url = f"https://{current_app.config['SERVER_NAME']}/activities/{type}/{id}"
    activity = ActivityPubLog.query.filter_by(activity_id=activity_url).first()
    if activity is None:
        abort(404)
    activity_json = json.loads(activity.activity_json) if activity.activity_json is not None else {}
    resp = jsonify(activity_json)
    resp.content_type = 'application/activity+json'
    return resp
2024-04-20 20:46:51 +12:00
# Other instances can query the result of their POST to the inbox by using this endpoint. The ID of the activity they
# sent (minus the https:// on the front) is the id parameter. e.g. https://piefed.ngrok.app/activity_result/piefed.ngrok.app/activities/announce/EfjyZ3BE5SzQK0C
@bp.route('/activity_result/<path:id>')
def activity_result(id):
    """Report success/failure of a previously received inbox activity."""
    activity = ActivityPubLog.query.filter_by(activity_id=f'https://{id}').first()
    if activity is None:
        abort(404)
    if activity.result == 'success':
        return jsonify('Ok')
    return jsonify({'error': activity.result, 'message': activity.exception_message})
2024-11-18 22:05:25 +00:00
def process_new_content(user, community, store_ap_json, request_json, announced=True):
    """Create or update a post or reply from an incoming Create/Update activity.

    `announced` is True when the activity arrived wrapped in an Announce from
    the community's instance, False when it was sent directly by the author's
    instance — in the direct case we announce it onward to our own followers
    after processing.
    """
    log_id = request_json['id']  # renamed from `id` so the builtin isn't shadowed
    if not announced:
        in_reply_to = request_json['object']['inReplyTo'] if 'inReplyTo' in request_json['object'] else None
        ap_id = request_json['object']['id']
        announce_id = None
        activity_json = request_json
    else:
        in_reply_to = request_json['object']['object']['inReplyTo'] if 'inReplyTo' in request_json['object']['object'] else None
        ap_id = request_json['object']['object']['id']
        announce_id = request_json['id']
        activity_json = request_json['object']

    if not in_reply_to:  # Creating a new post
        post = Post.query.filter_by(ap_id=ap_id).first()
        if post:
            # Post already exists: a Create arriving now is out of order.
            if activity_json['type'] == 'Create':
                log_incoming_ap(log_id, APLOG_CREATE, APLOG_FAILURE, request_json if store_ap_json else None, 'Create processed after Update')
                return
            if user.id == post.user_id:  # only the author may edit
                update_post_from_activity(post, activity_json)
                log_incoming_ap(log_id, APLOG_UPDATE, APLOG_SUCCESS, request_json if store_ap_json else None)
                if not announced:
                    announce_activity_to_followers(post.community, post.author, request_json)
                return
            else:
                log_incoming_ap(log_id, APLOG_UPDATE, APLOG_FAILURE, request_json if store_ap_json else None, 'Edit attempt denied')
                return
        else:
            if can_create_post(user, community):
                try:
                    post = create_post(store_ap_json, community, activity_json, user, announce_id=announce_id)
                    if post:
                        log_incoming_ap(log_id, APLOG_CREATE, APLOG_SUCCESS, request_json if store_ap_json else None)
                        if not announced:
                            announce_activity_to_followers(community, user, request_json)
                    return
                except TypeError:
                    current_app.logger.error('TypeError: ' + str(request_json))
                    log_incoming_ap(log_id, APLOG_CREATE, APLOG_FAILURE, request_json if store_ap_json else None, 'TypeError. See log file.')
                    return
            else:
                log_incoming_ap(log_id, APLOG_CREATE, APLOG_FAILURE, request_json if store_ap_json else None, 'User cannot create post in Community')
                return
    else:  # Creating a reply / comment
        reply = PostReply.query.filter_by(ap_id=ap_id).first()
        if reply:
            # Reply already exists: a Create arriving now is out of order.
            if activity_json['type'] == 'Create':
                log_incoming_ap(log_id, APLOG_CREATE, APLOG_FAILURE, request_json if store_ap_json else None, 'Create processed after Update')
                return
            if user.id == reply.user_id:  # only the author may edit
                update_post_reply_from_activity(reply, activity_json)
                log_incoming_ap(log_id, APLOG_UPDATE, APLOG_SUCCESS, request_json if store_ap_json else None)
                if not announced:
                    announce_activity_to_followers(reply.community, reply.author, request_json)
                return
            else:
                log_incoming_ap(log_id, APLOG_UPDATE, APLOG_FAILURE, request_json if store_ap_json else None, 'Edit attempt denied')
                return
        else:
            if can_create_post_reply(user, community):
                try:
                    reply = create_post_reply(store_ap_json, community, in_reply_to, activity_json, user, announce_id=announce_id)
                    if reply:
                        log_incoming_ap(log_id, APLOG_CREATE, APLOG_SUCCESS, request_json if store_ap_json else None)
                        if not announced:
                            announce_activity_to_followers(community, user, request_json)
                    return
                except TypeError:
                    current_app.logger.error('TypeError: ' + str(request_json))
                    log_incoming_ap(log_id, APLOG_CREATE, APLOG_FAILURE, request_json if store_ap_json else None, 'TypeError. See log file.')
                    return
            else:
                log_incoming_ap(log_id, APLOG_CREATE, APLOG_FAILURE, request_json if store_ap_json else None, 'User cannot create reply in Community')
                return
2024-11-19 11:51:29 +00:00
def process_upvote(user, store_ap_json, request_json, announced=True):
    """Apply an incoming ActivityPub Like to the referenced post or comment.

    When ``announced`` is True the Like arrived wrapped in an Announce, so the
    target's ap_id sits one level deeper in the payload. Every outcome is
    recorded with log_incoming_ap; the raw JSON is attached only when
    ``store_ap_json`` is set. A locally-originated (non-announced) vote is
    re-announced to the community's followers afterwards.
    """
    announce_id = request_json['id']
    # Announced activities nest the real object id under object.object.
    target_ap_id = request_json['object']['object'] if announced else request_json['object']
    target = find_liked_object(target_ap_id)
    if target is None:
        log_incoming_ap(announce_id, APLOG_LIKE, APLOG_FAILURE, request_json if store_ap_json else None, 'Unfound object ' + target_ap_id)
        return
    if not can_upvote(user, target.community):
        # Vote not permitted (e.g. community/user restrictions) — note and drop.
        log_incoming_ap(announce_id, APLOG_LIKE, APLOG_IGNORED, request_json if store_ap_json else None, 'Cannot upvote this')
        return
    if isinstance(target, (Post, PostReply)):
        target.vote(user, 'upvote')
    log_incoming_ap(announce_id, APLOG_LIKE, APLOG_SUCCESS, request_json if store_ap_json else None)
    if not announced:
        announce_activity_to_followers(target.community, user, request_json)
def process_downvote(user, store_ap_json, request_json, announced=True):
    """Apply an incoming ActivityPub Dislike to the referenced post or comment.

    Mirrors process_upvote: when ``announced`` is True the Dislike is wrapped
    in an Announce and the target's ap_id is nested one level deeper. Outcomes
    are recorded with log_incoming_ap (raw JSON kept only when
    ``store_ap_json``), and a non-announced vote is relayed to the community's
    followers.
    """
    announce_id = request_json['id']
    # Announced activities nest the real object id under object.object.
    target_ap_id = request_json['object']['object'] if announced else request_json['object']
    target = find_liked_object(target_ap_id)
    if target is None:
        log_incoming_ap(announce_id, APLOG_DISLIKE, APLOG_FAILURE, request_json if store_ap_json else None, 'Unfound object ' + target_ap_id)
        return
    if not can_downvote(user, target.community):
        # Vote not permitted (e.g. downvotes disabled here) — note and drop.
        log_incoming_ap(announce_id, APLOG_DISLIKE, APLOG_IGNORED, request_json if store_ap_json else None, 'Cannot downvote this')
        return
    if isinstance(target, (Post, PostReply)):
        target.vote(user, 'downvote')
    log_incoming_ap(announce_id, APLOG_DISLIKE, APLOG_SUCCESS, request_json if store_ap_json else None)
    if not announced:
        announce_activity_to_followers(target.community, user, request_json)