from datetime import datetime, timedelta
from time import sleep
from random import randint
from typing import List

import httpx
from PIL import Image, ImageOps
from flask import request, abort, g, current_app, json
from flask_login import current_user
from pillow_heif import register_heif_opener

from app import db, cache, celery
from app.activitypub.signature import post_request, default_context
from app.activitypub.util import find_actor_or_create, actor_json_to_model, post_json_to_model, ensure_domains_match, \
    find_hashtag_or_create
from app.constants import POST_TYPE_ARTICLE, POST_TYPE_LINK, POST_TYPE_IMAGE, POST_TYPE_VIDEO, NOTIF_POST, \
    POST_TYPE_POLL
from app.models import Community, File, BannedInstances, PostReply, Post, utcnow, CommunityMember, Site, \
    Instance, Notification, User, ActivityPubLog, NotificationSubscription, PollChoice, Poll, Tag
from app.utils import get_request, gibberish, markdown_to_html, domain_from_url, \
    is_image_url, ensure_directory_exists, shorten_string, \
    remove_tracking_from_link, ap_datetime, instance_banned, blocked_phrases, url_to_thumbnail_file, opengraph_parse, \
    piefed_markdown_to_lemmy_markdown, get_task_session
from sqlalchemy import func, desc, text
import os


allowed_extensions = ['.gif', '.jpg', '.jpeg', '.png', '.webp', '.heic', '.mpo', '.avif', '.svg']


def search_for_community(address: str):
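    """Find a community by its !name@server address, creating it locally if necessary.

    Returns the existing Community if it is already known (local or previously federated);
    otherwise resolves the address via WebFinger, fetches the ActivityPub actor and, if it is
    a Group, stores it and queues a backfill of moderators and recent posts. Returns None if
    the community cannot be resolved. Raises if the server is a banned instance.
    """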
    if address.startswith('!'):
        name, server = address[1:].split('@')
        banned = BannedInstances.query.filter_by(domain=server).first()
        if banned:
            reason = f" Reason: {banned.reason}" if banned.reason is not None else ''
            raise Exception(f"{server} is blocked.{reason}")  # todo: create custom exception class hierarchy

        if current_app.config['SERVER_NAME'] == server:
            already_exists = Community.query.filter_by(name=name, ap_id=None).first()
            return already_exists

        already_exists = Community.query.filter_by(ap_id=address[1:]).first()
        if already_exists:
            return already_exists

        # Look up the profile address of the community using WebFinger
        try:
            webfinger_data = get_request(f"https://{server}/.well-known/webfinger",
                                         params={'resource': f"acct:{address[1:]}"})
        except httpx.HTTPError:
            sleep(randint(3, 10))
            try:
                webfinger_data = get_request(f"https://{server}/.well-known/webfinger",
                                             params={'resource': f"acct:{address[1:]}"})
            except httpx.HTTPError:
                return None

        if webfinger_data.status_code == 200:
            webfinger_json = webfinger_data.json()
            for links in webfinger_json['links']:
                if 'rel' in links and links['rel'] == 'self':  # this contains the URL of the activitypub profile
                    type = links['type'] if 'type' in links else 'application/activity+json'
                    # retrieve the activitypub profile
                    community_data = get_request(links['href'], headers={'Accept': type})
                    # to see the structure of the json contained in community_data, do a GET to
                    # https://lemmy.world/c/technology with header Accept: application/activity+json
                    if community_data.status_code == 200:
                        community_json = community_data.json()
                        community_data.close()
                        if community_json['type'] == 'Group':
                            community = actor_json_to_model(community_json, name, server)
                            if community:
                                if community.ap_profile_id == f"https://{server}/video-channels/{name}":
                                    if current_app.debug:
                                        retrieve_peertube_mods_and_backfill(community.id, community_json['attributedTo'])
                                    else:
                                        retrieve_peertube_mods_and_backfill.delay(community.id, community_json['attributedTo'])
                                    return community
                                if current_app.debug:
                                    retrieve_mods_and_backfill(community.id)
                                else:
                                    retrieve_mods_and_backfill.delay(community.id)
                            return community
        return None


@celery.task
def retrieve_peertube_mods_and_backfill(community_id: int, mods: list):
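    """Mark the given PeerTube channel accounts as moderators and backfill recent videos.

    Fetches the first page of the channel's outbox (PeerTube pages only hold 10 items) and
    turns each Video activity into a local Post, recording the outcome in ActivityPubLog.
    """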
    community = Community.query.get(community_id)
    site = Site.query.get(1)
    for m in mods:
        user = find_actor_or_create(m['id'])
        if user:
            existing_membership = CommunityMember.query.filter_by(community_id=community.id, user_id=user.id).first()
            if existing_membership:
                existing_membership.is_moderator = True
            else:
                new_membership = CommunityMember(community_id=community.id, user_id=user.id, is_moderator=True)
                db.session.add(new_membership)
    community.restricted_to_mods = True
    db.session.commit()

    if community.ap_public_url:
        outbox_request = get_request(community.ap_outbox_url, headers={'Accept': 'application/activity+json'})
        if outbox_request.status_code == 200:
            outbox_data = outbox_request.json()
            outbox_request.close()
            if 'totalItems' in outbox_data and outbox_data['totalItems'] > 0:
                page1_request = get_request(outbox_data['first'], headers={'Accept': 'application/activity+json'})
                if page1_request.status_code == 200:
                    page1_data = page1_request.json()
                    page1_request.close()
                    if 'type' in page1_data and page1_data['type'] == 'OrderedCollectionPage' and 'orderedItems' in page1_data:
                        # only 10 posts per page for PeerTube
                        for activity in page1_data['orderedItems']:
                            video_request = get_request(activity['object'], headers={'Accept': 'application/activity+json'})
                            if video_request.status_code == 200:
                                video_data = video_request.json()
                                video_request.close()
                                activity_log = ActivityPubLog(direction='in', activity_id=video_data['id'], activity_type='Video', result='failure')
                                if site.log_activitypub_json:
                                    activity_log.activity_json = json.dumps(video_data)
                                db.session.add(activity_log)
                                if not ensure_domains_match(video_data):
                                    activity_log.exception_message = 'Domains do not match'
                                    db.session.commit()
                                    continue
                                if user and user.is_local():
                                    activity_log.exception_message = 'Activity about local content which is already present'
                                    db.session.commit()
                                    continue
                                if user:
                                    post = post_json_to_model(activity_log, video_data, user, community)
                                    post.ap_announce_id = activity['id']
                                    post.ranking = post.post_ranking(post.score, post.posted_at)
                                else:
                                    activity_log.exception_message = 'Could not find or create actor'
                                db.session.commit()
    if community.post_count > 0:
        community.last_active = Post.query.filter(Post.community_id == community_id).order_by(desc(Post.posted_at)).first().posted_at
        db.session.commit()


@celery.task
def retrieve_mods_and_backfill(community_id: int):
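    """Populate a newly discovered community: moderator list, up to 50 recent posts, stickies.

    Moderators come from the community's ActivityPub moderators collection and posts from its
    outbox (skipped for NSFW/NSFL communities when the site disallows them); featured posts
    are marked sticky. Each processed activity is recorded in ActivityPubLog.
    """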
    with current_app.app_context():
        community = Community.query.get(community_id)
        site = Site.query.get(1)

        if community.ap_moderators_url:
            mods_request = get_request(community.ap_moderators_url, headers={'Accept': 'application/activity+json'})
            if mods_request.status_code == 200:
                mods_data = mods_request.json()
                mods_request.close()
                if mods_data and mods_data['type'] == 'OrderedCollection' and 'orderedItems' in mods_data:
                    for actor in mods_data['orderedItems']:
                        sleep(0.5)
                        user = find_actor_or_create(actor)
                        if user:
                            existing_membership = CommunityMember.query.filter_by(community_id=community.id, user_id=user.id).first()
                            if existing_membership:
                                existing_membership.is_moderator = True
                            else:
                                new_membership = CommunityMember(community_id=community.id, user_id=user.id, is_moderator=True)
                                db.session.add(new_membership)
                    db.session.commit()

        # only backfill nsfw if nsfw communities are allowed
        if (community.nsfw and not site.enable_nsfw) or (community.nsfl and not site.enable_nsfl):
            return

        # download 50 old posts
        if community.ap_outbox_url:
            outbox_request = get_request(community.ap_outbox_url, headers={'Accept': 'application/activity+json'})
            if outbox_request.status_code == 200:
                outbox_data = outbox_request.json()
                outbox_request.close()
                if 'type' in outbox_data and outbox_data['type'] == 'OrderedCollection' and 'orderedItems' in outbox_data:
                    activities_processed = 0
                    for activity in outbox_data['orderedItems']:
                        activity_log = ActivityPubLog(direction='in', activity_id=activity['id'], activity_type='Announce', result='failure')
                        if site.log_activitypub_json:
                            activity_log.activity_json = json.dumps(activity)
                        db.session.add(activity_log)
                        if 'object' in activity and 'object' in activity['object']:
                            if not ensure_domains_match(activity['object']['object']):
                                activity_log.exception_message = 'Domains do not match'
                                db.session.commit()
                                continue
                            user = find_actor_or_create(activity['object']['actor'])
                            if user and user.is_local():
                                activity_log.exception_message = 'Activity about local content which is already present'
                                db.session.commit()
                                continue
                            if user:
                                post = post_json_to_model(activity_log, activity['object']['object'], user, community)
                                if post:
                                    post.ap_create_id = activity['object']['id']
                                    post.ap_announce_id = activity['id']
                                    post.ranking = post.post_ranking(post.score, post.posted_at)
                                    if post.url:
                                        other_posts = Post.query.filter(Post.id != post.id, Post.url == post.url, Post.deleted == False,
                                                                        Post.posted_at > post.posted_at - timedelta(days=3),
                                                                        Post.posted_at < post.posted_at + timedelta(days=3)).all()
                                        for op in other_posts:
                                            if op.cross_posts is None:
                                                op.cross_posts = [post.id]
                                            else:
                                                op.cross_posts.append(post.id)
                                            if post.cross_posts is None:
                                                post.cross_posts = [op.id]
                                            else:
                                                post.cross_posts.append(op.id)
                                        db.session.commit()
                            else:
                                activity_log.exception_message = 'Could not find or create actor'
                        db.session.commit()
                        activities_processed += 1
                        if activities_processed >= 50:
                            break
                c = Community.query.get(community.id)
                if c.post_count > 0:
                    c.last_active = Post.query.filter(Post.community_id == community_id).order_by(desc(Post.posted_at)).first().posted_at
                db.session.commit()

        if community.ap_featured_url:
            featured_request = get_request(community.ap_featured_url, headers={'Accept': 'application/activity+json'})
            if featured_request.status_code == 200:
                featured_data = featured_request.json()
                featured_request.close()
                if featured_data['type'] == 'OrderedCollection' and 'orderedItems' in featured_data:
                    for item in featured_data['orderedItems']:
                        featured_id = item['id']
                        p = Post.query.filter(Post.ap_id == featured_id).first()
                        if p:
                            p.sticky = True
                    db.session.commit()


def actor_to_community(actor) -> Community:
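    """Return the non-banned Community matching an actor string.

    Addresses containing '@' are treated as remote ap_ids; otherwise the name is matched
    case-insensitively against local communities only.
    """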
    actor = actor.strip()
    if '@' in actor:
        community = Community.query.filter_by(banned=False, ap_id=actor).first()
    else:
        community = Community.query.filter(func.lower(Community.name) == func.lower(actor)).filter_by(banned=False, ap_id=None).first()
    return community


def save_post(form, post: Post, type: int):
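    """Populate and persist a Post from the submitted form data.

    Handles every post type (article, link, image, video, poll): sets the common fields,
    fetches or generates thumbnails, stores uploaded images in three sizes, applies blocked
    phrase filtering and reputation-based initial scores for new posts, refreshes tags and
    poll choices, and manages the author's reply-notification subscription.
    """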
    post.indexable = current_user.indexable
    post.sticky = form.sticky.data
    post.nsfw = form.nsfw.data
    post.nsfl = form.nsfl.data
    post.notify_author = form.notify_author.data
    post.language_id = form.language_id.data
    current_user.language_id = form.language_id.data
    post.title = form.title.data.strip()
    post.body = piefed_markdown_to_lemmy_markdown(form.body.data)
    post.body_html = markdown_to_html(post.body)

    if not type or type == POST_TYPE_ARTICLE:
        post.type = POST_TYPE_ARTICLE
    elif type == POST_TYPE_LINK:
        url_changed = post.id is None or form.link_url.data != post.url
        post.url = remove_tracking_from_link(form.link_url.data.strip())
        post.type = POST_TYPE_LINK
        domain = domain_from_url(form.link_url.data)
        domain.post_count += 1
        post.domain = domain
        if url_changed:
            if post.image_id:
                remove_old_file(post.image_id)
                post.image_id = None

            if post.url.endswith('.mp4') or post.url.endswith('.webm'):
                post.type = POST_TYPE_VIDEO
                file = File(source_url=form.link_url.data)  # make_image_sizes() will take care of turning this into a still image
                post.image = file
                db.session.add(file)
            else:
                unused, file_extension = os.path.splitext(form.link_url.data)
                # this url is a link to an image - turn it into an image post
                if file_extension.lower() in allowed_extensions:
                    file = File(source_url=form.link_url.data)
                    post.image = file
                    db.session.add(file)
                    post.type = POST_TYPE_IMAGE
                else:
                    # check opengraph tags on the page and make a thumbnail if an image is available in the og:image meta tag
                    if not post.type == POST_TYPE_VIDEO:
                        tn_url = form.link_url.data
                        if tn_url[:32] == 'https://www.youtube.com/watch?v=':
                            tn_url = 'https://youtu.be/' + tn_url[32:43]  # better chance of thumbnail from youtu.be than youtube.com
                        opengraph = opengraph_parse(tn_url)
                        if opengraph and (opengraph.get('og:image', '') != '' or opengraph.get('og:image:url', '') != ''):
                            filename = opengraph.get('og:image') or opengraph.get('og:image:url')
                            if not filename.startswith('/'):
                                file = url_to_thumbnail_file(filename)
                                if file:
                                    file.alt_text = shorten_string(opengraph.get('og:title'), 295)
                                    post.image = file
                                    db.session.add(file)

    elif type == POST_TYPE_IMAGE:
        post.type = POST_TYPE_IMAGE
        alt_text = form.image_alt_text.data if form.image_alt_text.data else form.title.data
        uploaded_file = request.files['image_file']
        # If we are uploading a new file in place of an existing one, just remove the old one
        if post.image_id is not None and uploaded_file:
            post.image.delete_from_disk()
            image_id = post.image_id
            post.image_id = None
            db.session.add(post)
            db.session.commit()
            File.query.filter_by(id=image_id).delete()
        if uploaded_file and uploaded_file.filename != '':
            if post.image_id:
                remove_old_file(post.image_id)
                post.image_id = None
            # check if this is an allowed type of file
            file_ext = os.path.splitext(uploaded_file.filename)[1]
            if file_ext.lower() not in allowed_extensions:
                abort(400)
            new_filename = gibberish(15)
            # set up the storage directory
            directory = 'app/static/media/posts/' + new_filename[0:2] + '/' + new_filename[2:4]
            ensure_directory_exists(directory)
            # save the file
            final_place = os.path.join(directory, new_filename + file_ext)
            final_place_medium = os.path.join(directory, new_filename + '_medium.webp')
            final_place_thumbnail = os.path.join(directory, new_filename + '_thumbnail.webp')
            uploaded_file.seek(0)
            uploaded_file.save(final_place)
            if file_ext.lower() == '.heic':
                register_heif_opener()
            Image.MAX_IMAGE_PIXELS = 89478485
            # resize if necessary
            img = Image.open(final_place)
            if '.' + img.format.lower() in allowed_extensions:
                img = ImageOps.exif_transpose(img)
                # limit full sized version to 2000px
                img_width = img.width
                img_height = img.height
                img.thumbnail((2000, 2000))
                img.save(final_place)
                # medium sized version
                img.thumbnail((512, 512))
                img.save(final_place_medium, format="WebP", quality=93)
                # save a third, smaller, version as a thumbnail
                img.thumbnail((170, 170))
                img.save(final_place_thumbnail, format="WebP", quality=93)
                thumbnail_width = img.width
                thumbnail_height = img.height
                file = File(file_path=final_place_medium, file_name=new_filename + file_ext, alt_text=alt_text,
                            width=img_width, height=img_height, thumbnail_width=thumbnail_width,
                            thumbnail_height=thumbnail_height, thumbnail_path=final_place_thumbnail,
                            source_url=final_place.replace('app/static/', f"https://{current_app.config['SERVER_NAME']}/static/"))
                db.session.add(file)
                db.session.commit()
                post.image_id = file.id

    elif type == POST_TYPE_VIDEO:
        form.video_url.data = form.video_url.data.strip()
        url_changed = post.id is None or form.video_url.data != post.url
        post.url = remove_tracking_from_link(form.video_url.data.strip())
        post.type = POST_TYPE_VIDEO
        domain = domain_from_url(form.video_url.data)
        domain.post_count += 1
        post.domain = domain
        if url_changed:
            if post.image_id:
                remove_old_file(post.image_id)
                post.image_id = None

            if form.video_url.data.endswith('.mp4') or form.video_url.data.endswith('.webm'):
                file = File(source_url=form.video_url.data)  # make_image_sizes() will take care of turning this into a still image
                post.image = file
                db.session.add(file)
            else:
                # check opengraph tags on the page and make a thumbnail if an image is available in the og:image meta tag
                tn_url = form.video_url.data
                if tn_url[:32] == 'https://www.youtube.com/watch?v=':
                    tn_url = 'https://youtu.be/' + tn_url[32:43]  # better chance of thumbnail from youtu.be than youtube.com
                opengraph = opengraph_parse(tn_url)
                if opengraph and (opengraph.get('og:image', '') != '' or opengraph.get('og:image:url', '') != ''):
                    filename = opengraph.get('og:image') or opengraph.get('og:image:url')
                    if not filename.startswith('/'):
                        file = url_to_thumbnail_file(filename)
                        if file:
                            file.alt_text = shorten_string(opengraph.get('og:title'), 295)
                            post.image = file
                            db.session.add(file)

    elif type == POST_TYPE_POLL:
        post.body = form.title.data + '\n' + form.body.data if post.title not in form.body.data else form.body.data
        post.body_html = markdown_to_html(post.body)
        post.type = POST_TYPE_POLL
    else:
        raise Exception('invalid post type')

    if post.id is None:
        if current_user.reputation > 100:
            post.up_votes = 1
            post.score = 1
        if current_user.reputation < -100:
            post.score = -1
        post.ranking = post.post_ranking(post.score, utcnow())

        # Filter by phrase
        blocked_phrases_list = blocked_phrases()
        for blocked_phrase in blocked_phrases_list:
            if blocked_phrase in post.title:
                abort(401)
                return
        if post.body:
            for blocked_phrase in blocked_phrases_list:
                if blocked_phrase in post.body:
                    abort(401)
                    return

        db.session.add(post)
    else:
        db.session.execute(text('DELETE FROM "post_tag" WHERE post_id = :post_id'), {'post_id': post.id})
    post.tags = tags_from_string_old(form.tags.data)
    db.session.commit()

    # Save poll choices. NB this deletes all votes whenever a poll is edited, partly because it's easier
    # to code but also to prevent malicious alteration of polls after people have already voted.
    if type == POST_TYPE_POLL:
        db.session.execute(text('DELETE FROM "poll_choice_vote" WHERE post_id = :post_id'), {'post_id': post.id})
        db.session.execute(text('DELETE FROM "poll_choice" WHERE post_id = :post_id'), {'post_id': post.id})
        for i in range(1, 10):
            choice_data = getattr(form, f"choice_{i}").data.strip()
            if choice_data != '':
                db.session.add(PollChoice(post_id=post.id, choice_text=choice_data, sort_order=i))
        poll = Poll.query.filter_by(post_id=post.id).first()
        if poll is None:
            poll = Poll(post_id=post.id)
            db.session.add(poll)
        poll.mode = form.mode.data
        if form.finish_in:
            poll.end_poll = end_poll_date(form.finish_in.data)
        poll.local_only = form.local_only.data
        poll.latest_vote = None
        db.session.commit()

    # Notify author about replies
    # Remove any subscription that currently exists
    existing_notification = NotificationSubscription.query.filter(NotificationSubscription.entity_id == post.id,
                                                                  NotificationSubscription.user_id == current_user.id,
                                                                  NotificationSubscription.type == NOTIF_POST).first()
    if existing_notification:
        db.session.delete(existing_notification)
    # Add subscription if necessary
    if form.notify_author.data:
        new_notification = NotificationSubscription(name=post.title, user_id=current_user.id, entity_id=post.id,
                                                    type=NOTIF_POST)
        db.session.add(new_notification)

    g.site.last_active = utcnow()

    db.session.commit()


def end_poll_date(end_choice):
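    """Convert a poll duration choice such as '30m', '1d' or '7d' into an end datetime.

    For example, end_poll_date('3d') returns the current UTC time plus three days.
    Raises ValueError for unrecognised choices.
    """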
    delta_mapping = {
        '30m': timedelta(minutes=30),
        '1h': timedelta(hours=1),
        '6h': timedelta(hours=6),
        '12h': timedelta(hours=12),
        '1d': timedelta(days=1),
        '3d': timedelta(days=3),
        '7d': timedelta(days=7)
    }
    if end_choice in delta_mapping:
        return datetime.utcnow() + delta_mapping[end_choice]
    else:
        raise ValueError("Invalid choice")


def tags_from_string(tags: str) -> List[dict]:
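    """Parse a comma-separated tag string into ActivityPub Hashtag dicts.

    For example, '#news, politics' yields a list of {'type': 'Hashtag', 'name': ...} entries,
    creating any hashtags that do not exist yet.
    """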
    return_value = []
    tags = tags.strip()
    if tags == '':
        return []
    tag_list = tags.split(',')
    tag_list = [tag.strip() for tag in tag_list]
    for tag in tag_list:
        if tag[0] == '#':
            tag = tag[1:]
        tag_to_append = find_hashtag_or_create(tag)
        if tag_to_append:
            return_value.append({'type': 'Hashtag', 'name': tag_to_append.name})
    return return_value


def tags_from_string_old(tags: str) -> List[Tag]:
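    """Parse a comma-separated tag string into Tag model instances.

    Older variant of tags_from_string, used when assigning post.tags directly.
    """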
    return_value = []
    tags = tags.strip()
    if tags == '':
        return []
    tag_list = tags.split(',')
    tag_list = [tag.strip() for tag in tag_list]
    for tag in tag_list:
        if tag[0] == '#':
            tag = tag[1:]
        tag_to_append = find_hashtag_or_create(tag)
        if tag_to_append:
            return_value.append(tag_to_append)
    return return_value


def delete_post_from_community(post_id):
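    """Delete a post and federate the deletion; runs inline in debug mode, otherwise via Celery."""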
    if current_app.debug:
        delete_post_from_community_task(post_id)
    else:
        delete_post_from_community_task.delay(post_id)


@celery.task
def delete_post_from_community_task(post_id):
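    """Soft-delete the post and send a Delete activity: directly to the host instance of a
    remote community, or wrapped in an Announce to followers of a local community."""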
    post = Post.query.get(post_id)
    community = post.community
    post.deleted = True
    post.deleted_by = current_user.id
    db.session.commit()
    if not community.local_only:
        delete_json = {
            'id': f"https://{current_app.config['SERVER_NAME']}/activities/delete/{gibberish(15)}",
            'type': 'Delete',
            'actor': current_user.public_url(),
            'audience': post.community.public_url(),
            'to': [post.community.public_url(), 'https://www.w3.org/ns/activitystreams#Public'],
            'published': ap_datetime(utcnow()),
            'cc': [
                current_user.followers_url()
            ],
            'object': post.ap_id,
        }
        if not post.community.is_local():  # this is a remote community, send it to the instance that hosts it
            success = post_request(post.community.ap_inbox_url, delete_json, current_user.private_key,
                                   current_user.public_url() + '#main-key')
        else:  # local community - send it to followers on remote instances
            announce = {
                "id": f"https://{current_app.config['SERVER_NAME']}/activities/announce/{gibberish(15)}",
                "type": 'Announce',
                "to": [
                    "https://www.w3.org/ns/activitystreams#Public"
                ],
                "actor": post.community.ap_profile_id,
                "cc": [
                    post.community.ap_followers_url
                ],
                '@context': default_context(),
                'object': delete_json
            }
            for instance in post.community.following_instances():
                if instance.inbox and not current_user.has_blocked_instance(instance.id) and not instance_banned(instance.domain):
                    send_to_remote_instance(instance.id, post.community.id, announce)


def delete_post_reply_from_community(post_reply_id):
    if current_app.debug:
        delete_post_reply_from_community_task(post_reply_id)
    else:
        delete_post_reply_from_community_task.delay(post_reply_id)


@celery.task
def delete_post_reply_from_community_task(post_reply_id):
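    """Soft-delete a reply (if the current user wrote it or moderates the community) and
    federate the deletion in the same way as delete_post_from_community_task."""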
    post_reply = PostReply.query.get(post_reply_id)
    post = post_reply.post
    community = post.community
    if post_reply.user_id == current_user.id or community.is_moderator():
        post_reply.deleted = True
        post_reply.deleted_by = current_user.id
        db.session.commit()

        # federate delete
        if not post.community.local_only:
            delete_json = {
                'id': f"https://{current_app.config['SERVER_NAME']}/activities/delete/{gibberish(15)}",
                'type': 'Delete',
                'actor': current_user.public_url(),
                'audience': post.community.public_url(),
                'to': [post.community.public_url(), 'https://www.w3.org/ns/activitystreams#Public'],
                'published': ap_datetime(utcnow()),
                'cc': [
                    current_user.followers_url()
                ],
                'object': post_reply.ap_id,
            }
            if not post.community.is_local():  # this is a remote community, send it to the instance that hosts it
                success = post_request(post.community.ap_inbox_url, delete_json, current_user.private_key,
                                       current_user.public_url() + '#main-key')
            else:  # local community - send it to followers on remote instances
                announce = {
                    "id": f"https://{current_app.config['SERVER_NAME']}/activities/announce/{gibberish(15)}",
                    "type": 'Announce',
                    "to": [
                        "https://www.w3.org/ns/activitystreams#Public"
                    ],
                    "actor": post.community.ap_profile_id,
                    "cc": [
                        post.community.ap_followers_url
                    ],
                    '@context': default_context(),
                    'object': delete_json
                }
                for instance in post.community.following_instances():
                    if instance.inbox and not current_user.has_blocked_instance(instance.id) and not instance_banned(instance.domain):
                        send_to_remote_instance(instance.id, post.community.id, announce)


def remove_old_file(file_id):
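    """Delete the stored copies of a File record from disk."""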
    remove_file = File.query.get(file_id)
    remove_file.delete_from_disk()


def save_icon_file(icon_file, directory='communities') -> File:
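    """Store an uploaded icon image and return a File record for it.

    The image is saved under app/static/media/<directory>/, resized to at most 250px with a
    40px WebP thumbnail; SVGs are kept as-is. Disallowed file types abort with a 400 error.
    """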
    # check if this is an allowed type of file
    file_ext = os.path.splitext(icon_file.filename)[1]
    if file_ext.lower() not in allowed_extensions:
        abort(400)
    new_filename = gibberish(15)
    # set up the storage directory
    directory = f'app/static/media/{directory}/' + new_filename[0:2] + '/' + new_filename[2:4]
    ensure_directory_exists(directory)
    # save the file
    final_place = os.path.join(directory, new_filename + file_ext)
    final_place_thumbnail = os.path.join(directory, new_filename + '_thumbnail.webp')
    icon_file.save(final_place)
    if file_ext.lower() == '.heic':
        register_heif_opener()
    elif file_ext.lower() == '.avif':
        import pillow_avif
    # resize if necessary
    if file_ext.lower() in allowed_extensions:
        if file_ext.lower() == '.svg':  # svgs don't need to be resized
            file = File(file_path=final_place, file_name=new_filename + file_ext, alt_text=f'{directory} icon',
                        thumbnail_path=final_place)
            db.session.add(file)
            return file
        else:
            Image.MAX_IMAGE_PIXELS = 89478485
            img = Image.open(final_place)
            img = ImageOps.exif_transpose(img)
            img_width = img.width
            img_height = img.height
            if img.width > 250 or img.height > 250:
                img.thumbnail((250, 250))
                img.save(final_place)
                img_width = img.width
                img_height = img.height
            # save a second, smaller, version as a thumbnail
            img.thumbnail((40, 40))
            img.save(final_place_thumbnail, format="WebP", quality=93)
            thumbnail_width = img.width
            thumbnail_height = img.height
            file = File(file_path=final_place, file_name=new_filename + file_ext, alt_text=f'{directory} icon',
                        width=img_width, height=img_height, thumbnail_width=thumbnail_width,
                        thumbnail_height=thumbnail_height, thumbnail_path=final_place_thumbnail)
            db.session.add(file)
            return file
    else:
        abort(400)


def save_banner_file(banner_file, directory='communities') -> File:
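    """Store an uploaded banner image and return a File record for it.

    The image is saved under app/static/media/<directory>/, resized to at most 1600x600 with
    an 878x500 WebP thumbnail. Disallowed file types abort with a 400 error.
    """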
    # check if this is an allowed type of file
    file_ext = os.path.splitext(banner_file.filename)[1]
    if file_ext.lower() not in allowed_extensions:
        abort(400)
    new_filename = gibberish(15)
    # set up the storage directory
    directory = f'app/static/media/{directory}/' + new_filename[0:2] + '/' + new_filename[2:4]
    ensure_directory_exists(directory)
    # save the file
    final_place = os.path.join(directory, new_filename + file_ext)
    final_place_thumbnail = os.path.join(directory, new_filename + '_thumbnail.webp')
    banner_file.save(final_place)
    if file_ext.lower() == '.heic':
        register_heif_opener()
    elif file_ext.lower() == '.avif':
        import pillow_avif
    # resize if necessary
    Image.MAX_IMAGE_PIXELS = 89478485
    img = Image.open(final_place)
    if '.' + img.format.lower() in allowed_extensions:
        img = ImageOps.exif_transpose(img)
        img_width = img.width
        img_height = img.height
        if img.width > 1600 or img.height > 600:
            img.thumbnail((1600, 600))
            img.save(final_place)
            img_width = img.width
            img_height = img.height

        # save a second, smaller, version as a thumbnail
        img.thumbnail((878, 500))
        img.save(final_place_thumbnail, format="WebP", quality=93)
        thumbnail_width = img.width
        thumbnail_height = img.height

        file = File(file_path=final_place, file_name=new_filename + file_ext, alt_text=f'{directory} banner',
                    width=img_width, height=img_height, thumbnail_path=final_place_thumbnail,
                    thumbnail_width=thumbnail_width, thumbnail_height=thumbnail_height)
        db.session.add(file)
        return file
    else:
        abort(400)


# NB this always signs POSTs as the community, so it is only suitable for Announce activities
def send_to_remote_instance(instance_id: int, community_id: int, payload):
    if current_app.debug:
        send_to_remote_instance_task(instance_id, community_id, payload)
    else:
        send_to_remote_instance_task.delay(instance_id, community_id, payload)


@celery.task
def send_to_remote_instance_task(instance_id: int, community_id: int, payload):
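    """POST the payload to one remote instance's inbox, signed with the community's key.

    Tracks delivery health on the Instance record: a successful send resets the failure count,
    a failure backs off the next attempt (failures ** 4 seconds) and marks the instance
    dormant after more than 10 failures.
    """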
    session = get_task_session()
    community: Community = session.query(Community).get(community_id)
    if community:
        instance: Instance = session.query(Instance).get(instance_id)
        if instance.inbox and instance.online() and not instance_banned(instance.domain):
            if post_request(instance.inbox, payload, community.private_key, community.ap_profile_id + '#main-key', timeout=10) is True:
                instance.last_successful_send = utcnow()
                instance.failures = 0
            else:
                instance.failures += 1
                instance.most_recent_attempt = utcnow()
                instance.start_trying_again = utcnow() + timedelta(seconds=instance.failures ** 4)
                if instance.failures > 10:
                    instance.dormant = True
    session.commit()
    session.close()


def community_in_list(community_id, community_list):
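    """Return True if community_id matches the first element of any tuple in community_list."""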
    for tup in community_list:
        if community_id == tup[0]:
            return True
    return False


def find_local_users(search: str) -> List[User]:
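    """Return local, non-banned, non-deleted users whose name matches the search term,
    ordered by reputation (highest first)."""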
    return User.query.filter(User.banned == False, User.deleted == False, User.ap_id == None,
                             User.user_name.ilike(f"%{search}%")).\
        order_by(desc(User.reputation)).all()