microblog.pub/app.py

import binascii
import json
import logging
import mimetypes
import os
import traceback
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from functools import wraps
from io import BytesIO
from typing import Any
from typing import Dict
from urllib.parse import urlencode
from urllib.parse import urlparse

import emoji_unicode
import html2text
import mf2py
import requests
import timeago
from bson.objectid import ObjectId
from flask import Flask
from flask import Response
from flask import abort
from flask import jsonify as flask_jsonify
from flask import make_response
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from flask import url_for
from itsdangerous import BadSignature
from little_boxes import activitypub as ap
from little_boxes.activitypub import ActivityType
from little_boxes.activitypub import clean_activity
from little_boxes.activitypub import format_datetime
from little_boxes.activitypub import get_backend
from little_boxes.content_helper import parse_markdown
from little_boxes.errors import ActivityGoneError
from little_boxes.errors import ActivityNotFoundError
from little_boxes.errors import Error
from little_boxes.errors import NotAnActivityError
from little_boxes.errors import NotFromOutboxError
from little_boxes.httpsig import HTTPSigAuth
from little_boxes.httpsig import verify_request
from little_boxes.webfinger import get_actor_url
from little_boxes.webfinger import get_remote_follow_template
from passlib.hash import bcrypt
from requests.exceptions import HTTPError
from u2flib_server import u2f
from werkzeug.utils import secure_filename

import activity_gc
import activitypub
import config
from activitypub import Box
from activitypub import _answer_key
from activitypub import embed_collection
from api import api
from app_utils import MY_PERSON
from app_utils import back
from app_utils import csrf
from config import ADMIN_API_KEY
from config import BASE_URL
from config import BLACKLIST
from config import DB
from config import DEBUG_MODE
from config import DOMAIN
from config import EMOJIS
from config import HEADERS
from config import ICON_URL
from config import ID
from config import JWT
from config import KEY
from config import ME
from config import MEDIA_CACHE
from config import PASS
from config import USER_AGENT
from config import USERNAME
from config import VERSION
from config import VERSION_DATE
from config import MetaKey
from config import _drop_db
from config import _meta
from poussetaches import PousseTaches
from tasks import Tasks
from utils import now
from utils import opengraph
from utils.key import get_secret_key
from utils.lookup import lookup
from utils.notifications import set_inbox_flags
from utils.template_filters import filters

p = PousseTaches(
    os.getenv("MICROBLOGPUB_POUSSETACHES_HOST", "http://localhost:7991"),
    os.getenv("MICROBLOGPUB_INTERNAL_HOST", "http://localhost:5000"),
)

# p = PousseTaches("http://localhost:7991", "http://localhost:5000")


app = Flask(__name__)
app.secret_key = get_secret_key("flask")
app.register_blueprint(filters)
app.register_blueprint(api, url_prefix="/api")
app.config.update(WTF_CSRF_CHECK_DEFAULT=False)
csrf.init_app(app)

logger = logging.getLogger(__name__)

# Hook up Flask logging with gunicorn
root_logger = logging.getLogger()
if os.getenv("FLASK_DEBUG"):
    logger.setLevel(logging.DEBUG)
    root_logger.setLevel(logging.DEBUG)
    root_logger.handlers = app.logger.handlers
else:
    gunicorn_logger = logging.getLogger("gunicorn.error")
    root_logger.handlers = gunicorn_logger.handlers
    root_logger.setLevel(gunicorn_logger.level)

SIG_AUTH = HTTPSigAuth(KEY)


def is_blacklisted(url: str) -> bool:
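    """Return True if the given URL's host is in the BLACKLIST (its activities get dropped)."""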
    try:
        return urlparse(url).netloc in BLACKLIST
    except Exception:
        logger.exception(f"failed to check the blacklist for {url}")
        return False


def verify_pass(pwd):
    return bcrypt.verify(pwd, PASS)


@app.context_processor
def inject_config():
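    """Injects the global counters and config used by the base template."""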
    q = {
        "type": "Create",
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.public": True,
    }
    notes_count = DB.activities.find(
        {"box": Box.OUTBOX.value, "$or": [q, {"type": "Announce", "meta.undo": False}]}
    ).count()
    # FIXME(tsileo): rename to all_count, and remove poll answers from it
    all_q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "meta.undo": False,
        "meta.deleted": False,
        "meta.poll_answer": False,
    }
    liked_count = DB.activities.count(
        {
            "box": Box.OUTBOX.value,
            "meta.deleted": False,
            "meta.undo": False,
            "type": ActivityType.LIKE.value,
        }
    )
    followers_q = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    following_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    unread_notifications_q = {_meta(MetaKey.NOTIFICATION_UNREAD): True}

    logged_in = session.get("logged_in", False)

    return dict(
        microblogpub_version=VERSION,
        config=config,
        logged_in=logged_in,
        followers_count=DB.activities.count(followers_q),
        following_count=DB.activities.count(following_q) if logged_in else 0,
        notes_count=notes_count,
        liked_count=liked_count,
        with_replies_count=DB.activities.count(all_q) if logged_in else 0,
        unread_notifications_count=DB.activities.count(unread_notifications_q)
        if logged_in
        else 0,
        me=ME,
        base_url=config.BASE_URL,
    )


@app.after_request
def set_x_powered_by(response):
    response.headers["X-Powered-By"] = "microblog.pub"
    return response


def add_response_headers(headers={}):
    """This decorator adds the headers passed in to the response"""

    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            resp = make_response(f(*args, **kwargs))
            h = resp.headers
            for header, value in headers.items():
                h[header] = value
            return resp

        return decorated_function

    return decorator


def noindex(f):
    """This decorator adds the X-Robots-Tag: noindex, nofollow header to the response"""
    return add_response_headers({"X-Robots-Tag": "noindex, nofollow"})(f)


def login_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not session.get("logged_in"):
            return redirect(url_for("admin_login", next=request.url))
        return f(*args, **kwargs)

    return decorated_function


def _api_required():
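    """Enforce API auth: a logged-in session (CSRF-protected) or a JWT bearer/IndieAuth token."""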
    if session.get("logged_in"):
        if request.method not in ["GET", "HEAD"]:
            # If a standard API request is made with a "login session", it must have a CSRF token
            csrf.protect()
        return

    # Token verification
    token = request.headers.get("Authorization", "").replace("Bearer ", "")
    if not token:
        # IndieAuth token
        token = request.form.get("access_token", "")

    # Will raise a BadSignature on bad auth
    payload = JWT.loads(token)
    logger.info(f"api call by {payload}")


def api_required(f):
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            _api_required()
        except BadSignature:
            abort(401)

        return f(*args, **kwargs)

    return decorated_function


def jsonify(**data):
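    """Like Flask's jsonify, but with the default AP @context and an ActivityPub content type."""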
    if "@context" not in data:
        data["@context"] = config.DEFAULT_CTX
    return Response(
        response=json.dumps(data),
        headers={
            "Content-Type": "application/json"
            if app.debug
            else "application/activity+json"
        },
    )


def _get_ip():
    """Guess the IP address from the request. Only used for security purposes (failed logins or bad payloads).

    The geoip will also be returned if the "broxy" headers are set (broxy does the GeoIP lookup
    using an offline database and appends these special headers).
    """
    ip = request.headers.get("X-Forwarded-For", request.remote_addr)
    geoip = None
    if request.headers.get("Broxy-Geoip-Country"):
        geoip = (
            request.headers.get("Broxy-Geoip-Country")
            + "/"
            + request.headers.get("Broxy-Geoip-Region")
        )
    return ip, geoip


def is_api_request():
    h = request.headers.get("Accept")
    if h is None:
        return False
    h = h.split(",")[0]
    if h in HEADERS or h == "application/json":
        return True
    return False


@app.errorhandler(ValueError)
def handle_value_error(error):
    logger.error(
        f"caught value error: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify(message=error.args[0])
    response.status_code = 400
    return response


@app.errorhandler(Error)
def handle_activitypub_error(error):
    logger.error(
        f"caught activitypub error {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify(error.to_dict())
    response.status_code = error.status_code
    return response


class TaskError(Exception):
    """Raised to log the error for poussetaches."""

    def __init__(self):
        self.message = traceback.format_exc()


@app.errorhandler(TaskError)
def handle_task_error(error):
    logger.error(
        f"caught task error {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = flask_jsonify({"traceback": error.message})
    response.status_code = 500
    return response


# @app.errorhandler(Exception)
# def handle_other_error(error):
#     logger.error(
#         f"caught error {error!r}, {traceback.format_tb(error.__traceback__)}"
#     )
#     response = flask_jsonify({})
#     response.status_code = 500
#     return response


# App routes

ROBOTS_TXT = """User-agent: *
Disallow: /login
Disallow: /admin/
Disallow: /static/
Disallow: /media/
Disallow: /uploads/"""


@app.route("/robots.txt")
def robots_txt():
    return Response(response=ROBOTS_TXT, headers={"Content-Type": "text/plain"})


@app.route("/media/<media_id>")
2018-07-22 10:17:55 +00:00
@noindex
def serve_media(media_id):
f = MEDIA_CACHE.fs.get(ObjectId(media_id))
resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
resp.headers.set("Content-Length", f.length)
resp.headers.set("ETag", f.md5)
resp.headers.set(
"Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
)
resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
resp.headers.set("Content-Encoding", "gzip")
return resp
@app.route("/uploads/<oid>/<fname>")
def serve_uploads(oid, fname):
f = MEDIA_CACHE.fs.get(ObjectId(oid))
2018-07-04 23:02:51 +00:00
resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
resp.headers.set("Content-Length", f.length)
resp.headers.set("ETag", f.md5)
resp.headers.set(
"Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
)
resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
resp.headers.set("Content-Encoding", "gzip")
return resp
#######
# Login


@app.route("/admin/update_actor")
@login_required
def admin_update_actor():
    update = ap.Update(
        actor=MY_PERSON.id,
        object=MY_PERSON.to_dict(),
        to=[MY_PERSON.followers],
        cc=[ap.AS_PUBLIC],
        published=now(),
    )
    post_to_outbox(update)
    return "OK"


@app.route("/admin/logout")
@login_required
def admin_logout():
    session["logged_in"] = False
    return redirect("/")


@app.route("/login", methods=["POST", "GET"])
2018-07-22 10:17:55 +00:00
@noindex
2018-07-06 22:08:44 +00:00
def admin_login():
2018-07-15 19:25:09 +00:00
if session.get("logged_in") is True:
return redirect(url_for("admin_notifications"))
2018-06-16 20:02:10 +00:00
devices = [doc["device"] for doc in DB.u2f.find()]
2018-05-18 18:41:41 +00:00
u2f_enabled = True if devices else False
2018-06-16 20:02:10 +00:00
if request.method == "POST":
csrf.protect()
2019-04-08 16:01:02 +00:00
# 1. Check regular password login flow
2018-06-16 20:02:10 +00:00
pwd = request.form.get("pass")
2019-04-08 16:01:02 +00:00
if pwd:
if verify_pass(pwd):
session["logged_in"] = True
return redirect(
request.args.get("redirect") or url_for("admin_notifications")
)
else:
abort(403)
# 2. Check for U2F payload, if any
elif devices:
resp = json.loads(request.form.get("resp"))
try:
u2f.complete_authentication(session["challenge"], resp)
except ValueError as exc:
print("failed", exc)
abort(403)
return
finally:
session["challenge"] = None
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
session["logged_in"] = True
2018-07-09 22:49:52 +00:00
return redirect(
request.args.get("redirect") or url_for("admin_notifications")
)
2018-05-18 18:41:41 +00:00
else:
abort(401)
payload = None
if devices:
payload = u2f.begin_authentication(ID, devices)
2018-06-16 20:02:10 +00:00
session["challenge"] = payload
2018-05-18 18:41:41 +00:00
2018-06-29 20:16:26 +00:00
return render_template("login.html", u2f_enabled=u2f_enabled, payload=payload)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/remote_follow", methods=["GET", "POST"])
2018-05-18 18:41:41 +00:00
def remote_follow():
2018-06-16 20:02:10 +00:00
if request.method == "GET":
return render_template("remote_follow.html")
2018-05-18 18:41:41 +00:00
2018-06-04 15:59:38 +00:00
csrf.protect()
profile = request.form.get("profile")
if not profile.startswith("@"):
profile = f"@{profile}"
2019-07-26 22:24:04 +00:00
return redirect(get_remote_follow_template(profile).format(uri=ID))
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/authorize_follow", methods=["GET", "POST"])
2018-05-18 18:41:41 +00:00
@login_required
def authorize_follow():
2018-06-16 20:02:10 +00:00
if request.method == "GET":
return render_template(
"authorize_remote_follow.html", profile=request.args.get("profile")
)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
actor = get_actor_url(request.form.get("profile"))
2018-05-18 18:41:41 +00:00
if not actor:
abort(500)
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
"activity.object": actor,
}
if DB.activities.count(q) > 0:
2018-06-16 20:02:10 +00:00
return redirect("/following")
2018-05-18 18:41:41 +00:00
follow = ap.Follow(
actor=MY_PERSON.id, object=actor, to=[actor], cc=[ap.AS_PUBLIC], published=now()
)
2019-04-05 09:35:48 +00:00
post_to_outbox(follow)
2018-06-16 20:33:51 +00:00
2018-06-16 20:02:10 +00:00
return redirect("/following")
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/u2f/register", methods=["GET", "POST"])
2018-05-18 18:41:41 +00:00
@login_required
def u2f_register():
# TODO(tsileo): ensure no duplicates
2018-06-16 20:02:10 +00:00
if request.method == "GET":
2018-05-18 18:41:41 +00:00
payload = u2f.begin_registration(ID)
2018-06-16 20:02:10 +00:00
session["challenge"] = payload
return render_template("u2f.html", payload=payload)
2018-05-18 18:41:41 +00:00
else:
2018-06-16 20:02:10 +00:00
resp = json.loads(request.form.get("resp"))
device, device_cert = u2f.complete_registration(session["challenge"], resp)
session["challenge"] = None
DB.u2f.insert_one({"device": device, "cert": device_cert})
session["logged_in"] = False
return redirect("/login")
2018-06-16 20:02:10 +00:00
2018-05-18 18:41:41 +00:00
#######
# Activity pub routes


@app.route("/drop_cache")
@login_required
def drop_cache():
    DB.actors.drop()
    return "Done"


def paginated_query(db, q, limit=25, sort_key="_id"):
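    """Run an ObjectId-cursor-based paginated query against `db`.

    The cursor is read from the `older_than`/`newer_than` request args, and the
    returned cursors can be used to build the prev/next page links, e.g.:

        inbox_data, older_than, newer_than = paginated_query(DB.activities, q)
    """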
    older_than = newer_than = None
    query_sort = -1
    first_page = not request.args.get("older_than") and not request.args.get(
        "newer_than"
    )

    query_older_than = request.args.get("older_than")
    query_newer_than = request.args.get("newer_than")

    if query_older_than:
        q["_id"] = {"$lt": ObjectId(query_older_than)}
    elif query_newer_than:
        q["_id"] = {"$gt": ObjectId(query_newer_than)}
        query_sort = 1

    outbox_data = list(db.find(q, limit=limit + 1).sort(sort_key, query_sort))
    outbox_len = len(outbox_data)
    outbox_data = sorted(
        outbox_data[:limit], key=lambda x: str(x[sort_key]), reverse=True
    )

    if query_older_than:
        newer_than = str(outbox_data[0]["_id"])
        if outbox_len == limit + 1:
            older_than = str(outbox_data[-1]["_id"])
    elif query_newer_than:
        older_than = str(outbox_data[-1]["_id"])
        if outbox_len == limit + 1:
            newer_than = str(outbox_data[0]["_id"])
    elif first_page and outbox_len == limit + 1:
        older_than = str(outbox_data[-1]["_id"])

    return outbox_data, older_than, newer_than


@app.route("/")
def index():
    if is_api_request():
        return jsonify(**ME)

    q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.public": True,
        "$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}],
    }

    pinned = []
    # Only fetch the pinned notes if we're on the first page
    if not request.args.get("older_than") and not request.args.get("newer_than"):
        q_pinned = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.CREATE.value,
            "meta.deleted": False,
            "meta.undo": False,
            "meta.public": True,
            "meta.pinned": True,
        }
        pinned = list(DB.activities.find(q_pinned))

    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=25 - len(pinned)
    )

    resp = render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
        pinned=pinned,
    )
    return resp


@app.route("/all")
@login_required
def all():
    q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "meta.deleted": False,
        "meta.undo": False,
        "meta.poll_answer": False,
    }
    outbox_data, older_than, newer_than = paginated_query(DB.activities, q)

    return render_template(
        "index.html",
        outbox_data=outbox_data,
        older_than=older_than,
        newer_than=newer_than,
    )


def _build_thread(data, include_children=True): # noqa: C901
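    """Assemble the whole thread around the given activity as a flat list of
    notes, each annotated with its nesting depth (`_level`)."""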
    data["_requested"] = True
    app.logger.info(f"_build_thread({data!r})")
    root_id = data["meta"].get("thread_root_parent", data["activity"]["object"]["id"])

    query = {
        "$or": [{"meta.thread_root_parent": root_id}, {"activity.object.id": root_id}],
        "meta.deleted": False,
    }
    replies = [data]
    for dat in DB.activities.find(query):
        if dat["type"][0] == ActivityType.CREATE.value:
            replies.append(dat)
        elif dat["type"][0] == ActivityType.UPDATE.value:
            continue
        else:
            # Make a Note/Question/... look like a Create
            dat = {
                "activity": {"object": dat["activity"]},
                "meta": dat["meta"],
                "_id": dat["_id"],
            }
            replies.append(dat)

    replies = sorted(replies, key=lambda d: d["activity"]["object"]["published"])

    # Index all the IDs in order to build a tree
    idx = {}
    replies2 = []
    for rep in replies:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id in idx:
            continue
        idx[rep_id] = rep.copy()
        idx[rep_id]["_nodes"] = []
        replies2.append(rep)

    # Build the tree
    for rep in replies2:
        rep_id = rep["activity"]["object"]["id"]
        if rep_id == root_id:
            continue
        reply_of = ap._get_id(rep["activity"]["object"].get("inReplyTo"))
        try:
            idx[reply_of]["_nodes"].append(rep)
        except KeyError:
            app.logger.info(f"{reply_of} is not there! skipping {rep}")

    # Flatten the tree
    thread = []

    def _flatten(node, level=0):
        node["_level"] = level
        thread.append(node)

        for snode in sorted(
            idx[node["activity"]["object"]["id"]]["_nodes"],
            key=lambda d: d["activity"]["object"]["published"],
        ):
            _flatten(snode, level=level + 1)

    try:
        _flatten(idx[root_id])
    except KeyError:
        app.logger.info(f"{root_id} is not there! skipping")

    return thread


@app.route("/note/<note_id>")
def note_by_id(note_id):
    if is_api_request():
        return redirect(url_for("outbox_activity", item_id=note_id))

    data = DB.activities.find_one(
        {"box": Box.OUTBOX.value, "remote_id": back.activity_url(note_id)}
    )
    if not data:
        abort(404)
    if data["meta"].get("deleted", False):
        abort(410)

    thread = _build_thread(data)
    app.logger.info(f"thread={thread!r}")

    raw_likes = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.LIKE.value,
                "$or": [
                    # FIXME(tsileo): remove all the useless $or
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    likes = []
    for doc in raw_likes:
        try:
            likes.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"likes={likes!r}")

    raw_shares = list(
        DB.activities.find(
            {
                "meta.undo": False,
                "meta.deleted": False,
                "type": ActivityType.ANNOUNCE.value,
                "$or": [
                    {"activity.object.id": data["activity"]["object"]["id"]},
                    {"activity.object": data["activity"]["object"]["id"]},
                ],
            }
        )
    )
    shares = []
    for doc in raw_shares:
        try:
            shares.append(doc["meta"]["actor"])
        except Exception:
            app.logger.exception(f"invalid doc: {doc!r}")
    app.logger.info(f"shares={shares!r}")

    return render_template(
        "note.html", likes=likes, shares=shares, thread=thread, note=data
    )


@app.route("/nodeinfo")
def nodeinfo():
    q = {
        "box": Box.OUTBOX.value,
        "meta.deleted": False,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
    }

    response = json.dumps(
        {
            "version": "2.1",
            "software": {
                "name": "microblogpub",
                "version": f"{VERSION}",
                "repository": "https://github.com/tsileo/microblog.pub",
            },
            "protocols": ["activitypub"],
            "services": {"inbound": [], "outbound": []},
            "openRegistrations": False,
            "usage": {"users": {"total": 1}, "localPosts": DB.activities.count(q)},
            "metadata": {
                "sourceCode": "https://github.com/tsileo/microblog.pub",
                "nodeName": f"@{USERNAME}@{DOMAIN}",
                "version": VERSION,
                "version_date": VERSION_DATE,
            },
        }
    )

    return Response(
        headers={
            "Content-Type": "application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.1#"
        },
        response=response,
    )


@app.route("/.well-known/nodeinfo")
2018-06-03 08:15:11 +00:00
def wellknown_nodeinfo():
return flask_jsonify(
links=[
{
2018-06-16 20:02:10 +00:00
"rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
"href": f"{ID}/nodeinfo",
2018-06-03 08:15:11 +00:00
}
2018-06-16 20:02:10 +00:00
]
2018-06-03 08:15:11 +00:00
)
2018-06-16 20:02:10 +00:00
@app.route("/.well-known/webfinger")
2018-06-03 08:15:11 +00:00
def wellknown_webfinger():
2018-05-18 18:41:41 +00:00
"""Enable WebFinger support, required for Mastodon interopability."""
2018-07-03 22:40:23 +00:00
# TODO(tsileo): move this to little-boxes?
2018-06-16 20:02:10 +00:00
resource = request.args.get("resource")
if resource not in [f"acct:{USERNAME}@{DOMAIN}", ID]:
2018-05-18 18:41:41 +00:00
abort(404)
out = {
2018-06-16 20:02:10 +00:00
"subject": f"acct:{USERNAME}@{DOMAIN}",
2018-05-18 18:41:41 +00:00
"aliases": [ID],
"links": [
2018-06-16 20:02:10 +00:00
{
"rel": "http://webfinger.net/rel/profile-page",
"type": "text/html",
"href": BASE_URL,
},
2018-05-18 18:41:41 +00:00
{"rel": "self", "type": "application/activity+json", "href": ID},
2018-06-16 20:02:10 +00:00
{
"rel": "http://ostatus.org/schema/1.0/subscribe",
"template": BASE_URL + "/authorize_follow?profile={uri}",
},
2018-07-03 22:40:23 +00:00
{"rel": "magic-public-key", "href": KEY.to_magic_key()},
2018-07-04 19:08:45 +00:00
{
"href": ICON_URL,
"rel": "http://webfinger.net/rel/avatar",
"type": mimetypes.guess_type(ICON_URL)[0],
},
2018-05-18 18:41:41 +00:00
],
}
return Response(
response=json.dumps(out),
2018-06-16 20:02:10 +00:00
headers={
"Content-Type": "application/jrd+json; charset=utf-8"
if not app.debug
else "application/json"
},
2018-05-18 18:41:41 +00:00
)
2018-05-28 17:46:23 +00:00
def add_extra_collection(raw_doc: Dict[str, Any]) -> Dict[str, Any]:
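    """Embed the replies/likes/shares collections (and their counters) into a Create activity."""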
    if raw_doc["activity"]["type"] != ActivityType.CREATE.value:
        return raw_doc

    raw_doc["activity"]["object"]["replies"] = embed_collection(
        raw_doc.get("meta", {}).get("count_direct_reply", 0),
        f'{raw_doc["remote_id"]}/replies',
    )

    raw_doc["activity"]["object"]["likes"] = embed_collection(
        raw_doc.get("meta", {}).get("count_like", 0), f'{raw_doc["remote_id"]}/likes'
    )

    raw_doc["activity"]["object"]["shares"] = embed_collection(
        raw_doc.get("meta", {}).get("count_boost", 0), f'{raw_doc["remote_id"]}/shares'
    )

    return raw_doc


def remove_context(activity: Dict[str, Any]) -> Dict[str, Any]:
    if "@context" in activity:
        del activity["@context"]
    return activity


def _add_answers_to_question(raw_doc: Dict[str, Any]) -> None:
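    """Embed the vote counts into a Question's choices, and close it once `endTime` has passed."""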
    activity = raw_doc["activity"]
    if (
        ap._has_type(activity["type"], ActivityType.CREATE)
        and "object" in activity
        and ap._has_type(activity["object"]["type"], ActivityType.QUESTION)
    ):
        for choice in activity["object"].get("oneOf", activity["object"].get("anyOf")):
            choice["replies"] = {
                "type": ActivityType.COLLECTION.value,
                "totalItems": raw_doc["meta"]
                .get("question_answers", {})
                .get(_answer_key(choice["name"]), 0),
            }

        now = datetime.now(timezone.utc)
        if format_datetime(now) >= activity["object"]["endTime"]:
            activity["object"]["closed"] = activity["object"]["endTime"]


def activity_from_doc(raw_doc: Dict[str, Any], embed: bool = False) -> Dict[str, Any]:
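    """Build the activity to serve from a DB document (embedded collections, question answers...)."""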
    raw_doc = add_extra_collection(raw_doc)
    activity = clean_activity(raw_doc["activity"])

    # Handle Questions
    # TODO(tsileo): what about object embedded by ID/URL?
    _add_answers_to_question(raw_doc)
    if embed:
        return remove_context(activity)

    return activity


@app.route("/outbox", methods=["GET", "POST"])
2018-06-16 19:24:53 +00:00
def outbox():
2018-06-16 20:02:10 +00:00
if request.method == "GET":
2018-06-16 19:24:53 +00:00
if not is_api_request():
abort(404)
# TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
2018-05-18 18:41:41 +00:00
q = {
2018-06-29 20:16:26 +00:00
"box": Box.OUTBOX.value,
2018-07-09 22:49:52 +00:00
"meta.deleted": False,
2019-04-14 17:17:54 +00:00
"meta.undo": False,
"meta.public": True,
"type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
2018-05-18 18:41:41 +00:00
}
2018-06-16 20:02:10 +00:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: activity_from_doc(doc, embed=True),
2018-07-31 21:23:20 +00:00
col_name="outbox",
2018-06-16 20:02:10 +00:00
)
)
2018-05-18 18:41:41 +00:00
# Handle POST request
2018-05-21 12:30:52 +00:00
try:
_api_required()
except BadSignature:
abort(401)
2018-06-16 19:24:53 +00:00
2018-05-18 18:41:41 +00:00
data = request.get_json(force=True)
print(data)
2018-06-17 17:21:59 +00:00
activity = ap.parse_activity(data)
2019-04-05 09:35:48 +00:00
activity_id = post_to_outbox(activity)
2018-07-29 14:07:27 +00:00
return Response(status=201, headers={"Location": activity_id})
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>")
2018-05-18 18:41:41 +00:00
def outbox_detail(item_id):
2018-06-29 20:16:26 +00:00
doc = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.public": True,
}
2018-06-29 20:16:26 +00:00
)
2018-07-09 22:49:52 +00:00
if not doc:
abort(404)
2018-06-16 20:02:10 +00:00
if doc["meta"].get("deleted", False):
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(doc["activity"])
2018-07-09 22:49:52 +00:00
resp = jsonify(**obj.get_tombstone().to_dict())
resp.status_code = 410
return resp
2018-05-28 17:46:23 +00:00
return jsonify(**activity_from_doc(doc))
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/activity")
2018-05-18 18:41:41 +00:00
def outbox_activity(item_id):
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.public": True,
}
2018-06-18 20:01:21 +00:00
)
2018-05-18 18:41:41 +00:00
if not data:
abort(404)
2019-04-14 17:17:54 +00:00
2018-05-28 17:46:23 +00:00
obj = activity_from_doc(data)
2018-07-09 22:49:52 +00:00
if data["meta"].get("deleted", False):
obj = ap.parse_activity(data["activity"])
resp = jsonify(**obj.get_object().get_tombstone().to_dict())
resp.status_code = 410
return resp
2018-06-16 20:02:10 +00:00
if obj["type"] != ActivityType.CREATE.value:
2018-05-18 18:41:41 +00:00
abort(404)
2018-06-16 20:02:10 +00:00
return jsonify(**obj["object"])
2018-05-28 17:46:23 +00:00
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/replies")
def outbox_activity_replies(item_id):
if not is_api_request():
abort(404)
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.deleted": False,
"meta.public": True,
2018-06-29 20:16:26 +00:00
}
2018-06-18 20:01:21 +00:00
)
if not data:
abort(404)
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 20:02:10 +00:00
"meta.deleted": False,
"meta.public": True,
2018-06-16 20:02:10 +00:00
"type": ActivityType.CREATE.value,
"activity.object.inReplyTo": obj.get_object().id,
}
2018-06-16 20:02:10 +00:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
col_name=f"outbox/{item_id}/replies",
first_page=request.args.get("page") == "first",
)
)
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/likes")
def outbox_activity_likes(item_id):
if not is_api_request():
abort(404)
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.deleted": False,
"meta.public": True,
2018-06-29 20:16:26 +00:00
}
2018-06-18 20:01:21 +00:00
)
if not data:
abort(404)
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 20:02:10 +00:00
"meta.undo": False,
"type": ActivityType.LIKE.value,
"$or": [
{"activity.object.id": obj.get_object().id},
{"activity.object": obj.get_object().id},
],
}
2018-06-16 20:02:10 +00:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
col_name=f"outbox/{item_id}/likes",
first_page=request.args.get("page") == "first",
)
)
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/shares")
def outbox_activity_shares(item_id):
if not is_api_request():
abort(404)
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": back.activity_url(item_id),
"meta.deleted": False,
}
2018-06-18 20:01:21 +00:00
)
if not data:
abort(404)
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 20:02:10 +00:00
"meta.undo": False,
"type": ActivityType.ANNOUNCE.value,
"$or": [
{"activity.object.id": obj.get_object().id},
{"activity.object": obj.get_object().id},
],
}
2018-06-16 20:02:10 +00:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
col_name=f"outbox/{item_id}/shares",
first_page=request.args.get("page") == "first",
)
)
@app.route("/admin", methods=["GET"])
2018-05-18 18:41:41 +00:00
@login_required
def admin():
2018-06-29 20:16:26 +00:00
q = {
"meta.deleted": False,
"meta.undo": False,
"type": ActivityType.LIKE.value,
"box": Box.OUTBOX.value,
}
col_liked = DB.activities.count(q)
2018-05-18 18:41:41 +00:00
return render_template(
2018-06-16 20:02:10 +00:00
"admin.html",
instances=list(DB.instances.find()),
2018-06-29 20:16:26 +00:00
inbox_size=DB.activities.count({"box": Box.INBOX.value}),
outbox_size=DB.activities.count({"box": Box.OUTBOX.value}),
2018-06-16 20:02:10 +00:00
col_liked=col_liked,
col_followers=DB.activities.count(
{
"box": Box.INBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
}
),
col_following=DB.activities.count(
{
"box": Box.OUTBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
}
),
2018-05-18 18:41:41 +00:00
)
2018-06-16 19:24:53 +00:00
2018-05-18 18:41:41 +00:00
@app.route("/admin/indieauth", methods=["GET"])
@login_required
def admin_indieauth():
return render_template(
"admin_indieauth.html",
indieauth_actions=DB.indieauth.find().sort("ts", -1).limit(100),
)
2019-04-07 10:27:48 +00:00
@app.route("/admin/tasks", methods=["GET"])
@login_required
def admin_tasks():
return render_template(
"admin_tasks.html",
2019-04-08 18:56:12 +00:00
success=p.get_success(),
dead=p.get_dead(),
waiting=p.get_waiting(),
2019-04-08 15:24:50 +00:00
cron=p.get_cron(),
2019-04-07 10:27:48 +00:00
)
@app.route("/admin/lookup", methods=["GET", "POST"])
@login_required
def admin_lookup():
data = None
2018-07-29 18:10:15 +00:00
meta = None
if request.method == "POST":
if request.form.get("url"):
data = lookup(request.form.get("url"))
2018-07-29 18:10:15 +00:00
if data.has_type(ActivityType.ANNOUNCE):
meta = dict(
object=data.get_object().to_dict(),
object_actor=data.get_object().get_actor().to_dict(),
actor=data.get_actor().to_dict(),
)
elif data.has_type(ActivityType.QUESTION):
p.push(data.id, "/task/fetch_remote_question")
2018-09-02 17:43:09 +00:00
print(data)
2019-04-10 20:50:36 +00:00
app.logger.debug(data.to_dict())
2018-07-29 18:10:15 +00:00
return render_template(
"lookup.html", data=data, meta=meta, url=request.form.get("url")
)
2018-07-14 10:29:46 +00:00
@app.route("/admin/thread")
@login_required
def admin_thread():
data = DB.activities.find_one(
2019-04-13 08:00:56 +00:00
{
"type": ActivityType.CREATE.value,
"activity.object.id": request.args.get("oid"),
}
2018-07-14 10:29:46 +00:00
)
2019-04-11 17:24:28 +00:00
2018-07-14 10:29:46 +00:00
if not data:
abort(404)
if data["meta"].get("deleted", False):
abort(410)
thread = _build_thread(data)
tpl = "note.html"
if request.args.get("debug"):
tpl = "note_debug.html"
return render_template(tpl, thread=thread, note=data)
2018-07-14 10:29:46 +00:00
2018-07-06 22:08:44 +00:00
@app.route("/admin/new", methods=["GET"])
2018-05-18 18:41:41 +00:00
@login_required
2018-07-06 22:08:44 +00:00
def admin_new():
2018-05-18 18:41:41 +00:00
reply_id = None
2018-06-16 20:02:10 +00:00
content = ""
2018-06-03 19:28:06 +00:00
thread = []
2018-07-31 20:42:50 +00:00
print(request.args)
2018-06-16 20:02:10 +00:00
if request.args.get("reply"):
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one({"activity.object.id": request.args.get("reply")})
2018-07-31 20:42:50 +00:00
if data:
reply = ap.parse_activity(data["activity"])
else:
data = dict(
meta={},
activity=dict(
object=get_backend().fetch_iri(request.args.get("reply"))
),
)
reply = ap.parse_activity(data["activity"]["object"])
2018-06-03 19:28:06 +00:00
2018-05-18 18:41:41 +00:00
reply_id = reply.id
2018-06-17 17:21:59 +00:00
if reply.ACTIVITY_TYPE == ActivityType.CREATE:
2018-06-03 19:28:06 +00:00
reply_id = reply.get_object().id
2018-05-18 18:41:41 +00:00
actor = reply.get_actor()
domain = urlparse(actor.id).netloc
2018-06-03 19:28:06 +00:00
# FIXME(tsileo): if reply of reply, fetch all participants
2018-06-16 20:02:10 +00:00
content = f"@{actor.preferredUsername}@{domain} "
2018-06-29 20:16:26 +00:00
thread = _build_thread(data)
2018-05-18 18:41:41 +00:00
2019-04-13 08:00:56 +00:00
return render_template(
"new.html",
reply=reply_id,
content=content,
thread=thread,
2019-07-12 22:28:14 +00:00
visibility=ap.Visibility,
2019-04-13 08:00:56 +00:00
emojis=EMOJIS.split(" "),
)
2018-05-18 18:41:41 +00:00
2019-07-22 18:32:35 +00:00
@app.route("/admin/lists", methods=["GET"])
@login_required
def admin_lists():
lists = list(DB.lists.find())
return render_template("lists.html", lists=lists)
2018-07-06 22:08:44 +00:00
@app.route("/admin/notifications")
2018-05-18 18:41:41 +00:00
@login_required
2018-07-06 22:08:44 +00:00
def admin_notifications():
2019-04-08 16:09:33 +00:00
# Setup the cron for deleting old activities
2019-04-09 06:40:48 +00:00
# FIXME(tsileo): put back to 12h
p.push({}, "/task/cleanup", schedule="@every 1h")
2019-04-08 16:09:33 +00:00
# Trigger a cleanup if asked
if request.args.get("cleanup"):
p.push({}, "/task/cleanup")
2018-06-29 20:16:26 +00:00
# FIXME(tsileo): show unfollow (performed by the current actor) and liked???
mentions_query = {
"type": ActivityType.CREATE.value,
2018-06-16 20:02:10 +00:00
"activity.object.tag.type": "Mention",
"activity.object.tag.name": f"@{USERNAME}@{DOMAIN}",
"meta.deleted": False,
2018-05-18 18:41:41 +00:00
}
2018-06-29 20:16:26 +00:00
replies_query = {
"type": ActivityType.CREATE.value,
"activity.object.inReplyTo": {"$regex": f"^{BASE_URL}"},
2019-07-17 14:14:29 +00:00
"meta.poll_answer": False,
2018-06-29 20:16:26 +00:00
}
announced_query = {
"type": ActivityType.ANNOUNCE.value,
"activity.object": {"$regex": f"^{BASE_URL}"},
}
new_followers_query = {"type": ActivityType.FOLLOW.value}
2018-07-01 19:32:12 +00:00
unfollow_query = {
"type": ActivityType.UNDO.value,
"activity.object.type": ActivityType.FOLLOW.value,
}
2018-07-30 16:30:47 +00:00
likes_query = {
"type": ActivityType.LIKE.value,
"activity.object": {"$regex": f"^{BASE_URL}"},
}
2018-06-29 20:16:26 +00:00
followed_query = {"type": ActivityType.ACCEPT.value}
2018-06-16 20:02:10 +00:00
q = {
2018-06-29 20:16:26 +00:00
"box": Box.INBOX.value,
2018-06-16 20:02:10 +00:00
"$or": [
2018-06-29 20:16:26 +00:00
mentions_query,
announced_query,
replies_query,
new_followers_query,
followed_query,
2018-07-01 19:32:12 +00:00
unfollow_query,
2018-07-30 16:12:27 +00:00
likes_query,
2018-06-29 20:16:26 +00:00
],
2018-06-16 20:02:10 +00:00
}
2018-07-06 21:53:33 +00:00
inbox_data, older_than, newer_than = paginated_query(DB.activities, q)
2019-07-04 21:22:38 +00:00
if not newer_than:
nstart = datetime.now(timezone.utc).isoformat()
else:
nstart = inbox_data[0]["_id"].generation_time.isoformat()
if not older_than:
nend = (datetime.now(timezone.utc) - timedelta(days=15)).isoformat()
else:
nend = inbox_data[-1]["_id"].generation_time.isoformat()
print(nstart, nend)
2019-07-04 21:24:25 +00:00
notifs = list(
DB.notifications.find({"datetime": {"$lte": nstart, "$gt": nend}})
.sort("_id", -1)
.limit(50)
)
print(inbox_data)
nid = None
if inbox_data:
nid = inbox_data[0]["_id"]
2019-07-04 21:22:38 +00:00
inbox_data.extend(notifs)
2019-07-04 21:24:25 +00:00
inbox_data = sorted(
inbox_data, reverse=True, key=lambda doc: doc["_id"].generation_time
)
2018-05-18 18:41:41 +00:00
2018-07-06 21:53:33 +00:00
return render_template(
"stream.html",
inbox_data=inbox_data,
older_than=older_than,
newer_than=newer_than,
nid=nid,
2018-07-06 21:53:33 +00:00
)
2018-05-18 18:41:41 +00:00
2018-05-28 17:46:23 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/api/key")
2018-05-29 19:36:05 +00:00
@login_required
def api_user_key():
return flask_jsonify(api_key=ADMIN_API_KEY)
def _user_api_arg(key: str, **kwargs):
2018-06-01 18:29:44 +00:00
"""Try to get the given key from the requests, try JSON body, form data and query arg."""
2018-05-29 20:16:09 +00:00
if request.is_json:
2018-06-01 18:29:44 +00:00
oid = request.json.get(key)
2018-05-29 19:36:05 +00:00
else:
2018-06-01 18:29:44 +00:00
oid = request.args.get(key) or request.form.get(key)
2018-05-29 19:36:05 +00:00
if not oid:
2018-06-16 20:02:10 +00:00
if "default" in kwargs:
2018-07-14 11:19:30 +00:00
app.logger.info(f'{key}={kwargs.get("default")}')
2018-06-16 20:02:10 +00:00
return kwargs.get("default")
2018-06-03 10:50:51 +00:00
2018-06-16 20:02:10 +00:00
raise ValueError(f"missing {key}")
2018-06-01 18:29:44 +00:00
2018-07-14 11:19:30 +00:00
app.logger.info(f"{key}={oid}")
2018-06-01 18:29:44 +00:00
return oid
def _user_api_get_note(from_outbox: bool = False):
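    """Fetch the activity targeted by the `id` API arg, optionally ensuring it's from our outbox."""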
    oid = _user_api_arg("id")
    app.logger.info(f"fetching {oid}")
    note = ap.parse_activity(get_backend().fetch_iri(oid))
    if from_outbox and not note.id.startswith(ID):
        raise NotFromOutboxError(
            f"cannot load {note.id}, id must be owned by the server"
        )

    return note


def _user_api_response(**kwargs):
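    """Return the standard API response: a redirect if `redirect` was requested, else a 201."""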
    _redirect = _user_api_arg("redirect", default=None)
    if _redirect:
        return redirect(_redirect)

    resp = flask_jsonify(**kwargs)
    resp.status_code = 201
    return resp


@app.route("/api/mark_notifications_as_read", methods=["POST"])
@api_required
def api_mark_notification_as_read():
nid = ObjectId(_user_api_arg("nid"))
DB.activities.update_many(
{_meta(MetaKey.NOTIFICATION_UNREAD): True, "_id": {"$lte": nid}},
{"$set": {_meta(MetaKey.NOTIFICATION_UNREAD): False}},
)
return _user_api_response()
2019-04-14 17:17:54 +00:00
@app.route("/api/vote", methods=["POST"])
@api_required
def api_vote():
oid = _user_api_arg("id")
app.logger.info(f"fetching {oid}")
note = ap.parse_activity(get_backend().fetch_iri(oid))
choice = _user_api_arg("choice")
raw_note = dict(
attributedTo=MY_PERSON.id,
cc=[],
to=note.get_actor().id,
name=choice,
tag=[],
inReplyTo=note.id,
)
2019-07-04 21:22:38 +00:00
raw_note["@context"] = config.DEFAULT_CTX
2019-04-14 17:17:54 +00:00
note = ap.Note(**raw_note)
create = note.build_create()
create_id = post_to_outbox(create)
return _user_api_response(activity=create_id)
2018-06-16 20:02:10 +00:00
@app.route("/api/like", methods=["POST"])
2018-05-28 17:46:23 +00:00
@api_required
def api_like():
2018-06-01 18:29:44 +00:00
note = _user_api_get_note()
to = []
cc = []
note_visibility = ap.get_visibility(note)
if note_visibility == ap.Visibility.PUBLIC:
to = [ap.AS_PUBLIC]
cc = [ID + "/followers", note.get_actor().id]
elif note_visibility == ap.Visibility.UNLISTED:
to = [ID + "/followers", note.get_actor().id]
cc = [ap.AS_PUBLIC]
else:
to = [note.get_actor().id]
like = ap.Like(object=note.id, actor=MY_PERSON.id, to=to, cc=cc, published=now())
2019-04-05 09:35:48 +00:00
like_id = post_to_outbox(like)
2018-06-01 18:29:44 +00:00
2018-07-29 14:07:27 +00:00
return _user_api_response(activity=like_id)
2018-05-28 17:46:23 +00:00
2018-05-18 18:41:41 +00:00
2019-07-10 21:32:48 +00:00
@app.route("/api/bookmark", methods=["POST"])
@api_required
def api_bookmark():
note = _user_api_get_note()
undo = _user_api_arg("undo", default=None) == "yes"
# Try to bookmark the `Create` first
if not DB.activities.update_one(
2019-07-10 21:32:48 +00:00
{"activity.object.id": note.id}, {"$set": {"meta.bookmarked": not undo}}
).modified_count:
# Then look for the `Announce`
DB.activities.update_one(
{"meta.object.id": note.id}, {"$set": {"meta.bookmarked": not undo}}
)
2019-07-10 21:32:48 +00:00
return _user_api_response()
@app.route("/api/note/pin", methods=["POST"])
@api_required
def api_pin():
note = _user_api_get_note(from_outbox=True)
DB.activities.update_one(
{"activity.object.id": note.id, "box": Box.OUTBOX.value},
{"$set": {"meta.pinned": True}},
)
return _user_api_response(pinned=True)
@app.route("/api/note/unpin", methods=["POST"])
@api_required
def api_unpin():
note = _user_api_get_note(from_outbox=True)
DB.activities.update_one(
{"activity.object.id": note.id, "box": Box.OUTBOX.value},
{"$set": {"meta.pinned": False}},
)
return _user_api_response(pinned=False)
2018-06-16 20:02:10 +00:00
@app.route("/api/undo", methods=["POST"])
2018-05-27 12:21:06 +00:00
@api_required
def api_undo():
2018-06-16 20:02:10 +00:00
oid = _user_api_arg("id")
2018-06-29 20:16:26 +00:00
doc = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"$or": [{"remote_id": back.activity_url(oid)}, {"remote_id": oid}],
}
2018-06-18 20:01:21 +00:00
)
2018-06-01 18:29:44 +00:00
if not doc:
2018-06-16 20:02:10 +00:00
raise ActivityNotFoundError(f"cannot found {oid}")
2018-06-01 18:29:44 +00:00
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(doc.get("activity"))
undo = ap.Undo(
actor=MY_PERSON.id,
object=obj.to_dict(embed=True, embed_object_id_only=True),
published=now(),
to=obj.to,
cc=obj.cc,
)
2018-06-01 18:29:44 +00:00
# FIXME(tsileo): detect already undo-ed and make this API call idempotent
2019-04-05 09:35:48 +00:00
undo_id = post_to_outbox(undo)
2018-06-01 18:29:44 +00:00
2018-07-29 14:07:27 +00:00
return _user_api_response(activity=undo_id)
2018-05-27 12:21:06 +00:00
2018-05-18 18:41:41 +00:00
2018-07-06 22:08:44 +00:00
@app.route("/admin/stream")
2018-05-18 18:41:41 +00:00
@login_required
2018-07-06 22:08:44 +00:00
def admin_stream():
2018-07-14 11:19:30 +00:00
q = {"meta.stream": True, "meta.deleted": False}
2018-05-18 18:41:41 +00:00
2018-07-23 20:11:03 +00:00
tpl = "stream.html"
if request.args.get("debug"):
tpl = "stream_debug.html"
2019-07-10 21:32:48 +00:00
if request.args.get("debug_inbox"):
q = {}
inbox_data, older_than, newer_than = paginated_query(
DB.activities, q, limit=int(request.args.get("limit", 25))
)
return render_template(
tpl, inbox_data=inbox_data, older_than=older_than, newer_than=newer_than
)
2019-07-22 18:32:35 +00:00
@app.route("/admin/list/<name>")
@login_required
def admin_list(name):
list_ = DB.lists.find_one({"name": name})
if not list_:
abort(404)
q = {
"meta.stream": True,
"meta.deleted": False,
"meta.actor_id": {"$in": list_["members"]},
}
tpl = "stream.html"
if request.args.get("debug"):
tpl = "stream_debug.html"
if request.args.get("debug_inbox"):
q = {}
inbox_data, older_than, newer_than = paginated_query(
DB.activities, q, limit=int(request.args.get("limit", 25))
)
return render_template(
tpl, inbox_data=inbox_data, older_than=older_than, newer_than=newer_than
)
2019-07-10 21:32:48 +00:00
@app.route("/admin/bookmarks")
@login_required
def admin_bookmarks():
q = {"meta.bookmarked": True}
tpl = "stream.html"
if request.args.get("debug"):
tpl = "stream_debug.html"
2018-07-23 20:25:51 +00:00
if request.args.get("debug_inbox"):
q = {}
inbox_data, older_than, newer_than = paginated_query(
DB.activities, q, limit=int(request.args.get("limit", 25))
)
2018-07-23 20:11:03 +00:00
2018-07-06 21:53:33 +00:00
return render_template(
2018-07-23 20:11:03 +00:00
tpl, inbox_data=inbox_data, older_than=older_than, newer_than=newer_than
2018-06-16 20:02:10 +00:00
)
2018-05-18 18:41:41 +00:00
2019-04-05 09:35:48 +00:00
@app.route("/inbox", methods=["GET", "POST"]) # noqa: C901
2018-06-16 19:24:53 +00:00
def inbox():
2019-07-15 21:08:12 +00:00
    # GET /inbox
    if request.method == "GET":
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            abort(404)

        return jsonify(
            **activitypub.build_ordered_collection(
                DB.activities,
                q={"meta.deleted": False, "box": Box.INBOX.value},
                cursor=request.args.get("cursor"),
                map_func=lambda doc: remove_context(doc["activity"]),
                col_name="inbox",
            )
        )

    # POST /inbox
    try:
        data = request.get_json(force=True)
        if not isinstance(data, dict):
            raise ValueError("not a dict")
    except Exception:
        return Response(
            status=422,
            headers={"Content-Type": "application/json"},
            response=json.dumps({"error": "failed to decode request as JSON"}),
        )

    # Check the blacklist now to see if we can return super early
    if (
        ("id" in data and is_blacklisted(data["id"]))
        or (
            "object" in data
            and isinstance(data["object"], dict)
            and "id" in data["object"]
            and is_blacklisted(data["object"]["id"])
        )
        or (
            "object" in data
            and isinstance(data["object"], str)
            and is_blacklisted(data["object"])
        )
    ):
        logger.info(f"dropping activity from blacklisted host: {data.get('id')}")
        return Response(status=201)

    logger.debug(f"req_headers={request.headers}")
    logger.debug(f"raw_data={data}")

    try:
        if not verify_request(
            request.method, request.path, request.headers, request.data
        ):
            raise Exception("failed to verify request")
    except Exception:
        logger.exception(
            "failed to verify request, trying to verify the payload by fetching the remote"
        )
        try:
            remote_data = get_backend().fetch_iri(data["id"])
        except ActivityGoneError:
            # XXX Mastodon sends Delete activities that are not dereferencable (the ID is the
            # actor URL with #delete appended), so an `ActivityGoneError` kind of ensures it's "legit"
            if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
                data["object"]
            ):
                # If we're here, this means the key is not saved, so we cannot verify the object
                logger.info(f"received a Delete for an unknown actor {data!r}, drop it")

                return Response(status=201)
        except Exception:
            logger.exception(f"failed to fetch remote for payload {data!r}")

            if "type" in data:
                # Friendica does not return a 410, but a 302 that redirects to an HTML page
                if ap._has_type(data["type"], ActivityType.DELETE):
                    logger.info(
                        f"received a Delete for an unknown actor {data!r}, drop it"
                    )
                    return Response(status=201)

            if "id" in data:
                if DB.trash.find_one({"activity.id": data["id"]}):
                    # It's already stored in the trash, return early
                    return Response(
                        status=422,
                        headers={"Content-Type": "application/json"},
                        response=json.dumps(
                            {
                                "error": "failed to verify request (using HTTP signatures or fetching the IRI)"
                            }
                        ),
                    )

            # Now we can store this activity in the trash for later analysis
            ip, geoip = _get_ip()

            DB.trash.insert_one(
                {
                    "activity": data,
                    "meta": {
                        "ts": datetime.now().timestamp(),
                        "ip_address": ip,
                        "geoip": geoip,
                        "tb": traceback.format_exc(),
                        "headers": dict(request.headers),
                    },
                }
            )

            return Response(
                status=422,
                headers={"Content-Type": "application/json"},
                response=json.dumps(
                    {
                        "error": "failed to verify request (using HTTP signatures or fetching the IRI)"
                    }
                ),
            )

        # We fetched the remote data successfully
        data = remote_data

    activity = ap.parse_activity(data)
    logger.debug(f"inbox activity={activity}/{data}")
    post_to_inbox(activity)

    return Response(status=201)


def without_id(l):
    """Strip the MongoDB `_id` field from a list of documents."""
    out = []
    for d in l:
        if "_id" in d:
            del d["_id"]
        out.append(d)
    return out


@app.route("/api/debug", methods=["GET", "DELETE"])
@api_required
def api_debug():
    """Endpoint used/needed for testing, only works in DEBUG_MODE."""
    if not DEBUG_MODE:
        return flask_jsonify(message="DEBUG_MODE is off")

    if request.method == "DELETE":
        _drop_db()
        return flask_jsonify(message="DB dropped")

    return flask_jsonify(
        inbox=DB.activities.count({"box": Box.INBOX.value}),
        outbox=DB.activities.count({"box": Box.OUTBOX.value}),
        outbox_data=without_id(DB.activities.find({"box": Box.OUTBOX.value})),
    )
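

# User API: custom lists of actors, stored in `DB.lists` as {"name": ..., "members": [actor IDs]}
# and used to group the accounts you follow (see the /following page).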
@app.route("/api/new_list", methods=["POST"])
@api_required
def api_new_list():
name = _user_api_arg("name")
if not name:
raise ValueError("missing name")
if not DB.lists.find_one({"name": name}):
DB.lists.insert_one({"name": name, "members": []})
return _user_api_response(name=name)
@app.route("/api/delete_list", methods=["POST"])
@api_required
def api_delete_list():
name = _user_api_arg("name")
if not name:
raise ValueError("missing name")
if not DB.lists.find_one({"name": name}):
abort(404)
DB.lists.delete_one({"name": name})
return _user_api_response()
@app.route("/api/add_to_list", methods=["POST"])
@api_required
def api_add_to_list():
list_name = _user_api_arg("list_name")
if not list_name:
raise ValueError("missing list_name")
if not DB.lists.find_one({"name": list_name}):
raise ValueError(f"list {list_name} does not exist")
actor_id = _user_api_arg("actor_id")
if not actor_id:
raise ValueError("missing actor_id")
DB.lists.update_one({"name": list_name}, {"$addToSet": {"members": actor_id}})
return _user_api_response()
@app.route("/api/remove_from_list", methods=["POST"])
@api_required
def api_remove_from_list():
list_name = _user_api_arg("list_name")
if not list_name:
raise ValueError("missing list_name")
if not DB.lists.find_one({"name": list_name}):
raise ValueError(f"list {list_name} does not exist")
actor_id = _user_api_arg("actor_id")
if not actor_id:
raise ValueError("missing actor_id")
DB.lists.update_one({"name": list_name}, {"$pull": {"members": actor_id}})
return _user_api_response()


@app.route("/api/new_note", methods=["POST"])
@api_required
def api_new_note():
    source = _user_api_arg("content")
    if not source:
        raise ValueError("missing content")

    _reply, reply = None, None
    try:
        _reply = _user_api_arg("reply")
    except ValueError:
        pass

    visibility = ap.Visibility[
        _user_api_arg("visibility", default=ap.Visibility.PUBLIC.name)
    ]

    content, tags = parse_markdown(source)

    to, cc = [], []
    if visibility == ap.Visibility.PUBLIC:
        to = [ap.AS_PUBLIC]
        cc = [ID + "/followers"]
    elif visibility == ap.Visibility.UNLISTED:
        to = [ID + "/followers"]
        cc = [ap.AS_PUBLIC]
    elif visibility == ap.Visibility.FOLLOWERS_ONLY:
        to = [ID + "/followers"]
        cc = []

    if _reply:
        reply = ap.fetch_remote_activity(_reply)
        if visibility == ap.Visibility.DIRECT:
            to.append(reply.attributedTo)
        else:
            cc.append(reply.attributedTo)

    for tag in tags:
        if tag["type"] == "Mention":
            if visibility == ap.Visibility.DIRECT:
                to.append(tag["href"])
            else:
                cc.append(tag["href"])

    raw_note = dict(
        attributedTo=MY_PERSON.id,
        cc=list(set(cc)),
        to=list(set(to)),
        content=content,
        tag=tags,
        source={"mediaType": "text/markdown", "content": source},
        inReplyTo=reply.id if reply else None,
    )

    if "file" in request.files and request.files["file"].filename:
        file = request.files["file"]
        rfilename = secure_filename(file.filename)
        with BytesIO() as buf:
            file.save(buf)
            oid = MEDIA_CACHE.save_upload(buf, rfilename)
        mtype = mimetypes.guess_type(rfilename)[0]

        raw_note["attachment"] = [
            {
                "mediaType": mtype,
                "name": rfilename,
                "type": "Document",
                "url": f"{BASE_URL}/uploads/{oid}/{rfilename}",
            }
        ]

    note = ap.Note(**raw_note)
    create = note.build_create()
    create_id = post_to_outbox(create)

    return _user_api_response(activity=create_id)
@app.route("/api/new_question", methods=["POST"])
@api_required
def api_new_question():
source = _user_api_arg("content")
if not source:
raise ValueError("missing content")
content, tags = parse_markdown(source)
cc = [ID + "/followers"]
for tag in tags:
if tag["type"] == "Mention":
cc.append(tag["href"])
answers = []
for i in range(4):
a = _user_api_arg(f"answer{i}", default=None)
if not a:
break
2019-07-17 14:14:29 +00:00
answers.append(
{
"type": ActivityType.NOTE.value,
"name": a,
"replies": {"type": ActivityType.COLLECTION.value, "totalItems": 0},
}
)
2019-04-14 17:17:54 +00:00
2019-07-04 21:22:38 +00:00
open_for = int(_user_api_arg("open_for"))
2019-04-14 17:17:54 +00:00
choices = {
"endTime": ap.format_datetime(
2019-07-12 20:03:04 +00:00
datetime.now(timezone.utc) + timedelta(minutes=open_for)
2019-04-14 17:17:54 +00:00
)
}
of = _user_api_arg("of")
if of == "anyOf":
choices["anyOf"] = answers
else:
choices["oneOf"] = answers
raw_question = dict(
attributedTo=MY_PERSON.id,
cc=list(set(cc)),
to=[ap.AS_PUBLIC],
content=content,
tag=tags,
source={"mediaType": "text/markdown", "content": source},
inReplyTo=None,
**choices,
)
question = ap.Question(**raw_question)
create = question.build_create()
create_id = post_to_outbox(create)
2019-07-04 21:22:38 +00:00
Tasks.update_question_outbox(create_id, open_for)
2019-04-14 17:17:54 +00:00
return _user_api_response(activity=create_id)
2018-06-16 20:02:10 +00:00
@app.route("/api/stream")
2018-05-27 20:30:43 +00:00
@api_required
2018-05-18 18:41:41 +00:00
def api_stream():
return Response(
2018-06-16 20:02:10 +00:00
response=json.dumps(
activitypub.build_inbox_json_feed("/api/stream", request.args.get("cursor"))
),
headers={"Content-Type": "application/json"},
2018-05-18 18:41:41 +00:00
)
2018-06-16 20:02:10 +00:00
@app.route("/api/block", methods=["POST"])
@api_required
def api_block():
2018-06-16 20:02:10 +00:00
actor = _user_api_arg("actor")
2018-06-01 18:29:44 +00:00
2018-06-29 20:16:26 +00:00
existing = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"type": ActivityType.BLOCK.value,
"activity.object": actor,
"meta.undo": False,
}
2018-06-16 20:02:10 +00:00
)
2018-06-01 18:29:44 +00:00
if existing:
2018-06-16 20:02:10 +00:00
return _user_api_response(activity=existing["activity"]["id"])
2018-06-17 19:54:16 +00:00
block = ap.Block(actor=MY_PERSON.id, object=actor)
2019-04-05 09:35:48 +00:00
block_id = post_to_outbox(block)
2018-06-01 18:29:44 +00:00
2018-07-29 14:07:27 +00:00
return _user_api_response(activity=block_id)
2018-06-16 20:02:10 +00:00
@app.route("/api/follow", methods=["POST"])
2018-05-18 18:41:41 +00:00
@api_required
def api_follow():
2018-06-16 20:02:10 +00:00
actor = _user_api_arg("actor")
2018-06-01 18:29:44 +00:00
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.FOLLOW.value,
"meta.undo": False,
"activity.object": actor,
}
existing = DB.activities.find_one(q)
2018-06-01 18:29:44 +00:00
if existing:
2018-06-16 20:02:10 +00:00
return _user_api_response(activity=existing["activity"]["id"])
2018-05-18 18:41:41 +00:00
follow = ap.Follow(
actor=MY_PERSON.id, object=actor, to=[actor], cc=[ap.AS_PUBLIC], published=now()
)
2019-04-05 09:35:48 +00:00
follow_id = post_to_outbox(follow)
2018-06-01 18:29:44 +00:00
2018-07-29 14:07:27 +00:00
return _user_api_response(activity=follow_id)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/followers")
2018-06-16 19:24:53 +00:00
def followers():
q = {"box": Box.INBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}
2018-06-16 19:24:53 +00:00
if is_api_request():
2018-05-18 18:41:41 +00:00
return jsonify(
**activitypub.build_ordered_collection(
DB.activities,
q=q,
2018-06-16 20:02:10 +00:00
cursor=request.args.get("cursor"),
2018-07-07 12:07:29 +00:00
map_func=lambda doc: doc["activity"]["actor"],
2018-07-31 21:23:20 +00:00
col_name="followers",
2018-05-18 18:41:41 +00:00
)
2018-06-16 19:24:53 +00:00
)
2018-05-18 18:41:41 +00:00
2018-07-18 21:18:39 +00:00
raw_followers, older_than, newer_than = paginated_query(DB.activities, q)
2019-04-05 09:35:48 +00:00
followers = [
doc["meta"]["actor"] for doc in raw_followers if "actor" in doc.get("meta", {})
]
return render_template(
"followers.html",
followers_data=followers,
older_than=older_than,
newer_than=newer_than,
)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/following")
2018-05-18 18:41:41 +00:00
def following():
q = {"box": Box.OUTBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}
2018-05-18 18:41:41 +00:00
if is_api_request():
return jsonify(
**activitypub.build_ordered_collection(
DB.activities,
q=q,
2018-06-16 20:02:10 +00:00
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
2018-07-31 21:23:20 +00:00
col_name="following",
2018-06-16 20:02:10 +00:00
)
2018-05-18 18:41:41 +00:00
)
2018-06-16 19:24:53 +00:00
2018-07-18 21:18:39 +00:00
if config.HIDE_FOLLOWING and not session.get("logged_in", False):
2018-07-17 21:42:21 +00:00
abort(404)
following, older_than, newer_than = paginated_query(DB.activities, q)
2019-04-05 09:35:48 +00:00
following = [
(doc["remote_id"], doc["meta"]["object"])
for doc in following
if "remote_id" in doc and "object" in doc.get("meta", {})
]
2019-07-22 18:32:35 +00:00
lists = list(DB.lists.find())
return render_template(
"following.html",
following_data=following,
older_than=older_than,
newer_than=newer_than,
2019-07-22 18:32:35 +00:00
lists=lists,
)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/tags/<tag>")
2018-05-18 18:41:41 +00:00
def tags(tag):
2018-06-29 20:16:26 +00:00
if not DB.activities.count(
{
"box": Box.OUTBOX.value,
"activity.object.tag.type": "Hashtag",
"activity.object.tag.name": "#" + tag,
}
2018-06-16 20:02:10 +00:00
):
2018-05-18 18:41:41 +00:00
abort(404)
if not is_api_request():
return render_template(
2018-06-16 20:02:10 +00:00
"tags.html",
2018-05-18 18:41:41 +00:00
tag=tag,
2018-06-29 20:16:26 +00:00
outbox_data=DB.activities.find(
2018-06-16 20:02:10 +00:00
{
2018-06-29 20:16:26 +00:00
"box": Box.OUTBOX.value,
"type": ActivityType.CREATE.value,
2018-06-16 20:02:10 +00:00
"meta.deleted": False,
"activity.object.tag.type": "Hashtag",
"activity.object.tag.name": "#" + tag,
}
),
2018-05-18 18:41:41 +00:00
)
q = {
2018-06-29 20:16:26 +00:00
"box": Box.OUTBOX.value,
2018-06-16 20:02:10 +00:00
"meta.deleted": False,
"meta.undo": False,
"type": ActivityType.CREATE.value,
"activity.object.tag.type": "Hashtag",
"activity.object.tag.name": "#" + tag,
2018-05-18 18:41:41 +00:00
}
2018-06-16 20:02:10 +00:00
return jsonify(
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"]["id"],
col_name=f"tags/{tag}",
)
)


@app.route("/featured")
def featured():
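    """Return the pinned notes as an ActivityPub OrderedCollection (API requests only)."""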
if not is_api_request():
abort(404)
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.CREATE.value,
"meta.deleted": False,
"meta.undo": False,
"meta.pinned": True,
}
data = [clean_activity(doc["activity"]["object"]) for doc in DB.activities.find(q)]
return jsonify(**activitypub.simple_build_ordered_collection("featured", data))


@app.route("/liked")
def liked():
    if not is_api_request():
        q = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.LIKE.value,
            "meta.deleted": False,
            "meta.undo": False,
        }
        liked, older_than, newer_than = paginated_query(DB.activities, q)
        return render_template(
            "liked.html", liked=liked, older_than=older_than, newer_than=newer_than
        )

    q = {"meta.deleted": False, "meta.undo": False, "type": ActivityType.LIKE.value}
    return jsonify(
        **activitypub.build_ordered_collection(
            DB.activities,
            q=q,
            cursor=request.args.get("cursor"),
            map_func=lambda doc: doc["activity"]["object"],
            col_name="liked",
        )
    )


#######
# IndieAuth


def build_auth_resp(payload):
    """Answer with JSON or a form-encoded body, depending on the request's Accept header."""
    if request.headers.get("Accept") == "application/json":
        return Response(
            status=200,
            headers={"Content-Type": "application/json"},
            response=json.dumps(payload),
        )
    return Response(
        status=200,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        response=urlencode(payload),
    )


def _get_prop(props, name, default=None):
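    """Return the first value of a mf2 property (mf2py parses property values as lists)."""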
if name in props:
items = props.get(name)
if isinstance(items, list):
return items[0]
return items
return default


def get_client_id_data(url):
    # FIXME(tsileo): ensure not localhost via `little_boxes.urlutils.is_url_valid`
    data = mf2py.parse(url=url)
    for item in data["items"]:
        if "h-x-app" in item["type"] or "h-app" in item["type"]:
            props = item.get("properties", {})
            return dict(
                logo=_get_prop(props, "logo"),
                name=_get_prop(props, "name"),
                url=_get_prop(props, "url"),
            )

    return dict(logo=None, name=url, url=url)


@app.route("/indieauth/flow", methods=["POST"])
@login_required
def indieauth_flow():
    auth = dict(
        scope=" ".join(request.form.getlist("scopes")),
        me=request.form.get("me"),
        client_id=request.form.get("client_id"),
        state=request.form.get("state"),
        redirect_uri=request.form.get("redirect_uri"),
        response_type=request.form.get("response_type"),
        ts=datetime.now().timestamp(),
        code=binascii.hexlify(os.urandom(8)).decode("utf-8"),
        verified=False,
    )

    # XXX(tsileo): a whitelist for me values?
    # TODO(tsileo): redirect_uri checks
    if not auth["redirect_uri"]:
        abort(400)

    DB.indieauth.insert_one(auth)

    # FIXME(tsileo): fetch client ID and validate redirect_uri
    red = f'{auth["redirect_uri"]}?code={auth["code"]}&state={auth["state"]}&me={auth["me"]}'
    return redirect(red)
@app.route("/indieauth", methods=["GET", "POST"])
2018-06-16 19:24:53 +00:00
def indieauth_endpoint():
2018-06-16 20:02:10 +00:00
if request.method == "GET":
if not session.get("logged_in"):
2018-07-22 10:17:55 +00:00
return redirect(url_for("admin_login", next=request.url))
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
me = request.args.get("me")
2018-06-16 19:24:53 +00:00
# FIXME(tsileo): ensure me == ID
2018-06-16 20:02:10 +00:00
client_id = request.args.get("client_id")
redirect_uri = request.args.get("redirect_uri")
state = request.args.get("state", "")
response_type = request.args.get("response_type", "id")
scope = request.args.get("scope", "").split()
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
print("STATE", state)
2018-05-18 18:41:41 +00:00
return render_template(
2018-06-16 20:02:10 +00:00
"indieauth_flow.html",
2018-05-18 18:41:41 +00:00
client=get_client_id_data(client_id),
scopes=scope,
redirect_uri=redirect_uri,
state=state,
response_type=response_type,
client_id=client_id,
me=me,
)
# Auth verification via POST
2018-06-16 20:02:10 +00:00
code = request.form.get("code")
redirect_uri = request.form.get("redirect_uri")
client_id = request.form.get("client_id")
2018-05-18 18:41:41 +00:00
2019-05-08 11:19:31 +00:00
ip, geoip = _get_ip()
2018-05-18 18:41:41 +00:00
auth = DB.indieauth.find_one_and_update(
2018-06-16 20:02:10 +00:00
{
"code": code,
"redirect_uri": redirect_uri,
"client_id": client_id,
2019-05-01 22:49:45 +00:00
"verified": False,
},
2019-05-01 23:19:01 +00:00
{
"$set": {
"verified": True,
"verified_by": "id",
2019-05-01 23:19:01 +00:00
"verified_at": datetime.now().timestamp(),
2019-05-08 11:19:31 +00:00
"ip_address": ip,
"geoip": geoip,
2019-05-01 23:19:01 +00:00
}
},
2018-05-18 18:41:41 +00:00
)
print(auth)
print(code, redirect_uri, client_id)
2019-05-01 22:49:45 +00:00
# Ensure the code is recent
2019-05-01 22:59:13 +00:00
if (datetime.now() - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5):
2019-05-01 22:49:45 +00:00
abort(400)
2018-05-18 18:41:41 +00:00
if not auth:
abort(403)
return
2018-06-16 20:02:10 +00:00
session["logged_in"] = True
me = auth["me"]
state = auth["state"]
2019-05-01 22:49:45 +00:00
scope = auth["scope"]
2018-06-16 20:02:10 +00:00
print("STATE", state)
return build_auth_resp({"me": me, "state": state, "scope": scope})
2018-05-18 18:41:41 +00:00


@app.route("/token", methods=["GET", "POST"])
def token_endpoint():
    # Generate a new token with the returned access code
    if request.method == "POST":
        code = request.form.get("code")
        me = request.form.get("me")
        redirect_uri = request.form.get("redirect_uri")
        client_id = request.form.get("client_id")

        now = datetime.now()
        ip, geoip = _get_ip()

        # This query ensures that code, client_id, redirect_uri and me match the code request
        auth = DB.indieauth.find_one_and_update(
            {
                "code": code,
                "me": me,
                "redirect_uri": redirect_uri,
                "client_id": client_id,
                "verified": False,
            },
            {
                "$set": {
                    "verified": True,
                    "verified_by": "code",
                    "verified_at": now.timestamp(),
                    "ip_address": ip,
                    "geoip": geoip,
                }
            },
        )

        if not auth:
            abort(403)

        scope = auth["scope"].split()
        # Ensure there's at least one scope
        if not len(scope):
            abort(400)

        # Ensure the code is recent
        if (now - datetime.fromtimestamp(auth["ts"])) > timedelta(minutes=5):
            abort(400)

        payload = dict(me=me, client_id=client_id, scope=scope, ts=now.timestamp())
        token = JWT.dumps(payload).decode("utf-8")
        DB.indieauth.update_one(
            {"_id": auth["_id"]},
            {
                "$set": {
                    "token": token,
                    "token_expires": (now + timedelta(minutes=30)).timestamp(),
                }
            },
        )

        return build_auth_resp(
            {"me": me, "scope": auth["scope"], "access_token": token}
        )

    # Token verification
    token = request.headers.get("Authorization").replace("Bearer ", "")
    try:
        payload = JWT.loads(token)
    except BadSignature:
        abort(403)

    # Check the token expiration (valid for 3 hours)
    if (datetime.now() - datetime.fromtimestamp(payload["ts"])) > timedelta(
        minutes=180
    ):
        abort(401)

    return build_auth_resp(
        {
            "me": payload["me"],
            "scope": " ".join(payload["scope"]),
            "client_id": payload["client_id"],
        }
    )


#################
# Feeds


@app.route("/feed.json")
def json_feed():
    return Response(
        response=json.dumps(activitypub.json_feed("/feed.json")),
        headers={"Content-Type": "application/json"},
    )


@app.route("/feed.atom")
def atom_feed():
    return Response(
        response=activitypub.gen_feed().atom_str(),
        headers={"Content-Type": "application/atom+xml"},
    )


@app.route("/feed.rss")
def rss_feed():
    return Response(
        response=activitypub.gen_feed().rss_str(),
        headers={"Content-Type": "application/rss+xml"},
    )
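

#################
# Tasks
# The `/task/*` endpoints below are not meant to be called directly: they are invoked
# asynchronously by the task queue, and each handler parses its payload via `p.parse(request)`.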
@app.route("/task/fetch_og_meta", methods=["POST"])
2019-04-05 13:14:57 +00:00
def task_fetch_og_meta():
2019-04-05 09:35:48 +00:00
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
activity = ap.fetch_remote_activity(iri)
app.logger.info(f"activity={activity!r}")
if activity.has_type(ap.ActivityType.CREATE):
note = activity.get_object()
links = opengraph.links_from_note(note.to_dict())
og_metadata = opengraph.fetch_og_metadata(USER_AGENT, links)
for og in og_metadata:
if not og.get("image"):
continue
MEDIA_CACHE.cache_og_image(og["image"], iri)
2019-04-05 09:35:48 +00:00
app.logger.debug(f"OG metadata {og_metadata!r}")
DB.activities.update_one(
{"remote_id": iri}, {"$set": {"meta.og_metadata": og_metadata}}
)
app.logger.info(f"OG metadata fetched for {iri}: {og_metadata}")
2019-04-05 09:35:48 +00:00
except (ActivityGoneError, ActivityNotFoundError):
app.logger.exception(f"dropping activity {iri}, skip OG metedata")
return ""
except requests.exceptions.HTTPError as http_err:
if 400 <= http_err.response.status_code < 500:
app.logger.exception("bad request, no retry")
return ""
app.logger.exception("failed to fetch OG metadata")
2019-04-07 12:37:05 +00:00
raise TaskError() from http_err
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception(f"failed to fetch OG metadata for {iri}")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
return ""
2019-04-05 09:35:48 +00:00
@app.route("/task/cache_object", methods=["POST"])
def task_cache_object():
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
activity = ap.fetch_remote_activity(iri)
app.logger.info(f"activity={activity!r}")
2019-04-07 14:49:35 +00:00
obj = activity.get_object()
2019-04-05 09:35:48 +00:00
DB.activities.update_one(
{"remote_id": activity.id},
{
"$set": {
"meta.object": obj.to_dict(embed=True),
"meta.object_actor": activitypub._actor_to_meta(obj.get_actor()),
}
},
)
except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError):
DB.activities.update_one({"remote_id": iri}, {"$set": {"meta.deleted": True}})
app.logger.exception(f"flagging activity {iri} as deleted, no object caching")
2019-04-07 12:37:05 +00:00
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception(f"failed to cache object for {iri}")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
return ""
2019-04-05 09:35:48 +00:00
@app.route("/task/finish_post_to_outbox", methods=["POST"]) # noqa:C901
def task_finish_post_to_outbox():
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
activity = ap.fetch_remote_activity(iri)
app.logger.info(f"activity={activity!r}")
recipients = activity.recipients()
if activity.has_type(ap.ActivityType.DELETE):
back.outbox_delete(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.UPDATE):
back.outbox_update(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.CREATE):
back.outbox_create(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.ANNOUNCE):
back.outbox_announce(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.LIKE):
back.outbox_like(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.UNDO):
obj = activity.get_object()
if obj.has_type(ap.ActivityType.LIKE):
back.outbox_undo_like(MY_PERSON, obj)
elif obj.has_type(ap.ActivityType.ANNOUNCE):
back.outbox_undo_announce(MY_PERSON, obj)
elif obj.has_type(ap.ActivityType.FOLLOW):
back.undo_new_following(MY_PERSON, obj)
2019-04-05 09:35:48 +00:00
app.logger.info(f"recipients={recipients}")
activity = ap.clean_activity(activity.to_dict())
DB.cache2.remove()
payload = json.dumps(activity)
for recp in recipients:
app.logger.debug(f"posting to {recp}")
Tasks.post_to_remote_inbox(payload, recp)
except (ActivityGoneError, ActivityNotFoundError):
app.logger.exception(f"no retry")
2019-04-07 12:37:05 +00:00
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception(f"failed to post to remote inbox for {iri}")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
return ""
2019-04-05 09:35:48 +00:00
@app.route("/task/finish_post_to_inbox", methods=["POST"]) # noqa: C901
def task_finish_post_to_inbox():
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
activity = ap.fetch_remote_activity(iri)
app.logger.info(f"activity={activity!r}")
if activity.has_type(ap.ActivityType.DELETE):
back.inbox_delete(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.UPDATE):
back.inbox_update(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.CREATE):
back.inbox_create(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.ANNOUNCE):
back.inbox_announce(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.LIKE):
back.inbox_like(MY_PERSON, activity)
elif activity.has_type(ap.ActivityType.FOLLOW):
# Reply to a Follow with an Accept
actor_id = activity.get_actor().id
accept = ap.Accept(
actor=ID,
object={
"type": "Follow",
"id": activity.id,
"object": activity.get_object_id(),
"actor": actor_id,
},
to=[actor_id],
published=now(),
)
2019-04-05 09:35:48 +00:00
post_to_outbox(accept)
elif activity.has_type(ap.ActivityType.UNDO):
obj = activity.get_object()
if obj.has_type(ap.ActivityType.LIKE):
back.inbox_undo_like(MY_PERSON, obj)
elif obj.has_type(ap.ActivityType.ANNOUNCE):
back.inbox_undo_announce(MY_PERSON, obj)
elif obj.has_type(ap.ActivityType.FOLLOW):
back.undo_new_follower(MY_PERSON, obj)
2019-04-05 09:35:48 +00:00
try:
invalidate_cache(activity)
except Exception:
app.logger.exception("failed to invalidate cache")
except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError):
app.logger.exception(f"no retry")
2019-04-07 12:37:05 +00:00
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception(f"failed to cache attachments for {iri}")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
return ""
2019-04-05 09:35:48 +00:00
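

# Entry points for outgoing/incoming activities: persist the activity in its box,
# then hand off caching and delivery work to async tasks.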
def post_to_outbox(activity: ap.BaseActivity) -> str:
    if activity.has_type(ap.CREATE_TYPES):
        activity = activity.build_create()

    # Assign the activity a random ID
    obj_id = back.random_object_id()
    activity.set_id(back.activity_url(obj_id), obj_id)

    back.save(Box.OUTBOX, activity)
    Tasks.cache_actor(activity.id)
    Tasks.finish_post_to_outbox(activity.id)
    return activity.id


def post_to_inbox(activity: ap.BaseActivity) -> None:
    # Check for Block activity
    actor = activity.get_actor()
    if back.outbox_is_blocked(MY_PERSON, actor.id):
        app.logger.info(
            f"actor {actor!r} is blocked, dropping the received activity {activity!r}"
        )
        return

    if back.inbox_check_duplicate(MY_PERSON, activity.id):
        # The activity is already in the inbox
        app.logger.info(f"received duplicate activity {activity!r}, dropping it")
        return

    back.save(Box.INBOX, activity)
    Tasks.process_new_activity(activity.id)
    app.logger.info(f"spawning task for {activity!r}")
    Tasks.finish_post_to_inbox(activity.id)
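

# Drop the rendered-page cache (`DB.cache2`) whenever an activity can change what is
# displayed: likes/boosts of local objects, undos, deletes, updates, and replies to local notes.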
def invalidate_cache(activity):
    if activity.has_type(ap.ActivityType.LIKE):
        if activity.get_object().id.startswith(BASE_URL):
            DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.ANNOUNCE):
        if activity.get_object().id.startswith(BASE_URL):
            DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.UNDO):
        DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.DELETE):
        # TODO(tsileo): only invalidate if it's a delete of a reply
        DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.UPDATE):
        DB.cache2.remove()
    elif activity.has_type(ap.ActivityType.CREATE):
        note = activity.get_object()
        in_reply_to = note.get_in_reply_to()
        if not in_reply_to or in_reply_to.startswith(ID):
            DB.cache2.remove()
        # FIXME(tsileo): check if it's a reply of a reply
@app.route("/task/cache_attachments", methods=["POST"])
def task_cache_attachments():
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
activity = ap.fetch_remote_activity(iri)
app.logger.info(f"activity={activity!r}")
# Generates thumbnails for the actor's icon and the attachments if any
obj = activity.get_object()
2019-07-05 20:05:28 +00:00
# Iter the attachments
for attachment in obj._data.get("attachment", []):
try:
MEDIA_CACHE.cache_attachment(attachment, iri)
2019-07-05 20:05:28 +00:00
except ValueError:
app.logger.exception(f"failed to cache {attachment}")
2019-04-05 09:35:48 +00:00
app.logger.info(f"attachments cached for {iri}")
except (ActivityGoneError, ActivityNotFoundError, NotAnActivityError):
app.logger.exception(f"dropping activity {iri}, no attachment caching")
2019-04-07 12:37:05 +00:00
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception(f"failed to cache attachments for {iri}")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
return ""
2019-04-05 09:35:48 +00:00
@app.route("/task/cache_actor", methods=["POST"])
2019-04-05 13:14:57 +00:00
def task_cache_actor() -> str:
2019-04-05 09:35:48 +00:00
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload["iri"]
2019-04-05 09:35:48 +00:00
try:
activity = ap.fetch_remote_activity(iri)
app.logger.info(f"activity={activity!r}")
# Fetch the Open Grah metadata if it's a `Create`
2019-04-05 09:35:48 +00:00
if activity.has_type(ap.ActivityType.CREATE):
2019-04-05 13:14:57 +00:00
Tasks.fetch_og_meta(iri)
2019-04-05 09:35:48 +00:00
actor = activity.get_actor()
if actor.icon:
if isinstance(actor.icon, dict) and "url" in actor.icon:
MEDIA_CACHE.cache_actor_icon(actor.icon["url"])
else:
app.logger.warning(f"failed to parse icon {actor.icon} for {iri}")
2019-04-05 09:35:48 +00:00
if activity.has_type(ap.ActivityType.FOLLOW):
if actor.id == ID:
2019-04-05 09:35:48 +00:00
# It's a new following, cache the "object" (which is the actor we follow)
DB.activities.update_one(
{"remote_id": iri},
{
"$set": {
"meta.object": activity.get_object().to_dict(embed=True)
2019-04-05 09:35:48 +00:00
}
},
)
# Cache the actor info
DB.activities.update_one(
{"remote_id": iri}, {"$set": {"meta.actor": actor.to_dict(embed=True)}}
2019-04-05 09:35:48 +00:00
)
app.logger.info(f"actor cached for {iri}")
if activity.has_type([ap.ActivityType.CREATE, ap.ActivityType.ANNOUNCE]):
2019-04-05 09:35:48 +00:00
Tasks.cache_attachments(iri)
except (ActivityGoneError, ActivityNotFoundError):
DB.activities.update_one({"remote_id": iri}, {"$set": {"meta.deleted": True}})
app.logger.exception(f"flagging activity {iri} as deleted, no actor caching")
2019-04-07 12:37:05 +00:00
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception(f"failed to cache actor for {iri}")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
return ""
2019-04-05 09:35:48 +00:00
@app.route("/task/process_new_activity", methods=["POST"]) # noqa:c901
def task_process_new_activity():
"""Process an activity received in the inbox"""
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
activity = ap.fetch_remote_activity(iri)
app.logger.info(f"activity={activity!r}")
flags = {}
if not activity.published:
flags[_meta(MetaKey.PUBLISHED)] = now()
2019-07-30 20:12:20 +00:00
else:
flags[_meta(MetaKey.PUBLISHED)] = activity.published
set_inbox_flags(activity, flags)
app.logger.info(f"a={activity}, flags={flags!r}")
if flags:
DB.activities.update_one({"remote_id": activity.id}, {"$set": flags})
2019-04-05 09:35:48 +00:00
app.logger.info(f"new activity {iri} processed")
2019-07-30 20:12:20 +00:00
if not activity.has_type(ap.ActivityType.DELETE):
2019-04-05 09:35:48 +00:00
Tasks.cache_actor(iri)
except (ActivityGoneError, ActivityNotFoundError):
2019-04-07 10:41:27 +00:00
app.logger.exception(f"dropping activity {iri}, skip processing")
return ""
2019-04-07 12:37:05 +00:00
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception(f"failed to process new activity {iri}")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
return ""
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
@app.route("/task/forward_activity", methods=["POST"])
2019-04-05 09:35:48 +00:00
def task_forward_activity():
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
activity = ap.fetch_remote_activity(iri)
recipients = back.followers_as_recipients()
2019-04-05 09:35:48 +00:00
app.logger.debug(f"Forwarding {activity!r} to {recipients}")
activity = ap.clean_activity(activity.to_dict())
payload = json.dumps(activity)
for recp in recipients:
app.logger.debug(f"forwarding {activity!r} to {recp}")
Tasks.post_to_remote_inbox(payload, recp)
2019-04-07 12:37:05 +00:00
except Exception as err:
2019-04-05 09:35:48 +00:00
app.logger.exception("task failed")
2019-04-07 12:37:05 +00:00
raise TaskError() from err
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
return ""
2019-04-05 09:35:48 +00:00
2019-04-05 13:14:57 +00:00
@app.route("/task/post_to_remote_inbox", methods=["POST"])
2019-04-05 09:35:48 +00:00
def task_post_to_remote_inbox():
2019-04-05 19:36:56 +00:00
"""Post an activity to a remote inbox."""
2019-04-05 09:35:48 +00:00
task = p.parse(request)
app.logger.info(f"task={task!r}")
payload, to = task.payload["payload"], task.payload["to"]
try:
app.logger.info("payload=%s", payload)
app.logger.info("generating sig")
signed_payload = json.loads(payload)
2019-04-22 07:50:53 +00:00
# XXX Disable JSON-LD signature crap for now (as HTTP signatures are enough for most implementations)
2019-04-05 09:35:48 +00:00
# Don't overwrite the signature if we're forwarding an activity
2019-04-22 07:50:53 +00:00
# if "signature" not in signed_payload:
# generate_signature(signed_payload, KEY)
2019-04-05 09:35:48 +00:00
app.logger.info("to=%s", to)
resp = requests.post(
to,
data=json.dumps(signed_payload),
auth=SIG_AUTH,
headers={
"Content-Type": HEADERS[1],
"Accept": HEADERS[1],
"User-Agent": USER_AGENT,
},
)
app.logger.info("resp=%s", resp)
app.logger.info("resp_body=%s", resp.text)
resp.raise_for_status()
except HTTPError as err:
app.logger.exception("request failed")
if 400 >= err.response.status_code >= 499:
app.logger.info("client error, no retry")
return ""
2019-04-07 12:37:05 +00:00
raise TaskError() from err
except Exception as err:
app.logger.exception("task failed")
raise TaskError() from err
2019-04-05 13:14:57 +00:00
return ""
2019-04-08 15:24:50 +00:00


@app.route("/task/fetch_remote_question", methods=["POST"])
def task_fetch_remote_question():
    """Fetch a remote question, for implementations that do not send Update activities."""
    task = p.parse(request)
    app.logger.info(f"task={task!r}")
    iri = task.payload
    try:
        app.logger.info(f"Fetching remote question {iri}")
        local_question = DB.activities.find_one(
            {
                "box": Box.INBOX.value,
                "type": ActivityType.CREATE.value,
                "activity.object.id": iri,
            }
        )
        remote_question = get_backend().fetch_iri(iri, no_cache=True)
        # FIXME(tsileo): compute and set `meta.object_visibility` (also update utils.py to do it)
        if (
            local_question
            and (
                local_question["meta"].get("voted_for")
                or local_question["meta"].get("subscribed")
            )
            and not DB.notifications.find_one({"activity.id": remote_question["id"]})
        ):
            DB.notifications.insert_one(
                {
                    "type": "question_ended",
                    "datetime": datetime.now(timezone.utc).isoformat(),
                    "activity": remote_question,
                }
            )

        # Update the Create if we received it in the inbox
        if local_question:
            DB.activities.update_one(
                {"remote_id": local_question["remote_id"], "box": Box.INBOX.value},
                {"$set": {"activity.object": remote_question}},
            )

        # Also update all the cached copies (Like, Announce...)
        DB.activities.update_many(
            {"meta.object.id": remote_question["id"]},
            {"$set": {"meta.object": remote_question}},
        )
    except HTTPError as err:
        app.logger.exception("request failed")
        if 400 <= err.response.status_code <= 499:
            app.logger.info("client error, no retry")
            return ""
        raise TaskError() from err
    except Exception as err:
        app.logger.exception("task failed")
        raise TaskError() from err

    return ""
@app.route("/task/update_question", methods=["POST"])
def task_update_question():
2019-07-04 21:22:38 +00:00
"""Sends an Update."""
task = p.parse(request)
app.logger.info(f"task={task!r}")
iri = task.payload
try:
app.logger.info(f"Updating question {iri}")
2019-07-04 21:22:38 +00:00
cc = [ID + "/followers"]
2019-07-04 21:24:25 +00:00
doc = DB.activities.find_one({"box": Box.OUTBOX.value, "remote_id": iri})
2019-07-04 21:22:38 +00:00
_add_answers_to_question(doc)
question = ap.Question(**doc["activity"]["object"])
raw_update = dict(
actor=question.id,
object=question.to_dict(embed=True),
attributedTo=MY_PERSON.id,
cc=list(set(cc)),
to=[ap.AS_PUBLIC],
)
raw_update["@context"] = config.DEFAULT_CTX
update = ap.Update(**raw_update)
print(update)
print(update.to_dict())
post_to_outbox(update)
except HTTPError as err:
app.logger.exception("request failed")
if 400 >= err.response.status_code >= 499:
app.logger.info("client error, no retry")
return ""
raise TaskError() from err
except Exception as err:
app.logger.exception("task failed")
raise TaskError() from err
return ""


@app.route("/task/cleanup", methods=["POST"])
def task_cleanup():
    task = p.parse(request)
    app.logger.info(f"task={task!r}")
    activity_gc.perform()
    return ""


@app.route("/task/cleanup_part_1", methods=["POST"])
def task_cleanup_part_1():
    task = p.parse(request)
    app.logger.info(f"task={task!r}")
    return "OK"


@app.route("/task/cleanup_part_2", methods=["POST"])
def task_cleanup_part_2():
    task = p.parse(request)
    app.logger.info(f"task={task!r}")
    return "OK"


@app.route("/task/cleanup_part_3", methods=["POST"])
def task_cleanup_part_3():
    task = p.parse(request)
    app.logger.info(f"task={task!r}")
    return "OK"