microblog.pub/app.py

import json
import logging
import os
import traceback
from datetime import datetime
from typing import Any
from urllib.parse import urlparse
from uuid import uuid4

import requests
from bson.errors import InvalidId
from bson.objectid import ObjectId
from flask import Flask
from flask import Response
from flask import abort
from flask import g
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from flask import url_for
from gridfs.errors import NoFile
from itsdangerous import BadSignature
from little_boxes import activitypub as ap
from little_boxes.activitypub import ActivityType
from little_boxes.activitypub import clean_activity
from little_boxes.activitypub import get_backend
from little_boxes.errors import ActivityGoneError
from little_boxes.errors import Error
from little_boxes.httpsig import verify_request
from little_boxes.webfinger import get_remote_follow_template

import blueprints.admin
import blueprints.indieauth
import blueprints.tasks
import blueprints.well_known
import config
from blueprints.api import _api_required
from blueprints.api import api_required
from blueprints.tasks import TaskError
from config import DB
from config import ID
from config import ME
from config import MEDIA_CACHE
from config import VERSION
from core import activitypub
from core import feed
from core import jsonld
from core.activitypub import activity_from_doc
from core.activitypub import activity_url
from core.activitypub import post_to_inbox
from core.activitypub import post_to_outbox
from core.activitypub import remove_context
from core.db import find_one_activity
from core.meta import Box
from core.meta import MetaKey
from core.meta import _meta
from core.meta import by_hashtag
from core.meta import by_remote_id
from core.meta import by_type
from core.meta import by_visibility
from core.meta import in_outbox
from core.meta import is_public
from core.meta import not_deleted
from core.meta import not_undo
from core.shared import _build_thread
from core.shared import _get_ip
from core.shared import activitypubify
from core.shared import csrf
from core.shared import htmlify
from core.shared import is_api_request
from core.shared import jsonify
from core.shared import login_required
from core.shared import noindex
from core.shared import paginated_query
from utils.blacklist import is_blacklisted
from utils.emojis import EMOJIS
from utils.highlight import HIGHLIGHT_CSS
from utils.key import get_secret_key
from utils.template_filters import filters

app = Flask(__name__)
app.secret_key = get_secret_key("flask")
app.register_blueprint(filters)
app.register_blueprint(blueprints.admin.blueprint)
app.register_blueprint(blueprints.api.blueprint, url_prefix="/api")
app.register_blueprint(blueprints.indieauth.blueprint)
app.register_blueprint(blueprints.tasks.blueprint)
app.register_blueprint(blueprints.well_known.blueprint)
app.config.update(WTF_CSRF_CHECK_DEFAULT=False)
csrf.init_app(app)

logger = logging.getLogger(__name__)

# Hook up Flask logging with gunicorn
root_logger = logging.getLogger()
if os.getenv("FLASK_DEBUG"):
    logger.setLevel(logging.DEBUG)
    root_logger.setLevel(logging.DEBUG)
    root_logger.handlers = app.logger.handlers
else:
    gunicorn_logger = logging.getLogger("gunicorn.error")
    root_logger.handlers = gunicorn_logger.handlers
    root_logger.setLevel(gunicorn_logger.level)
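

# Template context: every render gets the instance config plus a handful of
# counters (notes, followers, following, likes, unread notifications) computed
# from the MongoDB activity collection; the more expensive counts are only
# computed when an admin session is logged in.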
@app.context_processor
def inject_config():
    q = {
        "type": "Create",
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.public": True,
    }
    notes_count = DB.activities.find(
        {"box": Box.OUTBOX.value, "$or": [q, {"type": "Announce", "meta.undo": False}]}
    ).count()
    # FIXME(tsileo): rename to all_count, and remove poll answers from it
    all_q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "meta.undo": False,
        "meta.deleted": False,
        "meta.poll_answer": False,
    }
    liked_q = {
        **in_outbox(),
        "meta.deleted": False,
        "meta.undo": False,
        "type": ActivityType.LIKE.value,
    }
    followers_q = {
        "box": Box.INBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    following_q = {
        "box": Box.OUTBOX.value,
        "type": ActivityType.FOLLOW.value,
        "meta.undo": False,
    }
    unread_notifications_q = {_meta(MetaKey.NOTIFICATION_UNREAD): True}
    logged_in = session.get("logged_in", False)
    return dict(
        microblogpub_version=VERSION,
        config=config,
        logged_in=logged_in,
        followers_count=DB.activities.count(followers_q),
        following_count=DB.activities.count(following_q) if logged_in else 0,
        notes_count=notes_count,
        liked_count=DB.activities.count(liked_q) if logged_in else 0,
        with_replies_count=DB.activities.count(all_q) if logged_in else 0,
        unread_notifications_count=DB.activities.count(unread_notifications_q)
        if logged_in
        else 0,
        me=ME,
        base_url=config.BASE_URL,
        highlight_css=HIGHLIGHT_CSS,
    )


@app.before_request
def generate_request_id():
    g.request_id = uuid4().hex


@app.after_request
def set_x_powered_by(response):
    response.headers["X-Powered-By"] = "microblog.pub"
    response.headers["X-Request-ID"] = g.request_id
    return response
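

# Error handlers: failures are returned as JSON and tagged with the
# per-request X-Request-ID so a failing response can be correlated with the
# server logs.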
@app.errorhandler(ValueError)
def handle_value_error(error):
    logger.error(
        f"caught value error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = jsonify({"message": error.args[0], "request_id": g.request_id})
    response.status_code = 400
    return response


@app.errorhandler(Error)
def handle_activitypub_error(error):
    logger.error(
        f"caught activitypub error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = jsonify({**error.to_dict(), "request_id": g.request_id})
    response.status_code = error.status_code
    return response


@app.errorhandler(TaskError)
def handle_task_error(error):
    logger.error(
        f"caught task error for {g.request_id}: {error!r}, {traceback.format_tb(error.__traceback__)}"
    )
    response = jsonify({"traceback": error.message, "request_id": g.request_id})
    response.status_code = 500
    return response


# @app.errorhandler(Exception)
# def handle_other_error(error):
#     logger.error(
#         f"caught error {error!r}, {traceback.format_tb(error.__traceback__)}"
#     )
#     response = flask_jsonify({})
#     response.status_code = 500
#     return response
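

# Log the HTTP signature of "authenticated fetches" (e.g. from instances that
# sign their GET requests). The signature is only verified for logging
# purposes here; it is not enforced for public resources.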
def _log_sig():
    sig = request.headers.get("Signature")
    if sig:
        app.logger.info(f"received an authenticated fetch: {sig}")
        try:
            req_verified, actor_id = verify_request(
                request.method, request.path, request.headers, None
            )
            app.logger.info(
                f"authenticated fetch: {req_verified}: {actor_id} {request.headers}"
            )
        except Exception:
            app.logger.exception("failed to verify authenticated fetch")


# App routes

ROBOTS_TXT = """User-agent: *
Disallow: /login
Disallow: /admin/
Disallow: /static/
Disallow: /media/
Disallow: /p/
Disallow: /uploads/"""


@app.route("/robots.txt")
def robots_txt():
    return Response(response=ROBOTS_TXT, headers={"Content-Type": "text/plain"})
@app.route("/microblogpub-0.1.jsonld")
2019-08-17 19:02:30 +00:00
def microblogpub_jsonld():
2019-08-18 09:40:57 +00:00
"""Returns our AP context (embedded in activities @context)."""
2019-08-17 19:02:30 +00:00
return Response(
response=json.dumps(jsonld.MICROBLOGPUB),
headers={"Content-Type": "application/ld+json"},
)
2019-08-18 09:59:02 +00:00
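

# Proxy remote resources through this instance. Host/Cookie headers are
# stripped from the upstream request, and only a small whitelist of
# caching-related headers is forwarded back to the client.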
@app.route("/p/<scheme>/<path:url>")
2019-08-18 10:05:56 +00:00
@noindex
2019-08-18 09:59:02 +00:00
def proxy(scheme: str, url: str) -> Any:
url = f"{scheme}://{url}"
2019-08-18 09:40:57 +00:00
req_headers = {
k: v
for k, v in dict(request.headers).items()
if k.lower() not in ["host", "cookie"]
}
2019-08-18 09:48:18 +00:00
req_headers["Host"] = urlparse(url).netloc
2019-08-18 09:40:57 +00:00
resp = requests.get(url, stream=True, headers=req_headers)
2019-08-20 20:55:55 +00:00
app.logger.info(f"proxied req {url} {req_headers}: {resp!r}")
2019-08-18 09:40:57 +00:00
def data():
for chunk in resp.raw.stream(decode_content=False):
yield chunk
resp_headers = {
k: v
for k, v in dict(resp.raw.headers).items()
if k.lower()
2019-08-18 16:31:52 +00:00
in [
"content-length",
"content-type",
"etag",
"cache-control",
"expires",
"date",
"last-modified",
]
}
2019-08-20 20:55:55 +00:00
return Response(data(), headers=resp_headers, status=resp.status_code)
2019-08-18 09:40:57 +00:00
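

# Serve cached media and uploads straight out of the MongoDB GridFS store
# (MEDIA_CACHE); the unconditional Content-Encoding header suggests the files
# are stored gzip-compressed.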
@app.route("/media/<media_id>")
2018-07-22 10:17:55 +00:00
@noindex
def serve_media(media_id):
2019-08-06 20:12:05 +00:00
try:
f = MEDIA_CACHE.fs.get(ObjectId(media_id))
except (InvalidId, NoFile):
abort(404)
resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
resp.headers.set("Content-Length", f.length)
resp.headers.set("ETag", f.md5)
resp.headers.set(
"Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
)
resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
resp.headers.set("Content-Encoding", "gzip")
return resp
@app.route("/uploads/<oid>/<fname>")
def serve_uploads(oid, fname):
2019-08-06 20:12:05 +00:00
try:
f = MEDIA_CACHE.fs.get(ObjectId(oid))
except (InvalidId, NoFile):
abort(404)
2018-07-04 23:02:51 +00:00
resp = app.response_class(f, direct_passthrough=True, mimetype=f.content_type)
resp.headers.set("Content-Length", f.length)
resp.headers.set("ETag", f.md5)
resp.headers.set(
"Last-Modified", f.uploadDate.strftime("%a, %d %b %Y %H:%M:%S GMT")
)
resp.headers.set("Cache-Control", "public,max-age=31536000,immutable")
resp.headers.set("Content-Encoding", "gzip")
return resp
2018-06-16 20:02:10 +00:00
@app.route("/remote_follow", methods=["GET", "POST"])
2018-05-18 18:41:41 +00:00
def remote_follow():
"""Form to allow visitor to perform the remote follow dance."""
2018-06-16 20:02:10 +00:00
if request.method == "GET":
return htmlify(render_template("remote_follow.html"))
2018-05-18 18:41:41 +00:00
2018-06-04 15:59:38 +00:00
csrf.protect()
profile = request.form.get("profile")
if not profile.startswith("@"):
profile = f"@{profile}"
2019-07-26 22:24:04 +00:00
return redirect(get_remote_follow_template(profile).format(uri=ID))
2018-05-18 18:41:41 +00:00


#######
# Activity pub routes


@app.route("/")
def index():
    if is_api_request():
        _log_sig()
        return activitypubify(**ME)

    q = {
        "box": Box.OUTBOX.value,
        "type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        "activity.object.inReplyTo": None,
        "meta.deleted": False,
        "meta.undo": False,
        "meta.public": True,
        "$or": [{"meta.pinned": False}, {"meta.pinned": {"$exists": False}}],
    }

    pinned = []
    # Only fetch the pinned notes if we're on the first page
    if not request.args.get("older_than") and not request.args.get("newer_than"):
        q_pinned = {
            "box": Box.OUTBOX.value,
            "type": ActivityType.CREATE.value,
            "meta.deleted": False,
            "meta.undo": False,
            "meta.public": True,
            "meta.pinned": True,
        }
        pinned = list(DB.activities.find(q_pinned))

    outbox_data, older_than, newer_than = paginated_query(
        DB.activities, q, limit=25 - len(pinned)
    )

    return htmlify(
        render_template(
            "index.html",
            outbox_data=outbox_data,
            older_than=older_than,
            newer_than=newer_than,
            pinned=pinned,
        )
    )
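

# Same timeline as the index, but including replies and non-public activities
# (no inReplyTo/meta.public filter); admin only.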
@app.route("/all")
@login_required
2019-07-12 22:38:51 +00:00
def all():
2018-06-29 20:16:26 +00:00
q = {
"box": Box.OUTBOX.value,
"type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
"meta.deleted": False,
"meta.undo": False,
"meta.poll_answer": False,
2018-06-29 20:16:26 +00:00
}
2018-07-06 21:53:33 +00:00
outbox_data, older_than, newer_than = paginated_query(DB.activities, q)
2018-05-18 18:41:41 +00:00
return htmlify(
render_template(
"index.html",
outbox_data=outbox_data,
older_than=older_than,
newer_than=newer_than,
)
2018-07-06 21:53:33 +00:00
)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/note/<note_id>")
2018-06-16 19:24:53 +00:00
def note_by_id(note_id):
if is_api_request():
return redirect(url_for("outbox_activity", item_id=note_id))
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{"box": Box.OUTBOX.value, "remote_id": activity_url(note_id)}
2018-06-29 20:16:26 +00:00
)
2018-06-16 19:24:53 +00:00
if not data:
2018-06-03 19:28:06 +00:00
abort(404)
2018-06-16 20:02:10 +00:00
if data["meta"].get("deleted", False):
2018-06-03 19:28:06 +00:00
abort(410)
2019-04-14 17:17:54 +00:00
2018-06-03 19:28:06 +00:00
thread = _build_thread(data)
2018-08-01 06:29:08 +00:00
app.logger.info(f"thread={thread!r}")
2018-08-28 20:14:48 +00:00
raw_likes = list(
2018-06-29 20:16:26 +00:00
DB.activities.find(
2018-06-16 20:02:10 +00:00
{
"meta.undo": False,
2018-07-19 23:12:02 +00:00
"meta.deleted": False,
2018-06-16 20:02:10 +00:00
"type": ActivityType.LIKE.value,
"$or": [
2018-08-01 06:29:08 +00:00
# FIXME(tsileo): remove all the useless $or
2018-06-16 20:02:10 +00:00
{"activity.object.id": data["activity"]["object"]["id"]},
{"activity.object": data["activity"]["object"]["id"]},
],
}
)
)
2018-08-28 20:14:48 +00:00
likes = []
for doc in raw_likes:
try:
likes.append(doc["meta"]["actor"])
except Exception:
app.logger.exception(f"invalid doc: {doc!r}")
2018-08-01 06:29:08 +00:00
app.logger.info(f"likes={likes!r}")
2018-08-28 20:14:48 +00:00
raw_shares = list(
2018-06-29 20:16:26 +00:00
DB.activities.find(
2018-06-16 20:02:10 +00:00
{
"meta.undo": False,
2018-07-19 23:12:02 +00:00
"meta.deleted": False,
2018-06-16 20:02:10 +00:00
"type": ActivityType.ANNOUNCE.value,
"$or": [
{"activity.object.id": data["activity"]["object"]["id"]},
{"activity.object": data["activity"]["object"]["id"]},
],
}
)
)
2018-08-28 20:14:48 +00:00
shares = []
for doc in raw_shares:
try:
shares.append(doc["meta"]["actor"])
except Exception:
app.logger.exception(f"invalid doc: {doc!r}")
2018-08-01 06:29:08 +00:00
app.logger.info(f"shares={shares!r}")
2018-06-03 21:36:16 +00:00
return htmlify(
render_template(
"note.html", likes=likes, shares=shares, thread=thread, note=data
)
2018-06-16 20:02:10 +00:00
)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
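

# The ActivityPub outbox: GET returns the public outbox as an OrderedCollection,
# POST is the client-to-server endpoint, guarded by _api_required().
# Rough client-to-server sketch (the instance URL is hypothetical and the exact
# auth header accepted by _api_required() may differ):
#
#   curl -X POST https://my-instance.example/outbox \
#        -H "Authorization: Bearer <api token>" \
#        -H "Content-Type: application/json" \
#        -d '{"type": "Note", "content": "hello"}'
#
# The posted JSON is parsed with ap.parse_activity() and handed to
# post_to_outbox(), which returns the new activity URL in the Location header.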
@app.route("/outbox", methods=["GET", "POST"])
2018-06-16 19:24:53 +00:00
def outbox():
2018-06-16 20:02:10 +00:00
if request.method == "GET":
2018-06-16 19:24:53 +00:00
if not is_api_request():
abort(404)
2019-08-08 22:07:08 +00:00
_log_sig()
# TODO(tsileo): returns the whole outbox if authenticated and look at OCAP support
2018-05-18 18:41:41 +00:00
q = {
2018-06-29 20:16:26 +00:00
"box": Box.OUTBOX.value,
2018-07-09 22:49:52 +00:00
"meta.deleted": False,
2019-04-14 17:17:54 +00:00
"meta.undo": False,
"meta.public": True,
"type": {"$in": [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
2018-05-18 18:41:41 +00:00
}
return activitypubify(
2018-06-16 20:02:10 +00:00
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: activity_from_doc(doc, embed=True),
2018-07-31 21:23:20 +00:00
col_name="outbox",
2018-06-16 20:02:10 +00:00
)
)
2018-05-18 18:41:41 +00:00
# Handle POST request
2018-05-21 12:30:52 +00:00
try:
_api_required()
except BadSignature:
abort(401)
2018-06-16 19:24:53 +00:00
2018-05-18 18:41:41 +00:00
data = request.get_json(force=True)
2018-06-17 17:21:59 +00:00
activity = ap.parse_activity(data)
2019-04-05 09:35:48 +00:00
activity_id = post_to_outbox(activity)
2018-07-29 14:07:27 +00:00
return Response(status=201, headers={"Location": activity_id})
2018-05-18 18:41:41 +00:00
2019-08-20 20:16:47 +00:00
@app.route("/emoji/<name>")
def ap_emoji(name):
if name in EMOJIS:
2019-08-24 09:11:36 +00:00
return activitypubify(**{**EMOJIS[name], "@context": config.DEFAULT_CTX})
2019-08-20 20:16:47 +00:00
abort(404)
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>")
2018-05-18 18:41:41 +00:00
def outbox_detail(item_id):
2018-06-29 20:16:26 +00:00
doc = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": activity_url(item_id),
"meta.public": True,
}
2018-06-29 20:16:26 +00:00
)
2018-07-09 22:49:52 +00:00
if not doc:
abort(404)
2019-08-08 22:07:08 +00:00
_log_sig()
2018-06-16 20:02:10 +00:00
if doc["meta"].get("deleted", False):
abort(404)
return activitypubify(**activity_from_doc(doc))
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/activity")
2018-05-18 18:41:41 +00:00
def outbox_activity(item_id):
2019-08-01 20:25:58 +00:00
data = find_one_activity(
{**in_outbox(), **by_remote_id(activity_url(item_id)), **is_public()}
2019-08-01 20:25:58 +00:00
)
2018-05-18 18:41:41 +00:00
if not data:
abort(404)
2019-04-14 17:17:54 +00:00
2019-08-08 22:07:08 +00:00
_log_sig()
2018-05-28 17:46:23 +00:00
obj = activity_from_doc(data)
2018-07-09 22:49:52 +00:00
if data["meta"].get("deleted", False):
abort(404)
2018-07-09 22:49:52 +00:00
2018-06-16 20:02:10 +00:00
if obj["type"] != ActivityType.CREATE.value:
2018-05-18 18:41:41 +00:00
abort(404)
return activitypubify(**obj["object"])
2018-05-28 17:46:23 +00:00
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/replies")
def outbox_activity_replies(item_id):
if not is_api_request():
abort(404)
2019-08-08 22:07:08 +00:00
_log_sig()
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": activity_url(item_id),
2018-06-29 20:16:26 +00:00
"meta.deleted": False,
"meta.public": True,
2018-06-29 20:16:26 +00:00
}
2018-06-18 20:01:21 +00:00
)
if not data:
abort(404)
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 20:02:10 +00:00
"meta.deleted": False,
"meta.public": True,
2018-06-16 20:02:10 +00:00
"type": ActivityType.CREATE.value,
"activity.object.inReplyTo": obj.get_object().id,
}
return activitypubify(
2018-06-16 20:02:10 +00:00
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
col_name=f"outbox/{item_id}/replies",
first_page=request.args.get("page") == "first",
)
)
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/likes")
def outbox_activity_likes(item_id):
if not is_api_request():
abort(404)
2019-08-08 22:07:08 +00:00
_log_sig()
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": activity_url(item_id),
2018-06-29 20:16:26 +00:00
"meta.deleted": False,
"meta.public": True,
2018-06-29 20:16:26 +00:00
}
2018-06-18 20:01:21 +00:00
)
if not data:
abort(404)
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 20:02:10 +00:00
"meta.undo": False,
"type": ActivityType.LIKE.value,
"$or": [
{"activity.object.id": obj.get_object().id},
{"activity.object": obj.get_object().id},
],
}
return activitypubify(
2018-06-16 20:02:10 +00:00
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
col_name=f"outbox/{item_id}/likes",
first_page=request.args.get("page") == "first",
)
)
2018-06-16 20:02:10 +00:00
@app.route("/outbox/<item_id>/shares")
def outbox_activity_shares(item_id):
if not is_api_request():
abort(404)
2018-06-29 20:16:26 +00:00
data = DB.activities.find_one(
{
"box": Box.OUTBOX.value,
"remote_id": activity_url(item_id),
2018-06-29 20:16:26 +00:00
"meta.deleted": False,
}
2018-06-18 20:01:21 +00:00
)
if not data:
abort(404)
2019-08-08 22:07:08 +00:00
_log_sig()
2018-06-17 17:21:59 +00:00
obj = ap.parse_activity(data["activity"])
if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
2018-06-16 20:02:10 +00:00
"meta.undo": False,
"type": ActivityType.ANNOUNCE.value,
"$or": [
{"activity.object.id": obj.get_object().id},
{"activity.object": obj.get_object().id},
],
}
return activitypubify(
2018-06-16 20:02:10 +00:00
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
col_name=f"outbox/{item_id}/shares",
first_page=request.args.get("page") == "first",
)
)
2019-08-01 17:55:30 +00:00
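

# The ActivityPub inbox. Incoming activities are verified in two steps: first
# the HTTP signature of the POST request, and if that fails, the activity ID
# is fetched from the origin server to confirm it really exists there.
# Payloads that cannot be verified either way are stored in DB.trash for later
# analysis and rejected with a 422.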
@app.route("/inbox", methods=["GET", "POST"]) # noqa: C901
def inbox():
# GET /inbox
if request.method == "GET":
if not is_api_request():
abort(404)
try:
_api_required()
except BadSignature:
abort(404)
return activitypubify(
2019-08-01 17:55:30 +00:00
**activitypub.build_ordered_collection(
DB.activities,
q={"meta.deleted": False, "box": Box.INBOX.value},
cursor=request.args.get("cursor"),
map_func=lambda doc: remove_context(doc["activity"]),
col_name="inbox",
)
)
2019-08-01 17:55:30 +00:00
# POST/ inbox
try:
data = request.get_json(force=True)
if not isinstance(data, dict):
raise ValueError("not a dict")
except Exception:
return Response(
status=422,
headers={"Content-Type": "application/json"},
2019-08-05 20:40:24 +00:00
response=json.dumps(
{
"error": "failed to decode request body as JSON",
"request_id": g.request_id,
}
),
2019-08-01 17:55:30 +00:00
)
2019-04-07 10:27:48 +00:00
2019-08-01 17:55:30 +00:00
# Check the blacklist now to see if we can return super early
2019-08-05 20:40:24 +00:00
if is_blacklisted(data):
2019-08-01 17:55:30 +00:00
logger.info(f"dropping activity from blacklisted host: {data['id']}")
return Response(status=201)
2019-04-07 10:27:48 +00:00
2019-08-05 20:40:24 +00:00
logger.info(f"request_id={g.request_id} req_headers={request.headers!r}")
logger.info(f"request_id={g.request_id} raw_data={data}")
2019-08-01 17:55:30 +00:00
try:
2019-08-08 20:54:33 +00:00
req_verified, actor_id = verify_request(
2019-08-01 17:55:30 +00:00
request.method, request.path, request.headers, request.data
2019-08-08 20:54:33 +00:00
)
if not req_verified:
2019-08-01 17:55:30 +00:00
raise Exception("failed to verify request")
2019-08-08 20:54:33 +00:00
logger.info(f"request_id={g.request_id} signed by {actor_id}")
2019-08-01 17:55:30 +00:00
except Exception:
logger.exception(
2019-08-05 20:40:24 +00:00
f"failed to verify request {g.request_id}, trying to verify the payload by fetching the remote"
2019-08-01 17:55:30 +00:00
)
try:
remote_data = get_backend().fetch_iri(data["id"])
except ActivityGoneError:
# XXX Mastodon sends Delete activities that are not dereferencable, it's the actor url with #delete
# appended, so an `ActivityGoneError` kind of ensure it's "legit"
if data["type"] == ActivityType.DELETE.value and data["id"].startswith(
data["object"]
):
# If we're here, this means the key is not saved, so we cannot verify the object
logger.info(f"received a Delete for an unknown actor {data!r}, drop it")
2019-08-01 17:55:30 +00:00
return Response(status=201)
except Exception:
logger.exception(f"failed to fetch remote for payload {data!r}")
2019-08-01 17:55:30 +00:00
if "type" in data:
# Friendica does not returns a 410, but a 302 that redirect to an HTML page
if ap._has_type(data["type"], ActivityType.DELETE):
logger.info(
f"received a Delete for an unknown actor {data!r}, drop it"
)
return Response(status=201)
2019-04-11 17:24:28 +00:00
2019-08-01 17:55:30 +00:00
if "id" in data:
if DB.trash.find_one({"activity.id": data["id"]}):
# It's already stored in trash, returns early
return Response(
status=422,
headers={"Content-Type": "application/json"},
response=json.dumps(
{
2019-08-05 20:40:24 +00:00
"error": "failed to verify request (using HTTP signatures or fetching the IRI)",
"request_id": g.request_id,
2019-08-01 17:55:30 +00:00
}
),
)
2018-07-14 10:29:46 +00:00
2019-08-01 17:55:30 +00:00
# Now we can store this activity in the trash for later analysis
2018-07-14 10:29:46 +00:00
2019-08-01 17:55:30 +00:00
# Track/store the payload for analysis
ip, geoip = _get_ip()
2018-07-14 10:29:46 +00:00
2019-08-01 17:55:30 +00:00
DB.trash.insert(
{
"activity": data,
"meta": {
"ts": datetime.now().timestamp(),
"ip_address": ip,
"geoip": geoip,
"tb": traceback.format_exc(),
"headers": dict(request.headers),
2019-08-05 20:40:24 +00:00
"request_id": g.request_id,
2019-08-01 17:55:30 +00:00
},
}
2019-05-12 08:06:26 +00:00
)
2019-05-12 08:02:28 +00:00
2018-06-02 07:07:57 +00:00
return Response(
status=422,
2018-06-16 20:02:10 +00:00
headers={"Content-Type": "application/json"},
response=json.dumps(
{
2019-08-05 20:40:24 +00:00
"error": "failed to verify request (using HTTP signatures or fetching the IRI)",
"request_id": g.request_id,
2018-06-16 20:02:10 +00:00
}
),
2018-06-02 07:07:57 +00:00
)
2019-07-07 12:14:13 +00:00
# We fetched the remote data successfully
data = remote_data
2018-06-17 17:21:59 +00:00
activity = ap.parse_activity(data)
2019-08-05 20:40:24 +00:00
logger.debug(f"inbox activity={g.request_id}/{activity}/{data}")
2019-04-05 09:35:48 +00:00
post_to_inbox(activity)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
return Response(status=201)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
@app.route("/followers")
2018-06-16 19:24:53 +00:00
def followers():
q = {"box": Box.INBOX.value, "type": ActivityType.FOLLOW.value, "meta.undo": False}
2018-06-16 19:24:53 +00:00
if is_api_request():
2019-08-08 22:07:08 +00:00
_log_sig()
return activitypubify(
2018-05-18 18:41:41 +00:00
**activitypub.build_ordered_collection(
DB.activities,
q=q,
2018-06-16 20:02:10 +00:00
cursor=request.args.get("cursor"),
2018-07-07 12:07:29 +00:00
map_func=lambda doc: doc["activity"]["actor"],
2018-07-31 21:23:20 +00:00
col_name="followers",
2018-05-18 18:41:41 +00:00
)
2018-06-16 19:24:53 +00:00
)
2018-05-18 18:41:41 +00:00
2018-07-18 21:18:39 +00:00
raw_followers, older_than, newer_than = paginated_query(DB.activities, q)
2019-09-01 09:32:12 +00:00
followers = [doc["meta"] for doc in raw_followers if "actor" in doc.get("meta", {})]
return htmlify(
render_template(
"followers.html",
followers_data=followers,
older_than=older_than,
newer_than=newer_than,
)
)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
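

# The following collection respects config.HIDE_FOLLOWING: anonymous visitors
# get a 404 on the HTML page and an empty OrderedCollection over ActivityPub.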
@app.route("/following")
2018-05-18 18:41:41 +00:00
def following():
2019-08-05 20:40:24 +00:00
q = {**in_outbox(), **by_type(ActivityType.FOLLOW), **not_undo()}
2018-05-18 18:41:41 +00:00
if is_api_request():
2019-08-08 22:07:08 +00:00
_log_sig()
if config.HIDE_FOLLOWING:
return activitypubify(
**activitypub.simple_build_ordered_collection("following", [])
)
return activitypubify(
2018-05-18 18:41:41 +00:00
**activitypub.build_ordered_collection(
DB.activities,
q=q,
2018-06-16 20:02:10 +00:00
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
2018-07-31 21:23:20 +00:00
col_name="following",
2018-06-16 20:02:10 +00:00
)
2018-05-18 18:41:41 +00:00
)
2018-06-16 19:24:53 +00:00
2018-07-18 21:18:39 +00:00
if config.HIDE_FOLLOWING and not session.get("logged_in", False):
2018-07-17 21:42:21 +00:00
abort(404)
following, older_than, newer_than = paginated_query(DB.activities, q)
2019-04-05 09:35:48 +00:00
following = [
2019-09-01 09:32:12 +00:00
(doc["remote_id"], doc["meta"])
2019-04-05 09:35:48 +00:00
for doc in following
if "remote_id" in doc and "object" in doc.get("meta", {})
]
2019-07-22 18:32:35 +00:00
lists = list(DB.lists.find())
return htmlify(
render_template(
"following.html",
following_data=following,
older_than=older_than,
newer_than=newer_than,
lists=lists,
)
)
2018-05-18 18:41:41 +00:00
2018-06-16 20:02:10 +00:00
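

# Public activities for a given hashtag, as an HTML page or (for AP requests)
# an OrderedCollection of object IDs.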
@app.route("/tags/<tag>")
2018-05-18 18:41:41 +00:00
def tags(tag):
2018-06-29 20:16:26 +00:00
if not DB.activities.count(
{
2019-09-01 18:58:51 +00:00
**in_outbox(),
**by_hashtag(tag),
**by_visibility(ap.Visibility.PUBLIC),
**not_deleted(),
2018-06-29 20:16:26 +00:00
}
2018-06-16 20:02:10 +00:00
):
2018-05-18 18:41:41 +00:00
abort(404)
if not is_api_request():
return htmlify(
render_template(
"tags.html",
tag=tag,
outbox_data=DB.activities.find(
{
2019-09-01 18:58:51 +00:00
**in_outbox(),
**by_hashtag(tag),
**by_visibility(ap.Visibility.PUBLIC),
**not_deleted(),
}
),
)
2018-05-18 18:41:41 +00:00
)
2019-08-08 22:07:08 +00:00
_log_sig()
2018-05-18 18:41:41 +00:00
q = {
2019-09-01 18:58:51 +00:00
**in_outbox(),
**by_hashtag(tag),
**by_visibility(ap.Visibility.PUBLIC),
**not_deleted(),
2018-05-18 18:41:41 +00:00
}
return activitypubify(
2018-06-16 20:02:10 +00:00
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"]["id"],
col_name=f"tags/{tag}",
)
)
2018-05-18 18:41:41 +00:00
@app.route("/featured")
def featured():
if not is_api_request():
abort(404)
2019-08-01 17:55:30 +00:00
2019-08-08 22:07:08 +00:00
_log_sig()
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.CREATE.value,
"meta.deleted": False,
"meta.undo": False,
"meta.pinned": True,
}
data = [clean_activity(doc["activity"]["object"]) for doc in DB.activities.find(q)]
return activitypubify(
**activitypub.simple_build_ordered_collection("featured", data)
)
2018-06-16 20:02:10 +00:00
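

# The liked collection requires authentication for both the HTML page and the
# ActivityPub collection (hence the @api_required decorator).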
@app.route("/liked")
2019-08-13 22:06:58 +00:00
@api_required
2018-05-18 18:41:41 +00:00
def liked():
if not is_api_request():
2018-07-06 21:53:33 +00:00
q = {
"box": Box.OUTBOX.value,
"type": ActivityType.LIKE.value,
"meta.deleted": False,
"meta.undo": False,
}
liked, older_than, newer_than = paginated_query(DB.activities, q)
return htmlify(
render_template(
"liked.html", liked=liked, older_than=older_than, newer_than=newer_than
)
)
2018-06-16 20:02:10 +00:00
q = {"meta.deleted": False, "meta.undo": False, "type": ActivityType.LIKE.value}
return activitypubify(
2018-06-16 20:02:10 +00:00
**activitypub.build_ordered_collection(
2018-06-29 20:16:26 +00:00
DB.activities,
2018-06-16 20:02:10 +00:00
q=q,
cursor=request.args.get("cursor"),
map_func=lambda doc: doc["activity"]["object"],
col_name="liked",
)
)
2018-05-18 18:41:41 +00:00
2019-04-05 19:36:56 +00:00


#################
# Feeds


@app.route("/feed.json")
def json_feed():
    return Response(
        response=json.dumps(feed.json_feed("/feed.json")),
        headers={"Content-Type": "application/json"},
    )


@app.route("/feed.atom")
def atom_feed():
    return Response(
        response=feed.gen_feed().atom_str(),
        headers={"Content-Type": "application/atom+xml"},
    )


@app.route("/feed.rss")
def rss_feed():
    return Response(
        response=feed.gen_feed().rss_str(),
        headers={"Content-Type": "application/rss+xml"},
    )