2018-05-18 18:41:41 +00:00
|
|
|
import binascii
|
|
|
|
import hashlib
|
|
|
|
import json
|
|
|
|
import urllib
|
|
|
|
import os
|
|
|
|
import mimetypes
|
|
|
|
import logging
|
|
|
|
from functools import wraps
|
|
|
|
from datetime import datetime
|
|
|
|
|
|
|
|
import timeago
|
|
|
|
import bleach
|
|
|
|
import mf2py
|
|
|
|
import pymongo
|
|
|
|
import piexif
|
|
|
|
from bson.objectid import ObjectId
|
|
|
|
from flask import Flask
|
|
|
|
from flask import abort
|
|
|
|
from flask import request
|
|
|
|
from flask import redirect
|
|
|
|
from flask import Response
|
|
|
|
from flask import render_template
|
|
|
|
from flask import session
|
2018-05-27 09:50:09 +00:00
|
|
|
from flask import jsonify as flask_jsonify
|
2018-05-18 18:41:41 +00:00
|
|
|
from flask import url_for
|
|
|
|
from html2text import html2text
|
|
|
|
from itsdangerous import JSONWebSignatureSerializer
|
|
|
|
from itsdangerous import BadSignature
|
|
|
|
from passlib.hash import bcrypt
|
|
|
|
from u2flib_server import u2f
|
|
|
|
from urllib.parse import urlparse, urlencode
|
|
|
|
from werkzeug.utils import secure_filename
|
2018-05-29 16:59:37 +00:00
|
|
|
from flask_wtf.csrf import CSRFProtect
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
import activitypub
|
|
|
|
import config
|
2018-05-27 18:40:42 +00:00
|
|
|
from activitypub import ActivityType
|
2018-05-18 18:41:41 +00:00
|
|
|
from activitypub import clean_activity
|
2018-05-28 17:46:23 +00:00
|
|
|
from activitypub import embed_collection
|
2018-05-21 09:21:11 +00:00
|
|
|
from utils.content_helper import parse_markdown
|
2018-05-18 18:41:41 +00:00
|
|
|
from config import KEY
|
|
|
|
from config import DB
|
|
|
|
from config import ME
|
|
|
|
from config import ID
|
|
|
|
from config import DOMAIN
|
|
|
|
from config import USERNAME
|
|
|
|
from config import BASE_URL
|
|
|
|
from config import ACTOR_SERVICE
|
|
|
|
from config import OBJECT_SERVICE
|
|
|
|
from config import PASS
|
|
|
|
from config import HEADERS
|
2018-05-21 15:04:53 +00:00
|
|
|
from config import VERSION
|
2018-05-27 09:50:09 +00:00
|
|
|
from config import DEBUG_MODE
|
2018-05-30 21:47:01 +00:00
|
|
|
from config import JWT
|
|
|
|
from config import ADMIN_API_KEY
|
2018-05-27 09:50:09 +00:00
|
|
|
from config import _drop_db
|
2018-05-21 15:04:53 +00:00
|
|
|
from config import custom_cache_purge_hook
|
2018-05-18 18:41:41 +00:00
|
|
|
from utils.httpsig import HTTPSigAuth, verify_request
|
|
|
|
from utils.key import get_secret_key
|
|
|
|
from utils.webfinger import get_remote_follow_template
|
|
|
|
from utils.webfinger import get_actor_url
|
2018-05-29 19:36:05 +00:00
|
|
|
from utils.errors import Error
|
|
|
|
from utils.errors import UnexpectedActivityTypeError
|
|
|
|
from utils.errors import BadActivityError
|
2018-06-01 18:29:44 +00:00
|
|
|
from utils.errors import NotFromOutboxError
|
|
|
|
from utils.errors import ActivityNotFoundError
|
2018-05-29 16:59:37 +00:00
|
|
|
|
|
|
|
|
2018-05-28 17:46:23 +00:00
|
|
|
from typing import Dict, Any
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
# Flask application setup. Order matters: the secret key and CSRF protection
# must be configured before any view is registered/served.
app = Flask(__name__)
app.secret_key = get_secret_key('flask')
app.config.update(
    # CSRF is enforced selectively (via csrf.protect()) rather than globally,
    # because ActivityPub endpoints receive signed server-to-server POSTs.
    WTF_CSRF_CHECK_DEFAULT=False,
)
csrf = CSRFProtect(app)

logger = logging.getLogger(__name__)

# Hook up Flask logging with gunicorn
root_logger = logging.getLogger()
if os.getenv('FLASK_DEBUG'):
    logger.setLevel(logging.DEBUG)
    root_logger.setLevel(logging.DEBUG)
else:
    # In production, reuse gunicorn's handlers/level so all logs go to one place.
    gunicorn_logger = logging.getLogger('gunicorn.error')
    root_logger.handlers = gunicorn_logger.handlers
    root_logger.setLevel(gunicorn_logger.level)

# HTTP-signature signer used for outgoing ActivityPub requests.
SIG_AUTH = HTTPSigAuth(ID+'#main-key', KEY.privkey)
|
|
|
|
|
|
|
|
|
|
|
|
def verify_pass(pwd):
    """Check a submitted password against the stored bcrypt hash (PASS)."""
    return bcrypt.verify(pwd, PASS)
|
|
|
|
|
|
|
|
@app.context_processor
def inject_config():
    """Expose the app version, config module and login state to every template."""
    return {
        'microblogpub_version': VERSION,
        'config': config,
        'logged_in': session.get('logged_in', False),
    }
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
@app.after_request
def set_x_powered_by(response):
    """Tag every outgoing response with the software name."""
    response.headers['X-Powered-By'] = 'microblog.pub'
    return response
|
|
|
|
|
|
|
|
# HTML/templates helper
|
|
|
|
# HTML tags preserved by clean_html() when sanitizing rendered content;
# everything else is stripped/escaped by bleach.
ALLOWED_TAGS = [
    'a',
    'abbr',
    'acronym',
    'b',
    'blockquote',
    'code',
    'pre',
    'em',
    'i',
    'li',
    'ol',
    'strong',
    'ul',
    'span',
    'div',
    'p',
    'h1',
    'h2',
    'h3',
    'h4',
    'h5',
    'h6',
]
|
|
|
|
|
|
|
|
|
|
|
|
def clean_html(html):
    """Sanitize untrusted HTML, keeping only the whitelisted ALLOWED_TAGS."""
    return bleach.clean(html, tags=ALLOWED_TAGS)
|
|
|
|
|
|
|
|
|
|
|
|
@app.template_filter()
def quote_plus(t):
    """Template filter: URL-encode a string for use in a query component."""
    return urllib.parse.quote_plus(t)
|
|
|
|
|
|
|
|
|
|
|
|
@app.template_filter()
def clean(html):
    """Template filter: sanitize HTML through clean_html()."""
    return clean_html(html)
|
|
|
|
|
|
|
|
|
|
|
|
@app.template_filter()
def html2plaintext(body):
    """Template filter: convert an HTML body into markdown-ish plain text."""
    return html2text(body)
|
|
|
|
|
|
|
|
|
|
|
|
@app.template_filter()
def domain(url):
    """Template filter: extract the hostname ("netloc") part of a URL."""
    return urlparse(url).netloc
|
|
|
|
|
|
|
|
|
|
|
|
@app.template_filter()
def get_actor(url):
    """Template filter: resolve an actor URL to its (cached) ActivityPub profile.

    Returns None when no URL is given (e.g. a missing attributedTo).
    """
    if not url:
        return None
    # Use the module logger instead of print() so the message goes through
    # the configured logging handlers (gunicorn in production).
    logger.debug(f'GET_ACTOR {url}')
    return ACTOR_SERVICE.get(url)
|
|
|
|
|
|
|
|
@app.template_filter()
def format_time(val):
    """Template filter: re-format an ISO 8601 UTC timestamp for display.

    Falsy values (None, '') are returned unchanged.
    """
    if not val:
        return val
    parsed = datetime.strptime(val, '%Y-%m-%dT%H:%M:%SZ')
    # NOTE(review): '%H:%M %p' mixes 24-hour time with an AM/PM suffix — kept as-is.
    return parsed.strftime('%B %d, %Y, %H:%M %p')
|
|
|
|
|
|
|
|
|
|
|
|
@app.template_filter()
def format_timeago(val):
    """Template filter: render an ISO 8601 UTC timestamp as relative time ("3h ago").

    Timestamps may arrive with or without fractional seconds, so both formats
    are tried. Falsy values are passed through unchanged.
    """
    if val:
        try:
            dt = datetime.strptime(val, '%Y-%m-%dT%H:%M:%SZ')
        except ValueError:
            # Only catch the parse failure; the original bare `except:` also
            # swallowed unrelated errors (including KeyboardInterrupt).
            dt = datetime.strptime(val, '%Y-%m-%dT%H:%M:%S.%fZ')
        return timeago.format(dt, datetime.utcnow())
    return val
|
|
|
|
|
|
|
|
def _is_img(filename):
|
|
|
|
filename = filename.lower()
|
|
|
|
if (filename.endswith('.png') or filename.endswith('.jpg') or filename.endswith('.jpeg') or
|
|
|
|
filename.endswith('.gif') or filename.endswith('.svg')):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
@app.template_filter()
def not_only_imgs(attachment):
    """Template filter: True when at least one attachment is not an image."""
    return any(not _is_img(a['url']) for a in attachment)
|
|
|
|
|
|
|
|
@app.template_filter()
def is_img(filename):
    """Template filter: expose _is_img() to templates."""
    return _is_img(filename)
|
|
|
|
|
|
|
|
|
|
|
|
def login_required(f):
    """Decorator: redirect anonymous visitors to the login page (with a `next` URL)."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if session.get('logged_in'):
            return f(*args, **kwargs)
        return redirect(url_for('login', next=request.url))
    return wrapper
|
|
|
|
|
2018-05-21 12:41:47 +00:00
|
|
|
|
2018-05-21 12:30:52 +00:00
|
|
|
def _api_required():
    """Authenticate an API call: a logged-in session (CSRF-checked) or a signed token.

    Raises itsdangerous.BadSignature when token verification fails.
    """
    if session.get('logged_in'):
        if request.method not in ['GET', 'HEAD']:
            # If a standard API request is made with a "login session", it must have a CSRF token
            csrf.protect()
        return

    # Token verification
    token = request.headers.get('Authorization', '').replace('Bearer ', '')
    if not token:
        # IndieAuth token
        token = request.form.get('access_token', '')

    # Will raise a BadSignature on bad auth
    payload = JWT.loads(token)
    logger.info(f'api call by {payload}')
|
2018-05-21 12:41:47 +00:00
|
|
|
|
|
|
|
|
|
|
|
def api_required(f):
    """Decorator: allow either a logged-in session or a valid API token (401 otherwise)."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            _api_required()
        except BadSignature:
            abort(401)
        return f(*args, **kwargs)
    return wrapper
|
|
|
|
|
|
|
|
|
|
|
|
def jsonify(**data):
    """Serialize an ActivityPub payload, injecting the default @context when absent.

    NOTE: shadows flask.jsonify on purpose (that one is imported as flask_jsonify).
    """
    data.setdefault('@context', config.CTX_AS)
    content_type = 'application/json' if app.debug else 'application/activity+json'
    return Response(
        response=json.dumps(data),
        headers={'Content-Type': content_type},
    )
|
|
|
|
|
|
|
|
|
|
|
|
def is_api_request():
    """Return True when the request's first Accept mimetype asks for ActivityPub/JSON."""
    accept = request.headers.get('Accept')
    if accept is None:
        return False
    mimetype = accept.split(',')[0]
    return mimetype in HEADERS or mimetype == 'application/json'
|
|
|
|
|
2018-05-29 19:36:05 +00:00
|
|
|
|
|
|
|
@app.errorhandler(ValueError)
def handle_value_error(error):
    """Turn an uncaught ValueError into a JSON 400 response."""
    logger.error(f'caught value error: {error!r}')
    resp = flask_jsonify(message=error.args[0])
    resp.status_code = 400
    return resp
|
|
|
|
|
|
|
|
|
|
|
|
@app.errorhandler(Error)
def handle_activitypub_error(error):
    """Turn a domain-level ActivityPub Error into a JSON response with its status code."""
    logger.error(f'caught activitypub error {error!r}')
    resp = flask_jsonify(error.to_dict())
    resp.status_code = error.status_code
    return resp
|
|
|
|
|
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
# App routes
|
|
|
|
|
|
|
|
#######
|
|
|
|
# Login
|
|
|
|
|
|
|
|
@app.route('/logout')
@login_required
def logout():
    """Clear the login flag and send the user back to the homepage."""
    session['logged_in'] = False
    return redirect('/')
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/login', methods=['POST', 'GET'])
def login():
    """Password login, with an optional U2F second factor when devices are registered."""
    devices = [doc['device'] for doc in DB.u2f.find()]
    u2f_enabled = True if devices else False
    if request.method == 'POST':
        csrf.protect()
        pwd = request.form.get('pass')
        if pwd and verify_pass(pwd):
            if devices:
                # A U2F challenge was issued on the GET; verify the signed response.
                resp = json.loads(request.form.get('resp'))
                print(resp)
                try:
                    u2f.complete_authentication(session['challenge'], resp)
                except ValueError as exc:
                    print('failed', exc)
                    abort(401)
                    return  # NOTE(review): unreachable — abort() raises
                finally:
                    # One-shot challenge: always discard it, success or failure.
                    session['challenge'] = None

            session['logged_in'] = True
            return redirect(request.args.get('redirect') or '/admin')
        else:
            abort(401)

    payload = None
    if devices:
        # Begin a U2F challenge for the login form's JS to sign.
        payload = u2f.begin_authentication(ID, devices)
        session['challenge'] = payload

    return render_template(
        'login.html',
        u2f_enabled=u2f_enabled,
        me=ME,
        payload=payload,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/remote_follow', methods=['GET', 'POST'])
def remote_follow():
    """Render the remote-follow form, or redirect to the remote instance's follow page."""
    if request.method == 'GET':
        return render_template('remote_follow.html')

    csrf.protect()
    remote_profile = '@' + request.form.get('profile')
    template = get_remote_follow_template(remote_profile)
    return redirect(template.format(uri=f'{USERNAME}@{DOMAIN}'))
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.route('/authorize_follow', methods=['GET', 'POST'])
@login_required
def authorize_follow():
    """Confirm (GET) then send (POST) a Follow activity for the given remote profile."""
    if request.method == 'GET':
        return render_template('authorize_remote_follow.html', profile=request.args.get('profile'))

    actor = get_actor_url(request.form.get('profile'))
    if not actor:
        abort(500)

    # Skip the Follow if we already follow this actor.
    already_following = DB.following.find({'remote_actor': actor}).count() > 0
    if not already_following:
        follow = activitypub.Follow(object=actor)
        follow.post_to_outbox()
    return redirect('/following')
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/u2f/register', methods=['GET', 'POST'])
@login_required
def u2f_register():
    """Register a new U2F device: GET issues a challenge, POST completes it."""
    # TODO(tsileo): ensure no duplicates
    if request.method != 'GET':
        device_resp = json.loads(request.form.get('resp'))
        device, device_cert = u2f.complete_registration(session['challenge'], device_resp)
        # Challenges are one-shot: clear it once consumed.
        session['challenge'] = None
        DB.u2f.insert_one({'device': device, 'cert': device_cert})
        return ''

    payload = u2f.begin_registration(ID)
    session['challenge'] = payload
    return render_template(
        'u2f.html',
        payload=payload,
    )
|
|
|
|
|
|
|
|
#######
|
|
|
|
# Activity pub routes
|
|
|
|
|
|
|
|
@app.route('/')
def index():
    """Homepage: JSON actor profile for API clients, HTML timeline otherwise."""
    if is_api_request():
        return jsonify(**ME)

    # FIXME(tsileo): implements pagination, also for the followers/following page
    limit = 50
    # Top-level, non-deleted Notes only (replies are on /with_replies).
    q = {
        'type': 'Create',
        'activity.object.type': 'Note',
        'activity.object.inReplyTo': None,
        'meta.deleted': False,
    }
    c = request.args.get('cursor')
    if c:
        # Cursor pagination on the (time-ordered) ObjectId.
        q['_id'] = {'$lt': ObjectId(c)}

    # Also include boosts (Announce) that were not undone.
    outbox_data = list(DB.outbox.find({'$or': [q, {'type': 'Announce', 'meta.undo': False}]}, limit=limit).sort('_id', -1))
    cursor = None
    if outbox_data and len(outbox_data) == limit:
        cursor = str(outbox_data[-1]['_id'])

    for data in outbox_data:
        if data['type'] == 'Announce':
            print(data)
            # Resolve the boosted (remote) object so the template can render it.
            if data['activity']['object'].startswith('http'):
                data['ref'] = {'activity': {'object': OBJECT_SERVICE.get(data['activity']['object'])}, 'meta': {}}

    return render_template(
        'index.html',
        me=ME,
        notes=DB.inbox.find({'type': 'Create', 'activity.object.type': 'Note', 'meta.deleted': False}).count(),
        followers=DB.followers.count(),
        following=DB.following.count(),
        outbox_data=outbox_data,
        cursor=cursor,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/with_replies')
def with_replies():
    """Timeline page like index(), but including replies.

    NOTE(review): duplicates index() except for the missing
    'activity.object.inReplyTo': None filter — consider a shared helper.
    """
    limit = 50
    q = {
        'type': 'Create',
        'activity.object.type': 'Note',
        'meta.deleted': False,
    }
    c = request.args.get('cursor')
    if c:
        # Cursor pagination on the (time-ordered) ObjectId.
        q['_id'] = {'$lt': ObjectId(c)}

    outbox_data = list(DB.outbox.find({'$or': [q, {'type': 'Announce', 'meta.undo': False}]}, limit=limit).sort('_id', -1))
    cursor = None
    if outbox_data and len(outbox_data) == limit:
        cursor = str(outbox_data[-1]['_id'])

    for data in outbox_data:
        if data['type'] == 'Announce':
            print(data)
            # Resolve the boosted (remote) object so the template can render it.
            if data['activity']['object'].startswith('http'):
                data['ref'] = {'activity': {'object': OBJECT_SERVICE.get(data['activity']['object'])}, 'meta': {}}

    return render_template(
        'index.html',
        me=ME,
        notes=DB.inbox.find({'type': 'Create', 'activity.object.type': 'Note', 'meta.deleted': False}).count(),
        followers=DB.followers.count(),
        following=DB.following.count(),
        outbox_data=outbox_data,
        cursor=cursor,
    )
|
|
|
|
|
|
|
|
|
2018-06-03 19:28:06 +00:00
|
|
|
def _build_thread(data, include_children=True):
    """Reconstruct the conversation around an activity as a flat, depth-annotated list.

    `data` is an inbox/outbox document for a Create activity; the thread is
    rebuilt from the IDs cached in its `meta.thread_*` fields. Each returned
    item carries a `_level` key (depth in the tree) so the template can indent.
    """
    # Mark the requested note so the template can highlight it.
    data['_requested'] = True
    # Root of the thread; falls back to the note itself when it has no parent.
    root_id = data['meta'].get('thread_root_parent', data['activity']['object']['id'])

    thread_ids = data['meta'].get('thread_parents', [])
    if include_children:
        thread_ids.extend(data['meta'].get('thread_children', []))

    query = {
        'activity.object.id': {'$in': thread_ids},
        'type': 'Create',
        'meta.deleted': False,  # TODO(tsileo): handle Tombstone instead of filtering them
    }
    # Fetch the root replies, and the children
    replies = [data] + list(DB.inbox.find(query)) + list(DB.outbox.find(query))

    # Index all the IDs in order to build a tree
    idx = {}
    for rep in replies:
        rep_id = rep['activity']['object']['id']
        idx[rep_id] = rep.copy()
        idx[rep_id]['_nodes'] = []

    # Build the tree
    for rep in replies:
        rep_id = rep['activity']['object']['id']
        if rep_id == root_id:
            continue
        reply_of = rep['activity']['object']['inReplyTo']
        # NOTE(review): assumes every parent is present in idx — a missing
        # intermediate reply would raise KeyError here.
        idx[reply_of]['_nodes'].append(rep)

    # Flatten the tree
    thread = []
    def _flatten(node, level=0):
        node['_level'] = level
        thread.append(node)

        # Children are visited in publication-date order (depth-first).
        for snode in sorted(idx[node['activity']['object']['id']]['_nodes'], key=lambda d: d['activity']['object']['published']):
            _flatten(snode, level=level+1)
    _flatten(idx[root_id])

    return thread
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
|
2018-06-03 19:28:06 +00:00
|
|
|
@app.route('/note/<note_id>')
def note_by_id(note_id):
    """Permalink page for a single outbox note, with its thread, likes and shares."""
    data = DB.outbox.find_one({'id': note_id})
    if not data:
        abort(404)
    if data['meta'].get('deleted', False):
        # 410 Gone: the note existed but was deleted.
        abort(410)
    thread = _build_thread(data)

    # Likes may reference the object either by embedded id or by plain IRI,
    # hence the $or on both shapes.
    likes = list(DB.inbox.find({
        'meta.undo': False,
        'type': ActivityType.LIKE.value,
        '$or': [{'activity.object.id': data['activity']['object']['id']},
                {'activity.object': data['activity']['object']['id']}],
    }))
    likes = [ACTOR_SERVICE.get(doc['activity']['actor']) for doc in likes]

    # Same dual-shape matching for boosts (Announce).
    shares = list(DB.inbox.find({
        'meta.undo': False,
        'type': ActivityType.ANNOUNCE.value,
        '$or': [{'activity.object.id': data['activity']['object']['id']},
                {'activity.object': data['activity']['object']['id']}],
    }))
    shares = [ACTOR_SERVICE.get(doc['activity']['actor']) for doc in shares]

    return render_template('note.html', likes=likes, shares=shares, me=ME, thread=thread, note=data)
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
|
2018-06-03 08:15:11 +00:00
|
|
|
@app.route('/nodeinfo')
def nodeinfo():
    """Serve a NodeInfo 2.0 document (fediverse instance metadata)."""
    info = {
        'version': '2.0',
        'software': {'name': 'microblogpub', 'version': f'Microblog.pub {VERSION}'},
        'protocols': ['activitypub'],
        'services': {'inbound': [], 'outbound': []},
        'openRegistrations': False,
        'usage': {'users': {'total': 1}, 'localPosts': DB.outbox.count()},
        'metadata': {
            'sourceCode': 'https://github.com/tsileo/microblog.pub',
            'nodeName': f'@{USERNAME}@{DOMAIN}',
        },
    }
    return Response(
        headers={'Content-Type': 'application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.0#'},
        response=json.dumps(info),
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/.well-known/nodeinfo')
def wellknown_nodeinfo():
    """Advertise the location of the NodeInfo endpoint."""
    link = {
        'rel': 'http://nodeinfo.diaspora.software/ns/schema/2.0',
        'href': f'{ID}/nodeinfo',
    }
    return flask_jsonify(links=[link])
|
|
|
|
|
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
@app.route('/.well-known/webfinger')
def wellknown_webfinger():
    """Enable WebFinger support, required for Mastodon interopability."""
    resource = request.args.get('resource')
    # Only answer for the local account (acct: form) or the actor IRI itself.
    if resource not in [f'acct:{USERNAME}@{DOMAIN}', ID]:
        abort(404)

    out = {
        "subject": f'acct:{USERNAME}@{DOMAIN}',
        "aliases": [ID],
        "links": [
            {"rel": "http://webfinger.net/rel/profile-page", "type": "text/html", "href": BASE_URL},
            {"rel": "self", "type": "application/activity+json", "href": ID},
            {"rel":"http://ostatus.org/schema/1.0/subscribe", "template": BASE_URL+"/authorize_follow?profile={uri}"},
        ],
    }

    content_type = 'application/json' if app.debug else 'application/jrd+json; charset=utf-8'
    return Response(
        response=json.dumps(out),
        headers={'Content-Type': content_type},
    )
|
|
|
|
|
2018-05-28 17:46:23 +00:00
|
|
|
|
|
|
|
def add_extra_collection(raw_doc: Dict[str, Any]) -> Dict[str, Any]:
    """Embed the replies/likes/shares collections into a Create activity's object.

    Mutates and returns raw_doc; non-Create activities pass through untouched.
    """
    if raw_doc['activity']['type'] != ActivityType.CREATE.value:
        return raw_doc

    # Each collection is embedded with its cached counter and its own endpoint.
    for counter, col_name in [('count_direct_reply', 'replies'),
                              ('count_like', 'likes'),
                              ('count_boost', 'shares')]:
        raw_doc['activity']['object'][col_name] = embed_collection(
            raw_doc.get('meta', {}).get(counter, 0),
            f'{ID}/outbox/{raw_doc["id"]}/{col_name}',
        )

    return raw_doc
|
|
|
|
|
|
|
|
|
|
|
|
def activity_from_doc(raw_doc: Dict[str, Any]) -> Dict[str, Any]:
    """Convert a raw outbox document into a publishable activity dict."""
    enriched = add_extra_collection(raw_doc)
    return clean_activity(enriched['activity'])
|
|
|
|
|
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
@app.route('/outbox', methods=['GET', 'POST'])
def outbox():
    """ActivityPub outbox: GET serves the collection, POST publishes a new activity."""
    if request.method == 'GET':
        if not is_api_request():
            abort(404)
        # TODO(tsileo): filter the outbox if not authenticated
        # FIXME(tsileo): filter deleted, add query support for build_ordered_collection
        q = {
            'meta.deleted': False,
            #'type': {'$in': [ActivityType.CREATE.value, ActivityType.ANNOUNCE.value]},
        }
        return jsonify(**activitypub.build_ordered_collection(
            DB.outbox,
            q=q,
            cursor=request.args.get('cursor'),
            map_func=lambda doc: activity_from_doc(doc),
        ))

    # Handle POST request: authenticate before touching the payload.
    try:
        _api_required()
    except BadSignature:
        abort(401)

    data = request.get_json(force=True)
    print(data)
    activity = activitypub.parse_activity(data)

    # A bare Note is wrapped into a Create activity before publishing.
    if activity.type_enum == ActivityType.NOTE:
        activity = activity.build_create()

    activity.post_to_outbox()

    # Purge the cache if a custom hook is set, as new content was published
    custom_cache_purge_hook()

    return Response(status=201, headers={'Location': activity.id})
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/outbox/<item_id>')
def outbox_detail(item_id):
    """Serve a single outbox activity; deleted items answer with a 410 Tombstone."""
    doc = DB.outbox.find_one({'id': item_id})
    if not doc:
        # An unknown id previously crashed with a TypeError on the None doc.
        abort(404)
    if doc['meta'].get('deleted', False):
        obj = activitypub.parse_activity(doc['activity'])
        resp = jsonify(**obj.get_object().get_tombstone())
        resp.status_code = 410
        return resp
    return jsonify(**activity_from_doc(doc))
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.route('/outbox/<item_id>/activity')
def outbox_activity(item_id):
    """Serve the inner object of an outbox Create activity."""
    # TODO(tsileo): handle Tombstone
    data = DB.outbox.find_one({'id': item_id, 'meta.deleted': False})
    if not data:
        abort(404)
    obj = activity_from_doc(data)
    # Only Create activities carry an embedded object to expose.
    if obj['type'] != ActivityType.CREATE.value:
        abort(404)
    return jsonify(**obj['object'])
|
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
|
2018-05-31 23:26:23 +00:00
|
|
|
@app.route('/outbox/<item_id>/replies')
def outbox_activity_replies(item_id):
    """Expose the replies collection of an outbox Create activity (API only)."""
    # TODO(tsileo): handle Tombstone
    if not is_api_request():
        abort(404)
    data = DB.outbox.find_one({'id': item_id, 'meta.deleted': False})
    if not data:
        abort(404)
    obj = activitypub.parse_activity(data['activity'])
    if obj.type_enum != ActivityType.CREATE:
        abort(404)

    # Replies live in the inbox, linked to this note via inReplyTo.
    replies_q = {
        'meta.deleted': False,
        'type': ActivityType.CREATE.value,
        'activity.object.inReplyTo': obj.get_object().id,
    }

    return jsonify(**activitypub.build_ordered_collection(
        DB.inbox,
        q=replies_q,
        cursor=request.args.get('cursor'),
        map_func=lambda doc: doc['activity']['object'],
        col_name=f'outbox/{item_id}/replies',
        first_page=request.args.get('page') == 'first',
    ))
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/outbox/<item_id>/likes')
def outbox_activity_likes(item_id):
    """Expose the likes collection of an outbox Create activity (API only)."""
    # TODO(tsileo): handle Tombstone
    if not is_api_request():
        abort(404)
    data = DB.outbox.find_one({'id': item_id, 'meta.deleted': False})
    if not data:
        abort(404)
    obj = activitypub.parse_activity(data['activity'])
    if obj.type_enum != ActivityType.CREATE:
        abort(404)

    # Likes can reference the object by embedded id or by plain IRI.
    obj_id = obj.get_object().id
    likes_q = {
        'meta.undo': False,
        'type': ActivityType.LIKE.value,
        '$or': [{'activity.object.id': obj_id},
                {'activity.object': obj_id}],
    }

    return jsonify(**activitypub.build_ordered_collection(
        DB.inbox,
        q=likes_q,
        cursor=request.args.get('cursor'),
        map_func=lambda doc: doc['activity'],
        col_name=f'outbox/{item_id}/likes',
        first_page=request.args.get('page') == 'first',
    ))
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/outbox/<item_id>/shares')
def outbox_activity_shares(item_id):
    """Expose the shares (Announce) collection of an outbox Create activity (API only)."""
    # TODO(tsileo): handle Tombstone
    if not is_api_request():
        abort(404)
    data = DB.outbox.find_one({'id': item_id, 'meta.deleted': False})
    if not data:
        abort(404)
    obj = activitypub.parse_activity(data['activity'])
    if obj.type_enum != ActivityType.CREATE:
        abort(404)

    # Boosts can reference the object by embedded id or by plain IRI.
    obj_id = obj.get_object().id
    shares_q = {
        'meta.undo': False,
        'type': ActivityType.ANNOUNCE.value,
        '$or': [{'activity.object.id': obj_id},
                {'activity.object': obj_id}],
    }

    return jsonify(**activitypub.build_ordered_collection(
        DB.inbox,
        q=shares_q,
        cursor=request.args.get('cursor'),
        map_func=lambda doc: doc['activity'],
        col_name=f'outbox/{item_id}/shares',
        first_page=request.args.get('page') == 'first',
    ))
|
|
|
|
|
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
@app.route('/admin', methods=['GET'])
@login_required
def admin():
    """Admin dashboard showing collection/cache counters."""
    liked_query = {
        'meta.deleted': False,
        'meta.undo': False,
        'type': ActivityType.LIKE.value,
    }
    col_liked = DB.outbox.count(liked_query)

    return render_template(
        'admin.html',
        instances=list(DB.instances.find()),
        inbox_size=DB.inbox.count(),
        outbox_size=DB.outbox.count(),
        object_cache_size=DB.objects_cache.count(),
        actor_cache_size=DB.actors_cache.count(),
        col_liked=col_liked,
        col_followers=DB.followers.count(),
        col_following=DB.following.count(),
    )
|
|
|
|
|
|
|
|
|
2018-06-03 09:41:18 +00:00
|
|
|
@app.route('/new', methods=['GET'])
@login_required
def new():
    """Compose page; when replying, pre-fill the mention and show the parent thread."""
    reply_id = None
    content = ''
    thread = []
    if request.args.get('reply'):
        # The note being replied to may live in either the inbox or the outbox.
        data = DB.inbox.find_one({'activity.object.id': request.args.get('reply')})
        if not data:
            data = DB.outbox.find_one({'activity.object.id': request.args.get('reply')})
        if not data:
            abort(400)

        reply = activitypub.parse_activity(data['activity'])
        reply_id = reply.id
        if reply.type_enum == ActivityType.CREATE:
            # Reply targets the inner Note, not the Create wrapper.
            reply_id = reply.get_object().id
        actor = reply.get_actor()
        domain = urlparse(actor.id).netloc
        # FIXME(tsileo): if reply of reply, fetch all participants
        content = f'@{actor.preferredUsername}@{domain} '
        thread = _build_thread(
            data,
            include_children=False,
        )

    return render_template(
        'new.html',
        reply=reply_id,
        content=content,
        thread=thread,
    )
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.route('/notifications')
@login_required
def notifications():
    """Notification stream: mentions, follows, accepts, and boosts/replies to local posts."""
    # FIXME(tsileo): implements pagination, also for the followers/following page
    limit = 50
    # Mentions of the local user...
    q = {
        'type': 'Create',
        'activity.object.tag.type': 'Mention',
        'activity.object.tag.name': f'@{USERNAME}@{DOMAIN}',
        'meta.deleted': False,
    }
    # TODO(tsileo): also include replies via regex on Create replyTo
    # ...plus follow activity and anything targeting local (BASE_URL) objects.
    q = {'$or': [q, {'type': 'Follow'}, {'type': 'Accept'}, {'type': 'Undo', 'activity.object.type': 'Follow'},
        {'type': 'Announce', 'activity.object': {'$regex': f'^{BASE_URL}'}},
        {'type': 'Create', 'activity.object.inReplyTo': {'$regex': f'^{BASE_URL}'}},
        ]}
    print(q)
    c = request.args.get('cursor')
    if c:
        # Cursor pagination on the (time-ordered) ObjectId.
        q['_id'] = {'$lt': ObjectId(c)}

    outbox_data = list(DB.inbox.find(q, limit=limit).sort('_id', -1))
    cursor = None
    if outbox_data and len(outbox_data) == limit:
        cursor = str(outbox_data[-1]['_id'])

    # TODO(tsileo): fix the annonce handling, copy it from /stream
    #for data in outbox_data:
    #    if data['type'] == 'Announce':
    #        print(data)
    #        if data['activity']['object'].startswith('http') and data['activity']['object'] in objcache:
    #            data['ref'] = {'activity': {'object': objcache[data['activity']['object']]}, 'meta': {}}
    #            out.append(data)
    #        else:
    #            out.append(data)

    return render_template(
        'stream.html',
        inbox_data=outbox_data,
        cursor=cursor,
    )
|
|
|
|
|
2018-05-28 17:46:23 +00:00
|
|
|
|
2018-05-29 19:36:05 +00:00
|
|
|
@app.route('/api/key')
@login_required
def api_user_key():
    """Return the admin API key to the logged-in user."""
    return flask_jsonify(api_key=ADMIN_API_KEY)
|
|
|
|
|
|
|
|
|
2018-06-03 12:34:04 +00:00
|
|
|
def _user_api_arg(key: str, **kwargs):
    """Look up *key* in the current request: JSON body first, then the query
    string and form data.

    If the value is missing, return ``kwargs['default']`` when one was
    supplied, otherwise raise a ``ValueError``.
    """
    if request.is_json:
        value = request.json.get(key)
    else:
        value = request.args.get(key) or request.form.get(key)

    if value:
        return value

    # Fall back to a caller-supplied default, if any.
    if 'default' in kwargs:
        return kwargs.get('default')

    raise ValueError(f'missing {key}')
|
|
|
|
|
|
|
|
|
|
|
|
def _user_api_get_note(from_outbox: bool = False):
    """Fetch the Note targeted by the request's `id` argument.

    When `from_outbox` is True, additionally ensure the note is owned by this
    server (its IRI starts with our ID).

    Raises:
        ValueError: if the `id` request argument is missing.
        NotFromOutboxError: if `from_outbox` is set and the note is remote.
    """
    oid = _user_api_arg('id')
    note = activitypub.parse_activity(OBJECT_SERVICE.get(oid), expected=ActivityType.NOTE)
    if from_outbox and not note.id.startswith(ID):
        # Bugfix: the message used to say "cannot delete", but this helper is
        # shared by several endpoints, so keep it action-neutral.
        raise NotFromOutboxError(f'cannot use {note.id}, id must be owned by the server')

    return note
|
2018-05-29 19:36:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _user_api_response(**kwargs):
    """Build the response for a user API call.

    Redirect when a `redirect` request argument was given; otherwise return
    the keyword arguments as a 201 JSON response.
    """
    target = _user_api_arg('redirect', default=None)
    if target:
        return redirect(target)

    response = flask_jsonify(**kwargs)
    response.status_code = 201
    return response
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/note/delete', methods=['POST'])
@api_required
def api_delete():
    """API endpoint to delete a Note activity (must be owned by this server)."""
    note = _user_api_get_note(from_outbox=True)

    delete_activity = note.build_delete()
    delete_activity.post_to_outbox()

    return _user_api_response(activity=delete_activity.id)
|
|
|
|
|
2018-05-28 20:38:48 +00:00
|
|
|
|
2018-06-01 18:29:44 +00:00
|
|
|
@app.route('/api/boost', methods=['POST'])
@api_required
def api_boost():
    """API endpoint to boost (Announce) the Note given by the `id` argument."""
    note = _user_api_get_note()

    boost = note.build_announce()
    boost.post_to_outbox()

    return _user_api_response(activity=boost.id)
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/api/like', methods=['POST'])
@api_required
def api_like():
    """API endpoint to like the Note given by the `id` argument."""
    note = _user_api_get_note()

    like_activity = note.build_like()
    like_activity.post_to_outbox()

    return _user_api_response(activity=like_activity.id)
|
2018-05-28 17:46:23 +00:00
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
|
2018-06-01 18:29:44 +00:00
|
|
|
@app.route('/api/undo', methods=['POST'])
@api_required
def api_undo():
    """API endpoint to undo an outbox activity identified by the `id` argument.

    Raises:
        ActivityNotFoundError: if no outbox activity matches the given id.
    """
    oid = _user_api_arg('id')
    # The given id may be either the local id or the remote one.
    doc = DB.outbox.find_one({'$or': [{'id': oid}, {'remote_id': oid}]})
    if not doc:
        # Bugfix: message read "cannot found".
        raise ActivityNotFoundError(f'cannot find {oid}')

    obj = activitypub.parse_activity(doc.get('activity'))
    # FIXME(tsileo): detect already undo-ed and make this API call idempotent
    undo = obj.build_undo()
    undo.post_to_outbox()

    return _user_api_response(activity=undo.id)
|
2018-05-27 12:21:06 +00:00
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
@app.route('/stream')
@login_required
def stream():
    """Render the home stream: incoming top-level Notes plus Announces.

    Announced objects are resolved from the local objects cache so the
    template can display the shared content inline.
    """
    # FIXME(tsileo): implements pagination, also for the followers/following page
    limit = 100
    q = {
        'type': 'Create',
        'activity.object.type': 'Note',
        'activity.object.inReplyTo': None,  # top-level notes only
        'meta.deleted': False,
    }
    c = request.args.get('cursor')
    if c:
        # Resume pagination strictly after the given ObjectId.
        q['_id'] = {'$lt': ObjectId(c)}

    outbox_data = list(DB.inbox.find(
        {
            '$or': [
                q,
                {
                    'type': 'Announce',
                },
            ]
        }, limit=limit).sort('activity.published', -1))
    cursor = None
    if outbox_data and len(outbox_data) == limit:
        # A full page means there may be more results.
        cursor = str(outbox_data[-1]['_id'])

    out = []
    objcache = {}
    # Preload cached remote objects so Announces can be rendered inline.
    cached = list(DB.objects_cache.find({'meta.part_of_stream': True}, limit=limit*3).sort('meta.announce_published', -1))
    for c in cached:
        objcache[c['object_id']] = c['cached_object']
    for data in outbox_data:
        if data['type'] == 'Announce':
            if data['activity']['object'].startswith('http') and data['activity']['object'] in objcache:
                data['ref'] = {'activity': {'object': objcache[data['activity']['object']]}, 'meta': {}}
                out.append(data)
            else:
                # Announced object is not in the cache; skip it.
                # (Was a stray `print('OMG', data)` debug leftover.)
                logger.debug(f'skipping Announce with uncached object: {data!r}')
        else:
            out.append(data)
    return render_template(
        'stream.html',
        inbox_data=out,
        cursor=cursor,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/inbox', methods=['GET', 'POST'])
def inbox():
    """ActivityPub inbox endpoint.

    GET (authenticated API requests only): return the inbox as an
    OrderedCollection of activities.
    POST: accept an incoming activity after verifying its origin — first via
    HTTP signature, then by re-fetching the activity at its claimed IRI —
    and process it.
    """
    if request.method == 'GET':
        if not is_api_request():
            abort(404)
        try:
            _api_required()
        except BadSignature:
            # Hide the endpoint from unauthenticated callers.
            abort(404)

        return jsonify(**activitypub.build_ordered_collection(
            DB.inbox,
            q={'meta.deleted': False},
            cursor=request.args.get('cursor'),
            map_func=lambda doc: doc['activity'],
        ))

    data = request.get_json(force=True)
    logger.debug(f'req_headers={request.headers}')
    logger.debug(f'raw_data={data}')
    try:
        # First attempt: verify the HTTP signature of the incoming request.
        if not verify_request(ACTOR_SERVICE):
            raise Exception('failed to verify request')
    except Exception:
        # Fallback: a payload that can be re-fetched from its claimed origin
        # IRI is considered authentic.
        logger.exception('failed to verify request, trying to verify the payload by fetching the remote')
        try:
            data = OBJECT_SERVICE.get(data['id'])
        except Exception:
            logger.exception(f'failed to fetch remote id at {data["id"]}')
            # Both verification strategies failed: reject the payload.
            return Response(
                status=422,
                headers={'Content-Type': 'application/json'},
                response=json.dumps({'error': 'failed to verify request (using HTTP signatures or fetching the IRI)'}),
            )

    activity = activitypub.parse_activity(data)
    logger.debug(f'inbox activity={activity}/{data}')
    activity.process_from_inbox()

    return Response(
        status=201,
    )
|
|
|
|
|
|
|
|
|
2018-05-27 09:50:09 +00:00
|
|
|
@app.route('/api/debug', methods=['GET', 'DELETE'])
@api_required
def api_debug():
    """Endpoint used/needed for testing, only works in DEBUG_MODE.

    DELETE drops the database; GET returns inbox/outbox counts.
    """
    if not DEBUG_MODE:
        return flask_jsonify(message='DEBUG_MODE is off')

    if request.method == 'DELETE':
        _drop_db()
        return flask_jsonify(message='DB dropped')

    counts = {
        'inbox': DB.inbox.count(),
        'outbox': DB.outbox.count(),
    }
    return flask_jsonify(**counts)
|
|
|
|
|
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
@app.route('/api/upload', methods=['POST'])
@api_required
def api_upload():
    """Handle a media upload and post a Note with the file attached.

    Expects the file in the `file` multipart field; optional `content` and
    `to` query arguments control the note body and primary addressing.
    JPEG uploads get their EXIF metadata stripped before publishing.
    """
    file = request.files['file']
    rfilename = secure_filename(file.filename)
    # Random prefix so two uploads with the same name cannot collide.
    prefix = hashlib.sha256(os.urandom(32)).hexdigest()[:6]
    mtype = mimetypes.guess_type(rfilename)[0]
    filename = f'{prefix}_{rfilename}'
    file.save(os.path.join('static', 'media', filename))

    # Remove EXIF metadata
    if filename.lower().endswith('.jpg') or filename.lower().endswith('.jpeg'):
        piexif.remove(os.path.join('static', 'media', filename))

    # Was a series of stray print() debug leftovers.
    logger.debug(f'upload OK, filename={filename}')
    attachment = [
        {'mediaType': mtype,
         'name': rfilename,
         'type': 'Document',
         # Bugfix: the URL contained a literal placeholder instead of the
         # actual saved filename, producing broken media links.
         'url': BASE_URL + f'/static/media/{filename}'
         },
    ]
    logger.debug(f'attachment={attachment}')
    content = request.args.get('content')
    to = request.args.get('to')
    note = activitypub.Note(
        cc=[ID+'/followers'],
        to=[to if to else config.AS_PUBLIC],
        content=content,  # TODO(tsileo): handle markdown
        attachment=attachment,
    )
    create = note.build_create()
    create.post_to_outbox()

    return Response(
        status=201,
        response='OK',
    )
|
|
|
|
|
|
|
|
|
2018-06-01 18:29:44 +00:00
|
|
|
@app.route('/api/new_note', methods=['POST'])
@api_required
def api_new_note():
    """API endpoint to create a new Note from markdown `content`.

    Optional arguments: `reply` (IRI of the note being replied to) and `to`
    (primary recipient; defaults to the AS Public collection). The reply's
    author and any mentioned actors are added to the cc list.

    Raises:
        ValueError: if the `content` argument is missing or empty.
    """
    source = _user_api_arg('content')
    if not source:
        raise ValueError('missing content')

    _reply, reply = None, None
    try:
        _reply = _user_api_arg('reply')
    except ValueError:
        # `reply` is optional; no argument means a top-level note.
        pass

    content, tags = parse_markdown(source)
    to = request.args.get('to')
    cc = [ID+'/followers']

    if _reply:
        reply = activitypub.parse_activity(OBJECT_SERVICE.get(_reply))
        # cc the author of the note being replied to.
        cc.append(reply.attributedTo)

    for tag in tags:
        if tag['type'] == 'Mention':
            # cc every mentioned actor.
            cc.append(tag['href'])

    note = activitypub.Note(
        cc=list(set(cc)),  # de-duplicate recipients
        to=[to if to else config.AS_PUBLIC],
        content=content,
        tag=tags,
        # Keep the raw markdown so the note can be edited later.
        source={'mediaType': 'text/markdown', 'content': source},
        inReplyTo=reply.id if reply else None
    )
    create = note.build_create()
    create.post_to_outbox()

    return _user_api_response(activity=create.id)
|
|
|
|
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
@app.route('/api/stream')
@api_required
def api_stream():
    """Return the inbox stream as a JSON Feed."""
    feed = activitypub.build_inbox_json_feed('/api/stream', request.args.get('cursor'))
    return Response(
        response=json.dumps(feed),
        headers={'Content-Type': 'application/json'},
    )
|
|
|
|
|
2018-05-28 22:12:44 +00:00
|
|
|
|
2018-06-01 18:29:44 +00:00
|
|
|
@app.route('/api/block', methods=['POST'])
@api_required
def api_block():
    """API endpoint to block the actor given by the `actor` argument.

    Idempotent: if an active (not undone) Block already exists, it is
    returned instead of creating a new one.
    """
    actor = _user_api_arg('actor')

    query = {
        'type': ActivityType.BLOCK.value,
        'activity.object': actor,
        'meta.undo': False,
    }
    active_block = DB.outbox.find_one(query)
    if active_block:
        return _user_api_response(activity=active_block['activity']['id'])

    block = activitypub.Block(object=actor)
    block.post_to_outbox()

    return _user_api_response(activity=block.id)
|
2018-05-28 22:12:44 +00:00
|
|
|
|
|
|
|
|
2018-06-01 18:29:44 +00:00
|
|
|
@app.route('/api/follow', methods=['POST'])
@api_required
def api_follow():
    """API endpoint to follow the actor given by the `actor` argument.

    Idempotent: an already-followed actor returns the existing Follow.
    """
    actor = _user_api_arg('actor')

    already_following = DB.following.find_one({'remote_actor': actor})
    if already_following:
        return _user_api_response(activity=already_following['activity']['id'])

    follow = activitypub.Follow(object=actor)
    follow.post_to_outbox()

    return _user_api_response(activity=follow.id)
|
2018-05-18 18:41:41 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.route('/followers')
def followers():
    """Serve the followers collection.

    ActivityPub JSON for API requests, an HTML page otherwise.
    """
    if is_api_request():
        collection = activitypub.build_ordered_collection(
            DB.followers,
            cursor=request.args.get('cursor'),
            map_func=lambda doc: doc['remote_actor'],
        )
        return jsonify(**collection)

    # Resolve the remote actors so the template can show names/avatars.
    followers = [ACTOR_SERVICE.get(doc['remote_actor']) for doc in DB.followers.find(limit=50)]
    return render_template(
        'followers.html',
        me=ME,
        notes=DB.inbox.find({'object.object.type': 'Note'}).count(),
        followers=DB.followers.count(),
        following=DB.following.count(),
        followers_data=followers,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/following')
def following():
    """Serve the following collection.

    ActivityPub JSON for API requests, an HTML page otherwise.
    """
    if is_api_request():
        collection = activitypub.build_ordered_collection(
            DB.following,
            cursor=request.args.get('cursor'),
            map_func=lambda doc: doc['remote_actor'],
        )
        return jsonify(**collection)

    # Resolve the remote actors so the template can show names/avatars.
    following = [ACTOR_SERVICE.get(doc['remote_actor']) for doc in DB.following.find(limit=50)]
    return render_template(
        'following.html',
        me=ME,
        notes=DB.inbox.find({'object.object.type': 'Note'}).count(),
        followers=DB.followers.count(),
        following=DB.following.count(),
        following_data=following,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/tags/<tag>')
def tags(tag):
    """Serve the hashtag page for *tag*.

    404 when no local note uses the tag; otherwise an HTML listing, or an
    ActivityPub OrderedCollection of note IRIs for API requests.
    """
    hashtag = '#' + tag
    if not DB.outbox.count({'activity.object.tag.type': 'Hashtag', 'activity.object.tag.name': hashtag}):
        abort(404)
    if not is_api_request():
        return render_template(
            'tags.html',
            tag=tag,
            outbox_data=DB.outbox.find({'type': 'Create', 'activity.object.type': 'Note', 'meta.deleted': False,
                                        'activity.object.tag.type': 'Hashtag',
                                        'activity.object.tag.name': hashtag}),
        )
    query = {
        'meta.deleted': False,
        'meta.undo': False,
        'type': ActivityType.CREATE.value,
        'activity.object.tag.type': 'Hashtag',
        'activity.object.tag.name': hashtag,
    }
    return jsonify(**activitypub.build_ordered_collection(
        DB.outbox,
        q=query,
        cursor=request.args.get('cursor'),
        map_func=lambda doc: doc['activity']['object']['id'],
        col_name=f'tags/{tag}',
    ))
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/liked')
def liked():
    """Serve the liked collection (ActivityPub API requests only; 404 otherwise)."""
    if not is_api_request():
        abort(404)
    query = {
        'meta.deleted': False,
        'meta.undo': False,
        'type': ActivityType.LIKE.value,
    }
    collection = activitypub.build_ordered_collection(
        DB.outbox,
        q=query,
        cursor=request.args.get('cursor'),
        map_func=lambda doc: doc['activity']['object'],
        col_name='liked',
    )
    return jsonify(**collection)
|
|
|
|
|
|
|
|
#######
|
|
|
|
# IndieAuth
|
|
|
|
|
|
|
|
|
|
|
|
def build_auth_resp(payload):
    """Serialize *payload* for an IndieAuth response.

    JSON when the client asked for it via the Accept header, otherwise
    form-urlencoded (the IndieAuth default).
    """
    if request.headers.get('Accept') == 'application/json':
        body = json.dumps(payload)
        content_type = 'application/json'
    else:
        body = urlencode(payload)
        content_type = 'application/x-www-form-urlencoded'
    return Response(
        status=200,
        headers={'Content-Type': content_type},
        response=body,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def _get_prop(props, name, default=None):
|
|
|
|
if name in props:
|
|
|
|
items = props.get(name)
|
|
|
|
if isinstance(items, list):
|
|
|
|
return items[0]
|
|
|
|
return items
|
|
|
|
return default
|
|
|
|
|
|
|
|
def get_client_id_data(url):
    """Fetch IndieAuth client metadata (logo, name, url) from *url*.

    Parse the page's microformats and use the first h-app/h-x-app item;
    fall back to using the URL itself as the client name.
    """
    data = mf2py.parse(url=url)
    for item in data['items']:
        if 'h-x-app' in item['type'] or 'h-app' in item['type']:
            props = item.get('properties', {})
            # Was a stray `print(props)` debug leftover.
            logger.debug(f'client_id props={props}')
            return dict(
                logo=_get_prop(props, 'logo'),
                name=_get_prop(props, 'name'),
                url=_get_prop(props, 'url'),
            )
    return dict(
        logo=None,
        name=url,
        url=url,
    )
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/indieauth/flow', methods=['POST'])
@login_required
def indieauth_flow():
    """Handle the IndieAuth consent form: persist the authorization request
    and redirect back to the client with a one-time code."""
    auth = dict(
        scope=' '.join(request.form.getlist('scopes')),
        me=request.form.get('me'),
        client_id=request.form.get('client_id'),
        state=request.form.get('state'),
        redirect_uri=request.form.get('redirect_uri'),
        response_type=request.form.get('response_type'),
    )

    # One-time authorization code handed back to the client.
    code = binascii.hexlify(os.urandom(8)).decode('utf-8')
    auth.update(
        code=code,
        verified=False,
    )
    # Was a stray `print(auth)` debug leftover.
    logger.debug(f'indieauth flow auth={auth}')
    if not auth['redirect_uri']:
        abort(500)  # NOTE(review): a 400 would better fit a bad client request

    DB.indieauth.insert_one(auth)

    # FIXME(tsileo): fetch client ID and validate redirect_uri
    red = f'{auth["redirect_uri"]}?code={code}&state={auth["state"]}&me={auth["me"]}'
    return redirect(red)
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/indieauth', methods=['GET', 'POST'])
def indieauth_endpoint():
    """IndieAuth authorization endpoint.

    GET: show the consent form (requires login).
    POST: verify an authorization code for the client and return the
    authorized profile URL, state and scope.
    """
    # Bugfix: `session['logged_in'] = True` used to be set unconditionally
    # here, which made the login check below a no-op and let anyone reach
    # the consent form.
    if request.method == 'GET':
        if not session.get('logged_in'):
            return redirect(url_for('login', next=request.url))

        me = request.args.get('me')
        # FIXME(tsileo): ensure me == ID
        client_id = request.args.get('client_id')
        redirect_uri = request.args.get('redirect_uri')
        state = request.args.get('state', '')
        response_type = request.args.get('response_type', 'id')
        scope = request.args.get('scope', '').split()

        return render_template(
            'indieauth_flow.html',
            client=get_client_id_data(client_id),
            scopes=scope,
            redirect_uri=redirect_uri,
            state=state,
            response_type=response_type,
            client_id=client_id,
            me=me,
        )

    # Auth verification via POST
    code = request.form.get('code')
    redirect_uri = request.form.get('redirect_uri')
    client_id = request.form.get('client_id')

    auth = DB.indieauth.find_one_and_update(
        {'code': code, 'redirect_uri': redirect_uri, 'client_id': client_id},  # FIXME(tsileo): also filter on 'verified': False
        {'$set': {'verified': True}},
        sort=[('_id', pymongo.DESCENDING)],
    )
    # Was a pair of stray print() debug leftovers.
    logger.debug(f'indieauth auth={auth} code={code} redirect_uri={redirect_uri} client_id={client_id}')

    if not auth:
        abort(403)
        return

    me = auth['me']
    state = auth['state']
    # Bugfix: `scope` is stored as a space-joined string by /indieauth/flow;
    # joining it again interleaved every character with spaces.
    scope = auth['scope']
    return build_auth_resp({'me': me, 'state': state, 'scope': scope})
|
|
|
|
|
|
|
|
|
|
|
|
@app.route('/token', methods=['GET', 'POST'])
def token_endpoint():
    """IndieAuth token endpoint.

    POST: exchange an authorization code for a signed access token.
    GET: verify a bearer token and return the associated payload.
    """
    if request.method == 'POST':
        code = request.form.get('code')
        me = request.form.get('me')
        redirect_uri = request.form.get('redirect_uri')
        client_id = request.form.get('client_id')

        auth = DB.indieauth.find_one({'code': code, 'me': me, 'redirect_uri': redirect_uri, 'client_id': client_id})
        if not auth:
            abort(403)
        # Bugfix: `scope` is stored as a space-joined string by
        # /indieauth/flow; joining it again interleaved every character
        # with spaces.
        scope = auth['scope']
        payload = dict(me=me, client_id=client_id, scope=scope, ts=datetime.now().timestamp())
        token = JWT.dumps(payload).decode('utf-8')

        return build_auth_resp({'me': me, 'scope': scope, 'access_token': token})

    # Token verification
    # Robustness: a missing Authorization header is a 403, not an
    # AttributeError (HTTP 500).
    auth_header = request.headers.get('Authorization')
    if not auth_header:
        abort(403)
    token = auth_header.replace('Bearer ', '')
    try:
        payload = JWT.loads(token)
    except BadSignature:
        abort(403)

    # TODO(tsileo): handle expiration

    return build_auth_resp({
        'me': payload['me'],
        'scope': payload['scope'],
        'client_id': payload['client_id'],
    })
|