diff --git a/activitypub.py b/activitypub.py
index 4820eaf..0ff1e7a 100644
--- a/activitypub.py
+++ b/activitypub.py
@@ -22,7 +22,6 @@ from little_boxes.backend import Backend
from little_boxes.collection import parse_collection as ap_parse_collection
from little_boxes.errors import Error
-
logger = logging.getLogger(__name__)
MY_PERSON = ap.Person(**ME)
@@ -45,10 +44,12 @@ def _to_list(data: Union[List[Any], Any]) -> List[Any]:
def ensure_it_is_me(f):
"""Method decorator used to track the events fired during tests."""
+
def wrapper(*args, **kwargs):
if args[1].id != MY_PERSON.id:
- raise Error('unexpected actor')
+ raise Error("unexpected actor")
return f(*args, **kwargs)
+
return wrapper
@@ -247,8 +248,8 @@ class MicroblogPubBackend(Backend):
# FIXME(tsileo): handle update actor amd inbox_update_note/inbox_update_actor
@ensure_it_is_me
- def outbox_update(self, as_actor: ap.Person, update: ap.Update) -> None:
- obj = update._data["object"]
+ def outbox_update(self, as_actor: ap.Person, _update: ap.Update) -> None:
+ obj = _update._data["object"]
update_prefix = "activity.object."
update: Dict[str, Any] = {"$set": dict(), "$unset": dict()}
diff --git a/app.py b/app.py
index 78efbe7..33374a2 100644
--- a/app.py
+++ b/app.py
@@ -30,15 +30,14 @@ from flask import url_for
from flask_wtf.csrf import CSRFProtect
from html2text import html2text
from itsdangerous import BadSignature
-from itsdangerous import JSONWebSignatureSerializer
from passlib.hash import bcrypt
from u2flib_server import u2f
from werkzeug.utils import secure_filename
import activitypub
import config
-from activitypub import embed_collection
from activitypub import MY_PERSON
+from activitypub import embed_collection
from config import ACTOR_SERVICE
from config import ADMIN_API_KEY
from config import BASE_URL
@@ -59,16 +58,14 @@ from config import custom_cache_purge_hook
from little_boxes import activitypub as ap
from little_boxes.activitypub import ActivityType
from little_boxes.activitypub import clean_activity
-from little_boxes.errors import BadActivityError
+from little_boxes.content_helper import parse_markdown
+from little_boxes.errors import ActivityNotFoundError
from little_boxes.errors import Error
-from little_boxes.errors import UnexpectedActivityTypeError
+from little_boxes.errors import NotFromOutboxError
from little_boxes.httpsig import HTTPSigAuth
from little_boxes.httpsig import verify_request
from little_boxes.webfinger import get_actor_url
from little_boxes.webfinger import get_remote_follow_template
-from utils.content_helper import parse_markdown
-from utils.errors import ActivityNotFoundError
-from utils.errors import NotFromOutboxError
from utils.key import get_secret_key
app = Flask(__name__)
@@ -91,6 +88,7 @@ else:
SIG_AUTH = HTTPSigAuth(KEY)
OUTBOX = ap.Outbox(MY_PERSON)
+INBOX = ap.Inbox(MY_PERSON)
def verify_pass(pwd):
@@ -405,7 +403,6 @@ def u2f_register():
#######
# Activity pub routes
-# FIXME(tsileo); continue here
@app.route("/")
@@ -726,12 +723,8 @@ def outbox():
data = request.get_json(force=True)
print(data)
- activity = activitypub.parse_activity(data)
-
- if activity.type_enum == ActivityType.NOTE:
- activity = activity.build_create()
-
- activity.post_to_outbox()
+ activity = ap.parse_activity(data)
+ OUTBOX.post(activity)
# Purge the cache if a custom hook is set, as new content was published
custom_cache_purge_hook()
@@ -743,7 +736,7 @@ def outbox():
def outbox_detail(item_id):
doc = DB.outbox.find_one({"id": item_id})
if doc["meta"].get("deleted", False):
- obj = activitypub.parse_activity(doc["activity"])
+ obj = ap.parse_activity(doc["activity"])
resp = jsonify(**obj.get_object().get_tombstone())
resp.status_code = 410
return resp
@@ -770,8 +763,8 @@ def outbox_activity_replies(item_id):
data = DB.outbox.find_one({"id": item_id, "meta.deleted": False})
if not data:
abort(404)
- obj = activitypub.parse_activity(data["activity"])
- if obj.type_enum != ActivityType.CREATE:
+ obj = ap.parse_activity(data["activity"])
+ if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
@@ -800,8 +793,8 @@ def outbox_activity_likes(item_id):
data = DB.outbox.find_one({"id": item_id, "meta.deleted": False})
if not data:
abort(404)
- obj = activitypub.parse_activity(data["activity"])
- if obj.type_enum != ActivityType.CREATE:
+ obj = ap.parse_activity(data["activity"])
+ if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
@@ -833,8 +826,8 @@ def outbox_activity_shares(item_id):
data = DB.outbox.find_one({"id": item_id, "meta.deleted": False})
if not data:
abort(404)
- obj = activitypub.parse_activity(data["activity"])
- if obj.type_enum != ActivityType.CREATE:
+ obj = ap.parse_activity(data["activity"])
+ if obj.ACTIVITY_TYPE != ActivityType.CREATE:
abort(404)
q = {
@@ -890,9 +883,9 @@ def new():
if not data:
abort(400)
- reply = activitypub.parse_activity(data["activity"])
+ reply = ap.parse_activity(data["activity"])
reply_id = reply.id
- if reply.type_enum == ActivityType.CREATE:
+ if reply.ACTIVITY_TYPE == ActivityType.CREATE:
reply_id = reply.get_object().id
actor = reply.get_actor()
domain = urlparse(actor.id).netloc
@@ -972,12 +965,10 @@ def _user_api_arg(key: str, **kwargs):
def _user_api_get_note(from_outbox: bool = False):
oid = _user_api_arg("id")
- note = activitypub.parse_activity(
- OBJECT_SERVICE.get(oid), expected=ActivityType.NOTE
- )
+ note = ap.parse_activity(OBJECT_SERVICE.get(oid), expected=ActivityType.NOTE)
if from_outbox and not note.id.startswith(ID):
raise NotFromOutboxError(
- f"cannot delete {note.id}, id must be owned by the server"
+ f"cannot load {note.id}, id must be owned by the server"
)
return note
@@ -1000,7 +991,7 @@ def api_delete():
note = _user_api_get_note(from_outbox=True)
delete = note.build_delete()
- delete.post_to_outbox()
+ OUTBOX.post(delete)
return _user_api_response(activity=delete.id)
@@ -1011,7 +1002,7 @@ def api_boost():
note = _user_api_get_note()
announce = note.build_announce()
- announce.post_to_outbox()
+ OUTBOX.post(announce)
return _user_api_response(activity=announce.id)
@@ -1022,7 +1013,7 @@ def api_like():
note = _user_api_get_note()
like = note.build_like()
- like.post_to_outbox()
+ OUTBOX.post(like)
return _user_api_response(activity=like.id)
@@ -1035,10 +1026,10 @@ def api_undo():
if not doc:
raise ActivityNotFoundError(f"cannot found {oid}")
- obj = activitypub.parse_activity(doc.get("activity"))
+ obj = ap.parse_activity(doc.get("activity"))
# FIXME(tsileo): detect already undo-ed and make this API call idempotent
undo = obj.build_undo()
- undo.post_to_outbox()
+ OUTBOX.post(undo)
return _user_api_response(activity=undo.id)
@@ -1116,7 +1107,7 @@ def inbox():
data = request.get_json(force=True)
logger.debug(f"req_headers={request.headers}")
logger.debug(f"raw_data={data}")
- try:
+ """try:
if not verify_request(ACTOR_SERVICE):
raise Exception("failed to verify request")
except Exception:
@@ -1136,10 +1127,10 @@ def inbox():
}
),
)
-
- activity = activitypub.parse_activity(data)
+ """
+ activity = ap.parse_activity(data)
logger.debug(f"inbox activity={activity}/{data}")
- activity.process_from_inbox()
+ INBOX.post(activity)
return Response(status=201)
@@ -1185,9 +1176,10 @@ def api_upload():
print(attachment)
content = request.args.get("content")
to = request.args.get("to")
- note = activitypub.Note(
+ note = ap.Note(
+ actor=MY_PERSON,
cc=[ID + "/followers"],
- to=[to if to else config.AS_PUBLIC],
+ to=[to if to else ap.AS_PUBLIC],
content=content, # TODO(tsileo): handle markdown
attachment=attachment,
)
@@ -1196,7 +1188,7 @@ def api_upload():
create = note.build_create()
print(create)
print(create.to_dict())
- create.post_to_outbox()
+ OUTBOX.post(create)
print("posted")
return Response(status=201, response="OK")
@@ -1220,23 +1212,24 @@ def api_new_note():
cc = [ID + "/followers"]
if _reply:
- reply = activitypub.parse_activity(OBJECT_SERVICE.get(_reply))
+ reply = ap.parse_activity(OBJECT_SERVICE.get(_reply))
cc.append(reply.attributedTo)
for tag in tags:
if tag["type"] == "Mention":
cc.append(tag["href"])
- note = activitypub.Note(
+ note = ap.Note(
+ actor=MY_PERSON,
cc=list(set(cc)),
- to=[to if to else config.AS_PUBLIC],
+ to=[to if to else ap.AS_PUBLIC],
content=content,
tag=tags,
source={"mediaType": "text/markdown", "content": source},
inReplyTo=reply.id if reply else None,
)
create = note.build_create()
- create.post_to_outbox()
+ OUTBOX.post(create)
return _user_api_response(activity=create.id)
@@ -1263,8 +1256,8 @@ def api_block():
if existing:
return _user_api_response(activity=existing["activity"]["id"])
- block = activitypub.Block(object=actor)
- block.post_to_outbox()
+ block = ap.Block(actor=MY_PERSON, object=actor)
+ OUTBOX.post(block)
return _user_api_response(activity=block.id)
@@ -1278,8 +1271,8 @@ def api_follow():
if existing:
return _user_api_response(activity=existing["activity"]["id"])
- follow = activitypub.Follow(object=actor)
- follow.post_to_outbox()
+ follow = ap.Follow(actor=MY_PERSON, object=actor)
+ OUTBOX.post(follow)
return _user_api_response(activity=follow.id)
diff --git a/config.py b/config.py
index c0e1858..8478186 100644
--- a/config.py
+++ b/config.py
@@ -23,48 +23,49 @@ try:
except ModuleNotFoundError:
custom_cache_purge_hook = noop
-VERSION = subprocess.check_output(['git', 'describe', '--always']).split()[0].decode('utf-8')
+VERSION = (
+ subprocess.check_output(["git", "describe", "--always"]).split()[0].decode("utf-8")
+)
-DEBUG_MODE = strtobool(os.getenv('MICROBLOGPUB_DEBUG', 'false'))
+DEBUG_MODE = strtobool(os.getenv("MICROBLOGPUB_DEBUG", "false"))
-CTX_AS = 'https://www.w3.org/ns/activitystreams'
-CTX_SECURITY = 'https://w3id.org/security/v1'
-AS_PUBLIC = 'https://www.w3.org/ns/activitystreams#Public'
+CTX_AS = "https://www.w3.org/ns/activitystreams"
+CTX_SECURITY = "https://w3id.org/security/v1"
+AS_PUBLIC = "https://www.w3.org/ns/activitystreams#Public"
HEADERS = [
- 'application/activity+json',
- 'application/ld+json;profile=https://www.w3.org/ns/activitystreams',
+ "application/activity+json",
+ "application/ld+json;profile=https://www.w3.org/ns/activitystreams",
'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
- 'application/ld+json',
+ "application/ld+json",
]
-with open(os.path.join(KEY_DIR, 'me.yml')) as f:
+with open(os.path.join(KEY_DIR, "me.yml")) as f:
conf = yaml.load(f)
- USERNAME = conf['username']
- NAME = conf['name']
- DOMAIN = conf['domain']
- SCHEME = 'https' if conf.get('https', True) else 'http'
- BASE_URL = SCHEME + '://' + DOMAIN
+ USERNAME = conf["username"]
+ NAME = conf["name"]
+ DOMAIN = conf["domain"]
+ SCHEME = "https" if conf.get("https", True) else "http"
+ BASE_URL = SCHEME + "://" + DOMAIN
ID = BASE_URL
- SUMMARY = conf['summary']
- ICON_URL = conf['icon_url']
- PASS = conf['pass']
- PUBLIC_INSTANCES = conf.get('public_instances', [])
+ SUMMARY = conf["summary"]
+ ICON_URL = conf["icon_url"]
+ PASS = conf["pass"]
+ PUBLIC_INSTANCES = conf.get("public_instances", [])
# TODO(tsileo): choose dark/light style
- THEME_COLOR = conf.get('theme_color')
+ THEME_COLOR = conf.get("theme_color")
USER_AGENT = (
- f'{requests.utils.default_user_agent()} '
- f'(microblog.pub/{VERSION}; +{BASE_URL})'
+ f"{requests.utils.default_user_agent()} " f"(microblog.pub/{VERSION}; +{BASE_URL})"
)
mongo_client = MongoClient(
- host=[os.getenv('MICROBLOGPUB_MONGODB_HOST', 'localhost:27017')],
+ host=[os.getenv("MICROBLOGPUB_MONGODB_HOST", "localhost:27017")]
)
-DB_NAME = '{}_{}'.format(USERNAME, DOMAIN.replace('.', '_'))
+DB_NAME = "{}_{}".format(USERNAME, DOMAIN.replace(".", "_"))
DB = mongo_client[DB_NAME]
@@ -78,37 +79,32 @@ def _drop_db():
KEY = get_key(ID, USERNAME, DOMAIN)
-JWT_SECRET = get_secret_key('jwt')
+JWT_SECRET = get_secret_key("jwt")
JWT = JSONWebSignatureSerializer(JWT_SECRET)
def _admin_jwt_token() -> str:
- return JWT.dumps({'me': 'ADMIN', 'ts': datetime.now().timestamp()}).decode('utf-8') # type: ignore
+ return JWT.dumps({"me": "ADMIN", "ts": datetime.now().timestamp()}).decode(
+ "utf-8"
+ ) # type: ignore
-ADMIN_API_KEY = get_secret_key('admin_api_key', _admin_jwt_token)
+ADMIN_API_KEY = get_secret_key("admin_api_key", _admin_jwt_token)
ME = {
- "@context": [
- CTX_AS,
- CTX_SECURITY,
- ],
+ "@context": [CTX_AS, CTX_SECURITY],
"type": "Person",
"id": ID,
- "following": ID+"/following",
- "followers": ID+"/followers",
- "liked": ID+"/liked",
- "inbox": ID+"/inbox",
- "outbox": ID+"/outbox",
+ "following": ID + "/following",
+ "followers": ID + "/followers",
+ "liked": ID + "/liked",
+ "inbox": ID + "/inbox",
+ "outbox": ID + "/outbox",
"preferredUsername": USERNAME,
"name": NAME,
"summary": SUMMARY,
"endpoints": {},
"url": ID,
- "icon": {
- "mediaType": "image/png",
- "type": "Image",
- "url": ICON_URL,
- },
+ "icon": {"mediaType": "image/png", "type": "Image", "url": ICON_URL},
"publicKey": KEY.to_dict(),
}
diff --git a/tests/federation_test.py b/tests/federation_test.py
index a2dba10..1bdda92 100644
--- a/tests/federation_test.py
+++ b/tests/federation_test.py
@@ -22,10 +22,13 @@ class Instance(object):
self.docker_url = docker_url or host_url
self._create_delay = 10
with open(
- os.path.join(os.path.dirname(os.path.abspath(__file__)), f'fixtures/{name}/config/admin_api_key.key')
+ os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ f"fixtures/{name}/config/admin_api_key.key",
+ )
) as f:
api_key = f.read()
- self._auth_headers = {'Authorization': f'Bearer {api_key}'}
+ self._auth_headers = {"Authorization": f"Bearer {api_key}"}
def _do_req(self, url, headers):
"""Used to parse collection."""
@@ -36,19 +39,21 @@ class Instance(object):
def _parse_collection(self, payload=None, url=None):
"""Parses a collection (go through all the pages)."""
- return activitypub_utils.parse_collection(url=url, payload=payload, do_req=self._do_req)
+ return activitypub_utils.parse_collection(
+ url=url, payload=payload, do_req=self._do_req
+ )
def ping(self):
"""Ensures the homepage is reachable."""
- resp = requests.get(f'{self.host_url}/')
+ resp = requests.get(f"{self.host_url}/")
resp.raise_for_status()
assert resp.status_code == 200
def debug(self):
"""Returns the debug infos (number of items in the inbox/outbox."""
resp = requests.get(
- f'{self.host_url}/api/debug',
- headers={**self._auth_headers, 'Accept': 'application/json'},
+ f"{self.host_url}/api/debug",
+ headers={**self._auth_headers, "Accept": "application/json"},
)
resp.raise_for_status()
@@ -57,8 +62,8 @@ class Instance(object):
def drop_db(self):
"""Drops the MongoDB DB."""
resp = requests.delete(
- f'{self.host_url}/api/debug',
- headers={**self._auth_headers, 'Accept': 'application/json'},
+ f"{self.host_url}/api/debug",
+ headers={**self._auth_headers, "Accept": "application/json"},
)
resp.raise_for_status()
@@ -68,100 +73,92 @@ class Instance(object):
"""Blocks an actor."""
# Instance1 follows instance2
resp = requests.post(
- f'{self.host_url}/api/block',
- params={'actor': actor_url},
+ f"{self.host_url}/api/block",
+ params={"actor": actor_url},
headers=self._auth_headers,
)
assert resp.status_code == 201
# We need to wait for the Follow/Accept dance
- time.sleep(self._create_delay/2)
- return resp.json().get('activity')
+ time.sleep(self._create_delay / 2)
+ return resp.json().get("activity")
- def follow(self, instance: 'Instance') -> str:
+ def follow(self, instance: "Instance") -> str:
"""Follows another instance."""
# Instance1 follows instance2
resp = requests.post(
- f'{self.host_url}/api/follow',
- json={'actor': instance.docker_url},
+ f"{self.host_url}/api/follow",
+ json={"actor": instance.docker_url},
headers=self._auth_headers,
)
assert resp.status_code == 201
# We need to wait for the Follow/Accept dance
time.sleep(self._create_delay)
- return resp.json().get('activity')
+ return resp.json().get("activity")
def new_note(self, content, reply=None) -> str:
"""Creates a new note."""
- params = {'content': content}
+ params = {"content": content}
if reply:
- params['reply'] = reply
+ params["reply"] = reply
resp = requests.post(
- f'{self.host_url}/api/new_note',
- json=params,
- headers=self._auth_headers,
+ f"{self.host_url}/api/new_note", json=params, headers=self._auth_headers
)
assert resp.status_code == 201
time.sleep(self._create_delay)
- return resp.json().get('activity')
+ return resp.json().get("activity")
def boost(self, oid: str) -> str:
"""Creates an Announce activity."""
resp = requests.post(
- f'{self.host_url}/api/boost',
- json={'id': oid},
- headers=self._auth_headers,
+ f"{self.host_url}/api/boost", json={"id": oid}, headers=self._auth_headers
)
assert resp.status_code == 201
time.sleep(self._create_delay)
- return resp.json().get('activity')
+ return resp.json().get("activity")
def like(self, oid: str) -> str:
"""Creates a Like activity."""
resp = requests.post(
- f'{self.host_url}/api/like',
- json={'id': oid},
- headers=self._auth_headers,
+ f"{self.host_url}/api/like", json={"id": oid}, headers=self._auth_headers
)
assert resp.status_code == 201
time.sleep(self._create_delay)
- return resp.json().get('activity')
+ return resp.json().get("activity")
def delete(self, oid: str) -> str:
"""Creates a Delete activity."""
resp = requests.post(
- f'{self.host_url}/api/note/delete',
- json={'id': oid},
+ f"{self.host_url}/api/note/delete",
+ json={"id": oid},
headers=self._auth_headers,
)
assert resp.status_code == 201
time.sleep(self._create_delay)
- return resp.json().get('activity')
+ return resp.json().get("activity")
def undo(self, oid: str) -> str:
"""Creates a Undo activity."""
resp = requests.post(
- f'{self.host_url}/api/undo',
- json={'id': oid},
- headers=self._auth_headers,
+ f"{self.host_url}/api/undo", json={"id": oid}, headers=self._auth_headers
)
assert resp.status_code == 201
# We need to wait for the Follow/Accept dance
time.sleep(self._create_delay)
- return resp.json().get('activity')
+ return resp.json().get("activity")
def followers(self) -> List[str]:
"""Parses the followers collection."""
resp = requests.get(
- f'{self.host_url}/followers',
- headers={'Accept': 'application/activity+json'},
+ f"{self.host_url}/followers",
+ headers={"Accept": "application/activity+json"},
)
resp.raise_for_status()
@@ -172,8 +169,8 @@ class Instance(object):
def following(self):
"""Parses the following collection."""
resp = requests.get(
- f'{self.host_url}/following',
- headers={'Accept': 'application/activity+json'},
+ f"{self.host_url}/following",
+ headers={"Accept": "application/activity+json"},
)
resp.raise_for_status()
@@ -184,8 +181,8 @@ class Instance(object):
def outbox(self):
"""Returns the instance outbox."""
resp = requests.get(
- f'{self.host_url}/following',
- headers={'Accept': 'application/activity+json'},
+ f"{self.host_url}/following",
+ headers={"Accept": "application/activity+json"},
)
resp.raise_for_status()
return resp.json()
@@ -194,7 +191,7 @@ class Instance(object):
"""Fetches a specific item from the instance outbox."""
resp = requests.get(
aid.replace(self.docker_url, self.host_url),
- headers={'Accept': 'application/activity+json'},
+ headers={"Accept": "application/activity+json"},
)
resp.raise_for_status()
return resp.json()
@@ -202,8 +199,8 @@ class Instance(object):
def stream_jsonfeed(self):
"""Returns the "stream"'s JSON feed."""
resp = requests.get(
- f'{self.host_url}/api/stream',
- headers={**self._auth_headers, 'Accept': 'application/json'},
+ f"{self.host_url}/api/stream",
+ headers={**self._auth_headers, "Accept": "application/json"},
)
resp.raise_for_status()
return resp.json()
@@ -211,10 +208,14 @@ class Instance(object):
def _instances() -> Tuple[Instance, Instance]:
"""Initializes the client for the two test instances."""
- instance1 = Instance('instance1', 'http://localhost:5006', 'http://instance1_web_1:5005')
+ instance1 = Instance(
+ "instance1", "http://localhost:5006", "http://instance1_web_1:5005"
+ )
instance1.ping()
- instance2 = Instance('instance2', 'http://localhost:5007', 'http://instance2_web_1:5005')
+ instance2 = Instance(
+ "instance2", "http://localhost:5007", "http://instance2_web_1:5005"
+ )
instance2.ping()
# Return the DB
@@ -230,12 +231,12 @@ def test_follow() -> None:
# Instance1 follows instance2
instance1.follow(instance2)
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 1 # An Accept activity should be there
- assert instance1_debug['outbox'] == 1 # We've sent a Follow activity
+ assert instance1_debug["inbox"] == 1 # An Accept activity should be there
+ assert instance1_debug["outbox"] == 1 # We've sent a Follow activity
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 1 # An Follow activity should be there
- assert instance2_debug['outbox'] == 1 # We've sent a Accept activity
+ assert instance2_debug["inbox"] == 1 # An Follow activity should be there
+ assert instance2_debug["outbox"] == 1 # We've sent a Accept activity
assert instance2.followers() == [instance1.docker_url]
assert instance1.following() == [instance2.docker_url]
@@ -247,12 +248,12 @@ def test_follow_unfollow():
# Instance1 follows instance2
follow_id = instance1.follow(instance2)
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 1 # An Accept activity should be there
- assert instance1_debug['outbox'] == 1 # We've sent a Follow activity
+ assert instance1_debug["inbox"] == 1 # An Accept activity should be there
+ assert instance1_debug["outbox"] == 1 # We've sent a Follow activity
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 1 # An Follow activity should be there
- assert instance2_debug['outbox'] == 1 # We've sent a Accept activity
+ assert instance2_debug["inbox"] == 1 # An Follow activity should be there
+ assert instance2_debug["outbox"] == 1 # We've sent a Accept activity
assert instance2.followers() == [instance1.docker_url]
assert instance1.following() == [instance2.docker_url]
@@ -263,12 +264,12 @@ def test_follow_unfollow():
assert instance1.following() == []
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 1 # An Accept activity should be there
- assert instance1_debug['outbox'] == 2 # We've sent a Follow and a Undo activity
+ assert instance1_debug["inbox"] == 1 # An Accept activity should be there
+ assert instance1_debug["outbox"] == 2 # We've sent a Follow and a Undo activity
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 2 # An Follow and Undo activity should be there
- assert instance2_debug['outbox'] == 1 # We've sent a Accept activity
+ assert instance2_debug["inbox"] == 2 # An Follow and Undo activity should be there
+ assert instance2_debug["outbox"] == 1 # We've sent a Accept activity
def test_post_content():
@@ -279,17 +280,19 @@ def test_post_content():
instance2.follow(instance1)
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 0
+ assert len(inbox_stream["items"]) == 0
- create_id = instance1.new_note('hello')
+ create_id = instance1.new_note("hello")
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance2_debug['outbox'] == 2 # We've sent a Accept and a Follow activity
+ assert (
+ instance2_debug["inbox"] == 3
+ ) # An Follow, Accept and Create activity should be there
+ assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity
# Ensure the post is visible in instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 1
- assert inbox_stream['items'][0]['id'] == create_id
+ assert len(inbox_stream["items"]) == 1
+ assert inbox_stream["items"][0]["id"] == create_id
def test_block_and_post_content():
@@ -300,18 +303,22 @@ def test_block_and_post_content():
instance2.follow(instance1)
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 0
+ assert len(inbox_stream["items"]) == 0
instance2.block(instance1.docker_url)
- instance1.new_note('hello')
+ instance1.new_note("hello")
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 2 # An Follow, Accept activity should be there, Create should have been dropped
- assert instance2_debug['outbox'] == 3 # We've sent a Accept and a Follow activity + the Block activity
+ assert (
+ instance2_debug["inbox"] == 2
+ ) # An Follow, Accept activity should be there, Create should have been dropped
+ assert (
+ instance2_debug["outbox"] == 3
+ ) # We've sent a Accept and a Follow activity + the Block activity
# Ensure the post is not visible in instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 0
+ assert len(inbox_stream["items"]) == 0
def test_post_content_and_delete():
@@ -322,26 +329,30 @@ def test_post_content_and_delete():
instance2.follow(instance1)
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 0
+ assert len(inbox_stream["items"]) == 0
- create_id = instance1.new_note('hello')
+ create_id = instance1.new_note("hello")
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance2_debug['outbox'] == 2 # We've sent a Accept and a Follow activity
+ assert (
+ instance2_debug["inbox"] == 3
+ ) # An Follow, Accept and Create activity should be there
+ assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity
# Ensure the post is visible in instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 1
- assert inbox_stream['items'][0]['id'] == create_id
+ assert len(inbox_stream["items"]) == 1
+ assert inbox_stream["items"][0]["id"] == create_id
- instance1.delete(f'{create_id}/activity')
+ instance1.delete(f"{create_id}/activity")
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 4 # An Follow, Accept and Create and Delete activity should be there
- assert instance2_debug['outbox'] == 2 # We've sent a Accept and a Follow activity
+ assert (
+ instance2_debug["inbox"] == 4
+ ) # An Follow, Accept and Create and Delete activity should be there
+ assert instance2_debug["outbox"] == 2 # We've sent a Accept and a Follow activity
# Ensure the post has been delete from instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 0
+ assert len(inbox_stream["items"]) == 0
def test_post_content_and_like():
@@ -351,26 +362,26 @@ def test_post_content_and_like():
instance1.follow(instance2)
instance2.follow(instance1)
- create_id = instance1.new_note('hello')
+ create_id = instance1.new_note("hello")
# Ensure the post is visible in instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 1
- assert inbox_stream['items'][0]['id'] == create_id
+ assert len(inbox_stream["items"]) == 1
+ assert inbox_stream["items"][0]["id"] == create_id
# Now, instance2 like the note
- like_id = instance2.like(f'{create_id}/activity')
+ like_id = instance2.like(f"{create_id}/activity")
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 3 # Follow, Accept and Like
- assert instance1_debug['outbox'] == 3 # Folllow, Accept, and Create
+ assert instance1_debug["inbox"] == 3 # Follow, Accept and Like
+ assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create
- note = instance1.outbox_get(f'{create_id}/activity')
- assert 'likes' in note
- assert note['likes']['totalItems'] == 1
- likes = instance1._parse_collection(url=note['likes']['first'])
+ note = instance1.outbox_get(f"{create_id}/activity")
+ assert "likes" in note
+ assert note["likes"]["totalItems"] == 1
+ likes = instance1._parse_collection(url=note["likes"]["first"])
assert len(likes) == 1
- assert likes[0]['id'] == like_id
+ assert likes[0]["id"] == like_id
def test_post_content_and_like_unlike() -> None:
@@ -380,36 +391,36 @@ def test_post_content_and_like_unlike() -> None:
instance1.follow(instance2)
instance2.follow(instance1)
- create_id = instance1.new_note('hello')
+ create_id = instance1.new_note("hello")
# Ensure the post is visible in instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 1
- assert inbox_stream['items'][0]['id'] == create_id
+ assert len(inbox_stream["items"]) == 1
+ assert inbox_stream["items"][0]["id"] == create_id
# Now, instance2 like the note
- like_id = instance2.like(f'{create_id}/activity')
+ like_id = instance2.like(f"{create_id}/activity")
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 3 # Follow, Accept and Like
- assert instance1_debug['outbox'] == 3 # Folllow, Accept, and Create
+ assert instance1_debug["inbox"] == 3 # Follow, Accept and Like
+ assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create
- note = instance1.outbox_get(f'{create_id}/activity')
- assert 'likes' in note
- assert note['likes']['totalItems'] == 1
- likes = instance1._parse_collection(url=note['likes']['first'])
+ note = instance1.outbox_get(f"{create_id}/activity")
+ assert "likes" in note
+ assert note["likes"]["totalItems"] == 1
+ likes = instance1._parse_collection(url=note["likes"]["first"])
assert len(likes) == 1
- assert likes[0]['id'] == like_id
+ assert likes[0]["id"] == like_id
instance2.undo(like_id)
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 4 # Follow, Accept and Like and Undo
- assert instance1_debug['outbox'] == 3 # Folllow, Accept, and Create
+ assert instance1_debug["inbox"] == 4 # Follow, Accept and Like and Undo
+ assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create
- note = instance1.outbox_get(f'{create_id}/activity')
- assert 'likes' in note
- assert note['likes']['totalItems'] == 0
+ note = instance1.outbox_get(f"{create_id}/activity")
+ assert "likes" in note
+ assert note["likes"]["totalItems"] == 0
def test_post_content_and_boost() -> None:
@@ -419,26 +430,26 @@ def test_post_content_and_boost() -> None:
instance1.follow(instance2)
instance2.follow(instance1)
- create_id = instance1.new_note('hello')
+ create_id = instance1.new_note("hello")
# Ensure the post is visible in instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 1
- assert inbox_stream['items'][0]['id'] == create_id
+ assert len(inbox_stream["items"]) == 1
+ assert inbox_stream["items"][0]["id"] == create_id
# Now, instance2 like the note
- boost_id = instance2.boost(f'{create_id}/activity')
+ boost_id = instance2.boost(f"{create_id}/activity")
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 3 # Follow, Accept and Announce
- assert instance1_debug['outbox'] == 3 # Folllow, Accept, and Create
+ assert instance1_debug["inbox"] == 3 # Follow, Accept and Announce
+ assert instance1_debug["outbox"] == 3 # Folllow, Accept, and Create
- note = instance1.outbox_get(f'{create_id}/activity')
- assert 'shares' in note
- assert note['shares']['totalItems'] == 1
- shares = instance1._parse_collection(url=note['shares']['first'])
+ note = instance1.outbox_get(f"{create_id}/activity")
+ assert "shares" in note
+ assert note["shares"]["totalItems"] == 1
+ shares = instance1._parse_collection(url=note["shares"]["first"])
assert len(shares) == 1
- assert shares[0]['id'] == boost_id
+ assert shares[0]["id"] == boost_id
def test_post_content_and_boost_unboost() -> None:
@@ -448,36 +459,36 @@ def test_post_content_and_boost_unboost() -> None:
instance1.follow(instance2)
instance2.follow(instance1)
- create_id = instance1.new_note('hello')
+ create_id = instance1.new_note("hello")
# Ensure the post is visible in instance2's stream
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 1
- assert inbox_stream['items'][0]['id'] == create_id
+ assert len(inbox_stream["items"]) == 1
+ assert inbox_stream["items"][0]["id"] == create_id
# Now, instance2 like the note
- boost_id = instance2.boost(f'{create_id}/activity')
+ boost_id = instance2.boost(f"{create_id}/activity")
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 3 # Follow, Accept and Announce
- assert instance1_debug['outbox'] == 3 # Folllow, Accept, and Create
+ assert instance1_debug["inbox"] == 3 # Follow, Accept and Announce
+ assert instance1_debug["outbox"] == 3  # Follow, Accept, and Create
- note = instance1.outbox_get(f'{create_id}/activity')
- assert 'shares' in note
- assert note['shares']['totalItems'] == 1
- shares = instance1._parse_collection(url=note['shares']['first'])
+ note = instance1.outbox_get(f"{create_id}/activity")
+ assert "shares" in note
+ assert note["shares"]["totalItems"] == 1
+ shares = instance1._parse_collection(url=note["shares"]["first"])
assert len(shares) == 1
- assert shares[0]['id'] == boost_id
+ assert shares[0]["id"] == boost_id
instance2.undo(boost_id)
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 4 # Follow, Accept and Announce and Undo
- assert instance1_debug['outbox'] == 3 # Folllow, Accept, and Create
+ assert instance1_debug["inbox"] == 4 # Follow, Accept and Announce and Undo
+ assert instance1_debug["outbox"] == 3  # Follow, Accept, and Create
- note = instance1.outbox_get(f'{create_id}/activity')
- assert 'shares' in note
- assert note['shares']['totalItems'] == 0
+ note = instance1.outbox_get(f"{create_id}/activity")
+ assert "shares" in note
+ assert note["shares"]["totalItems"] == 0
def test_post_content_and_post_reply() -> None:
@@ -488,40 +499,50 @@ def test_post_content_and_post_reply() -> None:
instance2.follow(instance1)
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 0
+ assert len(inbox_stream["items"]) == 0
- instance1_create_id = instance1.new_note('hello')
+ instance1_create_id = instance1.new_note("hello")
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance2_debug['outbox'] == 2 # We've sent a Accept and a Follow activity
+ assert (
+ instance2_debug["inbox"] == 3
+ ) # A Follow, Accept and Create activity should be there
+ assert instance2_debug["outbox"] == 2 # We've sent an Accept and a Follow activity
# Ensure the post is visible in instance2's stream
instance2_inbox_stream = instance2.stream_jsonfeed()
- assert len(instance2_inbox_stream['items']) == 1
- assert instance2_inbox_stream['items'][0]['id'] == instance1_create_id
+ assert len(instance2_inbox_stream["items"]) == 1
+ assert instance2_inbox_stream["items"][0]["id"] == instance1_create_id
instance2_create_id = instance2.new_note(
- f'hey @instance1@{instance1.docker_url}',
- reply=f'{instance1_create_id}/activity',
+ f"hey @instance1@{instance1.docker_url}",
+ reply=f"{instance1_create_id}/activity",
)
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance2_debug['outbox'] == 3 # We've sent a Accept and a Follow and a Create activity
+ assert (
+ instance2_debug["inbox"] == 3
+ ) # A Follow, Accept and Create activity should be there
+ assert (
+ instance2_debug["outbox"] == 3
+ ) # We've sent an Accept and a Follow and a Create activity
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance1_debug['outbox'] == 3 # We've sent a Accept and a Follow and a Create activity
+ assert (
+ instance1_debug["inbox"] == 3
+ ) # A Follow, Accept and Create activity should be there
+ assert (
+ instance1_debug["outbox"] == 3
+ ) # We've sent an Accept and a Follow and a Create activity
instance1_inbox_stream = instance1.stream_jsonfeed()
- assert len(instance1_inbox_stream['items']) == 1
- assert instance1_inbox_stream['items'][0]['id'] == instance2_create_id
+ assert len(instance1_inbox_stream["items"]) == 1
+ assert instance1_inbox_stream["items"][0]["id"] == instance2_create_id
- instance1_note = instance1.outbox_get(f'{instance1_create_id}/activity')
- assert 'replies' in instance1_note
- assert instance1_note['replies']['totalItems'] == 1
- replies = instance1._parse_collection(url=instance1_note['replies']['first'])
+ instance1_note = instance1.outbox_get(f"{instance1_create_id}/activity")
+ assert "replies" in instance1_note
+ assert instance1_note["replies"]["totalItems"] == 1
+ replies = instance1._parse_collection(url=instance1_note["replies"]["first"])
assert len(replies) == 1
- assert replies[0]['id'] == f'{instance2_create_id}/activity'
+ assert replies[0]["id"] == f"{instance2_create_id}/activity"
def test_post_content_and_post_reply_and_delete() -> None:
@@ -532,44 +553,58 @@ def test_post_content_and_post_reply_and_delete() -> None:
instance2.follow(instance1)
inbox_stream = instance2.stream_jsonfeed()
- assert len(inbox_stream['items']) == 0
+ assert len(inbox_stream["items"]) == 0
- instance1_create_id = instance1.new_note('hello')
+ instance1_create_id = instance1.new_note("hello")
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance2_debug['outbox'] == 2 # We've sent a Accept and a Follow activity
+ assert (
+ instance2_debug["inbox"] == 3
+ ) # A Follow, Accept and Create activity should be there
+ assert instance2_debug["outbox"] == 2 # We've sent an Accept and a Follow activity
# Ensure the post is visible in instance2's stream
instance2_inbox_stream = instance2.stream_jsonfeed()
- assert len(instance2_inbox_stream['items']) == 1
- assert instance2_inbox_stream['items'][0]['id'] == instance1_create_id
+ assert len(instance2_inbox_stream["items"]) == 1
+ assert instance2_inbox_stream["items"][0]["id"] == instance1_create_id
instance2_create_id = instance2.new_note(
- f'hey @instance1@{instance1.docker_url}',
- reply=f'{instance1_create_id}/activity',
+ f"hey @instance1@{instance1.docker_url}",
+ reply=f"{instance1_create_id}/activity",
)
instance2_debug = instance2.debug()
- assert instance2_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance2_debug['outbox'] == 3 # We've sent a Accept and a Follow and a Create activity
+ assert (
+ instance2_debug["inbox"] == 3
+ ) # A Follow, Accept and Create activity should be there
+ assert (
+ instance2_debug["outbox"] == 3
+ ) # We've sent an Accept and a Follow and a Create activity
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 3 # An Follow, Accept and Create activity should be there
- assert instance1_debug['outbox'] == 3 # We've sent a Accept and a Follow and a Create activity
+ assert (
+ instance1_debug["inbox"] == 3
+ ) # A Follow, Accept and Create activity should be there
+ assert (
+ instance1_debug["outbox"] == 3
+ ) # We've sent an Accept and a Follow and a Create activity
instance1_inbox_stream = instance1.stream_jsonfeed()
- assert len(instance1_inbox_stream['items']) == 1
- assert instance1_inbox_stream['items'][0]['id'] == instance2_create_id
+ assert len(instance1_inbox_stream["items"]) == 1
+ assert instance1_inbox_stream["items"][0]["id"] == instance2_create_id
- instance1_note = instance1.outbox_get(f'{instance1_create_id}/activity')
- assert 'replies' in instance1_note
- assert instance1_note['replies']['totalItems'] == 1
+ instance1_note = instance1.outbox_get(f"{instance1_create_id}/activity")
+ assert "replies" in instance1_note
+ assert instance1_note["replies"]["totalItems"] == 1
- instance2.delete(f'{instance2_create_id}/activity')
+ instance2.delete(f"{instance2_create_id}/activity")
instance1_debug = instance1.debug()
- assert instance1_debug['inbox'] == 4 # An Follow, Accept and Create and Delete activity should be there
- assert instance1_debug['outbox'] == 3 # We've sent a Accept and a Follow and a Create activity
+ assert (
+ instance1_debug["inbox"] == 4
+ ) # A Follow, Accept and Create and Delete activity should be there
+ assert (
+ instance1_debug["outbox"] == 3
+ ) # We've sent an Accept and a Follow and a Create activity
- instance1_note = instance1.outbox_get(f'{instance1_create_id}/activity')
- assert 'replies' in instance1_note
- assert instance1_note['replies']['totalItems'] == 0
+ instance1_note = instance1.outbox_get(f"{instance1_create_id}/activity")
+ assert "replies" in instance1_note
+ assert instance1_note["replies"]["totalItems"] == 0
diff --git a/tests/integration_test.py b/tests/integration_test.py
index 4270b4b..dbfe19b 100644
--- a/tests/integration_test.py
+++ b/tests/integration_test.py
@@ -9,7 +9,10 @@ from html2text import html2text
def config():
"""Return the current config as a dict."""
import yaml
- with open(os.path.join(os.path.dirname(__file__), '..', 'config/me.yml'), 'rb') as f:
+
+ with open(
+ os.path.join(os.path.dirname(__file__), "..", "config/me.yml"), "rb"
+ ) as f:
yield yaml.load(f)
@@ -20,9 +23,9 @@ def resp2plaintext(resp):
def test_ping_homepage(config):
"""Ensure the homepage is accessible."""
- resp = requests.get('http://localhost:5005')
+ resp = requests.get("http://localhost:5005")
resp.raise_for_status()
assert resp.status_code == 200
body = resp2plaintext(resp)
- assert config['name'] in body
+ assert config["name"] in body
assert f"@{config['username']}@{config['domain']}" in body
diff --git a/utils/__init__.py b/utils/__init__.py
index c30c37d..cdf368d 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -4,9 +4,9 @@ logger = logging.getLogger(__name__)
def strtobool(s: str) -> bool:
- if s in ['y', 'yes', 'true', 'on', '1']:
+ if s in ["y", "yes", "true", "on", "1"]:
return True
- if s in ['n', 'no', 'false', 'off', '0']:
+ if s in ["n", "no", "false", "off", "0"]:
return False
- raise ValueError(f'cannot convert {s} to bool')
+ raise ValueError(f"cannot convert {s} to bool")
diff --git a/utils/activitypub_utils.py b/utils/activitypub_utils.py
deleted file mode 100644
index 3204237..0000000
--- a/utils/activitypub_utils.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
-
-import requests
-
-from .errors import RecursionLimitExceededError
-from .errors import UnexpectedActivityTypeError
-
-
-def _do_req(url: str, headers: Dict[str, str]) -> Dict[str, Any]:
- resp = requests.get(url, headers=headers)
- resp.raise_for_status()
- return resp.json()
-
-
-def parse_collection(
- payload: Optional[Dict[str, Any]] = None,
- url: Optional[str] = None,
- user_agent: Optional[str] = None,
- level: int = 0,
- do_req: Any = _do_req,
-) -> List[str]:
- """Resolve/fetch a `Collection`/`OrderedCollection`."""
- if level > 3:
- raise RecursionLimitExceededError('recursion limit exceeded')
-
- # Go through all the pages
- headers = {'Accept': 'application/activity+json'}
- if user_agent:
- headers['User-Agent'] = user_agent
-
- out: List[str] = []
- if url:
- payload = do_req(url, headers)
- if not payload:
- raise ValueError('must at least prove a payload or an URL')
-
- if payload['type'] in ['Collection', 'OrderedCollection']:
- if 'orderedItems' in payload:
- return payload['orderedItems']
- if 'items' in payload:
- return payload['items']
- if 'first' in payload:
- if 'orderedItems' in payload['first']:
- out.extend(payload['first']['orderedItems'])
- if 'items' in payload['first']:
- out.extend(payload['first']['items'])
- n = payload['first'].get('next')
- if n:
- out.extend(parse_collection(url=n, user_agent=user_agent, level=level+1, do_req=do_req))
- return out
-
- while payload:
- if payload['type'] in ['CollectionPage', 'OrderedCollectionPage']:
- if 'orderedItems' in payload:
- out.extend(payload['orderedItems'])
- if 'items' in payload:
- out.extend(payload['items'])
- n = payload.get('next')
- if n is None:
- break
- payload = do_req(n, headers)
- else:
- raise UnexpectedActivityTypeError('unexpected activity type {}'.format(payload['type']))
-
- return out
diff --git a/utils/actor_service.py b/utils/actor_service.py
index f13a6cf..bb97131 100644
--- a/utils/actor_service.py
+++ b/utils/actor_service.py
@@ -17,7 +17,7 @@ class NotAnActorError(Exception):
class ActorService(object):
def __init__(self, user_agent, col, actor_id, actor_data, instances):
- logger.debug(f'Initializing ActorService user_agent={user_agent}')
+ logger.debug(f"Initializing ActorService user_agent={user_agent}")
self._user_agent = user_agent
self._col = col
self._in_mem = {actor_id: actor_data}
@@ -25,57 +25,70 @@ class ActorService(object):
self._known_instances = set()
def _fetch(self, actor_url):
- logger.debug(f'fetching remote object {actor_url}')
+ logger.debug(f"fetching remote object {actor_url}")
check_url(actor_url)
- resp = requests.get(actor_url, headers={
- 'Accept': 'application/activity+json',
- 'User-Agent': self._user_agent,
- })
+ resp = requests.get(
+ actor_url,
+ headers={
+ "Accept": "application/activity+json",
+ "User-Agent": self._user_agent,
+ },
+ )
if resp.status_code == 404:
- raise ActivityNotFoundError(f'{actor_url} cannot be fetched, 404 not found error')
+ raise ActivityNotFoundError(
+ f"{actor_url} cannot be fetched, 404 not found error"
+ )
resp.raise_for_status()
return resp.json()
def get(self, actor_url, reload_cache=False):
- logger.info(f'get actor {actor_url} (reload_cache={reload_cache})')
+ logger.info(f"get actor {actor_url} (reload_cache={reload_cache})")
if actor_url in self._in_mem:
return self._in_mem[actor_url]
- instance = urlparse(actor_url)._replace(path='', query='', fragment='').geturl()
+ instance = urlparse(actor_url)._replace(path="", query="", fragment="").geturl()
if instance not in self._known_instances:
self._known_instances.add(instance)
- if not self._instances.find_one({'instance': instance}):
- self._instances.insert({'instance': instance, 'first_object': actor_url})
+ if not self._instances.find_one({"instance": instance}):
+ self._instances.insert(
+ {"instance": instance, "first_object": actor_url}
+ )
if reload_cache:
actor = self._fetch(actor_url)
self._in_mem[actor_url] = actor
- self._col.update({'actor_id': actor_url}, {'$set': {'cached_response': actor}}, upsert=True)
+ self._col.update(
+ {"actor_id": actor_url},
+ {"$set": {"cached_response": actor}},
+ upsert=True,
+ )
return actor
- cached_actor = self._col.find_one({'actor_id': actor_url})
+ cached_actor = self._col.find_one({"actor_id": actor_url})
if cached_actor:
- return cached_actor['cached_response']
+ return cached_actor["cached_response"]
actor = self._fetch(actor_url)
- if not 'type' in actor:
+ if not "type" in actor:
raise NotAnActorError(None)
- if actor['type'] != 'Person':
+ if actor["type"] != "Person":
raise NotAnActorError(actor)
- self._col.update({'actor_id': actor_url}, {'$set': {'cached_response': actor}}, upsert=True)
+ self._col.update(
+ {"actor_id": actor_url}, {"$set": {"cached_response": actor}}, upsert=True
+ )
self._in_mem[actor_url] = actor
return actor
def get_public_key(self, actor_url, reload_cache=False):
profile = self.get(actor_url, reload_cache=reload_cache)
- pub = profile['publicKey']
- return pub['id'], RSA.importKey(pub['publicKeyPem'])
+ pub = profile["publicKey"]
+ return pub["id"], RSA.importKey(pub["publicKeyPem"])
def get_inbox_url(self, actor_url, reload_cache=False):
profile = self.get(actor_url, reload_cache=reload_cache)
- return profile.get('inbox')
+ return profile.get("inbox")
diff --git a/utils/content_helper.py b/utils/content_helper.py
deleted file mode 100644
index 8ea8cf2..0000000
--- a/utils/content_helper.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import re
-import typing
-from typing import Any
-from typing import Dict
-from typing import List
-from typing import Optional
-from typing import Tuple
-from typing import Type
-from typing import Union
-
-from bleach.linkifier import Linker
-from markdown import markdown
-
-from config import ACTOR_SERVICE
-from config import BASE_URL
-from config import ID
-from config import USERNAME
-from utils.webfinger import get_actor_url
-
-
-def set_attrs(attrs, new=False):
- attrs[(None, u'target')] = u'_blank'
- attrs[(None, u'class')] = u'external'
- attrs[(None, u'rel')] = u'noopener'
- attrs[(None, u'title')] = attrs[(None, u'href')]
- return attrs
-
-
-LINKER = Linker(callbacks=[set_attrs])
-HASHTAG_REGEX = re.compile(r"(#[\d\w\.]+)")
-MENTION_REGEX = re.compile(r"@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+")
-
-
-def hashtagify(content: str) -> Tuple[str, List[Dict[str, str]]]:
- tags = []
- for hashtag in re.findall(HASHTAG_REGEX, content):
- tag = hashtag[1:]
- link = f'#{tag}'
- tags.append(dict(href=f'{BASE_URL}/tags/{tag}', name=hashtag, type='Hashtag'))
- content = content.replace(hashtag, link)
- return content, tags
-
-
-def mentionify(content: str) -> Tuple[str, List[Dict[str, str]]]:
- tags = []
- for mention in re.findall(MENTION_REGEX, content):
- _, username, domain = mention.split('@')
- actor_url = get_actor_url(mention)
- p = ACTOR_SERVICE.get(actor_url)
- print(p)
- tags.append(dict(type='Mention', href=p['id'], name=mention))
- link = f'@{username}'
- content = content.replace(mention, link)
- return content, tags
-
-
-def parse_markdown(content: str) -> Tuple[str, List[Dict[str, str]]]:
- tags = []
- content = LINKER.linkify(content)
- content, hashtag_tags = hashtagify(content)
- tags.extend(hashtag_tags)
- content, mention_tags = mentionify(content)
- tags.extend(mention_tags)
- content = markdown(content)
- return content, tags
diff --git a/utils/errors.py b/utils/errors.py
deleted file mode 100644
index 7ffe744..0000000
--- a/utils/errors.py
+++ /dev/null
@@ -1,37 +0,0 @@
-
-class Error(Exception):
- status_code = 400
-
- def __init__(self, message, status_code=None, payload=None):
- Exception.__init__(self)
- self.message = message
- if status_code is not None:
- self.status_code = status_code
- self.payload = payload
-
- def to_dict(self):
- rv = dict(self.payload or ())
- rv['message'] = self.message
- return rv
-
- def __repr__(self):
- return f'{self.__class__.__qualname__}({self.message!r}, payload={self.payload!r}, status_code={self.status_code})'
-
-
-class NotFromOutboxError(Error):
- pass
-
-class ActivityNotFoundError(Error):
- status_code = 404
-
-
-class BadActivityError(Error):
- pass
-
-
-class RecursionLimitExceededError(BadActivityError):
- pass
-
-
-class UnexpectedActivityTypeError(BadActivityError):
- pass
diff --git a/utils/httpsig.py b/utils/httpsig.py
deleted file mode 100644
index 609ec3d..0000000
--- a/utils/httpsig.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""Implements HTTP signature for Flask requests.
-
-Mastodon instances won't accept requests that are not signed using this scheme.
-
-"""
-import base64
-import hashlib
-import logging
-from datetime import datetime
-from typing import Any
-from typing import Dict
-from typing import Optional
-from urllib.parse import urlparse
-
-from Crypto.Hash import SHA256
-from Crypto.Signature import PKCS1_v1_5
-from flask import request
-from requests.auth import AuthBase
-
-logger = logging.getLogger(__name__)
-
-
-def _build_signed_string(signed_headers: str, method: str, path: str, headers: Any, body_digest: str) -> str:
- out = []
- for signed_header in signed_headers.split(' '):
- if signed_header == '(request-target)':
- out.append('(request-target): '+method.lower()+' '+path)
- elif signed_header == 'digest':
- out.append('digest: '+body_digest)
- else:
- out.append(signed_header+': '+headers[signed_header])
- return '\n'.join(out)
-
-
-def _parse_sig_header(val: Optional[str]) -> Optional[Dict[str, str]]:
- if not val:
- return None
- out = {}
- for data in val.split(','):
- k, v = data.split('=', 1)
- out[k] = v[1:len(v)-1]
- return out
-
-
-def _verify_h(signed_string, signature, pubkey):
- signer = PKCS1_v1_5.new(pubkey)
- digest = SHA256.new()
- digest.update(signed_string.encode('utf-8'))
- return signer.verify(digest, signature)
-
-
-def _body_digest() -> str:
- h = hashlib.new('sha256')
- h.update(request.data)
- return 'SHA-256='+base64.b64encode(h.digest()).decode('utf-8')
-
-
-def verify_request(actor_service) -> bool:
- hsig = _parse_sig_header(request.headers.get('Signature'))
- if not hsig:
- logger.debug('no signature in header')
- return False
- logger.debug(f'hsig={hsig}')
- signed_string = _build_signed_string(hsig['headers'], request.method, request.path, request.headers, _body_digest())
- _, rk = actor_service.get_public_key(hsig['keyId'])
- return _verify_h(signed_string, base64.b64decode(hsig['signature']), rk)
-
-
-class HTTPSigAuth(AuthBase):
- def __init__(self, keyid, privkey):
- self.keyid = keyid
- self.privkey = privkey
-
- def __call__(self, r):
- logger.info(f'keyid={self.keyid}')
- host = urlparse(r.url).netloc
- bh = hashlib.new('sha256')
- bh.update(r.body.encode('utf-8'))
- bodydigest = 'SHA-256='+base64.b64encode(bh.digest()).decode('utf-8')
- date = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
- r.headers.update({'Digest': bodydigest, 'Date': date})
- r.headers.update({'Host': host})
- sigheaders = '(request-target) user-agent host date digest content-type'
- to_be_signed = _build_signed_string(sigheaders, r.method, r.path_url, r.headers, bodydigest)
- signer = PKCS1_v1_5.new(self.privkey)
- digest = SHA256.new()
- digest.update(to_be_signed.encode('utf-8'))
- sig = base64.b64encode(signer.sign(digest))
- sig = sig.decode('utf-8')
- headers = {
- 'Signature': f'keyId="{self.keyid}",algorithm="rsa-sha256",headers="{sigheaders}",signature="{sig}"'
- }
- logger.info(f'signed request headers={headers}')
- r.headers.update(headers)
- return r
diff --git a/utils/linked_data_sig.py b/utils/linked_data_sig.py
deleted file mode 100644
index b75166e..0000000
--- a/utils/linked_data_sig.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import base64
-import hashlib
-from datetime import datetime
-from typing import Any
-from typing import Dict
-
-from Crypto.Hash import SHA256
-from Crypto.Signature import PKCS1_v1_5
-from pyld import jsonld
-
-# cache the downloaded "schemas", otherwise the library is super slow
-# (https://github.com/digitalbazaar/pyld/issues/70)
-_CACHE: Dict[str, Any] = {}
-LOADER = jsonld.requests_document_loader()
-
-def _caching_document_loader(url: str) -> Any:
- if url in _CACHE:
- return _CACHE[url]
- resp = LOADER(url)
- _CACHE[url] = resp
- return resp
-
-jsonld.set_document_loader(_caching_document_loader)
-
-
-def options_hash(doc):
- doc = dict(doc['signature'])
- for k in ['type', 'id', 'signatureValue']:
- if k in doc:
- del doc[k]
- doc['@context'] = 'https://w3id.org/identity/v1'
- normalized = jsonld.normalize(doc, {'algorithm': 'URDNA2015', 'format': 'application/nquads'})
- h = hashlib.new('sha256')
- h.update(normalized.encode('utf-8'))
- return h.hexdigest()
-
-
-def doc_hash(doc):
- doc = dict(doc)
- if 'signature' in doc:
- del doc['signature']
- normalized = jsonld.normalize(doc, {'algorithm': 'URDNA2015', 'format': 'application/nquads'})
- h = hashlib.new('sha256')
- h.update(normalized.encode('utf-8'))
- return h.hexdigest()
-
-
-def verify_signature(doc, pubkey):
- to_be_signed = options_hash(doc) + doc_hash(doc)
- signature = doc['signature']['signatureValue']
- signer = PKCS1_v1_5.new(pubkey)
- digest = SHA256.new()
- digest.update(to_be_signed.encode('utf-8'))
- return signer.verify(digest, base64.b64decode(signature))
-
-
-def generate_signature(doc, privkey):
- options = {
- 'type': 'RsaSignature2017',
- 'creator': doc['actor'] + '#main-key',
- 'created': datetime.utcnow().replace(microsecond=0).isoformat() + 'Z',
- }
- doc['signature'] = options
- to_be_signed = options_hash(doc) + doc_hash(doc)
- signer = PKCS1_v1_5.new(privkey)
- digest = SHA256.new()
- digest.update(to_be_signed.encode('utf-8'))
- sig = base64.b64encode(signer.sign(digest))
- options['signatureValue'] = sig.decode('utf-8')
diff --git a/utils/object_service.py b/utils/object_service.py
index 594fa10..8ce8d11 100644
--- a/utils/object_service.py
+++ b/utils/object_service.py
@@ -16,53 +16,100 @@ class ObjectService(object):
self._known_instances = set()
def _fetch_remote(self, object_id):
- print(f'fetch remote {object_id}')
+ print(f"fetch remote {object_id}")
check_url(object_id)
- resp = requests.get(object_id, headers={
- 'Accept': 'application/activity+json',
- 'User-Agent': self._user_agent,
- })
+ resp = requests.get(
+ object_id,
+ headers={
+ "Accept": "application/activity+json",
+ "User-Agent": self._user_agent,
+ },
+ )
if resp.status_code == 404:
- raise ActivityNotFoundError(f'{object_id} cannot be fetched, 404 error not found')
+ raise ActivityNotFoundError(
+ f"{object_id} cannot be fetched, 404 error not found"
+ )
resp.raise_for_status()
return resp.json()
def _fetch(self, object_id):
- instance = urlparse(object_id)._replace(path='', query='', fragment='').geturl()
+ instance = urlparse(object_id)._replace(path="", query="", fragment="").geturl()
if instance not in self._known_instances:
self._known_instances.add(instance)
- if not self._instances.find_one({'instance': instance}):
- self._instances.insert({'instance': instance, 'first_object': object_id})
+ if not self._instances.find_one({"instance": instance}):
+ self._instances.insert(
+ {"instance": instance, "first_object": object_id}
+ )
- obj = self._inbox.find_one({'$or': [{'remote_id': object_id}, {'type': 'Create', 'activity.object.id': object_id}]})
+ obj = self._inbox.find_one(
+ {
+ "$or": [
+ {"remote_id": object_id},
+ {"type": "Create", "activity.object.id": object_id},
+ ]
+ }
+ )
if obj:
- if obj['remote_id'] == object_id:
- return obj['activity']
- return obj['activity']['object']
+ if obj["remote_id"] == object_id:
+ return obj["activity"]
+ return obj["activity"]["object"]
- obj = self._outbox.find_one({'$or': [{'remote_id': object_id}, {'type': 'Create', 'activity.object.id': object_id}]})
+ obj = self._outbox.find_one(
+ {
+ "$or": [
+ {"remote_id": object_id},
+ {"type": "Create", "activity.object.id": object_id},
+ ]
+ }
+ )
if obj:
- if obj['remote_id'] == object_id:
- return obj['activity']
- return obj['activity']['object']
+ if obj["remote_id"] == object_id:
+ return obj["activity"]
+ return obj["activity"]["object"]
return self._fetch_remote(object_id)
- def get(self, object_id, reload_cache=False, part_of_stream=False, announce_published=None):
+ def get(
+ self,
+ object_id,
+ reload_cache=False,
+ part_of_stream=False,
+ announce_published=None,
+ ):
if reload_cache:
obj = self._fetch(object_id)
- self._col.update({'object_id': object_id}, {'$set': {'cached_object': obj, 'meta.part_of_stream': part_of_stream, 'meta.announce_published': announce_published}}, upsert=True)
+ self._col.update(
+ {"object_id": object_id},
+ {
+ "$set": {
+ "cached_object": obj,
+ "meta.part_of_stream": part_of_stream,
+ "meta.announce_published": announce_published,
+ }
+ },
+ upsert=True,
+ )
return obj
- cached_object = self._col.find_one({'object_id': object_id})
+ cached_object = self._col.find_one({"object_id": object_id})
if cached_object:
- print(f'ObjectService: {cached_object}')
- return cached_object['cached_object']
+ print(f"ObjectService: {cached_object}")
+ return cached_object["cached_object"]
obj = self._fetch(object_id)
- self._col.update({'object_id': object_id}, {'$set': {'cached_object': obj, 'meta.part_of_stream': part_of_stream, 'meta.announce_published': announce_published}}, upsert=True)
+ self._col.update(
+ {"object_id": object_id},
+ {
+ "$set": {
+ "cached_object": obj,
+ "meta.part_of_stream": part_of_stream,
+ "meta.announce_published": announce_published,
+ }
+ },
+ upsert=True,
+ )
# print(f'ObjectService: {obj}')
return obj
diff --git a/utils/opengraph.py b/utils/opengraph.py
index 8bafece..597ad3c 100644
--- a/utils/opengraph.py
+++ b/utils/opengraph.py
@@ -1,37 +1,34 @@
-import ipaddress
-from urllib.parse import urlparse
-
import opengraph
import requests
from bs4 import BeautifulSoup
-from .urlutils import check_url
-from .urlutils import is_url_valid
+from little_boxes.urlutils import check_url
+from little_boxes.urlutils import is_url_valid
def links_from_note(note):
- tags_href= set()
- for t in note.get('tag', []):
- h = t.get('href')
+ tags_href = set()
+ for t in note.get("tag", []):
+ h = t.get("href")
if h:
# TODO(tsileo): fetch the URL for Actor profile, type=mention
tags_href.add(h)
links = set()
- soup = BeautifulSoup(note['content'])
- for link in soup.find_all('a'):
- h = link.get('href')
- if h.startswith('http') and h not in tags_href and is_url_valid(h):
+ soup = BeautifulSoup(note["content"])
+ for link in soup.find_all("a"):
+ h = link.get("href")
+ if h.startswith("http") and h not in tags_href and is_url_valid(h):
links.add(h)
return links
def fetch_og_metadata(user_agent, col, remote_id):
- doc = col.find_one({'remote_id': remote_id})
+ doc = col.find_one({"remote_id": remote_id})
if not doc:
raise ValueError
- note = doc['activity']['object']
+ note = doc["activity"]["object"]
print(note)
links = links_from_note(note)
if not links:
@@ -40,9 +37,11 @@ def fetch_og_metadata(user_agent, col, remote_id):
htmls = []
for l in links:
check_url(l)
- r = requests.get(l, headers={'User-Agent': user_agent})
+ r = requests.get(l, headers={"User-Agent": user_agent})
r.raise_for_status()
htmls.append(r.text)
links_og_metadata = [dict(opengraph.OpenGraph(html=html)) for html in htmls]
- col.update_one({'remote_id': remote_id}, {'$set': {'meta.og_metadata': links_og_metadata}})
+ col.update_one(
+ {"remote_id": remote_id}, {"$set": {"meta.og_metadata": links_og_metadata}}
+ )
return len(links)
diff --git a/utils/urlutils.py b/utils/urlutils.py
deleted file mode 100644
index 360d209..0000000
--- a/utils/urlutils.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import ipaddress
-import logging
-import os
-import socket
-from urllib.parse import urlparse
-
-from . import strtobool
-from .errors import Error
-
-logger = logging.getLogger(__name__)
-
-
-class InvalidURLError(Error):
- pass
-
-
-def is_url_valid(url: str) -> bool:
- parsed = urlparse(url)
- if parsed.scheme not in ['http', 'https']:
- return False
-
- # XXX in debug mode, we want to allow requests to localhost to test the federation with local instances
- debug_mode = strtobool(os.getenv('MICROBLOGPUB_DEBUG', 'false'))
- if debug_mode:
- return True
-
- if parsed.hostname in ['localhost']:
- return False
-
- try:
- ip_address = socket.getaddrinfo(parsed.hostname, parsed.port or 80)[0][4][0]
- except socket.gaierror:
- logger.exception(f'failed to lookup url {url}')
- return False
-
- if ipaddress.ip_address(ip_address).is_private:
- logger.info(f'rejecting private URL {url}')
- return False
-
- return True
-
-
-def check_url(url: str) -> None:
- if not is_url_valid(url):
- raise InvalidURLError(f'"{url}" is invalid')
-
- return None
diff --git a/utils/webfinger.py b/utils/webfinger.py
deleted file mode 100644
index 344dc01..0000000
--- a/utils/webfinger.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import logging
-from typing import Any
-from typing import Dict
-from typing import Optional
-from urllib.parse import urlparse
-
-import requests
-
-from .urlutils import check_url
-
-logger = logging.getLogger(__name__)
-
-
-def webfinger(resource: str) -> Optional[Dict[str, Any]]:
- """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL.
- """
- logger.info(f'performing webfinger resolution for {resource}')
- protos = ['https', 'http']
- if resource.startswith('http://'):
- protos.reverse()
- host = urlparse(resource).netloc
- elif resource.startswith('https://'):
- host = urlparse(resource).netloc
- else:
- if resource.startswith('acct:'):
- resource = resource[5:]
- if resource.startswith('@'):
- resource = resource[1:]
- _, host = resource.split('@', 1)
- resource='acct:'+resource
-
- # Security check on the url (like not calling localhost)
- check_url(f'https://{host}')
-
- for i, proto in enumerate(protos):
- try:
- url = f'{proto}://{host}/.well-known/webfinger'
- resp = requests.get(
- url,
- {'resource': resource}
- )
- except requests.ConnectionError:
- # If we tried https first and the domain is "http only"
- if i == 0:
- continue
- break
- if resp.status_code == 404:
- return None
- resp.raise_for_status()
- return resp.json()
-
-
-def get_remote_follow_template(resource: str) -> Optional[str]:
- data = webfinger(resource)
- if data is None:
- return None
- for link in data['links']:
- if link.get('rel') == 'http://ostatus.org/schema/1.0/subscribe':
- return link.get('template')
- return None
-
-
-def get_actor_url(resource: str) -> Optional[str]:
- """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL.
-
- Returns:
- the Actor URL or None if the resolution failed.
- """
- data = webfinger(resource)
- if data is None:
- return None
- for link in data['links']:
- if link.get('rel') == 'self' and link.get('type') == 'application/activity+json':
- return link.get('href')
- return None