import hashlib
import json
import logging
import os
from datetime import datetime
from enum import Enum
from typing import Any
from typing import Dict
from typing import List
from typing import Optional

from bson.objectid import ObjectId
from cachetools import LRUCache
from feedgen.feed import FeedGenerator
from html2text import html2text
from little_boxes import activitypub as ap
from little_boxes import strtobool
from little_boxes.activitypub import _to_list
from little_boxes.backend import Backend
from little_boxes.errors import ActivityGoneError
from little_boxes.errors import Error
from little_boxes.errors import NotAnActivityError

from config import BASE_URL
from config import DB
from config import EXTRA_INBOXES
from config import ID
from config import ME
from config import USER_AGENT
from config import USERNAME
from tasks import Tasks

logger = logging.getLogger(__name__)


ACTORS_CACHE = LRUCache(maxsize=256)
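# ACTORS_CACHE above holds recently dereferenced remote actors; rendering a
# page can hit the same actor many times, so `fetch_iri` below serves them from
# memory and also mirrors them in the `actors` MongoDB collection.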


def _actor_to_meta(actor: ap.BaseActivity, with_inbox=False) -> Dict[str, Any]:
    """Build the minimal actor metadata stored alongside activities."""
    meta = {
        "id": actor.id,
        "url": actor.url,
        "icon": actor.icon,
        "name": actor.name,
        "preferredUsername": actor.preferredUsername,
    }
    if with_inbox:
        meta.update(
            {
                "inbox": actor.inbox,
                "sharedInbox": actor._data.get("endpoints", {}).get("sharedInbox"),
            }
        )
    logger.debug(f"meta={meta}")

    return meta


def _remove_id(doc: ap.ObjectType) -> ap.ObjectType:
    """Helper for removing MongoDB's `_id` field."""
    doc = doc.copy()
    if "_id" in doc:
        del doc["_id"]
    return doc


def ensure_it_is_me(f):
    """Method decorator that ensures the actor passed to a backend hook is the server's own actor."""

    def wrapper(*args, **kwargs):
        if args[1].id != ME["id"]:
            raise Error("unexpected actor")
        return f(*args, **kwargs)

    return wrapper


def _answer_key(choice: str) -> str:
    """Build a stable key for a poll choice (used as part of a MongoDB field name)."""
    h = hashlib.new("sha1")
    h.update(choice.encode())
    return h.hexdigest()
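# Illustrative example (the choice name is hypothetical): _answer_key("yes")
# returns the SHA1 hex digest of b"yes", a fixed-length token that is safe to
# embed in a dotted MongoDB field name such as "meta.question_answers.<key>",
# even if the original choice name contains spaces or dots.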


class Box(Enum):
    INBOX = "inbox"
    OUTBOX = "outbox"
    REPLIES = "replies"


class MicroblogPubBackend(Backend):
    """Implements a Little Boxes backend, backed by MongoDB."""

    def debug_mode(self) -> bool:
        return strtobool(os.getenv("MICROBLOGPUB_DEBUG", "false"))

    def user_agent(self) -> str:
        """Set up a custom user agent."""
        return USER_AGENT

    def extra_inboxes(self) -> List[str]:
        return EXTRA_INBOXES

    def base_url(self) -> str:
        """Base URL config."""
        return BASE_URL

    def activity_url(self, obj_id):
        """URL for activity link."""
        return f"{BASE_URL}/outbox/{obj_id}"

    def note_url(self, obj_id):
        """URL for a note link."""
        return f"{BASE_URL}/note/{obj_id}"

    def save(self, box: Box, activity: ap.BaseActivity) -> None:
        """Custom helper for saving an activity to the DB."""
        is_public = True
        if activity.has_type(ap.ActivityType.CREATE) and not activity.is_public():
            is_public = False

        DB.activities.insert_one(
            {
                "box": box.value,
                "activity": activity.to_dict(),
                "type": _to_list(activity.type),
                "remote_id": activity.id,
                "meta": {"undo": False, "deleted": False, "public": is_public},
            }
        )
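    # A stored document looks roughly like this (illustrative values only):
    #   {"box": "outbox", "type": ["Create"], "remote_id": "https://example.com/outbox/abc",
    #    "activity": {...}, "meta": {"undo": False, "deleted": False, "public": True}}
    # The "meta" sub-document is what the hooks below update in place.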

    def followers(self) -> List[str]:
        q = {
            "box": Box.INBOX.value,
            "type": ap.ActivityType.FOLLOW.value,
            "meta.undo": False,
        }
        return [doc["activity"]["actor"] for doc in DB.activities.find(q)]

    def followers_as_recipients(self) -> List[str]:
        q = {
            "box": Box.INBOX.value,
            "type": ap.ActivityType.FOLLOW.value,
            "meta.undo": False,
        }
        recipients = []
        for doc in DB.activities.find(q):
            recipients.append(
                doc["meta"]["actor"]["sharedInbox"] or doc["meta"]["actor"]["inbox"]
            )

        return list(set(recipients))

    def following(self) -> List[str]:
        q = {
            "box": Box.OUTBOX.value,
            "type": ap.ActivityType.FOLLOW.value,
            "meta.undo": False,
        }
        return [doc["activity"]["object"] for doc in DB.activities.find(q)]

    def parse_collection(
        self, payload: Optional[Dict[str, Any]] = None, url: Optional[str] = None
    ) -> List[str]:
        """Resolve/fetch a `Collection`/`OrderedCollection`."""
        # Resolve internal collections via MongoDB directly
        if url == ID + "/followers":
            return self.followers()
        elif url == ID + "/following":
            return self.following()

        return super().parse_collection(payload, url)

    @ensure_it_is_me
    def outbox_is_blocked(self, as_actor: ap.Person, actor_id: str) -> bool:
        return bool(
            DB.activities.find_one(
                {
                    "box": Box.OUTBOX.value,
                    "type": ap.ActivityType.BLOCK.value,
                    "activity.object": actor_id,
                    "meta.undo": False,
                }
            )
        )

    def _fetch_iri(self, iri: str) -> ap.ObjectType:
        """Look up an IRI locally (in MongoDB) before falling back to an HTTP fetch."""
        if iri == ME["id"]:
            return ME

        # Check if the activity is owned by this server
        if iri.startswith(BASE_URL):
            is_a_note = False
            if iri.endswith("/activity"):
                iri = iri.replace("/activity", "")
                is_a_note = True
            data = DB.activities.find_one({"box": Box.OUTBOX.value, "remote_id": iri})
            if data and data["meta"]["deleted"]:
                raise ActivityGoneError(f"{iri} is gone")
            if data and is_a_note:
                return data["activity"]["object"]
            elif data:
                return data["activity"]
        else:
            # Check if the activity is stored in the inbox
            data = DB.activities.find_one({"remote_id": iri})
            if data:
                if data["meta"]["deleted"]:
                    raise ActivityGoneError(f"{iri} is gone")
                return data["activity"]

        # Fetch the URL via HTTP
        logger.info(f"dereference {iri} via HTTP")
        return super().fetch_iri(iri)

    def fetch_iri(self, iri: str, no_cache=False) -> ap.ObjectType:
        if iri == ME["id"]:
            return ME

        if iri in ACTORS_CACHE:
            logger.info(f"{iri} found in cache")
            return ACTORS_CACHE[iri]

        # data = DB.actors.find_one({"remote_id": iri})
        # if data:
        #     if ap._has_type(data["type"], ap.ACTOR_TYPES):
        #         logger.info(f"{iri} found in DB cache")
        #         ACTORS_CACHE[iri] = data["data"]
        #         return data["data"]
        if not no_cache:
            data = self._fetch_iri(iri)
        else:
            return super().fetch_iri(iri)

        logger.debug(f"_fetch_iri({iri!r}) == {data!r}")
        if ap._has_type(data["type"], ap.ACTOR_TYPES):
            logger.debug(f"caching actor {iri}")
            # Cache the actor
            DB.actors.update_one(
                {"remote_id": iri},
                {"$set": {"remote_id": iri, "data": data}},
                upsert=True,
            )
            ACTORS_CACHE[iri] = data

        return data
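    # Caching sketch: actors are served from the in-memory ACTORS_CACHE when
    # possible and mirrored into the "actors" collection after a fetch; passing
    # no_cache=True skips the local lookup entirely and dereferences the IRI
    # via the parent backend over HTTP.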

    @ensure_it_is_me
    def inbox_check_duplicate(self, as_actor: ap.Person, iri: str) -> bool:
        return bool(DB.activities.find_one({"box": Box.INBOX.value, "remote_id": iri}))

    def set_post_to_remote_inbox(self, cb):
        self.post_to_remote_inbox_cb = cb

    @ensure_it_is_me
    def undo_new_follower(self, as_actor: ap.Person, follow: ap.Follow) -> None:
        DB.activities.update_one(
            {"remote_id": follow.id}, {"$set": {"meta.undo": True}}
        )

    @ensure_it_is_me
    def undo_new_following(self, as_actor: ap.Person, follow: ap.Follow) -> None:
        DB.activities.update_one(
            {"remote_id": follow.id}, {"$set": {"meta.undo": True}}
        )

    @ensure_it_is_me
    def inbox_like(self, as_actor: ap.Person, like: ap.Like) -> None:
        obj = like.get_object()
        # Update the meta counter if the object is published by the server
        DB.activities.update_one(
            {"box": Box.OUTBOX.value, "activity.object.id": obj.id},
            {"$inc": {"meta.count_like": 1}},
        )

    @ensure_it_is_me
    def inbox_undo_like(self, as_actor: ap.Person, like: ap.Like) -> None:
        obj = like.get_object()
        # Update the meta counter if the object is published by the server
        DB.activities.update_one(
            {"box": Box.OUTBOX.value, "activity.object.id": obj.id},
            {"$inc": {"meta.count_like": -1}},
        )
        DB.activities.update_one({"remote_id": like.id}, {"$set": {"meta.undo": True}})

    @ensure_it_is_me
    def outbox_like(self, as_actor: ap.Person, like: ap.Like) -> None:
        obj = like.get_object()
        if obj.has_type(ap.ActivityType.QUESTION):
            Tasks.fetch_remote_question(obj)

        DB.activities.update_one(
            {"activity.object.id": obj.id},
            {"$inc": {"meta.count_like": 1}, "$set": {"meta.liked": like.id}},
        )

    @ensure_it_is_me
    def outbox_undo_like(self, as_actor: ap.Person, like: ap.Like) -> None:
        obj = like.get_object()
        DB.activities.update_one(
            {"activity.object.id": obj.id},
            {"$inc": {"meta.count_like": -1}, "$set": {"meta.liked": False}},
        )
        DB.activities.update_one({"remote_id": like.id}, {"$set": {"meta.undo": True}})

    @ensure_it_is_me
    def inbox_announce(self, as_actor: ap.Person, announce: ap.Announce) -> None:
        # TODO(tsileo): actually drop it without storing it and better logging, also move the check somewhere else
        # or remove it?
        try:
            obj = announce.get_object()
        except NotAnActivityError:
            logger.exception(
                f'received an Announce referencing an OStatus notice ({announce._data["object"]}), dropping the message'
            )
            return

        if obj.has_type(ap.ActivityType.QUESTION):
            Tasks.fetch_remote_question(obj)

        DB.activities.update_one(
            {"remote_id": announce.id},
            {
                "$set": {
                    "meta.object": obj.to_dict(embed=True),
                    "meta.object_actor": _actor_to_meta(obj.get_actor()),
                }
            },
        )
        DB.activities.update_one(
            {"activity.object.id": obj.id}, {"$inc": {"meta.count_boost": 1}}
        )

    @ensure_it_is_me
    def inbox_undo_announce(self, as_actor: ap.Person, announce: ap.Announce) -> None:
        obj = announce.get_object()
        # Update the meta counter if the object is published by the server
        DB.activities.update_one(
            {"activity.object.id": obj.id}, {"$inc": {"meta.count_boost": -1}}
        )
        DB.activities.update_one(
            {"remote_id": announce.id}, {"$set": {"meta.undo": True}}
        )

    @ensure_it_is_me
    def outbox_announce(self, as_actor: ap.Person, announce: ap.Announce) -> None:
        obj = announce.get_object()
        if obj.has_type(ap.ActivityType.QUESTION):
            Tasks.fetch_remote_question(obj)

        DB.activities.update_one(
            {"remote_id": announce.id},
            {
                "$set": {
                    "meta.object": obj.to_dict(embed=True),
                    "meta.object_actor": _actor_to_meta(obj.get_actor()),
                }
            },
        )

        DB.activities.update_one(
            {"activity.object.id": obj.id}, {"$set": {"meta.boosted": announce.id}}
        )

    @ensure_it_is_me
    def outbox_undo_announce(self, as_actor: ap.Person, announce: ap.Announce) -> None:
        obj = announce.get_object()
        DB.activities.update_one(
            {"activity.object.id": obj.id}, {"$set": {"meta.boosted": False}}
        )
        DB.activities.update_one(
            {"remote_id": announce.id}, {"$set": {"meta.undo": True}}
        )

    @ensure_it_is_me
    def inbox_delete(self, as_actor: ap.Person, delete: ap.Delete) -> None:
        obj = delete.get_object()
        logger.debug(f"delete object={obj!r}")
        DB.activities.update_one(
            {"activity.object.id": obj.id}, {"$set": {"meta.deleted": True}}
        )

        logger.info(f"inbox_delete handle_replies obj={obj!r}")
        in_reply_to = obj.get_in_reply_to() if obj.inReplyTo else None
        # The deleted object may not be a Note anymore (e.g. a Tombstone);
        # recover the inReplyTo from the stored Create activity
        if delete.get_object().ACTIVITY_TYPE != ap.ActivityType.NOTE:
            in_reply_to = ap._get_id(
                DB.activities.find_one(
                    {
                        "activity.object.id": delete.get_object().id,
                        "type": ap.ActivityType.CREATE.value,
                    }
                )["activity"]["object"].get("inReplyTo")
            )

        # Fake an Undo so any related Like/Announce doesn't appear on the web UI
        DB.activities.update(
            {"meta.object.id": obj.id},
            {"$set": {"meta.undo": True, "meta.extra": "object deleted"}},
        )
        if in_reply_to:
            self._handle_replies_delete(as_actor, in_reply_to)

    @ensure_it_is_me
    def outbox_delete(self, as_actor: ap.Person, delete: ap.Delete) -> None:
        DB.activities.update_one(
            {"activity.object.id": delete.get_object().id},
            {"$set": {"meta.deleted": True}},
        )
        obj = delete.get_object()
        if delete.get_object().ACTIVITY_TYPE != ap.ActivityType.NOTE:
            obj = ap.parse_activity(
                DB.activities.find_one(
                    {
                        "activity.object.id": delete.get_object().id,
                        "type": ap.ActivityType.CREATE.value,
                    }
                )["activity"]
            ).get_object()

        DB.activities.update(
            {"meta.object.id": obj.id},
            {"$set": {"meta.undo": True, "meta.extra": "object deleted"}},
        )

        self._handle_replies_delete(as_actor, obj.get_in_reply_to())

    @ensure_it_is_me
    def inbox_update(self, as_actor: ap.Person, update: ap.Update) -> None:
        obj = update.get_object()
        if obj.ACTIVITY_TYPE == ap.ActivityType.NOTE:
            DB.activities.update_one(
                {"activity.object.id": obj.id},
                {"$set": {"activity.object": obj.to_dict()}},
            )
        elif obj.has_type(ap.ActivityType.QUESTION):
            choices = obj._data.get("oneOf", obj.anyOf)
            total_replies = 0
            _set = {}
            for choice in choices:
                answer_key = _answer_key(choice["name"])
                cnt = choice["replies"]["totalItems"]
                total_replies += cnt
                _set[f"meta.question_answers.{answer_key}"] = cnt

            _set["meta.question_replies"] = total_replies

            DB.activities.update_one(
                {"box": Box.INBOX.value, "activity.object.id": obj.id}, {"$set": _set}
            )

        # FIXME(tsileo): handle update actor and inbox_update_note/inbox_update_actor
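    # Note on the Question branch above: the updated remote object carries a
    # "replies" counter ("totalItems") per choice, so the per-choice counts are
    # copied into "meta.question_answers.<key>" and their sum into
    # "meta.question_replies" instead of re-counting local votes.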

    @ensure_it_is_me
    def outbox_update(self, as_actor: ap.Person, _update: ap.Update) -> None:
        obj = _update._data["object"]

        update_prefix = "activity.object."
        update: Dict[str, Any] = {"$set": dict(), "$unset": dict()}
        update["$set"][f"{update_prefix}updated"] = (
            datetime.utcnow().replace(microsecond=0).isoformat() + "Z"
        )
        for k, v in obj.items():
            if k in ["id", "type"]:
                continue
            if v is None:
                update["$unset"][f"{update_prefix}{k}"] = ""
            else:
                update["$set"][f"{update_prefix}{k}"] = v

        if len(update["$unset"]) == 0:
            del update["$unset"]

        logger.info(f"updating note from outbox {obj!r} {update}")
        DB.activities.update_one({"activity.object.id": obj["id"]}, update)
        # FIXME(tsileo): should send an Update (but not a partial one) to all the note's recipients
        # (create a new Update with the result of the update, and send it without saving it?)
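    # The loop above turns the partial Update into a MongoDB update document:
    # non-null fields become "$set" entries under the "activity.object." prefix,
    # null fields become "$unset" entries, and "id"/"type" are skipped.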

    @ensure_it_is_me
    def outbox_create(self, as_actor: ap.Person, create: ap.Create) -> None:
        self._handle_replies(as_actor, create)

    @ensure_it_is_me
    def inbox_create(self, as_actor: ap.Person, create: ap.Create) -> None:
        # If it's a `Question`, trigger an async task for updating it later (by fetching the remote and updating the
        # local copy)
        question = create.get_object()
        if question.has_type(ap.ActivityType.QUESTION):
            Tasks.fetch_remote_question(question)

        self._handle_replies(as_actor, create)

    @ensure_it_is_me
    def _handle_replies_delete(
        self, as_actor: ap.Person, in_reply_to: Optional[str]
    ) -> None:
        if not in_reply_to:
            return

        DB.activities.update_one(
            {"activity.object.id": in_reply_to},
            {"$inc": {"meta.count_reply": -1, "meta.count_direct_reply": -1}},
        )

    def _process_question_reply(self, create: ap.Create, question: ap.Question) -> None:
        """Record a vote (a reply whose name matches a choice) on one of our questions."""
        choice = create.get_object().name

        # Ensure it's a valid choice
        if choice not in [
            c["name"] for c in question._data.get("oneOf", question.anyOf)
        ]:
            logger.info("invalid choice")
            return

        # Check for duplicate votes
        if DB.activities.find_one(
            {
                "activity.object.actor": create.get_actor().id,
                "meta.answer_to": question.id,
            }
        ):
            logger.info("duplicate response")
            return

        # Update the DB
        answer_key = _answer_key(choice)

        DB.activities.update_one(
            {"activity.object.id": question.id},
            {
                "$inc": {
                    "meta.question_replies": 1,
                    f"meta.question_answers.{answer_key}": 1,
                }
            },
        )

        DB.activities.update_one(
            {"remote_id": create.id},
            {"$set": {"meta.answer_to": question.id, "meta.stream": False}},
        )

        return None
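    # Vote bookkeeping: the question document gets its per-choice and total
    # counters incremented, while the voter's Create is tagged with
    # "meta.answer_to" (used by the duplicate-vote check above) and hidden from
    # the stream via "meta.stream": False.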

    @ensure_it_is_me
    def _handle_replies(self, as_actor: ap.Person, create: ap.Create) -> None:
        """Go up to the root reply, store unknown replies in the `replies` box and set the "meta.thread_root_parent"
        key to make it easy to query a whole thread."""
        in_reply_to = create.get_object().get_in_reply_to()
        if not in_reply_to:
            return

        new_threads = []
        root_reply = in_reply_to
        reply = ap.fetch_remote_activity(root_reply)

        # Ensure this is a reply to a local question, with a direct "to" addressing
        if (
            reply.id.startswith(BASE_URL)
            and reply.has_type(ap.ActivityType.QUESTION.value)
            and _to_list(create.get_object().to)[0].startswith(BASE_URL)
            and not create.is_public()
        ):
            return self._process_question_reply(create, reply)
        elif (
            create.id.startswith(BASE_URL)
            and reply.has_type(ap.ActivityType.QUESTION.value)
            and not create.is_public()
        ):
            # Keep track of our own votes
            DB.activities.update_one(
                {"activity.object.id": reply.id, "box": "inbox"},
                {"$set": {"meta.voted_for": create.get_object().name}},
            )
            return None

        logger.info(f"processing {create!r} and incrementing {in_reply_to}")
        creply = DB.activities.find_one_and_update(
            {"activity.object.id": in_reply_to},
            {"$inc": {"meta.count_reply": 1, "meta.count_direct_reply": 1}},
        )
        if not creply:
            # The reply is neither in the inbox nor in the outbox, so save it
            self.save(Box.REPLIES, reply)
            new_threads.append(reply.id)
            # TODO(tsileo): parse the replies collection and import the replies?

        while reply is not None:
            in_reply_to = reply.get_in_reply_to()
            if not in_reply_to:
                break
            root_reply = in_reply_to
            reply = ap.fetch_remote_activity(root_reply)
            q = {"activity.object.id": root_reply}
            if not DB.activities.count(q):
                self.save(Box.REPLIES, reply)
                new_threads.append(reply.id)

        DB.activities.update_one(
            {"remote_id": create.id}, {"$set": {"meta.thread_root_parent": root_reply}}
        )
        DB.activities.update(
            {"box": Box.REPLIES.value, "remote_id": {"$in": new_threads}},
            {"$set": {"meta.thread_root_parent": root_reply}},
        )
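    # Thread handling sketch: the while-loop walks "inReplyTo" links upward,
    # saving any activity not already stored locally into the REPLIES box, so
    # that every post in the thread ends up sharing the same
    # "meta.thread_root_parent" value (the thread's root).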

    def post_to_outbox(self, activity: ap.BaseActivity) -> None:
        if activity.has_type(ap.CREATE_TYPES):
            activity = activity.build_create()

        self.save(Box.OUTBOX, activity)

        # Assign the Create a random ID
        obj_id = self.random_object_id()
        activity.set_id(self.activity_url(obj_id), obj_id)

        recipients = activity.recipients()
        logger.info(f"recipients={recipients}")
        activity = ap.clean_activity(activity.to_dict())

        payload = json.dumps(activity)
        for recp in recipients:
            logger.debug(f"posting to {recp}")
            self.post_to_remote_inbox(self.get_actor(), payload, recp)
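    # Delivery sketch: a bare object whose type is in `ap.CREATE_TYPES` is
    # first wrapped in a Create, saved to the outbox, given a local outbox URL
    # as its id, and the cleaned JSON payload is then posted to every
    # recipient's inbox via `post_to_remote_inbox`.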


def gen_feed():
    """Build a feed (via feedgen) of the 10 most recent outbox notes."""
    fg = FeedGenerator()
    fg.id(f"{ID}")
    fg.title(f"{USERNAME} notes")
    fg.author({"name": USERNAME, "email": "t@a4.io"})
    fg.link(href=ID, rel="alternate")
    fg.description(f"{USERNAME} notes")
    fg.logo(ME.get("icon", {}).get("url"))
    fg.language("en")
    for item in DB.activities.find(
        {"box": Box.OUTBOX.value, "type": "Create", "meta.deleted": False}, limit=10
    ).sort("_id", -1):
        fe = fg.add_entry()
        fe.id(item["activity"]["object"].get("url"))
        fe.link(href=item["activity"]["object"].get("url"))
        fe.title(item["activity"]["object"]["content"])
        fe.description(item["activity"]["object"]["content"])
    return fg


def json_feed(path: str) -> Dict[str, Any]:
    """JSON Feed (https://jsonfeed.org/) document."""
    data = []
    for item in DB.activities.find(
        {"box": Box.OUTBOX.value, "type": "Create", "meta.deleted": False}, limit=10
    ).sort("_id", -1):
        data.append(
            {
                "id": item["activity"]["id"],
                "url": item["activity"]["object"].get("url"),
                "content_html": item["activity"]["object"]["content"],
                "content_text": html2text(item["activity"]["object"]["content"]),
                "date_published": item["activity"]["object"].get("published"),
            }
        )
    return {
        "version": "https://jsonfeed.org/version/1",
        "user_comment": (
            "This is a microblog feed. You can add this to your feed reader using the following URL: "
            + ID
            + path
        ),
        "title": USERNAME,
        "home_page_url": ID,
        "feed_url": ID + path,
        "author": {
            "name": USERNAME,
            "url": ID,
            "avatar": ME.get("icon", {}).get("url"),
        },
        "items": data,
    }


def build_inbox_json_feed(
    path: str, request_cursor: Optional[str] = None
) -> Dict[str, Any]:
    """Build a JSON feed from the inbox activities."""
    data = []
    cursor = None

    q: Dict[str, Any] = {
        "type": "Create",
        "meta.deleted": False,
        "box": Box.INBOX.value,
    }
    if request_cursor:
        q["_id"] = {"$lt": request_cursor}

    for item in DB.activities.find(q, limit=50).sort("_id", -1):
        actor = ap.get_backend().fetch_iri(item["activity"]["actor"])
        data.append(
            {
                "id": item["activity"]["id"],
                "url": item["activity"]["object"].get("url"),
                "content_html": item["activity"]["object"]["content"],
                "content_text": html2text(item["activity"]["object"]["content"]),
                "date_published": item["activity"]["object"].get("published"),
                "author": {
                    "name": actor.get("name", actor.get("preferredUsername")),
                    "url": actor.get("url"),
                    "avatar": actor.get("icon", {}).get("url"),
                },
            }
        )
        cursor = str(item["_id"])

    resp = {
        "version": "https://jsonfeed.org/version/1",
        "title": f"{USERNAME}'s stream",
        "home_page_url": ID,
        "feed_url": ID + path,
        "items": data,
    }
    if cursor and len(data) == 50:
        resp["next_url"] = ID + path + "?cursor=" + cursor

    return resp
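# Pagination above is cursor-based: the MongoDB "_id" of the last item returned
# becomes the "?cursor=" value for the next request, and the "next_url" link is
# only added when a full page of 50 items came back.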


def embed_collection(total_items, first_page_id):
    """Helper creating a root OrderedCollection with a link to the first page."""
    return {
        "type": ap.ActivityType.ORDERED_COLLECTION.value,
        "totalItems": total_items,
        "first": f"{first_page_id}?page=first",
        "id": first_page_id,
    }
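# Illustrative output (the URL is hypothetical):
#   embed_collection(3, "https://example.com/outbox/abc/replies") returns
#   {"type": "OrderedCollection", "totalItems": 3,
#    "first": "https://example.com/outbox/abc/replies?page=first",
#    "id": "https://example.com/outbox/abc/replies"}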


def simple_build_ordered_collection(col_name, data):
    """Build a (non-paginated) OrderedCollection embedding all the given items."""
    return {
        "@context": ap.COLLECTION_CTX,
        "id": BASE_URL + "/" + col_name,
        "totalItems": len(data),
        "type": ap.ActivityType.ORDERED_COLLECTION.value,
        "orderedItems": data,
    }


def build_ordered_collection(
    col, q=None, cursor=None, map_func=None, limit=50, col_name=None, first_page=False
):
    """Helper for building an OrderedCollection from a MongoDB query (with pagination support)."""
    col_name = col_name or col.name
    if q is None:
        q = {}

    if cursor:
        q["_id"] = {"$lt": ObjectId(cursor)}
    data = list(col.find(q, limit=limit).sort("_id", -1))

    if not data:
        # Return an empty page if there's a cursor
        if cursor:
            return {
                "@context": ap.COLLECTION_CTX,
                "type": ap.ActivityType.ORDERED_COLLECTION_PAGE.value,
                "id": BASE_URL + "/" + col_name + "?cursor=" + cursor,
                "partOf": BASE_URL + "/" + col_name,
                "totalItems": 0,
                "orderedItems": [],
            }
        return {
            "@context": ap.COLLECTION_CTX,
            "id": BASE_URL + "/" + col_name,
            "totalItems": 0,
            "type": ap.ActivityType.ORDERED_COLLECTION.value,
            "orderedItems": [],
        }

    start_cursor = str(data[0]["_id"])
    next_page_cursor = str(data[-1]["_id"])
    total_items = col.find(q).count()

    data = [_remove_id(doc) for doc in data]
    if map_func:
        data = [map_func(doc) for doc in data]

    # No cursor, this is the first page and we return an OrderedCollection
    if not cursor:
        resp = {
            "@context": ap.COLLECTION_CTX,
            "id": f"{BASE_URL}/{col_name}",
            "totalItems": total_items,
            "type": ap.ActivityType.ORDERED_COLLECTION.value,
            "first": {
                "id": f"{BASE_URL}/{col_name}?cursor={start_cursor}",
                "orderedItems": data,
                "partOf": f"{BASE_URL}/{col_name}",
                "totalItems": total_items,
                "type": ap.ActivityType.ORDERED_COLLECTION_PAGE.value,
            },
        }

        if len(data) == limit:
            resp["first"]["next"] = (
                BASE_URL + "/" + col_name + "?cursor=" + next_page_cursor
            )

        if first_page:
            return resp["first"]

        return resp

    # If there's a cursor, then we return an OrderedCollectionPage
    resp = {
        "@context": ap.COLLECTION_CTX,
        "type": ap.ActivityType.ORDERED_COLLECTION_PAGE.value,
        "id": BASE_URL + "/" + col_name + "?cursor=" + start_cursor,
        "totalItems": total_items,
        "partOf": BASE_URL + "/" + col_name,
        "orderedItems": data,
    }
    if len(data) == limit:
        resp["next"] = BASE_URL + "/" + col_name + "?cursor=" + next_page_cursor

    if first_page:
        # The response is already a single page when a cursor was provided
        return resp

    # XXX(tsileo): implement prev with prev=<first item cursor>?

    return resp
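# Usage sketch (query and map_func values are illustrative, not the actual
# call sites): callers such as the web app's collection endpoints are expected
# to invoke it along the lines of
#
#   build_ordered_collection(
#       DB.activities,
#       q={"box": Box.OUTBOX.value, "meta.deleted": False},
#       cursor=request_cursor,
#       map_func=lambda doc: doc["activity"],
#   )
#
# which yields a root OrderedCollection on the first request and an
# OrderedCollectionPage (with a "next" cursor link) on subsequent ones.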