forked from forks/microblog.pub
Tweak/fix migration
This commit is contained in:
parent 974acabd19
commit 622b39fc26
1 changed file with 33 additions and 30 deletions
@@ -5,12 +5,12 @@ from urllib.parse import urlparse
 
 from little_boxes import activitypub as ap
 
+import activitypub
+from config import ID
 from utils.migrations import DB
 from utils.migrations import Migration
 from utils.migrations import logger
 from utils.migrations import perform  # noqa: just here for export
-from config import ID
-import activitypub
 
 back = activitypub.MicroblogPubBackend()
 ap.use_backend(back)
@@ -119,34 +119,37 @@ class _2_FollowMigration(Migration):
     def migrate(self) -> None:
         actor_cache: Dict[str, Dict[str, Any]] = {}
         for data in DB.activities.find({"type": ap.ActivityType.FOLLOW.value}):
-            if data["meta"]["actor_id"] == ID:
-                # It's a "following"
-                actor = actor_cache.get(data["meta"]["object_id"])
-                if not actor:
-                    actor = ap.parse_activity(
-                        ap.get_backend().fetch_iri(
-                            data["meta"]["object_id"], no_cache=True
-                        )
-                    ).to_dict(embed=True)
+            try:
+                if data["meta"]["actor_id"] == ID:
+                    # It's a "following"
+                    actor = actor_cache.get(data["meta"]["object_id"])
                     if not actor:
-                        raise ValueError(f"missing actor {data!r}")
-                    actor_cache[actor["id"]] = actor
-                DB.activities.update_one(
-                    {"_id": data["_id"]}, {"$set": {"meta.object": actor}}
-                )
+                        actor = ap.parse_activity(
+                            ap.get_backend().fetch_iri(
+                                data["meta"]["object_id"], no_cache=True
+                            )
+                        ).to_dict(embed=True)
+                        if not actor:
+                            raise ValueError(f"missing actor {data!r}")
+                        actor_cache[actor["id"]] = actor
+                    DB.activities.update_one(
+                        {"_id": data["_id"]}, {"$set": {"meta.object": actor}}
+                    )
 
-            else:
-                # It's a "followers"
-                actor = actor_cache.get(data["meta"]["actor_id"])
-                if not actor:
-                    actor = ap.parse_activity(
-                        ap.get_backend().fetch_iri(
-                            data["meta"]["actor_id"], no_cache=True
-                        )
-                    ).to_dict(embed=True)
+                else:
+                    # It's a "followers"
+                    actor = actor_cache.get(data["meta"]["actor_id"])
                     if not actor:
-                        raise ValueError(f"missing actor {data!r}")
-                    actor_cache[actor["id"]] = actor
-                DB.activities.update_one(
-                    {"_id": data["_id"]}, {"$set": {"meta.actor": actor}}
-                )
+                        actor = ap.parse_activity(
+                            ap.get_backend().fetch_iri(
+                                data["meta"]["actor_id"], no_cache=True
+                            )
+                        ).to_dict(embed=True)
+                        if not actor:
+                            raise ValueError(f"missing actor {data!r}")
+                        actor_cache[actor["id"]] = actor
+                    DB.activities.update_one(
+                        {"_id": data["_id"]}, {"$set": {"meta.actor": actor}}
+                    )
+            except Exception:
+                logger.exception("failed to process actor {data!r}")
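
The second hunk is the actual fix: each iteration of the migration loop is wrapped in try/except Exception, so a Follow whose actor can no longer be fetched is logged and skipped instead of aborting the whole run (as written, the logged message is not an f-string, so {data!r} appears literally). A minimal sketch of the same error-isolation pattern, assuming hypothetical names backfill_actors, records, and fetch_actor in place of the migration method, the MongoDB cursor, and the backend fetch:

    import logging
    from typing import Any, Dict

    logger = logging.getLogger(__name__)


    def backfill_actors(records, fetch_actor) -> None:
        # Hypothetical stand-ins: `records` for the MongoDB cursor over Follow
        # activities, `fetch_actor` for the parse_activity/fetch_iri call chain.
        actor_cache: Dict[str, Dict[str, Any]] = {}
        for data in records:
            try:
                actor_id = data["meta"]["actor_id"]
                actor = actor_cache.get(actor_id)
                if not actor:
                    actor = fetch_actor(actor_id)
                    if not actor:
                        raise ValueError(f"missing actor {data!r}")
                    actor_cache[actor_id] = actor
                data["meta"]["actor"] = actor
            except Exception:
                # A record that fails (e.g. an unreachable remote actor)
                # is logged and skipped; the loop continues.
                logger.exception(f"failed to process actor {data!r}")

Catching a broad Exception here trades strictness for completeness: the migration updates as many documents as it can rather than failing on the first unreachable actor.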