forked from forks/microblog.pub

Compare commits: outbox-fts...v2 (141 commits)

Commits:
9c8693ea55, febd8c3d26, a5290af5c8, 2cec800332, 3c07494809, 2433fa01cd, 3169890a39, 4e1bb330aa,
625f399309, 2bd6c98538, f13376de84, c97070e3d8, c1692a296d, ce6f9238f3, 3f129855d1, 3fc567861b,
7b784e3011, 5d1ae0c9cd, 88dd2443d7, 4045902068, 20109b45da, 94d14fbef3, f34e0b376b, 51c596dd1d,
dfc7ab0470, 5d35d5c0a0, 17921c1097, 24147aedef, 673baf0d7f, 9c65919070, c506299089, adbdf6f320,
f34bce180c, 0b86df413a, ed214cf0e7, 3fb36d6119, 1de108b019, 7b506f2519, 5cf54c2782, db6016394b,
573a76c0c5, 3097dbebe9, e378ec94e0, 15dd7e184b, 22410862f3, 7621a19489, cad78fe5e8, 6a47b6cf4c,
9d6ed4cd28, 0f10bfddac, 26efd09304, f2e531cf1a, 5d95fd44ac, a337b32bcd, e8fcf5a9a2, 7525744f82,
7d3fc35a24, 73dceee0f5, 34c7cdb5fb, 0527e34476, a82f619e89, a68b3e7318, 436d5ccf1b, a273f26549,
9d357446d2, 6cabff21db, 5df4d420de, 68884d9afa, 46a592b11e, 5f0b8f5dfd, 5adb2bca9a, 08cc74d928,
578581b4dc, ec36272bb4, e30e0de10e, e672d9b9f0, dcd44ec3b6, 71a4ea2425, 441e3d90b1, d9b9f596d3,
2cc4eda143, bd065446bf, 8475f5bccd, a435cd33c9, d692ec060f, 4c6eb51ae2, d36102255f, cdbc545d5e,
fbc46e0517, ef4608f348, 4638b98fa8, a9f41d6be7, 59dfc3d128, 822280c280, c83dd30f41, 9d312bc229,
b37b77ad34, 9ee3f3b971, 066f5ec900, a2254f2674, 2151733e4f, 3cff4e4507, 120f92a9ed, ae8029cd22,
434fd98cd9, 89c90fba56, e29fe0a079, c5aee435f4, 224f5d3f55, 6583feb87d, 04e75c78e0, 68c27e083f,
d52528584a, d352dc104a, 0c5ce67d4e, 9db7bdf0fb, 793a939046, c3eb44add7, 9b75020c91, 36a1a6bd9c,
164cd9bd00, 698a2bae11, 4613997fe3, 4c995957a6, 5c98b8dbfb, 48d5914851, 8f00e522d7, 62c9327500,
a339ff93b1, afd253a1b4, 509e10e79b, d96ec913d4, 5b505b0e37, 530491ff10, 48740ea8cb, 0d7c121781,
a4cfd65009, 540b9d1470, 1c076049cf, 242bf7b515, 2843155501
70 changed files with 4602 additions and 2217 deletions
AUTHORS (new file, 11 lines)
@@ -0,0 +1,11 @@
+Thomas Sileo <t@a4.io>
+Kevin Wallace <doof@doof.net>
+Miguel Jacq <mig@mig5.net>
+Alexey Shpakovsky <alexey@shpakovsky.ru>
+Josh Washburne <josh@jodh.us>
+João Costa <jdpc557@gmail.com>
+Sam <samr1.dev@pm.me>
+Ash McAllan <acegiak@gmail.com>
+Cassio Zen <cassio@hey.com>
+Cocoa <momijizukamori@gmail.com>
+Jane <jane@janeirl.dev>
Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.10-slim as python-base
+FROM python:3.11-slim as python-base
 ENV PYTHONUNBUFFERED=1 \
     PYTHONDONTWRITEBYTECODE=1 \
     POETRY_HOME="/opt/poetry" \
Makefile (6 changed lines)
@@ -28,7 +28,7 @@ move-to:

 .PHONY: self-destruct
 self-destruct:
-	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct
+	-docker run --rm --it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct

 .PHONY: reset-password
 reset-password:

@@ -41,3 +41,7 @@ check-config:
 .PHONY: compile-scss
 compile-scss:
 	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv compile-scss
+
+.PHONY: import-mastodon-following-accounts
+import-mastodon-following-accounts:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv import-mastodon-following-accounts $(path)
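Judging by the $(path) variable in the new target, it is presumably invoked as `make import-mastodon-following-accounts path=following_accounts.csv`, pointing at the CSV export of followed accounts from Mastodon.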
README.md
@@ -10,6 +10,7 @@ Instances in the wild:
 - [microblog.pub](https://microblog.pub/) (follow to get updated about the project)
 - [hexa.ninja](https://hexa.ninja) (theme customization example)
 - [testing.microblog.pub](https://testing.microblog.pub/)
+- [Irish Left Archive](https://posts.leftarchive.ie/) (another theme customization example)

 There are still some rough edges, but the server is mostly functional.

@@ -58,7 +59,7 @@ All the development takes place on [sourcehut](https://sr.ht/~tsileo/microblog.pub)
 - [Issue tracker](https://todo.sr.ht/~tsileo/microblog.pub)
 - [Mailing list](https://sr.ht/~tsileo/microblog.pub/lists)

-Contributions are welcomed, check out the [documentation](https://docs.microblog.pub) for more details.
+Contributions are welcomed, check out the [contributing section of the documentation](https://docs.microblog.pub/developer_guide.html#contributing) for more details.


 ## License
Alembic migration fadfd359ce78 (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""Add Webmention.webmention_type
+
+Revision ID: fadfd359ce78
+Revises: b28c0551c236
+Create Date: 2022-11-16 19:42:56.925512+00:00
+
+"""
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'fadfd359ce78'
+down_revision = 'b28c0551c236'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('webmention', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('webmention_type', sa.Enum('UNKNOWN', 'LIKE', 'REPLY', 'REPOST', name='webmentiontype'), nullable=True))
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('webmention', schema=None) as batch_op:
+        batch_op.drop_column('webmention_type')
+
+    # ### end Alembic commands ###
Alembic migration 9b404c47970a (new file, 32 lines)
@@ -0,0 +1,32 @@
+"""Add option to hide announces from actor
+
+Revision ID: 9b404c47970a
+Revises: fadfd359ce78
+Create Date: 2022-12-12 19:26:36.912763+00:00
+
+"""
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '9b404c47970a'
+down_revision = 'fadfd359ce78'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('actor', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('are_announces_hidden_from_stream', sa.Boolean(), server_default='0', nullable=False))
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('actor', schema=None) as batch_op:
+        batch_op.drop_column('are_announces_hidden_from_stream')
+
+    # ### end Alembic commands ###
Alembic migration 4ab54becec04 (new file, 48 lines)
@@ -0,0 +1,48 @@
+"""Add OAuth client
+
+Revision ID: 4ab54becec04
+Revises: 9b404c47970a
+Create Date: 2022-12-16 17:30:54.520477+00:00
+
+"""
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = '4ab54becec04'
+down_revision = '9b404c47970a'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('oauth_client',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
+    sa.Column('client_name', sa.String(), nullable=False),
+    sa.Column('redirect_uris', sa.JSON(), nullable=True),
+    sa.Column('client_uri', sa.String(), nullable=True),
+    sa.Column('logo_uri', sa.String(), nullable=True),
+    sa.Column('scope', sa.String(), nullable=True),
+    sa.Column('client_id', sa.String(), nullable=False),
+    sa.Column('client_secret', sa.String(), nullable=False),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('client_secret')
+    )
+    with op.batch_alter_table('oauth_client', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_oauth_client_client_id'), ['client_id'], unique=True)
+        batch_op.create_index(batch_op.f('ix_oauth_client_id'), ['id'], unique=False)
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('oauth_client', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_oauth_client_id'))
+        batch_op.drop_index(batch_op.f('ix_oauth_client_client_id'))
+
+    op.drop_table('oauth_client')
+    # ### end Alembic commands ###
Alembic migration a209f0333f5a (new file, 36 lines)
@@ -0,0 +1,36 @@
+"""Add OAuth refresh token support
+
+Revision ID: a209f0333f5a
+Revises: 4ab54becec04
+Create Date: 2022-12-18 11:26:31.976348+00:00
+
+"""
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = 'a209f0333f5a'
+down_revision = '4ab54becec04'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('refresh_token', sa.String(), nullable=True))
+        batch_op.add_column(sa.Column('was_refreshed', sa.Boolean(), server_default='0', nullable=False))
+        batch_op.create_index(batch_op.f('ix_indieauth_access_token_refresh_token'), ['refresh_token'], unique=True)
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_indieauth_access_token_refresh_token'))
+        batch_op.drop_column('was_refreshed')
+        batch_op.drop_column('refresh_token')
+
+    # ### end Alembic commands ###
app/activitypub.py
@@ -135,11 +135,6 @@ ME = {
     "url": config.ID + "/",  # XXX: the path is important for Mastodon compat
     "manuallyApprovesFollowers": config.CONFIG.manually_approves_followers,
     "attachment": _LOCAL_ACTOR_METADATA,
-    "icon": {
-        "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
-        "type": "Image",
-        "url": config.CONFIG.icon_url,
-    },
     "publicKey": {
         "id": f"{config.ID}#main-key",
         "owner": config.ID,

@@ -148,12 +143,26 @@ ME = {
     "tag": dedup_tags(_LOCAL_ACTOR_TAGS),
 }

+if config.CONFIG.icon_url:
+    ME["icon"] = {
+        "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
+        "type": "Image",
+        "url": config.CONFIG.icon_url,
+    }
+
 if ALSO_KNOWN_AS:
     ME["alsoKnownAs"] = [ALSO_KNOWN_AS]

 if MOVED_TO:
     ME["movedTo"] = MOVED_TO

+if config.CONFIG.image_url:
+    ME["image"] = {
+        "mediaType": mimetypes.guess_type(config.CONFIG.image_url)[0],
+        "type": "Image",
+        "url": config.CONFIG.image_url,
+    }
+

 class NotAnObjectError(Exception):
     def __init__(self, url: str, resp: httpx.Response | None = None) -> None:
app/actor.py (107 changed lines)
@@ -6,12 +6,17 @@ from functools import cached_property
 from typing import Union
 from urllib.parse import urlparse

 import httpx
 from loguru import logger
 from sqlalchemy import select
 from sqlalchemy.orm import joinedload

 from app import activitypub as ap
 from app import media
 from app.config import BASE_URL
 from app.config import USER_AGENT
 from app.config import USERNAME
 from app.config import WEBFINGER_DOMAIN
 from app.database import AsyncSession
 from app.utils.datetime import as_utc
 from app.utils.datetime import now

@@ -26,7 +31,38 @@ def _handle(raw_actor: ap.RawObject) -> str:
     if not domain.hostname:
         raise ValueError(f"Invalid actor ID {ap_id}")

-    return f'@{raw_actor["preferredUsername"]}@{domain.hostname}'  # type: ignore
+    handle = f'@{raw_actor["preferredUsername"]}@{domain.hostname}'  # type: ignore
+
+    # TODO: cleanup this
+    # Next, check for custom webfinger domains
+    resp: httpx.Response | None = None
+    for url in {
+        f"https://{domain.hostname}/.well-known/webfinger",
+        f"http://{domain.hostname}/.well-known/webfinger",
+    }:
+        try:
+            logger.info(f"Webfinger {handle} at {url}")
+            resp = httpx.get(
+                url,
+                params={"resource": f"acct:{handle[1:]}"},
+                headers={
+                    "User-Agent": USER_AGENT,
+                },
+                follow_redirects=True,
+            )
+            resp.raise_for_status()
+            break
+        except Exception:
+            logger.exception(f"Failed to webfinger {handle}")
+
+    if resp:
+        try:
+            json_resp = resp.json()
+            if json_resp.get("subject", "").startswith("acct:"):
+                return "@" + json_resp["subject"].removeprefix("acct:")
+        except Exception:
+            logger.exception(f"Failed to parse webfinger response for {handle}")
+    return handle


 class Actor:

@@ -60,7 +96,7 @@ class Actor:
             return self.name
         return self.preferred_username

-    @property
+    @cached_property
     def handle(self) -> str:
         return _handle(self.ap_actor)

@@ -82,11 +118,21 @@ class Actor:
     @property
     def icon_url(self) -> str | None:
-        return self.ap_actor.get("icon", {}).get("url")
+        if icon := self.ap_actor.get("icon"):
+            return icon.get("url")
+        return None

     @property
     def icon_media_type(self) -> str | None:
-        return self.ap_actor.get("icon", {}).get("mediaType")
+        if icon := self.ap_actor.get("icon"):
+            return icon.get("mediaType")
+        return None

+    @property
+    def image_url(self) -> str | None:
+        if image := self.ap_actor.get("image"):
+            return image.get("url")
+        return None
+
     @property
     def public_key_as_pem(self) -> str:

@@ -101,14 +147,14 @@ class Actor:
         if self.icon_url:
             return media.proxied_media_url(self.icon_url)
         else:
-            return "/static/nopic.png"
+            return BASE_URL + "/static/nopic.png"

     @property
     def resized_icon_url(self) -> str:
         if self.icon_url:
             return media.resized_media_url(self.icon_url, 50)
         else:
-            return "/static/nopic.png"
+            return BASE_URL + "/static/nopic.png"

     @property
     def tags(self) -> list[ap.RawObject]:

@@ -132,13 +178,18 @@ class Actor:


 class RemoteActor(Actor):
-    def __init__(self, ap_actor: ap.RawObject) -> None:
+    def __init__(self, ap_actor: ap.RawObject, handle: str | None = None) -> None:
         if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES:
             raise ValueError(f"Unexpected actor type: {ap_type}")

         self._ap_actor = ap_actor
         self._ap_type = ap_type

+        if handle is None:
+            handle = _handle(ap_actor)
+
+        self._handle = handle
+
     @property
     def ap_actor(self) -> ap.RawObject:
         return self._ap_actor

@@ -151,8 +202,12 @@ class RemoteActor(Actor):
     def is_from_db(self) -> bool:
         return False

+    @property
+    def handle(self) -> str:
+        return self._handle
+

-LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME)
+LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME, handle=f"@{USERNAME}@{WEBFINGER_DOMAIN}")


 async def save_actor(db_session: AsyncSession, ap_actor: ap.RawObject) -> "ActorModel":

@@ -214,24 +269,23 @@ async def fetch_actor(

     if save_if_not_found:
         ap_actor = await ap.fetch(actor_id)
-        # Some softwares uses URL when we expect ID
-        if actor_id == ap_actor.get("url"):
-            # Which mean we may already have it in DB
-            existing_actor_by_url = (
-                await db_session.scalars(
-                    select(models.Actor).where(
-                        models.Actor.ap_id == ap.get_id(ap_actor),
-                    )
-                )
-            ).one_or_none()
-            if existing_actor_by_url:
-                # Update the actor as we had to fetch it anyway
-                await update_actor_if_needed(
-                    db_session,
-                    existing_actor_by_url,
-                    RemoteActor(ap_actor),
-                )
-                return existing_actor_by_url
+        # Some softwares uses URL when we expect ID or uses a different casing
+        # (like Birdsite LIVE) , which mean we may already have it in DB
+        existing_actor_by_url = (
+            await db_session.scalars(
+                select(models.Actor).where(
+                    models.Actor.ap_id == ap.get_id(ap_actor),
+                )
+            )
+        ).one_or_none()
+        if existing_actor_by_url:
+            # Update the actor as we had to fetch it anyway
+            await update_actor_if_needed(
+                db_session,
+                existing_actor_by_url,
+                RemoteActor(ap_actor),
+            )
+            return existing_actor_by_url

         return await save_actor(db_session, ap_actor)
     else:

@@ -381,6 +435,9 @@ def _actor_hash(actor: Actor) -> bytes:
     if actor.icon_url:
         h.update(actor.icon_url.encode())

+    if actor.image_url:
+        h.update(actor.image_url.encode())
+
     if actor.attachments:
         for a in actor.attachments:
             if a.get("type") != "PropertyValue":
app/admin.py (127 changed lines)
@@ -1,4 +1,5 @@
 from datetime import datetime
+from urllib.parse import quote

 import httpx
 from fastapi import APIRouter

@@ -11,6 +12,7 @@ from fastapi.exceptions import HTTPException
 from fastapi.responses import RedirectResponse
 from loguru import logger
 from sqlalchemy import and_
 from sqlalchemy import delete
 from sqlalchemy import func
 from sqlalchemy import or_
 from sqlalchemy import select

@@ -29,6 +31,7 @@ from app.boxes import send_block
 from app.boxes import send_follow
 from app.boxes import send_unblock
 from app.config import EMOJIS
+from app.config import SESSION_TIMEOUT
 from app.config import generate_csrf_token
 from app.config import session_serializer
 from app.config import verify_csrf_token

@@ -57,18 +60,23 @@ async def user_session_or_redirect(

     _RedirectToLoginPage = HTTPException(
         status_code=302,
-        headers={"Location": request.url_for("login") + f"?redirect={redirect_url}"},
+        headers={
+            "Location": request.url_for("login") + f"?redirect={quote(redirect_url)}"
+        },
     )

     if not session:
         logger.info("No existing admin session")
         raise _RedirectToLoginPage

     try:
-        loaded_session = session_serializer.loads(session, max_age=3600 * 12)
+        loaded_session = session_serializer.loads(session, max_age=SESSION_TIMEOUT)
     except Exception:
         logger.exception("Failed to validate admin session")
         raise _RedirectToLoginPage

     if not loaded_session.get("is_logged_in"):
         logger.info(f"Admin session invalidated: {loaded_session}")
         raise _RedirectToLoginPage

     return None

@@ -181,8 +189,11 @@ async def admin_new(
         content += f"{in_reply_to_object.actor.handle} "
         for tag in in_reply_to_object.tags:
             if tag.get("type") == "Mention" and tag["name"] != LOCAL_ACTOR.handle:
-                mentioned_actor = await fetch_actor(db_session, tag["href"])
-                content += f"{mentioned_actor.handle} "
+                try:
+                    mentioned_actor = await fetch_actor(db_session, tag["href"])
+                    content += f"{mentioned_actor.handle} "
+                except Exception:
+                    logger.exception(f"Failed to lookup {mentioned_actor}")

     # Copy the content warning if any
     if in_reply_to_object.summary:

@@ -439,6 +450,7 @@ async def admin_direct_messages(
             models.InboxObject.ap_context.is_not(None),
+            # Skip transient object like poll relies
+            models.InboxObject.is_transient.is_(False),
             models.InboxObject.is_deleted.is_(False),
         )
         .group_by(models.InboxObject.ap_context, models.InboxObject.actor_id)
     )

@@ -461,6 +473,7 @@ async def admin_direct_messages(
             models.OutboxObject.ap_context.is_not(None),
+            # Skip transient object like poll relies
+            models.OutboxObject.is_transient.is_(False),
             models.OutboxObject.is_deleted.is_(False),
         )
         .group_by(models.OutboxObject.ap_context)
     )

@@ -716,13 +729,9 @@ async def get_notifications(
     actors_metadata = await get_actors_metadata(
         db_session, [notif.actor for notif in notifications if notif.actor]
     )

-    for notif in notifications:
-        notif.is_new = False
-    await db_session.commit()
-
     more_unread_count = 0
     next_cursor = None

     if notifications and remaining_count > page_size:
         decoded_next_cursor = notifications[-1].created_at
         next_cursor = pagination.encode_cursor(decoded_next_cursor)

@@ -736,7 +745,8 @@ async def get_notifications(
         )
     )

-    return await templates.render_template(
+    # Render the template before we change the new flag on notifications
+    tpl_resp = await templates.render_template(
         db_session,
         request,
         "notifications.html",

@@ -748,6 +758,13 @@ async def get_notifications(
         },
     )

+    if len({notif.id for notif in notifications if notif.is_new}):
+        for notif in notifications:
+            notif.is_new = False
+        await db_session.commit()
+
+    return tpl_resp
+

 @router.get("/object")
 async def admin_object(

@@ -850,6 +867,66 @@ async def admin_profile(
     )


+@router.post("/actions/force_delete")
+async def admin_actions_force_delete(
+    request: Request,
+    ap_object_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    ap_object_to_delete = await get_inbox_object_by_ap_id(db_session, ap_object_id)
+    if not ap_object_to_delete:
+        raise ValueError(f"Cannot find {ap_object_id}")
+
+    logger.info(f"Deleting {ap_object_to_delete.ap_type}/{ap_object_to_delete.ap_id}")
+    await boxes._revert_side_effect_for_deleted_object(
+        db_session,
+        None,
+        ap_object_to_delete,
+        None,
+    )
+    ap_object_to_delete.is_deleted = True
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/force_delete_webmention")
+async def admin_actions_force_delete_webmention(
+    request: Request,
+    webmention_id: int = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    webmention = await boxes.get_webmention_by_id(db_session, webmention_id)
+    if not webmention:
+        raise ValueError(f"Cannot find {webmention_id}")
+    if not webmention.outbox_object:
+        raise ValueError(f"Missing related outbox object for {webmention_id}")
+
+    # TODO: move this
+    logger.info(f"Deleting {webmention_id}")
+    webmention.is_deleted = True
+    await db_session.flush()
+    from app.webmentions import _handle_webmention_side_effects
+
+    await _handle_webmention_side_effects(
+        db_session, webmention, webmention.outbox_object
+    )
+    # Delete related notifications
+    notif_deletion_result = await db_session.execute(
+        delete(models.Notification)
+        .where(models.Notification.webmention_id == webmention.id)
+        .execution_options(synchronize_session=False)
+    )
+    logger.info(
+        f"Deleted {notif_deletion_result.rowcount} notifications"  # type: ignore
+    )
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
 @router.post("/actions/follow")
 async def admin_actions_follow(
     request: Request,

@@ -888,6 +965,34 @@ async def admin_actions_unblock(
     return RedirectResponse(redirect_url, status_code=302)


+@router.post("/actions/hide_announces")
+async def admin_actions_hide_announces(
+    request: Request,
+    ap_actor_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    actor = await fetch_actor(db_session, ap_actor_id)
+    actor.are_announces_hidden_from_stream = True
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
+@router.post("/actions/show_announces")
+async def admin_actions_show_announces(
+    request: Request,
+    ap_actor_id: str = Form(),
+    redirect_url: str = Form(),
+    csrf_check: None = Depends(verify_csrf_token),
+    db_session: AsyncSession = Depends(get_db_session),
+) -> RedirectResponse:
+    actor = await fetch_actor(db_session, ap_actor_id)
+    actor.are_announces_hidden_from_stream = False
+    await db_session.commit()
+    return RedirectResponse(redirect_url, status_code=302)
+
+
 @router.post("/actions/delete")
 async def admin_actions_delete(
     request: Request,

@@ -1080,7 +1185,7 @@ async def admin_actions_new(
     elif name:
         ap_type = "Article"

-    public_id = await boxes.send_create(
+    public_id, _ = await boxes.send_create(
         db_session,
         ap_type=ap_type,
         source=content,
app/ap_object.py
@@ -12,6 +12,7 @@ from app import activitypub as ap
 from app.actor import LOCAL_ACTOR
 from app.actor import Actor
 from app.actor import RemoteActor
+from app.config import ID
 from app.media import proxied_media_url
 from app.utils.datetime import now
 from app.utils.datetime import parse_isoformat

@@ -96,6 +97,9 @@ class Object:
     def attachments(self) -> list["Attachment"]:
         attachments = []
         for obj in ap.as_list(self.ap_object.get("attachment", [])):
+            if obj.get("type") == "PropertyValue":
+                continue
+
             if obj.get("type") == "Link":
                 attachments.append(
                     Attachment.parse_obj(

@@ -209,6 +213,15 @@ class Object:
     def in_reply_to(self) -> str | None:
         return self.ap_object.get("inReplyTo")

+    @property
+    def is_local_reply(self) -> bool:
+        if not self.in_reply_to:
+            return False
+
+        return bool(
+            self.in_reply_to.startswith(ID) and self.content  # Hide votes from Question
+        )
+
     @property
     def is_in_reply_to_from_inbox(self) -> bool | None:
         if not self.in_reply_to:

@@ -277,6 +290,9 @@ class Attachment(BaseModel):
     proxied_url: str | None = None
     resized_url: str | None = None

+    width: int | None = None
+    height: int | None = None
+
     @property
     def mimetype(self) -> str:
         mimetype = self.media_type
app/boxes.py (420 changed lines)
@@ -1,4 +1,5 @@
 """Actions related to the AP inbox/outbox."""
+import datetime
 import uuid
 from collections import defaultdict
 from dataclasses import dataclass

@@ -27,10 +28,11 @@ from app.actor import save_actor
 from app.actor import update_actor_if_needed
 from app.ap_object import RemoteObject
 from app.config import BASE_URL
-from app.config import BLOCKED_SERVERS
 from app.config import ID
 from app.config import MANUALLY_APPROVES_FOLLOWERS
 from app.config import set_moved_to
+from app.config import stream_visibility_callback
+from app.customization import ObjectInfo
 from app.database import AsyncSession
 from app.outgoing_activities import new_outgoing_activity
 from app.source import dedup_tags

@@ -41,11 +43,24 @@ from app.utils import webmentions
 from app.utils.datetime import as_utc
 from app.utils.datetime import now
 from app.utils.datetime import parse_isoformat
+from app.utils.facepile import WebmentionReply
+from app.utils.text import slugify
+from app.utils.url import is_hostname_blocked

 AnyboxObject = models.InboxObject | models.OutboxObject


+def is_notification_enabled(notification_type: models.NotificationType) -> bool:
+    """Checks if a given notification type is enabled."""
+    if notification_type.value == "pending_incoming_follower":
+        # This one cannot be disabled as it would prevent manually reviewing
+        # follow requests.
+        return True
+    if notification_type.value in config.CONFIG.disabled_notifications:
+        return False
+    return True
+
+
 def allocate_outbox_id() -> str:
     return uuid.uuid4().hex

@@ -164,12 +179,13 @@ async def send_block(db_session: AsyncSession, ap_actor_id: str) -> None:
     await new_outgoing_activity(db_session, actor.inbox_url, outbox_object.id)

     # 4. Create a notification
-    notif = models.Notification(
-        notification_type=models.NotificationType.BLOCK,
-        actor_id=actor.id,
-        outbox_object_id=outbox_object.id,
-    )
-    db_session.add(notif)
+    if is_notification_enabled(models.NotificationType.BLOCK):
+        notif = models.Notification(
+            notification_type=models.NotificationType.BLOCK,
+            actor_id=actor.id,
+            outbox_object_id=outbox_object.id,
+        )
+        db_session.add(notif)

     await db_session.commit()

@@ -201,7 +217,7 @@ async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
         raise ValueError("Should never happen")

     outbox_object_to_delete.is_deleted = True
-    await db_session.commit()
+    await db_session.flush()

     # Compute the original recipients
     recipients = await _compute_recipients(

@@ -216,14 +232,17 @@ async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
             db_session, outbox_object_to_delete.in_reply_to
         )
         if replied_object:
-            new_replies_count = await _get_replies_count(
-                db_session, replied_object.ap_id
-            )
+            if replied_object.is_from_outbox:
+                # Different helper here because we also count webmentions
+                new_replies_count = await _get_outbox_replies_count(
+                    db_session, replied_object  # type: ignore
+                )
+            else:
+                new_replies_count = await _get_replies_count(
+                    db_session, replied_object.ap_id
+                )

             replied_object.replies_count = new_replies_count
             if replied_object.replies_count < 0:
                 logger.warning("negative replies count for {replied_object.ap_id}")
                 replied_object.replies_count = 0
         else:
             logger.info(f"{outbox_object_to_delete.in_reply_to} not found")

@@ -420,7 +439,9 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
         announced_object.announced_via_outbox_object_ap_id = None

         # Send the Undo to the original recipients
-        recipients = await _compute_recipients(db_session, outbox_object.ap_object)
+        recipients = await _compute_recipients(
+            db_session, outbox_object_to_undo.ap_object
+        )
         for rcp in recipients:
             await new_outgoing_activity(db_session, rcp, outbox_object.id)
     elif outbox_object_to_undo.ap_type == "Block":

@@ -440,12 +461,13 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
             outbox_object.id,
         )

-        notif = models.Notification(
-            notification_type=models.NotificationType.UNBLOCK,
-            actor_id=blocked_actor.id,
-            outbox_object_id=outbox_object.id,
-        )
-        db_session.add(notif)
+        if is_notification_enabled(models.NotificationType.UNBLOCK):
+            notif = models.Notification(
+                notification_type=models.NotificationType.UNBLOCK,
+                actor_id=blocked_actor.id,
+                outbox_object_id=outbox_object.id,
+            )
+            db_session.add(notif)

     else:
         raise ValueError("Should never happen")

@@ -570,7 +592,7 @@ async def send_create(
     poll_answers: list[str] | None = None,
     poll_duration_in_minutes: int | None = None,
     name: str | None = None,
-) -> str:
+) -> tuple[str, models.OutboxObject]:
     note_id = allocate_outbox_id()
     published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z")
     context = f"{ID}/contexts/" + uuid.uuid4().hex

@@ -745,7 +767,7 @@ async def send_create(

     await db_session.commit()

-    return note_id
+    return note_id, outbox_object


 async def send_vote(

@@ -928,7 +950,7 @@ async def compute_all_known_recipients(db_session: AsyncSession) -> set[str]:
 }


-async def _get_following(db_session: AsyncSession) -> list[models.Follower]:
+async def _get_following(db_session: AsyncSession) -> list[models.Following]:
     return (
         (
             await db_session.scalars(

@@ -1048,6 +1070,32 @@ async def get_outbox_object_by_ap_id(
     )  # type: ignore


+async def get_outbox_object_by_slug_and_short_id(
+    db_session: AsyncSession,
+    slug: str,
+    short_id: str,
+) -> models.OutboxObject | None:
+    return (
+        (
+            await db_session.execute(
+                select(models.OutboxObject)
+                .options(
+                    joinedload(models.OutboxObject.outbox_object_attachments).options(
+                        joinedload(models.OutboxObjectAttachment.upload)
+                    )
+                )
+                .where(
+                    models.OutboxObject.public_id.like(f"{short_id}%"),
+                    models.OutboxObject.slug == slug,
+                    models.OutboxObject.is_deleted.is_(False),
+                )
+            )
+        )
+        .unique()
+        .scalar_one_or_none()
+    )
+
+
 async def get_anybox_object_by_ap_id(
     db_session: AsyncSession, ap_id: str
 ) -> AnyboxObject | None:

@@ -1057,6 +1105,20 @@ async def get_anybox_object_by_ap_id(
     return await get_inbox_object_by_ap_id(db_session, ap_id)


+async def get_webmention_by_id(
+    db_session: AsyncSession, webmention_id: int
+) -> models.Webmention | None:
+    return (
+        await db_session.execute(
+            select(models.Webmention)
+            .where(models.Webmention.id == webmention_id)
+            .options(
+                joinedload(models.Webmention.outbox_object),
+            )
+        )
+    ).scalar_one_or_none()  # type: ignore
+
+
 async def _handle_delete_activity(
     db_session: AsyncSession,
     from_actor: models.Actor,

@@ -1124,6 +1186,23 @@ async def _handle_delete_activity(
             logger.info("Removing actor from follower")
             await db_session.delete(follower)

+            # Also mark Follow activities for this actor as deleted
+            follow_activities = (
+                await db_session.scalars(
+                    select(models.OutboxObject).where(
+                        models.OutboxObject.ap_type == "Follow",
+                        models.OutboxObject.relates_to_actor_id
+                        == ap_object_to_delete.id,
+                        models.OutboxObject.is_deleted.is_(False),
+                    )
+                )
+            ).all()
+            for follow_activity in follow_activities:
+                logger.info(
+                    f"Marking Follow activity {follow_activity.ap_id} as deleted"
+                )
+                follow_activity.is_deleted = True
+
             following = (
                 await db_session.scalars(
                     select(models.Following).where(

@@ -1184,9 +1263,70 @@ async def _get_replies_count(
     )


+async def _get_outbox_replies_count(
+    db_session: AsyncSession,
+    outbox_object: models.OutboxObject,
+) -> int:
+    return (await _get_replies_count(db_session, outbox_object.ap_id)) + (
+        await db_session.scalar(
+            select(func.count(models.Webmention.id)).where(
+                models.Webmention.is_deleted.is_(False),
+                models.Webmention.outbox_object_id == outbox_object.id,
+                models.Webmention.webmention_type == models.WebmentionType.REPLY,
+            )
+        )
+    )
+
+
+async def _get_outbox_likes_count(
+    db_session: AsyncSession,
+    outbox_object: models.OutboxObject,
+) -> int:
+    return (
+        await db_session.scalar(
+            select(func.count(models.InboxObject.id)).where(
+                models.InboxObject.ap_type == "Like",
+                models.InboxObject.relates_to_outbox_object_id == outbox_object.id,
+                models.InboxObject.is_deleted.is_(False),
+            )
+        )
+    ) + (
+        await db_session.scalar(
+            select(func.count(models.Webmention.id)).where(
+                models.Webmention.is_deleted.is_(False),
+                models.Webmention.outbox_object_id == outbox_object.id,
+                models.Webmention.webmention_type == models.WebmentionType.LIKE,
+            )
+        )
+    )
+
+
+async def _get_outbox_announces_count(
+    db_session: AsyncSession,
+    outbox_object: models.OutboxObject,
+) -> int:
+    return (
+        await db_session.scalar(
+            select(func.count(models.InboxObject.id)).where(
+                models.InboxObject.ap_type == "Announce",
+                models.InboxObject.relates_to_outbox_object_id == outbox_object.id,
+                models.InboxObject.is_deleted.is_(False),
+            )
+        )
+    ) + (
+        await db_session.scalar(
+            select(func.count(models.Webmention.id)).where(
+                models.Webmention.is_deleted.is_(False),
+                models.Webmention.outbox_object_id == outbox_object.id,
+                models.Webmention.webmention_type == models.WebmentionType.REPOST,
+            )
+        )
+    )
+
+
 async def _revert_side_effect_for_deleted_object(
     db_session: AsyncSession,
-    delete_activity: models.InboxObject,
+    delete_activity: models.InboxObject | None,
     deleted_ap_object: models.InboxObject,
     forwarded_by_actor: models.Actor | None,
 ) -> None:

@@ -1214,8 +1354,8 @@ async def _revert_side_effect_for_deleted_object(
             # also needs to be forwarded
             is_delete_needs_to_be_forwarded = True

-            new_replies_count = await _get_replies_count(
-                db_session, replied_object.ap_id
+            new_replies_count = await _get_outbox_replies_count(
+                db_session, replied_object  # type: ignore
             )

             await db_session.execute(

@@ -1223,7 +1363,7 @@ async def _revert_side_effect_for_deleted_object(
                 .where(
                     models.OutboxObject.id == replied_object.id,
                 )
-                .values(replies_count=new_replies_count)
+                .values(replies_count=new_replies_count - 1)
             )
         else:
             new_replies_count = await _get_replies_count(

@@ -1235,7 +1375,7 @@ async def _revert_side_effect_for_deleted_object(
                 .where(
                     models.InboxObject.id == replied_object.id,
                 )
-                .values(replies_count=new_replies_count)
+                .values(replies_count=new_replies_count - 1)
             )

     if deleted_ap_object.ap_type == "Like" and deleted_ap_object.activity_object_ap_id:

@@ -1245,15 +1385,16 @@ async def _revert_side_effect_for_deleted_object(
         )
         if related_object:
             if related_object.is_from_outbox:
+                likes_count = await _get_outbox_likes_count(db_session, related_object)
                 await db_session.execute(
                     update(models.OutboxObject)
                     .where(
                         models.OutboxObject.id == related_object.id,
                     )
-                    .values(likes_count=models.OutboxObject.likes_count - 1)
+                    .values(likes_count=likes_count - 1)
                 )
     elif (
-        deleted_ap_object.ap_type == "Annouce"
+        deleted_ap_object.ap_type == "Announce"
         and deleted_ap_object.activity_object_ap_id
     ):
         related_object = await get_outbox_object_by_ap_id(

@@ -1262,12 +1403,15 @@ async def _revert_side_effect_for_deleted_object(
         )
         if related_object:
             if related_object.is_from_outbox:
+                announces_count = await _get_outbox_announces_count(
+                    db_session, related_object
+                )
                 await db_session.execute(
                     update(models.OutboxObject)
                     .where(
                         models.OutboxObject.id == related_object.id,
                     )
-                    .values(announces_count=models.OutboxObject.announces_count - 1)
+                    .values(announces_count=announces_count - 1)
                 )

     # Delete any Like/Announce

@@ -1282,7 +1426,8 @@ async def _revert_side_effect_for_deleted_object(
     # If it's a local replies, it was forwarded, so we also need to forward
     # the Delete activity if possible
     if (
-        delete_activity.activity_object_ap_id == deleted_ap_object.ap_id
+        delete_activity
+        and delete_activity.activity_object_ap_id == deleted_ap_object.ap_id
         and delete_activity.has_ld_signature
         and is_delete_needs_to_be_forwarded
     ):

@@ -1395,11 +1540,12 @@ async def _send_accept(
         raise ValueError("Should never happen")
     await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)

-    notif = models.Notification(
-        notification_type=models.NotificationType.NEW_FOLLOWER,
-        actor_id=from_actor.id,
-    )
-    db_session.add(notif)
+    if is_notification_enabled(models.NotificationType.NEW_FOLLOWER):
+        notif = models.Notification(
+            notification_type=models.NotificationType.NEW_FOLLOWER,
+            actor_id=from_actor.id,
+        )
+        db_session.add(notif)


 async def send_reject(

@@ -1438,11 +1584,12 @@ async def _send_reject(
         raise ValueError("Should never happen")
     await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)

-    notif = models.Notification(
-        notification_type=models.NotificationType.REJECTED_FOLLOWER,
-        actor_id=from_actor.id,
-    )
-    db_session.add(notif)
+    if is_notification_enabled(models.NotificationType.REJECTED_FOLLOWER):
+        notif = models.Notification(
+            notification_type=models.NotificationType.REJECTED_FOLLOWER,
+            actor_id=from_actor.id,
+        )
+        db_session.add(notif)


 async def _handle_undo_activity(

@@ -1468,11 +1615,12 @@ async def _handle_undo_activity(
                 models.Follower.inbox_object_id == ap_activity_to_undo.id
             )
         )
-        notif = models.Notification(
-            notification_type=models.NotificationType.UNFOLLOW,
-            actor_id=from_actor.id,
-        )
-        db_session.add(notif)
+        if is_notification_enabled(models.NotificationType.UNFOLLOW):
+            notif = models.Notification(
+                notification_type=models.NotificationType.UNFOLLOW,
+                actor_id=from_actor.id,
+            )
+            db_session.add(notif)

     elif ap_activity_to_undo.ap_type == "Like":
         if not ap_activity_to_undo.activity_object_ap_id:

@@ -1488,14 +1636,21 @@ async def _handle_undo_activity(
             )
             return

-        liked_obj.likes_count = models.OutboxObject.likes_count - 1
-        notif = models.Notification(
-            notification_type=models.NotificationType.UNDO_LIKE,
-            actor_id=from_actor.id,
-            outbox_object_id=liked_obj.id,
-            inbox_object_id=ap_activity_to_undo.id,
-        )
-        db_session.add(notif)
+        liked_obj.likes_count = (
+            await _get_outbox_likes_count(
+                db_session,
+                liked_obj,
+            )
+            - 1
+        )
+        if is_notification_enabled(models.NotificationType.UNDO_LIKE):
+            notif = models.Notification(
+                notification_type=models.NotificationType.UNDO_LIKE,
+                actor_id=from_actor.id,
+                outbox_object_id=liked_obj.id,
+                inbox_object_id=ap_activity_to_undo.id,
+            )
+            db_session.add(notif)

     elif ap_activity_to_undo.ap_type == "Announce":
         if not ap_activity_to_undo.activity_object_ap_id:

@@ -1513,20 +1668,22 @@ async def _handle_undo_activity(
             announced_obj_from_outbox.announces_count = (
                 models.OutboxObject.announces_count - 1
             )
-            notif = models.Notification(
-                notification_type=models.NotificationType.UNDO_ANNOUNCE,
-                actor_id=from_actor.id,
-                outbox_object_id=announced_obj_from_outbox.id,
-                inbox_object_id=ap_activity_to_undo.id,
-            )
-            db_session.add(notif)
+            if is_notification_enabled(models.NotificationType.UNDO_ANNOUNCE):
+                notif = models.Notification(
+                    notification_type=models.NotificationType.UNDO_ANNOUNCE,
+                    actor_id=from_actor.id,
+                    outbox_object_id=announced_obj_from_outbox.id,
+                    inbox_object_id=ap_activity_to_undo.id,
+                )
+                db_session.add(notif)
     elif ap_activity_to_undo.ap_type == "Block":
-        notif = models.Notification(
-            notification_type=models.NotificationType.UNBLOCKED,
-            actor_id=from_actor.id,
-            inbox_object_id=ap_activity_to_undo.id,
-        )
-        db_session.add(notif)
+        if is_notification_enabled(models.NotificationType.UNBLOCKED):
+            notif = models.Notification(
+                notification_type=models.NotificationType.UNBLOCKED,
+                actor_id=from_actor.id,
+                inbox_object_id=ap_activity_to_undo.id,
+            )
+            db_session.add(notif)
     else:
         logger.warning(f"Don't know how to undo {ap_activity_to_undo.ap_type} activity")

@@ -1590,12 +1747,13 @@ async def _handle_move_activity(
     else:
         logger.info(f"Already following target {new_actor_id}")

-    notif = models.Notification(
-        notification_type=models.NotificationType.MOVE,
-        actor_id=new_actor.id,
-        inbox_object_id=move_activity.id,
-    )
-    db_session.add(notif)
+    if is_notification_enabled(models.NotificationType.MOVE):
+        notif = models.Notification(
+            notification_type=models.NotificationType.MOVE,
+            actor_id=new_actor.id,
+            inbox_object_id=move_activity.id,
+        )
+        db_session.add(notif)


 async def _handle_update_activity(

@@ -1753,16 +1911,26 @@ async def _process_note_object(

     is_from_following = ro.actor.ap_id in {f.ap_actor_id for f in following}
     is_reply = bool(ro.in_reply_to)
-    is_local_reply = (
-        ro.in_reply_to
-        and ro.in_reply_to.startswith(BASE_URL)
-        and ro.content  # Hide votes from Question
-    )
+    is_local_reply = ro.is_local_reply
     is_mention = False
+    hashtags = []
     tags = ro.ap_object.get("tag", [])
     for tag in ap.as_list(tags):
         if tag.get("name") == LOCAL_ACTOR.handle or tag.get("href") == LOCAL_ACTOR.url:
             is_mention = True
+        if tag.get("type") == "Hashtag":
+            if tag_name := tag.get("name"):
+                hashtags.append(tag_name)
+
+    object_info = ObjectInfo(
+        is_reply=is_reply,
+        is_local_reply=is_local_reply,
+        is_mention=is_mention,
+        is_from_following=is_from_following,
+        hashtags=hashtags,
+        actor_handle=ro.actor.handle,
+        remote_object=ro,
+    )

     inbox_object = models.InboxObject(
         server=urlparse(ro.ap_id).hostname,

@@ -1780,9 +1948,7 @@ async def _process_note_object(
         activity_object_ap_id=ro.activity_object_ap_id,
         og_meta=await opengraph.og_meta_from_note(db_session, ro),
-        # Hide replies from the stream
-        is_hidden_from_stream=not (
-            (not is_reply and is_from_following) or is_mention or is_local_reply
-        ),
+        is_hidden_from_stream=not stream_visibility_callback(object_info),
         # We may already have some replies in DB
         replies_count=await _get_replies_count(db_session, ro.ap_id),
     )

@@ -1808,8 +1974,8 @@ async def _process_note_object(
                 replied_object,  # type: ignore  # outbox check below
             )
         else:
-            new_replies_count = await _get_replies_count(
-                db_session, replied_object.ap_id
+            new_replies_count = await _get_outbox_replies_count(
+                db_session, replied_object  # type: ignore
             )

         await db_session.execute(

@@ -1857,7 +2023,7 @@ async def _process_note_object(
             inbox_object_id=parent_activity.id,
         )

-    if is_mention:
+    if is_mention and is_notification_enabled(models.NotificationType.MENTION):
         notif = models.Notification(
             notification_type=models.NotificationType.MENTION,
             actor_id=from_actor.id,

@@ -1956,13 +2122,14 @@ async def _handle_announce_activity(
             models.OutboxObject.announces_count + 1
         )

-        notif = models.Notification(
-            notification_type=models.NotificationType.ANNOUNCE,
-            actor_id=actor.id,
-            outbox_object_id=relates_to_outbox_object.id,
-            inbox_object_id=announce_activity.id,
-        )
-        db_session.add(notif)
+        if is_notification_enabled(models.NotificationType.ANNOUNCE):
+            notif = models.Notification(
+                notification_type=models.NotificationType.ANNOUNCE,
+                actor_id=actor.id,
+                outbox_object_id=relates_to_outbox_object.id,
+                inbox_object_id=announce_activity.id,
+            )
+            db_session.add(notif)
     else:
         # Only show the announce in the stream if it comes from an actor
         # in the following collection

@@ -2038,7 +2205,10 @@ async def _handle_announce_activity(
         db_session.add(announced_inbox_object)
         await db_session.flush()
         announce_activity.relates_to_inbox_object_id = announced_inbox_object.id
-        announce_activity.is_hidden_from_stream = not is_from_following
+        announce_activity.is_hidden_from_stream = (
+            not is_from_following
+            or announce_activity.actor.are_announces_hidden_from_stream
+        )


 async def _handle_like_activity(

@@ -2055,15 +2225,19 @@ async def _handle_like_activity(
         )
         await db_session.delete(like_activity)
     else:
-        relates_to_outbox_object.likes_count = models.OutboxObject.likes_count + 1
-
-        notif = models.Notification(
-            notification_type=models.NotificationType.LIKE,
-            actor_id=actor.id,
-            outbox_object_id=relates_to_outbox_object.id,
-            inbox_object_id=like_activity.id,
-        )
-        db_session.add(notif)
+        relates_to_outbox_object.likes_count = await _get_outbox_likes_count(
+            db_session,
+            relates_to_outbox_object,
+        )
+
+        if is_notification_enabled(models.NotificationType.LIKE):
+            notif = models.Notification(
+                notification_type=models.NotificationType.LIKE,
+                actor_id=actor.id,
+                outbox_object_id=relates_to_outbox_object.id,
+                inbox_object_id=like_activity.id,
+            )
+            db_session.add(notif)


 async def _handle_block_activity(

@@ -2080,12 +2254,13 @@ async def _handle_block_activity(
         return

     # Create a notification
-    notif = models.Notification(
-        notification_type=models.NotificationType.BLOCKED,
-        actor_id=actor.id,
-        inbox_object_id=block_activity.id,
-    )
-    db_session.add(notif)
+    if is_notification_enabled(models.NotificationType.BLOCKED):
+        notif = models.Notification(
+            notification_type=models.NotificationType.BLOCKED,
+            actor_id=actor.id,
+            inbox_object_id=block_activity.id,
+        )
+        db_session.add(notif)


 async def _process_transient_object(

@@ -2140,7 +2315,7 @@ async def save_to_inbox(
         logger.exception("Failed to fetch actor")
         return

-    if actor.server in BLOCKED_SERVERS:
+    if is_hostname_blocked(actor.server):
         logger.warning(f"Server {actor.server} is blocked")
         return

@@ -2288,12 +2463,13 @@ async def save_to_inbox(
             if activity_ro.ap_type == "Accept"
             else models.NotificationType.FOLLOW_REQUEST_REJECTED
         )
-        notif = models.Notification(
-            notification_type=notif_type,
-            actor_id=actor.id,
-            inbox_object_id=inbox_object.id,
-        )
-        db_session.add(notif)
+        if is_notification_enabled(notif_type):
+            notif = models.Notification(
+                notification_type=notif_type,
+                actor_id=actor.id,
+                inbox_object_id=inbox_object.id,
+            )
+            db_session.add(notif)

         if activity_ro.ap_type == "Accept":
             following = models.Following(

@@ -2466,11 +2642,21 @@ async def fetch_actor_collection(db_session: AsyncSession, url: str) -> list[Actor]:

 @dataclass
 class ReplyTreeNode:
-    ap_object: AnyboxObject
+    ap_object: AnyboxObject | None
+    wm_reply: WebmentionReply | None
+
     children: list["ReplyTreeNode"]
     is_requested: bool = False
     is_root: bool = False

+    @property
+    def published_at(self) -> datetime.datetime:
+        if self.ap_object:
+            return self.ap_object.ap_published_at  # type: ignore
+        elif self.wm_reply:
+            return self.wm_reply.published_at
+        else:
+            raise ValueError(f"Should never happen: {self}")


 async def get_replies_tree(
     db_session: AsyncSession,

@@ -2544,6 +2730,7 @@ async def get_replies_tree(
         for child in index.get(node.ap_object.ap_id, []):  # type: ignore
             child_node = ReplyTreeNode(
                 ap_object=child,
+                wm_reply=None,
                 is_requested=child.ap_id == requested_object.ap_id,  # type: ignore
                 children=[],
             )

@@ -2552,7 +2739,7 @@ async def get_replies_tree(

         return sorted(
             children,
-            key=lambda node: node.ap_object.ap_published_at,  # type: ignore
+            key=lambda node: node.published_at,
         )

     if None in nodes_by_in_reply_to:

@@ -2565,6 +2752,7 @@ async def get_replies_tree(

     root_node = ReplyTreeNode(
         ap_object=root_ap_object,
+        wm_reply=None,
         is_root=True,
         is_requested=root_ap_object.ap_id == requested_object.ap_id,
         children=[],
@ -1,4 +1,5 @@
|
|||
import hashlib
|
||||
import hmac
|
||||
import os
|
||||
import secrets
|
||||
from pathlib import Path
|
||||
|
@ -15,6 +16,8 @@ from loguru import logger
|
|||
from mistletoe import markdown # type: ignore
|
||||
|
||||
from app.customization import _CUSTOM_ROUTES
|
||||
from app.customization import _StreamVisibilityCallback
|
||||
from app.customization import default_stream_visibility_callback
|
||||
from app.utils.emoji import _load_emojis
|
||||
from app.utils.version import get_version_commit
|
||||
|
||||
|
@ -41,11 +44,14 @@ except FileNotFoundError:
|
|||
JS_HASH = "none"
|
||||
try:
|
||||
# To keep things simple, we keep a single hash for the 2 files
|
||||
js_data_common = (ROOT_DIR / "app" / "static" / "common-admin.js").read_bytes()
|
||||
js_data_new = (ROOT_DIR / "app" / "static" / "new.js").read_bytes()
|
||||
JS_HASH = hashlib.md5(
|
||||
js_data_common + js_data_new, usedforsecurity=False
|
||||
).hexdigest()
|
||||
dat = b""
|
||||
for j in [
|
||||
ROOT_DIR / "app" / "static" / "common.js",
|
||||
ROOT_DIR / "app" / "static" / "common-admin.js",
|
||||
ROOT_DIR / "app" / "static" / "new.js",
|
||||
]:
|
||||
dat += j.read_bytes()
|
||||
JS_HASH = hashlib.md5(dat, usedforsecurity=False).hexdigest()
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
|
@ -90,7 +96,8 @@ class Config(pydantic.BaseModel):
|
|||
name: str
|
||||
summary: str
|
||||
https: bool
|
||||
icon_url: str
|
||||
icon_url: str | None = None
|
||||
image_url: str | None = None
|
||||
secret: str
|
||||
debug: bool = False
|
||||
trusted_hosts: list[str] = ["127.0.0.1"]
|
||||
|
@ -108,10 +115,22 @@ class Config(pydantic.BaseModel):
|
|||
|
||||
inbox_retention_days: int = 15
|
||||
|
||||
custom_content_security_policy: str | None = None
|
||||
|
||||
webfinger_domain: str | None = None
|
||||
|
||||
# Config items to make tests easier
|
||||
sqlalchemy_database: str | None = None
|
||||
key_path: str | None = None
|
||||
|
||||
session_timeout: int = 3600 * 24 * 3 # in seconds, 3 days by default
|
||||
csrf_token_exp: int = 3600
|
||||
|
||||
disabled_notifications: list[str] = []
|
||||
|
||||
# Only set when the app is served on a non-root path
|
||||
id: str | None = None
|
||||
|
||||
|
||||
def load_config() -> Config:
|
||||
try:
|
||||
|
@ -146,7 +165,16 @@ CONFIG = load_config()
|
|||
DOMAIN = CONFIG.domain
|
||||
_SCHEME = "https" if CONFIG.https else "http"
|
||||
ID = f"{_SCHEME}://{DOMAIN}"
|
||||
|
||||
# When running the app on a path, the ID may be set by the config, but in this
|
||||
# case, a valid webfinger must be served on the root domain
|
||||
if CONFIG.id:
|
||||
ID = CONFIG.id
|
||||
USERNAME = CONFIG.username
|
||||
|
||||
# Allow using @handle@webfinger-domain.tld while hosting the server at domain.tld
|
||||
WEBFINGER_DOMAIN = CONFIG.webfinger_domain or DOMAIN
|
||||
|
||||
MANUALLY_APPROVES_FOLLOWERS = CONFIG.manually_approves_followers
|
||||
HIDES_FOLLOWERS = CONFIG.hides_followers
|
||||
HIDES_FOLLOWING = CONFIG.hides_following
|
||||
|
@ -156,8 +184,10 @@ if CONFIG.privacy_replace:
|
|||
|
||||
BLOCKED_SERVERS = {blocked_server.hostname for blocked_server in CONFIG.blocked_servers}
|
||||
ALSO_KNOWN_AS = CONFIG.also_known_as
|
||||
CUSTOM_CONTENT_SECURITY_POLICY = CONFIG.custom_content_security_policy
|
||||
|
||||
INBOX_RETENTION_DAYS = CONFIG.inbox_retention_days
|
||||
SESSION_TIMEOUT = CONFIG.session_timeout
|
||||
CUSTOM_FOOTER = (
|
||||
markdown(CONFIG.custom_footer.replace("{version}", VERSION))
|
||||
if CONFIG.custom_footer
|
||||
|
@ -176,7 +206,9 @@ if CONFIG.emoji:
|
|||
EMOJIS = CONFIG.emoji
|
||||
|
||||
# Emoji template for the FE
|
||||
EMOJI_TPL = '<img src="/static/twemoji/{filename}.svg" alt="{raw}" class="emoji">'
|
||||
EMOJI_TPL = (
|
||||
'<img src="{base_url}/static/twemoji/{filename}.svg" alt="{raw}" class="emoji">'
|
||||
)
|
||||
|
||||
_load_emojis(ROOT_DIR, BASE_URL)
|
||||
|
||||
|
@ -232,7 +264,7 @@ def verify_csrf_token(
|
|||
if redirect_url:
|
||||
please_try_again = f'<a href="{redirect_url}">please try again</a>'
|
||||
try:
|
||||
csrf_serializer.loads(csrf_token, max_age=1800)
|
||||
csrf_serializer.loads(csrf_token, max_age=CONFIG.csrf_token_exp)
|
||||
except (itsdangerous.BadData, itsdangerous.SignatureExpired):
|
||||
logger.exception("Failed to verify CSRF token")
|
||||
raise HTTPException(
|
||||
|
@ -240,3 +272,18 @@ def verify_csrf_token(
|
|||
detail=f"The security token has expired, {please_try_again}",
|
||||
)
|
||||
return None
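The CSRF token is a signed, timestamped value whose age is now bounded by CONFIG.csrf_token_exp. A minimal round-trip sketch with itsdangerous (the serializer construction and secret below are illustrative, not taken from this codebase):

    # Hedged sketch: issuing and checking a timestamped token with itsdangerous,
    # mirroring the max_age check performed in verify_csrf_token above.
    import itsdangerous

    serializer = itsdangerous.URLSafeTimedSerializer("placeholder-secret", salt="csrf")
    token = serializer.dumps("session-id")
    try:
        serializer.loads(token, max_age=3600)  # rejects tokens older than one hour
        print("token accepted")
    except (itsdangerous.SignatureExpired, itsdangerous.BadData):
        print("token rejected")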
|
||||
|
||||
|
||||
def hmac_sha256() -> hmac.HMAC:
|
||||
return hmac.new(CONFIG.secret.encode(), digestmod=hashlib.sha256)
|
||||
|
||||
|
||||
stream_visibility_callback: _StreamVisibilityCallback
|
||||
try:
|
||||
from data.stream import ( # type: ignore # noqa: F401, E501
|
||||
custom_stream_visibility_callback,
|
||||
)
|
||||
|
||||
stream_visibility_callback = custom_stream_visibility_callback
|
||||
except ImportError:
|
||||
stream_visibility_callback = default_stream_visibility_callback
|
||||
|
|
|
@ -1,12 +1,19 @@
|
|||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
|
||||
from fastapi import APIRouter
|
||||
from fastapi import Depends
|
||||
from fastapi import Request
|
||||
from loguru import logger
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from app.ap_object import RemoteObject
|
||||
|
||||
|
||||
_DATA_DIR = Path().parent.resolve() / "data"
|
||||
_Handler = Callable[..., Any]
|
||||
|
||||
|
@ -110,3 +117,39 @@ def get_custom_router() -> APIRouter | None:
|
|||
router.add_api_route(path, handler.handler)
|
||||
|
||||
return router
|
||||
|
||||
|
||||
@dataclass
|
||||
class ObjectInfo:
|
||||
# Is it a reply?
|
||||
is_reply: bool
|
||||
|
||||
# Is it a reply to an outbox object
|
||||
is_local_reply: bool
|
||||
|
||||
# Is the object mentioning the local actor
|
||||
is_mention: bool
|
||||
|
||||
# Is it from someone the local actor is following
|
||||
is_from_following: bool
|
||||
|
||||
# List of hashtags, e.g. #microblogpub
|
||||
hashtags: list[str]
|
||||
|
||||
# @dev@microblog.pub
|
||||
actor_handle: str
|
||||
|
||||
remote_object: "RemoteObject"
|
||||
|
||||
|
||||
_StreamVisibilityCallback = Callable[[ObjectInfo], bool]
|
||||
|
||||
|
||||
def default_stream_visibility_callback(object_info: ObjectInfo) -> bool:
|
||||
result = (
|
||||
(not object_info.is_reply and object_info.is_from_following)
|
||||
or object_info.is_mention
|
||||
or object_info.is_local_reply
|
||||
)
|
||||
logger.info(f"{object_info=}/{result=}")
|
||||
return result
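Because config.py falls back to this default only when data/stream.py does not provide custom_stream_visibility_callback, the stream filter can be overridden per instance. A hedged example of such an override (the #nobot hashtag rule is purely illustrative):

    # Hypothetical data/stream.py: hide followed actors' posts tagged #nobot,
    # otherwise keep the default behaviour shown above.
    from app.customization import ObjectInfo


    def custom_stream_visibility_callback(object_info: ObjectInfo) -> bool:
        if "#nobot" in object_info.hashtags:
            return False
        return (
            (not object_info.is_reply and object_info.is_from_following)
            or object_info.is_mention
            or object_info.is_local_reply
        )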
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import typing
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
@ -22,12 +23,12 @@ from sqlalchemy import select
|
|||
|
||||
from app import activitypub as ap
|
||||
from app import config
|
||||
from app.config import BLOCKED_SERVERS
|
||||
from app.config import KEY_PATH
|
||||
from app.database import AsyncSession
|
||||
from app.database import get_db_session
|
||||
from app.key import Key
|
||||
from app.utils.datetime import now
|
||||
from app.utils.url import is_hostname_blocked
|
||||
|
||||
_KEY_CACHE: MutableMapping[str, Key] = LFUCache(256)
|
||||
|
||||
|
@ -183,7 +184,7 @@ async def httpsig_checker(
|
|||
)
|
||||
|
||||
server = urlparse(key_id).hostname
|
||||
if server in BLOCKED_SERVERS:
|
||||
if is_hostname_blocked(server):
|
||||
return HTTPSigInfo(
|
||||
has_valid_signature=False,
|
||||
server=server,
|
||||
|
@ -198,6 +199,32 @@ async def httpsig_checker(
|
|||
server=server,
|
||||
)
|
||||
|
||||
# Try to drop Delete activity spam early on; this prevents making an extra
|
||||
# HTTP request trying to fetch an unavailable actor to verify the HTTP sig
|
||||
try:
|
||||
if request.method == "POST" and request.url.path.endswith("/inbox"):
|
||||
from app import models # TODO: solve this circular import
|
||||
|
||||
activity = json.loads(body)
|
||||
actor_id = ap.get_id(activity["actor"])
|
||||
if (
|
||||
ap.as_list(activity["type"])[0] == "Delete"
|
||||
and actor_id == ap.get_id(activity["object"])
|
||||
and not (
|
||||
await db_session.scalars(
|
||||
select(models.Actor).where(
|
||||
models.Actor.ap_id == actor_id,
|
||||
)
|
||||
)
|
||||
).one_or_none()
|
||||
):
|
||||
logger.info(f"Dropping Delete activity early for {body=}")
|
||||
raise fastapi.HTTPException(status_code=202)
|
||||
except fastapi.HTTPException as http_exc:
|
||||
raise http_exc
|
||||
except Exception:
|
||||
logger.exception("Failed to check for Delete spam")
|
||||
|
||||
# logger.debug(f"hsig={hsig}")
|
||||
signed_string, signature_date = _build_signed_string(
|
||||
hsig["headers"],
|
||||
|
|
|
@ -3,7 +3,6 @@ import traceback
|
|||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
|
||||
import httpx
|
||||
from loguru import logger
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy import select
|
||||
|
@ -61,7 +60,7 @@ def _set_next_try(
|
|||
if not outgoing_activity.tries:
|
||||
raise ValueError("Should never happen")
|
||||
|
||||
if outgoing_activity.tries == _MAX_RETRIES:
|
||||
if outgoing_activity.tries >= _MAX_RETRIES:
|
||||
outgoing_activity.is_errored = True
|
||||
outgoing_activity.next_try = None
|
||||
else:
|
||||
|
@ -108,6 +107,7 @@ async def process_next_incoming_activity(
|
|||
|
||||
next_activity.tries = next_activity.tries + 1
|
||||
next_activity.last_try = now()
|
||||
await db_session.commit()
|
||||
|
||||
if next_activity.ap_object and next_activity.sent_by_ap_actor_id:
|
||||
try:
|
||||
|
@ -120,13 +120,16 @@ async def process_next_incoming_activity(
|
|||
),
|
||||
timeout=60,
|
||||
)
|
||||
except httpx.TimeoutException as exc:
|
||||
url = exc._request.url if exc._request else None
|
||||
logger.error(f"Failed, HTTP timeout when fetching {url}")
|
||||
except asyncio.exceptions.TimeoutError:
|
||||
logger.error("Activity took too long to process")
|
||||
await db_session.rollback()
|
||||
await db_session.refresh(next_activity)
|
||||
next_activity.error = traceback.format_exc()
|
||||
_set_next_try(next_activity)
|
||||
except Exception:
|
||||
logger.exception("Failed")
|
||||
await db_session.rollback()
|
||||
await db_session.refresh(next_activity)
|
||||
next_activity.error = traceback.format_exc()
|
||||
_set_next_try(next_activity)
|
||||
else:
|
||||
|
|
265
app/indieauth.py
|
@ -10,9 +10,12 @@ from fastapi import Form
|
|||
from fastapi import HTTPException
|
||||
from fastapi import Request
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi.responses import RedirectResponse
|
||||
from fastapi.security import HTTPBasic
|
||||
from fastapi.security import HTTPBasicCredentials
|
||||
from loguru import logger
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from app import config
|
||||
from app import models
|
||||
|
@ -21,9 +24,12 @@ from app.admin import user_session_or_redirect
|
|||
from app.config import verify_csrf_token
|
||||
from app.database import AsyncSession
|
||||
from app.database import get_db_session
|
||||
from app.redirect import redirect
|
||||
from app.utils import indieauth
|
||||
from app.utils.datetime import now
|
||||
|
||||
basic_auth = HTTPBasic()
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
|
@ -38,9 +44,55 @@ async def well_known_authorization_server(
|
|||
"code_challenge_methods_supported": ["S256"],
|
||||
"revocation_endpoint": request.url_for("indieauth_revocation_endpoint"),
|
||||
"revocation_endpoint_auth_methods_supported": ["none"],
|
||||
"registration_endpoint": request.url_for("oauth_registration_endpoint"),
|
||||
"introspection_endpoint": request.url_for("oauth_introspection_endpoint"),
|
||||
}
|
||||
|
||||
|
||||
class OAuthRegisterClientRequest(BaseModel):
|
||||
client_name: str
|
||||
redirect_uris: list[str] | str
|
||||
|
||||
client_uri: str | None = None
|
||||
logo_uri: str | None = None
|
||||
scope: str | None = None
|
||||
|
||||
|
||||
@router.post("/oauth/register")
|
||||
async def oauth_registration_endpoint(
|
||||
register_client_request: OAuthRegisterClientRequest,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
) -> JSONResponse:
|
||||
"""Implements OAuth 2.0 Dynamic Registration."""
|
||||
|
||||
client = models.OAuthClient(
|
||||
client_name=register_client_request.client_name,
|
||||
redirect_uris=[register_client_request.redirect_uris]
|
||||
if isinstance(register_client_request.redirect_uris, str)
|
||||
else register_client_request.redirect_uris,
|
||||
client_uri=register_client_request.client_uri,
|
||||
logo_uri=register_client_request.logo_uri,
|
||||
scope=register_client_request.scope,
|
||||
client_id=secrets.token_hex(16),
|
||||
client_secret=secrets.token_hex(32),
|
||||
)
|
||||
|
||||
db_session.add(client)
|
||||
await db_session.commit()
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
**register_client_request.dict(),
|
||||
"client_id_issued_at": int(client.created_at.timestamp()), # type: ignore
|
||||
"grant_types": ["authorization_code", "refresh_token"],
|
||||
"client_secret_expires_at": 0,
|
||||
"client_id": client.client_id,
|
||||
"client_secret": client.client_secret,
|
||||
},
|
||||
status_code=201,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/auth")
|
||||
async def indieauth_authorization_endpoint(
|
||||
request: Request,
|
||||
|
@ -56,12 +108,29 @@ async def indieauth_authorization_endpoint(
|
|||
code_challenge = request.query_params.get("code_challenge", "")
|
||||
code_challenge_method = request.query_params.get("code_challenge_method", "")
|
||||
|
||||
# Check if the authorization request is coming from an OAuth client
|
||||
registered_client = (
|
||||
await db_session.scalars(
|
||||
select(models.OAuthClient).where(
|
||||
models.OAuthClient.client_id == client_id,
|
||||
)
|
||||
)
|
||||
).one_or_none()
|
||||
if registered_client:
|
||||
client = {
|
||||
"name": registered_client.client_name,
|
||||
"logo": registered_client.logo_uri,
|
||||
"url": registered_client.client_uri,
|
||||
}
|
||||
else:
|
||||
client = await indieauth.get_client_id_data(client_id) # type: ignore
|
||||
|
||||
return await templates.render_template(
|
||||
db_session,
|
||||
request,
|
||||
"indieauth_flow.html",
|
||||
dict(
|
||||
client=await indieauth.get_client_id_data(client_id),
|
||||
client=client,
|
||||
scopes=scope,
|
||||
redirect_uri=redirect_uri,
|
||||
state=state,
|
||||
|
@ -80,7 +149,7 @@ async def indieauth_flow(
|
|||
db_session: AsyncSession = Depends(get_db_session),
|
||||
csrf_check: None = Depends(verify_csrf_token),
|
||||
_: None = Depends(user_session_or_redirect),
|
||||
) -> RedirectResponse:
|
||||
) -> templates.TemplateResponse:
|
||||
form_data = await request.form()
|
||||
logger.info(f"{form_data=}")
|
||||
|
||||
|
@ -114,9 +183,8 @@ async def indieauth_flow(
|
|||
db_session.add(auth_request)
|
||||
await db_session.commit()
|
||||
|
||||
return RedirectResponse(
|
||||
redirect_uri + f"?code={code}&state={state}&iss={iss}",
|
||||
status_code=302,
|
||||
return await redirect(
|
||||
request, db_session, redirect_uri + f"?code={code}&state={state}&iss={iss}"
|
||||
)
|
||||
|
||||
|
||||
|
@ -207,29 +275,54 @@ async def indieauth_token_endpoint(
|
|||
form_data = await request.form()
|
||||
logger.info(f"{form_data=}")
|
||||
grant_type = form_data.get("grant_type", "authorization_code")
|
||||
if grant_type != "authorization_code":
|
||||
if grant_type not in ["authorization_code", "refresh_token"]:
|
||||
raise ValueError(f"Invalid grant_type {grant_type}")
|
||||
|
||||
code = form_data["code"]
|
||||
|
||||
# These must match the params from the first request
|
||||
client_id = form_data["client_id"]
|
||||
redirect_uri = form_data["redirect_uri"]
|
||||
# code_verifier is optional for backward compat
|
||||
code_verifier = form_data.get("code_verifier")
|
||||
|
||||
is_code_valid, auth_code_request = await _check_auth_code(
|
||||
db_session,
|
||||
code=code,
|
||||
client_id=client_id,
|
||||
redirect_uri=redirect_uri,
|
||||
code_verifier=code_verifier,
|
||||
)
|
||||
if not is_code_valid or (auth_code_request and not auth_code_request.scope):
|
||||
return JSONResponse(
|
||||
content={"error": "invalid_grant"},
|
||||
status_code=400,
|
||||
if grant_type == "authorization_code":
|
||||
code = form_data["code"]
|
||||
redirect_uri = form_data["redirect_uri"]
|
||||
# code_verifier is optional for backward compat
|
||||
is_code_valid, auth_code_request = await _check_auth_code(
|
||||
db_session,
|
||||
code=code,
|
||||
client_id=client_id,
|
||||
redirect_uri=redirect_uri,
|
||||
code_verifier=code_verifier,
|
||||
)
|
||||
if not is_code_valid or (auth_code_request and not auth_code_request.scope):
|
||||
return JSONResponse(
|
||||
content={"error": "invalid_grant"},
|
||||
status_code=400,
|
||||
)
|
||||
|
||||
elif grant_type == "refresh_token":
|
||||
refresh_token = form_data["refresh_token"]
|
||||
access_token = (
|
||||
await db_session.scalars(
|
||||
select(models.IndieAuthAccessToken)
|
||||
.where(
|
||||
models.IndieAuthAccessToken.refresh_token == refresh_token,
|
||||
models.IndieAuthAccessToken.was_refreshed.is_(False),
|
||||
)
|
||||
.options(
|
||||
joinedload(
|
||||
models.IndieAuthAccessToken.indieauth_authorization_request
|
||||
)
|
||||
)
|
||||
)
|
||||
).one_or_none()
|
||||
if not access_token:
|
||||
raise ValueError("invalid refresh token")
|
||||
|
||||
if access_token.indieauth_authorization_request.client_id != client_id:
|
||||
raise ValueError("invalid client ID")
|
||||
|
||||
auth_code_request = access_token.indieauth_authorization_request
|
||||
access_token.was_refreshed = True
|
||||
|
||||
if not auth_code_request:
|
||||
raise ValueError("Should never happen")
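With the refresh_token grant, the matching access token is looked up by its refresh token, marked as refreshed, and a fresh token pair is issued further down. A hedged sketch of the corresponding client request (the token endpoint path and all values are assumptions/placeholders):

    # Hedged sketch: exchanging a refresh token for a new token pair.
    import httpx

    resp = httpx.post(
        "https://example.com/token",  # assumed token endpoint path
        data={
            "grant_type": "refresh_token",
            "refresh_token": "previous-refresh-token",
            "client_id": "registered-client-id",
        },
    )
    tokens = resp.json()
    print(tokens["access_token"], tokens["refresh_token"])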
|
||||
|
@ -237,6 +330,7 @@ async def indieauth_token_endpoint(
|
|||
access_token = models.IndieAuthAccessToken(
|
||||
indieauth_authorization_request_id=auth_code_request.id,
|
||||
access_token=secrets.token_urlsafe(32),
|
||||
refresh_token=secrets.token_urlsafe(32),
|
||||
expires_in=3600,
|
||||
scope=auth_code_request.scope,
|
||||
)
|
||||
|
@ -246,6 +340,7 @@ async def indieauth_token_endpoint(
|
|||
return JSONResponse(
|
||||
content={
|
||||
"access_token": access_token.access_token,
|
||||
"refresh_token": access_token.refresh_token,
|
||||
"token_type": "Bearer",
|
||||
"scope": auth_code_request.scope,
|
||||
"me": config.ID + "/",
|
||||
|
@ -261,8 +356,10 @@ async def _check_access_token(
|
|||
) -> tuple[bool, models.IndieAuthAccessToken | None]:
|
||||
access_token_info = (
|
||||
await db_session.scalars(
|
||||
select(models.IndieAuthAccessToken).where(
|
||||
models.IndieAuthAccessToken.access_token == token
|
||||
select(models.IndieAuthAccessToken)
|
||||
.where(models.IndieAuthAccessToken.access_token == token)
|
||||
.options(
|
||||
joinedload(models.IndieAuthAccessToken.indieauth_authorization_request)
|
||||
)
|
||||
)
|
||||
).one_or_none()
|
||||
|
@ -285,6 +382,9 @@ async def _check_access_token(
|
|||
@dataclass(frozen=True)
|
||||
class AccessTokenInfo:
|
||||
scopes: list[str]
|
||||
client_id: str | None
|
||||
access_token: str
|
||||
exp: int
|
||||
|
||||
|
||||
async def verify_access_token(
|
||||
|
@ -311,9 +411,71 @@ async def verify_access_token(
|
|||
|
||||
return AccessTokenInfo(
|
||||
scopes=access_token.scope.split(),
|
||||
client_id=(
|
||||
access_token.indieauth_authorization_request.client_id
|
||||
if access_token.indieauth_authorization_request
|
||||
else None
|
||||
),
|
||||
access_token=access_token.access_token,
|
||||
exp=int(
|
||||
(
|
||||
access_token.created_at.replace(tzinfo=timezone.utc)
|
||||
+ timedelta(seconds=access_token.expires_in)
|
||||
).timestamp()
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def check_access_token(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
) -> AccessTokenInfo | None:
|
||||
token = request.headers.get("Authorization", "").removeprefix("Bearer ")
|
||||
if not token:
|
||||
return None
|
||||
|
||||
is_token_valid, access_token = await _check_access_token(db_session, token)
|
||||
if not is_token_valid:
|
||||
return None
|
||||
|
||||
if not access_token or not access_token.scope:
|
||||
raise ValueError("Should never happen")
|
||||
|
||||
access_token_info = AccessTokenInfo(
|
||||
scopes=access_token.scope.split(),
|
||||
client_id=(
|
||||
access_token.indieauth_authorization_request.client_id
|
||||
if access_token.indieauth_authorization_request
|
||||
else None
|
||||
),
|
||||
access_token=access_token.access_token,
|
||||
exp=int(
|
||||
(
|
||||
access_token.created_at.replace(tzinfo=timezone.utc)
|
||||
+ timedelta(seconds=access_token.expires_in)
|
||||
).timestamp()
|
||||
),
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Authenticated with access token from client_id="
|
||||
f"{access_token_info.client_id} scopes={access_token.scope}"
|
||||
)
|
||||
|
||||
return access_token_info
|
||||
|
||||
|
||||
async def enforce_access_token(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
) -> AccessTokenInfo:
|
||||
maybe_access_token_info = await check_access_token(request, db_session)
|
||||
if not maybe_access_token_info:
|
||||
raise HTTPException(status_code=401, detail="access token required")
|
||||
|
||||
return maybe_access_token_info
|
||||
|
||||
|
||||
@router.post("/revoke_token")
|
||||
async def indieauth_revocation_endpoint(
|
||||
request: Request,
|
||||
|
@ -333,3 +495,58 @@ async def indieauth_revocation_endpoint(
|
|||
content={},
|
||||
status_code=200,
|
||||
)
|
||||
|
||||
|
||||
@router.post("/token_introspection")
|
||||
async def oauth_introspection_endpoint(
|
||||
request: Request,
|
||||
credentials: HTTPBasicCredentials = Depends(basic_auth),
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
token: str = Form(),
|
||||
) -> JSONResponse:
|
||||
registered_client = (
|
||||
await db_session.scalars(
|
||||
select(models.OAuthClient).where(
|
||||
models.OAuthClient.client_id == credentials.username,
|
||||
models.OAuthClient.client_secret == credentials.password,
|
||||
)
|
||||
)
|
||||
).one_or_none()
|
||||
if not registered_client:
|
||||
raise HTTPException(status_code=401, detail="unauthenticated")
|
||||
|
||||
access_token = (
|
||||
await db_session.scalars(
|
||||
select(models.IndieAuthAccessToken)
|
||||
.where(models.IndieAuthAccessToken.access_token == token)
|
||||
.join(
|
||||
models.IndieAuthAuthorizationRequest,
|
||||
models.IndieAuthAccessToken.indieauth_authorization_request_id
|
||||
== models.IndieAuthAuthorizationRequest.id,
|
||||
)
|
||||
.where(
|
||||
models.IndieAuthAuthorizationRequest.client_id == credentials.username
|
||||
)
|
||||
)
|
||||
).one_or_none()
|
||||
if not access_token:
|
||||
return JSONResponse(content={"active": False})
|
||||
|
||||
is_token_valid, _ = await _check_access_token(db_session, token)
|
||||
if not is_token_valid:
|
||||
return JSONResponse(content={"active": False})
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"active": True,
|
||||
"client_id": credentials.username,
|
||||
"scope": access_token.scope,
|
||||
"exp": int(
|
||||
(
|
||||
access_token.created_at.replace(tzinfo=timezone.utc)
|
||||
+ timedelta(seconds=access_token.expires_in)
|
||||
).timestamp()
|
||||
),
|
||||
},
|
||||
status_code=200,
|
||||
)
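Introspection authenticates the registered client with HTTP Basic credentials and reports whether the presented token is active. A hedged sketch of a call (URL and credentials are placeholders):

    # Hedged sketch: calling the /token_introspection endpoint.
    import httpx

    resp = httpx.post(
        "https://example.com/token_introspection",
        auth=("registered-client-id", "registered-client-secret"),
        data={"token": "the-access-token"},
    )
    print(resp.json())  # e.g. {"active": true, "client_id": "...", "scope": "...", "exp": ...}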
|
||||
|
|
|
@ -23,6 +23,13 @@ requests_loader = pyld.documentloader.requests.requests_document_loader()
|
|||
def _loader(url, options={}):
|
||||
# See https://github.com/digitalbazaar/pyld/issues/133
|
||||
options["headers"]["Accept"] = "application/ld+json"
|
||||
|
||||
# XXX: temp fix/hack as it seems to be down for now
|
||||
if url == "https://w3id.org/identity/v1":
|
||||
url = (
|
||||
"https://raw.githubusercontent.com/web-payments/web-payments.org"
|
||||
"/master/contexts/identity-v1.jsonld"
|
||||
)
|
||||
return requests_loader(url, options)
|
||||
|
||||
|
||||
|
@ -34,7 +41,7 @@ def _options_hash(doc: ap.RawObject) -> str:
|
|||
for k in ["type", "id", "signatureValue"]:
|
||||
if k in doc:
|
||||
del doc[k]
|
||||
doc["@context"] = "https://w3id.org/identity/v1"
|
||||
doc["@context"] = "https://w3id.org/security/v1"
|
||||
normalized = jsonld.normalize(
|
||||
doc, {"algorithm": "URDNA2015", "format": "application/nquads"}
|
||||
)
|
||||
|
|
455
app/main.py
|
@ -48,6 +48,7 @@ from app import boxes
|
|||
from app import config
|
||||
from app import httpsig
|
||||
from app import indieauth
|
||||
from app import media
|
||||
from app import micropub
|
||||
from app import models
|
||||
from app import templates
|
||||
|
@ -61,6 +62,7 @@ from app.config import DOMAIN
|
|||
from app.config import ID
|
||||
from app.config import USER_AGENT
|
||||
from app.config import USERNAME
|
||||
from app.config import WEBFINGER_DOMAIN
|
||||
from app.config import is_activitypub_requested
|
||||
from app.config import verify_csrf_token
|
||||
from app.customization import get_custom_router
|
||||
|
@ -72,12 +74,15 @@ from app.templates import is_current_user_admin
|
|||
from app.uploads import UPLOAD_DIR
|
||||
from app.utils import pagination
|
||||
from app.utils.emoji import EMOJIS_BY_NAME
|
||||
from app.utils.facepile import Face
|
||||
from app.utils.facepile import WebmentionReply
|
||||
from app.utils.facepile import merge_faces
|
||||
from app.utils.highlight import HIGHLIGHT_CSS_HASH
|
||||
from app.utils.url import check_url
|
||||
from app.webfinger import get_remote_follow_template
|
||||
|
||||
# Only images <1MB will be cached, so 64MB of data will be cached
|
||||
_RESIZED_CACHE: MutableMapping[tuple[str, int], tuple[bytes, str, Any]] = LFUCache(64)
|
||||
# Only images <1MB will be cached, so 32MB of data will be cached
|
||||
_RESIZED_CACHE: MutableMapping[tuple[str, int], tuple[bytes, str, Any]] = LFUCache(32)
|
||||
|
||||
|
||||
# TODO(ts):
|
||||
|
@ -136,9 +141,15 @@ class CustomMiddleware:
|
|||
headers["x-frame-options"] = "DENY"
|
||||
headers["permissions-policy"] = "interest-cohort=()"
|
||||
headers["content-security-policy"] = (
|
||||
f"default-src 'self'; "
|
||||
f"style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; "
|
||||
f"frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
|
||||
(
|
||||
f"default-src 'self'; "
|
||||
f"style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; "
|
||||
f"frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
|
||||
)
|
||||
if not config.CUSTOM_CONTENT_SECURITY_POLICY
|
||||
else config.CUSTOM_CONTENT_SECURITY_POLICY.format(
|
||||
HIGHLIGHT_CSS_HASH=HIGHLIGHT_CSS_HASH
|
||||
)
|
||||
)
|
||||
if not DEBUG:
|
||||
headers["strict-transport-security"] = "max-age=63072000;"
|
||||
|
@ -247,11 +258,34 @@ class ActivityPubResponse(JSONResponse):
|
|||
media_type = "application/activity+json"
|
||||
|
||||
|
||||
async def redirect_to_remote_instance(
|
||||
request: Request,
|
||||
db_session: AsyncSession,
|
||||
url: str,
|
||||
) -> templates.TemplateResponse:
|
||||
"""
|
||||
Similar to RedirectResponse, but uses a 200 response with HTML.
|
||||
|
||||
Needed for remote redirects on form submission endpoints,
|
||||
since our CSP policy disallows remote form submission.
|
||||
https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984
|
||||
"""
|
||||
return await templates.render_template(
|
||||
db_session,
|
||||
request,
|
||||
"redirect_to_remote_instance.html",
|
||||
{
|
||||
"request": request,
|
||||
"url": url,
|
||||
},
|
||||
headers={"Refresh": "0;url=" + url},
|
||||
)
|
||||
|
||||
|
||||
@app.get(config.NavBarItems.NOTES_PATH)
|
||||
async def index(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
|
||||
page: int | None = None,
|
||||
) -> templates.TemplateResponse | ActivityPubResponse:
|
||||
if is_activitypub_requested(request):
|
||||
|
@ -263,7 +297,7 @@ async def index(
|
|||
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
|
||||
models.OutboxObject.is_deleted.is_(False),
|
||||
models.OutboxObject.is_hidden_from_homepage.is_(False),
|
||||
models.OutboxObject.ap_type != "Article",
|
||||
models.OutboxObject.ap_type.in_(["Announce", "Note", "Video", "Question"]),
|
||||
)
|
||||
q = select(models.OutboxObject).where(*where)
|
||||
total_count = await db_session.scalar(
|
||||
|
@ -431,7 +465,12 @@ async def followers(
|
|||
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
|
||||
) -> ActivityPubResponse | templates.TemplateResponse:
|
||||
if is_activitypub_requested(request):
|
||||
if config.HIDES_FOLLOWERS:
|
||||
maybe_access_token_info = await indieauth.check_access_token(
|
||||
request,
|
||||
db_session,
|
||||
)
|
||||
|
||||
if config.HIDES_FOLLOWERS and not maybe_access_token_info:
|
||||
return ActivityPubResponse(
|
||||
await _empty_followx_collection(
|
||||
db_session=db_session,
|
||||
|
@ -490,7 +529,12 @@ async def following(
|
|||
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
|
||||
) -> ActivityPubResponse | templates.TemplateResponse:
|
||||
if is_activitypub_requested(request):
|
||||
if config.HIDES_FOLLOWING:
|
||||
maybe_access_token_info = await indieauth.check_access_token(
|
||||
request,
|
||||
db_session,
|
||||
)
|
||||
|
||||
if config.HIDES_FOLLOWING and not maybe_access_token_info:
|
||||
return ActivityPubResponse(
|
||||
await _empty_followx_collection(
|
||||
db_session=db_session,
|
||||
|
@ -546,22 +590,34 @@ async def following(
|
|||
|
||||
@app.get("/outbox")
|
||||
async def outbox(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
|
||||
) -> ActivityPubResponse:
|
||||
maybe_access_token_info = await indieauth.check_access_token(
|
||||
request,
|
||||
db_session,
|
||||
)
|
||||
|
||||
# Default restrictions unless the request is authenticated with an access token
|
||||
restricted_where = [
|
||||
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
|
||||
models.OutboxObject.ap_type.in_(["Create", "Note", "Article", "Announce"]),
|
||||
]
|
||||
|
||||
# By design, we only show the last 20 public activities in the outbox
|
||||
outbox_objects = (
|
||||
await db_session.scalars(
|
||||
select(models.OutboxObject)
|
||||
.where(
|
||||
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
|
||||
models.OutboxObject.is_deleted.is_(False),
|
||||
models.OutboxObject.ap_type.in_(["Create", "Announce"]),
|
||||
*([] if maybe_access_token_info else restricted_where),
|
||||
)
|
||||
.order_by(models.OutboxObject.ap_published_at.desc())
|
||||
.limit(20)
|
||||
)
|
||||
).all()
|
||||
|
||||
return ActivityPubResponse(
|
||||
{
|
||||
"@context": ap.AS_EXTENDED_CTX,
|
||||
|
@ -576,6 +632,49 @@ async def outbox(
|
|||
)
|
||||
|
||||
|
||||
@app.post("/outbox")
|
||||
async def post_outbox(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
access_token_info: indieauth.AccessTokenInfo = Depends(
|
||||
indieauth.enforce_access_token
|
||||
),
|
||||
) -> ActivityPubResponse:
|
||||
payload = await request.json()
|
||||
logger.info(f"{payload=}")
|
||||
|
||||
if payload.get("type") == "Create":
|
||||
assert payload["actor"] == ID
|
||||
obj = payload["object"]
|
||||
|
||||
to_and_cc = obj.get("to", []) + obj.get("cc", [])
|
||||
if ap.AS_PUBLIC in obj.get("to", []) and ID + "/followers" in to_and_cc:
|
||||
visibility = ap.VisibilityEnum.PUBLIC
|
||||
elif ap.AS_PUBLIC in to_and_cc and ID + "/followers" in to_and_cc:
|
||||
visibility = ap.VisibilityEnum.UNLISTED
|
||||
else:
|
||||
visibility = ap.VisibilityEnum.DIRECT
|
||||
|
||||
object_id, outbox_object = await boxes.send_create(
|
||||
db_session,
|
||||
ap_type=obj["type"],
|
||||
source=obj["content"],
|
||||
uploads=[],
|
||||
in_reply_to=obj.get("inReplyTo"),
|
||||
visibility=visibility,
|
||||
content_warning=obj.get("summary"),
|
||||
is_sensitive=obj.get("sensitive", False),
|
||||
)
|
||||
else:
|
||||
raise ValueError("TODO")
|
||||
|
||||
return ActivityPubResponse(
|
||||
outbox_object.ap_object,
|
||||
status_code=201,
|
||||
headers={"Location": boxes.outbox_object_id(object_id)},
|
||||
)
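The new C2S outbox only accepts Create activities and derives visibility from the addressing: the public collection in "to" (with the followers collection also addressed) means public, public only in "cc" means unlisted, anything else is direct. A hedged sketch of a client payload (domain and token are placeholders):

    # Hedged sketch: posting a public Note through the C2S /outbox endpoint.
    import httpx

    AS_PUBLIC = "https://www.w3.org/ns/activitystreams#Public"
    actor_id = "https://example.com"

    payload = {
        "type": "Create",
        "actor": actor_id,
        "object": {
            "type": "Note",
            "content": "Hello from the C2S API",
            "to": [AS_PUBLIC],
            "cc": [actor_id + "/followers"],  # public in "to" + followers => PUBLIC
        },
    }
    resp = httpx.post(
        actor_id + "/outbox",
        json=payload,
        headers={"Authorization": "Bearer the-access-token"},
    )
    print(resp.status_code, resp.headers.get("Location"))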
|
||||
|
||||
|
||||
@app.get("/featured")
|
||||
async def featured(
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
|
@ -613,6 +712,14 @@ async def _check_outbox_object_acl(
|
|||
if templates.is_current_user_admin(request):
|
||||
return None
|
||||
|
||||
maybe_access_token_info = await indieauth.check_access_token(
|
||||
request,
|
||||
db_session,
|
||||
)
|
||||
if maybe_access_token_info:
|
||||
# TODO: check scopes
|
||||
return None
|
||||
|
||||
if ap_object.visibility in [
|
||||
ap.VisibilityEnum.PUBLIC,
|
||||
ap.VisibilityEnum.UNLISTED,
|
||||
|
@ -693,7 +800,7 @@ async def _fetch_webmentions(
|
|||
models.Webmention.outbox_object_id == outbox_object.id,
|
||||
models.Webmention.is_deleted.is_(False),
|
||||
)
|
||||
.limit(10)
|
||||
.limit(50)
|
||||
)
|
||||
).all()
|
||||
|
||||
|
@ -733,7 +840,7 @@ async def outbox_by_public_id(
|
|||
|
||||
if maybe_object.ap_type == "Article":
|
||||
return RedirectResponse(
|
||||
f"/articles/{public_id[:7]}/{maybe_object.slug}",
|
||||
f"{BASE_URL}/articles/{public_id[:7]}/{maybe_object.slug}",
|
||||
status_code=301,
|
||||
)
|
||||
|
||||
|
@ -743,23 +850,90 @@ async def outbox_by_public_id(
|
|||
is_current_user_admin=is_current_user_admin(request),
|
||||
)
|
||||
|
||||
webmentions = await _fetch_webmentions(db_session, maybe_object)
|
||||
likes = await _fetch_likes(db_session, maybe_object)
|
||||
shares = await _fetch_shares(db_session, maybe_object)
|
||||
webmentions = await _fetch_webmentions(db_session, maybe_object)
|
||||
return await templates.render_template(
|
||||
db_session,
|
||||
request,
|
||||
"object.html",
|
||||
{
|
||||
"replies_tree": replies_tree,
|
||||
"replies_tree": _merge_replies(replies_tree, webmentions),
|
||||
"outbox_object": maybe_object,
|
||||
"likes": likes,
|
||||
"shares": shares,
|
||||
"webmentions": webmentions,
|
||||
"likes": _merge_faces_from_inbox_object_and_webmentions(
|
||||
likes,
|
||||
webmentions,
|
||||
models.WebmentionType.LIKE,
|
||||
),
|
||||
"shares": _merge_faces_from_inbox_object_and_webmentions(
|
||||
shares,
|
||||
webmentions,
|
||||
models.WebmentionType.REPOST,
|
||||
),
|
||||
"webmentions": _filter_webmentions(webmentions),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def _filter_webmentions(
|
||||
webmentions: list[models.Webmention],
|
||||
) -> list[models.Webmention]:
|
||||
return [
|
||||
wm
|
||||
for wm in webmentions
|
||||
if wm.webmention_type
|
||||
not in [
|
||||
models.WebmentionType.LIKE,
|
||||
models.WebmentionType.REPOST,
|
||||
models.WebmentionType.REPLY,
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
def _merge_faces_from_inbox_object_and_webmentions(
|
||||
inbox_objects: list[models.InboxObject],
|
||||
webmentions: list[models.Webmention],
|
||||
webmention_type: models.WebmentionType,
|
||||
) -> list[Face]:
|
||||
wm_faces = []
|
||||
for wm in webmentions:
|
||||
if wm.webmention_type != webmention_type:
|
||||
continue
|
||||
if face := Face.from_webmention(wm):
|
||||
wm_faces.append(face)
|
||||
|
||||
return merge_faces(
|
||||
[Face.from_inbox_object(obj) for obj in inbox_objects] + wm_faces
|
||||
)
|
||||
|
||||
|
||||
def _merge_replies(
|
||||
reply_tree_node: boxes.ReplyTreeNode,
|
||||
webmentions: list[models.Webmention],
|
||||
) -> boxes.ReplyTreeNode:
|
||||
# TODO: return None as we update the object in place
|
||||
webmention_replies = []
|
||||
for wm in [
|
||||
wm for wm in webmentions if wm.webmention_type == models.WebmentionType.REPLY
|
||||
]:
|
||||
if rep := WebmentionReply.from_webmention(wm):
|
||||
webmention_replies.append(
|
||||
boxes.ReplyTreeNode(
|
||||
ap_object=None,
|
||||
wm_reply=rep,
|
||||
is_requested=False,
|
||||
children=[],
|
||||
)
|
||||
)
|
||||
|
||||
reply_tree_node.children = sorted(
|
||||
reply_tree_node.children + webmention_replies,
|
||||
key=lambda node: node.published_at,
|
||||
reverse=True,
|
||||
)
|
||||
return reply_tree_node
|
||||
|
||||
|
||||
@app.get("/articles/{short_id}/{slug}")
|
||||
async def article_by_slug(
|
||||
short_id: str,
|
||||
|
@ -768,24 +942,8 @@ async def article_by_slug(
|
|||
db_session: AsyncSession = Depends(get_db_session),
|
||||
httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
|
||||
) -> ActivityPubResponse | templates.TemplateResponse | RedirectResponse:
|
||||
maybe_object = (
|
||||
(
|
||||
await db_session.execute(
|
||||
select(models.OutboxObject)
|
||||
.options(
|
||||
joinedload(models.OutboxObject.outbox_object_attachments).options(
|
||||
joinedload(models.OutboxObjectAttachment.upload)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
models.OutboxObject.public_id.like(f"{short_id}%"),
|
||||
models.OutboxObject.slug == slug,
|
||||
models.OutboxObject.is_deleted.is_(False),
|
||||
)
|
||||
)
|
||||
)
|
||||
.unique()
|
||||
.scalar_one_or_none()
|
||||
maybe_object = await boxes.get_outbox_object_by_slug_and_short_id(
|
||||
db_session, slug, short_id
|
||||
)
|
||||
if not maybe_object:
|
||||
raise HTTPException(status_code=404)
|
||||
|
@ -809,11 +967,19 @@ async def article_by_slug(
|
|||
request,
|
||||
"object.html",
|
||||
{
|
||||
"replies_tree": replies_tree,
|
||||
"replies_tree": _merge_replies(replies_tree, webmentions),
|
||||
"outbox_object": maybe_object,
|
||||
"likes": likes,
|
||||
"shares": shares,
|
||||
"webmentions": webmentions,
|
||||
"likes": _merge_faces_from_inbox_object_and_webmentions(
|
||||
likes,
|
||||
webmentions,
|
||||
models.WebmentionType.LIKE,
|
||||
),
|
||||
"shares": _merge_faces_from_inbox_object_and_webmentions(
|
||||
shares,
|
||||
webmentions,
|
||||
models.WebmentionType.REPOST,
|
||||
),
|
||||
"webmentions": _filter_webmentions(webmentions),
|
||||
},
|
||||
)
|
||||
|
||||
|
@ -923,6 +1089,78 @@ def emoji_by_name(name: str) -> ActivityPubResponse:
|
|||
return ActivityPubResponse({"@context": ap.AS_EXTENDED_CTX, **emoji})
|
||||
|
||||
|
||||
@app.get("/inbox")
|
||||
async def get_inbox(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
access_token_info: indieauth.AccessTokenInfo = Depends(
|
||||
indieauth.enforce_access_token
|
||||
),
|
||||
page: bool | None = None,
|
||||
next_cursor: str | None = None,
|
||||
) -> ActivityPubResponse:
|
||||
where = [
|
||||
models.InboxObject.ap_type.in_(
|
||||
["Create", "Follow", "Like", "Announce", "Undo", "Update"]
|
||||
)
|
||||
]
|
||||
total_items = await db_session.scalar(
|
||||
select(func.count(models.InboxObject.id)).where(*where)
|
||||
)
|
||||
|
||||
if not page and not next_cursor:
|
||||
return ActivityPubResponse(
|
||||
{
|
||||
"@context": ap.AS_CTX,
|
||||
"id": ID + "/inbox",
|
||||
"first": ID + "/inbox?page=true",
|
||||
"type": "OrderedCollection",
|
||||
"totalItems": total_items,
|
||||
}
|
||||
)
|
||||
|
||||
q = (
|
||||
select(models.InboxObject)
|
||||
.where(*where)
|
||||
.order_by(models.InboxObject.created_at.desc())
|
||||
) # type: ignore
|
||||
if next_cursor:
|
||||
q = q.where(
|
||||
models.InboxObject.created_at
|
||||
< pagination.decode_cursor(next_cursor) # type: ignore
|
||||
)
|
||||
q = q.limit(20)
|
||||
|
||||
items = [item for item in (await db_session.scalars(q)).all()]
|
||||
next_cursor = None
|
||||
if (
|
||||
items
|
||||
and await db_session.scalar(
|
||||
select(func.count(models.InboxObject.id)).where(
|
||||
*where, models.InboxObject.created_at < items[-1].created_at
|
||||
)
|
||||
)
|
||||
> 0
|
||||
):
|
||||
next_cursor = pagination.encode_cursor(items[-1].created_at)
|
||||
|
||||
collection_page = {
|
||||
"@context": ap.AS_CTX,
|
||||
"id": (
|
||||
ID + "/inbox?page=true"
|
||||
if not next_cursor
|
||||
else ID + f"/inbox?next_cursor={next_cursor}"
|
||||
),
|
||||
"partOf": ID + "/inbox",
|
||||
"type": "OrderedCollectionPage",
|
||||
"orderedItems": [item.ap_object for item in items],
|
||||
}
|
||||
if next_cursor:
|
||||
collection_page["next"] = ID + f"/inbox?next_cursor={next_cursor}"
|
||||
|
||||
return ActivityPubResponse(collection_page)
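The authenticated GET /inbox is exposed as an OrderedCollection with cursor-based pages, so a client walks it by following "first" and then each page's "next". A hedged sketch of that traversal (domain and token are placeholders):

    # Hedged sketch: paging through the C2S inbox collection.
    import httpx

    headers = {
        "Authorization": "Bearer the-access-token",
        "Accept": "application/activity+json",
    }
    base = "https://example.com"

    collection = httpx.get(base + "/inbox", headers=headers).json()
    page_url = collection["first"]
    while page_url:
        page = httpx.get(page_url, headers=headers).json()
        for item in page["orderedItems"]:
            print(item["type"], item.get("id"))
        page_url = page.get("next")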
|
||||
|
||||
|
||||
@app.post("/inbox")
|
||||
async def inbox(
|
||||
request: Request,
|
||||
|
@ -952,9 +1190,10 @@ async def get_remote_follow(
|
|||
@app.post("/remote_follow")
|
||||
async def post_remote_follow(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
csrf_check: None = Depends(verify_csrf_token),
|
||||
profile: str = Form(),
|
||||
) -> RedirectResponse:
|
||||
) -> templates.TemplateResponse:
|
||||
if not profile.startswith("@"):
|
||||
profile = f"@{profile}"
|
||||
|
||||
|
@ -963,9 +1202,10 @@ async def post_remote_follow(
|
|||
# TODO(ts): error message to user
|
||||
raise HTTPException(status_code=404)
|
||||
|
||||
return RedirectResponse(
|
||||
return await redirect_to_remote_instance(
|
||||
request,
|
||||
db_session,
|
||||
remote_follow_template.format(uri=ID),
|
||||
status_code=302,
|
||||
)
|
||||
|
||||
|
||||
|
@ -993,10 +1233,11 @@ async def remote_interaction(
|
|||
@app.post("/remote_interaction")
|
||||
async def post_remote_interaction(
|
||||
request: Request,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
csrf_check: None = Depends(verify_csrf_token),
|
||||
profile: str = Form(),
|
||||
ap_id: str = Form(),
|
||||
) -> RedirectResponse:
|
||||
) -> templates.TemplateResponse:
|
||||
if not profile.startswith("@"):
|
||||
profile = f"@{profile}"
|
||||
|
||||
|
@ -1005,21 +1246,26 @@ async def post_remote_interaction(
|
|||
# TODO(ts): error message to user
|
||||
raise HTTPException(status_code=404)
|
||||
|
||||
return RedirectResponse(
|
||||
remote_follow_template.format(uri=ap_id),
|
||||
status_code=302,
|
||||
return await redirect_to_remote_instance(
|
||||
request,
|
||||
db_session,
|
||||
remote_follow_template.format(uri=ap_id),
|
||||
)
|
||||
|
||||
|
||||
@app.get("/.well-known/webfinger")
|
||||
async def wellknown_webfinger(resource: str) -> JSONResponse:
|
||||
"""Exposes/servers WebFinger data."""
|
||||
if resource not in [f"acct:{USERNAME}@{DOMAIN}", ID]:
|
||||
if resource not in [
|
||||
f"acct:{USERNAME}@{WEBFINGER_DOMAIN}",
|
||||
ID,
|
||||
f"acct:{USERNAME}@{DOMAIN}",
|
||||
]:
|
||||
logger.info(f"Got invalid req for {resource}")
|
||||
raise HTTPException(status_code=404)
|
||||
|
||||
out = {
|
||||
"subject": f"acct:{USERNAME}@{DOMAIN}",
|
||||
"subject": f"acct:{USERNAME}@{WEBFINGER_DOMAIN}",
|
||||
"aliases": [ID],
|
||||
"links": [
|
||||
{
|
||||
|
@ -1083,11 +1329,11 @@ async def nodeinfo(
|
|||
)
|
||||
|
||||
|
||||
proxy_client = httpx.AsyncClient(follow_redirects=True, http2=True)
|
||||
|
||||
|
||||
async def _proxy_get(
|
||||
request: starlette.requests.Request, url: str, stream: bool
|
||||
proxy_client: httpx.AsyncClient,
|
||||
request: starlette.requests.Request,
|
||||
url: str,
|
||||
stream: bool,
|
||||
) -> httpx.Response:
|
||||
# Request the URL (and filter request headers)
|
||||
proxy_req = proxy_client.build_request(
|
||||
|
@ -1128,21 +1374,35 @@ def _add_cache_control(headers: dict[str, str]) -> dict[str, str]:
|
|||
return {**headers, "Cache-Control": "max-age=31536000"}
|
||||
|
||||
|
||||
@app.get("/proxy/media/{encoded_url}")
|
||||
@app.get("/proxy/media/{exp}/{sig}/{encoded_url}")
|
||||
async def serve_proxy_media(
|
||||
request: Request,
|
||||
exp: int,
|
||||
sig: str,
|
||||
encoded_url: str,
|
||||
background_tasks: fastapi.BackgroundTasks,
|
||||
) -> StreamingResponse | PlainTextResponse:
|
||||
# Decode the base64-encoded URL
|
||||
url = base64.urlsafe_b64decode(encoded_url).decode()
|
||||
check_url(url)
|
||||
media.verify_proxied_media_sig(exp, url, sig)
|
||||
|
||||
proxy_resp = await _proxy_get(request, url, stream=True)
|
||||
proxy_client = httpx.AsyncClient(
|
||||
follow_redirects=True,
|
||||
timeout=httpx.Timeout(timeout=10.0),
|
||||
transport=httpx.AsyncHTTPTransport(retries=1),
|
||||
)
|
||||
|
||||
async def _close_proxy_client():
|
||||
await proxy_client.aclose()
|
||||
|
||||
background_tasks.add_task(_close_proxy_client)
|
||||
proxy_resp = await _proxy_get(proxy_client, request, url, stream=True)
|
||||
|
||||
if proxy_resp.status_code >= 300:
|
||||
logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}")
|
||||
await proxy_resp.aclose()
|
||||
return PlainTextResponse(
|
||||
"proxy error",
|
||||
status_code=proxy_resp.status_code,
|
||||
)
|
||||
|
||||
|
@ -1153,6 +1413,7 @@ async def serve_proxy_media(
|
|||
_filter_proxy_resp_headers(
|
||||
proxy_resp,
|
||||
[
|
||||
"content-encoding",
|
||||
"content-length",
|
||||
"content-type",
|
||||
"content-range",
|
||||
|
@ -1168,20 +1429,26 @@ async def serve_proxy_media(
|
|||
)
|
||||
|
||||
|
||||
@app.get("/proxy/media/{encoded_url}/{size}")
|
||||
@app.get("/proxy/media/{exp}/{sig}/{encoded_url}/{size}")
|
||||
async def serve_proxy_media_resized(
|
||||
request: Request,
|
||||
exp: int,
|
||||
sig: str,
|
||||
encoded_url: str,
|
||||
size: int,
|
||||
background_tasks: fastapi.BackgroundTasks,
|
||||
) -> PlainTextResponse:
|
||||
if size not in {50, 740}:
|
||||
raise ValueError("Unsupported size")
|
||||
|
||||
is_webp_supported = "image/webp" in request.headers.get("accept", "")
|
||||
|
||||
# Decode the base64-encoded URL
|
||||
url = base64.urlsafe_b64decode(encoded_url).decode()
|
||||
check_url(url)
|
||||
media.verify_proxied_media_sig(exp, url, sig)
|
||||
|
||||
if cached_resp := _RESIZED_CACHE.get((url, size)):
|
||||
if (cached_resp := _RESIZED_CACHE.get((url, size))) and is_webp_supported:
|
||||
resized_content, resized_mimetype, resp_headers = cached_resp
|
||||
return PlainTextResponse(
|
||||
resized_content,
|
||||
|
@ -1189,11 +1456,21 @@ async def serve_proxy_media_resized(
|
|||
headers=resp_headers,
|
||||
)
|
||||
|
||||
proxy_resp = await _proxy_get(request, url, stream=False)
|
||||
proxy_client = httpx.AsyncClient(
|
||||
follow_redirects=True,
|
||||
timeout=httpx.Timeout(timeout=10.0),
|
||||
transport=httpx.AsyncHTTPTransport(retries=1),
|
||||
)
|
||||
|
||||
async def _close_proxy_client():
|
||||
await proxy_client.aclose()
|
||||
|
||||
background_tasks.add_task(_close_proxy_client)
|
||||
proxy_resp = await _proxy_get(proxy_client, request, url, stream=False)
|
||||
if proxy_resp.status_code >= 300:
|
||||
logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}")
|
||||
await proxy_resp.aclose()
|
||||
return PlainTextResponse(
|
||||
"proxy error",
|
||||
status_code=proxy_resp.status_code,
|
||||
)
|
||||
|
||||
|
@ -1219,10 +1496,10 @@ async def serve_proxy_media_resized(
|
|||
is_webp = False
|
||||
try:
|
||||
resized_buf = BytesIO()
|
||||
i.save(resized_buf, format="webp")
|
||||
is_webp = True
|
||||
i.save(resized_buf, format="webp" if is_webp_supported else i.format)
|
||||
is_webp = is_webp_supported
|
||||
except Exception:
|
||||
logger.exception("Failed to convert to webp")
|
||||
logger.exception("Failed to create thumbnail")
|
||||
resized_buf = BytesIO()
|
||||
i.save(resized_buf, format=i.format)
|
||||
resized_buf.seek(0)
|
||||
|
@ -1280,6 +1557,7 @@ async def serve_attachment(
|
|||
|
||||
@app.get("/attachments/thumbnails/{content_hash}/{filename}")
|
||||
async def serve_attachment_thumbnail(
|
||||
request: Request,
|
||||
content_hash: str,
|
||||
filename: str,
|
||||
db_session: AsyncSession = Depends(get_db_session),
|
||||
|
@ -1294,11 +1572,20 @@ async def serve_attachment_thumbnail(
|
|||
if not upload or not upload.has_thumbnail:
|
||||
raise HTTPException(status_code=404)
|
||||
|
||||
return FileResponse(
|
||||
UPLOAD_DIR / (content_hash + "_resized"),
|
||||
media_type="image/webp",
|
||||
headers={"Cache-Control": "max-age=31536000"},
|
||||
)
|
||||
is_webp_supported = "image/webp" in request.headers.get("accept", "")
|
||||
|
||||
if is_webp_supported:
|
||||
return FileResponse(
|
||||
UPLOAD_DIR / (content_hash + "_resized"),
|
||||
media_type="image/webp",
|
||||
headers={"Cache-Control": "max-age=31536000"},
|
||||
)
|
||||
else:
|
||||
return FileResponse(
|
||||
UPLOAD_DIR / content_hash,
|
||||
media_type=upload.content_type,
|
||||
headers={"Cache-Control": "max-age=31536000"},
|
||||
)
|
||||
|
||||
|
||||
@app.get("/robots.txt", response_class=PlainTextResponse)
|
||||
|
@ -1357,22 +1644,27 @@ async def json_feed(
|
|||
],
|
||||
}
|
||||
)
|
||||
return {
|
||||
"version": "https://jsonfeed.org/version/1",
|
||||
result = {
|
||||
"version": "https://jsonfeed.org/version/1.1",
|
||||
"title": f"{LOCAL_ACTOR.display_name}'s microblog'",
|
||||
"home_page_url": LOCAL_ACTOR.url,
|
||||
"feed_url": BASE_URL + "/feed.json",
|
||||
"author": {
|
||||
"name": LOCAL_ACTOR.display_name,
|
||||
"url": LOCAL_ACTOR.url,
|
||||
"avatar": LOCAL_ACTOR.icon_url,
|
||||
},
|
||||
"authors": [
|
||||
{
|
||||
"name": LOCAL_ACTOR.display_name,
|
||||
"url": LOCAL_ACTOR.url,
|
||||
}
|
||||
],
|
||||
"items": data,
|
||||
}
|
||||
if LOCAL_ACTOR.icon_url:
|
||||
result["authors"][0]["avatar"] = LOCAL_ACTOR.icon_url # type: ignore
|
||||
return result
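The feed now targets JSON Feed 1.1, so the author moves into an "authors" array and the avatar is only emitted when an icon is configured. An illustrative shape of the resulting document (all values are placeholders):

    # Illustrative JSON Feed 1.1 output produced by json_feed (placeholder values).
    feed = {
        "version": "https://jsonfeed.org/version/1.1",
        "title": "Alice's microblog",
        "home_page_url": "https://example.com",
        "feed_url": "https://example.com/feed.json",
        "authors": [
            {
                "name": "Alice",
                "url": "https://example.com",
                "avatar": "https://example.com/icon.png",  # only when icon_url is set
            }
        ],
        "items": [],
    }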
|
||||
|
||||
|
||||
async def _gen_rss_feed(
|
||||
db_session: AsyncSession,
|
||||
is_rss: bool,
|
||||
):
|
||||
fg = FeedGenerator()
|
||||
fg.id(BASE_URL + "/feed.rss")
|
||||
|
@ -1380,7 +1672,8 @@ async def _gen_rss_feed(
|
|||
fg.description(f"{LOCAL_ACTOR.display_name}'s microblog")
|
||||
fg.author({"name": LOCAL_ACTOR.display_name})
|
||||
fg.link(href=LOCAL_ACTOR.url, rel="alternate")
|
||||
fg.logo(LOCAL_ACTOR.icon_url)
|
||||
if LOCAL_ACTOR.icon_url:
|
||||
fg.logo(LOCAL_ACTOR.icon_url)
|
||||
fg.language("en")
|
||||
|
||||
outbox_objects = await _get_outbox_for_feed(db_session)
|
||||
|
@ -1402,8 +1695,12 @@ async def _gen_rss_feed(
|
|||
|
||||
fe = fg.add_entry()
|
||||
fe.id(outbox_object.url)
|
||||
if outbox_object.name is not None:
|
||||
fe.title(outbox_object.name)
|
||||
elif not is_rss: # Atom feeds require a title
|
||||
fe.title(outbox_object.url)
|
||||
|
||||
fe.link(href=outbox_object.url)
|
||||
fe.title(outbox_object.url)
|
||||
fe.description(content)
|
||||
fe.content(content)
|
||||
fe.published(outbox_object.ap_published_at.replace(tzinfo=timezone.utc))
|
||||
|
@ -1416,7 +1713,7 @@ async def rss_feed(
|
|||
db_session: AsyncSession = Depends(get_db_session),
|
||||
) -> PlainTextResponse:
|
||||
return PlainTextResponse(
|
||||
(await _gen_rss_feed(db_session)).rss_str(),
|
||||
(await _gen_rss_feed(db_session, is_rss=True)).rss_str(),
|
||||
headers={"Content-Type": "application/rss+xml"},
|
||||
)
|
||||
|
||||
|
@ -1426,6 +1723,6 @@ async def atom_feed(
|
|||
db_session: AsyncSession = Depends(get_db_session),
|
||||
) -> PlainTextResponse:
|
||||
return PlainTextResponse(
|
||||
(await _gen_rss_feed(db_session)).atom_str(),
|
||||
(await _gen_rss_feed(db_session, is_rss=False)).atom_str(),
|
||||
headers={"Content-Type": "application/atom+xml"},
|
||||
)
|
||||
|
|
31
app/media.py
|
@ -1,15 +1,44 @@
|
|||
import base64
|
||||
import time
|
||||
|
||||
from app.config import BASE_URL
|
||||
from app.config import hmac_sha256
|
||||
|
||||
SUPPORTED_RESIZE = [50, 740]
|
||||
EXPIRY_PERIOD = 86400
|
||||
EXPIRY_LENGTH = 7
|
||||
|
||||
|
||||
class InvalidProxySignatureError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def proxied_media_sig(expires: int, url: str) -> str:
|
||||
hm = hmac_sha256()
|
||||
hm.update(f"{expires}".encode())
|
||||
hm.update(b"|")
|
||||
hm.update(url.encode())
|
||||
return base64.urlsafe_b64encode(hm.digest()).decode()
|
||||
|
||||
|
||||
def verify_proxied_media_sig(expires: int, url: str, sig: str) -> None:
|
||||
now = int(time.time() / EXPIRY_PERIOD)
|
||||
expected = proxied_media_sig(expires, url)
|
||||
if now > expires or sig != expected:
|
||||
raise InvalidProxySignatureError("invalid or expired media")
|
||||
|
||||
|
||||
def proxied_media_url(url: str) -> str:
|
||||
if url.startswith(BASE_URL):
|
||||
return url
|
||||
expires = int(time.time() / EXPIRY_PERIOD) + EXPIRY_LENGTH
|
||||
sig = proxied_media_sig(expires, url)
|
||||
|
||||
return "/proxy/media/" + base64.urlsafe_b64encode(url.encode()).decode()
|
||||
return (
|
||||
BASE_URL
|
||||
+ f"/proxy/media/{expires}/{sig}/"
|
||||
+ base64.urlsafe_b64encode(url.encode()).decode()
|
||||
)
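Proxied media URLs now embed a day-granularity expiry bucket plus an HMAC-SHA256 over "expires|url", so links stay valid for EXPIRY_LENGTH days and can be checked without a database lookup. A standalone sketch of the same scheme (secret and URL are placeholders):

    # Standalone sketch of the signed proxy URL scheme above (placeholder secret).
    import base64
    import hashlib
    import hmac
    import time

    SECRET = b"placeholder-secret"
    EXPIRY_PERIOD = 86400  # one-day buckets
    EXPIRY_LENGTH = 7      # signatures stay valid for ~7 days

    def sign(expires: int, url: str) -> str:
        hm = hmac.new(SECRET, digestmod=hashlib.sha256)
        hm.update(f"{expires}".encode())
        hm.update(b"|")
        hm.update(url.encode())
        return base64.urlsafe_b64encode(hm.digest()).decode()

    url = "https://remote.example/media/cat.png"
    expires = int(time.time() / EXPIRY_PERIOD) + EXPIRY_LENGTH
    sig = sign(expires, url)

    # Verification mirrors verify_proxied_media_sig: recompute and compare.
    now = int(time.time() / EXPIRY_PERIOD)
    assert now <= expires and hmac.compare_digest(sig, sign(expires, url))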
|
||||
|
||||
|
||||
def resized_media_url(url: str, size: int) -> str:
|
||||
|
|
|
@ -132,7 +132,7 @@ async def post_micropub_endpoint(
|
|||
h = form_data["h"]
|
||||
entry_type = f"h-{h}"
|
||||
|
||||
logger.info(f"Creating {entry_type}")
|
||||
logger.info(f"Creating {entry_type=} with {access_token_info=}")
|
||||
|
||||
if entry_type != "h-entry":
|
||||
return JSONResponse(
|
||||
|
@ -150,7 +150,7 @@ async def post_micropub_endpoint(
|
|||
else:
|
||||
content = form_data["content"]
|
||||
|
||||
public_id = await send_create(
|
||||
public_id, _ = await send_create(
|
||||
db_session,
|
||||
"Note",
|
||||
content,
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import enum
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
|
@ -54,6 +55,10 @@ class Actor(Base, BaseActor):
|
|||
is_blocked = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||
is_deleted = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||
|
||||
are_announces_hidden_from_stream = Column(
|
||||
Boolean, nullable=False, default=False, server_default="0"
|
||||
)
|
||||
|
||||
@property
|
||||
def is_from_db(self) -> bool:
|
||||
return True
|
||||
|
@ -251,6 +256,8 @@ class OutboxObject(Base, BaseObject):
|
|||
"mediaType": attachment.upload.content_type,
|
||||
"name": attachment.alt or attachment.filename,
|
||||
"url": url,
|
||||
"width": attachment.upload.width,
|
||||
"height": attachment.upload.height,
|
||||
"proxiedUrl": url,
|
||||
"resizedUrl": BASE_URL
|
||||
+ (
|
||||
|
@ -430,7 +437,7 @@ class OutboxObjectAttachment(Base):
|
|||
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
|
||||
|
||||
upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False)
|
||||
upload = relationship(Upload, uselist=False)
|
||||
upload: Mapped["Upload"] = relationship(Upload, uselist=False)
|
||||
|
||||
|
||||
class IndieAuthAuthorizationRequest(Base):
|
||||
|
@ -453,17 +460,53 @@ class IndieAuthAccessToken(Base):
|
|||
__tablename__ = "indieauth_access_token"
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||
created_at: Mapped[datetime] = Column(
|
||||
DateTime(timezone=True), nullable=False, default=now
|
||||
)
|
||||
|
||||
# Will be null for personal access tokens
|
||||
indieauth_authorization_request_id = Column(
|
||||
Integer, ForeignKey("indieauth_authorization_request.id"), nullable=True
|
||||
)
|
||||
indieauth_authorization_request = relationship(
|
||||
IndieAuthAuthorizationRequest,
|
||||
uselist=False,
|
||||
)
|
||||
|
||||
access_token = Column(String, nullable=False, unique=True, index=True)
|
||||
expires_in = Column(Integer, nullable=False)
|
||||
access_token: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
|
||||
refresh_token = Column(String, nullable=True, unique=True, index=True)
|
||||
expires_in: Mapped[int] = Column(Integer, nullable=False)
|
||||
scope = Column(String, nullable=False)
|
||||
is_revoked = Column(Boolean, nullable=False, default=False)
|
||||
was_refreshed = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||
|
||||
|
||||
class OAuthClient(Base):
|
||||
__tablename__ = "oauth_client"
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||
|
||||
# Request
|
||||
client_name = Column(String, nullable=False)
|
||||
redirect_uris: Mapped[list[str]] = Column(JSON, nullable=True)
|
||||
|
||||
# Optional from request
|
||||
client_uri = Column(String, nullable=True)
|
||||
logo_uri = Column(String, nullable=True)
|
||||
scope = Column(String, nullable=True)
|
||||
|
||||
# Response
|
||||
client_id = Column(String, nullable=False, unique=True, index=True)
|
||||
client_secret = Column(String, nullable=False, unique=True)
|
||||
|
||||
|
||||
@enum.unique
|
||||
class WebmentionType(str, enum.Enum):
|
||||
UNKNOWN = "unknown"
|
||||
LIKE = "like"
|
||||
REPLY = "reply"
|
||||
REPOST = "repost"
|
||||
|
||||
|
||||
class Webmention(Base):
|
||||
|
@ -482,6 +525,8 @@ class Webmention(Base):
|
|||
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
|
||||
outbox_object = relationship(OutboxObject, uselist=False)
|
||||
|
||||
webmention_type = Column(Enum(WebmentionType), nullable=True)
|
||||
|
||||
@property
|
||||
def as_facepile_item(self) -> webmentions.Webmention | None:
|
||||
if not self.source_microformats:
|
||||
|
@ -491,6 +536,7 @@ class Webmention(Base):
|
|||
self.source_microformats["items"], self.source
|
||||
)
|
||||
except Exception:
|
||||
# TODO: return a facepile with the unknown image
|
||||
logger.warning(
|
||||
f"Failed to generate facefile item for Webmention id={self.id}"
|
||||
)
|
||||
|
|
|
@ -151,7 +151,7 @@ def _set_next_try(
|
|||
if not outgoing_activity.tries:
|
||||
raise ValueError("Should never happen")
|
||||
|
||||
if outgoing_activity.tries == _MAX_RETRIES:
|
||||
if outgoing_activity.tries >= _MAX_RETRIES:
|
||||
outgoing_activity.is_errored = True
|
||||
outgoing_activity.next_try = None
|
||||
else:
|
||||
|
|
|
@ -102,6 +102,8 @@ async def _prune_old_inbox_objects(
|
|||
models.InboxObject.ap_type.in_(["Note"]),
|
||||
)
|
||||
),
|
||||
# Keep Move objects as they are linked to notifications
|
||||
models.InboxObject.ap_type.not_in(["Move"]),
|
||||
# Filter by retention days
|
||||
models.InboxObject.ap_published_at
|
||||
< now() - timedelta(days=INBOX_RETENTION_DAYS),
|
||||
|
|
28
app/redirect.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
from fastapi import Request
|
||||
|
||||
from app import templates
|
||||
from app.database import AsyncSession
|
||||
|
||||
|
||||
async def redirect(
|
||||
request: Request,
|
||||
db_session: AsyncSession,
|
||||
url: str,
|
||||
) -> templates.TemplateResponse:
|
||||
"""
|
||||
Similar to RedirectResponse, but uses a 200 response with HTML.
|
||||
|
||||
Needed for remote redirects on form submission endpoints,
|
||||
since our CSP policy disallows remote form submission.
|
||||
https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984
|
||||
"""
|
||||
return await templates.render_template(
|
||||
db_session,
|
||||
request,
|
||||
"redirect.html",
|
||||
{
|
||||
"request": request,
|
||||
"url": url,
|
||||
},
|
||||
headers={"Refresh": "0;url=" + url},
|
||||
)
|
|
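For context, a handler that needs to send the browser to a remote site after a form POST returns this helper instead of a `RedirectResponse`. A minimal sketch, assuming a FastAPI route with a `Depends`-provided session (the route path and the `get_db_session` dependency name are illustrative, not taken from this changeset):

from fastapi import APIRouter, Depends, Request

from app.database import AsyncSession, get_db_session  # dependency name assumed
from app.redirect import redirect

router = APIRouter()


@router.post("/hypothetical/form")  # illustrative route, not part of this diff
async def form_submit(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
):
    # ... process the submitted form here ...
    # Returning redirect() renders redirect.html with a `Refresh: 0;url=...`
    # header, i.e. a 200 response instead of a 302, which keeps the CSP
    # form-action restriction satisfied.
    return await redirect(request, db_session, "https://remote.example/destination")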
@@ -51,17 +51,20 @@ $code-highlight-background: #f0f0f0;
  .p-summary {
    display: inline-block;
  }
  label {
  .show-more-btn {
    margin-left: 5px;
  }
  .show-more-state {
    display: none;
  summary {
    display: inline-block;
  }
  .show-more-state ~ .obj-content {
    margin-top: 0;
  summary::-webkit-details-marker {
    display: none
  }
  .show-more-state:checked ~ .obj-content {
    display: none;
  &:not([open]) .show-more-btn::after {
    content: 'show more';
  }
  &[open] .show-more-btn::after {
    content: 'show less';
  }
}
.sensitive-attachment {

@@ -429,8 +432,7 @@ a.label-btn {
.activity-attachment {
  margin: 30px 0 20px 0;
  img, audio, video {
    width: 100%;
    max-width: 740px;
    max-width: calc(min(740px, 100%));
  }
}
img.inline-img {

@@ -456,7 +458,7 @@ a.label-btn {
  border: 2px dashed $secondary-color;
}

.error-box {
.error-box, .scolor {
  color: $secondary-color;
}

@@ -467,6 +469,9 @@ a.label-btn {
  span {
    color: $muted-color;
  }
  span.new {
    color: $secondary-color;
  }
}
.actor-metadata {
  color: $muted-color;

@@ -531,3 +536,36 @@ a.label-btn {
    text-decoration: underline;
  }
}

.ap-place {
  h3 {
    display: inline;
    font-weight: normal;
  }
  h3::after {
    content: ': ';
  }
}

.margin-top-20 {
  margin-top: 20px;
}

.video-wrapper {
  position: relative;
}

.video-gif-overlay {
  display: none;
}

.video-gif-mode + .video-gif-overlay {
  display: block;
  position: absolute;
  top: 5px;
  left: 5px;
  padding: 0 3px;
  font-size: 0.8em;
  background: rgba(0,0,0,.5);
  color: #fff;
}
@@ -1,13 +1,14 @@
import re
import typing

from loguru import logger
from mistletoe import Document  # type: ignore
from mistletoe.block_token import CodeFence  # type: ignore
from mistletoe.html_renderer import HTMLRenderer  # type: ignore
from mistletoe.span_token import SpanToken  # type: ignore
from pygments import highlight  # type: ignore
from pygments.formatters import HtmlFormatter  # type: ignore
from pygments.lexers import get_lexer_by_name as get_lexer  # type: ignore
from pygments.lexers import guess_lexer  # type: ignore
from pygments.util import ClassNotFound  # type: ignore
from sqlalchemy import select

from app import webfinger

@@ -78,13 +79,17 @@ class CustomRenderer(HTMLRenderer):

    def render_mention(self, token: Mention) -> str:
        mention = token.target
        suffix = ""
        if mention.endswith("."):
            mention = mention[:-1]
            suffix = "."
        actor = self.mentioned_actors.get(mention)
        if not actor:
            return mention

        self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention))

        link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>'  # noqa: E501
        link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>{suffix}'  # noqa: E501
        return link

    def render_hashtag(self, token: Hashtag) -> str:

@@ -99,10 +104,16 @@ class CustomRenderer(HTMLRenderer):
        )
        return link

    def render_block_code(self, token: typing.Any) -> str:
    def render_block_code(self, token: CodeFence) -> str:
        lexer_attr = ""
        try:
            lexer = get_lexer(token.language)
            lexer_attr = f' data-microblogpub-lexer="{lexer.aliases[0]}"'
        except ClassNotFound:
            pass

        code = token.children[0].content
        lexer = get_lexer(token.language) if token.language else guess_lexer(code)
        return highlight(code, lexer, _FORMATTER)
        return f"<pre><code{lexer_attr}>\n{code}\n</code></pre>"


async def _prefetch_mentioned_actors(

@@ -118,23 +129,30 @@ async def _prefetch_mentioned_actors(
        if mention in actors:
            continue

        _, username, domain = mention.split("@")
        actor = (
            await db_session.execute(
                select(models.Actor).where(
                    models.Actor.handle == mention,
                    models.Actor.is_deleted.is_(False),
                )
            )
        ).scalar_one_or_none()
        if not actor:
            actor_url = await webfinger.get_actor_url(mention)
            if not actor_url:
                # FIXME(ts): raise an error?
                continue
            actor = await fetch_actor(db_session, actor_url)
        # XXX: the regex catches stuff like `@toto@example.com.`
        if mention.endswith("."):
            mention = mention[:-1]

        actors[mention] = actor
        try:
            _, username, domain = mention.split("@")
            actor = (
                await db_session.execute(
                    select(models.Actor).where(
                        models.Actor.handle == mention,
                        models.Actor.is_deleted.is_(False),
                    )
                )
            ).scalar_one_or_none()
            if not actor:
                actor_url = await webfinger.get_actor_url(mention)
                if not actor_url:
                    # FIXME(ts): raise an error?
                    continue
                actor = await fetch_actor(db_session, actor_url)

            actors[mention] = actor
        except Exception:
            logger.exception(f"Failed to prefetch {mention}")

    return actors
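The renderer change above stops running Pygments at publish time: fenced blocks are emitted as plain `<pre><code>` tagged with a `data-microblogpub-lexer` attribute, and highlighting happens later in app/utils/highlight.py (see its hunk further down). A small sketch mirroring just the attribute-building logic, using the same Pygments calls; the helper name is illustrative:

from pygments.lexers import get_lexer_by_name as get_lexer
from pygments.util import ClassNotFound


def lexer_attr_for(language: str | None) -> str:
    """Mirror of the attribute construction in render_block_code (sketch)."""
    if not language:
        return ""
    try:
        lexer = get_lexer(language)
    except ClassNotFound:
        return ""
    # lexer.aliases[0] is the canonical Pygments alias, e.g. "python" for "py"
    return f' data-microblogpub-lexer="{lexer.aliases[0]}"'


print(lexer_attr_for("py"))        # ' data-microblogpub-lexer="python"'
print(lexer_attr_for("notalang"))  # ''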
app/static/common.js (new file)
@@ -0,0 +1,32 @@
function hasAudio (video) {
    return video.mozHasAudio ||
        Boolean(video.webkitAudioDecodedByteCount) ||
        Boolean(video.audioTracks && video.audioTracks.length);
}

function setVideoInGIFMode(video) {
    if (!hasAudio(video)) {
        if (typeof video.loop == 'boolean' && video.duration <= 10.0) {
            video.classList.add("video-gif-mode");
            video.loop = true;
            video.controls = false;
            video.addEventListener("mouseover", () => {
                video.play();
            })
            video.addEventListener("mouseleave", () => {
                video.pause();
            })
        }
    };
}

var items = document.getElementsByTagName("video")
for (var i = 0; i < items.length; i++) {
    if (items[i].duration) {
        setVideoInGIFMode(items[i]);
    } else {
        items[i].addEventListener("loadeddata", function() {
            setVideoInGIFMode(this);
        });
    }
}
@@ -1,4 +1,3 @@
import base64
from datetime import datetime
from datetime import timezone
from functools import lru_cache

@@ -28,6 +27,7 @@ from app.ap_object import Object
from app.config import BASE_URL
from app.config import CUSTOM_FOOTER
from app.config import DEBUG
from app.config import SESSION_TIMEOUT
from app.config import VERSION
from app.config import generate_csrf_token
from app.config import session_serializer

@@ -39,7 +39,7 @@ from app.utils.highlight import HIGHLIGHT_CSS
from app.utils.highlight import highlight

_templates = Jinja2Templates(
    directory="app/templates",
    directory=["data/templates", "app/templates"],  # type: ignore  # bad typing
    trim_blocks=True,
    lstrip_blocks=True,
)

@@ -59,13 +59,8 @@ def _filter_domain(text: str) -> str:

def _media_proxy_url(url: str | None) -> str:
    if not url:
        return "/static/nopic.png"

    if url.startswith(BASE_URL):
        return url

    encoded_url = base64.urlsafe_b64encode(url.encode()).decode()
    return f"/proxy/media/{encoded_url}"
        return BASE_URL + "/static/nopic.png"
    return proxied_media_url(url)


def is_current_user_admin(request: Request) -> bool:

@@ -75,10 +70,10 @@ def is_current_user_admin(request: Request) -> bool:
    try:
        loaded_session = session_serializer.loads(
            session_cookie,
            max_age=3600 * 12,
            max_age=SESSION_TIMEOUT,
        )
    except Exception:
        pass
        logger.exception("Failed to validate session timeout")
    else:
        is_admin = loaded_session.get("is_logged_in")

@@ -91,6 +86,7 @@ async def render_template(
    template: str,
    template_args: dict[str, Any] | None = None,
    status_code: int = 200,
    headers: dict[str, str] | None = None,
) -> TemplateResponse:
    if template_args is None:
        template_args = {}

@@ -135,6 +131,7 @@ async def render_template(
            **template_args,
        },
        status_code=status_code,
        headers=headers,
    )


@@ -339,6 +336,14 @@ def _clean_html(html: str, note: Object) -> str:
        raise


def _clean_html_wm(html: str) -> str:
    return bleach.clean(
        html,
        attributes=ALLOWED_ATTRIBUTES,
        strip=True,
    )


def _timeago(original_dt: datetime) -> str:
    dt = original_dt
    if dt.tzinfo:

@@ -388,7 +393,7 @@ def _html2text(content: str) -> str:

def _replace_emoji(u: str, _) -> str:
    filename = "-".join(hex(ord(c))[2:] for c in u)
    return config.EMOJI_TPL.format(filename=filename, raw=u)
    return config.EMOJI_TPL.format(base_url=BASE_URL, filename=filename, raw=u)


def _emojify(text: str, is_local: bool) -> str:

@@ -415,6 +420,7 @@ def _poll_item_pct(item: ap.RawObject, voters_count: int) -> int:
_templates.env.filters["domain"] = _filter_domain
_templates.env.filters["media_proxy_url"] = _media_proxy_url
_templates.env.filters["clean_html"] = _clean_html
_templates.env.filters["clean_html_wm"] = _clean_html_wm
_templates.env.filters["timeago"] = _timeago
_templates.env.filters["format_date"] = _format_date
_templates.env.filters["has_media_type"] = _has_media_type

@@ -430,3 +436,4 @@ _templates.env.globals["BASE_URL"] = config.BASE_URL
_templates.env.globals["HIDES_FOLLOWERS"] = config.HIDES_FOLLOWERS
_templates.env.globals["HIDES_FOLLOWING"] = config.HIDES_FOLLOWING
_templates.env.globals["NAVBAR_ITEMS"] = config.NavBarItems
_templates.env.globals["ICON_URL"] = config.CONFIG.icon_url
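One practical effect of the `directory=["data/templates", "app/templates"]` change is that template lookups now search `data/templates` first, so a user-provided file there shadows the stock template with the same name. A minimal standalone sketch of that loader behavior (not the app's full setup):

from fastapi.templating import Jinja2Templates

# Directories are searched in order: data/templates/layout.html, if present,
# takes precedence over app/templates/layout.html.
templates = Jinja2Templates(
    directory=["data/templates", "app/templates"],  # type: ignore  # list works at runtime
    trim_blocks=True,
    lstrip_blocks=True,
)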
@@ -90,5 +90,5 @@
</p>
</form>
</div>
<script src="/static/new.js?v={{ JS_HASH }}"></script>
<script src="{{ BASE_URL }}/static/new.js?v={{ JS_HASH }}"></script>
{% endblock %}
@@ -11,8 +11,8 @@
<ul class="h-feed" id="articles">
<data class="p-name" value="{{ local_actor.display_name}}'s articles"></data>
{% for outbox_object in objects %}
<li>
<span class="muted">{{ outbox_object.ap_published_at.strftime("%b %d, %Y") }}</span> <a href="{{ outbox_object.url }}">{{ outbox_object.name }}</a>
<li class="h-entry">
<time class="muted dt-published" datetime="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at.strftime("%b %d, %Y") }}</time> <a href="{{ outbox_object.url }}" class="u-url u-uid p-name">{{ outbox_object.name }}</a>
</li>
{% endfor %}
</ul>
@@ -14,7 +14,7 @@
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
<meta content="Homepage" property="og:title" />
<meta content="{{ local_actor.summary | html2text | trim }}" property="og:description" />
<meta content="{{ local_actor.url }}" property="og:image" />
<meta content="{{ ICON_URL }}" property="og:image" />
<meta content="summary" property="twitter:card" />
<meta content="{{ local_actor.handle }}" property="profile:username" />
{% endif %}
@@ -3,6 +3,7 @@

{% block head %}
<title>{{ local_actor.display_name }}'s followers</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}

{% block content %}
@@ -3,6 +3,7 @@

{% block head %}
<title>{{ local_actor.display_name }}'s follows</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}

{% block content %}
@@ -25,12 +25,13 @@
</div>

{%- macro header_link(url, text) -%}
{% set url_for = request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% set url_for = BASE_URL + request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% endmacro %}

{%- macro navbar_item_link(navbar_item) -%}
<a href="{{ navbar_item[0] }}" {% if request.url.path == navbar_item[0] %}class="active"{% endif %}>{{ navbar_item[1] }}</a>
{% set url_for = BASE_URL + navbar_item[0] %}
<a href="{{ navbar_item[0] }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ navbar_item[1] }}</a>
{% endmacro %}

<div class="public-top-menu">
@@ -13,7 +13,7 @@
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
<meta content="Homepage" property="og:title" />
<meta content="{{ local_actor.summary | html2text | trim }}" property="og:description" />
<meta content="{{ local_actor.url }}" property="og:image" />
<meta content="{{ ICON_URL }}" property="og:image" />
<meta content="summary" property="twitter:card" />
<meta content="{{ local_actor.handle }}" property="profile:username" />
{% endblock %}

@@ -26,24 +26,30 @@
<div class="h-feed">
<data class="p-name" value="{{ local_actor.display_name}}'s notes"></data>
{% for outbox_object in objects %}
{% if outbox_object.ap_type in ["Note", "Article", "Video", "Question"] %}
{% if outbox_object.ap_type in ["Note", "Video", "Question"] %}
{{ utils.display_object(outbox_object) }}
{% elif outbox_object.ap_type == "Announce" %}
<div class="shared-header"><strong>{{ utils.display_tiny_actor_icon(local_actor) }} {{ local_actor.display_name | clean_html(local_actor) | safe }}</strong> shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
{{ utils.display_object(outbox_object.relates_to_anybox_object) }}
<div class="h-entry" id="{{ outbox_object.permalink_id }}">
<div class="shared-header"><strong><a class="p-author h-card" href="{{ local_actor.url }}">{{ utils.display_tiny_actor_icon(local_actor) }} {{ local_actor.display_name | clean_html(local_actor) | safe }}</a></strong> shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
<div class="h-cite u-repost-of">
{{ utils.display_object(outbox_object.relates_to_anybox_object, is_h_entry=False) }}
</div>
</div>
{% endif %}
{% endfor %}
</div>

<div class="box">
{% if has_previous_page %}
<a href="{{ url_for("index") }}?page={{ current_page - 1 }}">Previous</a>
{% endif %}
{% if has_previous_page or has_next_page %}
<div class="box">
{% if has_previous_page %}
<a href="{{ url_for("index") }}?page={{ current_page - 1 }}">Previous</a>
{% endif %}

{% if has_next_page %}
<a href="{{ url_for("index") }}?page={{ current_page + 1 }}">Next</a>
{% endif %}
</div>
{% if has_next_page %}
<a href="{{ url_for("index") }}?page={{ current_page + 1 }}">Next</a>
{% endif %}
</div>
{% endif %}

{% else %}
<div class="empty-state">
@@ -10,8 +10,12 @@
{% endif %}
<div class="indieauth-details">
<div>
<a class="lcolor" href="{{ client.url }}">{{ client.name }}</a>
<p>wants you to login as <strong class="lcolor">{{ me }}</strong> with the following redirect URI: <code>{{ redirect_uri }}</code>.</p>
{% if client.url %}
<a class="scolor" href="{{ client.url }}">{{ client.name }}</a>
{% else %}
<span class="scolor">{{ client.name }}</span>
{% endif %}
<p>wants you to login{% if me %} as <strong class="lcolor">{{ me }}</strong>{% endif %} with the following redirect URI: <code>{{ redirect_uri }}</code>.</p>


<form method="POST" action="{{ url_for('indieauth_flow') }}" class="form">

@@ -4,11 +4,11 @@
<meta charset="utf-8">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<link rel="stylesheet" href="/static/css/main.css?v={{ CSS_HASH }}">
<link rel="stylesheet" href="{{ BASE_URL }}/static/css/main.css?v={{ CSS_HASH }}">
<link rel="alternate" title="{{ local_actor.display_name}}'s microblog" type="application/json" href="{{ url_for("json_feed") }}" />
<link rel="alternate" href="{{ url_for("rss_feed") }}" type="application/rss+xml" title="{{ local_actor.display_name}}'s microblog">
<link rel="alternate" href="{{ url_for("atom_feed") }}" type="application/atom+xml" title="{{ local_actor.display_name}}'s microblog">
<link rel="icon" type="image/x-icon" href="/static/favicon.ico">
<link rel="icon" type="image/x-icon" href="{{ BASE_URL }}/static/favicon.ico">
<style>{{ highlight_css }}</style>
{% block head %}{% endblock %}
</head>

@@ -18,8 +18,8 @@
{% if is_admin %}
<div id="admin">
{% macro admin_link(url, text) %}
{% set url_for = request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% set url_for = BASE_URL + request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% endmacro %}
<div class="admin-menu">
<nav class="flexbox">

@@ -53,7 +53,8 @@
</div>
</footer>
{% if is_admin %}
<script src="/static/common-admin.js?v={{ JS_HASH }}"></script>
<script src="{{ BASE_URL }}/static/common-admin.js?v={{ JS_HASH }}"></script>
{% endif %}
<script src="{{ BASE_URL }}/static/common.js?v={{ JS_HASH }}"></script>
</body>
</html>
@@ -1,5 +1,8 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block main_tag %} class="main-flex"{% endblock %}
{% block content %}
<div class="centered">

@@ -7,7 +10,7 @@
{% if error %}
<p class="primary-color">Invalid password.</p>
{% endif %}
<form class="form" action="/admin/login" method="POST">
<form class="form" action="{{ BASE_URL }}/admin/login" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
<input type="hidden" name="redirect" value="{{ redirect }}">
<input type="password" placeholder="password" name="password" autofocus>
@@ -10,6 +10,9 @@
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.actor.ap_id }}">
{% if with_icon %}{{ utils.display_tiny_actor_icon(notif.actor) }}{% endif %} {{ notif.actor.display_name | clean_html(notif.actor) | safe }}</a> {{ text }}
<span title="{{ notif.created_at.isoformat() }}">{{ notif.created_at | timeago }}</span>
{% if notif.is_new %}
<span class="new">new</span>
{% endif %}
</div>
{% endmacro %}

@@ -48,7 +51,7 @@
{% elif notif.notification_type.value == "unblock" %}
{{ notif_actor_action(notif, "was unblocked") }}
{{ utils.display_actor(notif.actor, actors_metadata) }}
{%- elif notif.notification_type.value == "move" %}
{%- elif notif.notification_type.value == "move" and notif.inbox_object %}
{# for move notif, the actor is the target and the inbox object the Move activity #}
<div class="actor-action">
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.inbox_object.actor.ap_id }}">

@@ -66,8 +69,8 @@
{{ notif_actor_action(notif, "shared a post", with_icon=True) }}
{{ utils.display_object(notif.outbox_object) }}
{% elif notif.notification_type.value == "undo_announce" %}
{{ notif_actor_action(notif, "unshared a post") }}
{{ utils.display_object(notif.outbox_object, with_icon=True) }}
{{ notif_actor_action(notif, "unshared a post", with_icon=True) }}
{{ utils.display_object(notif.outbox_object) }}
{% elif notif.notification_type.value == "mention" %}
{{ notif_actor_action(notif, "mentioned you") }}
{{ utils.display_object(notif.inbox_object) }}
@@ -15,7 +15,7 @@
<meta content="article" property="og:type" />
<meta content="{{ outbox_object.url }}" property="og:url" />
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
<meta content="{% if outbox_object.name %}{{ name }}{% else %}Note{% endif %}" property="og:title" />
<meta content="{% if outbox_object.name %}{{ outbox_object.name }}{% else %}Note{% endif %}" property="og:title" />
<meta content="{{ excerpt }}" property="og:description" />
<meta content="{{ local_actor.icon_url }}" property="og:image" />
<meta content="summary" property="twitter:card" />

@@ -31,9 +31,16 @@
{% macro display_replies_tree(replies_tree_node) %}

{% if replies_tree_node.is_requested %}
{{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True) }}
{{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True, is_h_entry=False) }}
{% else %}
{{ utils.display_object(replies_tree_node.ap_object) }}
{% if replies_tree_node.wm_reply %}
{# u-comment h-cite is displayed by default for webmention #}
{{ utils.display_webmention_reply(replies_tree_node.wm_reply) }}
{% else %}
<div class="u-comment h-cite">
{{ utils.display_object(replies_tree_node.ap_object, is_h_entry=False) }}
</div>
{% endif %}
{% endif %}

{% for child in replies_tree_node.children %}

@@ -42,6 +49,8 @@

{% endmacro %}

<div class="h-entry">
{{ display_replies_tree(replies_tree) }}
</div>

{% endblock %}
app/templates/redirect.html (new file)
@@ -0,0 +1,15 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}

{% block content %}
{% include "header.html" %}

<div class="box">
<p>You are being redirected to: <a href="{{ url }}">{{ url }}</a></p>
</div>

{% endblock %}
app/templates/redirect_to_remote_instance.html (new file)
@@ -0,0 +1,15 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}

{% block content %}
{% include "header.html" %}

<div class="box">
<p>You are being redirected to your instance: <a href="{{ url }}">{{ url }}</a></p>
</div>

{% endblock %}
@@ -3,6 +3,7 @@

{% block head %}
<title>Remote follow {{ local_actor.display_name }}</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}

{% block content %}
@@ -3,6 +3,7 @@

{% block head %}
<title>Interact from your instance</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}

{% block content %}
@@ -1,168 +1,254 @@
{% macro embed_csrf_token() %}
{% block embed_csrf_token scoped %}
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
{% endblock %}
{% endmacro %}

{% macro embed_redirect_url(permalink_id=None) %}
{% block embed_redirect_url scoped %}
<input type="hidden" name="redirect_url" value="{{ request.url }}{% if permalink_id %}#{{ permalink_id }}{% endif %}">
{% endblock %}
{% endmacro %}

{% macro admin_block_button(actor) %}
{% block admin_block_button scoped %}
<form action="{{ request.url_for("admin_actions_block") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="block">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_unblock_button(actor) %}
{% block admin_unblock_button scoped %}
<form action="{{ request.url_for("admin_actions_unblock") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="unblock">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_hide_shares_button(actor) %}
{% block admin_hide_shares_button scoped %}
<form action="{{ request.url_for("admin_actions_hide_announces") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="hide shares">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_show_shares_button(actor) %}
{% block admin_show_shares_button scoped %}
<form action="{{ request.url_for("admin_actions_show_announces") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="show shares">
</form>
{% endblock %}
{% endmacro %}


{% macro admin_follow_button(actor) %}
{% block admin_follow_button scoped %}
<form action="{{ request.url_for("admin_actions_follow") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="follow">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_accept_incoming_follow_button(notif) %}
{% block admin_accept_incoming_follow_button scoped %}
<form action="{{ request.url_for("admin_actions_accept_incoming_follow") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="notification_id" value="{{ notif.id }}">
<input type="submit" value="accept follow">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_reject_incoming_follow_button(notif) %}
{% block admin_reject_incoming_follow_button scoped %}
<form action="{{ request.url_for("admin_actions_reject_incoming_follow") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="notification_id" value="{{ notif.id }}">
<input type="submit" value="reject follow">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_like_button(ap_object_id, permalink_id) %}
{% block admin_like_button scoped %}
<form action="{{ request.url_for("admin_actions_like") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="like">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_bookmark_button(ap_object_id, permalink_id) %}
{% block admin_bookmark_button scoped %}
<form action="{{ request.url_for("admin_actions_bookmark") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="bookmark">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_unbookmark_button(ap_object_id, permalink_id) %}
{% block admin_unbookmark_button scoped %}
<form action="{{ request.url_for("admin_actions_unbookmark") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="unbookmark">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_pin_button(ap_object_id, permalink_id) %}
{% block admin_pin_button scoped %}
<form action="{{ request.url_for("admin_actions_pin") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="pin">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_unpin_button(ap_object_id, permalink_id) %}
{% block admin_unpin_button scoped %}
<form action="{{ request.url_for("admin_actions_unpin") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="unpin">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_delete_button(ap_object) %}
{% block admin_delete_button scoped %}
<form action="{{ request.url_for("admin_actions_delete") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
<input type="hidden" name="redirect_url" value="{% if request.url.path.endswith("/" + ap_object.public_id) or (request.url.path == "/admin/object" and request.query_params.ap_id.endswith("/" + ap_object.public_id)) %}{{ request.base_url}}{% else %}{{ request.url }}{% endif %}">
<input type="hidden" name="ap_object_id" value="{{ ap_object.ap_id }}">
<input type="submit" value="delete">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_force_delete_button(ap_object_id, permalink_id=None) %}
{% block admin_force_delete_button scoped %}
<form action="{{ request.url_for("admin_actions_force_delete") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="local delete">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_force_delete_webmention_button(webmention_id, permalink_id=None) %}
{% block admin_force_delete_webmention_button scoped %}
<form action="{{ request.url_for("admin_actions_force_delete_webmention") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="webmention_id" value="{{ webmention_id }}">
<input type="submit" value="local delete">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_announce_button(ap_object_id, permalink_id=None) %}
{% block admin_announce_button scoped %}
<form action="{{ request.url_for("admin_actions_announce") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="share">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_undo_button(ap_object_id, action="undo", permalink_id=None) %}
{% block admin_undo_button scoped %}
<form action="{{ request.url_for("admin_actions_undo") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="{{ action }}">
</form>
{% endblock %}
{% endmacro %}

{% macro admin_reply_button(ap_object_id) %}
<form action="/admin/new" method="GET">
{% block admin_reply_button scoped %}
<form action="{{ BASE_URL }}/admin/new" method="GET">
<input type="hidden" name="in_reply_to" value="{{ ap_object_id }}">
<button type="submit">reply</button>
</form>
{% endblock %}
{% endmacro %}

{% macro admin_dm_button(actor_handle) %}
<form action="/admin/new" method="GET">
{% block admin_dm_button scoped %}
<form action="{{ BASE_URL }}/admin/new" method="GET">
<input type="hidden" name="with_content" value="{{ actor_handle }}">
<input type="hidden" name="with_visibility" value="DIRECT">
<button type="submit">direct message</button>
</form>
{% endblock %}
{% endmacro %}

{% macro admin_mention_button(actor_handle) %}
<form action="/admin/new" method="GET">
{% block admin_mention_button scoped %}
<form action="{{ BASE_URL }}/admin/new" method="GET">
<input type="hidden" name="with_content" value="{{ actor_handle }}">
<button type="submit">mention</button>
</form>
{% endblock %}
{% endmacro %}


{% macro admin_profile_button(ap_actor_id) %}
{% block admin_profile_button scoped %}
<form action="{{ url_for("admin_profile") }}" method="GET">
<input type="hidden" name="actor_id" value="{{ ap_actor_id }}">
<button type="submit">profile</button>
</form>
{% endblock %}
{% endmacro %}

{% macro admin_expand_button(ap_object) %}
{% block admin_expand_button scoped %}
{# TODO turn these into a regular link and append permalink ID if it's a reply #}
<form action="{{ url_for("admin_object") }}" method="GET">
<input type="hidden" name="ap_id" value="{{ ap_object.ap_id }}">
<button type="submit">expand</button>
</form>
{% endblock %}
{% endmacro %}

{% macro display_box_filters(route) %}
{% block display_box_filters scoped %}
<nav class="flexbox box">
<ul>
<li>Filter by</li>

@@ -179,13 +265,17 @@
{% endif %}
</ul>
</nav>
{% endblock %}
{% endmacro %}

{% macro display_tiny_actor_icon(actor) %}
<img class="tiny-actor-icon" src="{{ actor.resized_icon_url }}" alt="{{ actor.display_name }}'s avatar">
{% block display_tiny_actor_icon scoped %}
<img class="tiny-actor-icon" src="{{ actor.resized_icon_url }}" alt="">
{% endblock %}
{% endmacro %}

{% macro actor_action(inbox_object, text, with_icon=False) %}
{% block actor_action scoped %}
<div class="actor-action">
<a href="{{ url_for("admin_profile") }}?actor_id={{ inbox_object.actor.ap_id }}">
{% if with_icon %}{{ display_tiny_actor_icon(inbox_object.actor) }}{% endif %} {{ inbox_object.actor.display_name | clean_html(inbox_object.actor) | safe }}

@@ -193,9 +283,11 @@
<span title="{{ inbox_object.ap_published_at.isoformat() }}">{{ inbox_object.ap_published_at | timeago }}</span>
</div>

{% endblock %}
{% endmacro %}

{% macro display_actor(actor, actors_metadata={}, embedded=False, with_details=False, pending_incoming_follow_notif=None) %}
{% block display_actor scoped %}
{% set metadata = actors_metadata.get(actor.ap_id) %}

{% if not embedded %}

@@ -273,6 +365,11 @@
<li>rejected</li>
{% endif %}
{% endif %}
{% if actor.are_announces_hidden_from_stream %}
<li>{{ admin_show_shares_button(actor) }}</li>
{% else %}
<li>{{ admin_hide_shares_button(actor) }}</li>
{% endif %}
{% if with_details %}
<li><a href="{{ actor.url }}" class="label-btn">remote profile</a></li>
{% endif %}

@@ -306,9 +403,11 @@
</div>
{% endif %}

{% endblock %}
{% endmacro %}

{% macro display_og_meta(object) %}
{% block display_og_meta scoped %}
{% if object.og_meta %}
{% for og_meta in object.og_meta[:1] %}
<div class="activity-og-meta">

@@ -326,32 +425,44 @@
</div>
{% endfor %}
{% endif %}
{% endblock %}
{% endmacro %}


{% macro display_attachments(object) %}
{% block display_attachments scoped %}

{% for attachment in object.attachments %}
{% if attachment.type != "PropertyValue" %}
{% set orientation = "unknown" %}
{% if attachment.width %}
{% set orientation = "portrait" if attachment.width < attachment.height else "landscape" %}
{% endif %}
{% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %}
<div class="attachment-wrapper">
<label for="{{attachment.proxied_url}}" class="label-btn show-hide-sensitive-btn">show/hide sensitive content</label>
<div>
<div class="sensitive-attachment">
<input class="sensitive-attachment-state" type="checkbox" id="{{attachment.proxied_url}}" aria-hidden="true">
<div class="sensitive-attachment-box">
<div class="sensitive-attachment-box attachment-orientation-{{orientation}}">
<div></div>
{% else %}
<div class="attachment-item">
<div class="attachment-item attachment-orientation-{{orientation}}">
{% endif %}

{% if attachment.type == "Image" or (attachment | has_media_type("image")) %}
{% if attachment.url not in object.inlined_images %}
<img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} title="{{ attachment.name }}" alt="{{ attachment.name }}"{% endif %} class="attachment">
<a class="media-link" href="{{ attachment.proxied_url }}" target="_blank">
<img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} title="{{ attachment.name }}" alt="{{ attachment.name }}"{% endif %} class="attachment u-photo">
</a>
{% endif %}
{% elif attachment.type == "Video" or (attachment | has_media_type("video")) %}
<video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %}></video>
<div class="video-wrapper">
<video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="u-video"></video>
<div class="video-gif-overlay">GIF</div>
</div>
{% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %}
<audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment"></audio>
<audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment u-audio"></audio>
{% elif attachment.type == "Link" %}
<a href="{{ attachment.url }}" class="attachment">{{ attachment.url | truncate(64, True) }}</a> ({{ attachment.mimetype}})
{% else %}

@@ -367,13 +478,60 @@
{% else %}
</div>
{% endif %}
{% endif %}
{% endfor %}
{% endblock %}
{% endmacro %}

{% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False) %}
{% macro display_webmention_reply(wm_reply) %}
{% block display_webmention_reply scoped %}

<div class="ap-object u-comment h-cite">
<div class="actor-box h-card p-author">
<div class="icon-box">
<img src="{{ wm_reply.face.picture_url }}" alt="{{ wm_reply.face.name }}'s avatar" class="actor-icon u-photo">
</div>
<a href="{{ wm_reply.face.url }}" class="u-url">
<div><strong class="p-name">{{ wm_reply.face.name | clean_html_wm | safe }}</strong></div>
<div class="actor-handle">{{ wm_reply.face.url | truncate(64, True) }}</div>
</a>
</div>

<p class="in-reply-to">in reply to <a href="{{ wm_reply.in_reply_to }}" title="{{ wm_reply.in_reply_to }}" rel="nofollow">
this object
</a></p>

<div class="obj-content margin-top-20">
<div class="e-content">
{{ wm_reply.content | clean_html_wm | safe }}
</div>
</div>

<nav class="flexbox activity-bar margin-top-20">
<ul>
<li>
<div><a href="{{ wm_reply.url }}" rel="nofollow" class="object-permalink u-url u-uid">permalink</a></div>
</li>
<li>
<time class="dt-published" datetime="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}" title="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}">{{ wm_reply.published_at | timeago }}</time>
</li>
{% if is_admin %}
<li>
{{ admin_force_delete_webmention_button(wm_reply.webmention_id) }}
</li>
{% endif %}
</ul>
</nav>
</div>

{% endblock %}
{% endmacro %}

{% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False, is_h_entry=True) %}
{% block display_object scoped %}
{% set is_article_mode = object.is_from_outbox and object.ap_type == "Article" and is_object_page %}
{% if object.ap_type in ["Note", "Article", "Video", "Page", "Question"] %}
<div class="ap-object {% if expanded %}ap-object-expanded {% endif %}h-entry" id="{{ object.permalink_id }}">
{% if object.ap_type in ["Note", "Article", "Video", "Page", "Question", "Event"] %}
<div class="ap-object {% if expanded %}ap-object-expanded {% endif %}{% if is_h_entry %}h-entry{% endif %}" id="{{ object.permalink_id }}">

{% if is_article_mode %}
<data class="h-card">

@@ -387,25 +545,48 @@

{% if object.in_reply_to %}
<p class="in-reply-to">in reply to <a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" rel="nofollow">
this {{ object.ap_type|lower }}
this object
</a></p>
{% endif %}

{% if object.ap_type == "Article" %}
{% if object.ap_type in ["Article", "Event"] %}
<h2 class="p-name no-margin-top">{{ object.name }}</h2>
{% endif %}

{% if object.ap_type == "Event" %}
{% if object.ap_object.get("endTime") and object.ap_object.get("startTime") %}
<p>On {{ object.ap_object.startTime | parse_datetime | format_date }}
(ends {{ object.ap_object.endTime | parse_datetime | format_date }})</p>
{% endif %}
{% endif %}

{% if object.ap_object.get("location") %}
{% set loc = object.ap_object.get("location") %}
{% if loc.type == "Place" and loc.latitude and loc.longitude %}
<div class="ap-place">
<h3>Location</h3>
{% if loc.name %}{{ loc.name }}{% endif %}
<span class="h-geo">
<data class="p-latitude" value="{{ loc.latitude}}"></data>
<data class="p-longitude" value="{{ loc.longitude }}"></data>
<a href="https://www.openstreetmap.org/?mlat={{ loc.latitude }}&mlon={{ loc.longitude }}#map=16/{{loc.latitude}}/{{loc.longitude}}">{{loc.latitude}},{{loc.longitude}}</a>
</span>
</div>
{% endif %}
{% endif %}

{% if is_article_mode %}
<time class="dt-published muted" datetime="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}" title="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}">{{ object.ap_published_at.strftime("%b %d, %Y") }}</time>
{% endif %}

{% if object.summary %}
<div class="show-more-wrapper">
<div class="p-summary">
<p>{{ object.summary | clean_html(object) | safe }}</p>
</div>
<label for="show-more-{{ object.permalink_id }}" class="show-more-btn">show/hide more</label>
<input class="show-more-state" type="checkbox" aria-hidden="true" id="show-more-{{ object.permalink_id }}" checked>
<details class="show-more-wrapper">
<summary>
<div class="p-summary">
<p>{{ object.summary | clean_html(object) | safe }}</p>
</div>
<span class="show-more-btn" aria-hidden="true"></span>
</summary>
{% endif %}
<div class="obj-content">
<div class="e-content">

@@ -466,7 +647,7 @@

</div>
{% if object.summary %}
</div>
</details>
{% endif %}

<div class="activity-attachment">

@@ -601,6 +782,11 @@
{{ admin_expand_button(object) }}
</li>
{% endif %}
{% if object.is_from_inbox and not object.announced_via_outbox_object_ap_id and object.is_local_reply %}
<li>
{{ admin_force_delete_button(object.ap_id) }}
</li>
{% endif %}
</ul>
</nav>
{% endif %}

@@ -612,8 +798,8 @@
<div class="interactions-block">Likes
<div class="facepile-wrapper">
{% for like in likes %}
<a href="{% if is_admin %}{{ url_for("admin_profile") }}?actor_id={{ like.actor.ap_id }}{% else %}{{ like.actor.url }}{% endif %}" title="{{ like.actor.handle }}" rel="noreferrer">
<img src="{{ like.actor.resized_icon_url }}" alt="{{ like.actor.handle}}">
<a href="{% if is_admin and like.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ like.ap_actor_id }}{% else %}{{ like.url }}{% endif %}" title="{{ like.name }}" rel="noreferrer">
<img src="{{ like.picture_url }}" alt="{{ like.name }}">
</a>
{% endfor %}
{% if object.likes_count > likes | length %}

@@ -629,8 +815,8 @@
<div class="interactions-block">Shares
<div class="facepile-wrapper">
{% for share in shares %}
<a href="{% if is_admin %}{{ url_for("admin_profile") }}?actor_id={{ share.actor.ap_id }}{% else %}{{ share.actor.url }}{% endif %}" title="{{ share.actor.handle }}" rel="noreferrer">
<img src="{{ share.actor.resized_icon_url }}" alt="{{ share.actor.handle}}">
<a href="{% if is_admin and share.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ share.ap_actor_id }}{% else %}{{ share.url }}{% endif %}" title="{{ share.name }}" rel="noreferrer">
<img src="{{ share.picture_url }}" alt="{{ share.name }}">
</a>
{% endfor %}
{% if object.announces_count > shares | length %}

@@ -663,4 +849,5 @@

</div>
{% endif %}
{% endblock %}
{% endmacro %}
@@ -60,7 +60,7 @@ async def save_upload(db_session: AsyncSession, f: UploadFile) -> models.Upload:
        destination_image.putdata(original_image.getdata())
        destination_image.save(
            dest_filename,
            format=_original_image.format,
            format=_original_image.format,  # type: ignore
        )

        with open(dest_filename, "rb") as dest_f:
@@ -23,6 +23,8 @@ def _load_emojis(root_dir: Path, base_url: str) -> None:
            mt = mimetypes.guess_type(emoji.name)[0]
            if mt and mt.startswith("image/"):
                name = emoji.name.split(".")[0]
                if not re.match(EMOJI_REGEX, f":{name}:"):
                    continue
                ap_emoji: "RawObject" = {
                    "type": "Emoji",
                    "name": f":{name}:",
app/utils/facepile.py (new file)
@@ -0,0 +1,172 @@
import datetime
from dataclasses import dataclass
from datetime import timezone
from typing import Any
from typing import Optional

from loguru import logger

from app import media
from app.models import InboxObject
from app.models import Webmention
from app.utils.datetime import parse_isoformat
from app.utils.url import must_make_abs


@dataclass
class Face:
    ap_actor_id: str | None
    url: str
    name: str
    picture_url: str
    created_at: datetime.datetime

    @classmethod
    def from_inbox_object(cls, like: InboxObject) -> "Face":
        return cls(
            ap_actor_id=like.actor.ap_id,
            url=like.actor.url,  # type: ignore
            name=like.actor.handle,  # type: ignore
            picture_url=like.actor.resized_icon_url,
            created_at=like.created_at,  # type: ignore
        )

    @classmethod
    def from_webmention(cls, webmention: Webmention) -> Optional["Face"]:
        items = webmention.source_microformats.get("items", [])  # type: ignore
        for item in items:
            if item["type"][0] == "h-card":
                try:
                    return cls(
                        ap_actor_id=None,
                        url=(
                            must_make_abs(
                                item["properties"]["url"][0], webmention.source
                            )
                            if item["properties"].get("url")
                            else webmention.source
                        ),
                        name=item["properties"]["name"][0],
                        picture_url=media.resized_media_url(
                            must_make_abs(
                                item["properties"]["photo"][0], webmention.source
                            ),  # type: ignore
                            50,
                        ),
                        created_at=webmention.created_at,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break
            elif item["type"][0] == "h-entry":
                author = item["properties"]["author"][0]
                try:
                    return cls(
                        ap_actor_id=None,
                        url=webmention.source,
                        name=author["properties"]["name"][0],
                        picture_url=media.resized_media_url(
                            must_make_abs(
                                author["properties"]["photo"][0], webmention.source
                            ),  # type: ignore
                            50,
                        ),
                        created_at=webmention.created_at,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break

        return None


def merge_faces(faces: list[Face]) -> list[Face]:
    return sorted(
        faces,
        key=lambda f: f.created_at,
        reverse=True,
    )[:10]


def _parse_face(webmention: Webmention, items: list[dict[str, Any]]) -> Face | None:
    for item in items:
        if item["type"][0] == "h-card":
            try:
                return Face(
                    ap_actor_id=None,
                    url=(
                        must_make_abs(item["properties"]["url"][0], webmention.source)
                        if item["properties"].get("url")
                        else webmention.source
                    ),
                    name=item["properties"]["name"][0],
                    picture_url=media.resized_media_url(
                        must_make_abs(
                            item["properties"]["photo"][0], webmention.source
                        ),  # type: ignore
                        50,
                    ),
                    created_at=webmention.created_at,  # type: ignore
                )
            except Exception:
                logger.exception(
                    f"Failed to build Face for webmention id={webmention.id}"
                )
                break

    return None


@dataclass
class WebmentionReply:
    face: Face
    content: str
    url: str
    published_at: datetime.datetime
    in_reply_to: str
    webmention_id: int

    @classmethod
    def from_webmention(cls, webmention: Webmention) -> Optional["WebmentionReply"]:
        items = webmention.source_microformats.get("items", [])  # type: ignore
        for item in items:
            if item["type"][0] == "h-entry":
                try:
                    face = _parse_face(webmention, item["properties"].get("author", []))
                    if not face:
                        logger.info(
                            "Failed to build WebmentionReply/Face for "
                            f"webmention id={webmention.id}"
                        )
                        break

                    if "published" in item["properties"]:
                        published_at = (
                            parse_isoformat(item["properties"]["published"][0])
                            .astimezone(timezone.utc)
                            .replace(tzinfo=None)
                        )
                    else:
                        published_at = webmention.created_at  # type: ignore

                    return cls(
                        face=face,
                        content=item["properties"]["content"][0]["html"],
                        url=must_make_abs(
                            item["properties"]["url"][0], webmention.source
                        ),
                        published_at=published_at,
                        in_reply_to=webmention.target,  # type: ignore
                        webmention_id=webmention.id,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break

        return None
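A quick sketch of how merge_faces behaves: it keeps only the ten most recent faces, newest first. The Face values below are built by hand purely for illustration (this assumes the project environment so the app imports resolve):

import datetime

from app.utils.facepile import Face, merge_faces

# Build 12 hypothetical faces with increasing timestamps.
faces = [
    Face(
        ap_actor_id=None,
        url=f"https://example.com/user{i}",
        name=f"user{i}",
        picture_url=f"https://example.com/user{i}.png",
        created_at=datetime.datetime(2022, 1, 1) + datetime.timedelta(days=i),
    )
    for i in range(12)
]

merged = merge_faces(faces)
assert len(merged) == 10           # capped at 10 faces
assert merged[0].name == "user11"  # newest first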
@ -32,23 +32,22 @@ def highlight(html: str) -> str:
|
|||
|
||||
# If this comes from a microblog.pub instance we may have the language
|
||||
# in the class name
|
||||
if "class" in code.attrs and code.attrs["class"][0].startswith("language-"):
|
||||
if "data-microblogpub-lexer" in code.attrs:
|
||||
try:
|
||||
lexer = get_lexer_by_name(
|
||||
code.attrs["class"][0].removeprefix("language-")
|
||||
)
|
||||
lexer = get_lexer_by_name(code.attrs["data-microblogpub-lexer"])
|
||||
except Exception:
|
||||
lexer = guess_lexer(code_content)
|
||||
else:
|
||||
lexer = guess_lexer(code_content)
|
||||
|
||||
# Replace the code with Pygment output
|
||||
# XXX: the HTML escaping causes issue with Python type annotations
|
||||
code_content = code_content.replace(") -> ", ") -> ")
|
||||
code.parent.replaceWith(
|
||||
BeautifulSoup(
|
||||
phighlight(code_content, lexer, _FORMATTER), "html5lib"
|
||||
).body.next
|
||||
)
|
||||
# Replace the code with Pygment output
|
||||
# XXX: the HTML escaping causes issue with Python type annotations
|
||||
code_content = code_content.replace(") -> ", ") -> ")
|
||||
code.parent.replaceWith(
|
||||
BeautifulSoup(
|
||||
phighlight(code_content, lexer, _FORMATTER), "html5lib"
|
||||
).body.next
|
||||
)
|
||||
else:
|
||||
code.name = "div"
|
||||
code["class"] = code.get("class", []) + ["highlight"]
|
||||
|
||||
return soup.body.encode_contents().decode()
|
||||
|
|
@@ -10,7 +10,7 @@ from app.utils.url import make_abs
class IndieAuthClient:
    logo: str | None
    name: str
    url: str
    url: str | None


def _get_prop(props: dict[str, Any], name: str, default=None) -> Any:
32  app/utils/mastodon.py  Normal file
@@ -0,0 +1,32 @@
from pathlib import Path

from loguru import logger

from app.webfinger import get_actor_url


def _load_mastodon_following_accounts_csv_file(path: str) -> list[str]:
    handles = []
    for line in Path(path).read_text().splitlines()[1:]:
        handle = line.split(",")[0]
        handles.append(handle)

    return handles


async def get_actor_urls_from_following_accounts_csv_file(
    path: str,
) -> list[tuple[str, str]]:
    actor_urls = []
    for handle in _load_mastodon_following_accounts_csv_file(path):
        try:
            actor_url = await get_actor_url(handle)
        except Exception:
            logger.error(f"Failed to fetch actor URL for {handle=}")
        else:
            if actor_url:
                actor_urls.append((handle, actor_url))
            else:
                logger.info(f"No actor URL found for {handle=}")

    return actor_urls
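To make the CSV handling in the new helper concrete, a hedged sketch of the format it expects (header row first, handle in the first column); the file path, header names, and handles below are made up:

```python
# Hypothetical example of parsing a Mastodon "Follows" export the same way
# _load_mastodon_following_accounts_csv_file does: skip the header row and
# keep the first column (the account handle).
from pathlib import Path

sample = """Account address,Show boosts,Notify on new posts,Languages
alice@example.social,true,false,
bob@another.example,true,false,
"""

path = Path("/tmp/following_accounts.csv")
path.write_text(sample)

handles = [line.split(",")[0] for line in path.read_text().splitlines()[1:]]
print(handles)  # ['alice@example.social', 'bob@another.example']
```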
@@ -1,12 +1,15 @@
import asyncio
import mimetypes
import re
import signal
from concurrent.futures import TimeoutError
from typing import Any
from urllib.parse import urlparse

import httpx
from bs4 import BeautifulSoup  # type: ignore
from loguru import logger
from pebble import concurrent  # type: ignore
from pydantic import BaseModel

from app import activitypub as ap
@@ -29,7 +32,11 @@ class OpenGraphMeta(BaseModel):
    site_name: str


@concurrent.process(timeout=5)
def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
    # Prevent SIGTERM from bubbling up to the worker
    signal.signal(signal.SIGTERM, signal.SIG_IGN)

    soup = BeautifulSoup(html, "html5lib")
    ogs = {
        og.attrs["property"]: og.attrs.get("content")
@@ -55,9 +62,20 @@ def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
        if u := raw.get(maybe_rel):
            raw[maybe_rel] = make_abs(u, url)

            if not is_url_valid(raw[maybe_rel]):
                logger.info(f"Invalid url {raw[maybe_rel]}")
                if maybe_rel == "url":
                    raw["url"] = url
                elif maybe_rel == "image":
                    raw["image"] = None

    return OpenGraphMeta.parse_obj(raw)


def scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
    return _scrap_og_meta(url, html).result()


async def external_urls(
    db_session: AsyncSession,
    ro: ap_object.RemoteObject | OutboxObject | InboxObject,
@@ -126,7 +144,10 @@ async def _og_meta_from_url(url: str) -> OpenGraphMeta | None:
        return None

    try:
        return _scrap_og_meta(url, resp.text)
        return scrap_og_meta(url, resp.text)
    except TimeoutError:
        logger.info(f"Timed out when scraping OG meta for {url}")
        return None
    except Exception:
        logger.info(f"Failed to scrap OG meta for {url}")
        return None
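The hunks above lean on Pebble's process decorator: the decorated function runs in a separate process and its future's `.result()` raises `concurrent.futures.TimeoutError` when the budget is exceeded. A small standalone sketch of that pattern, with an illustrative sleep and function name:

```python
# Standalone illustration of the Pebble timeout pattern used by _scrap_og_meta /
# scrap_og_meta: the call returns a ProcessFuture immediately, and .result()
# raises TimeoutError once the 1-second limit is hit.
import time
from concurrent.futures import TimeoutError

from pebble import concurrent


@concurrent.process(timeout=1)
def slow_parse(html: str) -> int:
    time.sleep(5)  # pretend this is a pathological page that hangs the parser
    return len(html)


if __name__ == "__main__":
    try:
        print(slow_parse("<html></html>").result())
    except TimeoutError:
        print("gave up after 1s, like _og_meta_from_url does")
```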
@@ -21,6 +21,13 @@ def make_abs(url: str | None, parent: str) -> str | None:
    )


def must_make_abs(url: str | None, parent: str) -> str:
    abs_url = make_abs(url, parent)
    if not abs_url:
        raise ValueError("missing URL")
    return abs_url


class InvalidURLError(Exception):
    pass
@@ -54,7 +61,7 @@ def is_url_valid(url: str) -> bool:
    if not parsed.hostname or parsed.hostname.lower() in ["localhost"]:
        return False

    if parsed.hostname in BLOCKED_SERVERS:
    if is_hostname_blocked(parsed.hostname):
        logger.warning(f"{parsed.hostname} is blocked")
        return False
@@ -81,3 +88,11 @@ def check_url(url: str) -> None:
        raise InvalidURLError(f'"{url}" is invalid')

    return None


@functools.lru_cache(maxsize=256)
def is_hostname_blocked(hostname: str) -> bool:
    for blocked_hostname in BLOCKED_SERVERS:
        if hostname == blocked_hostname or hostname.endswith(f".{blocked_hostname}"):
            return True
    return False
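The new helper blocks a hostname and all of its subdomains. A self-contained sketch of that suffix matching, with a made-up blocklist standing in for `BLOCKED_SERVERS`:

```python
# Illustration of the matching rule behind is_hostname_blocked: a blocked
# server also blocks its subdomains, thanks to the leading dot in the suffix.
BLOCKED = ["spam.example"]


def is_blocked(hostname: str) -> bool:
    return any(
        hostname == blocked or hostname.endswith(f".{blocked}")
        for blocked in BLOCKED
    )


assert is_blocked("spam.example")
assert is_blocked("relay.spam.example")
assert not is_blocked("notspam.example")  # no dot boundary, so not blocked
```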
@@ -24,7 +24,7 @@ async def _discover_webmention_endoint(url: str) -> str | None:
            follow_redirects=True,
        )
        resp.raise_for_status()
    except (httpx.HTTPError, httpx.HTTPStatusError):
    except Exception:
        logger.exception(f"Failed to discover webmention endpoint for {url}")
        return None
@@ -69,5 +69,5 @@ class Worker(Generic[T]):
        logger.info("stopping loop")

    async def _shutdown(self, sig: signal.Signals) -> None:
        logger.info(f"Caught {signal=}")
        logger.info(f"Caught {sig=}")
        self._stop_event.set()
@@ -1,3 +1,4 @@
import xml.etree.ElementTree as ET
from typing import Any
from urllib.parse import urlparse
@@ -8,32 +9,85 @@ from app import config
from app.utils.url import check_url


async def get_webfinger_via_host_meta(host: str) -> str | None:
    resp: httpx.Response | None = None
    is_404 = False
    async with httpx.AsyncClient() as client:
        for i, proto in enumerate({"http", "https"}):
            try:
                url = f"{proto}://{host}/.well-known/host-meta"
                check_url(url)
                resp = await client.get(
                    url,
                    headers={
                        "User-Agent": config.USER_AGENT,
                    },
                    follow_redirects=True,
                )
                resp.raise_for_status()
                break
            except httpx.HTTPStatusError as http_error:
                logger.exception("HTTP error")
                if http_error.response.status_code in [403, 404, 410]:
                    is_404 = True
                    continue
                raise
            except httpx.HTTPError:
                logger.exception("req failed")
                # If we tried https first and the domain is "http only"
                if i == 0:
                    continue
                break

    if is_404:
        return None

    if resp:
        tree = ET.fromstring(resp.text)
        maybe_link = tree.find(
            "./{http://docs.oasis-open.org/ns/xri/xrd-1.0}Link[@rel='lrdd']"
        )
        if maybe_link is not None:
            return maybe_link.attrib.get("template")

    return None


async def webfinger(
    resource: str,
    webfinger_url: str | None = None,
) -> dict[str, Any] | None:  # noqa: C901
    """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL."""
    resource = resource.strip()
    logger.info(f"performing webfinger resolution for {resource}")
    protos = ["https", "http"]
    if resource.startswith("http://"):
        protos.reverse()
        host = urlparse(resource).netloc
    elif resource.startswith("https://"):
        host = urlparse(resource).netloc
    urls = []
    host = None
    if webfinger_url:
        urls = [webfinger_url]
    else:
        if resource.startswith("acct:"):
            resource = resource[5:]
        if resource.startswith("@"):
            resource = resource[1:]
        _, host = resource.split("@", 1)
        resource = "acct:" + resource
        if resource.startswith("http://"):
            host = urlparse(resource).netloc
            url = f"http://{host}/.well-known/webfinger"
        elif resource.startswith("https://"):
            host = urlparse(resource).netloc
            url = f"https://{host}/.well-known/webfinger"
        else:
            protos = ["https", "http"]
            _, host = resource.split("@", 1)
            urls = [f"{proto}://{host}/.well-known/webfinger" for proto in protos]

    if resource.startswith("acct:"):
        resource = resource[5:]
    if resource.startswith("@"):
        resource = resource[1:]
    resource = "acct:" + resource

    is_404 = False

    resp: httpx.Response | None = None
    async with httpx.AsyncClient() as client:
        for i, proto in enumerate(protos):
        for i, url in enumerate(urls):
            try:
                url = f"{proto}://{host}/.well-known/webfinger"
                check_url(url)
                resp = await client.get(
                    url,
@@ -57,7 +111,14 @@ async def webfinger(
                if i == 0:
                    continue
                break

    if is_404:
        if not webfinger_url and host:
            if webfinger_url := (await get_webfinger_via_host_meta(host)):
                return await webfinger(
                    resource,
                    webfinger_url=webfinger_url,
                )
        return None

    if resp:
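To see how the new `webfinger_url` fallback is exercised, a rough usage sketch; the handle, the asyncio scaffolding, and the JRD `links`/`rel=self` lookup are standard WebFinger conventions and illustrative assumptions, not part of the diff:

```python
# Hedged usage sketch: resolve a handle with app.webfinger.webfinger(), which
# now retries via get_webfinger_via_host_meta() when the default
# /.well-known/webfinger endpoints return 404.
import asyncio

from app.webfinger import webfinger


async def resolve(handle: str) -> None:
    data = await webfinger(handle)
    if data is None:
        print(f"could not resolve {handle}")
        return
    for link in data.get("links", []):
        if link.get("rel") == "self":
            print("actor URL:", link.get("href"))


if __name__ == "__main__":
    asyncio.run(resolve("someone@example.social"))  # made-up handle
```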
@@ -1,3 +1,5 @@
from urllib.parse import urlparse

import httpx
from bs4 import BeautifulSoup  # type: ignore
from fastapi import APIRouter
@@ -6,13 +8,21 @@ from fastapi import HTTPException
from fastapi import Request
from fastapi.responses import JSONResponse
from loguru import logger
from sqlalchemy import func
from sqlalchemy import select

from app import models
from app.boxes import _get_outbox_announces_count
from app.boxes import _get_outbox_likes_count
from app.boxes import _get_outbox_replies_count
from app.boxes import get_outbox_object_by_ap_id
from app.boxes import get_outbox_object_by_slug_and_short_id
from app.boxes import is_notification_enabled
from app.database import AsyncSession
from app.database import get_db_session
from app.utils import microformats
from app.utils.facepile import Face
from app.utils.facepile import WebmentionReply
from app.utils.url import check_url
from app.utils.url import is_url_valid
@@ -47,6 +57,7 @@ async def webmention_endpoint(
        check_url(source)
        check_url(target)
        parsed_target_url = urlparse(target)
    except Exception:
        logger.exception("Invalid webmention request")
        raise HTTPException(status_code=400, detail="Invalid payload")
@@ -65,6 +76,16 @@
    logger.info("Found existing Webmention, will try to update or delete")

    mentioned_object = await get_outbox_object_by_ap_id(db_session, target)

    if not mentioned_object and parsed_target_url.path.startswith("/articles/"):
        try:
            _, _, short_id, slug = parsed_target_url.path.split("/")
            mentioned_object = await get_outbox_object_by_slug_and_short_id(
                db_session, slug, short_id
            )
        except Exception:
            logger.exception(f"Failed to match {target}")

    if not mentioned_object:
        logger.info(f"Invalid target {target=}")
@@ -90,15 +111,21 @@
        logger.warning(f"target {target=} not found in source")
        if existing_webmention_in_db:
            logger.info("Deleting existing Webmention")
            mentioned_object.webmentions_count = mentioned_object.webmentions_count - 1
            existing_webmention_in_db.is_deleted = True
            await db_session.flush()

            notif = models.Notification(
                notification_type=models.NotificationType.DELETED_WEBMENTION,
                outbox_object_id=mentioned_object.id,
                webmention_id=existing_webmention_in_db.id,
            )
            db_session.add(notif)
            # Revert side effects
            await _handle_webmention_side_effects(
                db_session, existing_webmention_in_db, mentioned_object
            )

            if is_notification_enabled(models.NotificationType.DELETED_WEBMENTION):
                notif = models.Notification(
                    notification_type=models.NotificationType.DELETED_WEBMENTION,
                    outbox_object_id=mentioned_object.id,
                    webmention_id=existing_webmention_in_db.id,
                )
                db_session.add(notif)

        await db_session.commit()
@@ -110,36 +137,96 @@
    else:
        return JSONResponse(content={}, status_code=200)

    webmention_type = models.WebmentionType.UNKNOWN
    webmention: models.Webmention
    if existing_webmention_in_db:
        # Undelete if needed
        existing_webmention_in_db.is_deleted = False
        existing_webmention_in_db.source_microformats = data
        await db_session.flush()
        webmention = existing_webmention_in_db

        notif = models.Notification(
            notification_type=models.NotificationType.UPDATED_WEBMENTION,
            outbox_object_id=mentioned_object.id,
            webmention_id=existing_webmention_in_db.id,
        )
        db_session.add(notif)
        if is_notification_enabled(models.NotificationType.UPDATED_WEBMENTION):
            notif = models.Notification(
                notification_type=models.NotificationType.UPDATED_WEBMENTION,
                outbox_object_id=mentioned_object.id,
                webmention_id=existing_webmention_in_db.id,
            )
            db_session.add(notif)
    else:
        new_webmention = models.Webmention(
            source=source,
            target=target,
            source_microformats=data,
            outbox_object_id=mentioned_object.id,
            webmention_type=webmention_type,
        )
        db_session.add(new_webmention)
        await db_session.flush()
        webmention = new_webmention

        notif = models.Notification(
            notification_type=models.NotificationType.NEW_WEBMENTION,
            outbox_object_id=mentioned_object.id,
            webmention_id=new_webmention.id,
        )
        db_session.add(notif)
        if is_notification_enabled(models.NotificationType.NEW_WEBMENTION):
            notif = models.Notification(
                notification_type=models.NotificationType.NEW_WEBMENTION,
                outbox_object_id=mentioned_object.id,
                webmention_id=new_webmention.id,
            )
            db_session.add(notif)

    mentioned_object.webmentions_count = mentioned_object.webmentions_count + 1
    # Determine the webmention type
    for item in data.get("items", []):
        if target in item.get("properties", {}).get(
            "in-reply-to", []
        ) and WebmentionReply.from_webmention(webmention):
            webmention_type = models.WebmentionType.REPLY
            break
        elif target in item.get("properties", {}).get(
            "like-of", []
        ) and Face.from_webmention(webmention):
            webmention_type = models.WebmentionType.LIKE
            break
        elif target in item.get("properties", {}).get(
            "repost-of", []
        ) and Face.from_webmention(webmention):
            webmention_type = models.WebmentionType.REPOST
            break

    if webmention_type != models.WebmentionType.UNKNOWN:
        webmention.webmention_type = webmention_type
        await db_session.flush()

    # Handle side effect
    await _handle_webmention_side_effects(db_session, webmention, mentioned_object)
    await db_session.commit()

    return JSONResponse(content={}, status_code=200)


async def _handle_webmention_side_effects(
    db_session: AsyncSession,
    webmention: models.Webmention,
    mentioned_object: models.OutboxObject,
) -> None:
    if webmention.webmention_type == models.WebmentionType.UNKNOWN:
        # TODO: recount everything
        mentioned_object.webmentions_count = await db_session.scalar(
            select(func.count(models.Webmention.id)).where(
                models.Webmention.is_deleted.is_(False),
                models.Webmention.outbox_object_id == mentioned_object.id,
                models.Webmention.webmention_type == models.WebmentionType.UNKNOWN,
            )
        )
    elif webmention.webmention_type == models.WebmentionType.LIKE:
        mentioned_object.likes_count = await _get_outbox_likes_count(
            db_session, mentioned_object
        )
    elif webmention.webmention_type == models.WebmentionType.REPOST:
        mentioned_object.announces_count = await _get_outbox_announces_count(
            db_session, mentioned_object
        )
    elif webmention.webmention_type == models.WebmentionType.REPLY:
        mentioned_object.replies_count = await _get_outbox_replies_count(
            db_session, mentioned_object
        )
    else:
        raise ValueError(f"Unhandled {webmention.webmention_type} webmention")
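For reference, the kind of parsed-microformats payload the classification loop above matches against; this is a hand-written sample in the shape produced by a microformats2 parser (e.g. mf2py), not data taken from the diff:

```python
# Hypothetical source_microformats payload for a reply webmention. The loop in
# webmention_endpoint() would classify it as WebmentionType.REPLY because the
# target URL appears in the h-entry's "in-reply-to" property.
target = "https://blog.example/articles/1234567/some-post"

data = {
    "items": [
        {
            "type": ["h-entry"],
            "properties": {
                "author": [
                    {
                        "type": ["h-card"],
                        "properties": {
                            "name": ["Alice"],
                            "url": ["https://alice.example"],
                            "photo": ["https://alice.example/me.jpg"],
                        },
                    }
                ],
                "in-reply-to": [target],
                "content": [{"html": "<p>Nice post!</p>", "value": "Nice post!"}],
                "published": ["2022-11-01T10:00:00+00:00"],
                "url": ["https://alice.example/replies/1"],
            },
        }
    ],
}

is_reply = any(
    target in item.get("properties", {}).get("in-reply-to", [])
    for item in data["items"]
)
print(is_reply)  # True
```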
1  data/templates/app  Symbolic link
@@ -0,0 +1 @@
../../app/templates/
@@ -5,6 +5,7 @@ admin_password = "$2b$12$OwCyZM33uXQUVrChgER.h.qgFJ4fBp6tdFwArR3Lm1LV8NgMvIxVa"
name = "test"
summary = "<p>Hello</p>"
https = false
id = "http://localhost:8000"
icon_url = "https://localhost:8000/static/nopic.png"
secret = "1dd4079e0474d1a519052b8fe3cb5fa6"
debug = true
@@ -58,3 +58,24 @@ And check out the result by starting a static server using Python standard library:
cd docs/dist
python -m http.server 8001
```

## Contributing

Contributions/patches are welcome, but please start a discussion in a [ticket](https://todo.sr.ht/~tsileo/microblog.pub) or a [thread in the mailing list](https://lists.sr.ht/~tsileo/microblog.pub-devel) before working on anything consequential.

### Patches

Please ensure your code passes the code quality checks:

```bash
inv autoformat
inv lint
```

And that the test suite is passing:

```bash
inv tests
```

Please also consider adding new test cases if needed.
@@ -191,6 +191,72 @@ http {
}
```


## (Advanced) Running on a subdomain

It is possible to run microblogpub on a subdomain (`sub.domain.tld`) while being reachable from the root domain (`domain.tld`) using the `name@domain.tld` handle.

This requires forwarding/proxying requests from the root domain to the subdomain, for example using NGINX:

```nginx
location /.well-known/webfinger {
    add_header Access-Control-Allow-Origin '*';
    return 301 https://sub.domain.tld$request_uri;
}
```

And updating `data/profile.toml` to specify the root domain as the webfinger domain:

```toml
webfinger_domain = "domain.tld"
```

Once configured correctly, people will be able to follow you using `name@domain.tld`, while using `sub.domain.tld` for the web interface.


## (Advanced) Running from a subpath

It is possible to configure microblogpub to run from a subpath.
To achieve this, do the following configuration _between_ the config and start steps,
i.e. _after_ you run `make config` or `poetry run inv configuration-wizard`,
but _before_ you run `docker compose up` or `poetry run supervisord`.
Changing these settings on an instance which already has some posts or was seen by other instances will likely break links to these posts or federation (i.e. links to your instance, posts and profile from other instances).

The following steps explain how to configure an instance to be available at `https://example.com/subdir`.
Change them to your actual domain and subdir.

* Edit the `data/profile.toml` file and add this line:

        id = "https://example.com/subdir"

* Edit the `misc/*-supervisord.conf` file which is relevant to you (it depends on how you start microblogpub - if in doubt, do the same change in all of them) - in the `[program:uvicorn]` section, in the line which starts with `command`, add this argument at the very end: ` --root-path /subdir`

The above two steps are enough to configure microblogpub.
Next, you also need to configure the reverse proxy.
It might differ slightly if you plan to have other services running on the same domain, but for the [NGINX config shown above](#reverse-proxy), the following changes are enough:

* Add the subdir to the location, so the location block starts like this:

        location /subdir {

* Add `/` at the end of the `proxy_pass` directive, like this:

        proxy_pass http://localhost:8000/;

These two changes will instruct NGINX that requests sent to `https://example.com/subdir/...` should be forwarded to `http://localhost:8000/...`.

* Inside the `server` block, add redirects for well-known URLs (add these lines after `client_max_body_size`, and remember to replace `subdir` with your actual subdir!):

        location /.well-known/webfinger { return 301 /subdir$request_uri; }
        location /.well-known/nodeinfo { return 301 /subdir$request_uri; }
        location /.well-known/oauth-authorization-server { return 301 /subdir$request_uri; }

* Optionally, [check robots.txt from a running microblogpub instance](https://microblog.pub/robots.txt) and integrate it into the robots.txt file in the root of your server - remember to prepend `subdir` to URLs, so for example `Disallow: /admin` becomes `Disallow: /subdir/admin`.

## YunoHost edition

[YunoHost](https://yunohost.org/) support is a work in progress.
[YunoHost](https://yunohost.org/) support is available (although it is not an official package for now): <https://git.sr.ht/~tsileo/microblog.pub_ynh>.

## Available tutorial/guides

- [Opalstack](https://community.opalstack.com/d/1055-howto-install-and-run-microblogpub-on-opalstack), thanks to [@defulmere@mastodon.social](https://mastodon.online/@defulmere).
@@ -25,9 +25,10 @@ As these two config items define your ActivityPub handle `@handle@domain`.

You can tweak your profile by tweaking these items:

- `name`
- `summary` (using Markdown)
- `icon_url`
- `name`: The name shown with your profile.
- `summary`: The summary or 'bio' part of your profile, written in Markdown.
- `icon_url`: Your profile image or avatar.
- `image_url`: This provides a 'header' or 'banner' image. Note that it is not shown by the default Microblog.pub templates. It will be used by Mastodon (which uses a 3:1 ratio image) and Pleroma. Pixelfed and Peertube, for example, don't show these images by default.

Whenever one of these config items is updated, an `Update` activity will be sent to all known servers to update your remote profile.

@@ -35,6 +36,15 @@ The server will need to be restarted for taking changes into account.

Before restarting the server, you can ensure you haven't made any mistakes by running the [configuration checking task](/user_guide.html#configuration-checking).

Note that currently `image_url` is not used anywhere in microblog.pub itself, but other clients/servers do occasionally use it when showing remote profiles as a background image.
Also, this image _can_ be used in microblog.pub - just add this:

```html
<img src="{{ local_actor.image_url | media_proxy_url }}">
```

to an appropriate place of your template (most likely, `header.html`).
For more information, see the section about [custom templates](/user_guide.html#custom-templates) further in this document.

### Profile metadata
@@ -98,6 +108,39 @@ privacy_replace = [
]
```

### Disabling certain notification types

All notifications are enabled by default.

You can disable specific notifications by adding them to the `disabled_notifications` list.

This example disables likes and shares notifications:

```
disabled_notifications = ["like", "announce"]
```

#### Available notification types

- `new_follower`
- `rejected_follower`
- `unfollow`
- `follow_request_accepted`
- `follow_request_rejected`
- `move`
- `like`
- `undo_like`
- `announce`
- `undo_announce`
- `mention`
- `new_webmention`
- `updated_webmention`
- `deleted_webmention`
- `blocked`
- `unblocked`
- `block`
- `unblock`

### Customization

#### Default emoji

@@ -113,6 +156,7 @@ You can copy/paste them from [getemoji.com](https://getemoji.com/).
#### Custom emoji

You can add custom emoji in the `data/custom_emoji` directory and they will be picked up automatically.
Do not use exotic characters in the filename - only letters, numbers, and the underscore symbol `_` are allowed.

#### Custom CSS
@@ -127,9 +171,48 @@ $secondary-color: #32cd32;

See `app/scss/main.scss` to see what variables can be overridden.

You will need to [recompile the CSS](#recompiling-css-files) after making any CSS changes (for the actual CSS files to be updated) and restart microblog.pub (for the CSS link in HTML documents to be updated with a new checksum - otherwise, browsers that downloaded the old CSS will keep using it).

#### Custom favicon

By default, the microblog.pub favicon is a square of the `$primary-color` CSS color (see the section above on how to redefine CSS colors).
You can change it to any icon you like - just save the desired file as `data/favicon.ico`.
After that, run the "[recompile CSS](#recompiling-css-files)" task to copy it to `app/static/favicon.ico`.

#### Custom templates

If you'd like to customize your instance's theme beyond CSS, you can modify the app's HTML by placing templates in `data/templates` which overwrite the defaults in `app/templates`.

Templates are written using the [Jinja](https://jinja.palletsprojects.com/en/latest/templates/) templating language.
Moreover, `utils.html` has scoped blocks around the body of every macro.
This allows macros to be overridden individually in `data/templates/utils.html`, without copying the whole file.
For example, to only override the display of a specific actor's name/icon, you can create a `data/templates/utils.html` file with the following content:

```jinja
{% extends "app/utils.html" %}

{% block display_actor %}
{% if actor.ap_id == "https://me.example.com" %}
<!-- custom actor display -->
{% else %}
{{ super() }}
{% endif %}
{% endblock %}
```

#### Custom Content Security Policy (CSP)

You can override the default Content Security Policy by adding a line in `data/profile.toml`:

```toml
custom_content_security_policy = "default-src 'self'; style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
```

This example will output the default CSP; note that `{HIGHLIGHT_CSS_HASH}` will be dynamically replaced by the correct value (the hash of the CSS needed for syntax highlighting).

#### Code highlighting theme

You can switch to one of the [styles supported by Pygments](https://pygments.org/styles/) by adding a line in `profile.toml`:
You can switch to one of the [styles supported by Pygments](https://pygments.org/styles/) by adding a line in `data/profile.toml`:

```toml
code_highlighting_theme = "solarized-dark"
@@ -272,7 +355,7 @@ First you need to grab the "ActivityPub actor URL" for your existing account:

```bash
# For a Python install
poetry run inv webfinger username@domain.tld
poetry run inv webfinger username@instance-you-want-to-move-from.tld
```

Edit the config.

@@ -281,7 +364,7 @@ Edit the config.

```bash
# For a Docker install
make account=username@domain.tld webfinger
make account=username@instance-you-want-to-move-from.tld webfinger
```

Edit the config.
@@ -291,11 +374,35 @@ Edit the config.
And add a reference to your old/existing account in `profile.toml`:

```toml
also_known_as = "my@old-account.com"
also_known_as = "https://instance-you-want-to-move-from.tld/users/username"
```

Restart the server, and you should be able to complete the move from your existing account.

Note that if you already have a redirect in place on Mastodon, you may have to remove it before initiating the migration.

## Import follows from Mastodon

You can import the list of follows/following accounts from Mastodon.

It requires downloading the "Follows" CSV file from your Mastodon instance via "Settings" / "Import and export" / "Data export".

Then you need to run the import task:

### Python edition

```bash
# For a Python install
poetry run inv import-mastodon-following-accounts following_accounts.csv
```

### Docker edition

```bash
# For a Docker install
make path=following_accounts.csv import-mastodon-following-accounts
```

## Tasks

### Configuration checking
@@ -451,6 +558,7 @@ make self-destruct

If the server is not (re)starting, you can:

- [Ensure that the configuration is valid](/user_guide.html#configuration-checking)
- [Verify if you haven't any syntax error in the custom theme by recompiling the CSS](/user_guide.html#recompiling-css-files)
- Look at the log files
- [Ensure that the configuration is valid](/user_guide.html#configuration-checking).
- [Verify that you don't have any syntax errors in the custom theme by recompiling the CSS](/user_guide.html#recompiling-css-files).
- Look at the log files (in `data/uvicorn.log`, `data/incoming.log` and `data/outgoing.log`).
- If the CSS is not working, ensure your reverse proxy is serving the static files correctly.
3595  poetry.lock  generated
File diff suppressed because it is too large
@@ -14,7 +14,7 @@ bcrypt = "^3.2.2"
itsdangerous = "^2.1.2"
python-multipart = "^0.0.5"
tomli = "^2.0.1"
httpx = {extras = ["http2"], version = "^0.23.0"}
httpx = {version = "0.23.0", extras = ["http2"]}
SQLAlchemy = {extras = ["asyncio"], version = "^1.4.39"}
alembic = "^1.8.0"
bleach = "^5.0.0"

@@ -44,6 +44,7 @@ uvicorn = {extras = ["standard"], version = "^0.18.3"}
Brotli = "^1.0.9"
greenlet = "^1.1.3"
mistletoe = "^0.9.0"
Pebble = "^5.0.2"

[tool.poetry.dev-dependencies]
black = "^22.3.0"
@@ -75,9 +75,10 @@ def main() -> None:
        proto = "http"

    print("Note that you can put your icon/avatar in the static/ directory")
    dat["icon_url"] = prompt(
    if icon_url := prompt(
        "icon URL: ", default=f'{proto}://{dat["domain"]}/static/nopic.png'
    )
    ):
        dat["icon_url"] = icon_url
    dat["secret"] = os.urandom(16).hex()

    with config_file.open("w") as f:
69  tasks.py
@@ -2,17 +2,49 @@ import asyncio
import io
import shutil
import tarfile
from collections import namedtuple
from contextlib import contextmanager
from inspect import getfullargspec
from pathlib import Path
from typing import Generator
from typing import Optional
from unittest.mock import patch

import httpx
import invoke  # type: ignore
from invoke import Context  # type: ignore
from invoke import run  # type: ignore
from invoke import task  # type: ignore


def fix_annotations():
    """
    Pyinvoke doesn't accept annotations by default; this fixes that.
    Based on: @zelo's fix in https://github.com/pyinvoke/invoke/pull/606
    Context in: https://github.com/pyinvoke/invoke/issues/357
    Python 3.11: https://github.com/pyinvoke/invoke/issues/833
    """

    ArgSpec = namedtuple("ArgSpec", ["args", "defaults"])

    def patched_inspect_getargspec(func):
        spec = getfullargspec(func)
        return ArgSpec(spec.args, spec.defaults)

    org_task_argspec = invoke.tasks.Task.argspec

    def patched_task_argspec(*args, **kwargs):
        with patch(
            target="inspect.getargspec", new=patched_inspect_getargspec, create=True
        ):
            return org_task_argspec(*args, **kwargs)

    invoke.tasks.Task.argspec = patched_task_argspec


fix_annotations()


@task
def generate_db_migration(ctx, message):
    # type: (Context, str) -> None
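To show what the shim above unlocks, a hedged example of a type-annotated invoke task that would otherwise trip up pyinvoke's argspec handling on newer Python versions; the task name and body are illustrative only:

```python
# Hypothetical annotated task: with fix_annotations() applied, invoke accepts
# the `ctx: Context` / `name: str` annotations instead of rejecting the
# function signature at collection time.
from invoke import Context  # type: ignore
from invoke import task  # type: ignore


@task
def greet(ctx: Context, name: str = "world") -> None:
    """Print a greeting (illustrative only)."""
    ctx.run(f"echo hello {name}")
```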
@@ -353,3 +385,40 @@ def check_config(ctx):
        sys.exit(1)
    else:
        print("Config is OK")


@task
def import_mastodon_following_accounts(ctx, path):
    # type: (Context, str) -> None
    from loguru import logger

    from app.boxes import _get_following
    from app.boxes import _send_follow
    from app.database import async_session
    from app.utils.mastodon import get_actor_urls_from_following_accounts_csv_file

    async def _import_following() -> int:
        count = 0
        async with async_session() as db_session:
            followings = {
                following.ap_actor_id for following in await _get_following(db_session)
            }
            for (
                handle,
                actor_url,
            ) in await get_actor_urls_from_following_accounts_csv_file(path):
                if actor_url in followings:
                    logger.info(f"Already following {handle}")
                    continue

                logger.info(f"Importing {actor_url=}")

                await _send_follow(db_session, actor_url)
                count += 1

            await db_session.commit()

        return count

    count = asyncio.run(_import_following())
    logger.info(f"Import done, {count} follow requests sent")
@@ -20,12 +20,16 @@ async def test_fetch_actor(async_db_session: AsyncSession, respx_mock) -> None:
        public_key="pk",
    )
    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
    respx_mock.get(
        "https://example.com/.well-known/webfinger",
        params={"resource": "acct%3Atoto%40example.com"},
    ).mock(return_value=httpx.Response(200, json={"subject": "acct:toto@example.com"}))

    # When fetching this actor for the first time
    saved_actor = await fetch_actor(async_db_session, ra.ap_id)

    # Then it has been fetched and saved in DB
    assert respx.calls.call_count == 1
    assert respx.calls.call_count == 2
    assert (
        await async_db_session.execute(select(models.Actor))
    ).scalar_one().ap_id == saved_actor.ap_id

@@ -38,7 +42,7 @@ async def test_fetch_actor(async_db_session: AsyncSession, respx_mock) -> None:
    assert (
        await async_db_session.execute(select(func.count(models.Actor.id)))
    ).scalar_one() == 1
    assert respx.calls.call_count == 1
    assert respx.calls.call_count == 2


def test_sqlalchemy_factory(db: Session) -> None:
19  tests/test_utils.py  Normal file
@@ -0,0 +1,19 @@
from unittest import mock

import pytest

from app.utils.url import is_hostname_blocked


@pytest.mark.parametrize(
    "hostname,should_be_blocked",
    [
        ("example.com", True),
        ("subdomain.example.com", True),
        ("example.xyz", False),
    ],
)
def test_is_hostname_blocked(hostname: str, should_be_blocked: bool) -> None:
    with mock.patch("app.utils.url.BLOCKED_SERVERS", ["example.com"]):
        is_hostname_blocked.cache_clear()
        assert is_hostname_blocked(hostname) is should_be_blocked