Compare commits


No commits in common. "v2" and "test-css-tweak" have entirely different histories.

64 changed files with 2379 additions and 4566 deletions

AUTHORS
View file

@@ -1,11 +0,0 @@
Thomas Sileo <t@a4.io>
Kevin Wallace <doof@doof.net>
Miguel Jacq <mig@mig5.net>
Alexey Shpakovsky <alexey@shpakovsky.ru>
Josh Washburne <josh@jodh.us>
João Costa <jdpc557@gmail.com>
Sam <samr1.dev@pm.me>
Ash McAllan <acegiak@gmail.com>
Cassio Zen <cassio@hey.com>
Cocoa <momijizukamori@gmail.com>
Jane <jane@janeirl.dev>

View file

@@ -1,4 +1,4 @@
-FROM python:3.11-slim as python-base
+FROM python:3.10-slim as python-base
 ENV PYTHONUNBUFFERED=1 \
     PYTHONDONTWRITEBYTECODE=1 \
     POETRY_HOME="/opt/poetry" \

View file

@@ -28,7 +28,7 @@ move-to:
 .PHONY: self-destruct
 self-destruct:
-    -docker run --rm --it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct
+    -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct
 .PHONY: reset-password
 reset-password:
@@ -41,7 +41,3 @@ check-config:
 .PHONY: compile-scss
 compile-scss:
     -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv compile-scss
-.PHONY: import-mastodon-following-accounts
-import-mastodon-following-accounts:
-    -docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv import-mastodon-following-accounts $(path)

View file

@@ -10,7 +10,6 @@ Instances in the wild:
 - [microblog.pub](https://microblog.pub/) (follow to get updated about the project)
 - [hexa.ninja](https://hexa.ninja) (theme customization example)
 - [testing.microblog.pub](https://testing.microblog.pub/)
-- [Irish Left Archive](https://posts.leftarchive.ie/) (another theme customization example)
 There are still some rough edges, but the server is mostly functional.
@@ -59,7 +58,7 @@ All the development takes place on [sourcehut](https://sr.ht/~tsileo/microblog.p
 - [Issue tracker](https://todo.sr.ht/~tsileo/microblog.pub)
 - [Mailing list](https://sr.ht/~tsileo/microblog.pub/lists)
-Contributions are welcomed, check out the [contributing section of the documentation](https://docs.microblog.pub/developer_guide.html#contributing) for more details.
+Contributions are welcomed, check out the [documentation](https://docs.microblog.pub) for more details.
 ## License

View file

@@ -1,32 +0,0 @@
"""Add Webmention.webmention_type
Revision ID: fadfd359ce78
Revises: b28c0551c236
Create Date: 2022-11-16 19:42:56.925512+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'fadfd359ce78'
down_revision = 'b28c0551c236'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('webmention', schema=None) as batch_op:
batch_op.add_column(sa.Column('webmention_type', sa.Enum('UNKNOWN', 'LIKE', 'REPLY', 'REPOST', name='webmentiontype'), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('webmention', schema=None) as batch_op:
batch_op.drop_column('webmention_type')
# ### end Alembic commands ###

View file

@@ -1,32 +0,0 @@
"""Add option to hide announces from actor
Revision ID: 9b404c47970a
Revises: fadfd359ce78
Create Date: 2022-12-12 19:26:36.912763+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '9b404c47970a'
down_revision = 'fadfd359ce78'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('actor', schema=None) as batch_op:
batch_op.add_column(sa.Column('are_announces_hidden_from_stream', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('actor', schema=None) as batch_op:
batch_op.drop_column('are_announces_hidden_from_stream')
# ### end Alembic commands ###

View file

@@ -1,48 +0,0 @@
"""Add OAuth client
Revision ID: 4ab54becec04
Revises: 9b404c47970a
Create Date: 2022-12-16 17:30:54.520477+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '4ab54becec04'
down_revision = '9b404c47970a'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('oauth_client',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('client_name', sa.String(), nullable=False),
sa.Column('redirect_uris', sa.JSON(), nullable=True),
sa.Column('client_uri', sa.String(), nullable=True),
sa.Column('logo_uri', sa.String(), nullable=True),
sa.Column('scope', sa.String(), nullable=True),
sa.Column('client_id', sa.String(), nullable=False),
sa.Column('client_secret', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('client_secret')
)
with op.batch_alter_table('oauth_client', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_oauth_client_client_id'), ['client_id'], unique=True)
batch_op.create_index(batch_op.f('ix_oauth_client_id'), ['id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('oauth_client', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_oauth_client_id'))
batch_op.drop_index(batch_op.f('ix_oauth_client_client_id'))
op.drop_table('oauth_client')
# ### end Alembic commands ###

View file

@@ -1,36 +0,0 @@
"""Add OAuth refresh token support
Revision ID: a209f0333f5a
Revises: 4ab54becec04
Create Date: 2022-12-18 11:26:31.976348+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'a209f0333f5a'
down_revision = '4ab54becec04'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
batch_op.add_column(sa.Column('refresh_token', sa.String(), nullable=True))
batch_op.add_column(sa.Column('was_refreshed', sa.Boolean(), server_default='0', nullable=False))
batch_op.create_index(batch_op.f('ix_indieauth_access_token_refresh_token'), ['refresh_token'], unique=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_indieauth_access_token_refresh_token'))
batch_op.drop_column('was_refreshed')
batch_op.drop_column('refresh_token')
# ### end Alembic commands ###

View file

@@ -135,6 +135,11 @@ ME = {
     "url": config.ID + "/",  # XXX: the path is important for Mastodon compat
     "manuallyApprovesFollowers": config.CONFIG.manually_approves_followers,
     "attachment": _LOCAL_ACTOR_METADATA,
+    "icon": {
+        "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
+        "type": "Image",
+        "url": config.CONFIG.icon_url,
+    },
     "publicKey": {
         "id": f"{config.ID}#main-key",
         "owner": config.ID,
@@ -143,26 +148,12 @@ ME = {
     "tag": dedup_tags(_LOCAL_ACTOR_TAGS),
 }
-if config.CONFIG.icon_url:
-    ME["icon"] = {
-        "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
-        "type": "Image",
-        "url": config.CONFIG.icon_url,
-    }
 if ALSO_KNOWN_AS:
     ME["alsoKnownAs"] = [ALSO_KNOWN_AS]
 if MOVED_TO:
     ME["movedTo"] = MOVED_TO
-if config.CONFIG.image_url:
-    ME["image"] = {
-        "mediaType": mimetypes.guess_type(config.CONFIG.image_url)[0],
-        "type": "Image",
-        "url": config.CONFIG.image_url,
-    }
 class NotAnObjectError(Exception):
     def __init__(self, url: str, resp: httpx.Response | None = None) -> None:

View file

@@ -6,17 +6,12 @@ from functools import cached_property
 from typing import Union
 from urllib.parse import urlparse
-import httpx
 from loguru import logger
 from sqlalchemy import select
 from sqlalchemy.orm import joinedload
 from app import activitypub as ap
 from app import media
-from app.config import BASE_URL
-from app.config import USER_AGENT
-from app.config import USERNAME
-from app.config import WEBFINGER_DOMAIN
 from app.database import AsyncSession
 from app.utils.datetime import as_utc
 from app.utils.datetime import now
@@ -31,38 +26,7 @@ def _handle(raw_actor: ap.RawObject) -> str:
     if not domain.hostname:
         raise ValueError(f"Invalid actor ID {ap_id}")
-    handle = f'@{raw_actor["preferredUsername"]}@{domain.hostname}'  # type: ignore
-    # TODO: cleanup this
-    # Next, check for custom webfinger domains
-    resp: httpx.Response | None = None
-    for url in {
-        f"https://{domain.hostname}/.well-known/webfinger",
-        f"http://{domain.hostname}/.well-known/webfinger",
-    }:
-        try:
-            logger.info(f"Webfinger {handle} at {url}")
-            resp = httpx.get(
-                url,
-                params={"resource": f"acct:{handle[1:]}"},
-                headers={
-                    "User-Agent": USER_AGENT,
-                },
-                follow_redirects=True,
-            )
-            resp.raise_for_status()
-            break
-        except Exception:
-            logger.exception(f"Failed to webfinger {handle}")
-    if resp:
-        try:
-            json_resp = resp.json()
-            if json_resp.get("subject", "").startswith("acct:"):
-                return "@" + json_resp["subject"].removeprefix("acct:")
-        except Exception:
-            logger.exception(f"Failed to parse webfinger response for {handle}")
-    return handle
+    return f'@{raw_actor["preferredUsername"]}@{domain.hostname}'  # type: ignore
 class Actor:
@@ -96,7 +60,7 @@ class Actor:
             return self.name
         return self.preferred_username
-    @cached_property
+    @property
     def handle(self) -> str:
         return _handle(self.ap_actor)
@@ -118,21 +82,11 @@ class Actor:
     @property
     def icon_url(self) -> str | None:
-        if icon := self.ap_actor.get("icon"):
-            return icon.get("url")
-        return None
+        return self.ap_actor.get("icon", {}).get("url")
     @property
     def icon_media_type(self) -> str | None:
-        if icon := self.ap_actor.get("icon"):
-            return icon.get("mediaType")
-        return None
-    @property
-    def image_url(self) -> str | None:
-        if image := self.ap_actor.get("image"):
-            return image.get("url")
-        return None
+        return self.ap_actor.get("icon", {}).get("mediaType")
     @property
     def public_key_as_pem(self) -> str:
@@ -147,14 +101,14 @@ class Actor:
         if self.icon_url:
             return media.proxied_media_url(self.icon_url)
         else:
-            return BASE_URL + "/static/nopic.png"
+            return "/static/nopic.png"
     @property
     def resized_icon_url(self) -> str:
         if self.icon_url:
             return media.resized_media_url(self.icon_url, 50)
         else:
-            return BASE_URL + "/static/nopic.png"
+            return "/static/nopic.png"
     @property
     def tags(self) -> list[ap.RawObject]:
@@ -178,18 +132,13 @@ class Actor:
 class RemoteActor(Actor):
-    def __init__(self, ap_actor: ap.RawObject, handle: str | None = None) -> None:
+    def __init__(self, ap_actor: ap.RawObject) -> None:
         if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES:
             raise ValueError(f"Unexpected actor type: {ap_type}")
         self._ap_actor = ap_actor
         self._ap_type = ap_type
-        if handle is None:
-            handle = _handle(ap_actor)
-        self._handle = handle
     @property
     def ap_actor(self) -> ap.RawObject:
         return self._ap_actor
@@ -202,12 +151,8 @@ class RemoteActor(Actor):
     def is_from_db(self) -> bool:
         return False
-    @property
-    def handle(self) -> str:
-        return self._handle
-LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME, handle=f"@{USERNAME}@{WEBFINGER_DOMAIN}")
+LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME)
 async def save_actor(db_session: AsyncSession, ap_actor: ap.RawObject) -> "ActorModel":
@@ -269,8 +214,9 @@ async def fetch_actor(
         if save_if_not_found:
             ap_actor = await ap.fetch(actor_id)
-            # Some softwares uses URL when we expect ID or uses a different casing
-            # (like Birdsite LIVE) , which mean we may already have it in DB
+            # Some softwares uses URL when we expect ID
+            if actor_id == ap_actor.get("url"):
+                # Which mean we may already have it in DB
             existing_actor_by_url = (
                 await db_session.scalars(
                     select(models.Actor).where(
@@ -435,9 +381,6 @@ def _actor_hash(actor: Actor) -> bytes:
     if actor.icon_url:
         h.update(actor.icon_url.encode())
-    if actor.image_url:
-        h.update(actor.image_url.encode())
     if actor.attachments:
         for a in actor.attachments:
             if a.get("type") != "PropertyValue":

View file

@@ -1,5 +1,4 @@
 from datetime import datetime
-from urllib.parse import quote
 import httpx
 from fastapi import APIRouter
@@ -12,7 +11,6 @@ from fastapi.exceptions import HTTPException
 from fastapi.responses import RedirectResponse
 from loguru import logger
 from sqlalchemy import and_
-from sqlalchemy import delete
 from sqlalchemy import func
 from sqlalchemy import or_
 from sqlalchemy import select
@@ -31,7 +29,6 @@ from app.boxes import send_block
 from app.boxes import send_follow
 from app.boxes import send_unblock
 from app.config import EMOJIS
-from app.config import SESSION_TIMEOUT
 from app.config import generate_csrf_token
 from app.config import session_serializer
 from app.config import verify_csrf_token
@@ -60,23 +57,18 @@ async def user_session_or_redirect(
     _RedirectToLoginPage = HTTPException(
         status_code=302,
-        headers={
-            "Location": request.url_for("login") + f"?redirect={quote(redirect_url)}"
-        },
+        headers={"Location": request.url_for("login") + f"?redirect={redirect_url}"},
     )
     if not session:
-        logger.info("No existing admin session")
         raise _RedirectToLoginPage
     try:
-        loaded_session = session_serializer.loads(session, max_age=SESSION_TIMEOUT)
+        loaded_session = session_serializer.loads(session, max_age=3600 * 12)
     except Exception:
-        logger.exception("Failed to validate admin session")
         raise _RedirectToLoginPage
     if not loaded_session.get("is_logged_in"):
-        logger.info(f"Admin session invalidated: {loaded_session}")
         raise _RedirectToLoginPage
     return None
@@ -189,11 +181,8 @@ async def admin_new(
         content += f"{in_reply_to_object.actor.handle} "
         for tag in in_reply_to_object.tags:
             if tag.get("type") == "Mention" and tag["name"] != LOCAL_ACTOR.handle:
-                try:
-                    mentioned_actor = await fetch_actor(db_session, tag["href"])
-                    content += f"{mentioned_actor.handle} "
-                except Exception:
-                    logger.exception(f"Failed to lookup {mentioned_actor}")
+                mentioned_actor = await fetch_actor(db_session, tag["href"])
+                content += f"{mentioned_actor.handle} "
         # Copy the content warning if any
         if in_reply_to_object.summary:
@@ -450,7 +439,6 @@ async def admin_direct_messages(
             models.InboxObject.ap_context.is_not(None),
             # Skip transient object like poll relies
             models.InboxObject.is_transient.is_(False),
-            models.InboxObject.is_deleted.is_(False),
         )
         .group_by(models.InboxObject.ap_context, models.InboxObject.actor_id)
     )
@@ -473,7 +461,6 @@ async def admin_direct_messages(
             models.OutboxObject.ap_context.is_not(None),
             # Skip transient object like poll relies
             models.OutboxObject.is_transient.is_(False),
-            models.OutboxObject.is_deleted.is_(False),
         )
         .group_by(models.OutboxObject.ap_context)
     )
@@ -729,9 +716,13 @@ async def get_notifications(
     actors_metadata = await get_actors_metadata(
         db_session, [notif.actor for notif in notifications if notif.actor]
     )
+    for notif in notifications:
+        notif.is_new = False
+    await db_session.commit()
     more_unread_count = 0
     next_cursor = None
     if notifications and remaining_count > page_size:
         decoded_next_cursor = notifications[-1].created_at
         next_cursor = pagination.encode_cursor(decoded_next_cursor)
@@ -745,8 +736,7 @@ async def get_notifications(
         )
     )
-    # Render the template before we change the new flag on notifications
-    tpl_resp = await templates.render_template(
+    return await templates.render_template(
         db_session,
         request,
         "notifications.html",
@@ -758,13 +748,6 @@ async def get_notifications(
         },
     )
-    if len({notif.id for notif in notifications if notif.is_new}):
-        for notif in notifications:
-            notif.is_new = False
-        await db_session.commit()
-    return tpl_resp
 @router.get("/object")
 async def admin_object(
@@ -867,66 +850,6 @@ async def admin_profile(
     )
-@router.post("/actions/force_delete")
-async def admin_actions_force_delete(
-    request: Request,
-    ap_object_id: str = Form(),
-    redirect_url: str = Form(),
-    csrf_check: None = Depends(verify_csrf_token),
-    db_session: AsyncSession = Depends(get_db_session),
-) -> RedirectResponse:
-    ap_object_to_delete = await get_inbox_object_by_ap_id(db_session, ap_object_id)
-    if not ap_object_to_delete:
-        raise ValueError(f"Cannot find {ap_object_id}")
-    logger.info(f"Deleting {ap_object_to_delete.ap_type}/{ap_object_to_delete.ap_id}")
-    await boxes._revert_side_effect_for_deleted_object(
-        db_session,
-        None,
-        ap_object_to_delete,
-        None,
-    )
-    ap_object_to_delete.is_deleted = True
-    await db_session.commit()
-    return RedirectResponse(redirect_url, status_code=302)
-@router.post("/actions/force_delete_webmention")
-async def admin_actions_force_delete_webmention(
-    request: Request,
-    webmention_id: int = Form(),
-    redirect_url: str = Form(),
-    csrf_check: None = Depends(verify_csrf_token),
-    db_session: AsyncSession = Depends(get_db_session),
-) -> RedirectResponse:
-    webmention = await boxes.get_webmention_by_id(db_session, webmention_id)
-    if not webmention:
-        raise ValueError(f"Cannot find {webmention_id}")
-    if not webmention.outbox_object:
-        raise ValueError(f"Missing related outbox object for {webmention_id}")
-    # TODO: move this
-    logger.info(f"Deleting {webmention_id}")
-    webmention.is_deleted = True
-    await db_session.flush()
-    from app.webmentions import _handle_webmention_side_effects
-    await _handle_webmention_side_effects(
-        db_session, webmention, webmention.outbox_object
-    )
-    # Delete related notifications
-    notif_deletion_result = await db_session.execute(
-        delete(models.Notification)
-        .where(models.Notification.webmention_id == webmention.id)
-        .execution_options(synchronize_session=False)
-    )
-    logger.info(
-        f"Deleted {notif_deletion_result.rowcount} notifications"  # type: ignore
-    )
-    await db_session.commit()
-    return RedirectResponse(redirect_url, status_code=302)
 @router.post("/actions/follow")
 async def admin_actions_follow(
     request: Request,
@@ -965,34 +888,6 @@ async def admin_actions_unblock(
     return RedirectResponse(redirect_url, status_code=302)
-@router.post("/actions/hide_announces")
-async def admin_actions_hide_announces(
-    request: Request,
-    ap_actor_id: str = Form(),
-    redirect_url: str = Form(),
-    csrf_check: None = Depends(verify_csrf_token),
-    db_session: AsyncSession = Depends(get_db_session),
-) -> RedirectResponse:
-    actor = await fetch_actor(db_session, ap_actor_id)
-    actor.are_announces_hidden_from_stream = True
-    await db_session.commit()
-    return RedirectResponse(redirect_url, status_code=302)
-@router.post("/actions/show_announces")
-async def admin_actions_show_announces(
-    request: Request,
-    ap_actor_id: str = Form(),
-    redirect_url: str = Form(),
-    csrf_check: None = Depends(verify_csrf_token),
-    db_session: AsyncSession = Depends(get_db_session),
-) -> RedirectResponse:
-    actor = await fetch_actor(db_session, ap_actor_id)
-    actor.are_announces_hidden_from_stream = False
-    await db_session.commit()
-    return RedirectResponse(redirect_url, status_code=302)
 @router.post("/actions/delete")
 async def admin_actions_delete(
     request: Request,
@@ -1185,7 +1080,7 @@ async def admin_actions_new(
     elif name:
         ap_type = "Article"
-    public_id, _ = await boxes.send_create(
+    public_id = await boxes.send_create(
         db_session,
         ap_type=ap_type,
         source=content,

View file

@@ -12,7 +12,6 @@ from app import activitypub as ap
 from app.actor import LOCAL_ACTOR
 from app.actor import Actor
 from app.actor import RemoteActor
-from app.config import ID
 from app.media import proxied_media_url
 from app.utils.datetime import now
 from app.utils.datetime import parse_isoformat
@@ -213,15 +212,6 @@ class Object:
     def in_reply_to(self) -> str | None:
         return self.ap_object.get("inReplyTo")
-    @property
-    def is_local_reply(self) -> bool:
-        if not self.in_reply_to:
-            return False
-        return bool(
-            self.in_reply_to.startswith(ID) and self.content  # Hide votes from Question
-        )
     @property
     def is_in_reply_to_from_inbox(self) -> bool | None:
         if not self.in_reply_to:
@@ -290,9 +280,6 @@ class Attachment(BaseModel):
     proxied_url: str | None = None
     resized_url: str | None = None
-    width: int | None = None
-    height: int | None = None
     @property
     def mimetype(self) -> str:
         mimetype = self.media_type

View file

@@ -1,5 +1,4 @@
 """Actions related to the AP inbox/outbox."""
-import datetime
 import uuid
 from collections import defaultdict
 from dataclasses import dataclass
@@ -28,11 +27,10 @@ from app.actor import save_actor
 from app.actor import update_actor_if_needed
 from app.ap_object import RemoteObject
 from app.config import BASE_URL
+from app.config import BLOCKED_SERVERS
 from app.config import ID
 from app.config import MANUALLY_APPROVES_FOLLOWERS
 from app.config import set_moved_to
-from app.config import stream_visibility_callback
-from app.customization import ObjectInfo
 from app.database import AsyncSession
 from app.outgoing_activities import new_outgoing_activity
 from app.source import dedup_tags
@@ -43,24 +41,11 @@ from app.utils import webmentions
 from app.utils.datetime import as_utc
 from app.utils.datetime import now
 from app.utils.datetime import parse_isoformat
-from app.utils.facepile import WebmentionReply
 from app.utils.text import slugify
-from app.utils.url import is_hostname_blocked
 AnyboxObject = models.InboxObject | models.OutboxObject
-def is_notification_enabled(notification_type: models.NotificationType) -> bool:
-    """Checks if a given notification type is enabled."""
-    if notification_type.value == "pending_incoming_follower":
-        # This one cannot be disabled as it would prevent manually reviewing
-        # follow requests.
-        return True
-    if notification_type.value in config.CONFIG.disabled_notifications:
-        return False
-    return True
 def allocate_outbox_id() -> str:
     return uuid.uuid4().hex
@@ -179,7 +164,6 @@ async def send_block(db_session: AsyncSession, ap_actor_id: str) -> None:
     await new_outgoing_activity(db_session, actor.inbox_url, outbox_object.id)
     # 4. Create a notification
-    if is_notification_enabled(models.NotificationType.BLOCK):
     notif = models.Notification(
         notification_type=models.NotificationType.BLOCK,
         actor_id=actor.id,
@@ -217,7 +201,7 @@ async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
         raise ValueError("Should never happen")
     outbox_object_to_delete.is_deleted = True
-    await db_session.flush()
+    await db_session.commit()
     # Compute the original recipients
     recipients = await _compute_recipients(
@@ -232,17 +216,14 @@ async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
             db_session, outbox_object_to_delete.in_reply_to
         )
         if replied_object:
-            if replied_object.is_from_outbox:
-                # Different helper here because we also count webmentions
-                new_replies_count = await _get_outbox_replies_count(
-                    db_session, replied_object  # type: ignore
-                )
-            else:
             new_replies_count = await _get_replies_count(
                 db_session, replied_object.ap_id
             )
             replied_object.replies_count = new_replies_count
+            if replied_object.replies_count < 0:
+                logger.warning("negative replies count for {replied_object.ap_id}")
+                replied_object.replies_count = 0
         else:
             logger.info(f"{outbox_object_to_delete.in_reply_to} not found")
@@ -439,9 +420,7 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
         announced_object.announced_via_outbox_object_ap_id = None
         # Send the Undo to the original recipients
-        recipients = await _compute_recipients(
-            db_session, outbox_object_to_undo.ap_object
-        )
+        recipients = await _compute_recipients(db_session, outbox_object.ap_object)
         for rcp in recipients:
             await new_outgoing_activity(db_session, rcp, outbox_object.id)
     elif outbox_object_to_undo.ap_type == "Block":
@@ -461,7 +440,6 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
             outbox_object.id,
         )
-        if is_notification_enabled(models.NotificationType.UNBLOCK):
        notif = models.Notification(
            notification_type=models.NotificationType.UNBLOCK,
            actor_id=blocked_actor.id,
@@ -592,7 +570,7 @@ async def send_create(
     poll_answers: list[str] | None = None,
     poll_duration_in_minutes: int | None = None,
     name: str | None = None,
-) -> tuple[str, models.OutboxObject]:
+) -> str:
     note_id = allocate_outbox_id()
     published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z")
     context = f"{ID}/contexts/" + uuid.uuid4().hex
@@ -767,7 +745,7 @@ async def send_create(
     await db_session.commit()
-    return note_id, outbox_object
+    return note_id
 async def send_vote(
@@ -950,7 +928,7 @@ async def compute_all_known_recipients(db_session: AsyncSession) -> set[str]:
     }
-async def _get_following(db_session: AsyncSession) -> list[models.Following]:
+async def _get_following(db_session: AsyncSession) -> list[models.Follower]:
     return (
         (
             await db_session.scalars(
@@ -1070,32 +1048,6 @@ async def get_outbox_object_by_ap_id(
     )  # type: ignore
-async def get_outbox_object_by_slug_and_short_id(
-    db_session: AsyncSession,
-    slug: str,
-    short_id: str,
-) -> models.OutboxObject | None:
-    return (
-        (
-            await db_session.execute(
-                select(models.OutboxObject)
-                .options(
-                    joinedload(models.OutboxObject.outbox_object_attachments).options(
-                        joinedload(models.OutboxObjectAttachment.upload)
-                    )
-                )
-                .where(
-                    models.OutboxObject.public_id.like(f"{short_id}%"),
-                    models.OutboxObject.slug == slug,
-                    models.OutboxObject.is_deleted.is_(False),
-                )
-            )
-        )
-        .unique()
-        .scalar_one_or_none()
-    )
 async def get_anybox_object_by_ap_id(
     db_session: AsyncSession, ap_id: str
 ) -> AnyboxObject | None:
@@ -1105,20 +1057,6 @@ async def get_anybox_object_by_ap_id(
         return await get_inbox_object_by_ap_id(db_session, ap_id)
-async def get_webmention_by_id(
-    db_session: AsyncSession, webmention_id: int
-) -> models.Webmention | None:
-    return (
-        await db_session.execute(
-            select(models.Webmention)
-            .where(models.Webmention.id == webmention_id)
-            .options(
-                joinedload(models.Webmention.outbox_object),
-            )
-        )
-    ).scalar_one_or_none()  # type: ignore
 async def _handle_delete_activity(
     db_session: AsyncSession,
     from_actor: models.Actor,
@@ -1186,23 +1124,6 @@ async def _handle_delete_activity(
             logger.info("Removing actor from follower")
             await db_session.delete(follower)
-            # Also mark Follow activities for this actor as deleted
-            follow_activities = (
-                await db_session.scalars(
-                    select(models.OutboxObject).where(
-                        models.OutboxObject.ap_type == "Follow",
-                        models.OutboxObject.relates_to_actor_id
-                        == ap_object_to_delete.id,
-                        models.OutboxObject.is_deleted.is_(False),
-                    )
-                )
-            ).all()
-            for follow_activity in follow_activities:
-                logger.info(
-                    f"Marking Follow activity {follow_activity.ap_id} as deleted"
-                )
-                follow_activity.is_deleted = True
         following = (
             await db_session.scalars(
                 select(models.Following).where(
@@ -1263,70 +1184,9 @@ async def _get_replies_count(
     )
-async def _get_outbox_replies_count(
-    db_session: AsyncSession,
-    outbox_object: models.OutboxObject,
-) -> int:
-    return (await _get_replies_count(db_session, outbox_object.ap_id)) + (
-        await db_session.scalar(
-            select(func.count(models.Webmention.id)).where(
-                models.Webmention.is_deleted.is_(False),
-                models.Webmention.outbox_object_id == outbox_object.id,
-                models.Webmention.webmention_type == models.WebmentionType.REPLY,
-            )
-        )
-    )
-async def _get_outbox_likes_count(
-    db_session: AsyncSession,
-    outbox_object: models.OutboxObject,
-) -> int:
-    return (
-        await db_session.scalar(
-            select(func.count(models.InboxObject.id)).where(
-                models.InboxObject.ap_type == "Like",
-                models.InboxObject.relates_to_outbox_object_id == outbox_object.id,
-                models.InboxObject.is_deleted.is_(False),
-            )
-        )
-    ) + (
-        await db_session.scalar(
-            select(func.count(models.Webmention.id)).where(
-                models.Webmention.is_deleted.is_(False),
-                models.Webmention.outbox_object_id == outbox_object.id,
-                models.Webmention.webmention_type == models.WebmentionType.LIKE,
-            )
-        )
-    )
-async def _get_outbox_announces_count(
-    db_session: AsyncSession,
-    outbox_object: models.OutboxObject,
-) -> int:
-    return (
-        await db_session.scalar(
-            select(func.count(models.InboxObject.id)).where(
-                models.InboxObject.ap_type == "Announce",
-                models.InboxObject.relates_to_outbox_object_id == outbox_object.id,
-                models.InboxObject.is_deleted.is_(False),
-            )
-        )
-    ) + (
-        await db_session.scalar(
-            select(func.count(models.Webmention.id)).where(
-                models.Webmention.is_deleted.is_(False),
-                models.Webmention.outbox_object_id == outbox_object.id,
-                models.Webmention.webmention_type == models.WebmentionType.REPOST,
-            )
-        )
-    )
 async def _revert_side_effect_for_deleted_object(
     db_session: AsyncSession,
-    delete_activity: models.InboxObject | None,
+    delete_activity: models.InboxObject,
     deleted_ap_object: models.InboxObject,
     forwarded_by_actor: models.Actor | None,
 ) -> None:
@@ -1354,8 +1214,8 @@ async def _revert_side_effect_for_deleted_object(
                 # also needs to be forwarded
                 is_delete_needs_to_be_forwarded = True
-                new_replies_count = await _get_outbox_replies_count(
-                    db_session, replied_object  # type: ignore
+                new_replies_count = await _get_replies_count(
+                    db_session, replied_object.ap_id
                 )
                 await db_session.execute(
@@ -1363,7 +1223,7 @@ async def _revert_side_effect_for_deleted_object(
                     .where(
                         models.OutboxObject.id == replied_object.id,
                     )
-                    .values(replies_count=new_replies_count - 1)
+                    .values(replies_count=new_replies_count)
                 )
             else:
                 new_replies_count = await _get_replies_count(
@@ -1375,7 +1235,7 @@ async def _revert_side_effect_for_deleted_object(
                     .where(
                         models.InboxObject.id == replied_object.id,
                     )
-                    .values(replies_count=new_replies_count - 1)
+                    .values(replies_count=new_replies_count)
                 )
     if deleted_ap_object.ap_type == "Like" and deleted_ap_object.activity_object_ap_id:
@@ -1385,16 +1245,15 @@ async def _revert_side_effect_for_deleted_object(
         )
         if related_object:
             if related_object.is_from_outbox:
-                likes_count = await _get_outbox_likes_count(db_session, related_object)
                 await db_session.execute(
                     update(models.OutboxObject)
                     .where(
                         models.OutboxObject.id == related_object.id,
                     )
-                    .values(likes_count=likes_count - 1)
+                    .values(likes_count=models.OutboxObject.likes_count - 1)
                 )
     elif (
-        deleted_ap_object.ap_type == "Announce"
+        deleted_ap_object.ap_type == "Annouce"
         and deleted_ap_object.activity_object_ap_id
     ):
         related_object = await get_outbox_object_by_ap_id(
@@ -1403,15 +1262,12 @@ async def _revert_side_effect_for_deleted_object(
         )
         if related_object:
             if related_object.is_from_outbox:
-                announces_count = await _get_outbox_announces_count(
-                    db_session, related_object
-                )
                 await db_session.execute(
                     update(models.OutboxObject)
                     .where(
                         models.OutboxObject.id == related_object.id,
                     )
-                    .values(announces_count=announces_count - 1)
+                    .values(announces_count=models.OutboxObject.announces_count - 1)
                 )
     # Delete any Like/Announce
@@ -1426,8 +1282,7 @@ async def _revert_side_effect_for_deleted_object(
     # If it's a local replies, it was forwarded, so we also need to forward
     # the Delete activity if possible
     if (
-        delete_activity
-        and delete_activity.activity_object_ap_id == deleted_ap_object.ap_id
+        delete_activity.activity_object_ap_id == deleted_ap_object.ap_id
         and delete_activity.has_ld_signature
         and is_delete_needs_to_be_forwarded
     ):
@@ -1540,7 +1395,6 @@ async def _send_accept(
         raise ValueError("Should never happen")
     await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)
-    if is_notification_enabled(models.NotificationType.NEW_FOLLOWER):
     notif = models.Notification(
         notification_type=models.NotificationType.NEW_FOLLOWER,
         actor_id=from_actor.id,
@@ -1584,7 +1438,6 @@ async def _send_reject(
         raise ValueError("Should never happen")
     await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)
-    if is_notification_enabled(models.NotificationType.REJECTED_FOLLOWER):
     notif = models.Notification(
         notification_type=models.NotificationType.REJECTED_FOLLOWER,
         actor_id=from_actor.id,
@@ -1615,7 +1468,6 @@ async def _handle_undo_activity(
                 models.Follower.inbox_object_id == ap_activity_to_undo.id
             )
         )
-        if is_notification_enabled(models.NotificationType.UNFOLLOW):
        notif = models.Notification(
            notification_type=models.NotificationType.UNFOLLOW,
            actor_id=from_actor.id,
@@ -1636,14 +1488,7 @@ async def _handle_undo_activity(
             )
             return
-        liked_obj.likes_count = (
-            await _get_outbox_likes_count(
-                db_session,
-                liked_obj,
-            )
-            - 1
-        )
-        if is_notification_enabled(models.NotificationType.UNDO_LIKE):
+        liked_obj.likes_count = models.OutboxObject.likes_count - 1
         notif = models.Notification(
             notification_type=models.NotificationType.UNDO_LIKE,
             actor_id=from_actor.id,
@@ -1668,7 +1513,6 @@ async def _handle_undo_activity(
             announced_obj_from_outbox.announces_count = (
                 models.OutboxObject.announces_count - 1
             )
-            if is_notification_enabled(models.NotificationType.UNDO_ANNOUNCE):
             notif = models.Notification(
                 notification_type=models.NotificationType.UNDO_ANNOUNCE,
                 actor_id=from_actor.id,
@@ -1677,7 +1521,6 @@ async def _handle_undo_activity(
             )
             db_session.add(notif)
     elif ap_activity_to_undo.ap_type == "Block":
-        if is_notification_enabled(models.NotificationType.UNBLOCKED):
         notif = models.Notification(
             notification_type=models.NotificationType.UNBLOCKED,
             actor_id=from_actor.id,
@@ -1747,7 +1590,6 @@ async def _handle_move_activity(
     else:
         logger.info(f"Already following target {new_actor_id}")
-    if is_notification_enabled(models.NotificationType.MOVE):
     notif = models.Notification(
         notification_type=models.NotificationType.MOVE,
         actor_id=new_actor.id,
@@ -1911,26 +1753,16 @@ async def _process_note_object(
     is_from_following = ro.actor.ap_id in {f.ap_actor_id for f in following}
     is_reply = bool(ro.in_reply_to)
-    is_local_reply = ro.is_local_reply
+    is_local_reply = (
+        ro.in_reply_to
+        and ro.in_reply_to.startswith(BASE_URL)
+        and ro.content  # Hide votes from Question
+    )
     is_mention = False
-    hashtags = []
     tags = ro.ap_object.get("tag", [])
     for tag in ap.as_list(tags):
         if tag.get("name") == LOCAL_ACTOR.handle or tag.get("href") == LOCAL_ACTOR.url:
             is_mention = True
-        if tag.get("type") == "Hashtag":
-            if tag_name := tag.get("name"):
-                hashtags.append(tag_name)
-    object_info = ObjectInfo(
-        is_reply=is_reply,
-        is_local_reply=is_local_reply,
-        is_mention=is_mention,
-        is_from_following=is_from_following,
-        hashtags=hashtags,
-        actor_handle=ro.actor.handle,
-        remote_object=ro,
-    )
     inbox_object = models.InboxObject(
         server=urlparse(ro.ap_id).hostname,
@@ -1948,7 +1780,9 @@ async def _process_note_object(
         activity_object_ap_id=ro.activity_object_ap_id,
         og_meta=await opengraph.og_meta_from_note(db_session, ro),
         # Hide replies from the stream
-        is_hidden_from_stream=not stream_visibility_callback(object_info),
+        is_hidden_from_stream=not (
+            (not is_reply and is_from_following) or is_mention or is_local_reply
+        ),
         # We may already have some replies in DB
         replies_count=await _get_replies_count(db_session, ro.ap_id),
     )
@@ -1974,8 +1808,8 @@ async def _process_note_object(
                 replied_object,  # type: ignore  # outbox check below
             )
         else:
-            new_replies_count = await _get_outbox_replies_count(
-                db_session, replied_object  # type: ignore
+            new_replies_count = await _get_replies_count(
+                db_session, replied_object.ap_id
             )
         await db_session.execute(
@@ -2023,7 +1857,7 @@ async def _process_note_object(
                 inbox_object_id=parent_activity.id,
             )
-    if is_mention and is_notification_enabled(models.NotificationType.MENTION):
+    if is_mention:
         notif = models.Notification(
             notification_type=models.NotificationType.MENTION,
             actor_id=from_actor.id,
@@ -2122,7 +1956,6 @@ async def _handle_announce_activity(
                 models.OutboxObject.announces_count + 1
             )
-            if is_notification_enabled(models.NotificationType.ANNOUNCE):
             notif = models.Notification(
                 notification_type=models.NotificationType.ANNOUNCE,
                 actor_id=actor.id,
@@ -2205,10 +2038,7 @@ async def _handle_announce_activity(
             db_session.add(announced_inbox_object)
             await db_session.flush()
             announce_activity.relates_to_inbox_object_id = announced_inbox_object.id
-            announce_activity.is_hidden_from_stream = (
-                not is_from_following
-                or announce_activity.actor.are_announces_hidden_from_stream
-            )
+            announce_activity.is_hidden_from_stream = not is_from_following
 async def _handle_like_activity(
@@ -2225,12 +2055,8 @@ async def _handle_like_activity(
         )
         await db_session.delete(like_activity)
     else:
-        relates_to_outbox_object.likes_count = await _get_outbox_likes_count(
-            db_session,
-            relates_to_outbox_object,
-        )
-        if is_notification_enabled(models.NotificationType.LIKE):
+        relates_to_outbox_object.likes_count = models.OutboxObject.likes_count + 1
         notif = models.Notification(
             notification_type=models.NotificationType.LIKE,
             actor_id=actor.id,
@@ -2254,7 +2080,6 @@ async def _handle_block_activity(
         return
     # Create a notification
-    if is_notification_enabled(models.NotificationType.BLOCKED):
     notif = models.Notification(
         notification_type=models.NotificationType.BLOCKED,
         actor_id=actor.id,
@@ -2315,7 +2140,7 @@ async def save_to_inbox(
         logger.exception("Failed to fetch actor")
         return
-    if is_hostname_blocked(actor.server):
+    if actor.server in BLOCKED_SERVERS:
         logger.warning(f"Server {actor.server} is blocked")
         return
@@ -2463,7 +2288,6 @@ async def save_to_inbox(
         if activity_ro.ap_type == "Accept"
         else models.NotificationType.FOLLOW_REQUEST_REJECTED
     )
-    if is_notification_enabled(notif_type):
     notif = models.Notification(
         notification_type=notif_type,
         actor_id=actor.id,
@@ -2642,21 +2466,11 @@ async def fetch_actor_collection(db_session: AsyncSession, url: str) -> list[Act
 @dataclass
 class ReplyTreeNode:
-    ap_object: AnyboxObject | None
-    wm_reply: WebmentionReply | None
+    ap_object: AnyboxObject
     children: list["ReplyTreeNode"]
     is_requested: bool = False
     is_root: bool = False
-    @property
-    def published_at(self) -> datetime.datetime:
-        if self.ap_object:
-            return self.ap_object.ap_published_at  # type: ignore
-        elif self.wm_reply:
-            return self.wm_reply.published_at
-        else:
-            raise ValueError(f"Should never happen: {self}")
 async def get_replies_tree(
     db_session: AsyncSession,
@@ -2730,7 +2544,6 @@ async def get_replies_tree(
         for child in index.get(node.ap_object.ap_id, []):  # type: ignore
             child_node = ReplyTreeNode(
                 ap_object=child,
-                wm_reply=None,
                 is_requested=child.ap_id == requested_object.ap_id,  # type: ignore
                 children=[],
            )
@@ -2739,7 +2552,7 @@ async def get_replies_tree(
         return sorted(
             children,
-            key=lambda node: node.published_at,
+            key=lambda node: node.ap_object.ap_published_at,  # type: ignore
         )
     if None in nodes_by_in_reply_to:
@@ -2752,7 +2565,6 @@ async def get_replies_tree(
     root_node = ReplyTreeNode(
         ap_object=root_ap_object,
-        wm_reply=None,
         is_root=True,
         is_requested=root_ap_object.ap_id == requested_object.ap_id,
         children=[],

View file

@@ -16,8 +16,6 @@ from loguru import logger
 from mistletoe import markdown  # type: ignore
 from app.customization import _CUSTOM_ROUTES
-from app.customization import _StreamVisibilityCallback
-from app.customization import default_stream_visibility_callback
 from app.utils.emoji import _load_emojis
 from app.utils.version import get_version_commit
@@ -44,14 +42,11 @@ except FileNotFoundError:
     JS_HASH = "none"
 try:
     # To keep things simple, we keep a single hash for the 2 files
-    dat = b""
-    for j in [
-        ROOT_DIR / "app" / "static" / "common.js",
-        ROOT_DIR / "app" / "static" / "common-admin.js",
-        ROOT_DIR / "app" / "static" / "new.js",
-    ]:
-        dat += j.read_bytes()
-    JS_HASH = hashlib.md5(dat, usedforsecurity=False).hexdigest()
+    js_data_common = (ROOT_DIR / "app" / "static" / "common-admin.js").read_bytes()
+    js_data_new = (ROOT_DIR / "app" / "static" / "new.js").read_bytes()
+    JS_HASH = hashlib.md5(
+        js_data_common + js_data_new, usedforsecurity=False
+    ).hexdigest()
 except FileNotFoundError:
     pass
@@ -96,8 +91,7 @@ class Config(pydantic.BaseModel):
     name: str
     summary: str
     https: bool
-    icon_url: str | None = None
-    image_url: str | None = None
+    icon_url: str
     secret: str
     debug: bool = False
     trusted_hosts: list[str] = ["127.0.0.1"]
@@ -117,17 +111,10 @@ class Config(pydantic.BaseModel):
     custom_content_security_policy: str | None = None
-    webfinger_domain: str | None = None
     # Config items to make tests easier
     sqlalchemy_database: str | None = None
     key_path: str | None = None
-    session_timeout: int = 3600 * 24 * 3  # in seconds, 3 days by default
-    csrf_token_exp: int = 3600
-    disabled_notifications: list[str] = []
     # Only set when the app is served on a non-root path
     id: str | None = None
@@ -171,10 +158,6 @@ ID = f"{_SCHEME}://{DOMAIN}"
 if CONFIG.id:
     ID = CONFIG.id
 USERNAME = CONFIG.username
-# Allow to use @handle@webfinger-domain.tld while hosting the server at domain.tld
-WEBFINGER_DOMAIN = CONFIG.webfinger_domain or DOMAIN
 MANUALLY_APPROVES_FOLLOWERS = CONFIG.manually_approves_followers
 HIDES_FOLLOWERS = CONFIG.hides_followers
 HIDES_FOLLOWING = CONFIG.hides_following
@@ -187,7 +170,6 @@ ALSO_KNOWN_AS = CONFIG.also_known_as
 CUSTOM_CONTENT_SECURITY_POLICY = CONFIG.custom_content_security_policy
 INBOX_RETENTION_DAYS = CONFIG.inbox_retention_days
-SESSION_TIMEOUT = CONFIG.session_timeout
 CUSTOM_FOOTER = (
     markdown(CONFIG.custom_footer.replace("{version}", VERSION))
     if CONFIG.custom_footer
@@ -264,7 +246,7 @@ def verify_csrf_token(
     if redirect_url:
         please_try_again = f'<a href="{redirect_url}">please try again</a>'
     try:
-        csrf_serializer.loads(csrf_token, max_age=CONFIG.csrf_token_exp)
+        csrf_serializer.loads(csrf_token, max_age=1800)
     except (itsdangerous.BadData, itsdangerous.SignatureExpired):
         logger.exception("Failed to verify CSRF token")
         raise HTTPException(
@@ -274,16 +256,5 @@ def verify_csrf_token(
     return None
-def hmac_sha256() -> hmac.HMAC:
+def hmac_sha256():
     return hmac.new(CONFIG.secret.encode(), digestmod=hashlib.sha256)
-stream_visibility_callback: _StreamVisibilityCallback
-try:
-    from data.stream import (  # type: ignore  # noqa: F401, E501
-        custom_stream_visibility_callback,
-    )
-    stream_visibility_callback = custom_stream_visibility_callback
-except ImportError:
-    stream_visibility_callback = default_stream_visibility_callback

View file

@@ -1,19 +1,12 @@
-from dataclasses import dataclass
 from pathlib import Path
-from typing import TYPE_CHECKING
 from typing import Any
 from typing import Callable
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import Request
-from loguru import logger
 from starlette.responses import JSONResponse
-if TYPE_CHECKING:
-    from app.ap_object import RemoteObject
 _DATA_DIR = Path().parent.resolve() / "data"
 _Handler = Callable[..., Any]
@@ -117,39 +110,3 @@ def get_custom_router() -> APIRouter | None:
         router.add_api_route(path, handler.handler)
     return router
-@dataclass
-class ObjectInfo:
-    # Is it a reply?
-    is_reply: bool
-    # Is it a reply to an outbox object
-    is_local_reply: bool
-    # Is the object mentioning the local actor
-    is_mention: bool
-    # Is it from someone the local actor is following
-    is_from_following: bool
-    # List of hashtags, e.g. #microblogpub
-    hashtags: list[str]
-    # @dev@microblog.pub
-    actor_handle: str
-    remote_object: "RemoteObject"
-_StreamVisibilityCallback = Callable[[ObjectInfo], bool]
-def default_stream_visibility_callback(object_info: ObjectInfo) -> bool:
-    result = (
-        (not object_info.is_reply and object_info.is_from_following)
-        or object_info.is_mention
-        or object_info.is_local_reply
-    )
-    logger.info(f"{object_info=}/{result=}")
-    return result

View file

@ -1,6 +1,5 @@
import base64 import base64
import hashlib import hashlib
import json
import typing import typing
from dataclasses import dataclass from dataclasses import dataclass
from datetime import datetime from datetime import datetime
@ -23,12 +22,12 @@ from sqlalchemy import select
from app import activitypub as ap from app import activitypub as ap
from app import config from app import config
from app.config import BLOCKED_SERVERS
from app.config import KEY_PATH from app.config import KEY_PATH
from app.database import AsyncSession from app.database import AsyncSession
from app.database import get_db_session from app.database import get_db_session
from app.key import Key from app.key import Key
from app.utils.datetime import now from app.utils.datetime import now
from app.utils.url import is_hostname_blocked
_KEY_CACHE: MutableMapping[str, Key] = LFUCache(256) _KEY_CACHE: MutableMapping[str, Key] = LFUCache(256)
@ -184,7 +183,7 @@ async def httpsig_checker(
) )
server = urlparse(key_id).hostname server = urlparse(key_id).hostname
if is_hostname_blocked(server): if server in BLOCKED_SERVERS:
return HTTPSigInfo( return HTTPSigInfo(
has_valid_signature=False, has_valid_signature=False,
server=server, server=server,
@ -199,32 +198,6 @@ async def httpsig_checker(
server=server, server=server,
) )
# Try to drop Delete activity spams early on, this prevent making an extra
# HTTP requests trying to fetch an unavailable actor to verify the HTTP sig
try:
if request.method == "POST" and request.url.path.endswith("/inbox"):
from app import models # TODO: solve this circular import
activity = json.loads(body)
actor_id = ap.get_id(activity["actor"])
if (
ap.as_list(activity["type"])[0] == "Delete"
and actor_id == ap.get_id(activity["object"])
and not (
await db_session.scalars(
select(models.Actor).where(
models.Actor.ap_id == actor_id,
)
)
).one_or_none()
):
logger.info(f"Dropping Delete activity early for {body=}")
raise fastapi.HTTPException(status_code=202)
except fastapi.HTTPException as http_exc:
raise http_exc
except Exception:
logger.exception("Failed to check for Delete spam")
# logger.debug(f"hsig={hsig}") # logger.debug(f"hsig={hsig}")
signed_string, signature_date = _build_signed_string( signed_string, signature_date = _build_signed_string(
hsig["headers"], hsig["headers"],


@ -3,6 +3,7 @@ import traceback
from datetime import datetime from datetime import datetime
from datetime import timedelta from datetime import timedelta
import httpx
from loguru import logger from loguru import logger
from sqlalchemy import func from sqlalchemy import func
from sqlalchemy import select from sqlalchemy import select
@ -60,7 +61,7 @@ def _set_next_try(
if not outgoing_activity.tries: if not outgoing_activity.tries:
raise ValueError("Should never happen") raise ValueError("Should never happen")
if outgoing_activity.tries >= _MAX_RETRIES: if outgoing_activity.tries == _MAX_RETRIES:
outgoing_activity.is_errored = True outgoing_activity.is_errored = True
outgoing_activity.next_try = None outgoing_activity.next_try = None
else: else:
@ -107,7 +108,6 @@ async def process_next_incoming_activity(
next_activity.tries = next_activity.tries + 1 next_activity.tries = next_activity.tries + 1
next_activity.last_try = now() next_activity.last_try = now()
await db_session.commit()
if next_activity.ap_object and next_activity.sent_by_ap_actor_id: if next_activity.ap_object and next_activity.sent_by_ap_actor_id:
try: try:
@ -120,16 +120,13 @@ async def process_next_incoming_activity(
), ),
timeout=60, timeout=60,
) )
except asyncio.exceptions.TimeoutError: except httpx.TimeoutException as exc:
logger.error("Activity took too long to process") url = exc._request.url if exc._request else None
await db_session.rollback() logger.error(f"Failed, HTTP timeout when fetching {url}")
await db_session.refresh(next_activity)
next_activity.error = traceback.format_exc() next_activity.error = traceback.format_exc()
_set_next_try(next_activity) _set_next_try(next_activity)
except Exception: except Exception:
logger.exception("Failed") logger.exception("Failed")
await db_session.rollback()
await db_session.refresh(next_activity)
next_activity.error = traceback.format_exc() next_activity.error = traceback.format_exc()
_set_next_try(next_activity) _set_next_try(next_activity)
else: else:
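
Side note on the retry cutoff switching from `==` to `>=` here (and in the outgoing-activities module further down): a strict equality check never fires if `tries` ever skips past the maximum, so the activity would be retried forever. A toy illustration; the `_MAX_RETRIES` value is illustrative, not necessarily the real constant.

    _MAX_RETRIES = 8  # illustrative value


    def is_errored(tries: int) -> bool:
        # New behaviour: anything at or past the cap is marked errored.
        return tries >= _MAX_RETRIES


    # The old `tries == _MAX_RETRIES` check would miss 9, 10, ... entirely.
    print([t for t in range(7, 12) if is_errored(t)])  # [8, 9, 10, 11]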


@ -10,12 +10,9 @@ from fastapi import Form
from fastapi import HTTPException from fastapi import HTTPException
from fastapi import Request from fastapi import Request
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from fastapi.security import HTTPBasic from fastapi.responses import RedirectResponse
from fastapi.security import HTTPBasicCredentials
from loguru import logger from loguru import logger
from pydantic import BaseModel
from sqlalchemy import select from sqlalchemy import select
from sqlalchemy.orm import joinedload
from app import config from app import config
from app import models from app import models
@ -24,12 +21,9 @@ from app.admin import user_session_or_redirect
from app.config import verify_csrf_token from app.config import verify_csrf_token
from app.database import AsyncSession from app.database import AsyncSession
from app.database import get_db_session from app.database import get_db_session
from app.redirect import redirect
from app.utils import indieauth from app.utils import indieauth
from app.utils.datetime import now from app.utils.datetime import now
basic_auth = HTTPBasic()
router = APIRouter() router = APIRouter()
@ -44,55 +38,9 @@ async def well_known_authorization_server(
"code_challenge_methods_supported": ["S256"], "code_challenge_methods_supported": ["S256"],
"revocation_endpoint": request.url_for("indieauth_revocation_endpoint"), "revocation_endpoint": request.url_for("indieauth_revocation_endpoint"),
"revocation_endpoint_auth_methods_supported": ["none"], "revocation_endpoint_auth_methods_supported": ["none"],
"registration_endpoint": request.url_for("oauth_registration_endpoint"),
"introspection_endpoint": request.url_for("oauth_introspection_endpoint"),
} }
class OAuthRegisterClientRequest(BaseModel):
client_name: str
redirect_uris: list[str] | str
client_uri: str | None = None
logo_uri: str | None = None
scope: str | None = None
@router.post("/oauth/register")
async def oauth_registration_endpoint(
register_client_request: OAuthRegisterClientRequest,
db_session: AsyncSession = Depends(get_db_session),
) -> JSONResponse:
"""Implements OAuth 2.0 Dynamic Registration."""
client = models.OAuthClient(
client_name=register_client_request.client_name,
redirect_uris=[register_client_request.redirect_uris]
if isinstance(register_client_request.redirect_uris, str)
else register_client_request.redirect_uris,
client_uri=register_client_request.client_uri,
logo_uri=register_client_request.logo_uri,
scope=register_client_request.scope,
client_id=secrets.token_hex(16),
client_secret=secrets.token_hex(32),
)
db_session.add(client)
await db_session.commit()
return JSONResponse(
content={
**register_client_request.dict(),
"client_id_issued_at": int(client.created_at.timestamp()), # type: ignore
"grant_types": ["authorization_code", "refresh_token"],
"client_secret_expires_at": 0,
"client_id": client.client_id,
"client_secret": client.client_secret,
},
status_code=201,
)
@router.get("/auth") @router.get("/auth")
async def indieauth_authorization_endpoint( async def indieauth_authorization_endpoint(
request: Request, request: Request,
@ -108,29 +56,12 @@ async def indieauth_authorization_endpoint(
code_challenge = request.query_params.get("code_challenge", "") code_challenge = request.query_params.get("code_challenge", "")
code_challenge_method = request.query_params.get("code_challenge_method", "") code_challenge_method = request.query_params.get("code_challenge_method", "")
# Check if the authorization request is coming from an OAuth client
registered_client = (
await db_session.scalars(
select(models.OAuthClient).where(
models.OAuthClient.client_id == client_id,
)
)
).one_or_none()
if registered_client:
client = {
"name": registered_client.client_name,
"logo": registered_client.logo_uri,
"url": registered_client.client_uri,
}
else:
client = await indieauth.get_client_id_data(client_id) # type: ignore
return await templates.render_template( return await templates.render_template(
db_session, db_session,
request, request,
"indieauth_flow.html", "indieauth_flow.html",
dict( dict(
client=client, client=await indieauth.get_client_id_data(client_id),
scopes=scope, scopes=scope,
redirect_uri=redirect_uri, redirect_uri=redirect_uri,
state=state, state=state,
@ -149,7 +80,7 @@ async def indieauth_flow(
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
csrf_check: None = Depends(verify_csrf_token), csrf_check: None = Depends(verify_csrf_token),
_: None = Depends(user_session_or_redirect), _: None = Depends(user_session_or_redirect),
) -> templates.TemplateResponse: ) -> RedirectResponse:
form_data = await request.form() form_data = await request.form()
logger.info(f"{form_data=}") logger.info(f"{form_data=}")
@ -183,8 +114,9 @@ async def indieauth_flow(
db_session.add(auth_request) db_session.add(auth_request)
await db_session.commit() await db_session.commit()
return await redirect( return RedirectResponse(
request, db_session, redirect_uri + f"?code={code}&state={state}&iss={iss}" redirect_uri + f"?code={code}&state={state}&iss={iss}",
status_code=302,
) )
@ -275,17 +207,17 @@ async def indieauth_token_endpoint(
form_data = await request.form() form_data = await request.form()
logger.info(f"{form_data=}") logger.info(f"{form_data=}")
grant_type = form_data.get("grant_type", "authorization_code") grant_type = form_data.get("grant_type", "authorization_code")
if grant_type not in ["authorization_code", "refresh_token"]: if grant_type != "authorization_code":
raise ValueError(f"Invalid grant_type {grant_type}") raise ValueError(f"Invalid grant_type {grant_type}")
code = form_data["code"]
# These must match the params from the first request # These must match the params from the first request
client_id = form_data["client_id"] client_id = form_data["client_id"]
code_verifier = form_data.get("code_verifier")
if grant_type == "authorization_code":
code = form_data["code"]
redirect_uri = form_data["redirect_uri"] redirect_uri = form_data["redirect_uri"]
# code_verifier is optional for backward compat # code_verifier is optional for backward compat
code_verifier = form_data.get("code_verifier")
is_code_valid, auth_code_request = await _check_auth_code( is_code_valid, auth_code_request = await _check_auth_code(
db_session, db_session,
code=code, code=code,
@ -299,38 +231,12 @@ async def indieauth_token_endpoint(
status_code=400, status_code=400,
) )
elif grant_type == "refresh_token":
refresh_token = form_data["refresh_token"]
access_token = (
await db_session.scalars(
select(models.IndieAuthAccessToken)
.where(
models.IndieAuthAccessToken.refresh_token == refresh_token,
models.IndieAuthAccessToken.was_refreshed.is_(False),
)
.options(
joinedload(
models.IndieAuthAccessToken.indieauth_authorization_request
)
)
)
).one_or_none()
if not access_token:
raise ValueError("invalid refresh token")
if access_token.indieauth_authorization_request.client_id != client_id:
raise ValueError("invalid client ID")
auth_code_request = access_token.indieauth_authorization_request
access_token.was_refreshed = True
if not auth_code_request: if not auth_code_request:
raise ValueError("Should never happen") raise ValueError("Should never happen")
access_token = models.IndieAuthAccessToken( access_token = models.IndieAuthAccessToken(
indieauth_authorization_request_id=auth_code_request.id, indieauth_authorization_request_id=auth_code_request.id,
access_token=secrets.token_urlsafe(32), access_token=secrets.token_urlsafe(32),
refresh_token=secrets.token_urlsafe(32),
expires_in=3600, expires_in=3600,
scope=auth_code_request.scope, scope=auth_code_request.scope,
) )
@ -340,7 +246,6 @@ async def indieauth_token_endpoint(
return JSONResponse( return JSONResponse(
content={ content={
"access_token": access_token.access_token, "access_token": access_token.access_token,
"refresh_token": access_token.refresh_token,
"token_type": "Bearer", "token_type": "Bearer",
"scope": auth_code_request.scope, "scope": auth_code_request.scope,
"me": config.ID + "/", "me": config.ID + "/",
@ -356,10 +261,8 @@ async def _check_access_token(
) -> tuple[bool, models.IndieAuthAccessToken | None]: ) -> tuple[bool, models.IndieAuthAccessToken | None]:
access_token_info = ( access_token_info = (
await db_session.scalars( await db_session.scalars(
select(models.IndieAuthAccessToken) select(models.IndieAuthAccessToken).where(
.where(models.IndieAuthAccessToken.access_token == token) models.IndieAuthAccessToken.access_token == token
.options(
joinedload(models.IndieAuthAccessToken.indieauth_authorization_request)
) )
) )
).one_or_none() ).one_or_none()
@ -382,9 +285,6 @@ async def _check_access_token(
@dataclass(frozen=True) @dataclass(frozen=True)
class AccessTokenInfo: class AccessTokenInfo:
scopes: list[str] scopes: list[str]
client_id: str | None
access_token: str
exp: int
async def verify_access_token( async def verify_access_token(
@ -411,71 +311,9 @@ async def verify_access_token(
return AccessTokenInfo( return AccessTokenInfo(
scopes=access_token.scope.split(), scopes=access_token.scope.split(),
client_id=(
access_token.indieauth_authorization_request.client_id
if access_token.indieauth_authorization_request
else None
),
access_token=access_token.access_token,
exp=int(
(
access_token.created_at.replace(tzinfo=timezone.utc)
+ timedelta(seconds=access_token.expires_in)
).timestamp()
),
) )
async def check_access_token(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
) -> AccessTokenInfo | None:
token = request.headers.get("Authorization", "").removeprefix("Bearer ")
if not token:
return None
is_token_valid, access_token = await _check_access_token(db_session, token)
if not is_token_valid:
return None
if not access_token or not access_token.scope:
raise ValueError("Should never happen")
access_token_info = AccessTokenInfo(
scopes=access_token.scope.split(),
client_id=(
access_token.indieauth_authorization_request.client_id
if access_token.indieauth_authorization_request
else None
),
access_token=access_token.access_token,
exp=int(
(
access_token.created_at.replace(tzinfo=timezone.utc)
+ timedelta(seconds=access_token.expires_in)
).timestamp()
),
)
logger.info(
"Authenticated with access token from client_id="
f"{access_token_info.client_id} scopes={access_token.scope}"
)
return access_token_info
async def enforce_access_token(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
) -> AccessTokenInfo:
maybe_access_token_info = await check_access_token(request, db_session)
if not maybe_access_token_info:
raise HTTPException(status_code=401, detail="access token required")
return maybe_access_token_info
@router.post("/revoke_token") @router.post("/revoke_token")
async def indieauth_revocation_endpoint( async def indieauth_revocation_endpoint(
request: Request, request: Request,
@ -495,58 +333,3 @@ async def indieauth_revocation_endpoint(
content={}, content={},
status_code=200, status_code=200,
) )
@router.post("/token_introspection")
async def oauth_introspection_endpoint(
request: Request,
credentials: HTTPBasicCredentials = Depends(basic_auth),
db_session: AsyncSession = Depends(get_db_session),
token: str = Form(),
) -> JSONResponse:
registered_client = (
await db_session.scalars(
select(models.OAuthClient).where(
models.OAuthClient.client_id == credentials.username,
models.OAuthClient.client_secret == credentials.password,
)
)
).one_or_none()
if not registered_client:
raise HTTPException(status_code=401, detail="unauthenticated")
access_token = (
await db_session.scalars(
select(models.IndieAuthAccessToken)
.where(models.IndieAuthAccessToken.access_token == token)
.join(
models.IndieAuthAuthorizationRequest,
models.IndieAuthAccessToken.indieauth_authorization_request_id
== models.IndieAuthAuthorizationRequest.id,
)
.where(
models.IndieAuthAuthorizationRequest.client_id == credentials.username
)
)
).one_or_none()
if not access_token:
return JSONResponse(content={"active": False})
is_token_valid, _ = await _check_access_token(db_session, token)
if not is_token_valid:
return JSONResponse(content={"active": False})
return JSONResponse(
content={
"active": True,
"client_id": credentials.username,
"scope": access_token.scope,
"exp": int(
(
access_token.created_at.replace(tzinfo=timezone.utc)
+ timedelta(seconds=access_token.expires_in)
).timestamp()
),
},
status_code=200,
)
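
Taken together, the endpoints added above layer OAuth 2.0 dynamic client registration, a refresh_token grant, and token introspection on top of the existing IndieAuth flow. A hedged client-side sketch of how they fit together using httpx; the instance URL, credentials, and the `/token` path for the token endpoint are assumptions (only `/oauth/register`, `/auth`, and `/token_introspection` appear in this diff).

    import httpx

    BASE = "https://microblog.example"  # placeholder instance

    # 1. Dynamic client registration (POST /oauth/register, added above).
    reg = httpx.post(
        f"{BASE}/oauth/register",
        json={
            "client_name": "my-client",
            "redirect_uris": ["https://client.example/callback"],
            "scope": "read write",
        },
    ).json()
    client_id, client_secret = reg["client_id"], reg["client_secret"]

    # 2. The browser-based /auth flow yields an authorization code; both
    #    values below are placeholders standing in for that step.
    code = "code-from-the-redirect"
    code_verifier = "pkce-code-verifier"

    token = httpx.post(
        f"{BASE}/token",  # token endpoint path assumed, not shown in the diff
        data={
            "grant_type": "authorization_code",
            "code": code,
            "client_id": client_id,
            "redirect_uri": "https://client.example/callback",
            "code_verifier": code_verifier,
        },
    ).json()

    # 3. Later, trade the refresh token for a new access token (new grant above).
    refreshed = httpx.post(
        f"{BASE}/token",
        data={
            "grant_type": "refresh_token",
            "refresh_token": token["refresh_token"],
            "client_id": client_id,
        },
    ).json()

    # 4. Resource servers can check a token via the introspection endpoint,
    #    authenticating with HTTP Basic (client_id / client_secret).
    info = httpx.post(
        f"{BASE}/token_introspection",
        auth=(client_id, client_secret),
        data={"token": refreshed["access_token"]},
    ).json()
    print(info.get("active"))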


@ -23,13 +23,6 @@ requests_loader = pyld.documentloader.requests.requests_document_loader()
def _loader(url, options={}): def _loader(url, options={}):
# See https://github.com/digitalbazaar/pyld/issues/133 # See https://github.com/digitalbazaar/pyld/issues/133
options["headers"]["Accept"] = "application/ld+json" options["headers"]["Accept"] = "application/ld+json"
# XXX: temp fix/hack is it seems to be down for now
if url == "https://w3id.org/identity/v1":
url = (
"https://raw.githubusercontent.com/web-payments/web-payments.org"
"/master/contexts/identity-v1.jsonld"
)
return requests_loader(url, options) return requests_loader(url, options)
@ -41,7 +34,7 @@ def _options_hash(doc: ap.RawObject) -> str:
for k in ["type", "id", "signatureValue"]: for k in ["type", "id", "signatureValue"]:
if k in doc: if k in doc:
del doc[k] del doc[k]
doc["@context"] = "https://w3id.org/security/v1" doc["@context"] = "https://w3id.org/identity/v1"
normalized = jsonld.normalize( normalized = jsonld.normalize(
doc, {"algorithm": "URDNA2015", "format": "application/nquads"} doc, {"algorithm": "URDNA2015", "format": "application/nquads"}
) )


@ -62,7 +62,6 @@ from app.config import DOMAIN
from app.config import ID from app.config import ID
from app.config import USER_AGENT from app.config import USER_AGENT
from app.config import USERNAME from app.config import USERNAME
from app.config import WEBFINGER_DOMAIN
from app.config import is_activitypub_requested from app.config import is_activitypub_requested
from app.config import verify_csrf_token from app.config import verify_csrf_token
from app.customization import get_custom_router from app.customization import get_custom_router
@ -74,15 +73,12 @@ from app.templates import is_current_user_admin
from app.uploads import UPLOAD_DIR from app.uploads import UPLOAD_DIR
from app.utils import pagination from app.utils import pagination
from app.utils.emoji import EMOJIS_BY_NAME from app.utils.emoji import EMOJIS_BY_NAME
from app.utils.facepile import Face
from app.utils.facepile import WebmentionReply
from app.utils.facepile import merge_faces
from app.utils.highlight import HIGHLIGHT_CSS_HASH from app.utils.highlight import HIGHLIGHT_CSS_HASH
from app.utils.url import check_url from app.utils.url import check_url
from app.webfinger import get_remote_follow_template from app.webfinger import get_remote_follow_template
# Only images <1MB will be cached, so 32MB of data will be cached # Only images <1MB will be cached, so 64MB of data will be cached
_RESIZED_CACHE: MutableMapping[tuple[str, int], tuple[bytes, str, Any]] = LFUCache(32) _RESIZED_CACHE: MutableMapping[tuple[str, int], tuple[bytes, str, Any]] = LFUCache(64)
# TODO(ts): # TODO(ts):
@ -258,34 +254,11 @@ class ActivityPubResponse(JSONResponse):
media_type = "application/activity+json" media_type = "application/activity+json"
async def redirect_to_remote_instance(
request: Request,
db_session: AsyncSession,
url: str,
) -> templates.TemplateResponse:
"""
Similar to RedirectResponse, but uses a 200 response with HTML.
Needed for remote redirects on form submission endpoints,
since our CSP policy disallows remote form submission.
https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984
"""
return await templates.render_template(
db_session,
request,
"redirect_to_remote_instance.html",
{
"request": request,
"url": url,
},
headers={"Refresh": "0;url=" + url},
)
@app.get(config.NavBarItems.NOTES_PATH) @app.get(config.NavBarItems.NOTES_PATH)
async def index( async def index(
request: Request, request: Request,
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
page: int | None = None, page: int | None = None,
) -> templates.TemplateResponse | ActivityPubResponse: ) -> templates.TemplateResponse | ActivityPubResponse:
if is_activitypub_requested(request): if is_activitypub_requested(request):
@ -297,7 +270,7 @@ async def index(
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC, models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.is_deleted.is_(False), models.OutboxObject.is_deleted.is_(False),
models.OutboxObject.is_hidden_from_homepage.is_(False), models.OutboxObject.is_hidden_from_homepage.is_(False),
models.OutboxObject.ap_type.in_(["Announce", "Note", "Video", "Question"]), models.OutboxObject.ap_type != "Article",
) )
q = select(models.OutboxObject).where(*where) q = select(models.OutboxObject).where(*where)
total_count = await db_session.scalar( total_count = await db_session.scalar(
@ -465,12 +438,7 @@ async def followers(
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse: ) -> ActivityPubResponse | templates.TemplateResponse:
if is_activitypub_requested(request): if is_activitypub_requested(request):
maybe_access_token_info = await indieauth.check_access_token( if config.HIDES_FOLLOWERS:
request,
db_session,
)
if config.HIDES_FOLLOWERS and not maybe_access_token_info:
return ActivityPubResponse( return ActivityPubResponse(
await _empty_followx_collection( await _empty_followx_collection(
db_session=db_session, db_session=db_session,
@ -529,12 +497,7 @@ async def following(
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse: ) -> ActivityPubResponse | templates.TemplateResponse:
if is_activitypub_requested(request): if is_activitypub_requested(request):
maybe_access_token_info = await indieauth.check_access_token( if config.HIDES_FOLLOWING:
request,
db_session,
)
if config.HIDES_FOLLOWING and not maybe_access_token_info:
return ActivityPubResponse( return ActivityPubResponse(
await _empty_followx_collection( await _empty_followx_collection(
db_session=db_session, db_session=db_session,
@ -590,34 +553,22 @@ async def following(
@app.get("/outbox") @app.get("/outbox")
async def outbox( async def outbox(
request: Request,
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse: ) -> ActivityPubResponse:
maybe_access_token_info = await indieauth.check_access_token(
request,
db_session,
)
# Default restrictions unless the request is authenticated with an access token
restricted_where = [
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.ap_type.in_(["Create", "Note", "Article", "Announce"]),
]
# By design, we only show the last 20 public activities in the outbox # By design, we only show the last 20 public activities in the outbox
outbox_objects = ( outbox_objects = (
await db_session.scalars( await db_session.scalars(
select(models.OutboxObject) select(models.OutboxObject)
.where( .where(
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.is_deleted.is_(False), models.OutboxObject.is_deleted.is_(False),
*([] if maybe_access_token_info else restricted_where), models.OutboxObject.ap_type.in_(["Create", "Announce"]),
) )
.order_by(models.OutboxObject.ap_published_at.desc()) .order_by(models.OutboxObject.ap_published_at.desc())
.limit(20) .limit(20)
) )
).all() ).all()
return ActivityPubResponse( return ActivityPubResponse(
{ {
"@context": ap.AS_EXTENDED_CTX, "@context": ap.AS_EXTENDED_CTX,
@ -632,49 +583,6 @@ async def outbox(
) )
@app.post("/outbox")
async def post_outbox(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
access_token_info: indieauth.AccessTokenInfo = Depends(
indieauth.enforce_access_token
),
) -> ActivityPubResponse:
payload = await request.json()
logger.info(f"{payload=}")
if payload.get("type") == "Create":
assert payload["actor"] == ID
obj = payload["object"]
to_and_cc = obj.get("to", []) + obj.get("cc", [])
if ap.AS_PUBLIC in obj.get("to", []) and ID + "/followers" in to_and_cc:
visibility = ap.VisibilityEnum.PUBLIC
elif ap.AS_PUBLIC in to_and_cc and ID + "/followers" in to_and_cc:
visibility = ap.VisibilityEnum.UNLISTED
else:
visibility = ap.VisibilityEnum.DIRECT
object_id, outbox_object = await boxes.send_create(
db_session,
ap_type=obj["type"],
source=obj["content"],
uploads=[],
in_reply_to=obj.get("inReplyTo"),
visibility=visibility,
content_warning=obj.get("summary"),
is_sensitive=obj.get("sensitive", False),
)
else:
raise ValueError("TODO")
return ActivityPubResponse(
outbox_object.ap_object,
status_code=201,
headers={"Location": boxes.outbox_object_id(object_id)},
)
@app.get("/featured") @app.get("/featured")
async def featured( async def featured(
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
@ -712,14 +620,6 @@ async def _check_outbox_object_acl(
if templates.is_current_user_admin(request): if templates.is_current_user_admin(request):
return None return None
maybe_access_token_info = await indieauth.check_access_token(
request,
db_session,
)
if maybe_access_token_info:
# TODO: check scopes
return None
if ap_object.visibility in [ if ap_object.visibility in [
ap.VisibilityEnum.PUBLIC, ap.VisibilityEnum.PUBLIC,
ap.VisibilityEnum.UNLISTED, ap.VisibilityEnum.UNLISTED,
@ -800,7 +700,7 @@ async def _fetch_webmentions(
models.Webmention.outbox_object_id == outbox_object.id, models.Webmention.outbox_object_id == outbox_object.id,
models.Webmention.is_deleted.is_(False), models.Webmention.is_deleted.is_(False),
) )
.limit(50) .limit(10)
) )
).all() ).all()
@ -850,90 +750,23 @@ async def outbox_by_public_id(
is_current_user_admin=is_current_user_admin(request), is_current_user_admin=is_current_user_admin(request),
) )
webmentions = await _fetch_webmentions(db_session, maybe_object)
likes = await _fetch_likes(db_session, maybe_object) likes = await _fetch_likes(db_session, maybe_object)
shares = await _fetch_shares(db_session, maybe_object) shares = await _fetch_shares(db_session, maybe_object)
webmentions = await _fetch_webmentions(db_session, maybe_object)
return await templates.render_template( return await templates.render_template(
db_session, db_session,
request, request,
"object.html", "object.html",
{ {
"replies_tree": _merge_replies(replies_tree, webmentions), "replies_tree": replies_tree,
"outbox_object": maybe_object, "outbox_object": maybe_object,
"likes": _merge_faces_from_inbox_object_and_webmentions( "likes": likes,
likes, "shares": shares,
webmentions, "webmentions": webmentions,
models.WebmentionType.LIKE,
),
"shares": _merge_faces_from_inbox_object_and_webmentions(
shares,
webmentions,
models.WebmentionType.REPOST,
),
"webmentions": _filter_webmentions(webmentions),
}, },
) )
def _filter_webmentions(
webmentions: list[models.Webmention],
) -> list[models.Webmention]:
return [
wm
for wm in webmentions
if wm.webmention_type
not in [
models.WebmentionType.LIKE,
models.WebmentionType.REPOST,
models.WebmentionType.REPLY,
]
]
def _merge_faces_from_inbox_object_and_webmentions(
inbox_objects: list[models.InboxObject],
webmentions: list[models.Webmention],
webmention_type: models.WebmentionType,
) -> list[Face]:
wm_faces = []
for wm in webmentions:
if wm.webmention_type != webmention_type:
continue
if face := Face.from_webmention(wm):
wm_faces.append(face)
return merge_faces(
[Face.from_inbox_object(obj) for obj in inbox_objects] + wm_faces
)
def _merge_replies(
reply_tree_node: boxes.ReplyTreeNode,
webmentions: list[models.Webmention],
) -> boxes.ReplyTreeNode:
# TODO: return None as we update the object in place
webmention_replies = []
for wm in [
wm for wm in webmentions if wm.webmention_type == models.WebmentionType.REPLY
]:
if rep := WebmentionReply.from_webmention(wm):
webmention_replies.append(
boxes.ReplyTreeNode(
ap_object=None,
wm_reply=rep,
is_requested=False,
children=[],
)
)
reply_tree_node.children = sorted(
reply_tree_node.children + webmention_replies,
key=lambda node: node.published_at,
reverse=True,
)
return reply_tree_node
@app.get("/articles/{short_id}/{slug}") @app.get("/articles/{short_id}/{slug}")
async def article_by_slug( async def article_by_slug(
short_id: str, short_id: str,
@ -942,8 +775,24 @@ async def article_by_slug(
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker), httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse | RedirectResponse: ) -> ActivityPubResponse | templates.TemplateResponse | RedirectResponse:
maybe_object = await boxes.get_outbox_object_by_slug_and_short_id( maybe_object = (
db_session, slug, short_id (
await db_session.execute(
select(models.OutboxObject)
.options(
joinedload(models.OutboxObject.outbox_object_attachments).options(
joinedload(models.OutboxObjectAttachment.upload)
)
)
.where(
models.OutboxObject.public_id.like(f"{short_id}%"),
models.OutboxObject.slug == slug,
models.OutboxObject.is_deleted.is_(False),
)
)
)
.unique()
.scalar_one_or_none()
) )
if not maybe_object: if not maybe_object:
raise HTTPException(status_code=404) raise HTTPException(status_code=404)
@ -967,19 +816,11 @@ async def article_by_slug(
request, request,
"object.html", "object.html",
{ {
"replies_tree": _merge_replies(replies_tree, webmentions), "replies_tree": replies_tree,
"outbox_object": maybe_object, "outbox_object": maybe_object,
"likes": _merge_faces_from_inbox_object_and_webmentions( "likes": likes,
likes, "shares": shares,
webmentions, "webmentions": webmentions,
models.WebmentionType.LIKE,
),
"shares": _merge_faces_from_inbox_object_and_webmentions(
shares,
webmentions,
models.WebmentionType.REPOST,
),
"webmentions": _filter_webmentions(webmentions),
}, },
) )
@ -1089,78 +930,6 @@ def emoji_by_name(name: str) -> ActivityPubResponse:
return ActivityPubResponse({"@context": ap.AS_EXTENDED_CTX, **emoji}) return ActivityPubResponse({"@context": ap.AS_EXTENDED_CTX, **emoji})
@app.get("/inbox")
async def get_inbox(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
access_token_info: indieauth.AccessTokenInfo = Depends(
indieauth.enforce_access_token
),
page: bool | None = None,
next_cursor: str | None = None,
) -> ActivityPubResponse:
where = [
models.InboxObject.ap_type.in_(
["Create", "Follow", "Like", "Announce", "Undo", "Update"]
)
]
total_items = await db_session.scalar(
select(func.count(models.InboxObject.id)).where(*where)
)
if not page and not next_cursor:
return ActivityPubResponse(
{
"@context": ap.AS_CTX,
"id": ID + "/inbox",
"first": ID + "/inbox?page=true",
"type": "OrderedCollection",
"totalItems": total_items,
}
)
q = (
select(models.InboxObject)
.where(*where)
.order_by(models.InboxObject.created_at.desc())
) # type: ignore
if next_cursor:
q = q.where(
models.InboxObject.created_at
< pagination.decode_cursor(next_cursor) # type: ignore
)
q = q.limit(20)
items = [item for item in (await db_session.scalars(q)).all()]
next_cursor = None
if (
items
and await db_session.scalar(
select(func.count(models.InboxObject.id)).where(
*where, models.InboxObject.created_at < items[-1].created_at
)
)
> 0
):
next_cursor = pagination.encode_cursor(items[-1].created_at)
collection_page = {
"@context": ap.AS_CTX,
"id": (
ID + "/inbox?page=true"
if not next_cursor
else ID + f"/inbox?next_cursor={next_cursor}"
),
"partOf": ID + "/inbox",
"type": "OrderedCollectionPage",
"orderedItems": [item.ap_object for item in items],
}
if next_cursor:
collection_page["next"] = ID + f"/inbox?next_cursor={next_cursor}"
return ActivityPubResponse(collection_page)
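
The new POST /outbox and paginated GET /inbox handlers give authenticated clients a minimal ActivityPub C2S API. A rough usage sketch with httpx; the instance URL and bearer token are placeholders, and the actor is assumed to be the instance's base URL (it must equal the server's own ID).

    import httpx

    BASE = "https://microblog.example"  # placeholder instance
    AUTH = {"Authorization": "Bearer access-token-goes-here"}

    # Create a public note via the new POST /outbox (the handler's Create branch).
    note = {
        "type": "Create",
        "actor": BASE,  # assumed to match the server's ID
        "object": {
            "type": "Note",
            "content": "Hello from the C2S API",
            "to": ["https://www.w3.org/ns/activitystreams#Public"],
            "cc": [f"{BASE}/followers"],
        },
    }
    created = httpx.post(f"{BASE}/outbox", json=note, headers=AUTH)
    print(created.status_code, created.headers.get("Location"))  # 201 + object URL

    # Walk the paginated GET /inbox collection; "next" carries the cursor.
    page = httpx.get(f"{BASE}/inbox", params={"page": "true"}, headers=AUTH).json()
    while True:
        for item in page.get("orderedItems", []):
            print(item.get("type"), item.get("id"))
        next_url = page.get("next")
        if not next_url:
            break
        page = httpx.get(next_url, headers=AUTH).json()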
@app.post("/inbox") @app.post("/inbox")
async def inbox( async def inbox(
request: Request, request: Request,
@ -1190,10 +959,9 @@ async def get_remote_follow(
@app.post("/remote_follow") @app.post("/remote_follow")
async def post_remote_follow( async def post_remote_follow(
request: Request, request: Request,
db_session: AsyncSession = Depends(get_db_session),
csrf_check: None = Depends(verify_csrf_token), csrf_check: None = Depends(verify_csrf_token),
profile: str = Form(), profile: str = Form(),
) -> templates.TemplateResponse: ) -> RedirectResponse:
if not profile.startswith("@"): if not profile.startswith("@"):
profile = f"@{profile}" profile = f"@{profile}"
@ -1202,10 +970,9 @@ async def post_remote_follow(
# TODO(ts): error message to user # TODO(ts): error message to user
raise HTTPException(status_code=404) raise HTTPException(status_code=404)
return await redirect_to_remote_instance( return RedirectResponse(
request,
db_session,
remote_follow_template.format(uri=ID), remote_follow_template.format(uri=ID),
status_code=302,
) )
@ -1233,11 +1000,10 @@ async def remote_interaction(
@app.post("/remote_interaction") @app.post("/remote_interaction")
async def post_remote_interaction( async def post_remote_interaction(
request: Request, request: Request,
db_session: AsyncSession = Depends(get_db_session),
csrf_check: None = Depends(verify_csrf_token), csrf_check: None = Depends(verify_csrf_token),
profile: str = Form(), profile: str = Form(),
ap_id: str = Form(), ap_id: str = Form(),
) -> templates.TemplateResponse: ) -> RedirectResponse:
if not profile.startswith("@"): if not profile.startswith("@"):
profile = f"@{profile}" profile = f"@{profile}"
@ -1246,26 +1012,21 @@ async def post_remote_interaction(
# TODO(ts): error message to user # TODO(ts): error message to user
raise HTTPException(status_code=404) raise HTTPException(status_code=404)
return await redirect_to_remote_instance( return RedirectResponse(
request, remote_follow_template.format(uri=ap_id),
db_session, status_code=302,
remote_follow_template.format(uri=ID),
) )
@app.get("/.well-known/webfinger") @app.get("/.well-known/webfinger")
async def wellknown_webfinger(resource: str) -> JSONResponse: async def wellknown_webfinger(resource: str) -> JSONResponse:
"""Exposes/servers WebFinger data.""" """Exposes/servers WebFinger data."""
if resource not in [ if resource not in [f"acct:{USERNAME}@{DOMAIN}", ID]:
f"acct:{USERNAME}@{WEBFINGER_DOMAIN}",
ID,
f"acct:{USERNAME}@{DOMAIN}",
]:
logger.info(f"Got invalid req for {resource}") logger.info(f"Got invalid req for {resource}")
raise HTTPException(status_code=404) raise HTTPException(status_code=404)
out = { out = {
"subject": f"acct:{USERNAME}@{WEBFINGER_DOMAIN}", "subject": f"acct:{USERNAME}@{DOMAIN}",
"aliases": [ID], "aliases": [ID],
"links": [ "links": [
{ {
@ -1329,11 +1090,11 @@ async def nodeinfo(
) )
proxy_client = httpx.AsyncClient(follow_redirects=True, http2=True)
async def _proxy_get( async def _proxy_get(
proxy_client: httpx.AsyncClient, request: starlette.requests.Request, url: str, stream: bool
request: starlette.requests.Request,
url: str,
stream: bool,
) -> httpx.Response: ) -> httpx.Response:
# Request the URL (and filter request headers) # Request the URL (and filter request headers)
proxy_req = proxy_client.build_request( proxy_req = proxy_client.build_request(
@ -1380,29 +1141,18 @@ async def serve_proxy_media(
exp: int, exp: int,
sig: str, sig: str,
encoded_url: str, encoded_url: str,
background_tasks: fastapi.BackgroundTasks,
) -> StreamingResponse | PlainTextResponse: ) -> StreamingResponse | PlainTextResponse:
# Decode the base64-encoded URL # Decode the base64-encoded URL
url = base64.urlsafe_b64decode(encoded_url).decode() url = base64.urlsafe_b64decode(encoded_url).decode()
check_url(url) check_url(url)
media.verify_proxied_media_sig(exp, url, sig) media.verify_proxied_media_sig(exp, url, sig)
proxy_client = httpx.AsyncClient( proxy_resp = await _proxy_get(request, url, stream=True)
follow_redirects=True,
timeout=httpx.Timeout(timeout=10.0),
transport=httpx.AsyncHTTPTransport(retries=1),
)
async def _close_proxy_client():
await proxy_client.aclose()
background_tasks.add_task(_close_proxy_client)
proxy_resp = await _proxy_get(proxy_client, request, url, stream=True)
if proxy_resp.status_code >= 300: if proxy_resp.status_code >= 300:
logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}") logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}")
await proxy_resp.aclose()
return PlainTextResponse( return PlainTextResponse(
"proxy error",
status_code=proxy_resp.status_code, status_code=proxy_resp.status_code,
) )
@ -1413,7 +1163,6 @@ async def serve_proxy_media(
_filter_proxy_resp_headers( _filter_proxy_resp_headers(
proxy_resp, proxy_resp,
[ [
"content-encoding",
"content-length", "content-length",
"content-type", "content-type",
"content-range", "content-range",
@ -1436,19 +1185,16 @@ async def serve_proxy_media_resized(
sig: str, sig: str,
encoded_url: str, encoded_url: str,
size: int, size: int,
background_tasks: fastapi.BackgroundTasks,
) -> PlainTextResponse: ) -> PlainTextResponse:
if size not in {50, 740}: if size not in {50, 740}:
raise ValueError("Unsupported size") raise ValueError("Unsupported size")
is_webp_supported = "image/webp" in request.headers.get("accept")
# Decode the base64-encoded URL # Decode the base64-encoded URL
url = base64.urlsafe_b64decode(encoded_url).decode() url = base64.urlsafe_b64decode(encoded_url).decode()
check_url(url) check_url(url)
media.verify_proxied_media_sig(exp, url, sig) media.verify_proxied_media_sig(exp, url, sig)
if (cached_resp := _RESIZED_CACHE.get((url, size))) and is_webp_supported: if cached_resp := _RESIZED_CACHE.get((url, size)):
resized_content, resized_mimetype, resp_headers = cached_resp resized_content, resized_mimetype, resp_headers = cached_resp
return PlainTextResponse( return PlainTextResponse(
resized_content, resized_content,
@ -1456,21 +1202,11 @@ async def serve_proxy_media_resized(
headers=resp_headers, headers=resp_headers,
) )
proxy_client = httpx.AsyncClient( proxy_resp = await _proxy_get(request, url, stream=False)
follow_redirects=True,
timeout=httpx.Timeout(timeout=10.0),
transport=httpx.AsyncHTTPTransport(retries=1),
)
async def _close_proxy_client():
await proxy_client.aclose()
background_tasks.add_task(_close_proxy_client)
proxy_resp = await _proxy_get(proxy_client, request, url, stream=False)
if proxy_resp.status_code >= 300: if proxy_resp.status_code >= 300:
logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}") logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}")
await proxy_resp.aclose()
return PlainTextResponse( return PlainTextResponse(
"proxy error",
status_code=proxy_resp.status_code, status_code=proxy_resp.status_code,
) )
@ -1496,10 +1232,10 @@ async def serve_proxy_media_resized(
is_webp = False is_webp = False
try: try:
resized_buf = BytesIO() resized_buf = BytesIO()
i.save(resized_buf, format="webp" if is_webp_supported else i.format) i.save(resized_buf, format="webp")
is_webp = is_webp_supported is_webp = True
except Exception: except Exception:
logger.exception("Failed to create thumbnail") logger.exception("Failed to convert to webp")
resized_buf = BytesIO() resized_buf = BytesIO()
i.save(resized_buf, format=i.format) i.save(resized_buf, format=i.format)
resized_buf.seek(0) resized_buf.seek(0)
@ -1557,7 +1293,6 @@ async def serve_attachment(
@app.get("/attachments/thumbnails/{content_hash}/{filename}") @app.get("/attachments/thumbnails/{content_hash}/{filename}")
async def serve_attachment_thumbnail( async def serve_attachment_thumbnail(
request: Request,
content_hash: str, content_hash: str,
filename: str, filename: str,
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
@ -1572,20 +1307,11 @@ async def serve_attachment_thumbnail(
if not upload or not upload.has_thumbnail: if not upload or not upload.has_thumbnail:
raise HTTPException(status_code=404) raise HTTPException(status_code=404)
is_webp_supported = "image/webp" in request.headers.get("accept")
if is_webp_supported:
return FileResponse( return FileResponse(
UPLOAD_DIR / (content_hash + "_resized"), UPLOAD_DIR / (content_hash + "_resized"),
media_type="image/webp", media_type="image/webp",
headers={"Cache-Control": "max-age=31536000"}, headers={"Cache-Control": "max-age=31536000"},
) )
else:
return FileResponse(
UPLOAD_DIR / content_hash,
media_type=upload.content_type,
headers={"Cache-Control": "max-age=31536000"},
)
@app.get("/robots.txt", response_class=PlainTextResponse) @app.get("/robots.txt", response_class=PlainTextResponse)
@ -1644,27 +1370,22 @@ async def json_feed(
], ],
} }
) )
result = { return {
"version": "https://jsonfeed.org/version/1.1", "version": "https://jsonfeed.org/version/1",
"title": f"{LOCAL_ACTOR.display_name}'s microblog'", "title": f"{LOCAL_ACTOR.display_name}'s microblog'",
"home_page_url": LOCAL_ACTOR.url, "home_page_url": LOCAL_ACTOR.url,
"feed_url": BASE_URL + "/feed.json", "feed_url": BASE_URL + "/feed.json",
"authors": [ "author": {
{
"name": LOCAL_ACTOR.display_name, "name": LOCAL_ACTOR.display_name,
"url": LOCAL_ACTOR.url, "url": LOCAL_ACTOR.url,
} "avatar": LOCAL_ACTOR.icon_url,
], },
"items": data, "items": data,
} }
if LOCAL_ACTOR.icon_url:
result["authors"][0]["avatar"] = LOCAL_ACTOR.icon_url # type: ignore
return result
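
The feed change above moves from JSON Feed 1.0's single `author` object to the 1.1 `authors` array, and only attaches an avatar when `LOCAL_ACTOR.icon_url` is set. The resulting skeleton looks roughly like this; the values are placeholders, only the structure matters.

    feed = {
        "version": "https://jsonfeed.org/version/1.1",
        "title": "Jane's microblog",
        "home_page_url": "https://microblog.example",
        "feed_url": "https://microblog.example/feed.json",
        "authors": [
            {
                "name": "Jane",
                "url": "https://microblog.example",
                # "avatar" is added only when LOCAL_ACTOR.icon_url is set
            }
        ],
        "items": [],
    }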
async def _gen_rss_feed( async def _gen_rss_feed(
db_session: AsyncSession, db_session: AsyncSession,
is_rss: bool,
): ):
fg = FeedGenerator() fg = FeedGenerator()
fg.id(BASE_URL + "/feed.rss") fg.id(BASE_URL + "/feed.rss")
@ -1672,7 +1393,6 @@ async def _gen_rss_feed(
fg.description(f"{LOCAL_ACTOR.display_name}'s microblog") fg.description(f"{LOCAL_ACTOR.display_name}'s microblog")
fg.author({"name": LOCAL_ACTOR.display_name}) fg.author({"name": LOCAL_ACTOR.display_name})
fg.link(href=LOCAL_ACTOR.url, rel="alternate") fg.link(href=LOCAL_ACTOR.url, rel="alternate")
if LOCAL_ACTOR.icon_url:
fg.logo(LOCAL_ACTOR.icon_url) fg.logo(LOCAL_ACTOR.icon_url)
fg.language("en") fg.language("en")
@ -1695,12 +1415,8 @@ async def _gen_rss_feed(
fe = fg.add_entry() fe = fg.add_entry()
fe.id(outbox_object.url) fe.id(outbox_object.url)
if outbox_object.name is not None:
fe.title(outbox_object.name)
elif not is_rss: # Atom feeds require a title
fe.title(outbox_object.url)
fe.link(href=outbox_object.url) fe.link(href=outbox_object.url)
fe.title(outbox_object.url)
fe.description(content) fe.description(content)
fe.content(content) fe.content(content)
fe.published(outbox_object.ap_published_at.replace(tzinfo=timezone.utc)) fe.published(outbox_object.ap_published_at.replace(tzinfo=timezone.utc))
@ -1713,7 +1429,7 @@ async def rss_feed(
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
) -> PlainTextResponse: ) -> PlainTextResponse:
return PlainTextResponse( return PlainTextResponse(
(await _gen_rss_feed(db_session, is_rss=True)).rss_str(), (await _gen_rss_feed(db_session)).rss_str(),
headers={"Content-Type": "application/rss+xml"}, headers={"Content-Type": "application/rss+xml"},
) )
@ -1723,6 +1439,6 @@ async def atom_feed(
db_session: AsyncSession = Depends(get_db_session), db_session: AsyncSession = Depends(get_db_session),
) -> PlainTextResponse: ) -> PlainTextResponse:
return PlainTextResponse( return PlainTextResponse(
(await _gen_rss_feed(db_session, is_rss=False)).atom_str(), (await _gen_rss_feed(db_session)).atom_str(),
headers={"Content-Type": "application/atom+xml"}, headers={"Content-Type": "application/atom+xml"},
) )
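
One pattern worth calling out from the media-proxy changes above: each request now builds its own httpx.AsyncClient and schedules its `aclose()` via FastAPI's BackgroundTasks, so the connection is torn down only after the streamed response has been sent. A standalone sketch of that pattern; the route path and target URL are illustrative.

    import fastapi
    import httpx
    from fastapi.responses import StreamingResponse

    app = fastapi.FastAPI()


    @app.get("/proxy-demo")
    async def proxy_demo(background_tasks: fastapi.BackgroundTasks) -> StreamingResponse:
        # One client per request, instead of a shared module-level client.
        client = httpx.AsyncClient(
            follow_redirects=True,
            timeout=httpx.Timeout(timeout=10.0),
            transport=httpx.AsyncHTTPTransport(retries=1),
        )

        async def _close_client() -> None:
            # Runs after the streamed response has been fully sent.
            await client.aclose()

        background_tasks.add_task(_close_client)

        upstream_req = client.build_request("GET", "https://example.com/")
        upstream_resp = await client.send(upstream_req, stream=True)

        # FastAPI attaches the collected background tasks to the returned
        # response, so the client outlives the handler but not the response.
        return StreamingResponse(
            upstream_resp.aiter_raw(),
            status_code=upstream_resp.status_code,
        )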


@ -132,7 +132,7 @@ async def post_micropub_endpoint(
h = form_data["h"] h = form_data["h"]
entry_type = f"h-{h}" entry_type = f"h-{h}"
logger.info(f"Creating {entry_type=} with {access_token_info=}") logger.info(f"Creating {entry_type}")
if entry_type != "h-entry": if entry_type != "h-entry":
return JSONResponse( return JSONResponse(
@ -150,7 +150,7 @@ async def post_micropub_endpoint(
else: else:
content = form_data["content"] content = form_data["content"]
public_id, _ = await send_create( public_id = await send_create(
db_session, db_session,
"Note", "Note",
content, content,
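
For reference, the Micropub handler above accepts a classic form-encoded `h=entry` request; `send_create` now returns a `(public_id, outbox_object)` tuple, hence the unpacking. A hedged example request with httpx; the `/micropub` path, instance URL, and token are assumptions.

    import httpx

    resp = httpx.post(
        "https://microblog.example/micropub",  # assumed mount point
        headers={"Authorization": "Bearer access-token-goes-here"},
        data={
            "h": "entry",                      # handler builds entry_type "h-entry"
            "content": "Posted via Micropub",  # forwarded to send_create(..., "Note", ...)
        },
    )
    # Micropub servers conventionally answer 201 with a Location header
    # pointing at the new post (the exact response isn't shown in this hunk).
    print(resp.status_code, resp.headers.get("Location"))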


@ -1,5 +1,4 @@
import enum import enum
from datetime import datetime
from typing import Any from typing import Any
from typing import Optional from typing import Optional
from typing import Union from typing import Union
@ -55,10 +54,6 @@ class Actor(Base, BaseActor):
is_blocked = Column(Boolean, nullable=False, default=False, server_default="0") is_blocked = Column(Boolean, nullable=False, default=False, server_default="0")
is_deleted = Column(Boolean, nullable=False, default=False, server_default="0") is_deleted = Column(Boolean, nullable=False, default=False, server_default="0")
are_announces_hidden_from_stream = Column(
Boolean, nullable=False, default=False, server_default="0"
)
@property @property
def is_from_db(self) -> bool: def is_from_db(self) -> bool:
return True return True
@ -256,8 +251,6 @@ class OutboxObject(Base, BaseObject):
"mediaType": attachment.upload.content_type, "mediaType": attachment.upload.content_type,
"name": attachment.alt or attachment.filename, "name": attachment.alt or attachment.filename,
"url": url, "url": url,
"width": attachment.upload.width,
"height": attachment.upload.height,
"proxiedUrl": url, "proxiedUrl": url,
"resizedUrl": BASE_URL "resizedUrl": BASE_URL
+ ( + (
@ -437,7 +430,7 @@ class OutboxObjectAttachment(Base):
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False) upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False)
upload: Mapped["Upload"] = relationship(Upload, uselist=False) upload = relationship(Upload, uselist=False)
class IndieAuthAuthorizationRequest(Base): class IndieAuthAuthorizationRequest(Base):
@ -460,53 +453,17 @@ class IndieAuthAccessToken(Base):
__tablename__ = "indieauth_access_token" __tablename__ = "indieauth_access_token"
id = Column(Integer, primary_key=True, index=True) id = Column(Integer, primary_key=True, index=True)
created_at: Mapped[datetime] = Column( created_at = Column(DateTime(timezone=True), nullable=False, default=now)
DateTime(timezone=True), nullable=False, default=now
)
# Will be null for personal access tokens # Will be null for personal access tokens
indieauth_authorization_request_id = Column( indieauth_authorization_request_id = Column(
Integer, ForeignKey("indieauth_authorization_request.id"), nullable=True Integer, ForeignKey("indieauth_authorization_request.id"), nullable=True
) )
indieauth_authorization_request = relationship(
IndieAuthAuthorizationRequest,
uselist=False,
)
access_token: Mapped[str] = Column(String, nullable=False, unique=True, index=True) access_token = Column(String, nullable=False, unique=True, index=True)
refresh_token = Column(String, nullable=True, unique=True, index=True) expires_in = Column(Integer, nullable=False)
expires_in: Mapped[int] = Column(Integer, nullable=False)
scope = Column(String, nullable=False) scope = Column(String, nullable=False)
is_revoked = Column(Boolean, nullable=False, default=False) is_revoked = Column(Boolean, nullable=False, default=False)
was_refreshed = Column(Boolean, nullable=False, default=False, server_default="0")
class OAuthClient(Base):
__tablename__ = "oauth_client"
id = Column(Integer, primary_key=True, index=True)
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
# Request
client_name = Column(String, nullable=False)
redirect_uris: Mapped[list[str]] = Column(JSON, nullable=True)
# Optional from request
client_uri = Column(String, nullable=True)
logo_uri = Column(String, nullable=True)
scope = Column(String, nullable=True)
# Response
client_id = Column(String, nullable=False, unique=True, index=True)
client_secret = Column(String, nullable=False, unique=True)
@enum.unique
class WebmentionType(str, enum.Enum):
UNKNOWN = "unknown"
LIKE = "like"
REPLY = "reply"
REPOST = "repost"
class Webmention(Base): class Webmention(Base):
@ -525,8 +482,6 @@ class Webmention(Base):
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False) outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
outbox_object = relationship(OutboxObject, uselist=False) outbox_object = relationship(OutboxObject, uselist=False)
webmention_type = Column(Enum(WebmentionType), nullable=True)
@property @property
def as_facepile_item(self) -> webmentions.Webmention | None: def as_facepile_item(self) -> webmentions.Webmention | None:
if not self.source_microformats: if not self.source_microformats:
@ -536,7 +491,6 @@ class Webmention(Base):
self.source_microformats["items"], self.source self.source_microformats["items"], self.source
) )
except Exception: except Exception:
# TODO: return a facepile with the unknown image
logger.warning( logger.warning(
f"Failed to generate facefile item for Webmention id={self.id}" f"Failed to generate facefile item for Webmention id={self.id}"
) )


@ -151,7 +151,7 @@ def _set_next_try(
if not outgoing_activity.tries: if not outgoing_activity.tries:
raise ValueError("Should never happen") raise ValueError("Should never happen")
if outgoing_activity.tries >= _MAX_RETRIES: if outgoing_activity.tries == _MAX_RETRIES:
outgoing_activity.is_errored = True outgoing_activity.is_errored = True
outgoing_activity.next_try = None outgoing_activity.next_try = None
else: else:


@ -102,8 +102,6 @@ async def _prune_old_inbox_objects(
models.InboxObject.ap_type.in_(["Note"]), models.InboxObject.ap_type.in_(["Note"]),
) )
), ),
# Keep Move object as they are linked to notifications
models.InboxObject.ap_type.not_in(["Move"]),
# Filter by retention days # Filter by retention days
models.InboxObject.ap_published_at models.InboxObject.ap_published_at
< now() - timedelta(days=INBOX_RETENTION_DAYS), < now() - timedelta(days=INBOX_RETENTION_DAYS),


@ -1,28 +0,0 @@
from fastapi import Request
from app import templates
from app.database import AsyncSession
async def redirect(
request: Request,
db_session: AsyncSession,
url: str,
) -> templates.TemplateResponse:
"""
Similar to RedirectResponse, but uses a 200 response with HTML.
Needed for remote redirects on form submission endpoints,
since our CSP policy disallows remote form submission.
https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984
"""
return await templates.render_template(
db_session,
request,
"redirect.html",
{
"request": request,
"url": url,
},
headers={"Refresh": "0;url=" + url},
)


@ -1,9 +1,11 @@
$font-stack: Helvetica, sans-serif; $font-stack: -apple-system, BlinkMacSystemFont, avenir next, avenir, segoe ui, helvetica neue, helvetica, Cantarell,
Ubuntu, roboto, noto, arial, sans-serif;
$background: #ddd; $background: #ddd;
$light-background: #e6e6e6; $light-background: #e6e6e6;
$text-color: #111; $text-color: #111;
$primary-color: #1d781d; $primary-color: #1d781d;
$secondary-color: #781D78; $secondary-color: #781d78;
$form-background-color: #ccc; $form-background-color: #ccc;
$form-text-color: #333; $form-text-color: #333;
$muted-color: #555; // solarized comment text $muted-color: #555; // solarized comment text
@ -13,6 +15,46 @@ $code-highlight-background: #f0f0f0;
// Load custom theme // Load custom theme
@import "theme.scss"; @import "theme.scss";
/* Box sizing rules */
*,
*::before,
*::after {
box-sizing: border-box;
}
/* Set core root defaults */
html:focus-within {
scroll-behavior: smooth;
}
/* Inherit fonts for inputs and buttons */
input,
button,
textarea,
select {
font: inherit;
}
html {
max-width: 90ch;
padding: 3em 1em;
margin: auto;
line-height: 1.75;
font-size: 1.25em;
}
body {
font-family: $font-stack;
background: $background;
color: $text-color;
display: flex;
min-height: 100vh;
flex-direction: column;
}
a {
text-decoration: none;
}
.primary-color { .primary-color {
color: $primary-color; color: $primary-color;
} }
@ -51,20 +93,17 @@ $code-highlight-background: #f0f0f0;
.p-summary { .p-summary {
display: inline-block; display: inline-block;
} }
.show-more-btn { label {
margin-left: 5px; margin-left: 5px;
} }
summary { .show-more-state {
display: inline-block; display: none;
} }
summary::-webkit-details-marker { .show-more-state ~ .obj-content {
display: none margin-top: 0;
} }
&:not([open]) .show-more-btn::after { .show-more-state:checked ~ .obj-content {
content: 'show more'; display: none;
}
&[open] .show-more-btn::after {
content: 'show less';
} }
} }
.sensitive-attachment { .sensitive-attachment {
@ -73,7 +112,7 @@ $code-highlight-background: #f0f0f0;
display: none; display: none;
} }
.sensitive-attachment-state:checked ~ .sensitive-attachment-box div { .sensitive-attachment-state:checked ~ .sensitive-attachment-box div {
display:none; display: none;
} }
.sensitive-attachment-box { .sensitive-attachment-box {
position: relative; position: relative;
@ -87,7 +126,6 @@ $code-highlight-background: #f0f0f0;
} }
} }
blockquote { blockquote {
border-left: 3px solid $secondary-color; border-left: 3px solid $secondary-color;
margin-left: 0; margin-left: 0;
@ -102,23 +140,6 @@ blockquote {
background: $light-background; background: $light-background;
} }
body {
font-family: $font-stack;
font-size: 20px;
line-height: 32px;
background: $background;
color: $text-color;
margin: 0;
padding: 0;
display: flex;
min-height: 100vh;
flex-direction: column;
}
a {
text-decoration: none;
}
dl { dl {
display: flex; display: flex;
dt { dt {
@ -163,13 +184,16 @@ div.highlight {
padding: 0 20px; padding: 0 20px;
} }
code, pre { code,
pre {
color: $secondary-color; // #cb4b16; // #268bd2; // #2aa198; color: $secondary-color; // #cb4b16; // #268bd2; // #2aa198;
font-family: monospace; font-family: monospace;
} }
.form { .form {
input, select, textarea { input,
select,
textarea {
font-size: 20px; font-size: 20px;
border: 0; border: 0;
padding: 5px; padding: 5px;
@ -179,7 +203,7 @@ code, pre {
outline: 1px solid $secondary-color; outline: 1px solid $secondary-color;
} }
} }
input[type=submit] { input[type="submit"] {
font-size: 20px; font-size: 20px;
outline: none; outline: none;
background: $primary-color; background: $primary-color;
@ -220,7 +244,6 @@ a {
} }
main { main {
width: 100%; width: 100%;
max-width: 1000px;
margin: 30px auto; margin: 30px auto;
} }
@ -240,8 +263,8 @@ main {
} }
footer { footer {
font-size: 1em;
width: 100%; width: 100%;
max-width: 1000px;
margin: 20px auto; margin: 20px auto;
color: $muted-color; color: $muted-color;
p { p {
@ -257,7 +280,7 @@ footer {
.actor-box { .actor-box {
display: flex; display: flex;
column-gap: 20px; column-gap: 20px;
margin:10px 0; margin: 10px 0;
.icon-box { .icon-box {
flex: 0 0 50px; flex: 0 0 50px;
} }
@ -277,12 +300,14 @@ footer {
li { li {
display: block; display: block;
span { span {
padding-right:10px; padding-right: 10px;
} }
} }
} }
#notifications, #followers, #following { #notifications,
#followers,
#following {
ul { ul {
list-style-type: none; list-style-type: none;
margin: 0; margin: 0;
@ -307,14 +332,16 @@ footer {
} }
} }
.show-sensitive-btn, .show-more-btn, .label-btn { .show-sensitive-btn,
.show-more-btn,
.label-btn {
@include admin-button; @include admin-button;
padding: 10px 5px; padding: 10px 5px;
margin: 20px 0; margin: 20px 0;
} }
.show-hide-sensitive-btn { .show-hide-sensitive-btn {
display:inline-block; display: inline-block;
} }
.no-margin-top { .no-margin-top {
@ -338,13 +365,13 @@ ul.poll-items {
} }
.poll-bar { .poll-bar {
width:100%;height:20px; width: 100%;
height: 20px;
line { line {
stroke: $secondary-color; stroke: $secondary-color;
stroke-width: 20px; stroke-width: 20px;
} }
} }
} }
} }
@ -368,7 +395,8 @@ nav {
form { form {
margin: 15px 0; margin: 15px 0;
} }
input[type=submit], button { input[type="submit"],
button {
@include admin-button; @include admin-button;
} }
} }
@ -381,7 +409,6 @@ nav.flexbox {
list-style-type: none; list-style-type: none;
margin: 0; margin: 0;
padding: 0; padding: 0;
} }
ul li { ul li {
@ -394,7 +421,8 @@ nav.flexbox {
a:not(.label-btn) { a:not(.label-btn) {
color: $primary-color; color: $primary-color;
text-decoration: none; text-decoration: none;
&:hover, &:active { &:hover,
&:active {
color: $secondary-color; color: $secondary-color;
text-decoration: underline; text-decoration: underline;
} }
@ -424,15 +452,19 @@ a.label-btn {
display: inline; display: inline;
color: $muted-color; color: $muted-color;
} }
.e-content, .activity-og-meta { .e-content,
.activity-og-meta {
a:hover { a:hover {
text-decoration: underline; text-decoration: underline;
} }
} }
.activity-attachment { .activity-attachment {
margin: 30px 0 20px 0; margin: 30px 0 20px 0;
img, audio, video { img,
max-width: calc(min(740px, 100%)); audio,
video {
width: 100%;
max-width: 740px;
} }
} }
img.inline-img { img.inline-img {
@ -458,25 +490,23 @@ a.label-btn {
border: 2px dashed $secondary-color; border: 2px dashed $secondary-color;
} }
.error-box, .scolor { .error-box {
color: $secondary-color; color: $secondary-color;
} }
.actor-action { .actor-action {
margin-top:20px; margin-top: 20px;
margin-bottom:-20px; margin-bottom: -20px;
padding: 0 20px; padding: 0 20px;
span { span {
color: $muted-color; color: $muted-color;
} }
span.new {
color: $secondary-color;
}
} }
.actor-metadata { .actor-metadata {
color: $muted-color; color: $muted-color;
} }
.emoji, .custom-emoji { .emoji,
.custom-emoji {
max-width: 25px; max-width: 25px;
} }
@ -546,26 +576,3 @@ a.label-btn {
content: ': '; content: ': ';
} }
} }
.margin-top-20 {
margin-top: 20px;
}
.video-wrapper {
position: relative;
}
.video-gif-overlay {
display: none;
}
.video-gif-mode + .video-gif-overlay {
display: block;
position: absolute;
top: 5px;
left: 5px;
padding: 0 3px;
font-size: 0.8em;
background: rgba(0,0,0,.5);
color: #fff;
}

View file

@ -1,14 +1,13 @@
import re import re
import typing import typing
from loguru import logger
from mistletoe import Document # type: ignore from mistletoe import Document # type: ignore
from mistletoe.block_token import CodeFence # type: ignore
from mistletoe.html_renderer import HTMLRenderer # type: ignore from mistletoe.html_renderer import HTMLRenderer # type: ignore
from mistletoe.span_token import SpanToken # type: ignore from mistletoe.span_token import SpanToken # type: ignore
from pygments import highlight # type: ignore
from pygments.formatters import HtmlFormatter # type: ignore from pygments.formatters import HtmlFormatter # type: ignore
from pygments.lexers import get_lexer_by_name as get_lexer # type: ignore from pygments.lexers import get_lexer_by_name as get_lexer # type: ignore
from pygments.util import ClassNotFound # type: ignore from pygments.lexers import guess_lexer # type: ignore
from sqlalchemy import select from sqlalchemy import select
from app import webfinger from app import webfinger
@ -79,17 +78,13 @@ class CustomRenderer(HTMLRenderer):
def render_mention(self, token: Mention) -> str: def render_mention(self, token: Mention) -> str:
mention = token.target mention = token.target
suffix = ""
if mention.endswith("."):
mention = mention[:-1]
suffix = "."
actor = self.mentioned_actors.get(mention) actor = self.mentioned_actors.get(mention)
if not actor: if not actor:
return mention return mention
self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention)) self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention))
link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>{suffix}' # noqa: E501 link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>' # noqa: E501
return link return link
def render_hashtag(self, token: Hashtag) -> str: def render_hashtag(self, token: Hashtag) -> str:
@ -104,16 +99,10 @@ class CustomRenderer(HTMLRenderer):
) )
return link return link
def render_block_code(self, token: CodeFence) -> str: def render_block_code(self, token: typing.Any) -> str:
lexer_attr = ""
try:
lexer = get_lexer(token.language)
lexer_attr = f' data-microblogpub-lexer="{lexer.aliases[0]}"'
except ClassNotFound:
pass
code = token.children[0].content code = token.children[0].content
return f"<pre><code{lexer_attr}>\n{code}\n</code></pre>" lexer = get_lexer(token.language) if token.language else guess_lexer(code)
return highlight(code, lexer, _FORMATTER)
async def _prefetch_mentioned_actors( async def _prefetch_mentioned_actors(
@ -129,11 +118,6 @@ async def _prefetch_mentioned_actors(
if mention in actors: if mention in actors:
continue continue
# XXX: the regex catches stuff like `@toto@example.com.`
if mention.endswith("."):
mention = mention[:-1]
try:
_, username, domain = mention.split("@") _, username, domain = mention.split("@")
actor = ( actor = (
await db_session.execute( await db_session.execute(
@ -151,8 +135,6 @@ async def _prefetch_mentioned_actors(
actor = await fetch_actor(db_session, actor_url) actor = await fetch_actor(db_session, actor_url)
actors[mention] = actor actors[mention] = actor
except Exception:
logger.exception(f"Failed to prefetch {mention}")
return actors return actors

View file

@ -1,32 +0,0 @@
function hasAudio (video) {
return video.mozHasAudio ||
Boolean(video.webkitAudioDecodedByteCount) ||
Boolean(video.audioTracks && video.audioTracks.length);
}
function setVideoInGIFMode(video) {
if (!hasAudio(video)) {
if (typeof video.loop == 'boolean' && video.duration <= 10.0) {
video.classList.add("video-gif-mode");
video.loop = true;
video.controls = false;
video.addEventListener("mouseover", () => {
video.play();
})
video.addEventListener("mouseleave", () => {
video.pause();
})
}
};
}
var items = document.getElementsByTagName("video")
for (var i = 0; i < items.length; i++) {
if (items[i].duration) {
setVideoInGIFMode(items[i]);
} else {
items[i].addEventListener("loadeddata", function() {
setVideoInGIFMode(this);
});
}
}

View file

@ -27,7 +27,6 @@ from app.ap_object import Object
from app.config import BASE_URL from app.config import BASE_URL
from app.config import CUSTOM_FOOTER from app.config import CUSTOM_FOOTER
from app.config import DEBUG from app.config import DEBUG
from app.config import SESSION_TIMEOUT
from app.config import VERSION from app.config import VERSION
from app.config import generate_csrf_token from app.config import generate_csrf_token
from app.config import session_serializer from app.config import session_serializer
@ -70,10 +69,10 @@ def is_current_user_admin(request: Request) -> bool:
try: try:
loaded_session = session_serializer.loads( loaded_session = session_serializer.loads(
session_cookie, session_cookie,
max_age=SESSION_TIMEOUT, max_age=3600 * 12,
) )
except Exception: except Exception:
logger.exception("Failed to validate session timeout") pass
else: else:
is_admin = loaded_session.get("is_logged_in") is_admin = loaded_session.get("is_logged_in")
@ -86,7 +85,6 @@ async def render_template(
template: str, template: str,
template_args: dict[str, Any] | None = None, template_args: dict[str, Any] | None = None,
status_code: int = 200, status_code: int = 200,
headers: dict[str, str] | None = None,
) -> TemplateResponse: ) -> TemplateResponse:
if template_args is None: if template_args is None:
template_args = {} template_args = {}
@ -131,7 +129,6 @@ async def render_template(
**template_args, **template_args,
}, },
status_code=status_code, status_code=status_code,
headers=headers,
) )
@ -336,14 +333,6 @@ def _clean_html(html: str, note: Object) -> str:
raise raise
def _clean_html_wm(html: str) -> str:
return bleach.clean(
html,
attributes=ALLOWED_ATTRIBUTES,
strip=True,
)
def _timeago(original_dt: datetime) -> str: def _timeago(original_dt: datetime) -> str:
dt = original_dt dt = original_dt
if dt.tzinfo: if dt.tzinfo:
@ -420,7 +409,6 @@ def _poll_item_pct(item: ap.RawObject, voters_count: int) -> int:
_templates.env.filters["domain"] = _filter_domain _templates.env.filters["domain"] = _filter_domain
_templates.env.filters["media_proxy_url"] = _media_proxy_url _templates.env.filters["media_proxy_url"] = _media_proxy_url
_templates.env.filters["clean_html"] = _clean_html _templates.env.filters["clean_html"] = _clean_html
_templates.env.filters["clean_html_wm"] = _clean_html_wm
_templates.env.filters["timeago"] = _timeago _templates.env.filters["timeago"] = _timeago
_templates.env.filters["format_date"] = _format_date _templates.env.filters["format_date"] = _format_date
_templates.env.filters["has_media_type"] = _has_media_type _templates.env.filters["has_media_type"] = _has_media_type

View file

@ -11,8 +11,8 @@
<ul class="h-feed" id="articles"> <ul class="h-feed" id="articles">
<data class="p-name" value="{{ local_actor.display_name}}'s articles"></data> <data class="p-name" value="{{ local_actor.display_name}}'s articles"></data>
{% for outbox_object in objects %} {% for outbox_object in objects %}
<li class="h-entry"> <li>
<time class="muted dt-published" datetime="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at.strftime("%b %d, %Y") }}</time> <a href="{{ outbox_object.url }}" class="u-url u-uid p-name">{{ outbox_object.name }}</a> <span class="muted">{{ outbox_object.ap_published_at.strftime("%b %d, %Y") }}</span> <a href="{{ outbox_object.url }}">{{ outbox_object.name }}</a>
</li> </li>
{% endfor %} {% endfor %}
</ul> </ul>

View file

@ -3,7 +3,6 @@
{% block head %} {% block head %}
<title>{{ local_actor.display_name }}'s followers</title> <title>{{ local_actor.display_name }}'s followers</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %} {% endblock %}
{% block content %} {% block content %}

View file

@ -3,7 +3,6 @@
{% block head %} {% block head %}
<title>{{ local_actor.display_name }}'s follows</title> <title>{{ local_actor.display_name }}'s follows</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %} {% endblock %}
{% block content %} {% block content %}

View file

@ -26,20 +26,15 @@
<div class="h-feed"> <div class="h-feed">
<data class="p-name" value="{{ local_actor.display_name}}'s notes"></data> <data class="p-name" value="{{ local_actor.display_name}}'s notes"></data>
{% for outbox_object in objects %} {% for outbox_object in objects %}
{% if outbox_object.ap_type in ["Note", "Video", "Question"] %} {% if outbox_object.ap_type in ["Note", "Article", "Video", "Question"] %}
{{ utils.display_object(outbox_object) }} {{ utils.display_object(outbox_object) }}
{% elif outbox_object.ap_type == "Announce" %} {% elif outbox_object.ap_type == "Announce" %}
<div class="h-entry" id="{{ outbox_object.permalink_id }}"> <div class="shared-header"><strong>{{ utils.display_tiny_actor_icon(local_actor) }} {{ local_actor.display_name | clean_html(local_actor) | safe }}</strong> shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
<div class="shared-header"><strong><a class="p-author h-card" href="{{ local_actor.url }}">{{ utils.display_tiny_actor_icon(local_actor) }} {{ local_actor.display_name | clean_html(local_actor) | safe }}</a></strong> shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div> {{ utils.display_object(outbox_object.relates_to_anybox_object) }}
<div class="h-cite u-repost-of">
{{ utils.display_object(outbox_object.relates_to_anybox_object, is_h_entry=False) }}
</div>
</div>
{% endif %} {% endif %}
{% endfor %} {% endfor %}
</div> </div>
{% if has_previous_page or has_next_page %}
<div class="box"> <div class="box">
{% if has_previous_page %} {% if has_previous_page %}
<a href="{{ url_for("index") }}?page={{ current_page - 1 }}">Previous</a> <a href="{{ url_for("index") }}?page={{ current_page - 1 }}">Previous</a>
@ -49,7 +44,6 @@
<a href="{{ url_for("index") }}?page={{ current_page + 1 }}">Next</a> <a href="{{ url_for("index") }}?page={{ current_page + 1 }}">Next</a>
{% endif %} {% endif %}
</div> </div>
{% endif %}
{% else %} {% else %}
<div class="empty-state"> <div class="empty-state">

View file

@ -10,12 +10,8 @@
{% endif %} {% endif %}
<div class="indieauth-details"> <div class="indieauth-details">
<div> <div>
{% if client.url %} <a class="lcolor" href="{{ client.url }}">{{ client.name }}</a>
<a class="scolor" href="{{ client.url }}">{{ client.name }}</a> <p>wants you to login as <strong class="lcolor">{{ me }}</strong> with the following redirect URI: <code>{{ redirect_uri }}</code>.</p>
{% else %}
<span class="scolor">{{ client.name }}</span>
{% endif %}
<p>wants you to login{% if me %} as <strong class="lcolor">{{ me }}</strong>{% endif %} with the following redirect URI: <code>{{ redirect_uri }}</code>.</p>
<form method="POST" action="{{ url_for('indieauth_flow') }}" class="form"> <form method="POST" action="{{ url_for('indieauth_flow') }}" class="form">

View file

@ -55,6 +55,5 @@
{% if is_admin %} {% if is_admin %}
<script src="{{ BASE_URL }}/static/common-admin.js?v={{ JS_HASH }}"></script> <script src="{{ BASE_URL }}/static/common-admin.js?v={{ JS_HASH }}"></script>
{% endif %} {% endif %}
<script src="{{ BASE_URL }}/static/common.js?v={{ JS_HASH }}"></script>
</body> </body>
</html> </html>

View file

@ -1,8 +1,5 @@
{%- import "utils.html" as utils with context -%} {%- import "utils.html" as utils with context -%}
{% extends "layout.html" %} {% extends "layout.html" %}
{% block head %}
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block main_tag %} class="main-flex"{% endblock %} {% block main_tag %} class="main-flex"{% endblock %}
{% block content %} {% block content %}
<div class="centered"> <div class="centered">

View file

@ -10,9 +10,6 @@
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.actor.ap_id }}"> <a href="{{ url_for("admin_profile") }}?actor_id={{ notif.actor.ap_id }}">
{% if with_icon %}{{ utils.display_tiny_actor_icon(notif.actor) }}{% endif %} {{ notif.actor.display_name | clean_html(notif.actor) | safe }}</a> {{ text }} {% if with_icon %}{{ utils.display_tiny_actor_icon(notif.actor) }}{% endif %} {{ notif.actor.display_name | clean_html(notif.actor) | safe }}</a> {{ text }}
<span title="{{ notif.created_at.isoformat() }}">{{ notif.created_at | timeago }}</span> <span title="{{ notif.created_at.isoformat() }}">{{ notif.created_at | timeago }}</span>
{% if notif.is_new %}
<span class="new">new</span>
{% endif %}
</div> </div>
{% endmacro %} {% endmacro %}
@ -51,7 +48,7 @@
{% elif notif.notification_type.value == "unblock" %} {% elif notif.notification_type.value == "unblock" %}
{{ notif_actor_action(notif, "was unblocked") }} {{ notif_actor_action(notif, "was unblocked") }}
{{ utils.display_actor(notif.actor, actors_metadata) }} {{ utils.display_actor(notif.actor, actors_metadata) }}
{%- elif notif.notification_type.value == "move" and notif.inbox_object %} {%- elif notif.notification_type.value == "move" %}
{# for move notif, the actor is the target and the inbox object the Move activity #} {# for move notif, the actor is the target and the inbox object the Move activity #}
<div class="actor-action"> <div class="actor-action">
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.inbox_object.actor.ap_id }}"> <a href="{{ url_for("admin_profile") }}?actor_id={{ notif.inbox_object.actor.ap_id }}">
@ -69,8 +66,8 @@
{{ notif_actor_action(notif, "shared a post", with_icon=True) }} {{ notif_actor_action(notif, "shared a post", with_icon=True) }}
{{ utils.display_object(notif.outbox_object) }} {{ utils.display_object(notif.outbox_object) }}
{% elif notif.notification_type.value == "undo_announce" %} {% elif notif.notification_type.value == "undo_announce" %}
{{ notif_actor_action(notif, "unshared a post", with_icon=True) }} {{ notif_actor_action(notif, "unshared a post") }}
{{ utils.display_object(notif.outbox_object) }} {{ utils.display_object(notif.outbox_object, with_icon=True) }}
{% elif notif.notification_type.value == "mention" %} {% elif notif.notification_type.value == "mention" %}
{{ notif_actor_action(notif, "mentioned you") }} {{ notif_actor_action(notif, "mentioned you") }}
{{ utils.display_object(notif.inbox_object) }} {{ utils.display_object(notif.inbox_object) }}

View file

@ -15,7 +15,7 @@
<meta content="article" property="og:type" /> <meta content="article" property="og:type" />
<meta content="{{ outbox_object.url }}" property="og:url" /> <meta content="{{ outbox_object.url }}" property="og:url" />
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" /> <meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
<meta content="{% if outbox_object.name %}{{ outbox_object.name }}{% else %}Note{% endif %}" property="og:title" /> <meta content="{% if outbox_object.name %}{{ name }}{% else %}Note{% endif %}" property="og:title" />
<meta content="{{ excerpt }}" property="og:description" /> <meta content="{{ excerpt }}" property="og:description" />
<meta content="{{ local_actor.icon_url }}" property="og:image" /> <meta content="{{ local_actor.icon_url }}" property="og:image" />
<meta content="summary" property="twitter:card" /> <meta content="summary" property="twitter:card" />
@ -31,16 +31,9 @@
{% macro display_replies_tree(replies_tree_node) %} {% macro display_replies_tree(replies_tree_node) %}
{% if replies_tree_node.is_requested %} {% if replies_tree_node.is_requested %}
{{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True, is_h_entry=False) }} {{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True) }}
{% else %} {% else %}
{% if replies_tree_node.wm_reply %} {{ utils.display_object(replies_tree_node.ap_object) }}
{# u-comment h-cite is displayed by default for webmention #}
{{ utils.display_webmention_reply(replies_tree_node.wm_reply) }}
{% else %}
<div class="u-comment h-cite">
{{ utils.display_object(replies_tree_node.ap_object, is_h_entry=False) }}
</div>
{% endif %}
{% endif %} {% endif %}
{% for child in replies_tree_node.children %} {% for child in replies_tree_node.children %}
@ -49,8 +42,6 @@
{% endmacro %} {% endmacro %}
<div class="h-entry">
{{ display_replies_tree(replies_tree) }} {{ display_replies_tree(replies_tree) }}
</div>
{% endblock %} {% endblock %}

View file

@ -1,15 +0,0 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}
{% block content %}
{% include "header.html" %}
<div class="box">
<p>You are being redirected to: <a href="{{ url }}">{{ url }}</a></p>
</div>
{% endblock %}

View file

@ -1,15 +0,0 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}
{% block content %}
{% include "header.html" %}
<div class="box">
<p>You are being redirected to your instance: <a href="{{ url }}">{{ url }}</a></p>
</div>
{% endblock %}

View file

@ -3,7 +3,6 @@
{% block head %} {% block head %}
<title>Remote follow {{ local_actor.display_name }}</title> <title>Remote follow {{ local_actor.display_name }}</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %} {% endblock %}
{% block content %} {% block content %}

View file

@ -3,7 +3,6 @@
{% block head %} {% block head %}
<title>Interact from your instance</title> <title>Interact from your instance</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %} {% endblock %}
{% block content %} {% block content %}

View file

@ -32,29 +32,6 @@
{% endblock %} {% endblock %}
{% endmacro %} {% endmacro %}
{% macro admin_hide_shares_button(actor) %}
{% block admin_hide_shares_button scoped %}
<form action="{{ request.url_for("admin_actions_hide_announces") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="hide shares">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_show_shares_button(actor) %}
{% block admin_show_shares_button scoped %}
<form action="{{ request.url_for("admin_actions_show_announces") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="show shares">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_follow_button(actor) %} {% macro admin_follow_button(actor) %}
{% block admin_follow_button scoped %} {% block admin_follow_button scoped %}
<form action="{{ request.url_for("admin_actions_follow") }}" method="POST"> <form action="{{ request.url_for("admin_actions_follow") }}" method="POST">
@ -154,28 +131,6 @@
{% endblock %} {% endblock %}
{% endmacro %} {% endmacro %}
{% macro admin_force_delete_button(ap_object_id, permalink_id=None) %}
{% block admin_force_delete_button scoped %}
<form action="{{ request.url_for("admin_actions_force_delete") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="local delete">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_force_delete_webmention_button(webmention_id, permalink_id=None) %}
{% block admin_force_delete_webmention_button scoped %}
<form action="{{ request.url_for("admin_actions_force_delete_webmention") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="webmention_id" value="{{ webmention_id }}">
<input type="submit" value="local delete">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_announce_button(ap_object_id, permalink_id=None) %} {% macro admin_announce_button(ap_object_id, permalink_id=None) %}
{% block admin_announce_button scoped %} {% block admin_announce_button scoped %}
<form action="{{ request.url_for("admin_actions_announce") }}" method="POST"> <form action="{{ request.url_for("admin_actions_announce") }}" method="POST">
@ -270,7 +225,7 @@
{% macro display_tiny_actor_icon(actor) %} {% macro display_tiny_actor_icon(actor) %}
{% block display_tiny_actor_icon scoped %} {% block display_tiny_actor_icon scoped %}
<img class="tiny-actor-icon" src="{{ actor.resized_icon_url }}" alt=""> <img class="tiny-actor-icon" src="{{ actor.resized_icon_url }}" alt="{{ actor.display_name }}'s avatar">
{% endblock %} {% endblock %}
{% endmacro %} {% endmacro %}
@ -365,11 +320,6 @@
<li>rejected</li> <li>rejected</li>
{% endif %} {% endif %}
{% endif %} {% endif %}
{% if actor.are_announces_hidden_from_stream %}
<li>{{ admin_show_shares_button(actor) }}</li>
{% else %}
<li>{{ admin_hide_shares_button(actor) }}</li>
{% endif %}
{% if with_details %} {% if with_details %}
<li><a href="{{ actor.url }}" class="label-btn">remote profile</a></li> <li><a href="{{ actor.url }}" class="label-btn">remote profile</a></li>
{% endif %} {% endif %}
@ -434,35 +384,26 @@
{% for attachment in object.attachments %} {% for attachment in object.attachments %}
{% if attachment.type != "PropertyValue" %} {% if attachment.type != "PropertyValue" %}
{% set orientation = "unknown" %}
{% if attachment.width %}
{% set orientation = "portrait" if attachment.width < attachment.height else "landscape" %}
{% endif %}
{% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %} {% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %}
<div class="attachment-wrapper"> <div class="attachment-wrapper">
<label for="{{attachment.proxied_url}}" class="label-btn show-hide-sensitive-btn">show/hide sensitive content</label> <label for="{{attachment.proxied_url}}" class="label-btn show-hide-sensitive-btn">show/hide sensitive content</label>
<div> <div>
<div class="sensitive-attachment"> <div class="sensitive-attachment">
<input class="sensitive-attachment-state" type="checkbox" id="{{attachment.proxied_url}}" aria-hidden="true"> <input class="sensitive-attachment-state" type="checkbox" id="{{attachment.proxied_url}}" aria-hidden="true">
<div class="sensitive-attachment-box attachment-orientation-{{orientation}}"> <div class="sensitive-attachment-box">
<div></div> <div></div>
{% else %} {% else %}
<div class="attachment-item attachment-orientation-{{orientation}}"> <div class="attachment-item">
{% endif %} {% endif %}
{% if attachment.type == "Image" or (attachment | has_media_type("image")) %} {% if attachment.type == "Image" or (attachment | has_media_type("image")) %}
{% if attachment.url not in object.inlined_images %} {% if attachment.url not in object.inlined_images %}
<a class="media-link" href="{{ attachment.proxied_url }}" target="_blank"> <img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} title="{{ attachment.name }}" alt="{{ attachment.name }}"{% endif %} class="attachment">
<img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} title="{{ attachment.name }}" alt="{{ attachment.name }}"{% endif %} class="attachment u-photo">
</a>
{% endif %} {% endif %}
{% elif attachment.type == "Video" or (attachment | has_media_type("video")) %} {% elif attachment.type == "Video" or (attachment | has_media_type("video")) %}
<div class="video-wrapper"> <video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %}></video>
<video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="u-video"></video>
<div class="video-gif-overlay">GIF</div>
</div>
{% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %} {% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %}
<audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment u-audio"></audio> <audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment"></audio>
{% elif attachment.type == "Link" %} {% elif attachment.type == "Link" %}
<a href="{{ attachment.url }}" class="attachment">{{ attachment.url | truncate(64, True) }}</a> ({{ attachment.mimetype}}) <a href="{{ attachment.url }}" class="attachment">{{ attachment.url | truncate(64, True) }}</a> ({{ attachment.mimetype}})
{% else %} {% else %}
@ -483,55 +424,11 @@
{% endblock %} {% endblock %}
{% endmacro %} {% endmacro %}
{% macro display_webmention_reply(wm_reply) %} {% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False) %}
{% block display_webmention_reply scoped %}
<div class="ap-object u-comment h-cite">
<div class="actor-box h-card p-author">
<div class="icon-box">
<img src="{{ wm_reply.face.picture_url }}" alt="{{ wm_reply.face.name }}'s avatar" class="actor-icon u-photo">
</div>
<a href="{{ wm_reply.face.url }}" class="u-url">
<div><strong class="p-name">{{ wm_reply.face.name | clean_html_wm | safe }}</strong></div>
<div class="actor-handle">{{ wm_reply.face.url | truncate(64, True) }}</div>
</a>
</div>
<p class="in-reply-to">in reply to <a href="{{ wm_reply.in_reply_to }}" title="{{ wm_reply.in_reply_to }}" rel="nofollow">
this object
</a></p>
<div class="obj-content margin-top-20">
<div class="e-content">
{{ wm_reply.content | clean_html_wm | safe }}
</div>
</div>
<nav class="flexbox activity-bar margin-top-20">
<ul>
<li>
<div><a href="{{ wm_reply.url }}" rel="nofollow" class="object-permalink u-url u-uid">permalink</a></div>
</li>
<li>
<time class="dt-published" datetime="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}" title="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}">{{ wm_reply.published_at | timeago }}</time>
</li>
{% if is_admin %}
<li>
{{ admin_force_delete_webmention_button(wm_reply.webmention_id) }}
</li>
{% endif %}
</ul>
</nav>
</div>
{% endblock %}
{% endmacro %}
{% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False, is_h_entry=True) %}
{% block display_object scoped %} {% block display_object scoped %}
{% set is_article_mode = object.is_from_outbox and object.ap_type == "Article" and is_object_page %} {% set is_article_mode = object.is_from_outbox and object.ap_type == "Article" and is_object_page %}
{% if object.ap_type in ["Note", "Article", "Video", "Page", "Question", "Event"] %} {% if object.ap_type in ["Note", "Article", "Video", "Page", "Question", "Event"] %}
<div class="ap-object {% if expanded %}ap-object-expanded {% endif %}{% if is_h_entry %}h-entry{% endif %}" id="{{ object.permalink_id }}"> <div class="ap-object {% if expanded %}ap-object-expanded {% endif %}h-entry" id="{{ object.permalink_id }}">
{% if is_article_mode %} {% if is_article_mode %}
<data class="h-card"> <data class="h-card">
@ -545,7 +442,7 @@
{% if object.in_reply_to %} {% if object.in_reply_to %}
<p class="in-reply-to">in reply to <a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" rel="nofollow"> <p class="in-reply-to">in reply to <a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" rel="nofollow">
this object this {{ object.ap_type|lower }}
</a></p> </a></p>
{% endif %} {% endif %}
@ -580,13 +477,12 @@
{% endif %} {% endif %}
{% if object.summary %} {% if object.summary %}
<details class="show-more-wrapper"> <div class="show-more-wrapper">
<summary>
<div class="p-summary"> <div class="p-summary">
<p>{{ object.summary | clean_html(object) | safe }}</p> <p>{{ object.summary | clean_html(object) | safe }}</p>
</div> </div>
<span class="show-more-btn" aria-hidden="true"></span> <label for="show-more-{{ object.permalink_id }}" class="show-more-btn">show/hide more</label>
</summary> <input class="show-more-state" type="checkbox" aria-hidden="true" id="show-more-{{ object.permalink_id }}" checked>
{% endif %} {% endif %}
<div class="obj-content"> <div class="obj-content">
<div class="e-content"> <div class="e-content">
@ -647,7 +543,7 @@
</div> </div>
{% if object.summary %} {% if object.summary %}
</details> </div>
{% endif %} {% endif %}
<div class="activity-attachment"> <div class="activity-attachment">
@ -782,11 +678,6 @@
{{ admin_expand_button(object) }} {{ admin_expand_button(object) }}
</li> </li>
{% endif %} {% endif %}
{% if object.is_from_inbox and not object.announced_via_outbox_object_ap_id and object.is_local_reply %}
<li>
{{ admin_force_delete_button(object.ap_id) }}
</li>
{% endif %}
</ul> </ul>
</nav> </nav>
{% endif %} {% endif %}
@ -798,8 +689,8 @@
<div class="interactions-block">Likes <div class="interactions-block">Likes
<div class="facepile-wrapper"> <div class="facepile-wrapper">
{% for like in likes %} {% for like in likes %}
<a href="{% if is_admin and like.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ like.ap_actor_id }}{% else %}{{ like.url }}{% endif %}" title="{{ like.name }}" rel="noreferrer"> <a href="{% if is_admin %}{{ url_for("admin_profile") }}?actor_id={{ like.actor.ap_id }}{% else %}{{ like.actor.url }}{% endif %}" title="{{ like.actor.handle }}" rel="noreferrer">
<img src="{{ like.picture_url }}" alt="{{ like.name }}"> <img src="{{ like.actor.resized_icon_url }}" alt="{{ like.actor.handle}}">
</a> </a>
{% endfor %} {% endfor %}
{% if object.likes_count > likes | length %} {% if object.likes_count > likes | length %}
@ -815,8 +706,8 @@
<div class="interactions-block">Shares <div class="interactions-block">Shares
<div class="facepile-wrapper"> <div class="facepile-wrapper">
{% for share in shares %} {% for share in shares %}
<a href="{% if is_admin and share.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ share.ap_actor_id }}{% else %}{{ share.url }}{% endif %}" title="{{ share.name }}" rel="noreferrer"> <a href="{% if is_admin %}{{ url_for("admin_profile") }}?actor_id={{ share.actor.ap_id }}{% else %}{{ share.actor.url }}{% endif %}" title="{{ share.actor.handle }}" rel="noreferrer">
<img src="{{ share.picture_url }}" alt="{{ share.name }}"> <img src="{{ share.actor.resized_icon_url }}" alt="{{ share.actor.handle}}">
</a> </a>
{% endfor %} {% endfor %}
{% if object.announces_count > shares | length %} {% if object.announces_count > shares | length %}

View file

@ -60,7 +60,7 @@ async def save_upload(db_session: AsyncSession, f: UploadFile) -> models.Upload:
destination_image.putdata(original_image.getdata()) destination_image.putdata(original_image.getdata())
destination_image.save( destination_image.save(
dest_filename, dest_filename,
format=_original_image.format, # type: ignore format=_original_image.format,
) )
with open(dest_filename, "rb") as dest_f: with open(dest_filename, "rb") as dest_f:

View file

@ -23,8 +23,6 @@ def _load_emojis(root_dir: Path, base_url: str) -> None:
mt = mimetypes.guess_type(emoji.name)[0] mt = mimetypes.guess_type(emoji.name)[0]
if mt and mt.startswith("image/"): if mt and mt.startswith("image/"):
name = emoji.name.split(".")[0] name = emoji.name.split(".")[0]
if not re.match(EMOJI_REGEX, f":{name}:"):
continue
ap_emoji: "RawObject" = { ap_emoji: "RawObject" = {
"type": "Emoji", "type": "Emoji",
"name": f":{name}:", "name": f":{name}:",

View file

@ -1,172 +0,0 @@
import datetime
from dataclasses import dataclass
from datetime import timezone
from typing import Any
from typing import Optional
from loguru import logger
from app import media
from app.models import InboxObject
from app.models import Webmention
from app.utils.datetime import parse_isoformat
from app.utils.url import must_make_abs
@dataclass
class Face:
ap_actor_id: str | None
url: str
name: str
picture_url: str
created_at: datetime.datetime
@classmethod
def from_inbox_object(cls, like: InboxObject) -> "Face":
return cls(
ap_actor_id=like.actor.ap_id,
url=like.actor.url, # type: ignore
name=like.actor.handle, # type: ignore
picture_url=like.actor.resized_icon_url,
created_at=like.created_at, # type: ignore
)
@classmethod
def from_webmention(cls, webmention: Webmention) -> Optional["Face"]:
items = webmention.source_microformats.get("items", []) # type: ignore
for item in items:
if item["type"][0] == "h-card":
try:
return cls(
ap_actor_id=None,
url=(
must_make_abs(
item["properties"]["url"][0], webmention.source
)
if item["properties"].get("url")
else webmention.source
),
name=item["properties"]["name"][0],
picture_url=media.resized_media_url(
must_make_abs(
item["properties"]["photo"][0], webmention.source
), # type: ignore
50,
),
created_at=webmention.created_at, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
elif item["type"][0] == "h-entry":
author = item["properties"]["author"][0]
try:
return cls(
ap_actor_id=None,
url=webmention.source,
name=author["properties"]["name"][0],
picture_url=media.resized_media_url(
must_make_abs(
author["properties"]["photo"][0], webmention.source
), # type: ignore
50,
),
created_at=webmention.created_at, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
return None
def merge_faces(faces: list[Face]) -> list[Face]:
return sorted(
faces,
key=lambda f: f.created_at,
reverse=True,
)[:10]
def _parse_face(webmention: Webmention, items: list[dict[str, Any]]) -> Face | None:
for item in items:
if item["type"][0] == "h-card":
try:
return Face(
ap_actor_id=None,
url=(
must_make_abs(item["properties"]["url"][0], webmention.source)
if item["properties"].get("url")
else webmention.source
),
name=item["properties"]["name"][0],
picture_url=media.resized_media_url(
must_make_abs(
item["properties"]["photo"][0], webmention.source
), # type: ignore
50,
),
created_at=webmention.created_at, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
return None
@dataclass
class WebmentionReply:
face: Face
content: str
url: str
published_at: datetime.datetime
in_reply_to: str
webmention_id: int
@classmethod
def from_webmention(cls, webmention: Webmention) -> Optional["WebmentionReply"]:
items = webmention.source_microformats.get("items", []) # type: ignore
for item in items:
if item["type"][0] == "h-entry":
try:
face = _parse_face(webmention, item["properties"].get("author", []))
if not face:
logger.info(
"Failed to build WebmentionReply/Face for "
f"webmention id={webmention.id}"
)
break
if "published" in item["properties"]:
published_at = (
parse_isoformat(item["properties"]["published"][0])
.astimezone(timezone.utc)
.replace(tzinfo=None)
)
else:
published_at = webmention.created_at # type: ignore
return cls(
face=face,
content=item["properties"]["content"][0]["html"],
url=must_make_abs(
item["properties"]["url"][0], webmention.source
),
published_at=published_at,
in_reply_to=webmention.target, # type: ignore
webmention_id=webmention.id, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
return None

View file

@ -32,11 +32,15 @@ def highlight(html: str) -> str:
# If this comes from a microblog.pub instance we may have the language # If this comes from a microblog.pub instance we may have the language
# in the class name # in the class name
if "data-microblogpub-lexer" in code.attrs: if "class" in code.attrs and code.attrs["class"][0].startswith("language-"):
try: try:
lexer = get_lexer_by_name(code.attrs["data-microblogpub-lexer"]) lexer = get_lexer_by_name(
code.attrs["class"][0].removeprefix("language-")
)
except Exception: except Exception:
lexer = guess_lexer(code_content) lexer = guess_lexer(code_content)
else:
lexer = guess_lexer(code_content)
# Replace the code with Pygment output # Replace the code with Pygment output
# XXX: the HTML escaping causes issue with Python type annotations # XXX: the HTML escaping causes issue with Python type annotations
@ -46,8 +50,5 @@ def highlight(html: str) -> str:
phighlight(code_content, lexer, _FORMATTER), "html5lib" phighlight(code_content, lexer, _FORMATTER), "html5lib"
).body.next ).body.next
) )
else:
code.name = "div"
code["class"] = code.get("class", []) + ["highlight"]
return soup.body.encode_contents().decode() return soup.body.encode_contents().decode()

View file

@ -10,7 +10,7 @@ from app.utils.url import make_abs
class IndieAuthClient: class IndieAuthClient:
logo: str | None logo: str | None
name: str name: str
url: str | None url: str
def _get_prop(props: dict[str, Any], name: str, default=None) -> Any: def _get_prop(props: dict[str, Any], name: str, default=None) -> Any:

View file

@ -1,32 +0,0 @@
from pathlib import Path
from loguru import logger
from app.webfinger import get_actor_url
def _load_mastodon_following_accounts_csv_file(path: str) -> list[str]:
handles = []
for line in Path(path).read_text().splitlines()[1:]:
handle = line.split(",")[0]
handles.append(handle)
return handles
async def get_actor_urls_from_following_accounts_csv_file(
path: str,
) -> list[tuple[str, str]]:
actor_urls = []
for handle in _load_mastodon_following_accounts_csv_file(path):
try:
actor_url = await get_actor_url(handle)
except Exception:
logger.error("Failed to fetch actor URL for {handle=}")
else:
if actor_url:
actor_urls.append((handle, actor_url))
else:
logger.info(f"No actor URL found for {handle=}")
return actor_urls

View file

@ -1,15 +1,12 @@
import asyncio import asyncio
import mimetypes import mimetypes
import re import re
import signal
from concurrent.futures import TimeoutError
from typing import Any from typing import Any
from urllib.parse import urlparse from urllib.parse import urlparse
import httpx import httpx
from bs4 import BeautifulSoup # type: ignore from bs4 import BeautifulSoup # type: ignore
from loguru import logger from loguru import logger
from pebble import concurrent # type: ignore
from pydantic import BaseModel from pydantic import BaseModel
from app import activitypub as ap from app import activitypub as ap
@ -32,11 +29,7 @@ class OpenGraphMeta(BaseModel):
site_name: str site_name: str
@concurrent.process(timeout=5)
def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None: def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
# Prevent SIGTERM to bubble up to the worker
signal.signal(signal.SIGTERM, signal.SIG_IGN)
soup = BeautifulSoup(html, "html5lib") soup = BeautifulSoup(html, "html5lib")
ogs = { ogs = {
og.attrs["property"]: og.attrs.get("content") og.attrs["property"]: og.attrs.get("content")
@ -62,20 +55,9 @@ def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
if u := raw.get(maybe_rel): if u := raw.get(maybe_rel):
raw[maybe_rel] = make_abs(u, url) raw[maybe_rel] = make_abs(u, url)
if not is_url_valid(raw[maybe_rel]):
logger.info(f"Invalid url {raw[maybe_rel]}")
if maybe_rel == "url":
raw["url"] = url
elif maybe_rel == "image":
raw["image"] = None
return OpenGraphMeta.parse_obj(raw) return OpenGraphMeta.parse_obj(raw)
def scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
return _scrap_og_meta(url, html).result()
async def external_urls( async def external_urls(
db_session: AsyncSession, db_session: AsyncSession,
ro: ap_object.RemoteObject | OutboxObject | InboxObject, ro: ap_object.RemoteObject | OutboxObject | InboxObject,
@ -144,10 +126,7 @@ async def _og_meta_from_url(url: str) -> OpenGraphMeta | None:
return None return None
try: try:
return scrap_og_meta(url, resp.text) return _scrap_og_meta(url, resp.text)
except TimeoutError:
logger.info(f"Timed out when scraping OG meta for {url}")
return None
except Exception: except Exception:
logger.info(f"Failed to scrap OG meta for {url}") logger.info(f"Failed to scrap OG meta for {url}")
return None return None

View file

@ -21,13 +21,6 @@ def make_abs(url: str | None, parent: str) -> str | None:
) )
def must_make_abs(url: str | None, parent: str) -> str:
abs_url = make_abs(url, parent)
if not abs_url:
raise ValueError("missing URL")
return abs_url
class InvalidURLError(Exception): class InvalidURLError(Exception):
pass pass
@ -61,7 +54,7 @@ def is_url_valid(url: str) -> bool:
if not parsed.hostname or parsed.hostname.lower() in ["localhost"]: if not parsed.hostname or parsed.hostname.lower() in ["localhost"]:
return False return False
if is_hostname_blocked(parsed.hostname): if parsed.hostname in BLOCKED_SERVERS:
logger.warning(f"{parsed.hostname} is blocked") logger.warning(f"{parsed.hostname} is blocked")
return False return False
@ -88,11 +81,3 @@ def check_url(url: str) -> None:
raise InvalidURLError(f'"{url}" is invalid') raise InvalidURLError(f'"{url}" is invalid')
return None return None
@functools.lru_cache(maxsize=256)
def is_hostname_blocked(hostname: str) -> bool:
for blocked_hostname in BLOCKED_SERVERS:
if hostname == blocked_hostname or hostname.endswith(f".{blocked_hostname}"):
return True
return False

View file

@ -24,7 +24,7 @@ async def _discover_webmention_endoint(url: str) -> str | None:
follow_redirects=True, follow_redirects=True,
) )
resp.raise_for_status() resp.raise_for_status()
except Exception: except (httpx.HTTPError, httpx.HTTPStatusError):
logger.exception(f"Failed to discover webmention endpoint for {url}") logger.exception(f"Failed to discover webmention endpoint for {url}")
return None return None

View file

@ -69,5 +69,5 @@ class Worker(Generic[T]):
logger.info("stopping loop") logger.info("stopping loop")
async def _shutdown(self, sig: signal.Signals) -> None: async def _shutdown(self, sig: signal.Signals) -> None:
logger.info(f"Caught {sig=}") logger.info(f"Caught {signal=}")
self._stop_event.set() self._stop_event.set()

View file

@ -1,4 +1,3 @@
import xml.etree.ElementTree as ET
from typing import Any from typing import Any
from urllib.parse import urlparse from urllib.parse import urlparse
@ -9,85 +8,32 @@ from app import config
from app.utils.url import check_url from app.utils.url import check_url
async def get_webfinger_via_host_meta(host: str) -> str | None:
resp: httpx.Response | None = None
is_404 = False
async with httpx.AsyncClient() as client:
for i, proto in enumerate({"http", "https"}):
try:
url = f"{proto}://{host}/.well-known/host-meta"
check_url(url)
resp = await client.get(
url,
headers={
"User-Agent": config.USER_AGENT,
},
follow_redirects=True,
)
resp.raise_for_status()
break
except httpx.HTTPStatusError as http_error:
logger.exception("HTTP error")
if http_error.response.status_code in [403, 404, 410]:
is_404 = True
continue
raise
except httpx.HTTPError:
logger.exception("req failed")
# If we tried https first and the domain is "http only"
if i == 0:
continue
break
if is_404:
return None
if resp:
tree = ET.fromstring(resp.text)
maybe_link = tree.find(
"./{http://docs.oasis-open.org/ns/xri/xrd-1.0}Link[@rel='lrdd']"
)
if maybe_link is not None:
return maybe_link.attrib.get("template")
return None
async def webfinger( async def webfinger(
resource: str, resource: str,
webfinger_url: str | None = None,
) -> dict[str, Any] | None: # noqa: C901 ) -> dict[str, Any] | None: # noqa: C901
"""Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL.""" """Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL."""
resource = resource.strip()
logger.info(f"performing webfinger resolution for {resource}") logger.info(f"performing webfinger resolution for {resource}")
urls = [] protos = ["https", "http"]
host = None
if webfinger_url:
urls = [webfinger_url]
else:
if resource.startswith("http://"): if resource.startswith("http://"):
protos.reverse()
host = urlparse(resource).netloc host = urlparse(resource).netloc
url = f"http://{host}/.well-known/webfinger"
elif resource.startswith("https://"): elif resource.startswith("https://"):
host = urlparse(resource).netloc host = urlparse(resource).netloc
url = f"https://{host}/.well-known/webfinger"
else: else:
protos = ["https", "http"]
_, host = resource.split("@", 1)
urls = [f"{proto}://{host}/.well-known/webfinger" for proto in protos]
if resource.startswith("acct:"): if resource.startswith("acct:"):
resource = resource[5:] resource = resource[5:]
if resource.startswith("@"): if resource.startswith("@"):
resource = resource[1:] resource = resource[1:]
_, host = resource.split("@", 1)
resource = "acct:" + resource resource = "acct:" + resource
is_404 = False is_404 = False
resp: httpx.Response | None = None resp: httpx.Response | None = None
async with httpx.AsyncClient() as client: async with httpx.AsyncClient() as client:
for i, url in enumerate(urls): for i, proto in enumerate(protos):
try: try:
url = f"{proto}://{host}/.well-known/webfinger"
check_url(url) check_url(url)
resp = await client.get( resp = await client.get(
url, url,
@ -111,14 +57,7 @@ async def webfinger(
if i == 0: if i == 0:
continue continue
break break
if is_404: if is_404:
if not webfinger_url and host:
if webfinger_url := (await get_webfinger_via_host_meta(host)):
return await webfinger(
resource,
webfinger_url=webfinger_url,
)
return None return None
if resp: if resp:

View file

@ -1,5 +1,3 @@
from urllib.parse import urlparse
import httpx import httpx
from bs4 import BeautifulSoup # type: ignore from bs4 import BeautifulSoup # type: ignore
from fastapi import APIRouter from fastapi import APIRouter
@ -8,21 +6,13 @@ from fastapi import HTTPException
from fastapi import Request from fastapi import Request
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from loguru import logger from loguru import logger
from sqlalchemy import func
from sqlalchemy import select from sqlalchemy import select
from app import models from app import models
from app.boxes import _get_outbox_announces_count
from app.boxes import _get_outbox_likes_count
from app.boxes import _get_outbox_replies_count
from app.boxes import get_outbox_object_by_ap_id from app.boxes import get_outbox_object_by_ap_id
from app.boxes import get_outbox_object_by_slug_and_short_id
from app.boxes import is_notification_enabled
from app.database import AsyncSession from app.database import AsyncSession
from app.database import get_db_session from app.database import get_db_session
from app.utils import microformats from app.utils import microformats
from app.utils.facepile import Face
from app.utils.facepile import WebmentionReply
from app.utils.url import check_url from app.utils.url import check_url
from app.utils.url import is_url_valid from app.utils.url import is_url_valid
@ -57,7 +47,6 @@ async def webmention_endpoint(
check_url(source) check_url(source)
check_url(target) check_url(target)
parsed_target_url = urlparse(target)
except Exception: except Exception:
logger.exception("Invalid webmention request") logger.exception("Invalid webmention request")
raise HTTPException(status_code=400, detail="Invalid payload") raise HTTPException(status_code=400, detail="Invalid payload")
@ -76,16 +65,6 @@ async def webmention_endpoint(
logger.info("Found existing Webmention, will try to update or delete") logger.info("Found existing Webmention, will try to update or delete")
mentioned_object = await get_outbox_object_by_ap_id(db_session, target) mentioned_object = await get_outbox_object_by_ap_id(db_session, target)
if not mentioned_object and parsed_target_url.path.startswith("/articles/"):
try:
_, _, short_id, slug = parsed_target_url.path.split("/")
mentioned_object = await get_outbox_object_by_slug_and_short_id(
db_session, slug, short_id
)
except Exception:
logger.exception(f"Failed to match {target}")
if not mentioned_object: if not mentioned_object:
logger.info(f"Invalid target {target=}") logger.info(f"Invalid target {target=}")
@ -111,15 +90,9 @@ async def webmention_endpoint(
logger.warning(f"target {target=} not found in source") logger.warning(f"target {target=} not found in source")
if existing_webmention_in_db: if existing_webmention_in_db:
logger.info("Deleting existing Webmention") logger.info("Deleting existing Webmention")
mentioned_object.webmentions_count = mentioned_object.webmentions_count - 1
existing_webmention_in_db.is_deleted = True existing_webmention_in_db.is_deleted = True
await db_session.flush()
# Revert side effects
await _handle_webmention_side_effects(
db_session, existing_webmention_in_db, mentioned_object
)
if is_notification_enabled(models.NotificationType.DELETED_WEBMENTION):
notif = models.Notification( notif = models.Notification(
notification_type=models.NotificationType.DELETED_WEBMENTION, notification_type=models.NotificationType.DELETED_WEBMENTION,
outbox_object_id=mentioned_object.id, outbox_object_id=mentioned_object.id,
@ -137,16 +110,11 @@ async def webmention_endpoint(
else: else:
return JSONResponse(content={}, status_code=200) return JSONResponse(content={}, status_code=200)
webmention_type = models.WebmentionType.UNKNOWN
webmention: models.Webmention
if existing_webmention_in_db: if existing_webmention_in_db:
# Undelete if needed # Undelete if needed
existing_webmention_in_db.is_deleted = False existing_webmention_in_db.is_deleted = False
existing_webmention_in_db.source_microformats = data existing_webmention_in_db.source_microformats = data
await db_session.flush()
webmention = existing_webmention_in_db
if is_notification_enabled(models.NotificationType.UPDATED_WEBMENTION):
notif = models.Notification( notif = models.Notification(
notification_type=models.NotificationType.UPDATED_WEBMENTION, notification_type=models.NotificationType.UPDATED_WEBMENTION,
outbox_object_id=mentioned_object.id, outbox_object_id=mentioned_object.id,
@ -159,13 +127,10 @@ async def webmention_endpoint(
target=target, target=target,
source_microformats=data, source_microformats=data,
outbox_object_id=mentioned_object.id, outbox_object_id=mentioned_object.id,
webmention_type=webmention_type,
) )
db_session.add(new_webmention) db_session.add(new_webmention)
await db_session.flush() await db_session.flush()
webmention = new_webmention
if is_notification_enabled(models.NotificationType.NEW_WEBMENTION):
notif = models.Notification( notif = models.Notification(
notification_type=models.NotificationType.NEW_WEBMENTION, notification_type=models.NotificationType.NEW_WEBMENTION,
outbox_object_id=mentioned_object.id, outbox_object_id=mentioned_object.id,
@ -173,60 +138,8 @@ async def webmention_endpoint(
) )
db_session.add(notif) db_session.add(notif)
# Determine the webmention type mentioned_object.webmentions_count = mentioned_object.webmentions_count + 1
for item in data.get("items", []):
if target in item.get("properties", {}).get(
"in-reply-to", []
) and WebmentionReply.from_webmention(webmention):
webmention_type = models.WebmentionType.REPLY
break
elif target in item.get("properties", {}).get(
"like-of", []
) and Face.from_webmention(webmention):
webmention_type = models.WebmentionType.LIKE
break
elif target in item.get("properties", {}).get(
"repost-of", []
) and Face.from_webmention(webmention):
webmention_type = models.WebmentionType.REPOST
break
if webmention_type != models.WebmentionType.UNKNOWN:
webmention.webmention_type = webmention_type
await db_session.flush()
# Handle side effect
await _handle_webmention_side_effects(db_session, webmention, mentioned_object)
await db_session.commit() await db_session.commit()
return JSONResponse(content={}, status_code=200) return JSONResponse(content={}, status_code=200)
async def _handle_webmention_side_effects(
db_session: AsyncSession,
webmention: models.Webmention,
mentioned_object: models.OutboxObject,
) -> None:
if webmention.webmention_type == models.WebmentionType.UNKNOWN:
# TODO: recount everything
mentioned_object.webmentions_count = await db_session.scalar(
select(func.count(models.Webmention.id)).where(
models.Webmention.is_deleted.is_(False),
models.Webmention.outbox_object_id == mentioned_object.id,
models.Webmention.webmention_type == models.WebmentionType.UNKNOWN,
)
)
elif webmention.webmention_type == models.WebmentionType.LIKE:
mentioned_object.likes_count = await _get_outbox_likes_count(
db_session, mentioned_object
)
elif webmention.webmention_type == models.WebmentionType.REPOST:
mentioned_object.announces_count = await _get_outbox_announces_count(
db_session, mentioned_object
)
elif webmention.webmention_type == models.WebmentionType.REPLY:
mentioned_object.replies_count = await _get_outbox_replies_count(
db_session, mentioned_object
)
else:
raise ValueError(f"Unhandled {webmention.webmention_type} webmention")

View file

@ -58,24 +58,3 @@ And check out the result by starting a static server using Python standard libra
cd docs/dist cd docs/dist
python -m http.server 8001 python -m http.server 8001
``` ```
## Contributing
Contributions/patches are welcome, but please start a discussion in a [ticket](https://todo.sr.ht/~tsileo/microblog.pub) or a [thread in the mailing list](https://lists.sr.ht/~tsileo/microblog.pub-devel) before working on anything substantial.
### Patches
Please ensure your code passes the code quality checks:
```bash
inv autoformat
inv lint
```
And that the test suite is passing:
```bash
inv tests
```
Please also consider adding new test cases if needed.

View file

@ -191,72 +191,6 @@ http {
} }
``` ```
## (Advanced) Running on a subdomain
It is possible to run microblogpub on a subdomain (`sub.domain.tld`) while being reachable from the root domain (`domain.tld`) using the `name@domain.tld` handle.
This requires forwarding/proxying requests from the root domain to the subdomain, for example using NGINX:
```nginx
location /.well-known/webfinger {
add_header Access-Control-Allow-Origin '*';
return 301 https://sub.domain.tld$request_uri;
}
```
And updating `data/profile.toml` to specify the root domain as the webfinger domain:
```toml
webfinger_domain = "domain.tld"
```
Once configured correctly, people will be able to follow you using `name@domain.tld`, while using `sub.domain.tld` for the web interface.
## (Advanced) Running from a subpath
It is possible to configure microblogpub to run from a subpath.
To achieve this, apply the following configuration _between_ the config and start steps,
i.e. _after_ you run `make config` or `poetry run inv configuration-wizard`,
but _before_ you run `docker compose up` or `poetry run supervisord`.
Changing these settings on an instance that already has posts or has been seen by other instances will likely break links to those posts and break federation (i.e. links to your instance, posts and profile from other instances).
The following steps explain how to configure the instance to be available at `https://example.com/subdir`.
Change them to your actual domain and subdir.
* Edit the `data/profile.toml` file and add this line:
id = "https://example.com/subdir"
* Edit the `misc/*-supervisord.conf` file that is relevant to you (it depends on how you start microblogpub - if in doubt, make the same change in all of them): in the `[program:uvicorn]` section, in the line that starts with `command`, add this argument at the very end: ` --root-path /subdir`
The above two steps are enough to configure microblogpub.
Next, you also need to configure the reverse proxy.
The details might differ slightly if you plan to have other services running on the same domain, but for the [NGINX config shown above](#reverse-proxy), the following changes are enough:
* Add subdir to location, so location block starts like this:
location /subdir {
* Add `/` at the end of `proxy_pass` directive, like this:
proxy_pass http://localhost:8000/;
These two changes will instruct NGINX that requests sent to `https://example.com/subdir/...` should be forwarded to `http://localhost:8000/...`.
* Inside `server` block, add redirects for well-known URLs (add these lines after `client_max_body_size`, remember to replace `subdir` with your actual subdir!):
location /.well-known/webfinger { return 301 /subdir$request_uri; }
location /.well-known/nodeinfo { return 301 /subdir$request_uri; }
location /.well-known/oauth-authorization-server { return 301 /subdir$request_uri; }
* Optionally, [check robots.txt from a running microblogpub instance](https://microblog.pub/robots.txt) and integrate it into robots.txt file in the root of your server - remember to prepend `subdir` to URLs, so for example `Disallow: /admin` becomes `Disallow: /subdir/admin`.
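Putting these changes together, a full `server` block for the subpath setup could look roughly like the sketch below. TLS and the other directives from the [reverse-proxy section](#reverse-proxy) are assumed and abbreviated; `example.com`, `subdir` and the upstream port are placeholders:

```nginx
server {
    listen 443 ssl;                  # TLS directives omitted for brevity
    server_name example.com;

    client_max_body_size 4G;         # keep the value from your existing config

    # Well-known endpoints must stay at the domain root
    location /.well-known/webfinger { return 301 /subdir$request_uri; }
    location /.well-known/nodeinfo { return 301 /subdir$request_uri; }
    location /.well-known/oauth-authorization-server { return 301 /subdir$request_uri; }

    location /subdir {
        # proxy_set_header directives from the reverse-proxy section go here
        proxy_pass http://localhost:8000/;  # trailing slash strips /subdir
    }
}
```
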
## YunoHost edition

[YunoHost](https://yunohost.org/) support is available (although it is not an official package for now): <https://git.sr.ht/~tsileo/microblog.pub_ynh>.
## Available tutorial/guides
- [Opalstack](https://community.opalstack.com/d/1055-howto-install-and-run-microblogpub-on-opalstack), thanks to [@defulmere@mastodon.social](https://mastodon.online/@defulmere).

View file

@@ -25,10 +25,9 @@ As these two config items define your ActivityPub handle `@handle@domain`.

You can tweak your profile by changing these items:

- `name`: The name shown with your profile.
- `summary`: The summary or 'bio' part of your profile, written in Markdown.
- `icon_url`: Your profile image or avatar.
- `image_url`: This provides a 'header' or 'banner' image. Note that it is not shown by the default Microblog.pub templates. It will be used by Mastodon (which uses a 3:1 ratio image) and Pleroma. Pixelfed and Peertube, for example, don't show these images by default.

Whenever one of these config items is updated, an `Update` activity will be sent to all known servers to update your remote profile.
@@ -36,15 +35,6 @@ The server will need to be restarted for the changes to take effect.

Before restarting the server, you can ensure you haven't made any mistakes by running the [configuration checking task](/user_guide.html#configuration-checking).
Note that currently `image_url` is not used anywhere in microblog.pub itself, but other clients/servers do occasionally use it when showing remote profiles as a background image.
Also, this image _can_ be used in microblog.pub - just add this:
```html
<img src="{{ local_actor.image_url | media_proxy_url }}">
```
to an appropriate place in your template (most likely, `header.html`).
For more information, see a section about [custom templates](/user_guide.html#custom-templates) further in this document.
### Profile metadata
@@ -108,39 +98,6 @@ privacy_replace = [
]
```
### Disabling certain notification types
All notifications are enabled by default.
You can disable specific notifications by adding them to the `disabled_notifications` list.

This example disables like and share notifications:
```
disabled_notifications = ["like", "announce"]
```
#### Available notification types
- `new_follower`
- `rejected_follower`
- `unfollow`
- `follow_request_accepted`
- `follow_request_rejected`
- `move`
- `like`
- `undo_like`
- `announce`
- `undo_announce`
- `mention`
- `new_webmention`
- `updated_webmention`
- `deleted_webmention`
- `blocked`
- `unblocked`
- `block`
- `unblock`
### Customization

#### Default emoji

@@ -156,7 +113,6 @@ You can copy/paste them from [getemoji.com](https://getemoji.com/).

#### Custom emoji

You can add custom emoji in the `data/custom_emoji` directory and they will be picked up automatically.

Do not use exotic characters in the filename - only letters, numbers, and the underscore symbol `_` are allowed.
#### Custom CSS

@@ -171,35 +127,10 @@ $secondary-color: #32cd32;

See `app/scss/main.scss` for the variables that can be overridden.

You will need to [recompile CSS](#recompiling-css-files) after making any CSS changes (so the actual CSS files are updated) and restart microblog.pub (so the CSS link in HTML documents gets a new checksum - otherwise, browsers that downloaded the old CSS will keep using it).
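Recompiling comes down to running the `compile-scss` task for your install flavor (see the recompiling CSS files section of this guide for details):

```bash
# Python edition
poetry run inv compile-scss

# Docker edition
make compile-scss
```
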
#### Custom favicon
By default, the microblog.pub favicon is a square of the `$primary-color` CSS color (see the section above on how to redefine CSS colors).
You can change it to any icon you like - just save a desired file as `data/favicon.ico`.
After that, run the "[recompile CSS](#recompiling-css-files)" task to copy it to `app/static/favicon.ico`.
#### Custom templates

If you'd like to customize your instance's theme beyond CSS, you can modify the app's HTML by placing templates in `data/templates` which override the defaults in `app/templates`.
Templates are written using the [Jinja](https://jinja.palletsprojects.com/en/latest/templates/) templating language.

Moreover, `utils.html` has scoped blocks around the body of every macro.
This allows macros to be overridden individually in `data/templates/utils.html`, without copying the whole file.

For example, to only override the display of a specific actor's name/icon, you can create a `data/templates/utils.html` file with the following content:
```jinja
{% extends "app/utils.html" %}
{% block display_actor %}
{% if actor.ap_id == "https://me.example.com" %}
<!-- custom actor display -->
{% else %}
{{ super() }}
{% endif %}
{% endblock %}
```
#### Custom Content Security Policy (CSP)

You can override the default Content Security Policy by adding a line in `data/profile.toml`:
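A purely hypothetical illustration of such a line (the option name and policy value shown here are placeholders, not the actual default):

```toml
# Placeholder example - adjust the policy to your needs
custom_content_security_policy = "default-src 'self'; frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
```
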
@@ -355,7 +286,7 @@ First you need to grab the "ActivityPub actor URL" for your existing account:

```bash
# For a Python install
poetry run inv webfinger username@instance-you-want-to-move-from.tld
```

Edit the config.
@@ -364,7 +295,7 @@ Edit the config.

```bash
# For a Docker install
make account=username@instance-you-want-to-move-from.tld webfinger
```

Edit the config.
@@ -374,35 +305,11 @@ Edit the config.

And add a reference to your old/existing account in `profile.toml`:

```toml
also_known_as = "https://instance-you-want-to-move-from.tld/users/username"
```

Restart the server, and you should be able to complete the move from your existing account.
Note that if you already have a redirect in place on Mastodon, you may have to remove it before initiating the migration.
## Import follows from Mastodon
You can import the list of follows/following accounts from Mastodon.
It requires downloading the "Follows" CSV file from your Mastodon instance via "Settings" / "Import and export" / "Data export".
Then you need to run the import task:
### Python edition
```bash
# For a Python install
poetry run inv import-mastodon-following-accounts following_accounts.csv
```
### Docker edition
```bash
# For a Docker install
make path=following_accounts.csv import-mastodon-following-accounts
```
## Tasks

### Configuration checking
@@ -558,7 +465,6 @@ make self-destruct

If the server is not (re)starting, you can:

- [Ensure that the configuration is valid](/user_guide.html#configuration-checking).
- [Verify that there are no syntax errors in the custom theme by recompiling the CSS](/user_guide.html#recompiling-css-files).
- Look at the log files (in `data/uvicorn.log`, `data/incoming.log` and `data/outgoing.log`).
- If the CSS is not working, ensure your reverse proxy is serving the static files correctly.

poetry.lock (generated): 3595 lines changed - diff suppressed because it is too large.

View file

@@ -14,7 +14,7 @@ bcrypt = "^3.2.2"
itsdangerous = "^2.1.2"
python-multipart = "^0.0.5"
tomli = "^2.0.1"
httpx = {version = "0.23.0", extras = ["http2"]}
SQLAlchemy = {extras = ["asyncio"], version = "^1.4.39"}
alembic = "^1.8.0"
bleach = "^5.0.0"
@@ -44,7 +44,6 @@ uvicorn = {extras = ["standard"], version = "^0.18.3"}
Brotli = "^1.0.9"
greenlet = "^1.1.3"
mistletoe = "^0.9.0"
Pebble = "^5.0.2"

[tool.poetry.dev-dependencies]
black = "^22.3.0"

View file

@@ -75,10 +75,9 @@ def main() -> None:
        proto = "http"

    print("Note that you can put your icon/avatar in the static/ directory")
    if icon_url := prompt(
        "icon URL: ", default=f'{proto}://{dat["domain"]}/static/nopic.png'
    ):
        dat["icon_url"] = icon_url

    dat["secret"] = os.urandom(16).hex()

    with config_file.open("w") as f:

View file

@@ -2,49 +2,17 @@ import asyncio
import io
import shutil
import tarfile
from collections import namedtuple
from contextlib import contextmanager
from inspect import getfullargspec
from pathlib import Path
from typing import Generator
from typing import Optional
from unittest.mock import patch

import httpx
import invoke  # type: ignore
from invoke import Context  # type: ignore
from invoke import run  # type: ignore
from invoke import task  # type: ignore


def fix_annotations():
    """
    Pyinvoke doesn't accept annotations by default, this fixes that.

    Based on: @zelo's fix in https://github.com/pyinvoke/invoke/pull/606
    Context in: https://github.com/pyinvoke/invoke/issues/357
    Python 3.11: https://github.com/pyinvoke/invoke/issues/833
    """
    ArgSpec = namedtuple("ArgSpec", ["args", "defaults"])

    def patched_inspect_getargspec(func):
        spec = getfullargspec(func)
        return ArgSpec(spec.args, spec.defaults)

    org_task_argspec = invoke.tasks.Task.argspec

    def patched_task_argspec(*args, **kwargs):
        with patch(
            target="inspect.getargspec", new=patched_inspect_getargspec, create=True
        ):
            return org_task_argspec(*args, **kwargs)

    invoke.tasks.Task.argspec = patched_task_argspec


fix_annotations()

@task
def generate_db_migration(ctx, message):
    # type: (Context, str) -> None
@@ -385,40 +353,3 @@ def check_config(ctx):
        sys.exit(1)
    else:
        print("Config is OK")

@task
def import_mastodon_following_accounts(ctx, path):
    # type: (Context, str) -> None
    from loguru import logger

    from app.boxes import _get_following
    from app.boxes import _send_follow
    from app.database import async_session
    from app.utils.mastodon import get_actor_urls_from_following_accounts_csv_file

    async def _import_following() -> int:
        count = 0
        async with async_session() as db_session:
            followings = {
                following.ap_actor_id for following in await _get_following(db_session)
            }
            for (
                handle,
                actor_url,
            ) in await get_actor_urls_from_following_accounts_csv_file(path):
                if actor_url in followings:
                    logger.info(f"Already following {handle}")
                    continue
                logger.info(f"Importing {actor_url=}")

                await _send_follow(db_session, actor_url)
                count += 1

            await db_session.commit()

        return count

    count = asyncio.run(_import_following())
    logger.info(f"Import done, {count} follow requests sent")
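The CSV helper used above is not shown in this diff; here is a minimal, self-contained sketch of what such a helper could look like, assuming Mastodon's `Account address` export column and a plain webfinger lookup (the real `app/utils/mastodon.py` implementation likely differs):

```python
import csv
from pathlib import Path

import httpx


async def get_actor_urls_from_csv(path: str) -> list[tuple[str, str]]:
    """Hypothetical sketch of the CSV-to-actor-URL helper."""
    results: list[tuple[str, str]] = []
    async with httpx.AsyncClient() as client:
        with Path(path).open() as f:
            for row in csv.DictReader(f):
                handle = row["Account address"]  # e.g. "user@mastodon.social"
                _, domain = handle.split("@", 1)
                # Standard webfinger lookup to resolve the handle to an actor URL
                resp = await client.get(
                    f"https://{domain}/.well-known/webfinger",
                    params={"resource": f"acct:{handle}"},
                )
                resp.raise_for_status()
                for link in resp.json().get("links", []):
                    if link.get("rel") == "self":
                        results.append((handle, link["href"]))
                        break
    return results
```

Such a coroutine would be driven the same way as the task above, e.g. with `asyncio.run(get_actor_urls_from_csv("following_accounts.csv"))`.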

View file

@@ -20,16 +20,12 @@ async def test_fetch_actor(async_db_session: AsyncSession, respx_mock) -> None:
        public_key="pk",
    )
    respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
    respx_mock.get(
        "https://example.com/.well-known/webfinger",
        params={"resource": "acct%3Atoto%40example.com"},
    ).mock(return_value=httpx.Response(200, json={"subject": "acct:toto@example.com"}))

    # When fetching this actor for the first time
    saved_actor = await fetch_actor(async_db_session, ra.ap_id)

    # Then it has been fetched and saved in DB
    assert respx.calls.call_count == 2
    assert (
        await async_db_session.execute(select(models.Actor))
    ).scalar_one().ap_id == saved_actor.ap_id
@@ -42,7 +38,7 @@ async def test_fetch_actor(async_db_session: AsyncSession, respx_mock) -> None:
    assert (
        await async_db_session.execute(select(func.count(models.Actor.id)))
    ).scalar_one() == 1
    assert respx.calls.call_count == 2


def test_sqlalchemy_factory(db: Session) -> None:

View file

@@ -1,19 +0,0 @@
from unittest import mock

import pytest

from app.utils.url import is_hostname_blocked


@pytest.mark.parametrize(
    "hostname,should_be_blocked",
    [
        ("example.com", True),
        ("subdomain.example.com", True),
        ("example.xyz", False),
    ],
)
def test_is_hostname_blocked(hostname: str, should_be_blocked: bool) -> None:
    with mock.patch("app.utils.url.BLOCKED_SERVERS", ["example.com"]):
        is_hostname_blocked.cache_clear()
        assert is_hostname_blocked(hostname) is should_be_blocked