Attachments support for the outbox

Thomas Sileo 2022-06-23 21:07:20 +02:00
parent a2cfc36dab
commit 1a88ce7259
11 changed files with 497 additions and 58 deletions
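At a glance, the flow this commit adds: uploaded files are stored and deduplicated by save_upload(), and send_create() turns each (Upload, filename) pair into an ActivityPub "Document" attachment on the outgoing Note. Below is a minimal illustrative sketch of that wiring (the helper function and its literals are not part of the commit, only the imported names are):

# Illustrative sketch only: how the helpers introduced in this commit fit together.
from fastapi import UploadFile

from app import boxes
from app.database import Session
from app.uploads import save_upload


def publish_note_with_attachments(db: Session, files: list[UploadFile], content: str) -> str:
    uploads = []
    for f in files:
        if not f.filename:  # the admin endpoint skips the empty placeholder file
            continue
        upload = save_upload(db, f)  # stores/dedupes the file by its BLAKE2b content hash
        uploads.append((upload, f.filename))
    # send_create() converts each (Upload, filename) pair into a "Document"
    # attachment via upload_to_attachment() and records OutboxObjectAttachment rows.
    return boxes.send_create(db, source=content, uploads=uploads)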

View file

@ -1,8 +1,8 @@
"""Initial migration
-Revision ID: b122c3a69fc9
+Revision ID: 714b4a5307c7
Revises:
-Create Date: 2022-06-22 19:54:19.153320
+Create Date: 2022-06-23 18:42:56.009810
"""
import sqlalchemy as sa
@ -10,7 +10,7 @@ import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
-revision = 'b122c3a69fc9'
+revision = '714b4a5307c7'
down_revision = None
branch_labels = None
depends_on = None
@ -18,7 +18,7 @@ depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
-op.create_table('actors',
+op.create_table('actor',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
@ -28,9 +28,9 @@ def upgrade() -> None:
sa.Column('handle', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
-op.create_index(op.f('ix_actors_ap_id'), 'actors', ['ap_id'], unique=True)
+op.create_index(op.f('ix_actor_ap_id'), 'actor', ['ap_id'], unique=True)
-op.create_index(op.f('ix_actors_handle'), 'actors', ['handle'], unique=False)
+op.create_index(op.f('ix_actor_handle'), 'actor', ['handle'], unique=False)
-op.create_index(op.f('ix_actors_id'), 'actors', ['id'], unique=False)
+op.create_index(op.f('ix_actor_id'), 'actor', ['id'], unique=False)
op.create_table('inbox',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
@ -54,7 +54,7 @@ def upgrade() -> None:
sa.Column('is_bookmarked', sa.Boolean(), nullable=False),
sa.Column('has_replies', sa.Boolean(), nullable=False),
sa.Column('og_meta', sa.JSON(), nullable=True),
-sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ),
+sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
sa.ForeignKeyConstraint(['relates_to_inbox_object_id'], ['inbox.id'], ),
sa.ForeignKeyConstraint(['relates_to_outbox_object_id'], ['outbox.id'], ),
sa.ForeignKeyConstraint(['undone_by_inbox_object_id'], ['inbox.id'], ),
@ -93,20 +93,33 @@ def upgrade() -> None:
op.create_index(op.f('ix_outbox_ap_id'), 'outbox', ['ap_id'], unique=True)
op.create_index(op.f('ix_outbox_id'), 'outbox', ['id'], unique=False)
op.create_index(op.f('ix_outbox_public_id'), 'outbox', ['public_id'], unique=False)
-op.create_table('followers',
+op.create_table('upload',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('content_type', sa.String(), nullable=False),
sa.Column('content_hash', sa.String(), nullable=False),
sa.Column('has_thumbnail', sa.Boolean(), nullable=False),
sa.Column('blurhash', sa.String(), nullable=True),
sa.Column('width', sa.Integer(), nullable=True),
sa.Column('height', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('content_hash')
)
op.create_index(op.f('ix_upload_id'), 'upload', ['id'], unique=False)
op.create_table('follower',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('actor_id', sa.Integer(), nullable=False),
sa.Column('inbox_object_id', sa.Integer(), nullable=False),
sa.Column('ap_actor_id', sa.String(), nullable=False),
-sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ),
+sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('actor_id'),
sa.UniqueConstraint('ap_actor_id')
)
-op.create_index(op.f('ix_followers_id'), 'followers', ['id'], unique=False)
+op.create_index(op.f('ix_follower_id'), 'follower', ['id'], unique=False)
op.create_table('following',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
@ -114,7 +127,7 @@ def upgrade() -> None:
sa.Column('actor_id', sa.Integer(), nullable=False),
sa.Column('outbox_object_id', sa.Integer(), nullable=False),
sa.Column('ap_actor_id', sa.String(), nullable=False),
-sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ),
+sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('actor_id'),
@ -129,13 +142,24 @@ def upgrade() -> None:
sa.Column('actor_id', sa.Integer(), nullable=True),
sa.Column('outbox_object_id', sa.Integer(), nullable=True),
sa.Column('inbox_object_id', sa.Integer(), nullable=True),
-sa.ForeignKeyConstraint(['actor_id'], ['actors.id'], ),
+sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ),
sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_notifications_id'), 'notifications', ['id'], unique=False)
-op.create_table('outgoing_activities',
+op.create_table('outbox_object_attachment',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('filename', sa.String(), nullable=False),
sa.Column('outbox_object_id', sa.Integer(), nullable=False),
sa.Column('upload_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
sa.ForeignKeyConstraint(['upload_id'], ['upload.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_outbox_object_attachment_id'), 'outbox_object_attachment', ['id'], unique=False)
op.create_table('outgoing_activity',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('recipient', sa.String(), nullable=False),
@ -151,8 +175,8 @@ def upgrade() -> None:
sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
sa.PrimaryKeyConstraint('id')
)
-op.create_index(op.f('ix_outgoing_activities_id'), 'outgoing_activities', ['id'], unique=False)
+op.create_index(op.f('ix_outgoing_activity_id'), 'outgoing_activity', ['id'], unique=False)
-op.create_table('tagged_outbox_objects',
+op.create_table('tagged_outbox_object',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('outbox_object_id', sa.Integer(), nullable=False),
sa.Column('tag', sa.String(), nullable=False),
@ -160,24 +184,28 @@ def upgrade() -> None:
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('outbox_object_id', 'tag', name='uix_tagged_object')
)
-op.create_index(op.f('ix_tagged_outbox_objects_id'), 'tagged_outbox_objects', ['id'], unique=False)
+op.create_index(op.f('ix_tagged_outbox_object_id'), 'tagged_outbox_object', ['id'], unique=False)
-op.create_index(op.f('ix_tagged_outbox_objects_tag'), 'tagged_outbox_objects', ['tag'], unique=False)
+op.create_index(op.f('ix_tagged_outbox_object_tag'), 'tagged_outbox_object', ['tag'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
-op.drop_index(op.f('ix_tagged_outbox_objects_tag'), table_name='tagged_outbox_objects')
+op.drop_index(op.f('ix_tagged_outbox_object_tag'), table_name='tagged_outbox_object')
-op.drop_index(op.f('ix_tagged_outbox_objects_id'), table_name='tagged_outbox_objects')
+op.drop_index(op.f('ix_tagged_outbox_object_id'), table_name='tagged_outbox_object')
-op.drop_table('tagged_outbox_objects')
+op.drop_table('tagged_outbox_object')
-op.drop_index(op.f('ix_outgoing_activities_id'), table_name='outgoing_activities')
+op.drop_index(op.f('ix_outgoing_activity_id'), table_name='outgoing_activity')
-op.drop_table('outgoing_activities')
+op.drop_table('outgoing_activity')
op.drop_index(op.f('ix_outbox_object_attachment_id'), table_name='outbox_object_attachment')
op.drop_table('outbox_object_attachment')
op.drop_index(op.f('ix_notifications_id'), table_name='notifications')
op.drop_table('notifications')
op.drop_index(op.f('ix_following_id'), table_name='following')
op.drop_table('following')
-op.drop_index(op.f('ix_followers_id'), table_name='followers')
+op.drop_index(op.f('ix_follower_id'), table_name='follower')
-op.drop_table('followers')
+op.drop_table('follower')
op.drop_index(op.f('ix_upload_id'), table_name='upload')
op.drop_table('upload')
op.drop_index(op.f('ix_outbox_public_id'), table_name='outbox')
op.drop_index(op.f('ix_outbox_id'), table_name='outbox')
op.drop_index(op.f('ix_outbox_ap_id'), table_name='outbox')
@ -185,8 +213,8 @@ def downgrade() -> None:
op.drop_index(op.f('ix_inbox_id'), table_name='inbox')
op.drop_index(op.f('ix_inbox_ap_id'), table_name='inbox')
op.drop_table('inbox')
-op.drop_index(op.f('ix_actors_id'), table_name='actors')
+op.drop_index(op.f('ix_actor_id'), table_name='actor')
-op.drop_index(op.f('ix_actors_handle'), table_name='actors')
+op.drop_index(op.f('ix_actor_handle'), table_name='actor')
-op.drop_index(op.f('ix_actors_ap_id'), table_name='actors')
+op.drop_index(op.f('ix_actor_ap_id'), table_name='actor')
-op.drop_table('actors')
+op.drop_table('actor')
# ### end Alembic commands ###

View file

@ -22,6 +22,7 @@ from app.config import verify_csrf_token
from app.config import verify_password
from app.database import get_db
from app.lookup import lookup
from app.uploads import save_upload
def user_session_or_redirect(
@ -231,7 +232,7 @@ def admin_actions_bookmark(
@router.post("/actions/new")
-async def admin_actions_new(
+def admin_actions_new(
request: Request,
files: list[UploadFile],
content: str = Form(),
@ -240,9 +241,12 @@ async def admin_actions_new(
db: Session = Depends(get_db),
) -> RedirectResponse:
# XXX: for some reason, no files restuls in an empty single file
+uploads = []
if len(files) >= 1 and files[0].filename:
-print("Got files")
-public_id = boxes.send_create(db, content)
+for f in files:
+upload = save_upload(db, f)
+uploads.append((upload, f.filename))
+public_id = boxes.send_create(db, source=content, uploads=uploads)
return RedirectResponse(
request.url_for("outbox_by_public_id", public_id=public_id),
status_code=302,

View file

@ -1,3 +1,4 @@
import base64
import hashlib
from datetime import datetime
from typing import Any
@ -60,21 +61,35 @@ class Object:
return self.ap_object.get("sensitive", False)
@property
-def attachments(self) -> list["Attachment"]:
-attachments = [
-Attachment.parse_obj(obj) for obj in self.ap_object.get("attachment", [])
-]
+def attachments_old(self) -> list["Attachment"]:
+# TODO: set img_src with the proxy URL (proxy_url?)
+attachments = []
+for obj in self.ap_object.get("attachment", []):
+proxied_url = _proxied_url(obj["url"])
+attachments.append(
+Attachment.parse_obj(
+{
+"proxiedUrl": proxied_url,
+"resizedUrl": proxied_url + "/740"
+if obj["mediaType"].startswith("image")
+else None,
+**obj,
+}
+)
+)
# Also add any video Link (for PeerTube compat)
if self.ap_type == "Video":
for link in ap.as_list(self.ap_object.get("url", [])):
if (isinstance(link, dict)) and link.get("type") == "Link":
if link.get("mediaType", "").startswith("video"):
+proxied_url = _proxied_url(link["href"])
attachments.append(
Attachment(
type="Video",
mediaType=link["mediaType"],
url=link["href"],
+proxiedUrl=proxied_url,
)
)
break
@ -137,12 +152,20 @@ class BaseModel(pydantic.BaseModel):
alias_generator = _to_camel
+def _proxied_url(url: str) -> str:
+return "/proxy/media/" + base64.urlsafe_b64encode(url.encode()).decode()
class Attachment(BaseModel):
type: str
media_type: str
name: str | None
url: str
+# Extra fields for the templates
+proxied_url: str
+resized_url: str | None = None
class RemoteObject(Object):
def __init__(self, raw_object: ap.RawObject, actor: Actor | None = None):

View file

@ -22,6 +22,7 @@ from app.config import ID
from app.database import now
from app.process_outgoing_activities import new_outgoing_activity
from app.source import markdownify
+from app.uploads import upload_to_attachment
def allocate_outbox_id() -> str:
@ -214,11 +215,20 @@ def send_undo(db: Session, ap_object_id: str) -> None:
raise ValueError("Should never happen")
-def send_create(db: Session, source: str) -> str:
+def send_create(
+db: Session,
+source: str,
+uploads: list[tuple[models.Upload, str]],
+) -> str:
note_id = allocate_outbox_id()
published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z")
context = f"{ID}/contexts/" + uuid.uuid4().hex
content, tags = markdownify(db, source)
+attachments = []
+for (upload, filename) in uploads:
+attachments.append(upload_to_attachment(upload, filename))
note = {
"@context": ap.AS_CTX,
"type": "Note",
@ -235,6 +245,7 @@ def send_create(db: Session, source: str) -> str:
"summary": None,
"inReplyTo": None,
"sensitive": False,
+"attachment": attachments,
}
outbox_object = save_outbox_object(db, note_id, note, source=source)
if not outbox_object.id:
@ -247,6 +258,13 @@ def send_create(db: Session, source: str) -> str:
outbox_object_id=outbox_object.id,
)
db.add(tagged_object)
for (upload, filename) in uploads:
outbox_object_attachment = models.OutboxObjectAttachment(
filename=filename, outbox_object_id=outbox_object.id, upload_id=upload.id
)
db.add(outbox_object_attachment)
db.commit()
recipients = _compute_recipients(db, note)

View file

@ -3,6 +3,7 @@ import os
import sys
import time
from datetime import datetime
+from io import BytesIO
from typing import Any
from typing import Type
@ -13,10 +14,12 @@ from fastapi import FastAPI
from fastapi import Request
from fastapi import Response
from fastapi.exceptions import HTTPException
+from fastapi.responses import FileResponse
from fastapi.responses import PlainTextResponse
from fastapi.responses import StreamingResponse
from fastapi.staticfiles import StaticFiles
from loguru import logger
+from PIL import Image
from sqlalchemy.orm import Session
from sqlalchemy.orm import joinedload
from starlette.background import BackgroundTask
@ -41,6 +44,7 @@ from app.config import USERNAME
from app.config import is_activitypub_requested
from app.database import get_db
from app.templates import is_current_user_admin
+from app.uploads import UPLOAD_DIR
# TODO(ts):
#
@ -113,6 +117,8 @@ async def add_security_headers(request: Request, call_next):
response.headers["x-xss-protection"] = "1; mode=block"
response.headers["x-frame-options"] = "SAMEORIGIN"
# TODO(ts): disallow inline CSS?
+if DEBUG:
+return response
response.headers["content-security-policy"] = (
"default-src 'self'" + " style-src 'self' 'unsafe-inline';"
)
@ -157,6 +163,11 @@ def index(
outbox_objects = (
db.query(models.OutboxObject)
.options(
joinedload(models.OutboxObject.outbox_object_attachments).options(
joinedload(models.OutboxObjectAttachment.upload)
)
)
.filter(
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.is_deleted.is_(False),
@ -367,6 +378,11 @@ def outbox_by_public_id(
# TODO: ACL?
maybe_object = (
db.query(models.OutboxObject)
.options(
joinedload(models.OutboxObject.outbox_object_attachments).options(
joinedload(models.OutboxObjectAttachment.upload)
)
)
.filter(
models.OutboxObject.public_id == public_id,
# models.OutboxObject.is_deleted.is_(False),
@ -550,6 +566,112 @@ async def serve_proxy_media(request: Request, encoded_url: str) -> StreamingResp
)
@app.get("/proxy/media/{encoded_url}/{size}")
def serve_proxy_media_resized(
request: Request,
encoded_url: str,
size: int,
) -> PlainTextResponse:
if size not in {50, 740}:
raise ValueError("Unsupported size")
# Decode the base64-encoded URL
url = base64.urlsafe_b64decode(encoded_url).decode()
# Request the URL (and filter request headers)
proxy_resp = httpx.get(
url,
headers=[
(k, v)
for (k, v) in request.headers.raw
if k.lower()
not in [b"host", b"cookie", b"x-forwarded-for", b"x-real-ip", b"user-agent"]
]
+ [(b"user-agent", USER_AGENT.encode())],
)
if proxy_resp.status_code != 200:
return PlainTextResponse(
proxy_resp.content,
status_code=proxy_resp.status_code,
)
# Filter the headers
proxy_resp_headers = {
k: v
for (k, v) in proxy_resp.headers.items()
if k.lower()
in [
"content-type",
"etag",
"cache-control",
"expires",
"last-modified",
]
}
try:
out = BytesIO(proxy_resp.content)
i = Image.open(out)
i.thumbnail((size, size))
resized_buf = BytesIO()
i.save(resized_buf, format=i.format)
resized_buf.seek(0)
return PlainTextResponse(
resized_buf.read(),
media_type=i.get_format_mimetype(), # type: ignore
headers=proxy_resp_headers,
)
except Exception:
logger.exception(f"Failed to resize {url} on the fly")
return PlainTextResponse(
proxy_resp.content,
headers=proxy_resp_headers,
)
@app.get("/attachments/{content_hash}/{filename}")
def serve_attachment(
content_hash: str,
filename: str,
db: Session = Depends(get_db),
):
upload = (
db.query(models.Upload)
.filter(
models.Upload.content_hash == content_hash,
)
.one_or_none()
)
if not upload:
raise HTTPException(status_code=404)
return FileResponse(
UPLOAD_DIR / content_hash,
media_type=upload.content_type,
)
@app.get("/attachments/thumbnails/{content_hash}/{filename}")
def serve_attachment_thumbnail(
content_hash: str,
filename: str,
db: Session = Depends(get_db),
):
upload = (
db.query(models.Upload)
.filter(
models.Upload.content_hash == content_hash,
)
.one_or_none()
)
if not upload or not upload.has_thumbnail:
raise HTTPException(status_code=404)
return FileResponse(
UPLOAD_DIR / (content_hash + "_resized"),
media_type=upload.content_type,
)
@app.get("/robots.txt", response_class=PlainTextResponse)
async def robots_file():
return """User-agent: *

View file

@ -17,13 +17,15 @@ from sqlalchemy.orm import relationship
from app import activitypub as ap
from app.actor import LOCAL_ACTOR
from app.actor import Actor as BaseActor
+from app.ap_object import Attachment
from app.ap_object import Object as BaseObject
+from app.config import BASE_URL
from app.database import Base
from app.database import now
class Actor(Base, BaseActor):
-__tablename__ = "actors"
+__tablename__ = "actor"
id = Column(Integer, primary_key=True, index=True)
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
@ -47,7 +49,7 @@ class InboxObject(Base, BaseObject):
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
-actor_id = Column(Integer, ForeignKey("actors.id"), nullable=False)
+actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False)
actor: Mapped[Actor] = relationship(Actor, uselist=False)
server = Column(String, nullable=False)
@ -166,15 +168,48 @@ class OutboxObject(Base, BaseObject):
def actor(self) -> BaseActor:
return LOCAL_ACTOR
outbox_object_attachments: Mapped[list["OutboxObjectAttachment"]] = relationship(
"OutboxObjectAttachment", uselist=True, backref="outbox_object"
)
@property
def attachments(self) -> list[Attachment]:
out = []
for attachment in self.outbox_object_attachments:
url = (
BASE_URL
+ f"/attachments/{attachment.upload.content_hash}/{attachment.filename}"
)
out.append(
Attachment.parse_obj(
{
"type": "Document",
"mediaType": attachment.upload.content_type,
"name": attachment.filename,
"url": url,
"proxiedUrl": url,
"resizedUrl": BASE_URL
+ (
"/attachments/thumbnails/"
f"{attachment.upload.content_hash}"
f"/{attachment.filename}"
)
if attachment.upload.has_thumbnail
else None,
}
)
)
return out
class Follower(Base):
-__tablename__ = "followers"
+__tablename__ = "follower"
id = Column(Integer, primary_key=True, index=True)
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
-actor_id = Column(Integer, ForeignKey("actors.id"), nullable=False, unique=True)
+actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False, unique=True)
actor = relationship(Actor, uselist=False)
inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=False)
@ -190,7 +225,7 @@ class Following(Base):
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
-actor_id = Column(Integer, ForeignKey("actors.id"), nullable=False, unique=True)
+actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False, unique=True)
actor = relationship(Actor, uselist=False)
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
@ -220,7 +255,7 @@ class Notification(Base):
notification_type = Column(Enum(NotificationType), nullable=True)
is_new = Column(Boolean, nullable=False, default=True)
-actor_id = Column(Integer, ForeignKey("actors.id"), nullable=True)
+actor_id = Column(Integer, ForeignKey("actor.id"), nullable=True)
actor = relationship(Actor, uselist=False)
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True)
@ -231,7 +266,7 @@ class Notification(Base):
class OutgoingActivity(Base):
-__tablename__ = "outgoing_activities"
+__tablename__ = "outgoing_activity"
id = Column(Integer, primary_key=True, index=True)
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
@ -253,7 +288,7 @@ class OutgoingActivity(Base):
class TaggedOutboxObject(Base):
-__tablename__ = "tagged_outbox_objects"
+__tablename__ = "tagged_outbox_object"
__table_args__ = (
UniqueConstraint("outbox_object_id", "tag", name="uix_tagged_object"),
)
@ -266,23 +301,35 @@ class TaggedOutboxObject(Base):
tag = Column(String, nullable=False, index=True)
-"""
class Upload(Base):
__tablename__ = "upload"
-filename = Column(String, nullable=False)
-filehash = Column(String, nullable=False)
-filesize = Column(Integer, nullable=False)
+id = Column(Integer, primary_key=True, index=True)
+created_at = Column(DateTime(timezone=True), nullable=False, default=now)
+content_type: Mapped[str] = Column(String, nullable=False)
+content_hash = Column(String, nullable=False, unique=True)
+has_thumbnail = Column(Boolean, nullable=False)
+# Only set for images
+blurhash = Column(String, nullable=True)
+width = Column(Integer, nullable=True)
+height = Column(Integer, nullable=True)
+@property
+def is_image(self) -> bool:
+return self.content_type.startswith("image")
class OutboxObjectAttachment(Base):
__tablename__ = "outbox_object_attachment"
id = Column(Integer, primary_key=True, index=True)
+created_at = Column(DateTime(timezone=True), nullable=False, default=now)
+filename = Column(String, nullable=False)
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
+outbox_object = relationship(OutboxObject, uselist=False)
-upload_id = Column(Integer, ForeignKey("upload.id"))
+upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False)
upload = relationship(Upload, uselist=False)
-"""

View file

@ -17,8 +17,8 @@ from app import models
from app.actor import LOCAL_ACTOR
from app.ap_object import Attachment
from app.boxes import public_outbox_objects_count
+from app.config import BASE_URL
from app.config import DEBUG
-from app.config import DOMAIN
from app.config import VERSION
from app.config import generate_csrf_token
from app.config import session_serializer
@ -40,7 +40,7 @@ def _media_proxy_url(url: str | None) -> str:
if not url:
return "/static/nopic.png"
-if url.startswith(DOMAIN):
+if url.startswith(BASE_URL):
return url
encoded_url = base64.urlsafe_b64encode(url.encode()).decode()

View file

@ -57,7 +57,7 @@
{% set metadata = actors_metadata.get(actor.ap_id) %}
<div style="display: flex;column-gap: 20px;margin:20px 0 10px 0;" class="actor-box">
<div style="flex: 0 0 48px;">
-<img src="{{ actor.icon_url | media_proxy_url }}" style="max-width:45px;">
+<img src="{{ actor.icon_url | media_proxy_url }}/50" style="max-width:45px;">
</div>
<a href="{{ actor.url }}" style="">
<div><strong>{{ actor.name or actor.preferred_username }}</strong></div>
@ -90,7 +90,7 @@
{% if object.ap_type in ["Note", "Article", "Video"] %}
<div class="activity-wrap" id="{{ object.permalink_id }}">
<div class="activity-content">
-<img src="{% if object.actor.icon_url %}{{ object.actor.icon_url | media_proxy_url }}{% else %}/static/nopic.png{% endif %}" alt="" class="actor-icon">
+<img src="{% if object.actor.icon_url %}{{ object.actor.icon_url | media_proxy_url }}/50{% else %}/static/nopic.png{% endif %}" alt="" class="actor-icon">
<div class="activity-header">
<strong>{{ object.actor.name or object.actor.preferred_username }}</strong>
<span>{{ object.actor.handle }}</span>
@ -107,11 +107,12 @@
{{ sensitive_button(object.permalink_id )}}
</div>
{% endif %}
{% if object.attachments and (not object.sensitive or (object.sensitive and request.query_params["show_sensitive"] == object.permalink_id)) %}
<div class="activity-attachment">
{% for attachment in object.attachments %}
{% if attachment.type == "Image" or (attachment | has_media_type("image")) %}
-<img src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} alt="{{ attachment.name }}"{% endif %} class="attachment">
+<img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} alt="{{ attachment.name }}"{% endif %} class="attachment">
{% elif attachment.type == "Video" or (attachment | has_media_type("video")) %}
<video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="attachmeent"></video>
{% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %}

app/uploads.py (new file, +100 lines)
View file

@ -0,0 +1,100 @@
import hashlib
from shutil import COPY_BUFSIZE # type: ignore
import blurhash # type: ignore
from fastapi import UploadFile
from loguru import logger
from PIL import Image
from app import activitypub as ap
from app import models
from app.config import BASE_URL
from app.config import ROOT_DIR
from app.database import Session
UPLOAD_DIR = ROOT_DIR / "data" / "uploads"
def save_upload(db: Session, f: UploadFile) -> models.Upload:
# Compute the hash
h = hashlib.blake2b(digest_size=32)
while True:
buf = f.file.read(COPY_BUFSIZE)
if not buf:
break
h.update(buf)
f.file.seek(0)
content_hash = h.hexdigest()
existing_upload = (
db.query(models.Upload)
.filter(models.Upload.content_hash == content_hash)
.one_or_none()
)
if existing_upload:
logger.info(f"Upload with {content_hash=} already exists")
return existing_upload
logger.info(f"Creating new Upload with {content_hash=}")
dest_filename = UPLOAD_DIR / content_hash
with open(dest_filename, "wb") as dest:
while True:
buf = f.file.read(COPY_BUFSIZE)
if not buf:
break
dest.write(buf)
has_thumbnail = False
image_blurhash = None
width = None
height = None
if f.content_type.startswith("image"):
with open(dest_filename, "rb") as df:
image_blurhash = blurhash.encode(df, x_components=4, y_components=3)
try:
with Image.open(dest_filename) as i:
width, height = i.size
i.thumbnail((740, 740))
i.save(UPLOAD_DIR / f"{content_hash}_resized", format=i.format)
except Exception:
logger.exception(
f"Failed to created thumbnail for {f.filename}/{content_hash}"
)
else:
has_thumbnail = True
logger.info("Thumbnail generated")
new_upload = models.Upload(
content_type=f.content_type,
content_hash=content_hash,
has_thumbnail=has_thumbnail,
blurhash=image_blurhash,
width=width,
height=height,
)
db.add(new_upload)
db.commit()
return new_upload
def upload_to_attachment(upload: models.Upload, filename: str) -> ap.RawObject:
extra_attachment_fields = {}
if upload.blurhash:
extra_attachment_fields.update(
{
"blurhash": upload.blurhash,
"height": upload.height,
"width": upload.width,
}
)
return {
"type": "Document",
"mediaType": upload.content_type,
"name": filename,
"url": BASE_URL + f"/attachments/{upload.content_hash}",
**extra_attachment_fields,
}

poetry.lock (generated, +95 lines)
View file

@ -130,6 +130,22 @@ webencodings = "*"
css = ["tinycss2 (>=1.1.0)"]
dev = ["pip-tools (==6.5.1)", "pytest (==7.1.1)", "flake8 (==4.0.1)", "tox (==3.24.5)", "sphinx (==4.3.2)", "twine (==4.0.0)", "wheel (==0.37.1)", "hashin (==0.17.0)", "black (==22.3.0)", "mypy (==0.942)"]
[[package]]
name = "blurhash-python"
version = "1.1.3"
description = "BlurHash encoder implementation for Python"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
cffi = "*"
Pillow = "*"
six = "*"
[package.extras]
testing = ["pytest"]
[[package]]
name = "boussole"
version = "2.0.0"
@ -554,6 +570,18 @@ category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "pillow"
version = "9.1.1"
description = "Python Imaging Library (Fork)"
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"]
tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "platformdirs"
version = "2.5.2"
@ -905,6 +933,14 @@ category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "types-pillow"
version = "9.0.20"
description = "Typing stubs for Pillow"
category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "types-python-dateutil"
version = "2.8.17"
@ -1010,7 +1046,7 @@ dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
-content-hash = "2559d473ab650fbad970dda28a4f83d8ebd62eef9c780ab39490632ab6a9fa48"
+content-hash = "2ac30190905e28cfb50e57e23142f0508522727ca7eca010904792c549501698"
[metadata.files]
alembic = [
@ -1079,6 +1115,19 @@ bleach = [
{file = "bleach-5.0.0-py3-none-any.whl", hash = "sha256:08a1fe86d253b5c88c92cc3d810fd8048a16d15762e1e5b74d502256e5926aa1"},
{file = "bleach-5.0.0.tar.gz", hash = "sha256:c6d6cc054bdc9c83b48b8083e236e5f00f238428666d2ce2e083eaa5fd568565"},
]
blurhash-python = [
{file = "blurhash-python-1.1.3.tar.gz", hash = "sha256:0008733afb8f797aa84098a28ee2615e42192549b952260ee7dbca7dd40c2335"},
{file = "blurhash_python-1.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:4731358922696ea6e7a34e999eda899d5efe63e3939511de094b21f56ea35f57"},
{file = "blurhash_python-1.1.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e8cbd99cba8a7d8315545688578de98d45681ca83468249799184059cb60058e"},
{file = "blurhash_python-1.1.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:642362b85b49c516e602b898b2538626205cc5b253f190661d115361111cf761"},
{file = "blurhash_python-1.1.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6a72fb51975faf7bdb0f12a3acfe0d63e4833b8dfb36b53327d6d9ab1e02a09"},
{file = "blurhash_python-1.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:301fae02f2c934f127229f22ffad99cdb70c997a939d14c90d778937e68a539c"},
{file = "blurhash_python-1.1.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:845cfb8c34d959f6fb2740cf37e5f8a7991769e061ba88e941765c5ac68e93ae"},
{file = "blurhash_python-1.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:327633a793328f533267593c00183548935db9f8fa88193281b43dac7d4edd4e"},
{file = "blurhash_python-1.1.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fa272461e273021ccbe82716a4090a0f57c7aad77120ad36aca76d945e58fbe7"},
{file = "blurhash_python-1.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:122699e2364c26bf0a89b502ed306785cb26ddfb7642acbc442d61c875749c5a"},
{file = "blurhash_python-1.1.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e2147d8ff5128d7d387c711d1ba70c50595893f14460ac0746c5699df7605d65"},
]
boussole = [
{file = "boussole-2.0.0.tar.gz", hash = "sha256:e4907180698339c778669d71b16a77b8d54c97d54e79d7813de1630a9d091a2f"},
]
@ -1383,6 +1432,46 @@ pathspec = [
{file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
pillow = [
{file = "Pillow-9.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe"},
{file = "Pillow-9.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e"},
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602"},
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530"},
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2"},
{file = "Pillow-9.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a"},
{file = "Pillow-9.1.1-cp310-cp310-win32.whl", hash = "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c"},
{file = "Pillow-9.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108"},
{file = "Pillow-9.1.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b"},
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d"},
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84"},
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4"},
{file = "Pillow-9.1.1-cp37-cp37m-win32.whl", hash = "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578"},
{file = "Pillow-9.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9"},
{file = "Pillow-9.1.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf"},
{file = "Pillow-9.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b"},
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546"},
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f"},
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a"},
{file = "Pillow-9.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1"},
{file = "Pillow-9.1.1-cp38-cp38-win32.whl", hash = "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54"},
{file = "Pillow-9.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf"},
{file = "Pillow-9.1.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92"},
{file = "Pillow-9.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a"},
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251"},
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d"},
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1"},
{file = "Pillow-9.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601"},
{file = "Pillow-9.1.1-cp39-cp39-win32.whl", hash = "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45"},
{file = "Pillow-9.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c"},
{file = "Pillow-9.1.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6"},
{file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098"},
{file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8"},
{file = "Pillow-9.1.1.tar.gz", hash = "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0"},
]
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
{file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
@ -1632,6 +1721,10 @@ types-markdown = [
{file = "types-Markdown-3.3.28.tar.gz", hash = "sha256:733ba19dad58d5dca1206390f55fa285573535b7c369b94dd367bbc34bf7e4de"},
{file = "types_Markdown-3.3.28-py3-none-any.whl", hash = "sha256:7868cfa3f8a2304d9ecea2ca9b02c14fcb2e34bd26fdbaf01d8c4d362a85d345"},
]
types-pillow = [
{file = "types-Pillow-9.0.20.tar.gz", hash = "sha256:82dea83c21c665a334b0f88b78a7d6a62503c96d0777c8a2327701d4ca7bf421"},
{file = "types_Pillow-9.0.20-py3-none-any.whl", hash = "sha256:fa10b09284a8e0058484d747b8b3e75301250057390089f4c5a695c359fd3966"},
]
types-python-dateutil = [
{file = "types-python-dateutil-2.8.17.tar.gz", hash = "sha256:6c54265a221681dd87f61df6743bd5eab060cf1b4086ff65c1a8fd763ed6370e"},
{file = "types_python_dateutil-2.8.17-py3-none-any.whl", hash = "sha256:0be7435b4d382d1cd00b8c55a8a90f4e515aaad8a96f8f0bc20c22df046792e5"},

View file

@ -32,6 +32,8 @@ Pygments = "^2.12.0"
types-python-dateutil = "^2.8.17"
loguru = "^0.6.0"
mdx-linkify = "^2.1"
Pillow = "^9.1.1"
blurhash-python = "^1.1.3"
[tool.poetry.dev-dependencies]
black = "^22.3.0"
@ -48,6 +50,7 @@ types-bleach = "^5.0.2"
types-Markdown = "^3.3.28"
factory-boy = "^3.2.1"
pytest-asyncio = "^0.18.3"
types-Pillow = "^9.0.20"
[build-system]
requires = ["poetry-core>=1.0.0"]