Compare commits
No commits in common. "master" and "v2" have entirely different histories.
232 changed files with 22959 additions and 11585 deletions
.build.yml (new file, 22 lines)
@@ -0,0 +1,22 @@
image: ubuntu/22.10
sources:
  - https://git.sr.ht/~tsileo/microblog.pub
packages:
  - python3
  - python3-dev
  - libxml2-dev
  - libxslt-dev
  - gcc
  - libjpeg-dev
  - zlib1g-dev
  - libffi-dev
  - python3.10-venv
tasks:
  - setup: |
      curl -sSL https://install.python-poetry.org | python3 -
  - tests: |
      export PATH="/home/build/.local/bin:$PATH"
      cd microblog.pub
      poetry install --no-interaction
      poetry run inv lint
      poetry run inv tests
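The manifest drives everything through Poetry and the project's invoke tasks. The same checks can be reproduced locally; a minimal sketch, assuming Poetry and the system packages listed above are already installed:

```shell
# Install the project with Poetry, then run the same invoke tasks the CI manifest runs
git clone https://git.sr.ht/~tsileo/microblog.pub
cd microblog.pub
poetry install --no-interaction
poetry run inv lint
poetry run inv tests
```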
.dockerignore
@@ -1,4 +1,9 @@
-__pycache__/
-data/
-data2/
-tests/
+**/.git
+data/**
+**/__pycache__
+*.pyc
+*.swp
+docs/
+.mypy_cache
+.pytest_cache
+docker-compose.yml
.drone.yml (deleted, 95 lines)
---
priviliged: true # Needed for Docker in Docker

kind: pipeline
name: default

steps:
  # Run Mypy/Flake8/black linters before any further work is done
  - name: lint
    image: python:3
    commands:
      - pip install -U pip
      - pip install mypy flake8 black
      - black --check .
      - flake8
      # - mypy --ignore-missing-imports .
      - mypy --ignore-missing-imports setup_wizard

  # Build the container images we need for the test suite
  - name: build_containers
    image: docker:dind
    environment:
      DOCKER_HOST: tcp://docker:2375
    commands:
      - apk update && apk upgrade && apk add --no-cache bash git openssh curl
      - docker network create fede
      - docker pull mongo
      - docker pull poussetaches/poussetaches
      - docker build . -t microblogpub:latest

  # Run poussetaches (will be shared by the two microblog.pub instances) "in the background"
  - name: run_poussetaches
    image: docker:dind
    detach: true
    environment:
      DOCKER_HOST: tcp://docker:2375
      POUSSETACHES_AUTH_KEY: lol
    commands:
      - docker run -p 7991:7991 --net fede -e POUSSETACHES_AUTH_KEY --name poussetaches poussetaches/poussetaches

  # Run MongoDB (will be shared by the two microblog.pub instances) "in the background"
  - name: run_mongodb
    image: docker:dind
    detach: true
    environment:
      DOCKER_HOST: tcp://docker:2375
    commands:
      - docker run -p 27017:27017 --net fede --name mongo mongo

  # Run a first microblog.pub instance "in the background"
  - name: microblogpub_instance1
    image: docker:dind
    detach: true
    environment:
      DOCKER_HOST: tcp://docker:2375
      MICROBLOGPUB_DEBUG: 1
      MICROBLOGPUB_POUSSETACHES_HOST: http://poussetaches:7991
      MICROBLOGPUB_INTERNAL_HOST: http://instance1_web:5005
      MICROBLOGPUB_MONGODB_HOST: mongo:27017
      POUSSETACHES_AUTH_KEY: lol
    commands:
      - sleep 5
      - 'docker run -p 5006:5005 --net fede -v "`pwd`/tests/fixtures/instance1/config:/app/config" -e MICROBLOGPUB_DEBUG -e MICROBLOGPUB_INTERNAL_HOST -e MICROBLOGPUB_MONGODB_HOST -e MICROBLOGPUB_POUSSETACHES_HOST -e POUSSETACHES_AUTH_KEY --name instance1_web microblogpub'

  # Run the second microblog.pub instance "in the background"
  - name: microblogpub_instance2
    image: docker:dind
    detach: true
    environment:
      DOCKER_HOST: tcp://docker:2375
      MICROBLOGPUB_DEBUG: 1
      MICROBLOGPUB_POUSSETACHES_HOST: http://poussetaches:7991
      MICROBLOGPUB_INTERNAL_HOST: http://instance2_web:5005
      MICROBLOGPUB_MONGODB_HOST: mongo:27017
      POUSSETACHES_AUTH_KEY: lol
    commands:
      - 'docker run -p 5007:5005 --net fede -v "`pwd`/tests/fixtures/instance2/config:/app/config" -e MICROBLOGPUB_DEBUG -e MICROBLOGPUB_INTERNAL_HOST -e MICROBLOGPUB_MONGODB_HOST -e MICROBLOGPUB_POUSSETACHES_HOST -e POUSSETACHES_AUTH_KEY --name instance2_web microblogpub'

  # Run some tests against the two instances to ensure federation is working
  - name: federation_test
    image: python:3
    commands:
      - pip install -U pip
      - pip install -r dev-requirements.txt
      # Federation tests (with two local instances)
      - python -m pytest -v -s --ignore data -k federatio

# Setup the services needed to do some "Docker in Docker" (or dind)
services:
  - name: docker
    image: docker:dind
    privileged: true

---
kind: signature
hmac: 9fc3e2eb3b237ab868843f94221124cdc397c454de798b9bcc43c8896615826a

...
.env (deleted, 3 lines)
WEB_PORT=5005
CONFIG_DIR=./config
DATA_DIR=./data
.flake8 (new file, 4 lines)
[flake8]
max-line-length = 88
extend-ignore = E203
exclude = alembic/versions
.gitignore (14 lines changed)
@@ -1,8 +1,8 @@
-*.sw[op]
-key_*.pem
-data/*
-config/*
-static/media/*
-
-.mypy_cache/
+*.db
 __pycache__/
+.mypy_cache/
+.pytest_cache/
+docs/dist/
+requirements.txt
+app/_version.py
+app/static/favicon.ico
(isort configuration file, deleted, 3 lines)
[settings]
line_length=120
force_single_line=true
AUTHORS (new file, 11 lines)
Thomas Sileo <t@a4.io>
Kevin Wallace <doof@doof.net>
Miguel Jacq <mig@mig5.net>
Alexey Shpakovsky <alexey@shpakovsky.ru>
Josh Washburne <josh@jodh.us>
João Costa <jdpc557@gmail.com>
Sam <samr1.dev@pm.me>
Ash McAllan <acegiak@gmail.com>
Cassio Zen <cassio@hey.com>
Cocoa <momijizukamori@gmail.com>
Jane <jane@janeirl.dev>
Dockerfile (38 lines changed)
@@ -1,7 +1,33 @@
-FROM python:3.7
-COPY requirements.txt /app/requirements.txt
+FROM python:3.11-slim as python-base
+ENV PYTHONUNBUFFERED=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    POETRY_HOME="/opt/poetry" \
+    POETRY_VIRTUALENVS_IN_PROJECT=true \
+    POETRY_NO_INTERACTION=1 \
+    PYSETUP_PATH="/opt/venv" \
+    VENV_PATH="/opt/venv/.venv"
+ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH"
+
+FROM python-base as builder-base
+RUN apt-get update
+RUN apt-get install -y --no-install-recommends curl build-essential gcc libffi-dev libssl-dev libxml2-dev libxslt1-dev zlib1g-dev libxslt-dev gcc libjpeg-dev zlib1g-dev libwebp-dev
+# rustc is needed to compile Python packages
+RUN curl https://sh.rustup.rs -sSf | bash -s -- -y
+ENV PATH="/root/.cargo/bin:${PATH}"
+RUN curl -sSL https://install.python-poetry.org | python3 -
+WORKDIR $PYSETUP_PATH
+COPY poetry.lock pyproject.toml ./
+RUN poetry install --only main
+
+FROM python-base as production
+RUN apt-get update
+RUN apt-get install -y --no-install-recommends libjpeg-dev libxslt1-dev libxml2-dev libxslt-dev
+RUN groupadd --gid 1000 microblogpub \
+    && useradd --uid 1000 --gid microblogpub --shell /bin/bash microblogpub
+COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH
+COPY . /app/
+RUN chown -R 1000:1000 /app
+USER microblogpub
 WORKDIR /app
-RUN pip install -r requirements.txt
-ADD . /app
-ENV FLASK_APP=app.py
-CMD ["./run.sh"]
+EXPOSE 8000
+CMD ["./misc/docker_start.sh"]
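The new image is built in stages: a builder stage installs Poetry and the dependencies into /opt/venv, and the production stage copies that virtualenv in and runs as an unprivileged user. A minimal sketch of building and starting it by hand follows; the port mapping and the data volume are assumptions based on EXPOSE 8000 and the Makefile targets, the project's documented deployment method remains the supported entry point:

```shell
# Build the image (same tag the Makefile's `build` target uses)
docker build -t microblogpub/microblogpub .

# Start the container, exposing the declared port and keeping
# the instance data in ./data on the host
docker run --rm -p 8000:8000 --volume `pwd`/data:/app/data microblogpub/microblogpub
```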
LICENSE (4 lines changed)
@@ -629,8 +629,8 @@ to attach them to the start of each source file to most effectively
 state the exclusion of warranty; and each file should have at least
 the "copyright" line and a pointer to where the full notice is found.
 
-<one line to give the program's name and a brief idea of what it does.>
-Copyright (C) <year> <name of author>
+Microblog.pub, an ActivityPub powered microblog software
+Copyright (C) 2022 Thomas Sileo
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License as published by
Makefile (80 lines changed)
@@ -1,55 +1,47 @@
 SHELL := /bin/bash
-PYTHON=python
-SETUP_WIZARD_IMAGE=microblogpub-setup-wizard:latest
 PWD=$(shell pwd)
 
-# Build the config (will error if an existing config/me.yml is found) via a Docker container
+.PHONY: build
+build:
+	docker build -t microblogpub/microblogpub .
+
 .PHONY: config
 config:
-	# Build the container for the setup wizard on-the-fly
-	cd setup_wizard && docker build . -t $(SETUP_WIZARD_IMAGE)
 	# Run and remove instantly
-	-docker run -e MICROBLOGPUB_WIZARD_PROJECT_NAME --rm -it --volume $(PWD):/app/out $(SETUP_WIZARD_IMAGE)
-	# Finally, remove the tagged image
-	docker rmi $(SETUP_WIZARD_IMAGE)
+	-docker run --rm -it --volume `pwd`/data:/app/data microblogpub/microblogpub inv configuration-wizard
 
-# Reload the federation test instances (for local dev)
-.PHONY: reload-fed
-reload-fed:
-	docker build . -t microblogpub:latest
-	docker-compose -p instance2 -f docker-compose-tests.yml stop
-	docker-compose -p instance1 -f docker-compose-tests.yml stop
-	WEB_PORT=5006 CONFIG_DIR=./tests/fixtures/instance1/config docker-compose -p instance1 -f docker-compose-tests.yml up -d --force-recreate --build
-	WEB_PORT=5007 CONFIG_DIR=./tests/fixtures/instance2/config docker-compose -p instance2 -f docker-compose-tests.yml up -d --force-recreate --build
+.PHONY: update
+update:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv update --no-update-deps
 
-# Reload the local dev instance
-.PHONY: reload-dev
-reload-dev:
-	docker build . -t microblogpub:latest
-	docker-compose -f docker-compose-dev.yml up -d --force-recreate
+.PHONY: prune-old-data
+prune-old-data:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv prune-old-data
 
-# Build the microblogpub Docker image
-.PHONY: microblogpub
-microblogpub:
-	# Update microblog.pub
-	git pull
-	# Rebuild the Docker image
-	docker build . --no-cache -t microblogpub:latest
+.PHONY: webfinger
+webfinger:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv webfinger $(account)
 
-.PHONY: css
-css:
-	# Download pure.css if needed
-	if [[ ! -f static/pure.css ]]; then curl https://unpkg.com/purecss@1.0.1/build/pure-min.css > static/pure.css; fi
-	# Download the emojis from twemoji if needded
-	if [[ ! -d static/twemoji ]]; then wget https://github.com/twitter/twemoji/archive/v12.1.2.tar.gz && tar xvzf v12.1.2.tar.gz && mv twemoji-12.1.2/assets/svg static/twemoji && rm -rf twemoji-12.1.2 && rm -f v12.1.2.tar.gz; fi
+.PHONY: move-to
+move-to:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv move-to $(account)
 
-# Run the docker-compose project locally (will perform a update if the project is already running)
-.PHONY: run
-run: microblogpub css
-	# (poussetaches and microblogpub Docker image will updated)
-	# Update MongoDB
-	docker pull mongo:3
-	docker pull poussetaches/poussetaches
-	# Restart the project
-	docker-compose stop
-	docker-compose up -d --force-recreate --build
+.PHONY: self-destruct
+self-destruct:
+	-docker run --rm --it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct
+
+.PHONY: reset-password
+reset-password:
+	-docker run --rm -it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv reset-password
+
+.PHONY: check-config
+check-config:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv check-config
+
+.PHONY: compile-scss
+compile-scss:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv compile-scss
+
+.PHONY: import-mastodon-following-accounts
+import-mastodon-following-accounts:
+	-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv import-mastodon-following-accounts $(path)
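Every v2 target is now a thin wrapper that runs an invoke task inside the microblogpub/microblogpub container with ./data (and ./app/static) mounted in. A sketch of typical first-run usage based on the targets above; the account value is a hypothetical example:

```shell
# Build the Docker image
make build

# Run the interactive configuration wizard (writes the config under ./data)
make config

# Apply an update (wraps `inv update --no-update-deps`)
make update

# Tasks that take arguments receive them as make variables
make webfinger account=someone@example.com
```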
README.md (161 lines changed)
@@ -1,124 +1,67 @@
 # microblog.pub
 
-<p align="center">
-  <img
-    src="https://sos-ch-dk-2.exo.io/microblogpub/microblobpub.png"
-    width="200" height="200" border="0" alt="microblog.pub">
-</p>
-<p align="center">A self-hosted, single-user, <a href="https://activitypub.rocks">ActivityPub</a> powered microblog.</p>
-<p align="center">
-  <a href="https://d.a4.io/tsileo/microblog.pub"><img src="https://d.a4.io/api/badges/tsileo/microblog.pub/status.svg" alt="Build Status"></a>
-  <a href="https://matrix.to/#/#microblog.pub:matrix.org"><img src="https://img.shields.io/badge/matrix-%23microblog.pub-blue.svg" alt="#microblog.pub on Matrix"></a>
-  <a href="https://github.com/tsileo/microblog.pub/blob/master/LICENSE"><img src="https://img.shields.io/badge/license-AGPL_3.0-blue.svg?style=flat" alt="License"></a>
-  <a href="https://github.com/ambv/black"><img alt="Code style: black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a>
-</p>
-
-**Still in early development/I do not recommend to run an instance yet.**
+A self-hosted, single-user, ActivityPub powered microblog.
+
+[![builds.sr.ht status](https://builds.sr.ht/~tsileo/microblog.pub.svg)](https://builds.sr.ht/~tsileo/microblog.pub?)
+[![AGPL 3.0](https://img.shields.io/badge/license-AGPL_3.0-blue.svg?style=flat)](https://git.sr.ht/~tsileo/microblog.pub/tree/v2/item/LICENSE)
+
+Instances in the wild:
+
+- [microblog.pub](https://microblog.pub/) (follow to get updated about the project)
+- [hexa.ninja](https://hexa.ninja) (theme customization example)
+- [testing.microblog.pub](https://testing.microblog.pub/)
+- [Irish Left Archive](https://posts.leftarchive.ie/) (another theme customization example)
+
+There are still some rough edges, but the server is mostly functional.
 
 ## Features
 
-- Implements a basic [ActivityPub](https://activitypub.rocks/) server (with federation)
-- S2S (Server to Server) and C2S (Client to Server) protocols
-- Compatible with [Mastodon](https://joinmastodon.org/) and others ([Pleroma](https://pleroma.social/), Misskey, Plume, PixelFed, Hubzilla...)
-- Exposes your outbox as a basic microblog
-- Support all content types from the Fediverse (`Note`, `Article`, `Page`, `Video`, `Image`, `Question`...)
-- Markdown support
-- Server-side code syntax highlighting
-- Comes with an admin UI with notifications and the stream of people you follow
-- Private "bookmark" support
-- List support
-- Allows you to attach files to your notes
-- Custom emojis support
-- Cares about your privacy
-- The image upload endpoint strips EXIF meta data before storing the file
-- Every attachment/media is cached (or proxied) by the server
-- No JavaScript, **that's it**. Even the admin UI is pure HTML/CSS
-- (well except for the Emoji picker within the admin, but it's only few line of hand-written JavaScript)
-- Easy to customize (the theme is written Sass)
-- mobile-friendly theme
-- with dark and light version
+- Implements the [ActivityPub](https://activitypub.rocks/) server to server protocol
+- Federate with all the other popular ActivityPub servers like Pleroma, PixelFed, PeerTube, Mastodon...
+- Consume most of the content types available (notes, articles, videos, pictures...)
+- Exposes your ActivityPub profile as a minimalist microblog
+- Author notes in Markdown, with code highlighting support
+- Dedicated section for articles/blog posts (enabled when the first article is posted)
+- Lightweight
+- Uses SQLite, and Python 3.10+
+- Can be deployed on small VPS
+- Privacy-aware
+- EXIF metadata (like GPS location) are stripped before storage
+- Every media is proxied through the server
+- Strict access control for your outbox enforced via HTTP signature
+- **No** Javascript
+- The UI is pure HTML/CSS
+- Except tiny bits of hand-written JS in the note composer to insert emoji and add alt text to images
 - IndieWeb citizen
-- Microformats aware (exports `h-feed`, `h-entry`, `h-cards`, ...)
-- Export a feed in the HTML that is WebSub compatible
-- Partial [Micropub](https://www.w3.org/TR/micropub/) support ([implementation report](https://micropub.rocks/implementation-reports/servers/416/s0BDEXZiX805btoa47sz))
-- Implements [IndieAuth](https://indieauth.spec.indieweb.org/) endpoints (authorization and token endpoint)
-- You can use your ActivityPub identity to login to other websites/app (with U2F support)
-- Send [Webmentions](https://www.w3.org/TR/webmention/) to linked website (only for public notes)
-- Exports RSS/Atom/[JSON](https://jsonfeed.org/) feeds
-- You stream/timeline is also available in an (authenticated) JSON feed
-- Comes with a tiny HTTP API to help posting new content and and read your inbox/notifications
-- Deployable with Docker (Docker compose for everything: dev, test and deployment)
-- Focused on testing
-- Tested against the [official ActivityPub test suite](https://test.activitypub.rocks/), see [the results](https://activitypub.rocks/implementation-report/)
-- [CI runs "federation" tests against two instances](https://d.a4.io/tsileo/microblog.pub)
-- Project is running 2 up-to-date instances ([here](https://microblog.pub) and [there](https://a4.io))
-- Manually tested against other major platforms
+- [IndieAuth](https://www.w3.org/TR/indieauth/) support (OAuth2 extension)
+- [Microformats](http://microformats.org/wiki/Main_Page) everywhere
+- [Micropub](https://www.w3.org/TR/micropub/) support
+- Sends and processes [Webmentions](https://www.w3.org/TR/webmention/)
+- RSS/Atom/[JSON](https://www.jsonfeed.org/) feed
+- Easy to backup
+- Everything is stored in the `data/` directory: config, uploads, secrets and the SQLite database.
 
-## User Guide
+## Getting started
 
-Remember that _microblog.pub_ is still in early development.
+Check out the [online documentation](https://docs.microblog.pub).
 
-The easiest and recommended way to run _microblog.pub_ in production is to use the provided docker-compose config.
+## Credits
 
-First install [Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/install/).
-Python is not needed on the host system.
+- Emoji from [Twemoji](https://twemoji.twitter.com/)
+- Awesome custom goose emoji from [@pamela@bsd.network](https://bsd.network/@pamela)
 
-Note that all the generated data (config included) will be stored on the host (i.e. not only in Docker) in `config/` and `data/`.
+## Contributing
 
-### Installation
+All the development takes place on [sourcehut](https://sr.ht/~tsileo/microblog.pub/), GitHub is only used as a mirror:
 
-```shell
-$ git clone https://github.com/tsileo/microblog.pub
-$ cd microblog.pub
-$ make config
-```
+- [Project](https://sr.ht/~tsileo/microblog.pub/)
+- [Issue tracker](https://todo.sr.ht/~tsileo/microblog.pub)
+- [Mailing list](https://sr.ht/~tsileo/microblog.pub/lists)
 
-Once the initial configuration is done, you can still tweak the config by editing `config/me.yml` directly.
+Contributions are welcomed, check out the [contributing section of the documentation](https://docs.microblog.pub/developer_guide.html#contributing) for more details.
 
-### Deployment
+## License
 
-To spawn the docker-compose project (running this command will also update _microblog.pub_ to latest and restart everything if it's already running):
+The project is licensed under the GNU AGPL v3 LICENSE (see the LICENSE file).
-
-```shell
-$ make run
-```
-
-By default, the server will listen on `localhost:5005` (http://localhost:5005 should work if you're running locally).
-
-For production, you need to setup a reverse proxy (nginx, caddy) to forward your domain to the local server
-(and check [certbot](https://certbot.eff.org/) for getting a free TLS certificate).
-
-### HTTP API
-
-See [docs/api.md](docs/api.md) for the internal HTTP API documentation.
-
-### Backup
-
-The easiest way to backup all of your data is to backup the `microblog.pub/` directory directly (that's what I do and I have been able to restore super easily).
-It should be safe to copy the directory while the Docker compose project is running.
-
-## Development
-
-The project requires Python3.7+.
-
-The most convenient way to hack on _microblog.pub_ is to run the Python server on the host directly, and evetything else in Docker.
-
-```shell
-# One-time setup (in a new virtual env)
-$ pip install -r requirements.txt
-# Start MongoDB and poussetaches
-$ make poussetaches
-$ env POUSSETACHES_AUTH_KEY="<secret-key>" docker-compose -f docker-compose-dev.yml up -d
-# Run the server locally
-$ FLASK_DEBUG=1 MICROBLOGPUB_DEBUG=1 FLASK_APP=app.py POUSSETACHES_AUTH_KEY="<secret-key>" flask run -p 5005 --with-threads
-```
-
-## Contributions
-
-Contributions/PRs are welcome, please open an issue to start a discussion before your start any work.
alembic.ini (new file, 105 lines)
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
timezone = UTC

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url =


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
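This is the stock Alembic configuration with UTC, date-prefixed revision filenames; sqlalchemy.url is left empty and injected at runtime by alembic/env.py below. A sketch of the usual workflow against it, assuming the project's dependencies are installed via Poetry:

```shell
# Apply all pending migrations to the SQLite database
poetry run alembic upgrade head

# After changing the SQLAlchemy models, autogenerate a new revision
# (the file lands under alembic/versions/ per script_location/file_template)
poetry run alembic revision --autogenerate -m "Describe the change"
```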
alembic/README (new file, 1 line)
Generic single-database configuration.
alembic/env.py (new file, 86 lines)
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

import app.models  # noqa: F401  # Register models
from alembic import context
from app.database import SQLALCHEMY_DATABASE_URL
from app.database import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

config.set_main_option("sqlalchemy.url", SQLALCHEMY_DATABASE_URL)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        render_as_batch=True,
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
alembic/script.py.mako (new file, 24 lines)
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
Alembic revision f5717d82b3ff, "Initial migration" (new file, 393 lines)
"""Initial migration

Revision ID: f5717d82b3ff
Revises:
Create Date: 2022-07-28 17:25:31.081326+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = 'f5717d82b3ff'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('actor',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('ap_id', sa.String(), nullable=False),
    sa.Column('ap_actor', sa.JSON(), nullable=False),
    sa.Column('ap_type', sa.String(), nullable=False),
    sa.Column('handle', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_actor_ap_id'), ['ap_id'], unique=True)
        batch_op.create_index(batch_op.f('ix_actor_handle'), ['handle'], unique=False)
        batch_op.create_index(batch_op.f('ix_actor_id'), ['id'], unique=False)

    op.create_table('inbox',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('actor_id', sa.Integer(), nullable=False),
    sa.Column('server', sa.String(), nullable=False),
    sa.Column('is_hidden_from_stream', sa.Boolean(), nullable=False),
    sa.Column('ap_actor_id', sa.String(), nullable=False),
    sa.Column('ap_type', sa.String(), nullable=False),
    sa.Column('ap_id', sa.String(), nullable=False),
    sa.Column('ap_context', sa.String(), nullable=True),
    sa.Column('ap_published_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('ap_object', sa.JSON(), nullable=False),
    sa.Column('activity_object_ap_id', sa.String(), nullable=True),
    sa.Column('visibility', sa.Enum('PUBLIC', 'UNLISTED', 'FOLLOWERS_ONLY', 'DIRECT', name='visibilityenum'), nullable=False),
    sa.Column('relates_to_inbox_object_id', sa.Integer(), nullable=True),
    sa.Column('relates_to_outbox_object_id', sa.Integer(), nullable=True),
    sa.Column('undone_by_inbox_object_id', sa.Integer(), nullable=True),
    sa.Column('liked_via_outbox_object_ap_id', sa.String(), nullable=True),
    sa.Column('announced_via_outbox_object_ap_id', sa.String(), nullable=True),
    sa.Column('voted_for_answers', sa.JSON(), nullable=True),
    sa.Column('is_bookmarked', sa.Boolean(), nullable=False),
    sa.Column('is_deleted', sa.Boolean(), nullable=False),
    sa.Column('is_transient', sa.Boolean(), server_default='0', nullable=False),
    sa.Column('replies_count', sa.Integer(), nullable=False),
    sa.Column('og_meta', sa.JSON(), nullable=True),
    sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
    sa.ForeignKeyConstraint(['relates_to_inbox_object_id'], ['inbox.id'], ),
    sa.ForeignKeyConstraint(['relates_to_outbox_object_id'], ['outbox.id'], ),
    sa.ForeignKeyConstraint(['undone_by_inbox_object_id'], ['inbox.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('inbox', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_inbox_activity_object_ap_id'), ['activity_object_ap_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_inbox_ap_id'), ['ap_id'], unique=True)
        batch_op.create_index(batch_op.f('ix_inbox_ap_type'), ['ap_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_inbox_id'), ['id'], unique=False)

    op.create_table('incoming_activity',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('webmention_source', sa.String(), nullable=True),
    sa.Column('sent_by_ap_actor_id', sa.String(), nullable=True),
    sa.Column('ap_id', sa.String(), nullable=True),
    sa.Column('ap_object', sa.JSON(), nullable=True),
    sa.Column('tries', sa.Integer(), nullable=False),
    sa.Column('next_try', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_try', sa.DateTime(timezone=True), nullable=True),
    sa.Column('is_processed', sa.Boolean(), nullable=False),
    sa.Column('is_errored', sa.Boolean(), nullable=False),
    sa.Column('error', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('incoming_activity', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_incoming_activity_ap_id'), ['ap_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_incoming_activity_id'), ['id'], unique=False)

    op.create_table('indieauth_authorization_request',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('code', sa.String(), nullable=False),
    sa.Column('scope', sa.String(), nullable=False),
    sa.Column('redirect_uri', sa.String(), nullable=False),
    sa.Column('client_id', sa.String(), nullable=False),
    sa.Column('code_challenge', sa.String(), nullable=True),
    sa.Column('code_challenge_method', sa.String(), nullable=True),
    sa.Column('is_used', sa.Boolean(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('indieauth_authorization_request', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_indieauth_authorization_request_code'), ['code'], unique=True)
        batch_op.create_index(batch_op.f('ix_indieauth_authorization_request_id'), ['id'], unique=False)

    op.create_table('outbox',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('is_hidden_from_homepage', sa.Boolean(), nullable=False),
    sa.Column('public_id', sa.String(), nullable=False),
    sa.Column('ap_type', sa.String(), nullable=False),
    sa.Column('ap_id', sa.String(), nullable=False),
    sa.Column('ap_context', sa.String(), nullable=True),
    sa.Column('ap_object', sa.JSON(), nullable=False),
    sa.Column('activity_object_ap_id', sa.String(), nullable=True),
    sa.Column('source', sa.String(), nullable=True),
    sa.Column('revisions', sa.JSON(), nullable=True),
    sa.Column('ap_published_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('visibility', sa.Enum('PUBLIC', 'UNLISTED', 'FOLLOWERS_ONLY', 'DIRECT', name='visibilityenum'), nullable=False),
    sa.Column('likes_count', sa.Integer(), nullable=False),
    sa.Column('announces_count', sa.Integer(), nullable=False),
    sa.Column('replies_count', sa.Integer(), nullable=False),
    sa.Column('webmentions_count', sa.Integer(), server_default='0', nullable=False),
    sa.Column('og_meta', sa.JSON(), nullable=True),
    sa.Column('is_pinned', sa.Boolean(), nullable=False),
    sa.Column('is_transient', sa.Boolean(), server_default='0', nullable=False),
    sa.Column('is_deleted', sa.Boolean(), nullable=False),
    sa.Column('relates_to_inbox_object_id', sa.Integer(), nullable=True),
    sa.Column('relates_to_outbox_object_id', sa.Integer(), nullable=True),
    sa.Column('relates_to_actor_id', sa.Integer(), nullable=True),
    sa.Column('undone_by_outbox_object_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['relates_to_actor_id'], ['actor.id'], ),
    sa.ForeignKeyConstraint(['relates_to_inbox_object_id'], ['inbox.id'], ),
    sa.ForeignKeyConstraint(['relates_to_outbox_object_id'], ['outbox.id'], ),
    sa.ForeignKeyConstraint(['undone_by_outbox_object_id'], ['outbox.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('outbox', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_outbox_activity_object_ap_id'), ['activity_object_ap_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_outbox_ap_id'), ['ap_id'], unique=True)
        batch_op.create_index(batch_op.f('ix_outbox_ap_type'), ['ap_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_outbox_id'), ['id'], unique=False)
        batch_op.create_index(batch_op.f('ix_outbox_public_id'), ['public_id'], unique=False)

    op.create_table('upload',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('content_type', sa.String(), nullable=False),
    sa.Column('content_hash', sa.String(), nullable=False),
    sa.Column('has_thumbnail', sa.Boolean(), nullable=False),
    sa.Column('blurhash', sa.String(), nullable=True),
    sa.Column('width', sa.Integer(), nullable=True),
    sa.Column('height', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('content_hash')
    )
    with op.batch_alter_table('upload', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_upload_id'), ['id'], unique=False)

    op.create_table('follower',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('actor_id', sa.Integer(), nullable=False),
    sa.Column('inbox_object_id', sa.Integer(), nullable=False),
    sa.Column('ap_actor_id', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
    sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('actor_id'),
    sa.UniqueConstraint('ap_actor_id')
    )
    with op.batch_alter_table('follower', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_follower_id'), ['id'], unique=False)

    op.create_table('following',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('actor_id', sa.Integer(), nullable=False),
    sa.Column('outbox_object_id', sa.Integer(), nullable=False),
    sa.Column('ap_actor_id', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
    sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('actor_id'),
    sa.UniqueConstraint('ap_actor_id')
    )
    with op.batch_alter_table('following', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_following_id'), ['id'], unique=False)

    op.create_table('indieauth_access_token',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('indieauth_authorization_request_id', sa.Integer(), nullable=True),
    sa.Column('access_token', sa.String(), nullable=False),
    sa.Column('expires_in', sa.Integer(), nullable=False),
    sa.Column('scope', sa.String(), nullable=False),
    sa.Column('is_revoked', sa.Boolean(), nullable=False),
    sa.ForeignKeyConstraint(['indieauth_authorization_request_id'], ['indieauth_authorization_request.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_indieauth_access_token_access_token'), ['access_token'], unique=True)
        batch_op.create_index(batch_op.f('ix_indieauth_access_token_id'), ['id'], unique=False)

    op.create_table('outbox_object_attachment',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('filename', sa.String(), nullable=False),
    sa.Column('alt', sa.String(), nullable=True),
    sa.Column('outbox_object_id', sa.Integer(), nullable=False),
    sa.Column('upload_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
    sa.ForeignKeyConstraint(['upload_id'], ['upload.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('outbox_object_attachment', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_outbox_object_attachment_id'), ['id'], unique=False)

    op.create_table('outgoing_activity',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('recipient', sa.String(), nullable=False),
    sa.Column('outbox_object_id', sa.Integer(), nullable=True),
    sa.Column('inbox_object_id', sa.Integer(), nullable=True),
    sa.Column('webmention_target', sa.String(), nullable=True),
    sa.Column('tries', sa.Integer(), nullable=False),
    sa.Column('next_try', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_try', sa.DateTime(timezone=True), nullable=True),
    sa.Column('last_status_code', sa.Integer(), nullable=True),
    sa.Column('last_response', sa.String(), nullable=True),
    sa.Column('is_sent', sa.Boolean(), nullable=False),
    sa.Column('is_errored', sa.Boolean(), nullable=False),
    sa.Column('error', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ),
    sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('outgoing_activity', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_outgoing_activity_id'), ['id'], unique=False)

    op.create_table('poll_answer',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('outbox_object_id', sa.Integer(), nullable=False),
    sa.Column('poll_type', sa.String(), nullable=False),
    sa.Column('inbox_object_id', sa.Integer(), nullable=False),
    sa.Column('actor_id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
    sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ),
    sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('outbox_object_id', 'name', 'actor_id', name='uix_outbox_object_id_name_actor_id')
    )
    with op.batch_alter_table('poll_answer', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_poll_answer_id'), ['id'], unique=False)
        batch_op.create_index('uix_one_of_outbox_object_id_actor_id', ['outbox_object_id', 'actor_id'], unique=True, sqlite_where=sa.text('poll_type = "oneOf"'))

    op.create_table('tagged_outbox_object',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('outbox_object_id', sa.Integer(), nullable=False),
    sa.Column('tag', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('outbox_object_id', 'tag', name='uix_tagged_object')
    )
    with op.batch_alter_table('tagged_outbox_object', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_tagged_outbox_object_id'), ['id'], unique=False)
        batch_op.create_index(batch_op.f('ix_tagged_outbox_object_tag'), ['tag'], unique=False)

    op.create_table('webmention',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('is_deleted', sa.Boolean(), nullable=False),
    sa.Column('source', sa.String(), nullable=False),
    sa.Column('source_microformats', sa.JSON(), nullable=True),
    sa.Column('target', sa.String(), nullable=False),
    sa.Column('outbox_object_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('source', 'target', name='uix_source_target')
    )
    with op.batch_alter_table('webmention', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_webmention_id'), ['id'], unique=False)
        batch_op.create_index(batch_op.f('ix_webmention_source'), ['source'], unique=True)
        batch_op.create_index(batch_op.f('ix_webmention_target'), ['target'], unique=False)

    op.create_table('notifications',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('notification_type', sa.Enum('NEW_FOLLOWER', 'UNFOLLOW', 'FOLLOW_REQUEST_ACCEPTED', 'FOLLOW_REQUEST_REJECTED', 'LIKE', 'UNDO_LIKE', 'ANNOUNCE', 'UNDO_ANNOUNCE', 'MENTION', 'NEW_WEBMENTION', 'UPDATED_WEBMENTION', 'DELETED_WEBMENTION', name='notificationtype'), nullable=True),
    sa.Column('is_new', sa.Boolean(), nullable=False),
    sa.Column('actor_id', sa.Integer(), nullable=True),
    sa.Column('outbox_object_id', sa.Integer(), nullable=True),
    sa.Column('inbox_object_id', sa.Integer(), nullable=True),
    sa.Column('webmention_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['actor_id'], ['actor.id'], ),
    sa.ForeignKeyConstraint(['inbox_object_id'], ['inbox.id'], ),
    sa.ForeignKeyConstraint(['outbox_object_id'], ['outbox.id'], ),
    sa.ForeignKeyConstraint(['webmention_id'], ['webmention.id'], name='fk_webmention_id'),
    sa.PrimaryKeyConstraint('id')
    )
    with op.batch_alter_table('notifications', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_notifications_id'), ['id'], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('notifications', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_notifications_id'))

    op.drop_table('notifications')
    with op.batch_alter_table('webmention', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_webmention_target'))
        batch_op.drop_index(batch_op.f('ix_webmention_source'))
        batch_op.drop_index(batch_op.f('ix_webmention_id'))

    op.drop_table('webmention')
    with op.batch_alter_table('tagged_outbox_object', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_tagged_outbox_object_tag'))
        batch_op.drop_index(batch_op.f('ix_tagged_outbox_object_id'))

    op.drop_table('tagged_outbox_object')
    with op.batch_alter_table('poll_answer', schema=None) as batch_op:
        batch_op.drop_index('uix_one_of_outbox_object_id_actor_id', sqlite_where=sa.text('poll_type = "oneOf"'))
        batch_op.drop_index(batch_op.f('ix_poll_answer_id'))

    op.drop_table('poll_answer')
    with op.batch_alter_table('outgoing_activity', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_outgoing_activity_id'))

    op.drop_table('outgoing_activity')
    with op.batch_alter_table('outbox_object_attachment', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_outbox_object_attachment_id'))

    op.drop_table('outbox_object_attachment')
    with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_indieauth_access_token_id'))
        batch_op.drop_index(batch_op.f('ix_indieauth_access_token_access_token'))

    op.drop_table('indieauth_access_token')
    with op.batch_alter_table('following', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_following_id'))

    op.drop_table('following')
    with op.batch_alter_table('follower', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_follower_id'))

    op.drop_table('follower')
    with op.batch_alter_table('upload', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_upload_id'))

    op.drop_table('upload')
    with op.batch_alter_table('outbox', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_outbox_public_id'))
        batch_op.drop_index(batch_op.f('ix_outbox_id'))
        batch_op.drop_index(batch_op.f('ix_outbox_ap_type'))
        batch_op.drop_index(batch_op.f('ix_outbox_ap_id'))
        batch_op.drop_index(batch_op.f('ix_outbox_activity_object_ap_id'))

    op.drop_table('outbox')
    with op.batch_alter_table('indieauth_authorization_request', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_indieauth_authorization_request_id'))
        batch_op.drop_index(batch_op.f('ix_indieauth_authorization_request_code'))

    op.drop_table('indieauth_authorization_request')
    with op.batch_alter_table('incoming_activity', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_incoming_activity_id'))
        batch_op.drop_index(batch_op.f('ix_incoming_activity_ap_id'))

    op.drop_table('incoming_activity')
    with op.batch_alter_table('inbox', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_inbox_id'))
        batch_op.drop_index(batch_op.f('ix_inbox_ap_type'))
        batch_op.drop_index(batch_op.f('ix_inbox_ap_id'))
        batch_op.drop_index(batch_op.f('ix_inbox_activity_object_ap_id'))

    op.drop_table('inbox')
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_actor_id'))
        batch_op.drop_index(batch_op.f('ix_actor_handle'))
        batch_op.drop_index(batch_op.f('ix_actor_ap_id'))

    op.drop_table('actor')
    # ### end Alembic commands ###
@ -0,0 +1,32 @@
"""Add is_blocked attribute on actors

Revision ID: 50d26a370a65
Revises: f5717d82b3ff
Create Date: 2022-07-31 08:15:27.226340+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '50d26a370a65'
down_revision = 'f5717d82b3ff'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.add_column(sa.Column('is_blocked', sa.Boolean(), server_default='0', nullable=False))

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.drop_column('is_blocked')

    # ### end Alembic commands ###
@ -0,0 +1,34 @@
"""Tweak notification model

Revision ID: 1702e88016db
Revises: 50d26a370a65
Create Date: 2022-08-02 15:19:57.221421+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '1702e88016db'
down_revision = '50d26a370a65'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('notifications', schema=None) as batch_op:
        batch_op.add_column(sa.Column('is_accepted', sa.Boolean(), nullable=True))
        batch_op.add_column(sa.Column('is_rejected', sa.Boolean(), nullable=True))

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('notifications', schema=None) as batch_op:
        batch_op.drop_column('is_rejected')
        batch_op.drop_column('is_accepted')

    # ### end Alembic commands ###
@ -0,0 +1,40 @@
"""New conversation field

Revision ID: 9bc69ed947e2
Revises: 1702e88016db
Create Date: 2022-08-14 16:38:37.688377+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '9bc69ed947e2'
down_revision = '1702e88016db'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('inbox', schema=None) as batch_op:
        batch_op.add_column(sa.Column('conversation', sa.String(), nullable=True))

    with op.batch_alter_table('outbox', schema=None) as batch_op:
        batch_op.add_column(sa.Column('conversation', sa.String(), nullable=True))

    op.execute("UPDATE inbox SET conversation = ap_context")
    op.execute("UPDATE outbox SET conversation = ap_context")
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('outbox', schema=None) as batch_op:
        batch_op.drop_column('conversation')

    with op.batch_alter_table('inbox', schema=None) as batch_op:
        batch_op.drop_column('conversation')

    # ### end Alembic commands ###
@ -0,0 +1,22 @@
"""Enable WAL mode

Revision ID: 6286262bb466
Revises: 9bc69ed947e2
Create Date: 2022-08-14 20:59:26.427796+00:00

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = '6286262bb466'
down_revision = '9bc69ed947e2'
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.execute("PRAGMA journal_mode=WAL")


def downgrade() -> None:
    op.execute("PRAGMA journal_mode=DELETE")
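Since SQLite persists the journal mode in the database file itself, this migration only needs to issue the PRAGMA once. A minimal sketch of how one could verify the mode after the migration has run; the database path is a hypothetical example, not taken from this diff:

import sqlite3

# Hypothetical path to the instance's SQLite database file; adjust as needed.
con = sqlite3.connect("data/microblogpub.db")
mode = con.execute("PRAGMA journal_mode").fetchone()[0]
print(mode)  # expected to print "wal" once the upgrade has been applied
con.close()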
@ -0,0 +1,32 @@
"""Add Actor.is_deleted

Revision ID: 5d3e3f2b9b4e
Revises: 6286262bb466
Create Date: 2022-08-17 17:58:24.813194+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '5d3e3f2b9b4e'
down_revision = '6286262bb466'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.add_column(sa.Column('is_deleted', sa.Boolean(), server_default='0', nullable=False))

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.drop_column('is_deleted')

    # ### end Alembic commands ###
@ -0,0 +1,33 @@
"""Add InboxObject.has_local_mention

Revision ID: 604d125ea2fb
Revises: 5d3e3f2b9b4e
Create Date: 2022-08-19 12:46:22.239989+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '604d125ea2fb'
down_revision = '5d3e3f2b9b4e'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('inbox', schema=None) as batch_op:
        batch_op.add_column(sa.Column('has_local_mention', sa.Boolean(), server_default='0', nullable=False))

    # ### end Alembic commands ###
    op.execute("UPDATE inbox SET has_local_mention = 1 WHERE id IN (select inbox_object_id from notifications where notification_type = 'MENTION')")


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('inbox', schema=None) as batch_op:
        batch_op.drop_column('has_local_mention')

    # ### end Alembic commands ###
@ -0,0 +1,48 @@
"""Add a slug field for outbox objects

Revision ID: b28c0551c236
Revises: 604d125ea2fb
Create Date: 2022-10-30 14:09:14.540461+00:00

"""
import sqlalchemy as sa
from sqlalchemy import select
from sqlalchemy.orm.session import Session

from alembic import op

# revision identifiers, used by Alembic.
revision = 'b28c0551c236'
down_revision = '604d125ea2fb'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('outbox', schema=None) as batch_op:
        batch_op.add_column(sa.Column('slug', sa.String(), nullable=True))
        batch_op.create_index(batch_op.f('ix_outbox_slug'), ['slug'], unique=False)

    # ### end Alembic commands ###

    # Backfill the slug for existing articles
    from app.models import OutboxObject
    from app.utils.text import slugify
    sess = Session(op.get_bind())
    articles = sess.execute(select(OutboxObject).where(
        OutboxObject.ap_type == "Article")
    ).scalars()
    for article in articles:
        title = article.ap_object["name"]
        article.slug = slugify(title)
    sess.commit()


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('outbox', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_outbox_slug'))
        batch_op.drop_column('slug')

    # ### end Alembic commands ###
@ -0,0 +1,32 @@
"""Add Webmention.webmention_type

Revision ID: fadfd359ce78
Revises: b28c0551c236
Create Date: 2022-11-16 19:42:56.925512+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = 'fadfd359ce78'
down_revision = 'b28c0551c236'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('webmention', schema=None) as batch_op:
        batch_op.add_column(sa.Column('webmention_type', sa.Enum('UNKNOWN', 'LIKE', 'REPLY', 'REPOST', name='webmentiontype'), nullable=True))

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('webmention', schema=None) as batch_op:
        batch_op.drop_column('webmention_type')

    # ### end Alembic commands ###
@ -0,0 +1,32 @@
"""Add option to hide announces from actor

Revision ID: 9b404c47970a
Revises: fadfd359ce78
Create Date: 2022-12-12 19:26:36.912763+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '9b404c47970a'
down_revision = 'fadfd359ce78'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.add_column(sa.Column('are_announces_hidden_from_stream', sa.Boolean(), server_default='0', nullable=False))

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('actor', schema=None) as batch_op:
        batch_op.drop_column('are_announces_hidden_from_stream')

    # ### end Alembic commands ###
@ -0,0 +1,48 @@
"""Add OAuth client

Revision ID: 4ab54becec04
Revises: 9b404c47970a
Create Date: 2022-12-16 17:30:54.520477+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '4ab54becec04'
down_revision = '9b404c47970a'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('oauth_client',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
    sa.Column('client_name', sa.String(), nullable=False),
    sa.Column('redirect_uris', sa.JSON(), nullable=True),
    sa.Column('client_uri', sa.String(), nullable=True),
    sa.Column('logo_uri', sa.String(), nullable=True),
    sa.Column('scope', sa.String(), nullable=True),
    sa.Column('client_id', sa.String(), nullable=False),
    sa.Column('client_secret', sa.String(), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('client_secret')
    )
    with op.batch_alter_table('oauth_client', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_oauth_client_client_id'), ['client_id'], unique=True)
        batch_op.create_index(batch_op.f('ix_oauth_client_id'), ['id'], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('oauth_client', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_oauth_client_id'))
        batch_op.drop_index(batch_op.f('ix_oauth_client_client_id'))

    op.drop_table('oauth_client')
    # ### end Alembic commands ###
@ -0,0 +1,36 @@
"""Add OAuth refresh token support

Revision ID: a209f0333f5a
Revises: 4ab54becec04
Create Date: 2022-12-18 11:26:31.976348+00:00

"""
import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = 'a209f0333f5a'
down_revision = '4ab54becec04'
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
        batch_op.add_column(sa.Column('refresh_token', sa.String(), nullable=True))
        batch_op.add_column(sa.Column('was_refreshed', sa.Boolean(), server_default='0', nullable=False))
        batch_op.create_index(batch_op.f('ix_indieauth_access_token_refresh_token'), ['refresh_token'], unique=True)

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_indieauth_access_token_refresh_token'))
        batch_op.drop_column('was_refreshed')
        batch_op.drop_column('refresh_token')

    # ### end Alembic commands ###
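The migrations above are ordinary Alembic revisions chained through `down_revision`, so, as a hedged sketch (assuming the repository's Alembic configuration is used as-is), they would be applied and rolled back with the standard commands:

# Apply all pending revisions up to the newest one
poetry run alembic upgrade head

# Roll back only the most recent revision if something goes wrong
poetry run alembic downgrade -1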
408
app/activitypub.py
Normal file
@ -0,0 +1,408 @@
import enum
import json
import mimetypes
from typing import TYPE_CHECKING
from typing import Any

import httpx
from loguru import logger

from app import config
from app.config import ALSO_KNOWN_AS
from app.config import AP_CONTENT_TYPE  # noqa: F401
from app.config import MOVED_TO
from app.httpsig import auth
from app.key import get_pubkey_as_pem
from app.source import dedup_tags
from app.source import hashtagify
from app.utils.url import check_url

if TYPE_CHECKING:
    from app.actor import Actor

RawObject = dict[str, Any]
AS_CTX = "https://www.w3.org/ns/activitystreams"
AS_PUBLIC = "https://www.w3.org/ns/activitystreams#Public"

ACTOR_TYPES = ["Application", "Group", "Organization", "Person", "Service"]

AS_EXTENDED_CTX = [
    "https://www.w3.org/ns/activitystreams",
    "https://w3id.org/security/v1",
    {
        # AS ext
        "Hashtag": "as:Hashtag",
        "sensitive": "as:sensitive",
        "manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
        "alsoKnownAs": {"@id": "as:alsoKnownAs", "@type": "@id"},
        "movedTo": {"@id": "as:movedTo", "@type": "@id"},
        # toot
        "toot": "http://joinmastodon.org/ns#",
        "featured": {"@id": "toot:featured", "@type": "@id"},
        "Emoji": "toot:Emoji",
        "blurhash": "toot:blurhash",
        "votersCount": "toot:votersCount",
        # schema
        "schema": "http://schema.org#",
        "PropertyValue": "schema:PropertyValue",
        "value": "schema:value",
        # ostatus
        "ostatus": "http://ostatus.org#",
        "conversation": "ostatus:conversation",
    },
]


class FetchError(Exception):
    def __init__(self, url: str, resp: httpx.Response | None = None) -> None:
        resp_part = ""
        if resp:
            resp_part = f", got HTTP {resp.status_code}: {resp.text}"
        message = f"Failed to fetch {url}{resp_part}"
        super().__init__(message)
        self.resp = resp
        self.url = url


class ObjectIsGoneError(FetchError):
    pass


class ObjectNotFoundError(FetchError):
    pass


class ObjectUnavailableError(FetchError):
    pass


class FetchErrorTypeEnum(str, enum.Enum):
    TIMEOUT = "TIMEOUT"
    NOT_FOUND = "NOT_FOUND"
    UNAUHTORIZED = "UNAUTHORIZED"

    INTERNAL_ERROR = "INTERNAL_ERROR"


class VisibilityEnum(str, enum.Enum):
    PUBLIC = "public"
    UNLISTED = "unlisted"
    FOLLOWERS_ONLY = "followers-only"
    DIRECT = "direct"

    @staticmethod
    def get_display_name(key: "VisibilityEnum") -> str:
        return {
            VisibilityEnum.PUBLIC: "Public - sent to followers and visible on the homepage",  # noqa: E501
            VisibilityEnum.UNLISTED: "Unlisted - like public, but hidden from the homepage",  # noqa: E501
            VisibilityEnum.FOLLOWERS_ONLY: "Followers only",
            VisibilityEnum.DIRECT: "Direct - only visible for mentioned actors",
        }[key]


_LOCAL_ACTOR_SUMMARY, _LOCAL_ACTOR_TAGS = hashtagify(config.CONFIG.summary)
_LOCAL_ACTOR_METADATA = []
if config.CONFIG.metadata:
    for kv in config.CONFIG.metadata:
        kv_value, kv_tags = hashtagify(kv.value)
        _LOCAL_ACTOR_METADATA.append(
            {
                "name": kv.key,
                "type": "PropertyValue",
                "value": kv_value,
            }
        )
        _LOCAL_ACTOR_TAGS.extend(kv_tags)


ME = {
    "@context": AS_EXTENDED_CTX,
    "type": "Person",
    "id": config.ID,
    "following": config.BASE_URL + "/following",
    "followers": config.BASE_URL + "/followers",
    "featured": config.BASE_URL + "/featured",
    "inbox": config.BASE_URL + "/inbox",
    "outbox": config.BASE_URL + "/outbox",
    "preferredUsername": config.USERNAME,
    "name": config.CONFIG.name,
    "summary": _LOCAL_ACTOR_SUMMARY,
    "endpoints": {
        # For compat with servers expecting a sharedInbox...
        "sharedInbox": config.BASE_URL
        + "/inbox",
    },
    "url": config.ID + "/",  # XXX: the path is important for Mastodon compat
    "manuallyApprovesFollowers": config.CONFIG.manually_approves_followers,
    "attachment": _LOCAL_ACTOR_METADATA,
    "publicKey": {
        "id": f"{config.ID}#main-key",
        "owner": config.ID,
        "publicKeyPem": get_pubkey_as_pem(config.KEY_PATH),
    },
    "tag": dedup_tags(_LOCAL_ACTOR_TAGS),
}

if config.CONFIG.icon_url:
    ME["icon"] = {
        "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
        "type": "Image",
        "url": config.CONFIG.icon_url,
    }

if ALSO_KNOWN_AS:
    ME["alsoKnownAs"] = [ALSO_KNOWN_AS]

if MOVED_TO:
    ME["movedTo"] = MOVED_TO

if config.CONFIG.image_url:
    ME["image"] = {
        "mediaType": mimetypes.guess_type(config.CONFIG.image_url)[0],
        "type": "Image",
        "url": config.CONFIG.image_url,
    }


class NotAnObjectError(Exception):
    def __init__(self, url: str, resp: httpx.Response | None = None) -> None:
        message = f"{url} is not an AP activity"
        super().__init__(message)
        self.url = url
        self.resp = resp


async def fetch(
    url: str,
    params: dict[str, Any] | None = None,
    disable_httpsig: bool = False,
) -> RawObject:
    logger.info(f"Fetching {url} ({params=})")
    check_url(url)

    async with httpx.AsyncClient() as client:
        resp = await client.get(
            url,
            headers={
                "User-Agent": config.USER_AGENT,
                "Accept": config.AP_CONTENT_TYPE,
            },
            params=params,
            follow_redirects=True,
            auth=None if disable_httpsig else auth,
        )

    # Special handling for deleted object
    if resp.status_code == 410:
        raise ObjectIsGoneError(url, resp)
    elif resp.status_code in [401, 403]:
        raise ObjectUnavailableError(url, resp)
    elif resp.status_code == 404:
        raise ObjectNotFoundError(url, resp)

    try:
        resp.raise_for_status()
    except httpx.HTTPError as http_error:
        raise FetchError(url, resp) from http_error

    try:
        return resp.json()
    except json.JSONDecodeError:
        raise NotAnObjectError(url, resp)


async def parse_collection(  # noqa: C901
    url: str | None = None,
    payload: RawObject | None = None,
    level: int = 0,
    limit: int = 0,
) -> list[RawObject]:
    """Resolve/fetch a `Collection`/`OrderedCollection`."""
    if level > 3:
        raise ValueError("recursion limit exceeded")

    # Go through all the pages
    out: list[RawObject] = []
    if url:
        payload = await fetch(url)
    if not payload:
        raise ValueError("must provide at least a payload or a URL")

    ap_type = payload.get("type")
    if not ap_type:
        raise ValueError(f"Missing type: {payload=}")

    if level == 0 and ap_type not in ["Collection", "OrderedCollection"]:
        raise ValueError(f"Unexpected type {ap_type}")

    if payload["type"] in ["Collection", "OrderedCollection"]:
        if "orderedItems" in payload:
            return payload["orderedItems"]
        if "items" in payload:
            return payload["items"]
        if "first" in payload:
            if isinstance(payload["first"], str):
                out.extend(
                    await parse_collection(
                        url=payload["first"], level=level + 1, limit=limit
                    )
                )
            else:
                if "orderedItems" in payload["first"]:
                    out.extend(payload["first"]["orderedItems"])
                if "items" in payload["first"]:
                    out.extend(payload["first"]["items"])
                n = payload["first"].get("next")
                if n:
                    out.extend(
                        await parse_collection(url=n, level=level + 1, limit=limit)
                    )
        return out

    while payload:
        if ap_type in ["CollectionPage", "OrderedCollectionPage"]:
            if "orderedItems" in payload:
                out.extend(payload["orderedItems"])
            if "items" in payload:
                out.extend(payload["items"])
            n = payload.get("next")
            if n is None or (limit > 0 and len(out) >= limit):
                break
            payload = await fetch(n)
        else:
            raise ValueError("unexpected activity type {}".format(payload["type"]))

    return out


def as_list(val: Any | list[Any]) -> list[Any]:
    if isinstance(val, list):
        return val

    return [val]


def get_id(val: str | dict[str, Any]) -> str:
    if isinstance(val, dict):
        val = val["id"]

    if not isinstance(val, str):
        raise ValueError(f"Invalid ID type: {val}")

    return val


def object_visibility(ap_activity: RawObject, actor: "Actor") -> VisibilityEnum:
    to = as_list(ap_activity.get("to", []))
    cc = as_list(ap_activity.get("cc", []))
    if AS_PUBLIC in to:
        return VisibilityEnum.PUBLIC
    elif AS_PUBLIC in cc:
        return VisibilityEnum.UNLISTED
    elif actor.followers_collection_id and actor.followers_collection_id in to + cc:
        return VisibilityEnum.FOLLOWERS_ONLY
    else:
        return VisibilityEnum.DIRECT


def get_actor_id(activity: RawObject) -> str:
    if "attributedTo" in activity:
        attributed_to = as_list(activity["attributedTo"])
        return get_id(attributed_to[0])
    else:
        return get_id(activity["actor"])


async def get_object(activity: RawObject) -> RawObject:
    if "object" not in activity:
        raise ValueError(f"No object in {activity}")

    raw_activity_object = activity["object"]
    if isinstance(raw_activity_object, dict):
        return raw_activity_object
    elif isinstance(raw_activity_object, str):
        return await fetch(raw_activity_object)
    else:
        raise ValueError(f"Unexpected object {raw_activity_object}")


def get_object_id(activity: RawObject) -> str:
    if "object" not in activity:
        raise ValueError(f"No object in {activity}")

    return get_id(activity["object"])


def wrap_object(activity: RawObject) -> RawObject:
    # TODO(tsileo): improve Create VS Update with a `update=True` flag
    if "updated" in activity:
        return {
            "@context": AS_EXTENDED_CTX,
            "actor": config.ID,
            "to": activity.get("to", []),
            "cc": activity.get("cc", []),
            "id": activity["id"] + "/update_activity/" + activity["updated"],
            "object": remove_context(activity),
            "published": activity["published"],
            "updated": activity["updated"],
            "type": "Update",
        }
    else:
        return {
            "@context": AS_EXTENDED_CTX,
            "actor": config.ID,
            "to": activity.get("to", []),
            "cc": activity.get("cc", []),
            "id": activity["id"] + "/activity",
            "object": remove_context(activity),
            "published": activity["published"],
            "type": "Create",
        }


def wrap_object_if_needed(raw_object: RawObject) -> RawObject:
    if raw_object["type"] in ["Note", "Article", "Question"]:
        return wrap_object(raw_object)

    return raw_object


def unwrap_activity(activity: RawObject) -> RawObject:
    # FIXME(ts): deprecate this
    if activity["type"] in ["Create", "Update"]:
        unwrapped_object = activity["object"]

        # Sanity check, ensure the wrapped object actor matches the activity
        if get_actor_id(unwrapped_object) != get_actor_id(activity):
            raise ValueError(
                f"Unwrapped object actor does not match activity: {activity}"
            )
        return unwrapped_object

    return activity


def remove_context(raw_object: RawObject) -> RawObject:
    if "@context" not in raw_object:
        return raw_object
    a = dict(raw_object)
    del a["@context"]
    return a


async def post(url: str, payload: dict[str, Any]) -> httpx.Response:
    logger.info(f"Posting {url} ({payload=})")
    check_url(url)

    async with httpx.AsyncClient() as client:
        resp = await client.post(
            url,
            headers={
                "User-Agent": config.USER_AGENT,
                "Content-Type": config.AP_CONTENT_TYPE,
            },
            json=payload,
            auth=auth,
        )
    resp.raise_for_status()
    return resp
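A brief usage sketch of the helpers above, assuming the app config has been loaded; the remote URL is a made-up example, not something taken from this diff:

import asyncio

from app import activitypub as ap


async def main() -> None:
    # Fetch a remote actor document (hypothetical example URL)
    actor = await ap.fetch("https://example.com/users/alice")
    print(actor["preferredUsername"])

    # Resolve the actor's outbox collection into a flat list of activities
    items = await ap.parse_collection(url=actor["outbox"], limit=20)
    print(len(items))


asyncio.run(main())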
455
app/actor.py
Normal file
@ -0,0 +1,455 @@
import hashlib
import typing
from dataclasses import dataclass
from datetime import timedelta
from functools import cached_property
from typing import Union
from urllib.parse import urlparse

import httpx
from loguru import logger
from sqlalchemy import select
from sqlalchemy.orm import joinedload

from app import activitypub as ap
from app import media
from app.config import BASE_URL
from app.config import USER_AGENT
from app.config import USERNAME
from app.config import WEBFINGER_DOMAIN
from app.database import AsyncSession
from app.utils.datetime import as_utc
from app.utils.datetime import now

if typing.TYPE_CHECKING:
    from app.models import Actor as ActorModel


def _handle(raw_actor: ap.RawObject) -> str:
    ap_id = ap.get_id(raw_actor["id"])
    domain = urlparse(ap_id)
    if not domain.hostname:
        raise ValueError(f"Invalid actor ID {ap_id}")

    handle = f'@{raw_actor["preferredUsername"]}@{domain.hostname}'  # type: ignore

    # TODO: cleanup this
    # Next, check for custom webfinger domains
    resp: httpx.Response | None = None
    for url in {
        f"https://{domain.hostname}/.well-known/webfinger",
        f"http://{domain.hostname}/.well-known/webfinger",
    }:
        try:
            logger.info(f"Webfinger {handle} at {url}")
            resp = httpx.get(
                url,
                params={"resource": f"acct:{handle[1:]}"},
                headers={
                    "User-Agent": USER_AGENT,
                },
                follow_redirects=True,
            )
            resp.raise_for_status()
            break
        except Exception:
            logger.exception(f"Failed to webfinger {handle}")

    if resp:
        try:
            json_resp = resp.json()
            if json_resp.get("subject", "").startswith("acct:"):
                return "@" + json_resp["subject"].removeprefix("acct:")
        except Exception:
            logger.exception(f"Failed to parse webfinger response for {handle}")
    return handle


class Actor:
    @property
    def ap_actor(self) -> ap.RawObject:
        raise NotImplementedError()

    @property
    def ap_id(self) -> str:
        return ap.get_id(self.ap_actor["id"])

    @property
    def name(self) -> str | None:
        return self.ap_actor.get("name")

    @property
    def summary(self) -> str | None:
        return self.ap_actor.get("summary")

    @property
    def url(self) -> str | None:
        return self.ap_actor.get("url") or self.ap_actor["id"]

    @property
    def preferred_username(self) -> str:
        return self.ap_actor["preferredUsername"]

    @property
    def display_name(self) -> str:
        if self.name:
            return self.name
        return self.preferred_username

    @cached_property
    def handle(self) -> str:
        return _handle(self.ap_actor)

    @property
    def ap_type(self) -> str:
        raise NotImplementedError()

    @property
    def inbox_url(self) -> str:
        return self.ap_actor["inbox"]

    @property
    def outbox_url(self) -> str:
        return self.ap_actor["outbox"]

    @property
    def shared_inbox_url(self) -> str:
        return self.ap_actor.get("endpoints", {}).get("sharedInbox") or self.inbox_url

    @property
    def icon_url(self) -> str | None:
        if icon := self.ap_actor.get("icon"):
            return icon.get("url")
        return None

    @property
    def icon_media_type(self) -> str | None:
        if icon := self.ap_actor.get("icon"):
            return icon.get("mediaType")
        return None

    @property
    def image_url(self) -> str | None:
        if image := self.ap_actor.get("image"):
            return image.get("url")
        return None

    @property
    def public_key_as_pem(self) -> str:
        return self.ap_actor["publicKey"]["publicKeyPem"]

    @property
    def public_key_id(self) -> str:
        return self.ap_actor["publicKey"]["id"]

    @property
    def proxied_icon_url(self) -> str:
        if self.icon_url:
            return media.proxied_media_url(self.icon_url)
        else:
            return BASE_URL + "/static/nopic.png"

    @property
    def resized_icon_url(self) -> str:
        if self.icon_url:
            return media.resized_media_url(self.icon_url, 50)
        else:
            return BASE_URL + "/static/nopic.png"

    @property
    def tags(self) -> list[ap.RawObject]:
        return ap.as_list(self.ap_actor.get("tag", []))

    @property
    def followers_collection_id(self) -> str | None:
        return self.ap_actor.get("followers")

    @cached_property
    def attachments(self) -> list[ap.RawObject]:
        return ap.as_list(self.ap_actor.get("attachment", []))

    @cached_property
    def moved_to(self) -> str | None:
        return self.ap_actor.get("movedTo")

    @cached_property
    def server(self) -> str:
        return urlparse(self.ap_id).hostname  # type: ignore


class RemoteActor(Actor):
    def __init__(self, ap_actor: ap.RawObject, handle: str | None = None) -> None:
        if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES:
            raise ValueError(f"Unexpected actor type: {ap_type}")

        self._ap_actor = ap_actor
        self._ap_type = ap_type

        if handle is None:
            handle = _handle(ap_actor)

        self._handle = handle

    @property
    def ap_actor(self) -> ap.RawObject:
        return self._ap_actor

    @property
    def ap_type(self) -> str:
        return self._ap_type

    @property
    def is_from_db(self) -> bool:
        return False

    @property
    def handle(self) -> str:
        return self._handle


LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME, handle=f"@{USERNAME}@{WEBFINGER_DOMAIN}")


async def save_actor(db_session: AsyncSession, ap_actor: ap.RawObject) -> "ActorModel":
    from app import models

    if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES:
        raise ValueError(f"Invalid type {ap_type} for actor {ap_actor}")

    actor = models.Actor(
        ap_id=ap.get_id(ap_actor["id"]),
        ap_actor=ap_actor,
        ap_type=ap.as_list(ap_actor["type"])[0],
        handle=_handle(ap_actor),
    )
    db_session.add(actor)
    await db_session.flush()
    await db_session.refresh(actor)
    return actor


async def fetch_actor(
    db_session: AsyncSession,
    actor_id: str,
    save_if_not_found: bool = True,
) -> "ActorModel":
    if actor_id == LOCAL_ACTOR.ap_id:
        raise ValueError("local actor should not be fetched")
    from app import models

    existing_actor = (
        await db_session.scalars(
            select(models.Actor).where(
                models.Actor.ap_id == actor_id,
            )
        )
    ).one_or_none()
    if existing_actor:
        if existing_actor.is_deleted:
            raise ap.ObjectNotFoundError(f"{actor_id} was deleted")

        if now() - as_utc(existing_actor.updated_at) > timedelta(hours=24):
            logger.info(
                f"Refreshing {actor_id=} last updated {existing_actor.updated_at}"
            )
            try:
                ap_actor = await ap.fetch(actor_id)
                await update_actor_if_needed(
                    db_session,
                    existing_actor,
                    RemoteActor(ap_actor),
                )
                return existing_actor
            except Exception:
                logger.exception(f"Failed to refresh {actor_id}")
                # If we fail to refresh the actor, return the cached one
                return existing_actor
        else:
            return existing_actor

    if save_if_not_found:
        ap_actor = await ap.fetch(actor_id)
        # Some software uses the URL when we expect the ID, or uses a different casing
        # (like Birdsite LIVE), which means we may already have it in the DB
        existing_actor_by_url = (
            await db_session.scalars(
                select(models.Actor).where(
                    models.Actor.ap_id == ap.get_id(ap_actor),
                )
            )
        ).one_or_none()
        if existing_actor_by_url:
            # Update the actor as we had to fetch it anyway
            await update_actor_if_needed(
                db_session,
                existing_actor_by_url,
                RemoteActor(ap_actor),
            )
            return existing_actor_by_url

        return await save_actor(db_session, ap_actor)
    else:
        raise ap.ObjectNotFoundError(actor_id)


async def update_actor_if_needed(
    db_session: AsyncSession,
    actor_in_db: "ActorModel",
    ra: RemoteActor,
) -> None:
    # Check if we actually need to update the actor in DB
    if _actor_hash(ra) != _actor_hash(actor_in_db):
        actor_in_db.ap_actor = ra.ap_actor
        actor_in_db.handle = ra.handle
        actor_in_db.ap_type = ra.ap_type

        actor_in_db.updated_at = now()
        await db_session.flush()


@dataclass
class ActorMetadata:
    ap_actor_id: str
    is_following: bool
    is_follower: bool
    is_follow_request_sent: bool
    is_follow_request_rejected: bool
    outbox_follow_ap_id: str | None
    inbox_follow_ap_id: str | None
    moved_to: typing.Optional["ActorModel"]
    has_blocked_local_actor: bool


ActorsMetadata = dict[str, ActorMetadata]


async def get_actors_metadata(
    db_session: AsyncSession,
    actors: list[Union["ActorModel", "RemoteActor"]],
) -> ActorsMetadata:
    from app import models

    ap_actor_ids = [actor.ap_id for actor in actors]
    followers = {
        follower.ap_actor_id: follower.inbox_object.ap_id
        for follower in (
            await db_session.scalars(
                select(models.Follower)
                .where(models.Follower.ap_actor_id.in_(ap_actor_ids))
                .options(joinedload(models.Follower.inbox_object))
            )
        )
        .unique()
        .all()
    }
    following = {
        following.ap_actor_id
        for following in await db_session.execute(
            select(models.Following.ap_actor_id).where(
                models.Following.ap_actor_id.in_(ap_actor_ids)
            )
        )
    }
    sent_follow_requests = {
        follow_req.ap_object["object"]: follow_req.ap_id
        for follow_req in await db_session.execute(
            select(models.OutboxObject.ap_object, models.OutboxObject.ap_id).where(
                models.OutboxObject.ap_type == "Follow",
                models.OutboxObject.undone_by_outbox_object_id.is_(None),
                models.OutboxObject.activity_object_ap_id.in_(ap_actor_ids),
            )
        )
    }
    rejected_follow_requests = {
        reject.activity_object_ap_id
        for reject in await db_session.execute(
            select(models.InboxObject.activity_object_ap_id).where(
                models.InboxObject.ap_type == "Reject",
                models.InboxObject.ap_actor_id.in_(ap_actor_ids),
            )
        )
    }
    blocks = {
        block.ap_actor_id
        for block in await db_session.execute(
            select(models.InboxObject.ap_actor_id).where(
                models.InboxObject.ap_type == "Block",
                models.InboxObject.undone_by_inbox_object_id.is_(None),
                models.InboxObject.ap_actor_id.in_(ap_actor_ids),
            )
        )
    }

    idx: ActorsMetadata = {}
    for actor in actors:
        if not actor.ap_id:
            raise ValueError("Should never happen")
        moved_to = None
        if actor.moved_to:
            try:
                moved_to = await fetch_actor(
                    db_session,
                    actor.moved_to,
                    save_if_not_found=False,
                )
            except ap.ObjectNotFoundError:
                pass
            except Exception:
                logger.exception(f"Failed to fetch {actor.moved_to=}")

        idx[actor.ap_id] = ActorMetadata(
            ap_actor_id=actor.ap_id,
            is_following=actor.ap_id in following,
            is_follower=actor.ap_id in followers,
            is_follow_request_sent=actor.ap_id in sent_follow_requests,
            is_follow_request_rejected=bool(
                sent_follow_requests[actor.ap_id] in rejected_follow_requests
            )
            if actor.ap_id in sent_follow_requests
            else False,
            outbox_follow_ap_id=sent_follow_requests.get(actor.ap_id),
            inbox_follow_ap_id=followers.get(actor.ap_id),
            moved_to=moved_to,
            has_blocked_local_actor=actor.ap_id in blocks,
        )
    return idx


def _actor_hash(actor: Actor) -> bytes:
    """Used to detect when an actor is updated"""
    h = hashlib.blake2b(digest_size=32)
    h.update(actor.ap_id.encode())
    h.update(actor.handle.encode())

    if actor.name:
        h.update(actor.name.encode())

    if actor.summary:
        h.update(actor.summary.encode())

    if actor.url:
        h.update(actor.url.encode())

    h.update(actor.display_name.encode())

    if actor.icon_url:
        h.update(actor.icon_url.encode())

    if actor.image_url:
        h.update(actor.image_url.encode())

    if actor.attachments:
        for a in actor.attachments:
            if a.get("type") != "PropertyValue":
                continue

            h.update(a["name"].encode())
            h.update(a["value"].encode())

    h.update(actor.public_key_id.encode())
    h.update(actor.public_key_as_pem.encode())

    if actor.moved_to:
        h.update(actor.moved_to.encode())

    return h.digest()
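The 24-hour refresh path in `fetch_actor` relies on `_actor_hash` to decide whether a re-fetched profile actually changed before writing it back. A minimal illustration of that comparison using `RemoteActor`; the actor document below is a made-up, stripped-down example, not taken from this diff:

from app.actor import RemoteActor, _actor_hash

# Hypothetical minimal actor document; a real one carries many more fields.
raw = {
    "type": "Person",
    "id": "https://example.com/users/alice",
    "preferredUsername": "alice",
    "inbox": "https://example.com/users/alice/inbox",
    "outbox": "https://example.com/users/alice/outbox",
    "publicKey": {
        "id": "https://example.com/users/alice#main-key",
        "owner": "https://example.com/users/alice",
        "publicKeyPem": "...",
    },
}
before = RemoteActor(raw, handle="@alice@example.com")
after = RemoteActor({**raw, "name": "Alice"}, handle="@alice@example.com")

# Different digests mean update_actor_if_needed() would persist the new profile.
print(_actor_hash(before) != _actor_hash(after))  # True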
1282
app/admin.py
Normal file
File diff suppressed because it is too large
349
app/ap_object.py
Normal file
349
app/ap_object.py
Normal file
|
@ -0,0 +1,349 @@
|
||||||
|
import hashlib
|
||||||
|
import mimetypes
|
||||||
|
from datetime import datetime
|
||||||
|
from functools import cached_property
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import pydantic
|
||||||
|
from bs4 import BeautifulSoup # type: ignore
|
||||||
|
from mistletoe import markdown # type: ignore
|
||||||
|
|
||||||
|
from app import activitypub as ap
|
||||||
|
from app.actor import LOCAL_ACTOR
|
||||||
|
from app.actor import Actor
|
||||||
|
from app.actor import RemoteActor
|
||||||
|
from app.config import ID
|
||||||
|
from app.media import proxied_media_url
|
||||||
|
from app.utils.datetime import now
|
||||||
|
from app.utils.datetime import parse_isoformat
|
||||||
|
|
||||||
|
|
||||||
|
class Object:
|
||||||
|
@property
|
||||||
|
def is_from_db(self) -> bool:
|
||||||
|
return False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_from_outbox(self) -> bool:
|
||||||
|
return False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_from_inbox(self) -> bool:
|
||||||
|
return False
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def ap_type(self) -> str:
|
||||||
|
return ap.as_list(self.ap_object["type"])[0]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ap_object(self) -> ap.RawObject:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ap_id(self) -> str:
|
||||||
|
return ap.get_id(self.ap_object["id"])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ap_actor_id(self) -> str:
|
||||||
|
return ap.get_actor_id(self.ap_object)
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def ap_published_at(self) -> datetime | None:
|
||||||
|
# TODO: default to None? or now()?
|
||||||
|
if "published" in self.ap_object:
|
||||||
|
return parse_isoformat(self.ap_object["published"])
|
||||||
|
elif "created" in self.ap_object:
|
||||||
|
return parse_isoformat(self.ap_object["created"])
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def actor(self) -> Actor:
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def visibility(self) -> ap.VisibilityEnum:
|
||||||
|
return ap.object_visibility(self.ap_object, self.actor)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def ap_context(self) -> str | None:
|
||||||
|
return self.ap_object.get("context") or self.ap_object.get("conversation")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def sensitive(self) -> bool:
|
||||||
|
return self.ap_object.get("sensitive", False)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tags(self) -> list[ap.RawObject]:
|
||||||
|
return ap.as_list(self.ap_object.get("tag", []))
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def inlined_images(self) -> set[str]:
|
||||||
|
image_urls: set[str] = set()
|
||||||
|
if not self.content:
|
||||||
|
return image_urls
|
||||||
|
|
||||||
|
soup = BeautifulSoup(self.content, "html5lib")
|
||||||
|
imgs = soup.find_all("img")
|
||||||
|
|
||||||
|
for img in imgs:
|
||||||
|
if not img.attrs.get("src"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
image_urls.add(img.attrs["src"])
|
||||||
|
|
||||||
|
return image_urls
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def attachments(self) -> list["Attachment"]:
|
||||||
|
attachments = []
|
||||||
|
for obj in ap.as_list(self.ap_object.get("attachment", [])):
|
||||||
|
if obj.get("type") == "PropertyValue":
|
||||||
|
continue
|
||||||
|
|
||||||
|
if obj.get("type") == "Link":
|
||||||
|
attachments.append(
|
||||||
|
Attachment.parse_obj(
|
||||||
|
{
|
||||||
|
"proxiedUrl": None,
|
||||||
|
"resizedUrl": None,
|
||||||
|
"mediaType": None,
|
||||||
|
"type": "Link",
|
||||||
|
"url": obj["href"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
proxied_url = proxied_media_url(obj["url"])
|
||||||
|
attachments.append(
|
||||||
|
Attachment.parse_obj(
|
||||||
|
{
|
||||||
|
"proxiedUrl": proxied_url,
|
||||||
|
"resizedUrl": proxied_url + "/740"
|
||||||
|
if obj.get("mediaType", "").startswith("image")
|
||||||
|
else None,
|
||||||
|
**obj,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Also add any video Link (for PeerTube compat)
|
||||||
|
if self.ap_type == "Video":
|
||||||
|
for link in ap.as_list(self.ap_object.get("url", [])):
|
||||||
|
if (isinstance(link, dict)) and link.get("type") == "Link":
|
||||||
|
if link.get("mediaType", "").startswith("video"):
|
||||||
|
proxied_url = proxied_media_url(link["href"])
|
||||||
|
attachments.append(
|
||||||
|
Attachment(
|
||||||
|
type="Video",
|
||||||
|
mediaType=link["mediaType"],
|
||||||
|
url=link["href"],
|
||||||
|
proxiedUrl=proxied_url,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
break
|
||||||
|
elif link.get("mediaType", "") == "application/x-mpegURL":
|
||||||
|
for tag in ap.as_list(link.get("tag", [])):
|
||||||
|
if tag.get("mediaType", "").startswith("video"):
|
||||||
|
proxied_url = proxied_media_url(tag["href"])
|
||||||
|
attachments.append(
|
||||||
|
Attachment(
|
||||||
|
type="Video",
|
||||||
|
mediaType=tag["mediaType"],
|
||||||
|
url=tag["href"],
|
||||||
|
proxiedUrl=proxied_url,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
break
|
||||||
|
return attachments
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def url(self) -> str | None:
|
||||||
|
obj_url = self.ap_object.get("url")
|
||||||
|
if isinstance(obj_url, str) and obj_url:
|
||||||
|
return obj_url
|
||||||
|
elif obj_url:
|
||||||
|
for u in ap.as_list(obj_url):
|
||||||
|
if u.get("type") == "Link":
|
||||||
|
return u["href"]
|
||||||
|
|
||||||
|
if u["mediaType"] == "text/html":
|
||||||
|
return u["href"]
|
||||||
|
|
||||||
|
return self.ap_id
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def content(self) -> str | None:
|
||||||
|
content = self.ap_object.get("content")
|
||||||
|
if not content:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# PeerTube returns the content as markdown
|
||||||
|
if self.ap_object.get("mediaType") == "text/markdown":
|
||||||
|
content = markdown(content)
|
||||||
|
|
||||||
|
return content
|
||||||
|
|
||||||
|
@property
|
||||||
|
def summary(self) -> str | None:
|
||||||
|
return self.ap_object.get("summary")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str | None:
|
||||||
|
return self.ap_object.get("name")
|
||||||
|
|
||||||
|
@cached_property
|
||||||
|
def permalink_id(self) -> str:
|
||||||
|
return (
|
||||||
|
"permalink-"
|
||||||
|
+ hashlib.md5(
|
||||||
|
self.ap_id.encode(),
|
||||||
|
usedforsecurity=False,
|
||||||
|
).hexdigest()
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def activity_object_ap_id(self) -> str | None:
|
||||||
|
if "object" in self.ap_object:
|
||||||
|
return ap.get_id(self.ap_object["object"])
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def in_reply_to(self) -> str | None:
|
||||||
|
return self.ap_object.get("inReplyTo")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_local_reply(self) -> bool:
|
||||||
|
if not self.in_reply_to:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return bool(
|
||||||
|
self.in_reply_to.startswith(ID) and self.content # Hide votes from Question
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_in_reply_to_from_inbox(self) -> bool | None:
|
||||||
|
if not self.in_reply_to:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return not self.in_reply_to.startswith(LOCAL_ACTOR.ap_id)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_ld_signature(self) -> bool:
|
||||||
|
        return bool(self.ap_object.get("signature"))

    @property
    def is_poll_ended(self) -> bool:
        if self.poll_end_time:
            return now() > self.poll_end_time
        return False

    @cached_property
    def poll_items(self) -> list[ap.RawObject] | None:
        return self.ap_object.get("oneOf") or self.ap_object.get("anyOf")

    @cached_property
    def poll_end_time(self) -> datetime | None:
        # Some polls may not have an end time
        if self.ap_object.get("endTime"):
            return parse_isoformat(self.ap_object["endTime"])

        return None

    @cached_property
    def poll_voters_count(self) -> int | None:
        if not self.poll_items:
            return None

        # Only Mastodon sets this attribute
        if self.ap_object.get("votersCount"):
            return self.ap_object["votersCount"]
        else:
            voters_count = 0
            for item in self.poll_items:
                voters_count += item.get("replies", {}).get("totalItems", 0)

            return voters_count

    @cached_property
    def is_one_of_poll(self) -> bool:
        return bool(self.ap_object.get("oneOf"))


def _to_camel(string: str) -> str:
    cased = "".join(word.capitalize() for word in string.split("_"))
    return cased[0:1].lower() + cased[1:]


class BaseModel(pydantic.BaseModel):
    class Config:
        alias_generator = _to_camel


class Attachment(BaseModel):
    type: str
    media_type: str | None
    name: str | None
    url: str

    # Extra fields for the templates (and only for media)
    proxied_url: str | None = None
    resized_url: str | None = None

    width: int | None = None
    height: int | None = None

    @property
    def mimetype(self) -> str:
        mimetype = self.media_type
        if not mimetype:
            mimetype, _ = mimetypes.guess_type(self.url)

        if not mimetype:
            return "unknown"

        return mimetype.split("/")[-1]


class RemoteObject(Object):
    def __init__(self, raw_object: ap.RawObject, actor: Actor):
        self._raw_object = raw_object
        self._actor = actor

        if self._actor.ap_id != ap.get_actor_id(self._raw_object):
            raise ValueError(f"Invalid actor {self._actor.ap_id}")

    @classmethod
    async def from_raw_object(
        cls,
        raw_object: ap.RawObject,
        actor: Actor | None = None,
    ):
        # Pre-fetch the actor
        actor_id = ap.get_actor_id(raw_object)
        if actor_id == LOCAL_ACTOR.ap_id:
            _actor = LOCAL_ACTOR
        elif actor:
            if actor.ap_id != actor_id:
                raise ValueError(
                    f"Invalid actor, got {actor.ap_id}, " f"expected {actor_id}"
                )
            _actor = actor  # type: ignore
        else:
            _actor = RemoteActor(
                ap_actor=await ap.fetch(ap.get_actor_id(raw_object)),
            )

        return cls(raw_object, _actor)

    @property
    def og_meta(self) -> list[dict[str, Any]] | None:
        return None

    @property
    def ap_object(self) -> ap.RawObject:
        return self._raw_object

    @property
    def actor(self) -> Actor:
        return self._actor
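
A rough sketch (not part of the diff) of how the camelCase alias generator is expected to behave when parsing an ActivityPub attachment; the sample values are made up.

# Sketch only: "mediaType" in the raw object populates Attachment.media_type
# because of the _to_camel alias generator above.
from app.ap_object import Attachment

attachment = Attachment.parse_obj(
    {
        "type": "Document",
        "mediaType": "image/png",
        "name": "a screenshot",
        "url": "https://example.com/media/screenshot.png",
    }
)
assert attachment.mimetype == "png"  # "image/png".split("/")[-1]
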
2761
app/boxes.py
Normal file
File diff suppressed because it is too large
289
app/config.py
Normal file
@@ -0,0 +1,289 @@
import hashlib
import hmac
import os
import secrets
from pathlib import Path

import bcrypt
import itsdangerous
import pydantic
import tomli
from fastapi import Form
from fastapi import HTTPException
from fastapi import Request
from itsdangerous import URLSafeTimedSerializer
from loguru import logger
from mistletoe import markdown  # type: ignore

from app.customization import _CUSTOM_ROUTES
from app.customization import _StreamVisibilityCallback
from app.customization import default_stream_visibility_callback
from app.utils.emoji import _load_emojis
from app.utils.version import get_version_commit

ROOT_DIR = Path().parent.resolve()

_CONFIG_FILE = os.getenv("MICROBLOGPUB_CONFIG_FILE", "profile.toml")

VERSION_COMMIT = "dev"

try:
    from app._version import VERSION_COMMIT  # type: ignore
except ImportError:
    VERSION_COMMIT = get_version_commit()

# Force reloading cache when the CSS is updated
CSS_HASH = "none"
try:
    css_data = (ROOT_DIR / "app" / "static" / "css" / "main.css").read_bytes()
    CSS_HASH = hashlib.md5(css_data, usedforsecurity=False).hexdigest()
except FileNotFoundError:
    pass

# Force reloading cache when the JS is changed
JS_HASH = "none"
try:
    # To keep things simple, we keep a single hash for the 2 files
    dat = b""
    for j in [
        ROOT_DIR / "app" / "static" / "common.js",
        ROOT_DIR / "app" / "static" / "common-admin.js",
        ROOT_DIR / "app" / "static" / "new.js",
    ]:
        dat += j.read_bytes()
    JS_HASH = hashlib.md5(dat, usedforsecurity=False).hexdigest()
except FileNotFoundError:
    pass

MOVED_TO_FILE = ROOT_DIR / "data" / "moved_to.dat"


def _get_moved_to() -> str | None:
    if not MOVED_TO_FILE.exists():
        return None

    return MOVED_TO_FILE.read_text()


def set_moved_to(moved_to: str) -> None:
    MOVED_TO_FILE.write_text(moved_to)


VERSION = f"2.0.0+{VERSION_COMMIT}"
USER_AGENT = f"microblogpub/{VERSION}"
AP_CONTENT_TYPE = "application/activity+json"


class _PrivacyReplace(pydantic.BaseModel):
    domain: str
    replace_by: str


class _ProfileMetadata(pydantic.BaseModel):
    key: str
    value: str


class _BlockedServer(pydantic.BaseModel):
    hostname: str
    reason: str | None = None


class Config(pydantic.BaseModel):
    domain: str
    username: str
    admin_password: bytes
    name: str
    summary: str
    https: bool
    icon_url: str | None = None
    image_url: str | None = None
    secret: str
    debug: bool = False
    trusted_hosts: list[str] = ["127.0.0.1"]
    manually_approves_followers: bool = False
    privacy_replace: list[_PrivacyReplace] | None = None
    metadata: list[_ProfileMetadata] | None = None
    code_highlighting_theme = "friendly_grayscale"
    blocked_servers: list[_BlockedServer] = []
    custom_footer: str | None = None
    emoji: str | None = None
    also_known_as: str | None = None

    hides_followers: bool = False
    hides_following: bool = False

    inbox_retention_days: int = 15

    custom_content_security_policy: str | None = None

    webfinger_domain: str | None = None

    # Config items to make tests easier
    sqlalchemy_database: str | None = None
    key_path: str | None = None

    session_timeout: int = 3600 * 24 * 3  # in seconds, 3 days by default
    csrf_token_exp: int = 3600

    disabled_notifications: list[str] = []

    # Only set when the app is served on a non-root path
    id: str | None = None


def load_config() -> Config:
    try:
        return Config.parse_obj(
            tomli.loads((ROOT_DIR / "data" / _CONFIG_FILE).read_text())
        )
    except FileNotFoundError:
        raise ValueError(
            f"Please run the configuration wizard, {_CONFIG_FILE} is missing"
        )


def is_activitypub_requested(req: Request) -> bool:
    accept_value = req.headers.get("accept")
    if not accept_value:
        return False
    for val in {
        "application/ld+json",
        "application/activity+json",
    }:
        if accept_value.startswith(val):
            return True

    return False


def verify_password(pwd: str) -> bool:
    return bcrypt.checkpw(pwd.encode(), CONFIG.admin_password)


CONFIG = load_config()
DOMAIN = CONFIG.domain
_SCHEME = "https" if CONFIG.https else "http"
ID = f"{_SCHEME}://{DOMAIN}"

# When running the app on a path, the ID may be set by the config, but in this
# case, a valid webfinger must be served on the root domain
if CONFIG.id:
    ID = CONFIG.id
USERNAME = CONFIG.username

# Allow using @handle@webfinger-domain.tld while hosting the server at domain.tld
WEBFINGER_DOMAIN = CONFIG.webfinger_domain or DOMAIN

MANUALLY_APPROVES_FOLLOWERS = CONFIG.manually_approves_followers
HIDES_FOLLOWERS = CONFIG.hides_followers
HIDES_FOLLOWING = CONFIG.hides_following
PRIVACY_REPLACE = None
if CONFIG.privacy_replace:
    PRIVACY_REPLACE = {pr.domain: pr.replace_by for pr in CONFIG.privacy_replace}

BLOCKED_SERVERS = {blocked_server.hostname for blocked_server in CONFIG.blocked_servers}
ALSO_KNOWN_AS = CONFIG.also_known_as
CUSTOM_CONTENT_SECURITY_POLICY = CONFIG.custom_content_security_policy

INBOX_RETENTION_DAYS = CONFIG.inbox_retention_days
SESSION_TIMEOUT = CONFIG.session_timeout
CUSTOM_FOOTER = (
    markdown(CONFIG.custom_footer.replace("{version}", VERSION))
    if CONFIG.custom_footer
    else None
)

BASE_URL = ID
DEBUG = CONFIG.debug
DB_PATH = CONFIG.sqlalchemy_database or ROOT_DIR / "data" / "microblogpub.db"
SQLALCHEMY_DATABASE_URL = f"sqlite:///{DB_PATH}"
KEY_PATH = (
    (ROOT_DIR / CONFIG.key_path) if CONFIG.key_path else ROOT_DIR / "data" / "key.pem"
)
EMOJIS = "😺 😸 😹 😻 😼 😽 🙀 😿 😾"
if CONFIG.emoji:
    EMOJIS = CONFIG.emoji

# Emoji template for the FE
EMOJI_TPL = (
    '<img src="{base_url}/static/twemoji/{filename}.svg" alt="{raw}" class="emoji">'
)

_load_emojis(ROOT_DIR, BASE_URL)

CODE_HIGHLIGHTING_THEME = CONFIG.code_highlighting_theme

MOVED_TO = _get_moved_to()


_NavBarItem = tuple[str, str]


class NavBarItems:
    EXTRA_NAVBAR_ITEMS: list[_NavBarItem] = []
    INDEX_NAVBAR_ITEM: _NavBarItem | None = None
    NOTES_PATH = "/"


def load_custom_routes() -> None:
    try:
        from data import custom_routes  # type: ignore  # noqa: F401
    except ImportError:
        pass

    for path, custom_handler in _CUSTOM_ROUTES.items():
        # If a handler wants to replace the root, move the index to /notes
        if path == "/":
            NavBarItems.NOTES_PATH = "/notes"
            NavBarItems.INDEX_NAVBAR_ITEM = (path, custom_handler.title)
        else:
            if custom_handler.show_in_navbar:
                NavBarItems.EXTRA_NAVBAR_ITEMS.append((path, custom_handler.title))


session_serializer = URLSafeTimedSerializer(
    CONFIG.secret,
    salt=f"{ID}.session",
)
csrf_serializer = URLSafeTimedSerializer(
    CONFIG.secret,
    salt=f"{ID}.csrf",
)


def generate_csrf_token() -> str:
    return csrf_serializer.dumps(secrets.token_hex(16))  # type: ignore


def verify_csrf_token(
    csrf_token: str = Form(),
    redirect_url: str | None = Form(None),
) -> None:
    please_try_again = "please try again"
    if redirect_url:
        please_try_again = f'<a href="{redirect_url}">please try again</a>'
    try:
        csrf_serializer.loads(csrf_token, max_age=CONFIG.csrf_token_exp)
    except (itsdangerous.BadData, itsdangerous.SignatureExpired):
        logger.exception("Failed to verify CSRF token")
        raise HTTPException(
            status_code=403,
            detail=f"The security token has expired, {please_try_again}",
        )
    return None


def hmac_sha256() -> hmac.HMAC:
    return hmac.new(CONFIG.secret.encode(), digestmod=hashlib.sha256)


stream_visibility_callback: _StreamVisibilityCallback
try:
    from data.stream import (  # type: ignore  # noqa: F401, E501
        custom_stream_visibility_callback,
    )

    stream_visibility_callback = custom_stream_visibility_callback
except ImportError:
    stream_visibility_callback = default_stream_visibility_callback
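
A hedged sketch (not part of the diff) of the minimal fields that load_config() needs to find in data/profile.toml before Config.parse_obj will accept it; every value below is a placeholder normally written by the setup wizard.

# Sketch only: the field names come from the Config model above, the values
# are illustrative placeholders.
import tomli

PROFILE_TOML = """
domain = "example.com"
username = "alice"
admin_password = "bcrypt-hash-goes-here"
name = "Alice"
summary = "<p>Hello</p>"
https = true
secret = "long-random-string"
"""

parsed = tomli.loads(PROFILE_TOML)  # what load_config() feeds to Config.parse_obj
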
155
app/customization.py
Normal file
@@ -0,0 +1,155 @@
from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
from typing import Callable

from fastapi import APIRouter
from fastapi import Depends
from fastapi import Request
from loguru import logger
from starlette.responses import JSONResponse

if TYPE_CHECKING:
    from app.ap_object import RemoteObject


_DATA_DIR = Path().parent.resolve() / "data"
_Handler = Callable[..., Any]


class HTMLPage:
    def __init__(
        self,
        title: str,
        html_file: str,
        show_in_navbar: bool,
    ) -> None:
        self.title = title
        self.html_file = _DATA_DIR / html_file
        self.show_in_navbar = show_in_navbar


class RawHandler:
    def __init__(
        self,
        title: str,
        handler: Any,
        show_in_navbar: bool,
    ) -> None:
        self.title = title
        self.handler = handler
        self.show_in_navbar = show_in_navbar


_CUSTOM_ROUTES: dict[str, HTMLPage | RawHandler] = {}


def register_html_page(
    path: str,
    *,
    title: str,
    html_file: str,
    show_in_navbar: bool = True,
) -> None:
    if path in _CUSTOM_ROUTES:
        raise ValueError(f"{path} is already registered")

    _CUSTOM_ROUTES[path] = HTMLPage(title, html_file, show_in_navbar)


def register_raw_handler(
    path: str,
    *,
    title: str,
    handler: _Handler,
    show_in_navbar: bool = True,
) -> None:
    if path in _CUSTOM_ROUTES:
        raise ValueError(f"{path} is already registered")

    _CUSTOM_ROUTES[path] = RawHandler(title, handler, show_in_navbar)


class ActivityPubResponse(JSONResponse):
    media_type = "application/activity+json"


def _custom_page_handler(path: str, html_page: HTMLPage) -> Any:
    from app import templates
    from app.actor import LOCAL_ACTOR
    from app.config import is_activitypub_requested
    from app.database import AsyncSession
    from app.database import get_db_session

    async def _handler(
        request: Request,
        db_session: AsyncSession = Depends(get_db_session),
    ) -> templates.TemplateResponse | ActivityPubResponse:
        if path == "/" and is_activitypub_requested(request):
            return ActivityPubResponse(LOCAL_ACTOR.ap_actor)

        return await templates.render_template(
            db_session,
            request,
            "custom_page.html",
            {
                "page_content": html_page.html_file.read_text(),
                "title": html_page.title,
            },
        )

    return _handler


def get_custom_router() -> APIRouter | None:
    if not _CUSTOM_ROUTES:
        return None

    router = APIRouter()

    for path, handler in _CUSTOM_ROUTES.items():
        if isinstance(handler, HTMLPage):
            router.add_api_route(
                path, _custom_page_handler(path, handler), methods=["GET"]
            )
        else:
            router.add_api_route(path, handler.handler)

    return router


@dataclass
class ObjectInfo:
    # Is it a reply?
    is_reply: bool

    # Is it a reply to an outbox object
    is_local_reply: bool

    # Is the object mentioning the local actor
    is_mention: bool

    # Is it from someone the local actor is following
    is_from_following: bool

    # List of hashtags, e.g. #microblogpub
    hashtags: list[str]

    # @dev@microblog.pub
    actor_handle: str

    remote_object: "RemoteObject"


_StreamVisibilityCallback = Callable[[ObjectInfo], bool]


def default_stream_visibility_callback(object_info: ObjectInfo) -> bool:
    result = (
        (not object_info.is_reply and object_info.is_from_following)
        or object_info.is_mention
        or object_info.is_local_reply
    )
    logger.info(f"{object_info=}/{result=}")
    return result
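
A possible data/custom_routes.py (not part of the diff) using the registration helpers above; config.py imports this module if it exists. The page title, path, and HTML file name are made up.

# Sketch only: registers a custom /about page served from data/about.html.
from app.customization import register_html_page

register_html_page(
    "/about",
    title="About this instance",
    html_file="about.html",  # resolved relative to the data/ directory
    show_in_navbar=True,
)
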
35
app/database.py
Normal file
@@ -0,0 +1,35 @@
from typing import Any
from typing import AsyncGenerator

from sqlalchemy import MetaData
from sqlalchemy import create_engine
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from app.config import DB_PATH
from app.config import DEBUG
from app.config import SQLALCHEMY_DATABASE_URL

engine = create_engine(
    SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False, "timeout": 15}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

DATABASE_URL = f"sqlite+aiosqlite:///{DB_PATH}"
async_engine = create_async_engine(
    DATABASE_URL, future=True, echo=DEBUG, connect_args={"timeout": 15}
)
async_session = sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False)

Base: Any = declarative_base()
metadata_obj = MetaData()


async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
    async with async_session() as session:
        try:
            yield session
        finally:
            await session.close()
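
A minimal sketch (not part of the diff) of how a FastAPI route would borrow an async session through the get_db_session dependency; the route path and query are made up for illustration.

# Sketch only: counts actors using the async session yielded by get_db_session.
from fastapi import Depends, FastAPI
from sqlalchemy import select

from app import models
from app.database import AsyncSession, get_db_session

app = FastAPI()


@app.get("/debug/actor-count")
async def actor_count(db_session: AsyncSession = Depends(get_db_session)) -> dict:
    actors = (await db_session.scalars(select(models.Actor))).all()
    return {"actors": len(actors)}
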
353
app/httpsig.py
Normal file
@@ -0,0 +1,353 @@
import base64
import hashlib
import json
import typing
from dataclasses import dataclass
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from typing import Any
from typing import Dict
from typing import MutableMapping
from typing import Optional
from urllib.parse import urlparse

import fastapi
import httpx
from cachetools import LFUCache
from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5
from dateutil.parser import parse
from loguru import logger
from sqlalchemy import select

from app import activitypub as ap
from app import config
from app.config import KEY_PATH
from app.database import AsyncSession
from app.database import get_db_session
from app.key import Key
from app.utils.datetime import now
from app.utils.url import is_hostname_blocked

_KEY_CACHE: MutableMapping[str, Key] = LFUCache(256)


def _build_signed_string(
    signed_headers: str,
    method: str,
    path: str,
    headers: Any,
    body_digest: str | None,
    sig_data: dict[str, Any],
) -> tuple[str, datetime | None]:
    signature_date: datetime | None = None
    out = []
    for signed_header in signed_headers.split(" "):
        if signed_header == "(created)":
            signature_date = datetime.fromtimestamp(int(sig_data["created"])).replace(
                tzinfo=timezone.utc
            )
        elif signed_header == "date":
            signature_date = parse(headers["date"])

        if signed_header == "(request-target)":
            out.append("(request-target): " + method.lower() + " " + path)
        elif signed_header == "digest" and body_digest:
            out.append("digest: " + body_digest)
        elif signed_header in ["(created)", "(expires)"]:
            out.append(
                signed_header
                + ": "
                + sig_data[signed_header[1 : len(signed_header) - 1]]
            )
        else:
            out.append(signed_header + ": " + headers[signed_header])
    return "\n".join(out), signature_date


def _parse_sig_header(val: Optional[str]) -> Optional[Dict[str, str]]:
    if not val:
        return None
    out = {}
    for data in val.split(","):
        k, v = data.split("=", 1)
        out[k] = v[1 : len(v) - 1]  # noqa: black conflict
    return out


def _verify_h(signed_string, signature, pubkey):
    signer = PKCS1_v1_5.new(pubkey)
    digest = SHA256.new()
    digest.update(signed_string.encode("utf-8"))
    return signer.verify(digest, signature)


def _body_digest(body: bytes) -> str:
    h = hashlib.new("sha256")
    h.update(body)  # type: ignore
    return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8")


async def _get_public_key(
    db_session: AsyncSession,
    key_id: str,
    should_skip_cache: bool = False,
) -> Key:
    if not should_skip_cache and (cached_key := _KEY_CACHE.get(key_id)):
        logger.info(f"Key {key_id} found in cache")
        return cached_key

    # Check if the key belongs to an actor already in DB
    from app import models

    existing_actor = (
        await db_session.scalars(
            select(models.Actor).where(models.Actor.ap_id == key_id.split("#")[0])
        )
    ).one_or_none()
    if not should_skip_cache:
        if existing_actor and existing_actor.public_key_id == key_id:
            k = Key(existing_actor.ap_id, key_id)
            k.load_pub(existing_actor.public_key_as_pem)
            logger.info(f"Found {key_id} on an existing actor")
            _KEY_CACHE[key_id] = k
            return k

    # Fetch it
    from app import activitypub as ap
    from app.actor import RemoteActor
    from app.actor import update_actor_if_needed

    # Without signing the request as if it's the first contact, the 2 servers
    # might race to fetch each other's key
    try:
        actor = await ap.fetch(key_id, disable_httpsig=True)
    except ap.ObjectUnavailableError:
        actor = await ap.fetch(key_id, disable_httpsig=False)

    if actor["type"] == "Key":
        # The Key is not embedded in the Person
        k = Key(actor["owner"], actor["id"])
        k.load_pub(actor["publicKeyPem"])
    else:
        k = Key(actor["id"], actor["publicKey"]["id"])
        k.load_pub(actor["publicKey"]["publicKeyPem"])

    # Ensure the right key was fetched
    # TODO: some servers have the key ID `http://` but fetching it returns `https`
    if key_id not in [k.key_id(), k.owner]:
        raise ValueError(
            f"failed to fetch requested key {key_id}: got {actor['publicKey']}"
        )

    if should_skip_cache and actor["type"] != "Key" and existing_actor:
        # We had to skip the cache, which means the actor key probably changed
        # and we want to update our cached version
        await update_actor_if_needed(db_session, existing_actor, RemoteActor(actor))
        await db_session.commit()

    _KEY_CACHE[key_id] = k
    return k


@dataclass(frozen=True)
class HTTPSigInfo:
    has_valid_signature: bool
    signed_by_ap_actor_id: str | None = None

    is_ap_actor_gone: bool = False
    is_unsupported_algorithm: bool = False
    is_expired: bool = False
    is_from_blocked_server: bool = False

    server: str | None = None


async def httpsig_checker(
    request: fastapi.Request,
    db_session: AsyncSession = fastapi.Depends(get_db_session),
) -> HTTPSigInfo:
    body = await request.body()

    hsig = _parse_sig_header(request.headers.get("Signature"))
    if not hsig:
        logger.info("No HTTP signature found")
        return HTTPSigInfo(has_valid_signature=False)

    try:
        key_id = hsig["keyId"]
    except KeyError:
        logger.info("Missing keyId")
        return HTTPSigInfo(
            has_valid_signature=False,
        )

    server = urlparse(key_id).hostname
    if is_hostname_blocked(server):
        return HTTPSigInfo(
            has_valid_signature=False,
            server=server,
            is_from_blocked_server=True,
        )

    if (alg := hsig.get("algorithm")) not in ["rsa-sha256", "hs2019"]:
        logger.info(f"Unsupported HTTP sig algorithm: {alg}")
        return HTTPSigInfo(
            has_valid_signature=False,
            is_unsupported_algorithm=True,
            server=server,
        )

    # Try to drop Delete activity spam early on; this prevents making an extra
    # HTTP request trying to fetch an unavailable actor to verify the HTTP sig
    try:
        if request.method == "POST" and request.url.path.endswith("/inbox"):
            from app import models  # TODO: solve this circular import

            activity = json.loads(body)
            actor_id = ap.get_id(activity["actor"])
            if (
                ap.as_list(activity["type"])[0] == "Delete"
                and actor_id == ap.get_id(activity["object"])
                and not (
                    await db_session.scalars(
                        select(models.Actor).where(
                            models.Actor.ap_id == actor_id,
                        )
                    )
                ).one_or_none()
            ):
                logger.info(f"Dropping Delete activity early for {body=}")
                raise fastapi.HTTPException(status_code=202)
    except fastapi.HTTPException as http_exc:
        raise http_exc
    except Exception:
        logger.exception("Failed to check for Delete spam")

    # logger.debug(f"hsig={hsig}")
    signed_string, signature_date = _build_signed_string(
        hsig["headers"],
        request.method,
        request.url.path,
        request.headers,
        _body_digest(body) if body else None,
        hsig,
    )

    # Sanity checks on the signature date
    if signature_date is None or now() - signature_date > timedelta(hours=12):
        logger.info(f"Signature expired: {signature_date=}")
        return HTTPSigInfo(
            has_valid_signature=False,
            is_expired=True,
            server=server,
        )

    try:
        k = await _get_public_key(db_session, hsig["keyId"])
    except (ap.ObjectIsGoneError, ap.ObjectNotFoundError):
        logger.info("Actor is gone or not found")
        return HTTPSigInfo(has_valid_signature=False, is_ap_actor_gone=True)
    except Exception:
        logger.exception(f'Failed to fetch HTTP sig key {hsig["keyId"]}')
        return HTTPSigInfo(has_valid_signature=False)

    has_valid_signature = _verify_h(
        signed_string, base64.b64decode(hsig["signature"]), k.pubkey
    )

    # If the signature is not valid, we may have to update the cached actor
    if not has_valid_signature:
        logger.info("Invalid signature, trying to refresh actor")
        try:
            k = await _get_public_key(db_session, hsig["keyId"], should_skip_cache=True)
            has_valid_signature = _verify_h(
                signed_string, base64.b64decode(hsig["signature"]), k.pubkey
            )
        except Exception:
            logger.exception("Failed to refresh actor")

    httpsig_info = HTTPSigInfo(
        has_valid_signature=has_valid_signature,
        signed_by_ap_actor_id=k.owner,
        server=server,
    )
    logger.info(f"Valid HTTP signature for {httpsig_info.signed_by_ap_actor_id}")
    return httpsig_info


async def enforce_httpsig(
    request: fastapi.Request,
    httpsig_info: HTTPSigInfo = fastapi.Depends(httpsig_checker),
) -> HTTPSigInfo:
    """FastAPI Depends"""
    if httpsig_info.is_from_blocked_server:
        logger.warning(f"{httpsig_info.server} is blocked")
        raise fastapi.HTTPException(status_code=403, detail="Blocked")

    if not httpsig_info.has_valid_signature:
        logger.warning(f"Invalid HTTP sig {httpsig_info=}")
        body = await request.body()
        logger.info(f"{body=}")

        # Special case for Mastodon instances that keep resending Delete
        # activities for actors we don't know about if we raise a 401
        if httpsig_info.is_ap_actor_gone:
            logger.info("Let's make Mastodon happy, returning a 202")
            raise fastapi.HTTPException(status_code=202)

        detail = "Invalid HTTP sig"
        if httpsig_info.is_unsupported_algorithm:
            detail = "Unsupported signature algorithm, must be rsa-sha256 or hs2019"
        elif httpsig_info.is_expired:
            detail = "Signature expired"

        raise fastapi.HTTPException(status_code=401, detail=detail)

    return httpsig_info


class HTTPXSigAuth(httpx.Auth):
    def __init__(self, key: Key) -> None:
        self.key = key

    def auth_flow(
        self, r: httpx.Request
    ) -> typing.Generator[httpx.Request, httpx.Response, None]:
        logger.info(f"keyid={self.key.key_id()}")

        bodydigest = None
        if r.content:
            bh = hashlib.new("sha256")
            bh.update(r.content)
            bodydigest = "SHA-256=" + base64.b64encode(bh.digest()).decode("utf-8")

        date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")
        r.headers["Date"] = date
        if bodydigest:
            r.headers["Digest"] = bodydigest
            sigheaders = "(request-target) user-agent host date digest content-type"
        else:
            sigheaders = "(request-target) user-agent host date accept"

        to_be_signed, _ = _build_signed_string(
            sigheaders, r.method, r.url.path, r.headers, bodydigest, {}
        )
        if not self.key.privkey:
            raise ValueError("Should never happen")
        signer = PKCS1_v1_5.new(self.key.privkey)
        digest = SHA256.new()
        digest.update(to_be_signed.encode("utf-8"))
        sig = base64.b64encode(signer.sign(digest)).decode()

        key_id = self.key.key_id()
        sig_value = f'keyId="{key_id}",algorithm="rsa-sha256",headers="{sigheaders}",signature="{sig}"'  # noqa: E501
        logger.debug(f"signed request {sig_value=}")
        r.headers["Signature"] = sig_value
        yield r


k = Key(config.ID, f"{config.ID}#main-key")
k.load(KEY_PATH.read_text())
auth = HTTPXSigAuth(k)
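
A hedged sketch (not part of the diff) of using the module-level HTTPXSigAuth instance to make an HTTP-signed GET with httpx; the URL and helper name are illustrative only.

# Sketch only: auth_flow() above adds Date, Digest (for bodies) and Signature
# headers before the request leaves the client.
import httpx

from app import config
from app.httpsig import auth


async def fetch_signed(url: str) -> dict:
    async with httpx.AsyncClient() as client:
        resp = await client.get(
            url,
            headers={
                "User-Agent": config.USER_AGENT,
                "Accept": config.AP_CONTENT_TYPE,
            },
            auth=auth,
        )
        resp.raise_for_status()
        return resp.json()
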
165
app/incoming_activities.py
Normal file
@@ -0,0 +1,165 @@
import asyncio
import traceback
from datetime import datetime
from datetime import timedelta

from loguru import logger
from sqlalchemy import func
from sqlalchemy import select

from app import activitypub as ap
from app import httpsig
from app import ldsig
from app import models
from app.boxes import save_to_inbox
from app.database import AsyncSession
from app.utils.datetime import now
from app.utils.workers import Worker

_MAX_RETRIES = 8


async def new_ap_incoming_activity(
    db_session: AsyncSession,
    httpsig_info: httpsig.HTTPSigInfo,
    raw_object: ap.RawObject,
) -> models.IncomingActivity | None:
    ap_id: str
    if "id" not in raw_object or ap.as_list(raw_object["type"])[0] in ap.ACTOR_TYPES:
        if "@context" not in raw_object:
            logger.warning(f"Dropping invalid object: {raw_object}")
            return None
        else:
            # This is a transient object, build the JSON-LD hash to use as the ID
            ap_id = ldsig._doc_hash(raw_object)
    else:
        ap_id = ap.get_id(raw_object)

    # TODO(ts): dedup first

    incoming_activity = models.IncomingActivity(
        sent_by_ap_actor_id=httpsig_info.signed_by_ap_actor_id,
        ap_id=ap_id,
        ap_object=raw_object,
    )
    db_session.add(incoming_activity)
    await db_session.commit()
    await db_session.refresh(incoming_activity)
    return incoming_activity


def _exp_backoff(tries: int) -> datetime:
    seconds = 2 * (2 ** (tries - 1))
    return now() + timedelta(seconds=seconds)


def _set_next_try(
    outgoing_activity: models.IncomingActivity,
    next_try: datetime | None = None,
) -> None:
    if not outgoing_activity.tries:
        raise ValueError("Should never happen")

    if outgoing_activity.tries >= _MAX_RETRIES:
        outgoing_activity.is_errored = True
        outgoing_activity.next_try = None
    else:
        outgoing_activity.next_try = next_try or _exp_backoff(outgoing_activity.tries)


async def fetch_next_incoming_activity(
    db_session: AsyncSession,
) -> models.IncomingActivity | None:
    where = [
        models.IncomingActivity.next_try <= now(),
        models.IncomingActivity.is_errored.is_(False),
        models.IncomingActivity.is_processed.is_(False),
    ]
    q_count = await db_session.scalar(
        select(func.count(models.IncomingActivity.id)).where(*where)
    )
    if q_count > 0:
        logger.info(f"{q_count} incoming activities ready to process")
    if not q_count:
        # logger.debug("No activities to process")
        return None

    next_activity = (
        await db_session.execute(
            select(models.IncomingActivity)
            .where(*where)
            .limit(1)
            .order_by(models.IncomingActivity.next_try.asc())
        )
    ).scalar_one()

    return next_activity


async def process_next_incoming_activity(
    db_session: AsyncSession,
    next_activity: models.IncomingActivity,
) -> None:
    logger.info(
        f"incoming_activity={next_activity.ap_object}/"
        f"{next_activity.sent_by_ap_actor_id}"
    )

    next_activity.tries = next_activity.tries + 1
    next_activity.last_try = now()
    await db_session.commit()

    if next_activity.ap_object and next_activity.sent_by_ap_actor_id:
        try:
            async with db_session.begin_nested():
                await asyncio.wait_for(
                    save_to_inbox(
                        db_session,
                        next_activity.ap_object,
                        next_activity.sent_by_ap_actor_id,
                    ),
                    timeout=60,
                )
        except asyncio.exceptions.TimeoutError:
            logger.error("Activity took too long to process")
            await db_session.rollback()
            await db_session.refresh(next_activity)
            next_activity.error = traceback.format_exc()
            _set_next_try(next_activity)
        except Exception:
            logger.exception("Failed")
            await db_session.rollback()
            await db_session.refresh(next_activity)
            next_activity.error = traceback.format_exc()
            _set_next_try(next_activity)
        else:
            logger.info("Success")
            next_activity.is_processed = True

    # FIXME: webmention support

    await db_session.commit()
    return None


class IncomingActivityWorker(Worker[models.IncomingActivity]):
    async def process_message(
        self,
        db_session: AsyncSession,
        next_activity: models.IncomingActivity,
    ) -> None:
        await process_next_incoming_activity(db_session, next_activity)

    async def get_next_message(
        self,
        db_session: AsyncSession,
    ) -> models.IncomingActivity | None:
        return await fetch_next_incoming_activity(db_session)


async def loop() -> None:
    await IncomingActivityWorker().run_forever()


if __name__ == "__main__":
    asyncio.run(loop())
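
Worked example (not part of the diff): the retry delay produced by _exp_backoff, seconds = 2 * 2 ** (tries - 1), before an activity is flagged as errored at _MAX_RETRIES = 8.

# tries:   1  2  3   4   5   6    7    8
# seconds: 2  4  8  16  32  64  128  256
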
552
app/indieauth.py
Normal file
@@ -0,0 +1,552 @@
import secrets
from dataclasses import dataclass
from datetime import timedelta
from datetime import timezone
from typing import Any

from fastapi import APIRouter
from fastapi import Depends
from fastapi import Form
from fastapi import HTTPException
from fastapi import Request
from fastapi.responses import JSONResponse
from fastapi.security import HTTPBasic
from fastapi.security import HTTPBasicCredentials
from loguru import logger
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import joinedload

from app import config
from app import models
from app import templates
from app.admin import user_session_or_redirect
from app.config import verify_csrf_token
from app.database import AsyncSession
from app.database import get_db_session
from app.redirect import redirect
from app.utils import indieauth
from app.utils.datetime import now

basic_auth = HTTPBasic()

router = APIRouter()


@router.get("/.well-known/oauth-authorization-server")
async def well_known_authorization_server(
    request: Request,
) -> dict[str, Any]:
    return {
        "issuer": config.ID + "/",
        "authorization_endpoint": request.url_for("indieauth_authorization_endpoint"),
        "token_endpoint": request.url_for("indieauth_token_endpoint"),
        "code_challenge_methods_supported": ["S256"],
        "revocation_endpoint": request.url_for("indieauth_revocation_endpoint"),
        "revocation_endpoint_auth_methods_supported": ["none"],
        "registration_endpoint": request.url_for("oauth_registration_endpoint"),
        "introspection_endpoint": request.url_for("oauth_introspection_endpoint"),
    }


class OAuthRegisterClientRequest(BaseModel):
    client_name: str
    redirect_uris: list[str] | str

    client_uri: str | None = None
    logo_uri: str | None = None
    scope: str | None = None


@router.post("/oauth/register")
async def oauth_registration_endpoint(
    register_client_request: OAuthRegisterClientRequest,
    db_session: AsyncSession = Depends(get_db_session),
) -> JSONResponse:
    """Implements OAuth 2.0 Dynamic Registration."""

    client = models.OAuthClient(
        client_name=register_client_request.client_name,
        redirect_uris=[register_client_request.redirect_uris]
        if isinstance(register_client_request.redirect_uris, str)
        else register_client_request.redirect_uris,
        client_uri=register_client_request.client_uri,
        logo_uri=register_client_request.logo_uri,
        scope=register_client_request.scope,
        client_id=secrets.token_hex(16),
        client_secret=secrets.token_hex(32),
    )

    db_session.add(client)
    await db_session.commit()

    return JSONResponse(
        content={
            **register_client_request.dict(),
            "client_id_issued_at": int(client.created_at.timestamp()),  # type: ignore
            "grant_types": ["authorization_code", "refresh_token"],
            "client_secret_expires_at": 0,
            "client_id": client.client_id,
            "client_secret": client.client_secret,
        },
        status_code=201,
    )


@router.get("/auth")
async def indieauth_authorization_endpoint(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
    _: None = Depends(user_session_or_redirect),
) -> templates.TemplateResponse:
    me = request.query_params.get("me")
    client_id = request.query_params.get("client_id")
    redirect_uri = request.query_params.get("redirect_uri")
    state = request.query_params.get("state", "")
    response_type = request.query_params.get("response_type", "id")
    scope = request.query_params.get("scope", "").split()
    code_challenge = request.query_params.get("code_challenge", "")
    code_challenge_method = request.query_params.get("code_challenge_method", "")

    # Check if the authorization request is coming from an OAuth client
    registered_client = (
        await db_session.scalars(
            select(models.OAuthClient).where(
                models.OAuthClient.client_id == client_id,
            )
        )
    ).one_or_none()
    if registered_client:
        client = {
            "name": registered_client.client_name,
            "logo": registered_client.logo_uri,
            "url": registered_client.client_uri,
        }
    else:
        client = await indieauth.get_client_id_data(client_id)  # type: ignore

    return await templates.render_template(
        db_session,
        request,
        "indieauth_flow.html",
        dict(
            client=client,
            scopes=scope,
            redirect_uri=redirect_uri,
            state=state,
            response_type=response_type,
            client_id=client_id,
            me=me,
            code_challenge=code_challenge,
            code_challenge_method=code_challenge_method,
        ),
    )


@router.post("/admin/indieauth")
async def indieauth_flow(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
    csrf_check: None = Depends(verify_csrf_token),
    _: None = Depends(user_session_or_redirect),
) -> templates.TemplateResponse:
    form_data = await request.form()
    logger.info(f"{form_data=}")

    # Params needed for the redirect
    redirect_uri = form_data["redirect_uri"]
    code = secrets.token_urlsafe(32)
    iss = config.ID + "/"
    state = form_data["state"]

    scope = " ".join(form_data.getlist("scopes"))
    client_id = form_data["client_id"]

    # TODO: Ensure that me is correct
    # me = form_data.get("me")

    # XXX: should always be code
    # response_type = form_data["response_type"]

    code_challenge = form_data["code_challenge"]
    code_challenge_method = form_data["code_challenge_method"]

    auth_request = models.IndieAuthAuthorizationRequest(
        code=code,
        scope=scope,
        redirect_uri=redirect_uri,
        client_id=client_id,
        code_challenge=code_challenge,
        code_challenge_method=code_challenge_method,
    )

    db_session.add(auth_request)
    await db_session.commit()

    return await redirect(
        request, db_session, redirect_uri + f"?code={code}&state={state}&iss={iss}"
    )


async def _check_auth_code(
    db_session: AsyncSession,
    code: str,
    client_id: str,
    redirect_uri: str,
    code_verifier: str | None,
) -> tuple[bool, models.IndieAuthAuthorizationRequest | None]:
    auth_code_req = (
        await db_session.scalars(
            select(models.IndieAuthAuthorizationRequest).where(
                models.IndieAuthAuthorizationRequest.code == code
            )
        )
    ).one_or_none()
    if not auth_code_req:
        return False, None
    if auth_code_req.is_used:
        logger.info("code was already used")
        return False, None
    #
    if now() > auth_code_req.created_at.replace(tzinfo=timezone.utc) + timedelta(
        seconds=120
    ):
        logger.info("Auth code request expired")
        return False, None

    if (
        auth_code_req.redirect_uri != redirect_uri
        or auth_code_req.client_id != client_id
    ):
        logger.info("redirect_uri/client_id does not match request")
        return False, None

    auth_code_req.is_used = True
    await db_session.commit()

    return True, auth_code_req


@router.post("/auth")
async def indieauth_reedem_auth_code(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
) -> JSONResponse:
    form_data = await request.form()
    logger.info(f"{form_data=}")
    grant_type = form_data.get("grant_type", "authorization_code")
    if grant_type != "authorization_code":
        raise ValueError(f"Invalid grant_type {grant_type}")

    code = form_data["code"]

    # These must match the params from the first request
    client_id = form_data["client_id"]
    redirect_uri = form_data["redirect_uri"]
    # code_verifier is optional for backward compat
    code_verifier = form_data.get("code_verifier")

    is_code_valid, _ = await _check_auth_code(
        db_session,
        code=code,
        client_id=client_id,
        redirect_uri=redirect_uri,
        code_verifier=code_verifier,
    )
    if is_code_valid:
        return JSONResponse(
            content={
                "me": config.ID + "/",
            },
            status_code=200,
        )
    else:
        return JSONResponse(
            content={"error": "invalid_grant"},
            status_code=400,
        )


@router.post("/token")
async def indieauth_token_endpoint(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
) -> JSONResponse:
    form_data = await request.form()
    logger.info(f"{form_data=}")
    grant_type = form_data.get("grant_type", "authorization_code")
    if grant_type not in ["authorization_code", "refresh_token"]:
        raise ValueError(f"Invalid grant_type {grant_type}")

    # These must match the params from the first request
    client_id = form_data["client_id"]
    code_verifier = form_data.get("code_verifier")

    if grant_type == "authorization_code":
        code = form_data["code"]
        redirect_uri = form_data["redirect_uri"]
        # code_verifier is optional for backward compat
        is_code_valid, auth_code_request = await _check_auth_code(
            db_session,
            code=code,
            client_id=client_id,
            redirect_uri=redirect_uri,
            code_verifier=code_verifier,
        )
        if not is_code_valid or (auth_code_request and not auth_code_request.scope):
            return JSONResponse(
                content={"error": "invalid_grant"},
                status_code=400,
            )

    elif grant_type == "refresh_token":
        refresh_token = form_data["refresh_token"]
        access_token = (
            await db_session.scalars(
                select(models.IndieAuthAccessToken)
                .where(
                    models.IndieAuthAccessToken.refresh_token == refresh_token,
                    models.IndieAuthAccessToken.was_refreshed.is_(False),
                )
                .options(
                    joinedload(
                        models.IndieAuthAccessToken.indieauth_authorization_request
                    )
                )
            )
        ).one_or_none()
        if not access_token:
            raise ValueError("invalid refresh token")

        if access_token.indieauth_authorization_request.client_id != client_id:
            raise ValueError("invalid client ID")

        auth_code_request = access_token.indieauth_authorization_request
        access_token.was_refreshed = True

    if not auth_code_request:
        raise ValueError("Should never happen")

    access_token = models.IndieAuthAccessToken(
        indieauth_authorization_request_id=auth_code_request.id,
        access_token=secrets.token_urlsafe(32),
        refresh_token=secrets.token_urlsafe(32),
        expires_in=3600,
        scope=auth_code_request.scope,
    )
    db_session.add(access_token)
    await db_session.commit()

    return JSONResponse(
        content={
            "access_token": access_token.access_token,
            "refresh_token": access_token.refresh_token,
            "token_type": "Bearer",
            "scope": auth_code_request.scope,
            "me": config.ID + "/",
            "expires_in": 3600,
        },
        status_code=200,
    )


async def _check_access_token(
    db_session: AsyncSession,
    token: str,
) -> tuple[bool, models.IndieAuthAccessToken | None]:
    access_token_info = (
        await db_session.scalars(
            select(models.IndieAuthAccessToken)
            .where(models.IndieAuthAccessToken.access_token == token)
            .options(
                joinedload(models.IndieAuthAccessToken.indieauth_authorization_request)
            )
        )
    ).one_or_none()
    if not access_token_info:
        return False, None

    if access_token_info.is_revoked:
        logger.info("Access token is revoked")
        return False, None

    if now() > access_token_info.created_at.replace(tzinfo=timezone.utc) + timedelta(
        seconds=access_token_info.expires_in
    ):
        logger.info("Access token has expired")
        return False, None

    return True, access_token_info


@dataclass(frozen=True)
class AccessTokenInfo:
    scopes: list[str]
    client_id: str | None
    access_token: str
    exp: int


async def verify_access_token(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
) -> AccessTokenInfo:
    token = request.headers.get("Authorization", "").removeprefix("Bearer ")

    # Check if the token is within the form data
    if not token:
        form_data = await request.form()
        if "access_token" in form_data:
            token = form_data.get("access_token")

    is_token_valid, access_token = await _check_access_token(db_session, token)
    if not is_token_valid:
        raise HTTPException(
            detail="Invalid access token",
            status_code=401,
        )

    if not access_token or not access_token.scope:
        raise ValueError("Should never happen")

    return AccessTokenInfo(
        scopes=access_token.scope.split(),
        client_id=(
            access_token.indieauth_authorization_request.client_id
            if access_token.indieauth_authorization_request
            else None
        ),
        access_token=access_token.access_token,
        exp=int(
            (
                access_token.created_at.replace(tzinfo=timezone.utc)
                + timedelta(seconds=access_token.expires_in)
            ).timestamp()
        ),
    )


async def check_access_token(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
) -> AccessTokenInfo | None:
    token = request.headers.get("Authorization", "").removeprefix("Bearer ")
    if not token:
        return None

    is_token_valid, access_token = await _check_access_token(db_session, token)
    if not is_token_valid:
        return None

    if not access_token or not access_token.scope:
        raise ValueError("Should never happen")

    access_token_info = AccessTokenInfo(
        scopes=access_token.scope.split(),
        client_id=(
            access_token.indieauth_authorization_request.client_id
            if access_token.indieauth_authorization_request
            else None
        ),
        access_token=access_token.access_token,
        exp=int(
            (
                access_token.created_at.replace(tzinfo=timezone.utc)
                + timedelta(seconds=access_token.expires_in)
            ).timestamp()
        ),
    )

    logger.info(
        "Authenticated with access token from client_id="
        f"{access_token_info.client_id} scopes={access_token.scope}"
    )

    return access_token_info


async def enforce_access_token(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
) -> AccessTokenInfo:
    maybe_access_token_info = await check_access_token(request, db_session)
    if not maybe_access_token_info:
        raise HTTPException(status_code=401, detail="access token required")

    return maybe_access_token_info


@router.post("/revoke_token")
async def indieauth_revocation_endpoint(
    request: Request,
    token: str = Form(),
    db_session: AsyncSession = Depends(get_db_session),
) -> JSONResponse:

    is_token_valid, token_info = await _check_access_token(db_session, token)
    if is_token_valid:
        if not token_info:
            raise ValueError("Should never happen")

        token_info.is_revoked = True
        await db_session.commit()

    return JSONResponse(
        content={},
        status_code=200,
    )


@router.post("/token_introspection")
async def oauth_introspection_endpoint(
    request: Request,
    credentials: HTTPBasicCredentials = Depends(basic_auth),
    db_session: AsyncSession = Depends(get_db_session),
    token: str = Form(),
) -> JSONResponse:
    registered_client = (
        await db_session.scalars(
            select(models.OAuthClient).where(
                models.OAuthClient.client_id == credentials.username,
                models.OAuthClient.client_secret == credentials.password,
            )
        )
    ).one_or_none()
    if not registered_client:
        raise HTTPException(status_code=401, detail="unauthenticated")

    access_token = (
        await db_session.scalars(
            select(models.IndieAuthAccessToken)
            .where(models.IndieAuthAccessToken.access_token == token)
            .join(
                models.IndieAuthAuthorizationRequest,
                models.IndieAuthAccessToken.indieauth_authorization_request_id
                == models.IndieAuthAuthorizationRequest.id,
            )
            .where(
                models.IndieAuthAuthorizationRequest.client_id == credentials.username
            )
        )
    ).one_or_none()
    if not access_token:
        return JSONResponse(content={"active": False})

    is_token_valid, _ = await _check_access_token(db_session, token)
    if not is_token_valid:
        return JSONResponse(content={"active": False})

    return JSONResponse(
        content={
            "active": True,
            "client_id": credentials.username,
            "scope": access_token.scope,
            "exp": int(
                (
                    access_token.created_at.replace(tzinfo=timezone.utc)
                    + timedelta(seconds=access_token.expires_in)
                ).timestamp()
            ),
        },
        status_code=200,
    )
75
app/key.py
Normal file
75
app/key.py
Normal file
|
@ -0,0 +1,75 @@
|
||||||
|
import base64
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from Crypto.PublicKey import RSA
|
||||||
|
from Crypto.Util import number
|
||||||
|
|
||||||
|
|
||||||
|
def generate_key(key_path: Path) -> None:
|
||||||
|
if key_path.exists():
|
||||||
|
raise ValueError(f"Key at {key_path} already exists")
|
||||||
|
k = RSA.generate(2048)
|
||||||
|
privkey_pem = k.exportKey("PEM").decode("utf-8")
|
||||||
|
key_path.write_text(privkey_pem)
|
||||||
|
|
||||||
|
|
||||||
|
def get_pubkey_as_pem(key_path: Path) -> str:
|
||||||
|
text = key_path.read_text()
|
||||||
|
return RSA.import_key(text).public_key().export_key("PEM").decode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
class Key(object):
|
||||||
|
DEFAULT_KEY_SIZE = 2048
|
||||||
|
|
||||||
|
def __init__(self, owner: str, id_: str | None = None) -> None:
|
||||||
|
self.owner = owner
|
||||||
|
self.privkey_pem: str | None = None
|
||||||
|
self.pubkey_pem: str | None = None
|
||||||
|
self.privkey: RSA.RsaKey | None = None
|
||||||
|
self.pubkey: RSA.RsaKey | None = None
|
||||||
|
self.id_ = id_
|
||||||
|
|
||||||
|
def load_pub(self, pubkey_pem: str) -> None:
|
||||||
|
self.pubkey_pem = pubkey_pem
|
||||||
|
self.pubkey = RSA.importKey(pubkey_pem)
|
||||||
|
|
||||||
|
def load(self, privkey_pem: str) -> None:
|
||||||
|
self.privkey_pem = privkey_pem
|
||||||
|
self.privkey = RSA.importKey(self.privkey_pem)
|
||||||
|
self.pubkey_pem = self.privkey.publickey().exportKey("PEM").decode("utf-8")
|
||||||
|
|
||||||
|
def new(self) -> None:
|
||||||
|
k = RSA.generate(self.DEFAULT_KEY_SIZE)
|
||||||
|
self.privkey_pem = k.exportKey("PEM").decode("utf-8")
|
||||||
|
self.pubkey_pem = k.publickey().exportKey("PEM").decode("utf-8")
|
||||||
|
self.privkey = k
|
||||||
|
|
||||||
|
def key_id(self) -> str:
|
||||||
|
return self.id_ or f"{self.owner}#main-key"
|
||||||
|
|
||||||
|
def to_dict(self) -> dict[str, Any]:
|
||||||
|
return {
|
||||||
|
"id": self.key_id(),
|
||||||
|
"owner": self.owner,
|
||||||
|
"publicKeyPem": self.pubkey_pem,
|
||||||
|
"type": "Key",
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_dict(cls, data):
|
||||||
|
try:
|
||||||
|
k = cls(data["owner"], data["id"])
|
||||||
|
k.load_pub(data["publicKeyPem"])
|
||||||
|
except KeyError:
|
||||||
|
raise ValueError(f"bad key data {data!r}")
|
||||||
|
return k
|
||||||
|
|
||||||
|
def to_magic_key(self) -> str:
|
||||||
|
mod = base64.urlsafe_b64encode(
|
||||||
|
number.long_to_bytes(self.privkey.n) # type: ignore
|
||||||
|
).decode("utf-8")
|
||||||
|
pubexp = base64.urlsafe_b64encode(
|
||||||
|
number.long_to_bytes(self.privkey.e) # type: ignore
|
||||||
|
).decode("utf-8")
|
||||||
|
return f"data:application/magic-public-key,RSA.{mod}.{pubexp}"
|
98
app/ldsig.py
Normal file
98
app/ldsig.py
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import typing
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import pyld # type: ignore
|
||||||
|
from Crypto.Hash import SHA256
|
||||||
|
from Crypto.Signature import PKCS1_v1_5
|
||||||
|
from loguru import logger
|
||||||
|
from pyld import jsonld # type: ignore
|
||||||
|
|
||||||
|
from app import activitypub as ap
|
||||||
|
from app.database import AsyncSession
|
||||||
|
from app.httpsig import _get_public_key
|
||||||
|
|
||||||
|
if typing.TYPE_CHECKING:
|
||||||
|
from app.key import Key
|
||||||
|
|
||||||
|
|
||||||
|
requests_loader = pyld.documentloader.requests.requests_document_loader()
|
||||||
|
|
||||||
|
|
||||||
|
def _loader(url, options={}):
|
||||||
|
# See https://github.com/digitalbazaar/pyld/issues/133
|
||||||
|
options["headers"]["Accept"] = "application/ld+json"
|
||||||
|
|
||||||
|
# XXX: temp fix/hack is it seems to be down for now
|
||||||
|
if url == "https://w3id.org/identity/v1":
|
||||||
|
url = (
|
||||||
|
"https://raw.githubusercontent.com/web-payments/web-payments.org"
|
||||||
|
"/master/contexts/identity-v1.jsonld"
|
||||||
|
)
|
||||||
|
return requests_loader(url, options)
|
||||||
|
|
||||||
|
|
||||||
|
pyld.jsonld.set_document_loader(_loader)
|
||||||
|
|
||||||
|
|
||||||
|
def _options_hash(doc: ap.RawObject) -> str:
|
||||||
|
doc = dict(doc["signature"])
|
||||||
|
for k in ["type", "id", "signatureValue"]:
|
||||||
|
if k in doc:
|
||||||
|
del doc[k]
|
||||||
|
doc["@context"] = "https://w3id.org/security/v1"
|
||||||
|
normalized = jsonld.normalize(
|
||||||
|
doc, {"algorithm": "URDNA2015", "format": "application/nquads"}
|
||||||
|
)
|
||||||
|
h = hashlib.new("sha256")
|
||||||
|
h.update(normalized.encode("utf-8"))
|
||||||
|
return h.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def _doc_hash(doc: ap.RawObject) -> str:
|
||||||
|
doc = dict(doc)
|
||||||
|
if "signature" in doc:
|
||||||
|
del doc["signature"]
|
||||||
|
normalized = jsonld.normalize(
|
||||||
|
doc, {"algorithm": "URDNA2015", "format": "application/nquads"}
|
||||||
|
)
|
||||||
|
h = hashlib.new("sha256")
|
||||||
|
h.update(normalized.encode("utf-8"))
|
||||||
|
return h.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
async def verify_signature(
|
||||||
|
db_session: AsyncSession,
|
||||||
|
doc: ap.RawObject,
|
||||||
|
) -> bool:
|
||||||
|
if "signature" not in doc:
|
||||||
|
logger.warning("The object does contain a signature")
|
||||||
|
return False
|
||||||
|
|
||||||
|
key_id = doc["signature"]["creator"]
|
||||||
|
key = await _get_public_key(db_session, key_id)
|
||||||
|
to_be_signed = _options_hash(doc) + _doc_hash(doc)
|
||||||
|
signature = doc["signature"]["signatureValue"]
|
||||||
|
signer = PKCS1_v1_5.new(key.pubkey or key.privkey) # type: ignore
|
||||||
|
digest = SHA256.new()
|
||||||
|
digest.update(to_be_signed.encode("utf-8"))
|
||||||
|
return signer.verify(digest, base64.b64decode(signature)) # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
def generate_signature(doc: ap.RawObject, key: "Key") -> None:
|
||||||
|
options = {
|
||||||
|
"type": "RsaSignature2017",
|
||||||
|
"creator": doc["actor"] + "#main-key",
|
||||||
|
"created": datetime.utcnow().replace(microsecond=0).isoformat() + "Z",
|
||||||
|
}
|
||||||
|
doc["signature"] = options
|
||||||
|
to_be_signed = _options_hash(doc) + _doc_hash(doc)
|
||||||
|
if not key.privkey:
|
||||||
|
raise ValueError(f"missing privkey on key {key!r}")
|
||||||
|
|
||||||
|
signer = PKCS1_v1_5.new(key.privkey)
|
||||||
|
digest = SHA256.new()
|
||||||
|
digest.update(to_be_signed.encode("utf-8"))
|
||||||
|
sig = base64.b64encode(signer.sign(digest)) # type: ignore
|
||||||
|
options["signatureValue"] = sig.decode("utf-8")
|
46
app/lookup.py
Normal file
46
app/lookup.py
Normal file
|
@ -0,0 +1,46 @@
|
||||||
|
import mf2py # type: ignore
|
||||||
|
|
||||||
|
from app import activitypub as ap
|
||||||
|
from app import webfinger
|
||||||
|
from app.actor import Actor
|
||||||
|
from app.actor import RemoteActor
|
||||||
|
from app.ap_object import RemoteObject
|
||||||
|
from app.database import AsyncSession
|
||||||
|
from app.source import _MENTION_REGEX
|
||||||
|
|
||||||
|
|
||||||
|
async def lookup(db_session: AsyncSession, query: str) -> Actor | RemoteObject:
|
||||||
|
query = query.strip()
|
||||||
|
if query.startswith("@") or _MENTION_REGEX.match("@" + query):
|
||||||
|
query = await webfinger.get_actor_url(query) # type: ignore # None check below
|
||||||
|
|
||||||
|
if not query:
|
||||||
|
raise ap.NotAnObjectError(query)
|
||||||
|
|
||||||
|
try:
|
||||||
|
ap_obj = await ap.fetch(query)
|
||||||
|
except ap.NotAnObjectError as not_an_object_error:
|
||||||
|
resp = not_an_object_error.resp
|
||||||
|
if not resp:
|
||||||
|
raise ap.NotAnObjectError(query)
|
||||||
|
|
||||||
|
alternate_obj = None
|
||||||
|
if resp.headers.get("content-type", "").startswith("text/html"):
|
||||||
|
for alternate in mf2py.parse(doc=resp.text).get("alternates", []):
|
||||||
|
if alternate.get("type") == "application/activity+json":
|
||||||
|
alternate_obj = await ap.fetch(alternate["url"])
|
||||||
|
|
||||||
|
if alternate_obj:
|
||||||
|
ap_obj = alternate_obj
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
if ap.as_list(ap_obj["type"])[0] in ap.ACTOR_TYPES:
|
||||||
|
return RemoteActor(ap_obj)
|
||||||
|
else:
|
||||||
|
# Some software return objects wrapped in a Create activity (like
|
||||||
|
# python-federation)
|
||||||
|
if ap.as_list(ap_obj["type"])[0] == "Create":
|
||||||
|
ap_obj = await ap.get_object(ap_obj)
|
||||||
|
|
||||||
|
return await RemoteObject.from_raw_object(ap_obj)
|
1728
app/main.py
Normal file
1728
app/main.py
Normal file
File diff suppressed because it is too large
Load diff
49
app/media.py
Normal file
49
app/media.py
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
import base64
|
||||||
|
import time
|
||||||
|
|
||||||
|
from app.config import BASE_URL
|
||||||
|
from app.config import hmac_sha256
|
||||||
|
|
||||||
|
SUPPORTED_RESIZE = [50, 740]
|
||||||
|
EXPIRY_PERIOD = 86400
|
||||||
|
EXPIRY_LENGTH = 7
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidProxySignatureError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def proxied_media_sig(expires: int, url: str) -> str:
|
||||||
|
hm = hmac_sha256()
|
||||||
|
hm.update(f"{expires}".encode())
|
||||||
|
hm.update(b"|")
|
||||||
|
hm.update(url.encode())
|
||||||
|
return base64.urlsafe_b64encode(hm.digest()).decode()
|
||||||
|
|
||||||
|
|
||||||
|
def verify_proxied_media_sig(expires: int, url: str, sig: str) -> None:
|
||||||
|
now = int(time.time() / EXPIRY_PERIOD)
|
||||||
|
expected = proxied_media_sig(expires, url)
|
||||||
|
if now > expires or sig != expected:
|
||||||
|
raise InvalidProxySignatureError("invalid or expired media")
|
||||||
|
|
||||||
|
|
||||||
|
def proxied_media_url(url: str) -> str:
|
||||||
|
if url.startswith(BASE_URL):
|
||||||
|
return url
|
||||||
|
expires = int(time.time() / EXPIRY_PERIOD) + EXPIRY_LENGTH
|
||||||
|
sig = proxied_media_sig(expires, url)
|
||||||
|
|
||||||
|
return (
|
||||||
|
BASE_URL
|
||||||
|
+ f"/proxy/media/{expires}/{sig}/"
|
||||||
|
+ base64.urlsafe_b64encode(url.encode()).decode()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def resized_media_url(url: str, size: int) -> str:
|
||||||
|
if size not in SUPPORTED_RESIZE:
|
||||||
|
raise ValueError(f"Unsupported resize {size}")
|
||||||
|
if url.startswith(BASE_URL):
|
||||||
|
return url
|
||||||
|
return proxied_media_url(url) + f"/{size}"
|
168
app/micropub.py
Normal file
168
app/micropub.py
Normal file
|
@ -0,0 +1,168 @@
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from fastapi import APIRouter
|
||||||
|
from fastapi import Depends
|
||||||
|
from fastapi import Request
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
from fastapi.responses import RedirectResponse
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from app import activitypub as ap
|
||||||
|
from app.boxes import get_outbox_object_by_ap_id
|
||||||
|
from app.boxes import send_create
|
||||||
|
from app.boxes import send_delete
|
||||||
|
from app.database import AsyncSession
|
||||||
|
from app.database import get_db_session
|
||||||
|
from app.indieauth import AccessTokenInfo
|
||||||
|
from app.indieauth import verify_access_token
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/micropub")
|
||||||
|
async def micropub_endpoint(
|
||||||
|
request: Request,
|
||||||
|
access_token_info: AccessTokenInfo = Depends(verify_access_token),
|
||||||
|
db_session: AsyncSession = Depends(get_db_session),
|
||||||
|
) -> dict[str, Any] | JSONResponse:
|
||||||
|
if request.query_params.get("q") == "config":
|
||||||
|
return {}
|
||||||
|
|
||||||
|
elif request.query_params.get("q") == "source":
|
||||||
|
url = request.query_params.get("url")
|
||||||
|
outbox_object = await get_outbox_object_by_ap_id(db_session, url)
|
||||||
|
if not outbox_object:
|
||||||
|
return JSONResponse(
|
||||||
|
content={
|
||||||
|
"error": "invalid_request",
|
||||||
|
"error_description": "No post with this URL",
|
||||||
|
},
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
extra_props: dict[str, list[str]] = {}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"type": ["h-entry"],
|
||||||
|
"properties": {
|
||||||
|
"published": [
|
||||||
|
outbox_object.ap_published_at.isoformat() # type: ignore
|
||||||
|
],
|
||||||
|
"content": [outbox_object.source],
|
||||||
|
**extra_props,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
def _prop_get(dat: dict[str, Any], key: str) -> str:
|
||||||
|
val = dat[key]
|
||||||
|
if isinstance(val, list):
|
||||||
|
return val[0]
|
||||||
|
else:
|
||||||
|
return val
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/micropub")
|
||||||
|
async def post_micropub_endpoint(
|
||||||
|
request: Request,
|
||||||
|
access_token_info: AccessTokenInfo = Depends(verify_access_token),
|
||||||
|
db_session: AsyncSession = Depends(get_db_session),
|
||||||
|
) -> RedirectResponse | JSONResponse:
|
||||||
|
form_data = await request.form()
|
||||||
|
is_json = False
|
||||||
|
if not form_data:
|
||||||
|
form_data = await request.json()
|
||||||
|
is_json = True
|
||||||
|
|
||||||
|
insufficient_scope_resp = JSONResponse(
|
||||||
|
status_code=401, content={"error": "insufficient_scope"}
|
||||||
|
)
|
||||||
|
|
||||||
|
if "action" in form_data:
|
||||||
|
if form_data["action"] in ["delete", "update"]:
|
||||||
|
outbox_object = await get_outbox_object_by_ap_id(
|
||||||
|
db_session, form_data["url"]
|
||||||
|
)
|
||||||
|
if not outbox_object:
|
||||||
|
return JSONResponse(
|
||||||
|
content={
|
||||||
|
"error": "invalid_request",
|
||||||
|
"error_description": "No post with this URL",
|
||||||
|
},
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
if form_data["action"] == "delete":
|
||||||
|
if "delete" not in access_token_info.scopes:
|
||||||
|
return insufficient_scope_resp
|
||||||
|
logger.info(f"Deleting object {outbox_object.ap_id}")
|
||||||
|
await send_delete(db_session, outbox_object.ap_id) # type: ignore
|
||||||
|
return JSONResponse(content={}, status_code=200)
|
||||||
|
|
||||||
|
elif form_data["action"] == "update":
|
||||||
|
if "update" not in access_token_info.scopes:
|
||||||
|
return insufficient_scope_resp
|
||||||
|
|
||||||
|
# TODO(ts): support update
|
||||||
|
# "replace": {"content": ["new content"]}
|
||||||
|
|
||||||
|
logger.info(f"Updating object {outbox_object.ap_id}: {form_data}")
|
||||||
|
return JSONResponse(content={}, status_code=200)
|
||||||
|
else:
|
||||||
|
raise ValueError("Should never happen")
|
||||||
|
else:
|
||||||
|
return JSONResponse(
|
||||||
|
content={
|
||||||
|
"error": "invalid_request",
|
||||||
|
"error_description": f'Unsupported action: {form_data["action"]}',
|
||||||
|
},
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
if "create" not in access_token_info.scopes:
|
||||||
|
return insufficient_scope_resp
|
||||||
|
|
||||||
|
if is_json:
|
||||||
|
entry_type = _prop_get(form_data, "type") # type: ignore
|
||||||
|
else:
|
||||||
|
h = "entry"
|
||||||
|
if "h" in form_data:
|
||||||
|
h = form_data["h"]
|
||||||
|
entry_type = f"h-{h}"
|
||||||
|
|
||||||
|
logger.info(f"Creating {entry_type=} with {access_token_info=}")
|
||||||
|
|
||||||
|
if entry_type != "h-entry":
|
||||||
|
return JSONResponse(
|
||||||
|
content={
|
||||||
|
"error": "invalid_request",
|
||||||
|
"error_description": "Only h-entry are supported",
|
||||||
|
},
|
||||||
|
status_code=400,
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO(ts): support creating Article (with a name)
|
||||||
|
|
||||||
|
if is_json:
|
||||||
|
content = _prop_get(form_data["properties"], "content") # type: ignore
|
||||||
|
else:
|
||||||
|
content = form_data["content"]
|
||||||
|
|
||||||
|
public_id, _ = await send_create(
|
||||||
|
db_session,
|
||||||
|
"Note",
|
||||||
|
content,
|
||||||
|
uploads=[],
|
||||||
|
in_reply_to=None,
|
||||||
|
visibility=ap.VisibilityEnum.PUBLIC,
|
||||||
|
)
|
||||||
|
|
||||||
|
return JSONResponse(
|
||||||
|
content={},
|
||||||
|
status_code=201,
|
||||||
|
headers={
|
||||||
|
"Location": request.url_for("outbox_by_public_id", public_id=public_id)
|
||||||
|
},
|
||||||
|
)
|
656
app/models.py
Normal file
656
app/models.py
Normal file
|
@ -0,0 +1,656 @@
|
||||||
|
import enum
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any
|
||||||
|
from typing import Optional
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
|
import pydantic
|
||||||
|
from loguru import logger
|
||||||
|
from sqlalchemy import JSON
|
||||||
|
from sqlalchemy import Boolean
|
||||||
|
from sqlalchemy import Column
|
||||||
|
from sqlalchemy import DateTime
|
||||||
|
from sqlalchemy import Enum
|
||||||
|
from sqlalchemy import ForeignKey
|
||||||
|
from sqlalchemy import Index
|
||||||
|
from sqlalchemy import Integer
|
||||||
|
from sqlalchemy import String
|
||||||
|
from sqlalchemy import Table
|
||||||
|
from sqlalchemy import UniqueConstraint
|
||||||
|
from sqlalchemy import text
|
||||||
|
from sqlalchemy.orm import Mapped
|
||||||
|
from sqlalchemy.orm import relationship
|
||||||
|
|
||||||
|
from app import activitypub as ap
|
||||||
|
from app.actor import LOCAL_ACTOR
|
||||||
|
from app.actor import Actor as BaseActor
|
||||||
|
from app.ap_object import Attachment
|
||||||
|
from app.ap_object import Object as BaseObject
|
||||||
|
from app.config import BASE_URL
|
||||||
|
from app.database import Base
|
||||||
|
from app.database import metadata_obj
|
||||||
|
from app.utils import webmentions
|
||||||
|
from app.utils.datetime import now
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectRevision(pydantic.BaseModel):
|
||||||
|
ap_object: ap.RawObject
|
||||||
|
source: str
|
||||||
|
updated_at: str
|
||||||
|
|
||||||
|
|
||||||
|
class Actor(Base, BaseActor):
|
||||||
|
__tablename__ = "actor"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
ap_id: Mapped[str] = Column(String, unique=True, nullable=False, index=True)
|
||||||
|
ap_actor: Mapped[ap.RawObject] = Column(JSON, nullable=False)
|
||||||
|
ap_type = Column(String, nullable=False)
|
||||||
|
|
||||||
|
handle = Column(String, nullable=True, index=True)
|
||||||
|
|
||||||
|
is_blocked = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||||
|
is_deleted = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||||
|
|
||||||
|
are_announces_hidden_from_stream = Column(
|
||||||
|
Boolean, nullable=False, default=False, server_default="0"
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_from_db(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class InboxObject(Base, BaseObject):
|
||||||
|
__tablename__ = "inbox"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False)
|
||||||
|
actor: Mapped[Actor] = relationship(Actor, uselist=False)
|
||||||
|
|
||||||
|
server = Column(String, nullable=False)
|
||||||
|
|
||||||
|
is_hidden_from_stream = Column(Boolean, nullable=False, default=False)
|
||||||
|
|
||||||
|
ap_actor_id = Column(String, nullable=False)
|
||||||
|
ap_type = Column(String, nullable=False, index=True)
|
||||||
|
ap_id: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
|
||||||
|
ap_context = Column(String, nullable=True)
|
||||||
|
ap_published_at = Column(DateTime(timezone=True), nullable=False)
|
||||||
|
ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=False)
|
||||||
|
|
||||||
|
# Only set for activities
|
||||||
|
activity_object_ap_id = Column(String, nullable=True, index=True)
|
||||||
|
|
||||||
|
visibility = Column(Enum(ap.VisibilityEnum), nullable=False)
|
||||||
|
conversation = Column(String, nullable=True)
|
||||||
|
|
||||||
|
has_local_mention = Column(
|
||||||
|
Boolean, nullable=False, default=False, server_default="0"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Used for Like, Announce and Undo activities
|
||||||
|
relates_to_inbox_object_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("inbox.id"),
|
||||||
|
nullable=True,
|
||||||
|
)
|
||||||
|
relates_to_inbox_object: Mapped[Optional["InboxObject"]] = relationship(
|
||||||
|
"InboxObject",
|
||||||
|
foreign_keys=relates_to_inbox_object_id,
|
||||||
|
remote_side=id,
|
||||||
|
uselist=False,
|
||||||
|
)
|
||||||
|
relates_to_outbox_object_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("outbox.id"),
|
||||||
|
nullable=True,
|
||||||
|
)
|
||||||
|
relates_to_outbox_object: Mapped[Optional["OutboxObject"]] = relationship(
|
||||||
|
"OutboxObject",
|
||||||
|
foreign_keys=[relates_to_outbox_object_id],
|
||||||
|
uselist=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
undone_by_inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True)
|
||||||
|
|
||||||
|
# Link the oubox AP ID to allow undo without any extra query
|
||||||
|
liked_via_outbox_object_ap_id = Column(String, nullable=True)
|
||||||
|
announced_via_outbox_object_ap_id = Column(String, nullable=True)
|
||||||
|
voted_for_answers: Mapped[list[str] | None] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
is_bookmarked = Column(Boolean, nullable=False, default=False)
|
||||||
|
|
||||||
|
# Used to mark deleted objects, but also activities that were undone
|
||||||
|
is_deleted = Column(Boolean, nullable=False, default=False)
|
||||||
|
is_transient = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||||
|
|
||||||
|
replies_count: Mapped[int] = Column(Integer, nullable=False, default=0)
|
||||||
|
|
||||||
|
og_meta: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def relates_to_anybox_object(self) -> Union["InboxObject", "OutboxObject"] | None:
|
||||||
|
if self.relates_to_inbox_object_id:
|
||||||
|
return self.relates_to_inbox_object
|
||||||
|
elif self.relates_to_outbox_object_id:
|
||||||
|
return self.relates_to_outbox_object
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_from_db(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_from_inbox(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class OutboxObject(Base, BaseObject):
|
||||||
|
__tablename__ = "outbox"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
is_hidden_from_homepage = Column(Boolean, nullable=False, default=False)
|
||||||
|
|
||||||
|
public_id = Column(String, nullable=False, index=True)
|
||||||
|
slug = Column(String, nullable=True, index=True)
|
||||||
|
|
||||||
|
ap_type = Column(String, nullable=False, index=True)
|
||||||
|
ap_id: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
|
||||||
|
ap_context = Column(String, nullable=True)
|
||||||
|
ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=False)
|
||||||
|
|
||||||
|
activity_object_ap_id = Column(String, nullable=True, index=True)
|
||||||
|
|
||||||
|
# Source content for activities (like Notes)
|
||||||
|
source = Column(String, nullable=True)
|
||||||
|
revisions: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
ap_published_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
visibility = Column(Enum(ap.VisibilityEnum), nullable=False)
|
||||||
|
conversation = Column(String, nullable=True)
|
||||||
|
|
||||||
|
likes_count = Column(Integer, nullable=False, default=0)
|
||||||
|
announces_count = Column(Integer, nullable=False, default=0)
|
||||||
|
replies_count: Mapped[int] = Column(Integer, nullable=False, default=0)
|
||||||
|
webmentions_count: Mapped[int] = Column(
|
||||||
|
Integer, nullable=False, default=0, server_default="0"
|
||||||
|
)
|
||||||
|
# reactions: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
og_meta: Mapped[list[dict[str, Any]] | None] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
# For the featured collection
|
||||||
|
is_pinned = Column(Boolean, nullable=False, default=False)
|
||||||
|
is_transient = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||||
|
|
||||||
|
# Never actually delete from the outbox
|
||||||
|
is_deleted = Column(Boolean, nullable=False, default=False)
|
||||||
|
|
||||||
|
# Used for Create, Like, Announce and Undo activities
|
||||||
|
relates_to_inbox_object_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("inbox.id"),
|
||||||
|
nullable=True,
|
||||||
|
)
|
||||||
|
relates_to_inbox_object: Mapped[Optional["InboxObject"]] = relationship(
|
||||||
|
"InboxObject",
|
||||||
|
foreign_keys=[relates_to_inbox_object_id],
|
||||||
|
uselist=False,
|
||||||
|
)
|
||||||
|
relates_to_outbox_object_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("outbox.id"),
|
||||||
|
nullable=True,
|
||||||
|
)
|
||||||
|
relates_to_outbox_object: Mapped[Optional["OutboxObject"]] = relationship(
|
||||||
|
"OutboxObject",
|
||||||
|
foreign_keys=[relates_to_outbox_object_id],
|
||||||
|
remote_side=id,
|
||||||
|
uselist=False,
|
||||||
|
)
|
||||||
|
# For Follow activies
|
||||||
|
relates_to_actor_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey("actor.id"),
|
||||||
|
nullable=True,
|
||||||
|
)
|
||||||
|
relates_to_actor: Mapped[Optional["Actor"]] = relationship(
|
||||||
|
"Actor",
|
||||||
|
foreign_keys=[relates_to_actor_id],
|
||||||
|
uselist=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
undone_by_outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def actor(self) -> BaseActor:
|
||||||
|
return LOCAL_ACTOR
|
||||||
|
|
||||||
|
outbox_object_attachments: Mapped[list["OutboxObjectAttachment"]] = relationship(
|
||||||
|
"OutboxObjectAttachment", uselist=True, backref="outbox_object"
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def attachments(self) -> list[Attachment]:
|
||||||
|
out = []
|
||||||
|
for attachment in self.outbox_object_attachments:
|
||||||
|
url = (
|
||||||
|
BASE_URL
|
||||||
|
+ f"/attachments/{attachment.upload.content_hash}/{attachment.filename}"
|
||||||
|
)
|
||||||
|
out.append(
|
||||||
|
Attachment.parse_obj(
|
||||||
|
{
|
||||||
|
"type": "Document",
|
||||||
|
"mediaType": attachment.upload.content_type,
|
||||||
|
"name": attachment.alt or attachment.filename,
|
||||||
|
"url": url,
|
||||||
|
"width": attachment.upload.width,
|
||||||
|
"height": attachment.upload.height,
|
||||||
|
"proxiedUrl": url,
|
||||||
|
"resizedUrl": BASE_URL
|
||||||
|
+ (
|
||||||
|
"/attachments/thumbnails/"
|
||||||
|
f"{attachment.upload.content_hash}"
|
||||||
|
f"/{attachment.filename}"
|
||||||
|
)
|
||||||
|
if attachment.upload.has_thumbnail
|
||||||
|
else None,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return out
|
||||||
|
|
||||||
|
@property
|
||||||
|
def relates_to_anybox_object(self) -> Union["InboxObject", "OutboxObject"] | None:
|
||||||
|
if self.relates_to_inbox_object_id:
|
||||||
|
return self.relates_to_inbox_object
|
||||||
|
elif self.relates_to_outbox_object_id:
|
||||||
|
return self.relates_to_outbox_object
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_from_db(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_from_outbox(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
@property
|
||||||
|
def url(self) -> str | None:
|
||||||
|
# XXX: rewrite old URL here for compat
|
||||||
|
if self.ap_type == "Article" and self.slug and self.public_id:
|
||||||
|
return f"{BASE_URL}/articles/{self.public_id[:7]}/{self.slug}"
|
||||||
|
return super().url
|
||||||
|
|
||||||
|
|
||||||
|
class Follower(Base):
|
||||||
|
__tablename__ = "follower"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False, unique=True)
|
||||||
|
actor: Mapped[Actor] = relationship(Actor, uselist=False)
|
||||||
|
|
||||||
|
inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=False)
|
||||||
|
inbox_object = relationship(InboxObject, uselist=False)
|
||||||
|
|
||||||
|
ap_actor_id = Column(String, nullable=False, unique=True)
|
||||||
|
|
||||||
|
|
||||||
|
class Following(Base):
|
||||||
|
__tablename__ = "following"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
updated_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False, unique=True)
|
||||||
|
actor = relationship(Actor, uselist=False)
|
||||||
|
|
||||||
|
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
|
||||||
|
outbox_object = relationship(OutboxObject, uselist=False)
|
||||||
|
|
||||||
|
ap_actor_id = Column(String, nullable=False, unique=True)
|
||||||
|
|
||||||
|
|
||||||
|
class IncomingActivity(Base):
|
||||||
|
__tablename__ = "incoming_activity"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
# An incoming activity can be a webmention
|
||||||
|
webmention_source = Column(String, nullable=True)
|
||||||
|
# or an AP object
|
||||||
|
sent_by_ap_actor_id = Column(String, nullable=True)
|
||||||
|
ap_id = Column(String, nullable=True, index=True)
|
||||||
|
ap_object: Mapped[ap.RawObject] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
tries: Mapped[int] = Column(Integer, nullable=False, default=0)
|
||||||
|
next_try = Column(DateTime(timezone=True), nullable=True, default=now)
|
||||||
|
|
||||||
|
last_try = Column(DateTime(timezone=True), nullable=True)
|
||||||
|
|
||||||
|
is_processed = Column(Boolean, nullable=False, default=False)
|
||||||
|
is_errored = Column(Boolean, nullable=False, default=False)
|
||||||
|
error = Column(String, nullable=True)
|
||||||
|
|
||||||
|
|
||||||
|
class OutgoingActivity(Base):
|
||||||
|
__tablename__ = "outgoing_activity"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
recipient = Column(String, nullable=False)
|
||||||
|
|
||||||
|
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True)
|
||||||
|
outbox_object = relationship(OutboxObject, uselist=False)
|
||||||
|
|
||||||
|
# Can also reference an inbox object if it needds to be forwarded
|
||||||
|
inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True)
|
||||||
|
inbox_object = relationship(InboxObject, uselist=False)
|
||||||
|
|
||||||
|
# The source will be the outbox object URL
|
||||||
|
webmention_target = Column(String, nullable=True)
|
||||||
|
|
||||||
|
tries = Column(Integer, nullable=False, default=0)
|
||||||
|
next_try = Column(DateTime(timezone=True), nullable=True, default=now)
|
||||||
|
|
||||||
|
last_try = Column(DateTime(timezone=True), nullable=True)
|
||||||
|
last_status_code = Column(Integer, nullable=True)
|
||||||
|
last_response = Column(String, nullable=True)
|
||||||
|
|
||||||
|
is_sent = Column(Boolean, nullable=False, default=False)
|
||||||
|
is_errored = Column(Boolean, nullable=False, default=False)
|
||||||
|
error = Column(String, nullable=True)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def anybox_object(self) -> OutboxObject | InboxObject:
|
||||||
|
if self.outbox_object_id:
|
||||||
|
return self.outbox_object # type: ignore
|
||||||
|
elif self.inbox_object_id:
|
||||||
|
return self.inbox_object # type: ignore
|
||||||
|
else:
|
||||||
|
raise ValueError("Should never happen")
|
||||||
|
|
||||||
|
|
||||||
|
class TaggedOutboxObject(Base):
|
||||||
|
__tablename__ = "tagged_outbox_object"
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint("outbox_object_id", "tag", name="uix_tagged_object"),
|
||||||
|
)
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
|
||||||
|
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
|
||||||
|
outbox_object = relationship(OutboxObject, uselist=False)
|
||||||
|
|
||||||
|
tag = Column(String, nullable=False, index=True)
|
||||||
|
|
||||||
|
|
||||||
|
class Upload(Base):
|
||||||
|
__tablename__ = "upload"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
content_type: Mapped[str] = Column(String, nullable=False)
|
||||||
|
content_hash = Column(String, nullable=False, unique=True)
|
||||||
|
|
||||||
|
has_thumbnail = Column(Boolean, nullable=False)
|
||||||
|
|
||||||
|
# Only set for images
|
||||||
|
blurhash = Column(String, nullable=True)
|
||||||
|
width = Column(Integer, nullable=True)
|
||||||
|
height = Column(Integer, nullable=True)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_image(self) -> bool:
|
||||||
|
return self.content_type.startswith("image")
|
||||||
|
|
||||||
|
|
||||||
|
class OutboxObjectAttachment(Base):
|
||||||
|
__tablename__ = "outbox_object_attachment"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
filename = Column(String, nullable=False)
|
||||||
|
alt = Column(String, nullable=True)
|
||||||
|
|
||||||
|
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
|
||||||
|
|
||||||
|
upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False)
|
||||||
|
upload: Mapped["Upload"] = relationship(Upload, uselist=False)
|
||||||
|
|
||||||
|
|
||||||
|
class IndieAuthAuthorizationRequest(Base):
|
||||||
|
__tablename__ = "indieauth_authorization_request"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
code = Column(String, nullable=False, unique=True, index=True)
|
||||||
|
scope = Column(String, nullable=False)
|
||||||
|
redirect_uri = Column(String, nullable=False)
|
||||||
|
client_id = Column(String, nullable=False)
|
||||||
|
code_challenge = Column(String, nullable=True)
|
||||||
|
code_challenge_method = Column(String, nullable=True)
|
||||||
|
|
||||||
|
is_used = Column(Boolean, nullable=False, default=False)
|
||||||
|
|
||||||
|
|
||||||
|
class IndieAuthAccessToken(Base):
|
||||||
|
__tablename__ = "indieauth_access_token"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at: Mapped[datetime] = Column(
|
||||||
|
DateTime(timezone=True), nullable=False, default=now
|
||||||
|
)
|
||||||
|
|
||||||
|
# Will be null for personal access tokens
|
||||||
|
indieauth_authorization_request_id = Column(
|
||||||
|
Integer, ForeignKey("indieauth_authorization_request.id"), nullable=True
|
||||||
|
)
|
||||||
|
indieauth_authorization_request = relationship(
|
||||||
|
IndieAuthAuthorizationRequest,
|
||||||
|
uselist=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
access_token: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
|
||||||
|
refresh_token = Column(String, nullable=True, unique=True, index=True)
|
||||||
|
expires_in: Mapped[int] = Column(Integer, nullable=False)
|
||||||
|
scope = Column(String, nullable=False)
|
||||||
|
is_revoked = Column(Boolean, nullable=False, default=False)
|
||||||
|
was_refreshed = Column(Boolean, nullable=False, default=False, server_default="0")
|
||||||
|
|
||||||
|
|
||||||
|
class OAuthClient(Base):
|
||||||
|
__tablename__ = "oauth_client"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
# Request
|
||||||
|
client_name = Column(String, nullable=False)
|
||||||
|
redirect_uris: Mapped[list[str]] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
# Optional from request
|
||||||
|
client_uri = Column(String, nullable=True)
|
||||||
|
logo_uri = Column(String, nullable=True)
|
||||||
|
scope = Column(String, nullable=True)
|
||||||
|
|
||||||
|
# Response
|
||||||
|
client_id = Column(String, nullable=False, unique=True, index=True)
|
||||||
|
client_secret = Column(String, nullable=False, unique=True)
|
||||||
|
|
||||||
|
|
||||||
|
@enum.unique
|
||||||
|
class WebmentionType(str, enum.Enum):
|
||||||
|
UNKNOWN = "unknown"
|
||||||
|
LIKE = "like"
|
||||||
|
REPLY = "reply"
|
||||||
|
REPOST = "repost"
|
||||||
|
|
||||||
|
|
||||||
|
class Webmention(Base):
|
||||||
|
__tablename__ = "webmention"
|
||||||
|
__table_args__ = (UniqueConstraint("source", "target", name="uix_source_target"),)
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
is_deleted = Column(Boolean, nullable=False, default=False)
|
||||||
|
|
||||||
|
source: Mapped[str] = Column(String, nullable=False, index=True, unique=True)
|
||||||
|
source_microformats: Mapped[dict[str, Any] | None] = Column(JSON, nullable=True)
|
||||||
|
|
||||||
|
target = Column(String, nullable=False, index=True)
|
||||||
|
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
|
||||||
|
outbox_object = relationship(OutboxObject, uselist=False)
|
||||||
|
|
||||||
|
webmention_type = Column(Enum(WebmentionType), nullable=True)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def as_facepile_item(self) -> webmentions.Webmention | None:
|
||||||
|
if not self.source_microformats:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
return webmentions.Webmention.from_microformats(
|
||||||
|
self.source_microformats["items"], self.source
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
# TODO: return a facepile with the unknown image
|
||||||
|
logger.warning(
|
||||||
|
f"Failed to generate facefile item for Webmention id={self.id}"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class PollAnswer(Base):
|
||||||
|
__tablename__ = "poll_answer"
|
||||||
|
__table_args__ = (
|
||||||
|
# Enforce a single answer for poll/actor/answer
|
||||||
|
UniqueConstraint(
|
||||||
|
"outbox_object_id",
|
||||||
|
"name",
|
||||||
|
"actor_id",
|
||||||
|
name="uix_outbox_object_id_name_actor_id",
|
||||||
|
),
|
||||||
|
# Enforce an actor can only vote once on a "oneOf" Question
|
||||||
|
Index(
|
||||||
|
"uix_one_of_outbox_object_id_actor_id",
|
||||||
|
"outbox_object_id",
|
||||||
|
"actor_id",
|
||||||
|
unique=True,
|
||||||
|
sqlite_where=text('poll_type = "oneOf"'),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
|
||||||
|
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
|
||||||
|
outbox_object = relationship(OutboxObject, uselist=False)
|
||||||
|
|
||||||
|
# oneOf|anyOf
|
||||||
|
poll_type = Column(String, nullable=False)
|
||||||
|
|
||||||
|
inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=False)
|
||||||
|
inbox_object = relationship(InboxObject, uselist=False)
|
||||||
|
|
||||||
|
actor_id = Column(Integer, ForeignKey("actor.id"), nullable=False)
|
||||||
|
actor = relationship(Actor, uselist=False)
|
||||||
|
|
||||||
|
name = Column(String, nullable=False)
|
||||||
|
|
||||||
|
|
||||||
|
@enum.unique
|
||||||
|
class NotificationType(str, enum.Enum):
|
||||||
|
NEW_FOLLOWER = "new_follower"
|
||||||
|
PENDING_INCOMING_FOLLOWER = "pending_incoming_follower"
|
||||||
|
REJECTED_FOLLOWER = "rejected_follower"
|
||||||
|
UNFOLLOW = "unfollow"
|
||||||
|
|
||||||
|
FOLLOW_REQUEST_ACCEPTED = "follow_request_accepted"
|
||||||
|
FOLLOW_REQUEST_REJECTED = "follow_request_rejected"
|
||||||
|
|
||||||
|
MOVE = "move"
|
||||||
|
|
||||||
|
LIKE = "like"
|
||||||
|
UNDO_LIKE = "undo_like"
|
||||||
|
|
||||||
|
ANNOUNCE = "announce"
|
||||||
|
UNDO_ANNOUNCE = "undo_announce"
|
||||||
|
|
||||||
|
MENTION = "mention"
|
||||||
|
|
||||||
|
NEW_WEBMENTION = "new_webmention"
|
||||||
|
UPDATED_WEBMENTION = "updated_webmention"
|
||||||
|
DELETED_WEBMENTION = "deleted_webmention"
|
||||||
|
|
||||||
|
# incoming
|
||||||
|
BLOCKED = "blocked"
|
||||||
|
UNBLOCKED = "unblocked"
|
||||||
|
|
||||||
|
# outgoing
|
||||||
|
BLOCK = "block"
|
||||||
|
UNBLOCK = "unblock"
|
||||||
|
|
||||||
|
|
||||||
|
class Notification(Base):
|
||||||
|
__tablename__ = "notifications"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, index=True)
|
||||||
|
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
|
||||||
|
notification_type = Column(Enum(NotificationType), nullable=True)
|
||||||
|
is_new = Column(Boolean, nullable=False, default=True)
|
||||||
|
|
||||||
|
actor_id = Column(Integer, ForeignKey("actor.id"), nullable=True)
|
||||||
|
actor = relationship(Actor, uselist=False)
|
||||||
|
|
||||||
|
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=True)
|
||||||
|
outbox_object = relationship(OutboxObject, uselist=False)
|
||||||
|
|
||||||
|
inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True)
|
||||||
|
inbox_object = relationship(InboxObject, uselist=False)
|
||||||
|
|
||||||
|
webmention_id = Column(
|
||||||
|
Integer, ForeignKey("webmention.id", name="fk_webmention_id"), nullable=True
|
||||||
|
)
|
||||||
|
webmention = relationship(Webmention, uselist=False)
|
||||||
|
|
||||||
|
is_accepted = Column(Boolean, nullable=True)
|
||||||
|
is_rejected = Column(Boolean, nullable=True)
|
||||||
|
|
||||||
|
|
||||||
|
outbox_fts = Table(
|
||||||
|
"outbox_fts",
|
||||||
|
# TODO(tsileo): use Base.metadata
|
||||||
|
metadata_obj,
|
||||||
|
Column("rowid", Integer),
|
||||||
|
Column("outbox_fts", String),
|
||||||
|
Column("summary", String, nullable=True),
|
||||||
|
Column("name", String, nullable=True),
|
||||||
|
Column("source", String),
|
||||||
|
)
|
||||||
|
|
||||||
|
# db.execute(select(outbox_fts.c.rowid).where(outbox_fts.c.outbox_fts.op("MATCH")("toto AND omg"))).all() # noqa
|
||||||
|
# db.execute(select(models.OutboxObject).join(outbox_fts, outbox_fts.c.rowid == models.OutboxObject.id).where(outbox_fts.c.outbox_fts.op("MATCH")("toto2"))).scalars() # noqa
|
||||||
|
# db.execute(insert(outbox_fts).values({"outbox_fts": "delete", "rowid": 1, "source": dat[0].source})) # noqa
|
295
app/outgoing_activities.py
Normal file
295
app/outgoing_activities.py
Normal file
|
@ -0,0 +1,295 @@
|
||||||
|
import asyncio
|
||||||
|
import email
|
||||||
|
import time
|
||||||
|
import traceback
|
||||||
|
from datetime import datetime
|
||||||
|
from datetime import timedelta
|
||||||
|
from typing import MutableMapping
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from cachetools import TTLCache
|
||||||
|
from loguru import logger
|
||||||
|
from sqlalchemy import func
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.orm import joinedload
|
||||||
|
|
||||||
|
from app import activitypub as ap
|
||||||
|
from app import config
|
||||||
|
from app import ldsig
|
||||||
|
from app import models
|
||||||
|
from app.actor import LOCAL_ACTOR
|
||||||
|
from app.actor import _actor_hash
|
||||||
|
from app.config import KEY_PATH
|
||||||
|
from app.database import AsyncSession
|
||||||
|
from app.key import Key
|
||||||
|
from app.utils.datetime import now
|
||||||
|
from app.utils.url import check_url
|
||||||
|
from app.utils.workers import Worker
|
||||||
|
|
||||||
|
_MAX_RETRIES = 16
|
||||||
|
|
||||||
|
_LD_SIG_CACHE: MutableMapping[str, ap.RawObject] = TTLCache(maxsize=5, ttl=60 * 5)
|
||||||
|
|
||||||
|
|
||||||
|
k = Key(config.ID, f"{config.ID}#main-key")
|
||||||
|
k.load(KEY_PATH.read_text())
|
||||||
|
|
||||||
|
|
||||||
|
def _is_local_actor_updated() -> bool:
|
||||||
|
"""Returns True if the local actor was updated, i.e. updated via the config file"""
|
||||||
|
actor_hash = _actor_hash(LOCAL_ACTOR)
|
||||||
|
actor_hash_cache = config.ROOT_DIR / "data" / "local_actor_hash.dat"
|
||||||
|
|
||||||
|
if not actor_hash_cache.exists():
|
||||||
|
logger.info("Initializing local actor hash cache")
|
||||||
|
actor_hash_cache.write_bytes(actor_hash)
|
||||||
|
return False
|
||||||
|
|
||||||
|
previous_actor_hash = actor_hash_cache.read_bytes()
|
||||||
|
if previous_actor_hash == actor_hash:
|
||||||
|
logger.info("Local actor hasn't been updated")
|
||||||
|
return False
|
||||||
|
|
||||||
|
actor_hash_cache.write_bytes(actor_hash)
|
||||||
|
logger.info("Local actor has been updated")
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
async def _send_actor_update_if_needed(
|
||||||
|
db_session: AsyncSession,
|
||||||
|
) -> None:
|
||||||
|
"""The process for sending an update for the local actor is done here as
|
||||||
|
in production, we may have multiple uvicorn worker and this worker will
|
||||||
|
always run in a single process."""
|
||||||
|
if not _is_local_actor_updated():
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("Will send an Update for the local actor")
|
||||||
|
|
||||||
|
from app.boxes import allocate_outbox_id
|
||||||
|
from app.boxes import compute_all_known_recipients
|
||||||
|
from app.boxes import outbox_object_id
|
||||||
|
from app.boxes import save_outbox_object
|
||||||
|
|
||||||
|
update_activity_id = allocate_outbox_id()
|
||||||
|
update_activity = {
|
||||||
|
"@context": ap.AS_EXTENDED_CTX,
|
||||||
|
"id": outbox_object_id(update_activity_id),
|
||||||
|
"type": "Update",
|
||||||
|
"to": [ap.AS_PUBLIC],
|
||||||
|
"actor": config.ID,
|
||||||
|
"object": ap.remove_context(LOCAL_ACTOR.ap_actor),
|
||||||
|
}
|
||||||
|
outbox_object = await save_outbox_object(
|
||||||
|
db_session, update_activity_id, update_activity
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send the update to the followers collection and all the actor we have ever
|
||||||
|
# contacted
|
||||||
|
recipients = await compute_all_known_recipients(db_session)
|
||||||
|
for rcp in recipients:
|
||||||
|
await new_outgoing_activity(
|
||||||
|
db_session,
|
||||||
|
recipient=rcp,
|
||||||
|
outbox_object_id=outbox_object.id,
|
||||||
|
)
|
||||||
|
|
||||||
|
await db_session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
async def new_outgoing_activity(
|
||||||
|
db_session: AsyncSession,
|
||||||
|
recipient: str,
|
||||||
|
outbox_object_id: int | None = None,
|
||||||
|
inbox_object_id: int | None = None,
|
||||||
|
webmention_target: str | None = None,
|
||||||
|
) -> models.OutgoingActivity:
|
||||||
|
if outbox_object_id is None and inbox_object_id is None:
|
||||||
|
raise ValueError("Must reference at least one inbox/outbox activity")
|
||||||
|
if webmention_target and outbox_object_id is None:
|
||||||
|
raise ValueError("Webmentions must reference an outbox activity")
|
||||||
|
if outbox_object_id and inbox_object_id:
|
||||||
|
raise ValueError("Cannot reference both inbox/outbox activities")
|
||||||
|
|
||||||
|
outgoing_activity = models.OutgoingActivity(
|
||||||
|
recipient=recipient,
|
||||||
|
outbox_object_id=outbox_object_id,
|
||||||
|
inbox_object_id=inbox_object_id,
|
||||||
|
webmention_target=webmention_target,
|
||||||
|
)
|
||||||
|
|
||||||
|
db_session.add(outgoing_activity)
|
||||||
|
await db_session.flush()
|
||||||
|
await db_session.refresh(outgoing_activity)
|
||||||
|
return outgoing_activity
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_retry_after(retry_after: str) -> datetime | None:
|
||||||
|
try:
|
||||||
|
# Retry-After: 120
|
||||||
|
seconds = int(retry_after)
|
||||||
|
except ValueError:
|
||||||
|
# Retry-After: Wed, 21 Oct 2015 07:28:00 GMT
|
||||||
|
dt_tuple = email.utils.parsedate_tz(retry_after)
|
||||||
|
if dt_tuple is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
seconds = int(email.utils.mktime_tz(dt_tuple) - time.time())
|
||||||
|
|
||||||
|
return now() + timedelta(seconds=seconds)
|
||||||
|
|
||||||
|
|
||||||
|
def _exp_backoff(tries: int) -> datetime:
|
||||||
|
seconds = 2 * (2 ** (tries - 1))
|
||||||
|
return now() + timedelta(seconds=seconds)
|
||||||
|
|
||||||
|
|
||||||
|
def _set_next_try(
|
||||||
|
outgoing_activity: models.OutgoingActivity,
|
||||||
|
next_try: datetime | None = None,
|
||||||
|
) -> None:
|
||||||
|
if not outgoing_activity.tries:
|
||||||
|
raise ValueError("Should never happen")
|
||||||
|
|
||||||
|
if outgoing_activity.tries >= _MAX_RETRIES:
|
||||||
|
outgoing_activity.is_errored = True
|
||||||
|
outgoing_activity.next_try = None
|
||||||
|
else:
|
||||||
|
outgoing_activity.next_try = next_try or _exp_backoff(outgoing_activity.tries)
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_next_outgoing_activity(
|
||||||
|
db_session: AsyncSession,
|
||||||
|
) -> models.OutgoingActivity | None:
|
||||||
|
where = [
|
||||||
|
models.OutgoingActivity.next_try <= now(),
|
||||||
|
models.OutgoingActivity.is_errored.is_(False),
|
||||||
|
models.OutgoingActivity.is_sent.is_(False),
|
||||||
|
]
|
||||||
|
q_count = await db_session.scalar(
|
||||||
|
select(func.count(models.OutgoingActivity.id)).where(*where)
|
||||||
|
)
|
||||||
|
if q_count > 0:
|
||||||
|
logger.info(f"{q_count} outgoing activities ready to process")
|
||||||
|
if not q_count:
|
||||||
|
# logger.debug("No activities to process")
|
||||||
|
return None
|
||||||
|
|
||||||
|
next_activity = (
|
||||||
|
await db_session.execute(
|
||||||
|
select(models.OutgoingActivity)
|
||||||
|
.where(*where)
|
||||||
|
.limit(1)
|
||||||
|
.options(
|
||||||
|
joinedload(models.OutgoingActivity.inbox_object),
|
||||||
|
joinedload(models.OutgoingActivity.outbox_object),
|
||||||
|
)
|
||||||
|
.order_by(models.OutgoingActivity.next_try)
|
||||||
|
)
|
||||||
|
).scalar_one()
|
||||||
|
return next_activity
|
||||||
|
|
||||||
|
|
||||||
|
async def process_next_outgoing_activity(
|
||||||
|
db_session: AsyncSession,
|
||||||
|
next_activity: models.OutgoingActivity,
|
||||||
|
) -> None:
|
||||||
|
next_activity.tries = next_activity.tries + 1 # type: ignore
|
||||||
|
next_activity.last_try = now()
|
||||||
|
|
||||||
|
logger.info(f"recipient={next_activity.recipient}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
if next_activity.webmention_target and next_activity.outbox_object:
|
||||||
|
webmention_payload = {
|
||||||
|
"source": next_activity.outbox_object.url,
|
||||||
|
"target": next_activity.webmention_target,
|
||||||
|
}
|
||||||
|
logger.info(f"{webmention_payload=}")
|
||||||
|
check_url(next_activity.recipient)
|
||||||
|
async with httpx.AsyncClient() as client:
|
||||||
|
resp = await client.post(
|
||||||
|
next_activity.recipient, # type: ignore
|
||||||
|
data=webmention_payload,
|
||||||
|
headers={
|
||||||
|
"User-Agent": config.USER_AGENT,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
resp.raise_for_status()
|
||||||
|
else:
|
||||||
|
payload = ap.wrap_object_if_needed(next_activity.anybox_object.ap_object)
|
||||||
|
|
||||||
|
# Use LD sig if the activity may need to be forwarded by recipients
|
||||||
|
if next_activity.anybox_object.is_from_outbox and payload["type"] in [
|
||||||
|
"Create",
|
||||||
|
"Update",
|
||||||
|
"Delete",
|
||||||
|
]:
|
||||||
|
# But only if the object is public (to help with deniability/privacy)
|
||||||
|
if next_activity.outbox_object.visibility == ap.VisibilityEnum.PUBLIC: # type: ignore # noqa: E501
|
||||||
|
if p := _LD_SIG_CACHE.get(payload["id"]):
|
||||||
|
payload = p
|
||||||
|
else:
|
||||||
|
ldsig.generate_signature(payload, k)
|
||||||
|
_LD_SIG_CACHE[payload["id"]] = payload
|
||||||
|
|
||||||
|
logger.info(f"{payload=}")
|
||||||
|
|
||||||
|
resp = await ap.post(next_activity.recipient, payload) # type: ignore
|
||||||
|
except httpx.HTTPStatusError as http_error:
|
||||||
|
logger.exception("Failed")
|
||||||
|
next_activity.last_status_code = http_error.response.status_code
|
||||||
|
next_activity.last_response = http_error.response.text
|
||||||
|
next_activity.error = traceback.format_exc()
|
||||||
|
|
||||||
|
if http_error.response.status_code in [429, 503]:
|
||||||
|
retry_after: datetime | None = None
|
||||||
|
if retry_after_value := http_error.response.headers.get("Retry-After"):
|
||||||
|
retry_after = _parse_retry_after(retry_after_value)
|
||||||
|
_set_next_try(next_activity, retry_after)
|
||||||
|
elif http_error.response.status_code == 401:
|
||||||
|
_set_next_try(next_activity)
|
||||||
|
elif 400 <= http_error.response.status_code < 500:
|
||||||
|
logger.info(f"status_code={http_error.response.status_code} not retrying")
|
||||||
|
next_activity.is_errored = True
|
||||||
|
next_activity.next_try = None
|
||||||
|
else:
|
||||||
|
_set_next_try(next_activity)
|
||||||
|
except Exception:
|
||||||
|
logger.exception("Failed")
|
||||||
|
next_activity.error = traceback.format_exc()
|
||||||
|
_set_next_try(next_activity)
|
||||||
|
else:
|
||||||
|
logger.info("Success")
|
||||||
|
next_activity.is_sent = True
|
||||||
|
next_activity.last_status_code = resp.status_code
|
||||||
|
next_activity.last_response = resp.text
|
||||||
|
|
||||||
|
await db_session.commit()
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class OutgoingActivityWorker(Worker[models.OutgoingActivity]):
|
||||||
|
async def process_message(
|
||||||
|
self,
|
||||||
|
db_session: AsyncSession,
|
||||||
|
next_activity: models.OutgoingActivity,
|
||||||
|
) -> None:
|
||||||
|
await process_next_outgoing_activity(db_session, next_activity)
|
||||||
|
|
||||||
|
async def get_next_message(
|
||||||
|
self,
|
||||||
|
db_session: AsyncSession,
|
||||||
|
) -> models.OutgoingActivity | None:
|
||||||
|
return await fetch_next_outgoing_activity(db_session)
|
||||||
|
|
||||||
|
async def startup(self, db_session: AsyncSession) -> None:
|
||||||
|
await _send_actor_update_if_needed(db_session)
|
||||||
|
|
||||||
|
|
||||||
|
async def loop() -> None:
|
||||||
|
await OutgoingActivityWorker().run_forever()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
asyncio.run(loop())
|
119
app/prune.py
Normal file
@@ -0,0 +1,119 @@
from datetime import timedelta

from loguru import logger
from sqlalchemy import and_
from sqlalchemy import delete
from sqlalchemy import func
from sqlalchemy import not_
from sqlalchemy import or_
from sqlalchemy import select

from app import activitypub as ap
from app import models
from app.config import BASE_URL
from app.config import INBOX_RETENTION_DAYS
from app.database import AsyncSession
from app.database import async_session
from app.utils.datetime import now


async def prune_old_data(
    db_session: AsyncSession,
) -> None:
    logger.info(f"Pruning old data with {INBOX_RETENTION_DAYS=}")
    await _prune_old_incoming_activities(db_session)
    await _prune_old_outgoing_activities(db_session)
    await _prune_old_inbox_objects(db_session)

    # TODO: delete actor with no remaining inbox objects

    await db_session.commit()
    # Reclaim disk space
    await db_session.execute("VACUUM")  # type: ignore


async def _prune_old_incoming_activities(
    db_session: AsyncSession,
) -> None:
    result = await db_session.execute(
        delete(models.IncomingActivity)
        .where(
            models.IncomingActivity.created_at
            < now() - timedelta(days=INBOX_RETENTION_DAYS),
            # Keep failed activity for debug
            models.IncomingActivity.is_errored.is_(False),
        )
        .execution_options(synchronize_session=False)
    )
    logger.info(f"Deleted {result.rowcount} old incoming activities")  # type: ignore


async def _prune_old_outgoing_activities(
    db_session: AsyncSession,
) -> None:
    result = await db_session.execute(
        delete(models.OutgoingActivity)
        .where(
            models.OutgoingActivity.created_at
            < now() - timedelta(days=INBOX_RETENTION_DAYS),
            # Keep failed activity for debug
            models.OutgoingActivity.is_errored.is_(False),
        )
        .execution_options(synchronize_session=False)
    )
    logger.info(f"Deleted {result.rowcount} old outgoing activities")  # type: ignore


async def _prune_old_inbox_objects(
    db_session: AsyncSession,
) -> None:
    outbox_conversation = select(func.distinct(models.OutboxObject.conversation)).where(
        models.OutboxObject.conversation.is_not(None),
        models.OutboxObject.conversation.not_like(f"{BASE_URL}%"),
    )
    result = await db_session.execute(
        delete(models.InboxObject)
        .where(
            # Keep bookmarked objects
            models.InboxObject.is_bookmarked.is_(False),
            # Keep liked objects
            models.InboxObject.liked_via_outbox_object_ap_id.is_(None),
            # Keep announced objects
            models.InboxObject.announced_via_outbox_object_ap_id.is_(None),
            # Keep objects mentioning the local actor
            models.InboxObject.has_local_mention.is_(False),
            # Keep objects related to local conversations (i.e. don't break the
            # public website)
            or_(
                models.InboxObject.conversation.not_like(f"{BASE_URL}%"),
                models.InboxObject.conversation.is_(None),
                models.InboxObject.conversation.not_in(outbox_conversation),
            ),
            # Keep activities related to the outbox (like Like/Announce/Follow...)
            or_(
                # XXX: no `/` here because the local ID does not have one
                models.InboxObject.activity_object_ap_id.not_like(f"{BASE_URL}%"),
                models.InboxObject.activity_object_ap_id.is_(None),
            ),
            # Keep direct messages
            not_(
                and_(
                    models.InboxObject.visibility == ap.VisibilityEnum.DIRECT,
                    models.InboxObject.ap_type.in_(["Note"]),
                )
            ),
            # Keep Move object as they are linked to notifications
            models.InboxObject.ap_type.not_in(["Move"]),
            # Filter by retention days
            models.InboxObject.ap_published_at
            < now() - timedelta(days=INBOX_RETENTION_DAYS),
        )
        .execution_options(synchronize_session=False)
    )
    logger.info(f"Deleted {result.rowcount} old inbox objects")  # type: ignore


async def run_prune_old_data() -> None:
    """CLI entrypoint."""
    async with async_session() as db_session:
        await prune_old_data(db_session)
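prune_old_data only deletes inbox objects that are not bookmarked, liked, announced, mentioned in, or tied to a local conversation, then VACUUMs to reclaim disk space. A hypothetical sketch, not part of this diff, of how the CLI entrypoint could be wired into a scheduled job:

    import asyncio

    from app.prune import run_prune_old_data

    if __name__ == "__main__":
        # Run the async entrypoint defined above; scheduling (cron, systemd
        # timer, ...) is left to the deployment.
        asyncio.run(run_prune_old_data())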
28
app/redirect.py
Normal file
@@ -0,0 +1,28 @@
from fastapi import Request

from app import templates
from app.database import AsyncSession


async def redirect(
    request: Request,
    db_session: AsyncSession,
    url: str,
) -> templates.TemplateResponse:
    """
    Similar to RedirectResponse, but uses a 200 response with HTML.

    Needed for remote redirects on form submission endpoints,
    since our CSP policy disallows remote form submission.
    https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984
    """
    return await templates.render_template(
        db_session,
        request,
        "redirect.html",
        {
            "request": request,
            "url": url,
        },
        headers={"Refresh": "0;url=" + url},
    )
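Because the Content-Security-Policy blocks a 30x redirect to a remote site after a form submission, redirect() returns a normal 200 page whose Refresh header sends the browser to the target URL. A hypothetical usage sketch (the handler name and its wiring are assumptions, only redirect() comes from the diff):

    from fastapi import Request

    from app.database import AsyncSession
    from app.redirect import redirect

    async def confirm_remote_follow(
        request: Request, db_session: AsyncSession, remote_url: str
    ):
        # After handling the form POST, send the user back to the remote instance.
        return await redirect(request, db_session, remote_url)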
1
app/scss/_theme.scss
Symbolic link
@@ -0,0 +1 @@
../../data/_theme.scss
571
app/scss/main.scss
Normal file
@@ -0,0 +1,571 @@
$font-stack: Helvetica, sans-serif;
$background: #ddd;
$light-background: #e6e6e6;
$text-color: #111;
$primary-color: #1d781d;
$secondary-color: #781D78;
$form-background-color: #ccc;
$form-text-color: #333;
$muted-color: #555; // solarized comment text
$primary-button-text-color: #fff;
$code-highlight-background: #f0f0f0;

// Load custom theme
@import "theme.scss";

.primary-color {
  color: $primary-color;
}

#admin {
  .admin-menu {
    margin-bottom: 30px;
    padding: 0 20px;
  }
}

.empty-state {
  padding: 20px;
}

.public-top-menu {
  margin: 30px 0 0 0;
}

.width-95 {
  width: 95%;
}

.bold {
  font-weight: bold;
}

.admin-new {
  textarea {
    font-size: 1.2em;
    width: 95%;
  }
}

.show-more-wrapper {
  .p-summary {
    display: inline-block;
  }
  .show-more-btn {
    margin-left: 5px;
  }
  summary {
    display: inline-block;
  }
  summary::-webkit-details-marker {
    display: none
  }
  &:not([open]) .show-more-btn::after {
    content: 'show more';
  }
  &[open] .show-more-btn::after {
    content: 'show less';
  }
}
.sensitive-attachment {
  display: inline-block;
  .sensitive-attachment-state {
    display: none;
  }
  .sensitive-attachment-state:checked ~ .sensitive-attachment-box div {
    display:none;
  }
  .sensitive-attachment-box {
    position: relative;
    div {
      position: absolute;
      width: 100%;
      height: 100%;
      z-index: 10;
      backdrop-filter: blur(2em);
    }
  }
}


blockquote {
  border-left: 3px solid $secondary-color;
  margin-left: 0;
  padding-left: 1.5em;
}

.muted {
  color: $muted-color;
}

.light-background {
  background: $light-background;
}


body {
  font-family: $font-stack;
  font-size: 20px;
  line-height: 32px;
  background: $background;
  color: $text-color;
  margin: 0;
  padding: 0;
  display: flex;
  min-height: 100vh;
  flex-direction: column;
}
a {
  text-decoration: none;
}

dl {
  display: flex;
  dt {
    width: 200px;
    flex: 0 0 auto;
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
  }
  dd {
    flex: 1 1 auto;
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
    p {
      display: inline;
    }
  }
}

.shared-header {
  margin-left: 20px;
  margin-top: 30px;
  margin-bottom: -20px;
  strong {
    color: $primary-color;
  }
  span {
    color: $muted-color;
  }
}

div.highlight {
  background: $code-highlight-background;
  padding: 0 10px;
  overflow: auto;
  display: block;
  margin: 20px 0;
}

.box {
  padding: 0 20px;
}

code, pre {
  color: $secondary-color; // #cb4b16; // #268bd2; // #2aa198;
  font-family: monospace;
}

.form {
  input, select, textarea {
    font-size: 20px;
    border: 0;
    padding: 5px;
    background: $form-background-color;
    color: $form-text-color;
    &:focus {
      outline: 1px solid $secondary-color;
    }
  }
  input[type=submit] {
    font-size: 20px;
    outline: none;
    background: $primary-color;
    color: $primary-button-text-color;
    padding: 5px 12px;
    cursor: pointer;
  }
}

header {
  padding: 0 20px;
  .title {
    font-size: 1.3em;
    text-decoration: none;
    .handle {
      font-size: 0.85em;
      color: $muted-color;
    }
  }
  .counter {
    color: $muted-color;
  }
  .summary {
    a:hover {
      text-decoration: underline;
    }
  }
}
a {
  color: $primary-color;
  &:hover {
    color: $secondary-color;
  }
}
#main {
  display: flex;
  flex: 1;
}
main {
  width: 100%;
  max-width: 1000px;
  margin: 30px auto;
}

.main-flex {
  display: flex;
  flex: 1;
}

.centered {
  display: flex;
  flex: 1;
  justify-content: center;
  align-items: center;
  div {
    display: block;
  }
}

footer {
  width: 100%;
  max-width: 1000px;
  margin: 20px auto;
  color: $muted-color;
  p {
    margin: 0;
  }
}
.tiny-actor-icon {
  max-width: 24px;
  max-height: 24px;
  position: relative;
  top: 5px;
}
.actor-box {
  display: flex;
  column-gap: 20px;
  margin:10px 0;
  .icon-box {
    flex: 0 0 50px;
  }
  .actor-handle {
    font-size: 0.85em;
    line-height: 1em;
    color: $muted-color;
  }
  .actor-icon {
    max-width: 50px;
  }
}
#articles {
  list-style-type: none;
  margin: 30px 0;
  padding: 0 20px;
  li {
    display: block;
    span {
      padding-right:10px;
    }
  }
}

#notifications, #followers, #following {
  ul {
    list-style-type: none;
    margin: 0;
    padding: 0;
  }
  li {
    display: block;
  }
}

@mixin admin-button() {
  font-size: 20px;
  line-height: 32px;
  font-family: $font-stack;
  background: $form-background-color;
  color: $form-text-color;
  border: 1px solid $background;
  padding: 8px 10px 5px 10px;
  cursor: pointer;
  &:hover {
    border: 1px solid $form-text-color;
  }
}

.show-sensitive-btn, .show-more-btn, .label-btn {
  @include admin-button;
  padding: 10px 5px;
  margin: 20px 0;
}

.show-hide-sensitive-btn {
  display:inline-block;
}

.no-margin-top {
  margin-top: 0;
}

.float-right {
  float: right;
}

ul.poll-items {
  list-style-type: none;
  padding: 0;
  li {
    display: block;
    p {
      margin: 20px 0 10px 0;
      .poll-vote {
        padding-left: 20px;
      }
    }

    .poll-bar {
      width:100%;height:20px;
      line {
        stroke: $secondary-color;
        stroke-width: 20px;
      }
    }

  }
}

.attachment-wrapper {
  .attachment-item {
    margin-top: 20px;
  }
  img.attachment {
    margin: 0;
  }
  a.attachment {
    display: inline-block;
    margin-bottom: 15px;
  }
  audio.attachment {
    width: 480px;
  }
}

nav {
  form {
    margin: 15px 0;
  }
  input[type=submit], button {
    @include admin-button;
  }
}

nav.flexbox {
  ul {
    display: flex;
    flex-wrap: wrap;
    align-items: center;
    list-style-type: none;
    margin: 0;
    padding: 0;

  }

  ul li {
    margin-right: 20px;

    &:last-child {
      margin-right: 0px;
    }
  }
  a:not(.label-btn) {
    color: $primary-color;
    text-decoration: none;
    &:hover, &:active {
      color: $secondary-color;
      text-decoration: underline;
    }
  }
  a.active:not(.label-btn) {
    color: $secondary-color;
    font-weight: bold;
  }
}

// after nav.flexbox to override default behavior
a.label-btn {
  color: $form-text-color;
  &:hover {
    text-decoration: none;
    color: $form-text-color;
  }
}

.ap-object {
  margin: 15px 0;
  padding: 20px;
  nav {
    color: $muted-color;
  }
  .in-reply-to {
    display: inline;
    color: $muted-color;
  }
  .e-content, .activity-og-meta {
    a:hover {
      text-decoration: underline;
    }
  }
  .activity-attachment {
    margin: 30px 0 20px 0;
    img, audio, video {
      max-width: calc(min(740px, 100%));
    }
  }
  img.inline-img {
    display: block;
    max-width: 740px;
  }
}

.activity-og-meta {
  display: flex;
  column-gap: 20px;
  margin: 20px 0;
  img {
    max-width: 200px;
    max-height: 100px;
  }
  small {
    display: block;
  }
}

.ap-object-expanded {
  border: 2px dashed $secondary-color;
}

.error-box, .scolor {
  color: $secondary-color;
}

.actor-action {
  margin-top:20px;
  margin-bottom:-20px;
  padding: 0 20px;
  span {
    color: $muted-color;
  }
  span.new {
    color: $secondary-color;
  }
}
.actor-metadata {
  color: $muted-color;
}
.emoji, .custom-emoji {
  max-width: 25px;
}

.indieauth-box {
  display: flex;
  column-gap: 20px;

  .indieauth-logo {
    flex: initial;
    width: 100px;
    img {
      max-width: 100px;
    }
  }
  .indieauth-details {
    flex: 1;
    div {
      padding-left: 20px;
      a {
        font-size: 1.2em;
        font-weight: 600;
      }
    }
  }
}

.public-interactions {
  display: flex;
  column-gap: 20px;
  flex-wrap: wrap;
  margin-top: 20px;
  .interactions-block {
    flex: 0 1 30%;
    max-width: 50%;
    .facepile-wrapper {
      display: flex;
      column-gap: 20px;
      row-gap: 20px;
      flex-wrap: wrap;
      margin-top: 20px;
      a {
        height: 50px;
        img {
          max-width: 50px;
        }
      }
      .and-x-more {
        display: inline-block;
        align-self: center;
      }
    }
  }
}

.error-title {
  a {
    text-decoration: underline;
  }
}

.ap-place {
  h3 {
    display: inline;
    font-weight: normal;
  }
  h3::after {
    content: ': ';
  }
}

.margin-top-20 {
  margin-top: 20px;
}

.video-wrapper {
  position: relative;
}

.video-gif-overlay {
  display: none;
}

.video-gif-mode + .video-gif-overlay {
  display: block;
  position: absolute;
  top: 5px;
  left: 5px;
  padding: 0 3px;
  font-size: 0.8em;
  background: rgba(0,0,0,.5);
  color: #fff;
}
218
app/source.py
Normal file
@@ -0,0 +1,218 @@
import re
import typing

from loguru import logger
from mistletoe import Document  # type: ignore
from mistletoe.block_token import CodeFence  # type: ignore
from mistletoe.html_renderer import HTMLRenderer  # type: ignore
from mistletoe.span_token import SpanToken  # type: ignore
from pygments.formatters import HtmlFormatter  # type: ignore
from pygments.lexers import get_lexer_by_name as get_lexer  # type: ignore
from pygments.util import ClassNotFound  # type: ignore
from sqlalchemy import select

from app import webfinger
from app.config import BASE_URL
from app.config import CODE_HIGHLIGHTING_THEME
from app.database import AsyncSession
from app.utils import emoji

if typing.TYPE_CHECKING:
    from app.actor import Actor

_FORMATTER = HtmlFormatter(style=CODE_HIGHLIGHTING_THEME)
_HASHTAG_REGEX = re.compile(r"(#[\d\w]+)")
_MENTION_REGEX = re.compile(r"(@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+)")
_URL_REGEX = re.compile(
    "(https?:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b(?:[-a-zA-Z0-9()@:%_\\+.~#?&\\/=]*))"  # noqa: E501
)


class AutoLink(SpanToken):
    parse_inner = False
    precedence = 1
    pattern = _URL_REGEX

    def __init__(self, match_obj: re.Match) -> None:
        self.target = match_obj.group()


class Mention(SpanToken):
    parse_inner = False
    precedence = 10
    pattern = _MENTION_REGEX

    def __init__(self, match_obj: re.Match) -> None:
        self.target = match_obj.group()


class Hashtag(SpanToken):
    parse_inner = False
    precedence = 10
    pattern = _HASHTAG_REGEX

    def __init__(self, match_obj: re.Match) -> None:
        self.target = match_obj.group()


class CustomRenderer(HTMLRenderer):
    def __init__(
        self,
        mentioned_actors: dict[str, "Actor"] = {},
        enable_mentionify: bool = True,
        enable_hashtagify: bool = True,
    ) -> None:
        extra_tokens = []
        if enable_mentionify:
            extra_tokens.append(Mention)
        if enable_hashtagify:
            extra_tokens.append(Hashtag)
        super().__init__(AutoLink, *extra_tokens)

        self.tags: list[dict[str, str]] = []
        self.mentioned_actors = mentioned_actors

    def render_auto_link(self, token: AutoLink) -> str:
        template = '<a href="{target}" rel="noopener">{inner}</a>'
        target = self.escape_url(token.target)
        return template.format(target=target, inner=target)

    def render_mention(self, token: Mention) -> str:
        mention = token.target
        suffix = ""
        if mention.endswith("."):
            mention = mention[:-1]
            suffix = "."
        actor = self.mentioned_actors.get(mention)
        if not actor:
            return mention

        self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention))

        link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>{suffix}'  # noqa: E501
        return link

    def render_hashtag(self, token: Hashtag) -> str:
        tag = token.target[1:]
        link = f'<a href="{BASE_URL}/t/{tag.lower()}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>'  # noqa: E501
        self.tags.append(
            dict(
                href=f"{BASE_URL}/t/{tag.lower()}",
                name=token.target.lower(),
                type="Hashtag",
            )
        )
        return link

    def render_block_code(self, token: CodeFence) -> str:
        lexer_attr = ""
        try:
            lexer = get_lexer(token.language)
            lexer_attr = f' data-microblogpub-lexer="{lexer.aliases[0]}"'
        except ClassNotFound:
            pass

        code = token.children[0].content
        return f"<pre><code{lexer_attr}>\n{code}\n</code></pre>"


async def _prefetch_mentioned_actors(
    db_session: AsyncSession,
    content: str,
) -> dict[str, "Actor"]:
    from app import models
    from app.actor import fetch_actor

    actors = {}

    for mention in re.findall(_MENTION_REGEX, content):
        if mention in actors:
            continue

        # XXX: the regex catches stuff like `@toto@example.com.`
        if mention.endswith("."):
            mention = mention[:-1]

        try:
            _, username, domain = mention.split("@")
            actor = (
                await db_session.execute(
                    select(models.Actor).where(
                        models.Actor.handle == mention,
                        models.Actor.is_deleted.is_(False),
                    )
                )
            ).scalar_one_or_none()
            if not actor:
                actor_url = await webfinger.get_actor_url(mention)
                if not actor_url:
                    # FIXME(ts): raise an error?
                    continue
                actor = await fetch_actor(db_session, actor_url)

            actors[mention] = actor
        except Exception:
            logger.exception(f"Failed to prefetch {mention}")

    return actors


def hashtagify(
    content: str,
) -> tuple[str, list[dict[str, str]]]:
    tags = []
    with CustomRenderer(
        mentioned_actors={},
        enable_mentionify=False,
        enable_hashtagify=True,
    ) as renderer:
        rendered_content = renderer.render(Document(content))
        tags.extend(renderer.tags)

    # Handle custom emoji
    tags.extend(emoji.tags(content))

    return rendered_content, tags


async def markdownify(
    db_session: AsyncSession,
    content: str,
    enable_mentionify: bool = True,
    enable_hashtagify: bool = True,
) -> tuple[str, list[dict[str, str]], list["Actor"]]:
    """
    >>> content, tags = markdownify("Hello")

    """
    tags = []
    mentioned_actors: dict[str, "Actor"] = {}
    if enable_mentionify:
        mentioned_actors = await _prefetch_mentioned_actors(db_session, content)

    with CustomRenderer(
        mentioned_actors=mentioned_actors,
        enable_mentionify=enable_mentionify,
        enable_hashtagify=enable_hashtagify,
    ) as renderer:
        rendered_content = renderer.render(Document(content))
        tags.extend(renderer.tags)

    # Handle custom emoji
    tags.extend(emoji.tags(content))

    return rendered_content, dedup_tags(tags), list(mentioned_actors.values())


def dedup_tags(tags: list[dict[str, str]]) -> list[dict[str, str]]:
    idx = set()
    deduped_tags = []
    for tag in tags:
        tag_idx = (tag["type"], tag["name"])
        if tag_idx in idx:
            continue

        idx.add(tag_idx)
        deduped_tags.append(tag)

    return deduped_tags
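markdownify() is the single entry point for rendering post sources: it prefetches mentioned actors, renders Markdown with the custom mention/hashtag/autolink tokens, and returns the HTML plus the ActivityPub tags and mentioned actors. A hypothetical usage sketch, not part of this diff (the wrapper function is an assumption; markdownify and async_session come from the modules shown in this change set):

    from app.database import async_session
    from app.source import markdownify

    async def preview_post(raw_content: str):
        # Render a draft the same way the outbox does.
        async with async_session() as db_session:
            content, tags, mentioned_actors = await markdownify(db_session, raw_content)
            return content, tags, mentioned_actors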
11
app/static/common-admin.js
Normal file
@@ -0,0 +1,11 @@
document.addEventListener('DOMContentLoaded', (ev) => {
    // Add confirm to "delete" button next to outbox objects
    var forms = document.getElementsByClassName("object-delete-form")
    for (var i = 0; i < forms.length; i++) {
        forms[i].addEventListener('submit', (ev) => {
            if (!confirm('Do you really want to delete this object?')) {
                ev.preventDefault();
            };
        });
    }
});
32
app/static/common.js
Normal file
@@ -0,0 +1,32 @@
function hasAudio (video) {
    return video.mozHasAudio ||
        Boolean(video.webkitAudioDecodedByteCount) ||
        Boolean(video.audioTracks && video.audioTracks.length);
}

function setVideoInGIFMode(video) {
    if (!hasAudio(video)) {
        if (typeof video.loop == 'boolean' && video.duration <= 10.0) {
            video.classList.add("video-gif-mode");
            video.loop = true;
            video.controls = false;
            video.addEventListener("mouseover", () => {
                video.play();
            })
            video.addEventListener("mouseleave", () => {
                video.pause();
            })
        }
    };
}

var items = document.getElementsByTagName("video")
for (var i = 0; i < items.length; i++) {
    if (items[i].duration) {
        setVideoInGIFMode(items[i]);
    } else {
        items[i].addEventListener("loadeddata", function() {
            setVideoInGIFMode(this);
        });
    }
}
1
app/static/css/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
*.css
BIN
app/static/emoji/goose_honk.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 7.3 KiB
56
app/static/new.js
Normal file
@@ -0,0 +1,56 @@
// The new post textarea
var ta = document.getElementsByTagName("textarea")[0];
// Helper for inserting text (emojis) in the textarea
function insertAtCursor (textToInsert) {
    ta.focus();
    const isSuccess = document.execCommand("insertText", false, textToInsert);

    // Firefox (non-standard method)
    if (!isSuccess) {
        // Credits to https://www.everythingfrontend.com/posts/insert-text-into-textarea-at-cursor-position.html
        // get current text of the input
        const value = ta.value;
        // save selection start and end position
        const start = ta.selectionStart;
        const end = ta.selectionEnd;
        // update the value with our text inserted
        ta.value = value.slice(0, start) + textToInsert + value.slice(end);
        // update cursor to be at the end of insertion
        ta.selectionStart = ta.selectionEnd = start + textToInsert.length;
    }
}
// Emoji click callback func
var ji = function (ev) {
    insertAtCursor(ev.target.attributes.alt.value + " ");
    ta.focus()
    //console.log(document.execCommand('insertText', false /*no UI*/, ev.target.attributes.alt.value));
}
// Enable the click for each emojis
var items = document.getElementsByClassName("ji")
for (var i = 0; i < items.length; i++) {
    items[i].addEventListener('click', ji);
}

// Add new input text dynamically to allow setting an alt text on attachments
var files = document.getElementById("files");
var alts = document.getElementById("alts");
files.addEventListener("change", function(e) {
    // Reset the div content
    alts.innerHTML = "";

    // Add an input for each files
    for (var i = 0; i < e.target.files.length; i++) {
        var p = document.createElement("p");
        var altInput = document.createElement("input");
        altInput.setAttribute("type", "text");
        altInput.setAttribute("name", "alt_" + e.target.files[i].name);
        altInput.setAttribute("placeholder", "Alt text for " + e.target.files[i].name);
        altInput.setAttribute("style", "width:95%;")
        p.appendChild(altInput);
        alts.appendChild(p);
    }
});
// Focus at the end of the textarea
const end = ta.value.length;
ta.setSelectionRange(end, end);
ta.focus();
439
app/templates.py
Normal file
@@ -0,0 +1,439 @@
from datetime import datetime
from datetime import timezone
from functools import lru_cache
from typing import Any
from typing import Callable
from urllib.parse import urlparse

import bleach
import emoji
import html2text
import humanize
from bs4 import BeautifulSoup  # type: ignore
from dateutil.parser import parse
from fastapi import Request
from fastapi.templating import Jinja2Templates
from loguru import logger
from sqlalchemy import func
from sqlalchemy import select
from starlette.templating import _TemplateResponse as TemplateResponse

from app import activitypub as ap
from app import config
from app import models
from app.actor import LOCAL_ACTOR
from app.ap_object import Attachment
from app.ap_object import Object
from app.config import BASE_URL
from app.config import CUSTOM_FOOTER
from app.config import DEBUG
from app.config import SESSION_TIMEOUT
from app.config import VERSION
from app.config import generate_csrf_token
from app.config import session_serializer
from app.database import AsyncSession
from app.media import proxied_media_url
from app.utils import privacy_replace
from app.utils.datetime import now
from app.utils.highlight import HIGHLIGHT_CSS
from app.utils.highlight import highlight

_templates = Jinja2Templates(
    directory=["data/templates", "app/templates"],  # type: ignore  # bad typing
    trim_blocks=True,
    lstrip_blocks=True,
)


H2T = html2text.HTML2Text()
H2T.ignore_links = True
H2T.ignore_images = True


def _filter_domain(text: str) -> str:
    hostname = urlparse(text).hostname
    if not hostname:
        raise ValueError(f"No hostname for {text}")
    return hostname


def _media_proxy_url(url: str | None) -> str:
    if not url:
        return BASE_URL + "/static/nopic.png"
    return proxied_media_url(url)


def is_current_user_admin(request: Request) -> bool:
    is_admin = False
    session_cookie = request.cookies.get("session")
    if session_cookie:
        try:
            loaded_session = session_serializer.loads(
                session_cookie,
                max_age=SESSION_TIMEOUT,
            )
        except Exception:
            logger.exception("Failed to validate session timeout")
        else:
            is_admin = loaded_session.get("is_logged_in")

    return is_admin


async def render_template(
    db_session: AsyncSession,
    request: Request,
    template: str,
    template_args: dict[str, Any] | None = None,
    status_code: int = 200,
    headers: dict[str, str] | None = None,
) -> TemplateResponse:
    if template_args is None:
        template_args = {}

    is_admin = False
    is_admin = is_current_user_admin(request)

    return _templates.TemplateResponse(
        template,
        {
            "request": request,
            "debug": DEBUG,
            "microblogpub_version": VERSION,
            "is_admin": is_admin,
            "csrf_token": generate_csrf_token(),
            "highlight_css": HIGHLIGHT_CSS,
            "visibility_enum": ap.VisibilityEnum,
            "notifications_count": await db_session.scalar(
                select(func.count(models.Notification.id)).where(
                    models.Notification.is_new.is_(True)
                )
            )
            if is_admin
            else 0,
            "articles_count": await db_session.scalar(
                select(func.count(models.OutboxObject.id)).where(
                    models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
                    models.OutboxObject.is_deleted.is_(False),
                    models.OutboxObject.is_hidden_from_homepage.is_(False),
                    models.OutboxObject.ap_type == "Article",
                )
            ),
            "local_actor": LOCAL_ACTOR,
            "followers_count": await db_session.scalar(
                select(func.count(models.Follower.id))
            ),
            "following_count": await db_session.scalar(
                select(func.count(models.Following.id))
            ),
            "actor_types": ap.ACTOR_TYPES,
            "custom_footer": CUSTOM_FOOTER,
            **template_args,
        },
        status_code=status_code,
        headers=headers,
    )


# HTML/templates helper
ALLOWED_TAGS = [
    "a",
    "abbr",
    "acronym",
    "b",
    "br",
    "blockquote",
    "code",
    "pre",
    "em",
    "i",
    "li",
    "ol",
    "strong",
    "sup",
    "sub",
    "del",
    "ul",
    "span",
    "div",
    "p",
    "h1",
    "h2",
    "h3",
    "h4",
    "h5",
    "h6",
    "table",
    "th",
    "tr",
    "td",
    "thead",
    "tbody",
    "tfoot",
    "colgroup",
    "caption",
    "img",
    "div",
    "span",
]

ALLOWED_CSS_CLASSES = [
    # microformats
    "h-card",
    "u-url",
    "mention",
    # code highlighting
    "highlight",
    "codehilite",
    "hll",
    "c",
    "err",
    "g",
    "k",
    "l",
    "n",
    "o",
    "x",
    "p",
    "ch",
    "cm",
    "cp",
    "cpf",
    "c1",
    "cs",
    "gd",
    "ge",
    "gr",
    "gh",
    "gi",
    "go",
    "gp",
    "gs",
    "gu",
    "gt",
    "kc",
    "kd",
    "kn",
    "kp",
    "kr",
    "kt",
    "ld",
    "m",
    "s",
    "na",
    "nb",
    "nc",
    "no",
    "nd",
    "ni",
    "ne",
    "nf",
    "nl",
    "nn",
    "nx",
    "py",
    "nt",
    "nv",
    "ow",
    "w",
    "mb",
    "mf",
    "mh",
    "mi",
    "mo",
    "sa",
    "sb",
    "sc",
    "dl",
    "sd",
    "s2",
    "se",
    "sh",
    "si",
    "sx",
    "sr",
    "s1",
    "ss",
    "bp",
    "fm",
    "vc",
    "vg",
    "vi",
    "vm",
    "il",
]


def _allow_class(_tag: str, name: str, value: str) -> bool:
    return name == "class" and value in ALLOWED_CSS_CLASSES


def _allow_img_attrs(_tag: str, name: str, value: str) -> bool:
    if name in ["src", "alt", "title"]:
        return True
    if name == "class" and value == "inline-img":
        return True

    return False


ALLOWED_ATTRIBUTES: dict[str, list[str] | Callable[[str, str, str], bool]] = {
    "a": ["href", "title"],
    "abbr": ["title"],
    "acronym": ["title"],
    "img": _allow_img_attrs,
    "div": _allow_class,
    "span": _allow_class,
    "code": _allow_class,
}


def _allow_all_attributes(tag: Any, name: Any, value: Any) -> bool:
    return True


@lru_cache(maxsize=256)
def _update_inline_imgs(content):
    soup = BeautifulSoup(content, "html5lib")
    imgs = soup.find_all("img")
    if not imgs:
        return content

    for img in imgs:
        if not img.attrs.get("src"):
            continue

        img.attrs["src"] = _media_proxy_url(img.attrs["src"]) + "/740"
        img["class"] = "inline-img"

    return soup.find("body").decode_contents()


def _clean_html(html: str, note: Object) -> str:
    if html is None:
        logger.error(f"{html=} for {note.ap_id}/{note.ap_object}")
        return ""
    try:
        return _emojify(
            _replace_custom_emojis(
                bleach.clean(
                    privacy_replace.replace_content(
                        _update_inline_imgs(highlight(html))
                    ),
                    tags=ALLOWED_TAGS,
                    attributes=(
                        _allow_all_attributes
                        if note.ap_id.startswith(config.ID)
                        else ALLOWED_ATTRIBUTES
                    ),
                    strip=True,
                ),
                note,
            ),
            is_local=note.ap_id.startswith(BASE_URL),
        )
    except Exception:
        raise


def _clean_html_wm(html: str) -> str:
    return bleach.clean(
        html,
        attributes=ALLOWED_ATTRIBUTES,
        strip=True,
    )


def _timeago(original_dt: datetime) -> str:
    dt = original_dt
    if dt.tzinfo:
        dt = dt.astimezone(timezone.utc).replace(tzinfo=None)
    return humanize.naturaltime(dt, when=now().replace(tzinfo=None))


def _has_media_type(attachment: Attachment, media_type_prefix: str) -> bool:
    if attachment.media_type:
        return attachment.media_type.startswith(media_type_prefix)
    return False


def _format_date(dt: datetime) -> str:
    return dt.strftime("%b %d, %Y, %H:%M")


def _pluralize(count: int, singular: str = "", plural: str = "s") -> str:
    if count > 1:
        return plural
    else:
        return singular


def _replace_custom_emojis(content: str, note: Object) -> str:
    idx = {}
    for tag in note.tags:
        if tag.get("type") == "Emoji":
            try:
                idx[tag["name"]] = proxied_media_url(tag["icon"]["url"])
            except KeyError:
                logger.warning(f"Failed to parse custom emoji {tag=}")
                continue

    for emoji_name, emoji_url in idx.items():
        content = content.replace(
            emoji_name,
            f'<img class="custom-emoji" src="{emoji_url}" title="{emoji_name}" alt="{emoji_name}">',  # noqa: E501
        )

    return content


def _html2text(content: str) -> str:
    return H2T.handle(content)


def _replace_emoji(u: str, _) -> str:
    filename = "-".join(hex(ord(c))[2:] for c in u)
    return config.EMOJI_TPL.format(base_url=BASE_URL, filename=filename, raw=u)


def _emojify(text: str, is_local: bool) -> str:
    if not is_local:
        return text

    return emoji.replace_emoji(
        text,
        replace=_replace_emoji,
    )


def _parse_datetime(dt: str) -> datetime:
    return parse(dt)


def _poll_item_pct(item: ap.RawObject, voters_count: int) -> int:
    if voters_count == 0:
        return 0

    return int(item["replies"]["totalItems"] * 100 / voters_count)


_templates.env.filters["domain"] = _filter_domain
_templates.env.filters["media_proxy_url"] = _media_proxy_url
_templates.env.filters["clean_html"] = _clean_html
_templates.env.filters["clean_html_wm"] = _clean_html_wm
_templates.env.filters["timeago"] = _timeago
_templates.env.filters["format_date"] = _format_date
_templates.env.filters["has_media_type"] = _has_media_type
_templates.env.filters["html2text"] = _html2text
_templates.env.filters["emojify"] = _emojify
_templates.env.filters["pluralize"] = _pluralize
_templates.env.filters["parse_datetime"] = _parse_datetime
_templates.env.filters["poll_item_pct"] = _poll_item_pct
_templates.env.filters["privacy_replace_url"] = privacy_replace.replace_url
_templates.env.globals["JS_HASH"] = config.JS_HASH
_templates.env.globals["CSS_HASH"] = config.CSS_HASH
_templates.env.globals["BASE_URL"] = config.BASE_URL
_templates.env.globals["HIDES_FOLLOWERS"] = config.HIDES_FOLLOWERS
_templates.env.globals["HIDES_FOLLOWING"] = config.HIDES_FOLLOWING
_templates.env.globals["NAVBAR_ITEMS"] = config.NavBarItems
_templates.env.globals["ICON_URL"] = config.CONFIG.icon_url
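render_template() injects the shared context (admin flag, CSRF token, counters, custom footer) into every page, so route handlers only pass their own arguments. A hypothetical route sketch, not part of this diff (the endpoint, template name and db_session dependency wiring are assumptions):

    from fastapi import Depends, Request

    from app import templates
    from app.database import AsyncSession, get_db_session  # get_db_session assumed here

    async def about_page(
        request: Request, db_session: AsyncSession = Depends(get_db_session)
    ):
        # The shared context is added by render_template; only page-specific
        # arguments are passed explicitly.
        return await templates.render_template(
            db_session, request, "about.html", {"title": "About"}
        )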
20
app/templates/admin_direct_messages.html
Normal file
@@ -0,0 +1,20 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }} - Direct messages</title>
{% endblock %}

{% block content %}

{% for anybox_object, convo, actors in threads %}
<div class="actor-action">
    With {% for actor in actors %}
    <a href="{{ url_for("admin_profile") }}?actor_id={{ actor.ap_id }}">
        {{ actor.handle }}
    </a>
    {% endfor %}
</div>
{{ utils.display_object(anybox_object) }}
{% endfor %}
{% endblock %}
46
app/templates/admin_inbox.html
Normal file
@@ -0,0 +1,46 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }} - Inbox</title>
{% endblock %}

{% block content %}

{% if show_filters %}
{{ utils.display_box_filters("admin_inbox") }}
{% endif %}

{% if not inbox %}
<div class="box">
    <p>Nothing to see yet, <a href="{{ url_for("get_lookup") }}">start following people in the lookup section</a>.</p>
</div>
{% endif %}

{% for inbox_object in inbox %}
{% if inbox_object.ap_type == "Announce" %}
    {{ utils.actor_action(inbox_object, "shared", with_icon=True) }}
    {{ utils.display_object(inbox_object.relates_to_anybox_object) }}
{% elif inbox_object.ap_type in ["Article", "Note", "Video", "Page", "Question"] %}
    {{ utils.display_object(inbox_object) }}
{% elif inbox_object.ap_type == "Follow" %}
    {{ utils.actor_action(inbox_object, "followed you") }}
    {{ utils.display_actor(inbox_object.actor, actors_metadata) }}
{% elif inbox_object.ap_type == "Like" %}
    {{ utils.actor_action(inbox_object, "liked one of your posts", with_icon=True) }}
    {{ utils.display_object(inbox_object.relates_to_anybox_object) }}
{% else %}
    <p>
    Implement {{ inbox_object.ap_type }}
    {{ inbox_object.ap_object }}
    </p>
{% endif %}
{% endfor %}

{% if next_cursor %}
<div class="box">
    <p><a href="{{ request.url._path }}?cursor={{ next_cursor }}{% if request.query_params.filter_by %}&filter_by={{ request.query_params.filter_by }}{% endif %}">See more</a></p>
</div>
{% endif %}

{% endblock %}
94
app/templates/admin_new.html
Normal file
@@ -0,0 +1,94 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }} - New</title>
{% endblock %}

{% block content %}

{% if in_reply_to_object %}
<div class="actor-action">In reply to:</div>
{{ utils.display_object(in_reply_to_object) }}
{% endif %}

<div class="box">
<nav class="flexbox">
    <ul>
        {% for ap_type in ["Note", "Article", "Question"] %}
        <li><a href="?type={{ ap_type }}" {% if request.query_params.get("type", "Note") == ap_type %}class="active"{% endif %}>
            {{ ap_type }}
        </a>
        </li>
        {% endfor %}
    </ul>
</nav>


<form class="form admin-new" action="{{ request.url_for("admin_actions_new") }}" enctype="multipart/form-data" method="POST">
    {{ utils.embed_csrf_token() }}
    {{ utils.embed_redirect_url() }}
    <p>
        <select name="visibility">
            {% for (k, v) in visibility_choices %}
            <option value="{{ k }}" {% if visibility == k or in_reply_to_object and in_reply_to_object.visibility.name == k %}selected{% endif %}>{{ v }}</option>
            {% endfor %}
        </select>
    </p>

    {% if request.query_params.type == "Article" %}
    <p>
        <input type="text" class="width-95" name="name" placeholder="Title">
    </p>
    {% endif %}

    {% for emoji in emojis %}
    <span class="ji">{{ emoji | emojify(True) | safe }}</span>
    {% endfor %}
    {% for emoji in custom_emojis %}
    <span class="ji"><img src="{{ emoji.icon.url }}" alt="{{ emoji.name }}" title="{{ emoji.name }}" class="custom-emoji"></span>
    {% endfor %}

    <textarea name="content" rows="10" cols="50" autofocus="autofocus" designMode="on" placeholder="Hey!">{{ content }}</textarea>

    {% if request.query_params.type == "Question" %}
    <p>
        <select name="poll_type">
            <option value="oneOf">single choice</option>
            <option value="anyOf">multiple choices</option>
        </select>
    </p>
    <p>
        <select name="poll_duration">
            <option value="5">ends in 5 minutes</option>
            <option value="30">ends in 30 minutes</option>
            <option value="60">ends in 1 hour</option>
            <option value="360">ends in 6 hours</option>
            <option value="1440">ends in 1 day</option>
        </select>
    </p>
    {% for i in ["1", "2", "3", "4"] %}
    <p>
        <input type="text" name="poll_answer_{{ i }}" class="width-95" placeholder="Option {{ i }}, leave empty to disable">
    </p>
    {% endfor %}
    {% endif %}

    <p>
        <input type="text" name="content_warning" placeholder="content warning (will mark the post as sensitive)"{% if content_warning %} value="{{ content_warning }}"{% endif %} class="width-95">
    </p>
    <p>
        <input type="checkbox" name="is_sensitive" id="is_sensitive"> <label for="is_sensitive">Mark attachment(s) as sensitive</label>
    </p>
    <input type="hidden" name="in_reply_to" value="{{ request.query_params.in_reply_to }}">
    <p>
        <input id="files" name="files" type="file" class="width-95" multiple>
    </p>
    <div id="alts"></div>
    <p>
        <input type="submit" value="Publish">
    </p>
</form>
</div>
<script src="{{ BASE_URL }}/static/new.js?v={{ JS_HASH }}"></script>
{% endblock %}
35
app/templates/admin_outbox.html
Normal file
@@ -0,0 +1,35 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }} - Outbox</title>
{% endblock %}

{% block content %}

{{ utils.display_box_filters("admin_outbox") }}

{% for outbox_object in outbox %}

{% if outbox_object.ap_type == "Announce" %}
    <div class="actor-action">You shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
    {{ utils.display_object(outbox_object.relates_to_anybox_object) }}
{% elif outbox_object.ap_type == "Like" %}
    <div class="actor-action">You liked <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
    {{ utils.display_object(outbox_object.relates_to_anybox_object) }}
{% elif outbox_object.ap_type == "Follow" %}
    <div class="actor-action">You followed <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
    {{ utils.display_actor(outbox_object.relates_to_actor, actors_metadata) }}
{% elif outbox_object.ap_type in ["Article", "Note", "Video", "Question"] %}
    {{ utils.display_object(outbox_object) }}
{% endif %}

{% endfor %}

{% if next_cursor %}
<div class="box">
    <p><a href="{{ url_for("admin_outbox") }}?cursor={{ next_cursor }}{% if request.query_params.filter_by %}&filter_by={{ request.query_params.filter_by }}{% endif %}">See more</a></p>
</div>
{% endif %}

{% endblock %}
29
app/templates/admin_profile.html
Normal file
@@ -0,0 +1,29 @@
{%- import "utils.html" as utils with context -%}

{% block head %}
<title>{{ local_actor.display_name }} - {{ actor.display_name }}</title>
{% endblock %}

{% extends "layout.html" %}
{% block content %}
{{ utils.display_actor(actor, actors_metadata, with_details=True) }}
{% for inbox_object in inbox_objects %}
{% if inbox_object.ap_type == "Announce" %}
    {{ utils.actor_action(inbox_object, "shared", with_icon=True) }}
    {{ utils.display_object(inbox_object.relates_to_anybox_object) }}
{% else %}
    {{ utils.display_object(inbox_object) }}
{% endif %}
{% endfor %}

{% if next_cursor %}
<div class="box">
    <p>
        <a href="{{ request.url._path }}?actor_id={{ request.query_params.actor_id }}&cursor={{ next_cursor }}">
            See more
        </a>
    </p>
</div>
{% endif %}

{% endblock %}
18
app/templates/admin_stream.html
Normal file
@@ -0,0 +1,18 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }} - Stream</title>
{% endblock %}

{% block content %}

{% for inbox_object in stream %}
{% if inbox_object.ap_type == "Announce" %}
    {{ utils.display_object(inbox_object.relates_to_anybox_object) }}
{% elif inbox_object.ap_type in ["Article", "Note", "Video"] %}
    {{ utils.display_object(inbox_object) }}
{% endif %}
{% endfor %}

{% endblock %}
20
app/templates/articles.html
Normal file
@@ -0,0 +1,20 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }}'s articles</title>
{% endblock %}

{% block content %}
{% include "header.html" %}

<ul class="h-feed" id="articles">
    <data class="p-name" value="{{ local_actor.display_name}}'s articles"></data>
    {% for outbox_object in objects %}
    <li class="h-entry">
        <time class="muted dt-published" datetime="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at.strftime("%b %d, %Y") }}</time> <a href="{{ outbox_object.url }}" class="u-url u-uid p-name">{{ outbox_object.name }}</a>
    </li>
    {% endfor %}
</ul>

{% endblock %}
30
app/templates/custom_page.html
Normal file
30
app/templates/custom_page.html
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
{%- import "utils.html" as utils with context -%}
|
||||||
|
{% extends "layout.html" %}
|
||||||
|
|
||||||
|
{% block head %}
|
||||||
|
<title>{{ title }}</title>
|
||||||
|
{% if request.url.path == "/" %}
|
||||||
|
<link rel="indieauth-metadata" href="{{ url_for("well_known_authorization_server") }}">
|
||||||
|
<link rel="authorization_endpoint" href="{{ url_for("indieauth_authorization_endpoint") }}">
|
||||||
|
<link rel="token_endpoint" href="{{ url_for("indieauth_token_endpoint") }}">
|
||||||
|
<link rel="micropub" href="{{ url_for("micropub_endpoint") }}">
|
||||||
|
<link rel="alternate" href="{{ local_actor.url }}" title="ActivityPub profile" type="application/activity+json">
|
||||||
|
<meta content="profile" property="og:type" />
|
||||||
|
<meta content="{{ local_actor.url }}" property="og:url" />
|
||||||
|
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
|
||||||
|
<meta content="Homepage" property="og:title" />
|
||||||
|
<meta content="{{ local_actor.summary | html2text | trim }}" property="og:description" />
|
||||||
|
<meta content="{{ ICON_URL }}" property="og:image" />
|
||||||
|
<meta content="summary" property="twitter:card" />
|
||||||
|
<meta content="{{ local_actor.handle }}" property="profile:username" />
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
{% include "header.html" %}
|
||||||
|
|
||||||
|
<div class="box">
|
||||||
|
{{ page_content | safe }}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% endblock %}
|
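When a custom page is mounted at "/", its head block advertises the IndieAuth and Micropub endpoints via link rel elements (indieauth-metadata, authorization_endpoint, token_endpoint, micropub). A client would typically discover them by fetching the homepage and reading those rels; a rough sketch of that discovery step, assuming httpx and BeautifulSoup are available and using a placeholder URL.

import httpx                      # assumption: httpx is available
from bs4 import BeautifulSoup     # assumption: beautifulsoup4 is available

def discover_endpoints(homepage_url: str) -> dict[str, str | None]:
    """Fetch a homepage and pull the IndieAuth/Micropub endpoints out of <link rel=...> tags."""
    html = httpx.get(homepage_url, follow_redirects=True).text
    soup = BeautifulSoup(html, "html.parser")
    endpoints: dict[str, str | None] = {}
    for rel in ["indieauth-metadata", "authorization_endpoint", "token_endpoint", "micropub"]:
        link = soup.find("link", rel=rel)
        endpoints[rel] = link["href"] if link else None
    return endpoints

print(discover_endpoints("https://example.com/"))  # placeholder URL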
12
app/templates/error.html
Normal file
@@ -0,0 +1,12 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block main_tag %} class="main-flex"{% endblock %}
{% block head %}
<title>{{ title }}</title>
{% endblock %}

{% block content %}
<div class="centered primary-color box">
<h1 class="error-title">{{ title | safe }}</h1>
</div>
{% endblock %}
32
app/templates/followers.html
Normal file
@@ -0,0 +1,32 @@
|
||||||
|
{%- import "utils.html" as utils with context -%}
|
||||||
|
{% extends "layout.html" %}
|
||||||
|
|
||||||
|
{% block head %}
|
||||||
|
<title>{{ local_actor.display_name }}'s followers</title>
|
||||||
|
<meta name="robots" content="noindex, nofollow">
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
{% include "header.html" %}
|
||||||
|
<div id="followers">
|
||||||
|
<ul>
|
||||||
|
{% for follower in followers %}
|
||||||
|
<li>{{ utils.display_actor(follower.actor, actors_metadata) }}</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
{% set x_more = followers_count - followers | length %}
|
||||||
|
{% if x_more > 0 %}
|
||||||
|
<div class="box">
|
||||||
|
<p>And {{ x_more }} more.</p>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if is_admin %}
|
||||||
|
<div class="box">
|
||||||
|
<p><a href="{{ url_for("admin_inbox") }}?filter_by=Follow">Manage followers</a></p>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
32
app/templates/following.html
Normal file
@@ -0,0 +1,32 @@
|
||||||
|
{%- import "utils.html" as utils with context -%}
|
||||||
|
{% extends "layout.html" %}
|
||||||
|
|
||||||
|
{% block head %}
|
||||||
|
<title>{{ local_actor.display_name }}'s follows</title>
|
||||||
|
<meta name="robots" content="noindex, nofollow">
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
{% include "header.html" %}
|
||||||
|
<div id="following">
|
||||||
|
<ul>
|
||||||
|
{% for follow in following %}
|
||||||
|
<li>{{ utils.display_actor(follow.actor, actors_metadata) }}</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
{% set x_more = following_count - following | length %}
|
||||||
|
{% if x_more > 0 %}
|
||||||
|
<div class="box">
|
||||||
|
<p>And {{ x_more }} more.</p>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if is_admin %}
|
||||||
|
<div class="box">
|
||||||
|
<p><a href="{{ url_for("admin_outbox") }}?filter_by=Follow">Manage follows</a></p>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
62
app/templates/header.html
Normal file
@@ -0,0 +1,62 @@
|
||||||
|
<header id="header">
|
||||||
|
|
||||||
|
<div class="h-card p-author">
|
||||||
|
<data class="u-photo" value="{{ local_actor.icon_url }}"></data>
|
||||||
|
<a href="{{ local_actor.url }}" class="u-url u-uid no-hover title">
|
||||||
|
<span class="name">{{ local_actor.name }}</span>
|
||||||
|
<span class="p-name handle">{{ local_actor.handle }}</span>
|
||||||
|
</a>
|
||||||
|
|
||||||
|
<div class="p-note summary">
|
||||||
|
{{ local_actor.summary | safe }}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="profile-props">
|
||||||
|
{% for prop in local_actor.attachments %}
|
||||||
|
<dl>
|
||||||
|
{% if prop.type == "PropertyValue" %}
|
||||||
|
<dt class="muted" title="{{ prop.name }}">{{ prop.name }}</dt>
|
||||||
|
<dd>{{ prop.value | clean_html(local_actor) | safe }}</dd>
|
||||||
|
{% endif %}
|
||||||
|
</dl>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{%- macro header_link(url, text) -%}
|
||||||
|
{% set url_for = BASE_URL + request.app.router.url_path_for(url) %}
|
||||||
|
<a href="{{ url_for }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{%- macro navbar_item_link(navbar_item) -%}
|
||||||
|
{% set url_for = BASE_URL + navbar_item[0] %}
|
||||||
|
<a href="{{ navbar_item[0] }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ navbar_item[1] }}</a>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
<div class="public-top-menu">
|
||||||
|
<nav class="flexbox">
|
||||||
|
<ul>
|
||||||
|
{% if NAVBAR_ITEMS.INDEX_NAVBAR_ITEM %}
|
||||||
|
<li>{{ navbar_item_link(NAVBAR_ITEMS.INDEX_NAVBAR_ITEM) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
<li>{{ header_link("index", "Notes") }}</li>
|
||||||
|
{% if articles_count %}
|
||||||
|
<li>{{ header_link("articles", "Articles") }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if not HIDES_FOLLOWERS or is_admin %}
|
||||||
|
<li>{{ header_link("followers", "Followers") }} <span class="counter">{{ followers_count }}</span></li>
|
||||||
|
{% endif %}
|
||||||
|
{% if not HIDES_FOLLOWING or is_admin %}
|
||||||
|
<li>{{ header_link("following", "Following") }} <span class="counter">{{ following_count }}</span></li>
|
||||||
|
{% endif %}
|
||||||
|
<li>{{ header_link("get_remote_follow", "Remote follow") }}</li>
|
||||||
|
{% for navbar_item in NAVBAR_ITEMS.EXTRA_NAVBAR_ITEMS %}
|
||||||
|
{{ navbar_item_link(navbar_item) }}
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
</header>
|
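The header_link and navbar_item_link macros above mark a navbar entry as active by comparing BASE_URL plus the current request path against the link's own absolute URL. The same check, stripped down to plain Python for clarity; BASE_URL here is an assumed example value, not configuration from this repository.

BASE_URL = "https://example.com"  # assumption: the instance's base URL

def is_active(request_path: str, link_path: str) -> bool:
    """Return True when the navbar link points at the page currently being rendered."""
    return BASE_URL + request_path == BASE_URL + link_path

assert is_active("/followers", "/followers")
assert not is_active("/", "/followers")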
60
app/templates/index.html
Normal file
@@ -0,0 +1,60 @@
|
||||||
|
{%- import "utils.html" as utils with context -%}
|
||||||
|
{% extends "layout.html" %}
|
||||||
|
|
||||||
|
{% block head %}
|
||||||
|
<title>{{ local_actor.display_name }}'s microblog</title>
|
||||||
|
<link rel="indieauth-metadata" href="{{ url_for("well_known_authorization_server") }}">
|
||||||
|
<link rel="authorization_endpoint" href="{{ url_for("indieauth_authorization_endpoint") }}">
|
||||||
|
<link rel="token_endpoint" href="{{ url_for("indieauth_token_endpoint") }}">
|
||||||
|
<link rel="micropub" href="{{ url_for("micropub_endpoint") }}">
|
||||||
|
<link rel="alternate" href="{{ local_actor.url }}" title="ActivityPub profile" type="application/activity+json">
|
||||||
|
<meta content="profile" property="og:type" />
|
||||||
|
<meta content="{{ local_actor.url }}" property="og:url" />
|
||||||
|
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
|
||||||
|
<meta content="Homepage" property="og:title" />
|
||||||
|
<meta content="{{ local_actor.summary | html2text | trim }}" property="og:description" />
|
||||||
|
<meta content="{{ ICON_URL }}" property="og:image" />
|
||||||
|
<meta content="summary" property="twitter:card" />
|
||||||
|
<meta content="{{ local_actor.handle }}" property="profile:username" />
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
{% include "header.html" %}
|
||||||
|
|
||||||
|
{% if objects %}
|
||||||
|
|
||||||
|
<div class="h-feed">
|
||||||
|
<data class="p-name" value="{{ local_actor.display_name}}'s notes"></data>
|
||||||
|
{% for outbox_object in objects %}
|
||||||
|
{% if outbox_object.ap_type in ["Note", "Video", "Question"] %}
|
||||||
|
{{ utils.display_object(outbox_object) }}
|
||||||
|
{% elif outbox_object.ap_type == "Announce" %}
|
||||||
|
<div class="h-entry" id="{{ outbox_object.permalink_id }}">
|
||||||
|
<div class="shared-header"><strong><a class="p-author h-card" href="{{ local_actor.url }}">{{ utils.display_tiny_actor_icon(local_actor) }} {{ local_actor.display_name | clean_html(local_actor) | safe }}</a></strong> shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
|
||||||
|
<div class="h-cite u-repost-of">
|
||||||
|
{{ utils.display_object(outbox_object.relates_to_anybox_object, is_h_entry=False) }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if has_previous_page or has_next_page %}
|
||||||
|
<div class="box">
|
||||||
|
{% if has_previous_page %}
|
||||||
|
<a href="{{ url_for("index") }}?page={{ current_page - 1 }}">Previous</a>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if has_next_page %}
|
||||||
|
<a href="{{ url_for("index") }}?page={{ current_page + 1 }}">Next</a>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% else %}
|
||||||
|
<div class="empty-state">
|
||||||
|
<p>Nothing to see here yet!</p>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% endblock %}
|
45
app/templates/indieauth_flow.html
Normal file
@@ -0,0 +1,45 @@
|
||||||
|
{%- import "utils.html" as utils with context -%}
|
||||||
|
{% extends "layout.html" %}
|
||||||
|
{% block content %}
|
||||||
|
<div class="box">
|
||||||
|
<div class"indieauth-box">
|
||||||
|
{% if client.logo %}
|
||||||
|
<div class="indieauth-logo">
|
||||||
|
<img src="{{client.logo | media_proxy_url }}" alt="{{ client.name }} logo">
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="indieauth-details">
|
||||||
|
<div>
|
||||||
|
{% if client.url %}
|
||||||
|
<a class="scolor" href="{{ client.url }}">{{ client.name }}</a>
|
||||||
|
{% else %}
|
||||||
|
<span class="scolor">{{ client.name }}</span>
|
||||||
|
{% endif %}
|
||||||
|
<p>wants you to login{% if me %} as <strong class="lcolor">{{ me }}</strong>{% endif %} with the following redirect URI: <code>{{ redirect_uri }}</code>.</p>
|
||||||
|
|
||||||
|
|
||||||
|
<form method="POST" action="{{ url_for('indieauth_flow') }}" class="form">
|
||||||
|
{{ utils.embed_csrf_token() }}
|
||||||
|
{% if scopes %}
|
||||||
|
<h3>Scopes</h3>
|
||||||
|
<ul>
|
||||||
|
{% for scope in scopes %}
|
||||||
|
<li><input type="checkbox" name="scopes" value="{{scope}}" id="scope-{{scope}}"><label for="scope-{{scope}}">{{ scope }}</label>
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
{% endif %}
|
||||||
|
<input type="hidden" name="redirect_uri" value="{{ redirect_uri }}">
|
||||||
|
<input type="hidden" name="state" value="{{ state }}">
|
||||||
|
<input type="hidden" name="client_id" value="{{ client_id }}">
|
||||||
|
<input type="hidden" name="me" value="{{ me }}">
|
||||||
|
<input type="hidden" name="response_type" value="{{ response_type }}">
|
||||||
|
<input type="hidden" name="code_challenge" value="{{ code_challenge }}">
|
||||||
|
<input type="hidden" name="code_challenge_method" value="{{ code_challenge_method }}">
|
||||||
|
<input type="submit" value="login">
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
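The hidden code_challenge and code_challenge_method fields in the consent form above come from the IndieAuth (OAuth 2.0) flow with PKCE. For reference, this is how a client derives an S256 code challenge from its code verifier; this is the standard RFC 7636 computation, not code from this repository.

import base64
import hashlib
import secrets

def make_pkce_pair() -> tuple[str, str]:
    """Return (code_verifier, code_challenge) per RFC 7636 using the S256 method."""
    code_verifier = secrets.token_urlsafe(32)
    digest = hashlib.sha256(code_verifier.encode("ascii")).digest()
    # Base64url-encode without padding, as required by the spec.
    code_challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return code_verifier, code_challenge

verifier, challenge = make_pkce_pair()
print(verifier, challenge)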
60
app/templates/layout.html
Normal file
@@ -0,0 +1,60 @@
|
||||||
|
<!DOCTYPE HTML>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<meta http-equiv="x-ua-compatible" content="ie=edge">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||||
|
<link rel="stylesheet" href="{{ BASE_URL }}/static/css/main.css?v={{ CSS_HASH }}">
|
||||||
|
<link rel="alternate" title="{{ local_actor.display_name}}'s microblog" type="application/json" href="{{ url_for("json_feed") }}" />
|
||||||
|
<link rel="alternate" href="{{ url_for("rss_feed") }}" type="application/rss+xml" title="{{ local_actor.display_name}}'s microblog">
|
||||||
|
<link rel="alternate" href="{{ url_for("atom_feed") }}" type="application/atom+xml" title="{{ local_actor.display_name}}'s microblog">
|
||||||
|
<link rel="icon" type="image/x-icon" href="{{ BASE_URL }}/static/favicon.ico">
|
||||||
|
<style>{{ highlight_css }}</style>
|
||||||
|
{% block head %}{% endblock %}
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="main">
|
||||||
|
<main{%- block main_tag %}{%- endblock %}>
|
||||||
|
{% if is_admin %}
|
||||||
|
<div id="admin">
|
||||||
|
{% macro admin_link(url, text) %}
|
||||||
|
{% set url_for = BASE_URL + request.app.router.url_path_for(url) %}
|
||||||
|
<a href="{{ url_for }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
|
||||||
|
{% endmacro %}
|
||||||
|
<div class="admin-menu">
|
||||||
|
<nav class="flexbox">
|
||||||
|
<ul>
|
||||||
|
<li>{{ admin_link("index", "Public") }}</li>
|
||||||
|
<li>{{ admin_link("admin_new", "New") }}</li>
|
||||||
|
<li>{{ admin_link("admin_stream", "Stream") }}</li>
|
||||||
|
<li>{{ admin_link("admin_inbox", "Inbox") }} / {{ admin_link("admin_outbox", "Outbox") }}</li>
|
||||||
|
<li>{{ admin_link("admin_direct_messages", "DMs") }}</li>
|
||||||
|
<li>{{ admin_link("get_notifications", "Notifications") }} {% if notifications_count %}({{ notifications_count }}){% endif %}</li>
|
||||||
|
<li>{{ admin_link("get_lookup", "Lookup") }}</li>
|
||||||
|
<li>{{ admin_link("admin_bookmarks", "Bookmarks") }}</li>
|
||||||
|
<li><a href="{{ url_for("logout")}}">Logout</a></li>
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% block content %}{% endblock %}
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<footer class="footer">
|
||||||
|
<div class="box">
|
||||||
|
{% if custom_footer %}
|
||||||
|
{{ custom_footer | safe }}
|
||||||
|
{% else %}
|
||||||
|
Powered by <a href="https://docs.microblog.pub">microblog.pub</a> <small class="microblogpub-version"><code>{{ microblogpub_version }}</code></small> and the <a href="https://activitypub.rocks/">ActivityPub</a> protocol. <a href="{{ url_for("login") }}">Admin</a>.
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</footer>
|
||||||
|
{% if is_admin %}
|
||||||
|
<script src="{{ BASE_URL }}/static/common-admin.js?v={{ JS_HASH }}"></script>
|
||||||
|
{% endif %}
|
||||||
|
<script src="{{ BASE_URL }}/static/common.js?v={{ JS_HASH }}"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
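layout.html appends ?v={{ CSS_HASH }} / ?v={{ JS_HASH }} to the static asset URLs so browsers re-fetch them whenever the files change. A minimal sketch of computing such a cache-busting hash at startup; the file paths and the truncation length are assumptions for illustration, not the project's actual values.

import hashlib
from pathlib import Path

def asset_hash(path: str) -> str:
    """Hash a static file so its URL changes whenever its content does."""
    return hashlib.sha256(Path(path).read_bytes()).hexdigest()[:12]

CSS_HASH = asset_hash("app/static/css/main.css")   # assumed path
JS_HASH = asset_hash("app/static/common.js")       # assumed path
print(f"/static/css/main.css?v={CSS_HASH}")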
21
app/templates/login.html
Normal file
@@ -0,0 +1,21 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block main_tag %} class="main-flex"{% endblock %}
{% block content %}
<div class="centered">
<div>
{% if error %}
<p class="primary-color">Invalid password.</p>
{% endif %}
<form class="form" action="{{ BASE_URL }}/admin/login" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
<input type="hidden" name="redirect" value="{{ redirect }}">
<input type="password" placeholder="password" name="password" autofocus>
<input type="submit" value="login">
</form>
</div>
</div>
{% endblock %}
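Every state-changing form in these templates embeds a hidden csrf_token field (see the embed_csrf_token macro in utils.html further below). One common way to implement such a token is an HMAC-signed value tied to the session; a stdlib-only sketch of that pattern follows, as an illustration rather than this project's actual implementation.

import hashlib
import hmac
import secrets

SECRET_KEY = secrets.token_bytes(32)  # assumption: in practice this is a persistent server secret

def issue_csrf_token(session_id: str) -> str:
    # Bind the token to the session so it cannot be replayed across sessions.
    return hmac.new(SECRET_KEY, session_id.encode(), hashlib.sha256).hexdigest()

def verify_csrf_token(session_id: str, token: str) -> bool:
    return hmac.compare_digest(issue_csrf_token(session_id), token)

tok = issue_csrf_token("session-123")
assert verify_csrf_token("session-123", tok)
assert not verify_csrf_token("session-456", tok)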
38
app/templates/lookup.html
Normal file
@@ -0,0 +1,38 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }} - Lookup</title>
{% endblock %}

{% block content %}

<div class="box">
<p>Interact with an ActivityPub object via its URL or look for a user using <i>@user@domain.tld</i></p>

<form class="form" action="{{ url_for("get_lookup") }}" method="GET">
<input type="text" name="query" value="{{ query if query else "" }}" autofocus>
<input type="submit" value="Lookup">
</form>
</div>

{% if error %}
<div class="box error-box">
{% if error.value == "NOT_FOUND" %}
<p>The remote object is unavailable.</p>
{% elif error.value == "UNAUTHORIZED" %}
<p>Missing permissions to fetch the remote object.</p>
{% elif error.value == "TIMEOUT" %}
<p>Lookup timed out, please try refreshing the page.</p>
{% else %}
<p>Unexpected error, please check the logs and report an issue if needed.</p>
{% endif %}
</div>
{% endif %}

{% if ap_object and ap_object.ap_type in actor_types %}
{{ utils.display_actor(ap_object, actors_metadata, with_details=True) }}
{% elif ap_object %}
{{ utils.display_object(ap_object, actors_metadata=actors_metadata) }}
{% endif %}
{% endblock %}
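The lookup form accepts either an object URL or a @user@domain.tld handle. Handles are normally resolved through WebFinger before the actor document is fetched; a rough sketch of that resolution step, using an example handle and assuming httpx is available.

import httpx  # assumption: httpx is available

def webfinger_lookup(handle: str) -> str | None:
    """Resolve @user@domain.tld to the actor's ActivityPub ID via WebFinger."""
    user, domain = handle.lstrip("@").split("@", 1)
    resp = httpx.get(
        f"https://{domain}/.well-known/webfinger",
        params={"resource": f"acct:{user}@{domain}"},
        headers={"Accept": "application/jrd+json"},
        follow_redirects=True,
    )
    resp.raise_for_status()
    # The "self" link with an ActivityPub media type points at the actor document.
    for link in resp.json().get("links", []):
        if link.get("rel") == "self" and link.get("type") == "application/activity+json":
            return link.get("href")
    return None

print(webfinger_lookup("@dev@microblog.pub"))  # example handle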
126
app/templates/notifications.html
Normal file
@@ -0,0 +1,126 @@
|
||||||
|
{%- import "utils.html" as utils with context -%}
|
||||||
|
{% extends "layout.html" %}
|
||||||
|
|
||||||
|
{% block head %}
|
||||||
|
<title>{{ local_actor.display_name }} - Notifications</title>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% macro notif_actor_action(notif, text, with_icon=False) %}
|
||||||
|
<div class="actor-action">
|
||||||
|
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.actor.ap_id }}">
|
||||||
|
{% if with_icon %}{{ utils.display_tiny_actor_icon(notif.actor) }}{% endif %} {{ notif.actor.display_name | clean_html(notif.actor) | safe }}</a> {{ text }}
|
||||||
|
<span title="{{ notif.created_at.isoformat() }}">{{ notif.created_at | timeago }}</span>
|
||||||
|
{% if notif.is_new %}
|
||||||
|
<span class="new">new</span>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="box">
|
||||||
|
<h2>Notifications</h2>
|
||||||
|
</div>
|
||||||
|
<div id="notifications">
|
||||||
|
{%- for notif in notifications %}
|
||||||
|
<div>
|
||||||
|
{%- if notif.notification_type.value == "new_follower" %}
|
||||||
|
{{ notif_actor_action(notif, "followed you") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{%- elif notif.notification_type.value == "pending_incoming_follower" %}
|
||||||
|
{{ notif_actor_action(notif, "sent a follow request") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata, pending_incoming_follow_notif=notif) }}
|
||||||
|
{% elif notif.notification_type.value == "rejected_follower" %}
|
||||||
|
{% elif notif.notification_type.value == "unfollow" %}
|
||||||
|
{{ notif_actor_action(notif, "unfollowed you") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{%- elif notif.notification_type.value == "follow_request_accepted" %}
|
||||||
|
{{ notif_actor_action(notif, "accepted your follow request") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{%- elif notif.notification_type.value == "follow_request_rejected" %}
|
||||||
|
{{ notif_actor_action(notif, "rejected your follow request") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{% elif notif.notification_type.value == "blocked" %}
|
||||||
|
{{ notif_actor_action(notif, "blocked you") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{% elif notif.notification_type.value == "unblocked" %}
|
||||||
|
{{ notif_actor_action(notif, "unblocked you") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{% elif notif.notification_type.value == "block" %}
|
||||||
|
{{ notif_actor_action(notif, "was blocked") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{% elif notif.notification_type.value == "unblock" %}
|
||||||
|
{{ notif_actor_action(notif, "was unblocked") }}
|
||||||
|
{{ utils.display_actor(notif.actor, actors_metadata) }}
|
||||||
|
{%- elif notif.notification_type.value == "move" and notif.inbox_object %}
|
||||||
|
{# for move notif, the actor is the target and the inbox object the Move activity #}
|
||||||
|
<div class="actor-action">
|
||||||
|
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.inbox_object.actor.ap_id }}">
|
||||||
|
{{ utils.display_tiny_actor_icon(notif.inbox_object.actor) }} {{ notif.inbox_object.actor.display_name | clean_html(notif.inbox_object.actor) | safe }}</a> has moved to
|
||||||
|
<span title="{{ notif.created_at.isoformat() }}">{{ notif.created_at | timeago }}</span>
|
||||||
|
</div>
|
||||||
|
{{ utils.display_actor(notif.actor) }}
|
||||||
|
{% elif notif.notification_type.value == "like" %}
|
||||||
|
{{ notif_actor_action(notif, "liked a post", with_icon=True) }}
|
||||||
|
{{ utils.display_object(notif.outbox_object) }}
|
||||||
|
{% elif notif.notification_type.value == "undo_like" %}
|
||||||
|
{{ notif_actor_action(notif, "unliked a post", with_icon=True) }}
|
||||||
|
{{ utils.display_object(notif.outbox_object) }}
|
||||||
|
{% elif notif.notification_type.value == "announce" %}
|
||||||
|
{{ notif_actor_action(notif, "shared a post", with_icon=True) }}
|
||||||
|
{{ utils.display_object(notif.outbox_object) }}
|
||||||
|
{% elif notif.notification_type.value == "undo_announce" %}
|
||||||
|
{{ notif_actor_action(notif, "unshared a post", with_icon=True) }}
|
||||||
|
{{ utils.display_object(notif.outbox_object) }}
|
||||||
|
{% elif notif.notification_type.value == "mention" %}
|
||||||
|
{{ notif_actor_action(notif, "mentioned you") }}
|
||||||
|
{{ utils.display_object(notif.inbox_object) }}
|
||||||
|
{% elif notif.notification_type.value == "new_webmention" %}
|
||||||
|
<div class="actor-action" title="{{ notif.created_at.isoformat() }}">
|
||||||
|
new webmention from
|
||||||
|
{% set facepile_item = notif.webmention.as_facepile_item %}
|
||||||
|
{% if facepile_item %}
|
||||||
|
<a href="{{ facepile_item.actor_url }}">{{ facepile_item.actor_name }}</a>
|
||||||
|
{% endif %}
|
||||||
|
<a class="bold" href="{{ notif.webmention.source }}">{{ notif.webmention.source }}</a>
|
||||||
|
</div>
|
||||||
|
{{ utils.display_object(notif.outbox_object) }}
|
||||||
|
{% elif notif.notification_type.value == "updated_webmention" %}
|
||||||
|
<div class="actor-action" title="{{ notif.created_at.isoformat() }}">
|
||||||
|
updated webmention from
|
||||||
|
{% set facepile_item = notif.webmention.as_facepile_item %}
|
||||||
|
{% if facepile_item %}
|
||||||
|
<a href="{{ facepile_item.actor_url }}">{{ facepile_item.actor_name }}</a>
|
||||||
|
{% endif %}
|
||||||
|
<a class="bold" href="{{ notif.webmention.source }}">{{ notif.webmention.source }}</a>
|
||||||
|
</div>
|
||||||
|
{{ utils.display_object(notif.outbox_object) }}
|
||||||
|
{% elif notif.notification_type.value == "deleted_webmention" %}
|
||||||
|
<div class="actor-action" title="{{ notif.created_at.isoformat() }}">
|
||||||
|
deleted webmention from
|
||||||
|
{% set facepile_item = notif.webmention.as_facepile_item %}
|
||||||
|
{% if facepile_item %}
|
||||||
|
<a href="{{ facepile_item.actor_url }}">{{ facepile_item.actor_name }}</a>
|
||||||
|
{% endif %}
|
||||||
|
<a class="bold" href="{{ notif.webmention.source }}">{{ notif.webmention.source }}</a>
|
||||||
|
</div>
|
||||||
|
{{ utils.display_object(notif.outbox_object) }}
|
||||||
|
{% else %}
|
||||||
|
<div class="actor-action">
|
||||||
|
Implement {{ notif.notification_type }}
|
||||||
|
</div>
|
||||||
|
{%- endif %}
|
||||||
|
</div>
|
||||||
|
{%- endfor %}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if next_cursor %}
|
||||||
|
<div class="box">
|
||||||
|
<p>
|
||||||
|
<a href="{{ request.url._path }}?cursor={{ next_cursor }}">
|
||||||
|
See more{% if more_unread_count %} ({{ more_unread_count }} unread left){% endif %}
|
||||||
|
</a>
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% endblock %}
|
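The long if/elif chain in notifications.html dispatches on notif.notification_type.value. The string values used by the template ("new_follower", "like", "mention", and so on) imply an enum roughly like the sketch below; it is reconstructed from the template branches only, so treat it as illustrative rather than the model's exact definition.

import enum

class NotificationType(str, enum.Enum):
    # Values taken from the branches in notifications.html above.
    NEW_FOLLOWER = "new_follower"
    PENDING_INCOMING_FOLLOWER = "pending_incoming_follower"
    REJECTED_FOLLOWER = "rejected_follower"
    UNFOLLOW = "unfollow"
    FOLLOW_REQUEST_ACCEPTED = "follow_request_accepted"
    FOLLOW_REQUEST_REJECTED = "follow_request_rejected"
    BLOCKED = "blocked"
    UNBLOCKED = "unblocked"
    BLOCK = "block"
    UNBLOCK = "unblock"
    MOVE = "move"
    LIKE = "like"
    UNDO_LIKE = "undo_like"
    ANNOUNCE = "announce"
    UNDO_ANNOUNCE = "undo_announce"
    MENTION = "mention"
    NEW_WEBMENTION = "new_webmention"
    UPDATED_WEBMENTION = "updated_webmention"
    DELETED_WEBMENTION = "deleted_webmention"

print(NotificationType.MENTION.value)  # "mention"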
56
app/templates/object.html
Normal file
@@ -0,0 +1,56 @@
|
||||||
|
{%- import "utils.html" as utils with context -%}
|
||||||
|
{% extends "layout.html" %}
|
||||||
|
|
||||||
|
{% block head %}
|
||||||
|
{% if outbox_object %}
|
||||||
|
{% if outbox_object.content %}
|
||||||
|
{% set excerpt = outbox_object.content | html2text | trim | truncate(50) %}
|
||||||
|
{% else %}
|
||||||
|
{% set excerpt = outbox_object.summary | html2text | trim | truncate(50) %}
|
||||||
|
{% endif %}
|
||||||
|
<title>{% if outbox_object.name %}{{ outbox_object.name }}{% else %}{{ local_actor.display_name }}: "{{ excerpt }}"{% endif %}</title>
|
||||||
|
<link rel="webmention" href="{{ url_for("webmention_endpoint") }}">
|
||||||
|
<link rel="alternate" href="{{ request.url }}" type="application/activity+json">
|
||||||
|
<meta name="description" content="{{ excerpt }}">
|
||||||
|
<meta content="article" property="og:type" />
|
||||||
|
<meta content="{{ outbox_object.url }}" property="og:url" />
|
||||||
|
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
|
||||||
|
<meta content="{% if outbox_object.name %}{{ outbox_object.name }}{% else %}Note{% endif %}" property="og:title" />
|
||||||
|
<meta content="{{ excerpt }}" property="og:description" />
|
||||||
|
<meta content="{{ local_actor.icon_url }}" property="og:image" />
|
||||||
|
<meta content="summary" property="twitter:card" />
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
{% if outbox_object %}
|
||||||
|
{% include "header.html" %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% macro display_replies_tree(replies_tree_node) %}
|
||||||
|
|
||||||
|
{% if replies_tree_node.is_requested %}
|
||||||
|
{{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True, is_h_entry=False) }}
|
||||||
|
{% else %}
|
||||||
|
{% if replies_tree_node.wm_reply %}
|
||||||
|
{# u-comment h-cite is displayed by default for webmention #}
|
||||||
|
{{ utils.display_webmention_reply(replies_tree_node.wm_reply) }}
|
||||||
|
{% else %}
|
||||||
|
<div class="u-comment h-cite">
|
||||||
|
{{ utils.display_object(replies_tree_node.ap_object, is_h_entry=False) }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% for child in replies_tree_node.children %}
|
||||||
|
{{ display_replies_tree(child) }}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
<div class="h-entry">
|
||||||
|
{{ display_replies_tree(replies_tree) }}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% endblock %}
|
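object.html renders a threaded view by recursing over replies_tree_node.children with the display_replies_tree macro, so the view layer only needs a simple tree of reply nodes. A hypothetical, simplified sketch of building such a tree from flat objects keyed by in_reply_to; the field names are assumptions for illustration, not the project's models.

from dataclasses import dataclass, field

@dataclass
class ReplyTreeNode:
    ap_object: dict
    children: list["ReplyTreeNode"] = field(default_factory=list)

def build_replies_tree(root: dict, replies: list[dict]) -> ReplyTreeNode:
    """Attach each reply under the node whose ap_id matches its in_reply_to."""
    nodes = {root["ap_id"]: ReplyTreeNode(root)}
    # Replies are published after their parents, so processing in published order
    # guarantees the parent node already exists when a child is attached.
    for obj in sorted(replies, key=lambda o: o["published"]):
        nodes[obj["ap_id"]] = ReplyTreeNode(obj)
        parent = nodes.get(obj.get("in_reply_to"))
        if parent:
            parent.children.append(nodes[obj["ap_id"]])
    return nodes[root["ap_id"]]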
15
app/templates/redirect.html
Normal file
@@ -0,0 +1,15 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}

{% block content %}
{% include "header.html" %}

<div class="box">
<p>You are being redirected to: <a href="{{ url }}">{{ url }}</a></p>
</div>

{% endblock %}
15
app/templates/redirect_to_remote_instance.html
Normal file
@@ -0,0 +1,15 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}

{% block content %}
{% include "header.html" %}

<div class="box">
<p>You are being redirected to your instance: <a href="{{ url }}">{{ url }}</a></p>
</div>

{% endblock %}
21
app/templates/remote_follow.html
Normal file
@@ -0,0 +1,21 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>Remote follow {{ local_actor.display_name }}</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}

{% block content %}
{% include "header.html" %}

<div class="box">
<h2>Remotely follow {{ local_actor.display_name }}</h2>
<form class="form" action="{{ url_for("post_remote_follow") }}" method="POST">
{{ utils.embed_csrf_token() }}
<input type="text" name="profile" placeholder="you@instance.tld" autofocus>
<input type="submit" value="follow">
</form>
</div>

{% endblock %}
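The remote follow form collects the visitor's own you@instance.tld handle. The usual flow for this feature in the fediverse is to WebFinger that handle, find the OStatus "subscribe" link template, and redirect the visitor to it with this actor's handle substituted for {uri}. A hedged sketch of that redirect-building step, using example handles and assuming httpx is available; it illustrates the general protocol, not this project's exact handler.

import httpx  # assumption: httpx is available

SUBSCRIBE_REL = "http://ostatus.org/schema/1.0/subscribe"

def build_remote_follow_url(visitor_handle: str, local_handle: str) -> str | None:
    """Return the URL on the visitor's instance that pre-fills a follow of local_handle."""
    user, domain = visitor_handle.lstrip("@").split("@", 1)
    resp = httpx.get(
        f"https://{domain}/.well-known/webfinger",
        params={"resource": f"acct:{user}@{domain}"},
        follow_redirects=True,
    )
    resp.raise_for_status()
    for link in resp.json().get("links", []):
        if link.get("rel") == SUBSCRIBE_REL and link.get("template"):
            return link["template"].replace("{uri}", local_handle)
    return None

print(build_remote_follow_url("@you@instance.tld", "@dev@example.com"))  # example handles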
27
app/templates/remote_interact.html
Normal file
@@ -0,0 +1,27 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}

{% block head %}
<title>Interact from your instance</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}

{% block content %}
{% include "header.html" %}

<div class="box">
<h2>Interact with this object</h2>
</div>

{{ utils.display_object(outbox_object) }}

<div class="box">
<form class="form" action="{{ url_for("post_remote_interaction") }}" method="POST">
{{ utils.embed_csrf_token() }}
<input type="text" name="profile" placeholder="you@instance.tld" autofocus>
<input type="hidden" name="ap_id" value="{{ outbox_object.ap_id }}">
<input type="submit" value="interact from your instance">
</form>
</div>

{% endblock %}
853
app/templates/utils.html
Normal file
@@ -0,0 +1,853 @@
|
||||||
|
{% macro embed_csrf_token() %}
|
||||||
|
{% block embed_csrf_token scoped %}
|
||||||
|
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro embed_redirect_url(permalink_id=None) %}
|
||||||
|
{% block embed_redirect_url scoped %}
|
||||||
|
<input type="hidden" name="redirect_url" value="{{ request.url }}{% if permalink_id %}#{{ permalink_id }}{% endif %}">
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_block_button(actor) %}
|
||||||
|
{% block admin_block_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_block") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url() }}
|
||||||
|
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
|
||||||
|
<input type="submit" value="block">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_unblock_button(actor) %}
|
||||||
|
{% block admin_unblock_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_unblock") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url() }}
|
||||||
|
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
|
||||||
|
<input type="submit" value="unblock">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_hide_shares_button(actor) %}
|
||||||
|
{% block admin_hide_shares_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_hide_announces") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url() }}
|
||||||
|
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
|
||||||
|
<input type="submit" value="hide shares">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_show_shares_button(actor) %}
|
||||||
|
{% block admin_show_shares_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_show_announces") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url() }}
|
||||||
|
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
|
||||||
|
<input type="submit" value="show shares">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
|
||||||
|
{% macro admin_follow_button(actor) %}
|
||||||
|
{% block admin_follow_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_follow") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url() }}
|
||||||
|
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
|
||||||
|
<input type="submit" value="follow">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_accept_incoming_follow_button(notif) %}
|
||||||
|
{% block admin_accept_incoming_follow_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_accept_incoming_follow") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url() }}
|
||||||
|
<input type="hidden" name="notification_id" value="{{ notif.id }}">
|
||||||
|
<input type="submit" value="accept follow">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_reject_incoming_follow_button(notif) %}
|
||||||
|
{% block admin_reject_incoming_follow_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_reject_incoming_follow") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url() }}
|
||||||
|
<input type="hidden" name="notification_id" value="{{ notif.id }}">
|
||||||
|
<input type="submit" value="reject follow">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_like_button(ap_object_id, permalink_id) %}
|
||||||
|
{% block admin_like_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_like") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="like">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_bookmark_button(ap_object_id, permalink_id) %}
|
||||||
|
{% block admin_bookmark_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_bookmark") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="bookmark">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_unbookmark_button(ap_object_id, permalink_id) %}
|
||||||
|
{% block admin_unbookmark_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_unbookmark") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="unbookmark">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_pin_button(ap_object_id, permalink_id) %}
|
||||||
|
{% block admin_pin_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_pin") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="pin">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_unpin_button(ap_object_id, permalink_id) %}
|
||||||
|
{% block admin_unpin_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_unpin") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="unpin">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_delete_button(ap_object) %}
|
||||||
|
{% block admin_delete_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_delete") }}" class="object-delete-form" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
<input type="hidden" name="redirect_url" value="{% if request.url.path.endswith("/" + ap_object.public_id) or (request.url.path == "/admin/object" and request.query_params.ap_id.endswith("/" + ap_object.public_id)) %}{{ request.base_url}}{% else %}{{ request.url }}{% endif %}">
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object.ap_id }}">
|
||||||
|
<input type="submit" value="delete">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_force_delete_button(ap_object_id, permalink_id=None) %}
|
||||||
|
{% block admin_force_delete_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_force_delete") }}" class="object-delete-form" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="local delete">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_force_delete_webmention_button(webmention_id, permalink_id=None) %}
|
||||||
|
{% block admin_force_delete_webmention_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_force_delete_webmention") }}" class="object-delete-form" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="webmention_id" value="{{ webmention_id }}">
|
||||||
|
<input type="submit" value="local delete">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_announce_button(ap_object_id, permalink_id=None) %}
|
||||||
|
{% block admin_announce_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_announce") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="share">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_undo_button(ap_object_id, action="undo", permalink_id=None) %}
|
||||||
|
{% block admin_undo_button scoped %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_undo") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(permalink_id) }}
|
||||||
|
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
|
||||||
|
<input type="submit" value="{{ action }}">
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_reply_button(ap_object_id) %}
|
||||||
|
{% block admin_reply_button scoped %}
|
||||||
|
<form action="{{ BASE_URL }}/admin/new" method="GET">
|
||||||
|
<input type="hidden" name="in_reply_to" value="{{ ap_object_id }}">
|
||||||
|
<button type="submit">reply</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_dm_button(actor_handle) %}
|
||||||
|
{% block admin_dm_button scoped %}
|
||||||
|
<form action="{{ BASE_URL }}/admin/new" method="GET">
|
||||||
|
<input type="hidden" name="with_content" value="{{ actor_handle }}">
|
||||||
|
<input type="hidden" name="with_visibility" value="DIRECT">
|
||||||
|
<button type="submit">direct message</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_mention_button(actor_handle) %}
|
||||||
|
{% block admin_mention_button scoped %}
|
||||||
|
<form action="{{ BASE_URL }}/admin/new" method="GET">
|
||||||
|
<input type="hidden" name="with_content" value="{{ actor_handle }}">
|
||||||
|
<button type="submit">mention</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
{% macro admin_profile_button(ap_actor_id) %}
|
||||||
|
{% block admin_profile_button scoped %}
|
||||||
|
<form action="{{ url_for("admin_profile") }}" method="GET">
|
||||||
|
<input type="hidden" name="actor_id" value="{{ ap_actor_id }}">
|
||||||
|
<button type="submit">profile</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro admin_expand_button(ap_object) %}
|
||||||
|
{% block admin_expand_button scoped %}
|
||||||
|
{# TODO turn these into a regular link and append permalink ID if it's a reply #}
|
||||||
|
<form action="{{ url_for("admin_object") }}" method="GET">
|
||||||
|
<input type="hidden" name="ap_id" value="{{ ap_object.ap_id }}">
|
||||||
|
<button type="submit">expand</button>
|
||||||
|
</form>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro display_box_filters(route) %}
|
||||||
|
{% block display_box_filters scoped %}
|
||||||
|
<nav class="flexbox box">
|
||||||
|
<ul>
|
||||||
|
<li>Filter by</li>
|
||||||
|
{% for ap_type in ["Note", "Article", "Page", "Question", "Like", "Announce", "Follow"] %}
|
||||||
|
<li><a href="{{ url_for(route) }}?filter_by={{ ap_type }}" {% if request.query_params.filter_by == ap_type %}class="active"{% endif %}>
|
||||||
|
{{ ap_type }}
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
{% if request.query_params.filter_by %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ url_for(route) }}">Reset filter</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro display_tiny_actor_icon(actor) %}
|
||||||
|
{% block display_tiny_actor_icon scoped %}
|
||||||
|
<img class="tiny-actor-icon" src="{{ actor.resized_icon_url }}" alt="">
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro actor_action(inbox_object, text, with_icon=False) %}
|
||||||
|
{% block actor_action scoped %}
|
||||||
|
<div class="actor-action">
|
||||||
|
<a href="{{ url_for("admin_profile") }}?actor_id={{ inbox_object.actor.ap_id }}">
|
||||||
|
{% if with_icon %}{{ display_tiny_actor_icon(inbox_object.actor) }}{% endif %} {{ inbox_object.actor.display_name | clean_html(inbox_object.actor) | safe }}
|
||||||
|
</a> {{ text }}
|
||||||
|
<span title="{{ inbox_object.ap_published_at.isoformat() }}">{{ inbox_object.ap_published_at | timeago }}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro display_actor(actor, actors_metadata={}, embedded=False, with_details=False, pending_incoming_follow_notif=None) %}
|
||||||
|
{% block display_actor scoped %}
|
||||||
|
{% set metadata = actors_metadata.get(actor.ap_id) %}
|
||||||
|
|
||||||
|
{% if not embedded %}
|
||||||
|
<div class="ap-object">
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<div class="actor-box h-card p-author">
|
||||||
|
<div class="icon-box">
|
||||||
|
<img src="{{ actor.resized_icon_url }}" alt="{{ actor.display_name }}'s avatar" class="actor-icon u-photo">
|
||||||
|
</div>
|
||||||
|
<a href="{{ actor.url }}" class="u-url">
|
||||||
|
<div><strong>{{ actor.display_name | clean_html(actor) | safe }}</strong></div>
|
||||||
|
<div class="actor-handle p-name">{{ actor.handle }}</div>
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if is_admin and metadata %}
|
||||||
|
<div>
|
||||||
|
<nav class="flexbox actor-metadata">
|
||||||
|
<ul>
|
||||||
|
{% if metadata.has_blocked_local_actor %}
|
||||||
|
<li>blocked you</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if metadata.is_following %}
|
||||||
|
<li>already following</li>
|
||||||
|
<li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "unfollow")}}</li>
|
||||||
|
{% if not with_details %}
|
||||||
|
<li>{{ admin_profile_button(actor.ap_id) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% elif metadata.is_follow_request_sent %}
|
||||||
|
{% if metadata.is_follow_request_rejected %}
|
||||||
|
<li>follow request rejected</li>
|
||||||
|
{% if not metadata.has_blocked_local_actor %}
|
||||||
|
<li>{{ admin_follow_button(actor) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% else %}
|
||||||
|
<li>follow request sent</li>
|
||||||
|
<li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "undo follow") }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% elif not actor.moved_to %}
|
||||||
|
<li>{{ admin_follow_button(actor) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if metadata.is_follower %}
|
||||||
|
<li>follows you</li>
|
||||||
|
{% if not metadata.is_following and not with_details %}
|
||||||
|
<li>{{ admin_profile_button(actor.ap_id) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% elif actor.is_from_db and not with_details and not metadata.is_following %}
|
||||||
|
<li>{{ admin_profile_button(actor.ap_id) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if actor.moved_to %}
|
||||||
|
<li>has moved to {% if metadata.moved_to %}<a href="{{ url_for("admin_profile") }}?actor_id={{ actor.moved_to }}">{{ metadata.moved_to.handle }}</a>{% else %}<a href="{{ url_for("get_lookup") }}?query={{ actor.moved_to }}">{{ actor.moved_to }}</a>{% endif %}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if actor.is_from_db %}
|
||||||
|
{% if actor.is_blocked %}
|
||||||
|
<li>blocked</li>
|
||||||
|
<li>{{ admin_unblock_button(actor) }}</li>
|
||||||
|
{% else %}
|
||||||
|
<li>{{ admin_block_button(actor) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
<li>{{ admin_dm_button(actor.handle) }}</li>
|
||||||
|
<li>{{ admin_mention_button(actor.handle) }}</li>
|
||||||
|
{% if pending_incoming_follow_notif %}
|
||||||
|
{% if not pending_incoming_follow_notif.is_accepted and not pending_incoming_follow_notif.is_rejected %}
|
||||||
|
<li>
|
||||||
|
{{ admin_accept_incoming_follow_button(pending_incoming_follow_notif) }}
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
{{ admin_reject_incoming_follow_button(pending_incoming_follow_notif) }}
|
||||||
|
</li>
|
||||||
|
{% elif pending_incoming_follow_notif.is_accepted %}
|
||||||
|
<li>accepted</li>
|
||||||
|
{% else %}
|
||||||
|
<li>rejected</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% if actor.are_announces_hidden_from_stream %}
|
||||||
|
<li>{{ admin_show_shares_button(actor) }}</li>
|
||||||
|
{% else %}
|
||||||
|
<li>{{ admin_hide_shares_button(actor) }}</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if with_details %}
|
||||||
|
<li><a href="{{ actor.url }}" class="label-btn">remote profile</a></li>
|
||||||
|
{% endif %}
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if with_details %}
|
||||||
|
{% if actor.summary %}
|
||||||
|
<div class="p-note">
|
||||||
|
{{ actor.summary | clean_html(actor) | safe }}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if actor.attachments %}
|
||||||
|
<div id="profile-props">
|
||||||
|
{% for prop in actor.attachments %}
|
||||||
|
<dl>
|
||||||
|
{% if prop.type == "PropertyValue" %}
|
||||||
|
<dt class="muted" title="{{ prop.name }}">{{ prop.name }}</dt>
|
||||||
|
<dd>{{ prop.value | clean_html(actor) | safe }}</dd>
|
||||||
|
{% endif %}
|
||||||
|
</dl>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if not embedded %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro display_og_meta(object) %}
|
||||||
|
{% block display_og_meta scoped %}
|
||||||
|
{% if object.og_meta %}
|
||||||
|
{% for og_meta in object.og_meta[:1] %}
|
||||||
|
<div class="activity-og-meta">
|
||||||
|
{% if og_meta.image %}
|
||||||
|
<div>
|
||||||
|
<img src="{{ og_meta.image | media_proxy_url }}">
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
<div>
|
||||||
|
<a href="{{ og_meta.url | privacy_replace_url }}">{{ og_meta.title }}</a>
|
||||||
|
{% if og_meta.site_name %}
|
||||||
|
<small>{{ og_meta.site_name }}</small>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
|
||||||
|
{% macro display_attachments(object) %}
|
||||||
|
{% block display_attachments scoped %}
|
||||||
|
|
||||||
|
{% for attachment in object.attachments %}
|
||||||
|
{% if attachment.type != "PropertyValue" %}
|
||||||
|
{% set orientation = "unknown" %}
|
||||||
|
{% if attachment.width %}
|
||||||
|
{% set orientation = "portrait" if attachment.width < attachment.height else "landscape" %}
|
||||||
|
{% endif %}
|
||||||
|
{% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %}
|
||||||
|
<div class="attachment-wrapper">
|
||||||
|
<label for="{{attachment.proxied_url}}" class="label-btn show-hide-sensitive-btn">show/hide sensitive content</label>
|
||||||
|
<div>
|
||||||
|
<div class="sensitive-attachment">
|
||||||
|
<input class="sensitive-attachment-state" type="checkbox" id="{{attachment.proxied_url}}" aria-hidden="true">
|
||||||
|
<div class="sensitive-attachment-box attachment-orientation-{{orientation}}">
|
||||||
|
<div></div>
|
||||||
|
{% else %}
|
||||||
|
<div class="attachment-item attachment-orientation-{{orientation}}">
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if attachment.type == "Image" or (attachment | has_media_type("image")) %}
|
||||||
|
{% if attachment.url not in object.inlined_images %}
|
||||||
|
<a class="media-link" href="{{ attachment.proxied_url }}" target="_blank">
|
||||||
|
<img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} title="{{ attachment.name }}" alt="{{ attachment.name }}"{% endif %} class="attachment u-photo">
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
{% elif attachment.type == "Video" or (attachment | has_media_type("video")) %}
|
||||||
|
<div class="video-wrapper">
|
||||||
|
<video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="u-video"></video>
|
||||||
|
<div class="video-gif-overlay">GIF</div>
|
||||||
|
</div>
|
||||||
|
{% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %}
|
||||||
|
<audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment u-audio"></audio>
|
||||||
|
{% elif attachment.type == "Link" %}
|
||||||
|
<a href="{{ attachment.url }}" class="attachment">{{ attachment.url | truncate(64, True) }}</a> ({{ attachment.mimetype}})
|
||||||
|
{% else %}
|
||||||
|
<a href="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.url }}"{% endif %} class="attachment">
|
||||||
|
{% if attachment.name %}{{ attachment.name }}{% else %}{{ attachment.url | truncate(64, True) }}{% endif %}
|
||||||
|
</a> ({{ attachment.mimetype }})
|
||||||
|
{% endif %}
|
||||||
|
{% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro display_webmention_reply(wm_reply) %}
|
||||||
|
{% block display_webmention_reply scoped %}
|
||||||
|
|
||||||
|
<div class="ap-object u-comment h-cite">
|
||||||
|
<div class="actor-box h-card p-author">
|
||||||
|
<div class="icon-box">
|
||||||
|
<img src="{{ wm_reply.face.picture_url }}" alt="{{ wm_reply.face.name }}'s avatar" class="actor-icon u-photo">
|
||||||
|
</div>
|
||||||
|
<a href="{{ wm_reply.face.url }}" class="u-url">
|
||||||
|
<div><strong class="p-name">{{ wm_reply.face.name | clean_html_wm | safe }}</strong></div>
|
||||||
|
<div class="actor-handle">{{ wm_reply.face.url | truncate(64, True) }}</div>
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p class="in-reply-to">in reply to <a href="{{ wm_reply.in_reply_to }}" title="{{ wm_reply.in_reply_to }}" rel="nofollow">
|
||||||
|
this object
|
||||||
|
</a></p>
|
||||||
|
|
||||||
|
<div class="obj-content margin-top-20">
|
||||||
|
<div class="e-content">
|
||||||
|
{{ wm_reply.content | clean_html_wm | safe }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<nav class="flexbox activity-bar margin-top-20">
|
||||||
|
<ul>
|
||||||
|
<li>
|
||||||
|
<div><a href="{{ wm_reply.url }}" rel="nofollow" class="object-permalink u-url u-uid">permalink</a></div>
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<time class="dt-published" datetime="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}" title="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}">{{ wm_reply.published_at | timeago }}</time>
|
||||||
|
</li>
|
||||||
|
{% if is_admin %}
|
||||||
|
<li>
|
||||||
|
{{ admin_force_delete_webmention_button(wm_reply.webmention_id) }}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False, is_h_entry=True) %}
|
||||||
|
{% block display_object scoped %}
|
||||||
|
{% set is_article_mode = object.is_from_outbox and object.ap_type == "Article" and is_object_page %}
|
||||||
|
{% if object.ap_type in ["Note", "Article", "Video", "Page", "Question", "Event"] %}
|
||||||
|
<div class="ap-object {% if expanded %}ap-object-expanded {% endif %}{% if is_h_entry %}h-entry{% endif %}" id="{{ object.permalink_id }}">
|
||||||
|
|
||||||
|
{% if is_article_mode %}
|
||||||
|
<data class="h-card">
|
||||||
|
<data class="u-photo" value="{{ local_actor.icon_url }}"></data>
|
||||||
|
<data class="u-url" value="{{ local_actor.url}}"></data>
|
||||||
|
<data class="p-name" value="{{ local_actor.handle }}"></data>
|
||||||
|
</data>
|
||||||
|
{% else %}
|
||||||
|
{{ display_actor(object.actor, actors_metadata, embedded=True) }}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.in_reply_to %}
|
||||||
|
<p class="in-reply-to">in reply to <a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" rel="nofollow">
|
||||||
|
this object
|
||||||
|
</a></p>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.ap_type in ["Article", "Event"] %}
|
||||||
|
<h2 class="p-name no-margin-top">{{ object.name }}</h2>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.ap_type == "Event" %}
|
||||||
|
{% if object.ap_object.get("endTime") and object.ap_object.get("startTime") %}
|
||||||
|
<p>On {{ object.ap_object.startTime | parse_datetime | format_date }}
|
||||||
|
(ends {{ object.ap_object.endTime | parse_datetime | format_date }})</p>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.ap_object.get("location") %}
|
||||||
|
{% set loc = object.ap_object.get("location") %}
|
||||||
|
{% if loc.type == "Place" and loc.latitude and loc.longitude %}
|
||||||
|
<div class="ap-place">
|
||||||
|
<h3>Location</h3>
|
||||||
|
{% if loc.name %}{{ loc.name }}{% endif %}
|
||||||
|
<span class="h-geo">
|
||||||
|
<data class="p-latitude" value="{{ loc.latitude}}"></data>
|
||||||
|
<data class="p-longitude" value="{{ loc.longitude }}"></data>
|
||||||
|
<a href="https://www.openstreetmap.org/?mlat={{ loc.latitude }}&mlon={{ loc.longitude }}#map=16/{{loc.latitude}}/{{loc.longitude}}">{{loc.latitude}},{{loc.longitude}}</a>
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if is_article_mode %}
|
||||||
|
<time class="dt-published muted" datetime="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}" title="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}">{{ object.ap_published_at.strftime("%b %d, %Y") }}</time>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.summary %}
|
||||||
|
<details class="show-more-wrapper">
|
||||||
|
<summary>
|
||||||
|
<div class="p-summary">
|
||||||
|
<p>{{ object.summary | clean_html(object) | safe }}</p>
|
||||||
|
</div>
|
||||||
|
<span class="show-more-btn" aria-hidden="true"></span>
|
||||||
|
</summary>
|
||||||
|
{% endif %}
|
||||||
|
<div class="obj-content">
|
||||||
|
<div class="e-content">
|
||||||
|
{{ object.content | clean_html(object) | safe }}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{% if object.ap_type == "Question" %}
|
||||||
|
{% set can_vote = is_admin and object.is_from_inbox and not object.is_poll_ended and not object.voted_for_answers %}
|
||||||
|
{% if can_vote %}
|
||||||
|
<form action="{{ request.url_for("admin_actions_vote") }}" method="POST">
|
||||||
|
{{ embed_csrf_token() }}
|
||||||
|
{{ embed_redirect_url(object.permalink_id) }}
|
||||||
|
<input type="hidden" name="in_reply_to" value="{{ object.ap_id }}">
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.poll_items %}
|
||||||
|
<ul class="poll-items">
|
||||||
|
{% for item in object.poll_items %}
|
||||||
|
<li>
|
||||||
|
{% set pct = item | poll_item_pct(object.poll_voters_count) %}
|
||||||
|
<p>
|
||||||
|
{% if can_vote %}
|
||||||
|
<input type="{% if object.is_one_of_poll %}radio{% else %}checkbox{% endif %}" name="name" value="{{ item.name }}" id="{{object.permalink_id}}-{{item.name}}">
|
||||||
|
<label for="{{object.permalink_id}}-{{item.name}}">
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{{ item.name | clean_html(object) | safe }}
|
||||||
|
|
||||||
|
{% if object.voted_for_answers and item.name in object.voted_for_answers %}
|
||||||
|
<span class="muted poll-vote">you voted for this answer</span>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if can_vote %}
|
||||||
|
</label>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<span class="float-right">{{ pct }}% <span class="muted">({{ item.replies.totalItems }} votes)</span></span>
|
||||||
|
</p>
|
||||||
|
<svg class="poll-bar">
|
||||||
|
<line x1="0" y1="10px" x2="{{ pct or 1 }}%" y2="10px"></line>
|
||||||
|
</svg>
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if can_vote %}
|
||||||
|
<p class="form">
|
||||||
|
<input type="submit" value="vote">
|
||||||
|
</p>
|
||||||
|
</form>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{{ display_og_meta(object) }}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
{% if object.summary %}
|
||||||
|
</details>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<div class="activity-attachment">
|
||||||
|
{{ display_attachments(object) }}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<nav class="flexbox activity-bar">
|
||||||
|
<ul>
|
||||||
|
<li>
|
||||||
|
<div><a href="{{ object.url }}"{% if object.is_from_inbox %} rel="nofollow"{% endif %} class="object-permalink u-url u-uid">permalink</a></div>
|
||||||
|
</li>
|
||||||
|
|
||||||
|
{% if object.is_from_outbox and is_object_page and not is_admin and not request.url.path.startswith("/remote_interaction") %}
|
||||||
|
<li>
|
||||||
|
<a class="label-btn" href="{{ request.url_for("remote_interaction") }}?ap_id={{ object.ap_id }}">
|
||||||
|
interact from your instance
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
{% if not is_article_mode %}
|
||||||
|
<li>
|
||||||
|
<time class="dt-published" datetime="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}" title="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}">{{ object.ap_published_at | timeago }}</time>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if object.ap_type == "Question" %}
|
||||||
|
{% if object.poll_end_time %}
|
||||||
|
<li>
|
||||||
|
{% if object.is_poll_ended %}ended{% else %}ends{% endif %}
|
||||||
|
<time title="{{ object.poll_end_time.replace(microsecond=0).isoformat() }}">
|
||||||
|
{{ object.poll_end_time | timeago }}
|
||||||
|
</time>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
<li>
|
||||||
|
{{ object.poll_voters_count }} voters
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if is_admin %}
|
||||||
|
<li>
|
||||||
|
{{ object.visibility.value }}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.is_from_outbox %}
|
||||||
|
{% if object.likes_count %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ object.url }}"><strong>{{ object.likes_count }}</strong> like{{ object.likes_count | pluralize }}</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.announces_count %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ object.url }}"><strong>{{ object.announces_count }}</strong> share{{ object.announces_count | pluralize }}</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.webmentions_count %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ object.url }}"><strong>{{ object.webmentions_count }}</strong> webmention{{ object.webmentions_count | pluralize }}</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (object.is_from_outbox or is_admin) and object.replies_count %}
|
||||||
|
<li>
|
||||||
|
<a href="{% if is_admin and not object.is_from_outbox %}{{ url_for("admin_object") }}?ap_id={{ object.ap_id }}{% if object.in_reply_to %}#{{ object.permalink_id }}{% endif %}{% else %}{{ object.url }}{% endif %}"><strong>{{ object.replies_count }}</strong> repl{{ object.replies_count | pluralize("y", "ies") }}</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
{% if is_admin %}
|
||||||
|
<nav class="flexbox activity-bar">
|
||||||
|
<ul>
|
||||||
|
{% if object.is_from_outbox %}
|
||||||
|
<li>
|
||||||
|
{{ admin_delete_button(object) }}
|
||||||
|
</li>
|
||||||
|
|
||||||
|
<li>
|
||||||
|
{% if object.is_pinned %}
|
||||||
|
{{ admin_unpin_button(object.ap_id, object.permalink_id) }}
|
||||||
|
{% else %}
|
||||||
|
{{ admin_pin_button(object.ap_id, object.permalink_id) }}
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<li>
|
||||||
|
{{ admin_reply_button(object.ap_id) }}
|
||||||
|
</li>
|
||||||
|
|
||||||
|
{% if not object.is_from_outbox %}
|
||||||
|
<li>
|
||||||
|
{% if object.liked_via_outbox_object_ap_id %}
|
||||||
|
{{ admin_undo_button(object.liked_via_outbox_object_ap_id, "unlike", object.permalink_id) }}
|
||||||
|
{% else %}
|
||||||
|
{{ admin_like_button(object.ap_id, object.permalink_id) }}
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
|
||||||
|
<li>
|
||||||
|
{% if object.is_bookmarked %}
|
||||||
|
{{ admin_unbookmark_button(object.ap_id, object.permalink_id) }}
|
||||||
|
{% else %}
|
||||||
|
{{ admin_bookmark_button(object.ap_id, object.permalink_id) }}
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
|
||||||
|
{% if object.visibility in [visibility_enum.PUBLIC, visibility_enum.UNLISTED] %}
|
||||||
|
<li>
|
||||||
|
{% if object.announced_via_outbox_object_ap_id %}
|
||||||
|
{{ admin_undo_button(object.announced_via_outbox_object_ap_id, "unshare") }}
|
||||||
|
{% else %}
|
||||||
|
{{ admin_announce_button(object.ap_id, permalink_id=object.permalink_id) }}
|
||||||
|
{% endif %}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if object.is_from_inbox %}
|
||||||
|
<li>
|
||||||
|
{{ admin_profile_button(object.actor.ap_id) }}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% if object.is_from_inbox or object.is_from_outbox %}
|
||||||
|
<li>
|
||||||
|
{{ admin_expand_button(object) }}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if object.is_from_inbox and not object.announced_via_outbox_object_ap_id and object.is_local_reply %}
|
||||||
|
<li>
|
||||||
|
{{ admin_force_delete_button(object.ap_id) }}
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
</ul>
|
||||||
|
</nav>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
{% if likes or shares or webmentions %}
|
||||||
|
<div class="public-interactions">
|
||||||
|
{% if likes %}
|
||||||
|
<div class="interactions-block">Likes
|
||||||
|
<div class="facepile-wrapper">
|
||||||
|
{% for like in likes %}
|
||||||
|
<a href="{% if is_admin and like.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ like.ap_actor_id }}{% else %}{{ like.url }}{% endif %}" title="{{ like.name }}" rel="noreferrer">
|
||||||
|
<img src="{{ like.picture_url }}" alt="{{ like.name }}">
|
||||||
|
</a>
|
||||||
|
{% endfor %}
|
||||||
|
{% if object.likes_count > likes | length %}
|
||||||
|
<div class="and-x-more">
|
||||||
|
and {{ object.likes_count - likes | length }} more.
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if shares %}
|
||||||
|
<div class="interactions-block">Shares
|
||||||
|
<div class="facepile-wrapper">
|
||||||
|
{% for share in shares %}
|
||||||
|
<a href="{% if is_admin and share.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ share.ap_actor_id }}{% else %}{{ share.url }}{% endif %}" title="{{ share.name }}" rel="noreferrer">
|
||||||
|
<img src="{{ share.picture_url }}" alt="{{ share.name }}">
|
||||||
|
</a>
|
||||||
|
{% endfor %}
|
||||||
|
{% if object.announces_count > shares | length %}
|
||||||
|
<div class="and-x-more">
|
||||||
|
and {{ object.announces_count - shares | length }} more.
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if webmentions %}
|
||||||
|
<div class="interactions-block">Webmentions
|
||||||
|
<div class="facepile-wrapper">
|
||||||
|
{% for webmention in webmentions %}
|
||||||
|
{% set wm = webmention.as_facepile_item %}
|
||||||
|
{% if wm %}
|
||||||
|
<a href="{{ wm.url }}" title="{{ wm.actor_name }}" rel="noreferrer">
|
||||||
|
<img src="{{ wm.actor_icon_url | media_proxy_url }}" alt="{{ wm.actor_name }}">
|
||||||
|
</a>
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
|
{% endmacro %}
|
125
app/uploads.py
Normal file
@@ -0,0 +1,125 @@
import hashlib
from shutil import COPY_BUFSIZE  # type: ignore

import blurhash  # type: ignore
from fastapi import UploadFile
from loguru import logger
from PIL import Image
from PIL import ImageOps
from sqlalchemy import select

from app import activitypub as ap
from app import models
from app.config import BASE_URL
from app.config import ROOT_DIR
from app.database import AsyncSession

UPLOAD_DIR = ROOT_DIR / "data" / "uploads"


async def save_upload(db_session: AsyncSession, f: UploadFile) -> models.Upload:
    # Compute the hash
    h = hashlib.blake2b(digest_size=32)
    while True:
        buf = f.file.read(COPY_BUFSIZE)
        if not buf:
            break
        h.update(buf)

    content_hash = h.hexdigest()
    f.file.seek(0)

    existing_upload = (
        await db_session.execute(
            select(models.Upload).where(models.Upload.content_hash == content_hash)
        )
    ).scalar_one_or_none()
    if existing_upload:
        logger.info(f"Upload with {content_hash=} already exists")
        return existing_upload

    logger.info(f"Creating new Upload with {content_hash=}")
    dest_filename = UPLOAD_DIR / content_hash

    has_thumbnail = False
    image_blurhash = None
    width = None
    height = None

    if f.content_type.startswith("image") and not f.content_type == "image/gif":
        with Image.open(f.file) as _original_image:
            # Fix image orientation (as we will remove the info from the EXIF
            # metadata)
            original_image = ImageOps.exif_transpose(_original_image)

            # Re-creating the image drops the EXIF metadata
            destination_image = Image.new(
                original_image.mode,
                original_image.size,
            )
            destination_image.putdata(original_image.getdata())
            destination_image.save(
                dest_filename,
                format=_original_image.format,  # type: ignore
            )

            with open(dest_filename, "rb") as dest_f:
                image_blurhash = blurhash.encode(dest_f, x_components=4, y_components=3)

            try:
                width, height = destination_image.size
                destination_image.thumbnail((740, 740))
                destination_image.save(
                    UPLOAD_DIR / f"{content_hash}_resized",
                    format="webp",
                )
            except Exception:
                logger.exception(
                    f"Failed to create thumbnail for {f.filename}/{content_hash}"
                )
            else:
                has_thumbnail = True
                logger.info("Thumbnail generated")
    else:
        with open(dest_filename, "wb") as dest:
            while True:
                buf = f.file.read(COPY_BUFSIZE)
                if not buf:
                    break
                dest.write(buf)

    new_upload = models.Upload(
        content_type=f.content_type,
        content_hash=content_hash,
        has_thumbnail=has_thumbnail,
        blurhash=image_blurhash,
        width=width,
        height=height,
    )
    db_session.add(new_upload)
    await db_session.commit()

    return new_upload


def upload_to_attachment(
    upload: models.Upload,
    filename: str,
    alt_text: str | None,
) -> ap.RawObject:
    extra_attachment_fields = {}
    if upload.blurhash:
        extra_attachment_fields.update(
            {
                "blurhash": upload.blurhash,
                "height": upload.height,
                "width": upload.width,
            }
        )
    return {
        "type": "Document",
        "mediaType": upload.content_type,
        "name": alt_text or filename,
        "url": BASE_URL + f"/attachments/{upload.content_hash}/{filename}",
        **extra_attachment_fields,
    }
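A minimal sketch of the attachment document `upload_to_attachment` produces, assuming a hypothetical JPEG upload with a blurhash (values illustrative, not from the diff):

# Illustrative only: `upload` is an existing models.Upload row for a JPEG.
attachment = upload_to_attachment(upload, "photo.jpg", alt_text="A sunset")
# {
#     "type": "Document",
#     "mediaType": "image/jpeg",
#     "name": "A sunset",
#     "url": BASE_URL + "/attachments/<content_hash>/photo.jpg",
#     "blurhash": "...",
#     "height": 1080,
#     "width": 1920,
# }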
32
app/utils/custom_index_handler.py
Normal file
@@ -0,0 +1,32 @@
from typing import Any
from typing import Awaitable
from typing import Callable

from fastapi import Depends
from fastapi import Request
from fastapi.responses import JSONResponse

from app.actor import LOCAL_ACTOR
from app.config import is_activitypub_requested
from app.database import AsyncSession
from app.database import get_db_session

_Handler = Callable[[Request, AsyncSession], Awaitable[Any]]


def build_custom_index_handler(handler: _Handler) -> _Handler:
    async def custom_index(
        request: Request,
        db_session: AsyncSession = Depends(get_db_session),
    ) -> Any:
        # Serve the AP actor if requested
        if is_activitypub_requested(request):
            return JSONResponse(
                LOCAL_ACTOR.ap_actor,
                media_type="application/activity+json",
            )

        # Defer to the custom handler
        return await handler(request, db_session)

    return custom_index
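A sketch of how `build_custom_index_handler` might be wired up, assuming a FastAPI `app` instance and a hypothetical `my_index` coroutine (both names are illustrative, not from the diff):

from fastapi.responses import HTMLResponse

async def my_index(request, db_session):
    # Plain HTML for browsers; ActivityPub clients get the actor above.
    return HTMLResponse("<h1>Hello</h1>")

app.get("/")(build_custom_index_handler(my_index))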
16
app/utils/datetime.py
Normal file
@@ -0,0 +1,16 @@
from datetime import datetime
from datetime import timezone

from dateutil.parser import isoparse


def parse_isoformat(isodate: str) -> datetime:
    return isoparse(isodate).astimezone(timezone.utc)


def now() -> datetime:
    return datetime.now(timezone.utc)


def as_utc(dt: datetime) -> datetime:
    return dt.replace(tzinfo=timezone.utc)
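For example, `parse_isoformat` normalizes any ISO 8601 offset to UTC (illustrative, not part of the diff):

parse_isoformat("2022-07-27T12:00:00+02:00")
# datetime.datetime(2022, 7, 27, 10, 0, tzinfo=datetime.timezone.utc)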
51
app/utils/emoji.py
Normal file
@@ -0,0 +1,51 @@
import mimetypes
import re
import typing
from pathlib import Path

if typing.TYPE_CHECKING:
    from app.activitypub import RawObject

EMOJI_REGEX = re.compile(r"(:[\d\w]+:)")

EMOJIS: dict[str, "RawObject"] = {}
EMOJIS_BY_NAME: dict[str, "RawObject"] = {}


def _load_emojis(root_dir: Path, base_url: str) -> None:
    if EMOJIS:
        return
    for dir_name, path in (
        (root_dir / "app" / "static" / "emoji", "static/emoji"),
        (root_dir / "data" / "custom_emoji", "custom_emoji"),
    ):
        for emoji in dir_name.iterdir():
            mt = mimetypes.guess_type(emoji.name)[0]
            if mt and mt.startswith("image/"):
                name = emoji.name.split(".")[0]
                if not re.match(EMOJI_REGEX, f":{name}:"):
                    continue
                ap_emoji: "RawObject" = {
                    "type": "Emoji",
                    "name": f":{name}:",
                    "updated": "1970-01-01T00:00:00Z",  # XXX: we don't track date
                    "id": f"{base_url}/e/{name}",
                    "icon": {
                        "mediaType": mt,
                        "type": "Image",
                        "url": f"{base_url}/{path}/{emoji.name}",
                    },
                }
                EMOJIS[emoji.name] = ap_emoji
                EMOJIS_BY_NAME[ap_emoji["name"]] = ap_emoji


def tags(content: str) -> list["RawObject"]:
    tags = []
    added = set()
    for e in re.findall(EMOJI_REGEX, content):
        if e not in added and e in EMOJIS_BY_NAME:
            tags.append(EMOJIS_BY_NAME[e])
            added.add(e)

    return tags
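A small sketch of what `tags` returns once a custom emoji has been loaded, assuming a hypothetical `:verified:` image in one of the emoji directories (values illustrative):

tags("hello :verified: world")
# [{"type": "Emoji", "name": ":verified:", "updated": "1970-01-01T00:00:00Z",
#   "id": "https://example.com/e/verified", "icon": {"mediaType": "image/png", ...}}]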
172
app/utils/facepile.py
Normal file
@@ -0,0 +1,172 @@
import datetime
from dataclasses import dataclass
from datetime import timezone
from typing import Any
from typing import Optional

from loguru import logger

from app import media
from app.models import InboxObject
from app.models import Webmention
from app.utils.datetime import parse_isoformat
from app.utils.url import must_make_abs


@dataclass
class Face:
    ap_actor_id: str | None
    url: str
    name: str
    picture_url: str
    created_at: datetime.datetime

    @classmethod
    def from_inbox_object(cls, like: InboxObject) -> "Face":
        return cls(
            ap_actor_id=like.actor.ap_id,
            url=like.actor.url,  # type: ignore
            name=like.actor.handle,  # type: ignore
            picture_url=like.actor.resized_icon_url,
            created_at=like.created_at,  # type: ignore
        )

    @classmethod
    def from_webmention(cls, webmention: Webmention) -> Optional["Face"]:
        items = webmention.source_microformats.get("items", [])  # type: ignore
        for item in items:
            if item["type"][0] == "h-card":
                try:
                    return cls(
                        ap_actor_id=None,
                        url=(
                            must_make_abs(
                                item["properties"]["url"][0], webmention.source
                            )
                            if item["properties"].get("url")
                            else webmention.source
                        ),
                        name=item["properties"]["name"][0],
                        picture_url=media.resized_media_url(
                            must_make_abs(
                                item["properties"]["photo"][0], webmention.source
                            ),  # type: ignore
                            50,
                        ),
                        created_at=webmention.created_at,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break
            elif item["type"][0] == "h-entry":
                author = item["properties"]["author"][0]
                try:
                    return cls(
                        ap_actor_id=None,
                        url=webmention.source,
                        name=author["properties"]["name"][0],
                        picture_url=media.resized_media_url(
                            must_make_abs(
                                author["properties"]["photo"][0], webmention.source
                            ),  # type: ignore
                            50,
                        ),
                        created_at=webmention.created_at,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break

        return None


def merge_faces(faces: list[Face]) -> list[Face]:
    return sorted(
        faces,
        key=lambda f: f.created_at,
        reverse=True,
    )[:10]


def _parse_face(webmention: Webmention, items: list[dict[str, Any]]) -> Face | None:
    for item in items:
        if item["type"][0] == "h-card":
            try:
                return Face(
                    ap_actor_id=None,
                    url=(
                        must_make_abs(item["properties"]["url"][0], webmention.source)
                        if item["properties"].get("url")
                        else webmention.source
                    ),
                    name=item["properties"]["name"][0],
                    picture_url=media.resized_media_url(
                        must_make_abs(
                            item["properties"]["photo"][0], webmention.source
                        ),  # type: ignore
                        50,
                    ),
                    created_at=webmention.created_at,  # type: ignore
                )
            except Exception:
                logger.exception(
                    f"Failed to build Face for webmention id={webmention.id}"
                )
                break

    return None


@dataclass
class WebmentionReply:
    face: Face
    content: str
    url: str
    published_at: datetime.datetime
    in_reply_to: str
    webmention_id: int

    @classmethod
    def from_webmention(cls, webmention: Webmention) -> Optional["WebmentionReply"]:
        items = webmention.source_microformats.get("items", [])  # type: ignore
        for item in items:
            if item["type"][0] == "h-entry":
                try:
                    face = _parse_face(webmention, item["properties"].get("author", []))
                    if not face:
                        logger.info(
                            "Failed to build WebmentionReply/Face for "
                            f"webmention id={webmention.id}"
                        )
                        break

                    if "published" in item["properties"]:
                        published_at = (
                            parse_isoformat(item["properties"]["published"][0])
                            .astimezone(timezone.utc)
                            .replace(tzinfo=None)
                        )
                    else:
                        published_at = webmention.created_at  # type: ignore

                    return cls(
                        face=face,
                        content=item["properties"]["content"][0]["html"],
                        url=must_make_abs(
                            item["properties"]["url"][0], webmention.source
                        ),
                        published_at=published_at,
                        in_reply_to=webmention.target,  # type: ignore
                        webmention_id=webmention.id,  # type: ignore
                    )
                except Exception:
                    logger.exception(
                        f"Failed to build Face for webmention id={webmention.id}"
                    )
                    break

        return None
22
app/utils/favicon.py
Normal file
@@ -0,0 +1,22 @@
import sass  # type: ignore
from PIL import Image
from PIL import ImageColor
from PIL import ImageDraw


def _get_primary_color() -> str:
    """Small hack to get the theme primary color."""
    compiled = sass.compile(
        string=(
            "@import 'app/scss/main.scss';\n"
            "#favicon-color { color: $primary-color; }"
        )
    )
    return compiled[len(compiled) - 11 : -4]


def build_favicon() -> None:
    """Builds a basic favicon with the theme primary color."""
    im = Image.new("RGB", (32, 32), ImageColor.getrgb(_get_primary_color()))
    ImageDraw.Draw(im)
    im.save("app/static/favicon.ico")
53
app/utils/highlight.py
Normal file
@@ -0,0 +1,53 @@
import base64
import hashlib
from functools import lru_cache

from bs4 import BeautifulSoup  # type: ignore
from pygments import highlight as phighlight  # type: ignore
from pygments.formatters import HtmlFormatter  # type: ignore
from pygments.lexers import get_lexer_by_name  # type: ignore
from pygments.lexers import guess_lexer  # type: ignore

from app.config import CODE_HIGHLIGHTING_THEME

_FORMATTER = HtmlFormatter(style=CODE_HIGHLIGHTING_THEME)

HIGHLIGHT_CSS = _FORMATTER.get_style_defs()
HIGHLIGHT_CSS_HASH = base64.b64encode(
    hashlib.sha256(HIGHLIGHT_CSS.encode()).digest()
).decode()


@lru_cache(256)
def highlight(html: str) -> str:
    soup = BeautifulSoup(html, "html5lib")
    for code in soup.find_all("code"):
        if not code.parent.name == "pre":
            continue

        # Replace <br> tags with line breaks (Mastodon sends code like this)
        code_content = (
            code.encode_contents().decode().replace("<br>", "\n").replace("<br/>", "\n")
        )

        # If this comes from a microblog.pub instance we may have the language
        # in the class name
        if "data-microblogpub-lexer" in code.attrs:
            try:
                lexer = get_lexer_by_name(code.attrs["data-microblogpub-lexer"])
            except Exception:
                lexer = guess_lexer(code_content)

            # Replace the code with Pygments output
            # XXX: the HTML escaping causes issue with Python type annotations
            code_content = code_content.replace(") -&gt; ", ") -> ")
            code.parent.replaceWith(
                BeautifulSoup(
                    phighlight(code_content, lexer, _FORMATTER), "html5lib"
                ).body.next
            )
        else:
            code.name = "div"
            code["class"] = code.get("class", []) + ["highlight"]

    return soup.body.encode_contents().decode()
53
app/utils/indieauth.py
Normal file
@@ -0,0 +1,53 @@
from dataclasses import dataclass
from typing import Any
from urllib.parse import urlparse

from app.utils import microformats
from app.utils.url import make_abs


@dataclass
class IndieAuthClient:
    logo: str | None
    name: str
    url: str | None


def _get_prop(props: dict[str, Any], name: str, default=None) -> Any:
    if name in props:
        items = props.get(name)
        if isinstance(items, list):
            return items[0]
        return items
    return default


async def get_client_id_data(url: str) -> IndieAuthClient | None:
    # Don't fetch localhost URL
    if urlparse(url).hostname == "localhost":
        return IndieAuthClient(
            logo=None,
            name=url,
            url=url,
        )

    maybe_data_and_html = await microformats.fetch_and_parse(url)
    if maybe_data_and_html is not None:
        data: dict[str, Any] = maybe_data_and_html[0]

        for item in data["items"]:
            if "h-x-app" in item["type"] or "h-app" in item["type"]:
                props = item.get("properties", {})
                print(props)
                logo = _get_prop(props, "logo")
                return IndieAuthClient(
                    logo=make_abs(logo, url) if logo else None,
                    name=_get_prop(props, "name"),
                    url=_get_prop(props, "url", url),
                )

    return IndieAuthClient(
        logo=None,
        name=url,
        url=url,
    )
32
app/utils/mastodon.py
Normal file
@@ -0,0 +1,32 @@
from pathlib import Path

from loguru import logger

from app.webfinger import get_actor_url


def _load_mastodon_following_accounts_csv_file(path: str) -> list[str]:
    handles = []
    for line in Path(path).read_text().splitlines()[1:]:
        handle = line.split(",")[0]
        handles.append(handle)

    return handles


async def get_actor_urls_from_following_accounts_csv_file(
    path: str,
) -> list[tuple[str, str]]:
    actor_urls = []
    for handle in _load_mastodon_following_accounts_csv_file(path):
        try:
            actor_url = await get_actor_url(handle)
        except Exception:
            logger.error(f"Failed to fetch actor URL for {handle=}")
        else:
            if actor_url:
                actor_urls.append((handle, actor_url))
            else:
                logger.info(f"No actor URL found for {handle=}")

    return actor_urls
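The CSV helper keeps only the first column (the handle) and skips the header row; a quick sketch with made-up export contents:

# following_accounts.csv (illustrative Mastodon export):
#   Account address,Show boosts
#   someone@example.com,true
#   other@social.example,false
_load_mastodon_following_accounts_csv_file("following_accounts.csv")
# ["someone@example.com", "other@social.example"]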
34
app/utils/microformats.py
Normal file
@@ -0,0 +1,34 @@
from typing import Any

import httpx
import mf2py  # type: ignore
from loguru import logger

from app import config


class URLNotFoundOrGone(Exception):
    pass


async def fetch_and_parse(url: str) -> tuple[dict[str, Any], str]:
    async with httpx.AsyncClient() as client:
        resp = await client.get(
            url,
            headers={
                "User-Agent": config.USER_AGENT,
            },
            follow_redirects=True,
        )
        if resp.status_code in [404, 410]:
            raise URLNotFoundOrGone

        try:
            resp.raise_for_status()
        except httpx.HTTPStatusError:
            logger.error(
                f"Failed to parse microformats for {url}: " f"got {resp.status_code}"
            )
            raise

        return mf2py.parse(doc=resp.text), resp.text
182
app/utils/opengraph.py
Normal file
@@ -0,0 +1,182 @@
import asyncio
import mimetypes
import re
import signal
from concurrent.futures import TimeoutError
from typing import Any
from urllib.parse import urlparse

import httpx
from bs4 import BeautifulSoup  # type: ignore
from loguru import logger
from pebble import concurrent  # type: ignore
from pydantic import BaseModel

from app import activitypub as ap
from app import ap_object
from app import config
from app.actor import LOCAL_ACTOR
from app.actor import fetch_actor
from app.database import AsyncSession
from app.models import InboxObject
from app.models import OutboxObject
from app.utils.url import is_url_valid
from app.utils.url import make_abs


class OpenGraphMeta(BaseModel):
    url: str
    title: str
    image: str | None
    description: str | None
    site_name: str


@concurrent.process(timeout=5)
def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
    # Prevent SIGTERM from bubbling up to the worker
    signal.signal(signal.SIGTERM, signal.SIG_IGN)

    soup = BeautifulSoup(html, "html5lib")
    ogs = {
        og.attrs["property"]: og.attrs.get("content")
        for og in soup.html.head.findAll(property=re.compile(r"^og"))
    }
    # FIXME: some pages have no <title>
    raw = {
        "url": url,
        "title": soup.find("title").text.strip(),
        "image": None,
        "description": None,
        "site_name": urlparse(url).hostname,
    }
    for field in OpenGraphMeta.__fields__.keys():
        og_field = f"og:{field}"
        if ogs.get(og_field):
            raw[field] = ogs.get(og_field, None)

    if "title" not in raw:
        return None

    for maybe_rel in {"url", "image"}:
        if u := raw.get(maybe_rel):
            raw[maybe_rel] = make_abs(u, url)

            if not is_url_valid(raw[maybe_rel]):
                logger.info(f"Invalid url {raw[maybe_rel]}")
                if maybe_rel == "url":
                    raw["url"] = url
                elif maybe_rel == "image":
                    raw["image"] = None

    return OpenGraphMeta.parse_obj(raw)


def scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
    return _scrap_og_meta(url, html).result()


async def external_urls(
    db_session: AsyncSession,
    ro: ap_object.RemoteObject | OutboxObject | InboxObject,
) -> set[str]:
    note_host = urlparse(ro.ap_id).hostname

    tags_hrefs = set()
    for tag in ro.tags:
        if tag_href := tag.get("href"):
            tags_hrefs.add(tag_href)
            if tag.get("type") == "Mention":
                if tag["href"] != LOCAL_ACTOR.ap_id:
                    try:
                        mentioned_actor = await fetch_actor(db_session, tag["href"])
                    except (ap.FetchError, ap.NotAnObjectError):
                        tags_hrefs.add(tag["href"])
                        continue

                    tags_hrefs.add(mentioned_actor.url)
                    tags_hrefs.add(mentioned_actor.ap_id)
                else:
                    tags_hrefs.add(LOCAL_ACTOR.ap_id)
                    tags_hrefs.add(LOCAL_ACTOR.url)

    urls = set()
    if ro.content:
        soup = BeautifulSoup(ro.content, "html5lib")
        for link in soup.find_all("a"):
            h = link.get("href")
            if not h:
                continue

            try:
                ph = urlparse(h)
                mimetype, _ = mimetypes.guess_type(h)
                if (
                    ph.scheme in {"http", "https"}
                    and ph.hostname != note_host
                    and is_url_valid(h)
                    and (
                        not mimetype
                        or mimetype.split("/")[0] not in ["image", "video", "audio"]
                    )
                ):
                    urls.add(h)
            except Exception:
                logger.exception(f"Failed to check {h}")
                continue

    return urls - tags_hrefs


async def _og_meta_from_url(url: str) -> OpenGraphMeta | None:
    async with httpx.AsyncClient() as client:
        resp = await client.get(
            url,
            headers={
                "User-Agent": config.USER_AGENT,
            },
            follow_redirects=True,
        )

    resp.raise_for_status()

    if not (ct := resp.headers.get("content-type")) or not ct.startswith("text/html"):
        return None

    try:
        return scrap_og_meta(url, resp.text)
    except TimeoutError:
        logger.info(f"Timed out when scraping OG meta for {url}")
        return None
    except Exception:
        logger.info(f"Failed to scrape OG meta for {url}")
        return None


async def og_meta_from_note(
    db_session: AsyncSession,
    ro: ap_object.RemoteObject,
) -> list[dict[str, Any]]:
    og_meta = []
    urls = await external_urls(db_session, ro)
    logger.debug(f"Looking for OG metadata in {urls=}")
    for url in urls:
        logger.debug(f"Processing {url}")
        try:
            maybe_og_meta = None
            try:
                maybe_og_meta = await asyncio.wait_for(
                    _og_meta_from_url(url),
                    timeout=5,
                )
            except asyncio.TimeoutError:
                logger.info(f"Timing out fetching {url}")
            except Exception:
                logger.exception(f"Failed to scrape OG meta for {url}")

            if maybe_og_meta:
                og_meta.append(maybe_og_meta.dict())
        except httpx.HTTPError:
            pass

    return og_meta
12
app/utils/pagination.py
Normal file
@@ -0,0 +1,12 @@
import base64
from datetime import datetime

from dateutil.parser import isoparse


def encode_cursor(val: datetime) -> str:
    return base64.urlsafe_b64encode(val.isoformat().encode()).decode()


def decode_cursor(cursor: str) -> datetime:
    return isoparse(base64.urlsafe_b64decode(cursor).decode())
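The cursor helpers simply round-trip a datetime through URL-safe base64 (illustrative):

from datetime import datetime, timezone

cursor = encode_cursor(datetime(2022, 8, 1, tzinfo=timezone.utc))
# 'MjAyMi0wOC0wMVQwMDowMDowMCswMDowMA=='
decode_cursor(cursor) == datetime(2022, 8, 1, tzinfo=timezone.utc)
# True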
Some files were not shown because too many files have changed in this diff.