Mirror of https://git.sr.ht/~tsileo/microblog.pub
Bootstrap webmention endpoint

commit c6bc53ce54 (parent 1b3c76ee2f)

5 changed files with 104 additions and 29 deletions
@@ -38,6 +38,7 @@ from app import httpsig
 from app import indieauth
 from app import models
 from app import templates
+from app import webmentions
 from app.actor import LOCAL_ACTOR
 from app.actor import get_actors_metadata
 from app.boxes import public_outbox_objects_count
@@ -82,6 +83,7 @@ app.mount("/static", StaticFiles(directory="app/static"), name="static")
 app.include_router(admin.router, prefix="/admin")
 app.include_router(admin.unauthenticated_router, prefix="/admin")
 app.include_router(indieauth.router)
+app.include_router(webmentions.router)
 
 logger.configure(extra={"request_id": "no_req_id"})
 logger.remove()
@@ -3,6 +3,7 @@
 
 {% block head %}
 {% if outbox_object %}
+<link rel="webmention" href="{{ url_for("webmention_endpoint") }}">
 <link rel="alternate" href="{{ request.url }}" type="application/activity+json">
 <meta name="description" content="{{ outbox_object.content | html2text | trim | truncate(50) }}">
 <meta content="article" property="og:type" />
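
The <link rel="webmention"> tag added above is how a sending site discovers where to deliver mentions. A minimal discovery sketch (not part of this commit; the page URL handling is a placeholder and only the HTML <link> tag is checked, not HTTP Link headers), reusing libraries this commit already depends on:

import httpx
from bs4 import BeautifulSoup  # type: ignore


async def discover_webmention_endpoint(page_url: str) -> str | None:
    # Fetch the target page and look for the advertised webmention endpoint.
    async with httpx.AsyncClient() as client:
        resp = await client.get(page_url, follow_redirects=True)
        resp.raise_for_status()

    soup = BeautifulSoup(resp.text, "html5lib")
    link = soup.find("link", attrs={"rel": "webmention"})
    if link and link.get("href"):
        # A relative href would still need to be resolved against page_url.
        return link["href"]
    return None
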
@@ -1,11 +1,7 @@
 from dataclasses import dataclass
 from typing import Any
 
-import httpx
-import mf2py  # type: ignore
-from loguru import logger
-
-from app import config
+from app.utils import microformats
 from app.utils.url import make_abs
 
 
@@ -26,31 +22,20 @@ def _get_prop(props: dict[str, Any], name: str, default=None) -> Any:
 
 
 async def get_client_id_data(url: str) -> IndieAuthClient | None:
-    async with httpx.AsyncClient() as client:
-        try:
-            resp = await client.get(
-                url,
-                headers={
-                    "User-Agent": config.USER_AGENT,
-                },
-                follow_redirects=True,
-            )
-            resp.raise_for_status()
-        except (httpx.HTTPError, httpx.HTTPStatusError):
-            logger.exception(f"Failed to discover webmention endpoint for {url}")
-            return None
-
-        data = mf2py.parse(doc=resp.text)
+    maybe_data_and_html = await microformats.fetch_and_parse(url)
+    if maybe_data_and_html is not None:
+        data: dict[str, Any] = maybe_data_and_html[0]
         for item in data["items"]:
             if "h-x-app" in item["type"] or "h-app" in item["type"]:
                 props = item.get("properties", {})
                 print(props)
                 logo = _get_prop(props, "logo")
                 return IndieAuthClient(
                     logo=make_abs(logo, url) if logo else None,
                     name=_get_prop(props, "name"),
                     url=_get_prop(props, "url", url),
                 )
 
     return IndieAuthClient(
         logo=None,
app/utils/microformats.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+from typing import Any
+
+import httpx
+import mf2py  # type: ignore
+from loguru import logger
+
+from app import config
+
+
+async def fetch_and_parse(url: str) -> tuple[dict[str, Any], str] | None:
+    async with httpx.AsyncClient() as client:
+        try:
+            resp = await client.get(
+                url,
+                headers={
+                    "User-Agent": config.USER_AGENT,
+                },
+                follow_redirects=True,
+            )
+            resp.raise_for_status()
+        except (httpx.HTTPError, httpx.HTTPStatusError):
+            logger.exception(f"Failed to discover webmention endpoint for {url}")
+            return None
+
+        return mf2py.parse(doc=resp.text), resp.text
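
A hypothetical caller of the new helper (not part of the commit; the URL is a placeholder). fetch_and_parse returns the mf2py parse result together with the raw HTML, or None when the request fails:

import asyncio

from app.utils import microformats


async def main() -> None:
    maybe_data_and_html = await microformats.fetch_and_parse("https://example.com/")
    if maybe_data_and_html is None:
        print("fetch failed")
        return

    data, html = maybe_data_and_html
    # `data` is the mf2py output (a dict with an "items" list), `html` the raw body.
    print(len(data["items"]), len(html))


asyncio.run(main())
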
62
app/webmentions.py
Normal file
62
app/webmentions.py
Normal file
|
@ -0,0 +1,62 @@
+from bs4 import BeautifulSoup  # type: ignore
+from fastapi import APIRouter
+from fastapi import HTTPException
+from fastapi import Request
+from fastapi.responses import JSONResponse
+from loguru import logger
+
+from app.utils import microformats
+from app.utils.url import check_url
+from app.utils.url import is_url_valid
+
+router = APIRouter()
+
+
+def is_source_containing_target(source_html: str, target_url: str) -> bool:
+    soup = BeautifulSoup(source_html, "html5lib")
+    for link in soup.find_all("a"):
+        h = link.get("href")
+        if not is_url_valid(h):
+            continue
+
+        if h == target_url:
+            return True
+
+    return False
+
+
+@router.post("/webmentions")
+async def webmention_endpoint(
+    request: Request,
+) -> JSONResponse:
+    form_data = await request.form()
+    try:
+        source = form_data["source"]
+        target = form_data["target"]
+
+        if source == target:
+            raise ValueError("source URL is the same as target")
+
+        check_url(source)
+        check_url(target)
+    except Exception:
+        logger.exception("Invalid webmention request")
+        raise HTTPException(status_code=400, detail="Invalid payload")
+
+    logger.info(f"Received webmention {source=} {target=}")
+
+    # TODO: get outbox via ap_id (URL is the same as ap_id)
+    maybe_data_and_html = await microformats.fetch_and_parse(source)
+    if not maybe_data_and_html:
+        logger.info("failed to fetch source")
+        raise HTTPException(status_code=400, detail="failed to fetch source")
+
+    data, html = maybe_data_and_html
+
+    if not is_source_containing_target(html, target):
+        logger.warning("target not found in source")
+        raise HTTPException(status_code=400, detail="target not found in source")
+
+    logger.info(f"{data=}")
+
+    return JSONResponse(content={}, status_code=200)
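
For reference, a minimal sketch of how a remote site might notify this endpoint (not part of the commit; the URLs are placeholders). The handler expects form-encoded source and target fields and answers 200 with an empty JSON body, or 400 when validation fails:

import asyncio

import httpx


async def send_webmention() -> None:
    async with httpx.AsyncClient() as client:
        resp = await client.post(
            "https://example.com/webmentions",  # endpoint advertised via <link rel="webmention">
            data={
                # Page that links to the target.
                "source": "https://blog.example.net/replies/1",
                # The mentioned post on the receiving site.
                "target": "https://example.com/o/abc123",
            },
        )
        print(resp.status_code, resp.json())


asyncio.run(send_webmention())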