Black/isort

This commit is contained in:
Thomas Sileo 2018-06-16 22:02:10 +02:00
parent c5295524c7
commit d9362adb25
16 changed files with 814 additions and 672 deletions

View file

@@ -1,23 +1,30 @@
import logging
from datetime import datetime
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
from bson.objectid import ObjectId
from html2text import html2text
from feedgen.feed import FeedGenerator
from html2text import html2text
import tasks
from config import BASE_URL
from config import DB
from config import ID
from config import ME
from config import USER_AGENT
from config import USERNAME
from little_boxes import activitypub as ap
from little_boxes.backend import Backend
from little_boxes.collection import parse_collection as ap_parse_collection
from config import USERNAME, BASE_URL, ID
from config import DB, ME
import tasks
from typing import List, Optional, Dict, Any, Union
logger = logging.getLogger(__name__)
MY_PERSON = ap.Person(**ME)
def _remove_id(doc: ap.ObjectType) -> ap.ObjectType:
"""Helper for removing MongoDB's `_id` field."""
@@ -35,6 +42,9 @@ def _to_list(data: Union[List[Any], Any]) -> List[Any]:
class MicroblogPubBackend(Backend):
def user_agent(self) -> str:
return USER_AGENT
def base_url(self) -> str:
return BASE_URL

1295
app.py

File diff suppressed because it is too large Load diff

View file

@@ -1,13 +1,16 @@
import subprocess
import os
import yaml
from pymongo import MongoClient
import requests
from itsdangerous import JSONWebSignatureSerializer
import subprocess
from datetime import datetime
from little_boxes.utils import strtobool
from utils.key import KEY_DIR, get_key, get_secret_key
import requests
import yaml
from itsdangerous import JSONWebSignatureSerializer
from pymongo import MongoClient
from little_boxes import strtobool
from utils.key import KEY_DIR
from utils.key import get_key
from utils.key import get_secret_key
def noop():

View file

@@ -4,3 +4,4 @@ html2text
pyyaml
flake8
mypy
black

View file

@@ -1,47 +1,53 @@
import os
import json
import logging
import os
import random
import requests
from celery import Celery
from requests.exceptions import HTTPError
from config import DB
from config import HEADERS
from config import ID
from config import DB
from config import KEY
from config import USER_AGENT
from utils.httpsig import HTTPSigAuth
from utils.opengraph import fetch_og_metadata
from utils.linked_data_sig import generate_signature
from utils.opengraph import fetch_og_metadata
log = logging.getLogger(__name__)
app = Celery('tasks', broker=os.getenv('MICROBLOGPUB_AMQP_BROKER', 'pyamqp://guest@localhost//'))
SigAuth = HTTPSigAuth(ID+'#main-key', KEY.privkey)
app = Celery(
"tasks", broker=os.getenv("MICROBLOGPUB_AMQP_BROKER", "pyamqp://guest@localhost//")
)
SigAuth = HTTPSigAuth(ID + "#main-key", KEY.privkey)
@app.task(bind=True, max_retries=12)
def post_to_inbox(self, payload: str, to: str) -> None:
try:
log.info('payload=%s', payload)
log.info('generating sig')
log.info("payload=%s", payload)
log.info("generating sig")
signed_payload = json.loads(payload)
generate_signature(signed_payload, KEY.privkey)
log.info('to=%s', to)
resp = requests.post(to, data=json.dumps(signed_payload), auth=SigAuth, headers={
'Content-Type': HEADERS[1],
'Accept': HEADERS[1],
'User-Agent': USER_AGENT,
})
log.info('resp=%s', resp)
log.info('resp_body=%s', resp.text)
log.info("to=%s", to)
resp = requests.post(
to,
data=json.dumps(signed_payload),
auth=SigAuth,
headers={
"Content-Type": HEADERS[1],
"Accept": HEADERS[1],
"User-Agent": USER_AGENT,
},
)
log.info("resp=%s", resp)
log.info("resp_body=%s", resp.text)
resp.raise_for_status()
except HTTPError as err:
log.exception('request failed')
log.exception("request failed")
if 400 >= err.response.status_code >= 499:
log.info('client error, no retry')
log.info("client error, no retry")
return
self.retry(exc=err, countdown=int(random.uniform(2, 4) ** self.request.retries))
@@ -49,11 +55,15 @@ def post_to_inbox(self, payload: str, to: str) -> None:
@app.task(bind=True, max_retries=12)
def fetch_og(self, col, remote_id):
try:
log.info('fetch_og_meta remote_id=%s col=%s', remote_id, col)
if col == 'INBOX':
log.info('%d links saved', fetch_og_metadata(USER_AGENT, DB.inbox, remote_id))
elif col == 'OUTBOX':
log.info('%d links saved', fetch_og_metadata(USER_AGENT, DB.outbox, remote_id))
log.info("fetch_og_meta remote_id=%s col=%s", remote_id, col)
if col == "INBOX":
log.info(
"%d links saved", fetch_og_metadata(USER_AGENT, DB.inbox, remote_id)
)
elif col == "OUTBOX":
log.info(
"%d links saved", fetch_og_metadata(USER_AGENT, DB.outbox, remote_id)
)
except Exception as err:
self.log.exception('failed')
self.log.exception("failed")
self.retry(exc=err, countdown=int(random.uniform(2, 4) ** self.request.retries))

View file

@@ -1,12 +1,12 @@
import time
import os
import time
from typing import List
from typing import Tuple
import requests
from html2text import html2text
from utils import activitypub_utils
from typing import Tuple
from typing import List
from utils import activitypub_utils
def resp2plaintext(resp):

View file

@@ -1,4 +1,7 @@
from typing import Optional, Dict, List, Any
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
import requests

View file

@@ -1,11 +1,11 @@
import logging
from urllib.parse import urlparse
import requests
from urllib.parse import urlparse
from Crypto.PublicKey import RSA
from .urlutils import check_url
from .errors import ActivityNotFoundError
from .urlutils import check_url
logger = logging.getLogger(__name__)

View file

@@ -1,14 +1,21 @@
import typing
import re
import typing
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from bleach.linkifier import Linker
from markdown import markdown
from utils.webfinger import get_actor_url
from config import USERNAME, BASE_URL, ID
from config import ACTOR_SERVICE
from typing import List, Optional, Tuple, Dict, Any, Union, Type
from config import BASE_URL
from config import ID
from config import USERNAME
from utils.webfinger import get_actor_url
def set_attrs(attrs, new=False):

View file

@@ -3,19 +3,20 @@
Mastodon instances won't accept requests that are not signed using this scheme.
"""
from datetime import datetime
from urllib.parse import urlparse
from typing import Any, Dict, Optional
import base64
import hashlib
import logging
from datetime import datetime
from typing import Any
from typing import Dict
from typing import Optional
from urllib.parse import urlparse
from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5
from flask import request
from requests.auth import AuthBase
from Crypto.Signature import PKCS1_v1_5
from Crypto.Hash import SHA256
logger = logging.getLogger(__name__)

View file

@@ -1,6 +1,5 @@
import os
import binascii
import os
from typing import Callable
from little_boxes.key import Key

View file

@@ -1,13 +1,12 @@
from pyld import jsonld
import base64
import hashlib
from datetime import datetime
from typing import Any
from typing import Dict
from Crypto.Signature import PKCS1_v1_5
from Crypto.Hash import SHA256
import base64
from typing import Any, Dict
from Crypto.Signature import PKCS1_v1_5
from pyld import jsonld
# cache the downloaded "schemas", otherwise the library is super slow
# (https://github.com/digitalbazaar/pyld/issues/70)

View file

@@ -1,8 +1,9 @@
import requests
from urllib.parse import urlparse
from .urlutils import check_url
import requests
from .errors import ActivityNotFoundError
from .urlutils import check_url
class ObjectService(object):

View file

@@ -1,11 +1,12 @@
import ipaddress
from urllib.parse import urlparse
import ipaddress
import opengraph
import requests
from bs4 import BeautifulSoup
from .urlutils import is_url_valid, check_url
from .urlutils import check_url
from .urlutils import is_url_valid
def links_from_note(note):

View file

@@ -1,7 +1,7 @@
import ipaddress
import logging
import os
import socket
import ipaddress
from urllib.parse import urlparse
from . import strtobool

View file

@@ -1,13 +1,13 @@
from urllib.parse import urlparse
from typing import Dict, Any
from typing import Optional
import logging
from typing import Any
from typing import Dict
from typing import Optional
from urllib.parse import urlparse
import requests
from .urlutils import check_url
logger = logging.getLogger(__name__)