got distracted by cryptobros

commit b1712aa7ab (parent 90d6edeb10)
sneakers-the-rat 2023-06-08 21:18:57 -07:00
17 changed files with 289 additions and 27 deletions

poetry.lock (generated, 91 lines changed)

@@ -1,9 +1,10 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry and should not be changed by hand.
 
 [[package]]
 name = "alabaster"
 version = "0.7.13"
 description = "A configurable sidebar-enabled Sphinx theme"
+category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -15,6 +16,7 @@ files = [
 name = "babel"
 version = "2.12.1"
 description = "Internationalization utilities"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -26,6 +28,7 @@ files = [
 name = "beautifulsoup4"
 version = "4.12.2"
 description = "Screen-scraping library"
+category = "main"
 optional = false
 python-versions = ">=3.6.0"
 files = [
@@ -44,6 +47,7 @@ lxml = ["lxml"]
 name = "certifi"
 version = "2023.5.7"
 description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -55,6 +59,7 @@ files = [
 name = "charset-normalizer"
 version = "3.1.0"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "main"
 optional = false
 python-versions = ">=3.7.0"
 files = [
@@ -139,6 +144,7 @@ files = [
 name = "colorama"
 version = "0.4.6"
 description = "Cross-platform colored terminal text."
+category = "main"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
 files = [
@@ -150,6 +156,7 @@ files = [
 name = "docutils"
 version = "0.19"
 description = "Docutils -- Python Documentation Utilities"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -161,6 +168,7 @@ files = [
 name = "furo"
 version = "2023.5.20"
 description = "A clean customisable Sphinx documentation theme."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -178,6 +186,7 @@ sphinx-basic-ng = "*"
 name = "idna"
 version = "3.4"
 description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -189,6 +198,7 @@ files = [
 name = "imagesize"
 version = "1.4.1"
 description = "Getting image size from png/jpeg/jpeg2000/gif file"
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
@@ -200,6 +210,7 @@ files = [
 name = "jinja2"
 version = "3.1.2"
 description = "A very fast and expressive template engine."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -217,6 +228,7 @@ i18n = ["Babel (>=2.7)"]
 name = "latexcodec"
 version = "2.0.1"
 description = "A lexer and codec to work with LaTeX code in Python."
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
@@ -227,10 +239,32 @@ files = [
 [package.dependencies]
 six = ">=1.4.1"
 
+[[package]]
+name = "linkify-it-py"
+version = "1.0.3"
+description = "Links recognition library with FULL unicode support."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "linkify-it-py-1.0.3.tar.gz", hash = "sha256:2b3f168d5ce75e3a425e34b341a6b73e116b5d9ed8dbbbf5dc7456843b7ce2ee"},
+    {file = "linkify_it_py-1.0.3-py3-none-any.whl", hash = "sha256:11e29f00150cddaa8f434153f103c14716e7e097a8fd372d9eb1ed06ed91524d"},
+]
+
+[package.dependencies]
+uc-micro-py = "*"
+
+[package.extras]
+benchmark = ["pytest", "pytest-benchmark"]
+dev = ["black", "flake8", "isort", "pre-commit"]
+doc = ["myst-parser", "sphinx", "sphinx-book-theme"]
+test = ["coverage", "pytest", "pytest-cov"]
+
 [[package]]
 name = "livereload"
 version = "2.6.3"
 description = "Python LiveReload is an awesome tool for web developers"
+category = "dev"
 optional = false
 python-versions = "*"
 files = [
@@ -246,6 +280,7 @@ tornado = {version = "*", markers = "python_version > \"2.7\""}
 name = "markdown-it-py"
 version = "2.2.0"
 description = "Python port of markdown-it. Markdown parsing, done right!"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -270,6 +305,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
 name = "markupsafe"
 version = "2.1.3"
 description = "Safely add untrusted strings to HTML/XML markup."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -329,6 +365,7 @@ files = [
 name = "mdit-py-plugins"
 version = "0.3.5"
 description = "Collection of plugins for markdown-it-py"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -348,6 +385,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
 name = "mdurl"
 version = "0.1.2"
 description = "Markdown URL utilities"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -359,6 +397,7 @@ files = [
 name = "myst-parser"
 version = "1.0.0"
 description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser,"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -369,6 +408,7 @@ files = [
 [package.dependencies]
 docutils = ">=0.15,<0.20"
 jinja2 = "*"
+linkify-it-py = {version = ">=1.0,<2.0", optional = true, markers = "extra == \"linkify\""}
 markdown-it-py = ">=1.0.0,<3.0.0"
 mdit-py-plugins = ">=0.3.4,<0.4.0"
 pyyaml = "*"
@@ -385,6 +425,7 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,
 name = "packaging"
 version = "23.1"
 description = "Core utilities for Python packages"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -396,6 +437,7 @@ files = [
 name = "pybtex"
 version = "0.24.0"
 description = "A BibTeX-compatible bibliography processor in Python"
+category = "main"
 optional = false
 python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*"
 files = [
@@ -415,6 +457,7 @@ test = ["pytest"]
 name = "pybtex-docutils"
 version = "1.0.2"
 description = "A docutils backend for pybtex."
+category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -430,6 +473,7 @@ pybtex = ">=0.16"
 name = "pygments"
 version = "2.15.1"
 description = "Pygments is a syntax highlighting package written in Python."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -444,6 +488,7 @@ plugins = ["importlib-metadata"]
 name = "pyyaml"
 version = "6.0"
 description = "YAML parser and emitter for Python"
+category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -493,6 +538,7 @@ files = [
 name = "requests"
 version = "2.31.0"
 description = "Python HTTP for Humans."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -514,6 +560,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 name = "six"
 version = "1.16.0"
 description = "Python 2 and 3 compatibility utilities"
+category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
 files = [
@@ -525,6 +572,7 @@ files = [
 name = "snowballstemmer"
 version = "2.2.0"
 description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+category = "main"
 optional = false
 python-versions = "*"
 files = [
@@ -536,6 +584,7 @@ files = [
 name = "soupsieve"
 version = "2.4.1"
 description = "A modern CSS selector implementation for Beautiful Soup."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -547,6 +596,7 @@ files = [
 name = "sphinx"
 version = "6.2.1"
 description = "Python documentation generator"
+category = "main"
 optional = false
 python-versions = ">=3.8"
 files = [
@@ -581,6 +631,7 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"]
 name = "sphinx-autobuild"
 version = "2021.3.14"
 description = "Rebuild Sphinx documentation on changes, with live-reload in the browser."
+category = "dev"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -600,6 +651,7 @@ test = ["pytest", "pytest-cov"]
 name = "sphinx-basic-ng"
 version = "1.0.0b1"
 description = "A modern skeleton for Sphinx themes."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -617,6 +669,7 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta
 name = "sphinxcontrib-applehelp"
 version = "1.0.4"
 description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+category = "main"
 optional = false
 python-versions = ">=3.8"
 files = [
@@ -632,6 +685,7 @@ test = ["pytest"]
 name = "sphinxcontrib-bibtex"
 version = "2.5.0"
 description = "Sphinx extension for BibTeX style citations."
+category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -649,6 +703,7 @@ Sphinx = ">=2.1"
 name = "sphinxcontrib-devhelp"
 version = "1.0.2"
 description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
+category = "main"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -664,6 +719,7 @@ test = ["pytest"]
 name = "sphinxcontrib-htmlhelp"
 version = "2.0.1"
 description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+category = "main"
 optional = false
 python-versions = ">=3.8"
 files = [
@@ -679,6 +735,7 @@ test = ["html5lib", "pytest"]
 name = "sphinxcontrib-jsmath"
 version = "1.0.1"
 description = "A sphinx extension which renders display math in HTML via JavaScript"
+category = "main"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -693,6 +750,7 @@ test = ["flake8", "mypy", "pytest"]
 name = "sphinxcontrib-mermaid"
 version = "0.9.2"
 description = "Mermaid diagrams in yours Sphinx powered docs"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -704,6 +762,7 @@ files = [
 name = "sphinxcontrib-qthelp"
 version = "1.0.3"
 description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
+category = "main"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -719,6 +778,7 @@ test = ["pytest"]
 name = "sphinxcontrib-serializinghtml"
 version = "1.1.5"
 description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
+category = "main"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -734,6 +794,7 @@ test = ["pytest"]
 name = "tornado"
 version = "6.3.2"
 description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
+category = "dev"
 optional = false
 python-versions = ">= 3.8"
 files = [
@@ -751,14 +812,30 @@ files = [
 ]
 
 [[package]]
-name = "urllib3"
-version = "2.0.2"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
+name = "uc-micro-py"
+version = "1.0.2"
+description = "Micro subset of unicode data files for linkify-it-py projects."
+category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"},
-    {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"},
+    {file = "uc-micro-py-1.0.2.tar.gz", hash = "sha256:30ae2ac9c49f39ac6dce743bd187fcd2b574b16ca095fa74cd9396795c954c54"},
+    {file = "uc_micro_py-1.0.2-py3-none-any.whl", hash = "sha256:8c9110c309db9d9e87302e2f4ad2c3152770930d88ab385cd544e7a7e75f3de0"},
+]
+
+[package.extras]
+test = ["coverage", "pytest", "pytest-cov"]
+
+[[package]]
+name = "urllib3"
+version = "2.0.3"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"},
+    {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"},
 ]
 
 [package.extras]
@@ -770,4 +847,4 @@ zstd = ["zstandard (>=0.18.0)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "fd560ec0effde224dbc8327246e38034fb217203db89974874f948dbc41d38ac"
+content-hash = "b662b687f5c5d02e641fb4a34a05cee4e652232f06c8e734c4429191e559900a"

pyproject.toml

@@ -10,7 +10,7 @@ packages = [{include = "p2p_ld"}]
 [tool.poetry.dependencies]
 python = "^3.11"
 sphinx = "^6.2.1"
-myst-parser = "^1.0.0"
+myst-parser = { version = "^1.0.0", extras = ["linkify"] }
 furo = "^2023.5.20"
 sphinxcontrib-mermaid = "^0.9.2"
 sphinxcontrib-bibtex = "^2.5.0"


@@ -1,3 +1,4 @@
+(comparison)=
 # Comparison
 
 All of this is TODO. Comparison to existing protocols and projects (just to situate in context, not talk shit obvs)
@@ -19,6 +20,7 @@ ssb
 matrix
 at_protocol
 nostr
+xmpp
 ```
 
 ```{toctree}
@@ -48,7 +50,10 @@ dmc
 - P2panda
 - SAFE
 - Storj
-- Swarm
+- [Swarm](https://www.ethswarm.org/swarm-whitepaper.pdf)
+  - not interesting, based around coins and smart contracts
+  - kademlia routing
+  - chunks stored by nodes close in hash space (see the sketch below)
 
 ## Points of comparison
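
A minimal sketch of the Kademlia XOR metric those Swarm notes refer to: Kademlia-style DHTs assign a chunk to the nodes whose IDs are nearest its hash in XOR space. `sha256` here is an illustrative stand-in for whichever hash function the protocol actually uses.

```python
import hashlib

def xor_distance(id_a: bytes, id_b: bytes) -> int:
    """Kademlia's distance metric: XOR the two IDs, read as an integer."""
    return int.from_bytes(bytes(a ^ b for a, b in zip(id_a, id_b)), "big")

# chunks are stored by the nodes whose IDs are closest to the chunk's hash
node_ids = [hashlib.sha256(n).digest() for n in (b"node-1", b"node-2", b"node-3")]
chunk_id = hashlib.sha256(b"some chunk of data").digest()
closest = min(node_ids, key=lambda nid: xor_distance(nid, chunk_id))
```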


@@ -1 +1,24 @@
+```{index} IPFS
+```
 # IPFS
+
+If IPFS is {index}`BitTorrent` + {index}`git`, and {index}`ActivityPub` is {index}`Distributed Messaging` + {index}`Linked Data`, then p2p-ld is IPFS + ActivityPub. We build on IPFS and are heavily inspired by its design and by the shortcomings revealed by its practical use.
+
+## Problems
+
+- Slow access!
+- No identity, which misses the social nature of infrastructure. Where bittorrent had trackers, there is no similar concept in IPFS to organize archives.
+  - Hence the need for filecoin, an exogenous incentive to store, but then storage becomes transactional, which generates its own problems.
+- Trust! eg. its use in phishing attacks works because there is no way to know who the hell owns a given CID. It needs to be possible to do social curation, or at least to know when something is riskier or not.
+- Lack of metadata means having to build a lot of shit post-hoc, like IPLD and multihashes and codecs and whatnot.
+
+## IPLD
+
+## Overlap
+
+- {index}`Merkle DAG`s
+
+## Differences
+
+- Not permanent storage! Identities retain custody and control over objects in the network.


@@ -1,8 +1,49 @@
+```{index} Linked Data; Fragments
+```
 # Linked Data Fragments
 
+## Summary
+
+[Linked data fragments](https://linkeddatafragments.org/publications/) are designed to "fill in the middle" between entirely serverside ({index}`SPARQL`) or entirely clientside (downloading a triple store) usage of linked data triples. SPARQL queries are notorious for being resource intensive, as queries can become much more complex than typical relational algebra and the server needs to resolve a potentially enormous number of resources. Placing all the logic on the server, rather than the client, is an architectural decision that has a complex history, but descends from the idea that the web should work by having "agents" that work on the web on our behalf[^semwebagents].
+
+Linked data fragments (LDFs) split the difference by placing more of the work on clients, with the server providing pre-computed sets of triples for a given selector. "Selector" is a purposefully general concept, but the LDF authors focus primarily on [Triple Pattern Fragments](https://linkeddatafragments.org/specification/triple-pattern-fragments/) that are composed of:
+
+- A **Triple Pattern**, a `?subject ?predicate ?object` that defines the contents of the fragment
+- **Metadata**, specifically a `triples` predicate indicating the estimated total number of triples in the fragment, since large fragments need to be paginated, and
+- **Hypermedia Controls** that can be used to retrieve other related fragments. For example, a triple pattern corresponding to `s:people` `p:named` `o:tom` would have links to retrieve all the related combinations including each field being unspecified, eg. any triplet whose subject is a `person`, whose predicate is `named`, and so on.
+
+The hosting server then partitions all of the triples in a given dataset into all the possible combinations of subjects, predicates, and objects (see the sketch below).
+
+## Overlap
+
+p2p-ld follows Linked Data Fragments in that it emphasizes clientside logic rather than query logic on the network. Executing distributed complex queries adds substantial complexity to the protocol and would potentially import a lot of the problems with SPARQL, like heightened resource requirements and the potential for abuse for denial of service.
+
+## Differences
+
+- re: the linked data platform, p2p-ld also concerns itself with "leaf" nodes of binary data accessed via codec, rather than represented as triplets. The results of queries are thus not necessarily imagined to be single factual assertions, but datasets, images, documents, posts, etc. So the container concept is less rigidly defined than an LDF host with a completely partitioned triplet graph.
+
+Additionally, by being an explicitly *social* system, p2p-ld is unconcerned with arbitrary query execution time on anonymous data systems: the expectation is that individual peers and {index}`peer federations <Peer Federations>` manage access and load among themselves.
+
+```{admonition} To be very clear!
+:class: attention
+p2p-ld does not attempt to replace or improve SPARQL. There are a number of philosophical and practical differences in the design of the greater semantic web, and particularly its instantiation as bigass corporate knowledge graphs. We will do what we can to integrate with RDF and RDF-like technologies, but p2p-ld is *not* a distributed SPARQL endpoint.
+```
+
+[^semwebagents]: See the history of the early to middle semantic web, discussed in {cite}`saundersSurveillanceGraphs2023`
+
 ## References
 
 - Homepage: https://linkeddatafragments.org/
 - Papers:
+  - Original conference paper: {cite}`verborghWebScaleQueryingLinked2014`
   - {cite}`verborghTriplePatternFragments2016`
+- Specification: [Triple Pattern Fragments](https://linkeddatafragments.org/specification/triple-pattern-fragments/)
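
For a concrete sense of the interface: a Triple Pattern Fragment is just an HTTP resource selected by `subject`/`predicate`/`object` query parameters. A sketch against the LDF project's public DBpedia demo endpoint; the URL, its continued availability, and the query-parameter convention are assumptions here, not part of the spec text above.

```python
import requests

# ask for the fragment of all triples with a given predicate and object;
# the response mixes data triples with count metadata and hypermedia controls
fragment = requests.get(
    "https://fragments.dbpedia.org/2016-04/en",
    params={
        "predicate": "http://www.w3.org/2000/01/rdf-schema#label",
        "object": '"Linked data"@en',
    },
    headers={"Accept": "text/turtle"},
)
print(fragment.text)
```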

src/comparison/xmpp.md (new file, 7 lines)

@@ -0,0 +1,7 @@
+# XMPP
+
+Stuff we like about XMPP:
+
+- Resources like `username@domain.com/resource` for indicating different accounts and contexts (see the sketch below).
+- Service discovery: https://xmpp.org/extensions/xep-0030.html
+- Protocol interoperability
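
A toy sketch of splitting that resource syntax apart; the JIDs are placeholders, and real JID handling also involves normalization rules this ignores.

```python
def parse_jid(jid: str) -> tuple[str | None, str, str | None]:
    """Split a JID of the form [local@]domain[/resource] into its parts."""
    bare, _, resource = jid.partition("/")
    local, _, domain = bare.rpartition("@")
    return (local or None, domain, resource or None)

assert parse_jid("username@domain.com/resource") == ("username", "domain.com", "resource")
assert parse_jid("domain.com") == (None, "domain.com", None)
```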


@@ -40,8 +40,11 @@ html_baseurl = '/docs/'
 # myst
 myst_heading_anchors = 3
 myst_enable_extensions = [
-    'tasklist'
+    'tasklist',
+    'linkify',
+    'attrs_block'
 ]
+myst_linkify_fuzzy_links = False
 
 # Napoleon settings
 napoleon_google_docstring = True
@@ -69,6 +72,7 @@ import re
 import pybtex.plugin
 from pybtex.richtext import Symbol, Text
 from pybtex.style.formatting.unsrt import Style as UnsrtStyle
+from pybtex.style.formatting import unsrt as unsrt_module
 from pybtex.style.formatting import toplevel
 from pybtex.style.template import (
     field, first_of, href, join, names, optional, optional_field, sentence,
@@ -85,6 +89,9 @@ date = first_of [
 ]
 pages = field('pages', apply_func=dashify)
 
+# monkeypatch the module-level date template so unsrt's inherited templates pick up ours
+unsrt_module.date = date
+
 class BetterBibTeXStyle(UnsrtStyle):
 
     def get_article_template(self, e):


@@ -15,13 +15,13 @@ Triplet graphs similar to linked data fragments with envelopes. decoupling conte
   - Permissions scope
   - Signature
 - Anything that can be directly referenced without a local qualifier is a container.
-  - Triplets within a container can be referenced with the [query syntax](querying.html#Location)
+  - Triplets within a container can be referenced with the [query syntax](qlocation)
 - Containers also behave like "feeds"
   - Eg. one might put their blog posts in `@user:blog` or
 - The account identifier is the top-level container.
 - Ordering:
   - Every triple within a scope is ordered by default by the time it is declared
-  - A container can declare its ordering (see [vocabulary](vocabulary.html#Container))
+  - A container can declare its ordering (see {term}`Container`)
 - Naming:
   - Each container intended to be directly referenced SHOULD contain a `name` so it can be referenced w.r.t its parent: `@<ACCOUNT>:<name>`
   - Each container can also be indicated numerically
@@ -31,6 +31,8 @@ Triplet graphs similar to linked data fragments with envelopes. decoupling conte
 - Capabilities should also contain a permissions scope; if none is present, the global scope is assumed.
 - Since Identities are just a special form of container, they too can advertise different actions that they support with capabilities.
 
+Re hashing a graph: the container always has one root node, the container's identity, from which a graph traversal starts. A {index}`Merkle DAG` is then constructed starting from the leaves (see the sketch below).
+
 ## Triplets
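
A toy illustration of that bottom-up hashing, assuming a simple adjacency-list graph and sha256; the protocol's actual canonicalization and codec are left unspecified here.

```python
import hashlib

def node_hash(graph: dict[str, list[str]], node: str) -> bytes:
    """Hash a node over its value plus the hashes of its children, leaves
    first, so the root (the container's identity) commits to the whole DAG."""
    child_hashes = b"".join(node_hash(graph, child) for child in graph.get(node, []))
    return hashlib.sha256(node.encode() + child_hashes).digest()

# the root node is the container's identity; hashing starts from the leaves up
graph = {"@user:blog": ["post-1", "post-2"], "post-1": [], "post-2": []}
container_hash = node_hash(graph, "@user:blog").hex()
```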


@@ -10,6 +10,10 @@ Identity
 Petname
   A short name used to refer to a {term}`Identity`. Petnames are always contextual: `@Alice` referring to `@Bob` has the status of "The person that @Alice knows as @Bob." A petname can be declared by the identity being referred to as a "canonical" petname, but since they are potentially nonunique they should always be dereferenced against the Identity making the reference.
 
+Container
+  Stub to check cross-references
+Beacon
+  Stub to check cross-references
 ```
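
The contextuality of the Petname entry above is the whole point: resolution is keyed on *who is referring*, not just the name. A toy sketch, with made-up placeholder identity strings:

```python
# (observer, petname) -> identity: the same petname resolves differently
# depending on which identity is making the reference
petnames: dict[tuple[str, str], str] = {
    ("@Alice", "@Bob"): "identity-1a2b",  # the person Alice knows as Bob
    ("@Carol", "@Bob"): "identity-9f8e",  # a different Bob entirely
}

def resolve(observer: str, petname: str) -> str | None:
    """Dereference a petname against the identity making the reference."""
    return petnames.get((observer, petname))

assert resolve("@Alice", "@Bob") != resolve("@Carol", "@Bob")
```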

src/design.md (new file, 28 lines)

@@ -0,0 +1,28 @@
+# Design Decisions
+
+A scratchpad for keeping track of the specific choices that we are making so that we know they are choices lol.
+
+## Cultivate Abundance
+
+Much of the focus and energy in p2p and decentralized systems has been vacuumed up by cryptocurrency and other blockchain scams. These technologies intrinsically generate artificial scarcity rather than the abundance of p2p systems like bittorrent. Much of the thinking in these systems is oriented around self-sovereignty, but p2p-ld is intended to cultivate mutualism and the radical mutual responsibility to each other that any truly autonomous social system outside of libertarian fantasies requires. We don't design the *system* to be maximally efficient or make *system-level* guarantees about reliability or persistence; we design systems for people to organize these things among themselves, voluntarily. We are *not* interested in making a self-sustaining system that is "out there" and needs to be maintained by some blockchain economy. We are interested in making tools *for us* to make our own digital life online.
+
+## Permanence is Impossible
+
+{attribution="Octavia Butler, Parable of the Sower"}
+> Everyone knows that change is inevitable. From the second law of thermodynamics to Darwinian evolution, from Buddhism's insistence that nothing is permanent and all suffering results from our delusions of permanence to the third chapter of Ecclesiastes ("To everything there is a season"), change is part of life, of existence, of the common wisdom. But I don't believe we're dealing with all that that means. We haven't even begun to deal with it.
+
+There is no such thing as a [Cool URI that doesn't change](https://www.w3.org/Provider/Style/URI), and there is no such thing as a persistent identifier that lasts forever {cite}`kunzePersistenceStatementsDescribing2017`. All things change. Change can come from practical causes like running out of funding and shutting down a server, cultural causes like the shifting meanings of words, or larger shifts that render irrelevant the entire domain a thing is fixed within. No matter how many layers of abstraction and redirection we create, there is no system that will for all time be able to unambiguously identify something on the web or elsewhere.
+
+The appearance of persistence is a *social* phenomenon rather than a *technological* one. `Archive.org` continues to exist because many people actively keep it existing, not because of the architecture of their archive. Designing for permanence makes systems *fragile.* Instead we should design for *adapting* to change. Adapting to change is also a social phenomenon: I might misplace things, change how they are named, and tell you that the same URL means something different, or that the same page goes by a different URL now. A newspaper might go out of business and its website might go offline, but someone might save a PDF of the original page and rehost it on their personal website. The tools we need look more like systems for renaming, declaring equivalence, translation, and change than they do an unalterable, permanent, append-only blockchain thing.
+
+## Ambiguity is Natural
+
+The [original vision](https://www.w3.org/DesignIssues/LinkedData.html) for Linked Data on the web imagined every concept having a single unique URI, but unambiguous identifiers are fictional for the same reason that unambiguous concepts are fictional. Information is contextual. The same set of words has a different meaning in a different context. Multiple sets of words can have the same meaning.
+
+Names and locations are *linguistic*, not *mathematical.* Rather than trying to design ambiguity out of the system so that web crawlers can deterministically generate algorithmic restaurant reservations, we should design systems that explicitly incorporate context into reference and use.
+
+## Autonomy *and* Convenience Can Coexist
+
+We should neither sacrifice control of the internet to platform giants nor insist that self-hosting is the only alternative. If the alternative to using Google Docs or Slack requires me to be a professional sysadmin, or even to keep a raspberry pi plugged in and online at all times, it isn't an alternative for 95% of people.
+
+It should be possible to share resources such that relatively few people need to maintain persistent network infrastructure, and it should be possible to accommodate their leaving at any time. It should also be very difficult for one or a few actors to make a large number of other peers on the network dependent on them, claiming de facto control over an ostensibly decentralized system (lookin at you, mastodon.social).


@@ -5,3 +5,16 @@ How do we find people and know how to connect to them?
 - Bootstrapping initial connections
 - Gossiping
 - Hole punching
+
+# Scraps
+
+https://xmpp.org/extensions/xep-0030.html
+
+> There are three kinds of information that need to be discovered about an entity:
+>
+> - its basic identity (type and/or category)
+> - the features it offers and protocols it supports
+> - any additional items associated with the entity, whether or not they are addressable as JIDs
+>
+> All three MUST be supported, but the first two kinds of information relate to the entity itself whereas the third kind of information relates to items associated with the entity itself; therefore two different query types are needed.
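
On the wire, a XEP-0030 disco#info request is just an IQ stanza wrapping a namespaced `<query/>` element; a minimal sketch using only the standard library, with placeholder JIDs:

```python
import xml.etree.ElementTree as ET

# XEP-0030 service discovery: ask an entity for its identity and features
iq = ET.Element("iq", {
    "type": "get",
    "from": "alice@example.com/desktop",
    "to": "bob@example.net",
    "id": "disco1",
})
ET.SubElement(iq, "query", {"xmlns": "http://jabber.org/protocol/disco#info"})
print(ET.tostring(iq, encoding="unicode"))
```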


@@ -1,3 +1,8 @@
 # Encryption
 
 How can we make it possible to have a protocol that is "open" when it is intended to be, but also protects privacy and consent when we need it to?
+
+# TODO
+
+- https://en.wikipedia.org/wiki/OMEMO


@@ -48,6 +48,7 @@ translation
 :caption: Drafting
 :hidden:
 
+design
 sketchpad
 ```


@@ -2,17 +2,17 @@
 
 Overview of the various concepts that p2p systems have to handle or address, with links to the sections where we address them!
 
-- [Definitions](definitions.html) - Terms used within the protocol spec
-- [Protocol](protocol.html) - The protocol spec itself, which encompasses the following sections and describes how they relate to one another.
-- [Identity](identity.html) - How each peer in the swarm is identified (or not)
-- [Discovery](discovery.html) - How peers are discovered and connected to in the swarm, or, how an identity is dereferenced into some network entity.
-- [Data Structures](data_structures.html) - What and how data is represented within the protocol
-- [Querying](querying.html) - How data, or pieces of data, are requested from hosting peers
-- [Evolvability](evolvability.html) - How the protocol is intended to accommodate changes, plugins, etc.
+- [Definitions](definitions) - Terms used within the protocol spec
+- [Protocol](protocol) - The protocol spec itself, which encompasses the following sections and describes how they relate to one another.
+- [Identity](identity) - How each peer in the swarm is identified (or not)
+- [Discovery](discovery) - How peers are discovered and connected to in the swarm, or, how an identity is dereferenced into some network entity.
+- [Data Structures](data_structures) - What and how data is represented within the protocol
+- [Querying](querying) - How data, or pieces of data, are requested from hosting peers
+- [Evolvability](evolvability) - How the protocol is intended to accommodate changes, plugins, etc.
 
 Additionally, p2p-ld considers these properties that are not universal to p2p protocols:
 
-- [Vocabulary](vocabulary.html) - The linked data vocabulary that is used within the protocol
-- [Encryption](encryption.html) - How individual messages can be encrypted and decrypted by peers
-- [Federation](federation.html) - How peers can form supra-peer clusters for swarm robustness, social organization, and governance
-- [Backwards Compatibility](backwards_compatibility.html) - How the protocol integrates with existing protocols and technologies.
+- [Vocabulary](vocabulary) - The linked data vocabulary that is used within the protocol
+- [Encryption](encryption) - How individual messages can be encrypted and decrypted by peers
+- [Federation](federation) - How peers can form supra-peer clusters for swarm robustness, social organization, and governance
+- [Backwards Compatibility](backwards_compatibility) - How the protocol integrates with existing protocols and technologies.


@@ -1,3 +1,25 @@
+@article{kunzePersistenceStatementsDescribing2017,
+  title = {Persistence {{Statements}}: {{Describing Digital Stickiness}}},
+  shorttitle = {Persistence {{Statements}}},
+  author = {Kunze, John and Calvert, Scout and DeBarry, Jeremy D. and Hanlon, Matthew and Janée, Greg and Sweat, Sandra},
+  date = {2017-08-14},
+  journaltitle = {Data Science Journal},
+  volume = {16},
+  number = {0},
+  pages = {39},
+  publisher = {{Ubiquity Press}},
+  issn = {1683-1470},
+  doi = {10.5334/dsj-2017-039},
+  url = {http://datascience.codata.org/articles/10.5334/dsj-2017-039/},
+  urldate = {2022-09-07},
+  abstract = {In this paper we present a draft vocabulary for making “persistence statements.” These are simple tools for pragmatically addressing the concern that anyone feels upon experiencing a broken web link. Scholars increasingly use scientific and cultural assets in digital form, but choosing which among many objects to cite for the long term can be difficult. There are few well-defined terms to describe the various kinds and qualities of persistence that object repositories and identifier resolvers do or dont provide. Given an objects identifier, one should be able to query a provider to retrieve human- and machine-readable information to help judge the level of service to expect and help gauge whether the identifier is durable enough, as a sort of long-term bet, to include in a citation. The vocabulary should enable providers to articulate persistence policies and set user expectations.},
+  archive = {https://web.archive.org/web/20220907213818/https://datascience.codata.org/articles/10.5334/dsj-2017-039/},
+  issue = {0},
+  langid = {english},
+  keywords = {archived,citation,commitment,content variance,linking,metadata,persistence,persistent identifiers,referencing,standardization,versioning},
+  file = {/Users/jonny/Dropbox/papers/zotero/K/KunzeJ/kunze_2017_persistence_statements.pdf}
+}
+
 @online{lemmer-webberHeartSpritelyDistributed,
   title = {The {{Heart}} of {{Spritely}}: {{Distributed Objects}} and {{Capability Security}}},
   author = {Lemmer-Webber, Christine and Farmer, Randy and Sims, Juliana},
@@ -26,6 +48,22 @@
   file = {/Users/jonny/Dropbox/papers/zotero/S/SaundersJ/saunders_2022_decentralized_infrastructure_for_(neuro)science.pdf}
 }
 
+@online{saundersSurveillanceGraphs2023,
+  title = {Surveillance {{Graphs}}},
+  author = {Saunders, Jonny L.},
+  date = {2023-04-02T00:00:00+00:00},
+  number = {hc:54749},
+  eprint = {hc:54749},
+  eprinttype = {hcommons.org},
+  doi = {10.17613/syv8-cp10},
+  url = {https://jon-e.net/surveillance-graphs},
+  urldate = {2023-06-08},
+  archive = {https://web.archive.org/web/20230608223153/https://jon-e.net/surveillance-graphs/},
+  langid = {english},
+  pubstate = {preprint},
+  keywords = {archived}
+}
+
 @article{verborghTriplePatternFragments2016,
   title = {Triple {{Pattern Fragments}}: {{A}} Low-Cost Knowledge Graph Interface for the {{Web}}},
   shorttitle = {Triple {{Pattern Fragments}}},
@@ -44,3 +82,13 @@
   keywords = {archived},
   file = {/Users/jonny/Dropbox/papers/zotero/V/VerborghR/verborgh_2016_triple_pattern_fragments.pdf}
 }
+
+@inproceedings{verborghWebScaleQueryingLinked2014,
+  title = {Web-{{Scale Querying}} through {{Linked Data Fragments}}},
+  booktitle = {Proceedings of the 7th {{Workshop}} on {{Linked Data}} on the {{Web}}},
+  author = {Verborgh, Ruben and Coppens, Sam and Sande, Miel Vander and Mannens, Erik and Colpaert, Pieter and Van de Walle, Rik},
+  date = {2014-04-08},
+  abstract = {To unlock the full potential of Linked Data sources, we need flexible ways to query them. Public sparql endpoints aim to fulfill that need, but their availability is notoriously problematic. We therefore introduce Linked Data Fragments, a publishing method that allows efficient offloading of query execution from servers to clients through a lightweight partitioning strategy. It enables servers to maintain availability rates as high as any regular http server, allowing querying to scale reliably to much larger numbers of clients. This paper explains the core concepts behind Linked Data Fragments and experimentally verifies their Web-level scalability, at the cost of increased query times. We show how trading server-side query execution for inexpensive data resources with relevant affordances enables a new generation of intelligent clients.},
+  langid = {english},
+  file = {/Users/jonny/Zotero/storage/97PS2BJQ/Verborgh et al. - Web-Scale Querying through Linked Data Fragments.pdf}
+}


@@ -4,6 +4,7 @@ How do we find peers that have subgraphs that are responsive to what we want?
 
 ## Syntax
 
+(qlocation)=
 ### Location
 
 How to refer to a given [container](data_structures.html#Containers), eg.
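
A toy parser for the reference shape used in these pages (`@<ACCOUNT>:<name>`); the grammar is inferred from the examples here and in data_structures.md, not specified anywhere yet, so treat the regex as a hypothetical.

```python
import re

# hypothetical grammar: @<ACCOUNT>, optionally followed by :<container name>
REF = re.compile(r"^@(?P<account>[^:/]+)(?::(?P<container>[^:/]+))?$")

def parse_ref(ref: str) -> dict | None:
    """Split a container reference into its account and container parts."""
    m = REF.match(ref)
    return m.groupdict() if m else None

assert parse_ref("@user:blog") == {"account": "user", "container": "blog"}
# a bare account reference is the top-level container
assert parse_ref("@user") == {"account": "user", "container": None}
```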