Mirror of https://github.com/p2p-ld/docs.git, synced 2024-11-12 17:54:30 +00:00
found lots of good rdf resources!!!
This commit is contained in: parent 09be1fb121, commit e1da79c769
9 changed files with 356 additions and 31 deletions
poetry.lock (generated): 80 lines changed
````diff
@@ -241,14 +241,14 @@ six = ">=1.4.1"
 
 [[package]]
 name = "linkify-it-py"
-version = "1.0.3"
+version = "2.0.2"
 description = "Links recognition library with FULL unicode support."
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 files = [
-    {file = "linkify-it-py-1.0.3.tar.gz", hash = "sha256:2b3f168d5ce75e3a425e34b341a6b73e116b5d9ed8dbbbf5dc7456843b7ce2ee"},
-    {file = "linkify_it_py-1.0.3-py3-none-any.whl", hash = "sha256:11e29f00150cddaa8f434153f103c14716e7e097a8fd372d9eb1ed06ed91524d"},
+    {file = "linkify-it-py-2.0.2.tar.gz", hash = "sha256:19f3060727842c254c808e99d465c80c49d2c7306788140987a1a7a29b0d6ad2"},
+    {file = "linkify_it_py-2.0.2-py3-none-any.whl", hash = "sha256:a3a24428f6c96f27370d7fe61d2ac0be09017be5190d68d8658233171f1b6541"},
 ]
 
 [package.dependencies]
@@ -256,7 +256,7 @@ uc-micro-py = "*"
 
 [package.extras]
 benchmark = ["pytest", "pytest-benchmark"]
-dev = ["black", "flake8", "isort", "pre-commit"]
+dev = ["black", "flake8", "isort", "pre-commit", "pyproject-flake8"]
 doc = ["myst-parser", "sphinx", "sphinx-book-theme"]
 test = ["coverage", "pytest", "pytest-cov"]
 
@@ -278,14 +278,14 @@ tornado = {version = "*", markers = "python_version > \"2.7\""}
 
 [[package]]
 name = "markdown-it-py"
-version = "2.2.0"
+version = "3.0.0"
 description = "Python port of markdown-it. Markdown parsing, done right!"
 category = "main"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
-    {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
+    {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+    {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
 ]
 
 [package.dependencies]
@@ -298,7 +298,7 @@ compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0
 linkify = ["linkify-it-py (>=1,<3)"]
 plugins = ["mdit-py-plugins"]
 profiling = ["gprof2dot"]
-rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
 testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
 
 [[package]]
@@ -363,22 +363,22 @@ files = [
 
 [[package]]
 name = "mdit-py-plugins"
-version = "0.3.5"
+version = "0.4.0"
 description = "Collection of plugins for markdown-it-py"
 category = "main"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a"},
-    {file = "mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e"},
+    {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"},
+    {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"},
 ]
 
 [package.dependencies]
-markdown-it-py = ">=1.0.0,<3.0.0"
+markdown-it-py = ">=1.0.0,<4.0.0"
 
 [package.extras]
 code-style = ["pre-commit"]
-rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"]
+rtd = ["myst-parser", "sphinx-book-theme"]
 testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
 
 [[package]]
@@ -395,29 +395,29 @@ files = [
 
 [[package]]
 name = "myst-parser"
-version = "1.0.0"
+version = "2.0.0"
 description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser,"
 category = "main"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "myst-parser-1.0.0.tar.gz", hash = "sha256:502845659313099542bd38a2ae62f01360e7dd4b1310f025dd014dfc0439cdae"},
-    {file = "myst_parser-1.0.0-py3-none-any.whl", hash = "sha256:69fb40a586c6fa68995e6521ac0a525793935db7e724ca9bac1d33be51be9a4c"},
+    {file = "myst_parser-2.0.0-py3-none-any.whl", hash = "sha256:7c36344ae39c8e740dad7fdabf5aa6fc4897a813083c6cc9990044eb93656b14"},
+    {file = "myst_parser-2.0.0.tar.gz", hash = "sha256:ea929a67a6a0b1683cdbe19b8d2e724cd7643f8aa3e7bb18dd65beac3483bead"},
 ]
 
 [package.dependencies]
-docutils = ">=0.15,<0.20"
+docutils = ">=0.16,<0.21"
 jinja2 = "*"
-linkify-it-py = {version = ">=1.0,<2.0", optional = true, markers = "extra == \"linkify\""}
-markdown-it-py = ">=1.0.0,<3.0.0"
-mdit-py-plugins = ">=0.3.4,<0.4.0"
+linkify-it-py = {version = ">=2.0,<3.0", optional = true, markers = "extra == \"linkify\""}
+markdown-it-py = ">=3.0,<4.0"
+mdit-py-plugins = ">=0.4,<1.0"
 pyyaml = "*"
-sphinx = ">=5,<7"
+sphinx = ">=6,<8"
 
 [package.extras]
 code-style = ["pre-commit (>=3.0,<4.0)"]
-linkify = ["linkify-it-py (>=1.0,<2.0)"]
-rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.7.5,<0.8.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"]
+linkify = ["linkify-it-py (>=2.0,<3.0)"]
+rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"]
 testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"]
 testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"]
 
@@ -665,6 +665,30 @@ sphinx = ">=4.0"
 [package.extras]
 docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"]
 
+[[package]]
+name = "sphinx-design"
+version = "0.4.1"
+description = "A sphinx extension for designing beautiful, view size responsive web components."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sphinx_design-0.4.1-py3-none-any.whl", hash = "sha256:23bf5705eb31296d4451f68b0222a698a8a84396ffe8378dfd9319ba7ab8efd9"},
+    {file = "sphinx_design-0.4.1.tar.gz", hash = "sha256:5b6418ba4a2dc3d83592ea0ff61a52a891fe72195a4c3a18b2fa1c7668ce4708"},
+]
+
+[package.dependencies]
+sphinx = ">=4,<7"
+
+[package.extras]
+code-style = ["pre-commit (>=2.12,<3.0)"]
+rtd = ["myst-parser (>=0.18.0,<2)"]
+testing = ["myst-parser (>=0.18.0,<2)", "pytest (>=7.1,<8.0)", "pytest-cov", "pytest-regressions"]
+theme-furo = ["furo (>=2022.06.04,<2022.07)"]
+theme-pydata = ["pydata-sphinx-theme (>=0.9.0,<0.10.0)"]
+theme-rtd = ["sphinx-rtd-theme (>=1.0,<2.0)"]
+theme-sbt = ["sphinx-book-theme (>=0.3.0,<0.4.0)"]
+
 [[package]]
 name = "sphinxcontrib-applehelp"
 version = "1.0.4"
@@ -847,4 +871,4 @@ zstd = ["zstandard (>=0.18.0)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "b662b687f5c5d02e641fb4a34a05cee4e652232f06c8e734c4429191e559900a"
+content-hash = "f0d11975921d81f55f9be14a9cfff9382163a49a78f6929b087ce86229797c0d"
````
````diff
@@ -10,10 +10,11 @@ packages = [{include = "p2p_ld"}]
 [tool.poetry.dependencies]
 python = "^3.11"
 sphinx = "^6.2.1"
-myst-parser = { version = "^1.0.0", extras = ["linkify"] }
+myst-parser = { version = "^2.0.0", extras = ["linkify"] }
 furo = "^2023.5.20"
 sphinxcontrib-mermaid = "^0.9.2"
 sphinxcontrib-bibtex = "^2.5.0"
+sphinx-design = "^0.4.1"
 
 
 [tool.poetry.group.dev.dependencies]
````
````diff
@@ -4,6 +4,7 @@
 :caption: Linked Data
 :maxdepth: 1
 
+rdf
 solid
 ld_fragments
 ld_platform
````
src/comparison/ld/rdf.md (new file, 183 lines)
```{index} RDF
```
# RDF and Friends

RDF is one of the elephants in the room when it comes to triplet graphs and linked data. Its history is long and torrid: depending on your disposition, it is either hopelessly, aggressively complex or a divine calling.

**p2p-ld does not necessarily seek to be an RDF-based p2p protocol,** though strategizing for interoperability with RDF and RDF-derivative formats would be nice.

One of the primary challenges to using RDF-like formats is the conflation of URLs and URIs as the primary identifiers for schema and objects. This idea (roughly) maps onto the "neat" characterization of linked data, where everything should ideally have one canonical representation and a handful of "correct" general-purpose schemas should be capable of modeling the world.

We depart from that vision, instead favoring radical vernacularism {cite}`saundersSurveillanceGraphs2023`. URIs are extremely general, and include decentralized identifiers like {index}`multiaddrs <IPFS; Multiaddr>`.

## RDF And Friends

RDF comes with a large family of serialization formats and companion specifications.
```{index} JSON-LD
```
### JSON-LD


## Challenges

### Tabular and Array Data

```{important}
See https://www.cs.ox.ac.uk/isg/challenges/sem-tab/
```

The edges from a node in a graph are unordered, which makes array and tabular data difficult to work with in RDF!

This has been approached in a few ways:

**RDF** uses a [godforsaken `rdf:first` `rdf:rest` linked list syntax](https://www.w3.org/TR/rdf12-schema/#ch_collectionvocab).

e.g. one would express `MyList`, which contains the `Friends` `["Amy", "Bob", "Carly"]`, in (longhand) Turtle as
```turtle
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix : <https://example.com/> .

:MyList :Friends :list1 .

:list1
    rdf:first :Amy ;
    rdf:rest :list2 .

:list2
    rdf:first :Bob ;
    rdf:rest :list3 .

:list3
    rdf:first :Carly ;
    rdf:rest rdf:nil .
```
And thankfully Turtle has a shorthand, which isn't so bad:

```turtle
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix : <https://example.com/> .

:MyList
    :Friends (
        :Amy
        :Bob
        :Carly
    ) .
```
Both of these correspond to the triplet graph:

```{mermaid}
flowchart LR
    MyList
    list1
    list2
    list3
    nil
    Amy
    Bob
    Carly

    MyList -->|Friends| list1
    list1 -->|rest| list2
    list2 -->|rest| list3
    list3 -->|rest| nil
    list1 -->|first| Amy
    list2 -->|first| Bob
    list3 -->|first| Carly
```

Which is not great.
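To see how that plays out in code, here is a short sketch assuming [rdflib](https://github.com/RDFLib/rdflib) (listed under Libraries below): its `Collection` helper hides the `rdf:first`/`rdf:rest` bookkeeping and gives the members back in order.

```python
from rdflib import Graph, Namespace
from rdflib.collection import Collection

EX = Namespace("https://example.com/")

g = Graph()
g.parse(data="""
@prefix : <https://example.com/> .
:MyList :Friends ( :Amy :Bob :Carly ) .
""", format="turtle")

# the object of :Friends is the head node of the rdf:first/rdf:rest chain
head = g.value(subject=EX.MyList, predicate=EX.Friends)
friends = list(Collection(g, head))
print(friends)
# [URIRef('https://example.com/Amy'), ...Bob, ...Carly], in list order
```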
**{index}`JSON-LD`** uses a `@list` keyword:

```jsonld
{
  "@context": {"foaf": "http://xmlns.com/foaf/0.1/"},
  "@id": "http://example.org/people#joebob",
  "foaf:nick": {
    "@list": [ "joe", "bob", "jaybee" ]
  }
}
```
which can be expanded recursively to [mimic arrays](https://www.w3.org/TR/json-ld11/#example-84-coordinates-expressed-in-json-ld):

`````{tab-set}
````{tab-item} JSON-LD
```jsonld
{
  "@context": {
    "@vocab": "https://purl.org/geojson/vocab#",
    "coordinates": {"@container": "@list"}
  },
  "geometry": {
    "coordinates": [
      [
        [-10.0, -10.0],
        [10.0, -10.0],
        [10.0, 10.0],
        [-10.0, -10.0]
      ]
    ]
  }
}
```
````
````{tab-item} Turtle
```turtle
@prefix geojson: <https://purl.org/geojson/vocab#>.

[
  a geojson:Feature ;
  geojson:bbox (-10 -10 10 10) ;
  geojson:geometry [
    a geojson:Polygon ;
    geojson:coordinates (
      (
        (-10 -10)
        (10 -10)
        (10 10)
        (-10 -10)
      )
    )
  ]
] .
```
````
`````
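A minimal sketch of that expansion, assuming the `pyld` package (the Python counterpart of jsonld.js, which is listed under Libraries below), applied to the `foaf:nick` example above:

```python
from pyld import jsonld

doc = {
    "@context": {"foaf": "http://xmlns.com/foaf/0.1/"},
    "@id": "http://example.org/people#joebob",
    "foaf:nick": {"@list": ["joe", "bob", "jaybee"]},
}

expanded = jsonld.expand(doc)
print(expanded)
# the @list keyword survives expansion, so the order of the nicks is preserved:
# [{'@id': 'http://example.org/people#joebob',
#   'http://xmlns.com/foaf/0.1/nick': [{'@list': [{'@value': 'joe'},
#                                                 {'@value': 'bob'},
#                                                 {'@value': 'jaybee'}]}]}]
```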
### Naming

- All names have to be global: relative names must resolve to a global name via contexts/prefixes. The alternative is blank nodes, which are treated as equivalent in e.g. graph merges. This is probably where graph pattern matching enters.
- Blank nodes and skolemization: https://www.w3.org/TR/rdf11-mt/#skolemization-informative (see the sketch below)
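A small sketch of blank nodes and skolemization, assuming rdflib (the skolem IRIs it mints are an rdflib default, not something p2p-ld prescribes):

```python
from rdflib import Graph
from rdflib.compare import isomorphic

g = Graph()
g.parse(data="""
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
# blank nodes: no global name, only local identity
[] foaf:name "Amy" ;
   foaf:knows [ foaf:name "Bob" ] .
""", format="turtle")

# skolemize() replaces each blank node with a freshly minted, globally unique IRI,
# so the triples can be shared or merged without blank-node identity problems
sk = g.skolemize()
for triple in sk:
    print(triple)

# de_skolemize() reverses the substitution; the round trip preserves graph structure
print(isomorphic(g, sk.de_skolemize()))  # True
```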
## References

- [RDF 1.1 Primer](https://www.w3.org/TR/rdf11-primer/)
- W3C Recommendation on generating RDF from tabular data: {cite}`tandyGeneratingRDFTabular2015`
- {index}`JSON Schema` in RDF: {cite}`charpenayJSONSchemaRDF2023`
- [Turtle](https://www.w3.org/TR/rdf12-turtle/)
- [N-ary relations in RDF](https://www.w3.org/TR/swbp-n-aryRelations/)
- [RDF 1.1 Semantics](https://www.w3.org/TR/rdf11-mt/)

### Libraries

- [jsonld.js](https://github.com/digitalbazaar/jsonld.js)
- [rdf-canonize-native](https://github.com/digitalbazaar/rdf-canonize-native)
- [biolink-model](https://github.com/biolink/biolink-model) for a nice example of generating multiple schema formats from a .yaml file
- [linkml](https://linkml.io/) - modeling language for linked data {cite}`moxonLinkedDataModeling2021`
  - Multidimensional arrays in linkml: https://linkml.io/linkml/howtos/multidimensional-arrays.html
- [oaklib](https://incatools.github.io/ontology-access-kit/index.html) - python package for managing ontologies
- [rdflib](https://github.com/RDFLib/rdflib) - maybe the canonical python rdf library

### See Also

- [HYDRA vocabulary](https://www.hydra-cg.com/spec/latest/core/) - Linked Data plus REST
- [CORAL](https://github.com/jmchandonia/CORAL)
````diff
@@ -17,6 +17,14 @@ If IPFS is {index}`BitTorrent` + {index}`git`, and {index}`ActivityPub` is {inde
 (IPLD)=
 ### IPLD
 
+```{index} IPFS; Multiformats
+```
+### Multiformats
+
+- https://ipfs.io/ipns/multiformats.io/
+- {index}`IPFS; Multihash` - https://ipfs.io/ipns/multiformats.io/multihash/
+- {index}`IPFS; Multicodec` - https://github.com/multiformats/multicodec
+
 ```{index} IPFS; libp2p
 ```
 (libp2p)=
````
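The Multihash link added above describes a self-describing hash format: a varint code naming the hash function, a varint digest length, then the digest itself. A rough sketch of building one by hand (assuming the published sha2-256 code `0x12`; a real implementation would use a multiformats library):

```python
import hashlib

def multihash_sha256(data: bytes) -> bytes:
    """<varint fn code><varint digest length><digest>; both prefixes fit in one byte here."""
    digest = hashlib.sha256(data).digest()
    return bytes([0x12, len(digest)]) + digest

print(multihash_sha256(b"hello world").hex())  # "1220" followed by the raw sha2-256 digest
```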
````diff
@@ -19,7 +19,8 @@ extensions = [
     'sphinx.ext.autodoc',
     'sphinxcontrib.mermaid',
     'sphinxcontrib.bibtex',
-    'myst_parser'
+    'myst_parser',
+    'sphinx_design'
 ]
 
 templates_path = ['_templates']
````
````diff
@@ -10,6 +10,13 @@ Triplet graphs similar to linked data fragments with envelopes. decoupling conte
 (Containers)=
 ## Containers
 
+```{important}
+Ya this seems like the right set of ideas to build on
+
+- https://www.w3.org/TR/json-ld11-framing/
+- https://w3c.github.io/rdf-canon/spec/#introduction
+```
+
 - Packets of LD-triplets that contain
   - Hash of triplets
   - Encryption Info (if applicable)
````
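Purely as a hypothetical sketch of the container packet described in those bullets (the class and field names are illustrative assumptions, not part of any spec), such a packet might carry the triples, a hash over them, and optional encryption info:

```python
import hashlib
import json
from dataclasses import dataclass
from typing import Optional

Triple = tuple[str, str, str]

@dataclass(frozen=True)
class Container:
    """Illustrative only: a packet of LD triples plus integrity/encryption metadata."""
    triples: tuple[Triple, ...]
    encryption: Optional[dict] = None  # e.g. algorithm and key reference, if applicable

    @property
    def triples_hash(self) -> str:
        # hash over a deterministic ordering of the triples; a real implementation
        # would hash a canonical serialization (see Canonicalization below)
        canonical = json.dumps(sorted(self.triples))
        return hashlib.sha256(canonical.encode()).hexdigest()

packet = Container(triples=(("ex:MyList", "ex:Friends", "ex:Amy"),))
print(packet.triples_hash)
```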
````diff
@@ -66,4 +73,14 @@ Describes
 - A given container has an identity hash from its first packing
 - A given triple can be contained by
 
+## Canonicalization
+
+- https://w3c.github.io/rdf-canon/spec/#introduction
+- https://json-ld.org/spec/ED/rdf-graph-normalization/20111016/
+- https://www.w3.org/TR/json-ld11-framing/
+
+## Compare to:
+
+- {index}`CORAL` - https://github.com/jmchandonia/CORAL
+  - Good idea, also making use of extended context. Very focused on scientific data - 'units' are a core part of the model. distinction between static and dynamic data types seems like sort of a hack. data bricks are similar to containers. the source is an absolute mess.
````
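To illustrate why the canonicalization links above matter, here is a sketch assuming rdflib (note that `rdflib.compare` checks graph isomorphism rather than implementing the RDF Dataset Canonicalization algorithm): two serializations that differ only in blank-node labels and statement order should compare as the same graph, and so should yield the same container identity.

```python
from rdflib import Graph
from rdflib.compare import isomorphic, to_isomorphic

ttl_a = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
_:a foaf:name "Amy" ; foaf:knows _:b .
_:b foaf:name "Bob" .
"""
# the same graph with different blank node labels and statement order
ttl_b = """
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
_:y foaf:name "Bob" .
_:x foaf:knows _:y ; foaf:name "Amy" .
"""

g_a = Graph().parse(data=ttl_a, format="turtle")
g_b = Graph().parse(data=ttl_b, format="turtle")

print(isomorphic(g_a, g_b))                      # True
print(to_isomorphic(g_a) == to_isomorphic(g_b))  # True: canonical forms compare equal
```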
````diff
@@ -1,3 +1,13 @@
+@online{charpenayJSONSchemaRDF2023,
+  title = {{{JSON Schema}} in {{RDF}}},
+  author = {Charpenay, Victor and Lefrançois, Maxime and Villalón, María Poveda},
+  date = {2023-06-14},
+  url = {https://www.w3.org/2019/wot/json-schema},
+  urldate = {2023-06-28},
+  archive = {https://web.archive.org/web/20230628004904/https://www.w3.org/2019/wot/json-schema},
+  keywords = {archived}
+}
+
 @online{cohenIncentivesBuildRobustness2003,
   title = {Incentives {{Build Robustness}} in {{BitTorrent}}},
   author = {Cohen, Bram},
````
````diff
@@ -28,6 +38,14 @@
   file = {/Users/jonny/Dropbox/papers/zotero/D/DanielE/daniel_2022_ipfs_and_friends2.pdf}
 }
 
+@online{DefiningNaryRelations,
+  title = {Defining {{N-ary Relations}} on the {{Semantic Web}}},
+  url = {https://www.w3.org/TR/swbp-n-aryRelations/},
+  urldate = {2023-06-28},
+  archive = {https://web.archive.org/web/20230628025756/https://www.w3.org/TR/swbp-n-aryRelations/},
+  keywords = {archived}
+}
+
 @article{kunzePersistenceStatementsDescribing2017,
   title = {Persistence {{Statements}}: {{Describing Digital Stickiness}}},
   shorttitle = {Persistence {{Statements}}},
````
````diff
@@ -50,6 +68,14 @@
   file = {/Users/jonny/Dropbox/papers/zotero/K/KunzeJ/kunze_2017_persistence_statements.pdf}
 }
 
+@article{lassilaIdentityCrisisLinked,
+  title = {Identity {{Crisis}} in {{Linked Data}}},
+  author = {Lassila, Ora and McDonough, Ryan and Malaika, Susan},
+  langid = {english},
+  keywords = {linked data},
+  file = {/Users/jonny/Dropbox/papers/zotero/L/LassilaO/lassila_identity_crisis_in_linked_data.pdf}
+}
+
 @inproceedings{legoutRarestFirstChoke2006,
   title = {Rarest First and Choke Algorithms Are Enough},
   booktitle = {Proceedings of the 6th {{ACM SIGCOMM}} on {{Internet}} Measurement - {{IMC}} '06},
````
````diff
@@ -79,6 +105,44 @@
   file = {/Users/jonny/Dropbox/papers/zotero/L/Lemmer-WebberC/lemmer-webber_the_heart_of_spritely.pdf;/Users/jonny/Zotero/storage/32A9YVLN/spritely-core.html}
 }
 
+@inproceedings{moxonLinkedDataModeling2021,
+  title = {The Linked Data Modeling Language (LinkML): A General-Purpose Data Modeling Framework Grounded in Machine-Readable Semantics},
+  shorttitle = {The Linked Data Modeling Language (LinkML)},
+  booktitle = {CEUR Workshop Proceedings},
+  author = {Moxon, Sierra and Solbrig, Harold and Unni, Deepak and Jiao, Dazhi and Bruskiewich, Richard and Balhoff, James and Vaidya, Gaurav and Duncan, William and Hegde, Harshad and Miller, Mark and Brush, Matthew and Harris, Nomi and Haendel, Melissa and Mungall, Christopher},
+  date = {2021},
+  volume = {3073},
+  pages = {148--151},
+  publisher = {{CEUR-WS}},
+  issn = {1613-0073},
+  url = {https://pure.johnshopkins.edu/en/publications/the-linked-data-modeling-language-linkml-a-general-purpose-data-m},
+  urldate = {2023-06-28},
+  archive = {https://web.archive.org/web/20230628014800/https://pure.johnshopkins.edu/en/publications/the-linked-data-modeling-language-linkml-a-general-purpose-data-m},
+  eventtitle = {2021 International Conference on Biomedical Ontologies, ICBO 2021},
+  langid = {English (US)},
+  keywords = {archived},
+  file = {/Users/jonny/Dropbox/papers/zotero/M/MoxonS/moxon_2021_the_linked_data_modeling_language_(linkml).pdf}
+}
+
+@article{novichkovCORALFrameworkRigorous2022,
+  title = {{{CORAL}}: {{A}} Framework for Rigorous Self-Validated Data Modeling and Integrative, Reproducible Data Analysis},
+  shorttitle = {{{CORAL}}},
+  author = {Novichkov, Pavel S and Chandonia, John-Marc and Arkin, Adam P},
+  date = {2022-10-17},
+  journaltitle = {GigaScience},
+  volume = {11},
+  pages = {giac089},
+  issn = {2047-217X},
+  doi = {10.1093/gigascience/giac089},
+  url = {https://academic.oup.com/gigascience/article/doi/10.1093/gigascience/giac089/6762021},
+  urldate = {2023-06-28},
+  abstract = {Background: Many organizations face challenges in managing and analyzing data, especially when relevant datasets arise from multiple sources and methods. Analyzing heterogeneous datasets and additional derived data requires rigorous tracking of their interrelationships and provenance. This task has long been a Grand Challenge of data science and has more recently been formalized in the FAIR principles: that all data objects be Findable, Accessible, Interoperable, and Reusable, both for machines and for people. Adherence to these principles is necessary for proper stewardship of information, for testing regulatory compliance, for measuring the efficiency of processes, and for facilitating reuse of data-analytical frameworks.},
+  archive = {https://web.archive.org/web/20230628040905/https://academic.oup.com/gigascience/article/doi/10.1093/gigascience/giac089/6762021},
+  langid = {english},
+  keywords = {archived},
+  file = {/Users/jonny/Dropbox/papers/zotero/N/NovichkovP/novichkov_2022_coral.pdf}
+}
+
 @online{ogdenDatDistributedDataset2017,
   type = {preprint},
   title = {Dat - {{Distributed Dataset Synchronization And Versioning}}},
````
````diff
@@ -128,6 +192,27 @@
   keywords = {archived}
 }
 
+@online{SPARQLLanguageOverview,
+  title = {{{SPARQL}} Language Overview},
+  url = {http://www.it.uu.se/research/group/udbl/SciSPARQL/SciSPARQL_intro.pdf},
+  urldate = {2023-06-28},
+  archive = {https://web.archive.org/web/20230628043006/http://www.it.uu.se/research/group/udbl/SciSPARQL/SciSPARQL\_intro.pdf},
+  keywords = {archived},
+  file = {/Users/jonny/Zotero/storage/6AAZ692A/SciSPARQL_intro.pdf}
+}
+
+@online{tandyGeneratingRDFTabular2015,
+  title = {Generating {{RDF}} from {{Tabular Data}} on the {{Web}}},
+  author = {Tandy, Jeremy and Herman, Ivan and Kellogg, Gregg},
+  date = {2015-12-17},
+  url = {https://www.w3.org/TR/csv2rdf/},
+  urldate = {2023-06-28},
+  archive = {https://web.archive.org/web/20230628003820/https://www.w3.org/TR/csv2rdf/},
+  langid = {english},
+  organization = {{W3C}},
+  keywords = {archived,linked data,rdf}
+}
+
 @article{verborghTriplePatternFragments2016,
   title = {Triple {{Pattern Fragments}}: {{A}} Low-Cost Knowledge Graph Interface for the {{Web}}},
   shorttitle = {Triple {{Pattern Fragments}}},
````
````diff
@@ -1,3 +1,8 @@
 # Translation
 
 A toolkit for writing translations between formats and schemas!
+
+## See also
+
+- https://linkml.io/schema-automator/introduction.html#generalization-from-instance-data
+- https://apps.islab.ntua.gr/d2rml/tr/d2rml/
````