Compare commits

..

20 commits

Author SHA1 Message Date
d1b08f1b07 Remove old lints 2024-05-04 19:48:38 +02:00
34e665154d Improve tracing 2024-05-04 16:10:45 +02:00
3d0a05f3a9 Add tracing 2024-05-04 13:50:04 +02:00
8f2ea89301 Partial inbox handler 2024-05-03 23:44:01 +02:00
288c181cc9 make post federation work 2024-05-03 18:35:05 +02:00
f0d7d793ca Use LEGACY fetch mode for better compat 2024-05-03 17:16:59 +02:00
21b47409f1 More macros 2024-05-02 23:03:31 +02:00
edc21b4403 Improve error handling in server::api 2024-05-02 21:23:45 +02:00
564771931f Major refactor
* Reorganize the fetch component
* Organize the server code a little more
* Move verification to the server and clean it up
* Improve the error handling around the fetch code
2024-05-02 19:41:23 +02:00
8d350e8cd9 A whole bunch of different refactors 2024-05-02 19:41:12 +02:00
09cf289b75 [wip] remove space from signature header formatting 2024-04-30 00:09:53 +02:00
fc4e4595c2 [wip] YET MORE signatures cleanup and fixing 2024-04-29 23:36:57 +02:00
9eaad3d7bb [wip] http signatures refactor 2024-04-29 20:17:56 +02:00
9845603846 [wip] signatures refactor 2024-04-29 13:14:25 +02:00
c784966d20 Split fetch::signatures into its own file 2024-04-29 00:09:17 +02:00
37acb67aa5 Major cleanup
* Rename `fetch::keys` to `fetch::signatures`
* Clean up the public api of `fetch::signatures`
* Switch from axum to hyper
* Add request signature validation (buggy, wip)
2024-04-28 23:40:37 +02:00
b91da3c4ab god forsaken http signatures 2024-04-27 22:01:28 +02:00
bb26926edb Serve actors by ID 2024-04-27 09:32:00 +02:00
7ea8938c49 Documented some stuff, improved follow request logic 2024-04-26 23:56:46 +02:00
29f90ad918 Store api overhaul 2024-04-24 23:18:19 +02:00
28 changed files with 3703 additions and 721 deletions

481
Cargo.lock generated
View file

@ -89,78 +89,12 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "async-trait"
version = "0.1.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.60",
]
[[package]]
name = "autocfg"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80"
[[package]]
name = "axum"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf"
dependencies = [
"async-trait",
"axum-core",
"bytes",
"futures-util",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-util",
"itoa",
"matchit",
"memchr",
"mime",
"percent-encoding",
"pin-project-lite",
"rustversion",
"serde",
"serde_json",
"serde_path_to_error",
"serde_urlencoded",
"sync_wrapper 1.0.1",
"tokio",
"tower",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "axum-core"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3"
dependencies = [
"async-trait",
"bytes",
"futures-util",
"http",
"http-body",
"http-body-util",
"mime",
"pin-project-lite",
"rustversion",
"sync_wrapper 0.1.2",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "backtrace"
version = "0.3.71"
@ -176,18 +110,18 @@ dependencies = [
"rustc-demangle",
]
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51"
[[package]]
name = "base64ct"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
[[package]]
name = "bincode"
version = "2.0.0-rc.3"
@ -239,12 +173,27 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "bumpalo"
version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.6.0"
@ -381,6 +330,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "convert_case"
version = "0.4.0"
@ -403,6 +358,25 @@ version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
[[package]]
name = "cpufeatures"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
dependencies = [
"libc",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "csv"
version = "1.3.0"
@ -424,6 +398,17 @@ dependencies = [
"memchr",
]
[[package]]
name = "der"
version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0"
dependencies = [
"const-oid",
"pem-rfc7468",
"zeroize",
]
[[package]]
name = "derive_more"
version = "0.99.17"
@ -437,6 +422,17 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"const-oid",
"crypto-common",
]
[[package]]
name = "either"
version = "1.11.0"
@ -478,9 +474,18 @@ checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984"
name = "fetch"
version = "0.0.0"
dependencies = [
"base64",
"chrono",
"derive_more",
"http",
"http-body-util",
"pem",
"rand",
"reqwest",
"rsa",
"serde_json",
"sigh",
"spki",
"tracing",
]
[[package]]
@ -552,6 +557,16 @@ dependencies = [
"pin-utils",
]
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getrandom"
version = "0.2.14"
@ -804,6 +819,9 @@ name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
dependencies = [
"spin",
]
[[package]]
name = "lazycell"
@ -827,6 +845,12 @@ dependencies = [
"windows-targets 0.52.5",
]
[[package]]
name = "libm"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "librocksdb-sys"
version = "0.17.0+9.0.0"
@ -895,12 +919,6 @@ dependencies = [
"syn 2.0.60",
]
[[package]]
name = "matchit"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
[[package]]
name = "memchr"
version = "2.7.2"
@ -967,6 +985,53 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]]
name = "num-bigint-dig"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151"
dependencies = [
"byteorder",
"lazy_static",
"libm",
"num-integer",
"num-iter",
"num-traits",
"rand",
"smallvec",
"zeroize",
]
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d869c01cc0c455284163fd0092f1f93835385ccab5a98a0dcc497b2f8bf055a9"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.18"
@ -974,6 +1039,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
dependencies = [
"autocfg",
"libm",
]
[[package]]
@ -1045,6 +1111,12 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "parking_lot"
version = "0.12.1"
@ -1068,6 +1140,25 @@ dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "pem"
version = "3.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae"
dependencies = [
"base64",
"serde",
]
[[package]]
name = "pem-rfc7468"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
dependencies = [
"base64ct",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
@ -1106,6 +1197,27 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkcs1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
dependencies = [
"der",
"pkcs8",
"spki",
]
[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"spki",
]
[[package]]
name = "pkg-config"
version = "0.3.30"
@ -1134,6 +1246,7 @@ dependencies = [
"clap",
"cli-table",
"puppy",
"tokio",
]
[[package]]
@ -1145,7 +1258,9 @@ dependencies = [
"derive_more",
"either",
"fetch",
"serde_json",
"store",
"tracing",
]
[[package]]
@ -1231,7 +1346,7 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e6cc1e89e689536eb5aeede61520e874df5a4707df811cd5da4aa5fbb2aae19"
dependencies = [
"base64 0.22.0",
"base64",
"bytes",
"encoding_rs",
"futures-core",
@ -1255,7 +1370,7 @@ dependencies = [
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper 0.1.2",
"sync_wrapper",
"system-configuration",
"tokio",
"tokio-native-tls",
@ -1276,6 +1391,27 @@ dependencies = [
"librocksdb-sys",
]
[[package]]
name = "rsa"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc"
dependencies = [
"const-oid",
"digest",
"num-bigint-dig",
"num-integer",
"num-traits",
"pkcs1",
"pkcs8",
"rand_core",
"sha2",
"signature",
"spki",
"subtle",
"zeroize",
]
[[package]]
name = "rustc-demangle"
version = "0.1.23"
@ -1316,7 +1452,7 @@ version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d"
dependencies = [
"base64 0.22.0",
"base64",
"rustls-pki-types",
]
@ -1326,12 +1462,6 @@ version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247"
[[package]]
name = "rustversion"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47"
[[package]]
name = "ryu"
version = "1.0.17"
@ -1413,16 +1543,6 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_path_to_error"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6"
dependencies = [
"itoa",
"serde",
]
[[package]]
name = "serde_urlencoded"
version = "0.7.1"
@ -1439,9 +1559,37 @@ dependencies = [
name = "server"
version = "0.0.0"
dependencies = [
"axum",
"derive_more",
"http",
"http-body-util",
"hyper",
"hyper-util",
"puppy",
"serde_json",
"tokio",
"tracing",
"tracing-forest",
"tracing-subscriber",
]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
[[package]]
@ -1450,19 +1598,6 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "sigh"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46bdb4cc44c46a3f0f0a6d1de27c63fccd7fa3384d8d370016c21c8f4a8b89a2"
dependencies = [
"base64 0.21.7",
"http",
"nom",
"openssl",
"thiserror",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.1"
@ -1472,6 +1607,16 @@ dependencies = [
"libc",
]
[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"digest",
"rand_core",
]
[[package]]
name = "slab"
version = "0.4.9"
@ -1497,6 +1642,22 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
]
[[package]]
name = "store"
version = "0.0.0"
@ -1517,6 +1678,12 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "1.0.109"
@ -1545,12 +1712,6 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
[[package]]
name = "sync_wrapper"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394"
[[package]]
name = "system-configuration"
version = "0.5.1"
@ -1613,6 +1774,16 @@ dependencies = [
"syn 2.0.60",
]
[[package]]
name = "thread_local"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
@ -1718,9 +1889,21 @@ checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
"log",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.60",
]
[[package]]
name = "tracing-core"
version = "0.1.32"
@ -1728,6 +1911,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-forest"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee40835db14ddd1e3ba414292272eddde9dad04d3d4b65509656414d1c42592f"
dependencies = [
"smallvec",
"thiserror",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
dependencies = [
"nu-ansi-term",
"sharded-slab",
"smallvec",
"thread_local",
"tracing-core",
"tracing-log",
]
[[package]]
@ -1736,6 +1957,12 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ulid"
version = "1.1.2"
@ -1791,12 +2018,24 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "valuable"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "virtue"
version = "0.0.13"
@ -2093,6 +2332,12 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "zeroize"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
[[package]]
name = "zstd-sys"
version = "2.0.10+zstd.1.5.6"

View file

@ -6,3 +6,4 @@ edition = "2021"
puppy = { path = "../../lib/puppy" }
clap = { version = "*", features = ["derive"] }
cli-table = "*"
tokio = { version = "*", features = ["full"] }

View file

@ -1,83 +1,115 @@
//! Control program for the ActivityPub federated social media server.
#![feature(iterator_try_collect)]
use puppy::{
model::{schema, Bite, FollowRequest, Follows, Profile, Username},
actor::Actor,
config::Config,
data::{FollowRequest, Object, Profile},
post::Author,
store::{self, Error},
Key, Store,
Context,
};
fn main() -> store::Result<()> {
// Store::nuke(".state")?;
let db = Store::open(".state", schema())?;
println!("creating actors");
let riley = get_or_create_actor(&db, "riley")?;
let linen = get_or_create_actor(&db, "linen")?;
if true {
println!("creating posts");
puppy::post::create_post(&db, riley, "@linen <3")?;
puppy::post::create_post(&db, linen, "@riley <3")?;
}
if true {
println!("making riley follow linen");
if !db.exists::<Follows>(riley, linen)? {
println!("follow relation does not exist yet");
if !db.exists::<FollowRequest>(riley, linen)? {
println!("no pending follow request; creating");
puppy::follows::request(&db, riley, linen)?;
} else {
println!("accepting the pending follow request");
puppy::follows::accept(&db, riley, linen)?;
}
} else {
println!("riley already follows linen");
#[tokio::main]
async fn main() -> puppy::Result<()> {
// puppy::store::Store::nuke(".state")?;
let config = Config {
ap_domain: "test.piss-on.me".to_string(),
wf_domain: "test.piss-on.me".to_string(),
state_dir: ".state".to_string(),
port: 1312,
};
let cx = Context::load(config)?;
let riley = get_or_create_actor(&cx, "riley")?;
cx.run(|tx| {
println!("\nRiley's following:");
for FollowRequest { id, origin, .. } in
riley.pending_requests(&tx).try_collect::<Vec<_>>()?
{
let Profile { account_name, .. } = tx.get_mixin(origin)?.unwrap();
let Object { id, .. } = tx.get_mixin(id)?.unwrap();
println!("- @{account_name} ({origin}) (request url = {id})");
}
}
Ok(())
})?;
// let post = puppy::post::create_post(&cx, riley.key, "i like boys")?;
// puppy::post::federate_post(&cx, post).await
// let linen = get_or_create_actor(&cx, "linen")?;
// if true {
// println!("creating posts");
// puppy::post::create_post(&cx, riley.key, "@linen <3")?;
// puppy::post::create_post(&cx, linen.key, "@riley <3")?;
// }
// if true {
// println!("making riley follow linen");
// cx.run(|tx| {
// if !riley.follows(&tx, &linen)? {
// println!("follow relation does not exist yet");
// if let Some(req) = linen
// .pending_requests(&tx)
// .find_ok(|r| r.origin == riley.key)?
// {
// println!("accepting the pending follow request");
// linen.do_accept_request(&cx, req)
// } else {
// println!("no pending follow request; creating");
// riley.do_follow_request(&cx, &linen).map(|_| ())
// }
// } else {
// println!("riley already follows linen");
// Ok(())
// }
// })?;
// }
println!("\nPosts on the instance:");
for post in puppy::post::fetch_timeline(&db, .., None)?.posts() {
for post in puppy::post::fetch_timeline(cx.store(), .., None)?.posts() {
let Author { ref handle, .. } = post.author;
let content = post.content.content.as_ref().unwrap();
println!("- {} by {handle}:\n{content}", post.id)
println!("- {:?} by {handle}:\n{content}", post.id)
}
Ok(())
println!("\nLinen's followers:");
for id in puppy::follows::followers_of(&db, linen)? {
let Profile { account_name, .. } = db.get_mixin(id)?.unwrap();
println!("- @{account_name} ({id})");
}
// cx.run(|tx| {
// println!("\nLinen's followers:");
// for id in linen.followers(&tx).try_collect::<Vec<_>>()? {
// let Profile { account_name, .. } = db.get_mixin(id)?.unwrap();
// println!("- @{account_name} ({id})");
// }
println!("\nRiley's following:");
for id in puppy::follows::following_of(&db, riley)? {
let Profile { account_name, .. } = db.get_mixin(id)?.unwrap();
println!("- @{account_name} ({id})");
}
// println!("\nRiley's following:");
// for id in riley.following(&tx).try_collect::<Vec<_>>()? {
// let Profile { account_name, .. } = db.get_mixin(id)?.unwrap();
// println!("- @{account_name} ({id})");
// }
if false {
println!("Biting riley");
puppy::bites::bite_actor(&db, linen, riley).unwrap();
for Bite { id, biter, .. } in puppy::bites::bites_on(&db, riley).unwrap() {
let Profile { account_name, .. } = db.get_mixin(biter)?.unwrap();
println!("riley was bitten by @{account_name} at {}", id.timestamp());
}
}
store::OK
// if false {
// println!("Biting riley");
// linen.do_bite(&cx, &riley)?;
// for Bite { id, biter, .. } in riley.bites_suffered(&tx).try_collect::<Vec<_>>()? {
// let Profile { account_name, .. } = db.get_mixin(biter)?.unwrap();
// println!("riley was bitten by @{account_name} at {}", id.timestamp());
// }
// }
// Ok(())
// })
}
fn get_or_create_actor(db: &Store, username: &str) -> Result<Key, Error> {
let user = db.lookup(Username(username.to_string()));
fn get_or_create_actor(cx: &Context, username: &str) -> puppy::Result<Actor> {
let user = cx.run(|tx| Actor::by_username(tx, username))?;
match user {
Ok(Some(key)) => {
println!("found '{username}' ({key})");
Some(key) => {
println!("found '{username}' ({key:?})");
Ok(key)
}
Ok(None) => {
None => {
println!("'{username}' doesn't exist yet, creating");
let r = puppy::create_actor(&db, username);
let r = puppy::actor::create_local(cx, username);
if let Ok(ref key) = r {
println!("created '{username}' with key {key}");
println!("created '{username}' with key {key:?}");
}
r
}
Err(e) => Err(e),
}
}

View file

@ -4,5 +4,13 @@ edition = "2021"
[dependencies]
puppy = { path = "../../lib/puppy" }
hyper = { version = "*", features = ["full"] }
tokio = { version = "*", features = ["full"] }
axum = "*"
http-body-util = "*"
hyper-util = { version = "*", features = ["full"] }
serde_json = "*"
http = "*"
derive_more = "*"
tracing = "*"
tracing-subscriber = "*"
tracing-forest = "*"

310
bin/server/src/api.rs Normal file
View file

@ -0,0 +1,310 @@
//! API endpoints and request handlers.
use std::convert::Infallible;
use std::net::SocketAddr;
use std::sync::Arc;
use http_body_util::{BodyExt as _, Full};
use hyper::body::Bytes;
use hyper::server::conn::http1;
use hyper::service::service_fn;
use hyper_util::rt::TokioIo;
use hyper::Method;
use puppy::Context;
use serde_json::{json, Value};
use tokio::net::TcpListener;
use tracing::{error, info, info_span, trace_span, Instrument as _};
use crate::sig::{Signer, Verdict, Verifier, VERIFIER_MOUNT};
use self::error::Message;
// A simple macro for returning an error message.
// Expands to an early `return Err(Message { .. })`, so it can only be used inside
// functions that return `Result<_, crate::api::error::Message>`. `$code` is the
// HTTP status code; the remaining arguments are `format!`-style.
macro_rules! fuck {
($code:literal: $($arg:tt)*) => {
return Err(crate::api::error::Message {
status: $code,
error: format!($($arg)*),
detail: None,
})
};
}
// Makes a response.
// Convenience wrapper around `Resp`: the arguments become named struct fields,
// everything not given falls back to `Resp::default()` (200, "application/json",
// empty body), and the result is converted into a `Response` via `From<Resp>`.
macro_rules! respond {
($($arg:tt)*) => {
crate::api::Resp {
$($arg)*,
.. crate::api::Resp::default()
}.into()
};
}
/// Parameters for a response
struct Resp<'s> {
// JSON payload, serialized on conversion to a `Response`; `None` means an empty body.
body: Option<&'s Value>,
// Value of the `content-type` header.
kind: &'s str,
// HTTP status code.
code: u16,
}
impl<'s> Default for Resp<'s> {
fn default() -> Self {
Resp {
body: None,
kind: "application/json",
code: 200,
}
}
}
impl<'a> From<Resp<'a>> for Response {
    /// Assemble an HTTP response from the given parameters. A `Some` body is
    /// serialized to JSON; `None` produces an empty body.
    fn from(params: Resp<'_>) -> Response {
        let payload = match params.body {
            Some(data) => Full::new(serde_json::to_vec(&data).unwrap().into()),
            None => Full::new(Bytes::default()),
        };
        Response::<()>::builder()
            .status(params.code)
            .header("content-type", params.kind)
            .body(payload)
            .unwrap()
    }
}
pub mod ap;
pub mod wf;
type Request = hyper::Request<hyper::body::Incoming>;
type Response<T = Full<Bytes>> = hyper::Response<T>;
/// Initialize the http server loop.
///
/// Binds a TCP listener on 127.0.0.1 at the port taken from the context's config,
/// then accepts connections forever (the only way out of the loop is a bind or
/// accept error). Every connection is served on its own tokio task with an
/// HTTP/1 connection handler; the context is cloned per connection and the
/// signature [`Verifier`] is shared through an `Arc`.
pub async fn start(context: Context) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let addr = SocketAddr::from(([127, 0, 0, 1], context.config().port));
let listener = TcpListener::bind(addr).await?;
// Built once from the config and shared by all connection tasks.
let verifier = Arc::new(Verifier::load(context.config()));
loop {
let (stream, _) = listener.accept().await?;
let io = TokioIo::new(stream);
let cx = context.clone();
let verifier = verifier.clone();
tokio::spawn(async move {
let service = service_fn(|req| {
// Attach a tracing span carrying "<METHOD> <URI>" and the client's
// user-agent, so every log line emitted by `handle` is attributed
// to its request.
let user_agent = req
.headers()
.get("user-agent")
.and_then(|h| h.to_str().ok());
let span = info_span!(
"request",
target = format!("{} {}", req.method().as_str(), req.uri().to_string()),
"user-agent" = user_agent,
);
handle(req, &verifier, cx.clone()).instrument(span)
});
if let Err(err) = http1::Builder::new().serve_connection(io, service).await {
error!("Error serving connection: {:?}", err);
}
});
}
}
// A parsed HTTP request for easy handling.
struct Req<'a> {
method: &'a Method,
// The fully-collected request body (loaded up front in `handle`).
body: Bytes,
// The content-types in the accept header
accept: Vec<&'a str>,
// URI bits
// Query-string parameters as (key, value) pairs.
params: Vec<(&'a str, &'a str)>,
// Non-empty path segments, in order.
path: Vec<&'a str>,
}
impl Req<'_> {
    /// Get the path segments (non-empty parts of the path string separated by the '/' character).
    fn path(&self) -> &[&str] {
        &self.path
    }
    /// Turn an HTTP request into a more simple form so we can process it more easily.
    ///
    /// Extracts the path segments, the query parameters and the list of content
    /// types in the `accept` header; the body is cloned out of the request
    /// (cheap: `Bytes` clones are reference-counted).
    fn simplify<'x>(r: &'x http::Request<Bytes>) -> Req<'x> {
        let path: Vec<&str> = r
            .uri()
            .path()
            .split('/')
            .filter(|s| !s.is_empty())
            .collect();
        let params: Vec<(&str, &str)> = r
            .uri()
            .query()
            .into_iter()
            .flat_map(|s| s.split('&'))
            .filter_map(|s| s.split_once('='))
            .collect();
        // The `accept` header is a *comma*-separated list of media ranges, with
        // optional whitespace around each item (RFC 9110 §12.5.1). Splitting on
        // spaces (as before) broke on `a/b,c/d` (one bogus item) and left
        // trailing commas on the items of `a/b, c/d`; split on ',' and trim
        // instead. Note: any `;q=…` weight parameters are left attached.
        let accept = r
            .headers()
            .get("accept")
            .and_then(|val| val.to_str().ok())
            .iter()
            .flat_map(|s| s.split(','))
            .map(|s| s.trim())
            .filter(|s| !s.is_empty())
            .collect();
        Req {
            method: r.method(),
            body: r.body().clone(),
            accept,
            params,
            path,
        }
    }
}
/// The request handler.
async fn handle(req: Request, verifier: &Verifier, cx: Context) -> Result<Response, Infallible> {
// We need to fetch the entire body of the request for signature validation, because that involves making
// a digest of the request body in some cases.
// TODO: defer loading the body until it is needed.
let request = {
let (req, body) = req.into_parts();
let Ok(body) = body.collect().await.map(|b| b.to_bytes()) else {
panic!("could not collect body!");
};
http::Request::from_parts(req, body)
};
// Simplified representation of a request, so we can pattern match on it more easily in the dispatchers.
let req = Req::simplify(&request);
// We'll use the path to pick where specifically to send the request.
// Check request signature at the door. Even if it isn't needed for a particular endpoint, failing fast
// with a clear error message will save anyone trying to get *their* signatures implementation a major
// headache.
let res = match verifier.verify(&request).await {
// If the request was signed and the signature was accepted, they can access the protected endpoints.
Verdict::Verified(sig) => dispatch_signed(cx, &verifier, &req, sig).await,
// Unsigned requests can see a smaller subset of endpoints, most notably the verification actor.
Verdict::Unsigned => dispatch_public(cx, &verifier, &req).await,
// If a signature was provided *but it turned out to be unverifiable*, show them the error message.
Verdict::Rejected { reason, signature_str } => Err(Message {
error: String::from("signature verification failed for request"),
status: 403,
detail: Some(json!({
"signature": signature_str,
"reason": reason,
})),
}),
};
// If one of the endpoints gave us an error message, we convert that into a response and then
// serve it to the client. In either case, we just serve a response.
let response = res.unwrap_or_else(|msg| {
info!("{}: {msg}", msg.status);
req.error(msg)
});
Ok(response)
}
// Shorthands so the dispatchers below can match on `req.method` directly.
const POST: &Method = &Method::POST;
const GET: &Method = &Method::GET;
/// Handle a signed and verified request.
///
/// This function is where all requests to a protected endpoint have to go through. If the request
/// was signed but does not target a protected endpoint, this function will fall back to the
/// [`dispatch_public`] handler.
///
/// `sig` identifies the actor whose key signed the request; it is forwarded to
/// the inbox handler so it can attribute the activity.
#[tracing::instrument(level = "DEBUG", target = "router", skip_all)]
async fn dispatch_signed(
cx: Context,
verifier: &Verifier,
req: &Req<'_>,
sig: Signer,
) -> Result<Response, Message> {
match (req.method, req.path()) {
// Viewing ActivityPub objects requires a signed request, i.e. "authorized fetch".
// The one exception for this is `/s/request-verifier`, which is where the request
// verification actor lives.
(GET, ["o", ulid]) => ap::serve_object(&cx, ulid),
// POSTs to an actor's inbox need to be signed to prevent impersonation.
(POST, ["o", ulid, "inbox"]) => ap::inbox(&cx, ulid, sig, &req.body).await,
// Try the resources for which no signature is required as well.
_ => dispatch_public(cx, verifier, req).await,
}
}
/// Dispatch `req` to an unprotected endpoint. If the requested path does not exist, the
/// function will return a 404 response. If the path *does* exist, but the signature is not
/// valid, they will also get a 404.
#[tracing::instrument(level = "DEBUG", target = "router", skip_all)]
async fn dispatch_public(
cx: Context,
verifier: &Verifier,
req: &Req<'_>,
) -> Result<Response, Message> {
match (req.method, req.path()) {
(GET, ["proxy"]) => ap::proxy(&cx, &req.params).await,
(GET, ["outbox"]) => ap::outbox(&cx, &req.params).await,
(GET, [".well-known", "webfinger"]) => wf::resolve(&cx, &req.params),
// TODO: nicer solution for this
(GET, VERIFIER_MOUNT) => Ok(ap::serve_verifier_actor(&verifier)),
_ => fuck!(404: "not found"),
}
}
mod error {
    //! Pre-baked error responses.
    use serde_json::{json, Value};
    use super::Response;
    /// An error message shown to an end user of the API.
    #[derive(Debug)]
    pub struct Message {
        /// The main error message.
        pub error: String,
        /// Only shown if the `accept` header included json.
        pub detail: Option<Value>,
        /// The status code for the response.
        pub status: u16,
    }
    impl std::fmt::Display for Message {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            self.error.fmt(f)
        }
    }
    impl super::Req<'_> {
        /// Generate an error response for the request.
        ///
        /// Clients that accept JSON get a structured `{ error, details }`
        /// document; everyone else gets the bare error text as plain text.
        pub fn error(&self, err: Message) -> Response {
            let resp = Response::<()>::builder().status(err.status);
            // If the accept header wants json, we will give them a nice structured error
            // message. Otherwise, we throw a short bit of text at them.
            if self.accepts_json() {
                let json = json!({
                    "error": err.error,
                    "details": err.detail,
                });
                let body = serde_json::to_vec_pretty(&json).unwrap();
                resp.header("content-type", "application/json")
                    .body(body.try_into().unwrap())
                    .unwrap()
            } else {
                resp.header("content-type", "text/plain")
                    .body(err.error.try_into().unwrap())
                    .unwrap()
            }
        }
        /// Check whether the requester wants json from us.
        ///
        /// Accept entries may carry media-type parameters (`application/activity+json; q=0.9`)
        /// or be a comma-separated list; both used to defeat the detection, so we split on
        /// commas and strip parameters before inspecting the type/subtype pair.
        pub fn accepts_json(&self) -> bool {
            fn is_json((kind, subtype): (&str, &str)) -> bool {
                kind == "application" && subtype.split('+').any(|p| p == "json")
            }
            self.accept
                .iter()
                // A single header value may list several media types.
                .flat_map(|s| s.split(','))
                // Drop parameters such as `;q=0.9` or `;profile=...`.
                .filter_map(|entry| entry.split(';').next())
                .filter_map(|mime| mime.trim().split_once('/'))
                .any(is_json)
        }
    }
}

119
bin/server/src/api/ap.rs Normal file
View file

@ -0,0 +1,119 @@
//! ActivityPub handlers.
use http_body_util::Full;
use hyper::body::Bytes;
use puppy::{
actor::{get_signing_key, Actor},
fetch::object::Activity,
get_local_ap_object, Context, Error, Key,
};
use serde_json::Value;
use tracing::{info, instrument};
use crate::sig::{Signer, Verifier};
use super::{error::Message, Response};
/// Proxy a request through the instance.
pub async fn proxy(cx: &Context, params: &[(&str, &str)]) -> Result<Response, Message> {
// Extract our query parameters.
let Some(user) = params.iter().find_map(|(k, v)| (*k == "user").then_some(v)) else {
fuck!(400: "expected `user` query param");
};
let Some(url) = params.iter().find_map(|(k, v)| (*k == "url").then_some(v)) else {
fuck!(400: "expected `url` query param");
};
// Look up the actor's key in the store (which is accessible through the puppy context).
let Ok(signing_key) = cx.run(|tx| {
let actor = Actor::by_username(&tx, user)?.unwrap();
get_signing_key(tx, actor).map_err(Error::from)
}) else {
fuck!(500: "failed to get signing key");
};
eprintln!("proxy: params: {params:?}");
// Proxy the request through our fetcher.
let resp = puppy::fetch::forward(&signing_key, url).await.unwrap();
eprintln!("proxy: status = {}", resp.status());
// Convert the http-types request to a hyper request.
Ok(resp.map(Bytes::from).map(Full::new).into())
}
pub async fn outbox(cx: &Context, params: &[(&str, &str)]) -> Result<Response, Message> {
// Extract our query parameters.
let Some(user) = params.iter().find_map(|(k, v)| (*k == "user").then_some(v)) else {
fuck!(400: "expected `user` query param");
};
let Some(content) = params
.iter()
.find_map(|(k, v)| (*k == "content").then_some(v))
else {
fuck!(400: "expected `url` query param");
};
let Ok(Some(actor)) = cx.run(|tx| Actor::by_username(&tx, user)) else {
fuck!(500: "failed actor by name {user}");
};
let post = puppy::post::create_local_post(&cx, actor.key, content.to_string()).unwrap();
puppy::post::federate_post(&cx, post).await.unwrap();
Ok(respond! {
code: 200
})
}
/// Handle POSTs to actor inboxes. Requires request signature.
#[instrument(skip_all)]
pub async fn inbox(
cx: &Context,
actor_id: &str,
sig: Signer,
body: &[u8],
) -> Result<Response, Message> {
let receiver = actor_id.parse::<Key>().unwrap();
let json: Value = serde_json::from_slice(body).unwrap();
let id = json["id"].to_string();
info! {
inbox = receiver.to_string(),
signature = sig.ap_id,
"processing object '{id}'",
};
match Activity::from_json(json) {
Ok(activity) => {
puppy::ingest(&cx, receiver, &activity).await.unwrap();
match puppy::interpret(&cx, activity) {
Ok(_) => Ok(respond!(code: 202)),
Err(err) => fuck!(400: "error interpreting activity: {err}"),
}
}
Err(err) => fuck!(400: "invalid payload: {err}"),
}
}
/// Serve an ActivityPub object as json-ld.
pub fn serve_object(cx: &Context, object_ulid: &str) -> Result<Response, Message> {
let Ok(parsed) = object_ulid.parse::<Key>() else {
fuck!(400: "improperly formatted ulid");
};
let result = cx.run(|tx| get_local_ap_object(&tx, parsed));
let Ok(object) = result else {
fuck!(404: "object does not exist");
};
Ok(respond! {
kind: "application/activity+json",
body: Some(&object.to_json_ld())
})
}
/// Serve the special actor used for signing requests.
pub fn serve_verifier_actor(verifier: &Verifier) -> Response {
    // Render the verifier's JSON-LD document and wrap it in an AP response.
    let json_ld = verifier.to_json_ld();
    respond! {
        kind: "application/activity+json",
        body: Some(&json_ld)
    }
}

72
bin/server/src/api/wf.rs Normal file
View file

@ -0,0 +1,72 @@
//! WebFinger endpoints and related stuff.
use puppy::{
data::{Id, Username},
Context,
};
use serde_json::{json, Value};
use derive_more::Display;
use super::{error::Message, Response};
const WF_CONTENT_TYPE: (&str, &str) = ("content-type", "application/jrd+json");
/// Respond to a webfinger request.
pub fn resolve(cx: &Context, params: &[(&str, &str)]) -> Result<Response, Message> {
match params.iter().find_map(get_handle) {
Some(handle) if cx.config().wf_domain == handle.instance => {
let username = Username(handle.username.to_string());
let Ok(Some(user)) = cx.store().lookup(username) else {
fuck!(404: "no user {}@{} exists", handle.username, handle.instance);
};
let Ok(Some(Id(id))) = cx.store().get_alias(user) else {
fuck!(500: "internal error");
};
let jrd = make_jrd(handle, &id);
Ok(respond! {
body: Some(&jrd),
kind: "application/jrd+json"
})
}
Some(_) | None => fuck!(400: "missing/invalid resource param"),
}
}
/// A webfinger-style `user@host` handle, as parsed from a `resource` query param.
#[derive(Clone, Copy, Display)]
#[display(fmt = "@{username}@{instance}")]
pub struct Handle<'x> {
    /// The local account name (the part before the `@`).
    username: &'x str,
    /// The domain the account lives on (the part after the `@`).
    instance: &'x str,
}
/// Parse the `resource` parameter into a [`Handle`].
///
/// Returns `None` for any other parameter, and for `resource` values that
/// don't look like `acct:[@]user@host`.
fn get_handle<'x>((k, v): &'x (&str, &str)) -> Option<Handle<'x>> {
    // Only the `resource` query parameter is relevant here.
    if *k != "resource" {
        return None;
    }
    // The "acct:" prefix needs to exist according to spec. Some implementations
    // additionally prefix the handle with `@`; it is fine whether or not it's
    // there, hence `trim_start_matches` rather than `strip_prefix`.
    let account = v.strip_prefix("acct:")?.trim_start_matches('@');
    // Split on the middle `@`, separating the username and instance bits.
    let (username, instance) = account.split_once('@')?;
    Some(Handle { username, instance })
}
/// Construct a "JSON resource descriptor".
///
/// `id` is the ActivityPub ID the `self` link should point at.
fn make_jrd(handle: Handle<'_>, id: &str) -> Value {
    let subject = format!("acct:{}@{}", handle.username, handle.instance);
    let self_link = json!({
        "rel": "self",
        "type": "application/activity+json",
        "href": id
    });
    json!({
        "subject": subject,
        "links": [self_link]
    })
}

View file

@ -1,8 +1,39 @@
use axum::{routing::get, Router};
//! The ActivityPuppy social media server.
//!
//! This crate contains the implementation of the ActivityPuppy's server binary. Also see the library,
//! [`puppy`], and the other two major components: [`store`] for persistence and [`fetch`] for the
//! federation implementation.
//!
//! [`store`]: puppy::store
//! [`fetch`]: puppy::fetch
#![feature(try_blocks, yeet_expr)]
use puppy::{config::Config, Context};
use tracing::Level;
use tracing_forest::ForestLayer;
use tracing_subscriber::{
filter::filter_fn, layer::SubscriberExt as _, util::SubscriberInitExt as _, Registry,
};
mod sig;
mod api;
/// Starts up the whole shebang.
#[tokio::main]
async fn main() {
    // NOTE(review): the span previously contained leftover pre-refactor axum
    // lines interleaved with the new body (diff residue); this is the clean
    // post-refactor version.
    // Tree-style span logging; drop reqwest's internal chatter and anything
    // more verbose than DEBUG.
    Registry::default()
        .with(filter_fn(|meta| !meta.target().starts_with("reqwest")))
        .with(filter_fn(|meta| *meta.level() < Level::DEBUG))
        .with(ForestLayer::default())
        .init();
    // TODO: load the config from a file or something.
    let config = Config {
        ap_domain: "test.piss-on.me".to_string(),
        wf_domain: "test.piss-on.me".to_string(),
        state_dir: ".state".to_string(),
        port: 1312,
    };
    let context = Context::load(config).unwrap();
    // Start the web server
    api::start(context).await.unwrap();
}

164
bin/server/src/sig.rs Normal file
View file

@ -0,0 +1,164 @@
//! Verification of HTTP signatures.
use http::Request;
use puppy::config::Config;
use puppy::fetch::{
signatures::{Private, Public, Signature, SigningKey, VerificationKey, Key},
FetchError,
};
use serde_json::{json, Value};
use tracing::{debug, info, trace};
/// Checks request signatures.
#[derive(Clone)]
pub struct Verifier {
    // ActivityPub ID (a URL) of the special request-verifier actor.
    actor_id: String,
    // ID of the actor's public key; outgoing key-fetch signatures reference this.
    key_id: String,
    // Private half of the verifier's keypair, used to sign key-fetch requests.
    private: Private,
    // Public half, embedded in the actor's JSON-LD representation.
    public: Public,
}
/// The URL path at which the verifier actor is served. Must stay in sync with
/// [`VERIFIER_MOUNT`], its segment-wise counterpart.
const VERIFIER_PATH: &str = "/s/request-verifier";
/// The path at which the request verification actor will present itself.
pub const VERIFIER_MOUNT: &[&str] = &["s", "request-verifier"];
/// A "verdict" about a signed request, passed by a [`Verifier`].
#[derive(Debug)]
pub enum Verdict {
    /// The signature checks out.
    Verified(Signer),
    /// The request does not contain a signature header. This may be intentional, or a client error.
    Unsigned,
    /// The signature failed to verify due to an error related to the signature itself.
    Rejected {
        /// The raw `signature` header, echoed back for debugging.
        signature_str: String,
        /// A human-readable explanation of why verification failed.
        reason: String,
    },
}
impl Verifier {
    /// Get the JSON-LD representation of the verifier actor.
    pub fn to_json_ld(&self) -> Value {
        json!({
            "@context": [
                "https://www.w3.org/ns/activitystreams",
                "https://w3id.org/security/v1",
            ],
            "id": self.actor_id,
            "name": "Public key fetcher",
            "publicKey": {
                "id": self.key_id,
                "owner": self.actor_id,
                "publicKeyPem": self.public.encode_pem()
            },
            "type": "Service",
        })
    }
    /// Load the server's verifier actor.
    ///
    /// Each server has one special actor for fetching public keys. Unlike all other objects,
    /// acquiring that actor's JSON-LD representation does not require a request signature.
    ///
    /// It doesn't have any data in the data store. Due to its exceptional nature, we just put
    /// the private key in the [`state_dir`][Config::state_dir]. The very first time you load
    /// the verifier, it generates the required private keys.
    pub fn load(cfg: &Config) -> Verifier {
        let Config { ap_domain, state_dir, .. } = cfg;
        let key_path = format!("{state_dir}/fetcher.pem");
        // Read the private key from the state directory, or generate a new one if it couldn't
        // be read.
        let private = Private::load(&key_path).unwrap_or_else(|| {
            let (private, _) = Private::gen();
            private.save(key_path);
            private
        });
        Verifier {
            actor_id: format!("https://{ap_domain}{VERIFIER_PATH}"),
            key_id: format!("https://{ap_domain}{VERIFIER_PATH}#sig-key"),
            public: private.get_public(),
            private,
        }
    }
    /// Does the HTTP signature verification process, and returns a "proof" of the signature in the form
    /// of the [`Signer`], which contains information about who signed a particular request.
    #[tracing::instrument(level = "DEBUG", skip_all)]
    pub async fn verify<B>(&self, req: &Request<B>) -> Verdict {
        // TODO: implement the whole verification thing as a middleware so we can intercept requests
        // like these, instead of coupling this tightly with the router.
        if req.uri().path() == VERIFIER_PATH {
            // HACK: Allow access to the request verifier actor without checking the signature.
            debug!("allowing request to verifier to pass without checking signature");
            return Verdict::Unsigned;
        }
        let Some(header) = req.headers().get("signature") else {
            debug!("request not signed");
            return Verdict::Unsigned;
        };
        // The header value is remote input, so a non-ascii byte must not crash the
        // worker. (This used to `expect`, handing unauthenticated peers a panic.)
        let signature_str = match header.to_str() {
            Ok(value) => value.to_string(),
            Err(_) => {
                let reason = "signature header is not valid ascii".to_string();
                info!(reason, "invalid signature");
                return Verdict::Rejected { signature_str: String::new(), reason };
            }
        };
        let sig = match Signature::derive(&req).map_err(|e| e.to_string()) {
            Err(reason) => {
                info!(reason, signature_str, "invalid signature");
                return Verdict::Rejected { signature_str, reason };
            }
            Ok(signature) => {
                trace!("signature parsed");
                signature
            }
        };
        // Fetch the signer's public key using our private key.
        let fetch_result = self.fetch_public_key(sig.key_id()).await;
        let public_key = match fetch_result {
            Ok(public_key) => public_key,
            Err(err) => {
                info!(reason = err.to_string(), "failed to fetch pubkey");
                return Verdict::Rejected {
                    reason: format!("could not fetch public key: {err}"),
                    signature_str,
                };
            }
        };
        // TODO: verify digest also
        if let Err(error) = public_key.verify(&sig) {
            info!(reason = error, "rejected");
            Verdict::Rejected { signature_str, reason: error }
        } else {
            debug!(key_owner = public_key.owner, "accepted");
            Verdict::Verified(Signer { ap_id: public_key.owner })
        }
    }
    /// Send a request to get the public key from an ID. This request will be signed with the
    /// verifier actor's public key.
    #[tracing::instrument(level = "TRACE", skip_all)]
    async fn fetch_public_key(&self, uri: &str) -> Result<VerificationKey, FetchError> {
        let json = puppy::fetch::resolve(&self.signing_key(), uri).await?;
        let Some(key) = Key::from_json(json) else {
            return Err(FetchError::BadJson(
                "invalid public key structure".to_string(),
            ));
        };
        Ok(key.upgrade())
    }
    /// Get the key that the verification actor signs requests with.
    fn signing_key(&self) -> SigningKey {
        Key {
            id: self.key_id.clone(),
            owner: self.actor_id.clone(),
            inner: self.private.clone(),
        }
    }
}
/// An ActivityPub actor that signed a request.
///
/// Produced by [`Verifier::verify`] as proof that the signature checked out.
#[derive(Debug)]
pub struct Signer {
    /// The ActivityPub ID (a URL) of the signer of the request.
    pub ap_id: String,
}

View file

@ -6,6 +6,15 @@ edition = "2021"
path = "src/lib.rs"
[dependencies]
reqwest = "*"
sigh = "*"
serde_json = "*"
reqwest = { version = "*", features = ["json"] }
serde_json = "*"
derive_more = "*"
http = "*"
chrono = "*"
base64 = "*"
rsa = { version = "*", features = ["sha2"] }
spki = "*"
http-body-util = "*"
rand = "*"
pem = "*"
tracing = "*"

207
lib/fetch/src/client.rs Normal file
View file

@ -0,0 +1,207 @@
use chrono::Utc;
use http::Method;
use http_body_util::BodyExt as _;
use reqwest::Body;
use serde_json::{error, Value};
use derive_more::Display;
use tracing::{debug, error, info, instrument, warn};
use crate::{
object::Activity,
signatures::{SigningKey, Options},
FetchError,
};
/// The name of the server software, used for generating the user agent string.
///
/// See also [`VERSION`].
pub const SOFTWARE: &str = "ActivityPuppy";
/// The current version of the server software, which is incorporated into the user agent string
/// for all outbound requests made by ActivityPuppy.
pub const VERSION: &str = "0.0.1-dev";
/// Content-type/accept header for ActivityPub requests.
///
/// Sent as `content-type` on deliveries and as `accept` on resolutions.
pub const ACTIVITYPUB_TYPE: &str = "application/activity+json";
/// A client for sending ActivityPub and WebFinger requests with.
///
/// Wraps a [`reqwest::Client`] (which holds the connection pool), so cloning
/// this is cheap and reusing one instance is preferred over re-creating it.
#[derive(Clone)]
pub struct Client {
    // The underlying HTTP client that actually executes requests.
    inner: reqwest::Client,
}
impl Client {
/// Constructs a new federation client.
pub fn new() -> Client {
Client { inner: reqwest::Client::new() }
}
/// Deliver an [`Activity`] to a particular `inbox`.
///
/// Note that in order for the request to be considered valid by most implementations, `key.owner`
/// must equal `payload.actor`.
#[instrument(target = "fetch.delivery", skip_all, fields(activity = payload.id, url = inbox, key = key.id))]
pub async fn deliver(&self, key: &SigningKey, payload: &Activity, inbox: &str) {
let system = Subsystem::Delivery;
let body = serde_json::to_string(&payload.to_json_ld()).unwrap();
let mut req = system
.new_request(inbox)
.unwrap()
.method(Method::POST)
.header("content-type", ACTIVITYPUB_TYPE)
.body(body)
.unwrap();
key.sign_with_digest(Options::LEGACY, &mut req)
.map_err(FetchError::Sig)
.expect("signature generation to work")
.commit(&mut req);
let request = req.map(Body::from).try_into().unwrap();
self.inner.execute(request).await.unwrap();
}
/// A high-level function to resolve a single ActivityPub ID using a signed request.
#[instrument(target = "fetch.resolver", skip_all, fields(url = url, key = key.id))]
pub async fn resolve(&self, key: &SigningKey, url: &str) -> Result<Value, FetchError> {
let system = Subsystem::Resolver;
let mut req = system
.new_request(url)?
.header("accept", ACTIVITYPUB_TYPE)
.body(())
.unwrap();
match key.sign(Options::LEGACY, &req) {
Ok(signature) => signature.commit(&mut req),
Err(error) => {
// This shouldn't happen during normal operation
warn!("failed to sign request: {error}");
return Err(FetchError::Sig(error));
}
}
let request = req.map(|()| Body::default()).try_into()?;
let response = self.inner.execute(request).await?;
if response.status().is_success() {
debug!("resolution successful");
response.json().await.map_err(From::from)
} else {
let status = response.status().as_u16();
let body = response.text().await?;
debug!(status, "resolution failed: {body}");
Err(FetchError::NotSuccess {
url: url.to_string(),
status,
body,
})
}
}
/// Forwards a request and returns the raw response, so that it can be analyzed for debugging.
///
/// It exists solely as a debugging tool!
#[instrument(target = "fetch.devproxy", skip_all, fields(url, key = key.id))]
pub async fn proxy(
&self,
key: &SigningKey,
url: &str,
) -> Result<http::Response<String>, FetchError> {
let system = Subsystem::DevProxy;
let mut req = system
.new_request(url)?
.header("accept", ACTIVITYPUB_TYPE)
.body(())
.unwrap();
key.sign(Options::LEGACY, &req)
.expect("signing error")
.commit(&mut req);
let resp = self
.inner
.execute(req.map(|_| Body::default()).try_into().unwrap())
.await?;
let http_resp: http::Response<reqwest::Body> = resp.into();
let (res, body) = http_resp.into_parts();
let body = body.collect().await.unwrap().to_bytes();
let http_resp =
http::Response::from_parts(res, String::from_utf8_lossy(body.as_ref()).into_owned());
Ok(http_resp)
}
}
/// Identifies a specific subsystem that makes an outgoing request.
///
/// This allows us to precisely track each outgoing request, as well as generate a meaningful
/// user-agent header. It is also used to generate a "base request".
///
/// The `Display` impl mirrors [`Subsystem::as_str`].
#[derive(Clone, Copy, Display)]
enum Subsystem {
    /// The subsystem that dereferences ActivityPub URLs to JSON values.
    ///
    /// In addition, the resolver is used for resolving webfinger handles to ActivityPub actors.
    #[display(fmt = "resolver")]
    Resolver,
    /// The subsystem responsible for delivering activities to inboxes.
    #[display(fmt = "delivery")]
    Delivery,
    /// For testing the resolver and signatures.
    #[display(fmt = "devproxy")]
    DevProxy,
}
impl Subsystem {
    /// Get the user agent string for the subsystem.
    fn user_agent(&self) -> String {
        format!("{SOFTWARE}/{VERSION} [{}]", self.as_str())
    }
    /// Get a str representation of this subsystem.
    #[inline]
    const fn as_str(self) -> &'static str {
        match self {
            Subsystem::Resolver => "resolver",
            Subsystem::Delivery => "delivery",
            Subsystem::DevProxy => "devproxy",
        }
    }
    /// Construct a new request for this subsystem.
    ///
    /// This will set the following headers, which are common to all requests made by the fetch
    /// system:
    ///
    /// - `user-agent`, which depends on the particular subsystem in use
    /// - `date`, which is generated from the current time
    /// - `host`, which is derived from `target` (including any explicit port)
    ///
    /// This function returns an error if the `target` is not a valid URI or has
    /// no host.
    fn new_request(self, target: &str) -> Result<http::request::Builder, FetchError> {
        // Format our time like "Sun, 06 Nov 1994 08:49:37 GMT" (the IMF-fixdate
        // form required for HTTP `date` headers).
        const RFC_822: &str = "%a, %d %b %Y %H:%M:%S GMT";
        let date = Utc::now().format(RFC_822).to_string();
        let uri = target
            .parse::<http::Uri>()
            .map_err(|e| FetchError::InvalidURI {
                url: target.to_string(),
                error: e.to_string(),
            })?;
        let Some(host) = uri.host() else {
            // SECURITY: Refuse to resolve URLs to local resources using local keys.
            error!(target: "security", "refusing to resolve a relative URL: {target}");
            return Err(FetchError::InvalidURI {
                url: target.to_string(),
                error: "Relative URI".to_string(),
            });
        };
        // `Uri::host()` never includes the port, but the `host` header must carry
        // it when the URL specifies one, both for routing and because it can be
        // covered by the request signature.
        let host_header = match uri.port_u16() {
            Some(port) => format!("{host}:{port}"),
            None => host.to_string(),
        };
        let req = http::Request::builder()
            .uri(target)
            .header("user-agent", self.user_agent())
            .header("date", date)
            .header("host", host_header);
        Ok(req)
    }
}

View file

@ -0,0 +1,88 @@
#![feature(iter_intersperse, yeet_expr, iterator_try_collect, try_blocks)]
use std::error::Error;
use derive_more::Display;
use serde_json::Value;
use object::Activity;
use signatures::SigningKey;
pub use http;
pub mod signatures;
pub mod object;
pub use client::Client;
mod client;
/// Deliver an activity to an inbox.
///
/// Constructs a throwaway [`Client`]; prefer a long-lived client for bulk
/// deliveries.
pub async fn deliver(key: &SigningKey, activity: &Activity, inbox: &str) {
    let client = Client::new();
    client.deliver(key, activity, inbox).await
}
/// Resolve an ActivityPub ID to a JSON value.
///
/// Note: This creates a new [`Client`] every time you call it, so if you're gonna call it more than just
/// a couple of times, create a `Client` and call its inherent methods instead.
pub async fn resolve(key: &SigningKey, target: &str) -> Result<Value, FetchError> {
    let client = Client::new();
    client.resolve(key, target).await
}
/// Proxy a GET request through this server.
///
/// Should only be used for manually testing stuff.
pub async fn forward(key: &SigningKey, target: &str) -> Result<http::Response<String>, FetchError> {
    let client = Client::new();
    client.proxy(key, target).await
}
/// Errors that may occur during the execution of HTTP request routines.
#[derive(Debug, Display)]
pub enum FetchError {
    /// Some error internal to the request sending process occurred.
    #[display(fmt = "internal error: {error} (url={url:?})")]
    Internal { url: Option<String>, error: String },
    /// The URI was not valid and therefore the request could not be made.
    #[display(fmt = "invalid uri: {error} (url={url})")]
    InvalidURI { url: String, error: String },
    /// A non-success status code was encountered.
    #[display(fmt = "non-2xx status code: {status} (url={url})")]
    NotSuccess {
        status: u16,
        url: String,
        body: String,
    },
    /// The JSON body of a response could not be loaded. The string inside is the error
    /// message produced by the JSON deserializer.
    #[display(fmt = "deserialization error: {}", self.0)]
    BadJson(String),
    /// A JSON-LD document could not be deserialized because it does not conform to our expectations.
    #[display(fmt = "parsing error: {}", self.0)]
    BadObject(String),
    /// An error that occurred while generating a signature for a request.
    #[display(fmt = "signing error: {}", self.0)]
    Sig(String),
}
impl FetchError {
    /// Check whether the error is due to a 403 FORBIDDEN response status code.
    pub fn is_unauthorized(&self) -> bool {
        matches!(self, FetchError::NotSuccess { status: 403, .. })
    }
    /// Check whether the error is due to a 404 NOT FOUND response status code.
    pub fn is_not_found(&self) -> bool {
        matches!(self, FetchError::NotSuccess { status: 404, .. })
    }
}
#[doc(hidden)]
impl From<reqwest::Error> for FetchError {
fn from(error: reqwest::Error) -> FetchError {
match error.source().and_then(|e| e.downcast_ref()) {
Some(e @ serde_json::Error { .. }) => FetchError::BadJson(e.to_string()),
None => {
let url = error.url().map(|u| u.to_string());
FetchError::Internal { url, error: error.to_string() }
}
}
}
}

222
lib/fetch/src/object.rs Normal file
View file

@ -0,0 +1,222 @@
//! ActivityPub vocabulary as interpreted by ActivityPuppy.
use serde_json::{json, Value};
use derive_more::From;
pub use crate::signatures::Key as PublicKey;
/// An ActivityPub activity: an `actor` performing an action of type `kind` on an `object`.
///
/// `T` is the type used for the `type` property; it defaults to a free-form [`String`].
#[derive(Debug)]
pub struct Activity<T = String> {
    /// The ActivityPub ID (a URL) of the activity itself.
    pub id: String,
    /// ID of the actor performing the activity.
    pub actor: String,
    /// The object acted upon (boxed, since an `Object` may itself contain an activity).
    pub object: Box<Object>,
    /// The activity's `type` property.
    pub kind: T,
}
impl<K> Activity<K> {
    /// Render the activity as a JSON-LD document.
    pub fn to_json_ld(&self) -> Value
    where
        K: ToString,
    {
        // Serialize the nested object and the type tag up front.
        let object = self.object.to_json_ld();
        let kind = self.kind.to_string();
        json!({
            "@context": [
                "https://www.w3.org/ns/activitystreams",
                { "Bite": "https://ns.mia.jetzt/as#Bite" },
            ],
            "id": self.id,
            "actor": self.actor,
            "object": object,
            "type": kind,
        })
    }
}
impl Activity {
    /// Parse an [`Activity`] from its JSON representation.
    ///
    /// Returns a human-readable message if a required property is missing or
    /// has the wrong shape.
    pub fn from_json(mut json: Value) -> Result<Activity, String> {
        let (id, actor, kind) = {
            let Some(map) = json.as_object() else {
                return Err("expected an object".to_string());
            };
            // All three top-level properties must be plain strings.
            let get_str = |key: &str| map.get(key).and_then(Value::as_str).map(str::to_owned);
            let id = get_str("id").ok_or_else(|| "missing `id` property".to_string())?;
            let actor = get_str("actor")
                .ok_or_else(|| format!("missing `actor` property for activity {id}"))?;
            let kind = get_str("type")
                .ok_or_else(|| format!("missing `type` property for activity {id}"))?;
            (id, actor, kind)
        };
        // TODO: make this behave gracefully when we only get an ID.
        let object = json
            .get_mut("object")
            .map(Value::take)
            .map(Object::from_json)
            .transpose()?
            .map(Box::new)
            .ok_or_else(|| format!("missing or invalid `object` property for activity {id}"))?;
        Ok(Activity { id, actor, object, kind })
    }
}
/// An actor is an entity capable of producing Takes (i.e. authoring activities).
#[derive(Debug)]
pub struct Actor {
    /// The URL pointing to this object.
    pub id: String,
    /// Where others should send activities.
    pub inbox: String,
    /// Note: this maps to the `preferredUsername` property.
    pub account_name: String,
    /// Note: this maps to the `name` property.
    pub display_name: Option<String>,
    /// Public counterpart to the signing key used to sign activities
    /// generated by the actor.
    pub public_key: PublicKey,
}
impl Actor {
pub fn to_json_ld(&self) -> Value {
json!({
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
],
"id": self.id.to_string(),
"inbox": self.inbox.to_string(),
"outbox": self.inbox.to_string().replace("inbox", "outbox"),
"preferredUsername": self.account_name,
"name": self.display_name,
"type": "Person",
"publicKey": {
"id": self.public_key.id,
"publicKeyPem": self.public_key.inner,
"owner": self.id.to_string(),
}
})
}
pub fn from_json(json: Value) -> Result<Actor, String> {
let Value::Object(map) = json else {
do yeet format!("expected json object")
};
Ok(Actor {
id: map
.get("id")
.ok_or("id is required")?
.as_str()
.ok_or("id must be a str")?
.to_string(),
inbox: map
.get("inbox")
.ok_or("inbox is required")?
.as_str()
.ok_or("inbox must be a str")?
.to_string(),
account_name: map
.get("preferredUsername")
.ok_or("preferredUsername is required")?
.as_str()
.ok_or("preferredUsername must be a str")?
.to_string(),
display_name: map.get("name").and_then(|v| v.as_str()).map(str::to_owned),
public_key: map
.get("publicKey")
.cloned()
.and_then(PublicKey::from_json)
.ok_or("publicKey property could not be parsed")?,
})
}
}
/// Any ActivityPub object puppy understands, or a bare ID referencing one.
#[derive(From, Debug)]
pub enum Object {
    /// A reference to an object by ID only (a bare URL string in JSON).
    #[from(ignore)]
    Id {
        id: String,
    },
    /// A (nested) activity.
    Activity(Activity),
    /// An actor (`Person`, `Service`, ...).
    Actor(Actor),
    /// Any other object type is treated as a generic note-like object.
    Note(Note),
}
impl Object {
    /// Returns the ActivityPub ID of the object, whichever variant it is.
    pub fn id(&self) -> &str {
        match self {
            Object::Id { id } => id,
            Object::Activity(a) => &a.id,
            Object::Actor(a) => &a.id,
            Object::Note(n) => &n.id,
        }
    }
    /// Parse an object from JSON: either a bare ID string, or an embedded
    /// object whose `type` decides how it is interpreted. Unrecognized types
    /// become [`Note`]s.
    pub fn from_json(json: Value) -> Result<Object, String> {
        if let Value::String(id) = json {
            return Ok(Object::Id { id });
        }
        let Value::Object(ref map) = json else {
            return Err(format!("expected a json object or an id, got {json:#?}"));
        };
        match map.get("type").and_then(Value::as_str) {
            Some("System" | "Application" | "Person" | "Service") => {
                Actor::from_json(json).map(Object::Actor)
            }
            Some("Create" | "Follow" | "Accept" | "Reject" | "Bite") => {
                Activity::from_json(json).map(Object::Activity)
            }
            Some(kind) => {
                // Required string properties, with the original error messages.
                let required = |key: &str| -> Result<String, String> {
                    map.get(key)
                        .ok_or_else(|| format!("{key} is required"))?
                        .as_str()
                        .ok_or_else(|| format!("{key} must be a str"))
                        .map(str::to_owned)
                };
                let optional =
                    |key: &str| map.get(key).and_then(|v| v.as_str()).map(str::to_owned);
                Ok(Object::Note(Note {
                    id: required("id")?,
                    kind: kind.to_string(),
                    author: required("attributedTo")?,
                    content: optional("content"),
                    summary: optional("summary"),
                }))
            }
            None => Err("could not determine type of object".to_string()),
        }
    }
    /// Render the object as JSON-LD.
    pub fn to_json_ld(&self) -> Value {
        match self {
            Object::Id { id } => json!(id),
            Object::Activity(a) => a.to_json_ld(),
            Object::Actor(a) => a.to_json_ld(),
            Object::Note(note) => json!({
                "to": [
                    "https://www.w3.org/ns/activitystreams#Public",
                ],
                "id": note.id,
                "type": note.kind,
                "attributedTo": note.author,
                "content": note.content,
                "summary": note.summary,
            }),
        }
    }
}
/// A generic note-like object: anything whose `type` we don't otherwise recognize.
#[derive(Debug)]
pub struct Note {
    /// The ActivityPub ID (a URL) of the note.
    pub id: String,
    /// ID of the actor the note is `attributedTo`.
    pub author: String,
    /// The body of the note, if any.
    pub content: Option<String>,
    /// The summary / content-warning line, if any.
    pub summary: Option<String>,
    /// The object's original `type` property.
    pub kind: String,
}

735
lib/fetch/src/signatures.rs Normal file
View file

@ -0,0 +1,735 @@
//! Containment zone for the funny math that doesn't make much sense to puppy.
//!
//! This module provides ActivityPuppy's HTTP signatures implementation. The state of HTTP signatures implementations
//! is, to put it mildly, *een fucking kutzooi*. For historical reasons, no one implements it *exactly* right (much
//! like URI parsers). This implementation aims to be as broadly compatible as possible.
//!
//! The only non-deprecated [`Algorithm`] is [`"hs2019"`][HS2019], but not everyone implements it, because the initial
//! round of implementations of the spec were based on a draft, and [`"rsa-sha256"`][RSA_SHA256] is kinda the de facto
//! standard.
//!
//! # Behavior
//!
//! By default, puppy will sign with `algorithm="hs2019"` (using `(created)` and `(expires)` pseudo-headers), and retry
//! in legacy mode (using `algorithm="rsa-sha256"` with `date` header) if the signature gets rejected.
//!
//! Currently, `"hs2019"` is treated as equivalent to `"rsa-sha256"` for verification purposes. Support for elliptic
//! curve keys is planned, but not a priority.
//!
//! # Links
//!
//! More information about http signatures:
//!
//! - <https://swicg.github.io/activitypub-http-signature>
//! - <https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures>
use std::path::Path;
use chrono::{DateTime, TimeDelta, Utc};
use http::{HeaderValue, Request};
use rsa::{
pkcs8::{
DecodePrivateKey, DecodePublicKey, EncodePrivateKey as _, EncodePublicKey as _, LineEnding,
},
sha2::Sha256,
signature::{SignatureEncoding as _, Signer as _, Verifier as _},
RsaPrivateKey,
};
use serde_json::{Map, Value};
use self::new::{decode, encode, sha256, IR};
/// Size of the RSA private keys puppy generates, in bits (2048-bit modulus).
const KEY_SIZE: usize = 2048;
/// A key that can be used to verify a request signature.
pub type VerificationKey = Key<Public>;
/// A key that can be used to sign a request.
pub type SigningKey = Key<Private>;
/// A key used for authorized fetch.
///
/// It comes in several flavors:
///
/// - `Key` (`K` = [`String`]): PEM-encoded, can be turned into a JSON object.
/// - [`VerificationKey`] (`K` = [`Public`]): used as an input in the request signature validation process.
/// - [`SigningKey`] (`K` = [`Private`]): used as an input in the generation of a signed request.
#[derive(Debug)]
pub struct Key<K = String> {
    /// The `"id"` property of the public key, which should equal the `keyId` part of a signature.
    pub id: String,
    /// The `"owner"` property: the ID of the actor this key belongs to.
    pub owner: String,
    /// Maps to the `"publicKeyPem"` property of an actor's `"publicKey"` when (de)serializing, and when the
    /// key is used for doing signatures.
    pub inner: K,
}
impl Key {
    /// Tries to find the PEM-encoded public key from the result of fetching a key id.
    pub fn from_json(json: Value) -> Option<Key> {
        // First, try to read the value as a bare key object.
        json.as_object().and_then(Key::from_map).or_else(|| {
            // Because of how mastodon deals with pubkey resolution, most implementations serve the whole
            // actor object instead of just the key, so fall back to the `publicKey` property embedded in it.
            json.get("publicKey")?.as_object().and_then(Key::from_map)
        })
    }
    /// Try to interpret the given map as a public key.
    ///
    /// Requires the `id`, `owner` and `publicKeyPem` properties to all be strings.
    fn from_map(map: &Map<String, Value>) -> Option<Key> {
        Some(Key {
            id: map.get("id")?.as_str().map(str::to_owned)?,
            owner: map.get("owner")?.as_str().map(str::to_owned)?,
            inner: map.get("publicKeyPem")?.as_str().map(str::to_owned)?,
        })
    }
    /// "Upgrade" a pem-encoded public key to a key that can actually be used for requests.
    ///
    /// The inverse of this is [`Key::serialize`], which turns `inner` back into a string.
    ///
    /// [`Key::serialize`]: Key::<Public>::serialize
    pub fn upgrade(self) -> Key<Public> {
        let inner = Public::decode_pem(&self.inner);
        Key {
            id: self.id,
            owner: self.owner,
            inner,
        }
    }
}
/// A key that can be used to generate signatures.
///
/// Wraps an [`rsa::RsaPrivateKey`]; see [`Private::gen`] for creating a fresh one.
#[derive(Clone)]
pub struct Private(rsa::RsaPrivateKey);
impl Private {
    /// Generate a new keypair of [`KEY_SIZE`] bits.
    pub fn gen() -> (Private, Public) {
        let mut rng = rand::thread_rng();
        let private = RsaPrivateKey::new(&mut rng, KEY_SIZE).unwrap();
        let public = private.to_public_key();
        (Private(private), Public(public))
    }
    /// Get the public counterpart to this key.
    pub fn get_public(&self) -> Public {
        Public(self.0.to_public_key())
    }
    /// Load a private key from a PKCS#8 PEM file on disk.
    ///
    /// Returns `None` if the file can't be read or doesn't contain a valid key.
    pub fn load(path: impl AsRef<Path>) -> Option<Private> {
        // `DecodePrivateKey` is imported at the top of the file; the previous
        // function-local `use` was redundant.
        DecodePrivateKey::read_pkcs8_pem_file(path.as_ref())
            .map(Private)
            .ok()
    }
    /// Store the private key at `path`.
    ///
    /// # Panics
    ///
    /// Panics if the file cannot be written.
    pub fn save(&self, path: impl AsRef<Path>) {
        // `EncodePrivateKey as _` at the top of the file provides this method.
        self.0
            .write_pkcs8_pem_file(path, LineEnding::default())
            .expect("writing a private key to a file should not fail")
    }
    /// PEM-encode the key PKCS#8 style.
    pub fn encode_pem(&self) -> String {
        self.0
            .to_pkcs8_pem(LineEnding::default())
            .unwrap()
            .to_string()
    }
    /// Decode the key from a PKCS#8 PEM-encoded string.
    ///
    /// # Panics
    ///
    /// Panics if `pkcs8_pem` is not a valid PKCS#8 PEM-encoded private key.
    pub fn decode_pem(pkcs8_pem: &str) -> Private {
        DecodePrivateKey::from_pkcs8_pem(pkcs8_pem)
            .map(Private)
            .unwrap()
    }
}
/// A key that can be used to verify signatures.
///
/// Wraps an [`rsa::RsaPublicKey`].
#[derive(Clone)]
pub struct Public(rsa::RsaPublicKey);
impl Public {
    /// PEM-encode the public key in accordance with PKCS#8.
    pub fn encode_pem(&self) -> String {
        let pem = self.0.to_public_key_pem(LineEnding::default()).unwrap();
        pem.to_string()
    }
    /// Decode a PKCS#8 PEM-encoded public key from a string.
    ///
    /// Parses the PEM envelope first, then hands the raw DER contents to the
    /// RSA decoder.
    pub fn decode_pem(pkcs8_pem: &str) -> Public {
        let doc = pem::parse(pkcs8_pem).unwrap();
        let key = <rsa::RsaPublicKey as DecodePublicKey>::from_public_key_der(doc.contents());
        Public(key.unwrap())
    }
}
impl std::fmt::Debug for Public {
    /// Renders the PEM encoding through `String`'s `Debug` (quoted, with escapes).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.encode_pem(), f)
    }
}
impl SigningKey {
    /// Create a signature for `req` using the given options.
    pub fn sign<T>(&self, opt: Options, req: &Request<T>) -> Result<Signature, String> {
        IR::partial(req, opt, |ir| ir.signed(self)?.to_signature())
    }
    /// Create a signature for `req` using the given algorithm, and calculate and attach the `digest` header to
    /// the request (if it doesn't already have one).
    ///
    /// This is required by most implementations when POSTing to an inbox.
    pub fn sign_with_digest<T>(
        &self,
        opt: Options,
        req: &mut Request<T>,
    ) -> Result<Signature, String>
    where
        T: AsRef<[u8]>,
    {
        // Only compute and attach a digest when the caller hasn't provided one.
        // NOTE(review): the algorithm prefix is lowercase "sha-256"; many fedi
        // implementations emit "SHA-256" — confirm peers accept the lowercase form.
        if !req.headers().contains_key("digest") {
            let digest = format!("sha-256={}", encode(sha256(req.body())));
            req.headers_mut().insert("digest", digest.try_into().unwrap());
        }
        self.sign(opt, req)
    }
}
impl VerificationKey {
    /// Test the signature against three requirements:
    ///
    /// 1. The signature must not be expired.
    /// 2. The signature's `keyId` must be the same as `self`'s.
    /// 3. The `signed_str` must have been signed by the private counterpart of `self`.
    pub fn verify(&self, sig: &Signature) -> Result<(), String> {
        // Requirement 1: the current time must fall inside the validity window.
        if sig.is_expired_at(Utc::now()) {
            do yeet format!("Signature is expired: (deadline was {})", sig.expires());
        }
        // Requirement 2: the signature must claim to be made with this very key.
        let key_id = sig.key_id();
        if key_id != self.id {
            do yeet format!(
                "Mismatched key id; signature's key id is '{key_id}', while presented key has '{}'",
                self.id
            );
        }
        // Requirement 3: the cryptographic check itself.
        if !sig.was_signed_by(&self.inner)? {
            do yeet "Signature was not generated by the presented key's private counterpart";
        }
        Ok(())
    }
    /// Encode a verification key so that it can be presented in a json.
    pub fn serialize(self) -> Key {
        Key {
            inner: self.inner.encode_pem(),
            id: self.id,
            owner: self.owner,
        }
    }
}
/// The algorithm to sign with.
///
/// Your two options are:
///
/// - [`hs2019`][HS2019], the *correct* option
/// - [`"rsa-sha256"`][RSA_SHA256], the most compatible option
///
/// The wrapped string is the algorithm's wire name as it appears in the
/// `algorithm` field of the `signature` header.
#[derive(PartialEq, Debug, Clone, Copy)]
pub struct Algorithm(&'static str);
/// `hs2019`, the only non-deprecated HTTP signatures algorithm.
pub const HS2019: Algorithm = Algorithm("hs2019");
/// The HTTP signatures algorithm everyone uses, `rsa-sha256`.
pub const RSA_SHA256: Algorithm = Algorithm("rsa-sha256");
/// A signature derived from an [`http::Request`].
#[derive(Debug)]
pub struct Signature {
    // The `keyId` field of the signature header.
    key_id: String,
    // Ordered (pseudo)header name/value pairs that make up the signing string.
    components: Vec<(String, String)>,
    // `(created, expires)`: the validity window of the signature.
    time_range: (DateTime<Utc>, DateTime<Utc>),
    // The `(request-target)` string (lowercased method + path).
    target_str: String,
    // The fully formatted signing string that was (or is to be) signed.
    signed_str: String,
    // The base64-encoded signature bytes.
    signature: String,
    // The algorithm the signature claims to be made with.
    algorithm: Algorithm,
}
impl Signature {
    /// Attempt to extract a signature from a request's `signature` header.
    pub fn derive<T>(req: &Request<T>) -> Result<Signature, String> {
        new::with_ir(req, Options::MODERN, |ir| ir.to_signature())
    }
    /// Obtain the key id for the signature.
    pub fn key_id(&self) -> &str {
        self.key_id.as_str()
    }
    /// Get the time the signature was created. This information is extracted from the `(created)`
    /// pseudo-header if it is defined, and the `date` http header otherwise.
    pub fn created(&self) -> DateTime<Utc> {
        let (created, _) = self.time_range;
        created
    }
    /// If specified, get the `(expires)` header, otherwise get the creation time + the configured grace window.
    pub fn expires(&self) -> DateTime<Utc> {
        let (_, expires) = self.time_range;
        expires
    }
    /// Retrieve the algorithm used for the signature.
    pub fn algorithm(&self) -> Algorithm {
        self.algorithm
    }
    /// Attach `self` to `req` as the `signature` header.
    pub fn commit<T>(self, req: &mut Request<T>) {
        let header = self.make_header();
        req.headers_mut().insert("signature", header);
    }
    /// Determine whether `self` was signed by the private counterpart of `key`.
    pub fn was_signed_by(&self, key: &Public) -> Result<bool, String> {
        use rsa::pkcs1v15::VerifyingKey;
        let Public(inner) = key.clone();
        let verifier = VerifyingKey::<Sha256>::new(inner);
        // The signature is stored base64-encoded; undo that first.
        let raw_buf = decode(&self.signature)?;
        let Ok(sig) = rsa::pkcs1v15::Signature::try_from(raw_buf.as_slice()) else {
            do yeet "Failed to construct signature from decoded signature";
        };
        verifier
            .verify(self.signed_str.as_bytes(), &sig)
            .map_err(|s| format!("{s:?}"))?;
        Ok(true)
    }
    /// Check whether the given `time` falls outside the window for valid signatures.
    pub fn is_expired_at(&self, time: DateTime<Utc>) -> bool {
        // Valid iff created <= time < expires.
        time < self.created() || self.expires() <= time
    }
    /// Turn the signature into an HTTP header value.
    fn make_header(self) -> HeaderValue {
        let header = IR::<&str>::from_signature(&self).to_header();
        header.try_into().unwrap()
    }
}
/// `rsa-sha256` is created using an rsa key and a sha256 hash.
///
/// Infallible in practice; the `Result` return keeps the call sites uniform.
fn sign_rsa_sha256(signing_string: &str, key: &Private) -> Result<Vec<u8>, String> {
    use rsa::pkcs1v15::SigningKey;
    let Private(rsa) = key.clone();
    let signer = SigningKey::<Sha256>::new(rsa);
    Ok(signer.sign(signing_string.as_bytes()).to_vec())
}
/// Maximum time difference between the creation time of the signature and the current time before the
/// signature will be rejected. This is a measure to increase the difficulty of a replay attack.
const EXPIRY_WINDOW: TimeDelta = TimeDelta::minutes(5);
/// Configuration for the behavior of the signing and verification routines.
///
/// NOTE(review): documented as non-exhaustive, but the struct is not marked
/// `#[non_exhaustive]` — confirm which is intended before relying on either.
#[derive(Clone, Copy)]
pub struct Options {
    /// Whether to use the `(created)` and `(expires)`. If `false`, the `date` header is used instead.
    ///
    /// Defaults to `true`.
    pub use_created: bool,
    /// Quirk for older mastodon versions, which don't incorporate the query string into the signing
    /// string during verification.
    ///
    /// Defaults to `false`.
    pub strip_query: bool,
    /// For how long the signature is valid.
    ///
    /// For signing, this only has an effect if `use_created` is also set. For verification, this is only
    /// used if the `(expires)` pseudo-header is not present.
    ///
    /// Defaults to 5 minutes.
    pub expires_after: TimeDelta,
    /// Which signature algorithm to use.
    ///
    /// Defaults to [`"hs2019"`][HS2019].
    pub algorithm: Algorithm,
}
impl Options {
    /// Use hs2019 with the `(created)` pseudo-header.
    pub const MODERN: Options = Options {
        use_created: true,
        strip_query: false,
        expires_after: EXPIRY_WINDOW,
        algorithm: HS2019,
    };
    /// Use rsa-sha256 with the `date` header.
    ///
    /// Identical to [`Options::MODERN`] except for `use_created` and `algorithm`.
    pub const LEGACY: Options = Options {
        use_created: false,
        strip_query: false,
        expires_after: EXPIRY_WINDOW,
        algorithm: RSA_SHA256,
    };
}
impl Default for Options {
fn default() -> Self {
Options {
use_created: true,
strip_query: false,
expires_after: EXPIRY_WINDOW,
algorithm: HS2019,
}
}
}
mod new {
    //! Lower-level machinery for parsing, constructing, signing and serializing
    //! HTTP signatures. Everything pivots around the [`IR`] type.
    use base64::prelude::*;
    use chrono::{DateTime, Utc};
    use http::{Method, Request};
    use rsa::sha2::{Digest, Sha256};
    use super::{sign_rsa_sha256, Options, Signature, SigningKey, EXPIRY_WINDOW, HS2019, RSA_SHA256};
    /// Calculate the SHA256 hash of something.
    pub fn sha256(buf: impl AsRef<[u8]>) -> Vec<u8> {
        <Sha256 as Digest>::digest(buf.as_ref()).to_vec()
    }
    /// Base64-encode something.
    pub fn encode(buf: impl AsRef<[u8]>) -> String {
        BASE64_STANDARD.encode(buf.as_ref())
    }
    /// Base64-decode something.
    pub fn decode(buf: &str) -> Result<Vec<u8>, String> {
        BASE64_STANDARD.decode(buf).map_err(|e| e.to_string())
    }
    /// Intermediate representation of a signature.
    ///
    /// The parameter `S` tracks completeness: `()` for a partial (unsigned) IR,
    /// a string type once the `keyId` and signature are known.
    pub struct IR<'s, S = &'s str> {
        // The `(request-target)` string.
        target: &'s str,
        // Ordered (pseudo)header name/value pairs making up the signing string.
        inputs: Vec<(&'s str, &'s str)>,
        // Wire name of the signature algorithm.
        alg: &'s str,
        // The `keyId` value.
        key: S,
        // The base64-encoded signature.
        sig: S,
    }
    /// Allocates a new [`IR`] for doing signature operations with.
    ///
    /// Fails if the request lacks a `signature` header, or if that header
    /// cannot be parsed.
    pub fn with_ir<T, U>(
        req: &Request<T>,
        opt: Options,
        f: impl FnOnce(IR<'_>) -> Result<U, String>,
    ) -> Result<U, String> {
        let target = &format_target(&req, opt);
        let map = make_header_map(&req);
        let Some(header) = get(&map, "signature") else {
            do yeet "Missing required `signature` header";
        };
        let (inputs, key, alg, sig) = parse_header(header, &target, &map)?;
        f(IR { target, inputs, key, alg, sig })
    }
    impl<S> IR<'_, S> {
        /// Create an HTTP header from the IR.
        pub fn to_header(&self) -> String
        where
            S: AsRef<str>,
        {
            format_header(&self.inputs, self.key.as_ref(), self.alg, self.sig.as_ref())
        }
        /// Validate and upgrade the IR to a structured signature.
        pub fn to_signature(&self) -> Result<Signature, String>
        where
            S: ToString,
        {
            // Pull out the three time-bearing (pseudo)headers, if present.
            let times: Result<_, String> = try {
                let date = get(&self.inputs, "date").map(from_rfc822).transpose()?;
                let created = get(&self.inputs, "(created)").map(from_secs).transpose()?;
                let expires = get(&self.inputs, "(expires)").map(from_secs).transpose()?;
                (date, created, expires)
            };
            let (date, created, expires) =
                times.map_err(|e: String| format!("Failed to parse time: {e}"))?;
            // Determine the validity window: prefer the pseudo-headers, fall
            // back to `date` + grace window.
            let (created, expires) = match (created, expires) {
                (Some(created), None) => (created, created + EXPIRY_WINDOW),
                (Some(created), Some(expires)) => (created, expires),
                (None, _) => {
                    let Some(date) = date else {
                        do yeet "Cannot determine validity window";
                    };
                    (date, date + EXPIRY_WINDOW)
                }
            };
            let algorithm = match self.alg {
                "rsa-sha256" => RSA_SHA256,
                "hs2019" => HS2019,
                a => do yeet format!("Unsupported algorithm {a}"),
            };
            let signed_str = make_signing_string(&self.inputs);
            let components = self
                .inputs
                .iter()
                .map(|(k, v)| (k.to_string(), v.to_string()))
                .collect();
            Ok(Signature {
                key_id: self.key.to_string(),
                signature: self.sig.to_string(),
                time_range: (created, expires),
                target_str: self.target.to_string(),
                components,
                signed_str,
                algorithm,
            })
        }
        /// Create an IR from a signature.
        pub fn from_signature<'s>(sig: &'s Signature) -> IR<'s> {
            IR {
                target: &sig.target_str,
                inputs: sig
                    .components
                    .iter()
                    .map(|(a, b)| (a.as_str(), b.as_str()))
                    .collect(),
                key: &sig.key_id,
                alg: sig.algorithm.0,
                sig: &sig.signature,
            }
        }
        /// Create a signing string.
        pub fn to_signing_str(&self) -> String {
            make_signing_string(&self.inputs)
        }
    }
    impl<'s> IR<'s, ()> {
        /// Create a partial, unsigned IR.
        pub fn partial<'r, T, U>(
            req: &'r Request<T>,
            opt: Options,
            f: impl FnOnce(IR<'_, ()>) -> Result<U, String>,
        ) -> Result<U, String> {
            let map = make_header_map(req);
            // POST bodies must be covered by a `digest` header.
            let digest = req.method() == Method::POST;
            let expires_after = opt.use_created.then_some(opt.expires_after);
            let created = Utc::now();
            let expires = created + expires_after.unwrap_or(EXPIRY_WINDOW);
            let target = &format_target(&req, opt);
            let created = created.timestamp().to_string();
            let expires = expires.timestamp().to_string();
            // Association list mapping pseudo headers names to concrete values.
            #[rustfmt::skip]
            let pseudo = &[
                ("(request-target)", target.as_str()),
                ("(created)", created.as_str()),
                // Fixed: this entry was previously mislabeled "(expired)", which made
                // MODERN signatures list a pseudo-header no verifier (including our own
                // `parse_header`) recognizes, and dropped the `expires` parameter from
                // the emitted header.
                ("(expires)", expires.as_str()),
            ];
            let inputs = match compute_inputs(&map, pseudo, opt, digest) {
                Err(error) => do yeet format!("computing inputs: {error}"),
                Ok(inputs) => inputs,
            };
            f(IR {
                target,
                inputs,
                alg: opt.algorithm.0,
                key: (),
                sig: (),
            })
        }
        /// Sign a partially constructed IR to make it actually useful.
        pub fn signed(self, key: &'s SigningKey) -> Result<IR<'s, String>, String> {
            let sig_str = self.to_signing_str();
            let signature = match sign_rsa_sha256(&sig_str, &key.inner).map(encode) {
                Err(error) => do yeet format!("RSA error: {error}"),
                Ok(signature) => signature,
            };
            Ok(IR {
                target: self.target,
                inputs: self.inputs,
                alg: self.alg,
                key: key.id.to_string(),
                sig: signature,
            })
        }
    }
    /// With the given options and headers, compute a set of headers and pseudo-headers that (in order) are to be
    /// turned into the signing string.
    fn compute_inputs<'a>(
        headers: &[(&'a str, &'a str)],
        pseudo: &[(&'a str, &'a str)],
        opt: Options,
        use_digest: bool,
    ) -> Result<Vec<(&'a str, &'a str)>, String> {
        // List of input names that we want. Pseudo-headers are ordered before normal headers.
        let needed = ["(request-target)"]
            .into_iter()
            .chain(if opt.use_created {
                // Fixed: "(expires)" was previously misspelled "(expired)" here.
                vec!["(created)", "(expires)"]
            } else {
                vec!["date"]
            })
            .chain(use_digest.then_some("digest"))
            .chain(["host"]);
        // Resolve each needed name against the real headers first, then the pseudo-headers.
        let assoc = |k| {
            get(headers, k)
                .or_else(|| get(&pseudo, k))
                .ok_or_else(|| format!("Missing (pseudo)header `{k}`"))
                .map(|v| (k, v))
        };
        needed.map(assoc).try_collect()
    }
    /// Allocate a `(request-target)` buffer.
    fn format_target<T>(req: &Request<T>, opt: Options) -> String {
        // `strip_query` is a compat quirk for verifiers that don't sign the query string.
        let path = if opt.strip_query {
            req.uri().path()
        } else {
            req.uri()
                .path_and_query()
                .map(|r| r.as_str())
                .unwrap_or_else(|| req.uri().path())
        };
        let method = req.method().as_str().to_ascii_lowercase();
        format!("{method} {path}")
    }
    /// Render the value of the `signature` header from its parts.
    fn format_header(inputs: &[(&str, &str)], key: &str, alg: &str, sig: &str) -> String {
        // Format all the headers in the order that we used them in the signing string.
        let headers: String = inputs
            .iter()
            .map(|(k, _)| k.as_ref())
            .intersperse(" ")
            .collect();
        // Get the time-based parameters, if they exist.
        let created = get(inputs, "(created)").map(|v| ("created", v));
        let expires = get(inputs, "(expires)").map(|v| ("expires", v));
        // These parameters are always produced.
        #[rustfmt::skip]
        let table = [
            ("keyId", key),
            ("algorithm", alg),
            ("signature", sig),
            ("headers", &headers),
        ];
        // Now we need to format the whole shebang.
        table
            .into_iter()
            // `(created)` is part of a newer draft that not everyone implements.
            .chain(created)
            // `(expires)` is optional per the spec
            .chain(expires)
            // Step 1: all the values need to be surrounded by quotes
            .map(|(k, v)| (k, format!(r#""{v}""#)))
            // Step 2. join each pair together
            .map(|(k, v)| format!("{k}={v}"))
            // Step 3. comma separate everything
            .intersperse(",".to_string())
            // Step 4. fold the entire thing into one
            .collect::<String>()
    }
    /// Parse a `signature` header value into the ordered inputs plus the
    /// `keyId`, `algorithm` and `signature` fields.
    fn parse_header<'s>(
        header: &'s str,
        target: &'s str,
        extra: &[(&'s str, &'s str)],
    ) -> Result<(Vec<(&'s str, &'s str)>, &'s str, &'s str, &'s str), String> {
        // Parse the top-level table.
        let table: Vec<(&str, &str)> = header
            // Split into entries
            .split(",")
            // Split entries into key-value pairs
            .filter_map(|pair| {
                // Tolerate optional spaces around each entry: some implementations
                // (e.g. akkoma) put no space after the comma, others put one.
                // (Previously only trailing spaces were trimmed, so a `", "`
                // separated header failed to parse.)
                pair.trim_matches(' ').split_once('=')
            })
            // Undo quoting of the values
            .filter_map(|(k, v)| v.strip_prefix('"')?.strip_suffix('"').map(|v| (k, v)))
            .collect();
        let Some(headers) = get(&table, "headers") else {
            do yeet "Missing `headers` field";
        };
        let Some(key) = get(&table, "keyId") else {
            do yeet "Missing `keyId` field";
        };
        let Some(algorithm) = get(&table, "algorithm") else {
            do yeet "Missing `algorithm` field";
        };
        let Some(signature) = get(&table, "signature") else {
            do yeet "Missing `signature` field"
        };
        let inputs: Vec<(&str, &str)> = headers
            // Headers and pseudo-headers are separated by spaces in the order in which they appear.
            .split(' ')
            // Map created and expires pseudo-headers to the ones specified in the inputs table.
            .map(|k| match k {
                "(request-target)" => Ok(("(request-target)", target)),
                // If these exist, the table must have them, but other than that they're optional.
                "(created)" => get(&table, "created")
                    .ok_or("`(created)` pseudo-header is listed, but does not exist".to_string())
                    .map(|v| ("(created)", v)),
                "(expires)" => get(&table, "expires")
                    .ok_or("`(expires)` pseudo-header is listed, but does not exist".to_string())
                    .map(|v| ("(expires)", v)),
                // For anything else, we don't have the required information, and we'll need access
                // to the entire request in order to fill in the blanks.
                k => get(&extra, k)
                    .ok_or(format!("header '{k}' is missing"))
                    .map(|v| (k, v)),
            })
            .try_collect()?;
        Ok((inputs, key, algorithm, signature))
    }
    /// Make an association list associating header names to header values.
    ///
    /// Allocates a new vector, but not any strings.
    fn make_header_map<'r, T>(req: &'r Request<T>) -> Vec<(&'r str, &'r str)> {
        req.headers()
            .iter()
            // Acquire string slices of every name-value pair.
            .filter_map(|(k, v)| v.to_str().ok().map(|v| (k.as_str(), v)))
            .collect()
    }
    /// Quick utility function to get stuff from an association list.
    ///
    /// Header names are matched case-insensitively.
    fn get<'x>(map: &[(&str, &'x str)], key: &str) -> Option<&'x str> {
        map.iter()
            .find_map(|(k, v)| k.eq_ignore_ascii_case(key).then_some(*v))
    }
    /// Render the signing string from the ordered inputs.
    fn make_signing_string(data: &[(&str, &str)]) -> String {
        data.iter()
            // Each pair is separated by a colon and a space
            .map(|(k, v)| format!("{k}: {v}"))
            // Pairs must be separated by a newline
            .intersperse("\n".to_string())
            .collect()
    }
    /// Parse a unix timestamp (seconds) into a [`DateTime`].
    fn from_secs(s: &str) -> Result<DateTime<Utc>, String> {
        s.parse::<i64>().map_err(|e| e.to_string()).and_then(|t| {
            let Some(time) = DateTime::from_timestamp(t, 0) else {
                do yeet "Timestamp out of range";
            };
            Ok(time)
        })
    }
    /// Parse an RFC 2822 `date` header value into a [`DateTime`].
    fn from_rfc822(s: &str) -> Result<DateTime<Utc>, String> {
        DateTime::parse_from_rfc2822(s)
            .map_err(|e| e.to_string())
            .map(|time| time.to_utc())
    }
}

View file

@ -49,7 +49,7 @@ fn make_alias_impl(name: &syn::Ident, field: &syn::Field) -> TokenStream {
})
}
#[proc_macro_derive(Mixin)]
#[proc_macro_derive(Mixin, attributes(index))]
pub fn mixin(item: TokenStream) -> TokenStream {
let input = syn::parse_macro_input!(item as syn::DeriveInput);

View file

@ -12,3 +12,5 @@ bincode = "2.0.0-rc.3"
chrono = "*"
either = "*"
derive_more = "*"
serde_json = "*"
tracing = "*"

54
lib/puppy/src/context.rs Normal file
View file

@ -0,0 +1,54 @@
use fetch::Client;
use store::{Key, Store, Transaction};
use crate::{config::Config, Result};
/// The context of a running ActivityPuppy.
///
/// This type provides access to the data store and configuration.
#[derive(Clone)]
pub struct Context {
    // Server configuration (domains, port, state directory).
    config: Config,
    // HTTP client used for federation requests.
    client: Client,
    // Handle to the data store.
    store: Store,
}
impl Context {
    /// Load the server context from the configuration.
    ///
    /// Opens the data store under the configured state directory and sets up
    /// a fresh federation client.
    pub fn load(config: Config) -> Result<Context> {
        let store = Store::open(&config.state_dir, crate::data::schema())?;
        Ok(Context {
            client: Client::new(),
            config,
            store,
        })
    }
    /// Do a data store [transaction][store::Transaction].
    pub fn run<T>(&self, f: impl FnOnce(&Transaction<'_>) -> Result<T>) -> Result<T> {
        self.store.run(f)
    }
    /// Access the store directly.
    pub fn store(&self) -> &Store {
        &self.store
    }
    /// Access the configuration.
    pub fn config(&self) -> &Config {
        &self.config
    }
    /// Create an ActivityPub object ID from a key.
    pub fn mk_url(&self, key: Key) -> String {
        format!("https://{}/o/{key}", self.config.ap_domain)
    }
    /// Access the federation client.
    pub fn resolver(&self) -> &Client {
        &self.client
    }
}
/// Load a context for running tests in.
///
/// Runs `test` against a context backed by a throwaway [`Store`] using the
/// given schema.
#[cfg(test)]
pub fn test_context<T>(
    config: Config,
    schema: store::types::Schema,
    test: impl FnOnce(Context) -> Result<T>,
) -> Result<T> {
    let client = Client::new();
    Store::test(schema, |store| {
        let cx = Context { config, store, client };
        test(cx)
    })
}

215
lib/puppy/src/data.rs Normal file
View file

@ -0,0 +1,215 @@
//! Datas for the data store!
//!
//! This module contains the definitions for the data store.
use bincode::{Decode, Encode};
use derive_more::Display;
use store::{types::Schema, Alias, Arrow, Key, Mixin};
/// *Bites you*
///
/// An arrow recording that `biter` bit `victim`, with an identity key so each
/// individual bite can be referenced.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Bite {
    /// The key identifying this particular bite.
    #[identity]
    pub id: Key,
    /// Who did the biting.
    #[origin]
    pub biter: Key,
    /// Who got bitten.
    #[target]
    pub victim: Key,
}
/// Properties of ActivityPub objects.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Object {
    /// The ActivityPub id of the object.
    #[index]
    pub id: Id,
    /// What kind of object is it?
    pub kind: ObjectKind,
    /// Whether or not the object resides on this server or on another one.
    pub local: bool,
}
/// Allows case analysis on the type of ActivityPub objects.
#[derive(Encode, Decode, Debug, Clone)]
pub enum ObjectKind {
    /// An actor.
    Actor,
    /// An activity; the payload distinguishes which one.
    Activity(ActivityKind),
    /// A note-like object. The string presumably carries the concrete
    /// ActivityPub type name (e.g. "Note") — TODO confirm against the fetch code.
    Notelike(String),
}
/// The type of an activity.
///
/// Discriminants are explicit because they are part of the encoded representation.
#[derive(Encode, Decode, Debug, Clone)]
pub enum ActivityKind {
    /// Used for posting stuff!
    Create = 0,
    /// Represents a follow request.
    Follow = 1,
    /// Used to signal that a follow request was accepted.
    Accept = 2,
    /// Used to reject a follow request.
    Reject = 3,
    /// See [`bites`](crate::bites).
    Bite = 4,
}
/// Federation delivery information attached to an actor.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Channel {
    /// The actor's inbox URL — presumably where activities addressed to the
    /// actor are delivered; confirm against the delivery code.
    pub inbox: String,
    // TODO: add public key here
}
/// A predicate; `follower` "follows" `followed`.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Follows {
    /// The one doing the following.
    #[origin]
    pub follower: Key,
    /// The one being followed.
    #[target]
    pub followed: Key,
}
/// An instance of a request from some `origin` user to follow a `target` user.
///
/// This should not be used to determine whether two actors are following each other. For that, use
/// [`Follows`], a basic arrow for exactly this purpose. *This* arrow is used to identify specific
/// instances of *requests*, and serves mostly as a historical reference and for synchronizing with
/// other servers.
///
/// Used to represent a `Follow` activity.
///
/// Mixins always present for the `id`:
///
/// - [`Status`], carrying the status of the request.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct FollowRequest {
    /// The unique ID of this particular request.
    #[identity]
    pub id: Key,
    /// The "follower", the user that made the request.
    // NOTE(review): unlike `Bite` and `Create`, these two fields carry no
    // #[origin]/#[target] attributes — presumably the derive falls back on the
    // field names; confirm against the Arrow derive macro.
    pub origin: Key,
    /// The one the request is made to.
    pub target: Key,
}
/// The status of a [`FollowRequest`].
///
/// Valid state transitions:
///
/// ```text
/// ┌──────────────▶ Rejected
/// │
/// │
/// │
///
/// None ─────────▶ Pending ────────▶ Accepted
///
/// │ │
/// │ │
/// │ │
/// ▼ │
/// Withdrawn ◀────────────┘
/// ```
///
/// In addition, a follow request will be deleted if either endpoint is removed from the graph.
#[derive(Mixin, Encode, Decode, Eq, PartialEq, Clone)]
pub enum Status {
    /// The follow request was previously pending or accepted, but since withdrawn.
    ///
    /// This can happen when someone cancels their follow request or unfollows the target.
    Withdrawn,
    /// The follow request was accepted.
    Accepted,
    /// The follow request was denied.
    Rejected,
    /// The follow request is still under review.
    Pending,
}
/// An ActivityPub ID, used to look up remote objects by their canonical URL.
///
/// Stored as an alias, so objects can be resolved by this URL.
#[derive(Alias, Encode, Decode, Clone, PartialEq, Eq, Debug, Hash, Display)]
pub struct Id(pub String);
/// A "profile" in the social media sense.
///
/// Contains all presentation information about someone making posts.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Profile {
    /// How many posts has this user made?
    pub post_count: usize,
    /// The name used for the profile's handle.
    #[index] // <- currently doesnt do anything but i have an idea
    pub account_name: Username,
    /// The name displayed above their posts.
    pub display_name: Option<String>,
    /// The "bio", a freeform "about me" field.
    pub about_string: Option<String>,
    /// Arbitrary custom metadata fields.
    pub about_fields: Vec<(String, String)>,
}
/// A unique name for an actor that is part of their "handle".
///
/// Stored as an alias so actors can be looked up by name.
#[derive(Alias, Encode, Decode, Clone, PartialEq, Eq, Debug, Hash, Display)]
pub struct Username(pub String);
/// The relation that `author` has constructed and published `object`.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct AuthorOf {
    /// The actor that authored the object.
    #[origin]
    pub author: Key,
    /// The object that was authored.
    #[target]
    pub object: Key,
}
/// The contents of a post.
#[derive(Mixin, Encode, Decode, Debug, Clone, Default)]
pub struct Content {
    /// Main post body.
    pub content: Option<String>,
    /// Content warning for the post.
    pub warning: Option<String>,
}
/// A public key used for verifying requests.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct PublicKey {
    /// The `"id"` of the key, as referenced by request signatures.
    pub key_id: String,
    /// The PEM-encoded key material.
    pub key_pem: String,
}
/// A private key for signing requests with.
///
/// Note: deliberately not `Debug`, unlike [`PublicKey`].
#[derive(Mixin, Encode, Decode, Clone)]
pub struct PrivateKey {
    /// The PEM-encoded key material.
    pub key_pem: String,
}
/// Represents a `Create` activity.
#[derive(Arrow)]
pub struct Create {
    /// The key of the `Create` activity itself.
    #[identity]
    pub id: Key,
    /// The actor performing the creation.
    #[origin]
    pub actor: Key,
    /// The object being created.
    #[target]
    pub object: Key,
}
/// Construct the schema.
///
/// NOTE(review): presumably every mixin, alias and arrow type stored by puppy
/// must be registered here for the store to handle it — confirm against the
/// `store` crate. Registration order is kept as-is in case it affects layout.
pub fn schema() -> Schema {
    Schema::new()
        // Mixins
        .has::<Profile>()
        .has::<Content>()
        .has::<Status>()
        .has::<Object>()
        .has::<Channel>()
        .has::<PrivateKey>()
        .has::<PublicKey>()
        // Aliases
        .has::<Username>()
        .has::<Id>()
        // Arrows
        .has::<Bite>()
        .has::<FollowRequest>()
        .has::<AuthorOf>()
        .has::<Follows>()
        .has::<Create>()
}

233
lib/puppy/src/interact.rs Normal file
View file

@ -0,0 +1,233 @@
//! Interactions between actors.
use store::{util::IterExt as _, Key, StoreError, Transaction};
use crate::{
actor::Actor,
data::{FollowRequest, Bite, Status, Follows},
Context, Error, Result,
};
/// Interactions with other objects.
impl Actor {
    /// Create a [`Bite`] with `self` as the biter.
    ///
    /// This only constructs the value; see [`Actor::do_bite`] to persist it.
    pub fn bite(&self, victim: Actor) -> Bite {
        Bite {
            victim: victim.key,
            biter: self.key,
            id: Key::gen(),
        }
    }
    /// Construct a [`FollowRequest`] from `self` to `target`.
    ///
    /// This only constructs the value; see [`Actor::do_follow_request`] to persist it.
    pub fn follow_request(&self, target: Actor) -> FollowRequest {
        FollowRequest {
            origin: self.key,
            target: target.key,
            id: Key::gen(),
        }
    }
    /// Makes `self` bite `victim` and inserts the records into the database.
    pub fn do_bite(&self, cx: &Context, victim: Actor) -> Result<Bite> {
        let bite = self.bite(victim);
        cx.run(|tx| tx.create(bite).map_err(Error::Store))?;
        Ok(bite)
    }
    /// Creates a follow request from `self` to `target`, initially [`Status::Pending`].
    pub fn do_follow_request(&self, cx: &Context, target: Actor) -> Result<FollowRequest> {
        let req = self.follow_request(target);
        cx.run(|tx| {
            tx.create(req)?;
            tx.add_mixin(req.id, Status::Pending)?;
            Ok(req)
        })
    }
    /// Accept a follow request.
    ///
    /// Only has an effect while the request is still pending; an accepted request
    /// additionally materializes the [`Follows`] relation.
    pub fn do_accept_request(&self, cx: &Context, req: FollowRequest) -> Result<()> {
        debug_assert! {
            self.key == req.target,
            "only the target of a follow request may accept it"
        };
        cx.run(|tx| try {
            let fr = tx
                .between::<FollowRequest>(req.origin, req.target)
                // Get the one that is equal to `req`.
                .filter(|fr| fr.as_ref().is_ok_and(|f| f.id == req.id))
                .last()
                .unwrap()?;
            // Only apply the update if the follow request is still in a pending state.
            if let Some(Status::Pending) = tx.get_mixin(fr.id)? {
                tx.update(fr.id, |_| Status::Accepted)?;
                tx.create(Follows {
                    follower: req.origin,
                    followed: req.target,
                })?;
            }
        })
    }
    /// Reject a follow request.
    pub fn do_reject_request(&self, cx: &Context, req: FollowRequest) -> Result<()> {
        // Fixed copy-paste in the assertion message: it previously said "accept".
        debug_assert! {
            self.key == req.target,
            "only the target of a follow request may reject it"
        };
        cx.run(|tx| try { tx.update(req.id, |_| Status::Rejected)? })?;
        Ok(())
    }
    /// Get all pending follow request for `self`.
    pub fn pending_requests<'c>(
        &self,
        tx: &'c Transaction<'c>,
    ) -> impl Iterator<Item = Result<FollowRequest>> + 'c {
        tx.incoming::<FollowRequest>(self.key)
            .map_err(Error::Store)
            .filter_bind_results(|req| Ok(if req.is_pending(tx)? { Some(req) } else { None }))
    }
    /// Get all nodes `self` is following.
    pub fn following<'c>(&self, tx: &'c Transaction<'c>) -> impl Iterator<Item = Result<Key>> + 'c {
        tx.outgoing::<Follows>(self.key)
            .map_err(Error::Store)
            .map_ok(|a| a.followed)
    }
    /// Get all followers of `self`.
    pub fn followers<'c>(&self, tx: &'c Transaction<'c>) -> impl Iterator<Item = Result<Key>> + 'c {
        tx.incoming::<Follows>(self.key)
            .map_err(Error::Store)
            .map_ok(|a| a.follower)
    }
    /// List all specific times `self` was bitten.
    pub fn bites_suffered<'c>(
        &self,
        tx: &'c Transaction<'c>,
    ) -> impl Iterator<Item = Result<Bite>> + 'c {
        tx.incoming::<Bite>(self.key).map_err(Error::Store)
    }
    /// Check whether `self` follows `other`.
    pub fn follows(&self, tx: &Transaction<'_>, other: &Actor) -> Result<bool> {
        try { tx.exists::<Follows>(self.key, other.key)? }
    }
}
impl FollowRequest {
    /// Determine if this follow request is pending.
    pub fn is_pending(&self, tx: &Transaction<'_>) -> Result<bool> {
        // The status lives in a mixin; a follow request without one is corrupt.
        let Some(status) = tx.get_mixin::<Status>(self.id)? else {
            return Err(StoreError::Missing.into());
        };
        let pending = status == Status::Pending;
        // Invariant: a pending request implies the follows relation does not
        // exist yet. (Only checked in debug builds.)
        debug_assert! {
            !pending || !tx.exists::<Follows>(self.origin, self.target)?,
            "fr.is_pending -> !(fr.origin follows fr.target)"
        };
        Ok(pending)
    }
}
#[cfg(test)]
mod tests {
    //! Tests for the follow-request state machine.
    use store::util::IterExt as _;
    use crate::{
        actor::{create_local as create_actor, Actor},
        config::Config,
        data::{schema, FollowRequest, Follows},
        test_context, Context, Result,
    };
    // Create the two actors used by every test below.
    fn make_test_actors(cx: &Context) -> Result<(Actor, Actor)> {
        let alice = create_actor(&cx, "alice")?;
        let bob = create_actor(&cx, "bob")?;
        eprintln!("alice={alice:?}, bob={bob:?}");
        Ok((alice, bob))
    }
    fn test_config() -> Config {
        Config {
            ap_domain: String::from("unit-test.puppy.gay"),
            wf_domain: String::from("unit-test.puppy.gay"),
            // NOTE(review): this `todo!()` panics as soon as `test_config()` is
            // called, so every test below currently aborts before asserting.
            state_dir: todo!(), // TODO: make this a temp dir
            port: 0,
        }
    }
    // A fresh follow request is pending and does not yet imply a follow.
    #[test]
    fn create_fr() -> Result<()> {
        test_context(test_config(), schema(), |cx| {
            let (alice, bob) = make_test_actors(&cx)?;
            alice.do_follow_request(&cx, bob)?;
            assert!(
                cx.store().exists::<FollowRequest>(alice.key, bob.key)?,
                "(alice -> bob) ∈ follow-requested"
            );
            assert!(
                !cx.store().exists::<Follows>(alice.key, bob.key)?,
                "(alice -> bob) ∉ follows"
            );
            let pending_for_bob = cx.run(|tx| {
                bob.pending_requests(&tx)
                    .map_ok(|fr| fr.origin)
                    .try_collect::<Vec<_>>()
            })?;
            assert_eq!(pending_for_bob, vec![alice.key], "bob.pending = {{alice}}");
            Ok(())
        })
    }
    // Accepting a request materializes the (one-directional) follow relation
    // and clears the pending set.
    #[test]
    fn accept_fr() -> Result<()> {
        test_context(test_config(), schema(), |cx| {
            let db = cx.store();
            let (alice, bob) = make_test_actors(&cx)?;
            let req = alice.do_follow_request(&cx, bob)?;
            bob.do_accept_request(&cx, req)?;
            assert!(
                db.exists::<Follows>(alice.key, bob.key)?,
                "(alice -> bob) ∈ follows"
            );
            assert!(
                !db.exists::<Follows>(bob.key, alice.key)?,
                "(bob -> alice) ∉ follows"
            );
            cx.run(|tx| try {
                let pending_for_bob: Vec<_> = bob.pending_requests(&tx).try_collect()?;
                assert!(pending_for_bob.is_empty(), "bob.pending = ∅");
                let followers_of_bob: Vec<_> = bob.followers(&tx).try_collect()?;
                assert_eq!(
                    followers_of_bob,
                    vec![alice.key],
                    "bob.followers = {{alice}}"
                );
            })
        })
    }
    // `followers` and `following` agree after an accepted request.
    #[test]
    fn listing_follow_relations() -> Result<()> {
        test_context(test_config(), schema(), |cx| try {
            let (alice, bob) = make_test_actors(&cx)?;
            let req = alice.do_follow_request(&cx, bob)?;
            bob.do_accept_request(&cx, req)?;
            cx.run(|tx| try {
                let followers_of_bob: Vec<_> = bob.followers(&tx).try_collect()?;
                assert_eq!(
                    followers_of_bob,
                    vec![alice.key],
                    "bob.followers = {{alice}}"
                );
                let following_of_alice: Vec<_> = alice.following(&tx).try_collect()?;
                assert_eq!(
                    following_of_alice,
                    vec![bob.key],
                    "alice.following = {{bob}}"
                );
            })?
        })
    }
}

View file

@ -1,507 +1,347 @@
#![feature(iterator_try_collect, try_blocks)]
use model::{Profile, Username};
pub use store::{self, Key, Store};
//! If you're an ActivityPub developer looking for information about ActivityPuppy's federation behavior,
//! you should take a look at [`fetch`].
pub mod model {
use bincode::{Decode, Encode};
use derive_more::Display;
use store::{types::Schema, Alias, Mixin};
// Working with result types is such a bitch without these.
#![feature(iterator_try_collect, try_blocks, once_cell_try, box_into_inner)]
use crate::follows::Status;
pub use crate::{
bites::Bite,
follows::{FollowRequest, Follows},
post::{AuthorOf, Content},
use std::hint::unreachable_unchecked;
use actor::get_signing_key;
pub use context::Context;
#[cfg(test)]
pub use context::test_context;
use data::{ActivityKind, AuthorOf, Channel, Content, Create, Id, ObjectKind, Profile, PublicKey};
use fetch::object::{Activity, Note, Object};
use store::Transaction;
pub use store::{self, Key, StoreError};
pub use fetch::{self, FetchError};
mod context;
pub mod data;
pub mod post;
mod interact;
use derive_more::{From, Display};
use tracing::{instrument, warn};
/// Retrieve an ActivityPub object from the database.
///
/// Fails with `Error::Missing` if the required properties are not present.
pub fn get_local_ap_object(tx: &Transaction<'_>, key: Key) -> Result<fetch::object::Object> {
let Some(obj) = tx.get_mixin::<data::Object>(key)? else {
// We need this data in order to determine the object type. If the passed key does not
// have this data, it must not be an ActivityPub object.
return Err(Error::MissingData { node: key, prop: "Object" });
};
/// A "profile" in the social media sense.
///
/// Contains all presentation information about someone making posts.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Profile {
/// How many posts has this user made?
pub post_count: usize,
/// The name used for the profile's handle.
pub account_name: Username,
/// The name displayed above their posts.
pub display_name: Option<String>,
/// The "bio", a freeform "about me" field.
pub about_string: Option<String>,
/// Arbitrary custom metadata fields.
pub about_fields: Vec<(String, String)>,
}
/// A unique name for an actor that is part of their "handle".
#[derive(Alias, Encode, Decode, Clone, PartialEq, Eq, Debug, Hash, Display)]
pub struct Username(pub String);
/// Construct the schema.
pub fn schema() -> Schema {
Schema::new()
// Mixins
.has::<Profile>()
.has::<Content>()
.has::<Status>()
// Aliases
.has::<Username>()
// Arrows
.has::<Bite>()
.has::<FollowRequest>()
.has::<AuthorOf>()
.has::<Follows>()
match obj.kind {
ObjectKind::Actor => {
let Some(Profile { account_name, display_name, .. }) = tx.get_mixin(key)? else {
return Err(Error::MissingData { node: key, prop: "Profile" });
};
let Some(Channel { inbox }) = tx.get_mixin(key)? else {
return Err(Error::MissingData { node: key, prop: "Channel" });
};
let Some(PublicKey { key_id, key_pem }) = tx.get_mixin(key)? else {
return Err(Error::MissingData { node: key, prop: "PublicKey" });
};
Ok(fetch::object::Object::Actor(fetch::object::Actor {
id: obj.id.0.clone().into(),
inbox: inbox.into(),
account_name: account_name.0,
display_name,
public_key: fetch::object::PublicKey {
owner: obj.id.0.into(),
id: key_id.into(),
inner: key_pem,
},
}))
}
ObjectKind::Activity(ActivityKind::Create) => {
let Some(Create { object, actor, .. }) = tx.get_arrow(key)? else {
panic!("expected a `Create`");
};
let Id(actor) = tx.get_alias(actor)?.unwrap();
Ok(fetch::object::Object::Activity(fetch::object::Activity {
id: obj.id.0.into(),
actor: actor.into(),
object: Box::new(get_local_ap_object(tx, object)?),
kind: String::from("Create"),
}))
}
ObjectKind::Notelike(kind) => {
let Some(Content { content, warning, .. }) = tx.get_mixin(key)? else {
panic!()
};
let Some(AuthorOf { author, .. }) = tx.incoming(key).next().transpose()? else {
panic!()
};
let Some(Id(author)) = tx.get_alias(author)? else {
todo!()
};
Ok(fetch::object::Object::Note(Note {
id: obj.id.0.clone().into(),
summary: warning,
content,
author,
kind,
}))
}
_ => todo!(),
}
}
pub fn create_actor(db: &Store, username: impl ToString) -> store::Result<Key> {
let key = Key::gen();
db.run(|tx| {
let username: Username = username.to_string().into();
tx.add_alias(key, username.clone())?;
pub mod actor {
use fetch::{
object,
signatures::{Private, SigningKey},
};
use store::{Key, StoreError, Transaction};
use crate::{
data::{Channel, Id, Object, ObjectKind, PrivateKey, Profile, PublicKey, Username},
Context, Error, Result,
};
/// A reference to an actor.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub struct Actor {
/// The key identifying the actor in the data store.
pub key: Key,
}
impl Actor {
/// Get a local actor from the store by their username.
pub fn by_username(tx: &Transaction<'_>, username: impl ToString) -> Result<Option<Actor>> {
let maybe_key = tx
.lookup(Username(username.to_string()))
.map_err(Error::Store)?;
Ok(maybe_key.map(|key| Actor { key }))
}
}
/// Create a fresh local actor.
pub fn create_local(cx: &Context, username: impl ToString) -> Result<Actor> {
let key = Key::gen();
cx.run(|tx| {
let username: Username = username.to_string().into();
// Federation stuff
mixin_ap_actor(tx, key, &cx.config().ap_domain, true)?;
mixin_priv_key(tx, key, &cx.config().ap_domain)?;
// Social properties
tx.add_alias(key, username.clone())?;
tx.add_mixin(key, Profile {
post_count: 0,
account_name: username,
display_name: None,
about_string: None,
about_fields: Vec::new(),
})?;
Ok(Actor { key })
})
}
/// Register an actor from another server.
pub fn create_remote(tx: &Transaction<'_>, object: object::Actor) -> Result<Actor> {
let key = Key::gen();
tx.add_alias(key, Id(object.id.clone()))?;
tx.add_mixin(key, Channel { inbox: object.inbox })?;
tx.add_mixin(key, Object {
kind: ObjectKind::Actor,
id: Id(object.id),
local: false,
})?;
tx.add_mixin(key, Profile {
post_count: 0,
account_name: username,
display_name: None,
account_name: Username(object.account_name),
display_name: object.display_name,
about_string: None,
about_fields: Vec::new(),
})?;
Ok(key)
tx.add_mixin(key, PublicKey {
key_id: object.public_key.id,
key_pem: object.public_key.inner,
})?;
Ok(Actor { key })
}
/// Add properties related to local ActivityPub actors to a vertex.
pub fn mixin_ap_actor(
tx: &Transaction<'_>,
vertex: Key,
domain: &str,
local: bool,
) -> Result<(), StoreError> {
let id = Id(format!("https://{domain}/o/{vertex}"));
tx.add_alias(vertex, id.clone())?;
tx.add_mixin(vertex, Channel { inbox: format!("{id}/inbox") })?;
tx.add_mixin(vertex, Object {
kind: ObjectKind::Actor,
local,
id,
})?;
store::OK
}
/// Generate and attach a public/private key pair to the vertex.
pub fn mixin_priv_key(
tx: &Transaction<'_>,
vertex: Key,
domain: &str,
) -> Result<(), StoreError> {
let key_id = format!("https://{domain}/o/{vertex}#sig-key");
let (private, public) = Private::gen();
tx.add_mixin(vertex, PublicKey {
key_pem: public.encode_pem(),
key_id,
})?;
tx.add_mixin(vertex, PrivateKey { key_pem: private.encode_pem() })?;
store::OK
}
pub fn get_signing_key(tx: &Transaction<'_>, actor: Actor) -> Result<SigningKey, StoreError> {
let (PrivateKey { key_pem, .. }, PublicKey { key_id, .. }) =
tx.get_mixin_many(actor.key)?;
let Id(owner) = tx.get_alias(actor.key)?.unwrap();
let inner = Private::decode_pem(&key_pem);
Ok(SigningKey { id: key_id, owner, inner })
}
}
pub type Result<T, E = Error> = std::result::Result<T, E>;
#[derive(From, Debug, Display)]
pub enum Error {
/// An error internal to the store.
#[display(fmt = "store error: {}", self.0)]
Store(StoreError),
/// An error generated by the [fetch] subsystem.
#[display(fmt = "fetch error: {}", self.0)]
Fetch(FetchError),
/// Expected `node` to have some property that it doesn't have.
#[display(fmt = "missing data: {node} is missing {prop}")]
MissingData {
/// The node that is missing the data.
node: Key,
/// Name of the thing it is missing.
prop: &'static str,
},
#[display(fmt = "invalid data: {}", self.0)]
Invalid(String),
}
pub mod config {
#[derive(Clone)]
pub struct Config {
pub ap_domain: String,
pub wf_domain: String,
pub state_dir: String,
pub port: u16,
}
}
/// Interpret an *incoming* activity. Outgoing activities are *never* interpreted through this function,
/// because their changes are already in the database.
// TODO: figure out if that is the behavior we actually want
#[instrument(skip_all, fields(activity.id = activity.id))]
pub fn interpret(cx: &Context, activity: Activity) -> Result<()> {
    // Fetch our actor from the database
    // NOTE(review): `ingest` is expected to have stored both the actor and the
    // object before this runs; an out-of-order or malformed delivery currently
    // panics the handler rather than returning an error — TODO confirm that
    // is acceptable.
    let Some(actor) = cx.store().lookup(Id(activity.actor.clone()))? else {
        panic!(
            "actor {} does not exist in the database (id={})",
            activity.actor, activity.id
        )
    };
    // Fetch our object from the database. The object must already exist in the database.
    let id = activity.object.id();
    let Some(object) = cx.store().lookup(Id(id.to_owned()))? else {
        panic!(
            "object {} does not exist in the database (id={})",
            activity.object.id(),
            activity.id
        )
    };
    let actor = actor::Actor { key: actor };
    // Apply the activity's side effects, and remember which key and kind the
    // activity record itself should be filed under.
    let (key, tag) = match activity.kind.as_str() {
        "Bite" => {
            let object = actor::Actor { key: object };
            (actor.do_bite(&cx, object)?.id, ActivityKind::Bite)
        }
        "Create" => {
            // NOTE: due to the ingesting, we already have this information.
            // TODO: change this. for god's sake
            return Ok(());
        }
        "Follow" => {
            let object = actor::Actor { key: object };
            let req = actor.do_follow_request(&cx, object)?;
            (req.id, ActivityKind::Follow)
        }
        tag @ ("Accept" | "Reject") => {
            // Follow requests are multi-arrows in our graph, and they have their own activitypub id.
            let Some(req) = cx.store().get_arrow(object)? else {
                panic!(
                    "follow request does not exist: {object} (id={})",
                    activity.id
                )
            };
            // Dispatch to the actual method based on the tag
            let tag = match tag {
                "Accept" => actor
                    .do_accept_request(&cx, req)
                    .map(|_| ActivityKind::Accept)?,
                "Reject" => actor
                    .do_reject_request(&cx, req)
                    .map(|_| ActivityKind::Reject)?,
                _ => unsafe {
                    // SAFETY: this branch of the outer match only matches if the tag is either "Accept" or "Reject",
                    // so this inner branch is truly unreachable.
                    unreachable_unchecked()
                },
            };
            (Key::gen(), tag)
        }
        k => {
            warn!(activity.id, "unsupported activity type {k}");
            return Err(Error::Invalid(format!("activity type '{k}' not supported")));
        }
    };
    // Record the activity itself (aliased by its ActivityPub id) so it can be
    // served or deduplicated later.
    cx.run(|tx| {
        tx.add_alias(key, Id(activity.id.clone()))?;
        tx.add_mixin(key, data::Object {
            id: Id(activity.id.clone()),
            kind: ObjectKind::Activity(tag),
            local: false,
        })?;
        Ok(())
    })
}
pub mod bites {
//! The most essential feature of any social network.
use store::{Arrow, Key, Store};
/// *Bites you*
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Bite {
#[identity]
pub id: Key,
#[origin]
pub biter: Key,
#[target]
pub victim: Key,
}
/// Record that `biter` has bitten `victim`, returning the new bite's id.
pub fn bite_actor(db: &Store, biter: Key, victim: Key) -> store::Result<Key> {
    let id = Key::gen();
    db.run(|tx| {
        tx.create(Bite { id, biter, victim })?;
        Ok(id)
    })
}
/// Who has bitten `victim`?
pub fn bites_on(db: &Store, victim: Key) -> store::Result<Vec<Bite>> {
db.incoming::<Bite>(victim).try_collect()
/// Make sure all the interesting bits of an activity are here.
///
/// For the activity's actor id and object id, fetch and store any we have not
/// seen before, signing outgoing requests with the key belonging to `auth`.
/// Fetched activities are fed through [`interpret`]; actors and notes are
/// stored directly.
#[instrument(skip_all, fields(activity.id = activity.id, key = auth.to_string()))]
pub async fn ingest(cx: &Context, auth: Key, activity: &Activity) -> Result<()> {
    let key = cx.run(|tx| get_signing_key(tx, actor::Actor { key: auth }).map_err(Error::Store))?;
    for id in [activity.actor.as_str(), activity.object.id()] {
        if cx.store().lookup(Id(id.to_owned()))?.is_some() {
            // Skip ingesting if we already know this ID.
            continue;
        }
        let json = cx.resolver().resolve(&key, &id).await?;
        // NOTE(review): this `unwrap` panics on unparsable remote JSON —
        // TODO confirm whether it should become an `Error::Invalid` instead.
        let object = Object::from_json(json).unwrap();
        match object {
            Object::Activity(a) => interpret(&cx, a)?,
            Object::Actor(a) => cx.run(|tx| actor::create_remote(tx, a).map(void))?,
            Object::Note(a) => post::create_post_from_note(cx, a).map(void)?,
            _ => todo!(),
        }
    }
    Ok(())
}
pub mod post {
//! Timelines: where you go to view the posts.
use std::ops::RangeBounds;
use bincode::{Decode, Encode};
use chrono::{DateTime, Utc};
use either::Either::{Left, Right};
use store::{util::IterExt as _, Arrow, Error, Key, Mixin, Result, Store, Transaction};
use crate::model::Profile;
/// The contents of a post.
#[derive(Mixin, Encode, Decode, Debug, Clone, Default)]
pub struct Content {
/// Main post body.
pub content: Option<String>,
/// Content warning for the post.
pub warning: Option<String>,
}
impl From<&str> for Content {
fn from(value: &str) -> Self {
value.to_string().into()
}
}
impl From<String> for Content {
fn from(value: String) -> Self {
Content {
content: Some(value),
warning: None,
}
}
}
/// The relation that `author` has constructed and published `object`.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct AuthorOf {
#[origin]
pub author: Key,
#[target]
pub object: Key,
}
/// A piece of content posted by someone.
#[derive(Clone, Debug)]
pub struct Post {
/// The post's internal ID.
pub id: Key,
/// The actual post contents.
pub content: Content,
/// Metadata about the post's author.
pub author: Author,
}
/// Information about a [`Post`]'s author.
#[derive(Clone, Debug)]
pub struct Author {
/// The identifier of the author.
pub id: Key,
/// The name to display along with the post.
pub display_name: String,
/// An informal identifier for a particular author.
pub handle: String,
}
/// An ordered list of [`Post`]s for viewing.
#[derive(Debug)]
pub struct Timeline {
items: Vec<Item>,
}
/// Discrete events that can be displayed to a user as part of a timeline.
#[derive(Debug)]
enum Item {
Post(Post),
}
impl Item {
/// Get the timeline item if it is a [`Post`].
pub fn as_post(&self) -> Option<&Post> {
match self {
Item::Post(ref post) => Some(post),
}
}
}
impl Timeline {
/// Get all the posts in the timeline.
pub fn posts(&self) -> impl Iterator<Item = &Post> {
self.items.iter().filter_map(|x| x.as_post())
}
}
/// Gets at most `limit` of the posts known to the instance that were inserted within `time_range`.
///
/// # Errors
///
/// Fails with [`Error::Missing`] if any post in range lacks an author with a
/// `Profile` mixin.
pub fn fetch_timeline(
    db: &Store,
    time_range: impl RangeBounds<DateTime<Utc>>,
    limit: Option<usize>,
) -> Result<Timeline> {
    let posts = db.run(|tx| {
        // Get all post content entries (the argument passed here is a range of chrono datetimes).
        let iter = tx.range::<Content>(time_range);
        // Apply the limit lazily; Left/Right unify the two iterator types.
        let iter = match limit {
            Some(n) => Left(iter.take(n)),
            None => Right(iter),
        };
        // Then, we're gonna map each of them to their author, and get the profile information needed to
        // render the post (mostly display name and handle).
        iter.bind_results(|(id, content)| {
            // Take the first author. There is nothing stopping a post from having multiple authors, but
            // let's take it one step at a time.
            let (author, Some(Profile { display_name, account_name, .. })) = tx
                .join_on(|a: AuthorOf| a.author, tx.incoming(id))?
                .swap_remove(0)
            else {
                // We expect all posts to have at least one author, so we should complain if there is one
                // that doesn't (for now). For robustness, the `.collect()` down there should be replaced
                // with a strategy where we log a warning instead of failing, but in the current state of
                // the project, failing fast is a good thing.
                return Err(Error::Missing);
            };
            Ok(Item::Post(Post {
                id,
                author: Author {
                    id: author,
                    handle: format!("@{account_name}"),
                    display_name: display_name.unwrap_or(account_name.0),
                },
                content,
            }))
        })
        .collect()
    })?;
    Ok(Timeline { items: posts })
}
/// Create a brand-new post node authored by `author`, returning its key.
pub fn create_post(db: &Store, author: Key, content: impl Into<Content>) -> store::Result<Key> {
    db.run(|tx| {
        let node = Key::gen();
        mixin_post(tx, node, author, content)
    })
}
/// Add a post's mixins and predicates to an existing `node`.
///
/// Bumps the author's cached `post_count`, attaches the [`Content`] mixin to
/// `node`, and creates the `AuthorOf` arrow. Returns `node` for chaining.
pub fn mixin_post(
    tx: &Transaction<'_>,
    node: Key,
    author: Key,
    content: impl Into<Content>,
) -> store::Result<Key> {
    // Keep the author's post counter in sync with the new post.
    tx.update::<Profile>(author, |mut profile| {
        profile.post_count += 1;
        profile
    })?;
    tx.add_mixin(node, content.into())?;
    tx.create(AuthorOf { author, object: node })?;
    Ok(node)
}
/// List the key and content of every post authored by `author`.
///
/// Posts without a `Content` mixin are silently skipped.
pub fn list_posts_by_author(db: &Store, author: Key) -> store::Result<Vec<(Key, Content)>> {
    db.run(|tx| {
        // Follow every outgoing `AuthorOf` arrow and join each target post
        // with its content, keeping only the posts that actually have some.
        let joined = tx.join_on(|a: AuthorOf| a.object, tx.outgoing(author))?;
        let posts = joined
            .into_iter()
            .filter_map(|(key, content)| content.map(|c| (key, c)))
            .collect();
        Ok(posts)
    })
}
}
pub mod follows {
//! Follow requests and related stuff.
use bincode::{Decode, Encode};
use store::{util::IterExt, Arrow, Error, Key, Mixin, Store, OK};
/// A predicate; `follower` "follows" `followed`.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Follows {
#[origin]
pub follower: Key,
#[target]
pub followed: Key,
}
/// An instance of a request from some `origin` user to follow a `target` user.
///
/// This should not be used to determine whether two actors are following each other. For that, use
/// [`Follows`], a basic arrow for exactly this purpose. *This* arrow is used to identify specific
/// instances of *requests*, and serves mostly as a historical reference and for synchronizing with
/// other servers.
///
/// Mixins always present for the `id`:
///
/// - [`Status`], carrying the status of the request.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct FollowRequest {
/// The unique ID of this particular request.
#[identity]
pub id: Key,
/// The "follower", the user that made the request.
pub origin: Key,
/// The one the request is made to.
pub target: Key,
}
impl FollowRequest {
    /// Determine if this follow request is pending.
    ///
    /// Reads the request's [`Status`] mixin; a request without one is a data
    /// integrity error and yields [`Error::Missing`].
    pub fn is_pending(&self, db: &Store) -> store::Result<bool> {
        // The status is stored as a mixin, so we need to get it.
        let Some(st) = db.get_mixin::<Status>(self.id)? else {
            // If we don't have a status for a follow request, something is borked.
            return Err(Error::Missing);
        };
        // If the status of the follow request is pending, it can't also be true that the follows
        // relation already exists.
        debug_assert! {
            !(st == Status::Pending)
            || db.exists::<Follows>(self.origin, self.target).map(|x| !x)?,
            "fr.is_pending -> !(fr.origin follows fr.target)"
        };
        Ok(st == Status::Pending)
    }
}
/// The status of a [`FollowRequest`].
///
/// Valid state transitions:
///
/// ```text
/// ┌──────────────▶ Rejected
/// │
/// │
/// │
///
/// None ─────────▶ Pending ────────▶ Accepted
///
/// │ │
/// │ │
/// │ │
/// ▼ │
/// Withdrawn ◀────────────┘
/// ```
///
/// In addition, a follow request will be deleted if either endpoint is removed from the graph.
#[derive(Mixin, Encode, Decode, Eq, PartialEq, Clone)]
pub enum Status {
/// The follow request was previously pending or accepted, but since withdrawn.
///
/// This can happen when someone cancels their follow request or unfollows the target.
Withdrawn,
/// The follow request was accepted.
Accepted,
/// The follow request was denied.
Rejected,
/// The follow request is still under review.
Pending,
}
/// File a follow request from `requester` to `target`.
///
/// Creates the `FollowRequest` arrow and marks it [`Status::Pending`].
pub fn request(db: &Store, requester: Key, target: Key) -> store::Result<FollowRequest> {
    db.run(|tx| {
        let id = Key::gen();
        let req = FollowRequest { id, origin: requester, target };
        tx.create(req)?;
        tx.add_mixin(id, Status::Pending)?;
        Ok(req)
    })
}
/// Accept the open follow request from `requester` to `target`, if one exists.
///
/// Picks the most recent request between the two actors; if it is still
/// pending, marks it accepted and creates the `Follows` arrow. A non-pending
/// latest request makes this a no-op. Fails with [`Error::Missing`] if no
/// request exists at all.
pub fn accept(db: &Store, requester: Key, target: Key) -> store::Result<()> {
    db.run(|tx| {
        // TODO: This logic is a little broken but it'll do for now. i'll fix it later.
        // NOTE(review): `between` is already called with both endpoints, so
        // the `filter` below looks redundant — verify against `between`'s
        // contract before removing it.
        let fr = tx
            .between::<FollowRequest>(requester, target)
            .filter(|fr| fr.as_ref().is_ok_and(|f| f.target == target))
            // We'll want the latest one, because that one was inserted last so it'll be the most
            // recent
            .last()
            .ok_or_else(|| Error::Missing)??;
        // Only apply the update if the last follow request is still in a pending state.
        if let Some(Status::Pending) = db.get_mixin(fr.id)? {
            tx.update(fr.id, |_| Status::Accepted)?;
            tx.create(Follows {
                follower: requester,
                followed: target,
            })?;
        }
        OK
    })
}
/// Mark the follow request identified by `request` as rejected.
pub fn reject(db: &Store, request: Key) -> store::Result<()> {
    db.run(|tx| tx.update(request, |_| Status::Rejected).map(|_| ()))
}
/// List all pending follow requests for a user.
///
/// Walks every incoming `FollowRequest` arrow on `target` and keeps only
/// those whose [`Status`] mixin is still `Pending`.
pub fn list_pending(db: &Store, target: Key) -> store::Result<Vec<FollowRequest>> {
    db.incoming::<FollowRequest>(target)
        .filter_bind_results(|req| Ok(if req.is_pending(db)? { Some(req) } else { None }))
        .collect()
}
/// Get all actors followed by `actor`.
///
/// Follows the outgoing `Follows` arrows and returns the target keys.
pub fn following_of(db: &Store, actor: Key) -> store::Result<Vec<Key>> {
    db.outgoing::<Follows>(actor)
        .map_ok(|a| a.followed)
        .collect()
}
/// Get all actors following `actor`.
///
/// Follows the incoming `Follows` arrows and returns the origin keys.
pub fn followers_of(db: &Store, actor: Key) -> store::Result<Vec<Key>> {
    db.incoming::<Follows>(actor)
        .map_ok(|a| a.follower)
        .collect()
}
#[cfg(test)]
mod tests {
    use store::{Key, Store, OK};

    use crate::{
        create_actor,
        model::{schema, FollowRequest, Follows},
    };

    /// Create the two standard test fixtures, "alice" and "bob".
    fn make_test_actors(db: &Store) -> store::Result<(Key, Key)> {
        let alice = create_actor(&db, "alice")?;
        let bob = create_actor(&db, "bob")?;
        // Print the generated keys so a failing assertion below is easier to
        // trace back to a specific actor.
        eprintln!("alice={alice}, bob={bob}");
        Ok((alice, bob))
    }

    /// Requesting a follow creates the request arrow but not the follow
    /// arrow, and the request shows up in the target's pending list.
    #[test]
    fn create_fr() -> store::Result<()> {
        Store::test(schema(), |db| {
            let (alice, bob) = make_test_actors(&db)?;
            super::request(&db, alice, bob)?;
            assert!(
                db.exists::<FollowRequest>(alice, bob)?,
                "(alice -> bob) ∈ follow-requested"
            );
            assert!(
                !db.exists::<Follows>(alice, bob)?,
                "(alice -> bob) ∉ follows"
            );
            let pending_for_bob = super::list_pending(&db, bob)?
                .into_iter()
                .map(|fr| fr.origin)
                .collect::<Vec<_>>();
            assert_eq!(pending_for_bob, vec![alice], "bob.pending = {{alice}}");
            OK
        })
    }

    /// Accepting a pending request creates the one-directional follow arrow
    /// and clears the pending list.
    #[test]
    fn accept_fr() -> store::Result<()> {
        Store::test(schema(), |db| {
            let (alice, bob) = make_test_actors(&db)?;
            super::request(&db, alice, bob)?;
            super::accept(&db, alice, bob)?;
            assert!(
                db.exists::<Follows>(alice, bob)?,
                "(alice -> bob) ∈ follows"
            );
            assert!(
                !db.exists::<Follows>(bob, alice)?,
                "(bob -> alice) ∉ follows"
            );
            let pending_for_bob = super::list_pending(&db, bob)?;
            assert!(pending_for_bob.is_empty(), "bob.pending = ∅");
            let followers_of_bob = super::followers_of(&db, bob)?;
            assert_eq!(followers_of_bob, vec![alice], "bob.followers = {{alice}}");
            OK
        })
    }

    /// `followers_of` and `following_of` expose both directions of an
    /// accepted follow relation.
    #[test]
    fn listing_follow_relations() -> store::Result<()> {
        Store::test(schema(), |db| {
            let (alice, bob) = make_test_actors(&db)?;
            super::request(&db, alice, bob)?;
            super::accept(&db, alice, bob)?;
            let followers_of_bob = super::followers_of(&db, bob)?;
            assert_eq!(followers_of_bob, vec![alice], "bob.followers = {{alice}}");
            let following_of_alice = super::following_of(&db, alice)?;
            assert_eq!(following_of_alice, vec![bob], "alice.following = {{bob}}");
            OK
        })
    }
}
}
/// Discard the argument.
fn void<T>(_: T) -> () {}

262
lib/puppy/src/post.rs Normal file
View file

@ -0,0 +1,262 @@
//! Timelines: where you go to view the posts.
use std::ops::RangeBounds;
use chrono::{DateTime, Utc};
use either::Either::{Left, Right};
use fetch::object::{Activity, Note, Object};
use store::{util::IterExt as _, Key, Store, StoreError, Transaction};
use crate::{
actor::{get_signing_key, Actor},
data::{
self, ActivityKind, AuthorOf, Channel, Content, Create, Follows, Id, ObjectKind, Profile,
},
Context,
};
/// A lightweight handle to a post node in the data store.
#[derive(Clone, Copy, Debug)]
pub struct Post {
    /// The post's key in the store.
    pub key: Key,
}
impl From<&str> for Content {
fn from(value: &str) -> Self {
value.to_string().into()
}
}
impl From<String> for Content {
fn from(value: String) -> Self {
Content {
content: Some(value),
warning: None,
}
}
}
/// A piece of content posted by someone.
#[derive(Clone, Debug)]
pub struct PostData {
/// The post's internal ID.
pub id: Post,
/// The actual post contents.
pub content: Content,
/// Metadata about the post's author.
pub author: Author,
}
/// Information about a [`Post`]'s author.
#[derive(Clone, Debug)]
pub struct Author {
/// The identifier of the author.
pub id: Key,
/// The name to display along with the post.
pub display_name: String,
/// An informal identifier for a particular author.
pub handle: String,
}
/// An ordered list of [`Post`]s for viewing.
#[derive(Debug)]
pub struct Timeline {
items: Vec<Item>,
}
/// Discrete events that can be displayed to a user as part of a timeline.
#[derive(Debug)]
enum Item {
Post(PostData),
}
impl Item {
/// Get the timeline item if it is a [`Post`].
pub fn as_post(&self) -> Option<&PostData> {
match self {
Item::Post(ref post) => Some(post),
}
}
}
impl Timeline {
/// Get all the posts in the timeline.
pub fn posts(&self) -> impl Iterator<Item = &PostData> {
self.items.iter().filter_map(|x| x.as_post())
}
}
/// Gets at most `limit` of the posts known to the instance that were inserted within `time_range`.
///
/// # Errors
///
/// Fails with [`StoreError::Missing`] if any post in range lacks an author
/// with a `Profile` mixin.
pub fn fetch_timeline(
    db: &Store,
    time_range: impl RangeBounds<DateTime<Utc>>,
    limit: Option<usize>,
) -> Result<Timeline, StoreError> {
    let posts = db.run(|tx| {
        // Get all post content entries (the argument passed here is a range of chrono datetimes).
        let iter = tx.range::<Content>(time_range);
        // Apply the limit lazily; Left/Right unify the two iterator types.
        let iter = match limit {
            Some(n) => Left(iter.take(n)),
            None => Right(iter),
        };
        // Then, we're gonna map each of them to their author, and get the profile information needed to
        // render the post (mostly display name and handle).
        iter.bind_results(|(key, content)| try {
            // Take the first author. There is nothing stopping a post from having multiple authors, but
            // let's take it one step at a time.
            let (author, Some(Profile { display_name, account_name, .. })) = tx
                .join_on(|a: AuthorOf| a.author, tx.incoming(key))?
                .swap_remove(0)
            else {
                // We expect all posts to have at least one author, so we should complain if there is one
                // that doesn't (for now). For robustness, the `.collect()` down there should be replaced
                // with a strategy where we log a warning instead of failing, but in the current state of
                // the project, failing fast is a good thing.
                return Err(StoreError::Missing);
            };
            Item::Post(PostData {
                id: Post { key },
                author: Author {
                    id: author,
                    handle: format!("@{account_name}"),
                    display_name: display_name.unwrap_or(account_name.0),
                },
                content,
            })
        })
        .collect()
    })?;
    Ok(Timeline { items: posts })
}
/// Create a new post entity.
///
/// Writes the local mixins via [`mixin_post`], then attaches federation
/// metadata: an ActivityPub id alias derived from the new key and a
/// `data::Object` record marking it as a locally owned Note.
pub fn create_local_post(
    cx: &Context,
    author: Key,
    content: impl Into<Content>,
) -> crate::Result<Post> {
    let content = content.into();
    cx.run(|tx| {
        let key = Key::gen();
        // Local stuff
        mixin_post(tx, key, author, content)?;
        // Federation stuff
        let id = Id(cx.mk_url(key));
        tx.add_alias(key, id.clone())?;
        tx.add_mixin(key, data::Object {
            kind: ObjectKind::Notelike("Note".to_string()),
            local: true,
            id,
        })?;
        Ok(Post { key })
    })
}
/// Assumes all objects referenced already exist.
#[tracing::instrument(skip(cx))]
pub fn create_post_from_note(cx: &Context, note: Note) -> crate::Result<Post> {
cx.run(|tx| {
let Some(author) = tx.lookup(Id(note.author))? else {
panic!("needed author to already exist")
};
let key = Key::gen();
tx.add_alias(key, Id(note.id.clone()))?;
tx.create(AuthorOf { object: key, author })?;
tx.add_mixin(key, Content {
content: note.content,
warning: note.summary,
})?;
tx.add_mixin(key, data::Object {
kind: ObjectKind::Notelike(note.kind),
id: Id(note.id),
local: false,
})?;
Ok(Post { key })
})
}
/// Deliver a local post to its author's followers as a `Create` activity.
///
/// Reads everything needed in one transaction, records the `Create` activity
/// so it can be served later, then pushes the activity to every follower
/// inbox.
#[tracing::instrument(skip(cx))]
pub async fn federate_post(cx: &Context, post: Post) -> crate::Result<()> {
    // Obtain all the data we need to construct our activity
    let (Content { content, warning }, url, author, signing_key, followers) = cx.run(|tx| try {
        let Some(AuthorOf { author, .. }) = tx.incoming(post.key).next().transpose()? else {
            panic!("can't federate post without author: {post:?}")
        };
        let signing_key = get_signing_key(tx, Actor { key: author })?;
        let (c, data::Object { id, .. }) = tx.get_mixin_many(post.key)?;
        let targets = tx.join_on::<Channel, _>(|a| a.follower, tx.incoming::<Follows>(author))?;
        (c, id, author, signing_key, targets)
    })?;
    let activity_key = Key::gen();
    // Insert a create activity into the database so we can serve it later
    cx.run(|tx| try {
        let id = Id(cx.mk_url(activity_key));
        tx.add_alias(activity_key, id.clone())?;
        tx.add_mixin(activity_key, data::Object {
            kind: ObjectKind::Activity(ActivityKind::Create),
            local: true,
            id,
        })?;
        tx.create(Create {
            id: activity_key,
            actor: author,
            object: post.key,
        })?;
    })?;
    // Construct an ActivityPub message to send
    let activity = Activity {
        id: cx.mk_url(activity_key),
        actor: signing_key.owner.clone(),
        object: Box::new(Object::Note(Note {
            id: url.to_string(),
            kind: "Note".to_string(),
            author: cx.mk_url(author),
            summary: warning,
            content,
        })),
        kind: "Create".to_string(),
    };
    // Deliver to every follower with a known inbox.
    // NOTE(review): the result of `fetch::deliver` is discarded, so delivery
    // failures are silent — TODO confirm whether they should be logged or
    // retried. Also note the hardcoded test inbox below is still chained in.
    for inbox in followers
        .into_iter()
        .filter_map(|(_, c)| c.map(|t| t.inbox))
        // FIXME: remove this when im done testing
        .chain(["https://crimew.gay/users/riley/inbox".to_string()])
    {
        fetch::deliver(&signing_key, &activity, &inbox).await;
    }
    Ok(())
}
/// Add a post's mixins and predicates to an existing `node`.
///
/// Bumps the author's cached `post_count`, attaches the [`Content`] mixin to
/// `node`, and creates the `AuthorOf` arrow. Returns `node` for chaining.
pub fn mixin_post(
    tx: &Transaction<'_>,
    node: Key,
    author: Key,
    content: impl Into<Content>,
) -> Result<Key, StoreError> {
    // Keep the author's post counter in sync with the new post.
    tx.update::<Profile>(author, |mut profile| {
        profile.post_count += 1;
        profile
    })?;
    tx.add_mixin(node, content.into())?;
    tx.create(AuthorOf { author, object: node })?;
    Ok(node)
}
pub fn list_posts_by_author(db: &Store, author: Key) -> Result<Vec<(Key, Content)>, StoreError> {
db.run(|tx| {
let posts = tx
.join_on(|a: AuthorOf| a.object, tx.outgoing(author))?
.into_iter()
.filter_map(|(k, opt)| try { (k, opt?) })
.collect();
Ok(posts)
})
}

View file

@ -45,7 +45,7 @@ use super::{
types::{ArrowSpec, DataType},
Batch, Store, Transaction,
};
use crate::{util::IterExt as _, Key, Result};
use crate::{internal::Context as _, util::IterExt as _, Key, Result};
/// A directed edge.
///
@ -122,6 +122,18 @@ impl Store {
{
op::between::<A>(self, a, b).map_ok(A::from)
}
/// Construct the arrow from its identifier.
///
/// Looks up the multi-edge header stored under `key`; returns `None` when no
/// multi-arrow with that identity exists. The stored value is the
/// concatenated origin/target key pair, which `Key::split` separates again.
pub fn get_arrow<A>(&self, key: Key) -> Result<Option<A>>
where
    A: Arrow<Kind = Multi>,
{
    let arrow = self
        .open(crate::types::MULTIEDGE_HEADERS)
        .get(key)?
        .map(|v| Key::split(v.as_ref()))
        .map(|(origin, target)| A::from(Multi { origin, target, identity: key }));
    Ok(arrow)
}
}
impl Transaction<'_> {
@ -205,6 +217,18 @@ impl Transaction<'_> {
{
op::between::<A>(self, a, b).map_ok(A::from)
}
/// Construct the arrow from its identifier.
///
/// Transaction-scoped twin of `Store::get_arrow`: reads the multi-edge header
/// under `key` and rebuilds the typed arrow, or `None` if it does not exist.
pub fn get_arrow<A>(&self, key: Key) -> Result<Option<A>>
where
    A: Arrow<Kind = Multi>,
{
    let arrow = self
        .open(crate::types::MULTIEDGE_HEADERS)
        .get(key)?
        .map(|v| Key::split(v.as_ref()))
        .map(|(origin, target)| A::from(Multi { origin, target, identity: key }));
    Ok(arrow)
}
}
impl Batch {

View file

@ -5,7 +5,7 @@ use std::sync::Arc;
use rocksdb::{BoundColumnFamily, IteratorMode};
pub use self::cx::{Context, Query, Write};
use crate::{util::IterExt as _, Error, Result};
use crate::{util::IterExt as _, Result, StoreError};
/// An internal interface to a specific keyspace that exposes basic hashmap-esque operations
/// on that keyspace, generic over whether the source of the data is a [`Transaction`] or a
@ -41,13 +41,13 @@ where
Ok((ref k, _)) => k.starts_with(&t),
_ => true,
})
.map_err(Error::Internal)
.map_err(StoreError::Internal)
}
/// List all pairs in the keyspace.
pub fn list(&self) -> impl Iterator<Item = Result<(Box<[u8]>, Box<[u8]>)>> + 'db {
self.context
.full_iterator(&self.cf, IteratorMode::Start)
.map_err(Error::Internal)
.map_err(StoreError::Internal)
}
/// Execute a range scan
pub fn range<const N: usize>(
@ -68,7 +68,7 @@ where
Ok((ref k, _)) => k.as_ref() < &upper,
_ => true,
})
.map_err(Error::Internal)
.map_err(StoreError::Internal)
}
/// Join all the keys to their values in this keyspace.
///
@ -106,7 +106,7 @@ mod cx {
};
use super::Keyspace;
use crate::{util::IterExt as _, Backend, Batch, Error, Result, Store, Transaction, OK};
use crate::{util::IterExt as _, Backend, Batch, Result, Store, StoreError, Transaction, OK};
/// A context for executing database operations.
pub trait Context {
@ -173,7 +173,9 @@ mod cx {
cf: &impl AsColumnFamilyRef,
key: impl AsRef<[u8]>,
) -> Result<Option<DBPinnableSlice<'a>>> {
self.inner.get_pinned_cf(cf, key).map_err(Error::Internal)
self.inner
.get_pinned_cf(cf, key)
.map_err(StoreError::Internal)
}
fn prefix_iterator<'a>(
@ -199,7 +201,7 @@ mod cx {
self.inner
.multi_get_cf(keys)
.into_iter()
.map_err(Error::Internal)
.map_err(StoreError::Internal)
.collect()
}
}
@ -222,7 +224,9 @@ mod cx {
cf: &impl AsColumnFamilyRef,
key: impl AsRef<[u8]>,
) -> Result<Option<DBPinnableSlice<'a>>> {
self.inner.get_pinned_cf(cf, key).map_err(Error::Internal)
self.inner
.get_pinned_cf(cf, key)
.map_err(StoreError::Internal)
}
fn prefix_iterator<'a>(
@ -248,14 +252,14 @@ mod cx {
self.inner
.multi_get_cf(keys)
.into_iter()
.map_err(Error::Internal)
.map_err(StoreError::Internal)
.collect()
}
}
impl Write for Transaction<'_> {
fn delete(&self, cf: &impl AsColumnFamilyRef, key: impl AsRef<[u8]>) -> Result<()> {
self.inner.delete_cf(cf, key).map_err(Error::Internal)
self.inner.delete_cf(cf, key).map_err(StoreError::Internal)
}
fn put(
@ -264,7 +268,9 @@ mod cx {
key: impl AsRef<[u8]>,
val: impl AsRef<[u8]>,
) -> Result<()> {
self.inner.put_cf(cf, key, val).map_err(Error::Internal)
self.inner
.put_cf(cf, key, val)
.map_err(StoreError::Internal)
}
}

View file

@ -1,35 +1,23 @@
use std::fmt::{Debug, Display};
use std::{
fmt::{Debug, Display},
str::FromStr,
};
use chrono::{DateTime, Utc};
use ulid::Ulid;
use crate::arrow::{ArrowKind, Basic, Multi};
use crate::StoreError;
/// A unique identifier for vertices in the database.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Key(pub(crate) [u8; 16]);
impl Display for Key {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Display::fmt(&Ulid::from_bytes(self.0), f)
}
}
impl Debug for Key {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Key({})", Ulid::from_bytes(self.0))
}
}
impl Key {
/// Generate a new node identifier.
pub fn gen() -> Key {
Key(ulid::Ulid::new().to_bytes())
}
pub(crate) fn from_slice(buf: &[u8]) -> Key {
let mut key = [0; 16];
key.copy_from_slice(&buf);
Key(key)
}
/// Get the time at which this key was generated.
pub fn timestamp(self) -> DateTime<Utc> {
let ms = self.to_ulid().timestamp_ms();
DateTime::from_timestamp_millis(ms as i64).unwrap()
@ -41,11 +29,17 @@ impl Key {
buf[16..].copy_from_slice(&other.0);
buf
}
pub(crate) fn from_slice(buf: &[u8]) -> Key {
let mut key = [0; 16];
key.copy_from_slice(&buf);
Key(key)
}
pub(crate) fn split(buf: &[u8]) -> (Key, Key) {
let tail = Key::from_slice(&buf[..16]);
let head = Key::from_slice(&buf[16..]);
(tail, head)
}
// TODO: This doesn't belong here lmao
pub(crate) fn range(ts: DateTime<Utc>) -> ([u8; 16], [u8; 16]) {
let min = Ulid::from_parts(ts.timestamp_millis() as u64, u128::MIN).to_bytes();
let max = Ulid::from_parts(ts.timestamp_millis() as u64, u128::MAX).to_bytes();
@ -61,3 +55,25 @@ impl AsRef<[u8]> for Key {
&self.0
}
}
impl FromStr for Key {
type Err = crate::StoreError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
s.parse::<Ulid>()
.map(|x| Key(x.to_bytes()))
.map_err(|err| StoreError::BadKey(err))
}
}
impl Display for Key {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Display::fmt(&self.to_ulid(), f)
}
}
impl Debug for Key {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Key({})", Ulid::from_bytes(self.0))
}
}

View file

@ -13,9 +13,9 @@
//! There are three interfaces to the store: the read-only [`Store`], the write-only [`Batch`] and the [`Transaction`],
//! which allows both reads and writes.
use std::{cell::RefCell, path::Path, sync::Arc};
use std::{cell::RefCell, future::Future, path::Path, sync::Arc};
use derive_more::From;
use derive_more::{From, Display};
use rocksdb::{Options, TransactionDBOptions, WriteBatchWithTransaction};
use types::Schema;
@ -78,7 +78,7 @@ impl Store {
/// changes are committed.
pub fn run<T, E>(&self, f: impl FnOnce(&Transaction<'_>) -> Result<T, E>) -> Result<T, E>
where
E: From<Error>,
E: From<StoreError>,
{
let tx = Transaction {
inner: self.inner.transaction(),
@ -90,7 +90,7 @@ impl Store {
} else {
tx.inner.commit()
} {
return Err(E::from(Error::Internal(e)));
return Err(E::from(StoreError::Internal(e)));
}
r
}
@ -123,7 +123,7 @@ impl Store {
/// Delete the main data store in `state_dir` if it exists.
pub fn nuke(state_dir: impl AsRef<Path>) -> Result<()> {
Backend::destroy(&Options::default(), state_dir.as_ref().join(STORE_NAME))
.map_err(Error::Internal)
.map_err(StoreError::Internal)
}
/// Open a store that lives until `f` returns, for testing.
pub fn test<T>(schema: Schema, f: impl FnOnce(Store) -> T) -> T {
@ -136,11 +136,11 @@ impl Store {
pub const OK: Result<()> = Ok(());
/// Results from this component.
pub type Result<T, E = Error> = std::result::Result<T, E>;
pub type Result<T, E = StoreError> = std::result::Result<T, E>;
/// Errors from the data store.
#[derive(From, Debug)]
pub enum Error {
#[derive(From, Display, Debug)]
pub enum StoreError {
/// The requested value was expected to exist in a particular keyspace, but does not actually
/// exist there. This can occur on updates for example.
Missing,
@ -151,6 +151,8 @@ pub enum Error {
/// Returned if there is a conflict; for example, if the uniqueness property of an alias would
/// be violated by inserting one.
Conflict,
/// A node key couldn't be decoded.
BadKey(ulid::DecodeError),
/// Signals a failure related to the data store's backend.
Internal(rocksdb::Error),
Encoding(bincode::error::EncodeError),

View file

@ -7,7 +7,7 @@ use super::{
types::{DataType, MixinSpec},
Batch, Store, Transaction,
};
use crate::{util::IterExt as _, Error, Key, Result};
use crate::{util::IterExt as _, Key, Result, StoreError};
/// Mixins are the simplest pieces of data in the store.
pub trait Mixin: DataType<Type = MixinSpec> + Encode + Decode {}
@ -53,6 +53,13 @@ impl Store {
{
op::join_on(self, iter)
}
/// Get multiple mixins associated with the same key.
pub fn get_mixin_many<T>(&self, key: Key) -> Result<T>
where
T: GetMany,
{
T::get(self, key)
}
}
impl Transaction<'_> {
@ -88,7 +95,7 @@ impl Transaction<'_> {
M: Mixin,
{
if op::has_mixin::<M>(self, node)? {
return Err(Error::Conflict);
return Err(StoreError::Conflict);
} else {
op::add_mixin::<M>(self, node, mixin)
}
@ -121,6 +128,13 @@ impl Transaction<'_> {
{
op::join_on(self, iter.into_iter().map_ok(f))
}
/// Get multiple mixins associated with the same key.
pub fn get_mixin_many<T>(&self, key: Key) -> Result<T>
where
T: GetMany,
{
T::get(self, key)
}
}
impl Batch {
@ -136,6 +150,12 @@ impl Batch {
}
}
/// Getting tuples of stuff.
pub trait GetMany: Sized {
#[doc(hidden)]
fn get(cx: &impl crate::internal::Query, key: Key) -> Result<Self>;
}
mod op {
use std::ops::{Bound, RangeBounds};
@ -143,7 +163,64 @@ mod op {
use either::Either;
use super::Mixin;
use crate::{internal::*, util::IterExt as _, Error, Key, Result};
use crate::{internal::*, util::IterExt as _, Key, Result, StoreError};
impl<A, B> super::GetMany for (A, B)
where
A: Mixin,
B: Mixin,
{
fn get(cx: &impl Query, key: Key) -> Result<Self> {
let ksps = [cx.open(A::SPEC.keyspace).cf, cx.open(B::SPEC.keyspace).cf];
let mut vec = cx.multi_get(ksps.iter().map(|c| (&*c, key)));
let b = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let a = vec.pop().ok_or(StoreError::Missing)??.unwrap();
Ok((decode(a)?, decode(b)?))
}
}
impl<A, B, C> super::GetMany for (A, B, C)
where
A: Mixin,
B: Mixin,
C: Mixin,
{
fn get(cx: &impl Query, key: Key) -> Result<Self> {
let ksps = [
cx.open(A::SPEC.keyspace).cf,
cx.open(B::SPEC.keyspace).cf,
cx.open(C::SPEC.keyspace).cf,
];
let mut vec = cx.multi_get(ksps.iter().map(|c| (&*c, key)));
let c = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let b = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let a = vec.pop().ok_or(StoreError::Missing)??.unwrap();
Ok((decode(a)?, decode(b)?, decode(c)?))
}
}
impl<A, B, C, D> super::GetMany for (A, B, C, D)
where
A: Mixin,
B: Mixin,
C: Mixin,
D: Mixin,
{
fn get(cx: &impl Query, key: Key) -> Result<Self> {
let ksps = [
cx.open(A::SPEC.keyspace).cf,
cx.open(B::SPEC.keyspace).cf,
cx.open(C::SPEC.keyspace).cf,
cx.open(D::SPEC.keyspace).cf,
];
let mut vec = cx.multi_get(ksps.iter().map(|c| (&*c, key)));
let d = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let c = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let b = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let a = vec.pop().ok_or(StoreError::Missing)??.unwrap();
Ok((decode(a)?, decode(b)?, decode(c)?, decode(d)?))
}
}
pub fn update<M>(
cx: &(impl Query + Write),
@ -165,7 +242,7 @@ mod op {
// [^1]: https://github.com/facebook/rocksdb/blob/9d37408f9af15c7a1ae42f9b94d06b27d98a011a/include/rocksdb/options.h#L128
let tree = cx.open(M::SPEC.keyspace);
match tree.get(node.as_ref())? {
None => Err(Error::Missing),
None => Err(StoreError::Missing),
Some(buf) => {
let new = decode(buf).map(update).and_then(encode)?;
tree.set(node, new)
@ -237,7 +314,7 @@ mod op {
}
pub(super) fn encode(data: impl bincode::Encode) -> Result<Vec<u8>> {
bincode::encode_to_vec(data, bincode::config::standard()).map_err(Error::Encoding)
bincode::encode_to_vec(data, bincode::config::standard()).map_err(StoreError::Encoding)
}
pub(super) fn decode<T>(data: impl AsRef<[u8]>) -> Result<T>
@ -245,7 +322,7 @@ mod op {
T: bincode::Decode,
{
bincode::decode_from_slice(data.as_ref(), bincode::config::standard())
.map_err(Error::Decoding)
.map_err(StoreError::Decoding)
.map(|(v, _)| v)
}
}

View file

@ -56,6 +56,14 @@ pub trait IterExt: Iterator + Sized {
{
self.filter_map(move |r| r.and_then(|x| f(x)).transpose())
}
/// Like [`Iterator::find`].
fn find_ok<'a, I, E>(mut self, mut f: impl FnMut(&I) -> bool) -> Result<Option<I>, E>
where
Self: Iterator<Item = Result<I, E>> + 'a,
{
self.find(move |r| r.as_ref().is_ok_and(|x| f(x)))
.transpose()
}
}
impl<I> IterExt for I where I: Iterator {}

View file

@ -1,6 +1,6 @@
unstable_features = true
overflow_delimited_expr = true
group_imports = "StdExternalCrate"
use_field_init_shorthand = true
reorder_modules = false
reorder_imports = false
struct_lit_width = 30