god forsaken http signatures

Riley Apeldoorn 2024-04-27 22:01:28 +02:00
parent bb26926edb
commit b91da3c4ab
20 changed files with 1606 additions and 550 deletions

Cargo.lock generated
View file

@ -188,6 +188,12 @@ version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51"
[[package]]
name = "base64ct"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
[[package]]
name = "bincode"
version = "2.0.0-rc.3"
@ -239,12 +245,27 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "bumpalo"
version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.6.0"
@ -381,6 +402,12 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "convert_case"
version = "0.4.0"
@ -403,6 +430,25 @@ version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f"
[[package]]
name = "cpufeatures"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
dependencies = [
"libc",
]
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "csv"
version = "1.3.0"
@ -424,6 +470,17 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "der"
version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0"
dependencies = [
"const-oid",
"pem-rfc7468",
"zeroize",
]
[[package]]
name = "derive_more"
version = "0.99.17"
@ -437,6 +494,17 @@ dependencies = [
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"const-oid",
"crypto-common",
]
[[package]]
name = "either"
version = "1.11.0"
@ -478,8 +546,12 @@ checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984"
name = "fetch" name = "fetch"
version = "0.0.0" version = "0.0.0"
dependencies = [ dependencies = [
"base64 0.22.0",
"chrono",
"derive_more", "derive_more",
"http",
"reqwest", "reqwest",
"rsa",
"serde_json", "serde_json",
"sigh", "sigh",
] ]
@ -553,6 +625,16 @@ dependencies = [
"pin-utils", "pin-utils",
] ]
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getrandom"
version = "0.2.14"
@ -805,6 +887,9 @@ name = "lazy_static"
version = "1.4.0" version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
dependencies = [
"spin",
]
[[package]]
name = "lazycell"
@ -828,6 +913,12 @@ dependencies = [
"windows-targets 0.52.5", "windows-targets 0.52.5",
] ]
[[package]]
name = "libm"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "librocksdb-sys"
version = "0.17.0+9.0.0"
@ -968,6 +1059,43 @@ dependencies = [
"minimal-lexical", "minimal-lexical",
] ]
[[package]]
name = "num-bigint-dig"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151"
dependencies = [
"byteorder",
"lazy_static",
"libm",
"num-integer",
"num-iter",
"num-traits",
"rand",
"smallvec",
"zeroize",
]
[[package]]
name = "num-integer"
version = "0.1.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d869c01cc0c455284163fd0092f1f93835385ccab5a98a0dcc497b2f8bf055a9"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.18"
@ -975,6 +1103,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"libm",
] ]
[[package]] [[package]]
@ -1069,6 +1198,15 @@ dependencies = [
"windows-targets 0.48.5", "windows-targets 0.48.5",
] ]
[[package]]
name = "pem-rfc7468"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412"
dependencies = [
"base64ct",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
@ -1107,6 +1245,27 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkcs1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f"
dependencies = [
"der",
"pkcs8",
"spki",
]
[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"spki",
]
[[package]]
name = "pkg-config"
version = "0.3.30"
@ -1277,6 +1436,27 @@ dependencies = [
"librocksdb-sys", "librocksdb-sys",
] ]
[[package]]
name = "rsa"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc"
dependencies = [
"const-oid",
"digest",
"num-bigint-dig",
"num-integer",
"num-traits",
"pkcs1",
"pkcs8",
"rand_core",
"sha2",
"signature",
"spki",
"subtle",
"zeroize",
]
[[package]]
name = "rustc-demangle"
version = "0.1.23"
@ -1446,6 +1626,17 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "shlex"
version = "1.3.0"
@ -1474,6 +1665,16 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"digest",
"rand_core",
]
[[package]]
name = "slab"
version = "0.4.9"
@ -1499,6 +1700,22 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
]
[[package]]
name = "store"
version = "0.0.0"
@ -1519,6 +1736,12 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "subtle"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "1.0.109"
@ -1738,6 +1961,12 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ulid"
version = "1.1.2"
@ -1799,6 +2028,12 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "virtue"
version = "0.0.13"
@ -2095,6 +2330,12 @@ dependencies = [
"windows-sys 0.48.0", "windows-sys 0.48.0",
] ]
[[package]]
name = "zeroize"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
[[package]]
name = "zstd-sys"
version = "2.0.10+zstd.1.5.6"

View file

@ -1,38 +1,38 @@
#![feature(iterator_try_collect)]
use puppy::{ use puppy::{
mixin_ap_actor, actor::Actor,
model::{schema, Bite, FollowRequest, Follows, Profile, Username}, data::{Bite, Profile},
post::Author, post::Author,
store::{self, Error, OK}, store::util::IterExt as _,
Key, Store, Context,
}; };
fn main() -> store::Result<()> { fn main() -> puppy::Result<()> {
// Store::nuke(".state")?; // puppy::store::Store::nuke(".state")?;
let db = Store::open(".state", schema())?; puppy::context(|cx| {
let db = cx.store();
println!("creating actors"); println!("creating actors");
let riley = get_or_create_actor(&db, "riley")?; let riley = get_or_create_actor(&cx, "riley")?;
let linen = get_or_create_actor(&db, "linen")?; let linen = get_or_create_actor(&cx, "linen")?;
// db.run(|tx| {
// mixin_ap_actor(tx, riley, "test.pup.riley.lgbt")?;
// mixin_ap_actor(tx, linen, "test.pup.riley.lgbt")?;
// OK
// })?;
if true { if true {
println!("creating posts"); println!("creating posts");
puppy::post::create_post(&db, riley, "@linen <3")?; puppy::post::create_post(&cx, riley.key, "@linen <3")?;
puppy::post::create_post(&db, linen, "@riley <3")?; puppy::post::create_post(&cx, linen.key, "@riley <3")?;
} }
if true { if true {
println!("making riley follow linen"); println!("making riley follow linen");
if !db.exists::<Follows>(riley, linen)? { if !riley.follows(&cx, &linen)? {
println!("follow relation does not exist yet"); println!("follow relation does not exist yet");
if !db.exists::<FollowRequest>(riley, linen)? { if let Some(req) = linen
println!("no pending follow request; creating"); .pending_requests(&cx)
puppy::follows::request(&db, riley, linen)?; .find_ok(|r| r.origin == riley.key)?
} else { {
println!("accepting the pending follow request"); println!("accepting the pending follow request");
puppy::follows::accept(&db, riley, linen)?; linen.do_accept_request(&cx, req)?;
} else {
println!("no pending follow request; creating");
riley.do_follow_request(&cx, &linen)?;
} }
} else { } else {
println!("riley already follows linen"); println!("riley already follows linen");
@ -43,47 +43,47 @@ fn main() -> store::Result<()> {
for post in puppy::post::fetch_timeline(&db, .., None)?.posts() { for post in puppy::post::fetch_timeline(&db, .., None)?.posts() {
let Author { ref handle, .. } = post.author; let Author { ref handle, .. } = post.author;
let content = post.content.content.as_ref().unwrap(); let content = post.content.content.as_ref().unwrap();
println!("- {} by {handle}:\n{content}", post.id) println!("- {:?} by {handle}:\n{content}", post.id)
} }
println!("\nLinen's followers:"); println!("\nLinen's followers:");
for id in puppy::follows::followers_of(&db, linen)? { for id in linen.followers(&cx).try_collect::<Vec<_>>()? {
let Profile { account_name, .. } = db.get_mixin(id)?.unwrap(); let Profile { account_name, .. } = db.get_mixin(id)?.unwrap();
println!("- @{account_name} ({id})"); println!("- @{account_name} ({id})");
} }
println!("\nRiley's following:"); println!("\nRiley's following:");
for id in puppy::follows::following_of(&db, riley)? { for id in riley.following(&cx).try_collect::<Vec<_>>()? {
let Profile { account_name, .. } = db.get_mixin(id)?.unwrap(); let Profile { account_name, .. } = db.get_mixin(id)?.unwrap();
println!("- @{account_name} ({id})"); println!("- @{account_name} ({id})");
} }
if false { if false {
println!("Biting riley"); println!("Biting riley");
puppy::bites::bite_actor(&db, linen, riley).unwrap(); linen.do_bite(&cx, &riley)?;
for Bite { id, biter, .. } in puppy::bites::bites_on(&db, riley).unwrap() { for Bite { id, biter, .. } in riley.bites_suffered(&cx).try_collect::<Vec<_>>()? {
let Profile { account_name, .. } = db.get_mixin(biter)?.unwrap(); let Profile { account_name, .. } = db.get_mixin(biter)?.unwrap();
println!("riley was bitten by @{account_name} at {}", id.timestamp()); println!("riley was bitten by @{account_name} at {}", id.timestamp());
} }
} }
store::OK Ok(())
})
} }
fn get_or_create_actor(db: &Store, username: &str) -> Result<Key, Error> { fn get_or_create_actor(cx: &Context<'_>, username: &str) -> puppy::Result<Actor> {
let user = db.lookup(Username(username.to_string())); let user = Actor::by_username(cx, username)?;
match user { match user {
Ok(Some(key)) => { Some(key) => {
println!("found '{username}' ({key})"); println!("found '{username}' ({key:?})");
Ok(key) Ok(key)
} }
Ok(None) => { None => {
println!("'{username}' doesn't exist yet, creating"); println!("'{username}' doesn't exist yet, creating");
let r = puppy::create_local_actor(&db, username, "test.pup.riley.lgbt"); let r = puppy::actor::create_local(cx, username);
if let Ok(ref key) = r { if let Ok(ref key) = r {
println!("created '{username}' with key {key}"); println!("created '{username}' with key {key:?}");
} }
r r
} }
Err(e) => Err(e),
} }
} }

View file

@ -1,16 +1,78 @@
use axum::{extract::Path, routing::get, Json, Router}; #![feature(try_blocks)]
use puppy::{get_local_ap_object, model::schema, Key, Store}; use std::collections::HashMap;
use axum::{
extract::{Path, Query},
response::{AppendHeaders, IntoResponse as _},
routing::get,
Json, Router,
};
use puppy::{
actor::Actor,
context,
data::{PrivateKey, PublicKey},
get_local_ap_object, Key,
};
use serde_json::json;
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let db = Store::open(".state", schema()).unwrap(); let app = Router::new()
let app = Router::new().route( .route(
"/o/:ulid", "/o/:ulid",
get(|Path(raw_object_id): Path<String>| async move { get(
|Path(raw_object_id): Path<String>, req: axum::extract::Request| async move {
eprintln!("req: {req:?}");
context::<_, puppy::Error>(|cx| try {
let object_id = raw_object_id.parse::<Key>().unwrap(); let object_id = raw_object_id.parse::<Key>().unwrap();
let obj = get_local_ap_object(&db, object_id).unwrap().to_json_ld(); let obj = get_local_ap_object(&cx, object_id).unwrap().to_json_ld();
Json(obj) (
AppendHeaders([("content-type", "application/activity+json")]),
Json(obj).into_response(),
)
})
.unwrap()
},
),
)
.route(
"/proxy",
get(|Query(q): Query<HashMap<String, String>>| async move {
let (key_pem, key_id) = context::<_, puppy::Error>(|cx| try {
let actor = Actor::by_username(&cx, "riley")?.unwrap();
let (private, public) = cx
.store()
.get_mixin_many::<(PrivateKey, PublicKey)>(actor.key)?;
(private.key_pem, public.key_id)
})
.unwrap();
puppy::fetch::resolve(&key_pem, &key_id, &q["target"])
.await
.unwrap()
}), }),
)
.route(
"/.well-known/webfinger",
get(
|Query(q): Query<HashMap<String, String>>, req: axum::extract::Request| async move {
eprintln!("req: {req:?}");
let Some(rest) = q["resource"].strip_prefix("acct:") else {
panic!("{q:?}");
};
if rest == "riley@test.piss-on.me" {
Json(json!({
"subject": "acct:riley@test.piss-on.me",
"links": [{
"rel": "self",
"type": "application/activity+json",
"href": "https://test.piss-on.me/o/01HWG4BQJR23TWF12KVPBBP1HG",
}],
}))
} else {
panic!("{rest:?}")
}
},
),
); );
let sock = tokio::net::TcpListener::bind("0.0.0.0:1312").await.unwrap(); let sock = tokio::net::TcpListener::bind("0.0.0.0:1312").await.unwrap();
axum::serve(sock, app).await.unwrap(); axum::serve(sock, app).await.unwrap();

View file

@ -10,3 +10,7 @@ reqwest = "*"
sigh = "*"
serde_json = "*"
derive_more = "*"
http = "*"
chrono = "*"
base64 = "*"
rsa = { version = "*", features = ["sha2"] }

View file

@ -1,36 +1,112 @@
#![feature(iter_intersperse)]
use chrono::Utc;
use derive_more::{Display, From, Into}; use derive_more::{Display, From, Into};
use reqwest::Body;
use serde_json::{json, Value}; use serde_json::{json, Value};
use sigh::{
alg::{Hs2019, RsaSha256},
Key as _, SigningConfig,
};
use crate::keys::{HS2019, RSA_SHA256};
pub enum Object { pub enum Object {
Activity(Activity), Activity(Activity),
Actor(Actor), Actor(Actor),
Object { id: Id }, Object {
id: Id,
kind: String,
content: Option<String>,
summary: Option<String>,
},
} }
impl Object { impl Object {
pub fn to_json(self) -> Value { pub fn id(&self) -> &Id {
match self { match self {
Object::Activity(_) => todo!(), Object::Activity(a) => &a.id,
Object::Actor(a) => json!({ Object::Actor(a) => &a.id,
"id": a.id.to_string(), Object::Object { id, .. } => id,
"inbox": a.inbox.to_string(),
"preferredUsername": a.account_name,
"name": a.display_name,
}),
Object::Object { id } => json!({
"id": id.to_string()
}),
} }
} }
pub fn to_json_ld(self) -> Value { pub fn to_json_ld(&self) -> Value {
let mut json = self.to_json(); match self {
json["@context"] = json!([]); Object::Activity(a) => a.to_json_ld(),
json Object::Actor(a) => a.to_json_ld(),
Object::Object { id, kind, content, summary } => json!({
"id": id.to_string(),
"type": kind,
"content": content,
"summary": summary,
}),
}
} }
} }
pub struct Activity { pub struct Activity<T = String> {
pub id: Id, pub id: Id,
pub actor: Id,
pub object: Box<Object>,
pub kind: T,
}
impl<K> Activity<K>
where
K: ToString,
{
pub fn to_json_ld(&self) -> Value {
json!({
"@context": [
"https://www.w3.org/ns/activitystreams",
{ "Bite": "https://ns.mia.jetzt/as#Bite" },
],
"id": self.id.to_string(),
"actor": self.actor.to_string(),
"type": self.kind.to_string(),
"object": self.object.to_json_ld()
})
}
}
/// Deliver an [`Activity`] to a particular `inbox`.
pub async fn deliver(private_key_pem: &str, key_id: &str, activity: Activity, inbox: &str) -> () {
let body = serde_json::to_string_pretty(&activity.to_json_ld()).unwrap();
let mut req = http::Request::post(inbox)
.header("content-type", "application/activity+json")
.header("user-agent", "ActivityPuppy/0.0.0 (delivery)")
.header("date", Utc::now().to_rfc3339())
.body(body)
.unwrap();
let key = sigh::PrivateKey::from_pem(private_key_pem.as_bytes()).unwrap();
SigningConfig::new(RsaSha256, &key, key_id)
.sign(&mut req)
.unwrap();
reqwest::Client::new()
.execute(req.try_into().unwrap())
.await
.unwrap();
}
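// Illustrative only: a sketch of what a delivery call might look like from the caller's
// side. The names `author_url`, `post_url`, `inbox_url`, `private_key_pem` and `key_id`
// are assumptions for the example, not part of this commit.
//
//     let activity = Activity {
//         id: Id::from(format!("{author_url}#create")),
//         actor: Id::from(author_url.clone()),
//         object: Box::new(Object::Object {
//             id: Id::from(post_url),
//             kind: "Note".to_string(),
//             content: Some("hello fedi".to_string()),
//             summary: None,
//         }),
//         kind: "Create".to_string(),
//     };
//     deliver(&private_key_pem, &key_id, activity, &inbox_url).await;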
pub async fn resolve(private_key_pem: &str, key_id: &str, target: &str) -> reqwest::Result<String> {
const EMPTY_DIGEST: &str = "sha-256=47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";
// Sun, 06 Nov 1994 08:49:37 GMT
const RFC_822: &str = "%a, %d %b %Y %H:%M:%S GMT";
let date = Utc::now().format(RFC_822).to_string();
let mut req = http::Request::get(target)
.header("accept", "application/json")
.header("user-agent", "ActivityPuppy/0.0.0 (resolver)")
.header("date", date)
// Empty body
.body(Body::default())
.unwrap();
// hs2019 works with masto
keys::sign(&mut req, HS2019, private_key_pem, key_id).unwrap();
reqwest::Client::new()
.execute(req.try_into().unwrap())
.await?
.text()
.await
} }
/// An actor is an entity capable of producing Takes.
@ -43,7 +119,238 @@ pub struct Actor {
pub account_name: String,
/// Note: this maps to the `name` property.
pub display_name: Option<String>,
/// Public counterpart to the signing key used to sign activities
/// generated by the actor.
pub public_key: PublicKey,
}
impl Actor {
pub fn to_json_ld(&self) -> Value {
json!({
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
],
"id": self.id.to_string(),
"inbox": self.inbox.to_string(),
"outbox": self.inbox.to_string().replace("inbox", "outbox"),
"preferredUsername": self.account_name,
"name": self.display_name,
"type": "Person",
"publicKey": {
"id": self.public_key.key_id.to_string(),
"publicKeyPem": self.public_key.public_key_pem,
"owner": self.id.to_string(),
}
})
}
}
pub struct PublicKey {
pub key_id: Id,
pub owner: Id,
pub public_key_pem: String,
}
#[derive(Display, From, Into, Debug, Clone)]
pub struct Id(String);
pub mod keys {
//! Cryptography and such.
use chrono::{TimeDelta, Utc};
use http::{HeaderValue, Method, Request};
use reqwest::Body;
use rsa::{pkcs1v15::SigningKey, pkcs8::DecodePrivateKey as _, sha2};
use sigh::{
Key as _,
alg::{Algorithm as _, RsaSha256},
};
pub struct RawPrivateKey {
inner: sigh::PrivateKey,
}
impl RawPrivateKey {
pub fn to_pem(&self) -> String {
self.inner.to_pem().unwrap()
}
}
pub struct RawPublicKey {
inner: sigh::PublicKey,
}
impl RawPublicKey {
pub fn to_pem(&self) -> String {
self.inner.to_pem().unwrap()
}
}
/// Generate a private and public keypair.
pub fn gen_keypair() -> (RawPrivateKey, RawPublicKey) {
let (private, public) = RsaSha256.generate_keys().unwrap();
(RawPrivateKey { inner: private }, RawPublicKey {
inner: public,
})
}
/// The algorithm to sign with.
#[derive(PartialEq, Debug)]
pub struct Algorithm(&'static str);
pub const RSA_SHA256: Algorithm = Algorithm("rsa-sha256");
pub const HS2019: Algorithm = Algorithm("hs2019");
struct Signature<'k> {
key_id: &'k str,
alg: Algorithm,
components: Vec<Component>,
created: String,
expires: String,
/// Base64-encoded signature.
signature_encoded: String,
}
/// Sign `req`.
pub fn sign(
req: &mut Request<Body>,
alg: Algorithm,
private_key: &str,
key_id: &str,
) -> Result<(), String> {
// NOTE: Rough translation of https://nest.pijul.org/ez/ActivityPuppy:main/XXPS2UOWSWD2Y.ZJAAA
let pieces = gather_pieces(&req)?;
let signing_string = make_signing_string(&pieces);
let signature = create(&signing_string, alg, private_key, key_id, pieces)?;
req.headers_mut().insert("signature", render(signature));
Ok(())
}
/// Gather all the bits from a `Request`.
fn gather_pieces(req: &Request<Body>) -> Result<Vec<Component>, &'static str> {
let target = {
let method = req.method().as_str().to_lowercase();
let path = req.uri().path();
format!("{method} {path}")
};
let created = Utc::now();
let expires = created + TimeDelta::minutes(5);
let mut components = vec![
("(request-target)", target),
("(created)", created.timestamp().to_string()),
("(expires)", expires.timestamp().to_string()),
("host", req.uri().host().unwrap().to_owned()),
];
if [Method::POST, Method::PUT, Method::PATCH].contains(req.method()) {
let digest = req
.headers()
.get("digest")
.map(|v| v.to_str().unwrap().to_string())
.ok_or("digest header is required for POST, PUT and PATCH requests")?;
components.push(("digest", digest));
}
Ok(components)
}
fn make_signing_string(pieces: &[Component]) -> String {
pieces
.iter()
.map(|(k, v)| format!("{k}: {v}"))
.intersperse("\n".to_string())
.collect()
}
type Component = (&'static str, String);
/// Sign the `signing_string`.
fn create<'s>(
signing_string: &'s str,
alg: Algorithm,
key_pem: &str,
key_url: &'s str,
components: Vec<Component>,
) -> Result<Signature<'s>, String> {
let created = components
.iter()
.find_map(|(k, v)| if *k == "(created)" { Some(v) } else { None })
.cloned()
.unwrap();
let expires = components
.iter()
.find_map(|(k, v)| if *k == "(expires)" { Some(v) } else { None })
.cloned()
.unwrap();
// Regardless of the requested algorithm, we produce RSA-SHA256 signatures, because
// that is broadly compatible with everything.
let signature = sign_rsa_sha256(signing_string, key_pem)?;
let encoded = base64(signature);
Ok(Signature {
signature_encoded: encoded,
key_id: key_url,
components,
created,
expires,
alg,
})
}
/// An `rsa-sha256` signature is created using an RSA key and a SHA-256 hash.
fn sign_rsa_sha256(signing_string: &str, key_pem: &str) -> Result<Vec<u8>, String> {
use rsa::{
signature::{Signer as _, SignatureEncoding as _},
RsaPrivateKey,
};
let rsa = RsaPrivateKey::from_pkcs8_pem(key_pem).map_err(|e| e.to_string())?;
let key = SigningKey::<sha2::Sha256>::new(rsa);
let buf = key.sign(signing_string.as_bytes()).to_vec();
Ok(buf)
}
fn base64(buf: Vec<u8>) -> String {
use base64::Engine as _;
// STANDARD works on masto, url safe does not.
base64::prelude::BASE64_STANDARD.encode(buf)
}
/// Format the signature.
fn render(sig: Signature<'_>) -> HeaderValue {
let headers = sig
.components
.iter()
// We only need the header names.
.map(|(name, _)| name.as_ref())
// Names need to be space-separated.
.intersperse(" ")
// Equivalent to `fold ["a", "b"]` in haskell
.collect::<String>();
#[rustfmt::skip]
let data = [
("keyId", sig.key_id),
("created", &sig.created),
("expires", &sig.expires),
("algorithm", sig.alg.0),
("headers", &headers),
("signature", &sig.signature_encoded),
];
// Ok, now let's put it all together.
data.into_iter()
// Step 1: all the values need to be surrounded by quotes
.map(|(k, v)| (k, format!(r#""{v}""#)))
// Step 2. join each pair together
.map(|(k, v)| format!("{k}={v}"))
// Step 3. comma separate everything
.intersperse(", ".to_string())
// Step 4. fold the entire thing into one
.collect::<String>()
// Then, it needs to become a header value
.try_into()
.expect("signature formatting should give a correct header value")
}
}
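// Worked example (hand-written and illustrative; the timestamps and host are made up) of
// what the helpers above produce for a GET request signed with
// key_id = "https://test.piss-on.me/o/<actor>#sig-key".
//
// `gather_pieces` followed by `make_signing_string` yields a signing string like:
//
//     (request-target): get /.well-known/webfinger
//     (created): 1714248000
//     (expires): 1714248300
//     host: example.com
//
// and `render` turns the resulting `Signature` into a header value of the form:
//
//     keyId="https://test.piss-on.me/o/<actor>#sig-key", created="1714248000",
//     expires="1714248300", algorithm="hs2019",
//     headers="(request-target) (created) (expires) host",
//     signature="<base64 of the RSA-SHA256 signature over the signing string>"
//
// POST, PUT and PATCH requests additionally include the `digest` component in both the
// signing string and the `headers` list.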

lib/puppy/clippy.toml Normal file
View file

@ -0,0 +1,6 @@
disallowed-methods = [
{ path = "puppy::context", reason = "for external use only (interferes with `test_context`)" }
]
disallowed-types = [
{ path = "store::Result", reason = "generates confusing docs; parameterize puppy::Result by puppy::StoreError instead." }
]

View file

@ -1,28 +0,0 @@
//! The most essential feature of any social network.
use store::{Arrow, Key, Store};
/// *Bites you*
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Bite {
#[identity]
pub id: Key,
#[origin]
pub biter: Key,
#[target]
pub victim: Key,
}
pub fn bite_actor(db: &Store, biter: Key, victim: Key) -> store::Result<Key> {
db.run(|tx| {
let id = Key::gen();
// TODO: Create an `Activity` arrow with the same ID.
tx.create(Bite { id, biter, victim })?;
Ok(id)
})
}
/// Who has bitten `victim`?
pub fn bites_on(db: &Store, victim: Key) -> store::Result<Vec<Bite>> {
db.incoming::<Bite>(victim).try_collect()
}

lib/puppy/src/context.rs Normal file
View file

@ -0,0 +1,70 @@
use std::sync::OnceLock;
use store::{Key, Store, Transaction};
use crate::{config::Config, data::schema, Error, Result};
/// The context of a running ActivityPuppy.
///
/// This type provides access to the data store and configuration.
pub struct Context<'c> {
pub(crate) config: &'c Config,
pub(crate) db: Store,
}
impl Context<'_> {
/// Do a data store [transaction][store::Transaction].
pub fn run<T>(&self, f: impl FnOnce(&Transaction<'_>) -> Result<T>) -> Result<T> {
self.db.run(f)
}
/// Access the store directly.
pub fn store(&self) -> &Store {
&self.db
}
/// Access the configuration.
pub fn config(&self) -> &Config {
self.config
}
/// Create an ActivityPub object ID from a key.
pub fn mk_url(&self, key: Key) -> String {
format!("https://{}/o/{key}", self.config.ap_domain)
}
}
/// The store, which we initialize only once this way.
///
/// This makes it so we don't have to thread the store down through everything; we can just
/// access the context whenever we need it.
static STORE: OnceLock<Store> = OnceLock::new();
/// Load the puppy [`Context`] from anywhere!
///
/// This gives you access to the data store and the configuration, without having to thread it through every place.
// WARNING: don't use this within this crate. there's a clippy lint.
pub fn context<T, E>(f: impl FnOnce(Context<'_>) -> Result<T, E>) -> Result<T, E>
where
E: From<Error>,
{
let cfg = Config {
ap_domain: String::from("test.piss-on.me"),
wf_domain: String::from("test.piss-on.me"),
};
let db = STORE
.get_or_try_init(|| Store::open(".state", schema()))
.map_err(Error::Store)?
.clone();
f(Context { db, config: &cfg })
}
#[cfg(test)]
use store::types::Schema;
/// Load a context for running tests in.
#[cfg(test)]
pub fn test_context<T>(
config: Config,
schema: Schema,
test: impl FnOnce(Context<'_>) -> Result<T>,
) -> Result<T> {
Store::test(schema, |db| test(Context { config: &config, db }))
}
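// Usage sketch (assuming the re-exports from the crate root): binaries reach the store
// and config through `puppy::context` instead of threading a `Store` around.
//
//     fn demo() -> puppy::Result<()> {
//         puppy::context(|cx| {
//             let key = puppy::Key::gen();
//             println!("object url: {}", cx.mk_url(key));
//             Ok(())
//         })
//     }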

lib/puppy/src/data.rs Normal file
View file

@ -0,0 +1,215 @@
//! Datas for the data store!
//!
//! This module contains the definitions for the data store.
use bincode::{Decode, Encode};
use derive_more::Display;
use store::{types::Schema, Alias, Arrow, Key, Mixin};
/// *Bites you*
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Bite {
#[identity]
pub id: Key,
#[origin]
pub biter: Key,
#[target]
pub victim: Key,
}
/// Properties of ActivityPub objects.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Object {
/// The ActivityPub id of the object.
#[index]
pub id: Id,
/// What kind of object is it?
pub kind: ObjectKind,
/// Whether the object resides on this server or on another one.
pub local: bool,
}
/// Allows case analysis on the type of ActivityPub objects.
#[derive(Encode, Decode, Debug, Clone)]
pub enum ObjectKind {
Actor,
Activity(ActivityKind),
Notelike(String),
}
/// The type of an activity.
#[derive(Encode, Decode, Debug, Clone)]
pub enum ActivityKind {
/// Used for posting stuff!
Create = 0,
/// Represents a follow request.
Follow = 1,
/// Used to signal that a follow request was accepted.
Accept = 2,
/// Used to reject a follow request.
Reject = 3,
/// See [`bites`](crate::bites).
Bite = 4,
}
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Channel {
pub inbox: String,
// TODO: add public key here
}
/// A predicate; `follower` "follows" `followed`.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Follows {
#[origin]
pub follower: Key,
#[target]
pub followed: Key,
}
/// An instance of a request from some `origin` user to follow a `target` user.
///
/// This should not be used to determine whether two actors are following each other. For that, use
/// [`Follows`], a basic arrow for exactly this purpose. *This* arrow is used to identify specific
/// instances of *requests*, and serves mostly as a historical reference and for synchronizing with
/// other servers.
///
/// Used to represent a `Follow` activity.
///
/// Mixins always present for the `id`:
///
/// - [`Status`], carrying the status of the request.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct FollowRequest {
/// The unique ID of this particular request.
#[identity]
pub id: Key,
/// The "follower", the user that made the request.
pub origin: Key,
/// The one the request is made to.
pub target: Key,
}
/// The status of a [`FollowRequest`].
///
/// Valid state transitions:
///
/// ```text
/// ┌──────────────▶ Rejected
/// │
/// │
/// │
///
/// None ─────────▶ Pending ────────▶ Accepted
///
/// │ │
/// │ │
/// │ │
/// ▼ │
/// Withdrawn ◀────────────┘
/// ```
///
/// In addition, a follow request will be deleted if either endpoint is removed from the graph.
#[derive(Mixin, Encode, Decode, Eq, PartialEq, Clone)]
pub enum Status {
/// The follow request was previously pending or accepted, but since withdrawn.
///
/// This can happen when someone cancels their follow request or unfollows the target.
Withdrawn,
/// The follow request was accepted.
Accepted,
/// The follow request was denied.
Rejected,
/// The follow request is still under review.
Pending,
}
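// Hypothetical helper, not part of this commit, restating the transition diagram above
// in code (`None` meaning that no follow request exists yet):
//
//     fn transition_allowed(from: Option<&Status>, to: &Status) -> bool {
//         use Status::*;
//         match (from, to) {
//             (None, Pending) => true,
//             (Some(Pending), Accepted | Rejected | Withdrawn) => true,
//             (Some(Accepted), Withdrawn) => true,
//             _ => false,
//         }
//     }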
/// An ActivityPub ID, used to look up remote objects by their canonical URL.
#[derive(Alias, Encode, Decode, Clone, PartialEq, Eq, Debug, Hash, Display)]
pub struct Id(pub String);
/// A "profile" in the social media sense.
///
/// Contains all presentation information about someone making posts.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Profile {
/// How many posts has this user made?
pub post_count: usize,
/// The name used for the profile's handle.
#[index] // <- currently doesnt do anything but i have an idea
pub account_name: Username,
/// The name displayed above their posts.
pub display_name: Option<String>,
/// The "bio", a freeform "about me" field.
pub about_string: Option<String>,
/// Arbitrary custom metadata fields.
pub about_fields: Vec<(String, String)>,
}
/// A unique name for an actor that is part of their "handle".
#[derive(Alias, Encode, Decode, Clone, PartialEq, Eq, Debug, Hash, Display)]
pub struct Username(pub String);
/// The relation that `author` has constructed and published `object`.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct AuthorOf {
#[origin]
pub author: Key,
#[target]
pub object: Key,
}
/// The contents of a post.
#[derive(Mixin, Encode, Decode, Debug, Clone, Default)]
pub struct Content {
/// Main post body.
pub content: Option<String>,
/// Content warning for the post.
pub warning: Option<String>,
}
/// A public key used for verifying requests.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct PublicKey {
pub key_id: String,
pub key_pem: String,
}
/// A private key for signing requests with.
#[derive(Mixin, Encode, Decode, Clone)]
pub struct PrivateKey {
pub key_pem: String,
}
/// Represents a `Create` activity.
#[derive(Arrow)]
pub struct Create {
#[identity]
pub id: Key,
#[origin]
pub actor: Key,
#[target]
pub object: Key,
}
/// Construct the schema.
pub fn schema() -> Schema {
Schema::new()
// Mixins
.has::<Profile>()
.has::<Content>()
.has::<Status>()
.has::<Object>()
.has::<Channel>()
.has::<PrivateKey>()
.has::<PublicKey>()
// Aliases
.has::<Username>()
.has::<Id>()
// Arrows
.has::<Bite>()
.has::<FollowRequest>()
.has::<AuthorOf>()
.has::<Follows>()
.has::<Create>()
}
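// The schema is what the store gets opened with (see `context.rs`), e.g.:
//
//     let db = store::Store::open(".state", schema())?;
//
// Registering a new Mixin, Alias or Arrow type means adding a `.has::<T>()` call above.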

View file

@ -1,234 +0,0 @@
//! Follow requests and related stuff.
use bincode::{Decode, Encode};
use store::{util::IterExt, Arrow, Error, Key, Mixin, Store, OK};
/// A predicate; `follower` "follows" `followed`.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct Follows {
#[origin]
pub follower: Key,
#[target]
pub followed: Key,
}
/// An instance of a request from some `origin` user to follow a `target` user.
///
/// This should not be used to determine whether two actors are following each other. For that, use
/// [`Follows`], a basic arrow for exactly this purpose. *This* arrow is used to identify specific
/// instances of *requests*, and serves mostly as a historical reference and for synchronizing with
/// other servers.
///
/// Mixins always present for the `id`:
///
/// - [`Status`], carrying the status of the request.
#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
pub struct FollowRequest {
/// The unique ID of this particular request.
#[identity]
pub id: Key,
/// The "follower", the user that made the request.
pub origin: Key,
/// The one the request is made to.
pub target: Key,
}
impl FollowRequest {
/// Determine if this follow request is pending.
pub fn is_pending(&self, db: &Store) -> store::Result<bool> {
// The status is stored as a mixin, so we need to get it.
let Some(st) = db.get_mixin::<Status>(self.id)? else {
// If we don't have a status for a follow request, something is borked.
return Err(Error::Missing);
};
// If the status of the follow request is pending, it can't also be true that the follows
// relation already exists.
debug_assert! {
!(st == Status::Pending)
|| db.exists::<Follows>(self.origin, self.target).map(|x| !x)?,
"fr.is_pending -> !(fr.origin follows fr.target)"
};
Ok(st == Status::Pending)
}
}
/// The status of a [`FollowRequest`].
///
/// Valid state transitions:
///
/// ```text
/// ┌──────────────▶ Rejected
/// │
/// │
/// │
///
/// None ─────────▶ Pending ────────▶ Accepted
///
/// │ │
/// │ │
/// │ │
/// ▼ │
/// Withdrawn ◀────────────┘
/// ```
///
/// In addition, a follow request will be deleted if either endpoint is removed from the graph.
#[derive(Mixin, Encode, Decode, Eq, PartialEq, Clone)]
pub enum Status {
/// The follow request was previously pending or accepted, but since withdrawn.
///
/// This can happen when someone cancels their follow request or unfollows the target.
Withdrawn,
/// The follow request was accepted.
Accepted,
/// The follow request was denied.
Rejected,
/// The follow request is still under review.
Pending,
}
/// Request to follow another actor.
pub fn request(db: &Store, requester: Key, target: Key) -> store::Result<FollowRequest> {
db.run(|tx| {
let req = FollowRequest {
id: Key::gen(),
origin: requester,
target,
};
tx.create(req)?;
tx.add_mixin(req.id, Status::Pending)?;
Ok(req)
})
}
/// Accept the open follow request from `requester` to `target`, if one exists.
pub fn accept(db: &Store, requester: Key, target: Key) -> store::Result<()> {
db.run(|tx| {
// TODO: This logic is a little broken but it'll do for now. i'll fix it later.
let fr = tx
.between::<FollowRequest>(requester, target)
.filter(|fr| fr.as_ref().is_ok_and(|f| f.target == target))
// We'll want the latest one, because that one was inserted last so it'll be the most
// recent
.last()
.ok_or_else(|| Error::Missing)??;
// Only apply the update if the last follow request is still in a pending state.
if let Some(Status::Pending) = db.get_mixin(fr.id)? {
tx.update(fr.id, |_| Status::Accepted)?;
tx.create(Follows {
follower: requester,
followed: target,
})?;
}
OK
})
}
pub fn reject(db: &Store, request: Key) -> store::Result<()> {
db.run(|tx| {
tx.update(request, |_| Status::Rejected)?;
OK
})
}
/// List all pending follow requests for a user.
pub fn list_pending(db: &Store, target: Key) -> store::Result<Vec<FollowRequest>> {
db.incoming::<FollowRequest>(target)
.filter_bind_results(|req| Ok(if req.is_pending(db)? { Some(req) } else { None }))
.collect()
}
/// Get all actors followed by `actor`.
pub fn following_of(db: &Store, actor: Key) -> store::Result<Vec<Key>> {
db.outgoing::<Follows>(actor)
.map_ok(|a| a.followed)
.collect()
}
/// Get all actors following `actor`.
pub fn followers_of(db: &Store, actor: Key) -> store::Result<Vec<Key>> {
db.incoming::<Follows>(actor)
.map_ok(|a| a.follower)
.collect()
}
#[cfg(test)]
mod tests {
use store::{Key, Store, OK};
use crate::{
create_actor,
model::{schema, FollowRequest, Follows},
};
fn make_test_actors(db: &Store) -> store::Result<(Key, Key)> {
let alice = create_actor(&db, "alice")?;
let bob = create_actor(&db, "bob")?;
eprintln!("alice={alice}, bob={bob}");
Ok((alice, bob))
}
#[test]
fn create_fr() -> store::Result<()> {
Store::test(schema(), |db| {
let (alice, bob) = make_test_actors(&db)?;
super::request(&db, alice, bob)?;
assert!(
db.exists::<FollowRequest>(alice, bob)?,
"(alice -> bob) ∈ follow-requested"
);
assert!(
!db.exists::<Follows>(alice, bob)?,
"(alice -> bob) ∉ follows"
);
let pending_for_bob = super::list_pending(&db, bob)?
.into_iter()
.map(|fr| fr.origin)
.collect::<Vec<_>>();
assert_eq!(pending_for_bob, vec![alice], "bob.pending = {{alice}}");
OK
})
}
#[test]
fn accept_fr() -> store::Result<()> {
Store::test(schema(), |db| {
let (alice, bob) = make_test_actors(&db)?;
super::request(&db, alice, bob)?;
super::accept(&db, alice, bob)?;
assert!(
db.exists::<Follows>(alice, bob)?,
"(alice -> bob) ∈ follows"
);
assert!(
!db.exists::<Follows>(bob, alice)?,
"(bob -> alice) ∉ follows"
);
let pending_for_bob = super::list_pending(&db, bob)?;
assert!(pending_for_bob.is_empty(), "bob.pending = ∅");
let followers_of_bob = super::followers_of(&db, bob)?;
assert_eq!(followers_of_bob, vec![alice], "bob.followers = {{alice}}");
OK
})
}
#[test]
fn listing_follow_relations() -> store::Result<()> {
Store::test(schema(), |db| {
let (alice, bob) = make_test_actors(&db)?;
super::request(&db, alice, bob)?;
super::accept(&db, alice, bob)?;
let followers_of_bob = super::followers_of(&db, bob)?;
assert_eq!(followers_of_bob, vec![alice], "bob.followers = {{alice}}");
let following_of_alice = super::following_of(&db, alice)?;
assert_eq!(following_of_alice, vec![bob], "alice.following = {{bob}}");
OK
})
}
}

lib/puppy/src/interact.rs Normal file
View file

@ -0,0 +1,230 @@
//! Interactions between actors.
use store::{util::IterExt as _, Key, StoreError};
use crate::{
actor::Actor,
data::{FollowRequest, Bite, Status, Follows},
Context, Error, Result,
};
/// Interactions with other objects.
impl Actor {
/// Create a [`Bite`].
pub fn bite(&self, victim: &Actor) -> Bite {
Bite {
victim: victim.key,
biter: self.key,
id: Key::gen(),
}
}
/// Construct a [`FollowRequest`].
pub fn follow_request(&self, target: &Actor) -> FollowRequest {
FollowRequest {
origin: self.key,
target: target.key,
id: Key::gen(),
}
}
/// Makes `self` bite `victim` and inserts the record into the database.
pub fn do_bite(&self, cx: &Context, victim: &Actor) -> Result<Bite> {
let bite = self.bite(victim);
cx.run(|tx| {
tx.create(bite)?;
Ok(bite)
})
}
/// Creates a follow request from `self` to `target`.
pub fn do_follow_request(&self, cx: &Context, target: &Actor) -> Result<FollowRequest> {
cx.run(|tx| {
let req = self.follow_request(target);
tx.create(req)?;
tx.add_mixin(req.id, Status::Pending)?;
Ok(req)
})
}
/// Accept a follow request.
pub fn do_accept_request(&self, cx: &Context, req: FollowRequest) -> Result<()> {
debug_assert! {
self.key == req.target,
"only the target of a follow request may accept it"
};
cx.run(|tx| try {
let fr = tx
.between::<FollowRequest>(req.origin, req.target)
// Get the one that is equal to `req`.
.filter(|fr| fr.as_ref().is_ok_and(|f| f.id == req.id))
.last()
.unwrap()?;
// Only apply the update if the follow request is still in a pending state.
if let Some(Status::Pending) = tx.get_mixin(fr.id)? {
tx.update(fr.id, |_| Status::Accepted)?;
tx.create(Follows {
follower: req.origin,
followed: req.target,
})?;
}
})
}
/// Reject a follow request.
pub fn do_reject_request(&self, cx: &Context, req: FollowRequest) -> Result<()> {
debug_assert! {
self.key == req.target,
"only the target of a follow request may accept it"
};
cx.run(|tx| try {
tx.update(req.id, |_| Status::Rejected)?;
})
}
/// Get all pending follow requests for `self`.
pub fn pending_requests<'c>(
&self,
cx: &'c Context,
) -> impl Iterator<Item = Result<FollowRequest>> + 'c {
cx.store()
.incoming::<FollowRequest>(self.key)
.map_err(Error::Store)
.filter_bind_results(|req| Ok(if req.is_pending(cx)? { Some(req) } else { None }))
}
/// Get all nodes `self` is following.
pub fn following<'c>(&self, cx: &'c Context) -> impl Iterator<Item = Result<Key>> + 'c {
cx.store()
.outgoing::<Follows>(self.key)
.map_err(Error::Store)
.map_ok(|a| a.followed)
}
/// Get all followers of `self`.
pub fn followers<'c>(&self, cx: &'c Context) -> impl Iterator<Item = Result<Key>> + 'c {
cx.store()
.incoming::<Follows>(self.key)
.map_err(Error::Store)
.map_ok(|a| a.follower)
}
/// List all specific times `self` was bitten.
pub fn bites_suffered<'c>(&self, cx: &'c Context) -> impl Iterator<Item = Result<Bite>> + 'c {
cx.store().incoming::<Bite>(self.key).map_err(Error::Store)
}
/// Check whether `self` follows `other`.
pub fn follows(&self, cx: &Context, other: &Actor) -> Result<bool> {
try { cx.db.exists::<Follows>(self.key, other.key)? }
}
}
impl FollowRequest {
/// Determine if this follow request is pending.
pub fn is_pending(&self, cx: &Context) -> Result<bool> {
// The status is stored as a mixin, so we need to get it.
let Some(st) = cx.db.get_mixin::<Status>(self.id)? else {
// If we don't have a status for a follow request, something is borked.
return Err(StoreError::Missing.into());
};
// If the status of the follow request is pending, it can't also be true that the follows
// relation already exists.
debug_assert! {
!(st == Status::Pending)
|| cx.db.exists::<Follows>(self.origin, self.target).map(|x| !x)?,
"fr.is_pending -> !(fr.origin follows fr.target)"
};
Ok(st == Status::Pending)
}
}
#[cfg(test)]
mod tests {
use store::util::IterExt as _;
use crate::{
actor::{create_local as create_actor, Actor},
config::Config,
data::{schema, FollowRequest, Follows},
test_context, Context, Result,
};
fn make_test_actors(cx: &Context) -> Result<(Actor, Actor)> {
let alice = create_actor(&cx, "alice")?;
let bob = create_actor(&cx, "bob")?;
eprintln!("alice={alice:?}, bob={bob:?}");
Ok((alice, bob))
}
fn test_config() -> Config {
Config {
ap_domain: String::from("unit-test.puppy.gay"),
wf_domain: String::from("unit-test.puppy.gay"),
}
}
#[test]
fn create_fr() -> Result<()> {
test_context(test_config(), schema(), |cx| try {
let db = cx.store();
let (alice, bob) = make_test_actors(&cx)?;
alice.do_follow_request(&cx, &bob)?;
assert!(
db.exists::<FollowRequest>(alice.key, bob.key)?,
"(alice -> bob) ∈ follow-requested"
);
assert!(
!db.exists::<Follows>(alice.key, bob.key)?,
"(alice -> bob) ∉ follows"
);
let pending_for_bob = bob
.pending_requests(&cx)
.map_ok(|fr| fr.origin)
.try_collect::<Vec<_>>()?;
assert_eq!(pending_for_bob, vec![alice.key], "bob.pending = {{alice}}");
})
}
#[test]
fn accept_fr() -> Result<()> {
test_context(test_config(), schema(), |cx| {
let db = cx.store();
let (alice, bob) = make_test_actors(&cx)?;
let req = alice.do_follow_request(&cx, &bob)?;
bob.do_accept_request(&cx, req)?;
assert!(
db.exists::<Follows>(alice.key, bob.key)?,
"(alice -> bob) ∈ follows"
);
assert!(
!db.exists::<Follows>(bob.key, alice.key)?,
"(bob -> alice) ∉ follows"
);
let pending_for_bob: Vec<_> = bob.pending_requests(&cx).try_collect()?;
assert!(pending_for_bob.is_empty(), "bob.pending = ∅");
let followers_of_bob: Vec<_> = bob.followers(&cx).try_collect()?;
assert_eq!(
followers_of_bob,
vec![alice.key],
"bob.followers = {{alice}}"
);
Ok(())
})
}
#[test]
fn listing_follow_relations() -> Result<()> {
test_context(test_config(), schema(), |cx| try {
let (alice, bob) = make_test_actors(&cx)?;
let req = alice.do_follow_request(&cx, &bob)?;
bob.do_accept_request(&cx, req)?;
let followers_of_bob: Vec<_> = bob.followers(&cx).try_collect()?;
assert_eq!(
followers_of_bob,
vec![alice.key],
"bob.followers = {{alice}}"
);
let following_of_alice: Vec<_> = alice.following(&cx).try_collect()?;
assert_eq!(
following_of_alice,
vec![bob.key],
"alice.following = {{bob}}"
);
})
}
}

View file

@ -1,138 +1,128 @@
#![feature(iterator_try_collect, try_blocks)] // Working with result types is such a bitch without these.
use model::{Channel, Id, Object, ObjectKind, Profile, Username}; #![feature(iterator_try_collect, try_blocks, once_cell_try)]
pub use store::{self, Key, Store}; // Cause an error if someone tries to call [`context`] from within this crate. If we need one,
use store::{Error, Transaction, OK}; // it must be passed in as a parameter. Getting a hold of a context is not our job. We need to
// forbid internal code from calling the function because it is stateful, and that can cause
// spooky action at a distance within tests.
//
// Ideally this would be enforced by the compiler, and we *can* enforce it at the type level,
// but that would make every type signature ever 100x more complicated, so we're not doing it.
#![deny(clippy::disallowed_methods, clippy::disallowed_types)]
pub mod model { pub use context::{context, Context};
use bincode::{Decode, Encode}; #[cfg(test)]
use derive_more::Display; pub use context::test_context;
use store::{types::Schema, Alias, Mixin};
use crate::follows::Status; use data::{ActivityKind, Channel, Content, Create, Id, Object, ObjectKind, Profile, PublicKey};
pub use crate::{
bites::Bite,
follows::{FollowRequest, Follows},
post::{AuthorOf, Content},
};
/// A "profile" in the social media sense. pub use store::{self, Key, StoreError};
/// pub use fetch;
/// Contains all presentation information about someone making posts.
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Profile {
/// How many posts has this user made?
pub post_count: usize,
/// The name used for the profile's handle.
#[index] // <- currently doesnt do anything but i have an idea
pub account_name: Username,
/// The name displayed above their posts.
pub display_name: Option<String>,
/// The "bio", a freeform "about me" field.
pub about_string: Option<String>,
/// Arbitrary custom metadata fields.
pub about_fields: Vec<(String, String)>,
}
/// Properties of ActivityPub objects. mod context;
#[derive(Mixin, Encode, Decode, Debug, Clone)] pub mod data;
pub struct Object {
#[index]
pub id: Id,
pub kind: ObjectKind,
}
/// Allows case analysis on the type of ActivityPub objects. pub mod post;
#[derive(Encode, Decode, Debug, Clone)] mod interact;
pub enum ObjectKind {
Actor,
Activity(ActivityKind),
Notelike(String),
}
/// The type of an activity.
#[derive(Encode, Decode, Debug, Clone)]
pub enum ActivityKind {
/// Used for posting stuff!
Create = 0,
/// Represents a follow request.
Follow = 1,
/// Used to signal that a follow request was accepted.
Accept = 2,
/// Used to reject a follow request.
Reject = 3,
/// See [`bites`](crate::bites).
Bite = 4,
}
#[derive(Mixin, Encode, Decode, Debug, Clone)]
pub struct Channel {
pub inbox: String,
}
/// An ActivityPub ID, used to look up remote objects by their canonical URL.
#[derive(Alias, Encode, Decode, Clone, PartialEq, Eq, Debug, Hash, Display)]
pub struct Id(pub String);
/// A unique name for an actor that is part of their "handle".
#[derive(Alias, Encode, Decode, Clone, PartialEq, Eq, Debug, Hash, Display)]
pub struct Username(pub String);
/// Construct the schema.
pub fn schema() -> Schema {
Schema::new()
// Mixins
.has::<Profile>()
.has::<Content>()
.has::<Status>()
.has::<Object>()
.has::<Channel>()
// Aliases
.has::<Username>()
.has::<Id>()
// Arrows
.has::<Bite>()
.has::<FollowRequest>()
.has::<AuthorOf>()
.has::<Follows>()
}
}
/// Retrieve an ActivityPub object from the database. /// Retrieve an ActivityPub object from the database.
/// ///
/// Fails with `Error::Missing` if the required properties are not present. /// Fails with `Error::Missing` if the required properties are not present.
pub fn get_local_ap_object(db: &Store, key: Key) -> store::Result<fetch::Object> { pub fn get_local_ap_object(cx: &Context<'_>, key: Key) -> Result<fetch::Object> {
let Some(obj) = db.get_mixin::<Object>(key)? else { let Some(obj) = cx.db.get_mixin::<Object>(key)? else {
return Err(Error::Missing); // We need this data in order to determine the object type. If the passed key does not
// have this data, it must not be an ActivityPub object.
return Err(Error::MissingData { node: key, prop: "Object" });
}; };
match obj.kind { match obj.kind {
ObjectKind::Actor => { ObjectKind::Actor => {
let Some(Profile { account_name, display_name, .. }) = db.get_mixin(key)? else { let Some(Profile { account_name, display_name, .. }) = cx.db.get_mixin(key)? else {
return Err(Error::Missing); return Err(Error::MissingData { node: key, prop: "Profile" });
}; };
let Some(Channel { inbox }) = db.get_mixin(key)? else { let Some(Channel { inbox }) = cx.db.get_mixin(key)? else {
return Err(Error::Missing); return Err(Error::MissingData { node: key, prop: "Channel" });
};
let Some(PublicKey { key_id, key_pem }) = cx.db.get_mixin(key)? else {
return Err(Error::MissingData { node: key, prop: "PublicKey" });
}; };
Ok(fetch::Object::Actor(fetch::Actor { Ok(fetch::Object::Actor(fetch::Actor {
id: obj.id.0.into(), id: obj.id.0.clone().into(),
inbox: inbox.into(), inbox: inbox.into(),
account_name: account_name.0, account_name: account_name.0,
display_name, display_name,
public_key: fetch::PublicKey {
owner: obj.id.0.into(),
key_id: key_id.into(),
public_key_pem: key_pem,
},
})) }))
} }
ObjectKind::Activity(_) => { ObjectKind::Activity(ActivityKind::Create) => {
todo!() let Some(Create { object, actor, .. }) = cx.db.get_arrow(key)? else {
panic!("expected a `Create`");
};
let Id(actor) = cx.db.get_alias(actor)?.unwrap();
Ok(fetch::Object::Activity(fetch::Activity {
id: obj.id.0.into(),
actor: actor.into(),
object: Box::new(get_local_ap_object(cx, object)?),
kind: String::from("Create"),
}))
} }
ObjectKind::Notelike(_) => todo!(), ObjectKind::Notelike(kind) => {
let Some(Content { content, warning, .. }) = cx.db.get_mixin(key)? else {
panic!()
};
Ok(fetch::Object::Object {
id: obj.id.0.clone().into(),
summary: warning,
content,
kind,
})
}
_ => todo!(),
} }
} }
-/// Create a fresh local actor.
-pub fn create_local_actor(db: &Store, domain: &str, username: impl ToString) -> store::Result<Key> {
+pub mod actor {
+    use store::{Key, StoreError, Transaction};
+
+    use crate::{
+        data::{Channel, Id, Object, ObjectKind, PrivateKey, Profile, PublicKey, Username},
+        Context, Error, Result,
+    };
+
+    /// A reference to an actor.
+    #[derive(Clone, Eq, PartialEq, Debug)]
+    pub struct Actor {
+        /// The key identifying the actor in the data store.
+        pub key: Key,
+        local: bool,
+    }
+
+    impl Actor {
+        /// Get a local actor from the store by their username.
+        pub fn by_username(cx: &Context, username: impl ToString) -> Result<Option<Actor>> {
+            let maybe_key = cx
+                .store()
+                .lookup(Username(username.to_string()))
+                .map_err(Error::Store)?;
+            // For now, we only have local actors.
+            Ok(maybe_key.map(|key| Actor { key, local: true }))
+        }
+
+        /// Check whether the actor is local or not.
+        pub fn is_local(self) -> bool {
+            self.local
+        }
+    }
+
+    /// Create a fresh local actor.
+    pub fn create_local(cx: &Context, username: impl ToString) -> Result<Actor> {
        let key = Key::gen();
-    db.run(|tx| {
+        cx.run(|tx| {
            let username: Username = username.to_string().into();
            // Federation stuff
-        mixin_ap_actor(tx, key, domain)?;
+            mixin_ap_actor(tx, key, &cx.config.ap_domain, true)?;
+            mixin_priv_key(tx, key, &cx.config.ap_domain)?;
            // Social properties
            tx.add_alias(key, username.clone())?;
            tx.add_mixin(key, Profile {
@ -142,19 +132,65 @@ pub fn create_local_actor(db: &Store, domain: &str, username: impl ToString) ->
                about_string: None,
                about_fields: Vec::new(),
            })?;
-        Ok(key)
+            Ok(Actor { key, local: true })
        })
    }
+
    /// Add properties related to ActivityPub actors to a vertex.
-pub fn mixin_ap_actor(tx: &Transaction<'_>, vertex: Key, domain: &str) -> store::Result<()> {
-    let id = Id(format!("http://{domain}/o/{vertex}"));
+    pub fn mixin_ap_actor(
+        tx: &Transaction<'_>,
+        vertex: Key,
+        domain: &str,
+        local: bool,
+    ) -> Result<(), StoreError> {
+        let id = Id(format!("https://{domain}/o/{vertex}"));
        tx.add_alias(vertex, id.clone())?;
        tx.add_mixin(vertex, Channel { inbox: format!("{id}/inbox") })?;
-    tx.add_mixin(vertex, Object { id, kind: ObjectKind::Actor })?;
-    OK
+        tx.add_mixin(vertex, Object {
+            kind: ObjectKind::Actor,
+            local,
+            id,
+        })?;
+        store::OK
+    }
+
+    /// Generate and attach a public/private key pair to the vertex.
+    pub fn mixin_priv_key(
+        tx: &Transaction<'_>,
+        vertex: Key,
+        domain: &str,
+    ) -> Result<(), StoreError> {
+        let key_id = format!("https://{domain}/o/{vertex}#sig-key");
+        let (private, public) = fetch::keys::gen_keypair();
+        tx.add_mixin(vertex, PublicKey {
+            key_pem: public.to_pem(),
+            key_id,
+        })?;
+        tx.add_mixin(vertex, PrivateKey {
+            key_pem: dbg!(private.to_pem()),
+        })?;
+        store::OK
+    }
}
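`fetch::keys::gen_keypair()` lives in the `fetch` crate, which this section does not show, so the following is only a guess at its shape. It assumes RSA keys (what most fediverse servers expect for `rsa-sha256` HTTP signatures) generated with the RustCrypto `rsa` crate, and it returns PEM strings directly rather than key objects exposing a `.to_pem()` method as the call above does.

// Assumed sketch of `fetch::keys::gen_keypair`; not the real implementation.
// Dependencies assumed: rsa = "0.9", rand = "0.8".
use rsa::{
    pkcs8::{EncodePrivateKey, EncodePublicKey, LineEnding},
    RsaPrivateKey, RsaPublicKey,
};

/// Generate a 2048-bit RSA key pair and return `(private_pem, public_pem)`.
pub fn gen_keypair() -> (String, String) {
    let mut rng = rand::thread_rng();
    let private = RsaPrivateKey::new(&mut rng, 2048).expect("key generation failed");
    let public = RsaPublicKey::from(&private);
    // PKCS#8 for the private half, SPKI ("BEGIN PUBLIC KEY") for the public half.
    let private_pem = private
        .to_pkcs8_pem(LineEnding::LF)
        .expect("PEM-encode private key")
        .to_string();
    let public_pem = public
        .to_public_key_pem(LineEnding::LF)
        .expect("PEM-encode public key");
    (private_pem, public_pem)
}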
-pub mod bites;
-pub mod post;
-pub mod follows;
+pub type Result<T, E = Error> = std::result::Result<T, E>;
+
+#[derive(derive_more::From, Debug)]
+pub enum Error {
+    /// An error internal to the store.
+    Store(StoreError),
+    /// Expected `node` to have some property that it doesn't have.
+    MissingData {
+        /// The node that is missing the data.
+        node: Key,
+        /// Name of the thing it is missing.
+        prop: &'static str,
+    },
+}
+
+pub mod config {
+    pub struct Config {
+        pub ap_domain: String,
+        pub wf_domain: String,
+    }
+}
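`Context` is used throughout this commit but defined outside the lines shown here. A minimal shape consistent with the call sites above (`cx.db`, `cx.config.ap_domain`, `cx.run(..)`, `cx.store()`, `cx.mk_url(key)`) might look like the sketch below; treat every detail as an assumption rather than the actual definition.

// Hypothetical sketch of `Context`; inferred from call sites, not from the diff.
use store::{Key, Store, StoreError, Transaction};

use crate::config::Config;

pub struct Context<'a> {
    pub db: &'a Store,
    pub config: &'a Config,
}

impl Context<'_> {
    /// Run a closure inside a store transaction (thin wrapper over `Store::run`).
    pub fn run<T, E>(&self, f: impl FnOnce(&Transaction<'_>) -> Result<T, E>) -> Result<T, E>
    where
        E: From<StoreError>,
    {
        self.db.run(f)
    }

    /// Borrow the underlying store.
    pub fn store(&self) -> &Store {
        self.db
    }

    /// Build the canonical ActivityPub URL for a node.
    pub fn mk_url(&self, key: Key) -> String {
        format!("https://{}/o/{key}", self.config.ap_domain)
    }
}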
@ -2,20 +2,66 @@
use std::ops::RangeBounds;

-use bincode::{Decode, Encode};
use chrono::{DateTime, Utc};
use either::Either::{Left, Right};
-use store::{util::IterExt as _, Arrow, Error, Key, Mixin, Result, Store, Transaction};
+use store::{util::IterExt as _, Key, Store, StoreError, Transaction};

-use crate::model::Profile;
+use crate::{
+    data::{self, AuthorOf, Content, Id, Object, ObjectKind, PrivateKey, Profile, PublicKey},
+    Context,
+};

-/// The contents of a post.
-#[derive(Mixin, Encode, Decode, Debug, Clone, Default)]
-pub struct Content {
-    /// Main post body.
-    pub content: Option<String>,
-    /// Content warning for the post.
-    pub warning: Option<String>,
+#[derive(Clone, Debug)]
+pub struct Post {
+    pub key: Key,
+}
+
+impl Post {
+    pub async fn federate(&self, cx: &Context<'_>) -> crate::Result<()> {
+        use fetch::{Activity, Object};
+        let post @ Object::Object { .. } = crate::get_local_ap_object(cx, self.key)? else {
+            todo!()
+        };
+        let author = cx
+            .db
+            .incoming::<AuthorOf>(self.key)
+            .map_ok(|a| a.author)
+            .next()
+            .unwrap()?;
+        let author_id = cx.db.get_alias::<data::Id>(author)?.unwrap();
+        let activity = Activity {
+            id: post.id().clone().into(),
+            object: Box::new(post),
+            kind: String::from("Create"),
+            actor: author_id.0.into(),
+        };
+        let (private, public) = cx.db.get_mixin_many::<(PrivateKey, PublicKey)>(author)?;
+        // Insert the activity in the database.
+        cx.run(|tx| try {
+            let activity_key = Key::gen();
+            let id: data::Id = cx.mk_url(activity_key).into();
+            tx.add_mixin(activity_key, data::Object {
+                id: id.clone(),
+                kind: ObjectKind::Activity(data::ActivityKind::Create),
+                local: true,
+            })?;
+            tx.create(data::Create {
+                id: activity_key,
+                actor: author,
+                object: self.key,
+            })?;
+            tx.add_alias(activity_key, id)?;
+        })?;
+        // Send the requests.
+        fetch::deliver(
+            &private.key_pem,
+            &public.key_id,
+            activity,
+            "https://crimew.gay/users/ezri/inbox",
+        )
+        .await;
+        Ok(())
+    }
}
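`fetch::deliver` is where the `(private, public)` pair actually gets used, and it is not shown in this diff. The sketch below illustrates the kind of draft-cavage HTTP signature most fediverse software expects: hash the body, build a signing string over `(request-target)`, `host`, `date`, and `digest`, sign it with RSA-SHA256, and emit a `Signature` header. The crate choices, the header set, and the assumption that the stored PEM is PKCS#8 are all guesses, not a description of the real `fetch::deliver`.

// Hypothetical signing step. Assumed dependencies:
// rsa = "0.9", sha2 = { version = "0.10", features = ["oid"] }, base64 = "0.21".
use base64::{engine::general_purpose::STANDARD as b64, Engine as _};
use rsa::{pkcs8::DecodePrivateKey, Pkcs1v15Sign, RsaPrivateKey};
use sha2::{Digest, Sha256};

/// Build the `Signature` header value for `POST {path}` on `host`.
/// `date` must be the exact value sent in the `Date` header, and the digest
/// below is also what goes into the `Digest` header of the request.
fn signature_header(
    key_pem: &str,
    key_id: &str,
    host: &str,
    path: &str,
    date: &str,
    body: &[u8],
) -> String {
    let digest = format!("SHA-256={}", b64.encode(Sha256::digest(body)));
    // draft-cavage style signing string over a fixed header set.
    let signing_string =
        format!("(request-target): post {path}\nhost: {host}\ndate: {date}\ndigest: {digest}");
    let key = RsaPrivateKey::from_pkcs8_pem(key_pem).expect("parse private key PEM");
    let hashed = Sha256::digest(signing_string.as_bytes());
    let sig = key
        .sign(Pkcs1v15Sign::new::<Sha256>(), &hashed)
        .expect("signing failed");
    format!(
        "keyId=\"{key_id}\",algorithm=\"rsa-sha256\",headers=\"(request-target) host date digest\",signature=\"{}\"",
        b64.encode(sig)
    )
}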
impl From<&str> for Content {
@ -33,20 +79,11 @@ impl From<String> for Content {
    }
}
-/// The relation that `author` has constructed and published `object`.
-#[derive(Arrow, Debug, PartialEq, Eq, Clone, Copy)]
-pub struct AuthorOf {
-    #[origin]
-    pub author: Key,
-    #[target]
-    pub object: Key,
-}
-
/// A piece of content posted by someone.
#[derive(Clone, Debug)]
-pub struct Post {
+pub struct PostData {
    /// The post's internal ID.
-    pub id: Key,
+    pub id: Post,
    /// The actual post contents.
    pub content: Content,
    /// Metadata about the post's author.
@ -73,12 +110,12 @@ pub struct Timeline {
/// Discrete events that can be displayed to a user as part of a timeline.
#[derive(Debug)]
enum Item {
-    Post(Post),
+    Post(PostData),
}

impl Item {
    /// Get the timeline item if it is a [`Post`].
-    pub fn as_post(&self) -> Option<&Post> {
+    pub fn as_post(&self) -> Option<&PostData> {
        match self {
            Item::Post(ref post) => Some(post),
        }
@ -87,7 +124,7 @@ impl Item {
impl Timeline {
    /// Get all the posts in the timeline.
-    pub fn posts(&self) -> impl Iterator<Item = &Post> {
+    pub fn posts(&self) -> impl Iterator<Item = &PostData> {
        self.items.iter().filter_map(|x| x.as_post())
    }
}
@ -97,7 +134,7 @@ pub fn fetch_timeline(
    db: &Store,
    time_range: impl RangeBounds<DateTime<Utc>>,
    limit: Option<usize>,
-) -> Result<Timeline> {
+) -> Result<Timeline, StoreError> {
    let posts = db.run(|tx| {
        // Get all post content entries (the argument passed here is a range of chrono datetimes).
        let iter = tx.range::<Content>(time_range);
@ -107,28 +144,28 @@
        };
        // Then, we're gonna map each of them to their author, and get the profile information needed to
        // render the post (mostly display name and handle).
-        iter.bind_results(|(id, content)| {
+        iter.bind_results(|(key, content)| try {
            // Take the first author. There is nothing stopping a post from having multiple authors, but
            // let's take it one step at a time.
            let (author, Some(Profile { display_name, account_name, .. })) = tx
-                .join_on(|a: AuthorOf| a.author, tx.incoming(id))?
+                .join_on(|a: AuthorOf| a.author, tx.incoming(key))?
                .swap_remove(0)
            else {
                // We expect all posts to have at least one author, so we should complain if there is one
                // that doesn't (for now). For robustness, the `.collect()` down there should be replaced
                // with a strategy where we log a warning instead of failing, but in the current state of
                // the project, failing fast is a good thing.
-                return Err(Error::Missing);
+                return Err(StoreError::Missing);
            };
-            Ok(Item::Post(Post {
-                id,
+            Item::Post(PostData {
+                id: Post { key },
                author: Author {
                    id: author,
                    handle: format!("@{account_name}"),
                    display_name: display_name.unwrap_or(account_name.0),
                },
                content,
-            }))
+            })
        })
        .collect()
    })?;
@ -136,8 +173,22 @@ pub fn fetch_timeline(
}
/// Create a new post.
-pub fn create_post(db: &Store, author: Key, content: impl Into<Content>) -> store::Result<Key> {
-    db.run(|tx| mixin_post(tx, Key::gen(), author, content))
+pub fn create_post(cx: &Context, author: Key, content: impl Into<Content>) -> crate::Result<Post> {
+    let content = content.into();
+    cx.run(|tx| {
+        let key = Key::gen();
+        // Local stuff
+        mixin_post(tx, key, author, content)?;
+        // Federation stuff
+        let id = Id(format!("https://{}/o/{key}", cx.config.ap_domain));
+        tx.add_alias(key, id.clone())?;
+        tx.add_mixin(key, Object {
+            kind: ObjectKind::Notelike("Note".to_string()),
+            local: true,
+            id,
+        })?;
+        Ok(Post { key })
+    })
}

/// Add a post's mixins and predicates to an existing `node`.
@ -146,7 +197,7 @@ pub fn mixin_post(
    node: Key,
    author: Key,
    content: impl Into<Content>,
-) -> store::Result<Key> {
+) -> Result<Key, StoreError> {
    tx.update::<Profile>(author, |mut profile| {
        profile.post_count += 1;
        profile
@ -156,7 +207,7 @@ pub fn mixin_post(
    Ok(node)
}

-pub fn list_posts_by_author(db: &Store, author: Key) -> store::Result<Vec<(Key, Content)>> {
+pub fn list_posts_by_author(db: &Store, author: Key) -> Result<Vec<(Key, Content)>, StoreError> {
    db.run(|tx| {
        let posts = tx
            .join_on(|a: AuthorOf| a.object, tx.outgoing(author))?
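Putting the pieces of this commit together, a plausible end-to-end flow looks like the sketch below. This is hypothetical wiring: it assumes a `Context` has already been constructed, and `alice` is a made-up username.

// Create a local actor (id alias, inbox, Object mixin, RSA key pair),
// publish a note, then federate it as a signed `Create` activity.
async fn demo(cx: &Context<'_>) -> crate::Result<()> {
    let author = crate::actor::create_local(cx, "alice")?;
    let post = create_post(cx, author.key, "hello, fediverse")?;
    post.federate(cx).await?;
    Ok(())
}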
@ -45,7 +45,7 @@ use super::{
    types::{ArrowSpec, DataType},
    Batch, Store, Transaction,
};
-use crate::{util::IterExt as _, Key, Result};
+use crate::{internal::Context as _, util::IterExt as _, Key, Result};

/// A directed edge.
///
@ -122,6 +122,18 @@ impl Store {
    {
        op::between::<A>(self, a, b).map_ok(A::from)
    }
/// Construct the arrow from its identifier.
pub fn get_arrow<A>(&self, key: Key) -> Result<Option<A>>
where
A: Arrow<Kind = Multi>,
{
let arrow = self
.open(crate::types::MULTIEDGE_HEADERS)
.get(key)?
.map(|v| Key::split(v.as_ref()))
.map(|(origin, target)| A::from(Multi { origin, target, identity: key }));
Ok(arrow)
}
} }
impl Transaction<'_> { impl Transaction<'_> {
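A short usage sketch for `get_arrow`. Variable names are hypothetical; it assumes the application's `data::Create` arrow, which stores its own identity key as used earlier in this commit.

// Reconstruct the `Create` arrow stored under an activity's key, if any.
let create: Option<data::Create> = db.get_arrow(activity_key)?;
if let Some(data::Create { actor, object, .. }) = create {
    println!("{actor} created {object}");
}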
@ -5,7 +5,7 @@ use std::sync::Arc;
use rocksdb::{BoundColumnFamily, IteratorMode}; use rocksdb::{BoundColumnFamily, IteratorMode};
pub use self::cx::{Context, Query, Write}; pub use self::cx::{Context, Query, Write};
use crate::{util::IterExt as _, Error, Result}; use crate::{util::IterExt as _, Result, StoreError};
/// An internal interface to a specific keyspace that exposes basic hashmap-esque operations /// An internal interface to a specific keyspace that exposes basic hashmap-esque operations
/// on that keyspace, generic over whether the source of the data is a [`Transaction`] or a /// on that keyspace, generic over whether the source of the data is a [`Transaction`] or a
@ -41,13 +41,13 @@ where
Ok((ref k, _)) => k.starts_with(&t), Ok((ref k, _)) => k.starts_with(&t),
_ => true, _ => true,
}) })
.map_err(Error::Internal) .map_err(StoreError::Internal)
} }
/// List all pairs in the keyspace. /// List all pairs in the keyspace.
pub fn list(&self) -> impl Iterator<Item = Result<(Box<[u8]>, Box<[u8]>)>> + 'db { pub fn list(&self) -> impl Iterator<Item = Result<(Box<[u8]>, Box<[u8]>)>> + 'db {
self.context self.context
.full_iterator(&self.cf, IteratorMode::Start) .full_iterator(&self.cf, IteratorMode::Start)
.map_err(Error::Internal) .map_err(StoreError::Internal)
} }
/// Execute a range scan /// Execute a range scan
pub fn range<const N: usize>( pub fn range<const N: usize>(
@ -68,7 +68,7 @@ where
Ok((ref k, _)) => k.as_ref() < &upper, Ok((ref k, _)) => k.as_ref() < &upper,
_ => true, _ => true,
}) })
.map_err(Error::Internal) .map_err(StoreError::Internal)
} }
/// Join all the keys to their values in this keyspace. /// Join all the keys to their values in this keyspace.
/// ///
@ -106,7 +106,7 @@ mod cx {
}; };
use super::Keyspace; use super::Keyspace;
use crate::{util::IterExt as _, Backend, Batch, Error, Result, Store, Transaction, OK}; use crate::{util::IterExt as _, Backend, Batch, Result, Store, StoreError, Transaction, OK};
/// A context for executing database operations. /// A context for executing database operations.
pub trait Context { pub trait Context {
@ -173,7 +173,9 @@ mod cx {
cf: &impl AsColumnFamilyRef, cf: &impl AsColumnFamilyRef,
key: impl AsRef<[u8]>, key: impl AsRef<[u8]>,
) -> Result<Option<DBPinnableSlice<'a>>> { ) -> Result<Option<DBPinnableSlice<'a>>> {
self.inner.get_pinned_cf(cf, key).map_err(Error::Internal) self.inner
.get_pinned_cf(cf, key)
.map_err(StoreError::Internal)
} }
fn prefix_iterator<'a>( fn prefix_iterator<'a>(
@ -199,7 +201,7 @@ mod cx {
self.inner self.inner
.multi_get_cf(keys) .multi_get_cf(keys)
.into_iter() .into_iter()
.map_err(Error::Internal) .map_err(StoreError::Internal)
.collect() .collect()
} }
} }
@ -222,7 +224,9 @@ mod cx {
cf: &impl AsColumnFamilyRef, cf: &impl AsColumnFamilyRef,
key: impl AsRef<[u8]>, key: impl AsRef<[u8]>,
) -> Result<Option<DBPinnableSlice<'a>>> { ) -> Result<Option<DBPinnableSlice<'a>>> {
self.inner.get_pinned_cf(cf, key).map_err(Error::Internal) self.inner
.get_pinned_cf(cf, key)
.map_err(StoreError::Internal)
} }
fn prefix_iterator<'a>( fn prefix_iterator<'a>(
@ -248,14 +252,14 @@ mod cx {
self.inner self.inner
.multi_get_cf(keys) .multi_get_cf(keys)
.into_iter() .into_iter()
.map_err(Error::Internal) .map_err(StoreError::Internal)
.collect() .collect()
} }
} }
impl Write for Transaction<'_> { impl Write for Transaction<'_> {
fn delete(&self, cf: &impl AsColumnFamilyRef, key: impl AsRef<[u8]>) -> Result<()> { fn delete(&self, cf: &impl AsColumnFamilyRef, key: impl AsRef<[u8]>) -> Result<()> {
self.inner.delete_cf(cf, key).map_err(Error::Internal) self.inner.delete_cf(cf, key).map_err(StoreError::Internal)
} }
fn put( fn put(
@ -264,7 +268,9 @@ mod cx {
key: impl AsRef<[u8]>, key: impl AsRef<[u8]>,
val: impl AsRef<[u8]>, val: impl AsRef<[u8]>,
) -> Result<()> { ) -> Result<()> {
self.inner.put_cf(cf, key, val).map_err(Error::Internal) self.inner
.put_cf(cf, key, val)
.map_err(StoreError::Internal)
} }
} }
@ -6,7 +6,7 @@ use std::{
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use ulid::Ulid; use ulid::Ulid;
use crate::Error; use crate::StoreError;
/// A unique identifier for vertices in the database. /// A unique identifier for vertices in the database.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
@ -57,12 +57,12 @@ impl AsRef<[u8]> for Key {
} }
impl FromStr for Key { impl FromStr for Key {
type Err = crate::Error; type Err = crate::StoreError;
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
s.parse::<Ulid>() s.parse::<Ulid>()
.map(|x| Key(x.to_bytes())) .map(|x| Key(x.to_bytes()))
.map_err(|err| Error::BadKey(err)) .map_err(|err| StoreError::BadKey(err))
} }
} }
@ -78,7 +78,7 @@ impl Store {
/// changes are committed. /// changes are committed.
pub fn run<T, E>(&self, f: impl FnOnce(&Transaction<'_>) -> Result<T, E>) -> Result<T, E> pub fn run<T, E>(&self, f: impl FnOnce(&Transaction<'_>) -> Result<T, E>) -> Result<T, E>
where where
E: From<Error>, E: From<StoreError>,
{ {
let tx = Transaction { let tx = Transaction {
inner: self.inner.transaction(), inner: self.inner.transaction(),
@ -90,7 +90,7 @@ impl Store {
} else { } else {
tx.inner.commit() tx.inner.commit()
} { } {
return Err(E::from(Error::Internal(e))); return Err(E::from(StoreError::Internal(e)));
} }
r r
} }
@ -123,7 +123,7 @@ impl Store {
/// Delete the main data store in `state_dir` if it exists. /// Delete the main data store in `state_dir` if it exists.
pub fn nuke(state_dir: impl AsRef<Path>) -> Result<()> { pub fn nuke(state_dir: impl AsRef<Path>) -> Result<()> {
Backend::destroy(&Options::default(), state_dir.as_ref().join(STORE_NAME)) Backend::destroy(&Options::default(), state_dir.as_ref().join(STORE_NAME))
.map_err(Error::Internal) .map_err(StoreError::Internal)
} }
/// Open a store that lives until `f` returns, for testing. /// Open a store that lives until `f` returns, for testing.
pub fn test<T>(schema: Schema, f: impl FnOnce(Store) -> T) -> T { pub fn test<T>(schema: Schema, f: impl FnOnce(Store) -> T) -> T {
@ -136,11 +136,11 @@ impl Store {
pub const OK: Result<()> = Ok(()); pub const OK: Result<()> = Ok(());
/// Results from this component. /// Results from this component.
pub type Result<T, E = Error> = std::result::Result<T, E>; pub type Result<T, E = StoreError> = std::result::Result<T, E>;
/// Errors from the data store. /// Errors from the data store.
#[derive(From, Debug)] #[derive(From, Debug)]
pub enum Error { pub enum StoreError {
/// The requested value was expected to exist in a particular keyspace, but does not actually /// The requested value was expected to exist in a particular keyspace, but does not actually
/// exist there. This can occur on updates for example. /// exist there. This can occur on updates for example.
Missing, Missing,
@ -7,7 +7,7 @@ use super::{
types::{DataType, MixinSpec}, types::{DataType, MixinSpec},
Batch, Store, Transaction, Batch, Store, Transaction,
}; };
use crate::{util::IterExt as _, Error, Key, Result}; use crate::{util::IterExt as _, Key, Result, StoreError};
/// Mixins are the simplest pieces of data in the store. /// Mixins are the simplest pieces of data in the store.
pub trait Mixin: DataType<Type = MixinSpec> + Encode + Decode {} pub trait Mixin: DataType<Type = MixinSpec> + Encode + Decode {}
@ -53,6 +53,13 @@ impl Store {
{ {
op::join_on(self, iter) op::join_on(self, iter)
} }
/// Get multiple mixins associated with the same key.
pub fn get_mixin_many<T>(&self, key: Key) -> Result<T>
where
T: GetMany,
{
T::get(self, key)
}
} }
impl Transaction<'_> { impl Transaction<'_> {
@ -88,7 +95,7 @@ impl Transaction<'_> {
M: Mixin, M: Mixin,
{ {
if op::has_mixin::<M>(self, node)? { if op::has_mixin::<M>(self, node)? {
return Err(Error::Conflict); return Err(StoreError::Conflict);
} else { } else {
op::add_mixin::<M>(self, node, mixin) op::add_mixin::<M>(self, node, mixin)
} }
@ -136,6 +143,12 @@ impl Batch {
} }
} }
/// Getting tuples of stuff.
pub trait GetMany: Sized {
#[doc(hidden)]
fn get(cx: &impl crate::internal::Query, key: Key) -> Result<Self>;
}
mod op { mod op {
use std::ops::{Bound, RangeBounds}; use std::ops::{Bound, RangeBounds};
@ -143,7 +156,64 @@ mod op {
use either::Either; use either::Either;
use super::Mixin; use super::Mixin;
use crate::{internal::*, util::IterExt as _, Error, Key, Result}; use crate::{internal::*, util::IterExt as _, Key, Result, StoreError};
impl<A, B> super::GetMany for (A, B)
where
A: Mixin,
B: Mixin,
{
fn get(cx: &impl Query, key: Key) -> Result<Self> {
let ksps = [cx.open(A::SPEC.keyspace).cf, cx.open(B::SPEC.keyspace).cf];
let mut vec = cx.multi_get(ksps.iter().map(|c| (&*c, key)));
let b = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let a = vec.pop().ok_or(StoreError::Missing)??.unwrap();
Ok((decode(a)?, decode(b)?))
}
}
impl<A, B, C> super::GetMany for (A, B, C)
where
A: Mixin,
B: Mixin,
C: Mixin,
{
fn get(cx: &impl Query, key: Key) -> Result<Self> {
let ksps = [
cx.open(A::SPEC.keyspace).cf,
cx.open(B::SPEC.keyspace).cf,
cx.open(C::SPEC.keyspace).cf,
];
let mut vec = cx.multi_get(ksps.iter().map(|c| (&*c, key)));
let c = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let b = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let a = vec.pop().ok_or(StoreError::Missing)??.unwrap();
Ok((decode(a)?, decode(b)?, decode(c)?))
}
}
impl<A, B, C, D> super::GetMany for (A, B, C, D)
where
A: Mixin,
B: Mixin,
C: Mixin,
D: Mixin,
{
fn get(cx: &impl Query, key: Key) -> Result<Self> {
let ksps = [
cx.open(A::SPEC.keyspace).cf,
cx.open(B::SPEC.keyspace).cf,
cx.open(C::SPEC.keyspace).cf,
cx.open(D::SPEC.keyspace).cf,
];
let mut vec = cx.multi_get(ksps.iter().map(|c| (&*c, key)));
let d = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let c = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let b = vec.pop().ok_or(StoreError::Missing)??.unwrap();
let a = vec.pop().ok_or(StoreError::Missing)??.unwrap();
Ok((decode(a)?, decode(b)?, decode(c)?, decode(d)?))
}
}
pub fn update<M>( pub fn update<M>(
cx: &(impl Query + Write), cx: &(impl Query + Write),
@ -165,7 +235,7 @@ mod op {
// [^1]: https://github.com/facebook/rocksdb/blob/9d37408f9af15c7a1ae42f9b94d06b27d98a011a/include/rocksdb/options.h#L128 // [^1]: https://github.com/facebook/rocksdb/blob/9d37408f9af15c7a1ae42f9b94d06b27d98a011a/include/rocksdb/options.h#L128
let tree = cx.open(M::SPEC.keyspace); let tree = cx.open(M::SPEC.keyspace);
match tree.get(node.as_ref())? { match tree.get(node.as_ref())? {
None => Err(Error::Missing), None => Err(StoreError::Missing),
Some(buf) => { Some(buf) => {
let new = decode(buf).map(update).and_then(encode)?; let new = decode(buf).map(update).and_then(encode)?;
tree.set(node, new) tree.set(node, new)
@ -237,7 +307,7 @@ mod op {
} }
pub(super) fn encode(data: impl bincode::Encode) -> Result<Vec<u8>> { pub(super) fn encode(data: impl bincode::Encode) -> Result<Vec<u8>> {
bincode::encode_to_vec(data, bincode::config::standard()).map_err(Error::Encoding) bincode::encode_to_vec(data, bincode::config::standard()).map_err(StoreError::Encoding)
} }
pub(super) fn decode<T>(data: impl AsRef<[u8]>) -> Result<T> pub(super) fn decode<T>(data: impl AsRef<[u8]>) -> Result<T>
@ -245,7 +315,7 @@ mod op {
T: bincode::Decode, T: bincode::Decode,
{ {
bincode::decode_from_slice(data.as_ref(), bincode::config::standard()) bincode::decode_from_slice(data.as_ref(), bincode::config::standard())
.map_err(Error::Decoding) .map_err(StoreError::Decoding)
.map(|(v, _)| v) .map(|(v, _)| v)
} }
} }
@ -56,6 +56,14 @@ pub trait IterExt: Iterator + Sized {
{ {
self.filter_map(move |r| r.and_then(|x| f(x)).transpose()) self.filter_map(move |r| r.and_then(|x| f(x)).transpose())
} }
/// Like [`Iterator::find`], but for iterators of `Result`: errors are passed through, and the
/// first `Ok` item matching the predicate is returned.
fn find_ok<'a, I, E>(mut self, mut f: impl FnMut(&I) -> bool) -> Result<Option<I>, E>
where
Self: Iterator<Item = Result<I, E>> + 'a,
{
self.find(move |r| r.as_ref().is_ok_and(|x| f(x)))
.transpose()
}
}

impl<I> IterExt for I where I: Iterator {}
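A quick sketch of how `find_ok` reads at a call site. This is hypothetical: it assumes an arrow iterator like the ones used above, where each item is a `Result`.

// First outgoing `AuthorOf` arrow from `author` that points at `post_key`;
// any store error is propagated by `?`, a miss yields `None`.
let arrow = tx
    .outgoing::<AuthorOf>(author)
    .find_ok(|a| a.object == post_key)?;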
@ -1,6 +1,6 @@
unstable_features = true
overflow_delimited_expr = true
+group_imports = "StdExternalCrate"
use_field_init_shorthand = true
reorder_modules = false
-reorder_imports = false
struct_lit_width = 30