From d99a31ec64eb120dee72c4e04e2efab76bc944d2 Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Wed, 2 Apr 2025 00:08:48 +0200 Subject: [PATCH 01/12] node: Block document operation till node is running This ensures that operations are blocked and stored till the node is running. They will be executed in call order once the node is running. --- aardvark-doc/src/document.rs | 14 +- aardvark-doc/src/service.rs | 14 +- aardvark-node/src/node.rs | 287 ++++++++++++++++++----------------- 3 files changed, 168 insertions(+), 147 deletions(-) diff --git a/aardvark-doc/src/document.rs b/aardvark-doc/src/document.rs index 9bca38c4..b2503c7f 100644 --- a/aardvark-doc/src/document.rs +++ b/aardvark-doc/src/document.rs @@ -275,12 +275,14 @@ mod imp { self.parent_constructed(); if self.id.get().is_none() { - let document_id = self - .obj() - .service() - .node() - .create_document() - .expect("Create document"); + let document_id = glib::MainContext::new().block_on(async move { + self.obj() + .service() + .node() + .create_document() + .await + .expect("Create document") + }); self.set_id(Some(DocumentId(document_id))); } diff --git a/aardvark-doc/src/service.rs b/aardvark-doc/src/service.rs index 300a853b..d9d986d0 100644 --- a/aardvark-doc/src/service.rs +++ b/aardvark-doc/src/service.rs @@ -1,6 +1,6 @@ use glib::subclass::prelude::*; use p2panda_core::{Hash, PrivateKey, PublicKey}; -use tracing::info; +use tracing::{error, info}; use aardvark_node::Node; @@ -35,8 +35,16 @@ impl Service { let private_key = self.imp().private_key.clone(); let network_id = b"aardvark <3"; info!("my public key: {}", private_key.public_key()); - - self.imp().node.run(private_key, Hash::new(network_id)); + glib::MainContext::new().block_on(async move { + if let Err(error) = self + .imp() + .node + .run(private_key, Hash::new(network_id)) + .await + { + error!("Running node failed: {error}"); + } + }); } pub fn shutdown(&self) { diff --git a/aardvark-node/src/node.rs b/aardvark-node/src/node.rs 
index 4afa6244..5a1e918b 100644 --- a/aardvark-node/src/node.rs +++ b/aardvark-node/src/node.rs @@ -1,11 +1,11 @@ -use std::sync::Arc; +use std::sync::{Arc, OnceLock}; use anyhow::Result; use p2panda_core::{Hash, PrivateKey}; use p2panda_net::{SyncConfiguration, SystemEvent, TopicId}; use p2panda_sync::log_sync::LogSyncProtocol; use tokio::runtime::{Builder, Runtime}; -use tokio::sync::OnceCell; +use tokio::sync::Notify; use tracing::warn; use crate::document::{DocumentId, SubscribableDocument}; @@ -15,7 +15,8 @@ use crate::store::{DocumentStore, OperationStore}; #[derive(Clone)] pub struct Node { - inner: Arc, + inner: OnceLock>, + ready_notify: Arc, } impl Default for Node { @@ -24,82 +25,91 @@ impl Default for Node { } } +#[derive(Debug)] struct NodeInner { runtime: Runtime, operation_store: OperationStore, document_store: DocumentStore, - network: OnceCell, - private_key: OnceCell, + network: Network, + private_key: PrivateKey, } impl Node { pub fn new() -> Self { - // FIXME: Stores are currently in-memory and do not persist data on the file-system. - // Related issue: https://github.com/p2panda/aardvark/issues/31 - let operation_store = OperationStore::new(); - let document_store = DocumentStore::new(); + Self { + inner: OnceLock::new(), + ready_notify: Arc::new(Notify::new()), + } + } + + async fn inner(&self) -> &Arc { + if let Some(inner) = self.inner.get() { + inner + } else { + self.ready_notify.notified().await; + self.inner + .get() + .expect("Inner should always be set at this point") + } + } + pub async fn run(&self, private_key: PrivateKey, network_id: Hash) -> Result<()> { + // FIXME: Stores are currently in-memory and do not persist data on the file-system. 
+ // Related issue: https://github.com/p2panda/aardvark/issues/3 let runtime = Builder::new_multi_thread() .worker_threads(1) .enable_all() - .build() - .expect("single-threaded tokio runtime"); + .build()?; - Self { - inner: Arc::new(NodeInner { - runtime, - operation_store, - document_store, - network: OnceCell::new(), - private_key: OnceCell::new(), - }), - } - } + let _guard = runtime.enter(); + + let operation_store = OperationStore::new(); + let document_store = DocumentStore::new(); - pub fn run(&self, private_key: PrivateKey, network_id: Hash) { let sync_config = { - let sync = LogSyncProtocol::new( - self.inner.document_store.clone(), - self.inner.operation_store.clone(), - ); + let sync = LogSyncProtocol::new(document_store.clone(), operation_store.clone()); SyncConfiguration::::new(sync) }; - let operation_store = self.inner.operation_store.clone(); - let inner = self.inner.clone(); + let network = Network::spawn( + network_id, + private_key.clone(), + sync_config, + operation_store.clone(), + ) + .await?; - self.inner.runtime.block_on(async move { - inner - .private_key - .set(private_key.clone()) - .expect("network can be run only once"); + self.inner + .set(Arc::new(NodeInner { + runtime, + operation_store, + document_store, + network, + private_key, + })) + .expect("Node can be run only once"); + self.ready_notify.notify_waiters(); - inner - .network - .get_or_init(|| async { - Network::spawn(network_id, private_key, sync_config, operation_store) - .await - .expect("networking backend") - }) - .await; - }); + Ok(()) } pub fn shutdown(&self) { - let network = self.inner.network.get().expect("network running").clone(); - self.inner.runtime.block_on(async move { - network.shutdown().await.expect("network to shutdown"); - }); + if let Some(inner) = self.inner.get() { + let network = inner.network.clone(); + inner.runtime.block_on(async move { + network.shutdown().await.expect("network to shutdown"); + }); + } } - pub fn create_document(&self) -> Result { 
- let private_key = self.inner.private_key.get().expect("private key"); + pub async fn create_document(&self) -> Result { + let inner = self.inner().await; - let mut operation_store = self.inner.operation_store.clone(); - let operation = self.inner.runtime.block_on(async { + let inner_clone = inner.clone(); + let operation = inner.runtime.block_on(async { create_operation( - &mut operation_store, - private_key, + &mut inner_clone.operation_store.clone(), + &inner_clone.private_key, LogType::Snapshot, None, None, @@ -121,40 +131,29 @@ impl Node { document_id: DocumentId, document: T, ) -> Result<()> { - let private_key = self.inner.private_key.get().expect("private key").clone(); let document = Arc::new(document); + let inner = self.inner().await; // Add ourselves as an author to the document store. - self.inner + inner .document_store - .add_author(document_id, private_key.public_key()) + .add_author(document_id, inner.private_key.public_key()) .await?; - let inner_clone = self.inner.clone(); - let (document_tx, mut document_rx, mut system_event) = self - .inner + let inner_clone = inner.clone(); + let (document_tx, mut document_rx, mut system_event) = inner .runtime - .spawn(async move { - let network = inner_clone - .network - // Allow concurrent calls by awaiting network instance as it might be still - // in process of initialisation. - .get_or_init(|| async { - unreachable!("network was initialised in `run` method"); - }) - .await; - network.subscribe(document_id).await - }) + .spawn(async move { inner_clone.network.subscribe(document_id).await }) .await .unwrap()?; - self.inner + inner .document_store .set_subscription_for_document(document_id, document_tx) .await; - let inner = self.inner.clone(); + let inner_clone = inner.clone(); let document_clone = document.clone(); - self.inner.runtime.spawn(async move { + inner.runtime.spawn(async move { // Process the operations and forward application messages to app layer. 
This is where // we "materialize" our application state from incoming "application events". while let Some(operation) = document_rx.recv().await { @@ -169,7 +168,7 @@ impl Node { } // When we discover a new author we need to add them to our document store. - inner + inner_clone .document_store .add_author(document_id, operation.header.public_key) .await @@ -182,7 +181,7 @@ impl Node { } }); - self.inner.runtime.spawn(async move { + inner.runtime.spawn(async move { while let Ok(system_event) = system_event.recv().await { match system_event { SystemEvent::GossipJoined { topic_id, peers } @@ -213,30 +212,35 @@ impl Node { /// This should be used to inform all subscribed peers about small changes to the text /// document (Delta-Based CRDT). pub async fn delta(&self, document_id: DocumentId, bytes: Vec) -> Result<()> { - let private_key = self.inner.private_key.get().expect("private key"); - - // Append one operation to our "ephemeral" delta log. - let operation = create_operation( - &mut self.inner.operation_store.clone(), - &private_key, - LogType::Delta, - Some(document_id), - Some(&bytes), - false, - ) - .await?; - - let document_tx = self - .inner - .document_store - .subscription_for_document(document_id) - .await - .expect("Not subscribed to document"); + let inner = self.inner().await; - // Broadcast operation on gossip overlay. - document_tx.send(operation).await?; + let inner_clone = inner.clone(); + inner + .runtime + .spawn(async move { + let mut operation_store = inner_clone.operation_store.clone(); + // Append one operation to our "ephemeral" delta log. + let operation = create_operation( + &mut operation_store, + &inner_clone.private_key, + LogType::Delta, + Some(document_id), + Some(&bytes), + false, + ) + .await?; + + let document_tx = inner_clone + .document_store + .subscription_for_document(document_id) + .await + .expect("Not subscribed to document"); - Ok(()) + // Broadcast operation on gossip overlay. 
+ document_tx.send(operation).await?; + Ok(()) + }) + .await? } /// Same as [`Self::Delta`] next to persisting a whole snapshot and pruning. @@ -253,50 +257,57 @@ impl Node { delta_bytes: Vec, snapshot_bytes: Vec, ) -> Result<()> { - let private_key = self.inner.private_key.get().expect("private key"); - - // Append an operation to our "snapshot" log and set the prune flag to - // true. This will remove previous snapshots. - // - // Snapshots are not broadcasted on the gossip overlay as they would be - // too large. Peers will sync them up when they join the document. - create_operation( - &mut self.inner.operation_store.clone(), - &private_key, - LogType::Snapshot, - Some(document_id), - Some(&snapshot_bytes), - true, - ) - .await?; + let inner = self.inner().await; - // Append an operation to our "ephemeral" delta log and set the prune - // flag to true. - // - // This signals removing all previous "delta" operations now. This is - // some sort of garbage collection whenever we snapshot. Snapshots - // already contain all history, there is no need to keep duplicate - // "delta" data around. - let operation = create_operation( - &mut self.inner.operation_store.clone(), - &private_key, - LogType::Delta, - Some(document_id.into()), - Some(&delta_bytes), - true, - ) - .await?; - - let document_tx = self - .inner - .document_store - .subscription_for_document(document_id) - .await - .expect("Not subscribed to document"); + let inner_clone = inner.clone(); + inner + .runtime + .spawn(async move { + let mut operation_store = inner_clone.operation_store.clone(); + + // Append an operation to our "snapshot" log and set the prune flag to + // true. This will remove previous snapshots. + // + // Snapshots are not broadcasted on the gossip overlay as they would be + // too large. Peers will sync them up when they join the document. 
+ create_operation( + &mut operation_store, + &inner_clone.private_key, + LogType::Snapshot, + Some(document_id), + Some(&snapshot_bytes), + true, + ) + .await?; + + // Append an operation to our "ephemeral" delta log and set the prune + // flag to true. + // + // This signals removing all previous "delta" operations now. This is + // some sort of garbage collection whenever we snapshot. Snapshots + // already contain all history, there is no need to keep duplicate + // "delta" data around. + let operation = create_operation( + &mut operation_store, + &inner_clone.private_key, + LogType::Delta, + Some(document_id.into()), + Some(&delta_bytes), + true, + ) + .await?; + + let document_tx = inner_clone + .document_store + .subscription_for_document(document_id) + .await + .expect("Not subscribed to document"); - // Broadcast operation on gossip overlay. - document_tx.send(operation).await?; + // Broadcast operation on gossip overlay. + document_tx.send(operation).await?; - Ok(()) + Ok(()) + }) + .await? 
} } From 0a98c39a472563bfaa36241e1cb95fc2b188659a Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Sun, 6 Apr 2025 18:14:37 +0200 Subject: [PATCH 02/12] node: Use sqlite for OperationStore --- Cargo.lock | 406 ++++++++++++++++++++++++++++++++++++ aardvark-doc/src/service.rs | 2 +- aardvark-node/Cargo.toml | 3 +- aardvark-node/src/node.rs | 35 +++- aardvark-node/src/store.rs | 4 +- 5 files changed, 444 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 938ce6e6..d299b0f5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -48,6 +48,7 @@ dependencies = [ "p2panda-stream", "p2panda-sync", "serde", + "sqlx", "tokio", "tokio-stream", "tracing", @@ -373,6 +374,15 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + [[package]] name = "atomic-polyfill" version = "1.0.3" @@ -468,6 +478,9 @@ name = "bitflags" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +dependencies = [ + "serde", +] [[package]] name = "bitmaps" @@ -789,6 +802,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -922,6 +944,7 @@ checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", "der_derive", + "pem-rfc7468", "zeroize", ] @@ -1010,6 +1033,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", 
+ "const-oid", "crypto-common", "subtle", ] @@ -1045,6 +1069,12 @@ dependencies = [ "litrs", ] +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + [[package]] name = "ed25519" version = "2.2.3" @@ -1076,6 +1106,9 @@ name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] [[package]] name = "embedded-io" @@ -1189,6 +1222,17 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + [[package]] name = "event-listener" version = "5.4.0" @@ -1347,6 +1391,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + [[package]] name = "futures-io" version = "0.3.31" @@ -1836,6 +1891,15 @@ dependencies = [ "foldhash", ] +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.2", +] + [[package]] name = "heapless" version = "0.7.17" @@ -1957,6 +2021,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + 
[[package]] name = "hmac" version = "0.12.1" @@ -1982,6 +2055,15 @@ version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a8575493d277c9092b988c780c94737fb9fd8651a1001e16bee3eccfc1baedb" +[[package]] +name = "home" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "hostname" version = "0.4.0" @@ -2691,6 +2773,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "leb128" @@ -2735,6 +2820,23 @@ version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +[[package]] +name = "libm" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" + +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + [[package]] name = "linux-raw-sys" version = "0.4.15" @@ -2982,6 +3084,16 @@ dependencies = [ "regex-automata 0.1.10", ] +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + [[package]] name = "md5" version = "0.7.0" @@ -3336,6 +3448,23 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + [[package]] name = "num-complex" version = "0.4.6" @@ -3389,6 +3518,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -3560,7 +3690,10 @@ name = "p2panda-store" version = "0.3.0" source = "git+https://github.com/p2panda/p2panda?rev=f3a016324b69beac45cf20a792fe6890cb1a21e3#f3a016324b69beac45cf20a792fe6890cb1a21e3" dependencies = [ + "ciborium", + "hex", "p2panda-core", + "sqlx", "thiserror 2.0.12", "trait-variant", ] @@ -3664,6 +3797,15 @@ dependencies = [ "serde", ] +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -3782,6 +3924,17 @@ dependencies = [ "z32", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + [[package]] name = "pkcs8" version = "0.10.2" @@ -4295,6 +4448,26 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rsa" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + 
[[package]] name = "rtnetlink" version = "0.13.1" @@ -4731,6 +4904,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ + "digest", "rand_core 0.6.4", ] @@ -4835,6 +5009,193 @@ dependencies = [ "der", ] +[[package]] +name = "sqlx" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4410e73b3c0d8442c5f99b425d7a435b5ee0ae4167b3196771dd3f7a01be745f" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a007b6936676aa9ab40207cde35daab0a04b823be8ae004368c0793b96a61e0" +dependencies = [ + "bytes", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.2", + "hashlink", + "indexmap", + "log", + "memchr", + "once_cell", + "percent-encoding", + "serde", + "serde_json", + "sha2", + "smallvec", + "thiserror 2.0.12", + "tokio", + "tokio-stream", + "tracing", + "url", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3112e2ad78643fef903618d78cf0aec1cb3134b019730edb039b69eaf531f310" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 2.0.100", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e9f90acc5ab146a99bf5061a7eb4976b573f560bc898ef3bf8435448dd5e7ad" +dependencies = [ + "dotenvy", + "either", + "heck 0.5.0", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 2.0.100", + "tempfile", + "tokio", + 
"url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4560278f0e00ce64938540546f59f590d60beee33fffbd3b9cd47851e5fff233" +dependencies = [ + "atoi", + "base64", + "bitflags 2.9.0", + "byteorder", + "bytes", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.12", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5b98a57f363ed6764d5b3a12bfedf62f07aa16e1856a7ddc2a0bb190a959613" +dependencies = [ + "atoi", + "base64", + "bitflags 2.9.0", + "byteorder", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.12", + "tracing", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f85ca71d3a5b24e64e1d08dd8fe36c6c95c339a896cc33068148906784620540" +dependencies = [ + "atoi", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "tracing", + "url", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -4847,6 +5208,17 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + [[package]] name = "strsim" version = "0.11.1" @@ -5460,6 +5832,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + [[package]] name = "unicode-ident" version = "1.0.18" @@ -5475,6 +5853,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -5557,6 +5941,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + [[package]] name = "version-compare" version = "0.2.0" @@ -5603,6 +5993,12 @@ dependencies = [ "wit-bindgen-rt", ] +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" version = "0.2.100" @@ -5725,6 +6121,16 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "whoami" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" +dependencies = [ + "redox_syscall", + "wasite", +] + [[package]] name = "widestring" version = "1.1.0" diff --git a/aardvark-doc/src/service.rs b/aardvark-doc/src/service.rs index d9d986d0..f9f28c37 100644 --- a/aardvark-doc/src/service.rs +++ b/aardvark-doc/src/service.rs @@ -39,7 +39,7 @@ impl Service { if let Err(error) = self .imp() .node - .run(private_key, Hash::new(network_id)) + .run(private_key, Hash::new(network_id), None) .await { error!("Running node failed: {error}"); diff --git a/aardvark-node/Cargo.toml b/aardvark-node/Cargo.toml index 63f3e5e3..4c11e7d3 100644 --- a/aardvark-node/Cargo.toml +++ b/aardvark-node/Cargo.toml @@ -15,10 +15,11 @@ ciborium = "0.2.2" p2panda-core = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3" } p2panda-discovery = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3", features = ["mdns"] } p2panda-net = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3" } -p2panda-store = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3" } +p2panda-store = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3", features = ["sqlite"] } p2panda-stream = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3" } p2panda-sync = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3", features = ["log-sync"] } serde = { version = "1.0.215", features = ["derive"] } +sqlx = { version = "0.8.3", features = ["runtime-tokio", "sqlite"], default-features = false} tokio = { version = "1.42.0", features = ["rt", "sync"] } tokio-stream = "0.1.17" tracing = "0.1" diff --git a/aardvark-node/src/node.rs b/aardvark-node/src/node.rs index 5a1e918b..afe4b9cb 100644 --- a/aardvark-node/src/node.rs +++ 
b/aardvark-node/src/node.rs @@ -1,9 +1,12 @@ +use std::path::Path; use std::sync::{Arc, OnceLock}; use anyhow::Result; use p2panda_core::{Hash, PrivateKey}; use p2panda_net::{SyncConfiguration, SystemEvent, TopicId}; +use p2panda_store::sqlite::store::run_pending_migrations; use p2panda_sync::log_sync::LogSyncProtocol; +use sqlx::sqlite; use tokio::runtime::{Builder, Runtime}; use tokio::sync::Notify; use tracing::warn; @@ -53,7 +56,12 @@ impl Node { } } - pub async fn run(&self, private_key: PrivateKey, network_id: Hash) -> Result<()> { + pub async fn run( + &self, + private_key: PrivateKey, + network_id: Hash, + db_location: Option<&Path>, + ) -> Result<()> { // FIXME: Stores are currently in-memory and do not persist data on the file-system. // Related issue: https://github.com/p2panda/aardvark/issues/3 let runtime = Builder::new_multi_thread() @@ -63,7 +71,30 @@ impl Node { let _guard = runtime.enter(); - let operation_store = OperationStore::new(); + let connection_options = sqlx::sqlite::SqliteConnectOptions::new() + .shared_cache(true) + .create_if_missing(true); + let connection_options = if let Some(db_location) = db_location { + connection_options.filename(db_location) + } else { + connection_options.in_memory(true) + }; + + let pool = if db_location.is_some() { + sqlx::sqlite::SqlitePool::connect_with(connection_options).await? + } else { + // FIXME: we need to set max connection to 1 for in memory sqlite DB. + // Probably has to do something with this issue: https://github.com/launchbadge/sqlx/issues/2510 + let pool_options = sqlite::SqlitePoolOptions::new().max_connections(1); + pool_options.connect_with(connection_options).await? 
+ }; + + // FIXME: Migrate the OperationStore Sqlite DB, I think p2panda-store should call this internally + run_pending_migrations(&pool).await?; + // TODO: Do migration of our DB + // sqlx::migrate!().run(&pool).await?; + + let operation_store = OperationStore::new(pool); let document_store = DocumentStore::new(); let sync_config = { diff --git a/aardvark-node/src/store.rs b/aardvark-node/src/store.rs index df867eed..6fdcbea1 100644 --- a/aardvark-node/src/store.rs +++ b/aardvark-node/src/store.rs @@ -6,7 +6,7 @@ use tokio::sync::mpsc; use anyhow::Result; use async_trait::async_trait; use p2panda_core::{Operation, PublicKey}; -use p2panda_store::MemoryStore; +use p2panda_store::SqliteStore; use p2panda_sync::log_sync::TopicLogMap; use serde::{Deserialize, Serialize}; use tokio::sync::RwLock; @@ -101,4 +101,4 @@ impl TopicLogMap for DocumentStore { } } -pub type OperationStore = MemoryStore; +pub type OperationStore = SqliteStore; From 033423acd5fad5b743ff1d7a1ae092211994f04c Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Sun, 6 Apr 2025 23:35:02 +0200 Subject: [PATCH 03/12] node: Move document_tx hashmap from DocumentStore to Network --- aardvark-node/src/network.rs | 94 ++++++++++++++------- aardvark-node/src/node.rs | 156 +++++++++++++++++++---------------- aardvark-node/src/store.rs | 22 +---- 3 files changed, 147 insertions(+), 125 deletions(-) diff --git a/aardvark-node/src/network.rs b/aardvark-node/src/network.rs index 9779f2bb..d2359580 100644 --- a/aardvark-node/src/network.rs +++ b/aardvark-node/src/network.rs @@ -7,16 +7,18 @@ use p2panda_discovery::mdns::LocalDiscovery; use p2panda_net::config::GossipConfig; use p2panda_net::{FromNetwork, NetworkBuilder, SyncConfiguration, SystemEvent, ToNetwork}; use p2panda_stream::{DecodeExt, IngestExt}; -use tokio::sync::{broadcast, mpsc}; -use tokio::task::JoinHandle; +use std::collections::HashMap; +use tokio::sync::RwLock; +use tokio::sync::mpsc; use tokio_stream::StreamExt; use 
tokio_stream::wrappers::ReceiverStream; use tracing::error; -#[derive(Clone, Debug)] +#[derive(Debug)] pub struct Network { operation_store: OperationStore, network: p2panda_net::Network, + document_tx: RwLock>>, } impl Network { @@ -43,29 +45,32 @@ impl Network { Ok(Self { operation_store, network, + document_tx: RwLock::new(HashMap::new()), }) } - pub async fn shutdown(self) -> Result<()> { - self.network.shutdown().await?; + pub async fn shutdown(&self) -> Result<()> { + self.network.clone().shutdown().await?; Ok(()) } - pub async fn subscribe( + pub async fn subscribe( &self, document: DocumentId, - ) -> Result<( - mpsc::Sender>, - mpsc::Receiver>, - broadcast::Receiver>, - )> { - let (to_network, mut from_app) = mpsc::channel::>(128); - let (to_app, from_network) = mpsc::channel(128); - + f: impl Fn(Operation) -> Fut + Send + 'static, + ) -> Result<()> + where + Fut: Future + Send, + { // Join a gossip overlay with peers who are interested in the same document and start sync // with them. let (document_tx, document_rx, _gossip_ready) = self.network.subscribe(document).await?; + { + let mut store = self.document_tx.write().await; + store.insert(document.clone(), document_tx); + } + let stream = ReceiverStream::new(document_rx); // Incoming gossip payloads have a slightly different shape than sync. We convert them @@ -113,30 +118,57 @@ impl Network { }); // Send checked and ingested operations for this document to application layer. - let _result: JoinHandle> = tokio::task::spawn(async move { + tokio::task::spawn(async move { while let Some(operation) = stream.next().await { - to_app.send(operation).await?; + f(operation).await; } - Ok(()) }); - // Receive operations from application layer and forward them into gossip overlay for this - // document. 
- let _result: JoinHandle> = tokio::task::spawn(async move { - while let Some(operation) = from_app.recv().await { - let encoded_gossip_operation = - encode_gossip_operation(operation.header, operation.body)?; - document_tx - .send(ToNetwork::Message { - bytes: encoded_gossip_operation, - }) - .await?; + Ok(()) + } + + pub async fn subscribe_events( + &self, + f: impl Fn(SystemEvent) -> Fut + Send + 'static, + ) -> Result<()> + where + Fut: Future + Send, + { + let mut events = self.network.events().await?; + + tokio::task::spawn(async move { + while let Ok(event) = events.recv().await { + f(event).await; } - Ok(()) }); - let events = self.network.events().await?; + Ok(()) + } + + /// Send operations to the gossip overlay for `document`. + /// + /// This will panic if the `document` wasn't subscribed to. + pub async fn send_operation( + &self, + document: &DocumentId, + operation: Operation, + ) -> Result<()> { + let document_tx = { + self.document_tx + .read() + .await + .get(document) + .cloned() + .expect("Not subscribed to document with id {document_id}") + }; - Ok((to_network, from_network, events)) + let encoded_gossip_operation = encode_gossip_operation(operation.header, operation.body)?; + document_tx + .send(ToNetwork::Message { + bytes: encoded_gossip_operation, + }) + .await?; + + Ok(()) } } diff --git a/aardvark-node/src/node.rs b/aardvark-node/src/node.rs index afe4b9cb..9dc73020 100644 --- a/aardvark-node/src/node.rs +++ b/aardvark-node/src/node.rs @@ -126,9 +126,13 @@ impl Node { pub fn shutdown(&self) { if let Some(inner) = self.inner.get() { - let network = inner.network.clone(); + let inner_clone = inner.clone(); inner.runtime.block_on(async move { - network.shutdown().await.expect("network to shutdown"); + inner_clone + .network + .shutdown() + .await + .expect("network to shutdown"); }); } } @@ -172,68 +176,79 @@ impl Node { .await?; let inner_clone = inner.clone(); - let (document_tx, mut document_rx, mut system_event) = inner - .runtime - 
.spawn(async move { inner_clone.network.subscribe(document_id).await }) - .await - .unwrap()?; + let document_clone = document.clone(); inner - .document_store - .set_subscription_for_document(document_id, document_tx) - .await; + .runtime + .spawn(async move { + let inner_clone2 = inner_clone.clone(); + inner_clone2 + .network + .subscribe(document_id, move |operation| { + let inner_clone = inner_clone.clone(); + let document_clone = document_clone.clone(); + async move { + // Process the operations and forward application messages to app layer. This is where + // we "materialize" our application state from incoming "application events". + // Validation for our custom "document" extension. + if let Err(err) = validate_operation(&operation, &document_id) { + warn!( + public_key = %operation.header.public_key, + seq_num = %operation.header.seq_num, + "{err}" + ); + return; + } + + // When we discover a new author we need to add them to our document store. + inner_clone + .document_store + .add_author(document_id, operation.header.public_key) + .await + .expect("Unable to add author to DocumentStore"); + + // Forward the payload up to the app. + if let Some(body) = operation.body { + document_clone + .bytes_received(operation.header.public_key, &body.to_bytes()); + } + } + }) + .await + }) + .await??; let inner_clone = inner.clone(); - let document_clone = document.clone(); - inner.runtime.spawn(async move { - // Process the operations and forward application messages to app layer. This is where - // we "materialize" our application state from incoming "application events". - while let Some(operation) = document_rx.recv().await { - // Validation for our custom "document" extension. - if let Err(err) = validate_operation(&operation, &document_id) { - warn!( - public_key = %operation.header.public_key, - seq_num = %operation.header.seq_num, - "{err}" - ); - continue; - } - - // When we discover a new author we need to add them to our document store. 
+ inner + .runtime + .spawn(async move { inner_clone - .document_store - .add_author(document_id, operation.header.public_key) + .network + .subscribe_events(move |system_event| { + let document = document.clone(); + async move { + match system_event { + SystemEvent::GossipJoined { topic_id, peers } + if topic_id == document_id.id() => + { + document.authors_joined(peers); + } + SystemEvent::GossipNeighborUp { topic_id, peer } + if topic_id == document_id.id() => + { + document.author_set_online(peer, true); + } + SystemEvent::GossipNeighborDown { topic_id, peer } + if topic_id == document_id.id() => + { + document.author_set_online(peer, false); + } + _ => {} + }; + } + }) .await - .expect("Unable to add author to DocumentStore"); - - // Forward the payload up to the app. - if let Some(body) = operation.body { - document_clone.bytes_received(operation.header.public_key, &body.to_bytes()); - } - } - }); - - inner.runtime.spawn(async move { - while let Ok(system_event) = system_event.recv().await { - match system_event { - SystemEvent::GossipJoined { topic_id, peers } - if topic_id == document_id.id() => - { - document.authors_joined(peers); - } - SystemEvent::GossipNeighborUp { topic_id, peer } - if topic_id == document_id.id() => - { - document.author_set_online(peer, true); - } - SystemEvent::GossipNeighborDown { topic_id, peer } - if topic_id == document_id.id() => - { - document.author_set_online(peer, false); - } - _ => {} - }; - } - }); + }) + .await??; Ok(()) } @@ -261,14 +276,12 @@ impl Node { ) .await?; - let document_tx = inner_clone - .document_store - .subscription_for_document(document_id) - .await - .expect("Not subscribed to document"); - // Broadcast operation on gossip overlay. - document_tx.send(operation).await?; + inner_clone + .network + .send_operation(&document_id, operation) + .await?; + Ok(()) }) .await? 
@@ -328,14 +341,11 @@ impl Node { ) .await?; - let document_tx = inner_clone - .document_store - .subscription_for_document(document_id) - .await - .expect("Not subscribed to document"); - // Broadcast operation on gossip overlay. - document_tx.send(operation).await?; + inner_clone + .network + .send_operation(&document_id, operation) + .await?; Ok(()) }) diff --git a/aardvark-node/src/store.rs b/aardvark-node/src/store.rs index 6fdcbea1..10069c59 100644 --- a/aardvark-node/src/store.rs +++ b/aardvark-node/src/store.rs @@ -1,11 +1,10 @@ use std::collections::{HashMap, HashSet}; use std::hash::Hash as StdHash; use std::sync::Arc; -use tokio::sync::mpsc; use anyhow::Result; use async_trait::async_trait; -use p2panda_core::{Operation, PublicKey}; +use p2panda_core::PublicKey; use p2panda_store::SqliteStore; use p2panda_sync::log_sync::TopicLogMap; use serde::{Deserialize, Serialize}; @@ -22,7 +21,6 @@ pub struct DocumentStore { #[derive(Debug)] struct DocumentStoreInner { authors: HashMap>, - document_tx: HashMap>>, } impl DocumentStore { @@ -30,28 +28,10 @@ impl DocumentStore { Self { inner: Arc::new(RwLock::new(DocumentStoreInner { authors: HashMap::new(), - document_tx: HashMap::new(), })), } } - pub async fn set_subscription_for_document( - &self, - document_id: DocumentId, - tx: mpsc::Sender>, - ) { - let mut store = self.inner.write().await; - store.document_tx.insert(document_id, tx); - } - - pub async fn subscription_for_document( - &self, - document_id: DocumentId, - ) -> Option>> { - let store = self.inner.read().await; - store.document_tx.get(&document_id).cloned() - } - pub async fn add_author(&self, document: DocumentId, public_key: PublicKey) -> Result<()> { let mut store = self.inner.write().await; store From 5b0da00ba582e3b62b73e66e51154a5ba3a65907 Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Tue, 15 Apr 2025 16:16:07 +0200 Subject: [PATCH 04/12] node: Add unsubscribe for documents This probably doesn't fully unsubscribe since p2panda doesn't 
really implement it, but at least we don't store any updates to the OperationStore. See for more details: https://github.com/p2panda/p2panda/issues/639 --- aardvark-node/src/document.rs | 6 +++ aardvark-node/src/network.rs | 6 +++ aardvark-node/src/node.rs | 76 +++++++++++++++++++---------------- 3 files changed, 54 insertions(+), 34 deletions(-) diff --git a/aardvark-node/src/document.rs b/aardvark-node/src/document.rs index 689422db..f96c4ac5 100644 --- a/aardvark-node/src/document.rs +++ b/aardvark-node/src/document.rs @@ -18,6 +18,12 @@ impl TopicId for DocumentId { } } +impl From<[u8; 32]> for DocumentId { + fn from(bytes: [u8; 32]) -> Self { + Self(Hash::from_bytes(bytes)) + } +} + impl From for DocumentId { fn from(document_id: Hash) -> Self { Self(document_id) diff --git a/aardvark-node/src/network.rs b/aardvark-node/src/network.rs index d2359580..3d5f8769 100644 --- a/aardvark-node/src/network.rs +++ b/aardvark-node/src/network.rs @@ -127,6 +127,12 @@ impl Network { Ok(()) } + pub async fn unsubscribe(&self, document_id: &DocumentId) -> Result<()> { + self.document_tx.write().await.remove(document_id); + + Ok(()) + } + pub async fn subscribe_events( &self, f: impl Fn(SystemEvent) -> Fut + Send + 'static, diff --git a/aardvark-node/src/node.rs b/aardvark-node/src/node.rs index 9dc73020..3c079c60 100644 --- a/aardvark-node/src/node.rs +++ b/aardvark-node/src/node.rs @@ -1,14 +1,16 @@ +use std::collections::HashMap; use std::path::Path; use std::sync::{Arc, OnceLock}; use anyhow::Result; use p2panda_core::{Hash, PrivateKey}; -use p2panda_net::{SyncConfiguration, SystemEvent, TopicId}; +use p2panda_net::{SyncConfiguration, SystemEvent}; use p2panda_store::sqlite::store::run_pending_migrations; use p2panda_sync::log_sync::LogSyncProtocol; use sqlx::sqlite; use tokio::runtime::{Builder, Runtime}; use tokio::sync::Notify; +use tokio::sync::RwLock; use tracing::warn; use crate::document::{DocumentId, SubscribableDocument}; @@ -16,10 +18,10 @@ use 
crate::network::Network; use crate::operation::{LogType, create_operation, validate_operation}; use crate::store::{DocumentStore, OperationStore}; -#[derive(Clone)] pub struct Node { inner: OnceLock>, ready_notify: Arc, + documents: Arc>>>, } impl Default for Node { @@ -42,6 +44,7 @@ impl Node { Self { inner: OnceLock::new(), ready_notify: Arc::new(Notify::new()), + documents: Arc::new(RwLock::new(HashMap::new())), } } @@ -110,6 +113,33 @@ impl Node { ) .await?; + let documents = self.documents.clone(); + network + .subscribe_events(move |system_event| { + let documents = documents.clone(); + async move { + match system_event { + SystemEvent::GossipJoined { topic_id, peers } => { + if let Some(document) = documents.read().await.get(&topic_id.into()) { + document.authors_joined(peers); + } + } + SystemEvent::GossipNeighborUp { topic_id, peer } => { + if let Some(document) = documents.read().await.get(&topic_id.into()) { + document.author_set_online(peer, true); + } + } + SystemEvent::GossipNeighborDown { topic_id, peer } => { + if let Some(document) = documents.read().await.get(&topic_id.into()) { + document.author_set_online(peer, false); + } + } + _ => {} + }; + } + }) + .await?; + self.inner .set(Arc::new(NodeInner { runtime, @@ -217,38 +247,16 @@ impl Node { }) .await??; - let inner_clone = inner.clone(); - inner - .runtime - .spawn(async move { - inner_clone - .network - .subscribe_events(move |system_event| { - let document = document.clone(); - async move { - match system_event { - SystemEvent::GossipJoined { topic_id, peers } - if topic_id == document_id.id() => - { - document.authors_joined(peers); - } - SystemEvent::GossipNeighborUp { topic_id, peer } - if topic_id == document_id.id() => - { - document.author_set_online(peer, true); - } - SystemEvent::GossipNeighborDown { topic_id, peer } - if topic_id == document_id.id() => - { - document.author_set_online(peer, false); - } - _ => {} - }; - } - }) - .await - }) - .await??; + 
self.documents.write().await.insert(document_id, document); + + Ok(()) + } + + pub async fn unsubscribe(&self, document_id: &DocumentId) -> Result<()> { + let inner = self.inner().await; + + inner.network.unsubscribe(document_id).await?; + self.documents.write().await.remove(document_id); Ok(()) } From 8a6e371d5528755f645438a22a330945e5681563 Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Tue, 15 Apr 2025 16:24:42 +0200 Subject: [PATCH 05/12] doc: Add document properties needed for document list This includes: - subscribe/unsubscribe from p2panda network - document name - last accessed --- aardvark-doc/src/document.rs | 91 ++++++++++++++++++++++++++++++------ 1 file changed, 76 insertions(+), 15 deletions(-) diff --git a/aardvark-doc/src/document.rs b/aardvark-doc/src/document.rs index b2503c7f..893b4a8b 100644 --- a/aardvark-doc/src/document.rs +++ b/aardvark-doc/src/document.rs @@ -1,7 +1,9 @@ use std::cell::{Cell, OnceCell}; +use std::cmp::min; use std::fmt; use std::str::FromStr; use std::sync::Arc; +use std::sync::Mutex; use std::sync::OnceLock; use aardvark_node::document::{DocumentId as DocumentIdNode, SubscribableDocument}; @@ -39,18 +41,22 @@ mod imp { /// /// Loro documents can contain multiple different CRDT types in one document. 
const TEXT_CONTAINER_ID: &str = "document"; + const DOCUMENT_NAME_LENGTH: usize = 32; use super::*; #[derive(Properties, Default)] #[properties(wrapper_type = super::Document)] pub struct Document { + #[property(name = "name", get = Self::name, type = Option)] + #[property(get)] + last_accessed: Mutex>, #[property(name = "text", get = Self::text, type = String)] crdt_doc: OnceCell, #[property(get, construct_only, set = Self::set_id)] id: OnceCell, - #[property(get, set)] - ready: Cell, + #[property(get, set = Self::set_subscribed)] + subscribed: Cell, #[property(get, construct_only)] service: OnceCell, #[property(get)] @@ -72,6 +78,21 @@ mod imp { .to_string() } + pub fn name(&self) -> Option { + let crdt_text = self + .crdt_doc + .get() + .expect("crdt_doc to be set") + .get_text(TEXT_CONTAINER_ID); + if crdt_text.is_empty() { + return None; + } + + crdt_text + .slice(0, min(DOCUMENT_NAME_LENGTH, crdt_text.len_unicode())) + .ok() + } + fn set_id(&self, id: Option) { if let Some(id) = id { self.id.set(id).expect("Document id can only be set once"); @@ -104,7 +125,55 @@ mod imp { } } + pub fn set_subscribed(&self, subscribed: bool) { + if self.obj().subscribed() == subscribed { + return; + } + + self.subscribed.set(subscribed); + + if subscribed { + *self.last_accessed.lock().unwrap() = None; + + let obj = self.obj(); + glib::spawn_future(clone!( + #[weak] + obj, + async move { + let document_id = obj.id().0; + let handle = DocumentHandle(obj.downgrade()); + if let Err(error) = + obj.service().node().subscribe(document_id, handle).await + { + error!("Failed to subscribe to document: {}", error); + obj.imp().set_subscribed(false); + } + } + )); + } else { + *self.last_accessed.lock().unwrap() = glib::DateTime::now_local().ok(); + + let obj = self.obj(); + glib::spawn_future(clone!( + #[weak] + obj, + async move { + let document_id = obj.id().0; + if let Err(error) = obj.service().node().unsubscribe(&document_id).await { + error!("Failed to unsubscribe document {}: 
{}", document_id, error); + } + } + )); + } + self.obj().notify_last_accessed(); + self.obj().notify_subscribed(); + } + fn emit_text_inserted(&self, pos: i32, text: String) { + if pos <= DOCUMENT_NAME_LENGTH as i32 { + self.obj().notify_name(); + } + // Emit the signal on the main thread let obj = self.obj(); glib::source::idle_add_full( @@ -116,6 +185,7 @@ mod imp { glib::ControlFlow::Break, move || { obj.emit_by_name::<()>("text-inserted", &[&pos, &text]); + obj.notify_name(); glib::ControlFlow::Break } ), @@ -123,6 +193,10 @@ mod imp { } fn emit_range_deleted(&self, start: i32, end: i32) { + if start <= DOCUMENT_NAME_LENGTH as i32 || end <= DOCUMENT_NAME_LENGTH as i32 { + self.obj().notify_name(); + } + // Emit the signal on the main thread let obj = self.obj(); glib::source::idle_add_full( @@ -288,19 +362,6 @@ mod imp { self.setup_loro_document(); - let obj = self.obj(); - glib::spawn_future(clone!( - #[weak] - obj, - async move { - let document_id = obj.id().0; - let handle = DocumentHandle(obj.downgrade()); - if let Err(error) = obj.service().node().subscribe(document_id, handle).await { - error!("Failed to subscribe to document: {}", error); - } - } - )); - // Add ourself to the list of authors self.authors .add_this_device(self.obj().service().public_key()); From 698311e2b2daf4520258f19856702ffe1fb7d915 Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Tue, 15 Apr 2025 16:28:29 +0200 Subject: [PATCH 06/12] doc: Add document list holding all documents known --- aardvark-doc/src/document.rs | 2 + aardvark-doc/src/documents.rs | 93 +++++++++++++++++++++++++++++++++++ aardvark-doc/src/lib.rs | 1 + aardvark-doc/src/service.rs | 9 +++- 4 files changed, 104 insertions(+), 1 deletion(-) create mode 100644 aardvark-doc/src/documents.rs diff --git a/aardvark-doc/src/document.rs b/aardvark-doc/src/document.rs index 893b4a8b..ea9669a0 100644 --- a/aardvark-doc/src/document.rs +++ b/aardvark-doc/src/document.rs @@ -365,6 +365,8 @@ mod imp { // Add ourself to the list 
of authors self.authors .add_this_device(self.obj().service().public_key()); + + self.obj().service().documents().add(self.obj().clone()); } } } diff --git a/aardvark-doc/src/documents.rs b/aardvark-doc/src/documents.rs new file mode 100644 index 00000000..cc365c84 --- /dev/null +++ b/aardvark-doc/src/documents.rs @@ -0,0 +1,93 @@ +use std::sync::Mutex; + +use gio::prelude::*; +use gio::subclass::prelude::ListModelImpl; +use glib::{clone, subclass::prelude::*}; + +use crate::document::{Document, DocumentId}; + +mod imp { + use super::*; + + #[derive(Default)] + pub struct Documents { + pub(super) list: Mutex>, + } + + #[glib::object_subclass] + impl ObjectSubclass for Documents { + const NAME: &'static str = "Documents"; + type Type = super::Documents; + type Interfaces = (gio::ListModel,); + } + + impl ObjectImpl for Documents {} + + impl ListModelImpl for Documents { + fn item_type(&self) -> glib::Type { + Document::static_type() + } + + fn n_items(&self) -> u32 { + self.list.lock().unwrap().len() as u32 + } + + fn item(&self, index: u32) -> Option { + self.list + .lock() + .unwrap() + .get(index as usize) + .cloned() + .map(Cast::upcast) + } + } +} + +glib::wrapper! 
{ + pub struct Documents(ObjectSubclass) + @implements gio::ListModel; +} + +unsafe impl Send for Documents {} +unsafe impl Sync for Documents {} + +impl Default for Documents { + fn default() -> Self { + Self::new() + } +} + +impl Documents { + pub fn new() -> Self { + glib::Object::new() + } + + pub(crate) fn add(&self, document: Document) { + glib::source::idle_add_full( + glib::source::Priority::DEFAULT, + clone!( + #[weak(rename_to = obj)] + self, + #[upgrade_or] + glib::ControlFlow::Break, + move || { + let mut list = obj.imp().list.lock().unwrap(); + let pos = list.len() as u32; + + list.push(document.clone()); + drop(list); + obj.items_changed(pos, 0, 1); + glib::ControlFlow::Break + } + ), + ); + } + + pub fn by_id(&self, document_id: &DocumentId) -> Option { + let list = self.imp().list.lock().unwrap(); + + list.iter() + .find(|document| &document.id() == document_id) + .cloned() + } +} diff --git a/aardvark-doc/src/lib.rs b/aardvark-doc/src/lib.rs index f2d99786..8b97752a 100644 --- a/aardvark-doc/src/lib.rs +++ b/aardvark-doc/src/lib.rs @@ -1,6 +1,7 @@ pub mod author; pub mod authors; pub mod document; +pub mod documents; pub mod service; #[cfg(test)] diff --git a/aardvark-doc/src/service.rs b/aardvark-doc/src/service.rs index f9f28c37..df86c8f7 100644 --- a/aardvark-doc/src/service.rs +++ b/aardvark-doc/src/service.rs @@ -1,18 +1,25 @@ +use glib::Properties; +use glib::object::ObjectExt; use glib::subclass::prelude::*; use p2panda_core::{Hash, PrivateKey, PublicKey}; use tracing::{error, info}; +use crate::documents::Documents; use aardvark_node::Node; mod imp { use super::*; - #[derive(Default)] + #[derive(Properties, Default)] + #[properties(wrapper_type = super::Service)] pub struct Service { pub node: Node, pub private_key: PrivateKey, + #[property(get)] + documents: Documents, } + #[glib::derived_properties] impl ObjectImpl for Service {} #[glib::object_subclass] From 663b07b3b23f137b866cc03ebebd32bcfc7be194 Mon Sep 17 00:00:00 2001 From: Julian 
Sparber Date: Tue, 15 Apr 2025 17:10:35 +0200 Subject: [PATCH 07/12] app: Add document list popover --- .../actions/down-smaller-symbolic.svg | 2 + .../actions/join-document-symbolic.svg | 2 + .../data/resources/resources.gresource.xml | 2 + aardvark-app/src/main.rs | 2 + aardvark-app/src/open_popover/mod.rs | 361 ++++++++++++++++++ aardvark-app/src/open_popover/open_popover.ui | 69 ++++ aardvark-app/src/style.css | 17 + aardvark-app/src/ui-resources.gresource.xml | 1 + aardvark-app/src/window.rs | 124 +++--- aardvark-app/src/window.ui | 25 +- 10 files changed, 543 insertions(+), 62 deletions(-) create mode 100644 aardvark-app/data/resources/icons/scalable/actions/down-smaller-symbolic.svg create mode 100644 aardvark-app/data/resources/icons/scalable/actions/join-document-symbolic.svg create mode 100644 aardvark-app/src/open_popover/mod.rs create mode 100644 aardvark-app/src/open_popover/open_popover.ui diff --git a/aardvark-app/data/resources/icons/scalable/actions/down-smaller-symbolic.svg b/aardvark-app/data/resources/icons/scalable/actions/down-smaller-symbolic.svg new file mode 100644 index 00000000..0d073be9 --- /dev/null +++ b/aardvark-app/data/resources/icons/scalable/actions/down-smaller-symbolic.svg @@ -0,0 +1,2 @@ + + diff --git a/aardvark-app/data/resources/icons/scalable/actions/join-document-symbolic.svg b/aardvark-app/data/resources/icons/scalable/actions/join-document-symbolic.svg new file mode 100644 index 00000000..975067b2 --- /dev/null +++ b/aardvark-app/data/resources/icons/scalable/actions/join-document-symbolic.svg @@ -0,0 +1,2 @@ + + diff --git a/aardvark-app/data/resources/resources.gresource.xml b/aardvark-app/data/resources/resources.gresource.xml index 2b7e3e0d..2ffd3ac5 100644 --- a/aardvark-app/data/resources/resources.gresource.xml +++ b/aardvark-app/data/resources/resources.gresource.xml @@ -1,5 +1,7 @@ + icons/scalable/actions/down-smaller-symbolic.svg + icons/scalable/actions/join-document-symbolic.svg diff --git 
a/aardvark-app/src/main.rs b/aardvark-app/src/main.rs index 1d82a03a..43f03b31 100644 --- a/aardvark-app/src/main.rs +++ b/aardvark-app/src/main.rs @@ -23,6 +23,7 @@ mod components; mod config; mod connection_popover; mod open_dialog; +mod open_popover; mod system_settings; mod textbuffer; mod window; @@ -37,6 +38,7 @@ use tracing_subscriber::prelude::*; use self::application::AardvarkApplication; use self::config::*; use self::connection_popover::ConnectionPopover; +use self::open_popover::OpenPopover; use self::textbuffer::AardvarkTextBuffer; use self::window::AardvarkWindow; diff --git a/aardvark-app/src/open_popover/mod.rs b/aardvark-app/src/open_popover/mod.rs new file mode 100644 index 00000000..7804884f --- /dev/null +++ b/aardvark-app/src/open_popover/mod.rs @@ -0,0 +1,361 @@ +/* Copyright 2025 The Aardvark Developers + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ * + * SPDX-License-Identifier: GPL-3.0-or-later + */ + +use adw::subclass::prelude::*; +use gettextrs::gettext; +use gtk::prelude::*; +use gtk::{glib, glib::clone, glib::closure_local}; + +use crate::system_settings::ClockFormat; +use crate::{AardvarkApplication, AardvarkWindow, open_dialog::OpenDialog}; +use aardvark_doc::{document::Document, documents::Documents}; + +mod imp { + use super::*; + use adw::prelude::AdwDialogExt; + use glib::subclass::Signal; + use std::sync::LazyLock; + + #[derive(Debug, Default, glib::Properties, gtk::CompositeTemplate)] + #[properties(wrapper_type = super::OpenPopover)] + #[template(resource = "/org/p2panda/aardvark/open_popover/open_popover.ui")] + pub struct OpenPopover { + #[template_child] + search_entry: TemplateChild, + #[template_child] + listbox: TemplateChild, + #[template_child] + stack: TemplateChild, + #[template_child] + no_results_page: TemplateChild, + #[template_child] + document_list_page: TemplateChild, + #[template_child] + open_document_button: TemplateChild, + #[property(get = Self::model, set = Self::set_model, type = Option)] + model: gtk::FilterListModel, + } + + #[glib::object_subclass] + impl ObjectSubclass for OpenPopover { + const NAME: &'static str = "AardvarkOpenPopover"; + type Type = super::OpenPopover; + type ParentType = gtk::Popover; + + fn class_init(klass: &mut Self::Class) { + klass.bind_template(); + } + + fn instance_init(obj: &glib::subclass::InitializingObject) { + obj.init_template(); + } + } + + #[glib::derived_properties] + impl ObjectImpl for OpenPopover { + fn signals() -> &'static [Signal] { + static SIGNALS: LazyLock> = LazyLock::new(|| { + vec![ + // The user has activated a document in the document list. 
+ Signal::builder("document-activated") + .param_types([Document::static_type()]) + .build(), + ] + }); + SIGNALS.as_ref() + } + + fn constructed(&self) { + self.parent_constructed(); + + // TODO: We should also match the document id with a more complex filter + let filter = gtk::StringFilter::builder() + .expression(gtk::PropertyExpression::new( + Document::static_type(), + gtk::Expression::NONE, + "name", + )) + .ignore_case(true) + .match_mode(gtk::StringFilterMatchMode::Substring) + .build(); + self.model.set_filter(Some(&filter)); + + self.search_entry + .connect_search_changed(move |search_entry| { + filter.set_search(Some(&search_entry.text())); + }); + + self.model.connect_items_changed(clone!( + #[weak(rename_to = this)] + self, + move |model, _, _, _| { + if model.n_items() > 0 { + this.stack.set_visible_child(&*this.document_list_page); + } else { + this.stack.set_visible_child(&*this.no_results_page); + } + } + )); + + self.listbox.connect_row_activated(clone!( + #[weak(rename_to = this)] + self, + move |_, row| { + let document: Document = this + .model + .item(row.index() as u32) + .unwrap() + .downcast() + .unwrap(); + this.obj() + .emit_by_name::<()>("document-activated", &[&document]); + this.search_entry.set_text(""); + this.obj().popdown(); + } + )); + + self.listbox.bind_model(Some(&self.model), |document| { + let document = document.downcast_ref::().unwrap(); + let row = adw::ActionRow::builder() + .selectable(false) + .activatable(true) + .build(); + + document + .bind_property("name", &row, "title") + .sync_create() + .transform_to(|_, title: Option| { + if let Some(title) = title { + Some(title) + } else { + Some(gettext("Empty document")) + } + }) + .build(); + + document + .bind_property("last-accessed", &row, "subtitle") + .sync_create() + .transform_to(|binding, last_accessed: Option| { + let document: Document = binding.source().unwrap().downcast().unwrap(); + if let Some(last_accessed) = last_accessed { + 
Some(format_last_accessed(&last_accessed)) + } else if document.subscribed() { + Some(gettext("Currently open")) + } else { + Some(gettext("Never accessed")) + } + }) + .build(); + + row.upcast() + }); + + self.open_document_button.connect_clicked(clone!( + #[weak(rename_to = this)] + self, + move |_| { + let dialog = OpenDialog::new(); + let window = this + .obj() + .root() + .and_then(|w| w.downcast::().ok()) + .expect("Toplevel window needs to be a AardvarkWindow"); + + this.obj().popdown(); + dialog.present(Some(&window)); + + let service = window.service(); + dialog.connect_open(clone!( + #[weak] + this, + #[weak] + service, + move |_, document_id| { + let document = service + .documents() + .by_id(document_id) + .unwrap_or_else(|| Document::new(&service, Some(document_id))); + + this.obj() + .emit_by_name::<()>("document-activated", &[&document]); + } + )); + } + )); + } + } + + impl OpenPopover { + fn model(&self) -> Option { + if let Some(model) = self.model.model() { + model.downcast().ok() + } else { + None + } + } + + fn set_model(&self, model: Option<&Documents>) { + self.model.set_model(model); + } + } + + impl WidgetImpl for OpenPopover {} + impl PopoverImpl for OpenPopover {} +} + +glib::wrapper! { + pub struct OpenPopover(ObjectSubclass) + @extends gtk::Widget, gtk::Popover; +} + +impl OpenPopover { + pub fn new>(model: &P) -> Self { + glib::Object::builder().property("model", model).build() + } + + /// Connect to the signal emitted when a user clicks a document in the document list. 
+ pub fn connect_document_activated( + &self, + f: F, + ) -> glib::SignalHandlerId { + self.connect_closure( + "document-activated", + true, + closure_local!(move |obj: Self, document: Document| { + f(&obj, &document); + }), + ) + } +} + +// This was copied from Fractal +// See: https://gitlab.gnome.org/World/fractal/-/blob/main/src/session/model/user_sessions_list/user_session.rs#L258 +fn format_last_accessed(datetime: &glib::DateTime) -> String { + let clock_format = AardvarkApplication::default() + .system_settings() + .clock_format(); + let use_24 = clock_format == ClockFormat::TwentyFourHours; + + // This was ported from Nautilus and simplified for our use case. + // See: https://gitlab.gnome.org/GNOME/nautilus/-/blob/1c5bd3614a35cfbb49de087bc10381cdef5a218f/src/nautilus-file.c#L5001 + let now = glib::DateTime::now_local().unwrap(); + let format; + let days_ago = { + let today_midnight = + glib::DateTime::from_local(now.year(), now.month(), now.day_of_month(), 0, 0, 0f64) + .expect("constructing GDateTime works"); + + let date = glib::DateTime::from_local( + datetime.year(), + datetime.month(), + datetime.day_of_month(), + 0, + 0, + 0f64, + ) + .expect("constructing GDateTime works"); + + today_midnight.difference(&date).as_days() + }; + + // Show only the time if date is on today + if days_ago == 0 { + if use_24 { + // Translators: Time in 24h format, i.e. "23:04". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + format = gettext("Last accessed at %H:%M"); + } else { + // Translators: Time in 12h format, i.e. "11:04 PM". + // Do not change the time format as it will follow the system settings. 
+ // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + format = gettext("Last accessed at %I:%M %p"); + } + } + // Show the word "Yesterday" and time if date is on yesterday + else if days_ago == 1 { + if use_24 { + // Translators: this a time in 24h format, i.e. "Last seen yesterday at 23:04". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed yesterday at %H:%M"); + } else { + // Translators: this is a time in 12h format, i.e. "Last seen Yesterday at 11:04 + // PM". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed yesterday at %I:%M %p"); + } + } + // Show a week day and time if date is in the last week + else if days_ago > 1 && days_ago < 7 { + if use_24 { + // Translators: this is the name of the week day followed by a time in 24h + // format, i.e. "Last seen Monday at 23:04". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed %A at %H:%M"); + } else { + // Translators: this is the week day name followed by a time in 12h format, i.e. + // "Last seen Monday at 11:04 PM". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed %A at %I:%M %p"); + } + } else if datetime.year() == now.year() { + if use_24 { + // Translators: this is the month and day and the time in 24h format, i.e. 
"Last + // seen February 3 at 23:04". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed %B %-e at %H:%M"); + } else { + // Translators: this is the month and day and the time in 12h format, i.e. "Last + // seen February 3 at 11:04 PM". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed %B %-e at %I:%M %p"); + } + } else if use_24 { + // Translators: this is the full date and the time in 24h format, i.e. "Last + // seen February 3 2015 at 23:04". + // Do not change the time format as it will follow the system settings. + // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed %B %-e %Y at %H:%M"); + } else { + // Translators: this is the full date and the time in 12h format, i.e. "Last + // seen February 3 2015 at 11:04 PM". + // Do not change the time format as it will follow the system settings. 
+ // See `man strftime` or the documentation of g_date_time_format for the available specifiers: + // xgettext:no-c-format + format = gettext("Last accessed %B %-e %Y at %I:%M %p"); + } + + datetime + .format(&format) + .expect("formatting GDateTime works") + .into() +} diff --git a/aardvark-app/src/open_popover/open_popover.ui b/aardvark-app/src/open_popover/open_popover.ui new file mode 100644 index 00000000..7af58451 --- /dev/null +++ b/aardvark-app/src/open_popover/open_popover.ui @@ -0,0 +1,69 @@ + + + + + + + diff --git a/aardvark-app/src/style.css b/aardvark-app/src/style.css index 9a9d3ed0..e891fbab 100644 --- a/aardvark-app/src/style.css +++ b/aardvark-app/src/style.css @@ -42,3 +42,20 @@ font-weight: 700; font-size: 9px; } +.open-popover contents { + padding: 0px; + min-width: 300px; +} + +.open-popover .search { + margin: 6px; +} + +.open-popover row { + border-radius: 6px; + margin: 6px; +} + +.open-popover .open-document { + margin: 12px; +} diff --git a/aardvark-app/src/ui-resources.gresource.xml b/aardvark-app/src/ui-resources.gresource.xml index aa0365b7..8a9d39d2 100644 --- a/aardvark-app/src/ui-resources.gresource.xml +++ b/aardvark-app/src/ui-resources.gresource.xml @@ -2,6 +2,7 @@ open_dialog/open_dialog.ui + open_popover/open_popover.ui window.ui components/zoom_level_selector.ui gtk/help-overlay.ui diff --git a/aardvark-app/src/window.rs b/aardvark-app/src/window.rs index f91d8333..958b0cb5 100644 --- a/aardvark-app/src/window.rs +++ b/aardvark-app/src/window.rs @@ -24,16 +24,13 @@ use aardvark_doc::{ document::{Document, DocumentId}, service::Service, }; -use adw::prelude::AdwDialogExt; -use adw::subclass::prelude::*; -use gtk::prelude::*; + +use adw::{prelude::*, subclass::prelude::*}; use gtk::{gdk, gio, glib, glib::clone}; -use sourceview::*; use crate::{ - AardvarkApplication, AardvarkTextBuffer, ConnectionPopover, + AardvarkApplication, AardvarkTextBuffer, ConnectionPopover, OpenPopover, components::{MultilineEntry, ZoomLevelSelector}, - 
open_dialog::OpenDialog, }; const BASE_TEXT_FONT_SIZE: f64 = 24.0; @@ -49,7 +46,9 @@ mod imp { #[template_child] pub text_view: TemplateChild, #[template_child] - pub open_dialog_document_button: TemplateChild, + pub open_popover_button: TemplateChild, + #[template_child] + pub open_popover: TemplateChild, #[template_child] pub toast_overlay: TemplateChild, #[template_child] @@ -83,6 +82,7 @@ mod imp { fn class_init(klass: &mut Self::Class) { ZoomLevelSelector::static_type(); MultilineEntry::static_type(); + OpenPopover::static_type(); klass.bind_template(); @@ -151,7 +151,7 @@ mod imp { self.font_size.set(BASE_TEXT_FONT_SIZE); self.obj().set_font_scale(0.0); gtk::style_context_add_provider_for_display( - &self.obj().display(), + >k::Widget::display(self.obj().upcast_ref()), &self.css_provider, gtk::STYLE_PROVIDER_PRIORITY_APPLICATION, ); @@ -159,64 +159,63 @@ mod imp { let scroll_controller = gtk::EventControllerScroll::new(gtk::EventControllerScrollFlags::VERTICAL); scroll_controller.set_propagation_phase(gtk::PropagationPhase::Capture); - let window = self.obj().clone(); - scroll_controller.connect_scroll(move |scroll, _dx, dy| { - if scroll - .current_event_state() - .contains(gdk::ModifierType::CONTROL_MASK) - { - if dy < 0.0 { - window.set_font_scale(window.font_scale() + 1.0); + let window = self.obj(); + scroll_controller.connect_scroll(clone!( + #[weak] + window, + #[upgrade_or] + glib::Propagation::Stop, + move |scroll, _dx, dy| { + if scroll + .current_event_state() + .contains(gdk::ModifierType::CONTROL_MASK) + { + if dy < 0.0 { + window.set_font_scale(window.font_scale() + 1.0); + } else { + window.set_font_scale(window.font_scale() - 1.0); + } + glib::Propagation::Stop } else { - window.set_font_scale(window.font_scale() - 1.0); + glib::Propagation::Proceed } - glib::Propagation::Stop - } else { - glib::Propagation::Proceed } - }); + )); self.obj().add_controller(scroll_controller); let zoom_gesture = gtk::GestureZoom::new(); - let window = 
self.obj().clone(); let prev_delta = Cell::new(0.0); - zoom_gesture.connect_scale_changed(move |_, delta| { - if prev_delta.get() == delta { - return; - } + zoom_gesture.connect_scale_changed(clone!( + #[weak] + window, + move |_, delta| { + if prev_delta.get() == delta { + return; + } - if prev_delta.get() < delta { - window.set_font_scale(window.font_scale() + delta); - } else { - window.set_font_scale(window.font_scale() - delta); + if prev_delta.get() < delta { + window.set_font_scale(window.font_scale() + delta); + } else { + window.set_font_scale(window.font_scale() - delta); + } + prev_delta.set(delta); } - prev_delta.set(delta); - }); + )); self.obj().add_controller(zoom_gesture); - let window = self.obj(); - self.open_dialog_document_button.connect_clicked(clone!( - #[weak] - window, - move |_| { - let dialog = OpenDialog::new(); - - dialog.present(Some(&window)); - - dialog.connect_open(clone!( - #[weak] - window, - move |_, document_id| { - let app = AardvarkApplication::default(); - - if let Some(window) = app.window_for_document_id(document_id) { - window.present(); - } else { - let document = Document::new(&window.service(), Some(document_id)); - window.imp().set_document(document); - } - } - )); + self.open_popover + .set_model(self.obj().service().documents()); + + self.open_popover.connect_document_activated(clone!( + #[weak(rename_to = this)] + self, + move |_, document| { + let app = AardvarkApplication::default(); + if let Some(window) = app.window_for_document_id(&document.id()) { + window.present(); + } else { + this.set_document(document.to_owned()); + } } )); @@ -234,6 +233,11 @@ mod imp { let document = Document::new(self.service.get().unwrap(), None); self.set_document(document); } + + fn dispose(&self) { + // TODO: we should actually set the window document to None + self.obj().document().set_subscribed(false); + } } impl AardvarkWindow { @@ -273,7 +277,13 @@ mod imp { )); self.connection_button_label .set_label(&format!("{}", 
authors.n_items())); - self.document.replace(Some(document)); + + document.set_subscribed(true); + let old_document = self.document.replace(Some(document)); + + if let Some(old_document) = old_document { + old_document.set_subscribed(false); + } self.obj().notify("document"); } @@ -305,7 +315,7 @@ mod imp { glib::wrapper! { pub struct AardvarkWindow(ObjectSubclass) @extends gtk::Widget, gtk::Window, gtk::ApplicationWindow, adw::ApplicationWindow, - @implements gio::ActionGroup, gio::ActionMap; + @implements gtk::Native, gtk::Root, gio::ActionGroup, gio::ActionMap; } impl AardvarkWindow { diff --git a/aardvark-app/src/window.ui b/aardvark-app/src/window.ui index 7828cf16..b777ad29 100644 --- a/aardvark-app/src/window.ui +++ b/aardvark-app/src/window.ui @@ -39,14 +39,28 @@ - + - - document-open-symbolic - _Open - True + + + + _Open + True + + + + + down-smaller-symbolic + + + + + + @@ -199,3 +213,4 @@ + From 23a5da4310ea19751cf6804f415bc1c4b6180bc2 Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Thu, 17 Apr 2025 12:49:53 +0200 Subject: [PATCH 08/12] doc: Add glib boxed types for identity This exposes the PublicKey and PrivateKey structs via boxed glib types so they can be used as GObject properties. 
--- aardvark-app/src/application.rs | 13 ++++--- aardvark-doc/src/author.rs | 21 ++++++----- aardvark-doc/src/authors.rs | 11 +++--- aardvark-doc/src/document.rs | 21 ++++++----- aardvark-doc/src/lib.rs | 64 +++++++++++++++++++++++++++++++++ aardvark-doc/src/service.rs | 30 +++++++--------- 6 files changed, 111 insertions(+), 49 deletions(-) diff --git a/aardvark-app/src/application.rs b/aardvark-app/src/application.rs index 59f243af..91af1a5d 100644 --- a/aardvark-app/src/application.rs +++ b/aardvark-app/src/application.rs @@ -18,11 +18,12 @@ * SPDX-License-Identifier: GPL-3.0-or-later */ -use aardvark_doc::{document::DocumentId, service::Service}; +use aardvark_doc::{document::DocumentId, service::Service, identity::PrivateKey}; use adw::prelude::*; use adw::subclass::prelude::*; use gettextrs::gettext; use gtk::{gio, glib, glib::Properties}; +use std::cell::OnceCell; use crate::AardvarkWindow; use crate::config; @@ -35,7 +36,7 @@ mod imp { #[properties(wrapper_type = super::AardvarkApplication)] pub struct AardvarkApplication { #[property(get)] - pub service: Service, + pub service: OnceCell, #[property(get)] pub system_settings: SystemSettings, } @@ -55,17 +56,19 @@ mod imp { obj.setup_gactions(); obj.set_accels_for_action("app.quit", &["q"]); obj.set_accels_for_action("app.new-window", &["n"]); + + self.service.set(Service::new(&PrivateKey::new())).unwrap(); } } impl ApplicationImpl for AardvarkApplication { fn startup(&self) { - self.service.startup(); + self.obj().service().startup(); self.parent_startup(); } fn shutdown(&self) { - self.service.shutdown(); + self.obj().service().shutdown(); self.parent_shutdown(); } @@ -116,7 +119,7 @@ impl AardvarkApplication { } fn new_window(&self) { - let window = AardvarkWindow::new(self, &self.imp().service); + let window = AardvarkWindow::new(self, &self.service()); window.present(); } diff --git a/aardvark-doc/src/author.rs b/aardvark-doc/src/author.rs index 0a19a104..f1378bdb 100644 --- a/aardvark-doc/src/author.rs 
+++ b/aardvark-doc/src/author.rs @@ -1,10 +1,11 @@ -use std::cell::{Cell, OnceCell}; use std::sync::Mutex; +use std::{cell::Cell, sync::OnceLock}; use glib::Properties; use glib::prelude::*; use glib::subclass::prelude::*; -use p2panda_core::PublicKey; + +use crate::identity::PublicKey; pub const COLORS: [(&str, &str); 15] = [ ("Yellow", "#faf387"), @@ -77,7 +78,8 @@ mod imp { #[property(name = "name", get = Self::name, type = String)] #[property(name = "emoji", get = Self::emoji, type = String)] #[property(name = "color", get = Self::color, type = String)] - pub public_key: OnceCell, + #[property(get, set, construct_only, type = PublicKey)] + public_key: OnceLock, #[property(get)] pub last_seen: Mutex>, #[property(get, default = true)] @@ -133,15 +135,16 @@ glib::wrapper! { pub struct Author(ObjectSubclass); } impl Author { - pub fn new(public_key: PublicKey) -> Self { - let obj: Self = glib::Object::new(); + pub fn new(public_key: &PublicKey) -> Self { + let obj: Self = glib::Object::builder() + .property("public-key", public_key) + .build(); - obj.imp().public_key.set(public_key).unwrap(); obj.imp().is_online.set(true); obj } - pub fn for_this_device(public_key: PublicKey) -> Self { + pub fn for_this_device(public_key: &PublicKey) -> Self { let obj = Self::new(public_key); obj.imp().is_this_device.set(true); @@ -149,10 +152,6 @@ impl Author { obj } - pub(crate) fn public_key(&self) -> &PublicKey { - self.imp().public_key.get().unwrap() - } - pub(crate) fn set_is_online(&self, is_online: bool) { let was_online = self.imp().is_online.get(); self.imp().is_online.set(is_online); diff --git a/aardvark-doc/src/authors.rs b/aardvark-doc/src/authors.rs index 9f224e37..7fe5d32a 100644 --- a/aardvark-doc/src/authors.rs +++ b/aardvark-doc/src/authors.rs @@ -1,4 +1,3 @@ -use p2panda_core::PublicKey; use std::sync::Mutex; use gio::prelude::*; @@ -6,6 +5,7 @@ use gio::subclass::prelude::ListModelImpl; use glib::{clone, subclass::prelude::*}; use crate::author::Author; +use 
crate::identity::PublicKey; mod imp { use super::*; @@ -75,7 +75,7 @@ impl Authors { let mut list = obj.imp().list.lock().unwrap(); let pos = list.len() as u32; - let author = Author::for_this_device(author_key); + let author = Author::for_this_device(&author_key); list.push(author); drop(list); obj.items_changed(pos, 0, 1); @@ -96,15 +96,14 @@ impl Authors { move || { let mut list = obj.imp().list.lock().unwrap(); - if let Some(author) = list - .iter() - .find(|author| author.public_key() == &author_key) + if let Some(author) = + list.iter().find(|author| author.public_key() == author_key) { author.set_is_online(is_online); } else { let pos = list.len() as u32; - let author = Author::new(author_key); + let author = Author::new(&author_key); list.push(author); drop(list); diff --git a/aardvark-doc/src/document.rs b/aardvark-doc/src/document.rs index ea9669a0..d0ae63f3 100644 --- a/aardvark-doc/src/document.rs +++ b/aardvark-doc/src/document.rs @@ -12,10 +12,11 @@ use glib::prelude::*; use glib::subclass::{Signal, prelude::*}; use glib::{Properties, clone}; use loro::{ExportMode, LoroDoc, event::Diff}; -use p2panda_core::{HashError, PublicKey}; +use p2panda_core::HashError; use tracing::error; use crate::authors::Authors; +use crate::identity::PublicKey; use crate::service::Service; #[derive(Clone, Debug, PartialEq, Eq, glib::Boxed)] @@ -215,7 +216,7 @@ mod imp { } fn setup_loro_document(&self) { - let public_key = self.obj().service().public_key(); + let public_key = self.obj().service().private_key().public_key(); let obj = self.obj(); let doc = LoroDoc::new(); // The peer id represents the identity of the author applying local changes (that's @@ -229,7 +230,7 @@ mod imp { // this should not really be a problem, but it would be nice if the Loro API would // change some day. 
let mut buf = [0u8; 8]; - buf[..8].copy_from_slice(&public_key.as_bytes()[..8]); + buf[..8].copy_from_slice(&public_key.0.as_bytes()[..8]); u64::from_be_bytes(buf) }) .expect("set peer id for new document"); @@ -364,7 +365,7 @@ mod imp { // Add ourself to the list of authors self.authors - .add_this_device(self.obj().service().public_key()); + .add_this_device(self.obj().service().private_key().public_key()); self.obj().service().documents().add(self.obj().clone()); } @@ -397,23 +398,25 @@ unsafe impl Sync for Document {} struct DocumentHandle(glib::WeakRef); impl SubscribableDocument for DocumentHandle { - fn bytes_received(&self, _author: PublicKey, data: &[u8]) { + fn bytes_received(&self, _author: p2panda_core::PublicKey, data: &[u8]) { if let Some(document) = self.0.upgrade() { document.imp().on_remote_message(data); } } - fn authors_joined(&self, authors: Vec) { + fn authors_joined(&self, authors: Vec) { if let Some(document) = self.0.upgrade() { for author in authors.into_iter() { - document.authors().add_or_update(author, true); + document.authors().add_or_update(PublicKey(author), true); } } } - fn author_set_online(&self, author: PublicKey, is_online: bool) { + fn author_set_online(&self, author: p2panda_core::PublicKey, is_online: bool) { if let Some(document) = self.0.upgrade() { - document.authors().add_or_update(author, is_online); + document + .authors() + .add_or_update(PublicKey(author), is_online); } } } diff --git a/aardvark-doc/src/lib.rs b/aardvark-doc/src/lib.rs index 8b97752a..ae2fadd6 100644 --- a/aardvark-doc/src/lib.rs +++ b/aardvark-doc/src/lib.rs @@ -4,6 +4,70 @@ pub mod document; pub mod documents; pub mod service; +pub mod identity { + use std::fmt; + + #[derive(Clone, Debug, glib::Boxed)] + #[boxed_type(name = "AardvarkPrivateKey", nullable)] + pub struct PrivateKey(pub(crate) p2panda_core::PrivateKey); + + impl PrivateKey { + pub fn new() -> PrivateKey { + PrivateKey(p2panda_core::PrivateKey::new()) + } + + pub fn public_key(&self) 
-> PublicKey { + PublicKey(self.0.public_key()) + } + + pub fn as_bytes(&self) -> &[u8] { + self.0.as_bytes().as_slice() + } + } + + impl TryFrom<&[u8]> for PrivateKey { + type Error = p2panda_core::IdentityError; + + fn try_from(value: &[u8]) -> Result { + Ok(PrivateKey(p2panda_core::PrivateKey::try_from(value)?)) + } + } + + impl<'a> From<&'a PrivateKey> for &'a [u8] { + fn from(value: &PrivateKey) -> &[u8] { + value.0.as_bytes().as_slice() + } + } + + impl fmt::Display for PrivateKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } + } + + #[derive(Clone, Debug, PartialEq, glib::Boxed)] + #[boxed_type(name = "AardvarkPublicKey", nullable)] + pub struct PublicKey(pub(crate) p2panda_core::PublicKey); + + impl fmt::Display for PublicKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } + } + + impl<'a> From<&'a PublicKey> for &'a [u8] { + fn from(value: &PublicKey) -> &[u8] { + value.0.as_bytes().as_slice() + } + } + + impl PublicKey { + pub fn as_bytes(&self) -> &[u8] { + self.0.as_bytes().as_slice() + } + } +} + #[cfg(test)] mod tests { use crate::document::Document; diff --git a/aardvark-doc/src/service.rs b/aardvark-doc/src/service.rs index df86c8f7..ffc705aa 100644 --- a/aardvark-doc/src/service.rs +++ b/aardvark-doc/src/service.rs @@ -1,10 +1,12 @@ use glib::Properties; use glib::object::ObjectExt; use glib::subclass::prelude::*; -use p2panda_core::{Hash, PrivateKey, PublicKey}; -use tracing::{error, info}; +use p2panda_core::Hash; +use std::sync::OnceLock; +use tracing::error; use crate::documents::Documents; +use crate::identity::PrivateKey; use aardvark_node::Node; mod imp { @@ -14,7 +16,8 @@ mod imp { #[properties(wrapper_type = super::Service)] pub struct Service { pub node: Node, - pub private_key: PrivateKey, + #[property(get, set, construct_only, type = PrivateKey)] + pub private_key: OnceLock, #[property(get)] documents: Documents, } @@ -34,19 +37,20 @@ 
glib::wrapper! { } impl Service { - pub fn new() -> Self { - glib::Object::new() + pub fn new(private_key: &PrivateKey) -> Self { + glib::Object::builder() + .property("private-key", private_key) + .build() } pub fn startup(&self) { - let private_key = self.imp().private_key.clone(); let network_id = b"aardvark <3"; - info!("my public key: {}", private_key.public_key()); + glib::MainContext::new().block_on(async move { if let Err(error) = self .imp() .node - .run(private_key, Hash::new(network_id), None) + .run(self.private_key().0.clone(), Hash::new(network_id), None) .await { error!("Running node failed: {error}"); @@ -61,14 +65,4 @@ impl Service { pub(crate) fn node(&self) -> &Node { &self.imp().node } - - pub(crate) fn public_key(&self) -> PublicKey { - self.imp().private_key.public_key() - } -} - -impl Default for Service { - fn default() -> Self { - Service::new() - } } From 39cdfbc90e3d76b1f8ff9fd1e3d7dbc7be5ab11d Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Thu, 17 Apr 2025 14:48:08 +0200 Subject: [PATCH 09/12] app: Load and store identity This uses oo7 to store the private key ("identity") to the keyring. 
--- Cargo.lock | 240 ++++++++++++++++++++++++++++++-- aardvark-app/Cargo.toml | 8 +- aardvark-app/src/application.rs | 11 +- aardvark-app/src/main.rs | 3 + aardvark-app/src/secret.rs | 78 +++++++++++ aardvark-doc/src/lib.rs | 1 + 6 files changed, 329 insertions(+), 12 deletions(-) create mode 100644 aardvark-app/src/secret.rs diff --git a/Cargo.lock b/Cargo.lock index d299b0f5..e3ffe734 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,12 +8,14 @@ version = "0.1.0" dependencies = [ "aardvark-doc", "aardvark-node", - "ashpd", + "ashpd 0.9.2", "futures-util", "gettext-rs", "gtk4", "libadwaita", + "oo7", "sourceview5", + "thiserror 2.0.12", "tracing", "tracing-subscriber", ] @@ -178,7 +180,26 @@ dependencies = [ "serde_repr", "tracing", "url", - "zbus", + "zbus 4.4.0", +] + +[[package]] +name = "ashpd" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6cbdf310d77fd3aaee6ea2093db7011dc2d35d2eb3481e5607f1f8d942ed99df" +dependencies = [ + "async-fs", + "async-net", + "enumflags2", + "futures-channel", + "futures-util", + "rand 0.9.0", + "serde", + "serde_repr", + "tracing", + "url", + "zbus 5.5.0", ] [[package]] @@ -1312,6 +1333,21 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -3461,6 +3497,7 @@ dependencies = [ "num-iter", "num-traits", "rand 0.8.5", + "serde", "smallvec", "zeroize", ] @@ -3595,18 
+3632,83 @@ version = "1.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc" +[[package]] +name = "oo7" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6cb23d3ec3527d65a83be1c1795cb883c52cfa57147d42acc797127df56fc489" +dependencies = [ + "ashpd 0.11.0", + "async-fs", + "async-io", + "async-lock", + "blocking", + "endi", + "futures-lite", + "futures-util", + "getrandom 0.3.1", + "num", + "num-bigint-dig", + "openssl", + "rand 0.9.0", + "serde", + "tracing", + "zbus 5.5.0", + "zbus_macros 5.5.0", + "zeroize", + "zvariant 5.4.0", +] + [[package]] name = "opaque-debug" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openssl" +version = "0.10.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" +dependencies = [ + "bitflags 2.9.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-sys" +version = "0.9.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "ordered-stream" version = 
"0.2.0" @@ -6799,9 +6901,45 @@ dependencies = [ "uds_windows", "windows-sys 0.52.0", "xdg-home", - "zbus_macros", - "zbus_names", - "zvariant", + "zbus_macros 4.4.0", + "zbus_names 3.0.0", + "zvariant 4.2.0", +] + +[[package]] +name = "zbus" +version = "5.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59c333f648ea1b647bc95dc1d34807c8e25ed7a6feff3394034dc4776054b236" +dependencies = [ + "async-broadcast", + "async-executor", + "async-fs", + "async-io", + "async-lock", + "async-process", + "async-recursion", + "async-task", + "async-trait", + "blocking", + "enumflags2", + "event-listener", + "futures-core", + "futures-lite", + "hex", + "nix 0.29.0", + "ordered-stream", + "serde", + "serde_repr", + "static_assertions", + "tracing", + "uds_windows", + "windows-sys 0.59.0", + "winnow", + "xdg-home", + "zbus_macros 5.5.0", + "zbus_names 4.2.0", + "zvariant 5.4.0", ] [[package]] @@ -6814,7 +6952,22 @@ dependencies = [ "proc-macro2", "quote", "syn 2.0.100", - "zvariant_utils", + "zvariant_utils 2.1.0", +] + +[[package]] +name = "zbus_macros" +version = "5.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f325ad10eb0d0a3eb060203494c3b7ec3162a01a59db75d2deee100339709fc0" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.100", + "zbus_names 4.2.0", + "zvariant 5.4.0", + "zvariant_utils 3.2.0", ] [[package]] @@ -6825,7 +6978,19 @@ checksum = "4b9b1fef7d021261cc16cba64c351d291b715febe0fa10dc3a443ac5a5022e6c" dependencies = [ "serde", "static_assertions", - "zvariant", + "zvariant 4.2.0", +] + +[[package]] +name = "zbus_names" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97" +dependencies = [ + "serde", + "static_assertions", + "winnow", + "zvariant 5.4.0", ] [[package]] @@ -6894,6 +7059,20 @@ name = "zeroize" version = "1.8.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] [[package]] name = "zerovec" @@ -6928,7 +7107,23 @@ dependencies = [ "serde", "static_assertions", "url", - "zvariant_derive", + "zvariant_derive 4.2.0", +] + +[[package]] +name = "zvariant" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2df9ee044893fcffbdc25de30546edef3e32341466811ca18421e3cd6c5a3ac" +dependencies = [ + "endi", + "enumflags2", + "serde", + "static_assertions", + "url", + "winnow", + "zvariant_derive 5.4.0", + "zvariant_utils 3.2.0", ] [[package]] @@ -6941,7 +7136,20 @@ dependencies = [ "proc-macro2", "quote", "syn 2.0.100", - "zvariant_utils", + "zvariant_utils 2.1.0", +] + +[[package]] +name = "zvariant_derive" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74170caa85b8b84cc4935f2d56a57c7a15ea6185ccdd7eadb57e6edd90f94b2f" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.100", + "zvariant_utils 3.2.0", ] [[package]] @@ -6954,3 +7162,17 @@ dependencies = [ "quote", "syn 2.0.100", ] + +[[package]] +name = "zvariant_utils" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e16edfee43e5d7b553b77872d99bc36afdda75c223ca7ad5e3fbecd82ca5fc34" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "static_assertions", + "syn 2.0.100", + "winnow", +] diff --git a/aardvark-app/Cargo.toml b/aardvark-app/Cargo.toml index 9cdfe104..9d787829 100644 --- a/aardvark-app/Cargo.toml +++ b/aardvark-app/Cargo.toml @@ -17,9 +17,15 @@ sourceview = { 
package = "sourceview5", version = "0.9" } tracing = "0.1" tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } ashpd = { version = "0.9", default-features = false, features = ["tracing", "async-std"] } +thiserror = { version = "2.0" } futures-util = "0.3" +oo7 = { version = "0.4", default-features = false, features = [ + "openssl_crypto", + "async-std", + "tracing", +] } [dependencies.adw] package = "libadwaita" version = "0.7" -features = ["v1_6"] +features = ["v1_6"] \ No newline at end of file diff --git a/aardvark-app/src/application.rs b/aardvark-app/src/application.rs index 91af1a5d..f1fbe4e5 100644 --- a/aardvark-app/src/application.rs +++ b/aardvark-app/src/application.rs @@ -18,7 +18,7 @@ * SPDX-License-Identifier: GPL-3.0-or-later */ -use aardvark_doc::{document::DocumentId, service::Service, identity::PrivateKey}; +use aardvark_doc::{document::DocumentId, service::Service}; use adw::prelude::*; use adw::subclass::prelude::*; use gettextrs::gettext; @@ -27,6 +27,7 @@ use std::cell::OnceCell; use crate::AardvarkWindow; use crate::config; +use crate::secret; use crate::system_settings::SystemSettings; mod imp { @@ -57,7 +58,13 @@ mod imp { obj.set_accels_for_action("app.quit", &["q"]); obj.set_accels_for_action("app.new-window", &["n"]); - self.service.set(Service::new(&PrivateKey::new())).unwrap(); + // FIXME: Don't block on loading the identity + glib::MainContext::new().block_on(async move { + let private_key = secret::get_or_create_identity() + .await + .expect("Unable to get or create identity"); + self.service.set(Service::new(&private_key)).unwrap(); + }); } } diff --git a/aardvark-app/src/main.rs b/aardvark-app/src/main.rs index 43f03b31..7b56ba1d 100644 --- a/aardvark-app/src/main.rs +++ b/aardvark-app/src/main.rs @@ -24,6 +24,7 @@ mod config; mod connection_popover; mod open_dialog; mod open_popover; +mod secret; mod system_settings; mod textbuffer; mod window; @@ -42,6 +43,8 @@ use self::open_popover::OpenPopover; use 
self::textbuffer::AardvarkTextBuffer; use self::window::AardvarkWindow; +pub use self::config::APP_ID; + fn main() -> glib::ExitCode { setup_logging(); diff --git a/aardvark-app/src/secret.rs b/aardvark-app/src/secret.rs new file mode 100644 index 00000000..f81e03d4 --- /dev/null +++ b/aardvark-app/src/secret.rs @@ -0,0 +1,78 @@ +/* Copyright 2025 The Aardvark Developers + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ * + * SPDX-License-Identifier: GPL-3.0-or-later + */ + +use std::collections::HashMap; +use thiserror::Error; +use tracing::info; + +use crate::APP_ID; +use aardvark_doc::identity::{IdentityError, PrivateKey}; + +const XDG_SCHEMA: &'static str = "xdg:schema"; + +fn attributes() -> HashMap<&'static str, String> { + HashMap::from([(XDG_SCHEMA, APP_ID.to_owned())]) +} + +#[derive(Debug, Error)] +pub enum Error { + #[error("Secret Service error: {0}")] + Service(oo7::Error), + #[error("Format error: {0}")] + Format(IdentityError), +} + +impl From for Error { + fn from(value: IdentityError) -> Self { + Error::Format(value) + } +} + +impl From for Error { + fn from(value: oo7::Error) -> Self { + Error::Service(value) + } +} + +pub async fn get_or_create_identity() -> Result { + let keyring = oo7::Keyring::new().await?; + + keyring.unlock().await?; + + let private_key: PrivateKey = + if let Some(item) = keyring.search_items(&attributes()).await?.get(0) { + item.unlock().await?; + let private_key = PrivateKey::try_from(item.secret().await?.as_bytes())?; + info!("Found existing identity: {}", private_key.public_key()); + + private_key + } else { + let private_key = PrivateKey::new(); + keyring + .create_item("Aardvark", &attributes(), private_key.as_bytes(), true) + .await?; + + info!( + "No existing identity found. 
Create new identity: {}", + private_key.public_key() + ); + private_key + }; + + Ok(private_key) +} diff --git a/aardvark-doc/src/lib.rs b/aardvark-doc/src/lib.rs index ae2fadd6..2fbf3687 100644 --- a/aardvark-doc/src/lib.rs +++ b/aardvark-doc/src/lib.rs @@ -5,6 +5,7 @@ pub mod documents; pub mod service; pub mod identity { + pub use p2panda_core::identity::IdentityError; use std::fmt; #[derive(Clone, Debug, glib::Boxed)] From 3ecb3255bafbec0f6af68c54ea1ebfc1913fa773 Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Thu, 17 Apr 2025 16:11:36 +0200 Subject: [PATCH 10/12] doc: Enable persistent database by setting a data dir The node does only store the OperationStore for now and doesn't reload operations. --- aardvark-app/src/application.rs | 16 ++++++++++++++-- aardvark-doc/src/service.rs | 13 +++++++++---- aardvark-node/src/node.rs | 6 ++++-- 3 files changed, 27 insertions(+), 8 deletions(-) diff --git a/aardvark-app/src/application.rs b/aardvark-app/src/application.rs index f1fbe4e5..890cb527 100644 --- a/aardvark-app/src/application.rs +++ b/aardvark-app/src/application.rs @@ -23,7 +23,8 @@ use adw::prelude::*; use adw::subclass::prelude::*; use gettextrs::gettext; use gtk::{gio, glib, glib::Properties}; -use std::cell::OnceCell; +use std::{cell::OnceCell, fs}; +use tracing::error; use crate::AardvarkWindow; use crate::config; @@ -63,7 +64,18 @@ mod imp { let private_key = secret::get_or_create_identity() .await .expect("Unable to get or create identity"); - self.service.set(Service::new(&private_key)).unwrap(); + + let mut data_path = glib::user_data_dir(); + data_path.push("Aardvark"); + data_path.push(private_key.public_key().to_string()); + if let Err(error) = fs::create_dir_all(&data_path) { + error!("Failed to create data directory: {error}"); + } + let data_dir = gio::File::for_path(data_path); + + self.service + .set(Service::new(&private_key, &data_dir)) + .unwrap(); }); } } diff --git a/aardvark-doc/src/service.rs b/aardvark-doc/src/service.rs index 
ffc705aa..5e258a87 100644 --- a/aardvark-doc/src/service.rs +++ b/aardvark-doc/src/service.rs @@ -1,3 +1,4 @@ +use gio::prelude::FileExt; use glib::Properties; use glib::object::ObjectExt; use glib::subclass::prelude::*; @@ -18,6 +19,8 @@ mod imp { pub node: Node, #[property(get, set, construct_only, type = PrivateKey)] pub private_key: OnceLock, + #[property(get, set, construct_only, type = gio::File)] + pub data_dir: OnceLock, #[property(get)] documents: Documents, } @@ -37,20 +40,22 @@ glib::wrapper! { } impl Service { - pub fn new(private_key: &PrivateKey) -> Self { + pub fn new(private_key: &PrivateKey, data_dir: &gio::File) -> Self { glib::Object::builder() .property("private-key", private_key) + .property("data-dir", data_dir) .build() } pub fn startup(&self) { - let network_id = b"aardvark <3"; - glib::MainContext::new().block_on(async move { + let private_key = self.private_key().0.clone(); + let network_id = Hash::new(b"aardvark <3"); + let path = self.data_dir().path().expect("Valid file path"); if let Err(error) = self .imp() .node - .run(self.private_key().0.clone(), Hash::new(network_id), None) + .run(private_key, network_id, Some(path.as_ref())) .await { error!("Running node failed: {error}"); diff --git a/aardvark-node/src/node.rs b/aardvark-node/src/node.rs index 3c079c60..fec9cf5b 100644 --- a/aardvark-node/src/node.rs +++ b/aardvark-node/src/node.rs @@ -11,7 +11,7 @@ use sqlx::sqlite; use tokio::runtime::{Builder, Runtime}; use tokio::sync::Notify; use tokio::sync::RwLock; -use tracing::warn; +use tracing::{info, warn}; use crate::document::{DocumentId, SubscribableDocument}; use crate::network::Network; @@ -78,7 +78,9 @@ impl Node { .shared_cache(true) .create_if_missing(true); let connection_options = if let Some(db_location) = db_location { - connection_options.filename(db_location) + let db_file = db_location.join("database.sqlite"); + info!("Database file location: {db_file:?}"); + connection_options.filename(db_file) } else { 
connection_options.in_memory(true) }; From 954a12a74cabc3fcfae6976272d4991768e72ff5 Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Thu, 17 Apr 2025 19:31:29 +0200 Subject: [PATCH 11/12] node: Improve how the DocumentStore stores documents Logically it makes sense that we map documents to a set of authors instead of the other way around. Additionally this makes the implementation of TopicLogMap trait much simpler without the need to look at authors that aren't authors of a specific document. --- aardvark-node/src/node.rs | 5 ++-- aardvark-node/src/store.rs | 59 +++++++++++++------------------------- 2 files changed, 22 insertions(+), 42 deletions(-) diff --git a/aardvark-node/src/node.rs b/aardvark-node/src/node.rs index fec9cf5b..88390001 100644 --- a/aardvark-node/src/node.rs +++ b/aardvark-node/src/node.rs @@ -205,7 +205,7 @@ impl Node { inner .document_store .add_author(document_id, inner.private_key.public_key()) - .await?; + .await; let inner_clone = inner.clone(); let document_clone = document.clone(); @@ -235,8 +235,7 @@ impl Node { inner_clone .document_store .add_author(document_id, operation.header.public_key) - .await - .expect("Unable to add author to DocumentStore"); + .await; // Forward the payload up to the app. 
if let Some(body) = operation.body { diff --git a/aardvark-node/src/store.rs b/aardvark-node/src/store.rs index 10069c59..387657bc 100644 --- a/aardvark-node/src/store.rs +++ b/aardvark-node/src/store.rs @@ -2,7 +2,6 @@ use std::collections::{HashMap, HashSet}; use std::hash::Hash as StdHash; use std::sync::Arc; -use anyhow::Result; use async_trait::async_trait; use p2panda_core::PublicKey; use p2panda_store::SqliteStore; @@ -15,33 +14,24 @@ use crate::operation::{AardvarkExtensions, LogType}; #[derive(Clone, Debug)] pub struct DocumentStore { - inner: Arc>, -} - -#[derive(Debug)] -struct DocumentStoreInner { - authors: HashMap>, + documents: Arc>>>, } impl DocumentStore { pub fn new() -> Self { Self { - inner: Arc::new(RwLock::new(DocumentStoreInner { - authors: HashMap::new(), - })), + documents: Arc::new(RwLock::new(HashMap::new())), } } - pub async fn add_author(&self, document: DocumentId, public_key: PublicKey) -> Result<()> { - let mut store = self.inner.write().await; - store - .authors - .entry(public_key) + pub async fn add_author(&self, document: DocumentId, public_key: PublicKey) { + let mut documents = self.documents.write().await; + documents + .entry(document) .and_modify(|documents| { - documents.insert(document); + documents.insert(public_key); }) - .or_insert(HashSet::from([document])); - Ok(()) + .or_insert(HashSet::from([public_key])); } } @@ -57,27 +47,18 @@ impl LogId { #[async_trait] impl TopicLogMap for DocumentStore { async fn get(&self, topic: &DocumentId) -> Option>> { - let store = &self.inner.read().await; - let mut result = HashMap::>::new(); - - for (public_key, documents) in &store.authors { - if documents.contains(topic) { - // We maintain two logs per author per document. 
- let log_ids = [ - LogId::new(LogType::Delta, topic), - LogId::new(LogType::Snapshot, topic), - ]; - - result - .entry(*public_key) - .and_modify(|logs| { - logs.extend_from_slice(&log_ids); - }) - .or_insert(log_ids.into()); - } - } - - Some(result) + let documents = self.documents.read().await; + let authors = documents.get(topic)?.clone(); + let log_ids = [ + LogId::new(LogType::Delta, topic), + LogId::new(LogType::Snapshot, topic), + ]; + Some( + authors + .into_iter() + .map(|author| (author, log_ids.to_vec())) + .collect(), + ) } } From 0db0aad472dd8f7636dcd9036f96ecc85aa2bfbb Mon Sep 17 00:00:00 2001 From: Julian Sparber Date: Sat, 26 Apr 2025 15:57:09 +0200 Subject: [PATCH 12/12] WIP: store documents and authors to sqlite --- Cargo.lock | 49 ++-- Cargo.toml | 8 + aardvark-app/src/textbuffer.rs | 11 +- aardvark-app/src/window.rs | 10 +- aardvark-doc/src/author.rs | 9 +- aardvark-doc/src/authors.rs | 6 + aardvark-doc/src/document.rs | 98 ++++++-- aardvark-doc/src/service.rs | 54 ++++- aardvark-node/Cargo.toml | 3 +- .../20250418140035_create_tables.sql | 13 + aardvark-node/src/document.rs | 65 +++++ aardvark-node/src/lib.rs | 1 + aardvark-node/src/node.rs | 172 ++++++++++---- aardvark-node/src/store.rs | 223 ++++++++++++++++-- aardvark-node/src/utils.rs | 34 +++ 15 files changed, 642 insertions(+), 114 deletions(-) create mode 100644 aardvark-node/migrations/20250418140035_create_tables.sql create mode 100644 aardvark-node/src/utils.rs diff --git a/Cargo.lock b/Cargo.lock index e3ffe734..57847376 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -42,6 +42,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "chrono", "ciborium", "p2panda-core", "p2panda-discovery", @@ -659,8 +660,10 @@ checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" dependencies = [ "android-tzdata", "iana-time-zone", + "js-sys", "num-traits", "serde", + "wasm-bindgen", "windows-link", ] @@ -3728,7 +3731,7 @@ checksum = 
"b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "p2panda-core" version = "0.3.0" -source = "git+https://github.com/p2panda/p2panda?rev=f3a016324b69beac45cf20a792fe6890cb1a21e3#f3a016324b69beac45cf20a792fe6890cb1a21e3" +source = "git+https://github.com/jsparber/p2panda.git?rev=dd9088a33d30014e9a11cbb3600278cfa2a2d889#dd9088a33d30014e9a11cbb3600278cfa2a2d889" dependencies = [ "blake3", "ciborium", @@ -3743,7 +3746,7 @@ dependencies = [ [[package]] name = "p2panda-discovery" version = "0.3.0" -source = "git+https://github.com/p2panda/p2panda?rev=f3a016324b69beac45cf20a792fe6890cb1a21e3#f3a016324b69beac45cf20a792fe6890cb1a21e3" +source = "git+https://github.com/jsparber/p2panda.git?rev=dd9088a33d30014e9a11cbb3600278cfa2a2d889#dd9088a33d30014e9a11cbb3600278cfa2a2d889" dependencies = [ "anyhow", "base32", @@ -3763,7 +3766,7 @@ dependencies = [ [[package]] name = "p2panda-net" version = "0.3.0" -source = "git+https://github.com/p2panda/p2panda?rev=f3a016324b69beac45cf20a792fe6890cb1a21e3#f3a016324b69beac45cf20a792fe6890cb1a21e3" +source = "git+https://github.com/jsparber/p2panda.git?rev=dd9088a33d30014e9a11cbb3600278cfa2a2d889#dd9088a33d30014e9a11cbb3600278cfa2a2d889" dependencies = [ "anyhow", "async-trait", @@ -3790,7 +3793,7 @@ dependencies = [ [[package]] name = "p2panda-store" version = "0.3.0" -source = "git+https://github.com/p2panda/p2panda?rev=f3a016324b69beac45cf20a792fe6890cb1a21e3#f3a016324b69beac45cf20a792fe6890cb1a21e3" +source = "git+https://github.com/jsparber/p2panda.git?rev=dd9088a33d30014e9a11cbb3600278cfa2a2d889#dd9088a33d30014e9a11cbb3600278cfa2a2d889" dependencies = [ "ciborium", "hex", @@ -3803,7 +3806,7 @@ dependencies = [ [[package]] name = "p2panda-stream" version = "0.3.0" -source = "git+https://github.com/p2panda/p2panda?rev=f3a016324b69beac45cf20a792fe6890cb1a21e3#f3a016324b69beac45cf20a792fe6890cb1a21e3" +source = 
"git+https://github.com/jsparber/p2panda.git?rev=dd9088a33d30014e9a11cbb3600278cfa2a2d889#dd9088a33d30014e9a11cbb3600278cfa2a2d889" dependencies = [ "ciborium", "futures-channel", @@ -3818,7 +3821,7 @@ dependencies = [ [[package]] name = "p2panda-sync" version = "0.3.0" -source = "git+https://github.com/p2panda/p2panda?rev=f3a016324b69beac45cf20a792fe6890cb1a21e3#f3a016324b69beac45cf20a792fe6890cb1a21e3" +source = "git+https://github.com/jsparber/p2panda.git?rev=dd9088a33d30014e9a11cbb3600278cfa2a2d889#dd9088a33d30014e9a11cbb3600278cfa2a2d889" dependencies = [ "async-trait", "futures", @@ -5113,9 +5116,9 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4410e73b3c0d8442c5f99b425d7a435b5ee0ae4167b3196771dd3f7a01be745f" +checksum = "f3c3a85280daca669cfd3bcb68a337882a8bc57ec882f72c5d13a430613a738e" dependencies = [ "sqlx-core", "sqlx-macros", @@ -5126,11 +5129,13 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a007b6936676aa9ab40207cde35daab0a04b823be8ae004368c0793b96a61e0" +checksum = "f743f2a3cea30a58cd479013f75550e879009e3a02f616f18ca699335aa248c3" dependencies = [ + "base64", "bytes", + "chrono", "crc", "crossbeam-queue", "either", @@ -5159,9 +5164,9 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3112e2ad78643fef903618d78cf0aec1cb3134b019730edb039b69eaf531f310" +checksum = "7f4200e0fde19834956d4252347c12a083bdcb237d7a1a1446bffd8768417dce" dependencies = [ "proc-macro2", "quote", @@ -5172,9 +5177,9 @@ dependencies = [ [[package]] name = "sqlx-macros-core" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4e9f90acc5ab146a99bf5061a7eb4976b573f560bc898ef3bf8435448dd5e7ad" +checksum = "882ceaa29cade31beca7129b6beeb05737f44f82dbe2a9806ecea5a7093d00b7" dependencies = [ "dotenvy", "either", @@ -5198,15 +5203,16 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4560278f0e00ce64938540546f59f590d60beee33fffbd3b9cd47851e5fff233" +checksum = "0afdd3aa7a629683c2d750c2df343025545087081ab5942593a5288855b1b7a7" dependencies = [ "atoi", "base64", "bitflags 2.9.0", "byteorder", "bytes", + "chrono", "crc", "digest", "dotenvy", @@ -5240,14 +5246,15 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b98a57f363ed6764d5b3a12bfedf62f07aa16e1856a7ddc2a0bb190a959613" +checksum = "a0bedbe1bbb5e2615ef347a5e9d8cd7680fb63e77d9dafc0f29be15e53f1ebe6" dependencies = [ "atoi", "base64", "bitflags 2.9.0", "byteorder", + "chrono", "crc", "dotenvy", "etcetera", @@ -5277,11 +5284,12 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f85ca71d3a5b24e64e1d08dd8fe36c6c95c339a896cc33068148906784620540" +checksum = "c26083e9a520e8eb87a06b12347679b142dc2ea29e6e409f805644a7a979a5bc" dependencies = [ "atoi", + "chrono", "flume", "futures-channel", "futures-core", @@ -5294,6 +5302,7 @@ dependencies = [ "serde", "serde_urlencoded", "sqlx-core", + "thiserror 2.0.12", "tracing", "url", ] diff --git a/Cargo.toml b/Cargo.toml index 52f55394..3afcfa8d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,3 +1,11 @@ [workspace] resolver = "2" members = ["aardvark-app", "aardvark-doc", "aardvark-node"] + +[patch."https://github.com/p2panda/p2panda"] +p2panda-core = { git = "https://github.com/jsparber/p2panda.git", rev = "dd9088a33d30014e9a11cbb3600278cfa2a2d889" } 
+p2panda-discovery = { git = "https://github.com/jsparber/p2panda.git", rev = "dd9088a33d30014e9a11cbb3600278cfa2a2d889", features = ["mdns"] } +p2panda-net = { git = "https://github.com/jsparber/p2panda.git", rev = "dd9088a33d30014e9a11cbb3600278cfa2a2d889" } +p2panda-store = { git = "https://github.com/jsparber/p2panda.git", rev = "dd9088a33d30014e9a11cbb3600278cfa2a2d889", features = ["sqlite"] } +p2panda-stream = { git = "https://github.com/jsparber/p2panda.git", rev = "dd9088a33d30014e9a11cbb3600278cfa2a2d889" } +p2panda-sync = { git = "https://github.com/jsparber/p2panda.git", rev = "dd9088a33d30014e9a11cbb3600278cfa2a2d889", features = ["log-sync"] } \ No newline at end of file diff --git a/aardvark-app/src/textbuffer.rs b/aardvark-app/src/textbuffer.rs index 33782d3e..97c0e5c8 100644 --- a/aardvark-app/src/textbuffer.rs +++ b/aardvark-app/src/textbuffer.rs @@ -37,20 +37,23 @@ mod imp { pub struct AardvarkTextBuffer { pub inhibit_text_change: Cell, pub document_handlers: OnceCell, - #[property(get, set = Self::set_document)] + #[property(get, set = Self::set_document, nullable)] pub document: RefCell>, } impl AardvarkTextBuffer { - fn set_document(&self, document: Option<&Document>) { + fn set_document(&self, document: Option) { if let Some(document) = document.as_ref() { self.obj().set_inhibit_text_change(true); self.obj().set_text(&document.text()); self.obj().set_inhibit_text_change(false); } - self.document_handlers.get().unwrap().set_target(document); - self.document.replace(document.cloned()); + self.document_handlers + .get() + .unwrap() + .set_target(document.as_ref()); + self.document.replace(document); } } diff --git a/aardvark-app/src/window.rs b/aardvark-app/src/window.rs index 958b0cb5..6727d178 100644 --- a/aardvark-app/src/window.rs +++ b/aardvark-app/src/window.rs @@ -232,11 +232,11 @@ mod imp { let document = Document::new(self.service.get().unwrap(), None); self.set_document(document); - } - fn dispose(&self) { - // TODO: we should actually 
set the window document to None - self.obj().document().set_subscribed(false); + self.obj().connect_close_request(|window| { + window.document().set_subscribed(false); + glib::Propagation::Proceed + }); } } @@ -261,7 +261,7 @@ mod imp { .buffer() .downcast::() .unwrap() - .set_document(&document); + .set_document(Some(document.clone())); let authors = document.authors(); self.connection_button .set_popover(Some(&ConnectionPopover::new(&authors))); diff --git a/aardvark-doc/src/author.rs b/aardvark-doc/src/author.rs index f1378bdb..ee2d91c8 100644 --- a/aardvark-doc/src/author.rs +++ b/aardvark-doc/src/author.rs @@ -80,7 +80,7 @@ mod imp { #[property(name = "color", get = Self::color, type = String)] #[property(get, set, construct_only, type = PublicKey)] public_key: OnceLock, - #[property(get)] + #[property(get, set, construct_only)] pub last_seen: Mutex>, #[property(get, default = true)] pub is_online: Cell, @@ -144,6 +144,13 @@ impl Author { obj } + pub(crate) fn with_state(public_key: &PublicKey, last_seen: Option<&glib::DateTime>) -> Self { + glib::Object::builder() + .property("public-key", public_key) + .property("last-seen", last_seen) + .build() + } + pub fn for_this_device(public_key: &PublicKey) -> Self { let obj = Self::new(public_key); diff --git a/aardvark-doc/src/authors.rs b/aardvark-doc/src/authors.rs index 7fe5d32a..c6b3a7d8 100644 --- a/aardvark-doc/src/authors.rs +++ b/aardvark-doc/src/authors.rs @@ -63,6 +63,12 @@ impl Authors { glib::Object::new() } + pub(crate) fn from_vec(authors: Vec) -> Self { + let obj: Self = glib::Object::new(); + *obj.imp().list.lock().unwrap() = authors; + obj + } + pub(crate) fn add_this_device(&self, author_key: PublicKey) { glib::source::idle_add_full( glib::source::Priority::DEFAULT, diff --git a/aardvark-doc/src/document.rs b/aardvark-doc/src/document.rs index d0ae63f3..d6110853 100644 --- a/aardvark-doc/src/document.rs +++ b/aardvark-doc/src/document.rs @@ -8,6 +8,7 @@ use std::sync::OnceLock; use 
aardvark_node::document::{DocumentId as DocumentIdNode, SubscribableDocument}; use anyhow::Result; +use gio::prelude::ApplicationExtManual; use glib::prelude::*; use glib::subclass::{Signal, prelude::*}; use glib::{Properties, clone}; @@ -21,7 +22,7 @@ use crate::service::Service; #[derive(Clone, Debug, PartialEq, Eq, glib::Boxed)] #[boxed_type(name = "AardvarkDocumentId", nullable)] -pub struct DocumentId(DocumentIdNode); +pub struct DocumentId(pub(crate) DocumentIdNode); impl FromStr for DocumentId { type Err = HashError; @@ -49,8 +50,9 @@ mod imp { #[derive(Properties, Default)] #[properties(wrapper_type = super::Document)] pub struct Document { - #[property(name = "name", get = Self::name, type = Option)] - #[property(get)] + #[property(get, construct_only)] + name: Mutex>, + #[property(get, construct_only, set)] last_accessed: Mutex>, #[property(name = "text", get = Self::text, type = String)] crdt_doc: OnceCell, @@ -60,8 +62,8 @@ mod imp { subscribed: Cell, #[property(get, construct_only)] service: OnceCell, - #[property(get)] - authors: Authors, + #[property(get, set = Self::set_authors, construct_only)] + authors: OnceCell, } #[glib::object_subclass] @@ -71,6 +73,12 @@ mod imp { } impl Document { + fn set_authors(&self, authors: Option) { + if let Some(authors) = authors { + self.authors.set(authors).unwrap(); + } + } + pub fn text(&self) -> String { self.crdt_doc .get() @@ -79,19 +87,46 @@ mod imp { .to_string() } - pub fn name(&self) -> Option { + fn update_name(&self) { let crdt_text = self .crdt_doc .get() .expect("crdt_doc to be set") .get_text(TEXT_CONTAINER_ID); - if crdt_text.is_empty() { - return None; + let name = if crdt_text.is_empty() { + None + } else { + crdt_text + .slice(0, min(DOCUMENT_NAME_LENGTH, crdt_text.len_unicode())) + .ok() + }; + + if name == self.obj().name() { + return; } - crdt_text - .slice(0, min(DOCUMENT_NAME_LENGTH, crdt_text.len_unicode())) - .ok() + *self.name.lock().unwrap() = name.clone(); + self.obj().notify_name(); + 
+ let obj = self.obj(); + glib::spawn_future(clone!( + #[weak] + obj, + async move { + let document_id = obj.id().0; + if let Err(error) = obj + .service() + .node() + .set_name_for_document(&document_id, name) + .await + { + error!( + "Failed to update name for document {}: {}", + document_id, error + ); + } + } + )); } fn set_id(&self, id: Option) { @@ -155,14 +190,18 @@ mod imp { *self.last_accessed.lock().unwrap() = glib::DateTime::now_local().ok(); let obj = self.obj(); - glib::spawn_future(clone!( - #[weak] + // Keep the application alive till we completed the unsubscription task + let guard = gio::Application::default().and_then(|app| Some(app.hold())); + // Keep a strong reference to the document to ensure the document lives long enough + glib::spawn_future_local(clone!( + #[strong] obj, async move { let document_id = obj.id().0; if let Err(error) = obj.service().node().unsubscribe(&document_id).await { error!("Failed to unsubscribe document {}: {}", document_id, error); } + drop(guard); } )); } @@ -172,7 +211,7 @@ mod imp { fn emit_text_inserted(&self, pos: i32, text: String) { if pos <= DOCUMENT_NAME_LENGTH as i32 { - self.obj().notify_name(); + self.update_name(); } // Emit the signal on the main thread @@ -195,7 +234,7 @@ mod imp { fn emit_range_deleted(&self, start: i32, end: i32) { if start <= DOCUMENT_NAME_LENGTH as i32 || end <= DOCUMENT_NAME_LENGTH as i32 { - self.obj().notify_name(); + self.update_name(); } // Emit the signal on the main thread @@ -345,6 +384,9 @@ mod imp { ] }) } + fn dispose(&self) { + self.set_subscribed(false); + } fn constructed(&self) { self.parent_constructed(); @@ -363,9 +405,13 @@ mod imp { self.setup_loro_document(); - // Add ourself to the list of authors - self.authors - .add_this_device(self.obj().service().private_key().public_key()); + self.authors.get_or_init(|| { + let authors = Authors::new(); + + // Add ourself to the list of authors + authors.add_this_device(self.obj().service().private_key().public_key()); + 
authors + }); self.obj().service().documents().add(self.obj().clone()); } @@ -383,6 +429,22 @@ impl Document { .build() } + pub(crate) fn with_state( + service: &Service, + id: Option<&DocumentId>, + name: Option<&str>, + last_accessed: Option<&glib::DateTime>, + authors: &Authors, + ) -> Self { + glib::Object::builder() + .property("service", service) + .property("id", id) + .property("authors", authors) + .property("name", name) + .property("last-accessed", last_accessed) + .build() + } + pub fn insert_text(&self, index: i32, chunk: &str) -> Result<()> { self.imp().splice_text(index, 0, chunk) } diff --git a/aardvark-doc/src/service.rs b/aardvark-doc/src/service.rs index 5e258a87..574e701b 100644 --- a/aardvark-doc/src/service.rs +++ b/aardvark-doc/src/service.rs @@ -6,8 +6,13 @@ use p2panda_core::Hash; use std::sync::OnceLock; use tracing::error; -use crate::documents::Documents; -use crate::identity::PrivateKey; +use crate::identity::{PrivateKey, PublicKey}; +use crate::{ + author::Author, + authors::Authors, + document::{Document, DocumentId}, + documents::Documents, +}; use aardvark_node::Node; mod imp { @@ -50,21 +55,62 @@ impl Service { pub fn startup(&self) { glib::MainContext::new().block_on(async move { let private_key = self.private_key().0.clone(); + let public_key = private_key.public_key(); let network_id = Hash::new(b"aardvark <3"); let path = self.data_dir().path().expect("Valid file path"); if let Err(error) = self .imp() .node - .run(private_key, network_id, Some(path.as_ref())) + .run(private_key.clone(), network_id, Some(path.as_ref())) .await { error!("Running node failed: {error}"); } + + if let Ok(documents) = self.imp().node.documents().await { + for document in documents { + let last_accessed = document.last_accessed.and_then(|last_accessed| { + glib::DateTime::from_unix_utc(last_accessed.timestamp()).ok() + }); + + let authors: Vec = document + .authors + .iter() + .map(|author| { + if author.public_key == public_key { + 
Author::for_this_device(&PublicKey(author.public_key)) + } else { + let last_seen = author.last_seen.and_then(|last_seen| { + glib::DateTime::from_unix_utc(last_seen.timestamp()).ok() + }); + Author::with_state( + &PublicKey(author.public_key), + last_seen.as_ref(), + ) + } + }) + .collect(); + + let authors = Authors::from_vec(authors); + // The document is inserted automatically in the document list + let _document = Document::with_state( + self, + Some(&DocumentId(document.id)), + document.name.as_deref(), + last_accessed.as_ref(), + &authors, + ); + } + } }); } pub fn shutdown(&self) { - self.imp().node.shutdown(); + glib::MainContext::new().block_on(async move { + if let Err(error) = self.imp().node.shutdown().await { + error!("Failed to shutdown service: {}", error); + } + }); } pub(crate) fn node(&self) -> &Node { diff --git a/aardvark-node/Cargo.toml b/aardvark-node/Cargo.toml index 4c11e7d3..43a0b720 100644 --- a/aardvark-node/Cargo.toml +++ b/aardvark-node/Cargo.toml @@ -11,6 +11,7 @@ authors = [ [dependencies] anyhow = "1.0.94" async-trait = "0.1.83" +chrono = "0.4.40" ciborium = "0.2.2" p2panda-core = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3" } p2panda-discovery = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3", features = ["mdns"] } @@ -19,7 +20,7 @@ p2panda-store = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b6 p2panda-stream = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3" } p2panda-sync = { git = "https://github.com/p2panda/p2panda", rev = "f3a016324b69beac45cf20a792fe6890cb1a21e3", features = ["log-sync"] } serde = { version = "1.0.215", features = ["derive"] } -sqlx = { version = "0.8.3", features = ["runtime-tokio", "sqlite"], default-features = false} +sqlx = { version = "0.8.5", features = ["runtime-tokio", "sqlite", "chrono"], default-features = false} tokio = { version = 
"1.42.0", features = ["rt", "sync"] } tokio-stream = "0.1.17" tracing = "0.1" diff --git a/aardvark-node/migrations/20250418140035_create_tables.sql b/aardvark-node/migrations/20250418140035_create_tables.sql new file mode 100644 index 00000000..39ad1047 --- /dev/null +++ b/aardvark-node/migrations/20250418140035_create_tables.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS authors ( + public_key TEXT NOT NULL, + document_id TEXT NOT NULL, + last_seen INTEGER, + UNIQUE(public_key, document_id), + FOREIGN KEY(document_id) REFERENCES documents(document_id) +); + +CREATE TABLE IF NOT EXISTS documents ( + document_id TEXT NOT NULL PRIMARY KEY, + name TEXT, + last_accessed INTEGER +); \ No newline at end of file diff --git a/aardvark-node/src/document.rs b/aardvark-node/src/document.rs index f96c4ac5..cd27fb50 100644 --- a/aardvark-node/src/document.rs +++ b/aardvark-node/src/document.rs @@ -2,14 +2,27 @@ use std::fmt; use std::hash::Hash as StdHash; use std::str::FromStr; +use chrono::{DateTime, Utc}; use p2panda_core::{Hash, HashError, PublicKey}; use p2panda_net::TopicId; use p2panda_sync::TopicQuery; use serde::{Deserialize, Serialize}; +use sqlx::{ + Decode, Encode, FromRow, Sqlite, Type, + encode::IsNull, + error::BoxDynError, + sqlite::{SqliteArgumentValue, SqliteTypeInfo, SqliteValueRef}, +}; #[derive(Copy, Clone, Debug, PartialEq, Eq, StdHash, Serialize, Deserialize)] pub struct DocumentId(Hash); +impl DocumentId { + pub fn as_bytes(&self) -> &[u8] { + self.0.as_ref() + } +} + impl TopicQuery for DocumentId {} impl TopicId for DocumentId { @@ -56,6 +69,58 @@ impl FromStr for DocumentId { } } +impl TryFrom<&[u8]> for DocumentId { + type Error = HashError; + + fn try_from(value: &[u8]) -> Result { + Ok(Hash::try_from(value)?.into()) + } +} + +impl Type for DocumentId { + fn type_info() -> SqliteTypeInfo { + <&[u8] as Type>::type_info() + } + + fn compatible(ty: &SqliteTypeInfo) -> bool { + <&[u8] as Type>::compatible(ty) + } +} + +impl<'q> Encode<'q, Sqlite> for 
&'q DocumentId { + fn encode_by_ref( + &self, + args: &mut Vec>, + ) -> Result { + <&[u8] as Encode>::encode_by_ref(&self.as_bytes(), args) + } +} + +impl Decode<'_, Sqlite> for DocumentId { + fn decode(value: SqliteValueRef<'_>) -> Result { + Ok(DocumentId::try_from(<&[u8] as Decode>::decode( + value, + )?)?) + } +} + +#[derive(Debug, FromRow)] +pub struct Document { + #[sqlx(rename = "document_id")] + pub id: DocumentId, + #[sqlx(default)] + pub name: Option, + pub last_accessed: Option>, + #[sqlx(skip)] + pub authors: Vec, +} + +#[derive(Debug)] +pub struct Author { + pub public_key: PublicKey, + pub last_seen: Option>, +} + pub trait SubscribableDocument: Sync + Send { fn bytes_received(&self, author: PublicKey, data: &[u8]); fn authors_joined(&self, authors: Vec); diff --git a/aardvark-node/src/lib.rs b/aardvark-node/src/lib.rs index aa2d7dfb..1cfd1d68 100644 --- a/aardvark-node/src/lib.rs +++ b/aardvark-node/src/lib.rs @@ -3,6 +3,7 @@ mod network; mod node; mod operation; mod store; +mod utils; pub use document::SubscribableDocument; pub use node::Node; diff --git a/aardvark-node/src/node.rs b/aardvark-node/src/node.rs index 88390001..53ca49a3 100644 --- a/aardvark-node/src/node.rs +++ b/aardvark-node/src/node.rs @@ -3,20 +3,22 @@ use std::path::Path; use std::sync::{Arc, OnceLock}; use anyhow::Result; +use chrono::Utc; use p2panda_core::{Hash, PrivateKey}; use p2panda_net::{SyncConfiguration, SystemEvent}; -use p2panda_store::sqlite::store::run_pending_migrations; +use p2panda_store::sqlite::store::migrations as operation_store_migrations; use p2panda_sync::log_sync::LogSyncProtocol; -use sqlx::sqlite; +use sqlx::{migrate::Migrator, sqlite}; use tokio::runtime::{Builder, Runtime}; use tokio::sync::Notify; use tokio::sync::RwLock; -use tracing::{info, warn}; +use tracing::{error, info, warn}; -use crate::document::{DocumentId, SubscribableDocument}; +use crate::document::{Document, DocumentId, SubscribableDocument}; use crate::network::Network; use 
crate::operation::{LogType, create_operation, validate_operation}; use crate::store::{DocumentStore, OperationStore}; +use crate::utils::CombinedMigrationSource; pub struct Node { inner: OnceLock>, @@ -94,13 +96,17 @@ impl Node { pool_options.connect_with(connection_options).await? }; - // FIXME: Migrate the OperationStore Sqlite DB, I think p2panda-store should call this internally - run_pending_migrations(&pool).await?; - // TODO: Do migration of our DB - // sqlx::migrate!().run(&pool).await?; + // Run migration for p2panda OperationStore and for the our DocumentStore + Migrator::new(CombinedMigrationSource::new(vec![ + operation_store_migrations(), + sqlx::migrate!(), + ])) + .await? + .run(&pool) + .await?; - let operation_store = OperationStore::new(pool); - let document_store = DocumentStore::new(); + let operation_store = OperationStore::new(pool.clone()); + let document_store = DocumentStore::new(pool); let sync_config = { let sync = LogSyncProtocol::new(document_store.clone(), operation_store.clone()); @@ -114,11 +120,22 @@ impl Node { operation_store.clone(), ) .await?; + let inner = Arc::new(NodeInner { + runtime, + operation_store, + document_store, + network, + private_key, + }); let documents = self.documents.clone(); - network + + let inner_clone = inner.clone(); + inner + .network .subscribe_events(move |system_event| { let documents = documents.clone(); + let inner_clone = inner_clone.clone(); async move { match system_event { SystemEvent::GossipJoined { topic_id, peers } => { @@ -132,6 +149,13 @@ impl Node { } } SystemEvent::GossipNeighborDown { topic_id, peer } => { + if let Err(error) = inner_clone + .document_store + .set_last_seen_for_author(peer, Some(Utc::now())) + .await + { + error!("Failed to set last seen for author {peer}: {error}"); + } if let Some(document) = documents.read().await.get(&topic_id.into()) { document.author_set_online(peer, false); } @@ -142,31 +166,31 @@ impl Node { }) .await?; - self.inner - .set(Arc::new(NodeInner { - 
runtime, - operation_store, - document_store, - network, - private_key, - })) - .expect("Node can be run only once"); + self.inner.set(inner).expect("Node can be run only once"); self.ready_notify.notify_waiters(); Ok(()) } - pub fn shutdown(&self) { - if let Some(inner) = self.inner.get() { - let inner_clone = inner.clone(); - inner.runtime.block_on(async move { - inner_clone - .network - .shutdown() - .await - .expect("network to shutdown"); - }); - } + pub async fn shutdown(&self) -> Result<()> { + // TODO: set last seen for all peer that are online + // TODO: ensure all documents are unsubscribe at this point + let inner = self.inner().await; + let _guard = inner.runtime.enter(); + + inner.network.shutdown().await?; + + Ok(()) + } + + pub async fn documents(&self) -> Result> { + let inner = self.inner().await; + + let inner_clone = inner.clone(); + Ok(inner + .runtime + .spawn(async move { inner_clone.document_store.documents().await }) + .await??) } pub async fn create_document(&self) -> Result { @@ -193,6 +217,33 @@ impl Node { Ok(document_id) } + /// Set the name for a given document + /// + /// This information will be written to the database + pub async fn set_name_for_document( + &self, + document_id: &DocumentId, + name: Option, + ) -> Result<()> { + let inner = self.inner().await; + + let inner_clone = inner.clone(); + let document_id = *document_id; + inner + .runtime + .spawn(async move { + inner_clone + .document_store + .set_name_for_document(&document_id, name) + .await + }) + .await??; + + Ok(()) + } + + // TODO: check if peers are online and call SubscribableDocument::author_set_online(). + // For this we need to track the systemevents pub async fn subscribe( &self, document_id: DocumentId, @@ -201,11 +252,33 @@ impl Node { let document = Arc::new(document); let inner = self.inner().await; - // Add ourselves as an author to the document store. 
- inner - .document_store - .add_author(document_id, inner.private_key.public_key()) - .await; + let inner_clone = inner.clone(); + let stored_operations = inner + .runtime + .spawn(async move { + inner_clone + .document_store + .add_document(&document_id) + .await?; + // Add ourselves as an author to the document store. + inner_clone + .document_store + .add_author(&document_id, &inner_clone.private_key.public_key()) + .await?; + inner_clone + .document_store + .operations_for_document(&inner_clone.operation_store, &document_id) + .await + }) + .await??; + + for operation in stored_operations { + // Send all stored operation bytes to the app, + // it doesn't matter if the app already knows some or all of them + if let Some(body) = operation.body { + document.bytes_received(operation.header.public_key, &body.to_bytes()); + } + } let inner_clone = inner.clone(); let document_clone = document.clone(); @@ -232,10 +305,13 @@ impl Node { } // When we discover a new author we need to add them to our document store. - inner_clone + if let Err(error) = inner_clone .document_store - .add_author(document_id, operation.header.public_key) - .await; + .add_author(&document_id, &operation.header.public_key) + .await + { + error!("Can't store author to database: {error}"); + } // Forward the payload up to the app. 
if let Some(body) = operation.body { @@ -256,8 +332,22 @@ impl Node { pub async fn unsubscribe(&self, document_id: &DocumentId) -> Result<()> { let inner = self.inner().await; - inner.network.unsubscribe(document_id).await?; - self.documents.write().await.remove(document_id); + let inner_clone = inner.clone(); + let document_id = *document_id; + + inner + .runtime + .spawn(async move { + inner_clone + .document_store + .set_last_accessed_for_document(&document_id, Some(Utc::now())) + .await?; + + let result = inner_clone.network.unsubscribe(&document_id).await; + result + }) + .await??; + self.documents.write().await.remove(&document_id); Ok(()) } diff --git a/aardvark-node/src/store.rs b/aardvark-node/src/store.rs index 387657bc..8447b5c8 100644 --- a/aardvark-node/src/store.rs +++ b/aardvark-node/src/store.rs @@ -1,37 +1,219 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use std::hash::Hash as StdHash; -use std::sync::Arc; use async_trait::async_trait; +use chrono::{DateTime, Utc}; use p2panda_core::PublicKey; -use p2panda_store::SqliteStore; +use p2panda_store::{LogStore, SqliteStore}; use p2panda_sync::log_sync::TopicLogMap; use serde::{Deserialize, Serialize}; -use tokio::sync::RwLock; +use sqlx; +use sqlx::Row; +use tracing::error; -use crate::document::DocumentId; -use crate::operation::{AardvarkExtensions, LogType}; +use crate::document::{Author, Document, DocumentId}; +use crate::operation::{AardvarkExtensions, LogType, validate_operation}; #[derive(Clone, Debug)] pub struct DocumentStore { - documents: Arc>>>, + pool: sqlx::SqlitePool, } impl DocumentStore { - pub fn new() -> Self { - Self { - documents: Arc::new(RwLock::new(HashMap::new())), + pub fn new(pool: sqlx::SqlitePool) -> Self { + Self { pool } + } + + async fn authors(&self, document_id: &DocumentId) -> sqlx::Result> { + let list = sqlx::query("SELECT public_key FROM authors WHERE document_id = ?") + .bind(document_id) + .fetch_all(&self.pool) + .await?; + + 
The document_id is the primary key in the table, so the insertion is ignored if the document already exists
The author/document_id pair is required to be unique, so the insertion is ignored if the pair already exists
+ ", + ) + .bind(last_accessed) + .bind(document_id) + .execute(&self.pool) + .await?; + + Ok(()) + } + + pub async fn operations_for_document( + &self, + operation_store: &OperationStore, + document_id: &DocumentId, + ) -> sqlx::Result>> { + let authors = self.authors(document_id).await?; + + let log_ids = [ + LogId::new(LogType::Delta, document_id), + LogId::new(LogType::Snapshot, document_id), + ]; + + let mut result = Vec::new(); + + for author in authors.iter() { + for log_id in &log_ids { + let operations = match operation_store.get_log(author, log_id, None).await { + Ok(Some(operations)) => { + operations.into_iter().map(|(header, body)| { + let operation = p2panda_core::Operation { + hash: header.hash(), + header, + body, + }; + + // Stored operations are always valid + assert!(validate_operation(&operation, &document_id).is_ok()); + operation + }) + } + Ok(None) => { + continue; + } + Err(error) => { + error!( + "Failed to load operation for {author} with log type {log_id:?}: {error}" + ); + continue; + } + }; + + result.extend(operations); + } + } + + Ok(result) } } @@ -47,8 +229,9 @@ impl LogId { #[async_trait] impl TopicLogMap for DocumentStore { async fn get(&self, topic: &DocumentId) -> Option>> { - let documents = self.documents.read().await; - let authors = documents.get(topic)?.clone(); + let Ok(authors) = self.authors(topic).await else { + return None; + }; let log_ids = [ LogId::new(LogType::Delta, topic), LogId::new(LogType::Snapshot, topic), diff --git a/aardvark-node/src/utils.rs b/aardvark-node/src/utils.rs new file mode 100644 index 00000000..195dd214 --- /dev/null +++ b/aardvark-node/src/utils.rs @@ -0,0 +1,34 @@ +use std::pin::Pin; + +use sqlx::error::BoxDynError; +use sqlx::migrate::{Migration, MigrationSource, Migrator}; + +type BoxFuture<'a, T> = Pin + Send + 'a>>; + +/// Combine multiple `sqlx::migrate::Migrator` into a single `sqlx::migrate::MigrationSource` +/// +/// See for more details: 
https://github.com/launchbadge/sqlx/discussions/3407 +#[derive(Debug)] +pub struct CombinedMigrationSource { + migrators: Vec, +} + +impl CombinedMigrationSource { + pub fn new(migrators: Vec) -> CombinedMigrationSource { + Self { migrators } + } +} + +impl<'s> MigrationSource<'s> for CombinedMigrationSource { + fn resolve(self) -> BoxFuture<'s, Result, BoxDynError>> { + Box::pin(async move { + Ok(self + .migrators + .iter() + .map(|migrator| migrator.iter()) + .flatten() + .cloned() + .collect()) + }) + } +}