we have a localfirst E2EE dropbox

pull/19/head
Niko PLP 8 months ago
parent fe4a41b3c2
commit c8b7a04ab4
  1. 7
      Cargo.lock
  2. 4
      nextgraph/examples/in_memory.rs
  3. 4
      nextgraph/examples/open.rs
  4. 4
      nextgraph/examples/persistent.rs
  5. 171
      nextgraph/src/local_broker.rs
  6. 1
      ng-app/src-tauri/Cargo.toml
  7. 123
      ng-app/src-tauri/src/lib.rs
  8. 32
      ng-app/src/api.ts
  9. 3
      ng-app/src/lib/Home.svelte
  10. 209
      ng-app/src/lib/Test.svelte
  11. 20
      ng-app/src/routes/Test.svelte
  12. 32
      ng-app/src/store.ts
  13. 53
      ng-broker/src/rocksdb_server_storage.rs
  14. 119
      ng-broker/src/server_broker.rs
  15. 1
      ng-net/Cargo.toml
  16. 102
      ng-net/src/actors/client/blocks_exist.rs
  17. 129
      ng-net/src/actors/client/blocks_get.rs
  18. 79
      ng-net/src/actors/client/blocks_put.rs
  19. 18
      ng-net/src/actors/client/event.rs
  20. 6
      ng-net/src/actors/client/mod.rs
  21. 25
      ng-net/src/actors/client/pin_repo.rs
  22. 17
      ng-net/src/actors/client/topic_sub.rs
  23. 237
      ng-net/src/broker.rs
  24. 44
      ng-net/src/connection.rs
  25. 17
      ng-net/src/server_broker.rs
  26. 143
      ng-net/src/types.rs
  27. 9
      ng-repo/src/block_storage.rs
  28. 18
      ng-repo/src/commit.rs
  29. 31
      ng-repo/src/errors.rs
  30. 185
      ng-repo/src/file.rs
  31. 13
      ng-repo/src/repo.rs
  32. 9
      ng-repo/src/store.rs
  33. 72
      ng-repo/src/types.rs
  34. 9
      ng-repo/src/utils.rs
  35. 11
      ng-sdk-js/js/browser.js
  36. 4
      ng-sdk-js/js/node.js
  37. 135
      ng-sdk-js/src/lib.rs
  38. 9
      ng-storage-rocksdb/src/block_storage.rs
  39. 1
      ng-verifier/Cargo.toml
  40. 140
      ng-verifier/src/commits/mod.rs
  41. 2
      ng-verifier/src/lib.rs
  42. 220
      ng-verifier/src/request_processor.rs
  43. 11
      ng-verifier/src/rocksdb_user_storage.rs
  44. 14
      ng-verifier/src/site.rs
  45. 124
      ng-verifier/src/types.rs
  46. 110
      ng-verifier/src/user_storage/storage.rs
  47. 644
      ng-verifier/src/verifier.rs
  48. 22
      ng-wallet/src/lib.rs
  49. 20
      ng-wallet/src/types.rs
  50. 15
      ngcli/src/main.rs
  51. 24
      ngd/src/main.rs
  52. 6
      ngone/src/main.rs

7
Cargo.lock generated

@ -384,9 +384,9 @@ dependencies = [
[[package]] [[package]]
name = "async-recursion" name = "async-recursion"
version = "1.0.4" version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -3255,6 +3255,7 @@ dependencies = [
"ng-repo", "ng-repo",
"ng-wallet", "ng-wallet",
"serde", "serde",
"serde_bytes",
"serde_json", "serde_json",
"tauri", "tauri",
"tauri-build", "tauri-build",
@ -3318,6 +3319,7 @@ name = "ng-net"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"async-broadcast 0.4.1", "async-broadcast 0.4.1",
"async-recursion",
"async-std", "async-std",
"async-trait", "async-trait",
"base64-url", "base64-url",
@ -3421,6 +3423,7 @@ name = "ng-verifier"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"async-std", "async-std",
"async-trait",
"automerge", "automerge",
"blake3", "blake3",
"chacha20", "chacha20",

@ -8,8 +8,8 @@
// according to those terms. // according to those terms.
use nextgraph::local_broker::{ use nextgraph::local_broker::{
doc_fetch, init_local_broker, session_start, session_stop, user_connect, user_disconnect, app_request, app_request_stream, init_local_broker, session_start, session_stop, user_connect,
wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import, user_disconnect, wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import,
wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, LocalBrokerConfig, wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, LocalBrokerConfig,
SessionConfig, SessionConfig,
}; };

@ -8,8 +8,8 @@
// according to those terms. // according to those terms.
use nextgraph::local_broker::{ use nextgraph::local_broker::{
doc_fetch, init_local_broker, session_start, session_stop, user_connect, user_disconnect, app_request, app_request_stream, init_local_broker, session_start, session_stop, user_connect,
wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import, user_disconnect, wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import,
wallet_open_with_pazzle, wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, wallet_open_with_pazzle, wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened,
LocalBrokerConfig, SessionConfig, LocalBrokerConfig, SessionConfig,
}; };

@ -8,8 +8,8 @@
// according to those terms. // according to those terms.
use nextgraph::local_broker::{ use nextgraph::local_broker::{
doc_fetch, init_local_broker, session_start, session_stop, user_connect, user_disconnect, app_request, app_request_stream, init_local_broker, session_start, session_stop, user_connect,
wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import, user_disconnect, wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import,
wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, LocalBrokerConfig, wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, LocalBrokerConfig,
SessionConfig, SessionConfig,
}; };

@ -23,7 +23,7 @@ use once_cell::sync::Lazy;
use serde_bare::to_vec; use serde_bare::to_vec;
use serde_json::json; use serde_json::json;
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::{read, write, File, OpenOptions}; use std::fs::{read, remove_file, write, File, OpenOptions};
use std::path::PathBuf; use std::path::PathBuf;
use zeroize::{Zeroize, ZeroizeOnDrop}; use zeroize::{Zeroize, ZeroizeOnDrop};
@ -54,6 +54,7 @@ pub struct JsStorageConfig {
pub session_read: Arc<Box<JsStorageReadFn>>, pub session_read: Arc<Box<JsStorageReadFn>>,
pub session_write: Arc<Box<JsStorageWriteFn>>, pub session_write: Arc<Box<JsStorageWriteFn>>,
pub session_del: Arc<Box<JsStorageDelFn>>, pub session_del: Arc<Box<JsStorageDelFn>>,
pub clear: Arc<Box<JsCallback>>,
pub is_browser: bool, pub is_browser: bool,
} }
@ -111,6 +112,7 @@ impl JsStorageConfig {
outbox_read_function: Box::new( outbox_read_function: Box::new(
move |peer_id: PubKey| -> Result<Vec<Vec<u8>>, NgError> { move |peer_id: PubKey| -> Result<Vec<Vec<u8>>, NgError> {
let start_key = format!("ng_outboxes@{}@start", peer_id); let start_key = format!("ng_outboxes@{}@start", peer_id);
//log_info!("search start key {}", start_key);
let res = (session_read4)(start_key.clone()); let res = (session_read4)(start_key.clone());
let _start = match res { let _start = match res {
Err(_) => return Err(NgError::JsStorageKeyNotFound), Err(_) => return Err(NgError::JsStorageKeyNotFound),
@ -123,6 +125,7 @@ impl JsStorageConfig {
loop { loop {
let idx_str = format!("{:05}", idx); let idx_str = format!("{:05}", idx);
let str = format!("ng_outboxes@{}@{idx_str}", peer_id); let str = format!("ng_outboxes@{}@{idx_str}", peer_id);
//log_info!("search key {}", str);
let res = (session_read4)(str.clone()); let res = (session_read4)(str.clone());
let res = match res { let res = match res {
Err(_) => break, Err(_) => break,
@ -369,7 +372,6 @@ impl fmt::Debug for OpenedWallet {
} }
} }
#[derive(Debug)]
struct LocalBroker { struct LocalBroker {
pub config: LocalBrokerConfig, pub config: LocalBrokerConfig,
@ -382,12 +384,29 @@ struct LocalBroker {
pub opened_sessions: HashMap<UserId, u64>, pub opened_sessions: HashMap<UserId, u64>,
pub opened_sessions_list: Vec<Option<Session>>, pub opened_sessions_list: Vec<Option<Session>>,
tauri_streams: HashMap<String, CancelFn>,
}
impl fmt::Debug for LocalBroker {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "LocalBroker.\nconfig {:?}", self.config)?;
writeln!(f, "wallets {:?}", self.wallets)?;
writeln!(f, "opened_wallets {:?}", self.opened_wallets)?;
writeln!(f, "sessions {:?}", self.sessions)?;
writeln!(f, "opened_sessions {:?}", self.opened_sessions)?;
writeln!(f, "opened_sessions_list {:?}", self.opened_sessions_list)
}
} }
// used to deliver events to the verifier on Clients, or Core that have Verifiers attached. // used to deliver events to the verifier on Clients, or Core that have Verifiers attached.
#[async_trait::async_trait] #[async_trait::async_trait]
impl ILocalBroker for LocalBroker { impl ILocalBroker for LocalBroker {
async fn deliver(&mut self, event: Event) {} async fn deliver(&mut self, event: Event, overlay: OverlayId, user_id: UserId) {
if let Some(session) = self.get_mut_session_for_user(&user_id) {
session.verifier.deliver(event, overlay).await;
}
}
} }
// this is used if an Actor does a BROKER.local_broker.respond // this is used if an Actor does a BROKER.local_broker.respond
@ -420,6 +439,21 @@ impl LocalBroker {
// } // }
// } // }
/// helper function to store the sender of a tauri stream in order to be able to cancel it later on
/// only used in Tauri, not used in the JS SDK
fn tauri_stream_add(&mut self, stream_id: String, cancel: CancelFn) {
self.tauri_streams.insert(stream_id, cancel);
}
/// helper function to cancel a tauri stream
/// only used in Tauri, not used in the JS SDK
fn tauri_stream_cancel(&mut self, stream_id: String) {
let s = self.tauri_streams.remove(&stream_id);
if let Some(cancel) = s {
cancel();
}
}
fn get_mut_session_for_user(&mut self, user: &UserId) -> Option<&mut Session> { fn get_mut_session_for_user(&mut self, user: &UserId) -> Option<&mut Session> {
match self.opened_sessions.get(user) { match self.opened_sessions.get(user) {
Some(idx) => self.opened_sessions_list[*idx as usize].as_mut(), Some(idx) => self.opened_sessions_list[*idx as usize].as_mut(),
@ -604,7 +638,7 @@ impl LocalBroker {
let credentials = match opened_wallet.wallet.individual_site(&user_id) { let credentials = match opened_wallet.wallet.individual_site(&user_id) {
Some(creds) => creds, Some(creds) => creds,
None => match user_priv_key { None => match user_priv_key {
Some(user_pk) => (user_pk, None, None), Some(user_pk) => (user_pk, None, None, None, None),
None => return Err(NgError::NotFound), None => return Err(NgError::NotFound),
}, },
}; };
@ -745,6 +779,8 @@ impl LocalBroker {
user_priv_key: credentials.0, user_priv_key: credentials.0,
private_store_read_cap: credentials.1, private_store_read_cap: credentials.1,
private_store_id: credentials.2, private_store_id: credentials.2,
protected_store_id: credentials.3,
public_store_id: credentials.4,
}, },
block_storage, block_storage,
)?; )?;
@ -794,7 +830,7 @@ impl LocalBroker {
let lws_ser = LocalWalletStorage::v0_to_vec(&wallets_to_be_saved); let lws_ser = LocalWalletStorage::v0_to_vec(&wallets_to_be_saved);
let r = write(path.clone(), &lws_ser); let r = write(path.clone(), &lws_ser);
if r.is_err() { if r.is_err() {
log_debug!("write error {:?} {}", path, r.unwrap_err()); log_err!("write error {:?} {}", path, r.unwrap_err());
return Err(NgError::IoError); return Err(NgError::IoError);
} }
} }
@ -815,11 +851,17 @@ async fn init_(config: LocalBrokerConfig) -> Result<Arc<RwLock<LocalBroker>>, Ng
// load the wallets and sessions from disk // load the wallets and sessions from disk
let mut path = base_path.clone(); let mut path = base_path.clone();
path.push("wallets"); path.push("wallets");
let map_ser = read(path); let map_ser = read(path.clone());
if map_ser.is_ok() { if map_ser.is_ok() {
let wallets = LocalWalletStorage::v0_from_vec(&map_ser.unwrap())?; let wallets = LocalWalletStorage::v0_from_vec(&map_ser.unwrap());
let LocalWalletStorage::V0(wallets) = wallets; if wallets.is_err() {
log_err!("Load LocalWalletStorage error: {:?}", wallets.unwrap_err());
let _ = remove_file(path);
HashMap::new()
} else {
let LocalWalletStorage::V0(wallets) = wallets.unwrap();
wallets wallets
}
} else { } else {
HashMap::new() HashMap::new()
} }
@ -829,12 +871,27 @@ async fn init_(config: LocalBrokerConfig) -> Result<Arc<RwLock<LocalBroker>>, Ng
match (js_storage_config.local_read)("ng_wallets".to_string()) { match (js_storage_config.local_read)("ng_wallets".to_string()) {
Err(_) => HashMap::new(), Err(_) => HashMap::new(),
Ok(wallets_string) => { Ok(wallets_string) => {
let map_ser = base64_url::decode(&wallets_string) match base64_url::decode(&wallets_string)
.map_err(|_| NgError::SerializationError)?; .map_err(|_| NgError::SerializationError)
let wallets: LocalWalletStorage = serde_bare::from_slice(&map_ser)?; {
Err(e) => {
log_err!("Load wallets error: {:?}", e);
(js_storage_config.clear)();
HashMap::new()
}
Ok(map_ser) => match serde_bare::from_slice(&map_ser) {
Err(e) => {
log_err!("Load LocalWalletStorage error: {:?}", e);
(js_storage_config.clear)();
HashMap::new()
}
Ok(wallets) => {
let LocalWalletStorage::V0(v0) = wallets; let LocalWalletStorage::V0(v0) = wallets;
v0 v0
} }
},
}
}
} }
} }
}; };
@ -846,14 +903,16 @@ async fn init_(config: LocalBrokerConfig) -> Result<Arc<RwLock<LocalBroker>>, Ng
sessions: HashMap::new(), sessions: HashMap::new(),
opened_sessions: HashMap::new(), opened_sessions: HashMap::new(),
opened_sessions_list: vec![], opened_sessions_list: vec![],
tauri_streams: HashMap::new(),
}; };
//log_debug!("{:?}", &local_broker); //log_debug!("{:?}", &local_broker);
let broker = Arc::new(RwLock::new(local_broker)); let broker = Arc::new(RwLock::new(local_broker));
BROKER.write().await.set_local_broker(Arc::clone( BROKER
&(Arc::clone(&broker) as Arc<RwLock<dyn ILocalBroker>>), .write()
)); .await
.set_local_broker(Arc::clone(&broker) as Arc<RwLock<dyn ILocalBroker>>);
Ok(broker) Ok(broker)
} }
@ -868,6 +927,27 @@ pub async fn init_local_broker_with_lazy(config_fn: &Lazy<Box<ConfigInitFn>>) {
.await; .await;
} }
#[doc(hidden)]
pub async fn tauri_stream_add(stream_id: String, cancel: CancelFn) -> Result<(), NgError> {
let mut broker = match LOCAL_BROKER.get() {
None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
Some(Ok(broker)) => broker.write().await,
};
broker.tauri_stream_add(stream_id, cancel);
Ok(())
}
#[doc(hidden)]
pub async fn tauri_stream_cancel(stream_id: String) -> Result<(), NgError> {
let mut broker = match LOCAL_BROKER.get() {
None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
Some(Ok(broker)) => broker.write().await,
};
broker.tauri_stream_cancel(stream_id);
Ok(())
}
/// Initialize the configuration of your local broker /// Initialize the configuration of your local broker
/// ///
/// , by passing in a function (or closure) that returns a `LocalBrokerConfig`. /// , by passing in a function (or closure) that returns a `LocalBrokerConfig`.
@ -1415,11 +1495,64 @@ pub async fn wallet_remove(wallet_name: String) -> Result<(), NgError> {
Ok(()) Ok(())
} }
/// fetches a document's content, or performs a mutation on the document. // /// fetches a document's content.
pub async fn doc_fetch( // pub async fn doc_fetch_nuri(
// session_id: u64,
// nuri: String,
// payload: Option<AppRequestPayload>,
// ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
// let mut broker = match LOCAL_BROKER.get() {
// None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
// Some(Ok(broker)) => broker.write().await,
// };
// if session_id as usize >= broker.opened_sessions_list.len() {
// return Err(NgError::InvalidArgument);
// }
// let session = broker.opened_sessions_list[session_id as usize]
// .as_mut()
// .ok_or(NgError::SessionNotFound)?;
// session.verifier.doc_fetch_nuri(nuri, payload, true).await
// }
// /// fetches the private store home page and subscribes to its updates.
// pub async fn doc_fetch_private(
// session_id: u64,
// ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
// let mut broker = match LOCAL_BROKER.get() {
// None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
// Some(Ok(broker)) => broker.write().await,
// };
// if session_id as usize >= broker.opened_sessions_list.len() {
// return Err(NgError::InvalidArgument);
// }
// let session = broker.opened_sessions_list[session_id as usize]
// .as_mut()
// .ok_or(NgError::SessionNotFound)?;
// session.verifier.doc_fetch_private(true).await
// }
/// process any type of app request that returns a single value
pub async fn app_request(session_id: u64, request: AppRequest) -> Result<AppResponse, NgError> {
let mut broker = match LOCAL_BROKER.get() {
None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
Some(Ok(broker)) => broker.write().await,
};
if session_id as usize >= broker.opened_sessions_list.len() {
return Err(NgError::InvalidArgument);
}
let session = broker.opened_sessions_list[session_id as usize]
.as_mut()
.ok_or(NgError::SessionNotFound)?;
session.verifier.app_request(request).await
}
/// process any type of app request that returns a stream of values
pub async fn app_request_stream(
session_id: u64, session_id: u64,
nuri: String, request: AppRequest,
payload: Option<AppRequestPayload>,
) -> Result<(Receiver<AppResponse>, CancelFn), NgError> { ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
let mut broker = match LOCAL_BROKER.get() { let mut broker = match LOCAL_BROKER.get() {
None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized), None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
@ -1432,7 +1565,7 @@ pub async fn doc_fetch(
.as_mut() .as_mut()
.ok_or(NgError::SessionNotFound)?; .ok_or(NgError::SessionNotFound)?;
session.verifier.doc_fetch(nuri, payload) session.verifier.app_request_stream(request).await
} }
/// retrieves the ID of one of the 3 stores of a the personal Site (3P: public, protected, or private) /// retrieves the ID of one of the 3 stores of a the personal Site (3P: public, protected, or private)

@ -29,6 +29,7 @@ tauri = { version = "2.0.0-alpha.14", features = [] }
# tauri = { git = "https://github.com/simonhyll/tauri.git", branch="fix/ipc-mixup", features = [] } # tauri = { git = "https://github.com/simonhyll/tauri.git", branch="fix/ipc-mixup", features = [] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
serde_bytes = "0.11.7"
ng-repo = { path = "../../ng-repo" } ng-repo = { path = "../../ng-repo" }
ng-net = { path = "../../ng-net" } ng-net = { path = "../../ng-net" }
ng-client-ws = { path = "../../ng-client-ws" } ng-client-ws = { path = "../../ng-client-ws" }

@ -8,6 +8,7 @@
// according to those terms. // according to those terms.
use async_std::stream::StreamExt; use async_std::stream::StreamExt;
use nextgraph::local_broker::*; use nextgraph::local_broker::*;
use nextgraph::verifier::types::*;
use ng_net::broker::*; use ng_net::broker::*;
use ng_net::types::{ClientInfo, CreateAccountBSP, Invitation}; use ng_net::types::{ClientInfo, CreateAccountBSP, Invitation};
use ng_net::utils::{decode_invitation_string, spawn_and_log_error, Receiver, ResultSend}; use ng_net::utils::{decode_invitation_string, spawn_and_log_error, Receiver, ResultSend};
@ -234,29 +235,39 @@ async fn decode_invitation(invite: String) -> Option<Invitation> {
} }
#[tauri::command(rename_all = "snake_case")] #[tauri::command(rename_all = "snake_case")]
async fn doc_sync_branch(nuri: &str, stream_id: &str, app: tauri::AppHandle) -> Result<(), ()> { async fn app_request_stream(
log_debug!("doc_sync_branch {} {}", nuri, stream_id); session_id: u64,
request: AppRequest,
stream_id: &str,
app: tauri::AppHandle,
) -> Result<(), String> {
log_debug!("app request stream {} {:?}", stream_id, request);
let main_window = app.get_window("main").unwrap(); let main_window = app.get_window("main").unwrap();
let mut reader; let reader;
{ {
let mut sender; let cancel;
let mut broker = BROKER.write().await; (reader, cancel) = nextgraph::local_broker::app_request_stream(session_id, request)
(reader, sender) = broker.doc_sync_branch(nuri.to_string().clone()).await; .await
.map_err(|e| e.to_string())?;
broker.tauri_stream_add(stream_id.to_string(), sender); nextgraph::local_broker::tauri_stream_add(stream_id.to_string(), cancel)
.await
.map_err(|e| e.to_string())?;
} }
async fn inner_task( async fn inner_task(
mut reader: Receiver<Commit>, mut reader: Receiver<AppResponse>,
stream_id: String, stream_id: String,
main_window: tauri::Window, main_window: tauri::Window,
) -> ResultSend<()> { ) -> ResultSend<()> {
while let Some(commit) = reader.next().await { while let Some(app_response) = reader.next().await {
main_window.emit(&stream_id, commit).unwrap(); main_window.emit(&stream_id, app_response).unwrap();
} }
BROKER.write().await.tauri_stream_cancel(stream_id); nextgraph::local_broker::tauri_stream_cancel(stream_id)
.await
.map_err(|e| e.to_string())?;
log_debug!("END OF LOOP"); log_debug!("END OF LOOP");
Ok(()) Ok(())
@ -268,13 +279,59 @@ async fn doc_sync_branch(nuri: &str, stream_id: &str, app: tauri::AppHandle) ->
} }
#[tauri::command(rename_all = "snake_case")] #[tauri::command(rename_all = "snake_case")]
async fn cancel_doc_sync_branch(stream_id: &str) -> Result<(), ()> { async fn doc_fetch_private_subscribe() -> Result<AppRequest, String> {
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::Fetch(AppFetchContentV0::get_or_subscribe(true)),
nuri: NuriV0::new_private_store_target(),
payload: None,
});
Ok(request)
}
#[tauri::command(rename_all = "snake_case")]
async fn app_request(
session_id: u64,
request: AppRequest,
app: tauri::AppHandle,
) -> Result<AppResponse, String> {
log_debug!("app request {:?}", request);
nextgraph::local_broker::app_request(session_id, request)
.await
.map_err(|e| e.to_string())
}
#[tauri::command(rename_all = "snake_case")]
async fn upload_chunk(
session_id: u64,
upload_id: u32,
chunk: serde_bytes::ByteBuf,
nuri: NuriV0,
app: tauri::AppHandle,
) -> Result<AppResponse, String> {
log_debug!("upload_chunk {:?}", chunk);
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::FilePut,
nuri,
payload: Some(AppRequestPayload::V0(
AppRequestPayloadV0::RandomAccessFilePutChunk((upload_id, chunk)),
)),
});
nextgraph::local_broker::app_request(session_id, request)
.await
.map_err(|e| e.to_string())
}
#[tauri::command(rename_all = "snake_case")]
async fn cancel_stream(stream_id: &str) -> Result<(), String> {
log_debug!("cancel stream {}", stream_id); log_debug!("cancel stream {}", stream_id);
BROKER Ok(
.write() nextgraph::local_broker::tauri_stream_cancel(stream_id.to_string())
.await .await
.tauri_stream_cancel(stream_id.to_string()); .map_err(|e: NgError| e.to_string())?,
Ok(()) )
} }
#[tauri::command(rename_all = "snake_case")] #[tauri::command(rename_all = "snake_case")]
@ -304,32 +361,6 @@ async fn disconnections_subscribe(app: tauri::AppHandle) -> Result<(), ()> {
Ok(()) Ok(())
} }
#[tauri::command(rename_all = "snake_case")]
async fn doc_get_file_from_store_with_object_ref(
nuri: &str,
obj_ref: ObjectRef,
) -> Result<ObjectContent, String> {
log_debug!(
"doc_get_file_from_store_with_object_ref {} {:?}",
nuri,
obj_ref
);
// let ret = ObjectContent::File(File::V0(FileV0 {
// content_type: "text/plain".to_string(),
// metadata: vec![],
// content: vec![45; 20],
// }));
// Ok(ret)
let obj_content = BROKER
.write()
.await
.get_object_from_store_with_object_ref(nuri.to_string(), obj_ref)
.await
.map_err(|e| e.to_string())?;
Ok(obj_content)
}
#[tauri::command(rename_all = "snake_case")] #[tauri::command(rename_all = "snake_case")]
async fn session_stop(user_id: UserId) -> Result<(), String> { async fn session_stop(user_id: UserId) -> Result<(), String> {
nextgraph::local_broker::session_stop(&user_id) nextgraph::local_broker::session_stop(&user_id)
@ -438,9 +469,6 @@ impl AppBuilder {
.plugin(tauri_plugin_window::init()) .plugin(tauri_plugin_window::init())
.invoke_handler(tauri::generate_handler![ .invoke_handler(tauri::generate_handler![
test, test,
doc_sync_branch,
cancel_doc_sync_branch,
doc_get_file_from_store_with_object_ref,
wallet_gen_shuffle_for_pazzle_opening, wallet_gen_shuffle_for_pazzle_opening,
wallet_gen_shuffle_for_pin, wallet_gen_shuffle_for_pin,
wallet_open_with_pazzle, wallet_open_with_pazzle,
@ -461,6 +489,11 @@ impl AppBuilder {
user_connect, user_connect,
user_disconnect, user_disconnect,
client_info_rust, client_info_rust,
doc_fetch_private_subscribe,
cancel_stream,
app_request_stream,
app_request,
upload_chunk,
]) ])
.run(tauri::generate_context!()) .run(tauri::generate_context!())
.expect("error while running tauri application"); .expect("error while running tauri application");

@ -13,7 +13,6 @@ import {version} from '../package.json';
const mapping = { const mapping = {
"doc_get_file_from_store_with_object_ref": [ "nuri","obj_ref" ],
"wallet_gen_shuffle_for_pazzle_opening": ["pazzle_length"], "wallet_gen_shuffle_for_pazzle_opening": ["pazzle_length"],
"wallet_gen_shuffle_for_pin": [], "wallet_gen_shuffle_for_pin": [],
"wallet_open_with_pazzle": ["wallet","pazzle","pin"], "wallet_open_with_pazzle": ["wallet","pazzle","pin"],
@ -32,7 +31,9 @@ const mapping = {
"decode_invitation": ["invite"], "decode_invitation": ["invite"],
"user_connect": ["info","user_id","location"], "user_connect": ["info","user_id","location"],
"user_disconnect": ["user_id"], "user_disconnect": ["user_id"],
"test": [ ] "app_request": ["session_id","request"],
"test": [ ],
"doc_fetch_private_subscribe": []
} }
@ -113,29 +114,25 @@ const handler = {
e[1].since = new Date(e[1].since); e[1].since = new Date(e[1].since);
} }
return ret; return ret;
}else if (path[0] === "doc_sync_branch") { }
else if (path[0] === "app_request_stream") {
let stream_id = (lastStreamId += 1).toString(); let stream_id = (lastStreamId += 1).toString();
console.log("stream_id",stream_id); console.log("stream_id",stream_id);
let { getCurrent } = await import("@tauri-apps/plugin-window"); let { getCurrent } = await import("@tauri-apps/plugin-window");
let nuri = args[0]; let session_id = args[0];
let callback = args[1]; let request = args[1];
let callback = args[2];
let unlisten = await getCurrent().listen(stream_id, (event) => { let unlisten = await getCurrent().listen(stream_id, (event) => {
callback(event.payload).then(()=> {}) callback(event.payload).then(()=> {})
}) })
await tauri.invoke("doc_sync_branch",{nuri, stream_id}); await tauri.invoke("app_request_stream",{session_id, stream_id, request});
return () => { return () => {
unlisten(); unlisten();
tauri.invoke("cancel_doc_sync_branch", {stream_id}); tauri.invoke("cancel_stream", {stream_id});
} }
} else if (path[0] === "doc_get_file_from_store_with_object_ref") {
let arg = {};
args.map((el,ix) => arg[mapping[path[0]][ix]]=el)
let res = await tauri.invoke(path[0],arg);
res['File'].V0.content = Uint8Array.from(res['File'].V0.content);
res['File'].V0.metadata = Uint8Array.from(res['File'].V0.metadata);
return res;
} else if (path[0] === "get_wallets") { } else if (path[0] === "get_wallets") {
let res = await tauri.invoke(path[0],{}); let res = await tauri.invoke(path[0],{});
if (res) for (let e of Object.entries(res)) { if (res) for (let e of Object.entries(res)) {
@ -143,6 +140,13 @@ const handler = {
} }
return res || {}; return res || {};
} else if (path[0] === "upload_chunk") {
let session_id = args[0];
let upload_id = args[1];
let chunk = args[2];
let nuri = args[3];
chunk = Array.from(new Uint8Array(chunk));
return await tauri.invoke(path[0],{session_id, upload_id, chunk, nuri})
} else if (path[0] === "wallet_create") { } else if (path[0] === "wallet_create") {
let params = args[0]; let params = args[0];
params.result_with_wallet_file = false; params.result_with_wallet_file = false;

@ -12,6 +12,7 @@
<script lang="ts"> <script lang="ts">
import { online } from "../store"; import { online } from "../store";
import FullLayout from "./FullLayout.svelte"; import FullLayout from "./FullLayout.svelte";
import Test from "./Test.svelte";
import { PaperAirplane, Bell, ArrowRightOnRectangle } from "svelte-heros-v2"; import { PaperAirplane, Bell, ArrowRightOnRectangle } from "svelte-heros-v2";
// @ts-ignore // @ts-ignore
import Logo from "../assets/nextgraph.svg?component"; import Logo from "../assets/nextgraph.svg?component";
@ -76,5 +77,7 @@
</nav> </nav>
{/if} {/if}
<div /> <div />
<Test />
</FullLayout> </FullLayout>
<svelte:window bind:innerWidth={width} /> <svelte:window bind:innerWidth={width} />

@ -11,75 +11,188 @@
<script lang="ts"> <script lang="ts">
import ng from "../api"; import ng from "../api";
import branch_commits from "../store"; import { branch_subs, active_session } from "../store";
let name = ""; import { link } from "svelte-spa-router";
let greetMsg = "";
let commits = branch_commits("ok", false); let files = branch_subs("ok");
let img_map = {}; let img_map = {};
async function get_img(ref) { async function get_img(ref) {
if (!ref) return false; if (!ref) return false;
let cache = img_map[ref]; let cache = img_map[ref.nuri];
if (cache) { if (cache) {
return cache; return cache;
} }
let prom = new Promise(async (resolve) => {
try { try {
//console.log(JSON.stringify(ref)); let nuri = {
let file = await ng.doc_get_file_from_store_with_object_ref("ng:", ref); target: "PrivateStore",
//console.log(file); entire_store: false,
var blob = new Blob([file["File"].V0.content], { access: [{ Key: ref.reference.key }],
type: file["File"].V0.content_type, locator: [],
object: ref.reference.id,
};
let file_request = {
V0: {
command: "FileGet",
nuri,
},
};
let final_blob;
let content_type;
let unsub = await ng.app_request_stream(
$active_session.session_id,
file_request,
async (blob) => {
//console.log("GOT APP RESPONSE", blob);
if (blob.V0.FileMeta) {
content_type = blob.V0.FileMeta.content_type;
final_blob = new Blob([], { type: content_type });
} else if (blob.V0.FileBinary) {
if (blob.V0.FileBinary.byteLength > 0) {
final_blob = new Blob([final_blob, blob.V0.FileBinary], {
type: content_type,
}); });
var imageUrl = URL.createObjectURL(blob); } else {
img_map[ref] = imageUrl; var imageUrl = URL.createObjectURL(final_blob);
return imageUrl;
resolve(imageUrl);
}
}
}
);
} catch (e) { } catch (e) {
console.error(e); console.error(e);
return false; resolve(false);
} }
});
img_map[ref.nuri] = prom;
return prom;
} }
async function greet() { let fileinput;
//greetMsg = await ng.create_wallet(name);
// cancel = await ng.doc_sync_branch("ok", async (commit) => { function uploadFile(upload_id, nuri, file, success) {
// console.log(commit); let chunkSize = 1024 * 1024;
// try { let fileSize = file.size;
// let file = await ng.doc_get_file_from_store_with_object_ref( let offset = 0;
// "ng:", let readBlock = null;
// commit.V0.content.refs[0]
// ); let onLoadHandler = async function (event) {
// console.log(file); let result = event.target.result;
// var blob = new Blob([file["File"].V0.content], {
// type: file["File"].V0.content_type, if (event.target.error == null) {
// }); offset += event.target.result.byteLength;
// var imageUrl = URL.createObjectURL(blob); //console.log("chunk", event.target.result);
// url = imageUrl;
// } catch (e) { let res = await ng.upload_chunk(
// console.error(e); $active_session.session_id,
upload_id,
event.target.result,
nuri
);
//console.log("chunk upload res", res);
// if (onChunkRead) {
// onChunkRead(result);
// }
} else {
// if (onChunkError) {
// onChunkError(event.target.error);
// } // }
// }); return;
//cancel();
} }
let fileinput; if (offset >= fileSize) {
//console.log("file uploaded");
let res = await ng.upload_chunk(
$active_session.session_id,
upload_id,
[],
nuri
);
//console.log("end upload res", res);
if (success) {
success(res);
}
return;
}
readBlock(offset, chunkSize, file);
};
readBlock = function (offset, length, file) {
let fileReader = new FileReader();
let blob = file.slice(offset, length + offset);
fileReader.onload = onLoadHandler;
fileReader.readAsArrayBuffer(blob);
};
const onFileSelected = (e) => { readBlock(offset, chunkSize, file);
return;
}
const onFileSelected = async (e) => {
let image = e.target.files[0]; let image = e.target.files[0];
let reader = new FileReader(); if (!image) return;
reader.readAsArrayBuffer(image); //console.log(image);
reader.onload = (e) => {
console.log(e.target.result); let nuri = {
target: "PrivateStore",
entire_store: false,
access: [],
locator: [],
}; };
let start_request = {
V0: {
command: "FilePut",
nuri,
payload: {
V0: {
RandomAccessFilePut: image.type,
},
},
},
};
let start_res = await ng.app_request(
$active_session.session_id,
start_request
);
let upload_id = start_res.V0.FileUploading;
uploadFile(upload_id, nuri, image, async (reference) => {
if (reference) {
let request = {
V0: {
command: "FilePut",
nuri,
payload: {
V0: {
AddFile: {
filename: image.name,
object: reference.V0.FileUploaded,
},
},
},
},
};
await ng.app_request($active_session.session_id, request);
}
});
fileinput.value = "";
}; };
</script> </script>
<div> <div>
<!-- <div class="row">
<input id="greet-input" placeholder="Enter a name..." bind:value={name} />
<button on:click={greet}> Greet </button>
</div> -->
<div class="row mt-2"> <div class="row mt-2">
<!-- <a use:link href="/">
<button tabindex="-1" class=" mr-5 select-none"> Back home </button>
</a> -->
<button <button
type="button" type="button"
on:click={() => { on:click={() => {
@ -112,13 +225,15 @@
bind:this={fileinput} bind:this={fileinput}
/> />
</div> </div>
<!-- <p>{greetMsg}</p> -->
{#await commits.load()} {#await files.load()}
<p>Currently loading...</p> <p>Currently loading...</p>
{:then} {:then}
{#each $commits as commit} {#each $files as file}
<p> <p>
{#await get_img(commit.V0.header.V0.files[0]) then url} {file.V0.File.name}<br />
did:ng{file.V0.File.nuri}
{#await get_img(file.V0.File) then url}
{#if url} {#if url}
<img src={url} /> <img src={url} />
{/if} {/if}

@ -12,11 +12,27 @@
<script lang="ts"> <script lang="ts">
import Test from "../lib/Test.svelte"; import Test from "../lib/Test.svelte";
export let params = {}; export let params = {};
import { active_session } from "../store";
import { onMount, tick } from "svelte";
import { link, push } from "svelte-spa-router";
let top;
async function scrollToTop() {
await tick();
top.scrollIntoView();
}
onMount(async () => {
if (!$active_session) {
push("#/");
} else {
await scrollToTop();
}
});
</script> </script>
<main class="container3"> <main class="container3">
<h1>Welcome to test</h1> <div class="row" bind:this={top}>
<div class="row">
<Test /> <Test />
</div> </div>
</main> </main>

@ -127,7 +127,7 @@ can_connect.subscribe(async (value) => {
} }
}); });
const branch_commits = (nura, sub) => { export const branch_subs = function(nura) {
// console.log("branch_commits") // console.log("branch_commits")
// const { subscribe, set, update } = writable([]); // create the underlying writable store // const { subscribe, set, update } = writable([]); // create the underlying writable store
@ -162,8 +162,10 @@ const branch_commits = (nura, sub) => {
let already_subscribed = all_branches[nura]; let already_subscribed = all_branches[nura];
if (!already_subscribed) return; if (!already_subscribed) return;
if (already_subscribed.load) { if (already_subscribed.load) {
await already_subscribed.load(); let loader = already_subscribed.load;
already_subscribed.load = undefined; already_subscribed.load = undefined;
await loader();
} }
}, },
subscribe: (run, invalid) => { subscribe: (run, invalid) => {
@ -175,10 +177,22 @@ const branch_commits = (nura, sub) => {
let unsub = () => {}; let unsub = () => {};
already_subscribed = { already_subscribed = {
load: async () => { load: async () => {
unsub = await ng.doc_sync_branch(nura, async (commit) => { try {
console.log("GOT COMMIT", commit); let session = get(active_session);
if (!session) {
console.error("no session");
return;
}
await unsub();
unsub = await ng.app_request_stream(session.session_id, await ng.doc_fetch_private_subscribe(),
async (commit) => {
//console.log("GOT APP RESPONSE", commit);
update( (old) => {old.unshift(commit); return old;} ) update( (old) => {old.unshift(commit); return old;} )
}); });
}
catch (e) {
console.error(e);
}
// this is in case decrease has been called before the load function returned. // this is in case decrease has been called before the load function returned.
if (count == 0) {unsub();} if (count == 0) {unsub();}
}, },
@ -186,10 +200,10 @@ const branch_commits = (nura, sub) => {
count += 1; count += 1;
return readonly({subscribe}); return readonly({subscribe});
}, },
decrease: () => { decrease: async () => {
count -= 1; count -= 1;
if (count == 0) { if (count == 0) {
unsub(); await unsub();
delete all_branches[nura]; delete all_branches[nura];
} }
}, },
@ -199,13 +213,13 @@ const branch_commits = (nura, sub) => {
let new_store = already_subscribed.increase(); let new_store = already_subscribed.increase();
let read_unsub = new_store.subscribe(run, invalid); let read_unsub = new_store.subscribe(run, invalid);
return () => { return async () => {
read_unsub(); read_unsub();
already_subscribed.decrease(); await already_subscribed.decrease();
} }
} }
} }
}; };
export default branch_commits; //export default branch_commits;

@ -455,7 +455,7 @@ impl RocksDbServerStorage {
) -> Result<TopicSubRes, ServerError> { ) -> Result<TopicSubRes, ServerError> {
let overlay = self.check_overlay(overlay)?; let overlay = self.check_overlay(overlay)?;
// now we check that the repo was previously pinned. // now we check that the repo was previously pinned.
// if it was opened but not pinned, then this should be deal with in the ServerBroker, in memory, not here) // if it was opened but not pinned, then this should be dealt with in the ServerBroker, in memory, not here)
let is_publisher = publisher.is_some(); let is_publisher = publisher.is_some();
// (we already checked that the advert is valid) // (we already checked that the advert is valid)
@ -502,7 +502,45 @@ impl RocksDbServerStorage {
Ok(blocks) Ok(blocks)
} }
fn add_block( pub(crate) fn has_block(
&self,
overlay: &OverlayId,
block_id: &BlockId,
) -> Result<(), ServerError> {
let overlay = self.check_overlay(overlay)?;
let overlay = &overlay;
Ok(self.block_storage.read().unwrap().has(overlay, block_id)?)
}
pub(crate) fn get_block(
&self,
overlay: &OverlayId,
block_id: &BlockId,
) -> Result<Block, ServerError> {
let overlay = self.check_overlay(overlay)?;
let overlay = &overlay;
Ok(self.block_storage.read().unwrap().get(overlay, block_id)?)
}
    /// Stores a single block in this overlay's storage.
    ///
    /// Rejects outer overlays, validates the overlay id, then delegates to
    /// the internal `add_block_` with a fresh `OverlayStorage` handle.
    /// Returns the id of the stored block.
    pub(crate) fn add_block(
        &self,
        overlay: &OverlayId,
        block: Block,
    ) -> Result<BlockId, ServerError> {
        if overlay.is_outer() {
            // blocks are not stored on the outer overlay; reject early
            return Err(ServerError::OverlayMismatch);
        }
        let overlay = self.check_overlay(overlay)?;
        let overlay = &overlay;
        let mut overlay_storage = OverlayStorage::new(overlay, &self.core_storage);
        Ok(self.add_block_(overlay, &mut overlay_storage, block)?)
    }
fn add_block_(
&self, &self,
overlay_id: &OverlayId, overlay_id: &OverlayId,
overlay_storage: &mut OverlayStorage, overlay_storage: &mut OverlayStorage,
@ -522,7 +560,7 @@ impl RocksDbServerStorage {
overlay: &OverlayId, overlay: &OverlayId,
mut event: Event, mut event: Event,
user_id: &UserId, user_id: &UserId,
) -> Result<(), ServerError> { ) -> Result<TopicId, ServerError> {
if overlay.is_outer() { if overlay.is_outer() {
// we don't publish events on the outer overlay! // we don't publish events on the outer overlay!
return Err(ServerError::OverlayMismatch); return Err(ServerError::OverlayMismatch);
@ -532,9 +570,10 @@ impl RocksDbServerStorage {
// TODO: check that the sequence number is correct // TODO: check that the sequence number is correct
let topic = *event.topic_id();
// check that the topic exists and that this user has pinned it as publisher // check that the topic exists and that this user has pinned it as publisher
let mut topic_storage = TopicStorage::open(event.topic_id(), overlay, &self.core_storage) let mut topic_storage =
.map_err(|e| match e { TopicStorage::open(&topic, overlay, &self.core_storage).map_err(|e| match e {
StorageError::NotFound => ServerError::TopicNotFound, StorageError::NotFound => ServerError::TopicNotFound,
_ => e.into(), _ => e.into(),
})?; })?;
@ -557,7 +596,7 @@ impl RocksDbServerStorage {
let temp_mini_block_storage = HashMapBlockStorage::new(); let temp_mini_block_storage = HashMapBlockStorage::new();
for block in v0.content.blocks { for block in v0.content.blocks {
let _ = temp_mini_block_storage.put(overlay, &block, false)?; let _ = temp_mini_block_storage.put(overlay, &block, false)?;
extracted_blocks_ids.push(self.add_block( extracted_blocks_ids.push(self.add_block_(
overlay, overlay,
&mut overlay_storage, &mut overlay_storage,
block, block,
@ -604,7 +643,7 @@ impl RocksDbServerStorage {
} }
} }
Ok(()) Ok(topic)
} }
pub(crate) fn topic_sync_req( pub(crate) fn topic_sync_req(

@ -21,6 +21,8 @@ use ng_repo::{
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ng_repo::log::*;
use crate::rocksdb_server_storage::RocksDbServerStorage; use crate::rocksdb_server_storage::RocksDbServerStorage;
pub struct TopicInfo { pub struct TopicInfo {
@ -113,6 +115,8 @@ pub struct ServerBroker {
overlays: HashMap<OverlayId, OverlayInfo>, overlays: HashMap<OverlayId, OverlayInfo>,
inner_overlays: HashMap<OverlayId, Option<OverlayId>>, inner_overlays: HashMap<OverlayId, Option<OverlayId>>,
local_subscriptions: HashMap<(OverlayId, TopicId), HashSet<PubKey>>,
} }
impl ServerBroker { impl ServerBroker {
@ -121,22 +125,77 @@ impl ServerBroker {
storage: storage, storage: storage,
overlays: HashMap::new(), overlays: HashMap::new(),
inner_overlays: HashMap::new(), inner_overlays: HashMap::new(),
local_subscriptions: HashMap::new(),
} }
} }
pub fn load(&mut self) -> Result<(), NgError> { pub fn load(&mut self) -> Result<(), NgError> {
Ok(()) Ok(())
} }
fn add_subscription(
&mut self,
overlay: OverlayId,
topic: TopicId,
peer: PubKey,
) -> Result<(), ServerError> {
let peers_set = self
.local_subscriptions
.entry((overlay, topic))
.or_insert(HashSet::with_capacity(1));
log_debug!(
"SUBSCRIBING PEER {} TOPIC {} OVERLAY {}",
peer,
topic,
overlay
);
if !peers_set.insert(peer) {
//return Err(ServerError::PeerAlreadySubscribed);
}
Ok(())
}
fn remove_subscription(
&mut self,
overlay: &OverlayId,
topic: &TopicId,
peer: &PubKey,
) -> Result<(), ServerError> {
let peers_set = self
.local_subscriptions
.get_mut(&(*overlay, *topic))
.ok_or(ServerError::SubscriptionNotFound)?;
if !peers_set.remove(peer) {
return Err(ServerError::SubscriptionNotFound);
}
Ok(())
}
} }
//TODO: the purpose of this trait is to have a level of indirection so we can keep some data in memory (cache) and avoid hitting the storage backend (rocksdb) at every call. //TODO: the purpose of this trait is to have a level of indirection so we can keep some data in memory (cache) and avoid hitting the storage backend (rocksdb) at every call.
//for now this cache is not implemented, but the structs are ready (see above), and it would just require to change slightly the implementation of the trait functions here below. //for now this cache is not implemented, but the structs are ready (see above), and it would just require to change slightly the implementation of the trait functions here below.
impl IServerBroker for ServerBroker { impl IServerBroker for ServerBroker {
fn has_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result<(), ServerError> {
self.storage.has_block(overlay_id, block_id)
}
fn get_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result<Block, ServerError> {
self.storage.get_block(overlay_id, block_id)
}
fn next_seq_for_peer(&self, peer: &PeerId, seq: u64) -> Result<(), ServerError> { fn next_seq_for_peer(&self, peer: &PeerId, seq: u64) -> Result<(), ServerError> {
self.storage.next_seq_for_peer(peer, seq) self.storage.next_seq_for_peer(peer, seq)
} }
fn put_block(&self, overlay_id: &OverlayId, block: Block) -> Result<(), ServerError> {
self.storage.add_block(overlay_id, block)?;
Ok(())
}
fn get_user(&self, user_id: PubKey) -> Result<bool, ProtocolError> { fn get_user(&self, user_id: PubKey) -> Result<bool, ProtocolError> {
self.storage.get_user(user_id) self.storage.get_user(user_id)
} }
@ -181,7 +240,7 @@ impl IServerBroker for ServerBroker {
} }
fn pin_repo_write( fn pin_repo_write(
&self, &mut self,
overlay: &OverlayAccess, overlay: &OverlayAccess,
repo: &RepoHash, repo: &RepoHash,
user_id: &UserId, user_id: &UserId,
@ -189,8 +248,9 @@ impl IServerBroker for ServerBroker {
rw_topics: &Vec<PublisherAdvert>, rw_topics: &Vec<PublisherAdvert>,
overlay_root_topic: &Option<TopicId>, overlay_root_topic: &Option<TopicId>,
expose_outer: bool, expose_outer: bool,
peer: &PubKey,
) -> Result<RepoOpened, ServerError> { ) -> Result<RepoOpened, ServerError> {
self.storage.pin_repo_write( let res = self.storage.pin_repo_write(
overlay, overlay,
repo, repo,
user_id, user_id,
@ -198,30 +258,50 @@ impl IServerBroker for ServerBroker {
rw_topics, rw_topics,
overlay_root_topic, overlay_root_topic,
expose_outer, expose_outer,
) )?;
for topic in res.iter() {
self.add_subscription(
*overlay.overlay_id_for_client_protocol_purpose(),
*topic.topic_id(),
*peer,
)?;
}
Ok(res)
} }
fn pin_repo_read( fn pin_repo_read(
&self, &mut self,
overlay: &OverlayId, overlay: &OverlayId,
repo: &RepoHash, repo: &RepoHash,
user_id: &UserId, user_id: &UserId,
ro_topics: &Vec<TopicId>, ro_topics: &Vec<TopicId>,
peer: &PubKey,
) -> Result<RepoOpened, ServerError> { ) -> Result<RepoOpened, ServerError> {
self.storage let res = self
.pin_repo_read(overlay, repo, user_id, ro_topics) .storage
.pin_repo_read(overlay, repo, user_id, ro_topics)?;
for topic in res.iter() {
// TODO: those outer subscriptions are not handled yet. they will not emit events.
self.add_subscription(*overlay, *topic.topic_id(), *peer)?;
}
Ok(res)
} }
fn topic_sub( fn topic_sub(
&self, &mut self,
overlay: &OverlayId, overlay: &OverlayId,
repo: &RepoHash, repo: &RepoHash,
topic: &TopicId, topic: &TopicId,
user: &UserId, user: &UserId,
publisher: Option<&PublisherAdvert>, publisher: Option<&PublisherAdvert>,
peer: &PubKey,
) -> Result<TopicSubRes, ServerError> { ) -> Result<TopicSubRes, ServerError> {
self.storage let res = self
.topic_sub(overlay, repo, topic, user, publisher) .storage
.topic_sub(overlay, repo, topic, user, publisher)?;
self.add_subscription(*overlay, *topic, *peer)?;
Ok(res)
} }
fn get_commit(&self, overlay: &OverlayId, id: &ObjectId) -> Result<Vec<Block>, ServerError> { fn get_commit(&self, overlay: &OverlayId, id: &ObjectId) -> Result<Vec<Block>, ServerError> {
@ -233,8 +313,25 @@ impl IServerBroker for ServerBroker {
overlay: &OverlayId, overlay: &OverlayId,
event: Event, event: Event,
user_id: &UserId, user_id: &UserId,
) -> Result<(), ServerError> { remote_peer: &PubKey,
self.storage.save_event(overlay, event, user_id) ) -> Result<HashSet<&PubKey>, ServerError> {
let topic = self.storage.save_event(overlay, event, user_id)?;
log_debug!(
"DISPATCH EVENt {} {} {:?}",
overlay,
topic,
self.local_subscriptions
);
let mut set = self
.local_subscriptions
.get(&(*overlay, topic))
.map(|set| set.iter().collect())
.unwrap_or(HashSet::new());
set.remove(remote_peer);
Ok(set)
} }
fn topic_sync_req( fn topic_sync_req(

@ -36,6 +36,7 @@ either = "1.8.1"
url = "2.4.0" url = "2.4.0"
base64-url = "2.0.0" base64-url = "2.0.0"
web-time = "0.2.0" web-time = "0.2.0"
async-recursion = "1.1.1"
[target.'cfg(target_arch = "wasm32")'.dependencies] [target.'cfg(target_arch = "wasm32")'.dependencies]
reqwest = { version = "0.11.18", features = ["json","native-tls-vendored"] } reqwest = { version = "0.11.18", features = ["json","native-tls-vendored"] }

@ -0,0 +1,102 @@
/*
* Copyright (c) 2022-2024 Niko Bonnieure, Par le Peuple, NextGraph.org developers
* All rights reserved.
* Licensed under the Apache License, Version 2.0
* <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
* or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
* at your option. All files in the project carrying such
* notice may not be copied, modified, or distributed except
* according to those terms.
*/
use crate::broker::{ServerConfig, BROKER};
use crate::connection::NoiseFSM;
use crate::types::*;
use crate::{actor::*, types::ProtocolMessage};
use async_std::sync::Mutex;
use ng_repo::errors::*;
use ng_repo::log::*;
use ng_repo::types::PubKey;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
impl BlocksExist {
    /// Builds the responder actor that will answer this BlocksExist request.
    pub fn get_actor(&self, id: i64) -> Box<dyn EActor> {
        Actor::<BlocksExist, BlocksFound>::new_responder(id)
    }
}
impl TryFrom<ProtocolMessage> for BlocksExist {
    type Error = ProtocolError;
    /// Extracts the client-request payload and accepts only the
    /// BlocksExist variant; anything else is an InvalidValue.
    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
        let req: ClientRequestContentV0 = msg.try_into()?;
        match req {
            ClientRequestContentV0::BlocksExist(a) => Ok(a),
            _ => {
                log_debug!("INVALID {:?}", req);
                Err(ProtocolError::InvalidValue)
            }
        }
    }
}
// Wraps a BlocksExist request into a full client-request protocol message,
// propagating the request's overlay id into the message envelope.
impl From<BlocksExist> for ProtocolMessage {
    fn from(msg: BlocksExist) -> ProtocolMessage {
        let overlay = *msg.overlay();
        ProtocolMessage::from_client_request_v0(ClientRequestContentV0::BlocksExist(msg), overlay)
    }
}
impl TryFrom<ProtocolMessage> for BlocksFound {
    type Error = ProtocolError;
    /// Extracts the client-response payload and accepts only the
    /// BlocksFound variant; anything else is an InvalidValue.
    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
        let res: ClientResponseContentV0 = msg.try_into()?;
        match res {
            ClientResponseContentV0::BlocksFound(a) => Ok(a),
            _ => {
                log_debug!("INVALID {:?}", res);
                Err(ProtocolError::InvalidValue)
            }
        }
    }
}
impl From<BlocksFound> for ProtocolMessage {
    /// Wraps the result in the client-response envelope and relies on the
    /// generic content-to-message conversion.
    fn from(b: BlocksFound) -> ProtocolMessage {
        let content = ClientResponseContentV0::BlocksFound(b);
        content.into()
    }
}
impl Actor<'_, BlocksExist, BlocksFound> {}
#[async_trait::async_trait]
impl EActor for Actor<'_, BlocksExist, BlocksFound> {
    /// Answers a BlocksExist query: probes the server block store for every
    /// requested id and replies with a BlocksFound message partitioning the
    /// ids into present and absent.
    async fn respond(
        &mut self,
        msg: ProtocolMessage,
        fsm: Arc<Mutex<NoiseFSM>>,
    ) -> Result<(), ProtocolError> {
        let req = BlocksExist::try_from(msg)?;
        let broker = BROKER.read().await;
        let overlay = req.overlay().clone();
        let mut found = vec![];
        let mut missing = vec![];
        match req {
            BlocksExist::V0(v0) => {
                for block_id in v0.blocks {
                    // Any storage error (typically NotFound) counts as absent.
                    match broker.get_server_broker()?.has_block(&overlay, &block_id) {
                        Ok(()) => found.push(block_id),
                        Err(_) => missing.push(block_id),
                    }
                }
            }
        }
        let res = Ok(BlocksFound::V0(BlocksFoundV0 { found, missing }));
        fsm.lock()
            .await
            .send_in_reply_to(res.into(), self.id())
            .await?;
        Ok(())
    }
}

@ -0,0 +1,129 @@
/*
* Copyright (c) 2022-2024 Niko Bonnieure, Par le Peuple, NextGraph.org developers
* All rights reserved.
* Licensed under the Apache License, Version 2.0
* <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
* or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
* at your option. All files in the project carrying such
* notice may not be copied, modified, or distributed except
* according to those terms.
*/
use crate::broker::{ServerConfig, BROKER};
use crate::connection::NoiseFSM;
use crate::server_broker::IServerBroker;
use crate::types::*;
use crate::{actor::*, types::ProtocolMessage};
use async_recursion::async_recursion;
use async_std::sync::{Mutex, MutexGuard};
use ng_repo::errors::*;
use ng_repo::log::*;
use ng_repo::types::{Block, BlockId, OverlayId, PubKey};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
impl BlocksGet {
    /// Builds the responder actor that will answer this BlocksGet request.
    pub fn get_actor(&self, id: i64) -> Box<dyn EActor> {
        Actor::<BlocksGet, Block>::new_responder(id)
    }

    /// Returns the overlay this request targets.
    // NOTE(review): unwraps the optional overlay field — callers are expected
    // to have populated it (e.g. via set_overlay) before calling, otherwise
    // this panics. TODO confirm against call sites.
    pub fn overlay(&self) -> &OverlayId {
        match self {
            Self::V0(v0) => v0.overlay.as_ref().unwrap(),
        }
    }

    /// Sets the overlay field of the request.
    pub fn set_overlay(&mut self, overlay: OverlayId) {
        match self {
            Self::V0(v0) => v0.overlay = Some(overlay),
        }
    }
}
impl TryFrom<ProtocolMessage> for BlocksGet {
    type Error = ProtocolError;
    /// Extracts the client-request payload and accepts only the
    /// BlocksGet variant; anything else is an InvalidValue.
    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
        let req: ClientRequestContentV0 = msg.try_into()?;
        match req {
            ClientRequestContentV0::BlocksGet(a) => Ok(a),
            _ => {
                log_debug!("INVALID {:?}", req);
                Err(ProtocolError::InvalidValue)
            }
        }
    }
}
// Wraps a BlocksGet request into a full client-request protocol message,
// propagating the request's overlay id into the message envelope.
impl From<BlocksGet> for ProtocolMessage {
    fn from(msg: BlocksGet) -> ProtocolMessage {
        let overlay = *msg.overlay();
        ProtocolMessage::from_client_request_v0(ClientRequestContentV0::BlocksGet(msg), overlay)
    }
}
impl Actor<'_, BlocksGet, Block> {}
#[async_trait::async_trait]
impl EActor for Actor<'_, BlocksGet, Block> {
    /// Streams the requested blocks back to the client.
    ///
    /// Each block found in storage is sent as an individual reply; when
    /// `include_children` is set, the children of every sent block are
    /// fetched and streamed recursively (depth-first). The stream is closed
    /// with `EndOfStream` if at least one block was sent, or `NotFound`
    /// if nothing could be sent at all.
    async fn respond(
        &mut self,
        msg: ProtocolMessage,
        fsm: Arc<Mutex<NoiseFSM>>,
    ) -> Result<(), ProtocolError> {
        let req = BlocksGet::try_from(msg)?;
        let broker = BROKER.read().await;
        let server = broker.get_server_broker()?;
        // The FSM lock is held for the whole streaming loop so replies for
        // this request are not interleaved with other traffic.
        let mut lock = fsm.lock().await;
        let mut something_was_sent = false;

        // Recursive helper: send every block in `children`, then (optionally)
        // recurse into each block's own children. async fns cannot recurse
        // without boxing, hence #[async_recursion].
        #[async_recursion]
        async fn process_children(
            children: &Vec<BlockId>,
            server: &Box<dyn IServerBroker + Send + Sync>,
            overlay: &OverlayId,
            lock: &mut MutexGuard<'_, NoiseFSM>,
            req_id: i64,
            include_children: bool,
            something_was_sent: &mut bool,
        ) {
            for block_id in children {
                // Blocks missing from storage are silently skipped.
                if let Ok(block) = server.get_block(overlay, block_id) {
                    // Copy the child ids out before `block` is consumed by into().
                    let grand_children = block.children().to_vec();
                    // A send failure aborts streaming of this sibling list.
                    if let Err(_) = lock.send_in_reply_to(block.into(), req_id).await {
                        break;
                    }
                    *something_was_sent = true;
                    if include_children {
                        process_children(
                            &grand_children,
                            server,
                            overlay,
                            lock,
                            req_id,
                            include_children,
                            something_was_sent,
                        )
                        .await;
                    }
                }
            }
        }
        process_children(
            req.ids(),
            server,
            req.overlay(),
            &mut lock,
            self.id(),
            req.include_children(),
            &mut something_was_sent,
        )
        .await;
        // Terminate the reply stream: NotFound when empty, EndOfStream otherwise.
        if !something_was_sent {
            let re: Result<(), ServerError> = Err(ServerError::NotFound);
            lock.send_in_reply_to(re.into(), self.id()).await?;
        } else {
            let re: Result<(), ServerError> = Err(ServerError::EndOfStream);
            lock.send_in_reply_to(re.into(), self.id()).await?;
        }
        Ok(())
    }
}

@ -0,0 +1,79 @@
/*
* Copyright (c) 2022-2024 Niko Bonnieure, Par le Peuple, NextGraph.org developers
* All rights reserved.
* Licensed under the Apache License, Version 2.0
* <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
* or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
* at your option. All files in the project carrying such
* notice may not be copied, modified, or distributed except
* according to those terms.
*/
use crate::broker::{ServerConfig, BROKER};
use crate::connection::NoiseFSM;
use crate::types::*;
use crate::{actor::*, types::ProtocolMessage};
use async_std::sync::Mutex;
use ng_repo::errors::*;
use ng_repo::log::*;
use ng_repo::types::PubKey;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
impl BlocksPut {
    /// Builds the responder actor that will answer this BlocksPut request.
    pub fn get_actor(&self, id: i64) -> Box<dyn EActor> {
        Actor::<BlocksPut, ()>::new_responder(id)
    }
}
impl TryFrom<ProtocolMessage> for BlocksPut {
    type Error = ProtocolError;
    /// Extracts the client-request payload and accepts only the
    /// BlocksPut variant; anything else is an InvalidValue.
    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
        let req: ClientRequestContentV0 = msg.try_into()?;
        match req {
            ClientRequestContentV0::BlocksPut(a) => Ok(a),
            _ => {
                log_debug!("INVALID {:?}", req);
                Err(ProtocolError::InvalidValue)
            }
        }
    }
}
// Wraps a BlocksPut request into a full client-request protocol message,
// propagating the request's overlay id into the message envelope.
impl From<BlocksPut> for ProtocolMessage {
    fn from(msg: BlocksPut) -> ProtocolMessage {
        let overlay = *msg.overlay();
        ProtocolMessage::from_client_request_v0(ClientRequestContentV0::BlocksPut(msg), overlay)
    }
}
impl Actor<'_, BlocksPut, ()> {}
#[async_trait::async_trait]
impl EActor for Actor<'_, BlocksPut, ()> {
    /// Handles a BlocksPut request: stores each block into the server's
    /// storage for the request's overlay, replying with Ok when all blocks
    /// were stored, or with the first storage error encountered.
    async fn respond(
        &mut self,
        msg: ProtocolMessage,
        fsm: Arc<Mutex<NoiseFSM>>,
    ) -> Result<(), ProtocolError> {
        let req = BlocksPut::try_from(msg)?;
        let broker = BROKER.read().await;
        let mut res: Result<(), ServerError> = Ok(());
        let overlay = req.overlay().clone();
        match req {
            BlocksPut::V0(v0) => {
                for block in v0.blocks {
                    // Stop at the first failure and report it to the client.
                    if let Err(e) = broker.get_server_broker()?.put_block(&overlay, block) {
                        res = Err(e);
                        break;
                    }
                }
            }
        }
        fsm.lock()
            .await
            .send_in_reply_to(res.into(), self.id())
            .await?;
        Ok(())
    }
}

@ -73,6 +73,8 @@ impl EActor for Actor<'_, PublishEvent, ()> {
msg: ProtocolMessage, msg: ProtocolMessage,
fsm: Arc<Mutex<NoiseFSM>>, fsm: Arc<Mutex<NoiseFSM>>,
) -> Result<(), ProtocolError> { ) -> Result<(), ProtocolError> {
#[cfg(not(target_arch = "wasm32"))]
{
let req = PublishEvent::try_from(msg)?; let req = PublishEvent::try_from(msg)?;
// send a ProtocolError if invalid signatures (will disconnect the client) // send a ProtocolError if invalid signatures (will disconnect the client)
@ -80,16 +82,22 @@ impl EActor for Actor<'_, PublishEvent, ()> {
let broker = BROKER.read().await; let broker = BROKER.read().await;
let overlay = req.overlay().clone(); let overlay = req.overlay().clone();
let res = broker.get_server_broker()?.dispatch_event( let (user_id, remote_peer) = {
&overlay, let fsm = fsm.lock().await;
req.take_event(), (
&fsm.lock().await.user_id()?, fsm.user_id()?,
); fsm.remote_peer().ok_or(ProtocolError::ActorError)?,
)
};
let res = broker
.dispatch_event(&overlay, req.take_event(), &user_id, &remote_peer)
.await;
fsm.lock() fsm.lock()
.await .await
.send_in_reply_to(res.into(), self.id()) .send_in_reply_to(res.into(), self.id())
.await?; .await?;
}
Ok(()) Ok(())
} }
} }

@ -9,3 +9,9 @@ pub mod event;
pub mod commit_get; pub mod commit_get;
pub mod topic_sync_req; pub mod topic_sync_req;
pub mod blocks_put;
pub mod blocks_exist;
pub mod blocks_get;

@ -106,7 +106,7 @@ impl EActor for Actor<'_, PinRepo, RepoOpened> {
) -> Result<(), ProtocolError> { ) -> Result<(), ProtocolError> {
let req = PinRepo::try_from(msg)?; let req = PinRepo::try_from(msg)?;
let broker = BROKER.read().await; let mut broker = BROKER.write().await;
// check the validity of the PublisherAdvert(s). this will return a ProtocolError (will close the connection) // check the validity of the PublisherAdvert(s). this will return a ProtocolError (will close the connection)
let server_peer_id = broker.get_config().unwrap().peer_id; let server_peer_id = broker.get_config().unwrap().peer_id;
@ -114,6 +114,14 @@ impl EActor for Actor<'_, PinRepo, RepoOpened> {
pub_ad.verify_for_broker(&server_peer_id)?; pub_ad.verify_for_broker(&server_peer_id)?;
} }
let (user_id, remote_peer) = {
let fsm = fsm.lock().await;
(
fsm.user_id()?,
fsm.remote_peer().ok_or(ProtocolError::ActorError)?,
)
};
let result = { let result = {
match req.overlay_access() { match req.overlay_access() {
OverlayAccess::ReadOnly(r) => { OverlayAccess::ReadOnly(r) => {
@ -124,11 +132,12 @@ impl EActor for Actor<'_, PinRepo, RepoOpened> {
{ {
Err(ServerError::InvalidRequest) Err(ServerError::InvalidRequest)
} else { } else {
broker.get_server_broker()?.pin_repo_read( broker.get_server_broker_mut()?.pin_repo_read(
req.overlay(), req.overlay(),
req.hash(), req.hash(),
&fsm.lock().await.user_id()?, &user_id,
req.ro_topics(), req.ro_topics(),
&remote_peer,
) )
} }
} }
@ -142,14 +151,15 @@ impl EActor for Actor<'_, PinRepo, RepoOpened> {
// TODO add a check on "|| overlay_root_topic.is_none()" because it should be mandatory to have one (not sent by client at the moment) // TODO add a check on "|| overlay_root_topic.is_none()" because it should be mandatory to have one (not sent by client at the moment)
Err(ServerError::InvalidRequest) Err(ServerError::InvalidRequest)
} else { } else {
broker.get_server_broker()?.pin_repo_write( broker.get_server_broker_mut()?.pin_repo_write(
req.overlay_access(), req.overlay_access(),
req.hash(), req.hash(),
&fsm.lock().await.user_id()?, &user_id,
req.ro_topics(), req.ro_topics(),
req.rw_topics(), req.rw_topics(),
req.overlay_root_topic(), req.overlay_root_topic(),
req.expose_outer(), req.expose_outer(),
&remote_peer,
) )
} }
} }
@ -157,14 +167,15 @@ impl EActor for Actor<'_, PinRepo, RepoOpened> {
if !w.is_inner() || req.overlay() != w || req.expose_outer() { if !w.is_inner() || req.overlay() != w || req.expose_outer() {
Err(ServerError::InvalidRequest) Err(ServerError::InvalidRequest)
} else { } else {
broker.get_server_broker()?.pin_repo_write( broker.get_server_broker_mut()?.pin_repo_write(
req.overlay_access(), req.overlay_access(),
req.hash(), req.hash(),
&fsm.lock().await.user_id()?, &user_id,
req.ro_topics(), req.ro_topics(),
req.rw_topics(), req.rw_topics(),
req.overlay_root_topic(), req.overlay_root_topic(),
false, false,
&remote_peer,
) )
} }
} }

@ -36,7 +36,7 @@ impl TopicSub {
)), )),
) )
} else { } else {
(repo.store.outer_overlay(), None) (repo.store.inner_overlay(), None)
}; };
TopicSub::V0(TopicSubV0 { TopicSub::V0(TopicSubV0 {
@ -98,7 +98,7 @@ impl EActor for Actor<'_, TopicSub, TopicSubRes> {
) -> Result<(), ProtocolError> { ) -> Result<(), ProtocolError> {
let req = TopicSub::try_from(msg)?; let req = TopicSub::try_from(msg)?;
let broker = BROKER.read().await; let mut broker = BROKER.write().await;
// check the validity of the PublisherAdvert. this will return a ProtocolError (will close the connection) // check the validity of the PublisherAdvert. this will return a ProtocolError (will close the connection)
if let Some(advert) = req.publisher() { if let Some(advert) = req.publisher() {
@ -106,12 +106,21 @@ impl EActor for Actor<'_, TopicSub, TopicSubRes> {
advert.verify_for_broker(&server_peer_id)?; advert.verify_for_broker(&server_peer_id)?;
} }
let res = broker.get_server_broker()?.topic_sub( let (user_id, remote_peer) = {
let fsm = fsm.lock().await;
(
fsm.user_id()?,
fsm.remote_peer().ok_or(ProtocolError::ActorError)?,
)
};
let res = broker.get_server_broker_mut()?.topic_sub(
req.overlay(), req.overlay(),
req.hash(), req.hash(),
req.topic(), req.topic(),
&fsm.lock().await.user_id()?, &user_id,
req.publisher(), req.publisher(),
&remote_peer,
); );
fsm.lock() fsm.lock()

@ -29,7 +29,7 @@ use ng_repo::object::Object;
use ng_repo::types::*; use ng_repo::types::*;
use ng_repo::utils::generate_keypair; use ng_repo::utils::generate_keypair;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use std::collections::HashMap; use std::collections::{HashMap, HashSet};
use std::path::PathBuf; use std::path::PathBuf;
#[derive(Debug)] #[derive(Debug)]
@ -67,7 +67,7 @@ pub struct ServerConfig {
#[async_trait::async_trait] #[async_trait::async_trait]
pub trait ILocalBroker: Send + Sync + EActor { pub trait ILocalBroker: Send + Sync + EActor {
async fn deliver(&mut self, event: Event); async fn deliver(&mut self, event: Event, overlay: OverlayId, user: UserId);
} }
pub static BROKER: Lazy<Arc<RwLock<Broker>>> = Lazy::new(|| Arc::new(RwLock::new(Broker::new()))); pub static BROKER: Lazy<Arc<RwLock<Broker>>> = Lazy::new(|| Arc::new(RwLock::new(Broker::new())));
@ -87,29 +87,15 @@ pub struct Broker<'a> {
closing: bool, closing: bool,
server_broker: Option<Box<dyn IServerBroker + Send + Sync + 'a>>, server_broker: Option<Box<dyn IServerBroker + Send + Sync + 'a>>,
tauri_streams: HashMap<String, Sender<Commit>>,
disconnections_sender: Sender<String>, disconnections_sender: Sender<String>,
disconnections_receiver: Option<Receiver<String>>, disconnections_receiver: Option<Receiver<String>>,
//local_broker: Option<Box<dyn ILocalBroker + Send + Sync + 'a>>, //local_broker: Option<Box<dyn ILocalBroker + Send + Sync + 'a>>,
local_broker: Option<Arc<RwLock<dyn ILocalBroker + 'a>>>, local_broker: Option<Arc<RwLock<dyn ILocalBroker + 'a>>>,
users_peers: HashMap<UserId, HashSet<X25519PubKey>>,
} }
impl<'a> Broker<'a> { impl<'a> Broker<'a> {
/// helper function to store the sender of a tauri stream in order to be able to cancel it later on
/// only used in Tauri, not used in the JS SDK
pub fn tauri_stream_add(&mut self, stream_id: String, sender: Sender<Commit>) {
self.tauri_streams.insert(stream_id, sender);
}
/// helper function to cancel a tauri stream
/// only used in Tauri, not used in the JS SDK
pub fn tauri_stream_cancel(&mut self, stream_id: String) {
let s = self.tauri_streams.remove(&stream_id);
if let Some(sender) = s {
sender.close_channel();
}
}
// pub fn init_local_broker( // pub fn init_local_broker(
// &mut self, // &mut self,
// base_path: Option<PathBuf>, // base_path: Option<PathBuf>,
@ -186,6 +172,16 @@ impl<'a> Broker<'a> {
.as_ref() .as_ref()
.ok_or(ProtocolError::BrokerError) .ok_or(ProtocolError::BrokerError)
} }
pub fn get_server_broker_mut(
&mut self,
) -> Result<&mut Box<dyn IServerBroker + Send + Sync + 'a>, ProtocolError> {
//log_debug!("GET STORAGE {:?}", self.server_storage);
self.server_broker
.as_mut()
.ok_or(ProtocolError::BrokerError)
}
//Option<Arc<RwLock<dyn ILocalBroker>>>, //Option<Arc<RwLock<dyn ILocalBroker>>>,
pub fn get_local_broker(&self) -> Result<Arc<RwLock<dyn ILocalBroker + 'a>>, NgError> { pub fn get_local_broker(&self) -> Result<Arc<RwLock<dyn ILocalBroker + 'a>>, NgError> {
Ok(Arc::clone( Ok(Arc::clone(
@ -301,14 +297,6 @@ impl<'a> Broker<'a> {
} }
} }
// pub fn add_user(&self, user: PubKey, is_admin: bool) -> Result<(), ProtocolError> {
// self.get_server_broker()?.add_user(user, is_admin)
// }
// pub fn list_users(&self, admins: bool) -> Result<Vec<PubKey>, ProtocolError> {
// self.get_server_broker()?.list_users(admins)
// }
pub async fn get_block_from_store_with_block_id( pub async fn get_block_from_store_with_block_id(
&mut self, &mut self,
nuri: String, nuri: String,
@ -357,54 +345,53 @@ impl<'a> Broker<'a> {
// .map_err(|_| ProtocolError::ObjectParseError) // .map_err(|_| ProtocolError::ObjectParseError)
} }
pub async fn doc_sync_branch(&mut self, anuri: String) -> (Receiver<Commit>, Sender<Commit>) { // pub async fn doc_sync_branch(&mut self, anuri: String) -> (Receiver<Commit>, Sender<Commit>) {
let (tx, rx) = mpsc::unbounded::<Commit>(); // let obj_ref = ObjectRef {
// id: ObjectId::Blake3Digest32([
let obj_ref = ObjectRef { // 228, 228, 181, 117, 36, 206, 41, 223, 130, 96, 85, 195, 104, 137, 78, 145, 42, 176,
id: ObjectId::Blake3Digest32([ // 58, 244, 111, 97, 246, 39, 11, 76, 135, 150, 188, 111, 66, 33,
228, 228, 181, 117, 36, 206, 41, 223, 130, 96, 85, 195, 104, 137, 78, 145, 42, 176, // ]),
58, 244, 111, 97, 246, 39, 11, 76, 135, 150, 188, 111, 66, 33, // key: SymKey::ChaCha20Key([
]), // 100, 243, 39, 242, 203, 131, 102, 50, 9, 54, 248, 113, 4, 160, 28, 45, 73, 56, 217,
key: SymKey::ChaCha20Key([ // 112, 95, 150, 144, 137, 9, 57, 106, 5, 39, 202, 146, 94,
100, 243, 39, 242, 203, 131, 102, 50, 9, 54, 248, 113, 4, 160, 28, 45, 73, 56, 217, // ]),
112, 95, 150, 144, 137, 9, 57, 106, 5, 39, 202, 146, 94, // };
]), // let refs = vec![obj_ref.clone()];
}; // let metadata = vec![5u8; 55];
let refs = vec![obj_ref.clone()];
let metadata = vec![5u8; 55]; // let (member_privkey, member_pubkey) = generate_keypair();
let (member_privkey, member_pubkey) = generate_keypair(); // let overlay = OverlayId::nil();
let overlay = OverlayId::nil(); // let commit = Commit::new(
// &member_privkey,
let commit = Commit::new( // &member_pubkey,
&member_privkey, // overlay,
&member_pubkey, // PubKey::nil(),
overlay, // QuorumType::NoSigning,
PubKey::nil(), // vec![],
QuorumType::NoSigning, // vec![],
vec![], // vec![],
vec![], // vec![],
vec![], // refs,
vec![], // vec![],
refs, // metadata,
vec![], // obj_ref.clone(),
metadata, // )
obj_ref.clone(), // .unwrap();
) // let (tx, rx) = mpsc::unbounded::<Commit>();
.unwrap(); // async fn send(mut tx: Sender<Commit>, commit: Commit) -> ResultSend<()> {
async fn send(mut tx: Sender<Commit>, commit: Commit) -> ResultSend<()> { // while let Ok(_) = tx.send(commit.clone()).await {
while let Ok(_) = tx.send(commit.clone()).await { // log_debug!("sending");
log_debug!("sending"); // sleep!(std::time::Duration::from_secs(3));
sleep!(std::time::Duration::from_secs(3)); // }
} // log_debug!("end of sending");
log_debug!("end of sending"); // Ok(())
Ok(()) // }
} // spawn_and_log_error(send(tx.clone(), commit));
spawn_and_log_error(send(tx.clone(), commit));
(rx, tx.clone()) // (rx, tx.clone())
} // }
pub fn reconnecting(&mut self, peer_id: X25519PrivKey, user: Option<PubKey>) { pub fn reconnecting(&mut self, peer_id: X25519PrivKey, user: Option<PubKey>) {
let peerinfo = self.peers.get_mut(&(user, peer_id)); let peerinfo = self.peers.get_mut(&(user, peer_id));
@ -422,12 +409,22 @@ impl<'a> Broker<'a> {
None => {} None => {}
} }
} }
fn remove_peer_id(&mut self, peer_id: X25519PrivKey, user: Option<PubKey>) { async fn remove_peer_id(&mut self, peer_id: X25519PrivKey, user: Option<PubKey>) {
let removed = self.peers.remove(&(user, peer_id)); let removed = self.peers.remove(&(user, peer_id));
match removed { match removed {
Some(info) => match info.connected { Some(info) => match info.connected {
PeerConnection::NONE => {} PeerConnection::NONE => {}
PeerConnection::Client(cb) => {} PeerConnection::Client(cb) => {
#[cfg(not(target_arch = "wasm32"))]
if user.is_none() {
// server side
if let Some(fsm) = cb.fsm {
if let Ok(user) = fsm.lock().await.user_id() {
let _ = self.remove_user_peer(&user, &peer_id);
}
}
}
}
PeerConnection::Core(ip) => { PeerConnection::Core(ip) => {
self.direct_connections.remove(&ip); self.direct_connections.remove(&ip);
} }
@ -480,12 +477,12 @@ impl<'a> Broker<'a> {
shutdown_sender, shutdown_sender,
direct_connections: HashMap::new(), direct_connections: HashMap::new(),
peers: HashMap::new(), peers: HashMap::new(),
tauri_streams: HashMap::new(),
closing: false, closing: false,
server_broker: None, server_broker: None,
disconnections_sender, disconnections_sender,
disconnections_receiver: Some(disconnections_receiver), disconnections_receiver: Some(disconnections_receiver),
local_broker: None, local_broker: None,
users_peers: HashMap::new(),
} }
} }
@ -625,7 +622,11 @@ impl<'a> Broker<'a> {
Some(Either::Right(remote_peer_id)) => { Some(Either::Right(remote_peer_id)) => {
let res = join.next().await; let res = join.next().await;
log_debug!("SOCKET IS CLOSED {:?} peer_id: {:?}", res, remote_peer_id); log_debug!("SOCKET IS CLOSED {:?} peer_id: {:?}", res, remote_peer_id);
BROKER.write().await.remove_peer_id(remote_peer_id, None); BROKER
.write()
.await
.remove_peer_id(remote_peer_id, None)
.await;
} }
_ => { _ => {
log_debug!( log_debug!(
@ -649,6 +650,36 @@ impl<'a> Broker<'a> {
Ok(()) Ok(())
} }
#[cfg(not(target_arch = "wasm32"))]
/// Records that `peer` is a connected client peer of `user`.
///
/// Returns `ProtocolError::PeerAlreadyConnected` if this exact
/// (user, peer) pair was already registered.
fn add_user_peer(&mut self, user: UserId, peer: X25519PrivKey) -> Result<(), ProtocolError> {
    // or_insert_with: build the HashSet lazily, only on first insert for
    // this user (or_insert would allocate the set on every call).
    let peers_set = self
        .users_peers
        .entry(user)
        .or_insert_with(|| HashSet::with_capacity(1));
    if !peers_set.insert(peer) {
        return Err(ProtocolError::PeerAlreadyConnected);
    }
    Ok(())
}
#[cfg(not(target_arch = "wasm32"))]
/// Removes `peer` from the set of connected peers of `user`.
///
/// Errors:
/// - `UserNotConnected` if the user has no registered peers at all.
/// - `PeerNotConnected` if the user is known but this peer is not in its set.
fn remove_user_peer(
    &mut self,
    user: &UserId,
    peer: &X25519PrivKey,
) -> Result<(), ProtocolError> {
    let peers_set = self
        .users_peers
        .get_mut(user)
        .ok_or(ProtocolError::UserNotConnected)?;
    if !peers_set.remove(peer) {
        return Err(ProtocolError::PeerNotConnected);
    }
    // Drop the whole map entry once the last peer of this user is gone,
    // so `users_peers` does not accumulate empty sets over time.
    if peers_set.is_empty() {
        self.users_peers.remove(user);
    }
    Ok(())
}
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
pub async fn attach_and_authorize_peer_id( pub async fn attach_and_authorize_peer_id(
&mut self, &mut self,
@ -709,7 +740,10 @@ impl<'a> Broker<'a> {
connection.reset_shutdown(remote_peer_id).await; connection.reset_shutdown(remote_peer_id).await;
let connected = if !is_core { let connected = if !is_core {
fsm.set_user_id(client.unwrap().user); let user = client.unwrap().user;
fsm.set_user_id(user);
self.add_user_peer(user, remote_peer_id)?;
PeerConnection::Client(connection) PeerConnection::Client(connection)
} else { } else {
let dc = DirectConnection { let dc = DirectConnection {
@ -890,7 +924,8 @@ impl<'a> Broker<'a> {
BROKER BROKER
.write() .write()
.await .await
.remove_peer_id(remote_peer_id, config.get_user()); .remove_peer_id(remote_peer_id, config.get_user())
.await;
} }
} }
.await; .await;
@ -928,6 +963,50 @@ impl<'a> Broker<'a> {
} }
} }
/// Forwards a pub/sub `Event` to every locally connected client peer that the
/// server broker reports as a subscriber for this overlay.
///
/// The server broker's `dispatch_event` returns the set of peers to reach
/// locally (presumably already excluding `remote_peer`, the sender —
/// TODO(review): confirm against the IServerBroker implementation).
/// Send failures are deliberately ignored (`let _ =`): a dead connection is
/// cleaned up by the disconnection path, not here.
#[cfg(not(target_arch = "wasm32"))]
pub async fn dispatch_event(
    &self,
    overlay: &OverlayId,
    event: Event,
    user_id: &UserId,
    remote_peer: &PubKey,
) -> Result<(), ServerError> {
    // TODO: deal with subscriptions on the outer overlay. for now we assume everything is on the inner overlay
    let peers_for_local_dispatch = self.get_server_broker()?.dispatch_event(
        overlay,
        event.clone(),
        user_id,
        remote_peer,
    )?;

    log_debug!("dispatch_event {:?}", peers_for_local_dispatch);

    for peer in peers_for_local_dispatch {
        log_debug!("dispatch_event peer {:?}", peer);
        // Only peers with a live client connection (PeerConnection::Client
        // holding an FSM) can be reached. Server-side entries in `peers` are
        // keyed with a `None` user, hence the (None, dh-key) lookup.
        if let Some(BrokerPeerInfo {
            connected: PeerConnection::Client(ConnectionBase { fsm: Some(fsm), .. }),
            ..
        }) = self.peers.get(&(None, peer.to_owned().to_dh()))
        {
            log_debug!("ForwardedEvent peer {:?}", peer);
            let _ = fsm
                .lock()
                .await
                .send(ProtocolMessage::ClientMessage(ClientMessage::V0(
                    ClientMessageV0 {
                        overlay: *overlay,
                        padding: vec![],
                        content: ClientMessageContentV0::ForwardedEvent(event.clone()),
                    },
                )))
                .await;
        }
    }
    Ok(())
}
pub fn take_disconnections_receiver(&mut self) -> Option<Receiver<String>> { pub fn take_disconnections_receiver(&mut self) -> Option<Receiver<String>> {
self.disconnections_receiver.take() self.disconnections_receiver.take()
} }

@ -264,6 +264,10 @@ impl NoiseFSM {
} }
} }
/// Peer id of the remote end of this connection
/// (`None` while the remote peer has not been identified yet).
pub fn remote_peer(&self) -> &Option<PubKey> {
    &self.remote
}
pub(crate) fn set_user_id(&mut self, user: UserId) { pub(crate) fn set_user_id(&mut self, user: UserId) {
if self.user.is_none() { if self.user.is_none() {
self.user = Some(user); self.user = Some(user);
@ -309,7 +313,12 @@ impl NoiseFSM {
if in_reply_to != 0 { if in_reply_to != 0 {
msg.set_id(in_reply_to); msg.set_id(in_reply_to);
} }
#[cfg(debug_assertions)]
if msg.is_block() {
log_debug!("SENDING BLOCK");
} else {
log_debug!("SENDING: {:?}", msg); log_debug!("SENDING: {:?}", msg);
}
if self.noise_cipher_state_enc.is_some() { if self.noise_cipher_state_enc.is_some() {
let cipher = self.encrypt(msg)?; let cipher = self.encrypt(msg)?;
self.sender self.sender
@ -408,12 +417,17 @@ impl NoiseFSM {
} }
} }
if msg_opt.is_some() { if msg_opt.is_some() {
#[cfg(debug_assertions)]
if msg_opt.as_ref().unwrap().is_block() {
log_debug!("RECEIVED BLOCK");
} else {
log_debug!( log_debug!(
"RECEIVED: {:?} in state {:?}", "RECEIVED: {:?} in state {:?}",
msg_opt.as_ref().unwrap(), msg_opt.as_ref().unwrap(),
self.state self.state
); );
} }
}
match self.state { match self.state {
FSMstate::Closing => {} FSMstate::Closing => {}
// TODO verify that ID is zero // TODO verify that ID is zero
@ -538,7 +552,7 @@ impl NoiseFSM {
// CLIENT side receiving probe response // CLIENT side receiving probe response
if let Some(msg) = msg_opt { if let Some(msg) = msg_opt {
let id = msg.id(); let id = msg.id();
if id != 0 { if id != Some(0) {
return Err(ProtocolError::InvalidState); return Err(ProtocolError::InvalidState);
} }
if let ProtocolMessage::ProbeResponse(_probe_res) = &msg { if let ProtocolMessage::ProbeResponse(_probe_res) = &msg {
@ -736,7 +750,7 @@ impl NoiseFSM {
let content = ClientAuthContentV0 { let content = ClientAuthContentV0 {
user: user_pub, user: user_pub,
client: client_pub, client: client_pub,
/// Nonce from ServerHello // Nonce from ServerHello
nonce: hello.nonce().clone(), nonce: hello.nonce().clone(),
info: info.clone(), info: info.clone(),
registration: client_config.registration, registration: client_config.registration,
@ -747,7 +761,7 @@ impl NoiseFSM {
sign(&client_config.client_priv, &client_pub, &ser)?; sign(&client_config.client_priv, &client_pub, &ser)?;
let client_auth = ClientAuth::V0(ClientAuthV0 { let client_auth = ClientAuth::V0(ClientAuthV0 {
content, content,
/// Signature by user key // Signature by user key
sig, sig,
client_sig, client_sig,
}); });
@ -845,13 +859,31 @@ impl NoiseFSM {
if msg.type_id() != TypeId::of::<ClientMessage>() { if msg.type_id() != TypeId::of::<ClientMessage>() {
return Err(ProtocolError::AccessDenied); return Err(ProtocolError::AccessDenied);
} }
let id: i64 = msg.id(); match msg.id() {
Some(id) => {
if self.dir.is_server() && id > 0 || !self.dir.is_server() && id < 0 { if self.dir.is_server() && id > 0 || !self.dir.is_server() && id < 0 {
return Ok(StepReply::Responder(msg)); return Ok(StepReply::Responder(msg));
} else if id != 0 { } else if id != 0 {
return Ok(StepReply::Response(msg)); return Ok(StepReply::Response(msg));
} }
} }
None => {
if let ProtocolMessage::ClientMessage(cm) = msg {
if let Some((event, overlay)) = cm.forwarded_event() {
BROKER
.read()
.await
.get_local_broker()?
.write()
.await
.deliver(event, overlay, self.user_id()?)
.await;
return Ok(StepReply::NONE);
}
}
}
}
}
} }
} }
Err(ProtocolError::InvalidState) Err(ProtocolError::InvalidState)
@ -1016,7 +1048,7 @@ impl ConnectionBase {
} }
Ok(StepReply::Response(response)) => { Ok(StepReply::Response(response)) => {
let mut lock = actors.lock().await; let mut lock = actors.lock().await;
let exists = lock.get_mut(&response.id()); let exists = lock.get_mut(&response.id().unwrap_or(0));
match exists { match exists {
Some(actor_sender) => { Some(actor_sender) => {
if actor_sender if actor_sender
@ -1077,6 +1109,8 @@ impl ConnectionBase {
res res
} }
// FIXME: why not use the FSm instead? looks like this is sending messages to the wire, unencrypted.
// Only final errors are sent this way. but it looks like even those error should be encrypted
pub async fn send(&mut self, cmd: ConnectionCommand) { pub async fn send(&mut self, cmd: ConnectionCommand) {
let _ = self.sender_tx.as_mut().unwrap().send(cmd).await; let _ = self.sender_tx.as_mut().unwrap().send(cmd).await;
} }

@ -11,11 +11,16 @@
//! Trait for ServerBroker //! Trait for ServerBroker
use std::collections::HashSet;
use crate::types::*; use crate::types::*;
use ng_repo::errors::*; use ng_repo::errors::*;
use ng_repo::types::*; use ng_repo::types::*;
pub trait IServerBroker: Send + Sync { pub trait IServerBroker: Send + Sync {
fn put_block(&self, overlay_id: &OverlayId, block: Block) -> Result<(), ServerError>;
fn has_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result<(), ServerError>;
fn get_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result<Block, ServerError>;
fn get_user(&self, user_id: PubKey) -> Result<bool, ProtocolError>; fn get_user(&self, user_id: PubKey) -> Result<bool, ProtocolError>;
fn add_user(&self, user_id: PubKey, is_admin: bool) -> Result<(), ProtocolError>; fn add_user(&self, user_id: PubKey, is_admin: bool) -> Result<(), ProtocolError>;
fn del_user(&self, user_id: PubKey) -> Result<(), ProtocolError>; fn del_user(&self, user_id: PubKey) -> Result<(), ProtocolError>;
@ -45,7 +50,7 @@ pub trait IServerBroker: Send + Sync {
) -> Result<RepoPinStatus, ServerError>; ) -> Result<RepoPinStatus, ServerError>;
fn pin_repo_write( fn pin_repo_write(
&self, &mut self,
overlay: &OverlayAccess, overlay: &OverlayAccess,
repo: &RepoHash, repo: &RepoHash,
user_id: &UserId, user_id: &UserId,
@ -53,23 +58,26 @@ pub trait IServerBroker: Send + Sync {
rw_topics: &Vec<PublisherAdvert>, rw_topics: &Vec<PublisherAdvert>,
overlay_root_topic: &Option<TopicId>, overlay_root_topic: &Option<TopicId>,
expose_outer: bool, expose_outer: bool,
peer: &PubKey,
) -> Result<RepoOpened, ServerError>; ) -> Result<RepoOpened, ServerError>;
fn pin_repo_read( fn pin_repo_read(
&self, &mut self,
overlay: &OverlayId, overlay: &OverlayId,
repo: &RepoHash, repo: &RepoHash,
user_id: &UserId, user_id: &UserId,
ro_topics: &Vec<TopicId>, ro_topics: &Vec<TopicId>,
peer: &PubKey,
) -> Result<RepoOpened, ServerError>; ) -> Result<RepoOpened, ServerError>;
fn topic_sub( fn topic_sub(
&self, &mut self,
overlay: &OverlayId, overlay: &OverlayId,
repo: &RepoHash, repo: &RepoHash,
topic: &TopicId, topic: &TopicId,
user_id: &UserId, user_id: &UserId,
publisher: Option<&PublisherAdvert>, publisher: Option<&PublisherAdvert>,
peer: &PubKey,
) -> Result<TopicSubRes, ServerError>; ) -> Result<TopicSubRes, ServerError>;
fn get_commit(&self, overlay: &OverlayId, id: &ObjectId) -> Result<Vec<Block>, ServerError>; fn get_commit(&self, overlay: &OverlayId, id: &ObjectId) -> Result<Vec<Block>, ServerError>;
@ -79,7 +87,8 @@ pub trait IServerBroker: Send + Sync {
overlay: &OverlayId, overlay: &OverlayId,
event: Event, event: Event,
user_id: &UserId, user_id: &UserId,
) -> Result<(), ServerError>; remote_peer: &PubKey,
) -> Result<HashSet<&PubKey>, ServerError>;
fn topic_sync_req( fn topic_sync_req(
&self, &self,

@ -1435,8 +1435,9 @@ pub enum ClientType {
NativeService, NativeService,
NodeService, NodeService,
Verifier, Verifier,
Box, VerifierLocal,
Stick, Box, // VerifierBox
Stick, // VerifierStick
WalletMaster, WalletMaster,
ClientBroker, ClientBroker,
Cli, Cli,
@ -2047,7 +2048,7 @@ pub struct OverlayAdvertMarkerV0 {
/// Core Block Get V0 /// Core Block Get V0
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CoreBlockGetV0 { pub struct CoreBlocksGetV0 {
/// Block ID to request /// Block ID to request
pub ids: Vec<BlockId>, pub ids: Vec<BlockId>,
@ -2093,8 +2094,8 @@ pub enum ReturnPathTimingAdvert {
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub enum CoreBlockGet { pub enum CoreBlocksGet {
V0(CoreBlockGetV0), V0(CoreBlocksGetV0),
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
@ -2107,7 +2108,7 @@ pub enum CoreBlockResult {
pub enum CoreDirectMessageContentV0 { pub enum CoreDirectMessageContentV0 {
OverlayAdvertMarker(OverlayAdvertMarker), OverlayAdvertMarker(OverlayAdvertMarker),
ReturnPathTimingAdvert(ReturnPathTimingAdvert), ReturnPathTimingAdvert(ReturnPathTimingAdvert),
BlockGet(CoreBlockGet), BlocksGet(CoreBlocksGet),
BlockResult(CoreBlockResult), BlockResult(CoreBlockResult),
//PostInbox, //PostInbox,
//PartialSignature, //PartialSignature,
@ -2199,7 +2200,7 @@ pub enum OuterOverlayRequestContentV0 {
OverlayLeave(OverlayLeave), OverlayLeave(OverlayLeave),
TopicSub(PubKey), TopicSub(PubKey),
TopicUnsub(PubKey), TopicUnsub(PubKey),
BlockGet(BlockGet), BlocksGet(BlocksGet),
//PostInboxRequest(PostInboxRequest), //PostInboxRequest(PostInboxRequest),
} }
@ -2958,11 +2959,6 @@ pub enum TopicSub {
} }
impl TopicSub { impl TopicSub {
pub fn overlay(&self) -> &OverlayId {
match self {
Self::V0(v0) => v0.overlay.as_ref().unwrap(),
}
}
pub fn hash(&self) -> &RepoHash { pub fn hash(&self) -> &RepoHash {
match self { match self {
Self::V0(o) => &o.repo_hash, Self::V0(o) => &o.repo_hash,
@ -2983,6 +2979,11 @@ impl TopicSub {
Self::V0(v0) => v0.overlay = Some(overlay), Self::V0(v0) => v0.overlay = Some(overlay),
} }
} }
/// Overlay this subscription targets.
///
/// # Panics
/// Panics if the overlay has not been populated with `set_overlay` first.
pub fn overlay(&self) -> &OverlayId {
    match self {
        Self::V0(v0) => v0.overlay.as_ref().unwrap(),
    }
}
} }
/// Request unsubscription from a `Topic` of an already opened or pinned Repo /// Request unsubscription from a `Topic` of an already opened or pinned Repo
@ -3005,7 +3006,7 @@ pub enum TopicUnsub {
/// ///
/// commit_header_key is always set to None in the reply when request is made on OuterOverlay of protected or Group overlays /// commit_header_key is always set to None in the reply when request is made on OuterOverlay of protected or Group overlays
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BlockGetV0 { pub struct BlocksGetV0 {
/// Block IDs to request /// Block IDs to request
pub ids: Vec<BlockId>, pub ids: Vec<BlockId>,
@ -3015,28 +3016,31 @@ pub struct BlockGetV0 {
/// Topic the object is referenced from, if it is known by the requester. /// Topic the object is referenced from, if it is known by the requester.
/// can be used to do a BlockSearchTopic in the core overlay. /// can be used to do a BlockSearchTopic in the core overlay.
pub topic: Option<TopicId>, pub topic: Option<TopicId>,
#[serde(skip)]
pub overlay: Option<OverlayId>,
} }
/// Request an object by ID /// Request an object by ID
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub enum BlockGet { pub enum BlocksGet {
V0(BlockGetV0), V0(BlocksGetV0),
} }
impl BlockGet { impl BlocksGet {
pub fn ids(&self) -> &Vec<BlockId> { pub fn ids(&self) -> &Vec<BlockId> {
match self { match self {
BlockGet::V0(o) => &o.ids, BlocksGet::V0(o) => &o.ids,
} }
} }
pub fn include_children(&self) -> bool { pub fn include_children(&self) -> bool {
match self { match self {
BlockGet::V0(o) => o.include_children, BlocksGet::V0(o) => o.include_children,
} }
} }
pub fn topic(&self) -> Option<PubKey> { pub fn topic(&self) -> Option<PubKey> {
match self { match self {
BlockGet::V0(o) => o.topic, BlocksGet::V0(o) => o.topic,
} }
} }
} }
@ -3044,10 +3048,10 @@ impl BlockGet {
/// Request a Commit by ID /// Request a Commit by ID
/// ///
/// commit_header_key is always set to None in the reply when request is made on OuterOverlay of protected or Group overlays /// commit_header_key is always set to None in the reply when request is made on OuterOverlay of protected or Group overlays
/// The difference with BlockGet is that the Broker will try to return all the commit blocks as they were sent in the Pub/Sub Event, if it has it. /// The difference with BlocksGet is that the Broker will try to return all the commit blocks as they were sent in the Pub/Sub Event, if it has it.
/// This will help in having all the blocks (including the header and body blocks), while a BlockGet would inevitably return only the blocks of the ObjectContent, /// This will help in having all the blocks (including the header and body blocks), while a BlocksGet would inevitably return only the blocks of the ObjectContent,
/// and not the header nor the body. And the load() would fail with CommitLoadError::MissingBlocks. That's what happens when the Commit is not present in the pubsub, /// and not the header nor the body. And the load() would fail with CommitLoadError::MissingBlocks. That's what happens when the Commit is not present in the pubsub,
/// and we need to default to using BlockGet instead. /// and we need to default to using BlocksGet instead.
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CommitGetV0 { pub struct CommitGetV0 {
/// Block IDs to request /// Block IDs to request
@ -3079,6 +3083,9 @@ impl CommitGet {
pub struct BlocksPutV0 { pub struct BlocksPutV0 {
/// Blocks to store /// Blocks to store
pub blocks: Vec<Block>, pub blocks: Vec<Block>,
#[serde(skip)]
pub overlay: Option<OverlayId>,
} }
/// Request to store one or more blocks /// Request to store one or more blocks
@ -3093,15 +3100,28 @@ impl BlocksPut {
BlocksPut::V0(o) => &o.blocks, BlocksPut::V0(o) => &o.blocks,
} }
} }
/// Overlay this request targets.
///
/// # Panics
/// Panics if `set_overlay` has not been called first: the field is
/// `#[serde(skip)]`-ped, so it is never transmitted on the wire and is
/// presumably filled in broker-side after decoding — TODO confirm.
pub fn overlay(&self) -> &OverlayId {
    match self {
        Self::V0(v0) => v0.overlay.as_ref().unwrap(),
    }
}

/// Sets the (local-only, non-serialized) overlay of this request.
pub fn set_overlay(&mut self, overlay: OverlayId) {
    match self {
        Self::V0(v0) => v0.overlay = Some(overlay),
    }
}
} }
/// Request to know if some blocks are present locally /// Request to know if some blocks are present locally
/// ///
/// used by client before publishing an event, to know what to push /// used by client before publishing an event with files, to know what to push
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BlocksExistV0 { pub struct BlocksExistV0 {
/// Ids of Blocks to check /// Ids of Blocks to check
pub blocks: Vec<BlockId>, pub blocks: Vec<BlockId>,
#[serde(skip)]
pub overlay: Option<OverlayId>,
} }
/// Request to store one or more blocks /// Request to store one or more blocks
@ -3116,6 +3136,16 @@ impl BlocksExist {
BlocksExist::V0(o) => &o.blocks, BlocksExist::V0(o) => &o.blocks,
} }
} }
/// Overlay this request targets.
///
/// # Panics
/// Panics if `set_overlay` has not been called first: the field is
/// `#[serde(skip)]`-ped, so it is never transmitted on the wire and is
/// presumably filled in broker-side after decoding — TODO confirm.
pub fn overlay(&self) -> &OverlayId {
    match self {
        Self::V0(v0) => v0.overlay.as_ref().unwrap(),
    }
}

/// Sets the (local-only, non-serialized) overlay of this request.
pub fn set_overlay(&mut self, overlay: OverlayId) {
    match self {
        Self::V0(v0) => v0.overlay = Some(overlay),
    }
}
} }
/// Request to pin an object /// Request to pin an object
@ -3201,7 +3231,7 @@ pub enum ClientRequestContentV0 {
TopicUnsub(TopicUnsub), TopicUnsub(TopicUnsub),
BlocksExist(BlocksExist), BlocksExist(BlocksExist),
BlockGet(BlockGet), BlocksGet(BlocksGet),
CommitGet(CommitGet), CommitGet(CommitGet),
TopicSyncReq(TopicSyncReq), TopicSyncReq(TopicSyncReq),
@ -3224,6 +3254,9 @@ impl ClientRequestContentV0 {
ClientRequestContentV0::PublishEvent(a) => a.set_overlay(overlay), ClientRequestContentV0::PublishEvent(a) => a.set_overlay(overlay),
ClientRequestContentV0::CommitGet(a) => a.set_overlay(overlay), ClientRequestContentV0::CommitGet(a) => a.set_overlay(overlay),
ClientRequestContentV0::TopicSyncReq(a) => a.set_overlay(overlay), ClientRequestContentV0::TopicSyncReq(a) => a.set_overlay(overlay),
ClientRequestContentV0::BlocksPut(a) => a.set_overlay(overlay),
ClientRequestContentV0::BlocksExist(a) => a.set_overlay(overlay),
ClientRequestContentV0::BlocksGet(a) => a.set_overlay(overlay),
_ => unimplemented!(), _ => unimplemented!(),
} }
} }
@ -3272,6 +3305,9 @@ impl ClientRequest {
ClientRequestContentV0::PublishEvent(r) => r.get_actor(self.id()), ClientRequestContentV0::PublishEvent(r) => r.get_actor(self.id()),
ClientRequestContentV0::CommitGet(r) => r.get_actor(self.id()), ClientRequestContentV0::CommitGet(r) => r.get_actor(self.id()),
ClientRequestContentV0::TopicSyncReq(r) => r.get_actor(self.id()), ClientRequestContentV0::TopicSyncReq(r) => r.get_actor(self.id()),
ClientRequestContentV0::BlocksPut(r) => r.get_actor(self.id()),
ClientRequestContentV0::BlocksExist(r) => r.get_actor(self.id()),
ClientRequestContentV0::BlocksGet(r) => r.get_actor(self.id()),
_ => unimplemented!(), _ => unimplemented!(),
}, },
} }
@ -3364,6 +3400,11 @@ impl TopicSubRes {
publisher, publisher,
}) })
} }
/// Read accessor for the `known_heads` field of the subscription response.
pub fn known_heads(&self) -> &Vec<ObjectId> {
    match self {
        Self::V0(v0) => &v0.known_heads,
    }
}
} }
impl From<TopicId> for TopicSubRes { impl From<TopicId> for TopicSubRes {
@ -3518,6 +3559,22 @@ pub enum ClientMessageContentV0 {
ForwardedEvent(Event), ForwardedEvent(Event),
ForwardedBlock(Block), ForwardedBlock(Block),
} }
impl ClientMessageContentV0 {
    /// True when this message carries raw block payloads — a `BlocksPut`
    /// request or a `Block` response — so callers can avoid logging the
    /// full (potentially large) message body.
    pub fn is_block(&self) -> bool {
        // `matches!` replaces the boolean-literal match arms; same semantics.
        matches!(
            self,
            Self::ClientRequest(ClientRequest::V0(ClientRequestV0 {
                content: ClientRequestContentV0::BlocksPut(_),
                ..
            })) | Self::ClientResponse(ClientResponse::V0(ClientResponseV0 {
                content: ClientResponseContentV0::Block(_),
                ..
            }))
        )
    }
}
/// Broker message for an overlay /// Broker message for an overlay
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ClientMessageV0 { pub struct ClientMessageV0 {
@ -3548,6 +3605,16 @@ impl ClientMessage {
}, },
} }
} }
/// Consumes the message; yields the forwarded event together with the
/// overlay it belongs to, or `None` for any other content kind.
pub fn forwarded_event(self) -> Option<(Event, OverlayId)> {
    let overlay = self.overlay_id();
    match self {
        ClientMessage::V0(v0) => {
            if let ClientMessageContentV0::ForwardedEvent(event) = v0.content {
                Some((event, overlay))
            } else {
                None
            }
        }
    }
}
pub fn overlay_id(&self) -> OverlayId { pub fn overlay_id(&self) -> OverlayId {
match self { match self {
ClientMessage::V0(o) => o.overlay, ClientMessage::V0(o) => o.overlay,
@ -3567,15 +3634,13 @@ impl ClientMessage {
} }
} }
} }
pub fn id(&self) -> i64 { pub fn id(&self) -> Option<i64> {
match self { match self {
ClientMessage::V0(o) => match &o.content { ClientMessage::V0(o) => match &o.content {
ClientMessageContentV0::ClientResponse(r) => r.id(), ClientMessageContentV0::ClientResponse(r) => Some(r.id()),
ClientMessageContentV0::ClientRequest(r) => r.id(), ClientMessageContentV0::ClientRequest(r) => Some(r.id()),
ClientMessageContentV0::ForwardedEvent(_) ClientMessageContentV0::ForwardedEvent(_)
| ClientMessageContentV0::ForwardedBlock(_) => { | ClientMessageContentV0::ForwardedBlock(_) => None,
panic!("it is an event")
}
}, },
} }
} }
@ -3869,12 +3934,12 @@ impl TryFrom<&ProtocolMessage> for ServerError {
} }
impl ProtocolMessage { impl ProtocolMessage {
pub fn id(&self) -> i64 { pub fn id(&self) -> Option<i64> {
match self { match self {
ProtocolMessage::ExtRequest(ext_req) => ext_req.id(), ProtocolMessage::ExtRequest(ext_req) => Some(ext_req.id()),
ProtocolMessage::ExtResponse(ext_res) => ext_res.id(), ProtocolMessage::ExtResponse(ext_res) => Some(ext_res.id()),
ProtocolMessage::ClientMessage(client_msg) => client_msg.id(), ProtocolMessage::ClientMessage(client_msg) => client_msg.id(),
_ => 0, _ => None,
} }
} }
pub fn set_id(&mut self, id: i64) { pub fn set_id(&mut self, id: i64) {
@ -3940,6 +4005,16 @@ impl ProtocolMessage {
padding: vec![], padding: vec![],
})) }))
} }
/// True when this protocol message transports a raw block (request or
/// response); used to keep debug logs from dumping block contents.
pub fn is_block(&self) -> bool {
    if let ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 {
        content, ..
    })) = self
    {
        content.is_block()
    } else {
        false
    }
}
} }
impl From<ClientResponseContentV0> for ClientResponse { impl From<ClientResponseContentV0> for ClientResponse {

@ -38,6 +38,8 @@ pub trait BlockStorage: Send + Sync {
/// number of Blocks in the storage /// number of Blocks in the storage
fn len(&self) -> Result<usize, StorageError>; fn len(&self) -> Result<usize, StorageError>;
fn has(&self, overlay: &OverlayId, id: &BlockId) -> Result<(), StorageError>;
} }
/* LMDB values: /* LMDB values:
@ -138,6 +140,13 @@ impl BlockStorage for HashMapBlockStorage {
} }
} }
/// Presence check for a block id; the overlay is ignored by this
/// in-memory store.
fn has(&self, _overlay: &OverlayId, id: &BlockId) -> Result<(), StorageError> {
    let blocks = self.blocks.read().unwrap();
    if blocks.contains_key(id) {
        Ok(())
    } else {
        Err(StorageError::NotFound)
    }
}
fn len(&self) -> Result<usize, StorageError> { fn len(&self) -> Result<usize, StorageError> {
Ok(self.get_len()) Ok(self.get_len())
} }

@ -532,6 +532,22 @@ impl Commit {
res res
} }
/// Returns the file references listed in this commit's header keys.
///
/// Yields an empty vector when the commit has no header
/// (i.e. `header_keys()` is `None`).
pub fn files(&self) -> Vec<ObjectRef> {
    match self {
        Commit::V0(c) => match &c.content.header_keys() {
            // Clone the whole Vec at once instead of pushing element by
            // element: same result, one allocation of the right capacity.
            Some(CommitHeaderKeys::V0(hk_v0)) => hk_v0.files.clone(),
            None => Vec::new(),
        },
    }
}
/// Get deps (that have both an ID in the header and a key in the header_keys) /// Get deps (that have both an ID in the header and a key in the header_keys)
pub fn deps(&self) -> Vec<ObjectRef> { pub fn deps(&self) -> Vec<ObjectRef> {
let mut res: Vec<ObjectRef> = vec![]; let mut res: Vec<ObjectRef> = vec![];
@ -1394,7 +1410,7 @@ impl fmt::Display for CommitBody {
// CommitBodyV0::Snapshot(b) => write!(f, "Snapshot {}", b), // a soft snapshot // CommitBodyV0::Snapshot(b) => write!(f, "Snapshot {}", b), // a soft snapshot
// CommitBodyV0::AsyncTransaction(b) => write!(f, "AsyncTransaction {}", b), // partial_order // CommitBodyV0::AsyncTransaction(b) => write!(f, "AsyncTransaction {}", b), // partial_order
// CommitBodyV0::SyncTransaction(b) => write!(f, "SyncTransaction {}", b), // total_order // CommitBodyV0::SyncTransaction(b) => write!(f, "SyncTransaction {}", b), // total_order
// CommitBodyV0::AddFile(b) => write!(f, "AddFile {}", b), CommitBodyV0::AddFile(b) => write!(f, "AddFile {}", b),
// CommitBodyV0::RemoveFile(b) => write!(f, "RemoveFile {}", b), // CommitBodyV0::RemoveFile(b) => write!(f, "RemoveFile {}", b),
// CommitBodyV0::Compact(b) => write!(f, "Compact {}", b), // a hard snapshot. total order enforced with total_order_quorum // CommitBodyV0::Compact(b) => write!(f, "Compact {}", b), // a hard snapshot. total order enforced with total_order_quorum
//Merge(Merge) => write!(f, "RootBranch {}", b), //Merge(Merge) => write!(f, "RootBranch {}", b),

@ -10,6 +10,7 @@
//! Errors //! Errors
pub use crate::commit::{CommitLoadError, CommitVerifyError}; pub use crate::commit::{CommitLoadError, CommitVerifyError};
use crate::file::FileError;
use crate::object::Object; use crate::object::Object;
use num_enum::IntoPrimitive; use num_enum::IntoPrimitive;
use num_enum::TryFromPrimitive; use num_enum::TryFromPrimitive;
@ -70,6 +71,10 @@ pub enum NgError {
BrokerConfigErrorStr(&'static str), BrokerConfigErrorStr(&'static str),
BrokerConfigError(String), BrokerConfigError(String),
MalformedEvent, MalformedEvent,
InvalidPayload,
WrongUploadId,
FileError(FileError),
InternalError,
} }
impl Error for NgError {} impl Error for NgError {}
@ -129,6 +134,12 @@ impl From<CommitLoadError> for NgError {
} }
} }
/// Wraps a low-level `FileError` so it can travel through `NgError` results.
impl From<FileError> for NgError {
    fn from(e: FileError) -> Self {
        NgError::FileError(e)
    }
}
impl From<CommitVerifyError> for NgError { impl From<CommitVerifyError> for NgError {
fn from(e: CommitVerifyError) -> Self { fn from(e: CommitVerifyError) -> Self {
NgError::CommitVerifyError(e) NgError::CommitVerifyError(e)
@ -232,6 +243,10 @@ pub enum ServerError {
AccessDenied, AccessDenied,
InvalidHeader, InvalidHeader,
MalformedBranch, MalformedBranch,
BrokerError,
ProtocolError,
PeerAlreadySubscribed,
SubscriptionNotFound,
} }
impl From<StorageError> for ServerError { impl From<StorageError> for ServerError {
@ -243,6 +258,16 @@ impl From<StorageError> for ServerError {
} }
} }
/// Maps protocol-layer errors onto the server error space.
///
/// Lossy by design: only `NotFound` and `BrokerError` keep a distinct
/// variant; every other protocol error collapses into the generic
/// `ServerError::ProtocolError`.
impl From<ProtocolError> for ServerError {
    fn from(e: ProtocolError) -> Self {
        match e {
            ProtocolError::NotFound => ServerError::NotFound,
            ProtocolError::BrokerError => ServerError::BrokerError,
            _ => ServerError::ProtocolError,
        }
    }
}
impl From<NgError> for ServerError { impl From<NgError> for ServerError {
fn from(e: NgError) -> Self { fn from(e: NgError) -> Self {
match e { match e {
@ -281,10 +306,14 @@ pub enum VerifierError {
TopicNotFound, TopicNotFound,
RepoNotFound, RepoNotFound,
StoreNotFound, StoreNotFound,
OverlayNotFound,
BranchNotFound, BranchNotFound,
InvalidBranch, InvalidBranch,
NoBlockStorageAvailable, NoBlockStorageAvailable,
RootBranchNotFound, RootBranchNotFound,
BranchNotOpened,
DoubleBranchSubscription,
InvalidCommit,
} }
impl From<NgError> for VerifierError { impl From<NgError> for VerifierError {
@ -371,6 +400,8 @@ pub enum ProtocolError {
Expired, Expired,
PeerAlreadyConnected, PeerAlreadyConnected,
UserNotConnected,
PeerNotConnected,
OtherError, OtherError,
NetError, NetError,
StorageError, StorageError,

@ -12,6 +12,7 @@
use core::fmt; use core::fmt;
use std::cmp::min; use std::cmp::min;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc;
use chacha20::cipher::{KeyIvInit, StreamCipher}; use chacha20::cipher::{KeyIvInit, StreamCipher};
use chacha20::ChaCha20; use chacha20::ChaCha20;
@ -25,7 +26,7 @@ use crate::store::Store;
use crate::types::*; use crate::types::*;
/// File errors /// File errors
#[derive(Debug, PartialEq)] #[derive(Debug, Eq, PartialEq, Clone)]
pub enum FileError { pub enum FileError {
/// Missing blocks /// Missing blocks
MissingBlocks(Vec<BlockId>), MissingBlocks(Vec<BlockId>),
@ -71,17 +72,20 @@ impl From<ObjectParseError> for FileError {
} }
} }
trait ReadFile { pub trait ReadFile {
fn read(&self, pos: usize, size: usize) -> Result<Vec<u8>, FileError>; fn read(&self, pos: usize, size: usize) -> Result<Vec<u8>, FileError>;
fn get_all_blocks_ids(&self) -> Result<Vec<ObjectId>, FileError>;
} }
/// A File in memory (read access only) /// A File in memory (read access only)
pub struct File<'a> { pub struct File<'a> {
internal: Box<dyn ReadFile + 'a>, internal: Box<dyn ReadFile + 'a>,
blocks_ids: Vec<BlockId>,
} }
impl<'a> File<'a> { impl<'a> File<'a> {
pub fn open(id: ObjectId, key: SymKey, store: &'a Store) -> Result<File<'a>, FileError> { pub fn open(id: ObjectId, key: SymKey, store: Arc<Store>) -> Result<File<'a>, FileError> {
let root_block = store.get(&id)?; let root_block = store.get(&id)?;
if root_block.children().len() == 2 if root_block.children().len() == 2
@ -89,12 +93,14 @@ impl<'a> File<'a> {
{ {
Ok(File { Ok(File {
internal: Box::new(RandomAccessFile::open(id, key, store)?), internal: Box::new(RandomAccessFile::open(id, key, store)?),
blocks_ids: vec![],
}) })
} else { } else {
let obj = Object::load(id, Some(key), store)?; let obj = Object::load(id, Some(key), &store)?;
match obj.content_v0()? { match obj.content_v0()? {
ObjectContentV0::SmallFile(small_file) => Ok(File { ObjectContentV0::SmallFile(small_file) => Ok(File {
internal: Box::new(small_file), internal: Box::new(small_file),
blocks_ids: obj.block_ids(),
}), }),
_ => Err(FileError::NotAFile), _ => Err(FileError::NotAFile),
} }
@ -106,6 +112,13 @@ impl<'a> ReadFile for File<'a> {
fn read(&self, pos: usize, size: usize) -> Result<Vec<u8>, FileError> { fn read(&self, pos: usize, size: usize) -> Result<Vec<u8>, FileError> {
self.internal.read(pos, size) self.internal.read(pos, size)
} }
fn get_all_blocks_ids(&self) -> Result<Vec<ObjectId>, FileError> {
if self.blocks_ids.len() > 0 {
Ok(self.blocks_ids.to_vec())
} else {
self.internal.get_all_blocks_ids()
}
}
} }
impl ReadFile for SmallFile { impl ReadFile for SmallFile {
@ -114,6 +127,9 @@ impl ReadFile for SmallFile {
Self::V0(v0) => v0.read(pos, size), Self::V0(v0) => v0.read(pos, size),
} }
} }
fn get_all_blocks_ids(&self) -> Result<Vec<ObjectId>, FileError> {
unimplemented!();
}
} }
impl ReadFile for SmallFileV0 { impl ReadFile for SmallFileV0 {
@ -126,12 +142,15 @@ impl ReadFile for SmallFileV0 {
} }
Ok(self.content[pos..pos + size].to_vec()) Ok(self.content[pos..pos + size].to_vec())
} }
fn get_all_blocks_ids(&self) -> Result<Vec<ObjectId>, FileError> {
unimplemented!();
}
} }
/// A RandomAccessFile in memory. This is not used to serialize data /// A RandomAccessFile in memory. This is not used to serialize data
pub struct RandomAccessFile<'a> { pub struct RandomAccessFile {
//storage: Arc<&'a dyn BlockStorage>, //storage: Arc<&'a dyn BlockStorage>,
store: &'a Store, store: Arc<Store>,
/// accurate once saved or opened /// accurate once saved or opened
meta: RandomAccessFileMeta, meta: RandomAccessFileMeta,
@ -155,18 +174,61 @@ pub struct RandomAccessFile<'a> {
size: usize, size: usize,
} }
impl<'a> ReadFile for RandomAccessFile<'a> { impl ReadFile for RandomAccessFile {
fn get_all_blocks_ids(&self) -> Result<Vec<ObjectId>, FileError> {
if self.id.is_none() {
unimplemented!();
}
let mut res = Vec::with_capacity(4);
let _: Vec<()> = self
.blocks
.iter()
.map(|(id, _)| res.push(id.clone()))
.collect();
recurse_tree(
&self.store,
self.content_block.as_ref().unwrap().clone(),
&mut res,
self.meta.depth(),
)?;
fn recurse_tree(
store: &Store,
current_block_id_key: (Digest, SymKey),
res: &mut Vec<Digest>,
level: u8,
) -> Result<(), FileError> {
res.push(current_block_id_key.0);
if level > 0 {
let tree_block = store.get(&current_block_id_key.0)?;
let (children, content) = tree_block.read(&current_block_id_key.1)?;
if children.len() == 0 || content.len() > 0 {
return Err(FileError::BlockDeserializeError);
}
for child in children {
recurse_tree(store, child, res, level - 1)?;
}
}
Ok(())
}
Ok(res)
}
/// reads at most one block from the file. the returned vector should be tested for size. it might be smaller than what you asked for. /// reads at most one block from the file. the returned vector should be tested for size. it might be smaller than what you asked for.
/// `pos`ition can be anywhere in the file. /// `pos`ition can be anywhere in the file.
//TODO: parallelize decryption on multi threads (cores) //TODO: parallelize decryption on multi threads (cores)
fn read(&self, pos: usize, size: usize) -> Result<Vec<u8>, FileError> { fn read(&self, pos: usize, mut size: usize) -> Result<Vec<u8>, FileError> {
if size == 0 { if size == 0 {
return Err(FileError::InvalidArgument); return Err(FileError::InvalidArgument);
} }
if self.id.is_some() { if self.id.is_some() {
if pos + size > self.meta.total_size() as usize { let total = self.meta.total_size() as usize;
if pos > total {
return Err(FileError::EndOfFile); return Err(FileError::EndOfFile);
} }
size = min(total - pos, size);
let mut current_block_id_key = self.content_block.as_ref().unwrap().clone(); let mut current_block_id_key = self.content_block.as_ref().unwrap().clone();
let depth = self.meta.depth(); let depth = self.meta.depth();
@ -242,7 +304,7 @@ impl<'a> ReadFile for RandomAccessFile<'a> {
} }
} }
impl<'a> RandomAccessFile<'a> { impl RandomAccessFile {
pub fn meta(&self) -> &RandomAccessFileMeta { pub fn meta(&self) -> &RandomAccessFileMeta {
&self.meta &self.meta
} }
@ -396,8 +458,8 @@ impl<'a> RandomAccessFile<'a> {
block_size: usize, block_size: usize,
content_type: String, content_type: String,
metadata: Vec<u8>, metadata: Vec<u8>,
store: &'a Store, store: Arc<Store>,
) -> Result<RandomAccessFile<'a>, FileError> { ) -> Result<RandomAccessFile, FileError> {
//let max_block_size = store_max_value_size(); //let max_block_size = store_max_value_size();
let valid_block_size = store_valid_value_size(block_size) - BLOCK_EXTRA; let valid_block_size = store_valid_value_size(block_size) - BLOCK_EXTRA;
@ -405,22 +467,22 @@ impl<'a> RandomAccessFile<'a> {
let total_size = content.len() as u64; let total_size = content.len() as u64;
let mut conv_key = Object::convergence_key(store); let mut conv_key = Object::convergence_key(&store);
let mut blocks: Vec<(BlockId, BlockKey)> = vec![]; let mut blocks: Vec<(BlockId, BlockKey)> = vec![];
let mut already_existing: HashMap<BlockKey, BlockId> = HashMap::new(); let mut already_existing: HashMap<BlockKey, BlockId> = HashMap::new();
//log_debug!("making the leaves"); //log_debug!("making the leaves");
for chunck in content.chunks(valid_block_size) { for chunk in content.chunks(valid_block_size) {
let data_chunk = ChunkContentV0::DataChunk(chunck.to_vec()); let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec());
let content_ser = serde_bare::to_vec(&data_chunk).unwrap(); let content_ser = serde_bare::to_vec(&data_chunk).unwrap();
blocks.push(Self::make_block( blocks.push(Self::make_block(
content_ser, content_ser,
&conv_key, &conv_key,
vec![], vec![],
&mut already_existing, &mut already_existing,
store, &store,
)?); )?);
} }
assert_eq!( assert_eq!(
@ -438,7 +500,7 @@ impl<'a> RandomAccessFile<'a> {
}); });
let (content_block, root_block) = let (content_block, root_block) =
Self::save_(&mut already_existing, &blocks, &mut meta, &conv_key, store)?; Self::save_(&mut already_existing, &blocks, &mut meta, &conv_key, &store)?;
conv_key.zeroize(); conv_key.zeroize();
@ -460,7 +522,7 @@ impl<'a> RandomAccessFile<'a> {
block_size: usize, block_size: usize,
content_type: String, content_type: String,
metadata: Vec<u8>, metadata: Vec<u8>,
store: &'a Store, store: Arc<Store>,
) -> Self { ) -> Self {
let valid_block_size = store_valid_value_size(block_size) - BLOCK_EXTRA; let valid_block_size = store_valid_value_size(block_size) - BLOCK_EXTRA;
@ -476,14 +538,14 @@ impl<'a> RandomAccessFile<'a> {
}); });
Self { Self {
store, store: Arc::clone(&store),
meta, meta,
block_contents: HashMap::new(), block_contents: HashMap::new(),
blocks: vec![], blocks: vec![],
id: None, id: None,
key: None, key: None,
content_block: None, content_block: None,
conv_key: Some(Object::convergence_key(store)), conv_key: Some(Object::convergence_key(&store)),
remainder: vec![], remainder: vec![],
size: 0, size: 0,
} }
@ -518,7 +580,7 @@ impl<'a> RandomAccessFile<'a> {
&conv_key, &conv_key,
vec![], vec![],
&mut already_existing, &mut already_existing,
self.store, &self.store,
)?); )?);
} else { } else {
// not enough data to create a new block // not enough data to create a new block
@ -530,28 +592,28 @@ impl<'a> RandomAccessFile<'a> {
return Ok(()); return Ok(());
} }
for chunck in data[pos..].chunks(chunk_size) { for chunk in data[pos..].chunks(chunk_size) {
if chunck.len() == chunk_size { if chunk.len() == chunk_size {
self.size += chunk_size; self.size += chunk_size;
//log_debug!("size += chunk_size {} {}", self.size, chunk_size); //log_debug!("size += chunk_size {} {}", self.size, chunk_size);
let data_chunk = ChunkContentV0::DataChunk(chunck.to_vec()); let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec());
let content_ser = serde_bare::to_vec(&data_chunk).unwrap(); let content_ser = serde_bare::to_vec(&data_chunk).unwrap();
self.blocks.push(Self::make_block( self.blocks.push(Self::make_block(
content_ser, content_ser,
&conv_key, &conv_key,
vec![], vec![],
&mut already_existing, &mut already_existing,
self.store, &self.store,
)?); )?);
} else { } else {
self.remainder = Vec::from(chunck); self.remainder = Vec::from(chunk);
return Ok(()); return Ok(());
} }
} }
Ok(()) Ok(())
} }
pub fn save(&mut self) -> Result<(), FileError> { pub fn save(&mut self) -> Result<ObjectId, FileError> {
if self.id.is_some() { if self.id.is_some() {
return Err(FileError::AlreadySaved); return Err(FileError::AlreadySaved);
} }
@ -568,7 +630,7 @@ impl<'a> RandomAccessFile<'a> {
&self.conv_key.unwrap(), &self.conv_key.unwrap(),
vec![], vec![],
&mut HashMap::new(), &mut HashMap::new(),
self.store, &self.store,
)?); )?);
} }
@ -580,7 +642,7 @@ impl<'a> RandomAccessFile<'a> {
&self.blocks, &self.blocks,
&mut self.meta, &mut self.meta,
self.conv_key.as_ref().unwrap(), self.conv_key.as_ref().unwrap(),
self.store, &self.store,
)?; )?;
self.conv_key.as_mut().unwrap().zeroize(); self.conv_key.as_mut().unwrap().zeroize();
@ -593,15 +655,26 @@ impl<'a> RandomAccessFile<'a> {
self.blocks = vec![]; self.blocks = vec![];
self.blocks.shrink_to_fit(); self.blocks.shrink_to_fit();
Ok(()) Ok(root_block.0)
}
pub fn reference(&self) -> Option<ObjectRef> {
if self.key.is_some() && self.id.is_some() {
Some(ObjectRef::from_id_key(
self.id.unwrap(),
self.key.as_ref().unwrap().clone(),
))
} else {
None
}
} }
/// Opens a file for read purpose. /// Opens a file for read purpose.
pub fn open( pub fn open(
id: ObjectId, id: ObjectId,
key: SymKey, key: SymKey,
store: &'a Store, store: Arc<Store>,
) -> Result<RandomAccessFile<'a>, FileError> { ) -> Result<RandomAccessFile, FileError> {
// load root block // load root block
let root_block = store.get(&id)?; let root_block = store.get(&id)?;
@ -617,7 +690,7 @@ impl<'a> RandomAccessFile<'a> {
let meta_object = Object::load( let meta_object = Object::load(
root_sub_blocks[0].0, root_sub_blocks[0].0,
Some(root_sub_blocks[0].1.clone()), Some(root_sub_blocks[0].1.clone()),
store, &store,
)?; )?;
let meta = match meta_object.content_v0()? { let meta = match meta_object.content_v0()? {
@ -629,7 +702,7 @@ impl<'a> RandomAccessFile<'a> {
store, store,
meta, meta,
block_contents: HashMap::new(), // not used in this case block_contents: HashMap::new(), // not used in this case
blocks: vec![], // not used in this case blocks: vec![(id, SymKey::nil()), (root_sub_blocks[0].0, SymKey::nil())], // not used in this case
id: Some(id), id: Some(id),
key: Some(key), key: Some(key),
content_block: Some(root_sub_blocks[1].clone()), content_block: Some(root_sub_blocks[1].clone()),
@ -680,7 +753,7 @@ impl<'a> RandomAccessFile<'a> {
} }
} }
impl fmt::Display for RandomAccessFile<'_> { impl fmt::Display for RandomAccessFile {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!( writeln!(
f, f,
@ -736,7 +809,7 @@ mod test {
block_size, block_size,
"text/plain".to_string(), "text/plain".to_string(),
vec![], vec![],
&store, Arc::clone(&store),
) )
.expect("new_from_slice"); .expect("new_from_slice");
log_debug!("{}", file); log_debug!("{}", file);
@ -781,7 +854,7 @@ mod test {
assert_eq!(file.depth(), Ok(0)); assert_eq!(file.depth(), Ok(0));
assert_eq!(store.len(), Ok(3)); assert_eq!(store.len(), Ok(3));
let file = RandomAccessFile::open(id, file.key.unwrap(), &store).expect("re open"); let file = RandomAccessFile::open(id, file.key.unwrap(), store).expect("re open");
log_debug!("{}", file); log_debug!("{}", file);
@ -809,7 +882,7 @@ mod test {
store_max_value_size(), store_max_value_size(),
"text/plain".to_string(), "text/plain".to_string(),
vec![], vec![],
&store, Arc::clone(&store),
) )
.expect("new_from_slice"); .expect("new_from_slice");
log_debug!("{}", file); log_debug!("{}", file);
@ -842,7 +915,7 @@ mod test {
store_max_value_size(), store_max_value_size(),
"text/plain".to_string(), "text/plain".to_string(),
vec![], vec![],
&store, Arc::clone(&store),
) )
.expect("new_from_slice"); .expect("new_from_slice");
log_debug!("{}", file); log_debug!("{}", file);
@ -877,7 +950,7 @@ mod test {
store_valid_value_size(0), store_valid_value_size(0),
"text/plain".to_string(), "text/plain".to_string(),
vec![], vec![],
&store, Arc::clone(&store),
) )
.expect("new_from_slice"); .expect("new_from_slice");
log_debug!("{}", file); log_debug!("{}", file);
@ -938,7 +1011,7 @@ mod test {
store_valid_value_size(0), store_valid_value_size(0),
"text/plain".to_string(), "text/plain".to_string(),
vec![], vec![],
&store, Arc::clone(&store),
) )
.expect("new_from_slice"); .expect("new_from_slice");
@ -971,7 +1044,7 @@ mod test {
store_max_value_size(), //store_valid_value_size(0),// store_max_value_size(), //store_valid_value_size(0),//
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, store,
); );
log_debug!("{}", file); log_debug!("{}", file);
@ -1041,7 +1114,7 @@ mod test {
store_max_value_size(), //store_valid_value_size(0),// store_max_value_size(), //store_valid_value_size(0),//
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, store,
); );
log_debug!("{}", file); log_debug!("{}", file);
@ -1113,7 +1186,7 @@ mod test {
store_valid_value_size(0), store_valid_value_size(0),
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, store,
); );
log_debug!("{}", file); log_debug!("{}", file);
@ -1193,7 +1266,7 @@ mod test {
store_valid_value_size(0), store_valid_value_size(0),
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, store,
); );
log_debug!("{}", file); log_debug!("{}", file);
@ -1271,7 +1344,7 @@ mod test {
store_valid_value_size(0), store_valid_value_size(0),
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, Arc::clone(&store),
); );
log_debug!("{}", file); log_debug!("{}", file);
@ -1320,7 +1393,7 @@ mod test {
store_max_value_size(), //store_valid_value_size(0),// store_max_value_size(), //store_valid_value_size(0),//
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, Arc::clone(&store),
); );
log_debug!("{}", file); log_debug!("{}", file);
@ -1331,7 +1404,7 @@ mod test {
file.save().expect("save"); file.save().expect("save");
let file2 = RandomAccessFile::open(file.id().unwrap(), file.key.unwrap(), &store) let file2 = RandomAccessFile::open(file.id().unwrap(), file.key.unwrap(), store)
.expect("reopen file"); .expect("reopen file");
// this works only because store_max_value_size() is bigger than the actual size of the JPEG file. so it fits in one block. // this works only because store_max_value_size() is bigger than the actual size of the JPEG file. so it fits in one block.
@ -1378,7 +1451,7 @@ mod test {
let _ = obj.save_in_test(&store).expect("save"); let _ = obj.save_in_test(&store).expect("save");
let file = File::open(obj.id(), obj.key().unwrap(), &store).expect("open"); let file = File::open(obj.id(), obj.key().unwrap(), store).expect("open");
let res = file.read(0, len).expect("read all"); let res = file.read(0, len).expect("read all");
@ -1400,8 +1473,12 @@ mod test {
let store = Store::dummy_public_v0(); let store = Store::dummy_public_v0();
log_debug!("creating empty file"); log_debug!("creating empty file");
let mut file: RandomAccessFile = let mut file: RandomAccessFile = RandomAccessFile::new_empty(
RandomAccessFile::new_empty(max_object_size, "image/jpeg".to_string(), vec![], &store); max_object_size,
"image/jpeg".to_string(),
vec![],
Arc::clone(&store),
);
file.write(&img_buffer).expect("write all"); file.write(&img_buffer).expect("write all");
@ -1414,7 +1491,7 @@ mod test {
let file = File::open( let file = File::open(
file.id().unwrap(), file.id().unwrap(),
file.key().as_ref().unwrap().clone(), file.key().as_ref().unwrap().clone(),
&store, store,
) )
.expect("open"); .expect("open");
@ -1440,7 +1517,7 @@ mod test {
store_valid_value_size(0), store_valid_value_size(0),
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, store,
); );
log_debug!("{}", file); log_debug!("{}", file);
@ -1489,7 +1566,7 @@ mod test {
store_max_value_size(), store_max_value_size(),
"image/jpeg".to_string(), "image/jpeg".to_string(),
vec![], vec![],
&store, store,
); );
log_debug!("{}", file); log_debug!("{}", file);

@ -166,7 +166,10 @@ impl Repo {
pub fn update_branch_current_head(&mut self, branch: &BranchId, commit_ref: ObjectRef) { pub fn update_branch_current_head(&mut self, branch: &BranchId, commit_ref: ObjectRef) {
//log_info!("from branch {} HEAD UPDATED TO {}", branch, commit_ref.id); //log_info!("from branch {} HEAD UPDATED TO {}", branch, commit_ref.id);
if let Some(branch) = self.branches.get_mut(branch) { if let Some(branch) = self.branches.get_mut(branch) {
// FIXME: this is very wrong. the DAG is not always linear
branch.current_heads = vec![commit_ref]; branch.current_heads = vec![commit_ref];
//TODO: if userstorage: save current_heads to user storage
} }
} }
@ -203,6 +206,7 @@ impl Repo {
write_cap: None, write_cap: None,
branches: HashMap::new(), branches: HashMap::new(),
opened_branches: HashMap::new(), opened_branches: HashMap::new(),
//main_branch_rc: None,
} }
} }
@ -251,6 +255,15 @@ impl Repo {
None None
} }
pub fn main_branch(&self) -> Option<&BranchInfo> {
for (_, branch) in self.branches.iter() {
if branch.branch_type == BranchType::Main {
return Some(branch);
}
}
None
}
pub fn root_branch(&self) -> Option<&BranchInfo> { pub fn root_branch(&self) -> Option<&BranchInfo> {
for (_, branch) in self.branches.iter() { for (_, branch) in self.branches.iter() {
if branch.branch_type == BranchType::Root { if branch.branch_type == BranchType::Root {

@ -28,8 +28,8 @@ use rand::prelude::*;
use threshold_crypto::{SecretKeySet, SecretKeyShare}; use threshold_crypto::{SecretKeySet, SecretKeyShare};
pub struct Store { pub struct Store {
//TODO: store_repo, store_readcap and store_overlay_branch_readcap could be empty, if we have only an outer access to the store. should be Options
store_repo: StoreRepo, store_repo: StoreRepo,
//TODO: store_readcap and store_overlay_branch_readcap could be empty, if we have only an outer access to the store. should be Options
store_readcap: ReadCap, store_readcap: ReadCap,
store_overlay_branch_readcap: ReadCap, store_overlay_branch_readcap: ReadCap,
pub overlay_id: OverlayId, pub overlay_id: OverlayId,
@ -168,6 +168,13 @@ impl Store {
.len() .len()
} }
pub fn has(&self, id: &BlockId) -> Result<(), StorageError> {
self.storage
.read()
.map_err(|_| StorageError::BackendError)?
.has(&self.overlay_id, id)
}
/// returns the (branch_commit, add_branch_commit, branch_info) /// returns the (branch_commit, add_branch_commit, branch_info)
fn create_branch( fn create_branch(
&self, &self,

@ -14,8 +14,9 @@
use crate::errors::NgError; use crate::errors::NgError;
use crate::store::Store; use crate::store::Store;
use crate::utils::{ use crate::utils::{
decode_key, dh_pubkey_array_from_ed_pubkey_slice, dh_pubkey_from_ed_pubkey_slice, decode_key, decode_priv_key, dh_pubkey_array_from_ed_pubkey_slice,
ed_privkey_to_ed_pubkey, from_ed_privkey_to_dh_privkey, random_key, dh_pubkey_from_ed_pubkey_slice, ed_privkey_to_ed_pubkey, from_ed_privkey_to_dh_privkey,
random_key,
}; };
use core::fmt; use core::fmt;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
@ -45,9 +46,8 @@ impl Digest {
impl fmt::Display for Digest { impl fmt::Display for Digest {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { let ser = serde_bare::to_vec(&self).unwrap();
Digest::Blake3Digest32(d) => write!(f, "{}", base64_url::encode(d)), write!(f, "{}", base64_url::encode(&ser))
}
} }
} }
@ -103,9 +103,8 @@ impl SymKey {
impl fmt::Display for SymKey { impl fmt::Display for SymKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { let ser = serde_bare::to_vec(&self).unwrap();
Self::ChaCha20Key(k) => write!(f, "{}", base64_url::encode(k)), write!(f, "{}", base64_url::encode(&ser))
}
} }
} }
@ -144,6 +143,12 @@ impl Default for PubKey {
} }
impl PubKey { impl PubKey {
pub fn to_dh(self) -> X25519PubKey {
match self {
Self::X25519PubKey(x) => x,
_ => panic!("cannot call to_dh on an Edward key"),
}
}
pub fn slice(&self) -> &[u8; 32] { pub fn slice(&self) -> &[u8; 32] {
match self { match self {
PubKey::Ed25519PubKey(o) | PubKey::X25519PubKey(o) => o, PubKey::Ed25519PubKey(o) | PubKey::X25519PubKey(o) => o,
@ -172,26 +177,23 @@ impl PubKey {
} }
pub fn to_hash_string(&self) -> String { pub fn to_hash_string(&self) -> String {
let hash = blake3::hash(self.slice()); let ser = serde_bare::to_vec(&self).unwrap();
let hash = blake3::hash(&ser);
base64_url::encode(&hash.as_bytes()) base64_url::encode(&hash.as_bytes())
} }
} }
impl fmt::Display for PubKey { impl fmt::Display for PubKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { let ser = serde_bare::to_vec(&self).unwrap();
PubKey::Ed25519PubKey(d) | PubKey::X25519PubKey(d) => { write!(f, "{}", base64_url::encode(&ser))
write!(f, "{}", base64_url::encode(d))
}
}
} }
} }
impl TryFrom<&str> for PubKey { impl TryFrom<&str> for PubKey {
type Error = NgError; type Error = NgError;
fn try_from(str: &str) -> Result<Self, NgError> { fn try_from(str: &str) -> Result<Self, NgError> {
let key = decode_key(str)?; decode_key(str)
Ok(PubKey::Ed25519PubKey(key))
} }
} }
@ -260,23 +262,14 @@ impl TryFrom<&[u8]> for PrivKey {
impl TryFrom<&str> for PrivKey { impl TryFrom<&str> for PrivKey {
type Error = NgError; type Error = NgError;
fn try_from(str: &str) -> Result<Self, NgError> { fn try_from(str: &str) -> Result<Self, NgError> {
let key = decode_key(str)?; decode_priv_key(str)
Ok(PrivKey::Ed25519PrivKey(key))
} }
} }
impl fmt::Display for PrivKey { impl fmt::Display for PrivKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { let ser = serde_bare::to_vec(&self).unwrap();
Self::Ed25519PrivKey(ed) => { write!(f, "{}", base64_url::encode(&ser))
//let priv_key_ser = serde_bare::to_vec(ed).unwrap();
let priv_key_encoded = base64_url::encode(ed);
write!(f, "{}", priv_key_encoded)
}
_ => {
unimplemented!();
}
}
} }
} }
@ -440,6 +433,10 @@ impl BlockRef {
pub fn from_id_key(id: BlockId, key: BlockKey) -> Self { pub fn from_id_key(id: BlockId, key: BlockKey) -> Self {
BlockRef { id, key } BlockRef { id, key }
} }
pub fn nuri(&self) -> String {
format!(":j:{}:k:{}", self.id, self.key)
}
} }
impl From<BlockRef> for (BlockId, BlockKey) { impl From<BlockRef> for (BlockId, BlockKey) {
@ -1849,6 +1846,25 @@ pub enum AddFile {
V0(AddFileV0), V0(AddFileV0),
} }
impl fmt::Display for AddFile {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::V0(v0) => {
writeln!(f, "V0")?;
writeln!(f, "name: {:?}", v0.name)
}
}
}
}
impl AddFile {
pub fn name(&self) -> &Option<String> {
match self {
Self::V0(v0) => &v0.name,
}
}
}
/// Remove a file from the branch, using ORset CRDT logic /// Remove a file from the branch, using ORset CRDT logic
/// ///
/// (removes the ref counting. not necessarily the file itself) /// (removes the ref counting. not necessarily the file itself)

@ -53,9 +53,14 @@ pub fn from_ed_privkey_to_dh_privkey(private: &PrivKey) -> PrivKey {
} }
/// don't forget to zeroize the string later on /// don't forget to zeroize the string later on
pub fn decode_key(key_string: &str) -> Result<[u8; 32], NgError> { pub fn decode_key(key_string: &str) -> Result<PubKey, NgError> {
let vec = base64_url::decode(key_string).map_err(|_| NgError::InvalidKey)?; let vec = base64_url::decode(key_string).map_err(|_| NgError::InvalidKey)?;
Ok(*slice_as_array!(&vec, [u8; 32]).ok_or(NgError::InvalidKey)?) Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
}
pub fn decode_priv_key(key_string: &str) -> Result<PrivKey, NgError> {
let vec = base64_url::decode(key_string).map_err(|_| NgError::InvalidKey)?;
Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
} }
pub fn ed_privkey_to_ed_pubkey(privkey: &PrivKey) -> PubKey { pub fn ed_privkey_to_ed_pubkey(privkey: &PrivKey) -> PubKey {

@ -23,6 +23,7 @@ export function client_details2(obj,version) {
export function session_save(key,value) { export function session_save(key,value) {
try { try {
sessionStorage.setItem(key, value); sessionStorage.setItem(key, value);
} catch(e) { } catch(e) {
@ -81,6 +82,16 @@ export function local_save(key,value) {
} }
} }
export function storage_clear() {
try {
localStorage.clear();
sessionStorage.clear();
} catch(e) {
console.error(e);
}
}
export function local_get(key) { export function local_get(key) {
try { try {

@ -150,6 +150,10 @@ module.exports.session_save = function(key,value) {
} }
module.exports.storage_clear = function() {
}
module.exports.session_get = function(key) { module.exports.session_get = function(key) {
} }

@ -31,6 +31,7 @@ use ng_wallet::types::*;
use ng_wallet::*; use ng_wallet::*;
use nextgraph::local_broker::*; use nextgraph::local_broker::*;
use nextgraph::verifier::types::*;
use ng_net::WS_PORT; use ng_net::WS_PORT;
use ng_repo::errors::NgError; use ng_repo::errors::NgError;
use ng_repo::log::*; use ng_repo::log::*;
@ -228,6 +229,7 @@ extern "C" {
fn local_save(key: String, value: String) -> Option<String>; fn local_save(key: String, value: String) -> Option<String>;
fn local_get(key: String) -> Option<String>; fn local_get(key: String) -> Option<String>;
fn is_browser() -> bool; fn is_browser() -> bool;
fn storage_clear();
} }
#[cfg(wasmpack_target = "nodejs")] #[cfg(wasmpack_target = "nodejs")]
@ -239,6 +241,7 @@ extern "C" {
fn local_save(key: String, value: String) -> Option<String>; fn local_save(key: String, value: String) -> Option<String>;
fn local_get(key: String) -> Option<String>; fn local_get(key: String) -> Option<String>;
fn is_browser() -> bool; fn is_browser() -> bool;
fn storage_clear();
} }
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
@ -273,6 +276,11 @@ fn session_del(key: String) -> Result<(), NgError> {
Ok(()) Ok(())
} }
#[cfg(target_arch = "wasm32")]
fn clear() {
storage_clear();
}
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
static INIT_LOCAL_BROKER: Lazy<Box<ConfigInitFn>> = Lazy::new(|| { static INIT_LOCAL_BROKER: Lazy<Box<ConfigInitFn>> = Lazy::new(|| {
Box::new(|| { Box::new(|| {
@ -282,6 +290,7 @@ static INIT_LOCAL_BROKER: Lazy<Box<ConfigInitFn>> = Lazy::new(|| {
session_read: Arc::new(Box::new(session_read)), session_read: Arc::new(Box::new(session_read)),
session_write: Arc::new(Box::new(session_write)), session_write: Arc::new(Box::new(session_write)),
session_del: Arc::new(Box::new(session_del)), session_del: Arc::new(Box::new(session_del)),
clear: Arc::new(Box::new(clear)),
is_browser: is_browser(), is_browser: is_browser(),
}) })
}) })
@ -482,53 +491,35 @@ pub async fn test() {
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn doc_get_file_from_store_with_object_ref( pub async fn app_request_stream(
nuri: String, js_session_id: JsValue,
obj_ref_js: JsValue, js_request: JsValue,
) -> Result<JsValue, JsValue> { callback: &js_sys::Function,
let obj_ref = serde_wasm_bindgen::from_value::<ObjectRef>(obj_ref_js).unwrap(); ) -> Result<JsValue, String> {
let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(js_session_id)
log_debug!("doc_get_file {} {:?}", nuri, obj_ref.id,); .map_err(|_| "Deserialization error of session_id".to_string())?;
// let vec: Vec<u8> = vec![2; 10];
// let view = unsafe { Uint8Array::view(&vec) };
// let x = JsValue::from(Uint8Array::new(view.as_ref()));
// let ret = ObjectContent::File(File::V0(FileV0 { let mut request = serde_wasm_bindgen::from_value::<AppRequest>(js_request)
// content_type: "text/plain".to_string(), .map_err(|_| "Deserialization error of AppRequest".to_string())?;
// metadata: vec![],
// content: vec![45; 20],
// }));
let obj_content = BROKER
.write()
.await
.get_object_from_store_with_object_ref(nuri, obj_ref)
.await
.map_err(|e| e.to_string())?;
Ok(serde_wasm_bindgen::to_value(&obj_content).unwrap())
}
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub async fn doc_sync_branch(anuri: String, callback: &js_sys::Function) -> JsValue {
let vec: Vec<u8> = vec![2; 10]; let vec: Vec<u8> = vec![2; 10];
let view = unsafe { Uint8Array::view(&vec) }; let view = unsafe { Uint8Array::view(&vec) };
let x = JsValue::from(Uint8Array::new(view.as_ref())); let x = JsValue::from(Uint8Array::new(view.as_ref()));
let mut reader; let mut reader;
let mut sender; let mut cancel;
{ {
(reader, sender) = BROKER.write().await.doc_sync_branch(anuri.clone()).await; (reader, cancel) = nextgraph::local_broker::app_request_stream(session_id, request)
.await
.map_err(|e: NgError| e.to_string())?;
} }
async fn inner_task( async fn inner_task(
mut reader: Receiver<Commit>, mut reader: Receiver<AppResponse>,
anuri: String,
callback: js_sys::Function, callback: js_sys::Function,
) -> ResultSend<()> { ) -> ResultSend<()> {
while let Some(commit) = reader.next().await { while let Some(app_response) = reader.next().await {
let xx = serde_wasm_bindgen::to_value(&commit).unwrap(); let xx = serde_wasm_bindgen::to_value(&app_response).unwrap();
//let xx = JsValue::from(json!(commit).to_string()); //let xx = JsValue::from(json!(commit).to_string());
//let _ = callback.call1(&this, &xx); //let _ = callback.call1(&this, &xx);
let this = JsValue::null(); let this = JsValue::null();
@ -545,18 +536,86 @@ pub async fn doc_sync_branch(anuri: String, callback: &js_sys::Function) -> JsVa
Ok(()) Ok(())
} }
spawn_and_log_error(inner_task(reader, anuri, callback.clone())); spawn_and_log_error(inner_task(reader, callback.clone()));
let cb = Closure::once(move || { let cb = Closure::once(move || {
log_debug!("close channel"); log_info!("cancelling");
sender.close_channel() //sender.close_channel()
cancel();
}); });
//Closure::wrap(Box::new(move |sender| sender.close_channel()) as Box<FnMut(Sender<Commit>)>); //Closure::wrap(Box::new(move |sender| sender.close_channel()) as Box<FnMut(Sender<Commit>)>);
let ret = cb.as_ref().clone(); let ret = cb.as_ref().clone();
cb.forget(); cb.forget();
return ret; Ok(ret)
}
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub async fn app_request(js_session_id: JsValue, js_request: JsValue) -> Result<JsValue, String> {
    // Decode both JS-side arguments up front; each failure maps to a fixed,
    // stable error message for the JS caller.
    let session_id = serde_wasm_bindgen::from_value::<u64>(js_session_id)
        .map_err(|_| "Deserialization error of session_id".to_string())?;
    let app_req = serde_wasm_bindgen::from_value::<AppRequest>(js_request)
        .map_err(|_| "Deserialization error of AppRequest".to_string())?;
    // Forward the request to the local broker and surface any NgError as its
    // string form; the successful response is handed back to JS as a JsValue.
    match nextgraph::local_broker::app_request(session_id, app_req).await {
        Ok(response) => Ok(serde_wasm_bindgen::to_value(&response).unwrap()),
        Err(e) => Err(e.to_string()),
    }
}
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub async fn upload_chunk(
    js_session_id: JsValue,
    js_upload_id: JsValue,
    js_chunk: JsValue,
    js_nuri: JsValue,
) -> Result<JsValue, String> {
    log_debug!("upload_chunk {:?}", js_nuri);
    // Decode every JS argument; each failure yields a fixed error message.
    let session_id = serde_wasm_bindgen::from_value::<u64>(js_session_id)
        .map_err(|_| "Deserialization error of session_id".to_string())?;
    let upload_id = serde_wasm_bindgen::from_value::<u32>(js_upload_id)
        .map_err(|_| "Deserialization error of upload_id".to_string())?;
    let chunk = serde_wasm_bindgen::from_value::<serde_bytes::ByteBuf>(js_chunk)
        .map_err(|_| "Deserialization error of chunk".to_string())?;
    let nuri = serde_wasm_bindgen::from_value::<NuriV0>(js_nuri)
        .map_err(|_| "Deserialization error of nuri".to_string())?;
    // Wrap the raw chunk in a FilePut request carrying the upload id; an empty
    // chunk signals the end of the upload on the verifier side.
    let request = AppRequest::V0(AppRequestV0 {
        command: AppRequestCommandV0::FilePut,
        nuri,
        payload: Some(AppRequestPayload::V0(
            AppRequestPayloadV0::RandomAccessFilePutChunk((upload_id, chunk)),
        )),
    });
    match nextgraph::local_broker::app_request(session_id, request).await {
        Ok(response) => Ok(serde_wasm_bindgen::to_value(&response).unwrap()),
        Err(e) => Err(e.to_string()),
    }
}
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub async fn doc_fetch_private_subscribe() -> Result<JsValue, String> {
    // Build (but do not send) a Fetch/Subscribe AppRequest targeting the
    // user's private store, and return it to JS in serialized form.
    let command = AppRequestCommandV0::Fetch(AppFetchContentV0::get_or_subscribe(true));
    let request = AppRequest::V0(AppRequestV0 {
        command,
        nuri: NuriV0::new_private_store_target(),
        payload: None,
    });
    // Serializing our own request type cannot fail here.
    Ok(serde_wasm_bindgen::to_value(&request).unwrap())
}
// #[cfg(target_arch = "wasm32")]
// #[wasm_bindgen]
// pub async fn get_readcap() -> Result<JsValue, String> {
// let request = ObjectRef::nil();
// Ok(serde_wasm_bindgen::to_value(&request).unwrap())
// }
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn disconnections_subscribe(callback: &js_sys::Function) -> Result<JsValue, JsValue> { pub async fn disconnections_subscribe(callback: &js_sys::Function) -> Result<JsValue, JsValue> {

@ -117,6 +117,15 @@ impl BlockStorage for RocksDbBlockStorage {
Ok(block) Ok(block)
} }
fn has(&self, overlay: &OverlayId, id: &BlockId) -> Result<(), StorageError> {
let _block_ser = self
.db
.get(Self::compute_key(overlay, id))
.map_err(|_e| StorageError::BackendError)?
.ok_or(StorageError::NotFound)?;
Ok(())
}
/// Save a block to the storage. /// Save a block to the storage.
fn put(&self, overlay: &OverlayId, block: &Block, lazy: bool) -> Result<BlockId, StorageError> { fn put(&self, overlay: &OverlayId, block: &Block, lazy: bool) -> Result<BlockId, StorageError> {
// TODO? return an error if already present in blockstorage and !lazy ? // TODO? return an error if already present in blockstorage and !lazy ?

@ -35,6 +35,7 @@ rand = { version = "0.7", features = ["getrandom"] }
web-time = "0.2.0" web-time = "0.2.0"
either = "1.8.1" either = "1.8.1"
futures = "0.3.24" futures = "0.3.24"
async-trait = "0.1.64"
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
ng-storage-rocksdb = { path = "../ng-storage-rocksdb", version = "0.1.0" } ng-storage-rocksdb = { path = "../ng-storage-rocksdb", version = "0.1.0" }

@ -9,6 +9,7 @@
//! Verifiers for each Commit type //! Verifiers for each Commit type
use crate::types::*;
use crate::verifier::Verifier; use crate::verifier::Verifier;
use ng_repo::errors::VerifierError; use ng_repo::errors::VerifierError;
use ng_repo::log::*; use ng_repo::log::*;
@ -19,8 +20,9 @@ use ng_repo::types::*;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
#[async_trait::async_trait]
pub trait CommitVerifier { pub trait CommitVerifier {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -55,8 +57,9 @@ fn list_dep_chain_until(
Ok(res) Ok(res)
} }
#[async_trait::async_trait]
impl CommitVerifier for RootBranch { impl CommitVerifier for RootBranch {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -126,9 +129,9 @@ impl CommitVerifier for RootBranch {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for Branch { impl CommitVerifier for Branch {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -181,9 +184,9 @@ impl CommitVerifier for Branch {
} }
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for SyncSignature { impl CommitVerifier for SyncSignature {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -212,16 +215,18 @@ impl CommitVerifier for SyncSignature {
} }
let commits = list_dep_chain_until(deps[0].clone(), &ack.id, &store)?; let commits = list_dep_chain_until(deps[0].clone(), &ack.id, &store)?;
for commit in commits { for commit in commits {
verifier.verify_commit(commit, branch_id, repo_id, Arc::clone(&store))?; verifier
.verify_commit(&commit, branch_id, repo_id, Arc::clone(&store))
.await?;
} }
} }
} }
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddBranch { impl CommitVerifier for AddBranch {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -258,9 +263,9 @@ impl CommitVerifier for AddBranch {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for Repository { impl CommitVerifier for Repository {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -272,9 +277,9 @@ impl CommitVerifier for Repository {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for StoreUpdate { impl CommitVerifier for StoreUpdate {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -285,9 +290,9 @@ impl CommitVerifier for StoreUpdate {
verifier.new_store_from_update(self) verifier.new_store_from_update(self)
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddSignerCap { impl CommitVerifier for AddSignerCap {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -300,9 +305,9 @@ impl CommitVerifier for AddSignerCap {
} }
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddMember { impl CommitVerifier for AddMember {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -313,9 +318,9 @@ impl CommitVerifier for AddMember {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemoveMember { impl CommitVerifier for RemoveMember {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -326,9 +331,9 @@ impl CommitVerifier for RemoveMember {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddPermission { impl CommitVerifier for AddPermission {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -339,9 +344,9 @@ impl CommitVerifier for AddPermission {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemovePermission { impl CommitVerifier for RemovePermission {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -352,9 +357,9 @@ impl CommitVerifier for RemovePermission {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemoveBranch { impl CommitVerifier for RemoveBranch {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -365,9 +370,9 @@ impl CommitVerifier for RemoveBranch {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddName { impl CommitVerifier for AddName {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -378,9 +383,9 @@ impl CommitVerifier for AddName {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemoveName { impl CommitVerifier for RemoveName {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -391,9 +396,9 @@ impl CommitVerifier for RemoveName {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for () { impl CommitVerifier for () {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -404,9 +409,9 @@ impl CommitVerifier for () {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for Snapshot { impl CommitVerifier for Snapshot {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -417,9 +422,9 @@ impl CommitVerifier for Snapshot {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddFile { impl CommitVerifier for AddFile {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -427,12 +432,33 @@ impl CommitVerifier for AddFile {
repo_id: &RepoId, repo_id: &RepoId,
store: Arc<Store>, store: Arc<Store>,
) -> Result<(), VerifierError> { ) -> Result<(), VerifierError> {
let files = commit.files();
if files.len() == 1 {
let refe = commit.files().remove(0);
let filename = FileName {
heads: vec![], //TODO: put the current heads
name: self.name().clone(),
nuri: refe.nuri(),
reference: refe,
};
verifier
.user_storage
.as_ref()
.unwrap()
.branch_add_file(*branch_id, filename.clone())?;
verifier
.push_app_response(branch_id, AppResponse::V0(AppResponseV0::File(filename)))
.await;
Ok(()) Ok(())
} else {
Err(VerifierError::InvalidCommit)
}
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemoveFile { impl CommitVerifier for RemoveFile {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -443,9 +469,9 @@ impl CommitVerifier for RemoveFile {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for Compact { impl CommitVerifier for Compact {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -456,9 +482,9 @@ impl CommitVerifier for Compact {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AsyncSignature { impl CommitVerifier for AsyncSignature {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -469,9 +495,9 @@ impl CommitVerifier for AsyncSignature {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RootCapRefresh { impl CommitVerifier for RootCapRefresh {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -482,9 +508,9 @@ impl CommitVerifier for RootCapRefresh {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for BranchCapRefresh { impl CommitVerifier for BranchCapRefresh {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -495,9 +521,9 @@ impl CommitVerifier for BranchCapRefresh {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddRepo { impl CommitVerifier for AddRepo {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -508,9 +534,9 @@ impl CommitVerifier for AddRepo {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemoveRepo { impl CommitVerifier for RemoveRepo {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -521,9 +547,9 @@ impl CommitVerifier for RemoveRepo {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for AddLink { impl CommitVerifier for AddLink {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -534,9 +560,9 @@ impl CommitVerifier for AddLink {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemoveLink { impl CommitVerifier for RemoveLink {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -547,9 +573,9 @@ impl CommitVerifier for RemoveLink {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for RemoveSignerCap { impl CommitVerifier for RemoveSignerCap {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,
@ -560,9 +586,9 @@ impl CommitVerifier for RemoveSignerCap {
Ok(()) Ok(())
} }
} }
#[async_trait::async_trait]
impl CommitVerifier for WalletUpdate { impl CommitVerifier for WalletUpdate {
fn verify( async fn verify(
&self, &self,
commit: &Commit, commit: &Commit,
verifier: &mut Verifier, verifier: &mut Verifier,

@ -8,5 +8,7 @@ pub mod site;
pub mod commits; pub mod commits;
pub mod request_processor;
#[cfg(not(target_family = "wasm"))] #[cfg(not(target_family = "wasm"))]
pub mod rocksdb_user_storage; pub mod rocksdb_user_storage;

@ -0,0 +1,220 @@
// Copyright (c) 2022-2024 Niko Bonnieure, Par le Peuple, NextGraph.org developers
// All rights reserved.
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
//! Processor for each type of AppRequest
use futures::channel::mpsc;
use futures::SinkExt;
use futures::StreamExt;
use ng_net::utils::ResultSend;
use std::sync::Arc;
use crate::types::*;
use crate::verifier::*;
use ng_net::utils::{spawn_and_log_error, Receiver, Sender};
use ng_repo::errors::*;
use ng_repo::file::{RandomAccessFile, ReadFile};
use ng_repo::types::BranchId;
use ng_repo::types::*;
use ng_repo::log::*;
use ng_repo::types::StoreRepo;
impl AppRequestCommandV0 {
    /// Processes a command whose response is a stream of `AppResponse`s.
    ///
    /// Returns the receiving end of the response channel together with a
    /// `CancelFn` the caller invokes to terminate the stream.
    /// Currently handles `Fetch(Subscribe)` (branch subscription) and
    /// `FileGet` (streams file meta + binary chunks); every other command
    /// hits `unimplemented!()`.
    ///
    /// NOTE(review): `payload` is not read by any implemented arm here —
    /// presumably reserved for future commands; confirm.
    pub(crate) async fn process_stream(
        &self,
        verifier: &mut Verifier,
        nuri: &NuriV0,
        payload: &Option<AppRequestPayload>,
    ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
        match self {
            Self::Fetch(fetch) => match fetch {
                AppFetchContentV0::Subscribe => {
                    // Open the target branch (read-only) and hand back the
                    // subscription's receiver/cancel pair as-is.
                    let (_, branch_id, _) =
                        Self::open_for_target(verifier, &nuri.target, false).await?;
                    Ok(verifier.create_branch_subscription(branch_id).await?)
                }
                _ => unimplemented!(),
            },
            Self::FileGet => {
                // FileGet needs at least one access entry and an object id.
                if nuri.access.len() < 1 || nuri.object.is_none() {
                    return Err(NgError::InvalidArgument);
                }
                let (repo_id, _, store_repo) = Self::resolve_target(verifier, &nuri.target)?;
                // Only the first access entry is considered, and it must be a Key.
                let access = nuri.access.get(0).unwrap();
                if let NgAccessV0::Key(key) = access {
                    let repo = verifier.get_repo(&repo_id, &store_repo)?;
                    let obj_id = nuri.object.unwrap();
                    // If some blocks of the object are missing locally, fetch
                    // them and persist each into the repo's store first.
                    if let Some(mut stream) = verifier
                        .fetch_blocks_if_needed(&obj_id, &repo_id, &store_repo)
                        .await?
                    {
                        // TODO: start opening the file and running the sending_loop after we received 10 (3 mandatory and 7 depths max) blocks.
                        // for files below 10MB we wont see a difference, but for big files, we can start sending out some AppResponse earlier.
                        while let Some(block) = stream.next().await {
                            repo.store.put(&block)?;
                        }
                    }
                    let file =
                        RandomAccessFile::open(obj_id, key.clone(), Arc::clone(&repo.store))?;
                    // First message on the channel is the file's metadata
                    // (content type + total size), sent before the loop starts.
                    let (mut tx, rx) = mpsc::unbounded::<AppResponse>();
                    tx.send(AppResponse::V0(AppResponseV0::FileMeta(FileMetaV0 {
                        content_type: file.meta().content_type().clone(),
                        size: file.meta().total_size(),
                    })))
                    .await
                    .map_err(|_| NgError::InternalError)?;
                    // Background task that streams the file content as
                    // FileBinary chunks until read() errors (end of file is
                    // signalled by one final empty FileBinary) or the receiver
                    // goes away.
                    async fn sending_loop(
                        file: Arc<RandomAccessFile>,
                        mut tx: Sender<AppResponse>,
                    ) -> ResultSend<()> {
                        let mut pos = 0;
                        loop {
                            // Chunk size just under 1 MiB (1048564 bytes) —
                            // presumably sized to fit a store value; confirm.
                            let res = file.read(pos, 1048564);
                            if res.is_err() {
                                //log_info!("ERR={:?}", res.unwrap_err());
                                // Empty payload marks end-of-stream for the
                                // consumer; the send error (if any) is ignored.
                                let _ = tx
                                    .send(AppResponse::V0(AppResponseV0::FileBinary(vec![])))
                                    .await;
                                break;
                            }
                            let res = res.unwrap();
                            //log_info!("reading={} {}", pos, res.len());
                            pos += res.len();
                            // Receiver dropped -> stop reading.
                            if let Err(_) = tx
                                .send(AppResponse::V0(AppResponseV0::FileBinary(res)))
                                .await
                            {
                                break;
                            }
                        }
                        Ok(())
                    }
                    spawn_and_log_error(sending_loop(Arc::new(file), tx.clone()));
                    // Cancelling closes the channel, which makes the
                    // sending_loop's next send fail and exit.
                    let fnonce = Box::new(move || {
                        tx.close_channel();
                    });
                    Ok((rx, fnonce))
                } else {
                    return Err(NgError::InvalidArgument);
                }
            }
            _ => unimplemented!(),
        }
    }

    /// Maps a Nuri target to the concrete (repo, main-branch, store) triple.
    ///
    /// Only `PrivateStore` is implemented: it uses the verifier config's
    /// `private_store_id` (panics via `unwrap` if unset) and the repo's main
    /// branch. All other targets are `unimplemented!()`.
    fn resolve_target(
        verifier: &mut Verifier,
        target: &NuriTargetV0,
    ) -> Result<(RepoId, BranchId, StoreRepo), NgError> {
        match target {
            NuriTargetV0::PrivateStore => {
                let repo_id = verifier.config.private_store_id.unwrap();
                let (branch, store_repo) = {
                    let repo = verifier.repos.get(&repo_id).ok_or(NgError::RepoNotFound)?;
                    let branch = repo.main_branch().ok_or(NgError::BranchNotFound)?;
                    (branch.id, repo.store.get_store_repo().clone())
                };
                Ok((repo_id, branch, store_repo))
            }
            _ => unimplemented!(),
        }
    }

    /// Resolves the target like `resolve_target`, then opens the branch
    /// (optionally as publisher) before returning the same triple.
    async fn open_for_target(
        verifier: &mut Verifier,
        target: &NuriTargetV0,
        as_publisher: bool,
    ) -> Result<(RepoId, BranchId, StoreRepo), NgError> {
        let (repo_id, branch, store_repo) = Self::resolve_target(verifier, target)?;
        verifier
            .open_branch(&repo_id, &branch, as_publisher)
            .await?;
        Ok((repo_id, branch, store_repo))
    }

    /// Processes a command with a single (non-streaming) `AppResponse`.
    ///
    /// Only `FilePut` is implemented, dispatching on the payload variant:
    /// - `AddFile`: pushes any missing blocks of an already-stored file object
    ///   to the broker, then commits an `AddFile` on the target branch.
    /// - `SmallFilePut`: accepted but currently a no-op (falls through to Ok).
    /// - `RandomAccessFilePut`: starts an upload, returns `FileUploading(id)`.
    /// - `RandomAccessFilePutChunk`: appends a chunk to upload `id`; an empty
    ///   chunk finishes the upload and returns `FileUploaded(reference)`.
    pub(crate) async fn process(
        &self,
        verifier: &mut Verifier,
        nuri: NuriV0,
        payload: Option<AppRequestPayload>,
    ) -> Result<AppResponse, NgError> {
        match self {
            Self::FilePut => match payload {
                None => return Err(NgError::InvalidPayload),
                Some(AppRequestPayload::V0(v0)) => match v0 {
                    AppRequestPayloadV0::AddFile(add) => {
                        // Open as publisher: we are going to commit.
                        let (repo_id, branch, store_repo) =
                            Self::open_for_target(verifier, &nuri.target, true).await?;
                        //log_info!("GOT ADD FILE {:?}", add);
                        let repo = verifier.get_repo(&repo_id, &store_repo)?;
                        // check that the referenced object exists locally.
                        repo.store.has(&add.object.id)?;
                        // we send all the blocks to the broker.
                        let file = RandomAccessFile::open(
                            add.object.id.clone(),
                            add.object.key.clone(),
                            Arc::clone(&repo.store),
                        )?;
                        let blocks = file.get_all_blocks_ids()?;
                        // Ask the broker which blocks it lacks, and push only
                        // those.
                        let found = verifier.has_blocks(blocks, repo).await?;
                        for block_id in found.missing() {
                            let block = repo.store.get(block_id)?;
                            verifier.put_blocks(vec![block], repo).await?;
                        }
                        // Record the file on the branch via an AddFile commit
                        // that references the file object.
                        let add_file_commit_body = CommitBodyV0::AddFile(AddFile::V0(AddFileV0 {
                            name: add.filename,
                            metadata: vec![],
                        }));
                        verifier
                            .new_commit(
                                add_file_commit_body,
                                &repo_id,
                                &branch,
                                &store_repo,
                                &vec![],
                                vec![],
                                vec![add.object],
                            )
                            .await?;
                    }
                    AppRequestPayloadV0::SmallFilePut(small) => {}
                    AppRequestPayloadV0::RandomAccessFilePut(content_type) => {
                        let (repo_id, _, store_repo) =
                            Self::resolve_target(verifier, &nuri.target)?;
                        let repo = verifier.get_repo(&repo_id, &store_repo)?;
                        let id = verifier.start_upload(content_type, Arc::clone(&repo.store));
                        return Ok(AppResponse::V0(AppResponseV0::FileUploading(id)));
                    }
                    AppRequestPayloadV0::RandomAccessFilePutChunk((id, chunk)) => {
                        if chunk.len() > 0 {
                            verifier.continue_upload(id, &chunk)?;
                        } else {
                            // Empty chunk terminates the upload.
                            let reference = verifier.finish_upload(id)?;
                            return Ok(AppResponse::V0(AppResponseV0::FileUploaded(reference)));
                        }
                    }
                    _ => return Err(NgError::InvalidPayload),
                },
            },
            _ => unimplemented!(),
        }
        // Default success response for arms that did not return earlier.
        Ok(AppResponse::V0(AppResponseV0::Ok))
    }
}

@ -49,11 +49,11 @@ impl UserStorage for RocksDbUserStorage {
fn load_store( fn load_store(
&self, &self,
repo_store: &StoreRepo, store_repo: &StoreRepo,
block_storage: Arc<RwLock<dyn BlockStorage + Send + Sync>>, block_storage: Arc<RwLock<dyn BlockStorage + Send + Sync>>,
) -> Result<Repo, StorageError> { ) -> Result<Repo, StorageError> {
RepoStorage::load( RepoStorage::load(
repo_store.repo_id(), store_repo.repo_id(),
Right(block_storage), Right(block_storage),
&self.user_storage, &self.user_storage,
) )
@ -75,4 +75,11 @@ impl UserStorage for RocksDbUserStorage {
fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError> { fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError> {
RepoStorage::update_signer_cap(signer_cap, &self.user_storage) RepoStorage::update_signer_cap(signer_cap, &self.user_storage)
} }
fn branch_add_file(&self, branch: BranchId, file: FileName) -> Result<(), StorageError> {
todo!();
}
fn branch_get_all_files(&self, branch: &BranchId) -> Result<Vec<FileName>, StorageError> {
todo!();
}
} }

@ -50,6 +50,13 @@ impl SiteV0 {
} }
} }
pub fn get_individual_site_private_store_read_cap(&self) -> Option<ReadCap> {
match &self.site_type {
SiteType::Individual((_, read_cap)) => Some(read_cap.clone()),
_ => None,
}
}
fn site_store_to_store_repo(site_store: &SiteStore) -> StoreRepo { fn site_store_to_store_repo(site_store: &SiteStore) -> StoreRepo {
StoreRepo::V0(match site_store.store_type { StoreRepo::V0(match site_store.store_type {
SiteStoreType::Public => StoreRepoV0::PublicStore(site_store.id), SiteStoreType::Public => StoreRepoV0::PublicStore(site_store.id),
@ -237,7 +244,12 @@ impl SiteV0 {
user_priv_key: PrivKey, user_priv_key: PrivKey,
verifier: &mut Verifier, verifier: &mut Verifier,
) -> Result<Self, NgError> { ) -> Result<Self, NgError> {
Self::create_individual_(user_priv_key, verifier, SiteName::Personal).await let site = Self::create_individual_(user_priv_key, verifier, SiteName::Personal).await?;
verifier.config.private_store_read_cap = site.get_individual_site_private_store_read_cap();
verifier.config.private_store_id = Some(site.private.id);
verifier.config.protected_store_id = Some(site.protected.id);
verifier.config.public_store_id = Some(site.public.id);
Ok(site)
} }
pub async fn create_org(name: String) -> Result<Self, NgError> { pub async fn create_org(name: String) -> Result<Self, NgError> {

@ -150,9 +150,11 @@ pub struct VerifierConfig {
pub user_priv_key: PrivKey, pub user_priv_key: PrivKey,
pub private_store_read_cap: Option<ObjectRef>, pub private_store_read_cap: Option<ObjectRef>,
pub private_store_id: Option<RepoId>, pub private_store_id: Option<RepoId>,
pub public_store_id: Option<RepoId>,
pub protected_store_id: Option<RepoId>,
} }
pub type CancelFn = Box<dyn FnOnce()>; pub type CancelFn = Box<dyn FnOnce() + Sync + Send>;
// //
// APP PROTOCOL (between APP and VERIFIER) // APP PROTOCOL (between APP and VERIFIER)
@ -160,29 +162,97 @@ pub type CancelFn = Box<dyn FnOnce()>;
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub enum AppFetchContentV0 { pub enum AppFetchContentV0 {
Get, // more to be detailed Get, // does not subscribe. more to be detailed
Subscribe, Subscribe, // more to be detailed
Update, Update,
//Invoke,
ReadQuery, // more to be detailed ReadQuery, // more to be detailed
WriteQuery, // more to be detailed WriteQuery, // more to be detailed
//Invoke, }
impl AppFetchContentV0 {
pub fn get_or_subscribe(subscribe: bool) -> Self {
if subscribe {
AppFetchContentV0::Subscribe
} else {
AppFetchContentV0::Get
}
}
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AppFetchV0 { pub enum NgAccessV0 {
pub doc_id: RepoId, ReadCap(ReadCap),
Token(Digest),
#[serde(with = "serde_bytes")]
ExtRequest(Vec<u8>),
Key(BlockKey),
Inbox(Digest),
}
pub branch_id: Option<BranchId>, #[derive(Clone, Debug, Serialize, Deserialize)]
pub enum TargetBranchV0 {
Chat,
Stream,
Context,
Ontology,
BranchId(BranchId),
Named(String), // branch or commit
Commits(Vec<ObjectId>), // only possible if access to their branch is given. must belong to the same branch.
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NuriTargetV0 {
UserSite, // targets the whole data set of the user
pub store: StoreRepo, PublicStore,
ProtectedStore,
PrivateStore,
AllDialogs,
Dialog(String), // shortname of a Dialog
AllGroups,
Group(String), // shortname of a Group
pub content: AppFetchContentV0, Repo(RepoId),
Identity(UserId),
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub enum AppRequestContentV0 { pub struct NuriV0 {
FetchNuri, pub target: NuriTargetV0,
Fetch(AppFetchV0), pub entire_store: bool, // If it is a store, will (try to) include all the docs belonging to the store
pub object: Option<ObjectId>, // used only for FileGet. // cannot be used for queries. only to download an object (file,commit..)
pub branch: Option<TargetBranchV0>, // if None, the main branch is chosen
pub overlay: Option<OverlayLink>,
pub access: Vec<NgAccessV0>,
pub topic: Option<TopicId>,
pub locator: Vec<PeerAdvert>,
}
impl NuriV0 {
pub fn new_private_store_target() -> Self {
Self {
target: NuriTargetV0::PrivateStore,
entire_store: false,
object: None,
branch: None,
overlay: None,
access: vec![],
topic: None,
locator: vec![],
}
}
pub fn new(from: String) -> Self {
unimplemented!();
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum AppRequestCommandV0 {
Fetch(AppFetchContentV0),
Pin, Pin,
UnPin, UnPin,
Delete, Delete,
@ -193,9 +263,9 @@ pub enum AppRequestContentV0 {
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AppRequestV0 { pub struct AppRequestV0 {
pub nuri: Option<String>, pub command: AppRequestCommandV0,
pub content: AppRequestContentV0, pub nuri: NuriV0,
pub payload: Option<AppRequestPayload>, pub payload: Option<AppRequestPayload>,
} }
@ -237,6 +307,12 @@ pub struct DocUpdate {
discrete: Option<DiscreteUpdate>, discrete: Option<DiscreteUpdate>,
} }
/// Payload for attaching an already-stored file object to a document.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DocAddFile {
    // Optional display name recorded with the file entry.
    pub filename: Option<String>,
    // Reference (id + key) of the file object to attach.
    pub object: ObjectRef,
}
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DocCreate { pub struct DocCreate {
store: StoreRepo, store: StoreRepo,
@ -254,11 +330,13 @@ pub enum AppRequestPayloadV0 {
Create(DocCreate), Create(DocCreate),
Query(DocQuery), Query(DocQuery),
Update(DocUpdate), Update(DocUpdate),
AddFile(DocAddFile),
//RemoveFile
Delete(DocDelete), Delete(DocDelete),
//Invoke(InvokeArguments), //Invoke(InvokeArguments),
SmallFilePut(SmallFile), SmallFilePut(SmallFile),
RandomAccessFilePut(String), // content_type RandomAccessFilePut(String), // content_type
RandomAccessFilePutChunk((ObjectId, serde_bytes::ByteBuf)), // end the upload with an empty vec RandomAccessFilePutChunk((u32, serde_bytes::ByteBuf)), // end the upload with an empty vec
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
@ -321,8 +399,16 @@ pub struct AppPatch {
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FileName { pub struct FileName {
name: Option<String>, pub heads: Vec<ObjectId>,
reference: ObjectRef, pub name: Option<String>,
pub reference: ObjectRef,
pub nuri: String,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FileMetaV0 {
pub content_type: String,
pub size: u64,
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
@ -331,9 +417,13 @@ pub enum AppResponseV0 {
Patch(AppPatch), Patch(AppPatch),
Text(String), Text(String),
File(FileName), File(FileName),
FileUploading(u32),
FileUploaded(ObjectRef),
#[serde(with = "serde_bytes")] #[serde(with = "serde_bytes")]
FileBinary(Vec<u8>), FileBinary(Vec<u8>),
FileMeta(FileMetaV0),
QueryResult, // see sparesults QueryResult, // see sparesults
Ok,
} }
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]

@ -32,7 +32,7 @@ pub trait UserStorage: Send + Sync {
fn load_store( fn load_store(
&self, &self,
repo_store: &StoreRepo, store_repo: &StoreRepo,
block_storage: Arc<RwLock<dyn BlockStorage + Send + Sync>>, block_storage: Arc<RwLock<dyn BlockStorage + Send + Sync>>,
) -> Result<Repo, StorageError>; ) -> Result<Repo, StorageError>;
@ -43,53 +43,65 @@ pub trait UserStorage: Send + Sync {
fn add_branch(&self, repo_id: &RepoId, branch_info: &BranchInfo) -> Result<(), StorageError>; fn add_branch(&self, repo_id: &RepoId, branch_info: &BranchInfo) -> Result<(), StorageError>;
fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError>; fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError>;
fn branch_add_file(&self, branch: BranchId, file: FileName) -> Result<(), StorageError>;
fn branch_get_all_files(&self, branch: &BranchId) -> Result<Vec<FileName>, StorageError>;
}
pub(crate) struct InMemoryUserStorage {
branch_files: RwLock<HashMap<BranchId, Vec<FileName>>>,
}
impl InMemoryUserStorage {
pub fn new() -> Self {
InMemoryUserStorage {
branch_files: RwLock::new(HashMap::new()),
}
}
} }
// pub(crate) struct InMemoryUserStorage { impl UserStorage for InMemoryUserStorage {
// repo_id_to_store_overlay: HashMap<RepoId, StoreOverlay>, fn branch_add_file(&self, branch: BranchId, file: FileName) -> Result<(), StorageError> {
// } let mut lock = self.branch_files.write().unwrap();
let file_list = lock.entry(branch).or_insert_with(|| Vec::with_capacity(1));
// impl InMemoryUserStorage { file_list.push(file);
// pub fn new() -> Self { Ok(())
// InMemoryUserStorage { }
// repo_id_to_store_overlay: HashMap::new(),
// } fn branch_get_all_files(&self, branch: &BranchId) -> Result<Vec<FileName>, StorageError> {
// } let lock = self.branch_files.read().unwrap();
// } if let Some(file_list) = lock.get(&branch) {
Ok(file_list.to_vec())
// impl UserStorage for InMemoryUserStorage { } else {
// fn repo_id_to_store_overlay(&self, id: &RepoId) -> Result<StoreOverlay, StorageError> { Ok(vec![])
// Ok(self }
// .repo_id_to_store_overlay }
// .get(&id)
// .ok_or(StorageError::NotFound)? fn get_all_store_and_repo_ids(&self) -> Result<HashMap<StoreRepo, Vec<RepoId>>, StorageError> {
// .to_owned()) unimplemented!();
// } }
// fn get_all_store_and_repo_ids(&self) -> Result<HashMap<StoreRepo, Vec<RepoId>>, StorageError> { fn load_store(
// unimplemented!(); &self,
// } store_repo: &StoreRepo,
block_storage: Arc<RwLock<dyn BlockStorage + Send + Sync>>,
// fn load_store( ) -> Result<Repo, StorageError> {
// &self, unimplemented!();
// repo_store: &StoreRepo, }
// block_storage: Arc<RwLock<dyn BlockStorage + Send + Sync>>, fn load_repo(&self, repo_id: &RepoId, store: Arc<Store>) -> Result<Repo, StorageError> {
// ) -> Result<Repo, StorageError> { unimplemented!();
// unimplemented!(); }
// }
// fn load_repo(&self, repo_id: &RepoId, store: Arc<Store>) -> Result<Repo, StorageError> { fn save_repo(&self, repo: &Repo) -> Result<(), StorageError> {
// unimplemented!(); unimplemented!();
// } }
// fn save_repo(&self, repo: &Repo) -> Result<(), StorageError> { fn add_branch(&self, repo_id: &RepoId, branch_info: &BranchInfo) -> Result<(), StorageError> {
// unimplemented!(); unimplemented!();
// } }
// fn add_branch(&self, repo_id: &RepoId, branch_info: &BranchInfo) -> Result<(), StorageError> { fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError> {
// unimplemented!(); unimplemented!();
// } }
}
// fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError> {
// unimplemented!();
// }
// }

@ -10,9 +10,13 @@
//! Repo object (on heap) to handle a Repository //! Repo object (on heap) to handle a Repository
use crate::commits::*; use crate::commits::*;
use crate::types::*; use crate::types::*;
use crate::user_storage::InMemoryUserStorage;
use async_std::stream::StreamExt; use async_std::stream::StreamExt;
use futures::channel::mpsc;
use futures::SinkExt;
use ng_net::actor::SoS; use ng_net::actor::SoS;
use ng_net::broker::{Broker, BROKER}; use ng_net::broker::{Broker, BROKER};
use ng_repo::block_storage::store_max_value_size;
use ng_repo::log::*; use ng_repo::log::*;
use ng_repo::object::Object; use ng_repo::object::Object;
use ng_repo::repo::BranchInfo; use ng_repo::repo::BranchInfo;
@ -26,6 +30,8 @@ use ng_repo::{
utils::{generate_keypair, sign}, utils::{generate_keypair, sign},
}; };
use std::cmp::max; use std::cmp::max;
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::fs::{create_dir_all, read, write, File, OpenOptions}; use std::fs::{create_dir_all, read, write, File, OpenOptions};
use std::io::Write; use std::io::Write;
@ -66,14 +72,15 @@ pub struct Verifier {
pub config: VerifierConfig, pub config: VerifierConfig,
pub connected_server_id: Option<PubKey>, pub connected_server_id: Option<PubKey>,
graph_dataset: Option<oxigraph::store::Store>, graph_dataset: Option<oxigraph::store::Store>,
user_storage: Option<Arc<Box<dyn UserStorage>>>, pub(crate) user_storage: Option<Arc<Box<dyn UserStorage>>>,
block_storage: Option<Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>>, block_storage: Option<Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>>,
last_seq_num: u64, last_seq_num: u64,
peer_id: PubKey, peer_id: PubKey,
max_reserved_seq_num: u64, max_reserved_seq_num: u64,
last_reservation: SystemTime, last_reservation: SystemTime,
stores: HashMap<OverlayId, Arc<Store>>, stores: HashMap<OverlayId, Arc<Store>>,
repos: HashMap<RepoId, Repo>, inner_to_outer: HashMap<OverlayId, OverlayId>,
pub(crate) repos: HashMap<RepoId, Repo>,
// TODO: deal with collided repo_ids. self.repos should be a HashMap<RepoId,Collision> enum Collision {Yes, No(Repo)} // TODO: deal with collided repo_ids. self.repos should be a HashMap<RepoId,Collision> enum Collision {Yes, No(Repo)}
// add a collided_repos: HashMap<(OverlayId, RepoId), Repo> // add a collided_repos: HashMap<(OverlayId, RepoId), Repo>
// only use get_repo() everywhere in the code (always passing the overlay) so that collisions can be handled. // only use get_repo() everywhere in the code (always passing the overlay) so that collisions can be handled.
@ -82,6 +89,8 @@ pub struct Verifier {
pub(crate) topics: HashMap<(OverlayId, TopicId), (RepoId, BranchId)>, pub(crate) topics: HashMap<(OverlayId, TopicId), (RepoId, BranchId)>,
/// only used for InMemory type, to store the outbox /// only used for InMemory type, to store the outbox
in_memory_outbox: Vec<EventOutboxStorage>, in_memory_outbox: Vec<EventOutboxStorage>,
uploads: BTreeMap<u32, RandomAccessFile>,
branch_subscriptions: HashMap<BranchId, Sender<AppResponse>>,
} }
impl fmt::Debug for Verifier { impl fmt::Debug for Verifier {
@ -104,10 +113,97 @@ impl Verifier {
&self.config.user_priv_key &self.config.user_priv_key
} }
pub(crate) fn start_upload(&mut self, content_type: String, store: Arc<Store>) -> u32 {
let mut first_available: u32 = 0;
for upload in self.uploads.keys() {
if *upload != first_available + 1 {
break;
} else {
first_available += 1;
}
}
first_available += 1;
let ret = self.uploads.insert(
first_available,
RandomAccessFile::new_empty(store_max_value_size(), content_type, vec![], store),
);
assert!(ret.is_none());
first_available
}
pub(crate) fn continue_upload(
&mut self,
upload_id: u32,
data: &Vec<u8>,
) -> Result<(), NgError> {
let file = self
.uploads
.get_mut(&upload_id)
.ok_or(NgError::WrongUploadId)?;
Ok(file.write(data)?)
}
pub(crate) fn finish_upload(&mut self, upload_id: u32) -> Result<ObjectRef, NgError> {
let mut file = self
.uploads
.remove(&upload_id)
.ok_or(NgError::WrongUploadId)?;
let id = file.save()?;
Ok(file.reference().unwrap())
}
pub(crate) async fn push_app_response(&mut self, branch: &BranchId, response: AppResponse) {
// log_info!(
// "push_app_response {} {:?}",
// branch,
// self.branch_subscriptions
// );
if let Some(sender) = self.branch_subscriptions.get_mut(branch) {
let _ = sender.send(response).await;
}
}
pub(crate) async fn create_branch_subscription(
&mut self,
branch: BranchId,
) -> Result<(Receiver<AppResponse>, CancelFn), VerifierError> {
// async fn send(mut tx: Sender<AppResponse>, msg: AppResponse) -> ResultSend<()> {
// while let Ok(_) = tx.send(msg.clone()).await {
// log_debug!("sending AppResponse");
// sleep!(std::time::Duration::from_secs(3));
// }
// log_debug!("end of sending");
// Ok(())
// }
// spawn_and_log_error(send(tx.clone(), commit));
//log_info!("#### create_branch_subscription {}", branch);
let (tx, rx) = mpsc::unbounded::<AppResponse>();
if let Some(returned) = self.branch_subscriptions.insert(branch, tx.clone()) {
if !returned.is_closed() {
return Err(VerifierError::DoubleBranchSubscription);
}
}
//let tx = self.branch_subscriptions.entry(branch).or_insert_with(|| {});
for file in self
.user_storage
.as_ref()
.unwrap()
.branch_get_all_files(&branch)?
{
self.push_app_response(&branch, AppResponse::V0(AppResponseV0::File(file)))
.await;
}
let fnonce = Box::new(move || {
tx.close_channel();
});
Ok((rx, fnonce))
}
#[allow(deprecated)] #[allow(deprecated)]
#[cfg(any(test, feature = "testing"))] #[cfg(any(test, feature = "testing"))]
pub fn new_dummy() -> Self { pub fn new_dummy() -> Self {
use ng_repo::block_storage::HashMapBlockStorage;
let (peer_priv_key, peer_id) = generate_keypair(); let (peer_priv_key, peer_id) = generate_keypair();
let block_storage = Arc::new(std::sync::RwLock::new(HashMapBlockStorage::new())) let block_storage = Arc::new(std::sync::RwLock::new(HashMapBlockStorage::new()))
as Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>; as Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>;
@ -119,6 +215,8 @@ impl Verifier {
user_priv_key: PrivKey::random_ed(), user_priv_key: PrivKey::random_ed(),
private_store_read_cap: None, private_store_read_cap: None,
private_store_id: None, private_store_id: None,
protected_store_id: None,
public_store_id: None,
}, },
connected_server_id: None, connected_server_id: None,
graph_dataset: None, graph_dataset: None,
@ -132,6 +230,9 @@ impl Verifier {
repos: HashMap::new(), repos: HashMap::new(),
topics: HashMap::new(), topics: HashMap::new(),
in_memory_outbox: vec![], in_memory_outbox: vec![],
inner_to_outer: HashMap::new(),
uploads: BTreeMap::new(),
branch_subscriptions: HashMap::new(),
} }
} }
@ -406,6 +507,65 @@ impl Verifier {
Ok(self.last_seq_num) Ok(self.last_seq_num)
} }
pub(crate) async fn new_commit(
&mut self,
commit_body: CommitBodyV0,
repo_id: &RepoId,
branch_id: &BranchId,
store_repo: &StoreRepo,
additional_blocks: &Vec<BlockId>,
deps: Vec<ObjectRef>,
files: Vec<ObjectRef>,
) -> Result<(), NgError> {
let commit = {
let repo = self.get_repo(repo_id, &store_repo)?;
let branch = repo.branch(branch_id)?;
let commit = Commit::new_with_body_and_save(
self.user_privkey(),
&self.user_privkey().to_pub(),
*branch_id,
QuorumType::NoSigning,
deps,
vec![],
branch.current_heads.clone(),
vec![],
files,
vec![],
vec![],
CommitBody::V0(commit_body),
0,
&repo.store,
)?;
self.verify_commit(&commit, branch_id, repo_id, Arc::clone(&repo.store))
.await?;
commit
};
//log_info!("{}", commit);
self.new_event(&commit, additional_blocks, *repo_id, store_repo)
.await
}
pub(crate) async fn new_commit_simple(
&mut self,
commit_body: CommitBodyV0,
repo_id: &RepoId,
branch_id: &BranchId,
store_repo: &StoreRepo,
additional_blocks: &Vec<BlockId>,
) -> Result<(), NgError> {
self.new_commit(
commit_body,
repo_id,
branch_id,
store_repo,
additional_blocks,
vec![],
vec![],
)
.await
}
pub(crate) async fn new_events_with_repo( pub(crate) async fn new_events_with_repo(
&mut self, &mut self,
events: Vec<(Commit, Vec<Digest>)>, events: Vec<(Commit, Vec<Digest>)>,
@ -530,7 +690,9 @@ impl Verifier {
} else { } else {
match &self.config.config_type { match &self.config.config_type {
VerifierConfigType::JsSaveSession(js) => { VerifierConfigType::JsSaveSession(js) => {
//log_info!("========== SAVING EVENT {:03}", event.seq_num());
let e = EventOutboxStorage { event, overlay }; let e = EventOutboxStorage { event, overlay };
(js.outbox_write_function)( (js.outbox_write_function)(
self.peer_id, self.peer_id,
e.event.seq_num(), e.event.seq_num(),
@ -567,26 +729,90 @@ impl Verifier {
Ok(()) Ok(())
} }
async fn send_event<'a>( pub(crate) async fn open_branch<'a>(
&mut self, &mut self,
event: Event, repo_id: &RepoId,
branch: &BranchId,
as_publisher: bool,
) -> Result<(), NgError> {
let user = self.config.user_priv_key.to_pub();
let remote = self
.connected_server_id
.as_ref()
.ok_or(NgError::NotConnected)?
.clone();
self.open_branch_(
repo_id,
branch,
as_publisher,
&BROKER.read().await,
&user,
&remote,
)
.await
}
pub(crate) async fn put_blocks(&self, blocks: Vec<Block>, repo: &Repo) -> Result<(), NgError> {
let overlay = repo.store.overlay_for_read_on_client_protocol();
let broker = BROKER.read().await;
let user = self.config.user_priv_key.to_pub();
let remote = self.connected_server_id.to_owned().unwrap();
let msg = BlocksPut::V0(BlocksPutV0 {
blocks,
overlay: Some(overlay),
});
broker.request::<BlocksPut, ()>(&user, &remote, msg).await?;
Ok(())
}
pub(crate) async fn has_blocks(
&self,
blocks: Vec<BlockId>,
repo: &Repo,
) -> Result<BlocksFound, NgError> {
let overlay = repo.store.overlay_for_read_on_client_protocol();
let broker = BROKER.read().await;
let user = self.config.user_priv_key.to_pub();
let remote = self.connected_server_id.to_owned().unwrap();
let msg = BlocksExist::V0(BlocksExistV0 {
blocks,
overlay: Some(overlay),
});
if let SoS::Single(found) = broker
.request::<BlocksExist, BlocksFound>(&user, &remote, msg)
.await?
{
Ok(found)
} else {
Err(NgError::InvalidResponse)
}
}
async fn open_branch_<'a>(
&mut self,
repo_id: &RepoId,
branch: &BranchId,
as_publisher: bool,
broker: &RwLockReadGuard<'a, Broker<'a>>, broker: &RwLockReadGuard<'a, Broker<'a>>,
user: &UserId, user: &UserId,
remote: &DirectPeerId, remote: &DirectPeerId,
overlay: OverlayId,
) -> Result<(), NgError> { ) -> Result<(), NgError> {
assert!(overlay.is_inner()); let (need_open, mut need_sub, overlay) = {
let (repo_id, branch_id) = self let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?;
.topics let overlay = repo.store.overlay_for_read_on_client_protocol();
.get(&(overlay, *event.topic_id())) match repo.opened_branches.get(branch) {
.ok_or(NgError::TopicNotFound)? Some(val) => (false, as_publisher && !val, overlay),
.to_owned(); None => (repo.opened_branches.len() == 0, true, overlay),
let opened_as_publisher;
{
let repo = self.repos.get(&repo_id).ok_or(NgError::RepoNotFound)?;
opened_as_publisher = repo.branch_is_opened_as_publisher(&branch_id);
} }
if !opened_as_publisher { };
//log_info!("need_open {} need_sub {}", need_open, need_sub);
if need_open {
// TODO: implement OpenRepo. for now we always do a Pinning because OpenRepo is not implemented on the broker.
let msg = RepoPinStatusReq::V0(RepoPinStatusReqV0 { let msg = RepoPinStatusReq::V0(RepoPinStatusReqV0 {
hash: repo_id.into(), hash: repo_id.into(),
overlay: Some(overlay), overlay: Some(overlay),
@ -598,18 +824,35 @@ impl Verifier {
Err(NgError::ServerError(ServerError::False)) Err(NgError::ServerError(ServerError::False))
| Err(NgError::ServerError(ServerError::RepoAlreadyOpened)) => { | Err(NgError::ServerError(ServerError::RepoAlreadyOpened)) => {
// pinning the repo on the server broker // pinning the repo on the server broker
let pin_req; let (pin_req, topic_id) = {
{ let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?;
let repo = self.repos.get(&repo_id).ok_or(NgError::RepoNotFound)?; let topic_id = repo.branch(branch).unwrap().topic;
pin_req = PinRepo::from_repo(repo, remote); //TODO: only pin the requested branch.
} let pin_req = PinRepo::from_repo(repo, remote);
(pin_req, topic_id)
};
match broker match broker
.request::<PinRepo, RepoOpened>(user, remote, pin_req) .request::<PinRepo, RepoOpened>(user, remote, pin_req)
.await .await
{ {
Ok(SoS::Single(opened)) => { Ok(SoS::Single(opened)) => {
self.repo_was_opened(&repo_id, &opened)?; self.repo_was_opened(repo_id, &opened)?;
//TODO: check that in the returned opened_repo, the branch we are interested in has effectively been subscribed as publisher by the broker. //TODO: check that in the returned opened_repo, the branch we are interested in has effectively been subscribed as publisher by the broker.
for topic in opened {
if topic.topic_id() == &topic_id {
self.do_sync_req_if_needed(
broker,
user,
remote,
branch,
repo_id,
topic.known_heads(),
)
.await?;
}
}
} }
Ok(_) => return Err(NgError::InvalidResponse), Ok(_) => return Err(NgError::InvalidResponse),
Err(e) => return Err(e), Err(e) => return Err(e),
@ -619,26 +862,56 @@ impl Verifier {
Ok(SoS::Single(pin_status)) => { Ok(SoS::Single(pin_status)) => {
// checking that the branch is subscribed as publisher // checking that the branch is subscribed as publisher
if !pin_status.is_topic_subscribed_as_publisher(event.topic_id()) { let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?;
// we need to subscribe as publisher let branch_info = repo.branch(branch)?;
let topic_sub; let topic_id = &branch_info.topic;
{ // log_info!(
let repo = self.repos.get(&repo_id).ok_or(NgError::RepoNotFound)?; // "as_publisher {} {}",
let branch_info = repo.branch(&branch_id)?; // as_publisher,
// pin_status.is_topic_subscribed_as_publisher(topic_id)
// );
if as_publisher && !pin_status.is_topic_subscribed_as_publisher(topic_id) {
need_sub = true;
}
}
_ => return Err(NgError::InvalidResponse),
}
}
if need_sub {
// we subscribe
let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?;
let branch_info = repo.branch(branch)?;
let broker_id = if as_publisher {
if branch_info.topic_priv_key.is_none() { if branch_info.topic_priv_key.is_none() {
// we need to subscribe as publisher, but we cant
return Err(NgError::PermissionDenied); return Err(NgError::PermissionDenied);
} }
topic_sub = TopicSub::new(repo, branch_info, Some(remote)); Some(remote)
} } else {
None
};
let topic_sub = TopicSub::new(repo, branch_info, broker_id);
match broker match broker
.request::<TopicSub, TopicSubRes>(user, remote, topic_sub) .request::<TopicSub, TopicSubRes>(user, remote, topic_sub)
.await .await
{ {
Ok(SoS::Single(sub)) => { Ok(SoS::Single(sub)) => {
// TODO, deal with heads let repo = self.repos.get_mut(&repo_id).ok_or(NgError::RepoNotFound)?;
let repo =
self.repos.get_mut(&repo_id).ok_or(NgError::RepoNotFound)?;
Self::branch_was_opened(&self.topics, repo, &sub)?; Self::branch_was_opened(&self.topics, repo, &sub)?;
self.do_sync_req_if_needed(
broker,
user,
remote,
branch,
repo_id,
sub.known_heads(),
)
.await?;
} }
Ok(_) => return Err(NgError::InvalidResponse), Ok(_) => return Err(NgError::InvalidResponse),
Err(e) => { Err(e) => {
@ -646,12 +919,26 @@ impl Verifier {
} }
} }
} }
Ok(())
} }
_ => return Err(NgError::InvalidResponse),
} async fn send_event<'a>(
// TODO: deal with received known_heads. &mut self,
// TODO a TopicSync event: Event,
} broker: &RwLockReadGuard<'a, Broker<'a>>,
user: &UserId,
remote: &DirectPeerId,
overlay: OverlayId,
) -> Result<(), NgError> {
assert!(overlay.is_inner());
let (repo_id, branch_id) = self
.topics
.get(&(overlay, *event.topic_id()))
.ok_or(NgError::TopicNotFound)?
.to_owned();
self.open_branch_(&repo_id, &branch_id, true, broker, user, remote)
.await?;
let _ = broker let _ = broker
.request::<PublishEvent, ()>(user, remote, PublishEvent::new(event, overlay)) .request::<PublishEvent, ()>(user, remote, PublishEvent::new(event, overlay))
@ -660,11 +947,48 @@ impl Verifier {
Ok(()) Ok(())
} }
pub fn deliver(&mut self, event: Event) {} pub async fn deliver(&mut self, event: Event, overlay: OverlayId) {
let event_str = event.to_string();
if let Err(e) = self.deliver_(event, overlay).await {
log_err!("DELIVERY ERROR {} {}", e, event_str);
}
}
async fn deliver_(&mut self, event: Event, overlay: OverlayId) -> Result<(), NgError> {
let (repo_id, branch_id) = self
.topics
.get(&(overlay, *event.topic_id()))
.ok_or(NgError::TopicNotFound)?
.to_owned();
// let outer = self
// .inner_to_outer
// .get(&overlay)
// .ok_or(VerifierError::OverlayNotFound)?;
// let store = self
// .stores
// .get(outer)
// .ok_or(VerifierError::OverlayNotFound)?;
let repo = self
.repos
.get(&repo_id)
.ok_or(VerifierError::RepoNotFound)?;
repo.branch_is_opened(&branch_id)
.then_some(true)
.ok_or(VerifierError::BranchNotOpened)?;
let branch = repo.branch(&branch_id)?;
let commit = event.open(&repo.store, &repo_id, &branch_id, &branch.read_cap.key)?;
self.verify_commit(&commit, &branch_id, &repo_id, Arc::clone(&repo.store))
.await?;
pub fn verify_commit( Ok(())
}
pub async fn verify_commit(
&mut self, &mut self,
commit: Commit, commit: &Commit,
branch_id: &BranchId, branch_id: &BranchId,
repo_id: &RepoId, repo_id: &RepoId,
store: Arc<Store>, store: Arc<Store>,
@ -676,23 +1000,26 @@ impl Verifier {
// commit, // commit,
// store // store
// ); // );
//log_info!("{}", commit);
// TODO: check that DAG is well formed. check the heads
let res = match commit.body().ok_or(VerifierError::CommitBodyNotFound)? { let res = match commit.body().ok_or(VerifierError::CommitBodyNotFound)? {
CommitBody::V0(v0) => match v0 { CommitBody::V0(v0) => match v0 {
CommitBodyV0::Repository(a) => a.verify(&commit, self, branch_id, repo_id, store), CommitBodyV0::Repository(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::RootBranch(a) => a.verify(&commit, self, branch_id, repo_id, store), CommitBodyV0::RootBranch(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::Branch(a) => a.verify(&commit, self, branch_id, repo_id, store), CommitBodyV0::Branch(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::SyncSignature(a) => { CommitBodyV0::SyncSignature(a) => a.verify(commit, self, branch_id, repo_id, store),
a.verify(&commit, self, branch_id, repo_id, store) CommitBodyV0::AddBranch(a) => a.verify(commit, self, branch_id, repo_id, store),
} CommitBodyV0::StoreUpdate(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::AddBranch(a) => a.verify(&commit, self, branch_id, repo_id, store), CommitBodyV0::AddSignerCap(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::StoreUpdate(a) => a.verify(&commit, self, branch_id, repo_id, store), CommitBodyV0::AddFile(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::AddSignerCap(a) => a.verify(&commit, self, branch_id, repo_id, store),
_ => { _ => {
log_err!("unimplemented verifier {}", commit); log_err!("unimplemented verifier {}", commit);
Err(VerifierError::NotImplemented) return Err(VerifierError::NotImplemented);
} }
}, },
}; };
let res = res.await;
if res.is_ok() { if res.is_ok() {
let commit_ref = commit.reference().unwrap(); let commit_ref = commit.reference().unwrap();
if let Some(repo) = self.repos.get_mut(repo_id) { if let Some(repo) = self.repos.get_mut(repo_id) {
@ -777,7 +1104,8 @@ impl Verifier {
store_repo: &StoreRepo, store_repo: &StoreRepo,
) -> Result<&Repo, VerifierError> { ) -> Result<&Repo, VerifierError> {
//let store = self.get_store(store_repo); //let store = self.get_store(store_repo);
let repo_ref = self.repos.get(id).ok_or(VerifierError::RepoNotFound); let repo_ref: Result<&Repo, VerifierError> =
self.repos.get(id).ok_or(VerifierError::RepoNotFound);
repo_ref repo_ref
} }
@ -794,6 +1122,67 @@ impl Verifier {
Ok(()) Ok(())
} }
async fn do_sync_req_if_needed<'a>(
&mut self,
broker: &RwLockReadGuard<'a, Broker<'a>>,
user: &UserId,
remote: &DirectPeerId,
branch_id: &BranchId,
repo_id: &RepoId,
remote_heads: &Vec<ObjectId>,
) -> Result<(), NgError> {
let (store, msg, branch_secret) = {
let repo = self.repos.get(repo_id).unwrap();
let branch_info = repo.branch(branch_id)?;
let store = Arc::clone(&repo.store);
let ours = branch_info.current_heads.iter().map(|refe| refe.id);
let ours_set: HashSet<Digest> = HashSet::from_iter(ours.clone());
let theirs = HashSet::from_iter(remote_heads.clone().into_iter());
if theirs.len() == 0 {
log_info!("branch is new on the broker. doing nothing");
return Ok(());
}
if ours_set.difference(&theirs).count() == 0
&& theirs.difference(&ours_set).count() == 0
{
// no need to sync
log_info!("branch is up to date");
return Ok(());
}
let msg = TopicSyncReq::V0(TopicSyncReqV0 {
topic: branch_info.topic,
known_heads: ours.collect(),
target_heads: remote_heads.clone(),
overlay: Some(store.overlay_for_read_on_client_protocol()),
});
(store, msg, branch_info.read_cap.key.clone())
};
match broker
.request::<TopicSyncReq, TopicSyncRes>(user, remote, msg)
.await
{
Err(e) => return Err(e),
Ok(SoS::Stream(mut events)) => {
while let Some(event) = events.next().await {
let commit = event
.event()
.open(&store, repo_id, branch_id, &branch_secret)?;
self.verify_commit(&commit, branch_id, repo_id, Arc::clone(&store))
.await?;
}
}
Ok(_) => return Err(NgError::InvalidResponse),
}
Ok(())
}
async fn do_sync_req<'a>( async fn do_sync_req<'a>(
&mut self, &mut self,
broker: &RwLockReadGuard<'a, Broker<'a>>, broker: &RwLockReadGuard<'a, Broker<'a>>,
@ -817,7 +1206,8 @@ impl Verifier {
.event() .event()
.open(&store, repo_id, branch_id, branch_secret)?; .open(&store, repo_id, branch_id, branch_secret)?;
self.verify_commit(commit, branch_id, repo_id, Arc::clone(&store))?; self.verify_commit(&commit, branch_id, repo_id, Arc::clone(&store))
.await?;
} }
} }
Ok(_) => return Err(NgError::InvalidResponse), Ok(_) => return Err(NgError::InvalidResponse),
@ -925,7 +1315,7 @@ impl Verifier {
}); });
match broker.request::<CommitGet, Block>(user, remote, msg).await { match broker.request::<CommitGet, Block>(user, remote, msg).await {
Err(NgError::ServerError(ServerError::NotFound)) => { Err(NgError::ServerError(ServerError::NotFound)) => {
// TODO: fallback to BlockGet, then Commit::load(with_body:true), which will return an Err(CommitLoadError::MissingBlocks), then do another BlockGet with those, and then again Commit::load... // TODO: fallback to BlocksGet, then Commit::load(with_body:true), which will return an Err(CommitLoadError::MissingBlocks), then do another BlocksGet with those, and then again Commit::load...
return Err(NgError::SiteNotFoundOnBroker); return Err(NgError::SiteNotFoundOnBroker);
} }
Ok(SoS::Stream(blockstream)) => { Ok(SoS::Stream(blockstream)) => {
@ -945,8 +1335,44 @@ impl Verifier {
} }
} }
pub(crate) async fn fetch_blocks_if_needed(
&self,
id: &BlockId,
repo_id: &RepoId,
store_repo: &StoreRepo,
) -> Result<Option<Receiver<Block>>, NgError> {
let repo = self.get_repo(repo_id, store_repo)?;
let overlay = repo.store.overlay_for_read_on_client_protocol();
let broker = BROKER.read().await;
let user = self.config.user_priv_key.to_pub();
let remote = self.connected_server_id.to_owned().unwrap();
match repo.store.has(id) {
Err(StorageError::NotFound) => {
let msg = BlocksGet::V0(BlocksGetV0 {
ids: vec![*id],
topic: None,
include_children: true,
overlay: Some(overlay),
});
match broker
.request::<BlocksGet, Block>(&user, &remote, msg)
.await
{
Ok(SoS::Stream(blockstream)) => Ok(Some(blockstream)),
Ok(_) => return Err(NgError::InvalidResponse),
Err(e) => return Err(e),
}
}
Err(e) => Err(e.into()),
Ok(()) => Ok(None),
}
}
async fn bootstrap_from_remote(&mut self) -> Result<(), NgError> { async fn bootstrap_from_remote(&mut self) -> Result<(), NgError> {
if self.is_in_memory() || self.need_bootstrap() { if self.need_bootstrap() {
let broker = BROKER.read().await; let broker = BROKER.read().await;
let user = self.config.user_priv_key.to_pub(); let user = self.config.user_priv_key.to_pub();
let remote = self.connected_server_id.to_owned().unwrap(); let remote = self.connected_server_id.to_owned().unwrap();
@ -994,7 +1420,7 @@ impl Verifier {
Ok(Arc::clone(store)) Ok(Arc::clone(store))
} }
fn load_from_credentials_and_outbox( async fn load_from_credentials_and_outbox(
&mut self, &mut self,
events: &Vec<EventOutboxStorage>, events: &Vec<EventOutboxStorage>,
) -> Result<(), VerifierError> { ) -> Result<(), VerifierError> {
@ -1077,11 +1503,12 @@ impl Verifier {
postponed_signer_caps.push(commit); postponed_signer_caps.push(commit);
} else { } else {
self.verify_commit( self.verify_commit(
commit, &commit,
&branch_id.clone(), &branch_id.clone(),
private_store.id(), private_store.id(),
Arc::clone(&private_store), Arc::clone(&private_store),
)?; )
.await?;
} }
} }
} }
@ -1150,7 +1577,8 @@ impl Verifier {
let commit = e.event.open(store, store.id(), branch_id, branch_secret)?; let commit = e.event.open(store, store.id(), branch_id, branch_secret)?;
self.verify_commit(commit, &branch_id.clone(), store.id(), Arc::clone(store))?; self.verify_commit(&commit, &branch_id.clone(), store.id(), Arc::clone(store))
.await?;
} else { } else {
// log_info!( // log_info!(
// "SKIPPED wrong overlay {} {}", // "SKIPPED wrong overlay {} {}",
@ -1176,11 +1604,12 @@ impl Verifier {
// finally, ingest the signer_caps. // finally, ingest the signer_caps.
for signer_cap in postponed_signer_caps { for signer_cap in postponed_signer_caps {
self.verify_commit( self.verify_commit(
signer_cap, &signer_cap,
private_user_branch.as_ref().unwrap(), private_user_branch.as_ref().unwrap(),
private_store.id(), private_store.id(),
Arc::clone(&private_store), Arc::clone(&private_store),
)?; )
.await?;
} }
Ok(()) Ok(())
@ -1198,7 +1627,11 @@ impl Verifier {
} }
pub async fn send_outbox(&mut self) -> Result<(), NgError> { pub async fn send_outbox(&mut self) -> Result<(), NgError> {
let events: Vec<EventOutboxStorage> = self.take_events_from_outbox().unwrap_or(vec![]); let ret = self.take_events_from_outbox();
// if ret.is_err() {
// log_err!("send_outbox {:}", ret.as_ref().unwrap_err());
// }
let events: Vec<EventOutboxStorage> = ret.unwrap_or(vec![]);
if events.len() == 0 { if events.len() == 0 {
return Ok(()); return Ok(());
} }
@ -1213,34 +1646,34 @@ impl Verifier {
// for all the events, check that they are valid (topic exists, current_heads match with event) // for all the events, check that they are valid (topic exists, current_heads match with event)
let mut need_replay = false; let mut need_replay = false;
let mut events_to_replay = Vec::with_capacity(events.len()); let mut events_to_replay = Vec::with_capacity(events.len());
let mut branch_heads: HashMap<BranchId, Vec<ObjectRef>> = HashMap::new(); //let mut branch_heads: HashMap<BranchId, Vec<ObjectRef>> = HashMap::new();
for e in events { for e in events {
match self.topics.get(&(e.overlay, *e.event.topic_id())) { match self.topics.get(&(e.overlay, *e.event.topic_id())) {
Some((repo_id, branch_id)) => match self.repos.get(repo_id) { Some((repo_id, branch_id)) => match self.repos.get(repo_id) {
Some(repo) => match repo.branches.get(branch_id) { Some(repo) => match repo.branches.get(branch_id) {
Some(branch) => { Some(branch) => {
let commit = e.event.open_with_info(repo, branch)?; // let commit = e.event.open_with_info(repo, branch)?;
let acks = commit.acks(); // let acks = commit.acks();
match branch_heads.get(branch_id) { // match branch_heads.get(branch_id) {
Some(previous_heads) => { // Some(previous_heads) => {
if *previous_heads != acks { // if *previous_heads != acks {
// skip event, as it is outdated. // // skip event, as it is outdated.
continue; // continue;
} else { // } else {
branch_heads // branch_heads
.insert(*branch_id, vec![commit.reference().unwrap()]); // .insert(*branch_id, vec![commit.reference().unwrap()]);
} // }
} // }
None => { // None => {
if acks != branch.current_heads { // if acks != branch.current_heads {
// skip event, as it is outdated. // // skip event, as it is outdated.
continue; // continue;
} else { // } else {
branch_heads // branch_heads
.insert(*branch_id, vec![commit.reference().unwrap()]); // .insert(*branch_id, vec![commit.reference().unwrap()]);
} // }
} // }
} // }
} }
None => { None => {
log_info!("REPLAY BRANCH NOT FOUND {}", branch_id); log_info!("REPLAY BRANCH NOT FOUND {}", branch_id);
@ -1265,7 +1698,8 @@ impl Verifier {
} }
log_info!("NEED REPLAY {need_replay}"); log_info!("NEED REPLAY {need_replay}");
if need_replay { if need_replay {
self.load_from_credentials_and_outbox(&events_to_replay)?; self.load_from_credentials_and_outbox(&events_to_replay)
.await?;
log_info!("REPLAY DONE"); log_info!("REPLAY DONE");
} }
log_info!("SENDING {} EVENTS FOR OUTBOX", events_to_replay.len()); log_info!("SENDING {} EVENTS FOR OUTBOX", events_to_replay.len());
@ -1338,7 +1772,7 @@ impl Verifier {
let (graph, user, block) = match &config.config_type { let (graph, user, block) = match &config.config_type {
VerifierConfigType::Memory | VerifierConfigType::JsSaveSession(_) => ( VerifierConfigType::Memory | VerifierConfigType::JsSaveSession(_) => (
Some(oxigraph::store::Store::new().unwrap()), Some(oxigraph::store::Store::new().unwrap()),
None, //Some(Box::new(InMemoryUserStorage::new()) as Box<dyn UserStorage>), Some(Box::new(InMemoryUserStorage::new()) as Box<dyn UserStorage>),
Some(block_storage), Some(block_storage),
), ),
#[cfg(not(target_family = "wasm"))] #[cfg(not(target_family = "wasm"))]
@ -1382,6 +1816,9 @@ impl Verifier {
repos: HashMap::new(), repos: HashMap::new(),
topics: HashMap::new(), topics: HashMap::new(),
in_memory_outbox: vec![], in_memory_outbox: vec![],
inner_to_outer: HashMap::new(),
uploads: BTreeMap::new(),
branch_subscriptions: HashMap::new(),
}; };
// this is important as it will load the last seq from storage // this is important as it will load the last seq from storage
if verif.config.config_type.should_load_last_seq_num() { if verif.config.config_type.should_load_last_seq_num() {
@ -1392,12 +1829,19 @@ impl Verifier {
Ok(verif) Ok(verif)
} }
pub fn doc_fetch( pub async fn app_request_stream(
&mut self, &mut self,
nuri: String, req: AppRequest,
payload: Option<AppRequestPayload>,
) -> Result<(Receiver<AppResponse>, CancelFn), NgError> { ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
unimplemented!(); match req {
AppRequest::V0(v0) => v0.command.process_stream(self, &v0.nuri, &v0.payload).await,
}
}
pub async fn app_request(&mut self, req: AppRequest) -> Result<AppResponse, NgError> {
match req {
AppRequest::V0(v0) => v0.command.process(self, v0.nuri, v0.payload).await,
}
} }
pub async fn respond( pub async fn respond(
@ -1433,10 +1877,19 @@ impl Verifier {
sub: &TopicSubRes, sub: &TopicSubRes,
) -> Result<(), NgError> { ) -> Result<(), NgError> {
let overlay = repo.store.inner_overlay(); let overlay = repo.store.inner_overlay();
//log_info!("branch_was_opened searching for topic {}", sub.topic_id()); // log_info!(
// "branch_was_opened topic {} overlay {}",
// sub.topic_id(),
// overlay
// );
let (_, branch_id) = topics let (_, branch_id) = topics
.get(&(overlay, *sub.topic_id())) .get(&(overlay, *sub.topic_id()))
.ok_or(NgError::TopicNotFound)?; .ok_or(NgError::TopicNotFound)?;
// log_info!(
// "branch_was_opened insert branch_id {} is_publisher {}",
// branch_id,
// sub.is_publisher()
// );
repo.opened_branches.insert(*branch_id, sub.is_publisher()); repo.opened_branches.insert(*branch_id, sub.is_publisher());
Ok(()) Ok(())
} }
@ -1447,6 +1900,11 @@ impl Verifier {
opened_repo: &RepoOpened, opened_repo: &RepoOpened,
) -> Result<(), NgError> { ) -> Result<(), NgError> {
let repo = self.repos.get_mut(repo_id).ok_or(NgError::RepoNotFound)?; let repo = self.repos.get_mut(repo_id).ok_or(NgError::RepoNotFound)?;
//TODO: improve the inner_to_outer insert. (should be done when store is created, not here. should work also for dialogs.)
self.inner_to_outer.insert(
repo.store.overlay_for_read_on_client_protocol(),
repo.store.outer_overlay(),
);
for sub in opened_repo { for sub in opened_repo {
Self::branch_was_opened(&self.topics, repo, sub)?; Self::branch_was_opened(&self.topics, repo, sub)?;
} }

@ -70,7 +70,7 @@ impl Wallet {
} }
pub fn name(&self) -> String { pub fn name(&self) -> String {
match self { match self {
Wallet::V0(v0) => base64_url::encode(&v0.id.slice()), Wallet::V0(v0) => v0.id.to_string(),
_ => unimplemented!(), _ => unimplemented!(),
} }
} }
@ -109,11 +109,11 @@ impl Wallet {
let sig = sign(&wallet_privkey, &wallet_id, &ser_wallet).unwrap(); let sig = sign(&wallet_privkey, &wallet_id, &ser_wallet).unwrap();
let wallet_v0 = WalletV0 { let wallet_v0 = WalletV0 {
/// ID // ID
id: wallet_id, id: wallet_id,
/// Content // Content
content: wallet_content, content: wallet_content,
/// Signature over content by wallet's private key // Signature over content by wallet's private key
sig, sig,
}; };
@ -552,7 +552,7 @@ pub fn create_wallet_first_step_v0(
let intermediary = CreateWalletIntermediaryV0 { let intermediary = CreateWalletIntermediaryV0 {
wallet_privkey, wallet_privkey,
wallet_name: base64_url::encode(&wallet_id.slice()), wallet_name: wallet_id.to_string(),
client, client,
user_privkey, user_privkey,
in_memory: !params.local_save, in_memory: !params.local_save,
@ -742,6 +742,7 @@ pub async fn create_wallet_second_step_v0(
peer_id: PubKey::nil(), peer_id: PubKey::nil(),
nonce: 0, nonce: 0,
encrypted, encrypted,
test: None,
}; };
let ser_wallet = serde_bare::to_vec(&wallet_content).unwrap(); let ser_wallet = serde_bare::to_vec(&wallet_content).unwrap();
@ -749,11 +750,11 @@ pub async fn create_wallet_second_step_v0(
let sig = sign(&params.wallet_privkey, &wallet_id, &ser_wallet).unwrap(); let sig = sign(&params.wallet_privkey, &wallet_id, &ser_wallet).unwrap();
let wallet_v0 = WalletV0 { let wallet_v0 = WalletV0 {
/// ID // ID
id: wallet_id, id: wallet_id,
/// Content // Content
content: wallet_content, content: wallet_content,
/// Signature over content by wallet's private key // Signature over content by wallet's private key
sig, sig,
}; };
@ -865,10 +866,7 @@ mod test {
let ser_wallet = to_vec(&NgFile::V0(NgFileV0::Wallet(res.wallet.clone()))).unwrap(); let ser_wallet = to_vec(&NgFile::V0(NgFileV0::Wallet(res.wallet.clone()))).unwrap();
file.write_all(&ser_wallet); file.write_all(&ser_wallet);
log_debug!( log_debug!("wallet id: {}", res.wallet.id());
"wallet id: {:?}",
base64_url::encode(&res.wallet.id().slice())
);
log_debug!("pazzle {:?}", display_pazzle(&res.pazzle)); log_debug!("pazzle {:?}", display_pazzle(&res.pazzle));
log_debug!("mnemonic {:?}", display_mnemonic(&res.mnemonic)); log_debug!("mnemonic {:?}", display_mnemonic(&res.mnemonic));
log_debug!("pin {:?}", pin); log_debug!("pin {:?}", pin);

@ -498,13 +498,23 @@ impl SensitiveWallet {
pub fn individual_site( pub fn individual_site(
&self, &self,
user_id: &UserId, user_id: &UserId,
) -> Option<(PrivKey, Option<ReadCap>, Option<RepoId>)> { ) -> Option<(
PrivKey,
Option<ReadCap>,
Option<RepoId>,
Option<RepoId>,
Option<RepoId>,
)> {
match self { match self {
Self::V0(v0) => match v0.sites.get(&user_id.to_string()) { Self::V0(v0) => match v0.sites.get(&user_id.to_string()) {
Some(site) => match &site.site_type { Some(site) => match &site.site_type {
SiteType::Individual((user, readcap)) => { SiteType::Individual((user, readcap)) => Some((
Some((user.clone(), Some(readcap.clone()), Some(site.private.id))) user.clone(),
} Some(readcap.clone()),
Some(site.private.id),
Some(site.protected.id),
Some(site.public.id),
)),
_ => None, _ => None,
}, },
None => None, None => None,
@ -639,6 +649,8 @@ pub struct WalletContentV0 {
// WalletLog content encrypted with XChaCha20Poly1305, AD = timestamp and walletID // WalletLog content encrypted with XChaCha20Poly1305, AD = timestamp and walletID
#[serde(with = "serde_bytes")] #[serde(with = "serde_bytes")]
pub encrypted: Vec<u8>, pub encrypted: Vec<u8>,
pub test: Option<String>,
} }
/// Wallet Log V0 /// Wallet Log V0

@ -36,7 +36,7 @@ use ng_repo::errors::*;
use ng_repo::log::*; use ng_repo::log::*;
use ng_repo::types::*; use ng_repo::types::*;
use ng_repo::utils::{ use ng_repo::utils::{
decode_key, display_timestamp, generate_keypair, now_timestamp, timestamp_after, decode_priv_key, display_timestamp, generate_keypair, now_timestamp, timestamp_after,
}; };
use clap::{arg, command, value_parser, ArgAction, Command}; use clap::{arg, command, value_parser, ArgAction, Command};
@ -279,10 +279,10 @@ async fn main_inner() -> Result<(), NgcliError> {
.lines() .lines()
.nth(0) .nth(0)
.ok_or(NgcliError::InvalidKeyFile("empty file".to_string()))?; .ok_or(NgcliError::InvalidKeyFile("empty file".to_string()))?;
let res = decode_key(first_line.trim()) let res = decode_priv_key(first_line.trim())
.map_err(|_| NgcliError::InvalidKeyFile("deserialization error".to_string()))?; .map_err(|_| NgcliError::InvalidKeyFile("deserialization error".to_string()))?;
file.zeroize(); file.zeroize();
Some(res) Some(*res.slice())
} }
}; };
@ -293,20 +293,20 @@ async fn main_inner() -> Result<(), NgcliError> {
//key_string.as_mut().zeroize(); //key_string.as_mut().zeroize();
gen_client_keys(key_from_file) gen_client_keys(key_from_file)
} else { } else {
let res = decode_key(key_string.as_str()).map_err(|_| { let res = decode_priv_key(key_string.as_str()).map_err(|_| {
NgcliError::InvalidKeyFile( NgcliError::InvalidKeyFile(
"check the argument provided in command line".to_string(), "check the argument provided in command line".to_string(),
) )
})?; })?;
if matches.get_flag("save_key") { if matches.get_flag("save_key") {
let mut master_key = base64_url::encode(&res); let mut master_key = res.to_string();
write(key_path.clone(), &master_key) write(key_path.clone(), &master_key)
.map_err(|e| NgcliError::CannotSaveKey(e.to_string()))?; .map_err(|e| NgcliError::CannotSaveKey(e.to_string()))?;
master_key.zeroize(); master_key.zeroize();
log_info!("The key has been saved to {}", key_path.to_str().unwrap()); log_info!("The key has been saved to {}", key_path.to_str().unwrap());
} }
//key_string.as_mut().zeroize(); //key_string.as_mut().zeroize();
gen_client_keys(Some(res)) gen_client_keys(Some(*res.slice()))
} }
} }
None => { None => {
@ -314,7 +314,8 @@ async fn main_inner() -> Result<(), NgcliError> {
gen_client_keys(key_from_file) gen_client_keys(key_from_file)
} else { } else {
let res = gen_client_keys(None); let res = gen_client_keys(None);
let mut master_key = base64_url::encode(&res[0]); let key = PrivKey::Ed25519PrivKey(res[0]);
let mut master_key = key.to_string();
if matches.get_flag("save_key") { if matches.get_flag("save_key") {
write(key_path.clone(), &master_key) write(key_path.clone(), &master_key)
.map_err(|e| NgcliError::CannotSaveKey(e.to_string()))?; .map_err(|e| NgcliError::CannotSaveKey(e.to_string()))?;

@ -35,7 +35,7 @@ use ng_repo::types::SymKey;
use ng_repo::utils::ed_keypair_from_priv_bytes; use ng_repo::utils::ed_keypair_from_priv_bytes;
use ng_repo::{ use ng_repo::{
types::{PrivKey, PubKey}, types::{PrivKey, PubKey},
utils::{decode_key, generate_keypair, sign, verify}, utils::{decode_key, decode_priv_key, generate_keypair, sign, verify},
}; };
use serde_json::{from_str, to_string_pretty}; use serde_json::{from_str, to_string_pretty};
use std::error::Error; use std::error::Error;
@ -286,12 +286,12 @@ fn prepare_accept_forward_for_domain(
args: &mut Cli, args: &mut Cli,
) -> Result<AcceptForwardForV0, NgError> { ) -> Result<AcceptForwardForV0, NgError> {
if args.domain_peer.is_some() { if args.domain_peer.is_some() {
let key = decode_key(args.domain_peer.as_ref().unwrap().as_str())?; let key = decode_priv_key(args.domain_peer.as_ref().unwrap().as_str())?;
args.domain_peer.as_mut().unwrap().zeroize(); args.domain_peer.as_mut().unwrap().zeroize();
Ok(AcceptForwardForV0::PublicDomainPeer(( Ok(AcceptForwardForV0::PublicDomainPeer((
domain, domain,
PrivKey::Ed25519PrivKey(key), key,
"".to_string(), "".to_string(),
))) )))
} else { } else {
@ -425,10 +425,10 @@ async fn main_inner() -> Result<(), NgdError> {
.lines() .lines()
.nth(0) .nth(0)
.ok_or(NgdError::InvalidKeyFile("empty file".to_string()))?; .ok_or(NgdError::InvalidKeyFile("empty file".to_string()))?;
let res = decode_key(first_line.trim()) let res = decode_priv_key(first_line.trim())
.map_err(|_| NgdError::InvalidKeyFile("deserialization error".to_string()))?; .map_err(|_| NgdError::InvalidKeyFile("deserialization error".to_string()))?;
file.zeroize(); file.zeroize();
Some(res) Some(*res.slice())
} }
}; };
@ -439,20 +439,19 @@ async fn main_inner() -> Result<(), NgdError> {
args.key.as_mut().unwrap().zeroize(); args.key.as_mut().unwrap().zeroize();
gen_broker_keys(key_from_file) gen_broker_keys(key_from_file)
} else { } else {
let res = decode_key(key_string.as_str()).map_err(|_| { let res = decode_priv_key(key_string.as_str()).map_err(|_| {
NgdError::InvalidKeyFile( NgdError::InvalidKeyFile(
"check the argument provided in command line".to_string(), "check the argument provided in command line".to_string(),
) )
})?; })?;
if args.save_key { if args.save_key {
let mut master_key = base64_url::encode(&res); write(key_path.clone(), res.to_string())
write(key_path.clone(), &master_key)
.map_err(|e| NgdError::CannotSaveKey(e.to_string()))?; .map_err(|e| NgdError::CannotSaveKey(e.to_string()))?;
master_key.zeroize(); //master_key.zeroize();
log_info!("The key has been saved to {}", key_path.to_str().unwrap()); log_info!("The key has been saved to {}", key_path.to_str().unwrap());
} }
args.key.as_mut().unwrap().zeroize(); args.key.as_mut().unwrap().zeroize();
gen_broker_keys(Some(res)) gen_broker_keys(Some(*res.slice()))
} }
} }
None => { None => {
@ -460,7 +459,8 @@ async fn main_inner() -> Result<(), NgdError> {
gen_broker_keys(key_from_file) gen_broker_keys(key_from_file)
} else { } else {
let res = gen_broker_keys(None); let res = gen_broker_keys(None);
let mut master_key = base64_url::encode(&res[0]); let key = PrivKey::Ed25519PrivKey((res[0]));
let mut master_key = key.to_string();
if args.save_key { if args.save_key {
write(key_path.clone(), &master_key) write(key_path.clone(), &master_key)
.map_err(|e| NgdError::CannotSaveKey(e.to_string()))?; .map_err(|e| NgdError::CannotSaveKey(e.to_string()))?;
@ -912,7 +912,7 @@ async fn main_inner() -> Result<(), NgdError> {
"The PEER_ID provided in the --forward option is invalid", "The PEER_ID provided in the --forward option is invalid",
) )
})?; })?;
let peer_id = PubKey::Ed25519PubKey(pub_key_array); let peer_id = pub_key_array;
let server_type = if parts[0].len() > 0 { let server_type = if parts[0].len() > 0 {
let first_char = parts[0].chars().next().unwrap(); let first_char = parts[0].chars().next().unwrap();

@ -88,8 +88,7 @@ impl Server {
fn get_wallet(&self, encoded_id: String) -> Result<Response, NgHttpError> { fn get_wallet(&self, encoded_id: String) -> Result<Response, NgHttpError> {
log_debug!("DOWNLOAD wallet {}", encoded_id); log_debug!("DOWNLOAD wallet {}", encoded_id);
let id = base64_url::decode(&encoded_id).map_err(|e| NgHttpError::InvalidParams)?; let id = base64_url::decode(&encoded_id).map_err(|e| NgHttpError::InvalidParams)?;
let array = slice_as_array!(&id, [u8; 32]).ok_or(NgHttpError::InvalidParams)?; let wallet_id: PubKey = from_slice(&id).map_err(|e| NgHttpError::InvalidParams)?;
let wallet_id = PubKey::Ed25519PubKey(*array);
let wallet_record = let wallet_record =
WalletRecord::open(&wallet_id, &self.store).map_err(|e| NgHttpError::NotFound)?; WalletRecord::open(&wallet_id, &self.store).map_err(|e| NgHttpError::NotFound)?;
let wallet = wallet_record.wallet().map_err(|e| NgHttpError::NotFound)?; let wallet = wallet_record.wallet().map_err(|e| NgHttpError::NotFound)?;
@ -108,8 +107,7 @@ impl Server {
log_debug!("DOWNLOAD bootstrap {}", encoded_id); log_debug!("DOWNLOAD bootstrap {}", encoded_id);
let id = base64_url::decode(&encoded_id).map_err(|e| NgHttpError::InvalidParams)?; let id = base64_url::decode(&encoded_id).map_err(|e| NgHttpError::InvalidParams)?;
let array = slice_as_array!(&id, [u8; 32]).ok_or(NgHttpError::InvalidParams)?; let wallet_id: PubKey = from_slice(&id).map_err(|e| NgHttpError::InvalidParams)?;
let wallet_id = PubKey::Ed25519PubKey(*array);
let wallet_record = let wallet_record =
WalletRecord::open(&wallet_id, &self.store).map_err(|e| NgHttpError::NotFound)?; WalletRecord::open(&wallet_id, &self.store).map_err(|e| NgHttpError::NotFound)?;
let bootstrap = wallet_record let bootstrap = wallet_record

Loading…
Cancel
Save