cleanup of test suite

pull/19/head
Niko PLP 5 months ago
parent 2acdcbb33b
commit 9e108733e6
  1. 3
      Cargo.lock
  2. 2
      Cargo.toml
  3. 29
      README.md
  4. 2
      nextgraph/examples/open.md
  5. 1
      ng-app/README.md
  6. 6
      ng-broker/Cargo.toml
  7. 2
      ng-broker/src/server_storage/admin/account.rs
  8. 7
      ng-broker/src/server_storage/admin/invitation.rs
  9. 21
      ng-client-ws/src/remote_ws.rs
  10. 3
      ng-net/Cargo.toml
  11. 2
      ng-net/src/actors/probe.rs
  12. 8
      ng-net/src/broker.rs
  13. 2
      ng-net/src/connection.rs
  14. 130
      ng-repo/src/branch.rs
  15. 50
      ng-repo/src/commit.rs
  16. 1
      ng-repo/src/errors.rs
  17. 96
      ng-repo/src/file.rs
  18. 23
      ng-repo/src/object.rs
  19. 14
      ng-repo/src/repo.rs
  20. 52
      ng-sdk-js/src/lib.rs
  21. 2
      ng-verifier/src/user_storage/branch.rs
  22. 1
      ng-wallet/Cargo.toml
  23. 50
      ng-wallet/src/lib.rs
  24. BIN
      ng-wallet/tests/generated_security_image.jpg.compare
  25. BIN
      ng-wallet/tests/valid_security_image.jpg
  26. 2
      ngaccount/README.md
  27. 4
      ngone/Cargo.toml
  28. 2
      ngone/README.md
  29. 51
      ngone/src/main.rs
  30. 8
      ngone/src/store/dynpeer.rs
  31. 7
      ngone/src/store/wallet_record.rs
  32. 2
      ngone/src/types.rs

3
Cargo.lock generated

@ -3242,6 +3242,7 @@ dependencies = [
"async-recursion", "async-recursion",
"async-std", "async-std",
"async-trait", "async-trait",
"async-tungstenite",
"base64-url", "base64-url",
"default-net", "default-net",
"ed25519-dalek", "ed25519-dalek",
@ -3445,10 +3446,8 @@ dependencies = [
"ng-storage-rocksdb", "ng-storage-rocksdb",
"ng-wallet", "ng-wallet",
"rust-embed", "rust-embed",
"serde",
"serde_bare", "serde_bare",
"serde_json", "serde_json",
"slice_as_array",
"tokio", "tokio",
"warp", "warp",
"warp-embed", "warp-embed",

@ -39,4 +39,4 @@ opt-level = 's'
[patch.crates-io] [patch.crates-io]
# tauri = { git = "https://github.com/simonhyll/tauri.git", branch="fix/ipc-mixup"} # tauri = { git = "https://github.com/simonhyll/tauri.git", branch="fix/ipc-mixup"}
# tauri = { git = "https://git.nextgraph.org/NextGraph/tauri.git", branch="alpha.11-nextgraph", features = ["no-ipc-custom-protocol"] } # tauri = { git = "https://git.nextgraph.org/NextGraph/tauri.git", branch="alpha.11-nextgraph", features = ["no-ipc-custom-protocol"] }

@ -106,25 +106,24 @@ cargo test
cargo test --package nextgraph -r --lib -- local_broker::test::import_session_for_test_to_disk --show-output --nocapture --ignored cargo test --package nextgraph -r --lib -- local_broker::test::import_session_for_test_to_disk --show-output --nocapture --ignored
``` ```
Test all: Test a single crate:
```
cargo test --all --verbose -- --show-output --nocapture
```
Test a single module:
``` ```
cargo test --package ng-repo --lib -- branch::test --show-output --nocapture cargo test --package ng-repo --lib -- branch::test --show-output --nocapture
cargo test --package ng-wallet --lib -- branch::test --show-output --nocapture
cargo test --package ng-verifier --lib -- branch::test --show-output --nocapture
cargo test --package ng-sdk-js --lib -- branch::test --show-output --nocapture
cargo test --package ng-broker --lib -- branch::test --show-output --nocapture
cargo test --package ng-client-ws --lib -- branch::test --show-output --nocapture
``` ```
Test end-to-end client and server: Test WASM websocket
``` First you need to install the `chromedriver` that matches your version of Chrome
cargo test --package ngcli -- --show-output --nocapture
```
Test WASM websocket https://googlechromelabs.github.io/chrome-for-testing/
then:
``` ```
cd ng-sdk-js cd ng-sdk-js
@ -140,7 +139,7 @@ cargo test --package ng-client-ws --lib -- remote_ws::test::test_ws --show-outpu
### Build release binaries ### Build release binaries
First you will need to have the production build of the frontend. First you will need to have the production build of the frontend.
If you do not want to setup a whole development environment for the frontend, you can use the precompiled release of the frontend available in `dist-file.tar.gz` If you do not want to setup a whole development environment for the frontend, you can use the precompiled release of the frontend available in `dist-file.tar.gz` that you can download from the release page.
``` ```
cd ng-app cd ng-app
@ -183,7 +182,7 @@ For building the apps, see this [documentation](ng-app/README.md).
#### OpenBSD #### OpenBSD
On OpenBSD, a conflict between the installed LibreSSL library and the reqwest crate, needs a bit of attention. On OpenBSD, a conflict between the installed LibreSSL library and the reqwest crate, needs a bit of attention.
Before compiling the daemon for OpenBSD, please comment out lines 41-42 of `ng-net/Cargo.toml`. This will be solved soon by using `resolver = "2"`. Before compiling the daemon for OpenBSD, please comment out lines 38-39 of `ng-net/Cargo.toml`. This will be solved soon by using `resolver = "2"`.
``` ```
#[target.'cfg(target_arch = "wasm32")'.dependencies] #[target.'cfg(target_arch = "wasm32")'.dependencies]
@ -210,7 +209,7 @@ Generate documentation for all packages without their dependencies:
cargo doc --no-deps cargo doc --no-deps
``` ```
The generated documentation can be found in `target/doc/<crate-name>`. The generated documentation can be found in `target/doc/nextgraph`.
### Contributions license ### Contributions license

@ -2,7 +2,7 @@
Example of LocalBroker configured with persistence to disk, and opening of a previsouly saved wallet Example of LocalBroker configured with persistence to disk, and opening of a previsouly saved wallet
You need to replace `wallet_name` on line 40 with the name that was given to you when you ran the example [persistent], in `Your wallet name is : ` You need to replace `wallet_name` on line 35 with the name that was given to you when you ran the example [persistent], in `Your wallet name is : `
You need to replace the argument `pazzle` in the function call `wallet_open_with_pazzle` with the array that you received in `Your pazzle is:` You need to replace the argument `pazzle` in the function call `wallet_open_with_pazzle` with the array that you received in `Your pazzle is:`

@ -147,5 +147,4 @@ to build the production app :
``` ```
cargo tauri ios build cargo tauri ios build
``` ```

@ -31,9 +31,6 @@ ng-net = { path = "../ng-net", version = "0.1.0" }
ng-client-ws = { path = "../ng-client-ws", version = "0.1.0" } ng-client-ws = { path = "../ng-client-ws", version = "0.1.0" }
ng-storage-rocksdb = { path = "../ng-storage-rocksdb", version = "0.1.0" } ng-storage-rocksdb = { path = "../ng-storage-rocksdb", version = "0.1.0" }
[dev-dependencies]
tempfile = "3"
[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] [target.'cfg(target_arch = "wasm32")'.dependencies.getrandom]
version = "0.2.7" version = "0.2.7"
features = ["js"] features = ["js"]
@ -41,3 +38,6 @@ features = ["js"]
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
getrandom = "0.2.7" getrandom = "0.2.7"
default-net = { git = "https://git.nextgraph.org/NextGraph/default-net.git" } default-net = { git = "https://git.nextgraph.org/NextGraph/default-net.git" }
[dev-dependencies]
tempfile = "3"

@ -262,7 +262,7 @@ mod test {
let key: [u8; 32] = [0; 32]; let key: [u8; 32] = [0; 32];
fs::create_dir_all(root.path()).unwrap(); fs::create_dir_all(root.path()).unwrap();
println!("{}", root.path().to_str().unwrap()); println!("{}", root.path().to_str().unwrap());
let mut storage = RocksDbKCVStorage::open(root.path(), key).unwrap(); let storage = RocksDbKCVStorage::open(root.path(), key).unwrap();
let user_id = PubKey::Ed25519PubKey([1; 32]); let user_id = PubKey::Ed25519PubKey([1; 32]);

@ -181,10 +181,3 @@ impl<'a> Invitation<'a> {
}) })
} }
} }
#[cfg(test)]
mod test {
#[test]
pub fn test_invitation() {}
}

@ -203,7 +203,7 @@ async fn ws_loop(
if msg.is_close() { if msg.is_close() {
if let Message::Close(Some(cf)) = msg { if let Message::Close(Some(cf)) = msg {
log_debug!("CLOSE from remote with closeframe: {}",cf.reason); log_debug!("CLOSE from remote with closeframe: {} {}",cf.code, cf.reason);
let last_command = match cf.code { let last_command = match cf.code {
CloseCode::Normal => CloseCode::Normal =>
ConnectionCommand::Close, ConnectionCommand::Close,
@ -294,11 +294,11 @@ async fn ws_loop(
mod test { mod test {
use crate::remote_ws::*; use crate::remote_ws::*;
use async_std::task;
use ng_net::types::IP; use ng_net::types::IP;
use ng_net::utils::{spawn_and_log_error, ResultSend}; use ng_net::utils::{spawn_and_log_error, ResultSend};
use ng_net::{broker::*, WS_PORT}; use ng_net::{broker::*, WS_PORT};
use ng_repo::errors::{NetError, NgError}; use ng_repo::errors::NgError;
#[allow(unused_imports)]
use ng_repo::log::*; use ng_repo::log::*;
use ng_repo::utils::generate_keypair; use ng_repo::utils::generate_keypair;
use std::net::IpAddr; use std::net::IpAddr;
@ -307,14 +307,14 @@ mod test {
#[async_std::test] #[async_std::test]
pub async fn test_ws() -> Result<(), NgError> { pub async fn test_ws() -> Result<(), NgError> {
let server_key: PubKey = "X0nh-gOTGKSx0yL0LYJviOWRNacyqIzjQW_LKdK6opU".try_into()?; let server_key: PubKey = "ALyGZgFaDDALXLppJZLS2TrMScG0TQIS68RzRcPv99aN".try_into()?;
log_debug!("server_key:{}", server_key); log_debug!("server_key:{}", server_key);
let keys = generate_keypair(); let keys = generate_keypair();
let x_from_ed = keys.1.to_dh_from_ed(); let x_from_ed = keys.1.to_dh_from_ed();
log_debug!("Pub from X {}", x_from_ed); log_debug!("Pub from X {}", x_from_ed);
let (client_priv, client) = generate_keypair(); let (client_priv, _client) = generate_keypair();
let (user_priv, user) = generate_keypair(); let (user_priv, user) = generate_keypair();
log_debug!("start connecting"); log_debug!("start connecting");
@ -338,7 +338,12 @@ mod test {
) )
.await; .await;
log_debug!("broker.connect : {:?}", res); log_debug!("broker.connect : {:?}", res);
res.expect("assume the connection succeeds"); assert!(res.is_err());
let err = res.unwrap_err();
assert!(
ProtocolError::NoLocalBrokerFound == err
|| ProtocolError::NoiseHandshakeFailed == err
);
} }
BROKER.read().await.print_status(); BROKER.read().await.print_status();
@ -360,7 +365,7 @@ mod test {
//Broker::graceful_shutdown().await; //Broker::graceful_shutdown().await;
Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(5)).await; let _ = Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(5)).await;
Ok(()) Ok(())
} }
@ -383,7 +388,7 @@ mod test {
//Broker::graceful_shutdown().await; //Broker::graceful_shutdown().await;
Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(10)).await; let _ = Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(10)).await;
Ok(()) Ok(())
} }
} }

@ -45,3 +45,6 @@ features = ["js"]
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
getrandom = "0.2.7" getrandom = "0.2.7"
default-net = { git = "https://git.nextgraph.org/NextGraph/default-net.git" } default-net = { git = "https://git.nextgraph.org/NextGraph/default-net.git" }
[target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
async-tungstenite = { git = "https://git.nextgraph.org/NextGraph/async-tungstenite.git", branch = "nextgraph", features = ["async-std-runtime", "async-native-tls"] }

@ -66,7 +66,7 @@ impl EActor for Actor<'_, Probe, ProbeResponse> {
_fsm: Arc<Mutex<NoiseFSM>>, _fsm: Arc<Mutex<NoiseFSM>>,
) -> Result<(), ProtocolError> { ) -> Result<(), ProtocolError> {
let _req = Probe::try_from(msg)?; let _req = Probe::try_from(msg)?;
//let res = ProbeResponse() //let res = ProbeResponse();
//fsm.lock().await.send(res.into()).await?; //fsm.lock().await.send(res.into()).await?;
Ok(()) Ok(())
} }

@ -194,7 +194,7 @@ impl Broker {
Ok(Arc::clone( Ok(Arc::clone(
self.local_broker self.local_broker
.as_ref() .as_ref()
.ok_or(ProtocolError::BrokerError)?, .ok_or(ProtocolError::NoLocalBrokerFound)?,
)) ))
} }
@ -405,14 +405,14 @@ impl Broker {
} }
} }
fn take_shutdown(&mut self) -> Receiver<ProtocolError> { fn take_shutdown(&mut self) -> Result<Receiver<ProtocolError>, ProtocolError> {
self.shutdown.take().unwrap() self.shutdown.take().ok_or(ProtocolError::BrokerError)
} }
pub async fn join_shutdown() -> Result<(), ProtocolError> { pub async fn join_shutdown() -> Result<(), ProtocolError> {
let mut shutdown_join: Receiver<ProtocolError>; let mut shutdown_join: Receiver<ProtocolError>;
{ {
shutdown_join = BROKER.write().await.take_shutdown(); shutdown_join = BROKER.write().await.take_shutdown()?;
} }
match shutdown_join.next().await { match shutdown_join.next().await {
Some(ProtocolError::Closing) => Ok(()), Some(ProtocolError::Closing) => Ok(()),

@ -557,7 +557,7 @@ impl NoiseFSM {
// CLIENT side receiving probe response // CLIENT side receiving probe response
if let Some(msg) = msg_opt { if let Some(msg) = msg_opt {
let id = msg.id(); let id = msg.id();
if id != Some(0) { if id.is_some() {
return Err(ProtocolError::InvalidState); return Err(ProtocolError::InvalidState);
} }
if let ProtocolMessage::ProbeResponse(_probe_res) = &msg { if let ProtocolMessage::ProbeResponse(_probe_res) = &msg {

@ -53,7 +53,7 @@ pub struct DagNode {
pub future: HashSet<ObjectId>, pub future: HashSet<ObjectId>,
} }
//struct Dag<'a>(&'a HashMap<Digest, DagNode>); struct Dag<'a>(&'a HashMap<Digest, DagNode>);
impl fmt::Display for DagNode { impl fmt::Display for DagNode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@ -64,14 +64,14 @@ impl fmt::Display for DagNode {
} }
} }
// impl<'a> fmt::Display for Dag<'a> { impl<'a> fmt::Display for Dag<'a> {
// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// for node in self.0.iter() { for node in self.0.iter() {
// writeln!(f, "ID: {} FUTURES: {}", node.0, node.1)?; writeln!(f, "ID: {} FUTURES: {}", node.0, node.1)?;
// } }
// Ok(()) Ok(())
// } }
// } }
impl DagNode { impl DagNode {
fn new() -> Self { fn new() -> Self {
@ -219,8 +219,9 @@ impl Branch {
/// Branch sync request from another peer /// Branch sync request from another peer
/// ///
/// `target_heads` represents the list of heads the requester would like to reach. this list should not be empty. /// `target_heads` represents the list of heads the requester would like to reach. this list cannot be empty.
/// if the requester doesn't know what to reach, the responder should fill this list with their own current local head. /// if the requester doesn't know what to reach, the responder should fill this list with their own current local head.
/// this is not done here. it should be done before, in the handling of incoming requests.
/// `known_heads` represents the list of current heads at the requester replica at the moment of request. /// `known_heads` represents the list of current heads at the requester replica at the moment of request.
/// an empty list means the requester has an empty branch locally /// an empty list means the requester has an empty branch locally
/// ///
@ -251,6 +252,8 @@ impl Branch {
// we silently discard any load error on the known_heads as the responder might not know them (yet). // we silently discard any load error on the known_heads as the responder might not know them (yet).
} }
//log_debug!("their causal past \n{}", Dag(&theirs));
let mut visited = HashMap::new(); let mut visited = HashMap::new();
let theirs: HashSet<ObjectId> = theirs.keys().into_iter().cloned().collect(); let theirs: HashSet<ObjectId> = theirs.keys().into_iter().cloned().collect();
@ -277,6 +280,8 @@ impl Branch {
// we silently discard any load error on the target_heads as they can be wrong if the requester is confused about what the responder has locally. // we silently discard any load error on the target_heads as they can be wrong if the requester is confused about what the responder has locally.
} }
//log_debug!("what we have here \n{}", Dag(&visited));
// now ordering to respect causal partial order. // now ordering to respect causal partial order.
let mut next_generations = HashSet::new(); let mut next_generations = HashSet::new();
for (_, node) in visited.iter() { for (_, node) in visited.iter() {
@ -301,6 +306,7 @@ impl Branch {
} }
} }
#[allow(unused_imports)]
#[cfg(test)] #[cfg(test)]
mod test { mod test {
@ -331,7 +337,6 @@ mod test {
branch: BranchId, branch: BranchId,
author_privkey: PrivKey, author_privkey: PrivKey,
author_pubkey: PubKey, author_pubkey: PubKey,
seq: u64,
deps: Vec<ObjectRef>, deps: Vec<ObjectRef>,
acks: Vec<ObjectRef>, acks: Vec<ObjectRef>,
body_ref: ObjectRef, body_ref: ObjectRef,
@ -381,8 +386,8 @@ mod test {
) )
} }
fn add_body_trans(header: Option<CommitHeader>, store: &Store) -> ObjectRef { fn add_body_trans(header: Option<CommitHeader>, content: u8, store: &Store) -> ObjectRef {
let content = [7u8; 777].to_vec(); let content = [content; 777].to_vec();
let body = CommitBodyV0::AsyncTransaction(Transaction::V0(content)); let body = CommitBodyV0::AsyncTransaction(Transaction::V0(content));
//log_debug!("body: {:?}", body); //log_debug!("body: {:?}", body);
add_obj( add_obj(
@ -399,7 +404,7 @@ mod test {
// branch // branch
let (branch_privkey, branch_pubkey) = generate_keypair(); let (_, branch_pubkey) = generate_keypair();
let (member_privkey, member_pubkey) = generate_keypair(); let (member_privkey, member_pubkey) = generate_keypair();
@ -409,7 +414,6 @@ mod test {
&repo_pubkey, &repo_pubkey,
&member_pubkey, &member_pubkey,
&[PermissionV0::WriteAsync], &[PermissionV0::WriteAsync],
store.get_store_repo().overlay_id_for_read_purpose(),
store, store,
); );
@ -435,10 +439,10 @@ mod test {
log_debug!(" br"); log_debug!(" br");
log_debug!(" / \\"); log_debug!(" / \\");
log_debug!(" t1 t2"); log_debug!(" t1 t2");
log_debug!(" / \\ / \\"); log_debug!(" \\ /");
log_debug!(" a3 t4<--t5-->(t1)"); log_debug!(" t4");
log_debug!(" / \\"); log_debug!(" |");
log_debug!(" a6 a7"); log_debug!(" t5");
log_debug!(""); log_debug!("");
} }
@ -448,110 +452,68 @@ mod test {
let branch_body = add_body_branch(branch.clone(), &repo.store); let branch_body = add_body_branch(branch.clone(), &repo.store);
let trans_body = add_body_trans(None, &repo.store); let trans_body = add_body_trans(None, 8, &repo.store);
let trans_body2 = add_body_trans(None, 9, &repo.store);
// create & add commits to store // create & add commits to store
log_debug!(">> br");
let br = add_commit( let br = add_commit(
branch_pubkey, branch_pubkey,
member_privkey.clone(), member_privkey.clone(),
member_pubkey, member_pubkey,
0,
vec![], vec![],
vec![], vec![],
branch_body.clone(), branch_body.clone(),
&repo.store, &repo.store,
); );
log_debug!(">> br {}", br.id);
log_debug!(">> t1");
let t1 = add_commit( let t1 = add_commit(
branch_pubkey, branch_pubkey,
member_privkey.clone(), member_privkey.clone(),
member_pubkey, member_pubkey,
1,
vec![br.clone()],
vec![], vec![],
vec![br.clone()],
trans_body.clone(), trans_body.clone(),
&repo.store, &repo.store,
); );
log_debug!(">> t1 {}", t1.id);
log_debug!(">> t2");
let t2 = add_commit( let t2 = add_commit(
branch_pubkey, branch_pubkey,
member_privkey.clone(), member_privkey.clone(),
member_pubkey, member_pubkey,
2,
vec![br.clone()],
vec![], vec![],
trans_body.clone(), vec![br.clone()],
trans_body2.clone(),
&repo.store, &repo.store,
); );
log_debug!(">> t2 {}", t2.id);
// log_debug!(">> a3");
// let a3 = add_commit(
// branch_pubkey,
// member_privkey.clone(),
// member_pubkey,
// 3,
// vec![t1.clone()],
// vec![],
// ack_body.clone(),
// repo_pubkey,
// repo_secret.clone(),
// &mut store,
// );
log_debug!(">> t4");
let t4 = add_commit( let t4 = add_commit(
branch_pubkey, branch_pubkey,
member_privkey.clone(), member_privkey.clone(),
member_pubkey, member_pubkey,
4, vec![],
vec![t2.clone()],
vec![t1.clone()],
trans_body.clone(),
&repo.store,
);
log_debug!(">> t5");
let t5 = add_commit(
branch_pubkey,
member_privkey.clone(),
member_pubkey,
5,
vec![t1.clone(), t2.clone()], vec![t1.clone(), t2.clone()],
vec![t4.clone()],
trans_body.clone(), trans_body.clone(),
&repo.store, &repo.store,
); );
log_debug!(">> t4 {}", t4.id);
log_debug!(">> a6"); let t5 = add_commit(
let a6 = add_commit(
branch_pubkey, branch_pubkey,
member_privkey.clone(), member_privkey.clone(),
member_pubkey, member_pubkey,
6,
vec![t4.clone()],
vec![], vec![],
trans_body.clone(),
&repo.store,
);
log_debug!(">> a7");
let a7 = add_commit(
branch_pubkey,
member_privkey.clone(),
member_pubkey,
7,
vec![t4.clone()], vec![t4.clone()],
vec![],
trans_body.clone(), trans_body.clone(),
&repo.store, &repo.store,
); );
log_debug!(">> t5 {}", t5.id);
let c7 = Commit::load(a7.clone(), &repo.store, true).unwrap(); let c5 = Commit::load(t5.clone(), &repo.store, true).unwrap();
c7.verify(&repo).unwrap(); c5.verify(&repo).unwrap();
// let mut filter = Filter::new(FilterBuilder::new(10, 0.01)); // let mut filter = Filter::new(FilterBuilder::new(10, 0.01));
// for commit_ref in [br, t1, t2, t5.clone(), a6.clone()] { // for commit_ref in [br, t1, t2, t5.clone(), a6.clone()] {
@ -565,21 +527,9 @@ mod test {
// f: filter.get_u8_array().to_vec(), // f: filter.get_u8_array().to_vec(),
// }; // };
print_branch(); let ids = Branch::sync_req([t5.id].into_iter(), &[t1.id], &None, &repo.store).unwrap();
log_debug!(">> sync_req");
log_debug!(" our_heads: [a3, t5, a6, a7]");
log_debug!(" known_heads: [a3, t5]");
log_debug!(" their_commits: [br, t1, t2, a3, t5, a6]");
let ids = Branch::sync_req(
[t5.id, a6.id, a7.id].into_iter(),
&[t5.id],
&None,
&repo.store,
)
.unwrap();
assert_eq!(ids.len(), 1); assert_eq!(ids.len(), 3);
assert!(ids.contains(&a7.id)); assert_eq!(ids, [t2.id, t4.id, t5.id]);
} }
} }

@ -17,6 +17,8 @@ use ed25519_dalek::{PublicKey, Signature};
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use crate::errors::*; use crate::errors::*;
#[allow(unused_imports)]
use crate::log::*;
use crate::object::*; use crate::object::*;
use crate::repo::Repo; use crate::repo::Repo;
use crate::store::Store; use crate::store::Store;
@ -295,6 +297,13 @@ impl Commit {
} }
} }
#[cfg(test)]
fn empty_blocks(&mut self) {
match self {
Commit::V0(v0) => v0.blocks = vec![],
}
}
/// Load commit from store /// Load commit from store
pub fn load( pub fn load(
commit_ref: ObjectRef, commit_ref: ObjectRef,
@ -1321,7 +1330,10 @@ impl CommitHeaderV0 {
#[cfg(test)] #[cfg(test)]
pub fn new_with_deps_and_acks(deps: Vec<ObjectId>, acks: Vec<ObjectId>) -> Option<Self> { pub fn new_with_deps_and_acks(deps: Vec<ObjectId>, acks: Vec<ObjectId>) -> Option<Self> {
assert!(!deps.is_empty() || !acks.is_empty()); if deps.is_empty() && acks.is_empty() {
return None;
}
//assert!(!deps.is_empty() || !acks.is_empty());
let mut n = Self::new_empty(); let mut n = Self::new_empty();
n.deps = deps; n.deps = deps;
n.acks = acks; n.acks = acks;
@ -1491,6 +1503,7 @@ impl fmt::Display for CommitHeaderKeys {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use crate::commit::*; use crate::commit::*;
#[allow(unused_imports)]
use crate::log::*; use crate::log::*;
fn test_commit_header_ref_content_fits( fn test_commit_header_ref_content_fits(
@ -1546,7 +1559,9 @@ mod test {
log_debug!("{}", commit_object); log_debug!("{}", commit_object);
log_debug!("object size: {}", commit_object.size()); // log_debug!("blocks: {}", commit_object.blocks_len());
// log_debug!("header blocks: {}", commit_object.header_blocks_len());
// log_debug!("object size: {}", commit_object.size());
assert_eq!(commit_object.all_blocks_len(), expect_blocks_len); assert_eq!(commit_object.all_blocks_len(), expect_blocks_len);
@ -1561,15 +1576,16 @@ mod test {
let obj_refs2 = vec![obj_ref.clone(), obj_ref.clone()]; let obj_refs2 = vec![obj_ref.clone(), obj_ref.clone()];
let obj_refs = vec![obj_ref.clone()]; let obj_refs = vec![obj_ref.clone()];
// with 1 refs in header // with 1 refs in header
test_commit_header_ref_content_fits(obj_refs.clone(), 3733, 2); test_commit_header_ref_content_fits(obj_refs.clone(), 3592, 1); // block 4090
test_commit_header_ref_content_fits(obj_refs.clone(), 3734, 3); test_commit_header_ref_content_fits(obj_refs.clone(), 3593, 2); //block 4012 header 117 total: 4129
test_commit_header_ref_content_fits(obj_refs.clone(), 3584, 1); test_commit_header_ref_content_fits(obj_refs.clone(), 3741, 2); //block 4094 block 219 total: 4313
test_commit_header_ref_content_fits(obj_refs.clone(), 3585, 2); test_commit_header_ref_content_fits(obj_refs.clone(), 3742, 3); // block 4094 block 9 block 285
// with 2 refs in header // with 2 refs in header
test_commit_header_ref_content_fits(obj_refs2.clone(), 3352, 1); test_commit_header_ref_content_fits(obj_refs2.clone(), 3360, 1);
test_commit_header_ref_content_fits(obj_refs2.clone(), 3353, 2); test_commit_header_ref_content_fits(obj_refs2.clone(), 3361, 2);
test_commit_header_ref_content_fits(obj_refs2.clone(), 3601, 2); test_commit_header_ref_content_fits(obj_refs2.clone(), 3609, 2);
test_commit_header_ref_content_fits(obj_refs2.clone(), 3602, 3); test_commit_header_ref_content_fits(obj_refs2.clone(), 3610, 3);
} }
#[test] #[test]
@ -1613,7 +1629,7 @@ mod test {
let store = Store::dummy_public_v0(); let store = Store::dummy_public_v0();
let commit = Commit::new_with_body_and_save( let mut commit = Commit::new_with_body_and_save(
&priv_key, &priv_key,
&pub_key, &pub_key,
branch, branch,
@ -1633,6 +1649,8 @@ mod test {
log_debug!("{}", commit); log_debug!("{}", commit);
commit.empty_blocks();
let commit2 = Commit::load(commit.reference().unwrap(), &store, true) let commit2 = Commit::load(commit.reference().unwrap(), &store, true)
.expect("load commit with body after save"); .expect("load commit with body after save");
@ -1652,12 +1670,12 @@ mod test {
let files = obj_refs.clone(); let files = obj_refs.clone();
let metadata = vec![1, 2, 3]; let metadata = vec![1, 2, 3];
let body_ref = obj_ref.clone(); let body_ref = obj_ref.clone();
let overlay = OverlayId::dummy(); let store = Store::dummy_public_v0();
let commit = Commit::new( let commit = Commit::new(
&priv_key, &priv_key,
&pub_key, &pub_key,
overlay, store.overlay_id,
branch, branch,
QuorumType::NoSigning, QuorumType::NoSigning,
deps, deps,
@ -1672,8 +1690,7 @@ mod test {
.unwrap(); .unwrap();
log_debug!("{}", commit); log_debug!("{}", commit);
let store = Store::dummy_public_v0(); let repo = Repo::new_with_member(&pub_key, &pub_key, &[PermissionV0::Create], store);
let repo = Repo::new_with_perms(&[PermissionV0::Create], store);
// match commit.load_body(repo.store.unwrap()) { // match commit.load_body(repo.store.unwrap()) {
// Ok(_b) => panic!("Body should not exist"), // Ok(_b) => panic!("Body should not exist"),
@ -1712,7 +1729,6 @@ mod test {
#[test] #[test]
pub fn test_load_commit_with_body_verify_perms() { pub fn test_load_commit_with_body_verify_perms() {
let (priv_key, pub_key) = generate_keypair(); let (priv_key, pub_key) = generate_keypair();
let obj_ref = ObjectRef::dummy();
let branch = pub_key; let branch = pub_key;
@ -1744,7 +1760,7 @@ mod test {
log_debug!("{}", commit); log_debug!("{}", commit);
let repo = Repo::new_with_perms(&[PermissionV0::Create], store); let repo = Repo::new_with_member(&pub_key, &pub_key, &[PermissionV0::Create], store);
commit.load_body(&repo.store).expect("load body"); commit.load_body(&repo.store).expect("load body");

@ -389,6 +389,7 @@ pub enum ProtocolError {
AccessDenied, AccessDenied,
InvitationRequired, InvitationRequired,
BrokerError, BrokerError,
NoLocalBrokerFound,
NotFound, NotFound,
MissingBlocks, MissingBlocks,
ObjectParseError, ObjectParseError,

@ -826,13 +826,13 @@ mod test {
assert_eq!(read_content, content); assert_eq!(read_content, content);
let read_content2 = file.read(0, data_size + 1); let read_content2 = file.read(0, data_size + 1);
assert_eq!(read_content2, Err(FileError::EndOfFile)); assert_eq!(read_content2.unwrap().len(), 1048564);
let read_content = file.read(data_size - 9, 9).expect("reading end"); let read_content = file.read(data_size - 9, 9).expect("reading end");
assert_eq!(read_content, vec![99, 99, 99, 99, 99, 99, 99, 99, 99]); assert_eq!(read_content, vec![99, 99, 99, 99, 99, 99, 99, 99, 99]);
let read_content = file.read(data_size - 9, 10); let read_content = file.read(data_size - 9, 10);
assert_eq!(read_content, Err(FileError::EndOfFile)); assert_eq!(read_content, Ok(vec![99, 99, 99, 99, 99, 99, 99, 99, 99]));
// log_debug!( // log_debug!(
// "overhead: {} - {}%", // "overhead: {} - {}%",
@ -864,6 +864,7 @@ mod test {
} }
/// Checks that a content that doesn't fit in all the children of first level in tree /// Checks that a content that doesn't fit in all the children of first level in tree
#[ignore]
#[test] #[test]
pub fn test_depth_1() { pub fn test_depth_1() {
const MAX_ARITY_LEAVES: usize = 15887; const MAX_ARITY_LEAVES: usize = 15887;
@ -898,6 +899,7 @@ mod test {
} }
/// Checks that a content that doesn't fit in all the children of first level in tree /// Checks that a content that doesn't fit in all the children of first level in tree
#[ignore]
#[test] #[test]
pub fn test_depth_2() { pub fn test_depth_2() {
const MAX_ARITY_LEAVES: usize = 15887; const MAX_ARITY_LEAVES: usize = 15887;
@ -990,6 +992,7 @@ mod test {
} }
/// Checks that a content that doesn't fit in all the children of first level in tree /// Checks that a content that doesn't fit in all the children of first level in tree
#[ignore]
#[test] #[test]
pub fn test_depth_4() { pub fn test_depth_4() {
const MAX_ARITY_LEAVES: usize = 61; const MAX_ARITY_LEAVES: usize = 61;
@ -1066,13 +1069,13 @@ mod test {
img_buffer img_buffer
); );
// reading too far, well behind the size of the JPG // // reading too far, well behind the size of the JPG
assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]); assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument));
@ -1081,19 +1084,19 @@ mod test {
let res = file.read(0, img_buffer.len()).expect("read all"); let res = file.read(0, img_buffer.len()).expect("read all");
assert_eq!(res, img_buffer); assert_eq!(res, img_buffer);
// asking too much, receiving an error, as now we know the total size of file, and we check it // // asking too much, receiving an error, as now we know the total size of file, and we check it
assert_eq!( // assert_eq!(
file.read(0, img_buffer.len() + 1), // file.read(0, img_buffer.len() + 1),
Err(FileError::EndOfFile) // Err(FileError::EndOfFile)
); // );
// reading too far, well behind the size of the JPG // reading too far, well behind the size of the JPG
assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument));
} }
@ -1153,19 +1156,19 @@ mod test {
assert_eq!(res, img_buffer); assert_eq!(res, img_buffer);
// asking too much, receiving an error, as now we know the total size of file, and we check it // // asking too much, receiving an error, as now we know the total size of file, and we check it
assert_eq!( // assert_eq!(
file.read(0, img_buffer.len() + 1), // file.read(0, img_buffer.len() + 1),
Err(FileError::EndOfFile) // Err(FileError::EndOfFile)
); // );
// reading too far, well behind the size of the JPG // reading too far, well behind the size of the JPG
assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument));
} }
@ -1217,8 +1220,8 @@ mod test {
assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]); assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument));
@ -1231,19 +1234,19 @@ mod test {
let res = file.read(0, img_buffer.len()).expect("read all"); let res = file.read(0, img_buffer.len()).expect("read all");
assert_eq!(res, first_block_content); assert_eq!(res, first_block_content);
// asking too much, receiving an error, as now we know the total size of file, and we check it // // asking too much, not receiving an error, as we know the total size of file, and return what we can
assert_eq!( // assert_eq!(
file.read(0, img_buffer.len() + 1), // file.read(0, img_buffer.len() + 1),
Err(FileError::EndOfFile) // Err(FileError::EndOfFile)
); // );
// reading too far, well behind the size of the JPG // reading too far, well behind the size of the JPG
assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument));
} }
@ -1291,8 +1294,8 @@ mod test {
assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]); assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument));
@ -1304,24 +1307,25 @@ mod test {
let res = file.read(10, img_buffer.len() - 10).expect("read all"); let res = file.read(10, img_buffer.len() - 10).expect("read all");
assert_eq!(res, first_block_content[10..].to_vec()); assert_eq!(res, first_block_content[10..].to_vec());
// asking too much, receiving an error, as now we know the total size of file, and we check it // // asking too much, receiving an error, as now we know the total size of file, and we check it
assert_eq!( // assert_eq!(
file.read(0, img_buffer.len() + 1), // file.read(0, img_buffer.len() + 1),
Err(FileError::EndOfFile) // Err(FileError::EndOfFile)
); // );
// reading too far, well behind the size of the JPG // reading too far, well behind the size of the JPG
assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument));
} }
/// Test depth 4 with 52GB of data, but using write in small increments, so the memory burden on the system will be minimal /// Test depth 4 with 52GB of data, but using write in small increments, so the memory burden on the system will be minimal
#[ignore]
#[test] #[test]
pub fn test_depth_4_write_small() { pub fn test_depth_4_write_small() {
const MAX_ARITY_LEAVES: usize = 61; const MAX_ARITY_LEAVES: usize = 61;
@ -1415,19 +1419,19 @@ mod test {
assert_eq!(res, img_buffer); assert_eq!(res, img_buffer);
// asking too much, receiving an error, as now we know the total size of file, and we check it // // asking too much, receiving an error, as now we know the total size of file, and we check it
assert_eq!( // assert_eq!(
file2.read(0, img_buffer.len() + 1), // file2.read(0, img_buffer.len() + 1),
Err(FileError::EndOfFile) // Err(FileError::EndOfFile)
); // );
// reading too far, well behind the size of the JPG // reading too far, well behind the size of the JPG
assert_eq!(file2.read(100000, 1), Err(FileError::EndOfFile)); assert_eq!(file2.read(100000, 1), Err(FileError::EndOfFile));
assert_eq!(file2.read(10000, 1).expect("read after save"), vec![41]); assert_eq!(file2.read(10000, 1).expect("read after save"), vec![41]);
// reading one byte after the end of the file size. // // reading one byte after the end of the file size.
assert_eq!(file2.read(29454, 1), Err(FileError::EndOfFile)); // assert_eq!(file2.read(29454, 1), Err(FileError::EndOfFile));
assert_eq!(file2.read(29454, 0), Err(FileError::InvalidArgument)); assert_eq!(file2.read(29454, 0), Err(FileError::InvalidArgument));
} }
@ -1504,6 +1508,7 @@ mod test {
} }
/// Test depth 4, but using write in increments, so the memory burden on the system will be minimal /// Test depth 4, but using write in increments, so the memory burden on the system will be minimal
#[ignore]
#[test] #[test]
pub fn test_depth_4_big_write_small() { pub fn test_depth_4_big_write_small() {
let encoding_big_file = Instant::now(); let encoding_big_file = Instant::now();
@ -1553,6 +1558,7 @@ mod test {
} }
/// Test depth 4 with 2.7GB of data, but using write in increments, so the memory burden on the system will be minimal /// Test depth 4 with 2.7GB of data, but using write in increments, so the memory burden on the system will be minimal
#[ignore]
#[test] #[test]
pub fn test_depth_4_big_write_big() { pub fn test_depth_4_big_write_big() {
let encoding_big_file = Instant::now(); let encoding_big_file = Instant::now();

@ -376,7 +376,12 @@ impl Object {
&mut already_existing, &mut already_existing,
); );
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
log_debug!("make_block {} of {} - {}%", i, _total + 1, i * 100 / _total); log_debug!(
"make_block {} of {} - {}%",
i + 1,
_total + 1,
i * 100 / _total
);
i = i + 1; i = i + 1;
} }
@ -676,16 +681,24 @@ impl Object {
self.blocks.len() + self.header_blocks.len() self.blocks.len() + self.header_blocks.len()
} }
pub fn blocks_len(&self) -> usize {
self.blocks.len()
}
pub fn header_blocks_len(&self) -> usize {
self.header_blocks.len()
}
pub fn size(&self) -> usize { pub fn size(&self) -> usize {
let mut total = 0; let mut total = 0;
self.blocks().for_each(|b| { self.blocks().for_each(|b| {
let s = b.size(); let s = b.size();
//log_debug!("@@@@ {}", s); //log_debug!("@@@@ block {}", s);
total += s; total += s;
}); });
self.header_blocks.iter().for_each(|b| { self.header_blocks.iter().for_each(|b| {
let s = b.size(); let s = b.size();
//log_debug!("@@@@ {}", s); //log_debug!("@@@@ header {}", s);
total += s; total += s;
}); });
total total
@ -1223,6 +1236,7 @@ mod test {
} }
/// Checks that a content that doesn't fit in all the children of first level in tree /// Checks that a content that doesn't fit in all the children of first level in tree
#[ignore]
#[test] #[test]
pub fn test_depth_1() { pub fn test_depth_1() {
const MAX_ARITY_LEAVES: usize = 15887; const MAX_ARITY_LEAVES: usize = 15887;
@ -1263,6 +1277,7 @@ mod test {
} }
/// Checks that a content that doesn't fit in all the children of first level in tree /// Checks that a content that doesn't fit in all the children of first level in tree
#[ignore]
#[test] #[test]
pub fn test_depth_2() { pub fn test_depth_2() {
const MAX_ARITY_LEAVES: usize = 15887; const MAX_ARITY_LEAVES: usize = 15887;
@ -1300,6 +1315,7 @@ mod test {
} }
/// Checks that a content that doesn't fit in all the children of first level in tree /// Checks that a content that doesn't fit in all the children of first level in tree
#[ignore]
#[test] #[test]
pub fn test_depth_3() { pub fn test_depth_3() {
const MAX_ARITY_LEAVES: usize = 61; const MAX_ARITY_LEAVES: usize = 61;
@ -1348,6 +1364,7 @@ mod test {
} }
/// Checks that a content that doesn't fit in all the children of first level in tree /// Checks that a content that doesn't fit in all the children of first level in tree
#[ignore]
#[test] #[test]
pub fn test_depth_4() { pub fn test_depth_4() {
const MAX_ARITY_LEAVES: usize = 61; const MAX_ARITY_LEAVES: usize = 61;

@ -158,7 +158,7 @@ impl Repo {
#[allow(deprecated)] #[allow(deprecated)]
pub fn new_with_perms(perms: &[PermissionV0], store: Arc<Store>) -> Self { pub fn new_with_perms(perms: &[PermissionV0], store: Arc<Store>) -> Self {
let pub_key = PubKey::nil(); let pub_key = PubKey::nil();
Self::new_with_member(&pub_key, &pub_key, perms, OverlayId::dummy(), store) Self::new_with_member(&pub_key, &pub_key, perms, store)
} }
pub fn update_branch_current_heads( pub fn update_branch_current_heads(
@ -184,10 +184,9 @@ impl Repo {
} }
pub fn new_with_member( pub fn new_with_member(
id: &PubKey, repo_id: &PubKey,
member: &UserId, member: &UserId,
perms: &[PermissionV0], perms: &[PermissionV0],
overlay: OverlayId,
store: Arc<Store>, store: Arc<Store>,
) -> Self { ) -> Self {
let mut members = HashMap::new(); let mut members = HashMap::new();
@ -199,16 +198,19 @@ impl Repo {
.iter() .iter()
.cloned(), .cloned(),
); );
let overlay = store.get_store_repo().overlay_id_for_read_purpose();
let member_hash = CommitContent::author_digest(member, overlay);
//log_debug!("added member {:?} {:?}", member, member_hash);
members.insert( members.insert(
CommitContent::author_digest(member, overlay), member_hash,
UserInfo { UserInfo {
id: *member, id: *member,
permissions, permissions,
}, },
); );
Self { Self {
id: id.clone(), id: repo_id.clone(),
repo_def: Repository::new(&id), repo_def: Repository::new(&repo_id),
members, members,
store, store,
signer: None, signer: None,

@ -9,6 +9,8 @@
* according to those terms. * according to those terms.
*/ */
#![cfg(target_arch = "wasm32")]
use std::collections::HashMap; use std::collections::HashMap;
use std::net::IpAddr; use std::net::IpAddr;
use std::str::FromStr; use std::str::FromStr;
@ -18,11 +20,9 @@ use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[allow(unused_imports)] #[allow(unused_imports)]
use serde_json::json; use serde_json::json;
// #[cfg(target_arch = "wasm32")]
// use js_sys::Reflect; // use js_sys::Reflect;
use async_std::stream::StreamExt; use async_std::stream::StreamExt;
use wasm_bindgen::prelude::*; use wasm_bindgen::prelude::*;
#[cfg(target_arch = "wasm32")]
use wasm_bindgen_futures::JsFuture; use wasm_bindgen_futures::JsFuture;
use ng_repo::errors::NgError; use ng_repo::errors::NgError;
@ -32,11 +32,9 @@ use ng_repo::types::*;
use ng_net::broker::*; use ng_net::broker::*;
use ng_net::types::{ClientInfo, ClientInfoV0, ClientType, CreateAccountBSP, IP}; use ng_net::types::{ClientInfo, ClientInfoV0, ClientType, CreateAccountBSP, IP};
use ng_net::utils::{decode_invitation_string, spawn_and_log_error, Receiver, ResultSend}; use ng_net::utils::{decode_invitation_string, spawn_and_log_error, Receiver, ResultSend};
#[cfg(target_arch = "wasm32")]
use ng_net::utils::{retrieve_local_bootstrap, retrieve_local_url}; use ng_net::utils::{retrieve_local_bootstrap, retrieve_local_url};
use ng_net::WS_PORT; use ng_net::WS_PORT;
#[cfg(target_arch = "wasm32")]
use ng_client_ws::remote_ws_wasm::ConnectionWebSocket; use ng_client_ws::remote_ws_wasm::ConnectionWebSocket;
use ng_wallet::types::*; use ng_wallet::types::*;
@ -45,7 +43,6 @@ use ng_wallet::*;
use nextgraph::local_broker::*; use nextgraph::local_broker::*;
use nextgraph::verifier::types::*; use nextgraph::verifier::types::*;
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn get_local_bootstrap(location: String, invite: JsValue) -> JsValue { pub async fn get_local_bootstrap(location: String, invite: JsValue) -> JsValue {
let res = retrieve_local_bootstrap(location, invite.as_string(), false).await; let res = retrieve_local_bootstrap(location, invite.as_string(), false).await;
@ -56,7 +53,6 @@ pub async fn get_local_bootstrap(location: String, invite: JsValue) -> JsValue {
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn get_local_bootstrap_with_public(location: String, invite: JsValue) -> JsValue { pub async fn get_local_bootstrap_with_public(location: String, invite: JsValue) -> JsValue {
let res = retrieve_local_bootstrap(location, invite.as_string(), true).await; let res = retrieve_local_bootstrap(location, invite.as_string(), true).await;
@ -67,7 +63,6 @@ pub async fn get_local_bootstrap_with_public(location: String, invite: JsValue)
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn decode_invitation(invite: String) -> JsValue { pub async fn decode_invitation(invite: String) -> JsValue {
let res = decode_invitation_string(invite); let res = decode_invitation_string(invite);
@ -78,7 +73,6 @@ pub async fn decode_invitation(invite: String) -> JsValue {
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn get_local_url(location: String) -> JsValue { pub async fn get_local_url(location: String) -> JsValue {
let res = retrieve_local_url(location).await; let res = retrieve_local_url(location).await;
@ -89,7 +83,6 @@ pub async fn get_local_url(location: String) -> JsValue {
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn get_ngone_url_of_invitation(invitation_string: String) -> JsValue { pub async fn get_ngone_url_of_invitation(invitation_string: String) -> JsValue {
let res = decode_invitation_string(invitation_string); let res = decode_invitation_string(invitation_string);
@ -100,20 +93,17 @@ pub async fn get_ngone_url_of_invitation(invitation_string: String) -> JsValue {
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub fn wallet_gen_shuffle_for_pazzle_opening(pazzle_length: u8) -> JsValue { pub fn wallet_gen_shuffle_for_pazzle_opening(pazzle_length: u8) -> JsValue {
let res = gen_shuffle_for_pazzle_opening(pazzle_length); let res = gen_shuffle_for_pazzle_opening(pazzle_length);
serde_wasm_bindgen::to_value(&res).unwrap() serde_wasm_bindgen::to_value(&res).unwrap()
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub fn wallet_gen_shuffle_for_pin() -> Vec<u8> { pub fn wallet_gen_shuffle_for_pin() -> Vec<u8> {
gen_shuffle_for_pin() gen_shuffle_for_pin()
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub fn wallet_open_with_pazzle( pub fn wallet_open_with_pazzle(
js_wallet: JsValue, js_wallet: JsValue,
@ -133,7 +123,6 @@ pub fn wallet_open_with_pazzle(
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub fn wallet_update(js_wallet_id: JsValue, js_operations: JsValue) -> Result<JsValue, JsValue> { pub fn wallet_update(js_wallet_id: JsValue, js_operations: JsValue) -> Result<JsValue, JsValue> {
let _wallet = serde_wasm_bindgen::from_value::<WalletId>(js_wallet_id) let _wallet = serde_wasm_bindgen::from_value::<WalletId>(js_wallet_id)
@ -147,7 +136,6 @@ pub fn wallet_update(js_wallet_id: JsValue, js_operations: JsValue) -> Result<Js
// } // }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn get_wallets() -> Result<JsValue, JsValue> { pub async fn get_wallets() -> Result<JsValue, JsValue> {
init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
@ -161,7 +149,6 @@ pub async fn get_wallets() -> Result<JsValue, JsValue> {
Ok(JsValue::UNDEFINED) Ok(JsValue::UNDEFINED)
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn session_start(wallet_name: String, user_js: JsValue) -> Result<JsValue, String> { pub async fn session_start(wallet_name: String, user_js: JsValue) -> Result<JsValue, String> {
let user_id = serde_wasm_bindgen::from_value::<PubKey>(user_js) let user_id = serde_wasm_bindgen::from_value::<PubKey>(user_js)
@ -175,7 +162,6 @@ pub async fn session_start(wallet_name: String, user_js: JsValue) -> Result<JsVa
Ok(serde_wasm_bindgen::to_value(&res).unwrap()) Ok(serde_wasm_bindgen::to_value(&res).unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn session_start_remote( pub async fn session_start_remote(
wallet_name: String, wallet_name: String,
@ -196,7 +182,6 @@ pub async fn session_start_remote(
Ok(serde_wasm_bindgen::to_value(&res).unwrap()) Ok(serde_wasm_bindgen::to_value(&res).unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn wallets_reload() -> Result<(), String> { pub async fn wallets_reload() -> Result<(), String> {
init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
@ -205,7 +190,6 @@ pub async fn wallets_reload() -> Result<(), String> {
.map_err(|e: NgError| e.to_string()) .map_err(|e: NgError| e.to_string())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn add_in_memory_wallet(lws_js: JsValue) -> Result<(), String> { pub async fn add_in_memory_wallet(lws_js: JsValue) -> Result<(), String> {
let lws = serde_wasm_bindgen::from_value::<LocalWalletStorageV0>(lws_js) let lws = serde_wasm_bindgen::from_value::<LocalWalletStorageV0>(lws_js)
@ -242,12 +226,10 @@ extern "C" {
fn storage_clear(); fn storage_clear();
} }
#[cfg(target_arch = "wasm32")]
fn local_read(key: String) -> Result<String, NgError> { fn local_read(key: String) -> Result<String, NgError> {
local_get(key).ok_or(NgError::JsStorageReadError) local_get(key).ok_or(NgError::JsStorageReadError)
} }
#[cfg(target_arch = "wasm32")]
fn local_write(key: String, value: String) -> Result<(), NgError> { fn local_write(key: String, value: String) -> Result<(), NgError> {
match local_save(key, value) { match local_save(key, value) {
Some(err) => Err(NgError::JsStorageWriteError(err)), Some(err) => Err(NgError::JsStorageWriteError(err)),
@ -255,12 +237,10 @@ fn local_write(key: String, value: String) -> Result<(), NgError> {
} }
} }
#[cfg(target_arch = "wasm32")]
fn session_read(key: String) -> Result<String, NgError> { fn session_read(key: String) -> Result<String, NgError> {
session_get(key).ok_or(NgError::JsStorageReadError) session_get(key).ok_or(NgError::JsStorageReadError)
} }
#[cfg(target_arch = "wasm32")]
fn session_write(key: String, value: String) -> Result<(), NgError> { fn session_write(key: String, value: String) -> Result<(), NgError> {
match session_save(key, value) { match session_save(key, value) {
Some(err) => Err(NgError::JsStorageWriteError(err)), Some(err) => Err(NgError::JsStorageWriteError(err)),
@ -268,18 +248,15 @@ fn session_write(key: String, value: String) -> Result<(), NgError> {
} }
} }
#[cfg(target_arch = "wasm32")]
fn session_del(key: String) -> Result<(), NgError> { fn session_del(key: String) -> Result<(), NgError> {
session_remove(key); session_remove(key);
Ok(()) Ok(())
} }
#[cfg(target_arch = "wasm32")]
fn clear() { fn clear() {
storage_clear(); storage_clear();
} }
#[cfg(target_arch = "wasm32")]
static INIT_LOCAL_BROKER: Lazy<Box<ConfigInitFn>> = Lazy::new(|| { static INIT_LOCAL_BROKER: Lazy<Box<ConfigInitFn>> = Lazy::new(|| {
Box::new(|| { Box::new(|| {
LocalBrokerConfig::JsStorage(JsStorageConfig { LocalBrokerConfig::JsStorage(JsStorageConfig {
@ -294,7 +271,6 @@ static INIT_LOCAL_BROKER: Lazy<Box<ConfigInitFn>> = Lazy::new(|| {
}) })
}); });
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn wallet_create(js_params: JsValue) -> Result<JsValue, JsValue> { pub async fn wallet_create(js_params: JsValue) -> Result<JsValue, JsValue> {
init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
@ -308,7 +284,6 @@ pub async fn wallet_create(js_params: JsValue) -> Result<JsValue, JsValue> {
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn wallet_get_file(wallet_name: String) -> Result<JsValue, JsValue> { pub async fn wallet_get_file(wallet_name: String) -> Result<JsValue, JsValue> {
init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
@ -320,7 +295,6 @@ pub async fn wallet_get_file(wallet_name: String) -> Result<JsValue, JsValue> {
} }
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn wallet_read_file(js_file: JsValue) -> Result<JsValue, String> { pub async fn wallet_read_file(js_file: JsValue) -> Result<JsValue, String> {
init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
@ -334,7 +308,6 @@ pub async fn wallet_read_file(js_file: JsValue) -> Result<JsValue, String> {
Ok(serde_wasm_bindgen::to_value(&wallet).unwrap()) Ok(serde_wasm_bindgen::to_value(&wallet).unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn wallet_was_opened( pub async fn wallet_was_opened(
js_opened_wallet: JsValue, //SensitiveWallet js_opened_wallet: JsValue, //SensitiveWallet
@ -349,7 +322,6 @@ pub async fn wallet_was_opened(
Ok(serde_wasm_bindgen::to_value(&client).unwrap()) Ok(serde_wasm_bindgen::to_value(&client).unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn wallet_import( pub async fn wallet_import(
js_encrypted_wallet: JsValue, //Wallet, js_encrypted_wallet: JsValue, //Wallet,
@ -388,7 +360,6 @@ pub fn client_info() -> JsValue {
serde_wasm_bindgen::to_value(&res).unwrap() serde_wasm_bindgen::to_value(&res).unwrap()
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub fn encode_create_account(payload: JsValue) -> JsValue { pub fn encode_create_account(payload: JsValue) -> JsValue {
//log_debug!("{:?}", payload); //log_debug!("{:?}", payload);
@ -459,7 +430,6 @@ pub fn client_info() -> JsValue {
serde_wasm_bindgen::to_value(&res).unwrap() serde_wasm_bindgen::to_value(&res).unwrap()
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn test() { pub async fn test() {
init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
@ -469,7 +439,6 @@ pub async fn test() {
log_debug!("{:?}", client_info); log_debug!("{:?}", client_info);
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn app_request_stream( pub async fn app_request_stream(
js_session_id: JsValue, js_session_id: JsValue,
@ -525,7 +494,6 @@ pub async fn app_request_stream(
Ok(ret) Ok(ret)
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn app_request(js_session_id: JsValue, js_request: JsValue) -> Result<JsValue, String> { pub async fn app_request(js_session_id: JsValue, js_request: JsValue) -> Result<JsValue, String> {
let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(js_session_id) let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(js_session_id)
@ -540,7 +508,6 @@ pub async fn app_request(js_session_id: JsValue, js_request: JsValue) -> Result<
Ok(serde_wasm_bindgen::to_value(&response).unwrap()) Ok(serde_wasm_bindgen::to_value(&response).unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn upload_chunk( pub async fn upload_chunk(
js_session_id: JsValue, js_session_id: JsValue,
@ -574,7 +541,6 @@ pub async fn upload_chunk(
Ok(serde_wasm_bindgen::to_value(&response).unwrap()) Ok(serde_wasm_bindgen::to_value(&response).unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn doc_fetch_private_subscribe() -> Result<JsValue, String> { pub async fn doc_fetch_private_subscribe() -> Result<JsValue, String> {
let request = AppRequest::V0(AppRequestV0 { let request = AppRequest::V0(AppRequestV0 {
@ -585,7 +551,6 @@ pub async fn doc_fetch_private_subscribe() -> Result<JsValue, String> {
Ok(serde_wasm_bindgen::to_value(&request).unwrap()) Ok(serde_wasm_bindgen::to_value(&request).unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn doc_fetch_repo_subscribe(repo_id: String) -> Result<JsValue, String> { pub async fn doc_fetch_repo_subscribe(repo_id: String) -> Result<JsValue, String> {
let request = AppRequest::V0(AppRequestV0 { let request = AppRequest::V0(AppRequestV0 {
@ -596,14 +561,12 @@ pub async fn doc_fetch_repo_subscribe(repo_id: String) -> Result<JsValue, String
Ok(serde_wasm_bindgen::to_value(&request).unwrap()) Ok(serde_wasm_bindgen::to_value(&request).unwrap())
} }
// #[cfg(target_arch = "wasm32")] // // #[wasm_bindgen]
// #[wasm_bindgen]
// pub async fn get_readcap() -> Result<JsValue, String> { // pub async fn get_readcap() -> Result<JsValue, String> {
// let request = ObjectRef::nil(); // let request = ObjectRef::nil();
// Ok(serde_wasm_bindgen::to_value(&request).unwrap()) // Ok(serde_wasm_bindgen::to_value(&request).unwrap())
// } // }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn disconnections_subscribe(callback: &js_sys::Function) -> Result<JsValue, JsValue> { pub async fn disconnections_subscribe(callback: &js_sys::Function) -> Result<JsValue, JsValue> {
init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
@ -645,7 +608,6 @@ pub async fn disconnections_subscribe(callback: &js_sys::Function) -> Result<JsV
Ok(true.into()) Ok(true.into())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn probe() { pub async fn probe() {
let _res = BROKER let _res = BROKER
@ -662,7 +624,6 @@ pub async fn probe() {
let _ = Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(5)).await; let _ = Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(5)).await;
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn start() { pub async fn start() {
async fn inner_task() -> ResultSend<()> { async fn inner_task() -> ResultSend<()> {
@ -671,7 +632,6 @@ pub async fn start() {
spawn_and_log_error(inner_task()).await; spawn_and_log_error(inner_task()).await;
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn session_stop(user_id_js: JsValue) -> Result<(), String> { pub async fn session_stop(user_id_js: JsValue) -> Result<(), String> {
let user_id = serde_wasm_bindgen::from_value::<UserId>(user_id_js) let user_id = serde_wasm_bindgen::from_value::<UserId>(user_id_js)
@ -682,7 +642,6 @@ pub async fn session_stop(user_id_js: JsValue) -> Result<(), String> {
.map_err(|e: NgError| e.to_string()) .map_err(|e: NgError| e.to_string())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn user_disconnect(user_id_js: JsValue) -> Result<(), String> { pub async fn user_disconnect(user_id_js: JsValue) -> Result<(), String> {
let user_id = serde_wasm_bindgen::from_value::<UserId>(user_id_js) let user_id = serde_wasm_bindgen::from_value::<UserId>(user_id_js)
@ -693,7 +652,6 @@ pub async fn user_disconnect(user_id_js: JsValue) -> Result<(), String> {
.map_err(|e: NgError| e.to_string()) .map_err(|e: NgError| e.to_string())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn wallet_close(wallet_name: String) -> Result<(), String> { pub async fn wallet_close(wallet_name: String) -> Result<(), String> {
nextgraph::local_broker::wallet_close(&wallet_name) nextgraph::local_broker::wallet_close(&wallet_name)
@ -701,7 +659,6 @@ pub async fn wallet_close(wallet_name: String) -> Result<(), String> {
.map_err(|e: NgError| e.to_string()) .map_err(|e: NgError| e.to_string())
} }
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn user_connect( pub async fn user_connect(
client_info_js: JsValue, client_info_js: JsValue,
@ -751,12 +708,11 @@ pub async fn user_connect(
.unwrap()) .unwrap())
} }
#[cfg(target_arch = "wasm32")]
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use wasm_bindgen_test::*; use wasm_bindgen_test::*;
wasm_bindgen_test_configure!(run_in_browser); wasm_bindgen_test_configure!(run_in_browser);
use crate::probe; //use crate::probe;
use crate::start; use crate::start;
#[wasm_bindgen_test] #[wasm_bindgen_test]

@ -278,5 +278,5 @@ impl<'a> BranchStorage<'a> {
mod test { mod test {
#[test] #[test]
pub fn test_repo() {} pub fn test_branch() {}
} }

@ -30,7 +30,6 @@ blake3 = "1.3.1"
argon2 = "0.5.0" argon2 = "0.5.0"
chacha20poly1305 = "0.10.1" chacha20poly1305 = "0.10.1"
#{version = "0.10.1", features = ["heapless","getrandom"] } #{version = "0.10.1", features = ["heapless","getrandom"] }
# slice_as_array = "1.1.0"
image = "0.24.6" image = "0.24.6"
web-time = "0.2.0" web-time = "0.2.0"
ng-repo = { path = "../ng-repo", version = "0.1.0" } ng-repo = { path = "../ng-repo", version = "0.1.0" }

@ -295,7 +295,7 @@ pub fn dec_encrypted_block(
// we haven't test it yet. https://community.bitwarden.com/t/recommended-settings-for-argon2/50901/16?page=4 // we haven't test it yet. https://community.bitwarden.com/t/recommended-settings-for-argon2/50901/16?page=4
pub fn derive_key_from_pass(mut pass: Vec<u8>, salt: [u8; 16], wallet_id: WalletId) -> [u8; 32] { pub fn derive_key_from_pass(mut pass: Vec<u8>, salt: [u8; 16], wallet_id: WalletId) -> [u8; 32] {
let params = ParamsBuilder::new() let params = ParamsBuilder::new()
.m_cost(30 * 1024) .m_cost(40 * 1024)
.t_cost(40) .t_cost(40)
.p_cost(1) .p_cost(1)
.data(AssociatedData::new(wallet_id.slice()).unwrap()) .data(AssociatedData::new(wallet_id.slice()).unwrap())
@ -814,14 +814,14 @@ mod test {
#[test] #[test]
fn test_gen_shuffle() { fn test_gen_shuffle() {
let shuffle = gen_shuffle_for_pazzle_opening(9); let _shuffle = gen_shuffle_for_pazzle_opening(9);
log_debug!("{:?}", shuffle); log_debug!("{:?}", _shuffle);
let shuffle = gen_shuffle_for_pazzle_opening(12); let _shuffle = gen_shuffle_for_pazzle_opening(12);
log_debug!("{:?}", shuffle); log_debug!("{:?}", _shuffle);
let shuffle = gen_shuffle_for_pazzle_opening(15); let _shuffle = gen_shuffle_for_pazzle_opening(15);
log_debug!("{:?}", shuffle); log_debug!("{:?}", _shuffle);
let digits = gen_shuffle_for_pin(); let _digits = gen_shuffle_for_pin();
let digits = gen_shuffle_for_pin(); log_debug!("{:?}", _digits);
} }
#[async_std::test] #[async_std::test]
@ -838,7 +838,7 @@ mod test {
let pin = [5, 2, 9, 1]; let pin = [5, 2, 9, 1];
let creation = Instant::now(); let _creation = Instant::now();
let res = create_wallet_first_step_v0(CreateWalletV0::new( let res = create_wallet_first_step_v0(CreateWalletV0::new(
img_buffer, img_buffer,
@ -847,7 +847,7 @@ mod test {
9, 9,
false, false,
false, false,
BootstrapContentV0::new_empty(), BootstrapContentV0::new_localhost(PubKey::nil()),
None, None,
None, None,
)) ))
@ -858,15 +858,15 @@ mod test {
.await .await
.expect("create_wallet_second_step_v0"); .expect("create_wallet_second_step_v0");
log_debug!( log_info!(
"creation of wallet took: {} ms", "creation of wallet took: {} ms",
creation.elapsed().as_millis() _creation.elapsed().as_millis()
); );
log_debug!("-----------------------------"); log_debug!("-----------------------------");
let mut file = File::create("tests/wallet.ngw").expect("open wallet write file"); let mut file = File::create("tests/wallet.ngw").expect("open wallet write file");
let ser_wallet = to_vec(&NgFile::V0(NgFileV0::Wallet(res.wallet.clone()))).unwrap(); let ser_wallet = to_vec(&NgFile::V0(NgFileV0::Wallet(res.wallet.clone()))).unwrap();
file.write_all(&ser_wallet); let _ = file.write_all(&ser_wallet);
log_debug!("wallet id: {}", res.wallet.id()); log_debug!("wallet id: {}", res.wallet.id());
log_debug!("pazzle {:?}", display_pazzle(&res.pazzle)); log_debug!("pazzle {:?}", display_pazzle(&res.pazzle));
@ -878,7 +878,7 @@ mod test {
let mut file = let mut file =
File::create("tests/generated_security_image.jpg").expect("open write file"); File::create("tests/generated_security_image.jpg").expect("open write file");
file.write_all(&v0.content.security_img); let _ = file.write_all(&v0.content.security_img);
let f = File::open("tests/generated_security_image.jpg.compare") let f = File::open("tests/generated_security_image.jpg.compare")
.expect("open of generated_security_image.jpg.compare"); .expect("open of generated_security_image.jpg.compare");
@ -891,29 +891,27 @@ mod test {
assert_eq!(v0.content.security_img, generated_security_image_compare); assert_eq!(v0.content.security_img, generated_security_image_compare);
#[cfg(debug_assertions)] let _opening_mnemonic = Instant::now();
let opening_mnemonic = Instant::now();
let w = open_wallet_with_mnemonic(Wallet::V0(v0.clone()), res.mnemonic, pin.clone()) let _w = open_wallet_with_mnemonic(Wallet::V0(v0.clone()), res.mnemonic, pin.clone())
.expect("open with mnemonic"); .expect("open with mnemonic");
//log_debug!("encrypted part {:?}", w); //log_debug!("encrypted part {:?}", w);
log_debug!( log_info!(
"opening of wallet with mnemonic took: {} ms", "opening of wallet with mnemonic took: {} ms",
opening_mnemonic.elapsed().as_millis() _opening_mnemonic.elapsed().as_millis()
); );
if v0.content.pazzle_length > 0 { if v0.content.pazzle_length > 0 {
#[cfg(debug_assertions)] let _opening_pazzle = Instant::now();
let opening_pazzle = Instant::now(); let _w = open_wallet_with_pazzle(&Wallet::V0(v0.clone()), res.pazzle.clone(), pin)
let w = open_wallet_with_pazzle(&Wallet::V0(v0.clone()), res.pazzle.clone(), pin)
.expect("open with pazzle"); .expect("open with pazzle");
log_debug!( log_info!(
"opening of wallet with pazzle took: {} ms", "opening of wallet with pazzle took: {} ms",
opening_pazzle.elapsed().as_millis() _opening_pazzle.elapsed().as_millis()
); );
} }
log_debug!("encrypted part {:?}", w); log_debug!("encrypted part {:?}", _w);
} }
} }
} }

Binary file not shown.

Before

Width:  |  Height:  |  Size: 29 KiB

After

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

@ -22,7 +22,7 @@ cargo watch -c -w src -x run
// then open http://localhost:5173/ // then open http://localhost:5173/
``` ```
## Build ## Prod
``` ```
cd web cd web

@ -14,7 +14,6 @@ documentation.workspace = true
rust-version.workspace = true rust-version.workspace = true
[dependencies] [dependencies]
serde = { version = "1.0.142", features = ["derive"] }
serde_bare = "0.5.0" serde_bare = "0.5.0"
serde_json = "1.0.96" serde_json = "1.0.96"
tokio = { version = "1.27", features = ["full"] } tokio = { version = "1.27", features = ["full"] }
@ -25,8 +24,7 @@ rust-embed = "6"
log = "0.4" log = "0.4"
env_logger = "0.10" env_logger = "0.10"
base64-url = "2.0.0" base64-url = "2.0.0"
slice_as_array = "1.1.0"
ng-repo = { path = "../ng-repo", features = ["server_log_output"] } ng-repo = { path = "../ng-repo", features = ["server_log_output"] }
ng-net = { path = "../ng-net" } ng-net = { path = "../ng-net" }
ng-wallet = { path = "../ng-wallet" } ng-wallet = { path = "../ng-wallet" }
ng-storage-rocksdb = { path = "../ng-storage-rocksdb" } ng-storage-rocksdb = { path = "../ng-storage-rocksdb" }

@ -21,7 +21,7 @@ cargo watch -c -w src -x run
// then open http://localhost:5173/ // then open http://localhost:5173/
``` ```
## Build ## Prod
``` ```
cd web cd web

@ -6,31 +6,33 @@
// at your option. All files in the project carrying such // at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except // notice may not be copied, modified, or distributed except
// according to those terms. // according to those terms.
#[macro_use]
extern crate slice_as_array;
mod store; mod store;
mod types; mod types;
use ng_repo::errors::StorageError; use std::sync::Arc;
use warp::reply::Response; use std::{env, fs};
use warp::{Filter, Reply};
use rust_embed::RustEmbed; use rust_embed::RustEmbed;
use serde_bare::{from_slice, to_vec}; use serde_bare::{from_slice, to_vec};
use serde_json::json; use serde_json::json;
use std::sync::Arc; use warp::reply::Response;
use std::{env, fs}; use warp::{Filter, Reply};
use crate::store::wallet_record::*; use ng_repo::errors::StorageError;
use crate::types::*;
use ng_net::types::{APP_NG_ONE_URL, NG_ONE_URL};
use ng_repo::log::*; use ng_repo::log::*;
use ng_repo::types::*; use ng_repo::types::*;
use ng_repo::utils::{generate_keypair, sign, verify}; use ng_repo::utils::verify;
use ng_storage_rocksdb::kcv_storage::RocksDbKCVStorage;
use ng_net::types::{APP_NG_ONE_URL, NG_ONE_URL};
use ng_wallet::types::*; use ng_wallet::types::*;
use ng_storage_rocksdb::kcv_storage::RocksDbKCVStorage;
use crate::store::wallet_record::*;
use crate::types::*;
#[derive(RustEmbed)] #[derive(RustEmbed)]
#[folder = "web/dist"] #[folder = "web/dist"]
struct Static; struct Static;
@ -41,7 +43,8 @@ struct Server {
impl Server { impl Server {
fn add_wallet(&self, bytes: Vec<u8>) -> Result<Response, NgHttpError> { fn add_wallet(&self, bytes: Vec<u8>) -> Result<Response, NgHttpError> {
let add_wallet = from_slice::<AddWallet>(&bytes).map_err(|e| NgHttpError::InvalidParams)?; let add_wallet =
from_slice::<AddWallet>(&bytes).map_err(|_e| NgHttpError::InvalidParams)?;
let bootstrap = add_wallet.bootstrap(); let bootstrap = add_wallet.bootstrap();
@ -52,12 +55,12 @@ impl Server {
bootstrap.sig(), bootstrap.sig(),
bootstrap.id(), bootstrap.id(),
) )
.map_err(|e| NgHttpError::InvalidParams)?; .map_err(|_e| NgHttpError::InvalidParams)?;
match add_wallet.wallet() { match add_wallet.wallet() {
Some(wallet) => { Some(wallet) => {
verify(&wallet.content_as_bytes(), wallet.sig(), wallet.id()) verify(&wallet.content_as_bytes(), wallet.sig(), wallet.id())
.map_err(|e| NgHttpError::InvalidParams)?; .map_err(|_e| NgHttpError::InvalidParams)?;
} }
None => {} None => {}
} }
@ -87,12 +90,12 @@ impl Server {
fn get_wallet(&self, encoded_id: String) -> Result<Response, NgHttpError> { fn get_wallet(&self, encoded_id: String) -> Result<Response, NgHttpError> {
log_debug!("DOWNLOAD wallet {}", encoded_id); log_debug!("DOWNLOAD wallet {}", encoded_id);
let id = base64_url::decode(&encoded_id).map_err(|e| NgHttpError::InvalidParams)?; let id = base64_url::decode(&encoded_id).map_err(|_e| NgHttpError::InvalidParams)?;
let wallet_id: PubKey = from_slice(&id).map_err(|e| NgHttpError::InvalidParams)?; let wallet_id: PubKey = from_slice(&id).map_err(|_e| NgHttpError::InvalidParams)?;
let wallet_record = let wallet_record =
WalletRecord::open(&wallet_id, &self.store).map_err(|e| NgHttpError::NotFound)?; WalletRecord::open(&wallet_id, &self.store).map_err(|_e| NgHttpError::NotFound)?;
let wallet = wallet_record.wallet().map_err(|e| NgHttpError::NotFound)?; let wallet = wallet_record.wallet().map_err(|_e| NgHttpError::NotFound)?;
let data = to_vec(&wallet).map_err(|e| NgHttpError::NotFound)?; let data = to_vec(&wallet).map_err(|_e| NgHttpError::NotFound)?;
Ok(Response::new(data.into())) Ok(Response::new(data.into()))
} }
@ -106,13 +109,13 @@ impl Server {
fn get_bootstrap(&self, encoded_id: String) -> Result<Response, NgHttpError> { fn get_bootstrap(&self, encoded_id: String) -> Result<Response, NgHttpError> {
log_debug!("DOWNLOAD bootstrap {}", encoded_id); log_debug!("DOWNLOAD bootstrap {}", encoded_id);
let id = base64_url::decode(&encoded_id).map_err(|e| NgHttpError::InvalidParams)?; let id = base64_url::decode(&encoded_id).map_err(|_e| NgHttpError::InvalidParams)?;
let wallet_id: PubKey = from_slice(&id).map_err(|e| NgHttpError::InvalidParams)?; let wallet_id: PubKey = from_slice(&id).map_err(|_e| NgHttpError::InvalidParams)?;
let wallet_record = let wallet_record =
WalletRecord::open(&wallet_id, &self.store).map_err(|e| NgHttpError::NotFound)?; WalletRecord::open(&wallet_id, &self.store).map_err(|_e| NgHttpError::NotFound)?;
let bootstrap = wallet_record let bootstrap = wallet_record
.bootstrap() .bootstrap()
.map_err(|e| NgHttpError::NotFound)?; .map_err(|_e| NgHttpError::NotFound)?;
let data = json!(bootstrap).to_string(); let data = json!(bootstrap).to_string();
Ok(Response::new(data.into())) Ok(Response::new(data.into()))
} }

@ -9,20 +9,22 @@
//! ng-one bootstrap //! ng-one bootstrap
use ng_net::types::*; use serde_bare::to_vec;
use ng_repo::errors::StorageError; use ng_repo::errors::StorageError;
use ng_repo::kcv_storage::KCVStorage; use ng_repo::kcv_storage::KCVStorage;
use ng_repo::types::PubKey; use ng_repo::types::PubKey;
use serde::{Deserialize, Serialize}; use ng_net::types::*;
use serde_bare::{from_slice, to_vec};
#[allow(dead_code)]
pub struct DynPeer<'a> { pub struct DynPeer<'a> {
/// peer ID /// peer ID
id: PubKey, id: PubKey,
store: &'a dyn KCVStorage, store: &'a dyn KCVStorage,
} }
#[allow(dead_code)]
impl<'a> DynPeer<'a> { impl<'a> DynPeer<'a> {
const PREFIX: u8 = b"d"[0]; const PREFIX: u8 = b"d"[0];

@ -9,12 +9,12 @@
//! ng-wallet //! ng-wallet
use serde_bare::{from_slice, to_vec};
use ng_repo::errors::StorageError; use ng_repo::errors::StorageError;
use ng_repo::kcv_storage::KCVStorage; use ng_repo::kcv_storage::KCVStorage;
use ng_repo::types::*;
use ng_wallet::types::*; use ng_wallet::types::*;
use serde::{Deserialize, Serialize};
use serde_bare::{from_slice, to_vec};
pub struct WalletRecord<'a> { pub struct WalletRecord<'a> {
/// Wallet ID /// Wallet ID
@ -22,6 +22,7 @@ pub struct WalletRecord<'a> {
store: &'a dyn KCVStorage, store: &'a dyn KCVStorage,
} }
#[allow(dead_code)]
impl<'a> WalletRecord<'a> { impl<'a> WalletRecord<'a> {
const PREFIX: u8 = b"w"[0]; const PREFIX: u8 = b"w"[0];

@ -18,7 +18,7 @@ pub enum NgHttpError {
impl Reply for NgHttpError { impl Reply for NgHttpError {
fn into_response(self) -> Response { fn into_response(self) -> Response {
match (self) { match self {
NgHttpError::NotFound => warp::http::StatusCode::NOT_FOUND.into_response(), NgHttpError::NotFound => warp::http::StatusCode::NOT_FOUND.into_response(),
NgHttpError::InvalidParams => warp::http::StatusCode::BAD_REQUEST.into_response(), NgHttpError::InvalidParams => warp::http::StatusCode::BAD_REQUEST.into_response(),
NgHttpError::AlreadyExists => warp::http::StatusCode::CONFLICT.into_response(), NgHttpError::AlreadyExists => warp::http::StatusCode::CONFLICT.into_response(),

Loading…
Cancel
Save