refactor p2p-repo and p2p-net

pull/19/head
Niko PLP 9 months ago
parent 386e552432
commit 8faa4ae1eb

Changed files (lines changed):

  1. ng-app/src/lib/Login.svelte (5)
  2. ng-sdk-js/src/lib.rs (2)
  3. ng-wallet/src/lib.rs (7)
  4. ngcli/src/main.rs (1)
  5. p2p-broker/src/broker_store/account.rs (2)
  6. p2p-net/src/actors/start.rs (79)
  7. p2p-net/src/broker.rs (13)
  8. p2p-net/src/connection.rs (4)
  9. p2p-net/src/lib.rs (2)
  10. p2p-net/src/types.rs (1611)
  11. p2p-repo/src/block.rs (119)
  12. p2p-repo/src/branch.rs (297)
  13. p2p-repo/src/commit.rs (320)
  14. p2p-repo/src/errors.rs (10)
  15. p2p-repo/src/lib.rs (2)
  16. p2p-repo/src/object.rs (374)
  17. p2p-repo/src/repo.rs (101)
  18. p2p-repo/src/site.rs (8)
  19. p2p-repo/src/store.rs (2)
  20. p2p-repo/src/types.rs (995)
  21. stores-lmdb/src/lib.rs (4)

ng-app/src/lib/Login.svelte

@@ -221,7 +221,7 @@
 <ul class="mb-8 ml-3 space-y-4 text-left list-decimal">
 <li>
 For each category of images, you will be presented with the 15 possible
-choices. The categories are shuffled at every login. They will not
+image choices. The categories are shuffled at every login. They will not
 always appear in the same order.
 </li>
 <li>
@@ -229,7 +229,8 @@
 image that belongs to your pazzle. Find it and tap or click on that one.
 The 15 images are shuffled too, they will not appear at the same
 position at each login. On a computer, you can also use the tab key on
-your keyboard to move to the desired item on the screen.
+your keyboard to move to the desired item on the screen, then press the
+space bar.
 </li>
 <li>
 Once you completed the last category, you will be presented with all the

ng-sdk-js/src/lib.rs

@@ -23,7 +23,7 @@ use p2p_net::broker::*;
 use p2p_net::connection::{ClientConfig, StartConfig};
 use p2p_net::types::{
     BootstrapContent, BootstrapContentV0, ClientId, ClientInfo, ClientInfoV0, ClientType,
-    CreateAccountBSP, DirectPeerId, Identity, UserId, IP,
+    CreateAccountBSP, DirectPeerId, IP,
 };
 use p2p_net::utils::{decode_invitation_string, spawn_and_log_error, Receiver, ResultSend, Sender};
 #[cfg(target_arch = "wasm32")]

ng-wallet/src/lib.rs

@@ -38,11 +38,8 @@ use p2p_net::{
     broker::BROKER,
     connection::{ClientConfig, StartConfig},
 };
-use p2p_net::{
-    connection::IConnect,
-    types::{ClientInfo, Identity, SiteType, SiteV0},
-};
-use p2p_repo::types::{PubKey, Sig, Timestamp};
+use p2p_net::{connection::IConnect, types::ClientInfo};
+use p2p_repo::types::{Identity, PubKey, Sig, SiteType, SiteV0, Timestamp};
 use p2p_repo::utils::{generate_keypair, now_timestamp, sign, verify};
 use p2p_repo::{log::*, types::PrivKey};
 use rand::prelude::*;

ngcli/src/main.rs

@@ -12,7 +12,6 @@ use ed25519_dalek::*;
 use duration_str::parse;
 use futures::{future, pin_mut, stream, SinkExt, StreamExt};
 use p2p_net::actors::*;
-use p2p_repo::object::Object;
 use p2p_repo::store::{store_max_value_size, store_valid_value_size, HashMapRepoStore, RepoStore};
 use rand::rngs::OsRng;
 use serde::{Deserialize, Serialize};

p2p-broker/src/broker_store/account.rs

@@ -18,7 +18,7 @@ use p2p_net::types::*;
 use p2p_repo::kcv_store::KCVStore;
 use p2p_repo::log::*;
 use p2p_repo::store::*;
-use p2p_repo::types::Timestamp;
+use p2p_repo::types::UserId;
 use serde_bare::{from_slice, to_vec};

 pub struct Account<'a> {

p2p-net/src/actors/start.rs

@@ -11,9 +11,13 @@
 use crate::actors::noise::Noise;
 use crate::connection::NoiseFSM;
-use crate::types::{AdminRequest, ExtResponse};
+use crate::types::{
+    AdminRequest, CoreBrokerConnect, CoreBrokerConnectResponse, CoreBrokerConnectResponseV0,
+    CoreMessage, CoreMessageV0, CoreResponseContentV0, CoreResponseV0, ExtResponse,
+};
 use crate::{actor::*, errors::ProtocolError, types::ProtocolMessage};
 use async_std::sync::Mutex;
+use p2p_repo::log::*;
 use serde::{Deserialize, Serialize};
 use std::any::{Any, TypeId};
 use std::sync::Arc;
@@ -21,12 +25,12 @@ use std::sync::Arc;
 // pub struct Noise3(Noise);

 /// Start chosen protocol
-/// First message sent by the client
+/// First message sent by the connecting peer
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub enum StartProtocol {
     Client(ClientHello),
     Ext(ExtHello),
-    //Core(CoreHello),
+    Core(CoreHello),
     Admin(AdminRequest),
 }
@@ -34,6 +38,7 @@ impl StartProtocol {
     pub fn type_id(&self) -> TypeId {
         match self {
             StartProtocol::Client(a) => a.type_id(),
+            StartProtocol::Core(a) => a.type_id(),
             StartProtocol::Ext(a) => a.type_id(),
             StartProtocol::Admin(a) => a.type_id(),
         }
@@ -41,6 +46,7 @@ impl StartProtocol {
     pub fn get_actor(&self) -> Box<dyn EActor> {
         match self {
             StartProtocol::Client(a) => a.get_actor(),
+            StartProtocol::Core(a) => a.get_actor(),
             StartProtocol::Ext(a) => a.get_actor(),
             StartProtocol::Admin(a) => a.get_actor(),
         }
@@ -53,7 +59,72 @@ impl From<StartProtocol> for ProtocolMessage {
     }
 }

-/// External Hello (finalizes the Noise handshake and send first ExtRequest)
+/// Core Hello (finalizes the Noise handshake and sends CoreConnect)
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct CoreHello {
+    // contains the 3rd Noise handshake message "s,se"
+    pub noise: Noise,
+
+    /// Noise encrypted payload (a CoreMessage::CoreRequest::BrokerConnect)
+    pub payload: Vec<u8>,
+}
+
+impl CoreHello {
+    pub fn get_actor(&self) -> Box<dyn EActor> {
+        Actor::<CoreBrokerConnect, CoreBrokerConnectResponse>::new_responder()
+    }
+}
+
+impl TryFrom<ProtocolMessage> for CoreBrokerConnectResponse {
+    type Error = ProtocolError;
+    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
+        if let ProtocolMessage::CoreMessage(CoreMessage::V0(CoreMessageV0::Response(
+            CoreResponseV0 {
+                content: CoreResponseContentV0::BrokerConnectResponse(a),
+                ..
+            },
+        ))) = msg
+        {
+            Ok(CoreBrokerConnectResponse::V0(a))
+        } else {
+            log_debug!("INVALID {:?}", msg);
+            Err(ProtocolError::InvalidValue)
+        }
+    }
+}
+
+impl From<CoreHello> for ProtocolMessage {
+    fn from(msg: CoreHello) -> ProtocolMessage {
+        ProtocolMessage::Start(StartProtocol::Core(msg))
+    }
+}
+
+impl From<CoreBrokerConnect> for ProtocolMessage {
+    fn from(msg: CoreBrokerConnect) -> ProtocolMessage {
+        unimplemented!();
+    }
+}
+
+impl Actor<'_, CoreBrokerConnect, CoreBrokerConnectResponse> {}
+
+#[async_trait::async_trait]
+impl EActor for Actor<'_, CoreBrokerConnect, CoreBrokerConnectResponse> {
+    async fn respond(
+        &mut self,
+        msg: ProtocolMessage,
+        fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> Result<(), ProtocolError> {
+        //let req = CoreBrokerConnect::try_from(msg)?;
+        // let res = CoreBrokerConnectResponse::V0(CoreBrokerConnectResponseV0 {
+        //     successes: vec![],
+        //     errors: vec![],
+        // });
+        // fsm.lock().await.send(res.into()).await?;
+        Ok(())
+    }
+}
+
+/// External Hello (finalizes the Noise handshake and sends first ExtRequest)
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct ExtHello {
     // contains the 3rd Noise handshake message "s,se"
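
Note: with the Core variant enabled, a core broker now opens a connection with a
CoreHello instead of the previously commented-out path. A minimal sketch (not part
of this diff) of the sending side, assuming the Noise FSM has already produced the
third handshake message (`noise3`) and the Noise-encrypted CoreBrokerConnect payload
(`payload`); both names are hypothetical:

    // the From impl above wraps it as ProtocolMessage::Start(StartProtocol::Core(..))
    let hello = CoreHello { noise: noise3, payload };
    let msg: ProtocolMessage = hello.into();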

p2p-net/src/broker.rs

@@ -320,7 +320,7 @@ impl<'a> Broker<'a> {
         let blockstream = self
             .get_block_from_store_with_block_id(nuri, obj_ref.id, true)
             .await?;
-        let store = HashMapRepoStore::from_block_stream(blockstream).await;
+        let store = Box::new(HashMapRepoStore::from_block_stream(blockstream).await);

         Object::load(obj_ref.id, Some(obj_ref.key), &store)
             .map_err(|e| match e {
@@ -346,7 +346,6 @@ impl<'a> Broker<'a> {
         };
         let refs = vec![obj_ref.clone()];
         let metadata = vec![5u8; 55];
-        let expiry = None;

         let (member_privkey, member_pubkey) = generate_keypair();
@@ -354,13 +353,16 @@ impl<'a> Broker<'a> {
             member_privkey,
             member_pubkey,
             1,
-            obj_ref.clone(),
+            PubKey::nil(),
+            QuorumType::NoSigning,
+            vec![],
+            vec![],
             vec![],
             vec![],
             refs,
+            vec![],
             metadata,
             obj_ref.clone(),
-            expiry,
         )
         .unwrap();

         async fn send(mut tx: Sender<Commit>, commit: Commit) -> ResultSend<()> {
@@ -782,6 +784,7 @@ impl<'a> Broker<'a> {
                 }
             };
         }
+        //TODO, if Core, check that IP is not in self.direct_connections
     }

     let mut connection = cnx
@@ -844,6 +847,8 @@ impl<'a> Broker<'a> {
         let mut broker = BROKER.write().await;
         broker.reconnecting(remote_peer_id, config.get_user());
         // TODO: deal with cycle error https://users.rust-lang.org/t/recursive-async-method-causes-cycle-error/84628/5
+        // use a channel and send the reconnect job to it.
+        // create a spawned loop to read the channel and process the reconnection requests.
         // let result = broker
         //     .connect(cnx, ip, core, peer_pubk, peer_privk, remote_peer_id)
         //     .await;
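
Note: the TODO above proposes breaking the recursive-async cycle with a channel. A
minimal sketch of that design, not part of this diff; `ReconnectJob` and its fields
are hypothetical names bundling the arguments of the commented-out `connect` call:

    let (tx, rx) = async_std::channel::unbounded::<ReconnectJob>();
    // a spawned loop drains the channel, so connect() never awaits itself recursively
    async_std::task::spawn(async move {
        while let Ok(job) = rx.recv().await {
            let _ = BROKER
                .write()
                .await
                .connect(job.cnx, job.ip, job.core, job.peer_pubk, job.peer_privk, job.remote_peer_id)
                .await;
        }
    });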

p2p-net/src/connection.rs

@@ -163,6 +163,7 @@ pub struct ClientConfig {
     pub user_priv: PrivKey,
     pub client: PubKey,
     pub info: ClientInfo,
+    //pub peer_advert: PeerAdvert,
     pub registration: Option<Option<[u8; 32]>>,
 }
@@ -172,7 +173,8 @@ pub struct ExtConfig {}
 #[derive(Debug, Clone)]
 pub struct CoreConfig {
     pub addr: BindAddress,
-    pub interface: String,
+    //pub interface: String,
+    pub overlays_config: CoreBrokerConnect,
 }

 #[derive(Debug, Clone)]

p2p-net/src/lib.rs

@@ -31,8 +31,6 @@ pub mod utils;
 pub mod tests;

-pub mod site;
-
 pub static NG_BOOTSTRAP_LOCAL_PATH: &str = "/.ng_bootstrap";

 #[cfg(debug_assertions)]

p2p-net/src/types.rs: File diff suppressed because it is too large

p2p-repo/src/block.rs

@@ -16,23 +16,38 @@ use crate::types::*;
 impl BlockV0 {
     pub fn new(
         children: Vec<BlockId>,
-        deps: ObjectDeps,
-        expiry: Option<Timestamp>,
+        header_ref: Option<ObjectRef>,
         content: Vec<u8>,
         key: Option<SymKey>,
     ) -> BlockV0 {
+        let (commit_header_id, commit_header_key) = header_ref.map_or((None, None), |obj_ref| {
+            (Some(obj_ref.id), Some(obj_ref.key))
+        });
+        let bc = BlockContentV0 {
+            children,
+            commit_header_id,
+            encrypted_content: content,
+        };
         let mut b = BlockV0 {
             id: None,
             key,
-            children,
-            deps,
-            expiry,
-            content,
+            content: BlockContent::V0(bc),
+            commit_header_key,
         };
-        let block = Block::V0(b.clone());
-        b.id = Some(block.get_id());
+        b.id = Some(b.compute_id());
         b
     }
+
+    /// Compute the ID
+    pub fn compute_id(&self) -> BlockId {
+        let ser = serde_bare::to_vec(&self.content).unwrap();
+        let hash = blake3::hash(ser.as_slice());
+        Digest::Blake3Digest32(hash.as_bytes().clone())
+    }
+
+    pub fn children(&self) -> &Vec<BlockId> {
+        self.content.children()
+    }
 }

 impl From<Digest> for String {
@@ -42,68 +57,92 @@ impl From<Digest> for String {
     }
 }

+impl BlockContent {
+    /// Get the encrypted content
+    pub fn encrypted_content(&self) -> &Vec<u8> {
+        match self {
+            BlockContent::V0(bc) => &bc.encrypted_content,
+        }
+    }
+
+    /// Get the header id
+    pub fn header_id(&self) -> &Option<ObjectId> {
+        match self {
+            BlockContent::V0(bc) => &bc.commit_header_id,
+        }
+    }
+
+    /// Get the children
+    pub fn children(&self) -> &Vec<BlockId> {
+        match self {
+            BlockContent::V0(b) => &b.children,
+        }
+    }
+}
+
 impl Block {
     pub fn new(
         children: Vec<BlockId>,
-        deps: ObjectDeps,
-        expiry: Option<Timestamp>,
+        header_ref: Option<ObjectRef>,
         content: Vec<u8>,
         key: Option<SymKey>,
     ) -> Block {
-        Block::V0(BlockV0::new(children, deps, expiry, content, key))
+        Block::V0(BlockV0::new(children, header_ref, content, key))
     }

     /// Compute the ID
-    pub fn get_id(&self) -> BlockId {
-        let ser = serde_bare::to_vec(self).unwrap();
-        let hash = blake3::hash(ser.as_slice());
-        Digest::Blake3Digest32(hash.as_bytes().clone())
+    pub fn compute_id(&self) -> BlockId {
+        match self {
+            Block::V0(v0) => v0.compute_id(),
+        }
     }

-    /// Get the already computed ID
+    /// Get the already computed ID or computes it, saves it, and returns it
+    pub fn get_and_save_id(&mut self) -> BlockId {
+        match &self {
+            Block::V0(b) => match b.id {
+                Some(id) => id,
+                None => {
+                    let id = self.compute_id();
+                    let Block::V0(c) = self;
+                    c.id = Some(id);
+                    id
+                }
+            },
+        }
+    }
+
+    /// Get the already computed ID or computes it
     pub fn id(&self) -> BlockId {
         match self {
             Block::V0(b) => match b.id {
                 Some(id) => id,
-                None => self.get_id(),
+                None => self.compute_id(),
             },
         }
     }

-    /// Get the content
-    pub fn content(&self) -> &Vec<u8> {
+    /// Get the encrypted content
+    pub fn encrypted_content(&self) -> &Vec<u8> {
         match self {
-            Block::V0(b) => &b.content,
+            Block::V0(b) => &b.content.encrypted_content(),
         }
     }

     /// Get the children
     pub fn children(&self) -> &Vec<BlockId> {
         match self {
-            Block::V0(b) => &b.children,
+            Block::V0(b) => &b.content.children(),
         }
     }

-    /// Get the dependencies
-    pub fn deps(&self) -> &ObjectDeps {
-        match self {
-            Block::V0(b) => &b.deps,
-        }
-    }
-
-    /// Get the expiry
-    pub fn expiry(&self) -> Option<Timestamp> {
-        match self {
-            Block::V0(b) => b.expiry,
-        }
-    }
-
-    pub fn set_expiry(&mut self, expiry: Option<Timestamp>) {
+    /// Get the header
+    pub fn header_ref(&self) -> Option<ObjectRef> {
         match self {
-            Block::V0(b) => {
-                b.id = None;
-                b.expiry = expiry
-            }
+            Block::V0(b) => b.commit_header_key.as_ref().map(|key| ObjectRef {
+                key: key.clone(),
+                id: b.content.header_id().unwrap().clone(),
+            }),
         }
     }
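
Note: after this refactor a Block's ID is the BLAKE3 hash of its BlockContent alone
(children, commit_header_id, encrypted_content); the commit_header_key and the cached
id stay outside the hash, so mutating them no longer invalidates the ID. A minimal
sketch of the resulting invariant (names from this diff; `encrypted` and `key` are
hypothetical values):

    let mut block = Block::new(vec![], None, encrypted, Some(key));
    assert_eq!(block.id(), block.compute_id()); // id() falls back to compute_id()
    let id = block.get_and_save_id();           // computes once, then caches it
    assert_eq!(id, block.compute_id());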

p2p-repo/src/branch.rs

@@ -12,7 +12,7 @@
 //! Branch of a Repository

 use crate::log::*;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashSet;

 use fastbloom_rs::{BloomFilter as Filter, Membership};
@@ -20,55 +20,23 @@ use crate::object::*;
 use crate::store::*;
 use crate::types::*;

-impl MemberV0 {
-    /// New member
-    pub fn new(id: PubKey, commit_types: Vec<CommitType>, metadata: Vec<u8>) -> MemberV0 {
-        MemberV0 {
-            id,
-            commit_types,
-            metadata,
-        }
-    }
-
-    /// Check whether this member has permission for the given commit type
-    pub fn has_perm(&self, commit_type: CommitType) -> bool {
-        self.commit_types.contains(&commit_type)
-    }
-}
-
-impl Member {
-    /// New member
-    pub fn new(id: PubKey, commit_types: Vec<CommitType>, metadata: Vec<u8>) -> Member {
-        Member::V0(MemberV0::new(id, commit_types, metadata))
-    }
-
-    /// Check whether this member has permission for the given commit type
-    pub fn has_perm(&self, commit_type: CommitType) -> bool {
-        match self {
-            Member::V0(m) => m.has_perm(commit_type),
-        }
-    }
-}
-
 impl BranchV0 {
     pub fn new(
         id: PubKey,
-        topic: PubKey,
-        secret: SymKey,
-        members: Vec<MemberV0>,
-        quorum: HashMap<CommitType, u32>,
-        ack_delay: RelTime,
-        tags: Vec<u8>,
+        repo: ObjectRef,
+        root_branch_def_id: ObjectId,
+        topic_priv: PrivKey,
         metadata: Vec<u8>,
     ) -> BranchV0 {
+        let topic_privkey: Vec<u8> = vec![];
+        //TODO: topic_privkey is topic_priv encrypted with the repo_secret, branch_id, topic_id
+        let topic = topic_priv.to_pub();
         BranchV0 {
             id,
+            repo,
+            root_branch_def_id,
             topic,
-            secret,
-            members,
-            quorum,
-            ack_delay,
-            tags,
+            topic_privkey,
             metadata,
         }
     }
@@ -77,33 +45,20 @@ impl BranchV0 {
 impl Branch {
     pub fn new(
         id: PubKey,
-        topic: PubKey,
-        secret: SymKey,
-        members: Vec<MemberV0>,
-        quorum: HashMap<CommitType, u32>,
-        ack_delay: RelTime,
-        tags: Vec<u8>,
+        repo: ObjectRef,
+        root_branch_def_id: ObjectId,
+        topic_priv: PrivKey,
         metadata: Vec<u8>,
     ) -> Branch {
         Branch::V0(BranchV0::new(
-            id, topic, secret, members, quorum, ack_delay, tags, metadata,
+            id,
+            repo,
+            root_branch_def_id,
+            topic_priv,
+            metadata,
         ))
     }

-    /// Get member by ID
-    pub fn get_member(&self, id: &PubKey) -> Option<&MemberV0> {
-        match self {
-            Branch::V0(b) => {
-                for m in b.members.iter() {
-                    if m.id == *id {
-                        return Some(m);
-                    }
-                }
-            }
-        }
-        None
-    }
-
     /// Branch sync request from another peer
     ///
     /// Return ObjectIds to send
@@ -111,7 +66,7 @@ impl Branch {
         our_heads: &[ObjectId],
         their_heads: &[ObjectId],
         their_filter: &BloomFilter,
-        store: &impl RepoStore,
+        store: &Box<impl RepoStore + ?Sized>,
     ) -> Result<Vec<ObjectId>, ObjectParseError> {
         //log_debug!(">> sync_req");
         //log_debug!("   our_heads: {:?}", our_heads);
@@ -121,7 +76,7 @@ impl Branch {
         /// and collect `ObjectId`s starting from `our_heads` towards `their_heads`
         fn load_branch(
             cobj: &Object,
-            store: &impl RepoStore,
+            store: &Box<impl RepoStore + ?Sized>,
             their_heads: &[ObjectId],
             visited: &mut HashSet<ObjectId>,
             missing: &mut HashSet<ObjectId>,
@@ -129,9 +84,9 @@ impl Branch {
             //log_debug!(">>> load_branch: {}", cobj.id());
             let id = cobj.id();

-            // root has no deps
+            // root has no acks
             let is_root = cobj.is_root();
-            //log_debug!("     deps: {:?}", cobj.deps());
+            //log_debug!("     acks: {:?}", cobj.acks());

             // check if this commit object is present in their_heads
             let mut their_head_found = their_heads.contains(&id);
@@ -140,9 +95,9 @@ impl Branch {
             if !is_root && !their_head_found {
                 visited.insert(id);
                 for id in cobj.deps() {
-                    match Object::load(*id, None, store) {
+                    match Object::load(id, None, store) {
                         Ok(o) => {
-                            if !visited.contains(id) {
+                            if !visited.contains(&id) {
                                 if load_branch(&o, store, their_heads, visited, missing)? {
                                     their_head_found = true;
                                 }
@@ -218,49 +173,43 @@ mod test {
     use crate::commit::*;
     use crate::object::*;
     use crate::repo;
+    use crate::repo::Repo;
     use crate::store::*;

     #[test]
     pub fn test_branch() {
         fn add_obj(
-            content: ObjectContent,
-            deps: Vec<ObjectId>,
-            expiry: Option<Timestamp>,
+            content: ObjectContentV0,
+            header: Option<CommitHeaderV0>,
             repo_pubkey: PubKey,
             repo_secret: SymKey,
-            store: &mut impl RepoStore,
+            store: &Box<impl RepoStore + ?Sized>,
         ) -> ObjectRef {
             let max_object_size = 4000;
-            let obj = Object::new(
-                content,
-                deps,
-                expiry,
-                max_object_size,
-                repo_pubkey,
-                repo_secret,
-            );
+            let obj = Object::new(content, header, max_object_size, repo_pubkey, repo_secret);
             log_debug!(">>> add_obj");
             log_debug!("     id: {:?}", obj.id());
-            log_debug!("     deps: {:?}", obj.deps());
+            log_debug!("     header: {:?}", obj.header());
             obj.save(store).unwrap();
             obj.reference().unwrap()
         }

         fn add_commit(
-            branch: ObjectRef,
+            branch: BranchId,
             author_privkey: PrivKey,
             author_pubkey: PubKey,
-            seq: u32,
+            seq: u64,
             deps: Vec<ObjectRef>,
             acks: Vec<ObjectRef>,
             body_ref: ObjectRef,
             repo_pubkey: PubKey,
             repo_secret: SymKey,
-            store: &mut impl RepoStore,
+            store: &Box<impl RepoStore + ?Sized>,
         ) -> ObjectRef {
-            let mut obj_deps: Vec<ObjectId> = vec![];
-            obj_deps.extend(deps.iter().map(|r| r.id));
-            obj_deps.extend(acks.iter().map(|r| r.id));
+            let header = CommitHeaderV0::new_with_deps_and_acks(
+                deps.iter().map(|r| r.id).collect(),
+                acks.iter().map(|r| r.id).collect(),
+            );

             let obj_ref = ObjectRef {
                 id: ObjectId::Blake3Digest32([1; 32]),
@@ -268,26 +217,27 @@ mod test {
             };
             let refs = vec![obj_ref];
             let metadata = vec![5u8; 55];
-            let expiry = None;

-            let commit = Commit::new(
+            let commit = CommitV0::new(
                 author_privkey,
                 author_pubkey,
                 seq,
                 branch,
+                QuorumType::NoSigning,
                 deps,
+                vec![],
                 acks,
+                vec![],
                 refs,
+                vec![],
                 metadata,
                 body_ref,
-                expiry,
             )
             .unwrap();
             //log_debug!("commit: {:?}", commit);
             add_obj(
-                ObjectContent::Commit(commit),
-                obj_deps,
-                expiry,
+                ObjectContentV0::Commit(commit),
+                header,
                 repo_pubkey,
                 repo_secret,
                 store,
@@ -295,19 +245,16 @@ mod test {
         }

         fn add_body_branch(
-            branch: Branch,
+            branch: BranchV0,
             repo_pubkey: PubKey,
             repo_secret: SymKey,
-            store: &mut impl RepoStore,
+            store: &Box<impl RepoStore + ?Sized>,
         ) -> ObjectRef {
-            let deps = vec![];
-            let expiry = None;
-            let body = CommitBody::Branch(branch);
+            let body = CommitBodyV0::Branch(branch);
             //log_debug!("body: {:?}", body);
             add_obj(
-                ObjectContent::CommitBody(body),
-                deps,
-                expiry,
+                ObjectContentV0::CommitBody(body),
+                None,
                 repo_pubkey,
                 repo_secret,
                 store,
@@ -315,45 +262,24 @@ mod test {
         }

         fn add_body_trans(
-            deps: Vec<ObjectId>,
+            header: Option<CommitHeaderV0>,
             repo_pubkey: PubKey,
             repo_secret: SymKey,
-            store: &mut impl RepoStore,
+            store: &Box<impl RepoStore + ?Sized>,
         ) -> ObjectRef {
-            let expiry = None;
             let content = [7u8; 777].to_vec();
-            let body = CommitBody::Transaction(Transaction::V0(content));
-            //log_debug!("body: {:?}", body);
-            add_obj(
-                ObjectContent::CommitBody(body),
-                deps,
-                expiry,
-                repo_pubkey,
-                repo_secret,
-                store,
-            )
-        }
-
-        fn add_body_ack(
-            deps: Vec<ObjectId>,
-            repo_pubkey: PubKey,
-            repo_secret: SymKey,
-            store: &mut impl RepoStore,
-        ) -> ObjectRef {
-            let expiry = None;
-            let body = CommitBody::Ack(Ack::V0());
+            let body = CommitBodyV0::Transaction(content);
             //log_debug!("body: {:?}", body);
             add_obj(
-                ObjectContent::CommitBody(body),
-                deps,
-                expiry,
+                ObjectContentV0::CommitBody(body),
+                header,
                 repo_pubkey,
                 repo_secret,
                 store,
             )
         }

-        let mut store = HashMapRepoStore::new();
+        let store = Box::new(HashMapRepoStore::new());
         let mut rng = OsRng {};

         // repo
@@ -369,7 +295,7 @@ mod test {
             repo_keypair.public.as_bytes().len(),
             repo_keypair.public.as_bytes()
         );
-        let _repo_privkey = PrivKey::Ed25519PrivKey(repo_keypair.secret.to_bytes());
+        let repo_privkey = PrivKey::Ed25519PrivKey(repo_keypair.secret.to_bytes());
        let repo_pubkey = PubKey::Ed25519PubKey(repo_keypair.public.to_bytes());
        let repo_secret = SymKey::ChaCha20Key([9; 32]);
@@ -385,23 +311,26 @@ mod test {
         let member_pubkey = PubKey::Ed25519PubKey(member_keypair.public.to_bytes());

         let metadata = [66u8; 64].to_vec();
-        let commit_types = vec![CommitType::Ack, CommitType::Transaction];
-        let secret = SymKey::ChaCha20Key([0; 32]);
-
-        let member = MemberV0::new(member_pubkey, commit_types, metadata.clone());
-        let members = vec![member];
-        let mut quorum = HashMap::new();
-        quorum.insert(CommitType::Transaction, 3);
-        let ack_delay = RelTime::Minutes(3);
-        let tags = [99u8; 32].to_vec();
-        let branch = Branch::new(
-            branch_pubkey,
+
+        let repo = Repo::new_with_member(
+            &repo_pubkey,
+            member_pubkey,
+            &[Permission::Transaction],
+            store,
+        );
+
+        let repo_ref = ObjectRef {
+            id: ObjectId::Blake3Digest32([1; 32]),
+            key: SymKey::ChaCha20Key([2; 32]),
+        };
+        let root_branch_def_id = ObjectId::Blake3Digest32([1; 32]);
+
+        let branch = BranchV0::new(
             branch_pubkey,
-            secret,
-            members,
-            quorum,
-            ack_delay,
-            tags,
+            repo_ref,
+            root_branch_def_id,
+            repo_privkey,
             metadata,
         );
         //log_debug!("branch: {:?}", branch);
@@ -427,16 +356,16 @@ mod test {
             branch.clone(),
             repo_pubkey.clone(),
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

-        let ack_body = add_body_ack(vec![], repo_pubkey, repo_secret.clone(), &mut store);
-        let trans_body = add_body_trans(vec![], repo_pubkey, repo_secret.clone(), &mut store);
+        let trans_body = add_body_trans(None, repo_pubkey, repo_secret.clone(), repo.get_store());

         // create & add commits to store
         log_debug!(">> br");
         let br = add_commit(
-            branch_body.clone(),
+            branch_pubkey,
             member_privkey.clone(),
             member_pubkey,
             0,
@@ -445,12 +374,12 @@ mod test {
             branch_body.clone(),
             repo_pubkey,
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

         log_debug!(">> t1");
         let t1 = add_commit(
-            branch_body.clone(),
+            branch_pubkey,
             member_privkey.clone(),
             member_pubkey,
             1,
@@ -459,12 +388,12 @@ mod test {
             trans_body.clone(),
             repo_pubkey,
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

         log_debug!(">> t2");
         let t2 = add_commit(
-            branch_body.clone(),
+            branch_pubkey,
             member_privkey.clone(),
             member_pubkey,
             2,
@@ -473,26 +402,26 @@ mod test {
             trans_body.clone(),
             repo_pubkey,
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

-        log_debug!(">> a3");
-        let a3 = add_commit(
-            branch_body.clone(),
-            member_privkey.clone(),
-            member_pubkey,
-            3,
-            vec![t1.clone()],
-            vec![],
-            ack_body.clone(),
-            repo_pubkey,
-            repo_secret.clone(),
-            &mut store,
-        );
+        // log_debug!(">> a3");
+        // let a3 = add_commit(
+        //     branch_pubkey,
+        //     member_privkey.clone(),
+        //     member_pubkey,
+        //     3,
+        //     vec![t1.clone()],
+        //     vec![],
+        //     ack_body.clone(),
+        //     repo_pubkey,
+        //     repo_secret.clone(),
+        //     &mut store,
+        // );

         log_debug!(">> t4");
         let t4 = add_commit(
-            branch_body.clone(),
+            branch_pubkey,
             member_privkey.clone(),
             member_pubkey,
             4,
@@ -501,12 +430,12 @@ mod test {
             trans_body.clone(),
             repo_pubkey,
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

         log_debug!(">> t5");
         let t5 = add_commit(
-            branch_body.clone(),
+            branch_pubkey,
             member_privkey.clone(),
             member_pubkey,
             5,
@@ -515,42 +444,42 @@ mod test {
             trans_body.clone(),
             repo_pubkey,
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

         log_debug!(">> a6");
         let a6 = add_commit(
-            branch_body.clone(),
+            branch_pubkey,
             member_privkey.clone(),
             member_pubkey,
             6,
             vec![t4.clone()],
             vec![],
-            ack_body.clone(),
+            trans_body.clone(),
             repo_pubkey,
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

         log_debug!(">> a7");
         let a7 = add_commit(
-            branch_body.clone(),
+            branch_pubkey,
             member_privkey.clone(),
             member_pubkey,
             7,
             vec![t4.clone()],
             vec![],
-            ack_body.clone(),
+            trans_body.clone(),
             repo_pubkey,
             repo_secret.clone(),
-            &mut store,
+            repo.get_store(),
         );

-        let c7 = Commit::load(a7.clone(), &store).unwrap();
-        c7.verify(&branch, &store).unwrap();
+        let c7 = Commit::load(a7.clone(), repo.get_store()).unwrap();
+        c7.verify(&repo, repo.get_store()).unwrap();

         let mut filter = Filter::new(FilterBuilder::new(10, 0.01));
-        for commit_ref in [br, t1, t2, a3.clone(), t5.clone(), a6.clone()] {
+        for commit_ref in [br, t1, t2, t5.clone(), a6.clone()] {
             match commit_ref.id {
                 ObjectId::Blake3Digest32(d) => filter.add(&d),
             }
@@ -568,10 +497,10 @@ mod test {
         log_debug!("   their_commits: [br, t1, t2, a3, t5, a6]");
         let ids = Branch::sync_req(
-            &[a3.id, t5.id, a6.id, a7.id],
-            &[a3.id, t5.id],
+            &[t5.id, a6.id, a7.id],
+            &[t5.id],
             &their_commits,
-            &store,
+            repo.get_store(),
         )
         .unwrap();
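
Note: throughout this refactor, store parameters change from `&impl RepoStore` to
`&Box<impl RepoStore + ?Sized>`, which lets the same functions accept a borrowed
`Box<dyn RepoStore>` (what `Repo::get_store()` appears to return here). A minimal
sketch of the pattern, not part of this diff; `our_heads`, `their_heads` and
`their_filter` are hypothetical values:

    // the unsized `dyn RepoStore` satisfies `impl RepoStore + ?Sized`
    let store: Box<dyn RepoStore> = Box::new(HashMapRepoStore::new());
    let ids = Branch::sync_req(&our_heads, &their_heads, &their_filter, &store)?;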

p2p-repo/src/commit.rs

@@ -13,19 +13,22 @@
 use ed25519_dalek::*;

-use std::collections::HashSet;
-use std::iter::FromIterator;
-
+use crate::errors::NgError;
 use crate::log::*;
 use crate::object::*;
+use crate::repo::Repo;
 use crate::store::*;
 use crate::types::*;
+use std::collections::HashSet;
+use std::iter::FromIterator;

 #[derive(Debug)]
 pub enum CommitLoadError {
     MissingBlocks(Vec<BlockId>),
     ObjectParseError,
     DeserializeError,
+    CannotBeAtRootOfBranch,
+    MustBeAtRootOfBranch,
 }

 #[derive(Debug)]
@@ -35,47 +38,33 @@ pub enum CommitVerifyError {
     BodyLoadError(CommitLoadError),
     DepLoadError(CommitLoadError),
 }

-impl CommitBody {
-    /// Get CommitType corresponding to CommitBody
-    pub fn to_type(&self) -> CommitType {
-        match self {
-            CommitBody::Ack(_) => CommitType::Ack,
-            CommitBody::AddBranch(_) => CommitType::AddBranch,
-            CommitBody::AddMembers(_) => CommitType::AddMembers,
-            CommitBody::Branch(_) => CommitType::Branch,
-            CommitBody::EndOfBranch(_) => CommitType::EndOfBranch,
-            CommitBody::RemoveBranch(_) => CommitType::RemoveBranch,
-            CommitBody::Repository(_) => CommitType::Repository,
-            CommitBody::Snapshot(_) => CommitType::Snapshot,
-            CommitBody::Transaction(_) => CommitType::Transaction,
-        }
-    }
-}
-
 impl CommitV0 {
     /// New commit
     pub fn new(
         author_privkey: PrivKey,
         author_pubkey: PubKey,
-        seq: u32,
-        branch: ObjectRef,
+        seq: u64,
+        branch: BranchId,
+        quorum: QuorumType,
         deps: Vec<ObjectRef>,
+        ndeps: Vec<ObjectRef>,
         acks: Vec<ObjectRef>,
+        nacks: Vec<ObjectRef>,
         refs: Vec<ObjectRef>,
+        nrefs: Vec<ObjectRef>,
         metadata: Vec<u8>,
         body: ObjectRef,
-        expiry: Option<Timestamp>,
     ) -> Result<CommitV0, SignatureError> {
+        let headers = CommitHeaderV0::new_with(deps, ndeps, acks, nacks, refs, nrefs);
         let content = CommitContentV0 {
             author: author_pubkey,
             seq,
             branch,
-            deps,
-            acks,
-            refs,
+            header_keys: headers.1,
+            quorum,
             metadata,
             body,
-            expiry,
         };
         let content_ser = serde_bare::to_vec(&content).unwrap();
@@ -96,6 +85,7 @@ impl CommitV0 {
             sig,
             id: None,
             key: None,
+            header: headers.0,
         })
     }
 }
@@ -105,32 +95,41 @@ impl Commit {
     pub fn new(
         author_privkey: PrivKey,
         author_pubkey: PubKey,
-        seq: u32,
-        branch: ObjectRef,
+        seq: u64,
+        branch: BranchId,
+        quorum: QuorumType,
         deps: Vec<ObjectRef>,
+        ndeps: Vec<ObjectRef>,
         acks: Vec<ObjectRef>,
+        nacks: Vec<ObjectRef>,
         refs: Vec<ObjectRef>,
+        nrefs: Vec<ObjectRef>,
         metadata: Vec<u8>,
         body: ObjectRef,
-        expiry: Option<Timestamp>,
     ) -> Result<Commit, SignatureError> {
         CommitV0::new(
             author_privkey,
             author_pubkey,
             seq,
             branch,
+            quorum,
             deps,
+            ndeps,
             acks,
+            nacks,
             refs,
+            nrefs,
             metadata,
             body,
-            expiry,
         )
         .map(|c| Commit::V0(c))
     }

     /// Load commit from store
-    pub fn load(commit_ref: ObjectRef, store: &impl RepoStore) -> Result<Commit, CommitLoadError> {
+    pub fn load(
+        commit_ref: ObjectRef,
+        store: &Box<impl RepoStore + ?Sized>,
+    ) -> Result<Commit, CommitLoadError> {
         let (id, key) = (commit_ref.id, commit_ref.key);
         match Object::load(id, Some(key.clone()), store) {
             Ok(obj) => {
@@ -138,12 +137,15 @@ impl Commit {
                     .content()
                     .map_err(|_e| CommitLoadError::ObjectParseError)?;
                 let mut commit = match content {
-                    ObjectContent::Commit(c) => c,
+                    ObjectContent::V0(ObjectContentV0::Commit(c)) => c,
                     _ => return Err(CommitLoadError::DeserializeError),
                 };
-                commit.set_id(id);
-                commit.set_key(key.clone());
-                Ok(commit)
+                commit.id = Some(id);
+                commit.key = Some(key.clone());
+                if let Some(CommitHeader::V0(header_v0)) = obj.header() {
+                    commit.header = Some(header_v0.clone());
+                }
+                Ok(Commit::V0(commit))
             }
             Err(ObjectParseError::MissingBlocks(missing)) => {
                 Err(CommitLoadError::MissingBlocks(missing))
@@ -153,8 +155,12 @@ impl Commit {
     }

     /// Load commit body from store
-    pub fn load_body(&self, store: &impl RepoStore) -> Result<CommitBody, CommitLoadError> {
-        let content = self.content();
+    pub fn load_body(
+        &self,
+        store: &Box<impl RepoStore + ?Sized>,
+    ) -> Result<CommitBody, CommitLoadError> {
+        // TODO store body in CommitV0 (with #[serde(skip)]) as a cache for subsequent calls to load_body
+        let content = self.content_v0();
         let (id, key) = (content.body.id, content.body.key.clone());
         let obj = Object::load(id.clone(), Some(key.clone()), store).map_err(|e| match e {
             ObjectParseError::MissingBlocks(missing) => CommitLoadError::MissingBlocks(missing),
@@ -164,7 +170,7 @@ impl Commit {
             .content()
             .map_err(|_e| CommitLoadError::ObjectParseError)?;
         match content {
-            ObjectContent::CommitBody(body) => Ok(body),
+            ObjectContent::V0(ObjectContentV0::CommitBody(body)) => Ok(CommitBody::V0(body)),
             _ => Err(CommitLoadError::DeserializeError),
         }
     }
@@ -204,36 +210,92 @@ impl Commit {
         }
     }

-    /// Get commit content
-    pub fn content(&self) -> &CommitContentV0 {
+    /// Get commit content V0
+    pub fn content_v0(&self) -> &CommitContentV0 {
         match self {
             Commit::V0(c) => &c.content,
         }
     }

+    /// This commit is the first one in the branch (doesn't have any ACKs nor Nacks)
+    pub fn is_root_commit_of_branch(&self) -> bool {
+        match self {
+            Commit::V0(c) => match &c.content.header_keys {
+                Some(hk) => hk.acks.is_empty() && hk.nacks.is_empty(),
+                None => true,
+            },
+            _ => unimplemented!(),
+        }
+    }
+
     /// Get acks
     pub fn acks(&self) -> Vec<ObjectRef> {
+        let mut res: Vec<ObjectRef> = vec![];
         match self {
-            Commit::V0(c) => c.content.acks.clone(),
-        }
+            Commit::V0(c) => match &c.header {
+                Some(header_v0) => match &c.content.header_keys {
+                    Some(hk) => {
+                        for ack in header_v0.acks.iter().zip(hk.acks.iter()) {
+                            res.push(ack.into());
+                        }
+                    }
+                    None => {}
+                },
+                None => {}
+            },
+            _ => {}
+        };
+        res
     }

     /// Get deps
     pub fn deps(&self) -> Vec<ObjectRef> {
+        let mut res: Vec<ObjectRef> = vec![];
         match self {
-            Commit::V0(c) => c.content.deps.clone(),
-        }
+            Commit::V0(c) => match &c.header {
+                Some(header_v0) => match &c.content.header_keys {
+                    Some(hk) => {
+                        for dep in header_v0.deps.iter().zip(hk.deps.iter()) {
+                            res.push(dep.into());
+                        }
+                    }
+                    None => {}
+                },
+                None => {}
+            },
+            _ => {}
+        };
+        res
     }

-    /// Get all direct commit dependencies of the commit (`deps`, `acks`)
-    pub fn deps_acks(&self) -> Vec<ObjectRef> {
+    /// Get all commits that are in the direct causal past of the commit (`deps`, `acks`, `nacks`, `ndeps`)
+    pub fn direct_causal_past(&self) -> Vec<ObjectRef> {
+        let mut res: Vec<ObjectRef> = vec![];
         match self {
-            Commit::V0(c) => [c.content.acks.clone(), c.content.deps.clone()].concat(),
-        }
+            Commit::V0(c) => match (&c.header, &c.content.header_keys) {
+                (Some(header_v0), Some(hk)) => {
+                    for ack in header_v0.acks.iter().zip(hk.acks.iter()) {
+                        res.push(ack.into());
+                    }
+                    for nack in header_v0.nacks.iter().zip(hk.nacks.iter()) {
+                        res.push(nack.into());
+                    }
+                    for dep in header_v0.deps.iter().zip(hk.deps.iter()) {
+                        res.push(dep.into());
+                    }
+                    for ndep in header_v0.ndeps.iter().zip(hk.ndeps.iter()) {
+                        res.push(ndep.into());
+                    }
+                }
+                _ => {}
+            },
+            _ => {}
+        };
+        res
     }

     /// Get seq
-    pub fn seq(&self) -> u32 {
+    pub fn seq(&self) -> u64 {
         match self {
             Commit::V0(c) => c.content.seq,
         }
@@ -258,32 +320,30 @@ impl Commit {
         }
     }

     /// Verify commit permissions
-    pub fn verify_perm(&self, body: &CommitBody, branch: &Branch) -> Result<(), CommitVerifyError> {
-        let content = self.content();
-        match branch.get_member(&content.author) {
-            Some(m) => {
-                if m.has_perm(body.to_type()) {
-                    return Ok(());
-                }
-            }
-            None => (),
-        }
-        Err(CommitVerifyError::PermissionDenied)
+    pub fn verify_perm(&self, repo: &Repo) -> Result<(), CommitVerifyError> {
+        repo.verify_permission(self)
+            .map_err(|_| CommitVerifyError::PermissionDenied)
     }

-    /// Verify if the commit's `body` and dependencies (`deps` & `acks`) are available in the `store`
-    pub fn verify_deps(&self, store: &impl RepoStore) -> Result<Vec<ObjectId>, CommitLoadError> {
-        //log_debug!(">> verify_deps: #{}", self.seq());
+    /// Verify if the commit's `body`, `header` and direct_causal_past, and recursively all their refs are available in the `store`
+    pub fn verify_full_object_refs_of_branch_at_commit(
+        &self,
+        store: &Box<impl RepoStore + ?Sized>,
+    ) -> Result<Vec<ObjectId>, CommitLoadError> {
+        //log_debug!(">> verify_full_object_refs_of_branch_at_commit: #{}", self.seq());

         /// Load `Commit`s of a `Branch` from the `RepoStore` starting from the given `Commit`,
         /// and collect missing `ObjectId`s
-        fn load_branch(
+        fn load_direct_object_refs(
             commit: &Commit,
-            store: &impl RepoStore,
+            store: &Box<impl RepoStore + ?Sized>,
             visited: &mut HashSet<ObjectId>,
             missing: &mut HashSet<ObjectId>,
         ) -> Result<(), CommitLoadError> {
             //log_debug!(">>> load_branch: #{}", commit.seq());
-            // the commit verify_deps() was called on may not have an ID set,
+
+            // FIXME: what about this comment? seems like a Commit always has an id
+            // the self of verify_full_object_refs_of_branch_at_commit() may not have an ID set,
             // but the commits loaded from store should have it
             match commit.id() {
                 Some(id) => {
@@ -292,40 +352,53 @@ impl Commit {
                     }
                     visited.insert(id);
                 }
-                None => (),
+                None => panic!("Commit without an ID"),
             }

-            // load body & check if it's the Branch commit at the root
-            let is_root = match commit.load_body(store) {
-                Ok(body) => body.to_type() == CommitType::Branch,
+            // load body & check if it's the Branch root commit
+            match commit.load_body(store) {
+                Ok(body) => {
+                    if commit.is_root_commit_of_branch() {
+                        if body.must_be_root_commit_in_branch() {
+                            Ok(())
+                        } else {
+                            Err(CommitLoadError::CannotBeAtRootOfBranch)
+                        }
+                    } else {
+                        if body.must_be_root_commit_in_branch() {
+                            Err(CommitLoadError::MustBeAtRootOfBranch)
+                        } else {
+                            Ok(())
+                        }
+                    }
+                }
                 Err(CommitLoadError::MissingBlocks(m)) => {
+                    // The commit body is missing.
                     missing.extend(m);
-                    false
+                    Ok(())
                 }
-                Err(e) => return Err(e),
-            };
-            log_debug!("!!! is_root: {}", is_root);
+                Err(e) => Err(e),
+            }?;

-            // load deps
-            if !is_root {
-                for dep in commit.deps_acks() {
-                    match Commit::load(dep, store) {
-                        Ok(c) => {
-                            load_branch(&c, store, visited, missing)?;
-                        }
-                        Err(CommitLoadError::MissingBlocks(m)) => {
-                            missing.extend(m);
-                        }
-                        Err(e) => return Err(e),
-                    }
-                }
-            }
+            // load direct causal past
+            for blockref in commit.direct_causal_past() {
+                match Commit::load(blockref, store) {
+                    Ok(c) => {
+                        load_direct_object_refs(&c, store, visited, missing)?;
+                    }
+                    Err(CommitLoadError::MissingBlocks(m)) => {
+                        missing.extend(m);
+                    }
+                    Err(e) => return Err(e),
+                }
+            }

             Ok(())
         }

         let mut visited = HashSet::new();
         let mut missing = HashSet::new();
-        load_branch(self, store, &mut visited, &mut missing)?;
+        load_direct_object_refs(self, store, &mut visited, &mut missing)?;

         if !missing.is_empty() {
             return Err(CommitLoadError::MissingBlocks(Vec::from_iter(missing)));
@@ -333,15 +406,16 @@ impl Commit {
         Ok(Vec::from_iter(visited))
     }

-    /// Verify signature, permissions, and dependencies
-    pub fn verify(&self, branch: &Branch, store: &impl RepoStore) -> Result<(), CommitVerifyError> {
+    /// Verify signature, permissions, and full causal past
+    pub fn verify(
+        &self,
+        repo: &Repo,
+        store: &Box<impl RepoStore + ?Sized>,
+    ) -> Result<(), CommitVerifyError> {
         self.verify_sig()
             .map_err(|_e| CommitVerifyError::InvalidSignature)?;
-        let body = self
-            .load_body(store)
-            .map_err(|e| CommitVerifyError::BodyLoadError(e))?;
-        self.verify_perm(&body, branch)?;
-        self.verify_deps(store)
+        self.verify_perm(repo)?;
+        self.verify_full_object_refs_of_branch_at_commit(repo.get_store())
             .map_err(|e| CommitVerifyError::DepLoadError(e))?;
         Ok(())
     }
@@ -382,46 +456,40 @@ mod test {
             key: SymKey::ChaCha20Key([2; 32]),
         };
         let obj_refs = vec![obj_ref.clone()];
-        let branch = obj_ref.clone();
+        let branch = pub_key;
         let deps = obj_refs.clone();
         let acks = obj_refs.clone();
         let refs = obj_refs.clone();
         let metadata = vec![1, 2, 3];
         let body_ref = obj_ref.clone();
-        let expiry = Some(2342);

         let commit = Commit::new(
-            priv_key, pub_key, seq, branch, deps, acks, refs, metadata, body_ref, expiry,
+            priv_key,
+            pub_key,
+            seq,
+            branch,
+            QuorumType::NoSigning,
+            deps,
+            vec![],
+            acks,
+            vec![],
+            refs,
+            vec![],
+            metadata,
+            body_ref,
         )
         .unwrap();
         log_debug!("commit: {:?}", commit);

-        let store = HashMapRepoStore::new();
-
-        let metadata = [66u8; 64].to_vec();
-        let commit_types = vec![CommitType::Ack, CommitType::Transaction];
-        let key: [u8; 32] = [0; 32];
-        let secret = SymKey::ChaCha20Key(key);
-        let member = MemberV0::new(pub_key, commit_types, metadata.clone());
-        let members = vec![member];
-        let mut quorum = HashMap::new();
-        quorum.insert(CommitType::Transaction, 3);
-        let ack_delay = RelTime::Minutes(3);
-        let tags = [99u8; 32].to_vec();
-        let branch = Branch::new(
-            pub_key.clone(),
-            pub_key.clone(),
-            secret,
-            members,
-            quorum,
-            ack_delay,
-            tags,
-            metadata,
-        );
-        //log_debug!("branch: {:?}", branch);
-        let body = CommitBody::Ack(Ack::V0());
+        let store = Box::new(HashMapRepoStore::new());
+
+        let repo =
+            Repo::new_with_member(&pub_key, pub_key.clone(), &[Permission::Transaction], store);
+
+        //let body = CommitBody::Ack(Ack::V0());
         //log_debug!("body: {:?}", body);

-        match commit.load_body(&store) {
+        match commit.load_body(repo.get_store()) {
             Ok(_b) => panic!("Body should not exist"),
             Err(CommitLoadError::MissingBlocks(missing)) => {
                 assert_eq!(missing.len(), 1);
@@ -429,15 +497,13 @@ mod test {
             Err(e) => panic!("Commit verify error: {:?}", e),
         }

-        let content = commit.content();
+        let content = commit.content_v0();
         log_debug!("content: {:?}", content);

         commit.verify_sig().expect("Invalid signature");
-        commit
-            .verify_perm(&body, &branch)
-            .expect("Permission denied");
+        commit.verify_perm(&repo).expect("Permission denied");

-        match commit.verify_deps(&store) {
+        match commit.verify_full_object_refs_of_branch_at_commit(repo.get_store()) {
             Ok(_) => panic!("Commit should not be Ok"),
             Err(CommitLoadError::MissingBlocks(missing)) => {
                 assert_eq!(missing.len(), 1);
@@ -445,7 +511,7 @@ mod test {
             Err(e) => panic!("Commit verify error: {:?}", e),
         }

-        match commit.verify(&branch, &store) {
+        match commit.verify(&repo, repo.get_store()) {
             Ok(_) => panic!("Commit should not be Ok"),
             Err(CommitVerifyError::BodyLoadError(CommitLoadError::MissingBlocks(missing))) => {
                 assert_eq!(missing.len(), 1);
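
Note: a minimal sketch (not part of this diff) of creating a commit with the new
signature, using the same placeholder values as the test above; the header ids and
keys are derived internally via CommitHeaderV0::new_with:

    let commit = Commit::new(
        author_privkey, author_pubkey,
        0,                      // seq is now u64
        branch_id,              // a BranchId (PubKey), no longer an ObjectRef
        QuorumType::NoSigning,
        deps, vec![],           // deps, ndeps
        acks, vec![],           // acks, nacks
        refs, vec![],           // refs, nrefs
        metadata, body_ref,
    )?;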

p2p-repo/src/errors.rs

@@ -11,6 +11,7 @@
 //! Errors

+use crate::commit::CommitLoadError;
 use core::fmt;
 use std::error::Error;
@@ -23,6 +24,9 @@ pub enum NgError {
     InvalidInvitation,
     InvalidCreateAccount,
     InvalidFileFormat,
+    InvalidArgument,
+    PermissionDenied,
+    RepoLoadError,
 }

 impl Error for NgError {}
@@ -44,3 +48,9 @@ impl From<ed25519_dalek::ed25519::Error> for NgError {
         NgError::InvalidSignature
     }
 }
+
+impl From<CommitLoadError> for NgError {
+    fn from(e: CommitLoadError) -> Self {
+        NgError::RepoLoadError
+    }
+}

p2p-repo/src/lib.rs

@@ -20,6 +20,8 @@ pub mod branch;
 pub mod repo;

+pub mod site;
+
 pub mod utils;

 pub mod errors;

p2p-repo/src/object.rs

@@ -20,8 +20,11 @@ use crate::log::*;
 use crate::store::*;
 use crate::types::*;

+// TODO: review all those constants. they were done for LMDB but we now use RocksDB.
+// Also, the format of Blocks have changed so all should be checked.
+
 /// Size of a serialized empty Block
-const EMPTY_BLOCK_SIZE: usize = 12;
+const EMPTY_BLOCK_SIZE: usize = 12 + 1;
 /// Size of a serialized BlockId
 const BLOCK_ID_SIZE: usize = 33;
 /// Size of serialized SymKey
@@ -34,16 +37,18 @@ const DEPSREF_OVERLOAD: usize = EMPTY_ROOT_SIZE_DEPSREF - EMPTY_BLOCK_SIZE;
 const BIG_VARINT_EXTRA: usize = 3;
 /// Varint extra bytes when reaching the maximum size of data byte arrays.
 const DATA_VARINT_EXTRA: usize = 4;
-/// Max extra space used by the deps list
-const MAX_DEPS_SIZE: usize = 8 * BLOCK_ID_SIZE;
+// Max extra space used by the deps list
+//const MAX_DEPS_SIZE: usize = 8 * BLOCK_ID_SIZE;
+const MAX_HEADER_SIZE: usize = BLOCK_ID_SIZE;

 #[derive(Debug)]
+/// An Object in memory. This is not used to serialize data
 pub struct Object {
     /// Blocks of the Object (nodes of the tree)
     blocks: Vec<Block>,

-    /// Dependencies
-    deps: Vec<ObjectId>,
+    /// Header
+    header: Option<CommitHeader>,
 }

 /// Object parsing errors
@@ -59,8 +64,8 @@ pub enum ObjectParseError {
     InvalidChildren,
     /// Number of keys does not match number of children of a block
     InvalidKeys,
-    /// Invalid DepList object content
-    InvalidDeps,
+    /// Invalid CommitHeader object content
+    InvalidHeader,
     /// Error deserializing content of a block
     BlockDeserializeError,
     /// Error deserializing content of the object
@@ -89,8 +94,7 @@ impl Object {
         content: &[u8],
         conv_key: &[u8; blake3::OUT_LEN],
         children: Vec<ObjectId>,
-        deps: ObjectDeps,
-        expiry: Option<Timestamp>,
+        header_ref: Option<ObjectRef>,
     ) -> Block {
         let key_hash = blake3::keyed_hash(conv_key, content);
         let nonce = [0u8; 12];
@@ -100,45 +104,38 @@ impl Object {
         let mut content_enc_slice = &mut content_enc.as_mut_slice();
         cipher.apply_keystream(&mut content_enc_slice);
         let key = SymKey::ChaCha20Key(key.clone());
-        let block = Block::new(children, deps, expiry, content_enc, Some(key));
+        let block = Block::new(children, header_ref, content_enc, Some(key));
         //log_debug!(">>> make_block:");
         //log_debug!("!! id: {:?}", obj.id());
         //log_debug!("!! children: ({}) {:?}", children.len(), children);
         block
     }

-    fn make_deps(
-        deps_vec: Vec<ObjectId>,
+    fn make_header_v0(
+        header: CommitHeaderV0,
         object_size: usize,
         repo_pubkey: PubKey,
         repo_secret: SymKey,
-    ) -> ObjectDeps {
-        if deps_vec.len() <= 8 {
-            ObjectDeps::ObjectIdList(deps_vec)
-        } else {
-            let dep_list = DepList::V0(deps_vec);
-            let dep_obj = Object::new(
-                ObjectContent::DepList(dep_list),
-                vec![],
-                None,
-                object_size,
-                repo_pubkey,
-                repo_secret,
-            );
-            let dep_ref = ObjectRef {
-                id: dep_obj.id(),
-                key: dep_obj.key().unwrap(),
-            };
-            ObjectDeps::DepListRef(dep_ref)
-        }
+    ) -> ObjectRef {
+        let header_obj = Object::new(
+            ObjectContentV0::CommitHeader(header),
+            None,
+            object_size,
+            repo_pubkey,
+            repo_secret,
+        );
+        let header_ref = ObjectRef {
+            id: header_obj.id(),
+            key: header_obj.key().unwrap(),
+        };
+        header_ref
     }

     /// Build tree from leaves, returns parent nodes
     fn make_tree(
         leaves: &[Block],
         conv_key: &ChaCha20Key,
-        root_deps: &ObjectDeps,
-        expiry: Option<Timestamp>,
+        header_ref: &Option<ObjectRef>,
         arity: usize,
     ) -> Vec<Block> {
         let mut parents = vec![];
@@ -147,27 +144,26 @@ impl Object {
         while let Some(nodes) = it.next() {
             let keys = nodes.iter().map(|block| block.key().unwrap()).collect();
             let children = nodes.iter().map(|block| block.id()).collect();
-            let content = BlockContentV0::InternalNode(keys);
+            let content = ChunkContentV0::InternalNode(keys);
             let content_ser = serde_bare::to_vec(&content).unwrap();
-            let child_deps = ObjectDeps::ObjectIdList(vec![]);
-            let deps = if parents.is_empty() && it.peek().is_none() {
-                root_deps.clone()
+            let child_header = None;
+            let header = if parents.is_empty() && it.peek().is_none() {
+                header_ref
             } else {
-                child_deps
+                &child_header
             };
             parents.push(Self::make_block(
                 content_ser.as_slice(),
                 conv_key,
                 children,
-                deps,
-                expiry,
+                header.clone(),
             ));
         }
         //log_debug!("parents += {}", parents.len());
         if 1 < parents.len() {
             let mut great_parents =
-                Self::make_tree(parents.as_slice(), conv_key, root_deps, expiry, arity);
+                Self::make_tree(parents.as_slice(), conv_key, header_ref, arity);
             parents.append(&mut great_parents);
         }
         parents
@@ -180,14 +176,13 @@ impl Object {
     ///
     /// Arguments:
     /// * `content`: Object content
-    /// * `deps`: Dependencies of the object
+    /// * `header`: CommitHeaderV0 : All references of the object
    /// * `block_size`: Desired block size for chunking content, rounded up to nearest valid block size
     /// * `repo_pubkey`: Repository public key
     /// * `repo_secret`: Repository secret
     pub fn new(
-        content: ObjectContent,
-        deps: Vec<ObjectId>,
-        expiry: Option<Timestamp>,
+        content: ObjectContentV0,
+        header: Option<CommitHeaderV0>,
         block_size: usize,
         repo_pubkey: PubKey,
         repo_secret: SymKey,
@@ -200,106 +195,66 @@ impl Object {
         let mut blocks: Vec<Block> = vec![];
         let conv_key = Self::convergence_key(repo_pubkey, repo_secret.clone());

-        let obj_deps = Self::make_deps(
-            deps.clone(),
-            valid_block_size,
-            repo_pubkey,
-            repo_secret.clone(),
-        );
+        let header_ref = header
+            .clone()
+            .map(|ch| Self::make_header_v0(ch, valid_block_size, repo_pubkey, repo_secret.clone()));

         let content_ser = serde_bare::to_vec(&content).unwrap();

-        if EMPTY_BLOCK_SIZE + DATA_VARINT_EXTRA + BLOCK_ID_SIZE * deps.len() + content_ser.len()
+        if EMPTY_BLOCK_SIZE
+            + DATA_VARINT_EXTRA
+            + BLOCK_ID_SIZE * header_ref.as_ref().map_or(0, |_| 1)
+            + content_ser.len()
             <= valid_block_size
         {
             // content fits in root node
-            let data_chunk = BlockContentV0::DataChunk(content_ser.clone());
+            let data_chunk = ChunkContentV0::DataChunk(content_ser.clone());
             let content_ser = serde_bare::to_vec(&data_chunk).unwrap();
             blocks.push(Self::make_block(
                 content_ser.as_slice(),
                 &conv_key,
                 vec![],
-                obj_deps,
-                expiry,
+                header_ref,
             ));
         } else {
             // chunk content and create leaf nodes
             for chunk in content_ser.chunks(data_chunk_size) {
-                let data_chunk = BlockContentV0::DataChunk(chunk.to_vec());
+                let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec());
                 let content_ser = serde_bare::to_vec(&data_chunk).unwrap();
                 blocks.push(Self::make_block(
                     content_ser.as_slice(),
                     &conv_key,
                     vec![],
-                    ObjectDeps::ObjectIdList(vec![]),
-                    expiry,
+                    None,
                 ));
             }

             // internal nodes
             // arity: max number of ObjectRefs that fit inside an InternalNode Object within the object_size limit
             let arity: usize =
-                (valid_block_size - EMPTY_BLOCK_SIZE - BIG_VARINT_EXTRA * 2 - MAX_DEPS_SIZE)
+                (valid_block_size - EMPTY_BLOCK_SIZE - BIG_VARINT_EXTRA * 2 - MAX_HEADER_SIZE)
                     / (BLOCK_ID_SIZE + BLOCK_KEY_SIZE);
-            let mut parents =
-                Self::make_tree(blocks.as_slice(), &conv_key, &obj_deps, expiry, arity);
+            let mut parents = Self::make_tree(blocks.as_slice(), &conv_key, &header_ref, arity);
             blocks.append(&mut parents);
         }

-        Object { blocks, deps }
-    }
-
-    pub fn copy(
-        &self,
-        expiry: Option<Timestamp>,
-        repo_pubkey: PubKey,
-        repo_secret: SymKey,
-    ) -> Result<Object, ObjectCopyError> {
-        // getting the old object from store
-        let leaves: Vec<Block> = self.leaves().map_err(|_e| ObjectCopyError::ParseError)?;
-
-        let conv_key = Self::convergence_key(repo_pubkey, repo_secret);
-        let block_size = leaves.first().unwrap().content().len();
-        let valid_block_size = store_valid_value_size(block_size);
-
-        let mut blocks: Vec<Block> = vec![];
-        for block in leaves {
-            let mut copy = block.clone();
-            copy.set_expiry(expiry);
-            blocks.push(copy);
-        }
-
-        // internal nodes
-        // arity: max number of ObjectRefs that fit inside an InternalNode Object within the object_size limit
-        let arity: usize =
-            (valid_block_size - EMPTY_BLOCK_SIZE - BIG_VARINT_EXTRA * 2 - MAX_DEPS_SIZE)
-                / (BLOCK_ID_SIZE + BLOCK_KEY_SIZE);
-        let mut parents = Self::make_tree(
-            blocks.as_slice(),
-            &conv_key,
-            self.root().deps(),
-            expiry,
-            arity,
-        );
-        blocks.append(&mut parents);
-
-        Ok(Object {
-            blocks,
-            deps: self.deps().clone(),
-        })
+        Object {
+            blocks,
+            header: header.map(|h| CommitHeader::V0(h)),
+        }
     }
     /// Load an Object from RepoStore
     ///
-    /// Returns Ok(Object) or an Err(Vec<ObjectId>) of missing BlockIds
+    /// Returns Ok(Object) or an Err(ObjectParseError::MissingBlocks(Vec<ObjectId>)) listing the missing BlockIds
     pub fn load(
         id: ObjectId,
         key: Option<SymKey>,
-        store: &impl RepoStore,
+        store: &Box<impl RepoStore + ?Sized>,
     ) -> Result<Object, ObjectParseError> {
         fn load_tree(
             parents: Vec<BlockId>,
-            store: &impl RepoStore,
+            store: &Box<impl RepoStore + ?Sized>,
             blocks: &mut Vec<Block>,
             missing: &mut Vec<BlockId>,
         ) {

@@ -307,13 +262,12 @@ impl Object {
             for id in parents {
                 match store.get(&id) {
                     Ok(block) => {
-                        //FIXME: remove the block.clone()
-                        blocks.insert(0, block.clone());
-                        match block {
+                        match &block {
                             Block::V0(o) => {
-                                children.extend(o.children.iter().rev());
+                                children.extend(o.children().iter().rev());
                             }
                         }
+                        blocks.insert(0, block);
                     }
                     Err(_) => missing.push(id.clone()),
                 }

@@ -337,22 +291,24 @@ impl Object {
             root.set_key(key);
         }

-        let deps = match root.deps().clone() {
-            ObjectDeps::ObjectIdList(deps_vec) => deps_vec,
-            ObjectDeps::DepListRef(deps_ref) => {
-                let obj = Object::load(deps_ref.id, Some(deps_ref.key), store)?;
+        let header = match root.header_ref() {
+            Some(header_ref) => {
+                let obj = Object::load(header_ref.id, Some(header_ref.key), store)?;
                 match obj.content()? {
-                    ObjectContent::DepList(DepList::V0(deps_vec)) => deps_vec,
-                    _ => return Err(ObjectParseError::InvalidDeps),
+                    ObjectContent::V0(ObjectContentV0::CommitHeader(commit_header)) => {
+                        Some(CommitHeader::V0(commit_header))
+                    }
+                    _ => return Err(ObjectParseError::InvalidHeader),
                 }
             }
+            None => None,
         };
-        Ok(Object { blocks, deps })
+        Ok(Object { blocks, header })
     }
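Callers can treat the MissingBlocks variant as a fetch list. A sketch (the variant name is taken from the updated doc comment above; `request_blocks` is a hypothetical helper):

    match Object::load(id, Some(key), &store) {
        Ok(obj) => {
            // full tree present: decrypt and deserialize the content
            let _content = obj.content()?;
        }
        Err(ObjectParseError::MissingBlocks(missing)) => {
            // request `missing` BlockIds from peers, save them, then retry the load
            request_blocks(missing);
        }
        Err(e) => return Err(e),
    }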
     /// Save blocks of the object in the store
-    pub fn save(&self, store: &mut impl RepoStore) -> Result<(), StorageError> {
+    pub fn save(&self, store: &Box<impl RepoStore + ?Sized>) -> Result<(), StorageError> {
         let mut deduplicated: HashSet<ObjectId> = HashSet::new();
         for block in &self.blocks {
             let id = block.id();

@@ -387,20 +343,29 @@ impl Object {
     }

     pub fn is_root(&self) -> bool {
-        self.deps().len() == 0
-        //TODO: add && sdeps().len() == 0 && self.acks().len() == 0 && self.nacks().len() == 0
+        self.header.as_ref().map_or(true, |h| h.is_root())
     }

-    pub fn root(&self) -> &Block {
-        self.blocks.last().unwrap()
+    pub fn deps(&self) -> Vec<ObjectId> {
+        match &self.header {
+            Some(h) => h.deps(),
+            None => vec![],
+        }
+    }
+
+    pub fn acks(&self) -> Vec<ObjectId> {
+        match &self.header {
+            Some(h) => h.acks(),
+            None => vec![],
+        }
     }

-    pub fn expiry(&self) -> Option<Timestamp> {
-        self.blocks.last().unwrap().expiry()
+    pub fn root_block(&self) -> &Block {
+        self.blocks.last().unwrap()
     }

-    pub fn deps(&self) -> &Vec<ObjectId> {
-        &self.deps
+    pub fn header(&self) -> &Option<CommitHeader> {
+        &self.header
     }
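Since deps and acks are now derived from the header, a header-less object is by definition a root with no references. A quick sketch of the invariant these accessors give you:

    // invariant sketch: no header => no deps, no acks, is_root() == true
    if obj.header().is_none() {
        assert!(obj.deps().is_empty());
        assert!(obj.acks().is_empty());
        assert!(obj.is_root());
    }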
     pub fn blocks(&self) -> &Vec<Block> {
@@ -445,7 +410,7 @@ impl Object {
         match block {
             Block::V0(b) => {
                 // decrypt content
-                let mut content_dec = b.content.clone();
+                let mut content_dec = b.content.encrypted_content().clone();
                 match key {
                     SymKey::ChaCha20Key(key) => {
                         let nonce = [0u8; 12];

@@ -456,7 +421,7 @@ impl Object {
                 }

                 // deserialize content
-                let content: BlockContentV0;
+                let content: ChunkContentV0;
                 match serde_bare::from_slice(content_dec.as_slice()) {
                     Ok(c) => content = c,
                     Err(e) => {

@@ -464,26 +429,26 @@ impl Object {
                         return Err(ObjectParseError::BlockDeserializeError);
                     }
                 }
+                let b_children = b.children();
                 // parse content
                 match content {
-                    BlockContentV0::InternalNode(keys) => {
-                        if keys.len() != b.children.len() {
+                    ChunkContentV0::InternalNode(keys) => {
+                        if keys.len() != b_children.len() {
                             log_debug!(
                                 "Invalid keys length: got {}, expected {}",
                                 keys.len(),
-                                b.children.len()
+                                b_children.len()
                             );
-                            log_debug!("!!! children: {:?}", b.children);
+                            log_debug!("!!! children: {:?}", b_children);
                             log_debug!("!!! keys: {:?}", keys);
                             return Err(ObjectParseError::InvalidKeys);
                         }
-                        for (id, key) in b.children.iter().zip(keys.iter()) {
+                        for (id, key) in b_children.iter().zip(keys.iter()) {
                             children.push((id.clone(), key.clone()));
                         }
                     }
-                    BlockContentV0::DataChunk(chunk) => {
+                    ChunkContentV0::DataChunk(chunk) => {
                         if leaves.is_some() {
                             let mut leaf = block.clone();
                             leaf.set_key(Some(key.clone()));

@@ -548,9 +513,9 @@ impl Object {
             &mut Some(&mut obj_content),
         ) {
             Ok(_) => {
-                let content: ObjectContent;
+                let content: ObjectContentV0;
                 match serde_bare::from_slice(obj_content.as_slice()) {
-                    Ok(c) => Ok(c),
+                    Ok(c) => Ok(ObjectContent::V0(c)),
                     Err(e) => {
                         log_debug!("Object deserialize error: {}", e);
                         Err(ObjectParseError::ObjectDeserializeError)

@@ -560,6 +525,14 @@ impl Object {
             Err(e) => Err(e),
         }
     }
+
+    pub fn content_v0(&self) -> Result<ObjectContentV0, ObjectParseError> {
+        match self.content() {
+            Ok(ObjectContent::V0(v0)) => Ok(v0),
+            Err(e) => Err(e),
+            _ => unimplemented!(),
+        }
+    }
 }
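content_v0() spares callers the match on the versioned envelope that content() returns. For example (a sketch; Debug formatting of the other variants is an assumption):

    match obj.content_v0()? {
        ObjectContentV0::File(f) => log_debug!("file of type: {}", f.content_type),
        other => log_debug!("other content: {:?}", other),
    }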
 #[cfg(test)]

@@ -599,28 +572,20 @@ mod test {
             .read_to_end(&mut img_buffer)
             .expect("read of test.jpg");

-        let file = File::V0(FileV0 {
+        let file = FileV0 {
             content_type: "image/jpeg".into(),
             metadata: vec![],
             content: img_buffer,
-        });
-        let content = ObjectContent::File(file);
+        };
+        let content = ObjectContentV0::File(file);

         let deps: Vec<ObjectId> = vec![Digest::Blake3Digest32([9; 32])];
-        let exp = Some(2u32.pow(31));
         let max_object_size = store_max_value_size();

         let repo_secret = SymKey::ChaCha20Key([0; 32]);
         let repo_pubkey = PubKey::Ed25519PubKey([1; 32]);

-        let obj = Object::new(
-            content,
-            vec![],
-            exp,
-            max_object_size,
-            repo_pubkey,
-            repo_secret,
-        );
+        let obj = Object::new(content, None, max_object_size, repo_pubkey, repo_secret);

         log_debug!("obj.id: {:?}", obj.id());
         log_debug!("obj.key: {:?}", obj.key());
@@ -642,14 +607,15 @@ mod test {
     /// Test tree API
     #[test]
     pub fn test_object() {
-        let file = File::V0(FileV0 {
+        let file = FileV0 {
             content_type: "file/test".into(),
             metadata: Vec::from("some meta data here"),
             content: [(0..255).collect::<Vec<u8>>().as_slice(); 320].concat(),
-        });
-        let content = ObjectContent::File(file);
+        };
+        let content = ObjectContentV0::File(file);

-        let deps: Vec<ObjectId> = vec![Digest::Blake3Digest32([9; 32])];
+        let deps = vec![Digest::Blake3Digest32([9; 32])];
+        let header = CommitHeaderV0::new_with_deps(deps.clone());
         let exp = Some(2u32.pow(31));

         let max_object_size = 0;

@@ -658,8 +624,7 @@ mod test {
         let obj = Object::new(
             content.clone(),
-            deps.clone(),
-            exp,
+            header,
             max_object_size,
             repo_pubkey,
             repo_secret.clone(),

@@ -667,6 +632,7 @@ mod test {
         log_debug!("obj.id: {:?}", obj.id());
         log_debug!("obj.key: {:?}", obj.key());
+        log_debug!("obj.acks: {:?}", obj.acks());
         log_debug!("obj.deps: {:?}", obj.deps());
         log_debug!("obj.blocks.len: {:?}", obj.blocks().len());

@@ -679,19 +645,20 @@ mod test {
         assert_eq!(*obj.deps(), deps);

         match obj.content() {
-            Ok(cnt) => {
+            Ok(ObjectContent::V0(cnt)) => {
                 assert_eq!(content, cnt);
             }
             Err(e) => panic!("Object parse error: {:?}", e),
         }

-        let mut store = HashMapRepoStore::new();
+        let store = Box::new(HashMapRepoStore::new());

-        obj.save(&mut store).expect("Object save error");
+        obj.save(&store).expect("Object save error");

         let obj2 = Object::load(obj.id(), obj.key(), &store).unwrap();

         log_debug!("obj2.id: {:?}", obj2.id());
         log_debug!("obj2.key: {:?}", obj2.key());
+        log_debug!("obj2.acks: {:?}", obj2.acks());
         log_debug!("obj2.deps: {:?}", obj2.deps());
         log_debug!("obj2.blocks.len: {:?}", obj2.blocks().len());
         let mut i = 0;

@@ -700,10 +667,9 @@ mod test {
             i += 1;
         }

-        assert_eq!(*obj2.deps(), deps);
         assert_eq!(*obj2.deps(), deps);

-        match obj2.content() {
+        match obj2.content_v0() {
             Ok(cnt) => {
                 assert_eq!(content, cnt);
             }

@@ -729,20 +695,6 @@ mod test {
             Err(e) => panic!("Object3 parse error: {:?}", e),
             Ok(_) => panic!("Object3 should not return content"),
         }
-
-        let exp4 = Some(2342);
-        let obj4 = obj.copy(exp4, repo_pubkey, repo_secret).unwrap();
-        obj4.save(&mut store).unwrap();
-
-        assert_eq!(obj4.expiry(), exp4);
-        assert_eq!(*obj.deps(), deps);
-
-        match obj4.content() {
-            Ok(cnt) => {
-                assert_eq!(content, cnt);
-            }
-            Err(e) => panic!("Object3 parse error: {:?}", e),
-        }
     }
     /// Checks that a content that fits the root node, will not be chunked into children nodes

@@ -750,27 +702,27 @@ mod test {
     pub fn test_depth_1() {
         let deps: Vec<ObjectId> = vec![Digest::Blake3Digest32([9; 32])];

-        let empty_file = ObjectContent::File(File::V0(FileV0 {
+        let empty_file = ObjectContentV0::File(FileV0 {
             content_type: "".into(),
             metadata: vec![],
             content: vec![],
-        }));
+        });
         let empty_file_ser = serde_bare::to_vec(&empty_file).unwrap();
         log_debug!("empty file size: {}", empty_file_ser.len());

         let size = store_max_value_size()
             - EMPTY_BLOCK_SIZE
             - DATA_VARINT_EXTRA
-            - BLOCK_ID_SIZE * deps.len()
+            - BLOCK_ID_SIZE
             - empty_file_ser.len()
             - DATA_VARINT_EXTRA;
         log_debug!("file size: {}", size);

-        let content = ObjectContent::File(File::V0(FileV0 {
+        let content = ObjectContentV0::File(FileV0 {
             content_type: "".into(),
             metadata: vec![],
             content: vec![99; size],
-        }));
+        });
         let content_ser = serde_bare::to_vec(&content).unwrap();
         log_debug!("content len: {}", content_ser.len());

@@ -782,8 +734,7 @@ mod test {
         let object = Object::new(
             content,
-            deps,
-            expiry,
+            CommitHeaderV0::new_with_deps(deps),
             max_object_size,
             repo_pubkey,
             repo_secret,

@@ -792,7 +743,7 @@ mod test {
         log_debug!("root_id: {:?}", object.id());
         log_debug!("root_key: {:?}", object.key().unwrap());
         log_debug!("nodes.len: {:?}", object.blocks().len());
-        //log_debug!("root: {:?}", tree.root());
+        //log_debug!("root: {:?}", tree.root_block());
         //log_debug!("nodes: {:?}", object.blocks);
         assert_eq!(object.blocks.len(), 1);
     }
@@ -805,80 +756,48 @@ mod test {
         let id = Digest::Blake3Digest32([0u8; 32]);
         let key = SymKey::ChaCha20Key([0u8; 32]);

-        let one_key = BlockContentV0::InternalNode(vec![key.clone()]);
+        let one_key = ChunkContentV0::InternalNode(vec![key.clone()]);
         let one_key_ser = serde_bare::to_vec(&one_key).unwrap();

-        let two_keys = BlockContentV0::InternalNode(vec![key.clone(), key.clone()]);
+        let two_keys = ChunkContentV0::InternalNode(vec![key.clone(), key.clone()]);
         let two_keys_ser = serde_bare::to_vec(&two_keys).unwrap();

-        let max_keys = BlockContentV0::InternalNode(vec![key.clone(); MAX_ARITY_LEAVES]);
+        let max_keys = ChunkContentV0::InternalNode(vec![key.clone(); MAX_ARITY_LEAVES]);
         let max_keys_ser = serde_bare::to_vec(&max_keys).unwrap();

-        let data = BlockContentV0::DataChunk(vec![]);
+        let data = ChunkContentV0::DataChunk(vec![]);
         let data_ser = serde_bare::to_vec(&data).unwrap();

-        let data_full = BlockContentV0::DataChunk(vec![0; MAX_DATA_PAYLOAD_SIZE]);
+        let data_full = ChunkContentV0::DataChunk(vec![0; MAX_DATA_PAYLOAD_SIZE]);
         let data_full_ser = serde_bare::to_vec(&data_full).unwrap();

-        let leaf_empty = Block::new(
-            vec![],
-            ObjectDeps::ObjectIdList(vec![]),
-            Some(2342),
-            data_ser.clone(),
-            None,
-        );
+        let leaf_empty = Block::new(vec![], None, data_ser.clone(), None);
         let leaf_empty_ser = serde_bare::to_vec(&leaf_empty).unwrap();

-        let leaf_full_data = Block::new(
-            vec![],
-            ObjectDeps::ObjectIdList(vec![]),
-            Some(2342),
-            data_full_ser.clone(),
-            None,
-        );
+        let leaf_full_data = Block::new(vec![], None, data_full_ser.clone(), None);
         let leaf_full_data_ser = serde_bare::to_vec(&leaf_full_data).unwrap();

         let root_depsref = Block::new(
             vec![],
-            ObjectDeps::DepListRef(ObjectRef { id: id, key: key }),
-            Some(2342),
+            Some(ObjectRef::from_id_key(id, key.clone())),
             data_ser.clone(),
             None,
         );
         let root_depsref_ser = serde_bare::to_vec(&root_depsref).unwrap();

-        let internal_max = Block::new(
-            vec![id; MAX_ARITY_LEAVES],
-            ObjectDeps::ObjectIdList(vec![]),
-            Some(2342),
-            max_keys_ser.clone(),
-            None,
-        );
+        let internal_max = Block::new(vec![id; MAX_ARITY_LEAVES], None, max_keys_ser.clone(), None);
         let internal_max_ser = serde_bare::to_vec(&internal_max).unwrap();

-        let internal_one = Block::new(
-            vec![id; 1],
-            ObjectDeps::ObjectIdList(vec![]),
-            Some(2342),
-            one_key_ser.clone(),
-            None,
-        );
+        let internal_one = Block::new(vec![id; 1], None, one_key_ser.clone(), None);
         let internal_one_ser = serde_bare::to_vec(&internal_one).unwrap();

-        let internal_two = Block::new(
-            vec![id; 2],
-            ObjectDeps::ObjectIdList(vec![]),
-            Some(2342),
-            two_keys_ser.clone(),
-            None,
-        );
+        let internal_two = Block::new(vec![id; 2], None, two_keys_ser.clone(), None);
         let internal_two_ser = serde_bare::to_vec(&internal_two).unwrap();

         let root_one = Block::new(
             vec![id; 1],
-            ObjectDeps::ObjectIdList(vec![id; 8]),
-            Some(2342),
+            Some(ObjectRef::from_id_key(id, key.clone())),
             one_key_ser.clone(),
             None,
         );

@@ -886,8 +805,7 @@ mod test {
         let root_two = Block::new(
             vec![id; 2],
-            ObjectDeps::ObjectIdList(vec![id; 8]),
-            Some(2342),
+            Some(ObjectRef::from_id_key(id, key)),
             two_keys_ser.clone(),
             None,
         );

@@ -906,7 +824,7 @@ mod test {
         log_debug!(
             "max_data_payload_depth_1: {}",
-            max_block_size - EMPTY_BLOCK_SIZE - DATA_VARINT_EXTRA - MAX_DEPS_SIZE
+            max_block_size - EMPTY_BLOCK_SIZE - DATA_VARINT_EXTRA - MAX_HEADER_SIZE
         );

         log_debug!(

@@ -928,7 +846,7 @@ mod test {
             MAX_DATA_PAYLOAD_SIZE
         );
         let max_arity_root =
-            (max_block_size - EMPTY_BLOCK_SIZE - MAX_DEPS_SIZE - BIG_VARINT_EXTRA * 2)
+            (max_block_size - EMPTY_BLOCK_SIZE - MAX_HEADER_SIZE - BIG_VARINT_EXTRA * 2)
                 / (BLOCK_ID_SIZE + BLOCK_KEY_SIZE);
         log_debug!("max_arity_root: {}", max_arity_root);
         assert_eq!(max_arity_root, MAX_ARITY_ROOT);

p2p-repo/src/repo.rs

@@ -1,7 +1,5 @@
 // Copyright (c) 2022-2024 Niko Bonnieure, Par le Peuple, NextGraph.org developers
 // All rights reserved.
-// This code is partly derived from work written by TG x Thoth from P2Pcollab.
-// Copyright 2022 TG x Thoth
 // Licensed under the Apache License, Version 2.0
 // <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
 // or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,

@@ -9,38 +7,97 @@
 // notice may not be copied, modified, or distributed except
 // according to those terms.
-//! Repository
+//! Repository serde implementation and in-memory helper

+use crate::errors::*;
+use crate::store::*;
 use crate::types::*;
+use std::collections::HashMap;
+use std::collections::HashSet;

 impl RepositoryV0 {
-    pub fn new(
-        id: &PubKey,
-        branches: &Vec<ObjectRef>,
-        allow_ext_requests: bool,
-        metadata: &Vec<u8>,
-    ) -> RepositoryV0 {
+    pub fn new(id: &PubKey, metadata: &Vec<u8>) -> RepositoryV0 {
         RepositoryV0 {
             id: id.clone(),
-            branches: branches.clone(),
-            allow_ext_requests,
             metadata: metadata.clone(),
+            verification_program: vec![],
+            creator: None,
         }
     }
 }

 impl Repository {
-    pub fn new(
-        id: &PubKey,
-        branches: &Vec<ObjectRef>,
-        allow_ext_requests: bool,
-        metadata: &Vec<u8>,
-    ) -> Repository {
-        Repository::V0(RepositoryV0::new(
-            id,
-            branches,
-            allow_ext_requests,
-            metadata,
-        ))
+    pub fn new(id: &PubKey, metadata: &Vec<u8>) -> Repository {
+        Repository::V0(RepositoryV0::new(id, metadata))
     }
 }
+
+pub struct UserInfo {
+    /// List of permissions granted to the user, with optional metadata
+    pub permissions: HashMap<Permission, Vec<u8>>,
+}
+
+impl UserInfo {
+    pub fn has_any_perm(&self, perms: &HashSet<&Permission>) -> Result<(), NgError> {
+        let has_perms: HashSet<&Permission> = self.permissions.keys().collect();
+        if has_perms.intersection(perms).count() > 0 {
+            Ok(())
+        } else {
+            Err(NgError::PermissionDenied)
+        }
+    }
+
+    pub fn has_perm(&self, perm: &Permission) -> Result<&Vec<u8>, NgError> {
+        self.permissions.get(perm).ok_or(NgError::PermissionDenied)
+    }
+}
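has_any_perm succeeds as soon as the user's permission set intersects the required set. A sketch with illustrative Permission variants (the real variant names live in types.rs, whose diff is suppressed below):

    let mut permissions = HashMap::new();
    permissions.insert(Permission::WriteAsync, vec![]); // no metadata attached
    let user = UserInfo { permissions };

    // the commit body would accept any of these
    let required: HashSet<&Permission> =
        HashSet::from([&Permission::Create, &Permission::WriteAsync]);
    assert!(user.has_any_perm(&required).is_ok());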
+/// In-memory Repository representation, with helper functions that access the underlying UserStore and keep a proxy of the values
+pub struct Repo<'a> {
+    /// Repo definition
+    pub repo_def: Repository,
+
+    pub members: HashMap<UserId, UserInfo>,
+
+    store: Box<dyn RepoStore + Send + Sync + 'a>,
+}
+
+impl<'a> Repo<'a> {
+    pub fn new_with_member(
+        id: &PubKey,
+        member: UserId,
+        perms: &[Permission],
+        store: Box<dyn RepoStore + Send + Sync + 'a>,
+    ) -> Self {
+        let mut members = HashMap::new();
+        let permissions = HashMap::from_iter(
+            perms
+                .iter()
+                .map(|p| (*p, vec![]))
+                .collect::<Vec<(Permission, Vec<u8>)>>()
+                .iter()
+                .cloned(),
+        );
+        members.insert(member, UserInfo { permissions });
+        Self {
+            repo_def: Repository::new(id, &vec![]),
+            members,
+            store,
+        }
+    }
+
+    pub fn verify_permission(&self, commit: &Commit) -> Result<(), NgError> {
+        let content = commit.content_v0();
+        let body = commit.load_body(&self.store)?;
+        match self.members.get(&content.author) {
+            Some(info) => return info.has_any_perm(&body.required_permission()),
+            None => {}
+        }
+        Err(NgError::PermissionDenied)
+    }
+
+    pub fn get_store(&self) -> &Box<dyn RepoStore + Send + Sync + 'a> {
+        &self.store
+    }
+}
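Putting the pieces together, a sketch of creating a Repo with a single member and checking a commit against their grants (`commit`, the ids, and the Permission variants are placeholders):

    let store = Box::new(HashMapRepoStore::new());
    let repo = Repo::new_with_member(
        &repo_pubkey,
        member_id,
        &[Permission::Create, Permission::WriteAsync], // illustrative variants
        store,
    );
    // allowed only if the commit author is a member whose permissions
    // intersect body.required_permission()
    repo.verify_permission(&commit)?;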

p2p-repo/src/site.rs

@@ -9,11 +9,9 @@
  * according to those terms.
  */

-use crate::types::{SiteStore, SiteType, SiteV0};
-use p2p_repo::errors::NgError;
-use p2p_repo::types::{BlockRef, PrivKey, SymKey};
-use p2p_repo::utils::{generate_keypair, sign, verify};
+use crate::errors::NgError;
+use crate::types::{BlockRef, PrivKey, SiteStore, SiteType, SiteV0, SymKey};
+use crate::utils::{generate_keypair, sign, verify};

 impl SiteV0 {
     // pub fn site_identity(&self) -> &Identity {

p2p-repo/src/store.rs

@@ -23,7 +23,7 @@ use std::{
     mem::size_of_val,
 };

-pub trait RepoStore {
+pub trait RepoStore: Send + Sync {
     /// Load a block from the store.
     fn get(&self, id: &BlockId) -> Result<Block, StorageError>;
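A minimal in-memory sketch of what the new bound asks of implementors; only `get` is visible in this hunk, so the trait's remaining methods and the exact StorageError variant are assumptions:

    struct MemStore {
        // RwLock keeps the store Send + Sync, as the trait now requires
        blocks: std::sync::RwLock<std::collections::HashMap<BlockId, Block>>,
    }

    impl RepoStore for MemStore {
        fn get(&self, id: &BlockId) -> Result<Block, StorageError> {
            self.blocks
                .read()
                .unwrap()
                .get(id)
                .cloned()
                .ok_or(StorageError::NotFound) // assumed variant
        }
    }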

p2p-repo/src/types.rs

File diff suppressed because it is too large

stores-lmdb/src/lib.rs

@@ -1,5 +1,5 @@
-#[cfg(not(target_arch = "wasm32"))]
-pub mod repo_store;
+//#[cfg(not(target_arch = "wasm32"))]
+//pub mod repo_store;
 #[cfg(not(target_arch = "wasm32"))]
 pub mod kcv_store;
