RandomAccessFile

pull/19/head
Niko PLP 8 months ago
parent 4048efd714
commit f03b6bcc10
  1. p2p-repo/src/block.rs (36 changed lines)
  2. p2p-repo/src/file.rs (1537 changed lines)
  3. p2p-repo/src/lib.rs (2 changed lines)
  4. p2p-repo/src/object.rs (63 changed lines)
  5. p2p-repo/src/store.rs (18 changed lines)
  6. p2p-repo/src/types.rs (81 changed lines)

@@ -40,6 +40,26 @@ impl BlockV0 {
b
}
pub fn new_random_access(
children: Vec<BlockId>,
content: Vec<u8>,
key: Option<SymKey>,
) -> BlockV0 {
let bc = BlockContentV0 {
children,
commit_header: CommitHeaderObject::RandomAccess,
encrypted_content: content,
};
let mut b = BlockV0 {
id: None,
key,
content: BlockContent::V0(bc),
commit_header_key: None,
};
b.id = Some(b.compute_id());
b
}
/// Compute the ID
pub fn compute_id(&self) -> BlockId {
let ser = serde_bare::to_vec(&self.content).unwrap();
@@ -92,6 +112,14 @@ impl Block {
Block::V0(BlockV0::new(children, header_ref, content, key))
}
pub fn new_random_access(
children: Vec<BlockId>,
content: Vec<u8>,
key: Option<SymKey>,
) -> Block {
Block::V0(BlockV0::new_random_access(children, content, key))
}
pub fn size(&self) -> usize {
serde_bare::to_vec(&self).unwrap().len()
}
@@ -128,6 +156,13 @@ impl Block {
}
}
/// Get the content
pub fn content(&self) -> &BlockContent {
match self {
Block::V0(b) => &b.content,
}
}
/// Get the encrypted content
pub fn encrypted_content(&self) -> &Vec<u8> {
match self {
@@ -148,6 +183,7 @@ impl Block {
Block::V0(b) => match b.commit_header_key.as_ref() {
Some(key) => match b.content.commit_header_obj() {
CommitHeaderObject::None => None,
CommitHeaderObject::RandomAccess => None,
_ => Some(CommitHeaderRef {
obj: b.content.commit_header_obj().clone(),
key: key.clone(),
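
A minimal usage sketch of the new constructor (not part of the commit): the module path and argument values are assumptions; only Block::new_random_access and the CommitHeaderObject::RandomAccess marker come from the hunks above.

use crate::types::*; // Block, BlockId, SymKey are defined in p2p-repo/src/types.rs

// Sketch: building a random-access block. Unlike Block::new, no CommitHeaderRef
// is passed; the content carries CommitHeaderObject::RandomAccess and
// commit_header_key stays None.
fn example_random_access_block(children: Vec<BlockId>, chunk: Vec<u8>, key: SymKey) -> Block {
    Block::new_random_access(children, chunk, Some(key))
}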

File diff suppressed because it is too large.

@@ -14,6 +14,8 @@ pub mod block;
pub mod object;
pub mod file;
pub mod commit;
pub mod branch;

@@ -21,21 +21,21 @@ use crate::log::*;
use crate::store::*;
use crate::types::*;
const BLOCK_EXTRA: usize = 12; // 8 is the smallest extra + BLOCK_MAX_DATA_EXTRA
const HEADER_REF_EXTRA: usize = 66;
const HEADER_EMBED_EXTRA: usize = 34;
const CHILD_SIZE: usize = 66;
pub const BLOCK_EXTRA: usize = 12; // 8 is the smallest extra + BLOCK_MAX_DATA_EXTRA
pub const HEADER_REF_EXTRA: usize = 66;
pub const HEADER_EMBED_EXTRA: usize = 34;
pub const CHILD_SIZE: usize = 66;
const BLOCK_ID_SIZE: usize = 33;
pub const BLOCK_ID_SIZE: usize = 33;
/// Size of serialized SymKey
const BLOCK_KEY_SIZE: usize = 33;
pub const BLOCK_KEY_SIZE: usize = 33;
/// Size of serialized Object with deps reference.
/// Varint extra bytes when reaching the maximum value we will ever use in one block
const BIG_VARINT_EXTRA: usize = 2;
pub const BIG_VARINT_EXTRA: usize = 2;
/// Varint extra bytes when reaching the maximum size of data byte arrays.
const DATA_VARINT_EXTRA: usize = 4;
pub const DATA_VARINT_EXTRA: usize = 4;
const BLOCK_MAX_DATA_EXTRA: usize = 4;
pub const BLOCK_MAX_DATA_EXTRA: usize = 4;
#[derive(Debug)]
/// An Object in memory. This is not used to serialize data
@@ -85,7 +85,7 @@ pub enum ObjectCopyError {
}
impl Object {
fn convergence_key(
pub(crate) fn convergence_key(
store_pubkey: &StoreRepo,
store_readcap_secret: &ReadCapSecret,
) -> [u8; blake3::OUT_LEN] {
@@ -128,15 +128,13 @@ impl Object {
fn make_header_v0(
header: CommitHeaderV0,
object_size: usize,
store: &StoreRepo,
store_secret: &ReadCapSecret,
conv_key: &ChaCha20Key,
) -> (ObjectRef, Vec<Block>) {
let header_obj = Object::new(
let header_obj = Object::new_with_convergence_key(
ObjectContent::V0(ObjectContentV0::CommitHeader(CommitHeader::V0(header))),
None,
object_size,
store,
store_secret,
conv_key,
);
let header_ref = ObjectRef {
id: header_obj.id(),
@@ -148,11 +146,10 @@ impl Object {
fn make_header(
header: CommitHeader,
object_size: usize,
store: &StoreRepo,
store_secret: &ReadCapSecret,
conv_key: &ChaCha20Key,
) -> (ObjectRef, Vec<Block>) {
match header {
CommitHeader::V0(v0) => Self::make_header_v0(v0, object_size, store, store_secret),
CommitHeader::V0(v0) => Self::make_header_v0(v0, object_size, conv_key),
}
}
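
In short (sketch only, not from the commit): after the two hunks above, the ChaCha20 convergence key is derived once per object and passed down, instead of each header helper re-deriving it from the store pubkey and read-capability secret.

// Illustrative call flow inside the crate (convergence_key is pub(crate)):
let conv_key = Object::convergence_key(&store_repo, &store_secret);
let obj = Object::new_with_convergence_key(content, header, block_size, &conv_key);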
@@ -300,6 +297,16 @@ impl Object {
block_size: usize,
store: &StoreRepo,
store_secret: &ReadCapSecret,
) -> Object {
let conv_key = Self::convergence_key(store, store_secret);
Self::new_with_convergence_key(content, header, block_size, &conv_key)
}
pub fn new_with_convergence_key(
content: ObjectContent,
mut header: Option<CommitHeader>,
block_size: usize,
conv_key: &ChaCha20Key,
) -> Object {
if header.is_some() && !content.can_have_header() {
panic!(
@@ -321,13 +328,11 @@ impl Object {
let mut blocks: Vec<BlockId> = vec![];
let mut block_contents: HashMap<BlockId, Block> = HashMap::new();
let mut already_existing: HashMap<BlockKey, BlockId> = HashMap::new();
let conv_key = Self::convergence_key(store, store_secret);
let header_prepare = match &header {
None => (0 as usize, None, vec![]),
Some(h) => {
let block_info =
Self::make_header(h.clone(), valid_block_size, store, store_secret);
let block_info = Self::make_header(h.clone(), valid_block_size, conv_key);
if block_info.1.len() == 1 {
(
block_info.1[0].encrypted_content().len(),
@@ -367,7 +372,7 @@ impl Object {
Self::add_block(
Self::make_block(
content_ser,
&conv_key,
conv_key,
vec![],
header_ref,
&mut already_existing,
@@ -386,7 +391,7 @@ impl Object {
let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec());
let chunk_ser = serde_bare::to_vec(&data_chunk).unwrap();
Self::add_block(
Self::make_block(chunk_ser, &conv_key, vec![], None, &mut already_existing),
Self::make_block(chunk_ser, conv_key, vec![], None, &mut already_existing),
&mut blocks,
&mut block_contents,
&mut already_existing,
@@ -401,7 +406,7 @@ impl Object {
&mut block_contents,
&mut already_existing,
blocks.as_slice(),
&conv_key,
conv_key,
header_prepare.0,
header_prepare.1,
header_prepare.2,
@@ -489,7 +494,9 @@ impl Object {
let header = match root.header_ref() {
Some(header_ref) => match header_ref.obj {
CommitHeaderObject::None => panic!("shouldn't happen"),
CommitHeaderObject::None | CommitHeaderObject::RandomAccess => {
panic!("shouldn't happen")
}
CommitHeaderObject::Id(id) => {
let obj = Object::load(id, Some(header_ref.key.clone()), store)?;
match obj.content()? {
@@ -539,6 +546,7 @@ impl Object {
}
Ok(())
}
#[cfg(test)]
pub fn save_in_test(
&mut self,
@@ -891,6 +899,7 @@ impl fmt::Display for ObjectContent {
ObjectContentV0::Signature(_) => "Signature",
ObjectContentV0::Certificate(_) => "Certificate",
ObjectContentV0::File(_) => "File",
ObjectContentV0::RandomAccessFileMeta(_) => "RandomAccessFileMeta",
},
),
};
@@ -1300,7 +1309,7 @@ mod test {
const MAX_ARITY_LEAVES: usize = 61;
const MAX_DATA_PAYLOAD_SIZE: usize = 4084;
////// 55GB of data!
////// 52GB of data!
let data_size = MAX_ARITY_LEAVES
* MAX_ARITY_LEAVES
* MAX_ARITY_LEAVES
@@ -1309,7 +1318,7 @@ mod test {
- 12;
let (store_repo, store_secret) = StoreRepo::dummy_public_v0();
log_debug!("creating 55GB of data");
log_debug!("creating 52GB of data");
let content = ObjectContent::V0(ObjectContentV0::File(File::V0(FileV0 {
content_type: "".into(),
metadata: vec![],
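
The updated test comment ("52GB" instead of "55GB") matches the arithmetic, assuming the factors elided from this hunk are a fourth MAX_ARITY_LEAVES and MAX_DATA_PAYLOAD_SIZE:

// 61^4 leaf chunks of 4084 bytes, minus the 12-byte correction in the test:
let data_size = 61usize.pow(4) * 4084 - 12;       // 56_546_414_632 bytes (~56.5 GB decimal)
assert_eq!(data_size / (1024 * 1024 * 1024), 52); // ~52 GiB, hence "52GB of data"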

@@ -13,9 +13,9 @@
use futures::StreamExt;
use crate::log::*;
use crate::types::*;
use crate::utils::Receiver;
use std::sync::{Arc, RwLock};
use std::{
cmp::{max, min},
@@ -24,14 +24,17 @@ use std::{
};
pub trait RepoStore: Send + Sync {
/// Load a block from the store.
/// Load a block from the storage.
fn get(&self, id: &BlockId) -> Result<Block, StorageError>;
/// Save a block to the store.
/// Save a block to the storage.
fn put(&self, block: &Block) -> Result<BlockId, StorageError>;
/// Delete a block from the store.
/// Delete a block from the storage.
fn del(&self, id: &BlockId) -> Result<(Block, usize), StorageError>;
/// number of Blocks in the storage
fn len(&self) -> Result<usize, StorageError>;
}
#[derive(Debug, PartialEq)]
@@ -91,7 +94,7 @@ const MAX_FACTOR: usize = 256;
/// Returns a valid/optimal value size for the entries of the storage backend.
pub fn store_valid_value_size(size: usize) -> usize {
min(
max(1, (size as f32 / DISK_BLOCK_SIZE as f32).ceil() as usize),
max(1, (size + DISK_BLOCK_SIZE - 1) / DISK_BLOCK_SIZE),
MAX_FACTOR,
) * DISK_BLOCK_SIZE
}
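
The replaced line swaps a float-based ceil for integer ceiling division: (size + DISK_BLOCK_SIZE - 1) / DISK_BLOCK_SIZE rounds up exactly for every usize, with no f32 conversion. A standalone sketch of the resulting function; DISK_BLOCK_SIZE = 4096 is assumed here for illustration only.

use std::cmp::{max, min};

const DISK_BLOCK_SIZE: usize = 4096; // assumed value, for illustration
const MAX_FACTOR: usize = 256;

/// Rounds `size` up to a whole number of disk blocks, clamped to [1, MAX_FACTOR] blocks.
pub fn store_valid_value_size(size: usize) -> usize {
    min(
        max(1, (size + DISK_BLOCK_SIZE - 1) / DISK_BLOCK_SIZE),
        MAX_FACTOR,
    ) * DISK_BLOCK_SIZE
}

// e.g. store_valid_value_size(0) == 4096, store_valid_value_size(4097) == 8192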
@@ -151,8 +154,13 @@ impl RepoStore for HashMapRepoStore {
}
}
fn len(&self) -> Result<usize, StorageError> {
Ok(self.get_len())
}
fn put(&self, block: &Block) -> Result<BlockId, StorageError> {
let id = block.id();
//log_debug!("PUTTING {}", id);
let mut b = block.clone();
b.set_key(None);
self.blocks.write().unwrap().insert(id, b);
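
Two notes on this hunk: the RepoStore trait gains a len() method (backed by get_len() on HashMapRepoStore), and put() strips the block's key with set_key(None) before inserting, so keys are never kept in storage. A minimal sketch of calling the new method through the trait; the helper function is hypothetical.

// Hypothetical helper: works with any RepoStore implementation.
fn block_count(store: &dyn RepoStore) -> Result<usize, StorageError> {
    store.len()
}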

@@ -665,6 +665,7 @@ pub enum CommitHeaderObject {
Id(ObjectId),
EncryptedContent(Vec<u8>),
None,
RandomAccess,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
@ -706,7 +707,7 @@ pub struct BlockContentV0 {
/// is empty if ObjectContent fits in one block or this block is a leaf. in both cases, encrypted_content is then not empty
pub children: Vec<BlockId>,
/// contains encrypted ChunkContentV0 (entirety, when fitting, or chunks of ObjectContentV0, in DataChunk) used for leafs of the Merkle tree,
/// contains encrypted ChunkContentV0 (entirety, when fitting, or chunks of ObjectContentV0, in DataChunk) used for leaves of the Merkle tree,
/// or to store the keys of children (in InternalNode)
///
/// Encrypted using convergent encryption with ChaCha20:
@@ -1664,6 +1665,83 @@ pub enum File {
V0(FileV0),
}
/// Random Access File Object
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct RandomAccessFileMetaV0 {
pub content_type: String,
#[serde(with = "serde_bytes")]
pub metadata: Vec<u8>,
pub total_size: u64,
pub chunk_size: u32,
pub arity: u16,
pub depth: u8,
}
/// A Random Access file stored in an Object
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum RandomAccessFileMeta {
V0(RandomAccessFileMetaV0),
}
impl RandomAccessFileMeta {
pub fn arity(&self) -> u16 {
match self {
Self::V0(v0) => v0.arity,
}
}
pub fn depth(&self) -> u8 {
match self {
Self::V0(v0) => v0.depth,
}
}
pub fn set_depth(&mut self, depth: u8) {
match self {
Self::V0(v0) => {
v0.depth = depth;
}
}
}
pub fn chunk_size(&self) -> u32 {
match self {
Self::V0(v0) => v0.chunk_size,
}
}
pub fn total_size(&self) -> u64 {
match self {
Self::V0(v0) => v0.total_size,
}
}
pub fn set_total_size(&mut self, size: u64) {
match self {
Self::V0(v0) => {
v0.total_size = size;
}
}
}
pub fn metadata(&self) -> &Vec<u8> {
match self {
Self::V0(v0) => &v0.metadata,
}
}
pub fn content_type(&self) -> &String {
match self {
Self::V0(v0) => &v0.content_type,
}
}
}
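
A short sketch of how the new metadata type might be filled in and serialized; the values are illustrative, and the real writer lives in the suppressed p2p-repo/src/file.rs diff.

let mut meta = RandomAccessFileMeta::V0(RandomAccessFileMetaV0 {
    content_type: "image/png".to_string(),
    metadata: vec![],
    total_size: 0, // not known until all chunks are written
    chunk_size: 4096,
    arity: 61,
    depth: 0,
});
meta.set_total_size(1_048_576);
meta.set_depth(2);
assert_eq!(meta.arity(), 61);
// Serialized with serde_bare like the other p2p-repo types, then stored
// encrypted in an Object (see the new ObjectContentV0 variant below).
let _ser = serde_bare::to_vec(&meta).unwrap();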
/// Immutable data stored encrypted in a Merkle tree V0
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum ObjectContentV0 {
@@ -1674,6 +1752,7 @@ pub enum ObjectContentV0 {
Signature(Signature),
Certificate(Certificate),
File(File),
RandomAccessFileMeta(RandomAccessFileMeta),
}
/// Immutable data stored encrypted in a Merkle tree
