snapshot and async signature

master
Niko PLP 1 month ago
parent 495340dabe
commit 17e1eb95e3
  1. 90
      Cargo.lock
  2. 6
      nextgraph/src/local_broker.rs
  3. 2
      ng-app/index-web.html
  4. 91
      ng-app/src-tauri/src/lib.rs
  5. 3
      ng-app/src/api.ts
  6. 4
      ng-app/src/apps/ContainerView.svelte
  7. 35
      ng-app/src/lib/FullLayout.svelte
  8. 15
      ng-app/src/lib/panes/History.svelte
  9. 152
      ng-app/src/lib/popups/Signature.svelte
  10. 6
      ng-app/src/locales/en.json
  11. 42
      ng-app/src/store.ts
  12. 6
      ng-app/src/styles.css
  13. 10
      ng-app/src/tab.ts
  14. 1
      ng-broker/src/server_broker.rs
  15. 26
      ng-net/src/app_protocol.rs
  16. 26
      ng-net/src/types.rs
  17. 2
      ng-repo/Cargo.toml
  18. 9
      ng-repo/src/commit.rs
  19. 10
      ng-repo/src/errors.rs
  20. 5
      ng-repo/src/event.rs
  21. 2
      ng-repo/src/lib.rs
  22. 4
      ng-repo/src/object.rs
  23. 24
      ng-repo/src/repo.rs
  24. 4
      ng-repo/src/store.rs
  25. 82
      ng-repo/src/types.rs
  26. 97
      ng-sdk-js/src/lib.rs
  27. 65
      ng-verifier/src/commits/mod.rs
  28. 121
      ng-verifier/src/commits/snapshot.rs
  29. 342
      ng-verifier/src/request_processor.rs
  30. 12
      ng-verifier/src/rocksdb_user_storage.rs
  31. 1
      ng-verifier/src/types.rs
  32. 36
      ng-verifier/src/user_storage/repo.rs
  33. 27
      ng-verifier/src/user_storage/storage.rs
  34. 217
      ng-verifier/src/verifier.rs
  35. 24
      ng-wallet/src/types.rs

90
Cargo.lock generated

@ -12,15 +12,6 @@ dependencies = [
"psl-types", "psl-types",
] ]
[[package]]
name = "addr2line"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
dependencies = [
"gimli",
]
[[package]] [[package]]
name = "adler" name = "adler"
version = "1.0.2" version = "1.0.2"
@ -493,21 +484,6 @@ dependencies = [
"uuid", "uuid",
] ]
[[package]]
name = "backtrace"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.13.1" version = "0.13.1"
@ -1683,28 +1659,6 @@ dependencies = [
"zune-inflate", "zune-inflate",
] ]
[[package]]
name = "failure"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86"
dependencies = [
"backtrace",
"failure_derive",
]
[[package]]
name = "failure_derive"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"synstructure",
]
[[package]] [[package]]
name = "fastrand" name = "fastrand"
version = "1.9.0" version = "1.9.0"
@ -2143,12 +2097,6 @@ dependencies = [
"weezl", "weezl",
] ]
[[package]]
name = "gimli"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
[[package]] [[package]]
name = "gio" name = "gio"
version = "0.16.7" version = "0.16.7"
@ -3854,15 +3802,6 @@ dependencies = [
"objc", "objc",
] ]
[[package]]
name = "object"
version = "0.32.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "once_cell" name = "once_cell"
version = "1.18.0" version = "1.18.0"
@ -4817,12 +4756,6 @@ dependencies = [
"num-traits", "num-traits",
] ]
[[package]]
name = "rustc-demangle"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]] [[package]]
name = "rustc-hash" name = "rustc-hash"
version = "1.1.0" version = "1.1.0"
@ -5481,18 +5414,6 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]] [[package]]
name = "sys-locale" name = "sys-locale"
version = "0.3.1" version = "0.3.1"
@ -5893,11 +5814,9 @@ dependencies = [
[[package]] [[package]]
name = "threshold_crypto" name = "threshold_crypto"
version = "0.4.0" version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/nextgraph-org/threshold_crypto.git?branch=master#b60552e4d42f67058455779eed476a76986b5478"
checksum = "7f708705bce37e765c37a95a8e0221a327c880d5a5a148d522552e8daa85787a"
dependencies = [ dependencies = [
"byteorder", "byteorder",
"failure",
"ff", "ff",
"group", "group",
"hex_fmt", "hex_fmt",
@ -5906,6 +5825,7 @@ dependencies = [
"rand 0.7.3", "rand 0.7.3",
"rand_chacha 0.2.2", "rand_chacha 0.2.2",
"serde", "serde",
"thiserror",
"tiny-keccak", "tiny-keccak",
"zeroize", "zeroize",
] ]
@ -6287,12 +6207,6 @@ version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "unicode-xid"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]] [[package]]
name = "unique_id" name = "unique_id"
version = "0.1.5" version = "0.1.5"

@ -1312,6 +1312,11 @@ impl LocalBroker {
// user_id.to_hash_string(), // user_id.to_hash_string(),
// key // key
// ); // );
let site = opened_wallet.wallet.site(&user_id)?;
let core = site.cores[0]; //TODO: cycle the other cores if failure to connect (failover)
let brokers = opened_wallet.wallet.broker(core.0)?;
key_material.zeroize(); key_material.zeroize();
let mut verifier = Verifier::new( let mut verifier = Verifier::new(
VerifierConfig { VerifierConfig {
@ -1325,6 +1330,7 @@ impl LocalBroker {
private_store_id: credentials.2, private_store_id: credentials.2,
protected_store_id: credentials.3, protected_store_id: credentials.3,
public_store_id: credentials.4, public_store_id: credentials.4,
locator: BrokerInfoV0::vec_into_locator(brokers),
}, },
block_storage, block_storage,
)?; )?;

@ -102,7 +102,7 @@
<div id="error-no-wasm" class="error-no-wasm-hidden"> <div id="error-no-wasm" class="error-no-wasm-hidden">
Your browser is too old and does not support NextGraph. <br/>Please upgrade to a newer version of this browser,<br/> try with another browser,<br/> <br/>or <a href="https://nextgraph.org/download">install our native apps on <br/> Your browser is too old and does not support NextGraph. <br/>Please upgrade to a newer version of this browser,<br/> try with another browser,<br/> <br/>or <a href="https://nextgraph.org/download">install our native apps on <br/>
Linux, macOS, Windows desktops and laptops,<br/> and iOS, Android mobiles.</a> Linux, macOS, Windows desktops and laptops,<br/> and iOS, Android mobiles.</a><br/><br/>If you are using jshelter or another javascript protection mechanism, please deactivate it as we need access to the WebWorker facility of your browser.
</div> </div>
<noscript style="display:grid;"> <noscript style="display:grid;">
NextGraph cannot load as Javascript is deactivated.<br/> NextGraph cannot load as Javascript is deactivated.<br/>

@ -641,6 +641,94 @@ async fn app_request(request: AppRequest) -> Result<AppResponse, String> {
.map_err(|e| e.to_string()) .map_err(|e| e.to_string())
} }
#[tauri::command(rename_all = "snake_case")]
async fn signature_status(
session_id: u64,
nuri: Option<String>,
) -> Result<Vec<(String, Option<String>, bool)>, String> {
let nuri = if nuri.is_some() {
NuriV0::new_from(&nuri.unwrap()).map_err(|e| e.to_string())?
} else {
NuriV0::new_private_store_target()
};
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::new_signature_status(),
nuri,
payload: None,
session_id,
});
let res = nextgraph::local_broker::app_request(request)
.await
.map_err(|e: NgError| e.to_string())?;
let AppResponse::V0(res) = res;
//log_debug!("{:?}", res);
match res {
AppResponseV0::SignatureStatus(s) => Ok(s),
_ => Err("invalid response".to_string()),
}
}
#[tauri::command(rename_all = "snake_case")]
async fn signed_snapshot_request(session_id: u64, nuri: Option<String>) -> Result<bool, String> {
let nuri = if nuri.is_some() {
NuriV0::new_from(&nuri.unwrap()).map_err(|e| e.to_string())?
} else {
NuriV0::new_private_store_target()
};
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::new_signed_snapshot_request(),
nuri,
payload: None,
session_id,
});
let res = nextgraph::local_broker::app_request(request)
.await
.map_err(|e: NgError| e.to_string())?;
let AppResponse::V0(res) = res;
//log_debug!("{:?}", res);
match res {
AppResponseV0::True => Ok(true),
AppResponseV0::False => Ok(false),
AppResponseV0::Error(e) => Err(e),
_ => Err("invalid response".to_string()),
}
}
#[tauri::command(rename_all = "snake_case")]
async fn signature_request(session_id: u64, nuri: Option<String>) -> Result<bool, String> {
let nuri = if nuri.is_some() {
NuriV0::new_from(&nuri.unwrap()).map_err(|e| e.to_string())?
} else {
NuriV0::new_private_store_target()
};
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::new_signature_request(),
nuri,
payload: None,
session_id,
});
let res = nextgraph::local_broker::app_request(request)
.await
.map_err(|e: NgError| e.to_string())?;
let AppResponse::V0(res) = res;
//log_debug!("{:?}", res);
match res {
AppResponseV0::True => Ok(true),
AppResponseV0::False => Ok(false),
AppResponseV0::Error(e) => Err(e),
_ => Err("invalid response".to_string()),
}
}
#[tauri::command(rename_all = "snake_case")] #[tauri::command(rename_all = "snake_case")]
async fn doc_create( async fn doc_create(
session_id: u64, session_id: u64,
@ -932,6 +1020,9 @@ impl AppBuilder {
sparql_query, sparql_query,
sparql_update, sparql_update,
branch_history, branch_history,
signature_status,
signature_request,
signed_snapshot_request,
]) ])
.run(tauri::generate_context!()) .run(tauri::generate_context!())
.expect("error while running tauri application"); .expect("error while running tauri application");

@ -50,6 +50,9 @@ const mapping = {
"doc_fetch_repo_subscribe": ["repo_o"], "doc_fetch_repo_subscribe": ["repo_o"],
"branch_history": ["session_id", "nuri"], "branch_history": ["session_id", "nuri"],
"file_save_to_downloads": ["session_id", "reference", "filename", "branch_nuri"], "file_save_to_downloads": ["session_id", "reference", "filename", "branch_nuri"],
"signature_status": ["session_id", "nuri"],
"signed_snapshot_request": ["session_id", "nuri"],
"signature_request": ["session_id", "nuri"],
} }

@ -15,7 +15,7 @@
} from "../store"; } from "../store";
import { link } from "svelte-spa-router"; import { link } from "svelte-spa-router";
import { Button, Progressbar, Spinner, Alert } from "flowbite-svelte"; import { Button, Progressbar, Spinner, Alert } from "flowbite-svelte";
import{ PencilSquare } from "svelte-heros-v2"; import{ PlusCircle } from "svelte-heros-v2";
import { t } from "svelte-i18n"; import { t } from "svelte-i18n";
import { import {
in_memory_discrete, open_viewer, set_viewer, set_editor, set_view_or_edit, cur_tab_branch_class, cur_tab_doc_can_edit, cur_tab in_memory_discrete, open_viewer, set_viewer, set_editor, set_view_or_edit, cur_tab_branch_class, cur_tab_doc_can_edit, cur_tab
@ -56,7 +56,7 @@
on:keypress={create} on:keypress={create}
class="select-none ml-0 mt-2 mb-10 text-white bg-primary-700 hover:bg-primary-700/90 focus:ring-4 focus:ring-primary-500/50 rounded-lg text-base p-2 text-center inline-flex items-center dark:focus:ring-primary-700/55" class="select-none ml-0 mt-2 mb-10 text-white bg-primary-700 hover:bg-primary-700/90 focus:ring-4 focus:ring-primary-500/50 rounded-lg text-base p-2 text-center inline-flex items-center dark:focus:ring-primary-700/55"
> >
<PencilSquare tabindex="-1" class="mr-2 focus:outline-none" /> <PlusCircle tabindex="-1" class="mr-2 focus:outline-none" />
{$t("doc.create")} {$t("doc.create")}
</button> </button>
{/if} {/if}

@ -33,6 +33,9 @@
import PaneHeader from "./components/PaneHeader.svelte"; import PaneHeader from "./components/PaneHeader.svelte";
import BranchIcon from "./icons/BranchIcon.svelte"; import BranchIcon from "./icons/BranchIcon.svelte";
import Message from "./components/Message.svelte"; import Message from "./components/Message.svelte";
import Signature from "./popups/Signature.svelte";
// @ts-ignore // @ts-ignore
import { t } from "svelte-i18n"; import { t } from "svelte-i18n";
import { onMount, onDestroy, tick } from "svelte"; import { onMount, onDestroy, tick } from "svelte";
@ -40,7 +43,8 @@
available_editors, available_viewers, set_editor, set_viewer, set_view_or_edit, toggle_live_edit, available_editors, available_viewers, set_editor, set_viewer, set_view_or_edit, toggle_live_edit,
has_editor_chat, all_files_count, all_comments_count, hideMenu, show_modal_menu, show_modal_create, has_editor_chat, all_files_count, all_comments_count, hideMenu, show_modal_menu, show_modal_create,
cur_tab_branch_nuri, cur_tab_doc_can_edit, cur_tab_doc_is_member, cur_tab_right_pane, cur_tab_folders_pane, cur_tab_branch_nuri, cur_tab_doc_can_edit, cur_tab_doc_is_member, cur_tab_right_pane, cur_tab_folders_pane,
cur_tab_toc_pane, cur_tab_show_menu, cur_tab_branch_has_discrete, cur_tab_graph_or_discrete, cur_tab_view_or_edit, show_spinner } from "../tab"; cur_tab_toc_pane, cur_tab_show_menu, cur_tab_branch_has_discrete, cur_tab_graph_or_discrete, cur_tab_view_or_edit, show_spinner,
in_private_store, show_doc_popup, cur_doc_popup, open_doc_popup } from "../tab";
import { import {
active_session, redirect_after_login, toasts, check_has_camera, toast_error, active_session, redirect_after_login, toasts, check_has_camera, toast_error,
reset_toasts, reset_toasts,
@ -390,6 +394,8 @@
} }
return ct; return ct;
}); });
} else {
reset_toasts();
} }
}); });
}); });
@ -414,8 +420,8 @@
}; };
const openAction = (action:string) => { const openAction = (action:string) => {
// TODO
hideMenu(); hideMenu();
if (doc_popups[action]) open_doc_popup(action);
} }
const openPane = (pane:string) => { const openPane = (pane:string) => {
@ -551,6 +557,10 @@
"mc":Sparkles, "mc":Sparkles,
}; };
const doc_popups = {
"signature": Signature,
}
let destination = "store"; let destination = "store";
$: destination = $cur_tab_branch_nuri === "" ? "mc" : destination == "mc" ? "store" : destination; $: destination = $cur_tab_branch_nuri === "" ? "mc" : destination == "mc" ? "store" : destination;
@ -766,6 +776,7 @@
<Icon tabindex="-1" class="w-7 h-7 text-gray-700 focus:outline-none dark:text-white" variation="outline" color="currentColor" icon={pane_items["files"]} /> <Icon tabindex="-1" class="w-7 h-7 text-gray-700 focus:outline-none dark:text-white" variation="outline" color="currentColor" icon={pane_items["files"]} />
<span class="ml-3">{$t("doc.menu.items.files.label")} {$all_files_count}</span> <span class="ml-3">{$t("doc.menu.items.files.label")} {$all_files_count}</span>
</MenuItem> </MenuItem>
{#if !$in_private_store}
<div style="padding:0;" bind:this={shareMenu}></div> <div style="padding:0;" bind:this={shareMenu}></div>
<MenuItem title={$t("doc.menu.items.share.desc")} dropdown={open_share} clickable={ () => { open_share = !open_share; scrollToMenuShare(); } }> <MenuItem title={$t("doc.menu.items.share.desc")} dropdown={open_share} clickable={ () => { open_share = !open_share; scrollToMenuShare(); } }>
<Share <Share
@ -782,6 +793,12 @@
</MenuItem> </MenuItem>
{/each} {/each}
{/if} {/if}
{:else}
<MenuItem title={$t(`doc.menu.items.download.desc`)} clickable={ () => openShare("download") }>
<Icon tabindex="-1" class="w-7 h-7 text-gray-700 focus:outline-none dark:text-white " variation="outline" color="currentColor" icon={DocumentArrowDown} />
<span class="ml-3">{$t(`doc.menu.items.download.label`)}</span>
</MenuItem>
{/if}
<MenuItem title={$t("doc.menu.items.comments.desc")} selected={$cur_tab_right_pane == "comments"} clickable={ ()=> openPane("comments") }> <MenuItem title={$t("doc.menu.items.comments.desc")} selected={$cur_tab_right_pane == "comments"} clickable={ ()=> openPane("comments") }>
<Icon tabindex="-1" class="w-7 h-7 text-gray-700 focus:outline-none dark:text-white" variation="outline" color="currentColor" icon={pane_items["comments"]} /> <Icon tabindex="-1" class="w-7 h-7 text-gray-700 focus:outline-none dark:text-white" variation="outline" color="currentColor" icon={pane_items["comments"]} />
@ -836,7 +853,7 @@
/> />
<span class="ml-3">{$t("doc.menu.items.notifs.label")}</span> <span class="ml-3">{$t("doc.menu.items.notifs.label")}</span>
</MenuItem> </MenuItem>
{#if $cur_tab_doc_is_member} {#if $cur_tab_doc_is_member && !$in_private_store}
<MenuItem title={$t("doc.menu.items.permissions.desc")} clickable={ ()=> openAction("permissions") }> <MenuItem title={$t("doc.menu.items.permissions.desc")} clickable={ ()=> openAction("permissions") }>
<LockOpen <LockOpen
tabindex="-1" tabindex="-1"
@ -862,10 +879,12 @@
</MenuItem> </MenuItem>
{#if open_tools } {#if open_tools }
{#each tools_items as tool} {#each tools_items as tool}
{#if !$in_private_store || tool.n !== "signature" }
<MenuItem title={$t(`doc.menu.items.${tool.n}.desc`)} extraClass="submenu" clickable={ () => openAction(tool.n) }> <MenuItem title={$t(`doc.menu.items.${tool.n}.desc`)} extraClass="submenu" clickable={ () => openAction(tool.n) }>
<Icon tabindex="-1" class="w-7 h-7 text-gray-700 focus:outline-none dark:text-white " variation="outline" color="currentColor" icon={tool.i} /> <Icon tabindex="-1" class="w-7 h-7 text-gray-700 focus:outline-none dark:text-white " variation="outline" color="currentColor" icon={tool.i} />
<span class="ml-3">{$t(`doc.menu.items.${tool.n}.label`)}</span> <span class="ml-3">{$t(`doc.menu.items.${tool.n}.label`)}</span>
</MenuItem> </MenuItem>
{/if}
{/each} {/each}
{/if} {/if}
{/if} {/if}
@ -916,6 +935,16 @@
<Spinner className="w-10 h-10"/> <Spinner className="w-10 h-10"/>
</div> </div>
</Modal> </Modal>
<Modal class="document-popup"
outsideclose
bind:open={$show_doc_popup}
size = 'xs'
placement = 'center'
defaultClass="bg-white dark:bg-gray-800 text-gray-800 dark:text-gray-400 rounded-lg border-gray-200 dark:border-gray-700 divide-gray-200 dark:divide-gray-700 shadow-md relative flex flex-col mx-auto w-full"
backdropClass="bg-gray-900 bg-opacity-50 dark:bg-opacity-80 popup-bg-modal"
>
<svelte:component this={doc_popups[$cur_doc_popup]}/>
</Modal>
<Modal class="menu-modal" <Modal class="menu-modal"
outsideclose outsideclose
bind:open={$show_modal_create} bind:open={$show_modal_create}

@ -21,7 +21,7 @@
Cloud, Cloud,
DocumentPlus, DocumentPlus,
DocumentMinus, DocumentMinus,
CircleStack, Camera,
Funnel, Funnel,
FingerPrint, FingerPrint,
Key, Key,
@ -83,6 +83,17 @@
for (var h; h = b.history.commits.pop(); ) { for (var h; h = b.history.commits.pop(); ) {
//console.log(h); //console.log(h);
history.unshift(h); history.unshift(h);
if (h[1].async_sig) {
for (let hh of history) {
const index = h[1].async_sig[1].indexOf(hh[0]);
if (index > -1) {
h[1].async_sig[1].splice(index, 1);
hh[1].final_consistency = false;
hh[1].signature = h[1].async_sig[0];
}
if (h[1].async_sig[1].length == 0) break;
}
}
history = history; history = history;
gitgraph.commit({ gitgraph.commit({
hash: h[0], hash: h[0],
@ -115,7 +126,7 @@
"TransactionBoth": Sun, "TransactionBoth": Sun,
"FileAdd": DocumentPlus, "FileAdd": DocumentPlus,
"FileRemove": DocumentMinus, "FileRemove": DocumentMinus,
"Snapshot": CircleStack, "Snapshot": Camera,
"Compact": Funnel, "Compact": Funnel,
"AsyncSignature": FingerPrint, "AsyncSignature": FingerPrint,
"SyncSignature": FingerPrint, "SyncSignature": FingerPrint,

@ -0,0 +1,152 @@
<!--
// Copyright (c) 2022-2024 Niko Bonnieure, Par le Peuple, NextGraph.org developers
// All rights reserved.
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
-->
<script lang="ts">
import {
branch_subscribe,
active_session,
toast_error,
toast_success,
display_error,
online,
} from "../../store";
import {
cur_tab,
show_doc_popup
} from "../../tab";
import { get } from "svelte/store";
import { onMount, onDestroy, tick } from "svelte";
import ng from "../../api";
import { t } from "svelte-i18n";
import {
ShieldExclamation,
ShieldCheck,
Camera
} from "svelte-heros-v2";
import {
Toggle,
Button
} from "flowbite-svelte";
let is_tauri = import.meta.env.TAURI_PLATFORM;
let heads = [];
onMount(async ()=>{
heads = await ng.signature_status($active_session.session_id, "did:ng:"+$cur_tab.branch.nuri+":"+$cur_tab.store.overlay);
});
let snapshot = false;
let force_snapshot = false;
let can_sign = false;
let has_signatures = false;
let hide_snapshot = false;
$: force_snapshot = heads.every(h => h[1]) && heads.length && !heads[0][2];
$: can_sign = force_snapshot || !heads[0]?.[2] ;
$: has_signatures = heads.some(h => h[1]);
let cur_link;
function signed_commit_link(head) {
return `did:ng:${$cur_tab.branch.nuri}:${$cur_tab.store.overlay}:${head[1]}:${$cur_tab.store.has_outer}`
}
async function sign() {
if (snapshot) await sign_snapshot();
else {
try {
let immediate = await ng.signature_request($active_session.session_id, "did:ng:"+$cur_tab.branch.nuri+":"+$cur_tab.store.overlay);
if (immediate) {
heads = await ng.signature_status($active_session.session_id, "did:ng:"+$cur_tab.branch.nuri+":"+$cur_tab.store.overlay);
cur_link=signed_commit_link(heads[0]);
hide_snapshot = true;
toast_success($t("doc.signature_is_ready"));
} else {
$show_doc_popup = false;
toast_success($t("doc.signature_is_on_its_way"));
}
} catch (e) {
toast_error(display_error(e));
}
}
}
async function sign_snapshot() {
try {
let immediate = await ng.signed_snapshot_request($active_session.session_id, "did:ng:"+$cur_tab.branch.nuri+":"+$cur_tab.store.overlay);
if (immediate) {
heads = await ng.signature_status($active_session.session_id, "did:ng:"+$cur_tab.branch.nuri+":"+$cur_tab.store.overlay);
} else {
$show_doc_popup = false;
toast_success($t("doc.signed_snapshot_is_on_its_way"));
}
} catch (e) {
toast_error(display_error(e));
}
}
</script>
<div class="flex flex-col">
<span class="font-bold text-xl">Signature</span>
Current heads :
{#each heads as head}
{#if head[1]}
<div style="font-family: monospace; font: Courier; font-size:16px;" class="flex text-green-600 clickable my-2"
on:click={()=>cur_link=signed_commit_link(head)} on:keypress={()=>cur_link=signed_commit_link(head)} tabindex="0" role="button">
<ShieldCheck tabindex="-1" class="w-5 h-5 mr-2" />
{head[0].substring(0,7)}
</div>
{:else}
<div style="font-family: monospace; font: Courier; font-size:16px;" class="flex my-2">
<ShieldExclamation tabindex="-1" class="w-5 h-5 mr-2" />
{head[0].substring(0,7)}
</div>
{/if}
{/each}
{#if !hide_snapshot}
{#if force_snapshot}
<Button
disabled={!$online && !is_tauri}
on:click|once={sign_snapshot}
on:keypress|once={sign_snapshot}
class="select-none mt-2 mb-2 text-white bg-primary-700 hover:bg-primary-700/90 focus:ring-4 focus:ring-primary-500/50 rounded-lg text-base p-2 text-center inline-flex items-center dark:focus:ring-primary-700/55"
>
<ShieldCheck tabindex="-1" class="mr-2 focus:outline-none" />
{$t("doc.sign_snapshot")}
</Button>
<span class="mb-2">or click on one of the signed heads to get its link.</span>
{:else if can_sign}
<button
on:click|once={sign}
on:keypress|once={sign}
class="shrink select-none mt-2 mb-3 text-white bg-primary-700 hover:bg-primary-700/90 focus:ring-4 focus:ring-primary-500/50 rounded-lg text-base p-2 text-center inline-flex items-center dark:focus:ring-primary-700/55"
>
<ShieldCheck tabindex="-1" class="mr-2 focus:outline-none" />
{$t("doc.sign_heads")}
</button>
<Toggle
disabled={!$online && !is_tauri}
class="clickable mb-3"
bind:checked={ snapshot }
><span class="text-gray-700 text-base">{$t("doc.take_snapshot")}</span>
</Toggle>
{#if has_signatures}<span>or click on one of the signed heads to get its link</span>{/if}
{:else}
<div class="flex mt-3"><Camera tabindex="-1" class="w-6 h-6 mr-3 text-green-600"/><span class="text-green-600">A signed snapshot is currently at the head.</span></div>
<span>Here is its link that you can share.<br/>For now this link is only usable with the CLI, by running the following command :<br/><br/></span>
<span style="font-family: monospace; font: Courier; font-size:16px;" class="break-all">ngcli get {signed_commit_link(heads[0])}</span>
{/if}
{/if}
{#if (force_snapshot || can_sign) && cur_link }
<span class="mt-3">For now the link is only usable with the CLI, by running the following command :<br/><br/></span>
<span style="font-family: monospace; font: Courier; font-size:16px;" class="break-all">ngcli get {cur_link}</span>
{/if}
</div>

@ -52,6 +52,12 @@
"creating": "Please wait while your Document is being created", "creating": "Please wait while your Document is being created",
"not_found" : "Document not found", "not_found" : "Document not found",
"empty_container": "Container is empty.", "empty_container": "Container is empty.",
"sign_snapshot": "Create a signed Snapshot",
"sign_heads": "Sign current heads",
"take_snapshot": "and take a Snapshot",
"signed_snapshot_is_on_its_way": "Your request for a signed snapshot has been sent to all signers. You will be notified when it is ready",
"signature_is_on_its_way": "Your request for a signature has been sent to all signers. You will be notified when it is ready",
"signature_is_ready": "Your signature is ready.",
"not_found_details_online" : "The document could not be found locally on this device, nor on the broker.", "not_found_details_online" : "The document could not be found locally on this device, nor on the broker.",
"not_found_details_offline" : "The document could not be found locally on this device, and it seems like you are offline, so it could not be retrieved from any broker neither.<br/><br/>If you are opening this document for the first time on this device, you have to be online now so the document can be fetched.<br/><br/> We will try connecting and fetching it every 5 seconds.", "not_found_details_offline" : "The document could not be found locally on this device, and it seems like you are offline, so it could not be retrieved from any broker neither.<br/><br/>If you are opening this document for the first time on this device, you have to be online now so the document can be fetched.<br/><br/> We will try connecting and fetching it every 5 seconds.",
"cannot_load_offline": "You are offline and using the web app. There is currently a limitation on local storage within the Web App, and you need to connect to the broker every time you login with the Web App.<br/><br/>For now, the Web App does not keep a local copy of your documents. due to the limit of 5MB in localStorage. We will remove this limitation soon. Stay tuned!<br/><br/>Check your connectivity status in the ", "cannot_load_offline": "You are offline and using the web app. There is currently a limitation on local storage within the Web App, and you need to connect to the broker every time you login with the Web App.<br/><br/>For now, the Web App does not keep a local copy of your documents. due to the limit of 5MB in localStorage. We will remove this limitation soon. Stay tuned!<br/><br/>Check your connectivity status in the ",

@ -396,6 +396,14 @@ export const digest_to_string = function(digest) {
return encode(buffer.buffer); return encode(buffer.buffer);
}; };
export const symkey_to_string = function(key) {
let copy = [...key.ChaCha20Key];
copy.reverse();
copy.push(0);
let buffer = Uint8Array.from(copy);
return encode(buffer.buffer);
};
export const discrete_update = async (update, crdt, heads) => { export const discrete_update = async (update, crdt, heads) => {
if (get(cur_tab).doc.live_edit) { if (get(cur_tab).doc.live_edit) {
await live_discrete_update(update, crdt, heads); await live_discrete_update(update, crdt, heads);
@ -504,7 +512,8 @@ export const branch_subscribe = function(nuri:string, in_tab:boolean) {
let already_subscribed = all_branches[nuri]; let already_subscribed = all_branches[nuri];
if (!already_subscribed) { if (!already_subscribed) {
let onUpdate = (update) => {}; let onUpdate = (update) => {};
const { subscribe, set, update } = writable({graph:[], discrete:{updates:[], deregisterOnUpdate:()=>{ onUpdate=()=>{};},registerOnUpdate:(f)=>{ }}, files:[], history: {start:()=>{}, stop:()=>{}, commits:false}, heads: []}); // create the underlying writable store // take:()=>{}, const { subscribe, set, update } = writable({graph:[], discrete:{updates:[], deregisterOnUpdate:()=>{ onUpdate=()=>{};},registerOnUpdate:(f)=>{ }},
files:[], history: {start:()=>{}, stop:()=>{}, commits:false}, heads: [], head_keys:[]}); // create the underlying writable store // take:()=>{},
update((old)=> { update((old)=> {
old.history.start = () => update((o) => {o.history.commits = true; return o;}) ; old.history.start = () => update((o) => {o.history.commits = true; return o;}) ;
old.history.stop = () => update((o) => {o.history.commits = false; return o;}) ; old.history.stop = () => update((o) => {o.history.commits = false; return o;}) ;
@ -566,6 +575,9 @@ export const branch_subscribe = function(nuri:string, in_tab:boolean) {
if (response.V0.TabInfo.store?.repo) { if (response.V0.TabInfo.store?.repo) {
$cur_tab.store.repo = response.V0.TabInfo.store.repo; $cur_tab.store.repo = response.V0.TabInfo.store.repo;
} }
if (response.V0.TabInfo.store?.has_outer) {
$cur_tab.store.has_outer = response.V0.TabInfo.store.has_outer;
}
if (response.V0.TabInfo.store?.store_type) { if (response.V0.TabInfo.store?.store_type) {
if (get(cur_branch) == nuri) { if (get(cur_branch) == nuri) {
@ -585,6 +597,10 @@ export const branch_subscribe = function(nuri:string, in_tab:boolean) {
let commitId = digest_to_string(head); let commitId = digest_to_string(head);
old.heads.push(commitId); old.heads.push(commitId);
} }
for (const key of response.V0.State.head_keys) {
let key_str = symkey_to_string(key);
old.head_keys.push(key_str);
}
for (const file of response.V0.State.files) { for (const file of response.V0.State.files) {
old.files.unshift(file); old.files.unshift(file);
} }
@ -608,17 +624,12 @@ export const branch_subscribe = function(nuri:string, in_tab:boolean) {
while (i--) { while (i--) {
if (response.V0.Patch.commit_info.past.includes(old.heads[i])) { if (response.V0.Patch.commit_info.past.includes(old.heads[i])) {
old.heads.splice(i, 1); old.heads.splice(i, 1);
old.head_keys.splice(i, 1);
} }
} }
old.heads.push(response.V0.Patch.commit_id); old.heads.push(response.V0.Patch.commit_id);
if (old.history.commits!==false) { old.head_keys.push(response.V0.Patch.commit_info.key);
let commit = [response.V0.Patch.commit_id, response.V0.Patch.commit_info];
if (old.history.commits === true) {
old.history.commits = [commit];
} else {
old.history.commits.push(commit);
}
}
if (response.V0.Patch.discrete) { if (response.V0.Patch.discrete) {
old.discrete.updates.push(response.V0.Patch.discrete); old.discrete.updates.push(response.V0.Patch.discrete);
onUpdate(response.V0.Patch.discrete); onUpdate(response.V0.Patch.discrete);
@ -646,8 +657,19 @@ export const branch_subscribe = function(nuri:string, in_tab:boolean) {
$cur_tab.branch.files = old.files.length; $cur_tab.branch.files = old.files.length;
return $cur_tab; return $cur_tab;
}); });
} else if (response.V0.Patch.other?.AsyncSignature) {
if (old.history.commits!==false) {
// we pass the AsyncSignature to the History.svelte
response.V0.Patch.commit_info.async_sig = response.V0.Patch.other.AsyncSignature;
}
}
if (old.history.commits!==false) {
let commit = [response.V0.Patch.commit_id, response.V0.Patch.commit_info];
if (old.history.commits === true) {
old.history.commits = [commit];
} else { } else {
old.history.commits.push(commit);
}
} }
} }
return old; return old;

@ -103,6 +103,12 @@ td.hljs {
width: calc(100% - 32px) !important; width: calc(100% - 32px) !important;
} }
@media (max-width: 400px) {
.popup-bg-modal + div {
padding: 0 !important;
}
}
.menu-bg-modal + div { .menu-bg-modal + div {
padding: 0 !important; padding: 0 !important;
height: 100%; height: 100%;

@ -204,8 +204,15 @@ export const update_branch_display = (cur_tab) => {
export const show_modal_menu = writable(false); export const show_modal_menu = writable(false);
export const show_spinner = writable(false); export const show_spinner = writable(false);
export const show_doc_popup = writable(false);
export const cur_doc_popup = writable("");
export const show_modal_create = writable(false); export const show_modal_create = writable(false);
export const open_doc_popup = (popup_name) => {
cur_doc_popup.set(popup_name);
show_doc_popup.set(true);
}
export const in_memory_graph = writable(""); export const in_memory_graph = writable("");
export const in_memory_discrete = writable(""); export const in_memory_discrete = writable("");
@ -393,6 +400,9 @@ export const cur_tab_view_or_edit = derived(cur_tab, ($cur_tab) => {
export const edit_header_button = derived(cur_tab, ($cur_tab) => { export const edit_header_button = derived(cur_tab, ($cur_tab) => {
return ($cur_tab.doc.is_store && ( $cur_tab.store.store_type === "public" || $cur_tab.store.store_type === "protected"))? "doc.header.buttons.edit_profile" : "doc.header.buttons.edit_intro"; return ($cur_tab.doc.is_store && ( $cur_tab.store.store_type === "public" || $cur_tab.store.store_type === "protected"))? "doc.header.buttons.edit_profile" : "doc.header.buttons.edit_intro";
}); });
export const in_private_store = derived(cur_tab, ($cur_tab) => {
return $cur_tab.store.store_type === "private";
});
export const header_title = derived(cur_tab, ($cur_tab) => { export const header_title = derived(cur_tab, ($cur_tab) => {
if ($cur_tab.doc.is_store) { if ($cur_tab.doc.is_store) {

@ -269,6 +269,7 @@ impl ServerBroker {
} else { } else {
Some(credentials.public_store) Some(credentials.public_store)
}, },
locator: Locator::empty(),
}, },
block_storage, block_storage,
)?; )?;

@ -51,6 +51,9 @@ pub enum AppFetchContentV0 {
WriteQuery, WriteQuery,
RdfDump, RdfDump,
History, History,
SignatureStatus,
SignatureRequest,
SignedSnapshotRequest,
} }
impl AppFetchContentV0 { impl AppFetchContentV0 {
@ -170,7 +173,7 @@ impl From<&CommitInfo> for CommitInfoJs {
CommitInfoJs { CommitInfoJs {
past: info.past.iter().map(|objid| objid.to_string()).collect(), past: info.past.iter().map(|objid| objid.to_string()).collect(),
key: info.key.to_string(), key: info.key.to_string(),
signature: info.signature.as_ref().map(|s| NuriV0::object_ref(&s)), signature: info.signature.as_ref().map(|s| NuriV0::signature_ref(&s)),
author: info.author.clone(), author: info.author.clone(),
timestamp: display_timestamp_local(info.timestamp), timestamp: display_timestamp_local(info.timestamp),
final_consistency: info.final_consistency, final_consistency: info.final_consistency,
@ -287,10 +290,18 @@ impl NuriV0 {
format!("{DID_PREFIX}:{}", obj_ref.object_nuri()) format!("{DID_PREFIX}:{}", obj_ref.object_nuri())
} }
pub fn signature_ref(obj_ref: &ObjectRef) -> String {
format!("s:{}:k:{}", obj_ref.id, obj_ref.key)
}
pub fn token(token: &Digest) -> String { pub fn token(token: &Digest) -> String {
format!("{DID_PREFIX}:n:{token}") format!("{DID_PREFIX}:n:{token}")
} }
pub fn locator(locator: &Locator) -> String {
format!("l:{locator}")
}
pub fn is_branch_identifier(&self) -> bool { pub fn is_branch_identifier(&self) -> bool {
self.locator.is_empty() self.locator.is_empty()
&& self.topic.is_none() && self.topic.is_none()
@ -516,6 +527,15 @@ impl AppRequestCommandV0 {
pub fn new_history() -> Self { pub fn new_history() -> Self {
AppRequestCommandV0::Fetch(AppFetchContentV0::History) AppRequestCommandV0::Fetch(AppFetchContentV0::History)
} }
pub fn new_signature_status() -> Self {
AppRequestCommandV0::Fetch(AppFetchContentV0::SignatureStatus)
}
pub fn new_signature_request() -> Self {
AppRequestCommandV0::Fetch(AppFetchContentV0::SignatureRequest)
}
pub fn new_signed_snapshot_request() -> Self {
AppRequestCommandV0::Fetch(AppFetchContentV0::SignedSnapshotRequest)
}
pub fn new_create() -> Self { pub fn new_create() -> Self {
AppRequestCommandV0::Create AppRequestCommandV0::Create
} }
@ -821,6 +841,7 @@ pub struct GraphState {
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AppState { pub struct AppState {
pub heads: Vec<ObjectId>, pub heads: Vec<ObjectId>,
pub head_keys: Vec<ObjectKey>,
pub graph: Option<GraphState>, // there is always a graph present in the branch. but it might not have been asked in the request pub graph: Option<GraphState>, // there is always a graph present in the branch. but it might not have been asked in the request
pub discrete: Option<DiscreteState>, pub discrete: Option<DiscreteState>,
pub files: Vec<FileName>, pub files: Vec<FileName>,
@ -859,7 +880,7 @@ impl AppHistory {
pub enum OtherPatch { pub enum OtherPatch {
FileAdd(FileName), FileAdd(FileName),
FileRemove(ObjectId), FileRemove(ObjectId),
AsyncSignature((ObjectRef, Vec<ObjectId>)), AsyncSignature((String, Vec<String>)),
Snapshot(ObjectRef), Snapshot(ObjectRef),
Compact(ObjectRef), Compact(ObjectRef),
Other, Other,
@ -940,6 +961,7 @@ pub enum AppResponseV0 {
State(AppState), State(AppState),
Patch(AppPatch), Patch(AppPatch),
History(AppHistory), History(AppHistory),
SignatureStatus(Vec<(String, Option<String>, bool)>),
Text(String), Text(String),
//File(FileName), //File(FileName),
FileUploading(u32), FileUploading(u32),

@ -201,6 +201,32 @@ pub enum BrokerServer {
V0(BrokerServerV0), V0(BrokerServerV0),
} }
pub type LocatorV0 = Vec<BrokerServer>;
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum Locator {
V0(LocatorV0),
}
impl fmt::Display for Locator {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let ser = serde_bare::to_vec(&self).unwrap();
write!(f, "{}", base64_url::encode(&ser))
}
}
impl Locator {
pub fn empty() -> Self {
Self::V0(vec![])
}
}
impl From<BrokerServerV0> for Locator {
fn from(bs: BrokerServerV0) -> Self {
Locator::V0(vec![BrokerServer::V0(bs)])
}
}
impl BrokerServerV0 { impl BrokerServerV0 {
pub fn new_localhost(peer_id: PubKey) -> Self { pub fn new_localhost(peer_id: PubKey) -> Self {
BrokerServerV0 { BrokerServerV0 {

@ -37,7 +37,7 @@ ed25519-dalek = "1.0.1"
sbbf-rs-safe = "0.3.2" sbbf-rs-safe = "0.3.2"
lazy_static = "1.4.0" lazy_static = "1.4.0"
curve25519-dalek = "3.2.0" curve25519-dalek = "3.2.0"
threshold_crypto = "0.4.0" threshold_crypto = { version = "0.4.0", git = "https://github.com/nextgraph-org/threshold_crypto.git", branch = "master" }
crypto_box = { version = "0.8.2", features = ["seal"] } crypto_box = { version = "0.8.2", features = ["seal"] }
zeroize = { version = "1.7.0", features = ["zeroize_derive"] } zeroize = { version = "1.7.0", features = ["zeroize_derive"] }
base64-url = "2.0.0" base64-url = "2.0.0"

@ -448,6 +448,7 @@ impl Commit {
pub fn final_consistency(&self) -> bool { pub fn final_consistency(&self) -> bool {
self.content().final_consistency() self.content().final_consistency()
|| self.body().is_some_and(|body| body.total_order_required())
} }
pub fn get_type(&self) -> Option<CommitType> { pub fn get_type(&self) -> Option<CommitType> {
@ -1009,6 +1010,8 @@ impl CommitBody {
pub fn total_order_required(&self) -> bool { pub fn total_order_required(&self) -> bool {
match self { match self {
Self::V0(v0) => match v0 { Self::V0(v0) => match v0 {
CommitBodyV0::RootBranch(_) => true,
CommitBodyV0::Branch(_) => true,
CommitBodyV0::UpdateRootBranch(_) => true, CommitBodyV0::UpdateRootBranch(_) => true,
CommitBodyV0::UpdateBranch(_) => true, CommitBodyV0::UpdateBranch(_) => true,
CommitBodyV0::AddBranch(AddBranch::V0(AddBranchV0 { CommitBodyV0::AddBranch(AddBranch::V0(AddBranchV0 {
@ -1022,7 +1025,7 @@ impl CommitBody {
CommitBodyV0::RemovePermission(_) => true, CommitBodyV0::RemovePermission(_) => true,
//CommitBodyV0::Quorum(_) => true, //CommitBodyV0::Quorum(_) => true,
CommitBodyV0::Compact(_) => true, CommitBodyV0::Compact(_) => true,
CommitBodyV0::SyncTransaction(_) => true, // check Quorum::TotalOrder in CommitContent CommitBodyV0::SyncTransaction(_) => true, // check QuorumType::TotalOrder in CommitContent
CommitBodyV0::RootCapRefresh(_) => true, CommitBodyV0::RootCapRefresh(_) => true,
CommitBodyV0::BranchCapRefresh(_) => true, CommitBodyV0::BranchCapRefresh(_) => true,
_ => false, _ => false,
@ -1497,7 +1500,7 @@ impl fmt::Display for CommitBody {
// //
CommitBodyV0::Branch(b) => write!(f, "Branch {}", b), // singleton and should be first in branch CommitBodyV0::Branch(b) => write!(f, "Branch {}", b), // singleton and should be first in branch
// CommitBodyV0::UpdateBranch(b) => write!(f, "UpdateBranch {}", b), // total order enforced with total_order_quorum // CommitBodyV0::UpdateBranch(b) => write!(f, "UpdateBranch {}", b), // total order enforced with total_order_quorum
// CommitBodyV0::Snapshot(b) => write!(f, "Snapshot {}", b), // a soft snapshot CommitBodyV0::Snapshot(b) => write!(f, "Snapshot {}", b), // a soft snapshot
// CommitBodyV0::AsyncTransaction(b) => write!(f, "AsyncTransaction {}", b), // partial_order // CommitBodyV0::AsyncTransaction(b) => write!(f, "AsyncTransaction {}", b), // partial_order
// CommitBodyV0::SyncTransaction(b) => write!(f, "SyncTransaction {}", b), // total_order // CommitBodyV0::SyncTransaction(b) => write!(f, "SyncTransaction {}", b), // total_order
CommitBodyV0::AddFile(b) => write!(f, "AddFile {}", b), CommitBodyV0::AddFile(b) => write!(f, "AddFile {}", b),
@ -1505,7 +1508,7 @@ impl fmt::Display for CommitBody {
// CommitBodyV0::Compact(b) => write!(f, "Compact {}", b), // a hard snapshot. total order enforced with total_order_quorum // CommitBodyV0::Compact(b) => write!(f, "Compact {}", b), // a hard snapshot. total order enforced with total_order_quorum
//Merge(Merge) => write!(f, "RootBranch {}", b), //Merge(Merge) => write!(f, "RootBranch {}", b),
//Revert(Revert) => write!(f, "RootBranch {}", b), // only possible on partial order commit //Revert(Revert) => write!(f, "RootBranch {}", b), // only possible on partial order commit
// CommitBodyV0::AsyncSignature(b) => write!(f, "AsyncSignature {}", b), CommitBodyV0::AsyncSignature(b) => write!(f, "AsyncSignature {}", b),
// //
// For both // For both

@ -89,6 +89,8 @@ pub enum NgError {
NotARendezVous, NotARendezVous,
IncompatibleQrCode, IncompatibleQrCode,
InvalidClass, InvalidClass,
KeyShareNotFound,
BrokerNotFound,
} }
impl Error for NgError {} impl Error for NgError {}
@ -148,6 +150,12 @@ impl From<CommitLoadError> for NgError {
} }
} }
impl From<ObjectParseError> for NgError {
fn from(e: ObjectParseError) -> Self {
NgError::ObjectParseError(e)
}
}
impl From<FileError> for NgError { impl From<FileError> for NgError {
fn from(e: FileError) -> Self { fn from(e: FileError) -> Self {
NgError::FileError(e) NgError::FileError(e)
@ -371,6 +379,8 @@ pub enum VerifierError {
YrsError(String), YrsError(String),
AutomergeError(String), AutomergeError(String),
InvalidNuri, InvalidNuri,
InvalidJson,
NothingToSign,
} }
impl Error for VerifierError {} impl Error for VerifierError {}

@ -133,15 +133,16 @@ impl Event {
} }
} }
pub fn open_without_body( pub fn open_with_body(
&self, &self,
store: &Store, store: &Store,
repo_id: &RepoId, repo_id: &RepoId,
branch_id: &BranchId, branch_id: &BranchId,
branch_secret: &ReadCapSecret, branch_secret: &ReadCapSecret,
with_body: bool,
) -> Result<Commit, NgError> { ) -> Result<Commit, NgError> {
match self { match self {
Self::V0(v0) => v0.open(store, repo_id, branch_id, branch_secret, false), Self::V0(v0) => v0.open(store, repo_id, branch_id, branch_secret, with_body),
} }
} }

@ -34,6 +34,8 @@ pub mod kcv_storage;
pub mod os_info; pub mod os_info;
pub use threshold_crypto::PublicKeySet;
#[macro_use] #[macro_use]
extern crate slice_as_array; extern crate slice_as_array;

@ -715,6 +715,10 @@ impl Object {
&self.block_contents &self.block_contents
} }
pub fn into_blocks(self) -> Vec<Block> {
self.block_contents.into_values().collect()
}
/// Collect leaves from the tree /// Collect leaves from the tree
fn collect_leaves( fn collect_leaves(
blocks: &Vec<BlockId>, blocks: &Vec<BlockId>,

@ -123,6 +123,8 @@ pub struct Repo {
pub signer: Option<SignerCap>, pub signer: Option<SignerCap>,
pub certificate_ref: Option<ObjectRef>,
pub members: HashMap<Digest, UserInfo>, pub members: HashMap<Digest, UserInfo>,
pub branches: HashMap<BranchId, BranchInfo>, pub branches: HashMap<BranchId, BranchInfo>,
@ -193,16 +195,20 @@ impl Repo {
&self, &self,
recursor: &mut Vec<(BlockRef, Option<ObjectId>)>, recursor: &mut Vec<(BlockRef, Option<ObjectId>)>,
visited: &mut HashMap<ObjectId, (HashSet<ObjectId>, CommitInfo)>, visited: &mut HashMap<ObjectId, (HashSet<ObjectId>, CommitInfo)>,
signatures: &mut HashMap<ObjectId, ObjectRef>,
) -> Result<Option<ObjectId>, VerifierError> { ) -> Result<Option<ObjectId>, VerifierError> {
let mut root = None; let mut root = None;
while let Some((next_ref, future)) = recursor.pop() { while let Some((next_ref, future)) = recursor.pop() {
if let Ok(cobj) = Commit::load(next_ref, &self.store, true) { if let Ok(cobj) = Commit::load(next_ref, &self.store, true) {
let id = cobj.id().unwrap(); let id = cobj.id().unwrap();
if let Some((future_set, _)) = visited.get_mut(&id) { if let Some((future_set, info)) = visited.get_mut(&id) {
// we update the future // we update the future
if let Some(f) = future { if let Some(f) = future {
future_set.insert(f); future_set.insert(f);
} }
if let Some(sign) = signatures.remove(&id) {
info.signature = Some(sign);
}
} else { } else {
let commit_type = cobj.get_type().unwrap(); let commit_type = cobj.get_type().unwrap();
let acks = cobj.acks(); let acks = cobj.acks();
@ -248,9 +254,10 @@ impl Repo {
} }
CommitType::AsyncSignature => { CommitType::AsyncSignature => {
let past: Vec<ObjectId> = acks.iter().map(|r| r.id.clone()).collect(); let past: Vec<ObjectId> = acks.iter().map(|r| r.id.clone()).collect();
for p in past.iter() { let sign = cobj.get_signature_reference().unwrap();
visited.get_mut(p).unwrap().1.signature = for p in cobj.deps().iter() {
Some(cobj.get_signature_reference().unwrap()); signatures.insert(p.id, sign.clone());
//visited.get_mut(&p.id).unwrap().1.signature = Some(sign.clone());
} }
(past, acks, id) (past, acks, id)
} }
@ -260,7 +267,7 @@ impl Repo {
let commit_info = CommitInfo { let commit_info = CommitInfo {
past, past,
key: cobj.key().unwrap(), key: cobj.key().unwrap(),
signature: None, signature: signatures.remove(&id),
author: self.get_user_string(cobj.author()), author: self.get_user_string(cobj.author()),
timestamp: cobj.timestamp(), timestamp: cobj.timestamp(),
final_consistency: cobj.final_consistency(), final_consistency: cobj.final_consistency(),
@ -464,7 +471,8 @@ impl Repo {
let mut root = None; let mut root = None;
let mut recursor: Vec<(BlockRef, Option<ObjectId>)> = let mut recursor: Vec<(BlockRef, Option<ObjectId>)> =
heads.iter().map(|h| (h.clone(), None)).collect(); heads.iter().map(|h| (h.clone(), None)).collect();
let r = self.load_causal_past(&mut recursor, &mut visited)?; let mut signatures: HashMap<ObjectId, ObjectRef> = HashMap::new();
let r = self.load_causal_past(&mut recursor, &mut visited, &mut signatures)?;
if r.is_some() { if r.is_some() {
root = r; root = r;
} }
@ -517,9 +525,12 @@ impl Repo {
for p in past { for p in past {
set.remove(&p); set.remove(&p);
} }
let already_in_heads = set.contains(&commit_ref);
branch.current_heads = set.into_iter().cloned().collect(); branch.current_heads = set.into_iter().cloned().collect();
if !already_in_heads {
branch.current_heads.push(commit_ref); branch.current_heads.push(commit_ref);
branch.commits_nbr += 1; branch.commits_nbr += 1;
}
// we return the new current heads // we return the new current heads
Ok(branch.current_heads.to_vec()) Ok(branch.current_heads.to_vec())
} else { } else {
@ -558,6 +569,7 @@ impl Repo {
members, members,
store, store,
signer: None, signer: None,
certificate_ref: None,
read_cap: None, read_cap: None,
write_cap: None, write_cap: None,
branches: HashMap::new(), branches: HashMap::new(),

@ -518,10 +518,11 @@ impl Store {
// finally getting the signature: // finally getting the signature:
let certificate_ref = cert_object.reference().unwrap();
let signature = Signature::V0(SignatureV0 { let signature = Signature::V0(SignatureV0 {
content: signature_content, content: signature_content,
threshold_sig, threshold_sig,
certificate_ref: cert_object.reference().unwrap(), certificate_ref: certificate_ref.clone(),
}); });
// saving the signature // saving the signature
@ -642,6 +643,7 @@ impl Store {
write_cap: Some(repo_write_cap_secret), write_cap: Some(repo_write_cap_secret),
branches: branches.into_iter().collect(), branches: branches.into_iter().collect(),
opened_branches: HashMap::new(), opened_branches: HashMap::new(),
certificate_ref: Some(certificate_ref),
}; };
Ok((repo, events)) Ok((repo, events))

@ -20,6 +20,7 @@ use once_cell::sync::OnceCell;
use sbbf_rs_safe::Filter; use sbbf_rs_safe::Filter;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use threshold_crypto::serde_impl::SerdeSecret; use threshold_crypto::serde_impl::SerdeSecret;
use threshold_crypto::SignatureShare;
use zeroize::{Zeroize, ZeroizeOnDrop}; use zeroize::{Zeroize, ZeroizeOnDrop};
use crate::errors::NgError; use crate::errors::NgError;
@ -484,6 +485,10 @@ impl BlockRef {
format!("j:{}:k:{}", self.id, self.key) format!("j:{}:k:{}", self.id, self.key)
} }
pub fn commit_nuri(&self) -> String {
format!("c:{}:k:{}", self.id, self.key)
}
pub fn readcap_nuri(&self) -> String { pub fn readcap_nuri(&self) -> String {
let ser = serde_bare::to_vec(self).unwrap(); let ser = serde_bare::to_vec(self).unwrap();
format!("r:{}", base64_url::encode(&ser)) format!("r:{}", base64_url::encode(&ser))
@ -1653,6 +1658,16 @@ pub struct SignerCap {
pub partial_order: Option<SerdeSecret<threshold_crypto::SecretKeyShare>>, pub partial_order: Option<SerdeSecret<threshold_crypto::SecretKeyShare>>,
} }
impl SignerCap {
pub fn sign_with_owner(&self, content: &[u8]) -> Result<SignatureShare, NgError> {
if let Some(key_share) = &self.owner {
Ok(key_share.sign(content))
} else {
Err(NgError::KeyShareNotFound)
}
}
}
/// Permissions /// Permissions
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum PermissionV0 { pub enum PermissionV0 {
@ -1900,7 +1915,7 @@ pub enum RemoveLink {
/// so that a user can share with all its device a new signing capability that was just created. /// so that a user can share with all its device a new signing capability that was just created.
/// The cap's `epoch` field should be dereferenced and the user must be part of the quorum/owners. /// The cap's `epoch` field should be dereferenced and the user must be part of the quorum/owners.
/// DEPS to the previous AddSignerCap commit(s) if it is an update. in this case, repo_ids have to match, /// DEPS to the previous AddSignerCap commit(s) if it is an update. in this case, repo_ids have to match,
/// and the the referenced rootbranch definition(s) should have compatible causal past (the newer AddSignerCap must have a newer epoch compared to the one of the replaced cap ) /// and the referenced rootbranch definition(s) should have compatible causal past (the newer AddSignerCap must have a newer epoch compared to the one of the replaced cap )
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct AddSignerCapV0 { pub struct AddSignerCapV0 {
pub cap: SignerCap, pub cap: SignerCap,
@ -2097,6 +2112,29 @@ pub enum Snapshot {
V0(SnapshotV0), V0(SnapshotV0),
} }
impl Snapshot {
pub fn snapshot_ref(&self) -> &ObjectRef {
match self {
Self::V0(v0) => &v0.content,
}
}
}
impl fmt::Display for Snapshot {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::V0(v0) => {
writeln!(f, "V0\r\nheads:")?;
for h in v0.heads.iter() {
writeln!(f, "{h}")?;
}
writeln!(f, "content: {}", v0.content)?;
Ok(())
}
}
}
}
/// Compact: Hard Snapshot of a Branch /// Compact: Hard Snapshot of a Branch
/// ///
/// Contains a data structure /// Contains a data structure
@ -2126,7 +2164,7 @@ pub enum Compact {
V0(CompactV0), V0(CompactV0),
} }
// Async Threshold Signature of a commit V0 based on the partial order quorum // Async Threshold Signature of a commit (or commits) V0 based on the partial order quorum
// //
// Can sign Transaction, AddFile, and Snapshot, after they have been committed to the DAG. // Can sign Transaction, AddFile, and Snapshot, after they have been committed to the DAG.
// DEPS: the signed commits // DEPS: the signed commits
@ -2143,7 +2181,7 @@ pub enum AsyncSignature {
} }
impl AsyncSignature { impl AsyncSignature {
pub fn verify(&self) -> bool { pub fn verify_(&self) -> bool {
// check that the signature object referenced here, is of type threshold_sig Partial // check that the signature object referenced here, is of type threshold_sig Partial
unimplemented!(); unimplemented!();
} }
@ -2154,6 +2192,17 @@ impl AsyncSignature {
} }
} }
impl fmt::Display for AsyncSignature {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::V0(v0) => {
writeln!(f, "V0\r\nsignature object ref: {}", v0)?;
Ok(())
}
}
}
}
/// Sync Threshold Signature of one or a chain of commits . V0 /// Sync Threshold Signature of one or a chain of commits . V0
/// ///
/// points to the new Signature Object /// points to the new Signature Object
@ -2363,6 +2412,21 @@ impl fmt::Display for Signature {
} }
} }
impl Signature {
pub fn certificate_ref(&self) -> &ObjectRef {
match self {
Self::V0(v0) => &v0.certificate_ref,
}
}
pub fn signed_commits(&self) -> &[ObjectId] {
match self {
Self::V0(v0) => match &v0.content {
SignatureContent::V0(v0) => &v0.commits,
},
}
}
}
/// A Signature object (it is not a commit), referenced in AsyncSignature or SyncSignature /// A Signature object (it is not a commit), referenced in AsyncSignature or SyncSignature
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum Signature { pub enum Signature {
@ -2373,14 +2437,14 @@ pub enum Signature {
/// ///
/// Can be inherited from the store, in this case, it is an ObjectRef pointing to the latest Certificate of the store. /// Can be inherited from the store, in this case, it is an ObjectRef pointing to the latest Certificate of the store.
/// Or can be 2 PublicKey defined specially for this repo, /// Or can be 2 PublicKey defined specially for this repo,
/// .0 one for the total_order (first one). it is a PublicKeysSet so that verifier can see the threshold value, and can also verify Shares individually /// .0 one for the total_order (first one).
/// .1 the other for the partial_order (second one. a PublicKey. is optional, as some repos are forcefully totally ordered and do not have this set). /// .1 the other for the partial_order (second one. a PublicKey. is optional, as some repos are forcefully totally ordered and do not have this set).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub enum OrdersPublicKeySetsV0 { pub enum OrdersPublicKeySetsV0 {
Store(ObjectRef), Store(ObjectRef),
Repo( Repo(
( (
threshold_crypto::PublicKeySet, threshold_crypto::PublicKey,
Option<threshold_crypto::PublicKey>, Option<threshold_crypto::PublicKey>,
), ),
), ),
@ -2518,7 +2582,7 @@ pub enum QuorumType {
impl QuorumType { impl QuorumType {
pub fn final_consistency(&self) -> bool { pub fn final_consistency(&self) -> bool {
match self { match self {
Self::TotalOrder | Self::Owners | Self::IamTheSignature => true, Self::TotalOrder => true,
_ => false, _ => false,
} }
} }
@ -2816,7 +2880,7 @@ pub enum ObjectContentV0 {
RandomAccessFileMeta(RandomAccessFileMeta), RandomAccessFileMeta(RandomAccessFileMeta),
RefreshCap(RefreshCap), RefreshCap(RefreshCap),
#[serde(with = "serde_bytes")] #[serde(with = "serde_bytes")]
Snapshot(Vec<u8>), // serialization of an AppState Snapshot(Vec<u8>), // JSON serialization (UTF8)
} }
/// Immutable data stored encrypted in a Merkle tree /// Immutable data stored encrypted in a Merkle tree
@ -2901,7 +2965,7 @@ pub struct EventContentV0 {
/// so that a valid EventContent can be sent (and so that its signature can be verified successfully) /// so that a valid EventContent can be sent (and so that its signature can be verified successfully)
pub blocks: Vec<Block>, pub blocks: Vec<Block>,
/// Ids of additional Blocks (FILES) with encrypted content that are not to be pushed in the pub/sub /// Ids of additional Blocks (FILES or Objects) with encrypted content that are not to be pushed in the pub/sub
/// they will be retrieved later by interested users /// they will be retrieved later by interested users
pub file_ids: Vec<BlockId>, pub file_ids: Vec<BlockId>,
@ -2911,7 +2975,7 @@ pub struct EventContentV0 {
/// - key: BLAKE3 derive_key ("NextGraph Event Commit ObjectKey ChaCha20 key", /// - key: BLAKE3 derive_key ("NextGraph Event Commit ObjectKey ChaCha20 key",
/// RepoId + BranchId + branch_secret(ReadCapSecret of the branch) + publisher) /// RepoId + BranchId + branch_secret(ReadCapSecret of the branch) + publisher)
/// - nonce: commit_seq /// - nonce: commit_seq
/// * If it is a CertificateRefresh, both the blocks and block_ids vectors are empty. /// * If it is a CertificateRefresh, both the blocks and file_ids vectors are empty.
/// the key here contains an encrypted ObjectRef to the new Certificate. /// the key here contains an encrypted ObjectRef to the new Certificate.
/// The whole ObjectRef is encrypted (including the ID) to avoid correlation of topics who will have the same Certificate ID (belong to the same repo) /// The whole ObjectRef is encrypted (including the ID) to avoid correlation of topics who will have the same Certificate ID (belong to the same repo)
/// Encrypted using ChaCha20, with : /// Encrypted using ChaCha20, with :

@ -511,6 +511,103 @@ pub async fn branch_history(session_id: JsValue, nuri: JsValue) -> Result<JsValu
} }
} }
#[wasm_bindgen]
pub async fn signature_status(session_id: JsValue, nuri: JsValue) -> Result<JsValue, String> {
let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
.map_err(|_| "Invalid session_id".to_string())?;
let nuri = if nuri.is_string() {
NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())?
} else {
NuriV0::new_private_store_target()
};
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::new_signature_status(),
nuri,
payload: None,
session_id,
});
let res = nextgraph::local_broker::app_request(request)
.await
.map_err(|e: NgError| e.to_string())?;
let AppResponse::V0(res) = res;
//log_debug!("{:?}", res);
match res {
AppResponseV0::SignatureStatus(s) => Ok(serde_wasm_bindgen::to_value(&s).unwrap()),
_ => Err("invalid response".to_string()),
}
}
#[wasm_bindgen]
pub async fn signed_snapshot_request(
session_id: JsValue,
nuri: JsValue,
) -> Result<JsValue, String> {
let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
.map_err(|_| "Invalid session_id".to_string())?;
let nuri = if nuri.is_string() {
NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())?
} else {
NuriV0::new_private_store_target()
};
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::new_signed_snapshot_request(),
nuri,
payload: None,
session_id,
});
let res = nextgraph::local_broker::app_request(request)
.await
.map_err(|e: NgError| e.to_string())?;
let AppResponse::V0(res) = res;
//log_debug!("{:?}", res);
match res {
AppResponseV0::True => Ok(JsValue::TRUE),
AppResponseV0::False => Ok(JsValue::FALSE),
AppResponseV0::Error(e) => Err(e),
_ => Err("invalid response".to_string()),
}
}
#[wasm_bindgen]
pub async fn signature_request(session_id: JsValue, nuri: JsValue) -> Result<JsValue, String> {
let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
.map_err(|_| "Invalid session_id".to_string())?;
let nuri = if nuri.is_string() {
NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())?
} else {
NuriV0::new_private_store_target()
};
let request = AppRequest::V0(AppRequestV0 {
command: AppRequestCommandV0::new_signature_request(),
nuri,
payload: None,
session_id,
});
let res = nextgraph::local_broker::app_request(request)
.await
.map_err(|e: NgError| e.to_string())?;
let AppResponse::V0(res) = res;
//log_debug!("{:?}", res);
match res {
AppResponseV0::True => Ok(JsValue::TRUE),
AppResponseV0::False => Ok(JsValue::FALSE),
AppResponseV0::Error(e) => Err(e),
_ => Err("invalid response".to_string()),
}
}
#[cfg(wasmpack_target = "nodejs")] #[cfg(wasmpack_target = "nodejs")]
#[wasm_bindgen] #[wasm_bindgen]
pub async fn admin_create_user(config: JsValue) -> Result<JsValue, String> { pub async fn admin_create_user(config: JsValue) -> Result<JsValue, String> {

@ -11,6 +11,8 @@
pub mod transaction; pub mod transaction;
pub mod snapshot;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
@ -39,10 +41,11 @@ pub trait CommitVerifier {
) -> Result<(), VerifierError>; ) -> Result<(), VerifierError>;
} }
fn list_dep_chain_until( pub(crate) fn list_dep_chain_until(
start: ObjectRef, start: ObjectRef,
end: &ObjectId, end: &ObjectId,
store: &Store, store: &Store,
with_body: bool,
) -> Result<Vec<Commit>, VerifierError> { ) -> Result<Vec<Commit>, VerifierError> {
let mut res = vec![]; let mut res = vec![];
let mut pos = start; let mut pos = start;
@ -51,7 +54,7 @@ fn list_dep_chain_until(
if pos_id == *end { if pos_id == *end {
break; break;
} }
let commit = Commit::load(pos, &store, true)?; let commit = Commit::load(pos, &store, with_body)?;
let deps = commit.deps(); let deps = commit.deps();
if deps.len() != 1 { if deps.len() != 1 {
return Err(VerifierError::MalformedSyncSignatureDeps); return Err(VerifierError::MalformedSyncSignatureDeps);
@ -122,16 +125,20 @@ impl CommitVerifier for RootBranch {
}; };
let id = root_branch.id; let id = root_branch.id;
let branches = vec![(root_branch.id, root_branch)]; let branches = vec![(root_branch.id, root_branch)];
let signer = verifier
.user_storage()
.and_then(|storage| storage.get_signer_cap(&id).ok());
let repo = Repo { let repo = Repo {
id, id,
repo_def: repository.clone(), repo_def: repository.clone(),
signer: None, //TO BE ADDED LATER when AddSignerCap commit is found signer,
members: HashMap::new(), members: HashMap::new(),
store: Arc::clone(&store), store: Arc::clone(&store),
read_cap: Some(reference), read_cap: Some(reference),
write_cap: repo_write_cap_secret, write_cap: repo_write_cap_secret,
branches: branches.into_iter().collect(), branches: branches.into_iter().collect(),
opened_branches: HashMap::new(), opened_branches: HashMap::new(),
certificate_ref: verifier.temporary_repo_certificates.remove(&id),
}; };
verifier.populate_topics(&repo); verifier.populate_topics(&repo);
let _repo_ref = verifier.add_repo_and_save(repo); let _repo_ref = verifier.add_repo_and_save(repo);
@ -210,8 +217,9 @@ impl CommitVerifier for SyncSignature {
SyncSignature::V0(signature_ref) => { SyncSignature::V0(signature_ref) => {
let sign = Object::load_ref(signature_ref, &store)?; let sign = Object::load_ref(signature_ref, &store)?;
match sign.content_v0()? { match sign.content_v0()? {
ObjectContentV0::Signature(_sig) => { ObjectContentV0::Signature(sig) => {
//TODO: verify signature //TODO: verify signature
verifier.update_repo_certificate(repo_id, sig.certificate_ref());
} }
_ => return Err(VerifierError::InvalidSignatureObject), _ => return Err(VerifierError::InvalidSignatureObject),
} }
@ -225,7 +233,7 @@ impl CommitVerifier for SyncSignature {
if deps.len() != 1 { if deps.len() != 1 {
return Err(VerifierError::MalformedSyncSignatureDeps); return Err(VerifierError::MalformedSyncSignatureDeps);
} }
let commits = list_dep_chain_until(deps[0].clone(), &ack.id, &store)?; let commits = list_dep_chain_until(deps[0].clone(), &ack.id, &store, true)?;
for commit in commits { for commit in commits {
verifier verifier
.verify_commit(&commit, branch_id, repo_id, Arc::clone(&store)) .verify_commit(&commit, branch_id, repo_id, Arc::clone(&store))
@ -441,6 +449,19 @@ impl CommitVerifier for Snapshot {
repo_id: &RepoId, repo_id: &RepoId,
store: Arc<Store>, store: Arc<Store>,
) -> Result<(), VerifierError> { ) -> Result<(), VerifierError> {
let repo = verifier.get_repo(repo_id, store.get_store_repo())?;
verifier
.push_app_response(
branch_id,
AppResponse::V0(AppResponseV0::Patch(AppPatch {
commit_id: commit.id().unwrap().to_string(),
commit_info: (&commit.as_info(repo)).into(),
graph: None,
discrete: None,
other: Some(OtherPatch::Snapshot(self.snapshot_ref().clone())),
})),
)
.await;
Ok(()) Ok(())
} }
} }
@ -528,8 +549,42 @@ impl CommitVerifier for AsyncSignature {
repo_id: &RepoId, repo_id: &RepoId,
store: Arc<Store>, store: Arc<Store>,
) -> Result<(), VerifierError> { ) -> Result<(), VerifierError> {
match self {
AsyncSignature::V0(signature_ref) => {
let sign = Object::load_ref(signature_ref, &store)?;
let deps: Vec<BlockRef> = commit.deps();
match sign.content_v0()? {
ObjectContentV0::Signature(sig) => {
//TODO: verify signature (each deps should be in the sig.signed_commits())
// pushing AppResponse
let repo = verifier.get_repo(repo_id, store.get_store_repo())?;
verifier
.push_app_response(
branch_id,
AppResponse::V0(AppResponseV0::Patch(AppPatch {
commit_id: commit.id().unwrap().to_string(),
commit_info: (&commit.as_info(repo)).into(),
graph: None,
discrete: None,
other: Some(OtherPatch::AsyncSignature((
NuriV0::signature_ref(&signature_ref),
sig.signed_commits()
.iter()
.map(|c| c.to_string())
.collect(),
))),
})),
)
.await;
Ok(()) Ok(())
} }
_ => return Err(VerifierError::InvalidSignatureObject),
}
}
}
}
} }
#[async_trait::async_trait] #[async_trait::async_trait]
impl CommitVerifier for RootCapRefresh { impl CommitVerifier for RootCapRefresh {

@ -0,0 +1,121 @@
// Copyright (c) 2022-2024 Niko Bonnieure, Par le Peuple, NextGraph.org developers
// All rights reserved.
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::verifier::Verifier;
use ng_net::app_protocol::NuriTargetV0;
use ng_oxigraph::oxigraph::sparql::{Query, QueryResults};
use ng_repo::errors::{StorageError, VerifierError};
use ng_repo::types::*;
use serde_json::json;
use yrs::types::ToJson;
use yrs::updates::decoder::Decode;
use yrs::{GetString, Transact};
impl Verifier {
/// Builds a JSON snapshot of one branch, combining:
/// - "discrete": the branch's CRDT document state rendered as JSON
///   (null when no discrete state is stored), and
/// - "graph": every triple of the branch's RDF graph, one string per triple.
///
/// `crdt` selects the decoder for the stored discrete-state bytes;
/// `target` selects which graph(s) the SPARQL CONSTRUCT runs against.
///
/// # Errors
/// `AutomergeError`/`YrsError` on CRDT decode failure, `OxigraphError` on
/// SPARQL failure, `InvalidBranch` for an unsupported CRDT kind, and any
/// storage error other than `NoDiscreteState` (which is treated as empty).
pub(crate) fn take_snapshot(
&self,
crdt: &BranchCrdt,
branch_id: &BranchId,
target: &NuriTargetV0,
) -> Result<String, VerifierError> {
// A branch without discrete state is not an error: use an empty buffer.
let state = match self
.user_storage
.as_ref()
.unwrap()
.branch_get_discrete_state(branch_id)
{
Ok(s) => Ok(s),
Err(StorageError::NoDiscreteState) => Ok(vec![]),
Err(e) => Err(e),
}?;
let discrete = if state.is_empty() {
serde_json::Value::Null
} else {
match crdt {
BranchCrdt::Automerge(_) => {
let doc = automerge::Automerge::load(&state)
.map_err(|e| VerifierError::AutomergeError(e.to_string()))?;
serde_json::json!(automerge::AutoSerde::from(&doc))
}
// Each Yrs variant below rebuilds a fresh Doc, applies the stored
// v1-encoded update, then exports the root shared type (named "ng").
BranchCrdt::YText(_) => {
let doc = yrs::Doc::new();
let text = doc.get_or_insert_text("ng");
let mut txn = doc.transact_mut();
let update = yrs::Update::decode_v1(&state)
.map_err(|e| VerifierError::YrsError(e.to_string()))?;
txn.apply_update(update);
serde_json::Value::from(text.get_string(&txn))
}
BranchCrdt::YArray(_) => {
let doc = yrs::Doc::new();
let array = doc.get_or_insert_array("ng");
let mut txn = doc.transact_mut();
let update = yrs::Update::decode_v1(&state)
.map_err(|e| VerifierError::YrsError(e.to_string()))?;
txn.apply_update(update);
// Serialize via yrs' JSON writer, then re-parse so the array
// lands in the output as structured JSON, not as a string.
let mut json = String::new();
array.to_json(&txn).to_json(&mut json);
serde_json::from_str(&json).map_err(|_| VerifierError::InvalidJson)?
}
BranchCrdt::YMap(_) => {
let doc = yrs::Doc::new();
let map = doc.get_or_insert_map("ng");
let mut txn = doc.transact_mut();
let update = yrs::Update::decode_v1(&state)
.map_err(|e| VerifierError::YrsError(e.to_string()))?;
txn.apply_update(update);
let mut json = String::new();
map.to_json(&txn).to_json(&mut json);
serde_json::from_str(&json).map_err(|_| VerifierError::InvalidJson)?
}
BranchCrdt::YXml(_) => {
// TODO: if it is markdown, output the markdown instead of XML
// NOTE: the XML root fragment is named "prosemirror", not "ng".
let doc = yrs::Doc::new();
let xml = doc.get_or_insert_xml_fragment("prosemirror");
let mut txn = doc.transact_mut();
let update = yrs::Update::decode_v1(&state)
.map_err(|e| VerifierError::YrsError(e.to_string()))?;
txn.apply_update(update);
serde_json::json!({"xml":xml.get_string(&txn)})
}
_ => return Err(VerifierError::InvalidBranch),
}
};
// Dump the entire graph of the target with a catch-all CONSTRUCT query.
let store = self.graph_dataset.as_ref().unwrap();
let parsed = Query::parse("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }", None).unwrap();
let results = store
.query(parsed, self.resolve_target_for_sparql(target, true)?)
.map_err(|e| VerifierError::OxigraphError(e.to_string()))?;
let results = if let QueryResults::Graph(quads) = results {
let mut results = Vec::with_capacity(quads.size_hint().0);
for quad in quads {
match quad {
Err(e) => return Err(VerifierError::OxigraphError(e.to_string())),
Ok(triple) => results.push(triple.to_string()),
}
}
results
} else {
// A CONSTRUCT query must yield a graph; anything else is an engine error.
return Err(VerifierError::OxigraphError(
"Invalid Oxigraph query result".to_string(),
));
};
let res = json!({
"discrete": discrete,
"graph": results,
});
Ok(serde_json::to_string(&res).unwrap())
}
}

@ -9,6 +9,7 @@
//! Processor for each type of AppRequest //! Processor for each type of AppRequest
use std::collections::HashSet;
use std::sync::Arc; use std::sync::Arc;
use futures::channel::mpsc; use futures::channel::mpsc;
@ -20,10 +21,13 @@ use ng_repo::errors::*;
use ng_repo::file::{RandomAccessFile, ReadFile}; use ng_repo::file::{RandomAccessFile, ReadFile};
#[allow(unused_imports)] #[allow(unused_imports)]
use ng_repo::log::*; use ng_repo::log::*;
use ng_repo::object::Object;
use ng_repo::repo::CommitInfo; use ng_repo::repo::CommitInfo;
use ng_repo::store::Store;
use ng_repo::types::BranchId; use ng_repo::types::BranchId;
use ng_repo::types::StoreRepo; use ng_repo::types::StoreRepo;
use ng_repo::types::*; use ng_repo::types::*;
use ng_repo::PublicKeySet;
use ng_net::app_protocol::*; use ng_net::app_protocol::*;
use ng_net::utils::ResultSend; use ng_net::utils::ResultSend;
@ -237,6 +241,295 @@ impl Verifier {
repo.history_at_heads(&branch.current_heads) repo.history_at_heads(&branch.current_heads)
} }
/// Takes a signed snapshot of the target's branch: creates a Snapshot
/// commit over the current heads plus an AsyncSignature commit that
/// owner-signs it, verifies both locally, then publishes them as events.
///
/// Returns Ok(true) when the snapshot and signature were produced
/// immediately (owner threshold of 1); the boolean is surfaced to the app
/// as AppResponseV0::True/False by the caller.
async fn signed_snapshot_request(
&mut self,
target: &NuriTargetV0,
) -> Result<bool, VerifierError> {
let (repo_id, branch_id, store_repo) = self.resolve_target(target)?; // TODO deal with targets that are commit heads
let repo = self.get_repo(&repo_id, &store_repo)?;
let branch = repo.branch(&branch_id)?;
// Serialize the branch state (graph + discrete) to JSON and store it
// as a Snapshot object in the repo's block store.
let snapshot_json = self.take_snapshot(&branch.crdt, &branch_id, target)?;
//log_debug!("snapshot created {snapshot_json}");
let snapshot_object = Object::new(
ObjectContent::V0(ObjectContentV0::Snapshot(snapshot_json.as_bytes().to_vec())),
None,
0,
&repo.store,
);
let snap_obj_blocks = snapshot_object.save(&repo.store)?;
// Eagerly upload the snapshot blocks when a broker is connected.
if self.connected_broker.is_some() {
let mut blocks = Vec::with_capacity(snap_obj_blocks.len());
for block_id in snap_obj_blocks {
blocks.push(repo.store.get(&block_id)?);
}
self.put_blocks(blocks, repo).await?;
}
let snapshot_commit_body = CommitBodyV0::Snapshot(Snapshot::V0(SnapshotV0 {
heads: branch.current_heads.iter().map(|h| h.id).collect(),
content: snapshot_object.reference().unwrap(), //TODO : content could be omitted as the ref is already in files
}));
let mut proto_events = Vec::with_capacity(2);
// Snapshot commit: acks the current heads and carries the snapshot
// object as a file attachment.
let snapshot_commit = Commit::new_with_body_and_save(
self.user_privkey(),
self.user_id(),
branch_id,
QuorumType::Owners, // TODO: deal with PartialOrder (when the snapshot is not requested by owners)
vec![],
vec![],
branch.current_heads.clone(),
vec![],
vec![snapshot_object.reference().unwrap()],
vec![],
vec![],
CommitBody::V0(snapshot_commit_body),
0,
&repo.store,
)?;
let snapshot_commit_id = snapshot_commit.id().unwrap();
let snapshot_commit_ref = snapshot_commit.reference().unwrap();
// Owner-sign the snapshot commit id with the repo's threshold signer
// (threshold 0 => a single owner share is sufficient).
let signature_content = SignatureContent::V0(SignatureContentV0 {
commits: vec![snapshot_commit_id],
});
let signature_content_ser = serde_bare::to_vec(&signature_content).unwrap();
let sig_share = repo
.signer
.as_ref()
.unwrap()
.sign_with_owner(&signature_content_ser)?;
let sig = PublicKeySet::combine_signatures_with_threshold(0, [(0, &sig_share)])
.map_err(|_| NgError::IncompleteSignature)?;
let threshold_sig = ThresholdSignatureV0::Owners(sig);
let signature = Signature::V0(SignatureV0 {
content: signature_content,
threshold_sig,
certificate_ref: repo.certificate_ref.clone().unwrap(),
});
let signature_object = Object::new(
ObjectContent::V0(ObjectContentV0::Signature(signature)),
None,
0,
&repo.store,
);
let sign_obj_blocks = signature_object.save(&repo.store)?;
// AsyncSignature commit: deps on and acks the snapshot commit.
let signature_commit_body =
CommitBodyV0::AsyncSignature(AsyncSignature::V0(signature_object.reference().unwrap()));
let signature_commit = Commit::new_with_body_and_save(
self.user_privkey(),
self.user_id(),
branch_id,
QuorumType::IamTheSignature,
vec![snapshot_commit_ref.clone()],
vec![],
vec![snapshot_commit_ref],
vec![],
vec![],
vec![],
vec![],
CommitBody::V0(signature_commit_body),
0,
&repo.store,
)?;
// Verify both commits locally before emitting events for them.
let store = Arc::clone(&repo.store);
self.verify_commit_(
&snapshot_commit,
&branch_id,
&repo_id,
Arc::clone(&store),
true,
)
.await?;
self.verify_commit_(&signature_commit, &branch_id, &repo_id, store, true)
.await?;
proto_events.push((snapshot_commit, vec![]));
proto_events.push((signature_commit, sign_obj_blocks));
self.new_events(proto_events, repo_id, &store_repo).await?;
Ok(true)
}
/// Collects, starting from `heads`, the set of commits an AsyncSignature
/// should sign: SyncSignature heads are skipped entirely, AsyncSignature
/// heads are looked through (recursing into their deps), and every other
/// commit is kept as-is.
fn find_signable_commits(
    heads: &[BlockRef],
    store: &Store,
) -> Result<HashSet<BlockRef>, VerifierError> {
    let mut signable = HashSet::with_capacity(heads.len());
    for head in heads.iter() {
        let commit = Commit::load(head.clone(), store, true)?;
        match commit.get_type().unwrap() {
            CommitType::SyncSignature => {
                // we shouldn't be signing asynchronously a SyncSignature
            }
            CommitType::AsyncSignature => {
                // already signed: recurse to what that signature covered
                signable.extend(Self::find_signable_commits(&commit.deps(), store)?);
            }
            _ => {
                signable.insert(commit.reference().unwrap());
            }
        }
    }
    Ok(signable)
}
/// Creates an AsyncSignature commit that owner-signs every signable commit
/// reachable from the branch heads (see `find_signable_commits`), verifies
/// it locally, and broadcasts it as a single event.
///
/// Returns Ok(true) when the signature was produced immediately (owner
/// threshold of 1); the boolean is surfaced to the app as True/False.
/// Errors with `NothingToSign` when the heads contain nothing signable.
async fn signature_request(&mut self, target: &NuriTargetV0) -> Result<bool, VerifierError> {
let (repo_id, branch_id, store_repo) = self.resolve_target(target)?; // TODO deal with targets that are commit heads
let repo = self.get_repo(&repo_id, &store_repo)?;
let branch = repo.branch(&branch_id)?;
let commits = Vec::from_iter(
Verifier::find_signable_commits(&branch.current_heads, &repo.store)?.into_iter(),
);
if commits.is_empty() {
return Err(VerifierError::NothingToSign);
}
// Owner-sign the list of commit ids (threshold 0 => one owner share).
let signature_content = SignatureContent::V0(SignatureContentV0 {
commits: commits.iter().map(|h| h.id).collect(),
});
let signature_content_ser = serde_bare::to_vec(&signature_content).unwrap();
let sig_share = repo
.signer
.as_ref()
.unwrap()
.sign_with_owner(&signature_content_ser)?;
let sig = PublicKeySet::combine_signatures_with_threshold(0, [(0, &sig_share)])
.map_err(|_| NgError::IncompleteSignature)?;
let threshold_sig = ThresholdSignatureV0::Owners(sig);
let signature = Signature::V0(SignatureV0 {
content: signature_content,
threshold_sig,
certificate_ref: repo.certificate_ref.clone().unwrap(),
});
// Store the Signature object in the repo's block store.
let signature_object = Object::new(
ObjectContent::V0(ObjectContentV0::Signature(signature)),
None,
0,
&repo.store,
);
let sign_obj_blocks = signature_object.save(&repo.store)?;
// AsyncSignature commit: deps on the signed commits, acks current heads.
let signature_commit_body =
CommitBodyV0::AsyncSignature(AsyncSignature::V0(signature_object.reference().unwrap()));
let signature_commit = Commit::new_with_body_and_save(
self.user_privkey(),
self.user_id(),
branch_id,
QuorumType::IamTheSignature,
commits,
vec![],
branch.current_heads.clone(),
vec![],
vec![],
vec![],
vec![],
CommitBody::V0(signature_commit_body),
0,
&repo.store,
)?;
// Verify locally before emitting the event.
let store = Arc::clone(&repo.store);
self.verify_commit_(&signature_commit, &branch_id, &repo_id, store, true)
.await?;
self.new_event(&signature_commit, &sign_obj_blocks, repo_id, &store_repo)
.await?;
Ok(true)
}
/// Returns the set of commits covered by a signature commit:
/// - for a SyncSignature, the dep chain from its single dep down to (and
///   including) its single ack;
/// - for an AsyncSignature, its deps (the commits that were signed);
/// - for anything else, the empty set.
fn find_signed_past(
    commit: &Commit,
    store: &Store,
) -> Result<HashSet<ObjectRef>, VerifierError> {
    match commit.get_type().unwrap() {
        CommitType::SyncSignature => {
            // A well-formed SyncSignature has exactly one ack and one dep.
            let mut acks = commit.acks();
            if acks.len() != 1 {
                return Err(VerifierError::MalformedSyncSignatureAcks);
            }
            let deps = commit.deps();
            if deps.len() != 1 {
                return Err(VerifierError::MalformedSyncSignatureDeps);
            }
            // Walk the dep chain down to the ack (bodies not needed here).
            let chain =
                crate::commits::list_dep_chain_until(deps[0].clone(), &acks[0].id, &store, false)?;
            let mut past: HashSet<ObjectRef> = chain
                .into_iter()
                .map(|c| c.reference().unwrap())
                .collect();
            past.insert(acks.pop().unwrap());
            Ok(past)
        }
        CommitType::AsyncSignature => Ok(commit.deps().into_iter().collect()),
        _ => Ok(HashSet::new()),
    }
}
/// Reports, for each current head of the target's branch, a tuple of:
/// - the head commit id,
/// - an optional signature string of the form
///   "<past nuri>:<past nuri>:...:<signature nuri>" when the head is a
///   (Sync|Async)Signature commit (see `find_signed_past`),
/// - whether the signed commit is a Snapshot (only detected when the
///   branch has a single head whose AsyncSignature has a single ack).
fn signature_status(
&self,
target: &NuriTargetV0,
) -> Result<Vec<(ObjectId, Option<String>, bool)>, VerifierError> {
let (repo_id, branch_id, store_repo) = self.resolve_target(target)?; // TODO deal with targets that are commit heads
let repo = self.get_repo(&repo_id, &store_repo)?;
let branch = repo.branch(&branch_id)?;
let mut res = Vec::with_capacity(branch.current_heads.len());
let is_unique_head = branch.current_heads.len() == 1;
for head in branch.current_heads.iter() {
let cobj = Commit::load(head.clone(), &repo.store, true)?;
let commit_type = cobj.get_type().unwrap();
let mut is_snapshot = false;
let has_sig = match commit_type {
CommitType::SyncSignature => true,
CommitType::AsyncSignature => {
let mut past = cobj.acks();
if is_unique_head && past.len() == 1 {
// we check if the signed commit is a snapshot
let signed_commit = Commit::load(past.pop().unwrap(), &repo.store, true)?;
is_snapshot = match signed_commit.get_type().unwrap() {
CommitType::Snapshot => true,
_ => false,
};
}
true
}
_ => false,
};
// Assemble "<signed past, colon-joined>:<signature ref nuri>".
let sig = if has_sig {
Some(format!(
"{}:{}",
Verifier::find_signed_past(&cobj, &repo.store)?
.into_iter()
.map(|c| c.commit_nuri())
.collect::<Vec<String>>()
.join(":"),
NuriV0::signature_ref(&cobj.get_signature_reference().unwrap())
))
} else {
None
};
res.push((head.id, sig, is_snapshot));
}
Ok(res)
}
pub(crate) async fn process( pub(crate) async fn process(
&mut self, &mut self,
command: &AppRequestCommandV0, command: &AppRequestCommandV0,
@ -391,7 +684,7 @@ impl Verifier {
let repo = self.get_repo(&repo_id, &store_repo)?; let repo = self.get_repo(&repo_id, &store_repo)?;
let commit_info: CommitInfoJs = (&commit.as_info(repo)).into(); let commit_info: CommitInfoJs = (&commit.as_info(repo)).into();
let crdt = &repo.branch(&branch_id)?.crdt.clone(); let crdt: &BranchCrdt = &repo.branch(&branch_id)?.crdt.clone();
self.update_discrete( self.update_discrete(
patch, patch,
&crdt, &crdt,
@ -425,7 +718,6 @@ impl Verifier {
if !nuri.is_valid_for_sparql_update() { if !nuri.is_valid_for_sparql_update() {
return Err(NgError::InvalidNuri); return Err(NgError::InvalidNuri);
} }
return Ok(match self.history_for_nuri(&nuri.target) { return Ok(match self.history_for_nuri(&nuri.target) {
Err(e) => AppResponse::error(e.to_string()), Err(e) => AppResponse::error(e.to_string()),
Ok(history) => AppResponse::V0(AppResponseV0::History(AppHistory { Ok(history) => AppResponse::V0(AppResponseV0::History(AppHistory {
@ -434,6 +726,52 @@ impl Verifier {
})), })),
}); });
} }
AppFetchContentV0::SignatureStatus => {
if !nuri.is_valid_for_sparql_update() {
return Err(NgError::InvalidNuri);
}
return Ok(match self.signature_status(&nuri.target) {
Err(e) => AppResponse::error(e.to_string()),
Ok(status) => AppResponse::V0(AppResponseV0::SignatureStatus(
status
.into_iter()
.map(|(commitid, signature, is_snapshot)| {
(commitid.to_string(), signature, is_snapshot)
})
.collect(),
)),
});
}
AppFetchContentV0::SignedSnapshotRequest => {
if !nuri.is_valid_for_sparql_update() {
return Err(NgError::InvalidNuri);
}
return Ok(match self.signed_snapshot_request(&nuri.target).await {
Err(e) => AppResponse::error(e.to_string()),
Ok(immediate) => {
if immediate {
AppResponse::V0(AppResponseV0::True)
} else {
AppResponse::V0(AppResponseV0::False)
}
}
});
}
AppFetchContentV0::SignatureRequest => {
if !nuri.is_valid_for_sparql_update() {
return Err(NgError::InvalidNuri);
}
return Ok(match self.signature_request(&nuri.target).await {
Err(e) => AppResponse::error(e.to_string()),
Ok(immediate) => {
if immediate {
AppResponse::V0(AppResponseV0::True)
} else {
AppResponse::V0(AppResponseV0::False)
}
}
});
}
_ => unimplemented!(), _ => unimplemented!(),
}, },
AppRequestCommandV0::FilePut => match payload { AppRequestCommandV0::FilePut => match payload {

@ -80,6 +80,18 @@ impl UserStorage for RocksDbUserStorage {
RepoStorage::update_signer_cap(signer_cap, &self.user_storage) RepoStorage::update_signer_cap(signer_cap, &self.user_storage)
} }
/// Persists the repo's latest certificate reference via `RepoStorage`.
fn update_certificate(
&self,
repo_id: &RepoId,
certificate: &ObjectRef,
) -> Result<(), StorageError> {
RepoStorage::update_certificate(repo_id, certificate, &self.user_storage)
}
/// Opens the repo's storage record and reads its SignerCap
/// (read-only: the stored cap is left in place).
fn get_signer_cap(&self, repo_id: &RepoId) -> Result<SignerCap, StorageError> {
RepoStorage::open(repo_id, &self.user_storage)?.get_signer_cap()
}
fn update_branch_current_heads( fn update_branch_current_heads(
&self, &self,
_repo_id: &RepoId, _repo_id: &RepoId,

@ -230,6 +230,7 @@ pub struct VerifierConfig {
pub private_store_id: Option<RepoId>, pub private_store_id: Option<RepoId>,
pub public_store_id: Option<RepoId>, pub public_store_id: Option<RepoId>,
pub protected_store_id: Option<RepoId>, pub protected_store_id: Option<RepoId>,
pub locator: Locator,
} }
#[doc(hidden)] #[doc(hidden)]

@ -57,8 +57,9 @@ impl<'a> RepoStorage<'a> {
//const SIGNER_CAP_TOTAL: u8 = b't'; //const SIGNER_CAP_TOTAL: u8 = b't';
const USER_BRANCH: u8 = b'u'; const USER_BRANCH: u8 = b'u';
const WRITE_CAP_SECRET: u8 = b'w'; const WRITE_CAP_SECRET: u8 = b'w';
const CERTIFICATE: u8 = b'f';
const ALL_PROPERTIES: [u8; 14] = [ const ALL_PROPERTIES: [u8; 15] = [
Self::SIGNER_CAP, Self::SIGNER_CAP,
//Self::SIGNER_CAP_PARTIAL, //Self::SIGNER_CAP_PARTIAL,
Self::CHAT_BRANCH, Self::CHAT_BRANCH,
@ -75,6 +76,7 @@ impl<'a> RepoStorage<'a> {
//Self::SIGNER_CAP_TOTAL, //Self::SIGNER_CAP_TOTAL,
Self::USER_BRANCH, Self::USER_BRANCH,
Self::WRITE_CAP_SECRET, Self::WRITE_CAP_SECRET,
Self::CERTIFICATE,
]; ];
const PREFIX_BRANCHES: u8 = b'b'; const PREFIX_BRANCHES: u8 = b'b';
@ -145,7 +147,6 @@ impl<'a> RepoStorage<'a> {
storage: &'a dyn KCVStorage, storage: &'a dyn KCVStorage,
) -> Result<(), StorageError> { ) -> Result<(), StorageError> {
let repo_id = signer_cap.repo; let repo_id = signer_cap.repo;
let _ = Self::new(&repo_id, storage);
storage.write_transaction(&mut |tx| { storage.write_transaction(&mut |tx| {
let id_ser = to_vec(&repo_id)?; let id_ser = to_vec(&repo_id)?;
let value = to_vec(signer_cap)?; let value = to_vec(signer_cap)?;
@ -155,6 +156,36 @@ impl<'a> RepoStorage<'a> {
Ok(()) Ok(())
} }
/// Writes (or overwrites) the repo's certificate ObjectRef under the
/// CERTIFICATE property of its storage record, inside one transaction.
pub fn update_certificate(
    id: &RepoId,
    certificate: &ObjectRef,
    storage: &'a dyn KCVStorage,
) -> Result<(), StorageError> {
    storage.write_transaction(&mut |tx| {
        let key = to_vec(id)?;
        let val = to_vec(certificate)?;
        tx.put(Self::PREFIX, &key, Some(Self::CERTIFICATE), &val, &None)?;
        Ok(())
    })
}
/// Reads the SignerCap stored under this repo's SIGNER_CAP property and
/// deserializes it; fails with the underlying storage error if absent.
pub fn get_signer_cap(&self) -> Result<SignerCap, StorageError> {
    let key = to_vec(&self.id).unwrap();
    let ser = self
        .storage
        .get(Self::PREFIX, &key, Some(Self::SIGNER_CAP), &None)?;
    Ok(from_slice(&ser)?)
}
pub fn create( pub fn create(
id: &RepoId, id: &RepoId,
read_cap: &ReadCap, read_cap: &ReadCap,
@ -300,6 +331,7 @@ impl<'a> RepoStorage<'a> {
branches, branches,
opened_branches, opened_branches,
store, store,
certificate_ref: prop(Self::CERTIFICATE, &props).ok(),
}; };
Ok(repo) Ok(repo)
} }

@ -42,6 +42,14 @@ pub trait UserStorage: Send + Sync {
fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError>; fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError>;
/// Stores/overwrites the certificate ObjectRef associated with the repo.
fn update_certificate(
&self,
repo_id: &RepoId,
certificate: &ObjectRef,
) -> Result<(), StorageError>;
/// Retrieves the SignerCap previously stored for the repo.
fn get_signer_cap(&self, repo_id: &RepoId) -> Result<SignerCap, StorageError>;
fn branch_add_file( fn branch_add_file(
&self, &self,
commit_id: ObjectId, commit_id: ObjectId,
@ -77,6 +85,7 @@ pub trait UserStorage: Send + Sync {
pub(crate) struct InMemoryUserStorage { pub(crate) struct InMemoryUserStorage {
branch_files: RwLock<HashMap<BranchId, Vec<FileName>>>, branch_files: RwLock<HashMap<BranchId, Vec<FileName>>>,
branch_discrete_state: RwLock<HashMap<BranchId, Vec<u8>>>, branch_discrete_state: RwLock<HashMap<BranchId, Vec<u8>>>,
repo_signer_cap: RwLock<HashMap<RepoId, SignerCap>>,
} }
impl InMemoryUserStorage { impl InMemoryUserStorage {
@ -84,6 +93,7 @@ impl InMemoryUserStorage {
InMemoryUserStorage { InMemoryUserStorage {
branch_files: RwLock::new(HashMap::new()), branch_files: RwLock::new(HashMap::new()),
branch_discrete_state: RwLock::new(HashMap::new()), branch_discrete_state: RwLock::new(HashMap::new()),
repo_signer_cap: RwLock::new(HashMap::new()),
} }
} }
} }
@ -161,10 +171,25 @@ impl UserStorage for InMemoryUserStorage {
unimplemented!(); unimplemented!();
} }
fn update_signer_cap(&self, _signer_cap: &SignerCap) -> Result<(), StorageError> { fn update_certificate(
&self,
repo_id: &RepoId,
certificate: &ObjectRef,
) -> Result<(), StorageError> {
unimplemented!(); unimplemented!();
} }
/// Keeps the latest signer capability for the repo in the in-memory map.
fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError> {
    self.repo_signer_cap
        .write()
        .unwrap()
        .insert(signer_cap.repo, signer_cap.clone());
    Ok(())
}
/// Takes the SignerCap for the repo out of the in-memory map.
// NOTE(review): this *removes* the cap (take-once semantics), unlike the
// RocksDb implementation which reads without consuming — confirm the
// asymmetry is intentional; a second call for the same repo returns NotFound.
fn get_signer_cap(&self, repo_id: &RepoId) -> Result<SignerCap, StorageError> {
let mut lock = self.repo_signer_cap.write().unwrap();
Ok(lock.remove(repo_id).ok_or(StorageError::NotFound)?)
}
fn update_branch_current_heads( fn update_branch_current_heads(
&self, &self,
_repo_id: &RepoId, _repo_id: &RepoId,

@ -93,6 +93,7 @@ pub struct Verifier {
last_reservation: SystemTime, last_reservation: SystemTime,
stores: HashMap<OverlayId, Arc<Store>>, stores: HashMap<OverlayId, Arc<Store>>,
inner_to_outer: HashMap<OverlayId, OverlayId>, inner_to_outer: HashMap<OverlayId, OverlayId>,
pub(crate) outer: String,
pub(crate) repos: HashMap<RepoId, Repo>, pub(crate) repos: HashMap<RepoId, Repo>,
// TODO: deal with collided repo_ids. self.repos should be a HashMap<RepoId,Collision> enum Collision {Yes, No(Repo)} // TODO: deal with collided repo_ids. self.repos should be a HashMap<RepoId,Collision> enum Collision {Yes, No(Repo)}
// add a collided_repos: HashMap<(OverlayId, RepoId), Repo> // add a collided_repos: HashMap<(OverlayId, RepoId), Repo>
@ -104,6 +105,7 @@ pub struct Verifier {
in_memory_outbox: Vec<EventOutboxStorage>, in_memory_outbox: Vec<EventOutboxStorage>,
uploads: BTreeMap<u32, RandomAccessFile>, uploads: BTreeMap<u32, RandomAccessFile>,
branch_subscriptions: HashMap<BranchId, Sender<AppResponse>>, branch_subscriptions: HashMap<BranchId, Sender<AppResponse>>,
pub(crate) temporary_repo_certificates: HashMap<RepoId, ObjectRef>,
} }
impl fmt::Debug for Verifier { impl fmt::Debug for Verifier {
@ -119,6 +121,7 @@ impl fmt::Debug for Verifier {
struct EventOutboxStorage { struct EventOutboxStorage {
event: Event, event: Event,
overlay: OverlayId, overlay: OverlayId,
file_blocks: Vec<Block>,
} }
impl Verifier { impl Verifier {
@ -210,12 +213,16 @@ impl Verifier {
// check that the referenced object exists locally. // check that the referenced object exists locally.
repo.store.has(&file_ref.id)?; repo.store.has(&file_ref.id)?;
// we send all the blocks to the broker. // we send all the blocks to the broker.
let file = RandomAccessFile::open( let blocks = if let Ok(file) = RandomAccessFile::open(
file_ref.id.clone(), file_ref.id.clone(),
file_ref.key.clone(), file_ref.key.clone(),
Arc::clone(&repo.store), Arc::clone(&repo.store),
)?; ) {
let blocks = file.get_all_blocks_ids()?; file.get_all_blocks_ids()?
} else {
let obj = Object::load_ref(file_ref, &repo.store)?;
obj.block_ids()
};
let found = self.has_blocks(blocks, repo).await?; let found = self.has_blocks(blocks, repo).await?;
for block_id in found.missing() { for block_id in found.missing() {
let block = repo.store.get(block_id)?; let block = repo.store.get(block_id)?;
@ -240,7 +247,11 @@ impl Verifier {
} }
} }
fn branch_get_tab_info(repo: &Repo, branch: &BranchId) -> Result<AppTabInfo, NgError> { fn branch_get_tab_info(
repo: &Repo,
branch: &BranchId,
outer: String,
) -> Result<AppTabInfo, NgError> {
let branch_info = repo.branch(branch)?; let branch_info = repo.branch(branch)?;
let branch_tab_info = AppTabBranchInfo { let branch_tab_info = AppTabBranchInfo {
@ -268,7 +279,7 @@ impl Verifier {
repo: Some(repo.store.get_store_repo().clone()), repo: Some(repo.store.get_store_repo().clone()),
overlay: Some(format!("v:{}", repo.store.overlay_id.to_string())), overlay: Some(format!("v:{}", repo.store.overlay_id.to_string())),
store_type: Some(repo.store.get_store_repo().store_type_for_app()), store_type: Some(repo.store.get_store_repo().store_type_for_app()),
has_outer: None, //TODO has_outer: Some(outer), //TODO
inner: None, //TODO inner: None, //TODO
is_member: None, //TODO is_member: None, //TODO
readcap: None, //TODO readcap: None, //TODO
@ -312,7 +323,7 @@ impl Verifier {
.unwrap() .unwrap()
.branch_get_all_files(&branch_id)?; .branch_get_all_files(&branch_id)?;
let tab_info = Self::branch_get_tab_info(repo, &branch_id)?; let tab_info = Self::branch_get_tab_info(repo, &branch_id, self.outer.clone())?;
// let tab_info = self.user_storage.as_ref().unwrap().branch_get_tab_info( // let tab_info = self.user_storage.as_ref().unwrap().branch_get_tab_info(
// &branch_id, // &branch_id,
@ -365,6 +376,7 @@ impl Verifier {
let state = AppState { let state = AppState {
heads: branch.current_heads.iter().map(|h| h.id.clone()).collect(), heads: branch.current_heads.iter().map(|h| h.id.clone()).collect(),
head_keys: branch.current_heads.iter().map(|h| h.key.clone()).collect(),
graph: if results.is_empty() { graph: if results.is_empty() {
None None
} else { } else {
@ -412,7 +424,9 @@ impl Verifier {
private_store_id: None, private_store_id: None,
protected_store_id: None, protected_store_id: None,
public_store_id: None, public_store_id: None,
locator: Locator::empty(),
}, },
outer: "".to_string(),
user_id, user_id,
connected_broker: BrokerPeerId::None, connected_broker: BrokerPeerId::None,
graph_dataset: None, graph_dataset: None,
@ -429,6 +443,7 @@ impl Verifier {
inner_to_outer: HashMap::new(), inner_to_outer: HashMap::new(),
uploads: BTreeMap::new(), uploads: BTreeMap::new(),
branch_subscriptions: HashMap::new(), branch_subscriptions: HashMap::new(),
temporary_repo_certificates: HashMap::new(),
} }
} }
@ -761,6 +776,55 @@ impl Verifier {
.await .await
} }
pub(crate) async fn new_commits(
&mut self,
// commit_body, quorum_type, additional_blocks, deps, files
proto_commits: Vec<(
CommitBodyV0,
QuorumType,
&Vec<BlockId>,
Vec<ObjectRef>,
Vec<ObjectRef>,
)>,
repo_id: &RepoId,
branch_id: &BranchId,
store_repo: &StoreRepo,
) -> Result<(), NgError> {
let repo = self.get_repo(repo_id, &store_repo)?;
let branch = repo.branch(branch_id)?;
let mut acks = branch.current_heads.clone();
let mut proto_events: Vec<(Commit, Vec<Digest>)> = Vec::with_capacity(proto_commits.len());
let store = Arc::clone(&repo.store);
for (commit_body, quorum_type, additional_blocks, deps, files) in proto_commits.into_iter()
{
let commit = {
let commit = Commit::new_with_body_and_save(
self.user_privkey(),
self.user_id(),
*branch_id,
quorum_type,
deps,
vec![],
acks,
vec![],
files,
vec![],
vec![],
CommitBody::V0(commit_body),
0,
&store,
)?;
self.verify_commit_(&commit, branch_id, repo_id, Arc::clone(&store), true)
.await?;
commit
};
acks = vec![commit.reference().unwrap()];
proto_events.push((commit, additional_blocks.to_vec()));
}
self.new_events(proto_events, *repo_id, store_repo).await
}
pub(crate) async fn new_transaction_commit( pub(crate) async fn new_transaction_commit(
&mut self, &mut self,
commit_body: CommitBodyV0, commit_body: CommitBodyV0,
@ -820,6 +884,24 @@ impl Verifier {
.await .await
} }
/// Records the repo's newest certificate reference: updates the loaded
/// Repo when present, otherwise parks the ref in
/// `temporary_repo_certificates` until the repo is created; also persists
/// it (best-effort) when a persistent user storage is available.
pub(crate) fn update_repo_certificate(
    &mut self,
    repo_id: &RepoId,
    certificate_ref: &ObjectRef,
) {
    //TODO: verify the certificate with the previous one (chain), before changing it
    if let Some(repo) = self.repos.get_mut(repo_id) {
        repo.certificate_ref = Some(certificate_ref.clone());
    } else {
        // repo not loaded yet: keep the certificate aside for later
        self.temporary_repo_certificates
            .insert(*repo_id, certificate_ref.clone());
    }
    if let Some(user_storage) = self.user_storage_if_persistent() {
        let _ = user_storage.update_certificate(repo_id, certificate_ref);
    }
}
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) async fn new_events_with_repo( pub(crate) async fn new_events_with_repo(
&mut self, &mut self,
@ -964,7 +1046,7 @@ impl Verifier {
.ok_or(NgError::TopicNotFound)? .ok_or(NgError::TopicNotFound)?
.to_owned(); .to_owned();
self.update_branch_current_heads(&repo_id, &branch_id, past, commit_ref)?; self.update_branch_current_heads(&repo_id, &branch_id, past, commit_ref.clone())?;
if self.connected_broker.is_some() { if self.connected_broker.is_some() {
// send the event to the server already // send the event to the server already
@ -977,7 +1059,21 @@ impl Verifier {
match &self.config.config_type { match &self.config.config_type {
VerifierConfigType::JsSaveSession(js) => { VerifierConfigType::JsSaveSession(js) => {
//log_info!("========== SAVING EVENT {:03}", event.seq_num()); //log_info!("========== SAVING EVENT {:03}", event.seq_num());
let e = EventOutboxStorage { event, overlay };
let mut file_blocks = Vec::new();
if !event.file_ids().is_empty() {
let store = &self.repos.get(&repo_id).unwrap().store;
let commit = Commit::load(commit_ref, store, false)?;
for file_ref in commit.files() {
let obj = Object::load_ref(&file_ref, store)?;
file_blocks.append(&mut obj.into_blocks());
}
}
let e = EventOutboxStorage {
event,
overlay,
file_blocks,
};
(js.outbox_write_function)( (js.outbox_write_function)(
self.peer_id, self.peer_id,
@ -995,7 +1091,11 @@ impl Verifier {
.create(true) .create(true)
.open(path) .open(path)
.map_err(|_| NgError::IoError)?; .map_err(|_| NgError::IoError)?;
let e = EventOutboxStorage { event, overlay }; let e = EventOutboxStorage {
event,
overlay,
file_blocks: vec![],
};
let event_ser = serde_bare::to_vec(&e)?; let event_ser = serde_bare::to_vec(&e)?;
//log_info!("EVENT size={}", event_ser.len()); //log_info!("EVENT size={}", event_ser.len());
//log_info!("EVENT {:?}", event_ser); //log_info!("EVENT {:?}", event_ser);
@ -1007,8 +1107,11 @@ impl Verifier {
file.sync_data().map_err(|_| NgError::IoError)?; file.sync_data().map_err(|_| NgError::IoError)?;
} }
VerifierConfigType::Memory => { VerifierConfigType::Memory => {
self.in_memory_outbox self.in_memory_outbox.push(EventOutboxStorage {
.push(EventOutboxStorage { event, overlay }); event,
overlay,
file_blocks: vec![],
});
} }
_ => unimplemented!(), _ => unimplemented!(),
} }
@ -1406,7 +1509,7 @@ impl Verifier {
.await .await
} }
async fn verify_commit_( pub(crate) async fn verify_commit_(
&mut self, &mut self,
commit: &Commit, commit: &Commit,
branch_id: &BranchId, branch_id: &BranchId,
@ -1435,6 +1538,10 @@ impl Verifier {
CommitBodyV0::AddSignerCap(a) => a.verify(commit, self, branch_id, repo_id, store), CommitBodyV0::AddSignerCap(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::AddFile(a) => a.verify(commit, self, branch_id, repo_id, store), CommitBodyV0::AddFile(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::AddRepo(a) => a.verify(commit, self, branch_id, repo_id, store), CommitBodyV0::AddRepo(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::Snapshot(a) => a.verify(commit, self, branch_id, repo_id, store),
CommitBodyV0::AsyncSignature(a) => {
a.verify(commit, self, branch_id, repo_id, store)
}
CommitBodyV0::AsyncTransaction(a) => { CommitBodyV0::AsyncTransaction(a) => {
Box::pin(self.verify_async_transaction(a, commit, branch_id, repo_id, store)) Box::pin(self.verify_async_transaction(a, commit, branch_id, repo_id, store))
} }
@ -1481,7 +1588,7 @@ impl Verifier {
} }
} }
fn user_storage(&self) -> Option<Arc<Box<dyn UserStorage>>> { pub(crate) fn user_storage(&self) -> Option<Arc<Box<dyn UserStorage>>> {
if let Some(us) = self.user_storage.as_ref() { if let Some(us) = self.user_storage.as_ref() {
Some(Arc::clone(us)) Some(Arc::clone(us))
} else { } else {
@ -1502,14 +1609,14 @@ impl Verifier {
let topic_id = branch_info.topic.clone().unwrap(); let topic_id = branch_info.topic.clone().unwrap();
let repo = self.get_repo_mut(repo_id, store_repo)?; let repo = self.get_repo_mut(repo_id, store_repo)?;
let res = repo.branches.insert(branch_info.id.clone(), branch_info); let res = repo.branches.insert(branch_info.id.clone(), branch_info);
assert!(res.is_none()); //assert!(res.is_none());
let overlay_id: OverlayId = repo.store.inner_overlay(); let overlay_id: OverlayId = repo.store.inner_overlay();
let repo_id = repo_id.clone(); let repo_id = repo_id.clone();
let res = self let res = self
.topics .topics
.insert((overlay_id, topic_id), (repo_id, branch_id)); .insert((overlay_id, topic_id), (repo_id, branch_id));
assert_eq!(res, None); //assert_eq!(res, None);
Ok(()) Ok(())
} }
@ -1527,10 +1634,21 @@ impl Verifier {
Ok(()) Ok(())
} }
pub(crate) fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), VerifierError> { pub(crate) fn update_signer_cap(
if let Some(user_storage) = self.user_storage_if_persistent() { &mut self,
signer_cap: &SignerCap,
) -> Result<(), VerifierError> {
let storage = match self.repos.get_mut(&signer_cap.repo) {
Some(repo) => {
repo.signer = Some(signer_cap.clone());
self.user_storage_if_persistent()
}
None => self.user_storage(),
};
if let Some(user_storage) = storage {
user_storage.update_signer_cap(signer_cap)?; user_storage.update_signer_cap(signer_cap)?;
} }
Ok(()) Ok(())
} }
@ -1870,7 +1988,6 @@ impl Verifier {
} }
log_debug!("loaded from read_cap {}", repo_id); log_debug!("loaded from read_cap {}", repo_id);
// TODO: deal with AddSignerCap that are saved on rocksdb for now, but do not make it to the Verifier.repos
return Ok((repo_id.clone(), store_branch)); return Ok((repo_id.clone(), store_branch));
} }
@ -2213,7 +2330,7 @@ impl Verifier {
log_info!("SENDING {} EVENTS FROM OUTBOX", events_to_replay.len()); log_info!("SENDING {} EVENTS FROM OUTBOX", events_to_replay.len());
for e in events_to_replay { for e in events_to_replay {
let files = e.event.file_ids(); let files = e.event.file_ids();
if !files.is_empty() { if !files.is_empty() || !need_replay {
let (repo_id, branch_id) = self let (repo_id, branch_id) = self
.topics .topics
.get(&(e.overlay, *e.event.topic_id())) .get(&(e.overlay, *e.event.topic_id()))
@ -2227,25 +2344,34 @@ impl Verifier {
let branch = repo.branch(&branch_id)?; let branch = repo.branch(&branch_id)?;
let commit = e.event.open_without_body( let commit = e.event.open_with_body(
&repo.store, &repo.store,
&repo_id, &repo_id,
&branch_id, &branch_id,
&branch.read_cap.as_ref().unwrap().key, &branch.read_cap.as_ref().unwrap().key,
!need_replay,
)?; )?;
let store_repo = repo.store.get_store_repo().clone(); let store_repo = repo.store.get_store_repo().clone();
let store = Arc::clone(&repo.store);
for block in e.file_blocks {
_ = store.put(&block);
}
self.open_branch_(&repo_id, &branch_id, true, &broker, &user, &remote, false) self.open_branch_(&repo_id, &branch_id, true, &broker, &user, &remote, false)
.await?; .await?;
for file in commit.files() { for file in commit.files() {
log_debug!("PUT FILE {:?}", file.id); //log_debug!("PUT FILE {:?}", file.id);
self.put_all_blocks_of_file(&file, &repo_id, &store_repo) self.put_all_blocks_of_file(&file, &repo_id, &store_repo)
.await?; .await?;
} }
}
if !need_replay {
self.verify_commit_(&commit, &branch_id, &repo_id, store, false)
.await?;
}
}
self.send_event(e.event, &broker, &user, &remote, e.overlay) self.send_event(e.event, &broker, &user, &remote, e.overlay)
.await?; .await?;
} }
@ -2350,6 +2476,7 @@ impl Verifier {
let peer_id = config.peer_priv_key.to_pub(); let peer_id = config.peer_priv_key.to_pub();
let mut verif = Verifier { let mut verif = Verifier {
user_id: config.user_priv_key.to_pub(), user_id: config.user_priv_key.to_pub(),
outer: NuriV0::locator(&config.locator),
config, config,
connected_broker: BrokerPeerId::None, connected_broker: BrokerPeerId::None,
graph_dataset: graph, graph_dataset: graph,
@ -2366,6 +2493,7 @@ impl Verifier {
inner_to_outer: HashMap::new(), inner_to_outer: HashMap::new(),
uploads: BTreeMap::new(), uploads: BTreeMap::new(),
branch_subscriptions: HashMap::new(), branch_subscriptions: HashMap::new(),
temporary_repo_certificates: HashMap::new(),
}; };
// this is important as it will load the last seq from storage // this is important as it will load the last seq from storage
if verif.config.config_type.should_load_last_seq_num() { if verif.config.config_type.should_load_last_seq_num() {
@ -2529,7 +2657,7 @@ impl Verifier {
store_repo: &StoreRepo, store_repo: &StoreRepo,
branch_crdt: BranchCrdt, branch_crdt: BranchCrdt,
) -> Result<RepoId, NgError> { ) -> Result<RepoId, NgError> {
let (repo_id, proto_events) = { let (repo_id, proto_events, add_signer_cap_commit, private_store_repo) = {
let store = self.get_store_or_load(store_repo); let store = self.get_store_or_load(store_repo);
let repo_write_cap_secret = SymKey::random(); let repo_write_cap_secret = SymKey::random();
let (repo, proto_events) = store.create_repo_default( let (repo, proto_events) = store.create_repo_default(
@ -2539,9 +2667,50 @@ impl Verifier {
branch_crdt, branch_crdt,
)?; )?;
self.populate_topics(&repo); self.populate_topics(&repo);
// send AddSignerCap to User branch of private store
let add_signer_cap_commit_body = CommitBody::V0(CommitBodyV0::AddSignerCap(
AddSignerCap::V0(AddSignerCapV0 {
cap: repo.signer.as_ref().unwrap().clone(),
metadata: vec![],
}),
));
let (add_signer_cap_commit, private_store_repo) = {
// find user_branch of private repo
let private_repo = self
.repos
.get(self.private_store_id())
.ok_or(NgError::StoreNotFound)?;
let user_branch = private_repo.user_branch().ok_or(NgError::BranchNotFound)?;
(
Commit::new_with_body_acks_deps_and_save(
creator_priv_key,
creator,
user_branch.id,
QuorumType::NoSigning,
vec![],
user_branch.current_heads.clone(),
add_signer_cap_commit_body,
&private_repo.store,
)?,
private_repo.store.get_store_repo().clone(),
)
};
let repo_ref = self.add_repo_and_save(repo); let repo_ref = self.add_repo_and_save(repo);
(repo_ref.id, proto_events) (
repo_ref.id,
proto_events,
add_signer_cap_commit,
private_store_repo,
)
}; };
self.new_events(
vec![(add_signer_cap_commit, vec![])],
private_store_repo.repo_id().clone(),
&private_store_repo,
)
.await?;
self.new_events(proto_events, repo_id, store_repo).await?; self.new_events(proto_events, repo_id, store_repo).await?;
//let repo_ref = self.add_repo_and_save(repo); //let repo_ref = self.add_repo_and_save(repo);

@ -517,6 +517,14 @@ impl SensitiveWallet {
}, },
} }
} }
pub fn broker(&self, id: DirectPeerId) -> Result<Vec<BrokerInfoV0>, NgError> {
match self {
Self::V0(v0) => match v0.brokers.get(&id.to_string()) {
Some(broker_info) => Ok(broker_info.to_vec()),
None => Err(NgError::BrokerNotFound),
},
}
}
pub fn set_client(&mut self, client: ClientV0) { pub fn set_client(&mut self, client: ClientV0) {
match self { match self {
Self::V0(v0) => v0.client = Some(client), Self::V0(v0) => v0.client = Some(client),
@ -1119,6 +1127,22 @@ impl BrokerInfoV0 {
Self::ServerV0(s) => s.peer_id, Self::ServerV0(s) => s.peer_id,
} }
} }
pub fn into_locator(&self) -> Locator {
match self {
Self::CoreV0(_) => panic!("BrokerCoreV0 cannot be made a Locator"),
Self::ServerV0(s) => s.clone().into(),
}
}
pub fn vec_into_locator(list: Vec<BrokerInfoV0>) -> Locator {
Locator::V0(
list.into_iter()
.filter_map(|info| match info {
Self::CoreV0(_) => None,
Self::ServerV0(bs) => Some(BrokerServer::V0(bs)),
})
.collect(),
)
}
} }
/// ReducedSensitiveWallet block Version 0 /// ReducedSensitiveWallet block Version 0

Loading…
Cancel
Save