From 6755978c03f2cb1f3c66d5ae2bb6b245e1f1307c Mon Sep 17 00:00:00 2001
From: Niko PLP
Date: Mon, 15 Sep 2025 01:18:43 +0300
Subject: [PATCH] version 0.1.1

---
 .gitignore | 17 +
 .prettierrc.json | 14 +
 CHANGELOG.md | 199 +
 Cargo.lock | 4918 ++++
 Cargo.toml | 44 +
 DEV.md | 240 +
 LICENSE-APACHE2 | 176 +
 LICENSE-MIT | 22 +
 README.md | 71 +
 RELEASE-NOTE.md | 40 +
 nextgraph/.gitignore | 2 +
 nextgraph/.static/header.png | Bin 0 -> 139433 bytes
 nextgraph/Cargo.toml | 55 +
 nextgraph/README.md | 83 +
 nextgraph/examples/README.md | 17 +
 nextgraph/examples/in_memory.md | 13 +
 nextgraph/examples/in_memory.rs | 178 +
 nextgraph/examples/open.md | 17 +
 nextgraph/examples/open.rs | 76 +
 nextgraph/examples/persistent.md | 13 +
 nextgraph/examples/persistent.rs | 161 +
 nextgraph/examples/sparql_update.rs | 103 +
 .../examples/wallet-security-image-demo.png | Bin 0 -> 21006 bytes
 .../examples/wallet-security-image-white.png | Bin 0 -> 437 bytes
 nextgraph/src/lib.rs | 137 +
 nextgraph/src/local_broker.rs | 3209 +++
 nextgraph/src/local_broker_dev_env.rs | 1 +
 ng-broker/Cargo.toml | 45 +
 ng-broker/README.md | 56 +
 ng-broker/build.rs | 5 +
 ng-broker/src/actors/mod.rs | 1 +
 ng-broker/src/interfaces.rs | 110 +
 ng-broker/src/lib.rs | 15 +
 ng-broker/src/public/favicon.ico | Bin 0 -> 36098 bytes
 ng-broker/src/rocksdb_server_storage.rs | 776 +++
 ng-broker/src/server_broker.rs | 905 +++
 ng-broker/src/server_storage/admin/account.rs | 355 +
 .../src/server_storage/admin/invitation.rs | 184 +
 ng-broker/src/server_storage/admin/mod.rs | 5 +
 ng-broker/src/server_storage/admin/wallet.rs | 123 +
 ng-broker/src/server_storage/core/account.rs | 95 +
 ng-broker/src/server_storage/core/commit.rs | 158 +
 ng-broker/src/server_storage/core/inbox.rs | 120 +
 ng-broker/src/server_storage/core/mod.rs | 20 +
 ng-broker/src/server_storage/core/overlay.rs | 142 +
 ng-broker/src/server_storage/core/peer.rs | 187 +
 ng-broker/src/server_storage/core/repo.rs | 123 +
 ng-broker/src/server_storage/core/topic.rs | 182 +
 ng-broker/src/server_storage/mod.rs | 3 +
 ng-broker/src/server_ws.rs | 950 +++
 ng-broker/src/types.rs | 35 +
 ng-broker/src/utils.rs | 31 +
 ng-client-ws/Cargo.toml | 38 +
 ng-client-ws/README.md | 56 +
 ng-client-ws/src/lib.rs | 13 +
 ng-client-ws/src/remote_ws.rs | 394 ++
 ng-client-ws/src/remote_ws_wasm.rs | 216 +
 ng-net/Cargo.toml | 54 +
 ng-net/README.md | 56 +
 ng-net/src/actor.rs | 238 +
 ng-net/src/actors/admin/add_invitation.rs | 145 +
 ng-net/src/actors/admin/add_user.rs | 121 +
 ng-net/src/actors/admin/create_user.rs | 111 +
 ng-net/src/actors/admin/del_user.rs | 94 +
 ng-net/src/actors/admin/list_invitations.rs | 135 +
 ng-net/src/actors/admin/list_users.rs | 95 +
 ng-net/src/actors/admin/mod.rs | 17 +
 ng-net/src/actors/app/mod.rs | 3 +
 ng-net/src/actors/app/request.rs | 134 +
 ng-net/src/actors/app/session.rs | 197 +
 ng-net/src/actors/client/blocks_exist.rs | 104 +
 ng-net/src/actors/client/blocks_get.rs | 132 +
 ng-net/src/actors/client/blocks_put.rs | 81 +
 ng-net/src/actors/client/client_event.rs | 94 +
 ng-net/src/actors/client/commit_get.rs | 125 +
 ng-net/src/actors/client/event.rs | 121 +
 ng-net/src/actors/client/inbox_post.rs | 74 +
 ng-net/src/actors/client/inbox_register.rs | 91 +
 ng-net/src/actors/client/mod.rs | 25 +
 ng-net/src/actors/client/pin_repo.rs | 230 +
 ng-net/src/actors/client/repo_pin_status.rs | 96 +
 ng-net/src/actors/client/topic_sub.rs | 139 +
 ng-net/src/actors/client/topic_sync_req.rs | 151 +
 ng-net/src/actors/client/wallet_put_export.rs | 89 +
 ng-net/src/actors/connecting.rs | 43 +
 ng-net/src/actors/ext/get.rs | 103 +
 ng-net/src/actors/ext/mod.rs | 3 +
 ng-net/src/actors/ext/wallet_get_export.rs | 109 +
 ng-net/src/actors/mod.rs | 25 +
 ng-net/src/actors/noise.rs | 69 +
 ng-net/src/actors/probe.rs | 73 +
 ng-net/src/actors/start.rs | 326 +
 ng-net/src/app_protocol.rs | 1311 ++++
 ng-net/src/broker.rs | 1319 ++++
 ng-net/src/bsps.rs | 53 +
 ng-net/src/connection.rs | 1621 +++++
 ng-net/src/lib.rs | 54 +
 ng-net/src/server_broker.rs | 157 +
 ng-net/src/tests/file.rs | 1435 ++++
 ng-net/src/tests/mod.rs | 2 +
 ng-net/src/types.rs | 5294 +++++++++++++++
 ng-net/src/utils.rs | 534 ++
 ng-oxigraph/Cargo.toml | 249 +
 ng-oxigraph/LICENSE-APACHE | 201 +
 ng-oxigraph/LICENSE-MIT | 26 +
 ng-oxigraph/README.md | 41 +
 ng-oxigraph/build.rs | 5 +
 ng-oxigraph/src/lib.rs | 22 +
 ng-oxigraph/src/oxigraph/io/format.rs | 301 +
 ng-oxigraph/src/oxigraph/io/mod.rs | 39 +
 ng-oxigraph/src/oxigraph/io/read.rs | 199 +
 ng-oxigraph/src/oxigraph/io/write.rs | 185 +
 ng-oxigraph/src/oxigraph/mod.rs | 18 +
 ng-oxigraph/src/oxigraph/model.rs | 22 +
 ng-oxigraph/src/oxigraph/sparql/algebra.rs | 328 +
 ng-oxigraph/src/oxigraph/sparql/dataset.rs | 274 +
 ng-oxigraph/src/oxigraph/sparql/error.rs | 87 +
 ng-oxigraph/src/oxigraph/sparql/eval.rs | 5871 +++++++++++++++++
 ng-oxigraph/src/oxigraph/sparql/http/dummy.rs | 34 +
 ng-oxigraph/src/oxigraph/sparql/http/mod.rs | 9 +
 .../src/oxigraph/sparql/http/simple.rs | 90 +
 ng-oxigraph/src/oxigraph/sparql/mod.rs | 354 +
 ng-oxigraph/src/oxigraph/sparql/model.rs | 372 ++
 ng-oxigraph/src/oxigraph/sparql/results.rs | 44 +
 ng-oxigraph/src/oxigraph/sparql/service.rs | 124 +
 ng-oxigraph/src/oxigraph/sparql/update.rs | 612 ++
 .../src/oxigraph/storage/backend/fallback.rs | 420 ++
 .../src/oxigraph/storage/backend/mod.rs | 12 +
 .../oxigraph/storage/backend/oxi_rocksdb.rs | 1567 +++++
 .../src/oxigraph/storage/binary_encoder.rs | 787 +++
 ng-oxigraph/src/oxigraph/storage/error.rs | 139 +
 ng-oxigraph/src/oxigraph/storage/mod.rs | 2912 ++++
 .../src/oxigraph/storage/numeric_encoder.rs | 1031 +++
 .../src/oxigraph/storage/small_string.rs | 177 +
 ng-oxigraph/src/oxigraph/store.rs | 2328 +++++++
 ng-oxigraph/src/oxrdf/README.md | 51 +
 ng-oxigraph/src/oxrdf/blank_node.rs | 403 ++
 ng-oxigraph/src/oxrdf/dataset.rs | 1641 +++++
 ng-oxigraph/src/oxrdf/graph.rs | 284 +
 ng-oxigraph/src/oxrdf/interning.rs | 535 ++
 ng-oxigraph/src/oxrdf/literal.rs | 669 ++
 ng-oxigraph/src/oxrdf/mod.rs | 24 +
 ng-oxigraph/src/oxrdf/named_node.rs | 251 +
 ng-oxigraph/src/oxrdf/parser.rs | 468 ++
 ng-oxigraph/src/oxrdf/triple.rs | 1368 ++++
 ng-oxigraph/src/oxrdf/variable.rs | 216 +
 ng-oxigraph/src/oxrdf/vocab.rs | 242 +
 ng-oxigraph/src/oxrdfio/README.md | 67 +
 ng-oxigraph/src/oxrdfio/error.rs | 124 +
 ng-oxigraph/src/oxrdfio/format.rs | 216 +
 ng-oxigraph/src/oxrdfio/mod.rs | 9 +
 ng-oxigraph/src/oxrdfio/parser.rs | 795 +++
 ng-oxigraph/src/oxrdfio/serializer.rs | 412 ++
 ng-oxigraph/src/oxrdfxml/README.md | 56 +
 ng-oxigraph/src/oxrdfxml/error.rs | 89 +
 ng-oxigraph/src/oxrdfxml/mod.rs | 8 +
 ng-oxigraph/src/oxrdfxml/parser.rs | 1237 ++++
 ng-oxigraph/src/oxrdfxml/serializer.rs | 461 ++
 ng-oxigraph/src/oxrdfxml/utils.rs | 26 +
 ng-oxigraph/src/oxsdatatypes/README.md | 65 +
 ng-oxigraph/src/oxsdatatypes/boolean.rs | 134 +
 ng-oxigraph/src/oxsdatatypes/date_time.rs | 3187 +++
 ng-oxigraph/src/oxsdatatypes/decimal.rs | 1099 +++
 ng-oxigraph/src/oxsdatatypes/double.rs | 326 +
 ng-oxigraph/src/oxsdatatypes/duration.rs | 1249 ++++
 ng-oxigraph/src/oxsdatatypes/float.rs | 310 +
 ng-oxigraph/src/oxsdatatypes/integer.rs | 400 ++
 ng-oxigraph/src/oxsdatatypes/mod.rs | 21 +
 ng-oxigraph/src/oxttl/README.md | 54 +
 ng-oxigraph/src/oxttl/lexer.rs | 977 +++
 ng-oxigraph/src/oxttl/line_formats.rs | 314 +
 ng-oxigraph/src/oxttl/mod.rs | 19 +
 ng-oxigraph/src/oxttl/n3.rs | 1326 ++++
 ng-oxigraph/src/oxttl/nquads.rs | 564 ++
 ng-oxigraph/src/oxttl/ntriples.rs | 580 ++
 ng-oxigraph/src/oxttl/terse.rs | 1072 +++
 ng-oxigraph/src/oxttl/toolkit/error.rs | 97 +
 ng-oxigraph/src/oxttl/toolkit/lexer.rs | 432 ++
 ng-oxigraph/src/oxttl/toolkit/mod.rs | 13 +
 ng-oxigraph/src/oxttl/toolkit/parser.rs | 183 +
 ng-oxigraph/src/oxttl/trig.rs | 1252 ++++
 ng-oxigraph/src/oxttl/turtle.rs | 878 +++
 ng-oxigraph/src/sparesults/README.md | 72 +
 ng-oxigraph/src/sparesults/csv.rs | 948 +++
 ng-oxigraph/src/sparesults/error.rs | 157 +
 ng-oxigraph/src/sparesults/format.rs | 176 +
 ng-oxigraph/src/sparesults/json.rs | 1101 ++++
 ng-oxigraph/src/sparesults/mod.rs | 16 +
 ng-oxigraph/src/sparesults/parser.rs | 460 +
 ng-oxigraph/src/sparesults/serializer.rs | 427 ++
 ng-oxigraph/src/sparesults/solution.rs | 340 +
 ng-oxigraph/src/sparesults/xml.rs | 833 +++
 ng-oxigraph/src/spargebra/README.md | 46 +
 ng-oxigraph/src/spargebra/algebra.rs | 1419 ++++
 ng-oxigraph/src/spargebra/mod.rs | 9 +
 ng-oxigraph/src/spargebra/parser.rs | 2086 ++++++
 ng-oxigraph/src/spargebra/query.rs | 300 +
 ng-oxigraph/src/spargebra/term.rs | 1028 +++
 ng-oxigraph/src/spargebra/update.rs | 344 +
 ng-oxigraph/src/sparopt/README.md | 33 +
 ng-oxigraph/src/sparopt/algebra.rs | 1662 +++++
 ng-oxigraph/src/sparopt/mod.rs | 5 +
 ng-oxigraph/src/sparopt/optimizer.rs | 1082 +++
 ng-oxigraph/src/sparopt/type_inference.rs | 462 ++
 ng-oxigraph/tests/rocksdb_bc_data/000003.log | Bin 0 -> 8399 bytes
 ng-oxigraph/tests/rocksdb_bc_data/CURRENT | 1 +
 ng-oxigraph/tests/rocksdb_bc_data/IDENTITY | 1 +
 ng-oxigraph/tests/rocksdb_bc_data/LOCK | 0
 .../tests/rocksdb_bc_data/MANIFEST-000004 | Bin 0 -> 559 bytes
 .../tests/rocksdb_bc_data/OPTIONS-000026 | 964 +++
 ng-oxigraph/tests/store.rs | 542 ++
 ng-repo/Cargo.toml | 61 +
 ng-repo/README.md | 56 +
 ng-repo/src/block.rs | 285 +
 ng-repo/src/block_storage.rs | 179 +
 ng-repo/src/branch.rs | 612 +
 ng-repo/src/commit.rs | 1960 ++++++
 ng-repo/src/errors.rs | 602 +
 ng-repo/src/event.rs | 319 +
 ng-repo/src/file.rs | 1607 +++++
 ng-repo/src/kcv_storage.rs | 1128 ++++
 ng-repo/src/lib.rs | 199 +
 ng-repo/src/object.rs | 1782 +++++
 ng-repo/src/os_info.rs | 65 +
 ng-repo/src/repo.rs | 686 ++
 ng-repo/src/store.rs | 763 +++
 ng-repo/src/types.rs | 3148 +++++++++
 ng-repo/src/utils.rs | 298 +
 ng-repo/tests/test.jpg | Bin 0 -> 29454 bytes
 ng-storage-rocksdb/Cargo.toml | 23 +
 ng-storage-rocksdb/README.md | 56 +
 ng-storage-rocksdb/build.rs | 5 +
 ng-storage-rocksdb/src/block_storage.rs | 167 +
 ng-storage-rocksdb/src/kcv_storage.rs | 887 +++
 ng-storage-rocksdb/src/lib.rs | 5 +
 ng-verifier/Cargo.toml | 54 +
 ng-verifier/README.md | 58 +
 ng-verifier/build.rs | 5 +
 ng-verifier/src/commits/mod.rs | 728 ++
 ng-verifier/src/commits/snapshot.rs | 121 +
 ng-verifier/src/commits/transaction.rs | 831 +++
 ng-verifier/src/inbox_processor.rs | 681 ++
 ng-verifier/src/lib.rs | 91 +
 ng-verifier/src/request_processor.rs | 1359 ++++
 ng-verifier/src/rocksdb_user_storage.rs | 198 +
 ng-verifier/src/site.rs | 333 +
 ng-verifier/src/types.rs | 356 +
 ng-verifier/src/user_storage/branch.rs | 350 +
 ng-verifier/src/user_storage/mod.rs | 18 +
 ng-verifier/src/user_storage/repo.rs | 499 ++
 ng-verifier/src/user_storage/storage.rs | 218 +
 ng-verifier/src/verifier.rs | 3051 +++++++++
 ng-wallet/.gitignore | 3 +
 ng-wallet/Cargo.toml | 45 +
 ng-wallet/README.md | 56 +
 ng-wallet/src/bip39.rs | 241 +
 ng-wallet/src/emojis.rs | 1302 ++++
 ng-wallet/src/lib.rs | 911 +++
 ng-wallet/src/permissions.rs | 232 +
 ng-wallet/src/types.rs | 1481 +++++
 .../generated_security_image.jpg.compare | Bin 0 -> 29484 bytes
 ng-wallet/tests/valid_security_image.jpg | Bin 0 -> 29454 bytes
 package.json | 15 +
 pnpm-workspace.yaml | 8 +
 sdk/ng-sdk-js/.gitignore | 3 +
 sdk/ng-sdk-js/Cargo.toml | 61 +
 sdk/ng-sdk-js/DEV.md | 132 +
 sdk/ng-sdk-js/LICENSE-APACHE2 | 16 +
 sdk/ng-sdk-js/LICENSE-MIT | 22 +
 sdk/ng-sdk-js/README.md | 160 +
 sdk/ng-sdk-js/index.html | 32 +
 sdk/ng-sdk-js/js/bowser.js | 2233 +++++++
 sdk/ng-sdk-js/js/browser.js | 136 +
 sdk/ng-sdk-js/js/node.js | 207 +
 sdk/ng-sdk-js/prepare-node.js | 24 +
 sdk/ng-sdk-js/prepare-web.js | 11 +
 sdk/ng-sdk-js/src/lib.rs | 2088 ++++++
 sdk/ng-sdk-js/src/model.rs | 825 +++
 278 files changed, 122318 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 .prettierrc.json
 create mode 100644 CHANGELOG.md
 create mode 100644 Cargo.lock
 create mode 100644 Cargo.toml
 create mode 100644 DEV.md
 create mode 100644 LICENSE-APACHE2
 create mode 100644 LICENSE-MIT
 create mode 100644 README.md
 create mode 100644 RELEASE-NOTE.md
 create mode 100644 nextgraph/.gitignore
 create mode 100644 nextgraph/.static/header.png
 create mode 100644 nextgraph/Cargo.toml
 create mode 100644 nextgraph/README.md
 create mode 100644 nextgraph/examples/README.md
 create mode 100644 nextgraph/examples/in_memory.md
 create mode 100644 nextgraph/examples/in_memory.rs
 create mode 100644 nextgraph/examples/open.md
 create mode 100644 nextgraph/examples/open.rs
 create mode 100644 nextgraph/examples/persistent.md
 create mode 100644 nextgraph/examples/persistent.rs
 create mode 100644 nextgraph/examples/sparql_update.rs
 create mode 100644 nextgraph/examples/wallet-security-image-demo.png
 create mode 100644 nextgraph/examples/wallet-security-image-white.png
 create mode 100644 nextgraph/src/lib.rs
 create mode 100644 nextgraph/src/local_broker.rs
 create mode 100644 nextgraph/src/local_broker_dev_env.rs
 create mode 100644 ng-broker/Cargo.toml
 create mode 100644 ng-broker/README.md
 create mode 100644 ng-broker/build.rs
 create mode 100644 ng-broker/src/actors/mod.rs
 create mode 100644 ng-broker/src/interfaces.rs
 create mode 100644 ng-broker/src/lib.rs
 create mode 100644 ng-broker/src/public/favicon.ico
 create mode 100644 ng-broker/src/rocksdb_server_storage.rs
 create mode 100644 ng-broker/src/server_broker.rs
 create mode 100644 ng-broker/src/server_storage/admin/account.rs
 create mode 100644 ng-broker/src/server_storage/admin/invitation.rs
 create mode 100644 ng-broker/src/server_storage/admin/mod.rs
 create mode 100644 ng-broker/src/server_storage/admin/wallet.rs
 create mode 100644 ng-broker/src/server_storage/core/account.rs
 create mode 100644 ng-broker/src/server_storage/core/commit.rs
 create mode 100644 ng-broker/src/server_storage/core/inbox.rs
 create mode 100644 ng-broker/src/server_storage/core/mod.rs
 create mode 100644 ng-broker/src/server_storage/core/overlay.rs
 create mode 100644 ng-broker/src/server_storage/core/peer.rs
 create mode 100644 ng-broker/src/server_storage/core/repo.rs
 create mode 100644 ng-broker/src/server_storage/core/topic.rs
 create mode 100644 ng-broker/src/server_storage/mod.rs
 create mode 100644 ng-broker/src/server_ws.rs
 create mode 100644 ng-broker/src/types.rs
 create mode 100644 ng-broker/src/utils.rs
 create mode 100644 ng-client-ws/Cargo.toml
 create mode 100644 ng-client-ws/README.md
 create mode 100644 ng-client-ws/src/lib.rs
 create mode 100644 ng-client-ws/src/remote_ws.rs
 create mode 100644 ng-client-ws/src/remote_ws_wasm.rs
 create mode 100644 ng-net/Cargo.toml
 create mode 100644 ng-net/README.md
 create mode 100644 ng-net/src/actor.rs
 create mode 100644 ng-net/src/actors/admin/add_invitation.rs
 create mode 100644 ng-net/src/actors/admin/add_user.rs
 create mode 100644 ng-net/src/actors/admin/create_user.rs
 create mode 100644 ng-net/src/actors/admin/del_user.rs
 create mode 100644 ng-net/src/actors/admin/list_invitations.rs
 create mode 100644 ng-net/src/actors/admin/list_users.rs
 create mode 100644 ng-net/src/actors/admin/mod.rs
 create mode 100644 ng-net/src/actors/app/mod.rs
 create mode 100644 ng-net/src/actors/app/request.rs
 create mode 100644 ng-net/src/actors/app/session.rs
 create mode 100644 ng-net/src/actors/client/blocks_exist.rs
 create mode 100644 ng-net/src/actors/client/blocks_get.rs
 create mode 100644 ng-net/src/actors/client/blocks_put.rs
 create mode 100644 ng-net/src/actors/client/client_event.rs
 create mode 100644 ng-net/src/actors/client/commit_get.rs
 create mode 100644 ng-net/src/actors/client/event.rs
 create mode 100644 ng-net/src/actors/client/inbox_post.rs
 create mode 100644 ng-net/src/actors/client/inbox_register.rs
 create mode 100644 ng-net/src/actors/client/mod.rs
 create mode 100644 ng-net/src/actors/client/pin_repo.rs
 create mode 100644 ng-net/src/actors/client/repo_pin_status.rs
 create mode 100644 ng-net/src/actors/client/topic_sub.rs
 create mode 100644 ng-net/src/actors/client/topic_sync_req.rs
 create mode 100644 ng-net/src/actors/client/wallet_put_export.rs
 create mode 100644 ng-net/src/actors/connecting.rs
 create mode 100644 ng-net/src/actors/ext/get.rs
 create mode 100644 ng-net/src/actors/ext/mod.rs
 create mode 100644 ng-net/src/actors/ext/wallet_get_export.rs
 create mode 100644 ng-net/src/actors/mod.rs
 create mode 100644 ng-net/src/actors/noise.rs
 create mode 100644 ng-net/src/actors/probe.rs
 create mode 100644 ng-net/src/actors/start.rs
 create mode 100644 ng-net/src/app_protocol.rs
 create mode 100644 ng-net/src/broker.rs
 create mode 100644 ng-net/src/bsps.rs
 create mode 100644 ng-net/src/connection.rs
 create mode 100644 ng-net/src/lib.rs
 create mode 100644 ng-net/src/server_broker.rs
 create mode 100644 ng-net/src/tests/file.rs
 create mode 100644 ng-net/src/tests/mod.rs
 create mode 100644 ng-net/src/types.rs
 create mode 100644 ng-net/src/utils.rs
 create mode 100644 ng-oxigraph/Cargo.toml
 create mode 100644 ng-oxigraph/LICENSE-APACHE
 create mode 100644 ng-oxigraph/LICENSE-MIT
 create mode 100644 ng-oxigraph/README.md
 create mode 100644 ng-oxigraph/build.rs
 create mode 100644 ng-oxigraph/src/lib.rs
 create mode 100644 ng-oxigraph/src/oxigraph/io/format.rs
 create mode 100644 ng-oxigraph/src/oxigraph/io/mod.rs
 create mode 100644 ng-oxigraph/src/oxigraph/io/read.rs
 create mode 100644 ng-oxigraph/src/oxigraph/io/write.rs
 create mode 100644 ng-oxigraph/src/oxigraph/mod.rs
 create mode 100644 ng-oxigraph/src/oxigraph/model.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/algebra.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/dataset.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/error.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/eval.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/http/dummy.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/http/mod.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/http/simple.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/mod.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/model.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/results.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/service.rs
 create mode 100644 ng-oxigraph/src/oxigraph/sparql/update.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/backend/fallback.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/backend/mod.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/backend/oxi_rocksdb.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/binary_encoder.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/error.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/mod.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/numeric_encoder.rs
 create mode 100644 ng-oxigraph/src/oxigraph/storage/small_string.rs
 create mode 100644 ng-oxigraph/src/oxigraph/store.rs
 create mode 100644 ng-oxigraph/src/oxrdf/README.md
 create mode 100644 ng-oxigraph/src/oxrdf/blank_node.rs
 create mode 100644 ng-oxigraph/src/oxrdf/dataset.rs
 create mode 100644 ng-oxigraph/src/oxrdf/graph.rs
 create mode 100644 ng-oxigraph/src/oxrdf/interning.rs
 create mode 100644 ng-oxigraph/src/oxrdf/literal.rs
 create mode 100644 ng-oxigraph/src/oxrdf/mod.rs
 create mode 100644 ng-oxigraph/src/oxrdf/named_node.rs
 create mode 100644 ng-oxigraph/src/oxrdf/parser.rs
 create mode 100644 ng-oxigraph/src/oxrdf/triple.rs
 create mode 100644 ng-oxigraph/src/oxrdf/variable.rs
 create mode 100644 ng-oxigraph/src/oxrdf/vocab.rs
 create mode 100644 ng-oxigraph/src/oxrdfio/README.md
 create mode 100644 ng-oxigraph/src/oxrdfio/error.rs
 create mode 100644 ng-oxigraph/src/oxrdfio/format.rs
 create mode 100644 ng-oxigraph/src/oxrdfio/mod.rs
 create mode 100644 ng-oxigraph/src/oxrdfio/parser.rs
 create mode 100644 ng-oxigraph/src/oxrdfio/serializer.rs
 create mode 100644 ng-oxigraph/src/oxrdfxml/README.md
 create mode 100644 ng-oxigraph/src/oxrdfxml/error.rs
 create mode 100644 ng-oxigraph/src/oxrdfxml/mod.rs
 create mode 100644 ng-oxigraph/src/oxrdfxml/parser.rs
 create mode 100644 ng-oxigraph/src/oxrdfxml/serializer.rs
 create mode 100644 ng-oxigraph/src/oxrdfxml/utils.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/README.md
 create mode 100644 ng-oxigraph/src/oxsdatatypes/boolean.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/date_time.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/decimal.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/double.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/duration.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/float.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/integer.rs
 create mode 100644 ng-oxigraph/src/oxsdatatypes/mod.rs
 create mode 100644 ng-oxigraph/src/oxttl/README.md
 create mode 100644 ng-oxigraph/src/oxttl/lexer.rs
 create mode 100644 ng-oxigraph/src/oxttl/line_formats.rs
 create mode 100644 ng-oxigraph/src/oxttl/mod.rs
 create mode 100644 ng-oxigraph/src/oxttl/n3.rs
 create mode 100644 ng-oxigraph/src/oxttl/nquads.rs
 create mode 100644 ng-oxigraph/src/oxttl/ntriples.rs
 create mode 100644 ng-oxigraph/src/oxttl/terse.rs
 create mode 100644 ng-oxigraph/src/oxttl/toolkit/error.rs
 create mode 100644 ng-oxigraph/src/oxttl/toolkit/lexer.rs
 create mode 100644 ng-oxigraph/src/oxttl/toolkit/mod.rs
 create mode 100644 ng-oxigraph/src/oxttl/toolkit/parser.rs
 create mode 100644 ng-oxigraph/src/oxttl/trig.rs
 create mode 100644 ng-oxigraph/src/oxttl/turtle.rs
 create mode 100644 ng-oxigraph/src/sparesults/README.md
 create mode 100644 ng-oxigraph/src/sparesults/csv.rs
 create mode 100644 ng-oxigraph/src/sparesults/error.rs
 create mode 100644 ng-oxigraph/src/sparesults/format.rs
 create mode 100644 ng-oxigraph/src/sparesults/json.rs
 create mode 100644 ng-oxigraph/src/sparesults/mod.rs
 create mode 100644 ng-oxigraph/src/sparesults/parser.rs
 create mode 100644 ng-oxigraph/src/sparesults/serializer.rs
 create mode 100644 ng-oxigraph/src/sparesults/solution.rs
 create mode 100644 ng-oxigraph/src/sparesults/xml.rs
 create mode 100644 ng-oxigraph/src/spargebra/README.md
 create mode 100644 ng-oxigraph/src/spargebra/algebra.rs
 create mode 100644 ng-oxigraph/src/spargebra/mod.rs
 create mode 100644 ng-oxigraph/src/spargebra/parser.rs
 create mode 100644 ng-oxigraph/src/spargebra/query.rs
 create mode 100644 ng-oxigraph/src/spargebra/term.rs
 create mode 100644 ng-oxigraph/src/spargebra/update.rs
 create mode 100644 ng-oxigraph/src/sparopt/README.md
 create mode 100644 ng-oxigraph/src/sparopt/algebra.rs
 create mode 100644 ng-oxigraph/src/sparopt/mod.rs
 create mode 100644 ng-oxigraph/src/sparopt/optimizer.rs
 create mode 100644 ng-oxigraph/src/sparopt/type_inference.rs
 create mode 100644 ng-oxigraph/tests/rocksdb_bc_data/000003.log
 create mode 100644 ng-oxigraph/tests/rocksdb_bc_data/CURRENT
 create mode 100644 ng-oxigraph/tests/rocksdb_bc_data/IDENTITY
 create mode 100644 ng-oxigraph/tests/rocksdb_bc_data/LOCK
 create mode 100644 ng-oxigraph/tests/rocksdb_bc_data/MANIFEST-000004
 create mode 100644 ng-oxigraph/tests/rocksdb_bc_data/OPTIONS-000026
 create mode 100644 ng-oxigraph/tests/store.rs
 create mode 100644 ng-repo/Cargo.toml
 create mode 100644 ng-repo/README.md
 create mode 100644 ng-repo/src/block.rs
 create mode 100644 ng-repo/src/block_storage.rs
 create mode 100644 ng-repo/src/branch.rs
 create mode 100644 ng-repo/src/commit.rs
 create mode 100644 ng-repo/src/errors.rs
 create mode 100644 ng-repo/src/event.rs
 create mode 100644 ng-repo/src/file.rs
 create mode 100644 ng-repo/src/kcv_storage.rs
 create mode 100644 ng-repo/src/lib.rs
 create mode 100644 ng-repo/src/object.rs
 create mode 100644 ng-repo/src/os_info.rs
 create mode 100644 ng-repo/src/repo.rs
 create mode 100644 ng-repo/src/store.rs
 create mode 100644 ng-repo/src/types.rs
 create mode 100644 ng-repo/src/utils.rs
 create mode 100644 ng-repo/tests/test.jpg
 create mode 100644 ng-storage-rocksdb/Cargo.toml
 create mode 100644 ng-storage-rocksdb/README.md
 create mode 100644 ng-storage-rocksdb/build.rs
 create mode 100644 ng-storage-rocksdb/src/block_storage.rs
 create mode 100644 ng-storage-rocksdb/src/kcv_storage.rs
 create mode 100644 ng-storage-rocksdb/src/lib.rs
 create mode 100644 ng-verifier/Cargo.toml
 create mode 100644 ng-verifier/README.md
 create mode 100644 ng-verifier/build.rs
 create mode 100644 ng-verifier/src/commits/mod.rs
 create mode 100644 ng-verifier/src/commits/snapshot.rs
 create mode 100644 ng-verifier/src/commits/transaction.rs
 create mode 100644 ng-verifier/src/inbox_processor.rs
 create mode 100644 ng-verifier/src/lib.rs
 create mode 100644 ng-verifier/src/request_processor.rs
 create mode 100644 ng-verifier/src/rocksdb_user_storage.rs
 create mode 100644 ng-verifier/src/site.rs
 create mode 100644 ng-verifier/src/types.rs
 create mode 100644 ng-verifier/src/user_storage/branch.rs
 create mode 100644 ng-verifier/src/user_storage/mod.rs
 create mode 100644 ng-verifier/src/user_storage/repo.rs
 create mode 100644 ng-verifier/src/user_storage/storage.rs
 create mode 100644 ng-verifier/src/verifier.rs
 create mode 100644 ng-wallet/.gitignore
 create mode 100644 ng-wallet/Cargo.toml
 create mode 100644 ng-wallet/README.md
 create mode 100644 ng-wallet/src/bip39.rs
 create mode 100644 ng-wallet/src/emojis.rs
 create mode 100644 ng-wallet/src/lib.rs
 create mode 100644 ng-wallet/src/permissions.rs
 create mode 100644 ng-wallet/src/types.rs
 create mode 100644 ng-wallet/tests/generated_security_image.jpg.compare
 create mode 100644 ng-wallet/tests/valid_security_image.jpg
 create mode 100644 package.json
 create mode 100644 pnpm-workspace.yaml
 create mode 100644 sdk/ng-sdk-js/.gitignore
 create mode 100644 sdk/ng-sdk-js/Cargo.toml
 create mode 100644 sdk/ng-sdk-js/DEV.md
 create mode 100644 sdk/ng-sdk-js/LICENSE-APACHE2
 create mode 100644 sdk/ng-sdk-js/LICENSE-MIT
 create mode 100644 sdk/ng-sdk-js/README.md
 create mode 100644 sdk/ng-sdk-js/index.html
 create mode 100644 sdk/ng-sdk-js/js/bowser.js
 create mode 100644 sdk/ng-sdk-js/js/browser.js
 create mode 100644 sdk/ng-sdk-js/js/node.js
 create mode 100644 sdk/ng-sdk-js/prepare-node.js
 create mode 100644 sdk/ng-sdk-js/prepare-web.js
 create mode 100644 sdk/ng-sdk-js/src/lib.rs
 create mode 100644 sdk/ng-sdk-js/src/model.rs

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f1e389f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,17 @@
+*~
+*.tar.gz
+.ng
+.direnv
+!.github
+\#*
+/target
+/result*
+.DS_Store
+node_modules
+*/tests/*.ng
+*/tests/*.ngw
+*/tests/*.pazzle
+*/tests/*.mnemonic
+*/ng-example/*
+.vscode/settings.json
+.env.local
diff --git a/.prettierrc.json b/.prettierrc.json
new file mode 100644
index 0000000..9d15638
--- /dev/null
+++ b/.prettierrc.json
@@ -0,0 +1,14 @@
+{
+  "plugins": ["prettier-plugin-svelte"],
+  "overrides": [
+    {
+      "files": "*",
+      "excludeFiles": ["*.svelte", "*.html", "*.json"],
+      "options": {
+        "tabWidth": 4
+      }
+    },
+    { "files": "*.svelte", "options": { "parser": "svelte" } }
+  ],
+  "trailingComma": "es5"
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..5462514
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,199 @@
+# Changelog
+
+Access the sub-sections directly:
+
+[App](#app) - [SDK](#sdk) - [Broker](#broker) - [CLI](#cli)
+
+## App
+
+### App [0.1.1-alpha] - 2024-09-02
+
+#### Added
+
+- edit title and intro
+
+#### Fixed
+
+- bug: doc not saved on back navigation
+
+### App [0.1.0-preview.8] - 2024-08-21
+
+#### Added
+
+- signature tool: signs HEADS or a snapshot
+
+#### Fixed
+
+- bug in synchronization of stores content (container) on Tauri native apps
+- removed dark theme (that wasn't implemented properly)
+- on web-app, detects JShelter and asks the user to deactivate it
+
+### App [0.1.0-preview.7] - 2024-08-15
+
+#### Added
+
+- Wallet Creation : Download Recovery PDF
+- Wallet Creation : Download wallet file
+- Wallet Login : with pazzle
+- Wallet Login : correct errors while entering pazzle
+- Wallet Login : with mnemonic
+- Wallet Login : in-memory session (save nothing locally)
+- Wallet Import : from file
+- Wallet Import : from QR code
+- Wallet Import : from TextCode
+- User Panel : Online / Offline status
+- User Panel : Toggle Personal Connection
+- User Panel : Logout
+- User Panel / Wallet : Export by scanning QRCode
+- User Panel / Wallet : Export by generating QRCode
+- User Panel / Wallet : Export by generating TextCode
+- User Panel / Wallet : Download file
+- User Panel / Accounts Info : basic info (not accurate)
+- Document Menu : switch Viewer / Editor
+- Document Menu : switch Graph / Document
+- Document Menu : Live editing
+- Document Menu : Upload binary file + Attachments and Files pane
+- Document Menu : History pane
+- Add Document : Save in current Store
+- Document class: Source Code: Rust, JS, TS, Svelte, React
+- Document class: Data : Graph, Container, JSON, Array, Object
+- Document class: Post (rich text)
+- Document class: Markdown (rich text)
+- Document class: Plain Text
+- A11Y : limited ARIA and tab navigation on all pages. Not tested with a screen reader.
+- I18N : English
+- I18N : German (partial)
+- Native app: macOS
+- Native app: Android
+- Native app: Linux and Ubuntu
+- Native app: Windows
+
+## SDK
+
+### SDK [0.1.1-alpha.7] - 2025-04-03
+
+#### Changed
+
+- js : doc_create : parameters are session_id, crdt, class_name, destination, store_repo (defaults to the Private Store)
+- nodejs & python : doc_create : parameters are session_id, crdt, class_name, destination, store_type (string), store_repo (string); if the last 2 params are omitted, defaults to the Private Store.
+- all : sparql_update : returns the list of Nuris of the new commits, in the form `did:ng:o:c`
+
+#### Added
+
+- python : wallet_open_with_mnemonic_words
+- python : disconnect_and_close
+- python : doc_create
+- python : doc_sparql_update
+- js & nodejs : fetch_header
+- js & nodejs : update_header
+- js & nodejs : signature_status
+- js & nodejs : signed_snapshot_request
+- js & nodejs : signature_request
+- rust : app_request : Fetch : SignatureStatus, SignatureRequest, SignedSnapshotRequest
+
+### SDK [0.1.0-preview.6] - 2024-08-15
+
+#### Added
+
+- js : session_start
+- js : session_start_remote
+- js : session_stop
+- js : user_connect
+- js : user_disconnect
+- js : discrete_update
+- js : sparql_update
+- js : sparql_query (returns SPARQL Query Results JSON Format, a list of Turtle triples, or a boolean)
+- js : branch_history
+- js : app_request_stream (fetch and subscribe)
+- js : app_request
+- js : doc_create
+- js : file_get
+- js : upload_start
+- js : upload_done
+- js : upload_chunk
+- nodejs : init_headless
+- nodejs : session_headless_start
+- nodejs : session_headless_stop
+- nodejs : sparql_query (returns SPARQL Query Results JSON Format, RDF-JS data model, or a boolean)
+- nodejs : discrete_update
+- nodejs : sparql_update
+- nodejs : rdf_dump
+- nodejs : admin_create_user
+- nodejs : doc_create
+- nodejs : file_get
+- nodejs : file_put
+- rust : session_start
+- rust : session_stop
+- rust : app_request_stream, gives access to:
+  - fetch and subscribe
+  - file_get
+- rust : app_request, gives access to:
+  - create_doc
+  - sparql_query
+  - sparql_update
+  - discrete_update
+  - rdf_dump
+  - history
+  - file_put
+
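The js entries above can be read as one small session lifecycle: open a session, create a doc, mutate it with SPARQL, close the session. As a rough illustration only, here is a TypeScript sketch of that flow. The function names and the doc_create/sparql_update parameter lists come from the changelog entries above; the stub signatures, argument values, and return shapes are assumptions for illustration, not the published ng-sdk-js API.

```ts
// Illustrative sketch only: names and parameter lists follow the changelog
// above; the signatures below are assumed stubs, not the real ng-sdk-js API.
declare function session_start(userId: string, peerId: string): Promise<number>;
declare function doc_create(
    sessionId: number,
    crdt: string,          // e.g. "Graph" (placeholder value)
    className: string,     // document class (placeholder value)
    destination: string,   // placeholder value
    storeRepo?: string     // omitted => defaults to the Private Store
): Promise<string>;        // assumed to resolve to the Nuri of the new doc
declare function sparql_update(
    sessionId: number,
    sparql: string,
    nuri?: string
): Promise<string[]>;      // Nuris of the new commits, in the form `did:ng:o:c`
declare function session_stop(userId: string): Promise<void>;

async function demo(userId: string, peerId: string): Promise<void> {
    const sessionId = await session_start(userId, peerId);

    // Create a document; with store_repo omitted it lands in the Private Store.
    const docNuri = await doc_create(sessionId, "Graph", "data:graph", "store");

    // Insert one triple (example IRIs); the returned array lists the Nuri of
    // each commit the update produced.
    const commits = await sparql_update(
        sessionId,
        'INSERT DATA { <> <http://example.org/title> "Hello NextGraph" . }',
        docNuri
    );
    console.log(commits); // each entry is of the form did:ng:o:...:c:...

    await session_stop(userId);
}
```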
+## Broker
+
+### Broker [0.1.1-alpha] - 2024-09-02
+
+### Broker [0.1.0-preview.8] - 2024-08-21
+
+#### Added
+
+- ExtProtocol : ObjectGet
+
+### Broker [0.1.0-preview.7] - 2024-08-15
+
+#### Added
+
+- listen on localhost
+- listen on domain
+- listen on private LAN
+- listen on public IP
+- invite-admin
+- broker service provider : add invitation for user
+- serve web app
+- ExtProtocol : WalletGetExport
+- ClientProtocol : BlocksExist
+- ClientProtocol : BlocksGet
+- ClientProtocol : BlocksPut
+- ClientProtocol : CommitGet
+- ClientProtocol : Event
+- ClientProtocol : PinRepo
+- ClientProtocol : RepoPinStatus
+- ClientProtocol : TopicSub
+- ClientProtocol : TopicSyncReq
+- ClientProtocol : WalletPutExport
+- AppProtocol : AppRequest
+- AppProtocol : AppSessionStart
+- AppProtocol : AppSessionStop
+- AdminProtocol : AddInvitation
+- AdminProtocol : AddUser
+- AdminProtocol : CreateUser
+- AdminProtocol : DelUser
+- AdminProtocol : ListInvitations
+- AdminProtocol : ListUsers
+
+## CLI
+
+### CLI [0.1.1-alpha] - 2024-09-02
+
+### CLI [0.1.0-preview.8] - 2024-08-21
+
+#### Added
+
+- get : download binary files, snapshots, and head commits, and verify signature
+
+### CLI [0.1.0-preview.7] - 2024-08-15
+
+#### Added
+
+- gen-key
+- admin : add/remove admin user
+- admin : add invitation
+- admin : list users
+- admin : list invitations
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..88c5f71
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,4918 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.24.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "adler2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+
+[[package]]
+name = "aead"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
+dependencies = [
+ "crypto-common",
+ "generic-array",
+ "heapless",
+]
+
+[[package]]
+name = "aes"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
+dependencies = [
+ "cfg-if",
+ "cipher",
+ "cpufeatures",
+]
+
+[[package]]
+name = "aes-gcm"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1"
+dependencies = [
+ "aead",
+ "aes",
+ "cipher",
+ "ctr",
+ "ghash",
+ "subtle",
+]
+
+[[package]]
+name = "aes-gcm-siv"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae0784134ba9375416d469ec31e7c5f9fa94405049cf08c5ce5b4698be673e0d"
+dependencies = [
+ "aead",
+ "aes",
+ "cipher",
+ "ctr",
+ "polyval",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anes"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+
+[[package]]
+name = "anstyle"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+
+[[package]]
+name = "anyhow"
+version = "1.0.99"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
+
+[[package]]
+name = "arc-swap"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+
+[[package]]
+name = "argon2"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"
+dependencies = [
+ "base64ct",
+ "blake2",
+ "cpufeatures",
+ "password-hash",
+]
+
+[[package]]
+name = "arrayref"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "async-attributes" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "pin-project-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.5.0", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" +dependencies = [ + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "windows-sys 0.61.0", +] + +[[package]] +name = "async-lock" +version = "3.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +dependencies = [ + "event-listener 5.4.1", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-native-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9343dc5acf07e79ff82d0c37899f079db3534d99f189a1837c8e549c99405bec" +dependencies = [ + "futures-util", + "native-tls", + "thiserror 1.0.69", + "url", +] + +[[package]] +name = "async-once-cell" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288f83726785267c6f2ef073a3d83dc3f9b81464e9f99898240cced85fce35a" + +[[package]] +name = "async-process" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" +dependencies = [ + "async-channel 2.5.0", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener 5.4.1", + "futures-lite", + "rustix", +] + +[[package]] +name = "async-recursion" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "async-signal" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.61.0", +] + +[[package]] +name = "async-std" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" +dependencies = [ + "async-attributes", + "async-channel 1.9.0", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "async_io_stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" +dependencies = [ + "futures", + "pharos", + "rustc_version", +] + +[[package]] +name = "atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "atomic_refcell" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41e67cd8309bbd06cd603a9e693a784ac2e5d1e955f11286e355089fcab3047c" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "automerge" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f0dae93622d3c6850d196503480004576249e0e391bddb3f54600974d92a790" +dependencies = [ + "cfg-if", + "flate2", + "fxhash", + "hex", + "im", + "itertools 0.13.0", + "leb128", + "serde", + "sha2 0.10.9", + "smol_str", + "thiserror 1.0.69", + "tinyvec", + "tracing", + "unicode-segmentation", + "uuid", +] + +[[package]] +name = "backtrace" +version = "0.3.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide 0.8.9", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64-url" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb9fb9fb058cc3063b5fc88d9a21eefa2735871498a04e1650da76ed511c8569" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "base64ct" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" + +[[package]] +name = "bindgen" +version = "0.65.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.106", +] + +[[package]] +name = "bit_field" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e4b40c7323adcfc0a41c4b88143ed58346ff65a288fc144329c5c45e05d70c6" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" + +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "blake3" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3888aaa89e4b2a40fca9848e400f6a658a5a3978de7be858e209cafa8be9a4a0" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + +[[package]] +name = "blob-uuid" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc15853171b33280f5614e77f5fa4debd33f51a86c44daa4ba3d759674c561" +dependencies = [ + "base64 0.13.1", + "uuid", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.6.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" +dependencies = [ + "async-channel 2.5.0", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + +[[package]] +name = "bstr" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "bytemuck" +version = "1.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + +[[package]] +name = "bzip2-sys" +version = "0.1.13+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cc" +version = "1.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65193589c6404eb80b450d618eaf9a2cafaaafd57ecce47370519ef674a7bd44" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" + +[[package]] +name = "chacha20" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "chacha20poly1305" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" +dependencies = [ + "aead", + "chacha20", + "cipher", + "poly1305", + "zeroize", +] + +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", + "zeroize", +] + +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "4.5.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ad9bbf750e73b5884fb8a211a9424a1906c1e156724260fdae972f31d70e1d6" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" + +[[package]] +name = "codspeed" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f4cce9c27c49c4f101fffeebb1826f41a9df2e7498b7cd4d95c0658b796c6c" +dependencies = [ + "colored", + "libc", + "serde", + "serde_json", + "uuid", +] + +[[package]] +name = "codspeed-criterion-compat" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c23d880a28a2aab52d38ca8481dd7a3187157d0a952196b6db1db3c8499725" +dependencies = [ + "codspeed", + "codspeed-criterion-compat-walltime", + "colored", +] + +[[package]] +name = "codspeed-criterion-compat-walltime" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0a2f7365e347f4f22a67e9ea689bf7bc89900a354e22e26cf8a531a42c8fbb" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "codspeed", + "criterion-plot", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" 
+dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "critical-section" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "typenum", +] + +[[package]] +name = "crypto_box" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd26c32de5307fd08aac445a75c43472b14559d5dccdfba8022dbcd075838ebc" +dependencies = [ + "aead", + "blake2", + "chacha20", + "chacha20poly1305", + "salsa20", + "x25519-dalek 1.1.1", + "xsalsa20poly1305", + "zeroize", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "current_platform" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a74858bcfe44b22016cb49337d7b6f04618c58e5dbfdef61b06b8c434324a0bc" + +[[package]] +name = "curve25519-dalek" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" +dependencies = [ + "byteorder", + "digest 0.9.0", + "rand_core 0.5.1", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "fiat-crypto", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "data-encoding" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" + +[[package]] +name = "data-url" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be1e0bca6c3637f992fc1cc7cbc52a78c1ef6db076dbf1059c4323d6a2048376" + +[[package]] +name = "debug_print" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f215f9b7224f49fb73256115331f677d868b34d18b65dbe4db392e6021eea90" + +[[package]] +name = "deranged" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "dlopen2" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09b4f5f101177ff01b8ec4ecc81eead416a8aa42819a2869311b3420fa114ffa" +dependencies = [ + "libc", + "once_cell", + "winapi", +] + +[[package]] +name = "ed25519" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +dependencies = [ + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +dependencies = [ + "curve25519-dalek 3.2.0", + "ed25519", + "rand 0.7.3", + "serde", + "sha2 0.9.9", + "zeroize", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.0", +] + +[[package]] +name = "euclid" +version = "0.22.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad9cdb4b747e485a12abb0e6566612956c7a1bafa3bdb8d682c5b6d403589e48" +dependencies = [ + "num-traits", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener 5.4.1", + "pin-project-lite", +] + +[[package]] +name = "exr" +version = "1.73.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83197f59927b46c04a183a619b7c29df34e63e63c7869320862268c0ef687e0" +dependencies = [ + "bit_field", + "half", + "lebe", + "miniz_oxide 0.8.9", + "rayon-core", + "smallvec", + "zune-inflate", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +dependencies = [ + "getrandom 0.2.16", +] + +[[package]] +name = "fastrange-rs" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e90a1392cd6ec5ebe42ccaf251f2b7ba6be654c377f05c913f3898bfb2172512" + +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "ff" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4b967a3ee6ae993f0094174257d404a5818f58be79d67a1aea1ec8996d28906" +dependencies = [ + "byteorder", + "ff_derive", + "rand_core 0.5.1", +] + +[[package]] +name = "ff_derive" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3776aaf60a45037a9c3cabdd8542b38693acaa3e241ff957181b72579d29feb" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + +[[package]] +name = "find-msvc-tools" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fd99930f64d146689264c637b5af2f0233a933bef0d8570e2526bf9e083192d" + +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide 0.8.9", +] + +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version 
= "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasi 0.14.5+wasi-0.2.4", + "wasm-bindgen", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + +[[package]] +name = "gif" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae047235e33e2829703574b54fdec96bfbad892062d97fed2f76022287de61b" +dependencies = [ + "color_quant", + "weezl", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "globset" +version = "0.4.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "group" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f15be54742789e36f03307c8fdf0621201e1345e94f1387282024178b5e9ec8c" +dependencies = [ + "ff", + "rand 0.7.3", + "rand_xorshift", +] + +[[package]] +name = "h2" +version = "0.3.27" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" +dependencies = [ + "cfg-if", + "crunchy", +] + +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" + +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version", + "spin", + "stable_deref_trait", +] + +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hex_fmt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b07f60793ff0a4d9cef0f18e63b5357e06209987153a64648c972c1e5aff336f" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = 
"icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "im" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" +dependencies = [ + "bitmaps", + "rand_core 0.6.4", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", +] + +[[package]] +name = "image" +version = "0.24.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d" +dependencies = [ + "bytemuck", + "byteorder", + "color_quant", + "exr", + "gif", + "jpeg-decoder", + "num-traits", + "png", + "qoi", + "tiff", +] + +[[package]] +name = "imagesize" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "029d73f573d8e8d63e6d5020011d3255b28c3ba85d6cf870a07184ed23de9284" + +[[package]] +name = "indexmap" +version = "2.11.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "206a8042aec68fa4a62e8d3f7aa4ceb508177d9324faf261e1959e495b7a1921" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + +[[package]] +name = "io-uring" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" +dependencies = [ + "bitflags 2.9.4", + "cfg-if", + "libc", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "is-terminal" +version = "0.4.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + +[[package]] +name = "jpeg-decoder" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07" +dependencies = [ + "rayon", +] + +[[package]] +name = "js-sys" +version = "0.3.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0b063578492ceec17683ef2f8c5e89121fbd0b172cbc280635ab7567db2738" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "json-event-parser" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73267b6bffa5356bd46cfa89386673e9a7f62f4eb3adcb45b1bd031892357853" + +[[package]] +name = "kurbo" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c62026ae44756f8a599ba21140f350303d4f08dcdcc71b5ad9c9bb8128c13c62" +dependencies = [ + "arrayvec", + "euclid", + "smallvec", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "leb128" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "lebe" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a79a3332a6609480d7d0c9eab957bca6b455b91bb84e66d19f5ff66294b85b8" + +[[package]] +name = "libc" +version = "0.2.175" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" + +[[package]] +name = "libloading" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" +dependencies = [ + "cfg-if", + "windows-targets 0.53.3", +] + +[[package]] +name = "libredox" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" +dependencies = [ + "bitflags 2.9.4", + "libc", + "redox_syscall", +] + +[[package]] +name = "libz-sys" +version = "1.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] +name = "lock_api" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +dependencies = [ + "value-bag", +] + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest 0.10.7", +] + +[[package]] +name = "memalloc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df39d232f5c40b0891c10216992c2f250c054105cb1e56f0fc9032db6203ecc1" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minicov" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27fe9f1cc3c22e1687f9446c2083c4c5fc7f0bcf1c7a86bdbded14985895b4b" +dependencies = [ + "cc", + "walkdir", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "netdev" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea7fff9396e7c96a7f1ce778e91a00f0afb22337a6150ab50a22c15e0d902621" +dependencies = [ + "dlopen2", + "libc", + "memalloc", + "netlink-packet-core", + "netlink-packet-route", + "netlink-sys", + "once_cell", + "system-configuration 0.6.1", + "windows", +] + +[[package]] +name = "netlink-packet-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72724faf704479d67b388da142b186f916188505e7e0b26719019c525882eda4" +dependencies = [ + "anyhow", + "byteorder", + "netlink-packet-utils", +] + +[[package]] +name = "netlink-packet-route" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053998cea5a306971f88580d0829e90f270f940befd7cf928da179d4187a5a66" +dependencies = [ + "anyhow", + "bitflags 1.3.2", + "byteorder", + "libc", + "netlink-packet-core", + "netlink-packet-utils", +] + +[[package]] +name = "netlink-packet-utils" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ede8a08c71ad5a95cdd0e4e52facd37190977039a4704eb82a283f713747d34" +dependencies = [ + "anyhow", + "byteorder", + "paste", + "thiserror 1.0.69", +] + +[[package]] +name = "netlink-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16c903aa70590cb93691bf97a767c8d1d6122d2cc9070433deb3bbf36ce8bd23" +dependencies = [ + "bytes", + "libc", + "log", +] + +[[package]] +name = "nextgraph" +version = "0.1.2" +dependencies = [ + "async-once-cell", + "async-std", + "async-trait", + "base64-url", + "futures", + "lazy_static", + "ng-client-ws", + "ng-net", + "ng-repo", + "ng-storage-rocksdb", + "ng-verifier", + "ng-wallet", + "once_cell", + "pdf-writer", + "qrcode", + "serde_bare", + "serde_bytes", + "serde_json", + "svg2pdf", + "web-time", + "whoami", + "zeroize", +] + +[[package]] +name = "ng-async-tungstenite" +version = "0.22.2" +source = "git+https://git.nextgraph.org/NextGraph/async-tungstenite.git?branch=nextgraph#84fcc9e457f7686afcec0e6068819f482c0afa8c" +dependencies = [ + "async-native-tls", + "async-std", + "futures-io", + "futures-util", + "log", + 
"ng-tungstenite", + "pin-project-lite", +] + +[[package]] +name = "ng-broker" +version = "0.1.2" +dependencies = [ + "async-std", + "async-trait", + "blake3", + "either", + "futures", + "getrandom 0.3.3", + "netdev", + "ng-async-tungstenite", + "ng-client-ws", + "ng-net", + "ng-repo", + "ng-storage-rocksdb", + "ng-verifier", + "once_cell", + "rust-embed", + "serde", + "serde_bare", + "serde_json", + "tempfile", + "urlencoding", +] + +[[package]] +name = "ng-client-ws" +version = "0.1.2" +dependencies = [ + "async-std", + "async-trait", + "either", + "futures", + "getrandom 0.3.3", + "ng-async-tungstenite", + "ng-net", + "ng-repo", + "pharos", + "serde_bare", + "wasm-bindgen", + "wasm-bindgen-test", + "ws_stream_wasm", +] + +[[package]] +name = "ng-net" +version = "0.1.2" +dependencies = [ + "async-recursion", + "async-std", + "async-trait", + "base64-url", + "crypto_box", + "ed25519-dalek", + "either", + "futures", + "getrandom 0.3.3", + "lazy_static", + "netdev", + "ng-async-tungstenite", + "ng-repo", + "noise-protocol", + "noise-rust-crypto", + "once_cell", + "regex", + "reqwest", + "serde", + "serde_bare", + "serde_bytes", + "serde_json", + "time", + "unique_id", + "url", + "web-time", + "zeroize", +] + +[[package]] +name = "ng-oxigraph" +version = "0.4.0-alpha.8-ngalpha" +dependencies = [ + "base64-url", + "codspeed-criterion-compat", + "digest 0.10.7", + "getrandom 0.3.3", + "hex", + "js-sys", + "json-event-parser", + "lazy_static", + "libc", + "md-5", + "memchr", + "ng-repo", + "ng-rocksdb", + "oxilangtag", + "oxiri", + "peg", + "quick-xml 0.31.0", + "rand 0.8.5", + "regex", + "serde", + "sha1", + "sha2 0.10.9", + "siphasher", + "thiserror 1.0.69", + "zstd", +] + +[[package]] +name = "ng-repo" +version = "0.1.2" +dependencies = [ + "base64-url", + "blake3", + "chacha20", + "crypto_box", + "current_platform", + "curve25519-dalek 3.2.0", + "debug_print", + "ed25519-dalek", + "futures", + "getrandom 0.3.3", + "gloo-timers", + "lazy_static", + "log", + "ng_threshold_crypto", + "num_enum", + "once_cell", + "os_info", + "rand 0.7.3", + "sbbf-rs-safe", + "serde", + "serde_bare", + "serde_bytes", + "serde_json", + "slice_as_array", + "time", + "wasm-bindgen", + "web-time", + "zeroize", +] + +[[package]] +name = "ng-rocksdb" +version = "0.21.0-ngpreview.7" +source = "git+https://git.nextgraph.org/NextGraph/rust-rocksdb.git?branch=master#0861351ab34188cb2ed71ee865f463d80019b165" +dependencies = [ + "bindgen", + "bzip2-sys", + "cc", + "glob", + "libc", + "libz-sys", + "openssl", + "pkg-config", +] + +[[package]] +name = "ng-sdk-js" +version = "0.1.2" +dependencies = [ + "async-std", + "futures", + "getrandom 0.3.3", + "gloo-timers", + "js-sys", + "nextgraph", + "ng-client-ws", + "ng-net", + "ng-repo", + "ng-wallet", + "once_cell", + "oxrdf", + "rand 0.7.3", + "serde", + "serde-wasm-bindgen", + "serde_bare", + "serde_bytes", + "sys-locale", + "uuid", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test", +] + +[[package]] +name = "ng-storage-rocksdb" +version = "0.1.2" +dependencies = [ + "ng-repo", + "ng-rocksdb", + "serde_bare", +] + +[[package]] +name = "ng-tungstenite" +version = "0.19.0" +source = "git+https://git.nextgraph.org/NextGraph/tungstenite-rs.git?branch=nextgraph#3e9aa12cf32313b944fee1ef4128591f4346013a" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "native-tls", + "rand 0.8.5", + "sha1", + "thiserror 1.0.69", + "url", + "utf-8", +] + +[[package]] +name = "ng-verifier" +version = "0.1.2" +dependencies = [ + 
"async-std", + "async-trait", + "automerge", + "base64-url", + "either", + "futures", + "getrandom 0.3.3", + "lazy_static", + "ng-net", + "ng-oxigraph", + "ng-repo", + "ng-storage-rocksdb", + "qrcode", + "rand 0.7.3", + "sbbf-rs-safe", + "serde", + "serde_bare", + "serde_bytes", + "serde_json", + "web-time", + "yrs", +] + +[[package]] +name = "ng-wallet" +version = "0.1.2" +dependencies = [ + "aes-gcm-siv", + "argon2", + "async-std", + "base64-url", + "blake3", + "chacha20poly1305", + "crypto_box", + "getrandom 0.3.3", + "image", + "lazy_static", + "ng-net", + "ng-repo", + "ng-verifier", + "rand 0.7.3", + "safe-transmute", + "serde", + "serde-big-array", + "serde_bare", + "serde_bytes", + "web-time", + "zeroize", +] + +[[package]] +name = "ng_threshold_crypto" +version = "0.4.1" +source = "git+https://git.nextgraph.org/NextGraph/threshold_crypto.git?branch=master#5475af23b4fe77f5ad1aba083c9058379065fffa" +dependencies = [ + "byteorder", + "ff", + "group", + "hex_fmt", + "log", + "pairing", + "rand 0.7.3", + "rand_chacha 0.2.2", + "serde", + "thiserror 1.0.69", + "tiny-keccak", + "zeroize", +] + +[[package]] +name = "noise-protocol" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2473d39689a839f5a363aaef7d99f76d5611bf352286682b25a6644fec18b1d3" +dependencies = [ + "arrayvec", +] + +[[package]] +name = "noise-rust-crypto" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c6159f60beb3bbbcdc266bc789bfc6c37fdad7d7ca7152d3e049ef5af633f0" +dependencies = [ + "aes-gcm", + "blake2", + "chacha20poly1305", + "noise-protocol", + "sha2 0.10.9", + "x25519-dalek 2.0.1", + "zeroize", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "num-bigint" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_enum" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "num_threads" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "openssl" +version = "0.10.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +dependencies = [ + "bitflags 2.9.4", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-src" +version = "300.5.2+3.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d270b79e2926f5150189d475bc7e9d2c69f9c4697b185fa917d5a32b792d21b4" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "os_info" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0e1ac5fde8d43c34139135df8ea9ee9465394b2d8d20f032d38998f64afffc3" +dependencies = [ + "log", + "plist", + "serde", + "windows-sys 0.52.0", +] + +[[package]] +name = "oxilangtag" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23f3f87617a86af77fa3691e6350483e7154c2ead9f1261b75130e21ca0f8acb" +dependencies = [ + "serde", +] + +[[package]] +name = "oxiri" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b4ed3a7192fa19f5f48f99871f2755047fabefd7f222f12a1df1773796a102" + +[[package]] +name = "oxrdf" +version = "0.2.0-alpha.4" +source = "git+https://git.nextgraph.org/NextGraph/oxigraph.git?branch=main#c7f873f904617c201e359196717eb2133d91cef5" +dependencies = [ + "oxilangtag", + "oxiri", + "oxsdatatypes", + "rand 0.8.5", + "serde", + "thiserror 1.0.69", +] + +[[package]] +name = "oxsdatatypes" +version = "0.2.0-alpha.1" +source = "git+https://git.nextgraph.org/NextGraph/oxigraph.git?branch=main#c7f873f904617c201e359196717eb2133d91cef5" +dependencies = [ + "serde", + "thiserror 1.0.69", +] + +[[package]] +name = "pairing" +version = "0.16.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8290dea210a712682cd65031dc2b34fd132cf2729def3df7ee08f0737ff5ed6" +dependencies = [ + "byteorder", + "ff", + "group", + "rand_core 0.5.1", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pdf-writer" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af6a7882fda7808481d43c51cadfc3ec934c6af72612a1fe6985ce329a2f0469" +dependencies = [ + "bitflags 2.9.4", + "itoa", + "memchr", + "ryu", +] + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "peg" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9928cfca101b36ec5163e70049ee5368a8a1c3c6efc9ca9c5f9cc2f816152477" +dependencies = [ + "peg-macros", + "peg-runtime", +] + +[[package]] +name = "peg-macros" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6298ab04c202fa5b5d52ba03269fb7b74550b150323038878fe6c372d8280f71" +dependencies = [ + "peg-runtime", + "proc-macro2", + "quote", +] + +[[package]] +name = "peg-runtime" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "132dca9b868d927b35b5dd728167b2dee150eb1ad686008fc71ccb298b776fca" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pharos" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" +dependencies = [ + "futures", + "rustc_version", +] + +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "plist" +version = "1.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1" +dependencies = [ + "base64 0.22.1", + "indexmap", + "quick-xml 0.38.3", + "serde", + "time", +] + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "png" +version = "0.17.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide 0.8.9", +] + +[[package]] +name = "polling" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "windows-sys 0.61.0", +] + +[[package]] +name = "poly1305" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +dependencies = [ + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "potential_utf" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.106", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.101" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "qoi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "qrcode" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d68782463e408eb1e668cf6152704bd856c78c5b6417adaee3203d8f4c1fc9ec" + +[[package]] +name = "quick-xml" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33" +dependencies = [ + "memchr", +] + +[[package]] +name = "quick-xml" +version = "0.38.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" +dependencies = [ + "memchr", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.16", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_xorshift" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77d416b86801d23dde1aa643023b775c3a462efc0ed96443add11546cdf1dca8" +dependencies = [ + "rand_core 0.5.1", 
+]
+
+[[package]]
+name = "rand_xoshiro"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
+dependencies = [
+ "rand_core 0.6.4",
+]
+
+[[package]]
+name = "rayon"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
+dependencies = [
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
+dependencies = [
+ "crossbeam-deque",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.5.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
+dependencies = [
+ "bitflags 2.9.4",
+]
+
+[[package]]
+name = "regex"
+version = "1.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
+
+[[package]]
+name = "reqwest"
+version = "0.11.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62"
+dependencies = [
+ "base64 0.21.7",
+ "bytes",
+ "encoding_rs",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
+ "hyper-tls",
+ "ipnet",
+ "js-sys",
+ "log",
+ "mime",
+ "native-tls",
+ "once_cell",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustls-pemfile",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "system-configuration 0.5.1",
+ "tokio",
+ "tokio-native-tls",
+ "tower-service",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "winreg",
+]
+
+[[package]]
+name = "roxmltree"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c20b6793b5c2fa6553b250154b78d6d0db37e72700ae35fad9387a46f487c97"
+
+[[package]]
+name = "rust-embed"
+version = "6.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a36224c3276f8c4ebc8c20f158eca7ca4359c8db89991c4925132aaaf6702661"
+dependencies = [
+ "rust-embed-impl",
+ "rust-embed-utils",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-impl"
+version = "6.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49b94b81e5b2c284684141a2fb9e2a31be90638caf040bf9afbc5a0416afe1ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "rust-embed-utils",
+ "syn 2.0.106",
+ "walkdir",
+]
+
+[[package]]
+name = "rust-embed-utils"
+version = "7.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d38ff6bf570dc3bb7100fce9f7b60c33fa71d80e88da3f2580df4ff2bdded74"
+dependencies = [
+ "globset",
+ "sha2 0.10.9",
+ "walkdir",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
+
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "rustix"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
+dependencies = [
+ "bitflags 2.9.4",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.61.0",
+]
+
+[[package]]
+name = "rustls-pemfile"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
+dependencies = [
+ "base64 0.21.7",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
+[[package]]
+name = "ryu"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+
+[[package]]
+name = "safe-transmute"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3944826ff8fa8093089aba3acb4ef44b9446a99a16f3bf4e74af3f77d340ab7d"
+
+[[package]]
+name = "salsa20"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "sbbf-rs"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5525db49c7719816ac719ea8ffd0d0b4586db1a3f5d3e7751593230dacc642fd"
+dependencies = [
+ "cpufeatures",
+ "fastrange-rs",
+]
+
+[[package]]
+name = "sbbf-rs-safe"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9902ffeb2cff3f8c072c60c7d526ac9560fc9a66fe1dfc3c240eba5e2151ba3c"
+dependencies = [
+ "sbbf-rs",
+]
+
+[[package]]
+name = "schannel"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
+dependencies = [
+ "windows-sys 0.61.0",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "security-framework"
+version = "2.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
+dependencies = [
+ "bitflags 2.9.4",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
"security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + +[[package]] +name = "serde" +version = "1.0.222" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aab69e3f5be1836a1fe0aca0b286e5a5b38f262d6c9e8acd2247818751fcc8fb" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde-big-array" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11fc7cc2c76d73e0f27ee52abbd64eec84d46f370c88371120433196934e4b7f" +dependencies = [ + "serde", +] + +[[package]] +name = "serde-wasm-bindgen" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "serde_bare" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51c55386eed0f1ae957b091dc2ca8122f287b60c79c774cbe3d5f2b69fded660" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_bytes" +version = "0.11.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe07b5d88710e3b807c16a06ccbc9dfecd5fff6a4d2745c59e3e26774f10de6a" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde_core" +version = "1.0.222" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f8ebec5eea07db7df9342aa712db2138f019d9ab3454a60a680579a6f841b80" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.222" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5f61630fe26d0ff555e6c37dc445ab2f15871c8a11ace3cf471b3195d3e4f49" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "serde_json" +version = "1.0.145" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", + "serde_core", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha2" 
+version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "simplecss" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9c6883ca9c3c7c90e888de77b7a5c849c779d25d74a1269b0218b14e8b136c" +dependencies = [ + "log", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "slice_as_array" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64c963ee59ddedb5ab95dc2cd97c48b4a292572a52c5636fbbabdb9985bfe4c3" + +[[package]] +name = "smallstr" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "862077b1e764f04c251fe82a2ef562fd78d7cadaeb072ca7c2bcaf7217b1ff3b" +dependencies = [ + "smallvec", +] + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "smol_str" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead" +dependencies = [ + "serde", +] + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + 
+[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "strict-num" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731" +dependencies = [ + "float-cmp", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "svg2pdf" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31565956eb1dc398c0d9776ee1d1bac4e34759af63dcbe0520df32313a5b53b" +dependencies = [ + "log", + "miniz_oxide 0.7.4", + "once_cell", + "pdf-writer", + "usvg", +] + +[[package]] +name = "svgtypes" +version = "0.15.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68c7541fff44b35860c1a7a47a7cadf3e4a304c457b58f9870d9706ece028afc" +dependencies = [ + "kurbo", + "siphasher", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "sys-locale" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eab9a99a024a169fe8a903cf9d4a3b3601109bcc13bd9e3c6fff259138626c4" +dependencies = [ + "js-sys", + "libc", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.9.4", + "core-foundation", + "system-configuration-sys 0.6.0", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" 
+dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53" +dependencies = [ + "fastrand", + "getrandom 0.3.3", + "once_cell", + "rustix", + "windows-sys 0.61.0", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +dependencies = [ + "thiserror-impl 2.0.16", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "tiff" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba1310fcea54c6a9a4fd1aad794ecc02c31682f6bfbecdf460bf19533eed1e3e" +dependencies = [ + "flate2", + "jpeg-decoder", + "weezl", +] + +[[package]] +name = "time" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +dependencies = [ + "deranged", + "itoa", + "js-sys", + "libc", + "num-conv", + "num_threads", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" + +[[package]] +name = "time-macros" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tiny-skia-path" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e7fc0c2e86a30b117d0462aa261b72b7a99b7ebd7deb3a14ceda95c5bdc93" +dependencies = [ + "arrayref", + "bytemuck", + "strict-num", +] + +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "tinyvec" +version = "1.10.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.47.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +dependencies = [ + "backtrace", + "bytes", + "io-uring", + "libc", + "mio", + "pin-project-lite", + "slab", + "socket2 0.6.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "tracing-core" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "unicode-ident" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unique_id" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d4f2b94d146d69e6ab70ad2b968aabde3aff26bff8e2f64bd2812f6f4201b4" +dependencies = [ + "blob-uuid", + "uuid", +] + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "url" +version = "2.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "usvg" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b84ea542ae85c715f07b082438a4231c3760539d902e11d093847a0b22963032" +dependencies = [ + "base64 0.22.1", + "data-url", + "flate2", + "imagesize", + "kurbo", + "log", + "pico-args", + "roxmltree", + "simplecss", + "siphasher", + "strict-num", + "svgtypes", + "tiny-skia-path", + "xmlwriter", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" +dependencies = [ + "getrandom 0.3.3", + "js-sys", + "serde", + "uuid-rng-internal", + "wasm-bindgen", +] + +[[package]] +name = "uuid-rng-internal" +version = "1.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23426b4394875bbc29a3074f94e1b52cd0eed2c8410c21a6edbfb033daef0aa1" +dependencies = [ + "getrandom 0.3.3", +] + +[[package]] +name = "value-bag" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = 
"0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.5+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4494f6290a82f5fe584817a676a34b9d6763e8d9d18204009fb31dceca98fd4" +dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.0+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03fa2761397e5bd52002cd7e73110c71af2109aca4e521a9f40473fe685b0a24" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e14915cadd45b529bb8d1f343c4ed0ac1de926144b746e2710f9cd05df6603b" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "serde", + "serde_json", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28d1ba982ca7923fd01448d5c30c6864d0a14109560296a162f80f305fb93bb" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.106", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca85039a9b469b38336411d6d6ced91f3fc87109a2a27b0c197663f5144dffe" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c3d463ae3eff775b0c45df9da45d68837702ac35af998361e2c84e7c5ec1b0d" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bb4ce89b08211f923caf51d527662b75bdc9c9c7aab40f86dcb9fb85ac552aa" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f143854a3b13752c6950862c906306adb27c7e839f7414cec8fea35beab624c1" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-bindgen-test" +version = "0.3.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80cc7f8a4114fdaa0c58383caf973fc126cf004eba25c9dc639bccd3880d55ad" +dependencies = [ + "js-sys", + "minicov", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-bindgen-test-macro", +] + +[[package]] +name = "wasm-bindgen-test-macro" +version = "0.3.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5ada2ab788d46d4bda04c9d567702a79c8ced14f51f221646a16ed39d0e6a5d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "web-sys" +version 
= "0.3.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77e4b637749ff0d92b8fad63aa1f7cff3cbe125fd49c175cd6345e7272638b12" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa30049b1c872b72c89866d458eae9f20380ab280ffd1b1e18df2d3e2d98cfe0" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "weezl" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", + "web-sys", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.54.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9252e5725dbed82865af151df558e754e4a3c2c30818359eb17465f1346a1b49" +dependencies = [ + "windows-core", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.54.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12661b9c89351d684a50a8a643ce5f608e20243b9fb84687800163429f161d65" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-link" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" + +[[package]] +name = "windows-result" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + 
"windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.61.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa" +dependencies = [ + "windows-link 0.2.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link 0.1.3", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version 
= "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wit-bindgen" +version = "0.45.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c573471f125075647d03df72e026074b7203790d41351cd6edc96f46bcccd36" + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "ws_stream_wasm" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc" +dependencies = [ + "async_io_stream", + "futures", + "js-sys", + "log", + "pharos", + "rustc_version", + "send_wrapper", + "thiserror 2.0.16", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "x25519-dalek" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a0c105152107e3b96f6a00a65e86ce82d9b125230e1c4302940eca58ff71f4f" +dependencies = [ + "curve25519-dalek 3.2.0", + "rand_core 0.5.1", + "zeroize", +] + +[[package]] +name = "x25519-dalek" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" +dependencies = [ + "curve25519-dalek 4.1.3", + "rand_core 0.6.4", + "zeroize", +] + +[[package]] +name = "xmlwriter" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9" + +[[package]] +name = "xsalsa20poly1305" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02a6dad357567f81cd78ee75f7c61f1b30bb2fe4390be8fb7c69e2ac8dffb6c7" +dependencies = [ + "aead", + "poly1305", + "salsa20", + "subtle", + "zeroize", +] + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", + "synstructure", +] + +[[package]] +name = "yrs" +version = "0.19.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8ca5126331b9a5ef5bb10f3f1c3d01b05f298d348c66f8fb15497d83ee73176" +dependencies = [ + "arc-swap", + "atomic_refcell", + "fastrand", + "serde", + "serde_json", + "smallstr", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "zerocopy" +version = "0.8.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.16+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "zune-inflate" +version = "0.2.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02" +dependencies = [ + "simd-adler32", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..f08bda2 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,44 @@ +[workspace] +resolver = "2" +members = [ + "nextgraph", + "ng-repo", + "ng-net", + "ng-broker", + "ng-client-ws", + "ng-verifier", + "ng-wallet", + "ng-storage-rocksdb", + "sdk/ng-sdk-js", + "ng-oxigraph", +] +default-members = [ "nextgraph" ] + +[workspace.package] +version = "0.1.2" +edition = "2021" +rust-version = "1.81.0" +license = "MIT/Apache-2.0" +authors = ["Niko PLP "] +repository = "https://git.nextgraph.org/NextGraph/nextgraph-rs" +homepage = "https://nextgraph.org" +keywords = [ +"crdt","dapp","decentralized","e2ee","local-first","p2p","semantic-web","eventual-consistency","json-ld","markdown", 
+"ocap","vc","offline-first","p2p-network","collaboration","privacy-protection","rdf","rich-text-editor","self-hosted", +"sparql","byzantine-fault-tolerance", +"web3", "graph-database", "database","triplestore" +] +documentation = "https://docs.nextgraph.org/" + +[profile.release] +lto = true +opt-level = 's' + +[profile.dev] +opt-level = 2 + +[patch.crates-io] +# tauri = { git = "https://github.com/simonhyll/tauri.git", branch="fix/ipc-mixup"} +# tauri = { git = "https://git.nextgraph.org/NextGraph/tauri.git", branch="alpha.11-nextgraph", features = ["no-ipc-custom-protocol"] } + +[workspace.dependencies] diff --git a/DEV.md b/DEV.md new file mode 100644 index 0000000..2047f10 --- /dev/null +++ b/DEV.md @@ -0,0 +1,240 @@ +# Contributors or compilation guide + +- [Install Rust](https://www.rust-lang.org/tools/install) minimum required MSRV 1.81.0 +- [Install Nodejs](https://nodejs.org/en/download/) +- [Install LLVM](https://rust-lang.github.io/rust-bindgen/requirements.html) + +On OpenBSD, for LLVM you need to choose llvm-17. + +Until this [PR](https://github.com/rustwasm/wasm-pack/pull/1271) is accepted, will have to install wasm-pack this way: + +``` +cargo install wasm-pack --git https://git.nextgraph.org/NextGraph/wasm-pack.git --branch master +``` + +On Debian distros +``` +sudo apt install pkg-config gcc build-essential libglib2.0-dev libgtk-3-dev libwebkit2gtk-4.1-dev +``` + +``` +cargo install cargo-watch +cargo install cargo-run-script +// optionally, if you want a Rust REPL: cargo install evcxr_repl +git clone git@git.nextgraph.org:NextGraph/nextgraph-rs.git +// or if you don't have a git account with us: git clone https://git.nextgraph.org/NextGraph/nextgraph-rs.git +cd nextgraph-rs +npm install -g pnpm +cd sdk/ng-sdk-js +cargo run-script app +cd ../.. +cd helpers/wasm-tools +cargo run-script app +cd ../.. +pnpm -C ./ng-app install +pnpm -C ./ng-app webfilebuild +pnpm -C ./helpers/app-auth install +pnpm -C ./helpers/app-auth build +``` + +For building the native apps, see the [ng-app/README](ng-app/README.md) + +### First run + +The current directory will be used to save all the config, keys and storage data. +If you prefer to change the base directory, use the argument `--base [PATH]` when using `ngd` and/or `ngcli`. + +``` +// runs the daemon in one terminal +cargo run -p ngd -- -vv --save-key -l 14400 +``` + +If you are developing also the front-end, you should run it with this command in a separate terminal: + +``` +cd ng-app +pnpm -C ../helpers/net-auth builddev +pnpm -C ../helpers/app-auth builddev +pnpm -C ../helpers/net-bootstrap builddev +pnpm webdev +``` + +In the logs/output of ngd, you will see an invitation link that you should open in your web browser. If there are many links, choose the one that starts with `http://localhost:`, and if you run a local front-end, replace the prefix `http://localhost:14400/` with `http://localhost:1421/` before you open the link in your browser. + +The computer you use to open the link should have direct access to the ngd server on localhost. In most of the cases, it will work, as you are running ngd on localhost. If you are running ngd in a docker container, then you need to give access to the container to the local network of the host by using `docker run --network="host"`. 
+
+Follow the steps on the screen to create your wallet :)
+
+Once your ngd server is running in your dev env, replace the string in `nextgraph/src/local_broker_dev_env.rs` with the actual PEER ID of your ngd server, which is displayed when you first start `ngd`, in a line starting with `INFO ngd] PeerId of node:` (a minimal sketch of this one-line file is shown after the Packages list below).
+
+### Using ngcli with the account you just created
+
+The current directory will be used to save all the config, keys and storage data.
+If you prefer to change the base directory, use the argument `--base [PATH]` when using `ngd` and/or `ngcli`.
+
+`PEER_ID_OF_SERVER` is displayed when you first start `ngd`, in a line starting with `INFO ngd] PeerId of node:`.
+
+`THE_PRIVATE_KEY_OF_THE_USER_YOU_JUST_CREATED` can be found in the app: after you open your wallet, click on the NextGraph logo and you will see the User Panel. Click on `Accounts` and you will find the User Private Key.
+
+For example, to list all the admin users:
+
+```
+cargo run -p ngcli -- --save-key --save-config -s 127.0.0.1,14400,<PEER_ID_OF_SERVER> -u <THE_PRIVATE_KEY_OF_THE_USER_YOU_JUST_CREATED> admin list-users -a
+```
+
+### Adding more accounts and wallets
+
+In your dev env, if you want to create more wallets and accounts, you have two options:
+
+- create an invitation link from the admin account
+
+```
+cargo run -p ngcli -- -s 127.0.0.1,14400,<PEER_ID_OF_SERVER> -u <THE_PRIVATE_KEY_OF_THE_USER_YOU_JUST_CREATED> admin add-invitation --notos
+```
+
+and then open the link after replacing the port number from `14400` to `1421` (if you are running the front-end in development mode).
+
+- run a local instance of `ngaccount`. This is useful if you want to test or develop the ngaccount part of the flow.
+
+See the [README of ngaccount here](ngaccount/README.md).
+
+Then you need to stop your ngd and start it again with the additional option:
+
+```
+--registration-url="http://127.0.0.1:5173/#/create"
+```
+
+### Packages
+
+The crates are organized as follows:
+
+- [nextgraph](nextgraph/README.md) : Client library. Use this crate to embed the NextGraph client in your Rust application
+- [ngcli](ngcli/README.md) : CLI tool to manipulate the local documents and repos and administrate the server
+- [ngd](ngd/README.md) : binary executable of the daemon (that can run a broker, verifier and/or Rust services)
+- [ng-app](ng-app/README.md) : all the native apps, based on Tauri, and the official web app
+- [ng-sdk-js](ng-sdk-js/DEV.md) : contains the JS SDK, with examples for: web app, React app, or Node service
+- [ng-sdk-python](ng-sdk-python/README.md) : contains the Python SDK
+- ng-repo : Repositories common library
+- ng-net : Network common library
+- ng-oxigraph : Fork of OxiGraph. Contains our CRDT of RDF
+- ng-verifier : Verifier library that exposes the document API to the app
+- ng-wallet : keeps the secret keys of all identities of the user in a safe wallet
+- ng-broker : Core and Server Broker library
+- ng-client-ws : WebSocket client library
+- ng-storage-rocksdb : RocksDB-backed stores. See also the dependency [repo here](https://git.nextgraph.org/NextGraph/rust-rocksdb)
+- helpers : all kinds of servers and front-end code needed for our infrastructure.
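+
+As referenced in the First run section above, here is a minimal sketch of what the one-line `nextgraph/src/local_broker_dev_env.rs` could look like once you have pasted your PEER ID. This is only an illustration: the constant name and the value below are hypothetical placeholders, not the actual identifiers used by the crate.
+
+```rust
+// Hypothetical sketch — replace the placeholder string with the PeerId that
+// ngd prints at first start (the line starting with `INFO ngd] PeerId of node:`).
+pub const PEER_ID_OF_SERVER: &str = "<PEER_ID_OF_SERVER>";
+```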
+
+### Test
+
+Please run the tests in this order (as we need to generate some files locally):
+
+```
+cargo test --package nextgraph -r --lib -- local_broker::test::gen_wallet_for_test --show-output --nocapture
+cargo test -r
+cargo test --package nextgraph -r --lib -- local_broker::test::import_session_for_test_to_disk --show-output --nocapture --ignored
+```
+
+Test a single crate:
+
+```
+cargo test --package ng-repo --lib -- --show-output --nocapture
+cargo test --package ng-wallet --lib -- --show-output --nocapture
+cargo test --package ng-verifier --lib -- --show-output --nocapture
+cargo test --package ng-sdk-js --lib -- --show-output --nocapture
+cargo test --package ng-broker --lib -- --show-output --nocapture
+cargo test --package ng-client-ws --lib -- --show-output --nocapture
+```
+
+Test the WASM websocket:
+
+First you need to install the `chromedriver` that matches your version of Chrome:
+
+https://googlechromelabs.github.io/chrome-for-testing/
+
+then:
+
+```
+cd ng-sdk-js
+wasm-pack test --chrome --headless
+```
+
+Test the Rust websocket:
+
+```
+cargo test --package ng-client-ws --lib -- remote_ws::test::test_ws --show-output --nocapture
+```
+
+### Build release binaries
+
+First you will need the production build of the front-end.
+You need to build it fresh from source, following these instructions:
+
+```
+cargo install cargo-run-script
+npm install -g pnpm
+cd ng-sdk-js
+cargo run-script app
+cd ..
+pnpm -C ./ng-app install
+pnpm -C ./ng-app webfilebuild
+pnpm -C ./helpers/app-auth install
+pnpm -C ./helpers/app-auth build
+```
+
+Then build the ngd daemon:
+
+```
+cargo build -r -p ngd
+```
+
+You can then find the `ngd` binary in `target/release`.
+
+The CLI tool can be obtained with:
+
+```
+cargo build -r -p ngcli
+```
+
+You can then use the binary `target/release/ngcli`.
+
+For usage, see the documentation [here](ngd/README.md).
+
+For building the apps, see this [documentation](ng-app/README.md).
+
+#### OpenBSD
+
+On OpenBSD, a conflict between the installed LibreSSL library and the reqwest crate needs a bit of attention.
+Before compiling the daemon for OpenBSD, please comment out lines 41-42 of `ng-net/Cargo.toml`. This will be solved soon by using `resolver = "2"`.
+
+```
+#[target.'cfg(target_arch = "wasm32")'.dependencies]
+#reqwest = { version = "0.11.18", features = ["json","native-tls-vendored"] }
+```
+
+To use the app on OpenBSD, you need to run the daemon locally:
+
+```
+ngd -l 14400 --save-key
+```
+
+then open Chrome (previously installed with `doas pkg_add chrome`):
+
+```
+env ENABLE_WASM=1 chrome --enable-wasm --process-per-site --new-window --app=http://localhost:14400
+```
+
+### Generate documentation
+
+Generate documentation for all packages without their dependencies:
+
+```
+cargo doc --no-deps
+```
+
+The generated documentation can be found in `target/doc/nextgraph`.
+
+### Contributions license
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you shall be dual licensed as below, without any
+additional terms or conditions.
diff --git a/LICENSE-APACHE2 b/LICENSE-APACHE2
new file mode 100644
index 0000000..7acc4a1
--- /dev/null
+++ b/LICENSE-APACHE2
@@ -0,0 +1,176 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction,
+and distribution as defined by Sections 1 through 9 of this document.
+ +"Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files. + +"Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions: + +(a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and + +(b) You must cause any modified files to carry prominent notices +stating that You changed the files; and + +(c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and + +(d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License. + +You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000..92fc43f --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..5ade76c --- /dev/null +++ b/README.md @@ -0,0 +1,71 @@ +

+ ![nextgraph-header](nextgraph/.static/header.png)

+ +# nextgraph-rs + +![MSRV][rustc-image] +[![Apache 2.0 Licensed][license-image]][license-link] +[![MIT Licensed][license-image2]][license-link2] +[![project chat](https://img.shields.io/badge/zulip-join_chat-brightgreen.svg)](https://forum.nextgraph.org) +[![Crates.io Version](https://img.shields.io/crates/v/nextgraph)](https://crates.io/crates/nextgraph) +[![docs.rs](https://img.shields.io/docsrs/nextgraph)](https://docs.rs/nextgraph) +[node:![NPM Version node](https://img.shields.io/npm/v/nextgraph)](https://www.npmjs.com/package/nextgraph) +[web:![NPM Version web](https://img.shields.io/npm/v/nextgraphweb)](https://www.npmjs.com/package/nextgraphweb) +[![PyPI - Version](https://img.shields.io/pypi/v/nextgraphpy)](https://pypi.org/project/nextgraphpy/) + +Rust implementation of NextGraph + +This repository is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and to join the dev team, please visit the link above and create an account (you can do so with a GitHub account). The [GitHub repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues. + +## NextGraph + +> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs. +> +> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer-to-peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers. +> +> More info here [https://nextgraph.org](https://nextgraph.org) + +## Support + +Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org) + +And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org) + +[![Mastodon](https://img.shields.io/badge/-MASTODON-%232B90D9?style=for-the-badge&logo=mastodon&logoColor=white)](https://fosstodon.org/@nextgraph) + +## How to use NextGraph App & Platform + +NextGraph is in alpha release! + +You can try it online or by installing the apps. Please follow our [Getting started](https://docs.nextgraph.org/en/getting-started/) guide. + +You can also subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/). + +## NextGraph is also a Framework for App developers + +Read our [getting started guide for developers](https://docs.nextgraph.org/en/framework/getting-started/).
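
To give a first taste of the framework, here is a minimal, illustrative Rust sketch of spinning up an in-memory local broker and preparing a SPARQL update. It is loosely modeled on the crate's bundled examples; `init_local_broker` and `LocalBrokerConfig::InMemory` are assumed from those examples, and the commented-out `doc_sparql_update` call is hypothetical — the developer guide above is the authoritative reference.

```rust
// Minimal sketch — the API names used here are assumptions, not verbatim usage.
// Assumed Cargo deps: nextgraph = "0.1",
// async-std = { version = "1", features = ["attributes"] }
use nextgraph::local_broker::{init_local_broker, LocalBrokerConfig};

#[async_std::main]
async fn main() {
    // Run the local broker entirely in memory: nothing is persisted to disk,
    // so all wallets and sessions are lost when the program exits.
    init_local_broker(LocalBrokerConfig::InMemory).await;

    // After creating or opening a wallet and starting a session (elided here),
    // a document could be modified with a SPARQL update such as:
    let sparql = r#"INSERT DATA { <urn:example:doc> <urn:example:title> "Hello NextGraph" . }"#;

    // Hypothetical call, for illustration only:
    // doc_sparql_update(session_id, sparql.to_string(), Some(doc_nuri)).await?;
    let _ = sparql;
}
```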
+ +## For contributors or self-compilation + +See our [contributor's guide](DEV.md) + +## License + +Licensed under either of + +- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + at your option. + +`SPDX-License-Identifier: Apache-2.0 OR MIT` + +--- + +NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology, under grant agreements No 957073 and No 101092990, respectively. + +[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg +[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg +[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2 +[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg +[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT diff --git a/RELEASE-NOTE.md b/RELEASE-NOTE.md new file mode 100644 index 0000000..6ec655c --- /dev/null +++ b/RELEASE-NOTE.md @@ -0,0 +1,40 @@ +# Release 0.1.1-alpha + +_02 September 2024_ + +This release is not stable and should not be used for production work or to store personal documents. It is meant as a **preview** of what NextGraph can do as of today and hints at its future potential. + +**Please note: The binary format of the Documents or Wallet might change, which could result in a complete loss of data. We will not provide migration scripts, as the APIs and formats aren't stable yet.** + +If you previously installed any NextGraph app on your device, please uninstall it first by following the normal uninstall procedure specific to your OS. If you have previously created a Wallet, it will not work with this new release. Please create a new one now. + +## App + +Please read the [Getting started](https://docs.nextgraph.org/en/getting-started) guide. + +[changelog](CHANGELOG.md#app-0-1-1-alpha-2024-09-02) + +## SDK + +The SDK is not documented yet. + +[changelog](CHANGELOG.md#sdk-0-1-0-preview-6-2024-08-15) + +## Broker + +The `ngd` daemon is released with the basic features listed in `ngd --help`. More documentation will come soon. + +[changelog](CHANGELOG.md#broker-0-1-1-alpha-2024-09-02) + +## CLI + +The `ngcli` command-line tool is released with the basic features listed in `ngcli --help`. More documentation will come soon. + +[changelog](CHANGELOG.md#cli-0-1-1-alpha-2024-09-02) + +## Limitations of this release + +- You cannot share documents with other users. Everything is ready for this internally, but there is still some wiring to do that will take some more time. +- The rich-text editors (both for normal Post/Article and in Markdown) do not let you insert images or links to other documents. +- The webapp has some limitations for now when it is offline, because it doesn't have a UserStorage. It works differently from the native apps, as it has to replay all the commits at every load. This will stay that way for now, as the "Web UserStorage" feature, based on IndexedDB, will take some time to implement. +- JSON-LD isn't ready yet, as we need the "Context branch" feature in order to enter the list of ontologies each document is based on.
diff --git a/nextgraph/.gitignore b/nextgraph/.gitignore new file mode 100644 index 0000000..01f9583 --- /dev/null +++ b/nextgraph/.gitignore @@ -0,0 +1,2 @@ +tests +local_broker_dev_env_peer_id.rs \ No newline at end of file diff --git a/nextgraph/.static/header.png b/nextgraph/.static/header.png new file mode 100644 index 0000000000000000000000000000000000000000..fa6a0f13b13472a21d10d01ba0e002c85c747d0f GIT binary patch literal 139433 [binary image data omitted]
z?y$R~X+wOA^&~m_a}k-9=}0RFr0U;-uU?<55uwJwwG@sc#jbb5dwm!Q(DR8Ee2h!Q z?2TTHBBCZw=7EW_eHb@3SMk=t6cJIhams)7@k;0N#`d}FyE*UsrTfD6km3D2KXRCC zzjbFu`uoph#GCHdo~bDX%#W~{&mTPF%Bu_xq2-lIa97%V?;Z_`RjYEpL}da2p&TF? zjLW6WZX<7LdDJ1?(dPZio0F3Rv=wZ?+1}XP92gnt{qqMJ1_q|JwY6Bj03IyjUiYya zwWRg+iLtP-7C;LP1e@)vW{r)Ea^y?;2L?KUw6ny}>nA8GAUO+6&AzwclL0^qs2BQP zT1!eO>+5AkYtrK5yEC|)8@--6*Vos#Fv#r^FHZSts zm>?2x3{0;U5mJ6t7Hp|6yStd;?Mk^$-e|=7G%7;S+O2b`vUXxl zkC1N?hPDB^py)PUARGydH*JCFkv^Y{s3;*2=mFB!5MZRt5Bw8>V}fhpE>(#g&@}7; z2(Y?7L`1|0JT7JT^yK6|P&Ki;UXTLW>xsuaO4wa(Xpzq939BA>ulDEy(o*bxi_5eT&99|j~j^}`mgE5_V zeis5Iv{=vkW81h(4R6Jgsu-CzOJyGPtu2&3N7!8-D5s+`ke|f7xtw5?fRj&m_`-22Q z0YEX+FgTLq6`oL6fEDIHb!(pbsYiAcW*~nmkTK&esI$#I*yeuw&np&4Fulm!POzz| zsR0A(03h#xP7NJS!o`&a9{3CRK+y}S?k-0#*w1Hs|$)L$( z*6HuBMH+-WK~2r&y~%>o8<+D7mSV+T@^6URbx9;eqY%1A=KTR#Md#`gI(j|&m=9J2 zoQ?+qohPZ^d`mOgC+R7$z#uj}-Q9sQhh_*+!qZqR{-Vz#706P6NzCurSqvZt0gvkU;W%CO zk2ShoQ^CXGg4wO(D<=NskR-(WBF}F{;|A&MZa&jADNQ!C6?N9Zfv)oziGxE!f=Naj zC*)_nb5)wHHklzU>!jqQd9pr0V*oI@K@03AnaT|3&Se2SWiw-bHRgjWkg9FHJkae1R08cz==+6<~`YP_D^ zfSOjAM}xNeF$)kotgNXKEKwvLVg!kH31E1^g$9ug-_4l}1rxI$P}VhflB40hXR<&# z0u!R3h+mrJK>_i(@;R~;#cNZUNRJJ)3c|6MeTzmeMmSQzZ26`8%^OLgyCWL zUO#wpKD@W~hIFd`d$rFOvMGW@_+~bmZnLXko%y@s%~4WD-p<^Fpkzjyp9Th^4gCS# zEgEyj8kv|toYBR_nwJN0)$JyS?E!Nd+pj|XZ)~`&>P#;+UenfdP-rC2bjaX@k{oLv zc2@uX*8h9u7-t=Y|88ETqvmy7kM#EnRC5~xrU?x7dbUiq5eb1?`#s!Y(g*~h>5l5k z{w%(XvkIt~ZI2h~$l;?K8`(jDqq1l3?d=WJ;Fa>bHU7PDP`Lh{nu-s6Pe4?v8b>@_x*G zL63O2@=Rqm_d!@{$r`cdi=@|h`|&<6=j}tQGyuG-Vlan?Cs&|m6$`{Ah*HS)i<6R~ zQ^@v?-vFB1!{Zgfq{WeV>G{$Xf@zz zr@$Y84FZ>|Wznr3IDe*#^=H}pU{(D7o7A*3hYT?xq0dB~2$RR1jkf2t68LU>Iy(43 zw95xvY@?sYV4yYE+H`gU0mk+#05RbmDP(SJ{Kx0b+b}3*K-`t7VzTMd3Hl0o>+4c|uE8T24s--boWxps4?>3{=;@JI1SqRTVz5T_QTUj|t6kfH#7LA|3 z(`ch9DhtWvzPi9g(ZQhaga;i#tyqp^h_URjUSBr)_Wt2;AmYxAilpP-ziQJf*E_kB z`O+dt5}(ohD>0jl@$o~XVyUwKwp)1?xV+!B&4NkAiphQ$mx@(Vw$3@w4m7i{FR4t9Yf z?;aqS$6lI@rlY)2Jt+}VsO)(T>>WK}y@4}b+ET(j9{xtXhw$SOE)<&#c1~<4R@ zN!4K^oMM! 
z@4FKS!4%_{c3UdQE5yV(K&5M86CgG)9$EpCXRJ0K<@exV7;q>6g=~}OBP&Ql)9xJ6 zhnh$PytwX<7c6%!8$F={eW0S91{3NZCvpWq5K6IFmZItr=%|^3ru4^x9r07$(_`sZ z*|z(!W6NGGDOoE}&?iYhUl7-S4KN~nd;4@=d;EcdN)d6#K2Odbl z0?DImo&^hN#J%9WwmGgln%eKn0qveF)O2z3@|e>d9bd%ZV&dY!=&mn?(~&BfPK)Zo zHH+61kVvNR`P@c29F+-zgM&qKHv9X#{FNT4ir9xoMuxO@Ioa5faoDWTIWdrt&otlY zf$J!{r&=JN_=6d}OGgI*l>Fk@ZKKF5?Ov&tk#?PRxiH#FLekvJ@B$wg*K`efr?Z6L z=W)XrbUeiofB^;zQ#`2Sm~@}0SYPk?O8}uxY?J+FjF=Q%H0}*_HCA5xeJFjqD*@W3 z$zkK&j1A1vkM<>uUF%XXXRq+30h3{e<||o4ex+;P5bzE+ycS zV4Ef?Awj~(coB;C42spY!$XZ?rWaExND+~~T{k-F(yq6pNUJJ3+pVHKfBwWJC*KO$ zp4T=ufBLY7j)(VL@rVrK4frawIr&o(GltrVG0XI;>`Fqrev8(A6?1I%JkU2JMRnz) zuW0+5$%4Zg2c!DU3i^s3Hknpq6qq^0LcqO~I$+-I(;=6o&OTuZ?+j*OVBLzJIGrez z!fVI8|7cQSo+R~-PDl8#9&c~H%|3K?WU+$ML^|uU$cIOyqrRTsm~gV9Xl8Z}Ka2uxo<0C^h?_+HDjA(cRYB_L38Jin#4JZx-iB%a0Rc0ne|GkIOc z`1nz2kDZoQ>J&UC=uyx)ZhuL9dJpOYYCCD2E}^@_A<*)7z9)e_U5;G4DuFq?hPg70 zObq;C`UX#-Qn%K*qI_f}*1L|5HN3^50Q3H8m}@xvRJzDc40+ zdchHHhYKJf$>>GA`pB06)B4!TMHEBG`A{~9XV|XV2;3F78YC>Nd5V}yhA$hN^q8z3aNMx=0BbkLxAf$!{PS@#H2inCYIVTr~o-Uyu z;WNhYh-BSsRA{5PbRj&lpKLO~Xs@51%Cxy%hiTE>W!gO#SytFM3lPen{o;VW-%($xrWBoY$AjO6Vj-A)xggx!UL%U`Qi&|rZ1@>Zk~ z%PrhosWd?qPfBv6M7D}ba~$duMSs=ax)O!~6GP3jPVrPF9?Q>W8(QYDxjh@Nl)4No zw3SamuHwM91Ptr`;!FJBSQ%t$kc7gIG~94|N0eP$T(T7poW?<$(<;iH{#x#{TbC?f zA{0k@?2bUl#ij8-We@0qMec2PpsQ=_VzieBh(@wqIw-5x~Wm`e#>oqHt!AXDI%h$uZY2%ZdH-`FAfZTaFgJQMYC6L z=@=;zcGSB^%_cllE#RzxH`s$1dg3XECq3y~B4QKaBWY*I5s}+vGxqFoYjkr>_9)6V z#`7pOOX}I#*{X6m_d}%}o9;m-pm!eW6nrk`{Gjo^Z|coQYTl4F+`_*3?{8kI;KlW$ z!GdojtL>V~Digai z1Klxm#GM{KF9EW5Te-!4SGtL|i2H1Ba>S%c&2}F-GiXCz6&<5j;eQUE`QIG03*Um<8yS)zwsSizlG?FMj_w zAh`zIW`0u>6BWb4gM*n}_rBnupmuQc|1BY)^g{Cnb>wsQkKo1O(OL#Y#YyyF@z%9V zup!K`%C%_jr5y$IiS1sz!PbkqPnUq{5l8QypVlJxesj~XZ$Dj!m zx2O|T7mcPs13Sdr>54x zEJy$wxoj{s3>*bbH!3FP-LL=;sEB~t;}@erbbSK@iGuX%fJmu{^a4;*xgBKPg7L=4 zNZEK(cQBGY=qigI?o(1QS*=$z_tIlyQQF(v|6gy?UO}Chf}dbC*K|ty(#{)kXNXx< zWoy*+JKpOl_J$U6uGv*#V490Qcu6wRX*D3lXNHz9`l$R|gOHb@s%MCag=LbiRJw&A zSRnetPg#oo;X$EM8C`uNvvMT3pZ6MZS#GD%Yx`dqna4j5jSdZ=K~sL?bUIQsgwr!M zr2sYZE)Y(9UEl?iG$0?GJ`hh{1A5-RpuJfY4x@KK$Qhgf_zW(`1N3Bi9lDzbFwqq7 zH6RtsH5tqNR<68z~qWy*6EQZ8N9)ZgWi1?dpKG zwH!A#BjZE0F}?&rDO*NHdW;bZs<{5ilJQigy@Q2{485aP+cl{e9E19goufV34I`>p zSqV4%A8tSTZ?%Z(B`_>3Ks}7x4=&#O3>l#J3CwPQRMW=pZg5%}4p8x@NE8Q!AL!h7 z!4rVVN1kM&Lef8TIbaW$82c!VDA^v&A`fOOqJc);=g*%Fjf?;jr5m%WHjl=ElOt zPK?#fhMb?{NY_sfN46z)Wkpe2oUBal&QNGnzYxH{aJAvwJf4z+F9uS{92@@kNE>5f z=`3Z=Bx%`)1o471bFJHwI?uznQ^i?0(}6-Mjac8xvoG?Ss8`Bkwf$U6q0PbAEFUei zZPT>L)HA7Q)3^;uv4)370`n5{PvI33IF)`X>h;GE(P%X%L_~Bw+AC3g6p6y6RM}4W zcrab^kFp!;YAEI{Dhf+PB!fYDpQxe@hSC|}U}JC+eEKA;sye5z47~HOfzGK$ORms= z(+cjK247cD>WqX}2XM}a7sQbbB~7*2iws{T69TkVrndFf`brx`8}{{PbP$8tBq>h(~>Nu)h_KOf6lXWp6X6SUG5L z9&$SAF}LG)b(<;h&Kf0CUpcpDtJUuvoV*C!7lnh0&Qq^5M*v-(we{()_of6Yu)%h5 zb4zmLFnR;_#L!6i5kQOB`+(2k@NXoj`MGD^i_NAA5wMwym*_wK8!WcA?tKp$pO^^M z&|BX!EQpUO?pjz-zdcaS(S6=2L%{#yV+3e9LpHjvRzTVKPgme&*j5`QT>Ik*R}wTG z2{qqO^E~J`!|ILjr{*VQ^4iME(3E1SJScGjp*j8h#R|Fl1VX$yu$&-S@9mq{yf_VrE3QA`ziAbZvew?w{Lx+P?8`xxMG|o&RZu9x8(72+;-==^y@Mzt^F{D zPUY$EH{gbHIIbhW$u;=IeGK65OoMGhgBDd)O}Usqixyw1ZxAF9x&X-WPeU1wqT=Sz zuehzm26q{*o%Fzf6Dcn1L5Pft%Tmdz2U zqWmOYnhuOuCc$2UaVF{0r%w(C)1iO|i~8~7H#<8*~%*Z@f~ozxU)dCuK4jC1gsa#;UapaFBP%&7pnu7H{cq(yibcC4Wvm&Oze{h_6 zlz_j=#;GNqVjBn7hrgC>%X@|tsS=-R4A_pf%_7yI3rN8=pyS14^$Q5-b1u&!OWkU; z)nRmA@j#7>J)ATKO_KBcB;dgAgFyguje>5 zErVacFoe~|zX~=eUxUARl!R967w&(w0ofp^9D$tQ1D*UhMvAevBG!orzR3iq_0GHbG+8Ht`fp*-|6u`){^&5tmMHc!y3GFSYi8-3E>n=Ob_@^o zLN?ck3&aFAzB0?C+*cl*U*16cY}DA=Hry+CGE?}xu`Y+gVvbHR-{#1nK>o@SyN4Ew(* 
zN}_+YZm^7$&ath~-S;-7$=|m8}TlXfq+)L-tCXT`;&C|}Z)>u`IPWBZzcejWQ(cC~DrtGpa+&ZG(b=BPCz$Wvs2< zVEO0n)ib;%o`lg=s3(Z&CXL~V{Q<}CFD@aQ%JcB8VyO{;l|?ZSMCcwjtM!-^5a<0F znMpkT<1Ljqi0@Xi7*NvLBfJ8cZOcPI^;nONSR_dEfhjaX3N7e=UxX7>PFovKo%y|K z^laIpXjrCy4pfDjMBmMElaAomPh7e zF7Iv2mls=Zov8a*?;A~*1NUQImr#bhc7x-iEVzYwgqRpP17Lda^zL$h*4t<0!13|Q zEpC$ex@27;P1unN4xqe}rH@dB|9gfr#XT0YjPOJM`I@~wSk(9_(3?zZ+y?~(SuWK3 zuT+y({`_{Ao?r?2hNZz*j(-Ar7$_;|JdIb_3=K&9)??r zXh?qHs)ZX!EslvJHA`9%(+`s^x{(s4y74gGGqSt<{Z&Q{6>x03o?Pt#0);@c&F!69 z61&3-avyz$;aKWNnu@82or?PIrc{{kX|t1b-3g?jZ6f8!wkPl~w!8K{z=LwK^5b88GlBQ$*%iCjr55g8y-CDBS(dx^uc= zHLGK4cIi=_m;$WHUq*8~yl31CfTc&TMMZm83DYhxU9ntjkM#0 zO=J-c67H`~T{^;@R0QIzy{Pf1Z;NANF5b?39a5%+)viPALly15AT@q%^GRc4_N#1G z`N&!Koe)<#_?LD~EH%a$?EnA3<4eO(Id{DBy2K{=&#iSI7-dLR|{b)1fVQHmK6z?V@ zBeQ?J@y|vXL&Z`$JKfqcw0~3L0 z`NrDjuDad_T_RDnsAQY75G?Vu-JRv-C|+K4t^CPVJaPl@5r9OODpwbdOJF`LjR8HQGiW*;5CC=ov?|I09$g9<8ymaY=+R2sY(8C3B(#FcHsJfQ=0R=xeGTg56 z^gKHl^=5Sn={NseOF$@=nF=hH`v=zS$0X!K-JC9wLL$EZA6zX(iyx?heS>6ZEn@>ul$pWRKO+KEO|9=GXE&4q}pCWPevK7q(6G_F?Ign+G5UG{L%3eh3?AclEF zsn+BG4HK0v7)MIQ&cX0p@uQ1^)A*0|Dm=;m2BAm4N~63~s9Lv}OUuV7ymW5c-08?F ztIJR7b*4d01@Gb%zZ`y+Yf~14R4LD~%1AAEi755=4~>W_{91amcgc#M*h3Ru9ugL2 zeEIiyVV?h5&~Ng@DL~SnPNlHFrjv@jrm86Y$7w?0!?@ zev4?XSfTzobH4^IoZa@|my^!scr@>yxj9TYO7GRYwT_GRV8KQdCm1XnM<gLU_H6e0)xoqTrttq(yB`DI4X5%TaamB4uANoIdvHG4GuGIB8{+Pn zXEI@LKEYtLzW~=a+j$J%;Urg#s$5o-;-h}_xcBx+n<=Z+vIuBoKp%PZdS2yK9#Lw{|D|6G+t z^o`4o-?4+~x}dRf;g_iYu4gqpY#OTt|E2PA%mfq=!}=UDmm)gBc)tW2p5xr%+~~N= z3oi!LD?ofr7+yMWNQE2 zg=~cP0ysF-FX^0t=2pfKOOX*9k+bf|D7GrKh?Pdm@?F;2gfeA+mb(0ix_LXtWuamx z$W~t%gCk3m%(-T?+}p*XGkL-_Z;L4`_Bbk7C>NGtw!cNQv?MGgRzAXmu#eMjVD zQXr0iUjvI1`FU$=zs7^n1N|IsfZlO40`mnWkC#AD+snIL%pcl=!59j@N=VTpASkGs z*M{>PD^)3yHjT|_Ct?*|E6*ze=q#;P!qw@x5&-O?c!BA z=n*K;QFS~oX|E6e{wsWy%hiWQw>x4Ji_Rz7+aAw4W01lS^kF&vV=`wCzZdYaCAiI& zh<*d+X~bec1X%Ukt#iAZ8O~>yZmS(SRRq%{ubGO~1T*h&UEJoKve*v0^+_^P(&^tR z72Z8dORal>sOZU~Ba=%3y;B`Ij-|@~G}JRnjYsKqF1K&#ttYRuywiHS3Wj8duqh{Z zuQRXQ@+xdt@#11*JD28}`V-MbeWI;BgCbGI_Wx@%xVfN>cK8tx)S4}^mYW@nr;^IH zAcNUP4VN%>Bp-|}BxWdv{t|3>6V#EAcyH&ZC*5!F!nvqi-toEbVtwkJdAH`AR8FH+ zMI8ot6V!HFUe&YW&Q@=Vie~Wh^Gc0CQYO!rFLe-Z&M6Exo10;E*jCf)Er*gg9K@LJ#!mwJadB2W&tD?rL1kulIfYi*6 zLH9ErbN^tE@%to&I$Nx=E*U92wn~Ne_LYTW67VhCWji_doZY$x&kv3xhp1%Y zU)*mj#hQTDt>E2x%FT8KH(Xff&@(F}F`!820G~Mo9Nvw1+5WeuB2O>ZHjvX;TUB)> zJqy=3rnBw-hGk9QgcJc(P4tdqO%mSyExm9;a4Xt9fZTLl>(&PjA43$ z!PTxqpAJku-%}jW-0AUPI7yNa*%Rj_X;Fe;LB!$TBxtxzbrF}-h}TJM#GWH2~g z5@)mG|Ifw1-@m3-O3h?*aH-#mk~DlGA(Z~x(;11= z`KTvQ0;5E$#VIyv|M~G&t;K;ZOy~_3as77WKC?0Kv|^3x+$J8&iBA1U-K=JuY zn_A^{{7Tx{N)9G*VBkq9q0!;>YzpSf<(_@lhqIv(5yO+lfJ^=1H`uYSz=3tVxw&}~ zE7+76R;Qo7#GQzsS_>#6bIs(6;oaaiNoKf3PG_J)DZkvqe18}vWae%brRo|m!=T=A zAbx|V7?M!WxHes)aCI~zQ9Y@-HQdYf>$Mz-P*5n}VP8I-%RcCHB%b~DlY+?YPSM9l zO0!%jpR`?2H;&bk;RC{tPE7ARv$%a=AA_@`+wMeVd+>Qyn!zXu=Wp z4K~+by_wW&MVjvsu=$khgX>BWSS;z|m1o2S0ayy;Om=r)1U^EDOG-%=FFGOucw-U` zbFvz{J9l(eFCvMEehP}$@rtsBv%PU@CL;+nc1u8xawO8uM@cvX-I(J_r z)<`u0Mz-iXIdt2~sf#`8;00~%i zfgd?$JOQ61sl%$*e5UIIwl~xBtYhgMy(XS+wyVn-)67OWpV|R>x8u1!Ldjy51RgRM zxP>bIF`K26KDmR#!~0>F)6^3ipG>J$NcMu;o8#Tq_>Ok=6wTd9v zoV9=34h)dgI`rP+Ej09=%6-jAE-EU*07TmLo|3726N=cOZFW}E-_3X?cVfombCQoCWYfrU;bCp(WtZ>yB48M$! 
zf6vbcj%IL9l$toRKGzkD!)c-G4JHpYxp|;@MdO>)hbGk14d~#oO%!Tz4KUiro?>_p z6-K6o!hZM`7~(KnvU&Y-uOx+S4@_Qr~rUb z-!0#ZBE|N7F9s7wDJ(5c z$+kLs;1woa$dX+Y(PgqS#txZSIr=6>IV(}Oh@1_>V> zTdlcewMgM~{po&J+|SQnE)-ZtY9xR;ji zUmj0691jwVcLYQG7o^B$0ZBV1eY|-7dnml}3?r2lh23Kh8EDQ6sHliAHf!nwfc`JFwm7bGE5a~XAE`ii z&32~`XbO~)ir`LzA7W=phqUP=PmUP-SU;!wDtK=`sy)v&8gZTABgy_0+Op$_#KoJhzVz8b*dgA>!n*o#1r!|-x6anP z^YedBU9uJH1RID%Pwv)?T})$nh%t}|N^?99hN6+1PBb&=7Ycqv#~p6EOg3G}M_H`5 zB4V#yBbB_kyv%zoZelLh2wrs7gnRGZIW*Z%yLi3x=Njkg{)lM+YnZHyMBiqi$$y@y zXv1z7L=0lc?-~s|z;6%W?(<0EZ$3(F;cX11*zDg*pnshgd$_NyKofgy3b)cbasXSV z`5JqsU_^YwjezrFWKnV1wavreic^vsJWhej!YGgHYZZVLB-eRxnPrOUP^SAcGvW_} ze=91bLIlyw$vBxv<#xL_xbvNBVkR-^z&@U}Onr0vMBGCjoA}@?By4-I((KXeVER|; z>GR<~tF#tpldJx9%fN}zq|cy(F)gWPgyDV#i97WqJIN>)G@0f zS`dT8L<185x*7C}HMNP93c{DKAUow36`8oME-XB>9pwI0qynh$>RK#pY>2q)@KO0gyi^P(*#ZQZh2TU>$n*@NjW7&y)+OXOWS;peuU+{{3JQZFoio z-ah~}7S=}!90^g;wcc=ye=dGlSO5wD1#S%t=xj|vmGQDWpsiy-eDEk zdsCppgIy5Va*R6ZykNS4U_=P``u36S(k?FSV3A|})CzX?z!;dLo#3M(51?~wZf^%> zWf1@@U4DMPu9cNE@I(3s5dfp#y@Lbdb2yFw2sbx31YG72Uw`N-mjT8of|bZcPv@_s zcdiiLd`N3&u_SX!H3jP3_)k~crU(%cC5lx`BKtC}#Gx2^{7ZJ12B%4{>mJ+0awVYF z%-n@Zt1e6^nUA;LMJO^5DVg)ILM|00VtnE#lF8ah&hjEAqWlqHSjS|l(2UvLH@5zG z+CgpeywdDRF5z_-`);*jV|2TX+YMDx-3=P4jmKs0*cVIVoNE?gDD_#DwT11WcbG!q zU=uy+16(*E3uV;ncaH97iN!|uOFFR%ys6449%Ci0>h0*J1X>ks##YU>flJYV2$w?X z)SliU711$KkplTu>Vw(Rk%%8UU-Y*`#bg810e!tLN%dgK+%ZEG5fZWnAOzM+W4L&DQVtHxUS3`hV4pb!(97bFL?*bSw^P>kH3QC9uf`2pACDiADN4riqFh7#&VmGT=W_dB_rgGtA+3MKPAeH+%34 zyl8u%d@wvbjLB?@49>b&(WSTLvu1}^hQ(~TnYM(Bh{M?5YhC|Jh;)>x2Fu(mMAniK7*nt$iJAFmiB{8MeyA{_txJZ zc&Z5$QZ1~sHpSch`RCnyckb~DOzh4BSi9lD=Qr28zAe;pv9T0!@^`ncWcpTm(uc-T z4tU(r6K@x?i^xZGx1jmT`9#Ls-fA|x_?o=&&8JQ*MxL0OEs$$FxWax$12aR4%4d3= z9Ab^X&weeBx?Y+Wc+Oho$QQ)?$9s?%6-zyH^FO%yUPL%>U{YCre!INM($lpR+xtI6vfu}F98def7f(r*(t`8vY;Ievh`q`sIh{)^s?y4^u~!tcgH@yA?&}xf00uACvKWLW2DxNt)o^$;pd0Vh)I0Nwb`s zoO6pE^HJ#;861Ev!4;@|e|L9wcWwY2ScR_-W-f(<0SyC}*8{VFakrolL@*AoCw6j~ zOv9I_d-aSBKs5#&n(*Xgm8h|_q@+K9bztk{1g5?WDL;N}ryY}i{=72>=b8b+i*kkb zzZYn>+diyLT4k*)^pfb2-^8jv(noAL4R) z_@yNyqLmn(_puT-`P#a19rU&DMQ!_O?4- z`;4j`-(ujuAeqS-4~j>HLd$u`S9gS~YtyYcAA>#_o2OwVZK(SyqeZ%OYfWJ|(tHXr zH~gNdU4G4u2c)MLnC7n|oOM+-HF<9Sh{X=!OtrxzKN}DZuCA``EczJmn{>Kf&(XMf zcwAXTrJ!4D4c}&ag#+C>7G%LGc!c9sImT8bQc8}D%9yX3JXzHS%kt8h&my$WZx}J& znJxc(x7jD9yLYx8_nVhUJIH|7?MieCgkxdw46Bq|R3IgpnC#MgkBW*?A0rAQ2Ashy z0QZ?+i61abpjJTyy5%6ggXA^%>1Prc9e{0q3bWay2%bx5Za-*hn_SK}+_MCMRGuR# zz$MSMdvd*bV_p8P9n9oSh&xPE=zMV4wExEL5F9?$qDf5=Od zd#4hK+oK6KFYg#cWYUFTDxDw1+bro?9T*wk@_v&Ir4o`0WfTk)xBqm#LM4aJkIYDr zZ)EdJ2V)sNWA5gV{R#mgrmdS78tyxriE540+m~q1Mwffem*`ee$tSkInoz$+46ksw zV_&)p$LrNb1n#-qoy^|V>ffEM|C+PKub^IRvVEj`Ba`JT{)0o+AK~ND?b%v;3rtu0 z^BzP8-L%NpwStmoi?2w9@K+t)ed(^v1JEgqlZHP>)-);_6fy$?p(P4MJMMSIgs=bv zFIQ)-1gaFEq|yERH-sFzn3NP5aLWbc*m9i?X+=fFq%nLhCj@u*N8D2-FD(4K|EZ{e z6+MVNAh-pCmH*I?#Np3Lz>5MB52HA@=sQJTE3+u~s%{h-^)!G#HNmb@aa#&#Fxi+SC{86FTLBqatJxtI zpDwFf@D3Fq!Ler>`feH$htn8hO?>%Ng9`|=;r2a#wDQAIOifkSw(aX^=u@5yN89$?;fz5!iJHXH%yZRn;-+an9S{3~_<|wIM|!X;FT)=POxebJixvc}UOx zFEicLS+P%Ib6rxxKyP&+u?W*hgw( zE;?djKY(mAF)IaFspp4l_f3dpP{Yz`HOm54UcJ@wPcsz?^rd#sG=LS`YL@(0EMKEd zyXSH`?8TH-^a4sUC`Z8@CKz0MTabL%f|yoXoTCA3)x?Zi=_fjHB^izL+1zfM0uyx+ z5fQ+V?gV_S^YinGRsvMiuE6&g7e;54u#h0$6awy&V!7r$Uwr*Pa}JRBK+uRNEiEnN z0TL!MGBT;GU=-wO3Z_edHrO@E_=tZCa^A(&zJk-yTx_qe2gjVZLWm&+Qh9^g>Xvm1Q zYki-cjL)xWi2%^aJsphQSDaD@iwwqp?VxYX0&P)-e&6-HGFZ(1B4f5^YA^QaHP*RT zd5ZqQ^SvTa<>5Pb$7xVOSw1RuGhHaxY!U!-O?#fp9dM(ok8yyCJRT4Qco~Dj1ZdQ&qxpEi-K_?{ zkmeLHQ|tjR&2n@LD7d^T`~}C$06;atlp|_8y#V zsXXvu&|ev=^}2$H&BYM_jOBH~=MtWy%dh}#`G4X#INuVFpqw@X%R-v=s 
z9yF5Nvb#C4&9(1XW%H{qa(DtK<7Te>+*=E8Hea~@wb zPw;!kjz%KsPuEg{ozj7>o?4voho7Kz6yCp35|cZ~6&?JFo|xaqOC*O9Enh$kN@*A5#AAnoRG?g z?mYB<#S+khUknjwz`AdU8!949tVhFnqX-&WVuw&AoZ3iBQCJm{P{a=u%Qq3XJt7vm zrkz}!Orpbu&X#C2SbYjc22A>7kQ$$NO$Y#lyUgL+Nb0=cF5pJi1gcygFWzRz*Rg$H zU=9}qGb(XFLoWZE;)?ZfPX*x0ML;s>R8c$^bn%i|ygWlD9-f{xfn31ymZ{HQ5lo}q zZcj$QWWA=grKxEGR89hV7SPbp%$9R863KLC3s29__D`-&nxmkVVPIw+o;U~X`#%jL zHa0e?G=Ut#FJF*xaMCPUvsND%7#a0@LQoU|$MfsqxkHQGMI`}*>$L7U`K$hTW!9sN`;wCFgGAh^dGjQIbUI_J1P z`~Lk;xt6)GmbGkq+4i!QRxP(|+s0b9ZF||aUB7qt{k-xtXF1M%19OG4R>3^anXh;mQgh2(|+!=pEl3bF4S^tP}nT|0lfdOr1s z;jr(a6aw3B!WzHs4oV^T`E*v^z$xhWW5@g=LZ?y&i?$TtJ&;Nc4weYR?U?Os0D$Uh zzsTm6pxxcw+mSURP+fu7$NBBqhDSOC7$^J@y#2Fr8U?1$ttys1v#A)EnDNx5S65dn z6ik>?4_o4~l(D(n8(@n8l+weT?0@h251heqM8?g{E$Y$)4-ap92;Y)pv(YnmdB52o zZShgh{h+t6F94g-$OPdxjfwMZ!e%Xg5MWwJS2I-OJ?u9tW(9Ei?4!3M8A+v&L~ist zagQ2z-XrWTv=VF$D(U_^&8)v}W_TMC{OR5Pr!0_V;=*{)#m2d^-O)V=)FWZ}Y4HK# zp0`~;o0hD~cApW#4z&d102Xe>yimL^M1nLmA zgua=XmtlWJAN-~eNx`qoZ^p7NPz($)DfLeQ72AH$*sxh0-T^A)0q$#}s*2l5A^R)* z4zkwdMareCZS8JM=JN^6U@r=mJeBlvUMWy|S%RY!&O4VgBQ|yL=)l=Cf z$fn3EZ%P`_&I{F5?88ko1?S4uee+p4ANmOZ}78%63_#?UI@C@|VdN^YvEtME}! z^;Gs5I9C<;6*gh9EK zZtT~AMLMFJ5FQ4mUrDWLSq9ATD3$(Pw(ku0_4P$HK~3rXK4H9d06KGUZzauwhZ58= z7A{4wl=Ij1V5R5AB$@+YN8mOz=)b*pKI-twNC$3|okYe20%!6+vtnR7P%$@qAXY;h zR3^w~JfbO`&L-Jb8PAFiFp}6;2U9tsC;YDT=9IRm(OgFg8$D8B!v+JwaP-$s9-!+T zJug(OkO)47zgfJ3B|ObX*5n%ht>f{Ws64!niQ~k1v^<)o#c9*(k{V%sH8fcCwaz#2 z?e#}{3M~*CHvMO^LQp1Yswn7lbY^~O_~+dW#UO(MA!^BBqAwMEks)DW@cfEVaY}W5Uez#4 zhX>!aI)Z{6t`fQ2iF>*cBKP{nCK-j$3=9kk=wd$+M#GS!Oj8weFd!*m=j7)`M#OY9 zEX4;R)(NTZ!U+fn{<>{Tt&8!vF^7PJs`rw zWm^@?Z13#E8!D-TEHQJ0&fD8ti!GvML&#?(No|*s7WZYu)Vjd1u#pL60JJTBAY6$B zeYyb^|5=Oh-)fRNXgD}3x{rF|;;^6u1N{pM0fCIBCN8*Vy1InWV1;Yo<4dK#yMdX+ zRUr`S$A!cKCr8q^;H*FCUU~jmuBkwlkBW(z@06O9M9Hlf)F4>*aEk7(!xh&1c1`Vj ziseC--hQ7pJOMkT1l*+SQFeJbeGx|Z+n%x?!<387k8BPmw3%y>LfsUBb+&Oc!=tvl zW0|Z*yOhvEqP?rfR{?jGJ%BtMdGs_8OF5CLfC=t&KH*QCe(?M88+LHeYMu6>(=P9B zp6Gc7ZA8P<+7=eTM%w^X>^|}FwcB|pI2e9|#S#|k%|6mZFTE-JtBZH2OS`5Z?L)b%s`n_y!$6v_8@Q6>HkiMMu z@kW%ks&yqs_x`JVK_U@X%`VlU3!d)n>8a40Wv`)050K%rh`TZ1U;sKKe&F#Z*B%PE z4_z3`v>Z|4+3Kzw-B(xbpw;;Z@f<4hue>ubV`udA>S}EwH<$}izBGpm5q@25^+y=~ z{O~+_=w$YTt9szkZA^KWhZS$bQ5a1aEc}^GZ*>qRsmN6| zG>mPaqobpNR%Y^V6gtEc8YDN`^g#~bp{U_@JUaX(@joS6zaS3rc44V7x+3uL>ozsI zJ5a1O>^RiHnyN*`*oGLzPT;cVC^X%3t+eKuBn@5LExa)E|K{ob^e|atXg6au|CsKp zeu0zU>_69JgI-&z^@W7U9QlZaCW+BwO=Zez4Ddwk;Nf|+%~n8LHrD8f_@?FdJLv}I z2~npbYwyTX-%slHr5y-FjE`Huo))jD}d^a$-OrP7}i9sgvGyLcp1)MB~&rt}s+AASN zDrI>(tX7S5IgY664mjfJEl*hNb+wlU_dcdzV2a@n3azNsRo#JDs`#USU<35{_*hXj z4InoMPSw6L&Dwp>ZvDfFw=pUVmn~gGLxk1a{RNLQ=6;^NMI8Mv2ZXzA*jISL0S1V@{LiC-_K*_kQI-o zunKkjI0%^cZrlU0w4(!34<_~I?=@=K*S3y)J^Q+;X0aw(;ieniI%{-9qE|E~&|$=e z5}8wrlWQX)*-g-82OGsE6sZ4Y=&Js^(!pVMhAP? 
zzf1cEd-8u2DLx)Q1z*t|R5)B{<1Z?3t|Ld%168evtG{qd_qt^La`1M0K(w1Z!U*zV z%GyxEWiN~Vl<#;xAgw{&vs5C^*``$ohPloI)*%y!V;L;c>N}#K-T(llVYE(#`r?#z+^T{ z8~h%}O{HvZKLH5sJ+lMVyQTXOgQo$4GCJ1$I-6%Q8|;}%!m)_A-hOLapW0IPP+ubl zp^+k?S7?&L^BduSw8UWX7G^?WA5~C8s$-$nY7$UTEYw&|7VD$8u9D3Vy{?s-z!s=U zWqTz22<#bU?|7Q_4O%zdJX*uTq-&@<=kK4(fl3Z=%s4fi!s0>A3@D3AU^5j80Dqu} z23t$hL(p}-CnL+}nl5HpaJ5I$!Makwx;mxMB0~}GBXkA((P%QsuE~W@*!$?g4WaYr zgC|s`lmA-AV3#{OQarwD8;m>hmN|7&3Um?$i2F!=Qbxf<@9Jr-*&HCfPeDN~(w)ko zt#=)ukl?>9bOv5 zPe}7mmYa*FVN4h6=)nNY`NW6c>FBWa&q+r?dPjl3%pOPNFMpV1uf=*`<(()Lgu?&j zLX=U$H`c17^AYc{QI^xY^n7?0uKvi?U!55j=)gCZDlDN;Nkc{}dAJZ^^-fQ19xFZY z&R~6ekdJsRaPmh|zLt%w9)Mmq8tx^`?%G|D$vw=d$Hqeby+TCTpmwrVhM*gWQ zPhmc&DYLs-@g-eylcpP+%@yblI=piss5YPOvp}D#wc_=LLb%;s0fS6|6>TkckMqy{ zlzg7zN(W^|)8f}Bn!=#IQ?o=w3Rs)l?%k|_%r5=`%Nc0CTID_YSxPKK+ z7osoEfR;E`lMCXhOFTV4j{R|CeXjqR>}yA=XQKKGpTiHbA%;T1&)effcWxs0pQ6p1 z&m2672gWxHIabCU9Rj&zGx_JbBY(wiDN9O9lHOL62hE{$G!!zJC)L->1sJ|W-F(5}_igbTHn4<;*( z?n~tjhydv3#hWq- zzJHTK0R~ncQJXS`suM8Jh4V+Wf~uC(6VQzp2CVk}xe$vue-2u`=}Ye?61Cp8J?5kX zr-}aYVm+7JJ^b4O)e`uXulGrlE-zgZi#gx=s%*TIG@ES`Ny8A3kRtjG7VGSTBZ)+i zP*A>b*Ymv9o*{OPbxA~hr>(8lx$=qGEXN)MTY+BNz^(?2VEn~%DP>z1rly!Qc8ije zHA)VNs>;gxYTn%e1=S5ld~2&m3&KyG;?~DM?r;nMD?3*q(UQhqTeg?~d3Q~p%}02P zBa*!9V`k9-p2$a#`ck?gAz|Deo{YuYG)tBpVdenTW2;C@9FFg%s$o@1SGrEj9WoWS zN4y_Oil_kiq3dQ`U$MY#l;EW{cFE=WYZSv1$huIgHbc)USRREs1z7lUyWVTfhp@Ib zzMgQr;E(U#$>zl{OE)?&y034_bo>}pPp&XMf>pmv^>*LZ63#?Pptt|B@Ns#l`UDKD zq^MQP_%1%zL>yEunG&o}-+4au&DzZ=hBqLy*CB2e2y)cS@G;QLnra5pQ&K`oOHZKERgiov7)}9u zf#rJXde;bazJsUg4L?nb?XP(t=O+%RVh7$?4wlM%f1?&fj4txDS&eVYI5(JYo%#(; z-k*;2E0E88-I+(YS{aVQ00GTe4A42y*CL*QK-@nv}FI(|d7Ss396&mJL?9%2f*$c+rayk`X z!kbR4Ego*_ZJ)s+`E2VE0BLN^K z*fobG)o`l{axv^B4xiX^FJDGzBa;XbA10Q)`~Orzbs#Q2D;b zPqun^xIf#^kriRNxIjs#vHNRnF}%5jf}E_>z3uJiT64Ejsz09-neHQLJd!p%5L+bsmV*IXZw;ARttsJOLc$GCzI@^4pwoqQ<4G9& zYeS5X%tCKBgPESQ6jv2ph5eJSV4(|$0&|cpU|Tq9vs(au z*PhT%VB7Om!eJ0prm{5_bJV@dbvEw_2-0y=S-H4U0D;E9baHmq!@NIyF%L}E=jH)w zGco(LeRtOc{O?gT6yv>LNJxBvY~5I+-A1KGkPXl* zV1e5n3gkRr6*G+V^|kf(`hzJtKunKYoWOat8K=9&6@zvA(PFE|OR@JI&=Hk^Nw-C7 zy-Yff`kErXEM7w#ws`cr*U9SVIHeW$Ipp&k}$Hc)I z0MVS4ZaQG9TBuoHF*Axr`uYL_vvKLY2{p#n#)bwngkA{y!hjnoNCmVK)Idj~6}U-J zk?!+O6#H+9;y&}tM|XyE1W`r|U=R8sKis1GC=CjX%?RRMC6pSIs?5%QTutAHtF;YZ z7>lL1!TwJRFkPf-%kJiUtvj*%oSq|T>`tGILQS3~AHb`v{|u|2G+STRpcYRWZJy#7VPFCm8>}OiIFrF)@qyUZ zAK>|$>`bPLrixU;!RN4}?g0+SDGVGVE2~oG!(&UZ!eX%aM+eka>ABp1untzIV0;Qz zlo*DCaov4=(0F)w@(Kz-?XwQlhE5n}s-U+A3#?us#H*;{b$?7)@p!@GTx}z_AGc?1mEB=Nzx139u7;ycp$8V#$^72XD4|`vVLBp6!~S#{mnm*KZyc*0Z#v zbzmk5+76Sk4F0rvp!awMg}mx`O-e3>L#Ds>37_KS7mnB~a zf{}5+{g@4A-)wAbvROhffN3g684E(qFHYA4iywGiF|CXBc9CN;?z@>z7u&dO)}NE3 zm5bGx!SaSe`7ai*Hv$X5>e?EAG^y0?a`S_Wn~Gj5c$O0d>kemi>UUg~E}R#;Osh=f zMJFUlfqF%=#TF0Xgj`+GxDep=jxY5d%g&)wHMT8`et`ymvAN109$AskpQZR;x%mwl z%`Bmy|N8pjP>@m)l4tQkt-^E-C0y$rNAC{E-^2x}6M{$|{uclJ=W_blXnQ+q+z3#x zI&aIf@5~2T_%;`Psc;`%pcsut2F}?V2sjZbDMM!)y_L;72L})QfkpsYBPuVC!un&eZvr1wTg6-*$z`>*#%-Q&?-*cI zi{&hAd$zJQ$0?C|IPN35@=!pR#=hpa$Q zg6zq8L$G0c#DpzwPjXl5I|D`pEiY)Ya5%g?OC(npat{Jkh{e4qvP>2nvr9{5Kv%f(kWy1q z1JsT0IXN3|c>F&@GntHeg5J?RW`zd&9gRi}s8DUxZ)CQ70Fu<4#-|v_qQG_*@U;hB z5EKEzy$4jD7c?KFMMeF8zlUm}zdV z`J*$A$#hW@L7A}(gHa#b{sX;6+&G9hSUa$XiY1r-TZddpirC$d<4Zg?^yy$? 
zC0?OLb8aY@e>I=L!&RTOQN^1J5&`#{Y*_=o<{d-szXmtaI1A6a{&Cco4EaEaEZVJs znAHO(1?JM`CbwJHHV=PFFhD=+Ee)Qw#mrP^)^NW!;!vWBi-5;8nJ6Pg?G5kY6UEFG z?;3rg1QWct4+=u#%F(ay{}(lv(^fE&GdTQL{z|R~Zya+e^U`6lSIqT$sE?b;RxNs~ zZNJsw)OJY0r=Y*w8rDwk_ z1)6xEasxtd9xx}|D$+s&9xE`rdPNkyxIRz)Rf`6T)iKcfA|5+}e89W&LE63XY$)6M z$Kqa4|Bct%83Q%z*~rxnY_R*eI^wGwtT7O%NG34Bcv>zv(i>gU3CT5gOgMH~e6-N{3Ukivu3C{`)v9o!&rG46Vh?f) z40dJtv~*6l4r_gV+2!QMH9=_n_0a+mG$=Sm3nz_DCTlEPUc#!Xm>L}R+rXH3_n!w& zk;g-vSuCvI`)Qk1+>w)q_xzAqut4{3ZC&u(vmqMx;H8!PB9xej=(yEjDfuE)vDN~Y zj)>Fw+*z0s#Y3!-*oVr0w)pBrdKTV~Bfz}^acK3hPM^BU`s zkPwiWN!ZiV6GJZN1s<*E{P!_Bpp-^ILgJH^MS}Hxmni86JO!`c7m$_aqS7}pfDH5l z1Oze~nvjJBH2_nAlL;u#!P5f|6czO~9Ew<%pl<#K6ogDnuO<#KmuD)f1vd})4cuck zLwxVRTM6zv&-h8@a-z9+Kv*?Zkc)IfAj-?|I!S9#g=GESz|XEd1+w58r9T0lCOM_F zB|5dzXu@(pc8bbre_Rq*{y9JjhZUKK=nL`Nmfy?Vjpm2rGkH-75fPt*vdDQ{t|cdb z7#We~<>druaXL;^*`LfPg#-mrBDH1$#n_k6u%Gw)es&SGzO=OU-GD*gcf(JgLKhpZ zvB9MvGvmI##zP!eg&Ho;_Ctt;zeMo)02K^INB6V%yq`#Wx(Y3Cz}YZUC6mG?hZ6Vd za06e;=OhM~&rdFOi#EbYQ)-r!toZ#IKdy*RxS-&ln-GB+Pi}OmFeZij2C%V4dKJ+^waDU_!qu9N91$=frKQK!{0SNFxpj_Y}2{$mLPy3$Df z^KQb$21BY*Z!dn&9;u|VAA8hdsO&jOsd(TuKMw!qDV?i7ibPsQ)W4Z*NtMl=|(WUblv&-9=1W>G}G&xCUAGc?K77=P2*Y`(0m?c=B}HQF2QPhsCJ-=r-y z+H4Idp~3pjzl^3}%jUpp@{|R!SMDHTAjg-wF)@r8_VrOHR+bCpz5}Y;ndcLKi(U}q zW3#)eEhMK!c7CypN3PPN&M?N>KP?;XYnLxyWF&>e9|_6p`s6$RQAqB89z>ybJLpaw zA=~}O;i0#|e9b*H6v~=G%?^g=Qx z)SkNZ)7iECVg5wNESGQF`tyiG$NhrpL1roHf1iuoDQ?Ga9d_i?)30y3g(63J2z-CH zl;i|D>YgWMEIE=v1+^I3hqoX2Q>DH|Lhjs@>RzYJ zf${COdAf%dL;U&Hz}wZAO?Hi!`3xtk+GteKu+%qvIw~0&D}gaoB-8doFxL)B1xgPq z_r*J5C#2_v$;4Qoi2+}MFT|tVqufURWQyjPLieq9s(p(JV8LV1NbMPI@Z%ORnxt{0cUC5hO2+?^YiWP zeM`BkVE#xd#o1f?6doVo^lETC?&|E7VY1B5cNLSDL^77fBO)aYi;9B`Nx3J&lsN|E zICTC9o1@DgA~Kb_(-p%1<~C(kBvj+GEFZ$yj{f<{C$?X#f~8~(uuCB!V;91>6wBsf zp!UKKv2sEq1QU~wr8BB<{n~Z*ix|V~>gkS*OY#F5(nfiCF_ap=L1`*CFpq*qM72L% z)?fRquA#x`d?-|+)%qL$^zt0!Z;P>t|KD}-+CuX4^|L*wVSJOGp8(PafSZLzM4ZH(tZ;fwE=V|W1GYDVGsl(njf;rYRiy9w8?s%!M68iS z@yW7zk`zFt-RotAp?4C|iuwWcKG_npC4V)Warkouk~vKZiX^~z`Jy z*mPsS1q;(Z%7q0mHqEYQT=e5|ySw{k<~tvUZJHZvd*?Yk#Epj1S<{I`Kcb?b37M(m zh|3LC5Xc}+t<5tewmrMKj7?8w%L{+%3B&GQywllE)(XLAPGP2sv%6{G%#shR@|OLN z$5Y^o=%%6gWGTi7=9XXaem2mXuFZNx5(TCMY9tDBP* zR3I@WQLe3>%xQVKks*cklKhaEca*E8NUfv%=zdn46=0ud@ctq`2InW5yJ4!i1ry$(Tv$yX{hJF%oj;| zI$}}}aT1^207LNs=5t)SXP5EG1(lfQFBhKqqZd^&zkiQ*gVHAmJZm$5E>fPEO*hFK z>gobueZS}!xBwVy@l(`*G*G<1el-PhX%R|_i%3mdp9?`r9H=~RqoI^EY?ZB?Ca#0`NX2@45cRL zyMf9qOLX>Y-Et#hRsihoQW1g2^Ru9`afD>KS*GRW{^Q-OO;1?5WZ4Th3K~obXG3!N z{fvd?Gjr_VPI0lKt;qmhyhe_J=fis$#Vja>4TG|JukRkCQi> z0Wk@g-TI=vm!QaoY)_y=@u!_cO(`jzEHUuHn!-WnLV^6RlKMU?Fojbhx#K;a_tH6>Yl=f4?ZuIj>~6KN>=Y*mUCRj z_%VydCMIAEWeh1?yx}4E-=YgGYIwB#u%Yep{X8JcCl^8ZN&PxcRy?z+>gP#w)Z$He z)o&(>sxcpo(VGjoq~y&sm-H07W2ccy+0TPGJ@6?VMOH0*`S?e235umI|itTd(w9qnC=S5%O=<-@#H5x?IQ3ahn);ND_j$#l6F{JN@{1Huq z>&yP8i)Sdn(r8sX?gz6?Plqo>W&cGI&Fj}xpjJ>vE2!;iP>8)^G`_XspPn;O&o2#h z`YBs-CLQ5CFI6PU@VrC~p2f>$2#NGZG>QVs2V~q!eZsM$W6#9EL;(XdV5b76QXB1< zf4Z_qXAkczW5dg@LtH%BZ{qUulfTaHFz0SZAuE@nV`G_lT6lK-zw`TFPOO%ytt3B{KCb)xKn)oM-J*3I>mnu%gj0zPkXKfGjRdi`e&P02o{ zbXOW0U`1%@=~g@%?z~Y?EhXFzg#2?baXlG%q3GR z0u*v$9hqBygsuh#w8}ppwQ9A!v6!d0$|ZWk@eGeQ?D~gCMFj$LJN%4yTG|MbRp`w+ ztZ`Eg4}S8PYf$-xe(KO^oT1xkAsg>ABc>)-h`l%s@n?4ojh%}Dfukw>-u}CB4@y;6 z=H@IaJMM1Im6i|ET8l@gksGS#lQ(ts9LuMTDDt5vOH14dwY-lQv@j89XtDi=suvJ4 zJa@!4foewetmYa}N!1_u^8OfQUEhwiKbdYD&PQQ^mI{OphB1!E^S>jLNQAM|CLPxM zf^K%l5!DzuWXKRyTxh})7155Esc05k;nT&I24tLb+dhQ~q)0P`wF$WYB@8i9`&!S9 zN{pFQIRuZ5T??FF`G6JUSLqQIyS4X(T964+{pl=6{0xkhZ(+zZRMOnmop`h^c^s~5 zOPJiMHnmzbPidrCK<;N>-^llwPDH!AbHmET^yuu&^(|d1t8)hl3Gupo9i1%Np8cMo 
zC&umaUwiVnxcm7}nNRZ<(>>Wf_J^4ZlK*Pl(ZjhPNY7LxOVKE4s87viG>d)=R+OCF z*TT^|EmWpGCD`p`MT|NB{+M+3A%0r#%E3L9TJL}}%B4rDV?_Rd2dDhyP54lfX^m5R zZzBodO}(D!P`_xy$-mq*X22-j9yiB7wMoLV z&}Di!PtrV^;}*RQ<&ncXwW5G5-E%kDLs!hcD~&EK<#@emGxD1-6F3GmG_*2(fM-eP zF^p?j$efj>X9xMI;Eyv=h1FAc#(7httbDn@B>Jr-t*oM|W4@=x_BY&dnzpJ&VoQps zx;mr>wG*O}FAjEMLkIyg|BW*k{ipC4-uqs0dqX0lW$1kW zzPU?JYF-K+-Q}eQ>(gC&CL9C?I=umlmdEwzPd)I<)Xj|$7tdKVRR~Ak)Pp85C@+uI zLG`pvqVnn`Vp|%9gn#!NLAadmKtKtpxlC@{-PrsBuq@1%X*W_A-H4hA2hpk96m~ft_z)GQM4mKf7XoonN1W%DoM_0v}qlN&nqU!{}$Fas(*5){I+q_WM*hG=Y~DUik#Q= z1us=VBeO6hle20~VtY7MCIp7YSG(BzO8cSuih0jXBsFw%v+x^VvfNbl5p~^OK>K`T z@ulJZQ$}PgIoj!&KS6?_Hr8@0cgb;a_Xmpquxc~b$bFogocb8ZiG3rSo6RKd;nZ*9 zj>;-nEGJ%76J_SJ(r=gw_Ws#n(GKI8POx}$d^jC@&sY{g9pQO;>PAdPmZ;gJjjs|* z?>oWdRtr8PLhs|X)ku9s#UA*uPaXt1u9u|O4%=e7jAf4GI$xK+ndEEtMG5I)KQ{kh z4{yEY38Y{rHNBV*h4=Iv`%2o}>yrZ?r4VH=*wb12siKffRSE)IyU4n7Qk^h7jjh{w zrIhiht3-C9t6z91{J#$sPMCnIrlyy`J6{15`BIrb=xGPb>|GhEPR%cIzD}BA;sSD} zT8U3}PTw(jRc|4W=06a=RgqV?J=_SFS~dz#Qwa`|Agv$`CEx}xFvo+6KxwyIm^kVu z7+M+DN{8G=m#Ur6o0g7+E3@?Z=54q%r)#I^s#%gLjamOU-(i!IE}xZYULdN~y{W0N zSUyArB_|1&#K}Ww-FxN9|D^{4BBx}GxS2ZVYV?c$z4Eh>9H3!x?Nmj*-h2B@9Uy~O zYRXd5Ufgt?5xL0AOh;>htm)Y2Ns0_88V#r9Ep1B+i&JydjG}T!685=GONXPGr+v77 zCfK#pGsd#Ynx1!cFTb%}@6R5t;-sodByneA31)>Snj34R3v@Vn^Fj!|58xpbzss&# z9gOcyzFc1aUbKG0RJC{M)&Cs|;Y2ntx0g%nDoVk4)Mov!=>ZC5j5rwNBmWo1ScicA! zX3gGYWn~2fvdc6&yd^cCoV+m8%qcW@=t}Wtvom3ZAfa@B)I+|$u4TpUTWu69*QoO` z#>ejEgyYc%%S%#oi9d(mqKFl=wSQDNw5VZAAD(r-`Nh_j8zmbfZb#vx9;}5tfaBtE;Lq98az} zl)g|=B}YZcN7qnjnWM)@&i35!(wfVT_q=f#HvjT0h3PKThzE}Rl9HLj*U#BZdWS5lpkwlw723Y+UI z%}GT~&6h2gTx=OCa>+Vb=#$sIy#2?q$v5}zWJ176iIAFxiZQvus6~zc^+i*y2ey^K z6SAw_*w}ah5U{ZA&^`j>b+K1fkZMZ5EqV-fY-|#bPrk%DMSq|`7he*nt*S!1z~T21 zQB9AD`7=Z+SG{c}T94;fX}+LHPJD6e;LUQZqzZc=3B0LbAGAG@E0L3%8yOufC?*C2 zwiQ4nyRo?`RB1E})}|S1l}4agz=eZ5V?&=J0L$|HD#exeq`;(~2WTL-=CohGl2VBA z!qr7bqvGS^v)Z?S4eY?cz-1T!Lo7}uP*GBTsaJn~`|6-gD9edy7Z=Fpcawr|6bEA` z?ayce?KEG1Dv`us7$sCoQAv=XqtEvb%}g}GW^Us!rsiTxas$oMwoeFi!j5gqxnJN^q2JD-}#6(DOS$$dP zX)|Wt=iBV%av--c8cA_d*trL6sKSDRG0x{MRGHNyts5T)kjbJC+{eznX)ed?m6Sg3 ze#=5bTuF=B&_g)Y8(B_vcl%y49eMwuY7&b>^e0`|MwDz4lHb zj(VK)dOesH*!8Z{>pXxfI*HjFOw@zYBq=E=%c}k^hyc@a(r*T6Fj)Vk0IR2|nX+uI zp`if)f5Q3bk2}akz(pfpm{CxGH5Y>6i9w^W|ICBa5?)88W@K6`kh|V@1{b@ZA*p~# zMjexP)bZq%a-~fGy+XvH#f@xt9HX`XcWI8mmj*8tjFH{W)$Ri;=3RWh-+W*7BUI)9 zgVeIJpe{t;c)eO*IBO)h>;=Wp(2!cA<}fd|eIFzxB{As(Q7IPm$B#2E#mYs%G4P(4-P2%>A$65QoZ?8R-fc7I--X$(Xp77tfZh;0WJ9&ozi zyjaXb!v3py{+ycU9T~dgEL!ZBfXexE%@wbsL3iB^o(Tut*GcUgipkWuC9UCv?6?Wj zU-jxBILsAv|DFG70ls2283WfhkkbOR3G>Lv2*{ty95*4Spdh583I@vt=@d5pz{G9d z-x>648~V2vEw)>*#p=^ER8CSsD*)#N5bE|pL_-4uS!HFiCkg54>9^${-en@OxHHh= zX6()j7h|t4l1e4@5fsH|&-CH)*n|wl5I;U?BsFQ}`bJ5d#TN{u?rto-V^x`)sgi7P zWsdvyE=VFsIKK$${5+V=_Dh32d(7~J)x!;Z`^ZdO)&vN^(ADk6o`7aDg@KLzOFXvy zMk9-l;&&(n*GGn*uwj?m>Fz}IZc_knM38=a2@8B-``J#hIgWjo_qeCp+rc~umjcZ7Z-TL zqGI6&h|GM9j``=U`Yn$}V_moKd*>*e+-Jf;I@e3pl%#*jkJ^T6q-11JZ)lC7k2xfN zHkif4$G=8AdvDyVc<{c5gfECqF3!y*0Xqn;r^`vM)3*0Oz7E?CyAJ^#HxPbu`D`v( zvI68skTY#ShIU{$+k^~WInEfj`~Bs>vo4=+9uyuv>q4^*1pz+SW#kn0^l$5131(FM zleG)g8mt_Sutih6Bp)sAl9!;erRI&y%vc8FsIvqV!l(x&u{eV$5G5H+lan0@b_6!v zoAsRvG3De50lM&uSVVj}VnvN^5Iag;&G~hn%FRfFp6zv?vN}bl++bcmbYVnWi!SY1 z;e3J$5HO^^H5GhvGk4p9AinwDXUwnjm~!96gMf9C(JS*A^9_inQ&6we5XRfsh39 zM>TU~e7v!2Pd2dlhvs<%Jd@Mi&1(l_oJFO@x*b{Q&D8XUe(KIRv-{U_?1EMig`ONY z>2ZtX^|-%cR=d5!J6Eg!UOdJ`cJiU5@wC}D@X?4sGLpgyP6n%qa6jJ<^-d5}L7sbv^@G!z zIraHPYR4B{?RcOH`c;dBHv+BnwR#rdRcr@$niFw+z>WCo2t-8%3~5p|HMIgo%9PYp z8#}vBfTq*YSy3n3?Cb9zuXLx36$g+Z`_jR|L2%4&1Me}QHi9z38?4a5-~GBd1?&~T 
z@G;}^0C=!`>C`VkLIj>;K$AZ^Jq2d<2=F>+t}zqT(<1~D8=w`9NK6z12Q>r)M1xCN zd3ig)hk&{P{1hPnE{rZ(3Eg}>tU{QgyIQxjKEP|zihLi{gw&xr|nFdGDUO(^W_><#dB0A>Vo zS;zh;QK{Eh1Oiy77)xwuH?nra&p@D2o)&-Gz|vCt2Qs_1mKAI|-o(VjfZtTD$m?b{ zuB{-eW}~pBcPDhrgbB!GkwJ73*eL^&U0=nk9vToF@_~=&_U=y4(vpVM7SskW2RS*0 zwvU%eH~tIdj+b*9;f#-C&M47Qav7VGh|8jSM}0$c`uoM9!yc9!!>r4sEHpS{t9!>i zbvF9os?hH>=NfHRxTK#|K-<{F3qdsr16G2SMlT-MF8*;kLufh%tw8RKwY3MeeoCG^ zMqQSz+)pY@6207hqN}c=l;B9d30*7w&dXly*1{w;*3MB6{8jl&fTk4| zD7uY-Tm$5Q?fMPafb|$icmGU8lpS{=5|ZXc2RH`mp+yJ%MBsP?m-Ex2!n6yxF+nCg zyrZKdsZ5%tz6%igK|+j{8qQT1&NlY|Y8^fJS4GJjWYT4(lcdd~uD5Ic)$)rVlj>ii zBQ5a0%vMw5v03wxg@dHQ@p64pT)YEXP?Q(b@bK_BSHFN>1$>^YKc|Ply?&5=G;i8W z1u|BD!V;R$gjLHy)9SSRy_p{wIrg(Blw#&76$N*=$IBBd$l?=WMF%@BHkVI9HX?bY(hr?ntV`7;9)ziu<(e*=zk4pFhdK7{!Ua2$<6(|?J$9i~eHrvfrLrqa) zi$aM4xa?@i74CV+pvg8EnmeL%{O^m`|7@==uAH!0ZL2e2b@c!otGU z%8*6CGTaNGJ5~+uIn;xI5tk6o`&_4d69euv#}zk~ojPVayW3Me3~E(AK_v(a{N3Vs30?1!mcfC$e9#(ppidhlV z0W!@)5;8;j*Im7H9-Ca~==Q;*@;59BMw(_sW>8lvh+4+(z*KU8%#SqPnQR zCe{|qqEwQhN`GTRS_}}s=uUZo zG@aYKo%Dm81n?FIJiTGi5*<|WJss7q$QDg&)>;1sI@ua&gVY;F+tlAp)-2 z!bwL(O7w%7VtwE)t`6fbx?cul(N8}1bE>WL6^3wt$XKjEI=Y_>$@;LAWHOl-+FqU% zG^`^tUR2Sb{GS=;KSw=y3kC<>~s9X6kTdo!iz64;d{l0X7y>2BKBV z6aGol9Y?-&x)9Oeineru(z+l`-RDNuJ zq?-DMw)sc3n!!7)-R=w?>vyVz1eZn?Vf`XzwX1!XsR4xLU;Th^iHs(?G_Cv^YaZ<) z9_9K}9yWW*71xJUwG#>=#^kGH2UAQIzKmAd>yKes8`HL#&YOM6uU1w*KF|JnZh*ji zd44E$*ZFCu4-i5?HT|gzVrz5LdUuQvXw!0Q`LxQvLyU4=z<>P6nuZEeLa(S{Kvv%6 z{&X-9sp^4V0vw<~iJijjqI}{47^@bGb&)^@w@U$AtWlc>oM&|n4L}L|DrW+ctM%_s zfOA{>gM^N*O!-Gx6frUql4olxF9^;9rH;O(WhnJvOl+(iXqv&R7`!S$uI8SbBk-*O zsrStM5L4sEESt;q-*zkr)k-6TQSO_R017l{-_zEf@ZkT)(^rON z)ivRY3J8dRgfvQ*Al)D>T?*1AEg?v^fQpoWAV`CBmo!LAcXxMppW*$^IsR0yYd<^I zS~GK(;EG5!=Sz*u*U3uR=+s(v20*9W$IX85ekR5*p-w@=h5oU*1Ict_cjJWmdDjF4 zQ3hKHxEc<%B&?7Th&Vg*00-P?s;FrEcOt*R_{Q`wQE z_C}P}xwV<-=Q`aC?EI4o5Kr+AS4!lr!wS-f!I40C{pLBFv0)OwHCe$1)1j68y6b*EQY@^bvKM4rR>ZfbX#$)HNnlUQs8RAtL0ty8emp#5 zrnaHYgI!$$`_IR% z@d%Z4`9cFf598Z>05+T1mo-g%K*bzTF9iBGI!*)3ekNZSZyCdEfMZqdP2Ew=$+X2& zHK3ux3l7sq^#*9o!FnqAZ@CL&dU3;A?$}K%hvATh7g4 z_G09C)2_$PW`=ayZH2<+i|?`*VE`gVa;xWCs*2AMoo&_gI? zuBWrN!E-(8URqgc{Vf%4URq3sgVQ8%GMVD|9y~>nP%Y)ayphGEn;0eo`iT#9s_=8^ zR19EOh!XOw^=5tR2ce!|cfrc-A#m~%ys=^KD50pRn43!!85tQ!z}E72TvhGlHLP&> zmloKA@?&b)U_J?!5*yalKOzacG@PqOi;M!iH#WdvjX==t6n$xZaj_{wDdqMOZQn}N zsC+9_=c;sa4D82LLx^&$rs|Z4=U@zh&iNv+8DqM8Z*6 zbL*Wwy}iiPlp4%2h3}<375WnJqQ z_YWVeKIQa|tFUKCF)_~1FPR$5cYoNkW#1c%#>r&(PQS$z{OI`O`egKn6v;xnkw@DV zCW-1h&ji>)pt9C_Q;r-AaFgnZOT#9Y>H0f=N~z1Mp1RAb?G>ak1ww=7? zloCtX9})-_32P@LD4<=RtfO64xnmqG3)FwF7kyy*#Ouo!J_mzCDX2S%db8D-eSSZA zv$(i;TVUT(3#XkH&^^wUo*eJYD1f>E7wRV~hUd?_{$9Z>a?id?Nur>NXVZ_ew1o0g-(;)arU=WYQbGc8^X1urS^w{AuJv_7$+z0|*?9$O zW!$>fY>Cy?lP{mK;|V$OwWh;Y1+$m@zAzmUGBPrl2j|prGSa>%@I}3EFI*nTNp8TL z&Strf4}}o4hk>MmffqDE#yfdko0HQvdBt*QYPF~HD&GPEcwCPe*9fBBF1keK7Yw*P zx5jd#MR zyB>|n5gqSzKz>9@&CHAeo(2>7_s!W&+e^ppJ?E%E&)NZBun(tSIX_F?b1GT;M`Z2U zX`QXgS^i;puqo6Jtu9UEIkw$3zY&#q#*n$?FFbXLFiJ`k#vgd2d=&$Eme~^?UE)+r zo0H|=7{J4$t1h(F}IsE@+ddr%vE1lYb%L>guYcZJ~B=ASxmQx`I+=ruXmP z+t}L=6<-5L3=U-3OjX~$p~8sSDx*ZTErn_Lg<%4RiNyGZK4Zt?S1byvEYh^hb{_b~ zAEqj2n$*Jp?6z-~5c14*%?t=YgI)>=^+$R-4RqFCoHFKeJ##=+zqgUe!f?7poB$w! 
zeWW`$)jssN0I$3u)A}a>lQ7&0Z->*zyrf4jTo)w~>gDl>6=Z$+OIf~kH(0#f87o^dm&<*!EJyV(-`R)MI#SyI z`|Mu{7A~AcBkV$xYzMt0_w+c2EwOv4qWRDvH-^@q4$ z924j1G$f^+*0-<%P>wS8{My=*YiHn>{6O=NONhBGs;Xv2=~xl$a;^$MWhYn@OH=0uhTXF_ZkU$cs|3 z%-8*}+kpEk%pz_Lf=eCh!i=Kf?6-qDI!!iO1Xu#-g5e1X){3rM#tWrcRfj`qX%o5h z8uoNuic=acY}=)YK?5D_?c`9pLd6J=ik6lZ5R9*%KHc?^O?gx2mkEgiz1cHwRaG4( zE-dBcgM}G&AZ#5ypmydF*e)L7G;D6Zx3#lF&+!~uK(JbVG zOMwGd+Mc=}hqD3rFfa~Zl9hbZ9tvJ&(0jicLaAYs|ncVyj^;{PIOJ?xMmdJ zDt~(TaMh73uIeMlIZsd8@#Wr##NEr%RCe-5#3p;mAx<|w7=D;RZATjlJTA%?iVhzf z=1)pJA0!R3^$1=tE=8d2d@?i~TR7c)MXU8|R8jW*d;PN=hUQTnF;ojNrgos`;oQc}~Z_5@en zn+HT9X+TBc86zECQgyY~j|5DpvgIku5`V13$19xH`n-C~Ug*8!aT?7;ax_IiC{1|N z(q3urS^S|9>0OX)ICZdWxplw2&jtxGak=OEYdoe)B(FCNYW*gEUr9Wy)qHS3IBY%L z*lwZxN9cu7SUjTe;e=*NwN|rXNLNx$_1&O{wU*<%eKZLu^@I21<>kTf8w<3baNPgv z>7fBjXqXMy>@T(v282W7wNvtb>@mcZ*Z67JgU|45<`MOo$SloLrokazs*c-U1eAl~ zJ%|O*tnk>`Vd97${6?*S5EU8`IIPyy1B04Jv^C$=P@i;X?X4{@r%hx?$;+qRLNjNO zohIz;TrqtTI7IedcA@y3_ct%R2$5$vlarIr#d-@WK0f<4Q;Nldjx_w1}huU?XDXuYcO^CRQN&quPg z*6htj-+zU6WneX)9^@ZUOTF)nqNQUpzAP`qiTGh&{H>B!z0gXi#&u2TubKl&1E;^8qLwdzXuTy$nkf?pcRI0q7mH10YlTXu*fcc887Jm3$*$-E4b!{-_%4?!T&%^ zLSkJfJuOXHQ86)_F8bH6td(!BDS^?@@IaKsh7JuSc4qA2W7LG)2U_HKm5 zEdi&(Faq9r@i@Fcn!^r{Mm|pLF25EI|55u2_zp-vNGJ8L!Eb~f%RaNuoi8L=Gi18- z`r6Ce2cO@8b7O=tDKXJ&s>)%vPp!jgt77cNtfKa4n+9B?1By^_p_dlOD{qQ^e{hKIk|Nkc=kfg@tbqy>UEE~&1vmIe^I z1=9P#_K^t94K+iUNBU9SI{Q1c&_Y`ba3SXPD{zUFHQbIHQ5Fb(K}{{8MywNQAY3057C9RXqEKqVp{|Q7`mADI!_;7mnMbSLlLy03j_S@F9k;+$}4Rpqr@S z)-nV$7JTqI49EtwmG$dEu2lVh_sPNeXY&A%IaKr6`K6D`0>>|C92rL|`&U;JbCU;%<7eCyQjdRPvq8+@51kKF zH>fK;2_~xr^*BE>6<8LM_q$(z=z zdlo)c?i_$AFTi%(=jcEB#G6tI1l9Y!v0S73Lpd>mGimw8d#eWlANMd&{Qdda4R3*( ztDb-Cto!UI3h`nt8q`HdT5*hTE7rMEnm=XtCfaX>ia*uQ{i+~qbgj~ZNtEzeIc)dH ztkE^$vf;OWoUe@)MT^5yewVaa=lqc_6S*a0mnR&`BR--n)i4;h2icJYA^_h-J)Vw3I{AA9xWV|jz_+SvIz3aaaWe z`9TQ;4n5fc5IsYs z{)7+3aKCb3fle+7lb45QZhbw=aT7dV(RqV=-#UzfMChLQvBaLwo7ZLCY87$AoE`=6 zE}=N~*~}=7h<^EW*bw~(n+bXJ#XnJEfg;_X_Ir7*5(kr-G6Xz{HJFM5tQY=jAcKiG zMhNPRl_b+&+Ob&Qx=irgBVevMB@YfhzSGpIYdl_(2?NH+Psx1D#u+&xjgxA1M(w`O zDAQA)Nj)Z*4E6;UHx}Y91we z4CeSqOut~3a1$?-vH5PLwQjvu;^9L{BeH zwie=Dk@k0vLb-Vazy89D` z(7W>zv&xaykc8WP(Cv@S8VSt+BAHu5R=)16tZb2LTcnZT)wv02+;el*VH%(FBjcHC zpGv>OM3$36mpwj_7iBNOryjyD5MeA^_O1pJi@3f%IX*h#u!-0nFU_uZyg6OK06fyF za%*x@>oI*tFgtZ1?qFW6R~;$fs`*~dqK$9UAkj-Jyu^rc8z= zTYvxxR7UN}!!(0@$dEZZ8qq}$#=w8^cNS9QV(RAwXlTB`DNJOaR2?ev5r87Wt3ws| zk3?~kLF%3`cF<*L=I7&!0iiF<&XbT_Jx7ddbLOF!r8dhJ55PmpJFBRu0DqXY;k*>c zR0m^`JwQT!QB@h+%cug+DgMInh`W(UNg&u}rqTN=PVPFpc#$eEFsP_Q? 
z6t27VxljB1r;x5MO}MP%1f$D{ytXTS`Eg`ox>@03e!*SR22eP&l6^O%bNbj#vwH zX7}*B*-NS=Q6`qMr7Sbf7e*()aB(j*J8+11a--h+i7kwj>PH2Wc>T3`Ya4U&ANA+I z+eP~FwD~F>cjSTGR#;Y+zjpw5I1JOnpeZL{?1WlK&CvFXe&c5>Z0wY>zo155+Q_dn zhPDc_lXSUc^Z-hLZQBT}hp3CWqwhgs45jK}&L;p^Z(#)}Oo&P!;o{-h0>6yYdU|qp zN@iV@DiSb2++>IMbQzRwlEe%KW$V3gNo!#9~45CA17>R;H524V{u&{>P zE?F(>UP&pRg)4A=6(3l$saG_=OQR~#ze3V++7quisr>lRus&VaZs%{6U*fudSQzX4 z0yB%zKBc^5e4ml!mcxlDp8FA&HXl;d67O<1vBHgxH$Sfh9v1Q=Kz+I0PHDrx*srvv z%D}kwPVPjB84hcW)85@VEUZp9r?lxk*Nc;d4(r+q{ksIhTdz9)>GyPdM)NOuY$2fh z8WlFZAz-{-6G(da3mA0xaTn>p8i1bg7F#hhlLD*&&`?l%0kPa<@E5e`?ZBL{D3Q30 zC4$_5|6tSwpe5WchTByPWJQeIg9TJ~9N)s+oVRl3ZzvSuYyfV)WmpqqS~4(jadcVH6QVG1!AFu=JD%~4Vk z2E+vr`80U#f%q)*Y2(ly8#MT$o!(g4V+q;<#W?0CZomBpAIaz69!MYX}+F zD_Mg_@Oi-B!_^X7E;2XHparF6V#kl{@bKBd&K|$!S z_@E7kpK8L+St1oq{g8lFOm-TI4o)ONjq13lt2(GeRpC>z_cy$gCLdQHBE;Bm{0nF4 zlcveJ4@LnOf9&B$OBXu`2>Ogp+SEQXlKA@9hzc()ZIs-N6~u5y=V+JVp(skda2fXmjKgAs6^Ji*4o`Y^Qvi8jCBL8dfs5ut*48ng-Fk&&tk zX`=2x!F?hvEe!~VPrWOYf!2!t#*|Xzz@Pyb55Ib4Yb#f+DD}?@8VbrB3_y@}hNwLX zo(0{eVQEz5PmGVV+AYiOUE&V**eQMe%f5*FJkjT{==t+4j`1@sUIjad)wTK+D=cHq*gz_Q_ju zeDl7p^U={rb7KZ6FdLVLFgy(APv_o`9b$-6jdRBrU<%cqUfKbtmg+~nOH+4u@NHWP zZ=EpcB#Qyy6$9|>;J~ZR3x?>&TeT4a0@qIFQ>n)gpOgenQ$hP&iu4h(@%$Ctc~)>b zF7!!~<@TW$CFxmOc7bJRSLCW_80B{Dg_Utny!(JVFE@92cl&6U^1zNqap|Es@66FU z2Ir4GGfPWtlM0C2*gIVa8K`!)V@X0=N7R3oV?y4eJ6D#fYs-7iY9hZ)b6s>TAHvue z+e*3=z+A`r%U{;Uj#$9fm(1hTyYA*Z{IK&q4(irDz+upiNw}S+oSxdv^$ms+U&Z{lB%Rg0fYuM3S7(zs!`mIXJ?^n^Uo^*H;Zvtf~6-LyPKQ~uGI%h?Z?H~UXu z=kMExyp*M3;*G79fJj!Y;N)~4{3g@15K$>NNk~>PCnM8uc96G;hUFf#-Mav0e?7Lb&;w5!IRPwmole{B%zANg3-^>>cR*pKZ zONb6Td5LzKLOy8; z>S{e-zL5CA^7DdHj;&>3B_$R*y0*)iD&5D9*AGm(CDur#QA<9cPJ!+`^T~DV*JoE7 zx!OiXQzB@oVZch`lI65XS5#G%x3zuDTzAU9F>IZftQCU}X=b6u@!PT?P^(Hgnb;7Sd7cCc5#CYzR= zd>8suViFRd(?NFGM)1@tMxfg>F$IIM7i`mB5NS3%j0a~p;NvP58y1Zu2S%`G06$C8 z%q+8;ei1z7;e*JJ$yrp*_*TzIOWV7l0+V)ji{V>(|M1wSW8EgL1(j4~u(-gzO;fV6 zVuE@cLR^6pcpK&h;|>w5TNK?PTgva>SD{g`BP!s&?2z2k@Iz{^`=Kq48w@P({J^8K?Y8V{Yckic?U zYUs@k$ETYFFV;$(7+N!EZCFLXCyI&|;B=JR!((ql!SZC`a1o=o)YTDbk}VK%r*6!2 zyQ5GF`*R3>+b$M`dW9{nr`2f7TH{28jg)5g3noGDjy)9KjXAy>*GD61+ou<2SfiOj z81e+(HE|q|AHRC6oGM<4svcR&8fNrOy8UhCren7x>GGu?zO8K$OcZ)O)J7&seeai2 z6)e0=lOt^$WuX90p_Ft7{l9M6mDx6#9GzpTIlk+sJer={HU(Z@Rz6W&7UUQ`zn=S2 z|0f)SA$QtrD+jV!mh6J;q7Ha3q zd#>D-bnzX1L!%(SR47`daipnGr9>iyDLPCC%>mhiKR|U+{vIRhzy2#;Co>Av*xX!Z zpL8cUfkd@88qSP0nTLMt(7>@|d}?yR!W2K=T!Hq1c(N?!umne$&GvBlk&F!5_IjT` z3Wi{_=s=61HO(^G|Ky!O=HOy2M*IGzng8;bjF#!Dt)+|aBdTshKh229S~eP?y10ih zmKu||QZL8X{&|r}tD@e`I4|kcq>S>Yu=%^}zp^9g4fjiniVUVNJbOzP8GkNx+g&#x z&d)QCIBgq~%B`~55WBm6?k9J`TtK!jr+8;|w=+!Hkvy;Lp-~l5EmP$$=oz|dr}W#z2(JI9Rxps1&-+hpE`i<5M-3NFECura zf8z?^`rB^pxkY)c#a;M>d4ulL2)(sEAY8nx&P+){bM9aHK5Ek~eewCrmNV}8C<9^JjAS95$b#p?mufIhgV9YLX9+ILk z;&9{t$bNTKJ$Q3M+RTiHK)piin*!zYy}M&P6*zc!${NZdW@gddtW@_**-gLG(miKM zW|Zb#YObG-@14Cp zw`^CfXu5xhpqSDJI_SD`bnCojo}{$3G56~z2qUN8d*dHIK= z_CnUR*OMeX_{Bx6>gN-xocU+p;#?mQTm_a!P<6|9mzeh54KdDL8>nLV!^sIl$0^S+ zO0$Rt>7Poa8bQ*zY309tMZYM7;giWp+tQ!;A^uP^E_U7y{l z7OAJr1Au59fA>E(lXhB&c|Q8H$!1fL`@&%>ZScM&9S*97O9k7}Tx&FoWth|Y|8$9_ zij!FC5_RYMGMtt|e`|B=RNm&w2HIh}eh$QMHA!&$s+{z)dC} zgaWvEwR{oxN5@AW;}HKX|9}lh&whVL{v?*q3+iQli{Tuju3s}e_VoQ34;#n7V2&S= zPG8im6#n@m1~iMJqRir=$e0)fb#;^?Mio`n?(7@akcBS}m?SE>uceI^w)l?q+MASQ ztF6hex7KIcEw>p{7_NEb zmDrgl8t4FFVZQ<^f6e3r1!K#{XU%2AX4=hCFjy|OK#`NF!vadJ12WGa0&`eR+VUBJ zg@YQ(nt#mwP3*|FiMT%PoyfK1TM~uqbnI+6*wCG0dS0GkKrAU`>hI*2n%5%Fq)wg6 z4F2`jyPdB6ip!ZWu9sZv1v%{{Zu6?a>gU5rDq?Z}fGWCAQGU(`LU#Va)Gj3|W zFjjm5exG|*M4$S{{U^ye5XF}=1_^U=ew1^+UC_ICHYW9(Qiq&5-|H)d(mR?28V0^E 
zP?Plc51?~O5^rs-4HXkL(sUXu`xt)sh{+Q3Kk2Ej$Xl9j9odBh4xz%$6{^?({~`KM zVZIWPYmOt@)m5{GpYC1(dvVROrn_^~jk4m?N`l9yp}HH1Lex0eryg$;96U|%Jfs#B ztVP#&CfoJTu$ft?$Zd&No^-~u^x$<%DIE^x?2f_;={={LxRHXAg1_zKggC?O_L&z3 zT+i9a{Ir6l-@rIvG5q#b9^ayeB4#jy@Iy4EU&^=xs5(CTJA4Wn{-&v^dxUN`H8wl} z&pnw}s+v3ezwu|KFG79KqBr|O@Z!)t$8vMmhRd2vzUSZXlJc)62`=Auzdn#|@1SF( z?WN;*7I*%_aTsTLXh0$$6SrZvab>K)0i0;BMPI)j>#Y>pW3q=_*E-Py|-CS zkTGZu@(`sqJg3SbfiW2et7o`I;->88?HvqGM-ev*qoEJFb=eQ`3yMEG=wCV6?9--o zIo%IQBd-YjpIgR+*W_0~5x+y|^wM^mkI_rjS78tT<;f8*CN;LUL6G^MdYy}A8UKAS zm}Mw4n^Jb&lD2&O3faQq3=BU%=cr~|oll1F@4tFOiglKPA(T^Q!9>p#n&5g(HP-Ql z!Guz~_Hl*L!7_9;;Y&9)*OhL2^Pq=Pg#G;Hq;6y}(aOgtK{cNBsmxObs(0VtS$wDI zaS`usYYXjYe}kTf@k1?R5d<%g#$q)t==D3?#MGvMU%&ql$*~K2JBA*)^k76g{WZ_B zLQz}LI z!OWFuDDh~wn)Qbf!NiK{4+}!J@>aqrhqnc{Z8bY1Yo&_B%3ar zI_}u6#eGM*{~*b)SJ(wZyYdw?xwcY4?I7^-@i}W zIUZbpn46Ww3?s(Wt4+Z8H8l|v7w7I^?Qzh_-=nA0HKIm))_{o_i8>!Gf!=&_5E)VF zdNZUpVmPTq1} zmM93}LjEgzMH(S4ZsEb-Xi5493k%p{7arkOuD<&>J9Bf-kT3|C{#ebZWR35g78=f% zwuOsF&~wO*KgO(y-@5)Mwh{NvXyvQUR02n4Oax(Hl?g`STu&V-QOuR`^2fe(N9&ZY zRYpYMUytija~+D-AXG+%Z8=f$7&34rtpD6?XytlY4a_`p?o-=L^B$^zC;oF35-8Rf zA$RBP+vdf|IjF?1MWk-;ggo>UbS>YeYg`CtC#kL=1c98?f;I|^$8vIH_v!=1lfzLdI9V*qm#2Y&QjgI*)t{QH=9=2>Fhn?FlX>pdeDx1vDBX$<&%V8IvcirB zA`&}qi3LME%3M!#q}L9d5)#i9hHNdqrF40*QLfrmVF${W}Rv}k>{g`qY~0lEIN(3 zMu4H_b#-;ZIj1>2^*el~+6)`CXP2WBTdlgx*ytN0T%|t=rwwM3!ZF@prkT9^_KaDc zV({N?AVg)I{Vk9}?A5o!R{iPdoM*V_vg{^p@ZS(#Y@(1a%&NW_+2D^zX z&84G6Ax4@lIw9BxD>v_L1?VvakYq9SlQ2xiXT!wN#JkpqWcgKdFXZr)j5TP~2)|a_ ziXRf~7qxK=XT4E-KWcO1LqLV)`|MeLvg6Gh^}utEr+Ca7wDa@WWF4GA{z2FRRrVZ( z+A^5dbq9r)rD6Xqh5e5x4f0##gcK|q7C0QU0?5T1aQ%+IS zM9?=fqZ7ROIF4;rtY#P{R);|08*#2WDC^^B(JFLOHe6Ev=>-pudWrdTCwGl{E^V?^ zT&2N8Jhp;Zm50EF$=ueO(cxbh0e%(siT_#m_J2<}-c1;AC5B4JEpo6Y_azh`rT^N8 zoi50!Bqr1TZK=aIE+Rs4B_PU61>|4;F8-$%z#?>-stNK-w6O#)mW845zdY?h6NN zv$Ib>qZ5#Omj3S(=O6oIGx$}%MadKQYhI1ySKr_13|u24Z0Tr3L4METM6SpTgwqF~Z>WK>!kn&VS&ZzFokPJBT_IzO z7cvcB$f}u%Nv!kzj%+11`E2WR;33mV4Q6X=X5dnJ)+A~?B;!5ww;TWuU!^*IiiHxB zmxo(JqSv|%N*K<;c;oV<0VH0;Ja&8QS;hw zzT4jnCT4m4e}5-G7HLDq>c-*DNKAQ*)ScJaraf6h8?<1epz?fp<&yiSe2vb_MZUXm z^wa|)7_de#xHrDo_I{=mEh70&uifrDkJC9xEp6iXUz~AurYI>AwO)5{5dXZN=x%Bw zvk?$19W|17F+%+k94CR0(>stn(jj|sSPVb9?s}PM{KO6Gj(BQ;bH|LV`w7KYq@KLMu5X|eQB#}KO%ee_`Oi$BXRw`u%*uH4=m2k zygWs8{~fc7dqgz&q^C<>vi zM^>PC{svkl{E6oD>Yrajn9C(1T%8`U727ry-_zlgcwbsF`1P3&vS8_!3kRz*gh;lQ zcUMZMqLMRhSAPHBQ+IwZ;46u-13{H?84u+dkkH+|%~ayUr3DgfmpTe&s563XA(g<8;EHz8t^@UM6{bV>9{Iiku#=>-ONp0`48MnAo-`9sPizSB4hV2>g@Ccnvwi6xVcmqkeLM!Y%pDLsq z4i)(wZcqD5-$R<*ev78GdRPS!_0ukngdZ|dpVI2)@ykT!L$F6H@w8FnpbWNl7C)Y) zxrs^Z23Z%?xu5)fNoq_2_aG^3on*-n(vzdB-#;FJCkwyzmfu45_&1bK{ryK+R|VQn{NsK0KUcMPOe8hU zoQ$b?c%otKjtPF9U~d6}b>O7j1vDz?%NJzm&mp20=!QX{2sbZ17wQ1F@kLmAC;Vl} zw19Hy5D%x)uAY5o>W%(^b(rM;N<< zYlsv@~Y+99pZ9icl=%>NXHAjbK)vLHcmBGiP=ka{9A4+q|ka;(Y}VJPfLzbI{v90 zmq9UK)#&n``NGu!KR){^b$LUBFBtjk|s+c9B{%9Z28!>0ejDuh662t%IQ9fYv?_+rZHv*j2CcgDL= zT33+$xU}cTh_mSH1F>5)+6r?b@~@Bh|8(n~{~a2WpN^<1Fz<3fLNA}1+u(72MpWzW z4M1p-Uj1EP%+4smIIbBNG_Sgw>H4q2?*1~;_h!eerbf%1?fln!vo%ewCk=M{GY07o zMeuEXA1LDBk_+9HkHHH%v+b7tHGINww8aY?H5kXDdWBS>V+&nJl(n9m_w1CI_XS=d zW2De3Laq$x0#d!F{VZMOd($mNi}uSV%B;GA1zhhaGN8Kr70IRMC;tSYNOyJ{dl;N{ zBJ_VRKREmghW_ohy6r`~{E3n;p#tM!2xkLXh~I2oKbP)prE@VVi+O#aV8`EMi!lkI z>#ry(vLj$!em<5P*Yv>)i{A)`;>PSUv+lc4_~~Y+WP-k-LDzCN(XoWh(oT z3b@Q}x!?G>E%TOXChF>*(fW?0OHQfW)=6?U!|2K?TyLZi{)%nB+ zcS^tMJA(4VX&-3f(p*obTeePX5*`zT%Np1sNoXg`^mTQ0EuGAaJpTFP^Cyu~bB?IX zI4tQQ%j^mV?k$l@{NdeOiXf27?LeK2tULqCuI0|?Mwn2LxoEC^2i!UWKNQR;YYt0 zRI!UJOQCWE=scwXnp2U(-uI$+MXa5a3A0s;|Z`@M~ zff(Nyb)YYa#^aG2JacPasowftpR;4EXJn*KKQWjt!ejo2q-zhGIXpCFMX#5~L$l_` 
z-U1B{Hz(}s37(fsbyXNaF{Q~;1IGO?c7%(UpJ$LL2C%Jk-`m~SOeoerY7kjy%h9O_ z?ES}mr#doWig0fFLk zisR9I^NPkDOkz@*-3puG&KtkRbPQ|~BB^-JF9OKHFFvj+;xX&UW^-C@U+_-+vFi2` z7CpTTV7~LV)Ev>e_KY`PN2h=F+dm-Goh|*;?cF#+6jaF`x=n%tj*6C+RzY#Gs1goH zUO>&T3~Mcx`@<)2NE7L(P4GqpNVy#-)U#JHU>j#{zOF5vDxYo$i6j8DUS0*PkX)Zs zfL((|Ze(YtY*<*>zxMMT;To_OuvzP;`ixHGFmkj8sygv36>tdhc2yc=CH={<1(nDC zYt)9eeA#*1xA=*QRT5M!vcKqen%tFZ-JCW4n0K9{6Wuwr8<4P~rX*}mlujkzHb!C& zA(NX7BJ%pyQd&d6Vk}+QJz{Z~PON(*ReRF(l#lOlHNfW!**6*Yq7w6z$M!Ib9FC`4 z*0|f(x`IWVmZ+*Xtx&9(jXQm@i(TCw+5I4P#*_6k0}W?^<;5f0{ryvqIi%0%Wd3ov zL2VzNF!|ZcO%kNs@w`4mDF106{@IziZ)*V(>a@Rpt(Mks;l(m>+^=udB}`LY=1SAZ zHNNRXyJWpFIpl4$QK~v7+0A^N`f5d^)`3W|wUCeuOw>*v;&E|tL54ZlJfH*B zc5|XE1l%;9R6K(8A`tXaJ6`dF*a&pcAAm(D(xD4sm5@SI-`w21xzY`qE8u3sJ^-d> z^+y{cXvB~}+yEv?1RUl#pxS)?G-Jkfr-ppUto{=s%mJe|T~DSQQ~!lSVT1fvG=UXJ zbS--aYEa!nx<#)O7geG$t~1RxHi#E~ahScE3=Rkq1*0p?InB2Pb7e-C*DJaiib^dA zZhW;05vY0Mf+IpVn@>#-PPBShWRDJVDWmqMDSt>`wH4*c7zdLkb%?H_-}EkhPdrVb zoB15+sJe|7_L3R$7P zCChX{H9NBN)200)n3W_BARW zC*w0Jj@(_7cdd^&nG?+3{-hl&u^f#*F>!m7fq%)xVg6<*Mm?dg?!e}wNC+WrI>!6i zh@UigYc4gDan5$<%%=O2#@BDp(W{P0Ly;U4-kF#b1BdV zr;_%)jU%>&aD!f(Uh^9*`~JU?8!)$@C1ni06M4$S)F z+|9Z;Kj?mv&|i3UB`KZvx-YJ0xjWG$MvxZYHYiW)l{^HW*aZ##Z6|Fl58?ch)ws*3gc@|&h#k0i<%)uuI6^1uk;}`K@yXBD z6M^CNx0l&PDHq>=((OEZ$;|C=39jTf8L!K>ezMiI3`~)f-sL+XnG3PE2uy%2;m3PR zQdGrs9e4Ag!-$r*V#Xeatk)b_MB-$tO3xHi3LKh68Ysx`?B*&*M<>*3FYbqIvN|Q< z;+B{qx{RD>i1qRQ`_>z}G3m>hi#{=?QD3I}By3+~^;N9{-|4ILZW8eKU)_MDuVcM+ z6YGWdYDkJ7*kmNl=S#^WKpN2we))kDU@i#md%}d10+2(Ycd|W&czgmyNK=JIZf>sg z_1VfA$iNEs<)#C`ngCe2l}voFg+YtVfpb-v+7Q5RXkvR=$UZdcRDvIx4#Fz#Q+m+p8-S zikR`bH!XUWKOs>ipJ>B4H#HQnxF=xP>&AG|Kv%J8(f<@qffQVRUTgyN2+o*7& zxipXup>$pEYc{G7<69XWoxb=oZ&8Wa4Z{aFo?GJ_@(H|D=-Z^k{7wabYY92bqq>SA z)%C%{hfPC6L!0&^rtE`RZ?gYhmz0!z8UGB_L*w1qdc+{n*MFOv1I)BTvgzC1b-0; zuQH2n)n{Mv-Ia5W)IZuFAvK*^cWAvH!DZ+*FRec9BMeJXNEV%)UG@6%kWz*5eJ%%k zmL__xjwj!e*mWIW>cqc3?84GvoXpL|-%gLQ4=7mH>&LpPMvq?8xrF&L8wotL!1AG( zm-@yjCvJ0gD6d?E{%Lq7A1PR{Sr^T6XI+|OX8lYP6-CJ;$V_YdNy5I)A&KeRfnPB( z9WvN7uY4{rz_5nL?bOEm3fx6PZOY1CV_6v4^#Wp-& zE~_Sjn`0a1FR7^!)%U3W_Dvt>CA`T|3|5oGwdS>Q`p3g0DV9vDXO`N~U-{00|GA}h zFE@Te(#pG)(6Csotcg;oixs*U>tBT*xSXyd#xVbcWo&$Q8F`e^+>tLIiS$;s^4Q%w zYsp%{uy*MGRQDE6S$AK!Hz`OX-KBsa-6ahoCEeZK-7PKBNFyyJ-3`*+4FWffbaOV( z?|skzaF}tN!5QTK#@>6abzPt9wr%DTsYo}moFwwtW@^tFv_A;3#%T6irIqEaq;Xu) z>f<%dj8k?BS{(YP`GIpH(?+d%e)r7PtqU|CXydT#sBufvi|yJYs^h6V*9T{4eYIN@ z2$xq@oi;YiYYTMM!D~XNTy#3GEoz{k@GKC0VK4!5MqxzmKm-au^k(<_ZA+CpYR~C2v;?sRcDt~6oedlH z>(sg;j@e#1F8M+#K=*_nH&^DKPa2k)thxHR-Iz_AJ!OUL-1V7A_TcXHup_7MDDrvO zd)(Y5mJjhaJ$y@b)$Rhfc2bdk!q+i8Pga@iDeP$l-aG6n-x5^;DsKHE!& z%jYxPWuE-0$=tsg_O&4);SKpItKqyZ`-IOi$bv6teC;;x#*Q{n=Lo7=pdukJJGNsg zj6_#gJLM?ZkR=wql=FTM|N3=Wy#WR4x@B%sls4{DCN8NaY1iK#Zjm5)qI#NGPh8NJ z@<909@vZamcCY_r-YG1X+QIRIZhyyL$uA0dMeN!1z5?*sWAd1c!Rrn;;g1*R zd*CO(0@)de?cduOWuaq#k6sY4>k{g{{#U*VbMN!~EtATYg8}N8G&71wpwj(L7CKr} z);1GYU_E<)T3qcxn5I=XlJUApAz4E`rH%3E$ac4A_|NDEhabi!D~#PhoDe}FqphQ} zH?gI0b<8GarPFSLj-}TY9_B@0;rt9V?=>B5{R4dNTvgjUSCE40IbZ`7is^Y7so>gx zPA!1#0M^;V!DIF~P@;e}>BXxIG{cA#e?~`lz*PgfdM5xJ5pnwP`R$v5_)*bwjC~hR zL!-?oDo0hu$sxm}7?&q@wr#ymwvw45l?MfwTEf~~a;OYdX*oID@O|^V(e6=71PVC1 z+Fsc31~c25MgBn#kJJb?=M%&Z=} z3RK4%VeqE$C+_o6oDvcpKgo(_X2ohXh%0n zsU{hq%55Wkbhg%zu4^MG(x?y5VJllc7OS^gfr~|Y>OJ|w+qAx?flEQoq$cT^)F$Rq zGftg5k;#Rh{yDl#c2AFQUv)O9Z$uMk{^U>tN7~BE!u?&(>gX#;Nt!&27V(wP5eG*H zrq$~8li1|ujphiX%`IE(W?$ZEcS|=69GiLDCmE7O=VLR+VPve;o~0EW_z)^HIVb0# z9m6c94NYu3#!L=dx5L>vVCWM)MGtt-0*7e+p}0@860>D zIAG_bt853ANNkNR7@o~9rlF<(EG&#qOuQW5&z{;ta(@qkR9*FW-T}G^s5wkPY$$-I zl?ug(|93#G3_e`(OX1HDLKM#6h5o%g(i46#3q(ID?uex`mSU}Gz1Pe&wlnFP(4kNS 
zJUkuIJYR-SA3byQ3sK+kb}^Hb=N7)(F5zUjJhHShiq=c~41dLCrH*AL$pNTAYx@CzszbvVQy3ux<0h2X;6ct+BH0a+m$uY|D2_ zIhyTq9Nyk^IoaP5Glm1GB&B2-by<&(_+l|UIpE&4x4Br|<}en}v>M+2zENRA?D~nq zu%LD=6?Bi6nhIZdvwFrTh-PRHTtVp{eHM+|fsKO0a@zgxCH=7U4YQH~pqF5OB2iG! zcshUlsXCP_j_mETEkg}12z-vl>yM3dH}!TLdnD|5f~|9Sl6ZFUE`D^^eeW3SJxPMx zWGce4A79T$a$ktE;{^nkx=^dh4O6iB&1wn-)j8YgJy)4)*_{ODF7_@c5M~hAJN%}p zOzcEEMmbzptCtNQ^zqqmR%Sej8Sm`O!=2Y72(B$K42|>)r3^p3ke__{%a0s{M9%Q?tHhBnPwxkpvuRDHR$F zeHUnds?Vx|q~KfFKigi5D6;XF=zkD@u?PdWOr=M4G1nq!@B){1xiVbOOJlv_%J<9i zpou39`jqI)#3_~vouO#BHbq@@7k#{GHlD$b`U2+A+76-Kp6OulfNC+hVcCMLrE{W{ zh{y8~j^(xe(@A~tnj3U@>a}2qM1CFl&R@E#LpGU>Qpi^K^%{6^Ys%u?pjJmQocdWa zA`(kJ<*{M8*15W&kP30M4?0`SRf?Garc_#Ah9qXKIj=~C$rd$)<-*SX2^R?7J9IM6BZ1)F(o8oz$uh%TsbyX7yZHjeuwF!Rp< z5d3xZBXSu9H=RJRGcfD{F%6#P35!u5pdV|}E!I_9)B~#m`q4v3{z`-UKT2@k_x?rs z=VhVo{+r{0XtydO22FfOlJew9ws2+&5?@IBS?dKfR<{SYj6)%1axt#nHxo2P*zm#R zZ@H5%q@C^6l=QQI2`*DYJbx*G(%of#h6T>*e1hO!97Ufx*LX+n$a^KYEzG zVwqs+>Bk#xOif9U^Va4Eq|d=QB^oEj%@6ej#_}0=Ko2|m&Nhf_bx%@*Oc0y=Y0$%W z$@>$b_WNDn<^vYfzQ1eW#5P}JfrWrZOsk9nf`8BUODhy#=-y5wK#cC063_#j8^~Yp zfw%@>9#tSh{y>HEi8MEi`906;xV58!krBYXk^n38c%DSXZ`N14r>9rFlQIFAP0o8F z_2oMAt!`uldluFY3}5?XnUo4I-{xLILtJ>3-QcP*{l9#$uWuNQKBwnyd!)7C2}QS5 zec%7!;Y8Lfez)Dy8s*0~U$VEX=~+J#5?#I5lo`>+F``%=D=+SPv><(5uET8)VMaQ= ze+4ht4uAwy7|)uCQeSE*-_H@^4H5JA2s7#+QeRA>iHM@+8H;qVMHBuu?zQx$_gLHF zdHvTu)LxuWVvdk*aPh_W&%!Nu-qyH7FV`?FIm4}Mk-^^<;rlSA`+WGQu z!)u7WePyBaZjATU0(-SP%UKQY@?Zl7AyU;I1|BIKd%T3(L0H8%e*ttSO(Us)7Z*PP z9%>rIg-?syF#aMzK3^1|#TL&2pkctBoicu&Ez{z0Jxp=1$!>0LMj@mEFE+XsMnM5l z0mFMb9+U3$B4x{XG7h(~Gj;Wp-F`KNV$fJL%{+M~w(`?LKqBTzACBvuGM|HKA#Cfv zJpA2f&vuo@fB5F3{=)w7!wFGOcDtXA>2u9Z)x*Cs)oR8{T&U=LSlr*w`kX+{gb+g9 zYW1Vc8nmgBDzpdw7<4aG`w8;4t~=7gg&pemXF;7K?|m|ivc;OOB_nG)-}gb-zAha8 z|Fr;#?iQ2uz`w;}1+YwF65mTlcC_amMng%=W!D5-3D{>14jlR!g1< zrLW)uaxv(gN*_yokkHcd3kp6`pLpW{_v}mZeg1D>aGrp_($LZpoaTK={{qHBY;}2% zHrxT)u_z}_(V<{R+&VarZgkuRb|X*7Wddly70O}k?EGChc9t3r{Siq5Ju~RwUT@Is z8jUos{=<#^YEGq80iB>{r@ihwxYfBG$FSMNH`w&jUv`n(TZsR3Vr(cdb*zpCP)F}G zKaEuEjhA#9Ewpi#e=GrXPA;w4>z~I4OVVJo z?mK9~AwiFFp04&!5ePcGadj2{(aU^WJNI6#mFFNy#%yE}m3ogo;o*5X)lh-PxyDj8 zmRexVxNiw|#Pi65SJ*h0bIlfqm6bKWb4Azy+8C5hWEL5KhJ{4p&+QEPCbe?$U*E(G zI9~JRIuI$4-ny!l&Y%kfmI8S)XnGlg#-v*Yby{+AKS-7UG^O-^JjFWAoG*-+xjD6R zE^wU`F+L(*+zL>@-+uSO5bRpspF=uNu23z#b(tXq{=?q1nNl9-Q;fR@Ia}A&cHY)o zk^@2&Z;Q2B;10aqud>yeN)MWJ9f_t4HlS<#Y?4%4@!SXaGx>r(!*SWZz-^ z4+NWASbqjadItyJ)U(>&%oIuYqcIfe9uPFSe(((*ds_K1(SGmg`EWQAJ6@ZnZ^&+&?Nd zc8n^O1O#m3==xl$JORD|puipC-L26O zeMQUQdZ#Ei9`VR#qn-m$;g?J3P%%t>^D1-f{XAWEG#32+M3JilUD9l>|0yl&WHyEC z%%v_w^Ideu@=3@;9S*v>HJ62U7G|-WjCv z+cnu<0dY zNgq41l)!=kLFsk&);+TiY6&s^tc|fje?+_|R+1haQ~@-3l&V zo&2MZPr&xQZZ5Im#x$7VaS=o*f`}zd#{z;Cw1hAKlD>HQwTM5!9KKM?>+IHSfBONi zKj3A50q0;vg)u;M#hF*jAIy`T$)iOAu-BWryUA)ZdE3>N7hwW0d}{-&J;2?IwBuo7 zc5ZZsSycIRf&4(ra!RE=Sn- z-Z^QBz0~J{w*mq|KYxk=5Gg>#ynVObx$wm{GwJWox8L8`WiW+D4gU(8gYIM2(Q9)+ z1`C4{xtCDB+8X5SV;~&8s3&23v=9K>+gvb?c0mdyTI6$K&|LJ-a z5OM(~pVeeA0VbDOK%jNJsQ6`$_4D6&K~z(-m;L*{tOR>|hy39`faL1`p{&`gU)_~keF@Lz!+sP35uSh!R2^vd5^l+OcA#GaybnAX zx%c@(_UiB^{ye$sHE&JWMU-S<;M@Gc+6>-w;D~o_ig%!Z{VT4V;$HxOd;yVCm^?Yl! 
[GIT binary patch data omitted: header image]

diff --git a/nextgraph/README.md b/nextgraph/README.md
new file mode 100644
--- /dev/null
+++ b/nextgraph/README.md
+[nextgraph-header image]
+
+# nextgraph
+
+![MSRV][rustc-image]
+[![Apache 2.0 Licensed][license-image]][license-link]
+[![MIT Licensed][license-image2]][license-link2]
+[![project chat](https://img.shields.io/badge/zulip-join_chat-brightgreen.svg)](https://forum.nextgraph.org)
+[![Crates.io Version](https://img.shields.io/crates/v/nextgraph)](https://crates.io/crates/nextgraph)
+[![docs.rs](https://img.shields.io/docsrs/nextgraph)](https://docs.rs/nextgraph)
+[node:![NPM Version node](https://img.shields.io/npm/v/nextgraph)](https://www.npmjs.com/package/nextgraph)
+[web:![NPM Version web](https://img.shields.io/npm/v/nextgraphweb)](https://www.npmjs.com/package/nextgraphweb)
+
+Rust client library for the NextGraph framework
+
+This library is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and to join the dev team, please visit the link above and create an account (you can sign up with a GitHub account). The [GitHub repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues.
+
+## NextGraph
+
+> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs.
+>
+> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers.
+>
+> More info here [https://nextgraph.org](https://nextgraph.org)
+
+## Support
+
+This crate has official documentation at [docs.rs](https://docs.rs/nextgraph/0.1.0/nextgraph/).
+
+General documentation can be found at [https://docs.nextgraph.org](https://docs.nextgraph.org).
+
+And our community forum, where you can ask questions, is at [https://forum.nextgraph.org](https://forum.nextgraph.org).
+
+## Status
+
+NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/).
+
+## Dependencies
+
+The nextgraph library depends on [async-std](https://async.rs/); you must include it in your `Cargo.toml`.
+A tokio-based version (behind a feature flag) might be made available in the future.
+
+```toml
+[dependencies]
+nextgraph = "0.1.1-alpha.2"
+async-std = "1.12.0"
+```
+
+## Examples
+
+You can find some examples of how to use the library:
+
+- [in_memory](https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/nextgraph/examples)
+- [persistent](https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/nextgraph/examples)
+
+## License
+
+Licensed under either of
+
+- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0)
+- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+  at your option.
+
+`SPDX-License-Identifier: Apache-2.0 OR MIT`
+
+### Contributions license
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you shall be dual licensed as above, without any
+additional terms or conditions.
+
+---
+
+NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively.
+
+
+[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg
+[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg
+[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2
+[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg
+[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT
diff --git a/nextgraph/examples/README.md b/nextgraph/examples/README.md
new file mode 100644
index 0000000..fd2a88c
--- /dev/null
+++ b/nextgraph/examples/README.md
@@ -0,0 +1,17 @@
+# Examples
+
+Some examples of using the `nextgraph` client library.
+
+Run them with:
+
+```
+cargo run -p nextgraph --example in_memory
+cargo run -p nextgraph --example persistent
+cargo run -p nextgraph --example open
+```
+
+See the code:
+
+- [in_memory](in_memory.md)
+- [persistent](persistent.md)
+- [open](open.md)
diff --git a/nextgraph/examples/in_memory.md b/nextgraph/examples/in_memory.md
new file mode 100644
index 0000000..900cad6
--- /dev/null
+++ b/nextgraph/examples/in_memory.md
@@ -0,0 +1,13 @@
+# in-memory LocalBroker
+
+Example of a LocalBroker configured with in-memory storage (no persistence).
+
+Run it with:
+
+```
+cargo run -p nextgraph -r --example in_memory
+```
+
+We assume that you run this command from the root of the git repo (nextgraph-rs).
+
+The `-r` flag (release build) is important: without it, creating and opening the wallet will take ages.
diff --git a/nextgraph/examples/in_memory.rs b/nextgraph/examples/in_memory.rs
new file mode 100644
index 0000000..6a61b83
--- /dev/null
+++ b/nextgraph/examples/in_memory.rs
@@ -0,0 +1,178 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
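+
+// This example walks through the full wallet lifecycle, entirely in memory:
+// create the wallet, connect, disconnect, close, then re-open it, import it
+// as you would on another device, and finally start a session.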
+
+use std::fs::read;
+
+#[allow(unused_imports)]
+use nextgraph::local_broker::{
+    app_request, app_request_stream, init_local_broker, session_start, session_stop, user_connect,
+    user_disconnect, wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import,
+    wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, LocalBrokerConfig,
+    SessionConfig,
+};
+use nextgraph::net::types::BootstrapContentV0;
+use nextgraph::repo::errors::NgError;
+use nextgraph::repo::types::PubKey;
+use nextgraph::wallet::types::CreateWalletV0;
+use nextgraph::wallet::{display_mnemonic, emojis::display_pazzle};
+
+#[async_std::main]
+async fn main() -> std::io::Result<()> {
+    // initialize the local_broker with in-memory config.
+    // all sessions will be lost when the program exits
+    init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await;
+
+    // load some image that will be used as security_img
+    // we assume here, for the sake of this example,
+    // that the current directory contains this demo image file
+    let security_img = read("nextgraph/examples/wallet-security-image-demo.png")?;
+
+    // the peer_id should come from somewhere else;
+    // this one is just given for the sake of an example
+    #[allow(deprecated)]
+    let peer_id_of_server_broker = PubKey::nil();
+
+    // Create your wallet
+    // this will take some time!
+    println!("Creating the wallet. this will take some time...");
+
+    let wallet_result = wallet_create_v0(CreateWalletV0 {
+        security_img,
+        security_txt: "know yourself".to_string(),
+        pin: [1, 2, 1, 2],
+        pazzle_length: 9,
+        send_bootstrap: false,
+        send_wallet: false,
+        result_with_wallet_file: true,
+        local_save: false,
+        // we default to localhost:14400. this is just for the sake of an example
+        core_bootstrap: BootstrapContentV0::new_localhost(peer_id_of_server_broker),
+        core_registration: None,
+        additional_bootstrap: None,
+        pdf: false,
+        device_name: "test".to_string(),
+    })
+    .await?;
+
+    println!("Your wallet name is : {}", wallet_result.wallet_name);
+
+    let pazzle = display_pazzle(&wallet_result.pazzle);
+    let mut pazzle_words = vec![];
+    println!("Your pazzle is: {:?}", wallet_result.pazzle);
+    for emoji in pazzle {
+        println!(
+            "\t{}:\t{}{}",
+            emoji.0,
+            if emoji.0.len() > 12 { "" } else { "\t" },
+            emoji.1
+        );
+        pazzle_words.push(emoji.1.to_string());
+    }
+    println!("Your mnemonic is:");
+    display_mnemonic(&wallet_result.mnemonic)
+        .iter()
+        .for_each(|word| print!("{} ", word.as_str()));
+    println!("");
+
+    // A session has been opened for you and you can directly use it, without the need to call [wallet_was_opened] or [session_start].
+    let user_id = wallet_result.personal_identity();
+
+    // if the user has internet access, they can now decide to connect to their Server Broker, in order to sync data
+    let status = user_connect(&user_id).await?;
+
+    // The connection cannot succeed because we misconfigured the core_bootstrap of the wallet. Its Peer ID is invalid.
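+    // `status` contains one entry per connection attempt; we read field `.3`,
+    // the optional error string of the first attempt (layout assumed from its use here).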
+    let error_reason = status[0].3.as_ref().unwrap();
+    assert!(error_reason == "NoiseHandshakeFailed" || error_reason == "ConnectionError");
+
+    // a session ID has been assigned to you in `wallet_result.session_id`; you can use it to fetch a document
+    //let _ = doc_fetch(wallet_result.session_id, "ng:example".to_string(), None).await?;
+
+    // Then we should disconnect
+    user_disconnect(&user_id).await?;
+
+    // if you need the Wallet File again (if you didn't select `result_with_wallet_file`, for example), you can retrieve it with:
+    let wallet_file = wallet_get_file(&wallet_result.wallet_name).await?;
+
+    // if you did ask for `result_with_wallet_file`, as we did above, then the two vectors should be identical
+    assert_eq!(wallet_file, wallet_result.wallet_file);
+
+    // stop the session
+    session_stop(&user_id).await?;
+
+    // closes the wallet
+    wallet_close(&wallet_result.wallet_name).await?;
+
+    // if you have saved the wallet locally (which we haven't done in the example above, see `local_save: false`), next time you want to connect,
+    // you can retrieve the wallet, display the security phrase and image to the user, ask for the pazzle or mnemonic, and then open the wallet.
+    // if you haven't saved the wallet, the next line will not work once you restart the LocalBroker.
+    let _wallet = wallet_get(&wallet_result.wallet_name).await?;
+
+    // at this point, the wallet is kept in the internal memory of the LocalBroker
+    // and it hasn't been opened yet, so it is not usable right away.
+    // now let's open the wallet, by providing the pazzle and PIN code
+    let opened_wallet =
+        wallet_open_with_pazzle_words(&wallet_result.wallet, &pazzle_words, [1, 2, 1, 2])?;
+
+    // once the wallet is opened, we notify the LocalBroker that we have opened it.
+    let _client = wallet_was_opened(opened_wallet).await?;
+
+    // if, instead of saving the wallet locally, you want to provide the Wallet File at every login,
+    // then you have to import the wallet. Here is an example:
+    {
+        // this part should happen on another device, or on the same machine if you haven't saved the wallet locally
+
+        // you could use the Wallet File and import it there so it could be used for login.
+        // first you would read and decode the Wallet File.
+        // this fails here because we already added this wallet to the LocalBroker (when we created it).
+        // But on another device, or after a restart of the LocalBroker, it would work.
+        let wallet = wallet_read_file(wallet_file).await;
+        assert_eq!(wallet.unwrap_err(), NgError::WalletAlreadyAdded);
+
+        // we would then open the wallet
+        // (here we take the Wallet as we received it from wallet_create_v0, but in a real case you would use `wallet`)
+        let opened_wallet2 =
+            wallet_open_with_pazzle_words(&wallet_result.wallet, &pazzle_words, [1, 2, 1, 2])?;
+
+        // once it has been opened, the Wallet can be imported into the LocalBroker.
+        // if you try to import the same wallet into a LocalBroker where it is already opened, it will fail.
+        // So here it fails. But on another device, it would work.
+        let client_fail = wallet_import(wallet_result.wallet.clone(), opened_wallet2, true).await;
+        assert_eq!(client_fail.unwrap_err(), NgError::WalletAlreadyAdded);
+    }
+
+    // now that the wallet is opened or imported, let's start a session.
+    // we pass the user_id and the wallet_name
+    let _session = session_start(SessionConfig::new_in_memory(
+        &user_id,
+        &wallet_result.wallet_name,
+    ))
+    .await?;
+
+    // if the user has internet access, they can now decide to connect to their Server Broker, in order to sync data
+    let status = user_connect(&user_id).await?;
+
+    // The connection cannot succeed because we misconfigured the core_bootstrap of the wallet. Its Peer ID is invalid.
+    let error_reason = status[0].3.as_ref().unwrap();
+    assert!(error_reason == "NoiseHandshakeFailed" || error_reason == "ConnectionError");
+
+    // then you can make some calls to the APP protocol
+    // with app_request or app_request_stream.
+    // more to be detailed soon.
+
+    // Then we should disconnect
+    user_disconnect(&user_id).await?;
+
+    // stop the session
+    session_stop(&user_id).await?;
+
+    // closes the wallet
+    wallet_close(&wallet_result.wallet_name).await?;
+
+    Ok(())
+}
diff --git a/nextgraph/examples/open.md b/nextgraph/examples/open.md
new file mode 100644
index 0000000..af873f8
--- /dev/null
+++ b/nextgraph/examples/open.md
@@ -0,0 +1,17 @@
+# open LocalBroker
+
+Example of a LocalBroker configured with persistence to disk, and the opening of a previously saved wallet.
+
+You need to replace `wallet_name` on line 35 with the name that was given to you when you ran the [persistent] example (printed after `Your wallet name is : `).
+
+You need to replace the `pazzle` argument in the call to `wallet_open_with_pazzle` with the array that you received after `Your pazzle is:`.
+
+Then, run it with:
+
+```
+cargo run -p nextgraph -r --example open
+```
+
+We assume that you run this command from the root of the git repo (nextgraph-rs).
+
+The `-r` flag (release build) is important: without it, creating and opening the wallet will take ages.
diff --git a/nextgraph/examples/open.rs b/nextgraph/examples/open.rs
new file mode 100644
index 0000000..e4e3b26
--- /dev/null
+++ b/nextgraph/examples/open.rs
@@ -0,0 +1,76 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
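+
+// NOTE: `wallet_name` and the `pazzle` vector below are sample values from a
+// previous run of the `persistent` example; replace them with your own.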
+
+use std::env::current_dir;
+use std::fs::create_dir_all;
+
+#[allow(unused_imports)]
+use nextgraph::local_broker::{
+    app_request, app_request_stream, init_local_broker, session_start, session_stop, user_connect,
+    user_disconnect, wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import,
+    wallet_open_with_pazzle, wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened,
+    LocalBrokerConfig, SessionConfig,
+};
+
+#[async_std::main]
+async fn main() -> std::io::Result<()> {
+    // get the current working directory
+    let mut current_path = current_dir()?;
+    current_path.push(".ng");
+    current_path.push("example");
+    create_dir_all(current_path.clone())?;
+
+    // initialize the local_broker with config to save to disk in a folder called `.ng/example` in the current directory
+    init_local_broker(Box::new(move || {
+        LocalBrokerConfig::BasePath(current_path.clone())
+    }))
+    .await;
+
+    let wallet_name = "9ivXl3TpgcQlDKTmR9NOipjhPWxQw6Yg5jkWBTlJuXw".to_string();
+
+    // as we have previously saved the wallet,
+    // we can retrieve it, display the security phrase and image to the user, ask for the pazzle or mnemonic, and then open the wallet
+    let wallet = wallet_get(&wallet_name).await?;
+
+    // at this point, the wallet is kept in the internal memory of the LocalBroker
+    // and it hasn't been opened yet, so it is not usable right away.
+    // now let's open the wallet, by providing the pazzle and PIN code
+    let opened_wallet = wallet_open_with_pazzle(
+        &wallet,
+        vec![110, 139, 115, 94, 9, 40, 74, 25, 52],
+        [2, 3, 2, 3],
+    )?;
+
+    let user_id = opened_wallet.personal_identity();
+
+    // once the wallet is opened, we notify the LocalBroker that we have opened it.
+    let _client = wallet_was_opened(opened_wallet).await?;
+
+    // now that the wallet is opened, let's start a session.
+    // we pass the user_id and the wallet_name
+    let _session = session_start(SessionConfig::new_save(&user_id, &wallet_name)).await?;
+
+    // if the user has internet access, they can now decide to connect to their Server Broker, in order to sync data
+    let status = user_connect(&user_id).await?;
+
+    // The connection cannot succeed because we misconfigured the core_bootstrap of the wallet. Its Peer ID is invalid.
+    println!("Connection was : {:?}", status[0]);
+    //assert!(error_reason == "NoiseHandshakeFailed" || error_reason == "ConnectionError");
+
+    // Then we should disconnect
+    user_disconnect(&user_id).await?;
+
+    // stop the session
+    session_stop(&user_id).await?;
+
+    // closes the wallet
+    wallet_close(&wallet_name).await?;
+
+    Ok(())
+}
diff --git a/nextgraph/examples/persistent.md b/nextgraph/examples/persistent.md
new file mode 100644
index 0000000..83bd5ba
--- /dev/null
+++ b/nextgraph/examples/persistent.md
@@ -0,0 +1,13 @@
+# persistent LocalBroker
+
+Example of a LocalBroker configured with persistence to disk.
+
+Run it with:
+
+```
+cargo run -p nextgraph -r --example persistent
+```
+
+We assume that you run this command from the root of the git repo (nextgraph-rs).
+
+The `-r` flag (release build) is important: without it, creating and opening the wallet will take ages.
diff --git a/nextgraph/examples/persistent.rs b/nextgraph/examples/persistent.rs
new file mode 100644
index 0000000..10d7a72
--- /dev/null
+++ b/nextgraph/examples/persistent.rs
@@ -0,0 +1,161 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+use std::env::current_dir;
+use std::fs::create_dir_all;
+use std::fs::read;
+
+#[allow(unused_imports)]
+use nextgraph::local_broker::{
+    app_request, app_request_stream, init_local_broker, session_start, session_stop, user_connect,
+    user_disconnect, wallet_close, wallet_create_v0, wallet_get, wallet_get_file, wallet_import,
+    wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, LocalBrokerConfig,
+    SessionConfig,
+};
+use nextgraph::net::types::BootstrapContentV0;
+use nextgraph::repo::types::PubKey;
+use nextgraph::wallet::types::CreateWalletV0;
+use nextgraph::wallet::{display_mnemonic, emojis::display_pazzle};
+
+#[async_std::main]
+async fn main() -> std::io::Result<()> {
+    // get the current working directory
+    let mut current_path = current_dir()?;
+    current_path.push(".ng");
+    current_path.push("example");
+    create_dir_all(current_path.clone())?;
+
+    // initialize the local_broker with config to save to disk in a folder called `.ng/example` in the current directory
+    init_local_broker(Box::new(move || {
+        LocalBrokerConfig::BasePath(current_path.clone())
+    }))
+    .await;
+
+    // load some image that will be used as security_img
+    // we assume here, for the sake of this example,
+    // that the current directory contains this demo image file
+    let security_img = read("nextgraph/examples/wallet-security-image-demo.png")?;
+
+    // the peer_id should come from somewhere else;
+    // this one is just given for the sake of an example
+    let peer_id_of_server_broker = PubKey::nil();
+
+    // Create your wallet
+    // this will take some time!
+    println!("Creating the wallet. this will take some time...");
+
+    let wallet_result = wallet_create_v0(CreateWalletV0 {
+        security_img,
+        security_txt: "know yourself".to_string(),
+        pin: [1, 2, 1, 2],
+        pazzle_length: 9,
+        send_bootstrap: false,
+        send_wallet: false,
+        result_with_wallet_file: true,
+        local_save: true,
+        // we default to localhost:14400. this is just for the sake of an example
+        core_bootstrap: BootstrapContentV0::new_localhost(peer_id_of_server_broker),
+        core_registration: None,
+        additional_bootstrap: None,
+        pdf: false,
+        device_name: "test".to_string(),
+    })
+    .await?;
+
+    println!("Your wallet name is : {}", wallet_result.wallet_name);
+
+    let pazzle = display_pazzle(&wallet_result.pazzle);
+    let mut pazzle_words = vec![];
+    println!("Your pazzle is: {:?}", wallet_result.pazzle);
+    for emoji in pazzle {
+        println!(
+            "\t{}:\t{}{}",
+            emoji.0,
+            if emoji.0.len() > 12 { "" } else { "\t" },
+            emoji.1
+        );
+        pazzle_words.push(emoji.1.to_string());
+    }
+    println!("Your mnemonic is:");
+    display_mnemonic(&wallet_result.mnemonic)
+        .iter()
+        .for_each(|word| print!("{} ", word.as_str()));
+    println!("");
+
+    // A session has been opened for you and you can directly use it, without the need to call [wallet_was_opened] or [session_start].
+    let user_id = wallet_result.personal_identity();
+
+    // if the user has internet access, they can now decide to connect to their Server Broker, in order to sync data
+    let status = user_connect(&user_id).await?;
+
+    // The connection cannot succeed because we misconfigured the core_bootstrap of the wallet. Its Peer ID is invalid.
+    let error_reason = status[0].3.as_ref().unwrap();
+    assert!(error_reason == "NoiseHandshakeFailed" || error_reason == "ConnectionError");
+
+    // a session ID has been assigned to you in `wallet_result.session_id`; you can use it to fetch a document
+    //let _ = doc_fetch(wallet_result.session_id, "ng:example".to_string(), None).await?;
+
+    // Then we should disconnect
+    user_disconnect(&user_id).await?;
+
+    // if you need the Wallet File again (if you didn't select `result_with_wallet_file`, for example), you can retrieve it with:
+    let wallet_file = wallet_get_file(&wallet_result.wallet_name).await?;
+
+    // if you did ask for `result_with_wallet_file`, as we did above, then the two vectors should be identical
+    assert_eq!(wallet_file, wallet_result.wallet_file);
+
+    // stop the session
+    session_stop(&user_id).await?;
+
+    // closes the wallet
+    wallet_close(&wallet_result.wallet_name).await?;
+
+    // as we have saved the wallet, the next time we want to connect,
+    // we can retrieve it, display the security phrase and image to the user, ask for the pazzle or mnemonic, and then open the wallet
+    let _wallet = wallet_get(&wallet_result.wallet_name).await?;
+
+    // at this point, the wallet is kept in the internal memory of the LocalBroker
+    // and it hasn't been opened yet, so it is not usable right away.
+    // now let's open the wallet, by providing the pazzle and PIN code
+    let opened_wallet =
+        wallet_open_with_pazzle_words(&wallet_result.wallet, &pazzle_words, [1, 2, 1, 2])?;
+
+    // once the wallet is opened, we notify the LocalBroker that we have opened it.
+    let _client = wallet_was_opened(opened_wallet).await?;
+
+    // now that the wallet is opened, let's start a session.
+    // we pass the user_id and the wallet_name
+    let _session = session_start(SessionConfig::new_save(
+        &user_id,
+        &wallet_result.wallet_name,
+    ))
+    .await?;
+
+    // if the user has internet access, they can now decide to connect to their Server Broker, in order to sync data
+    let status = user_connect(&user_id).await?;
+
+    // The connection cannot succeed because we misconfigured the core_bootstrap of the wallet. Its Peer ID is invalid.
+    let error_reason = status[0].3.as_ref().unwrap();
+    assert!(error_reason == "NoiseHandshakeFailed" || error_reason == "ConnectionError");
+
+    // then you can make some calls to the APP protocol
+    // with app_request or app_request_stream.
+    // more to be detailed soon.
+
+    // Then we should disconnect
+    user_disconnect(&user_id).await?;
+
+    // stop the session
+    session_stop(&user_id).await?;
+
+    // closes the wallet
+    wallet_close(&wallet_result.wallet_name).await?;
+
+    Ok(())
+}
diff --git a/nextgraph/examples/sparql_update.rs b/nextgraph/examples/sparql_update.rs
new file mode 100644
index 0000000..1b523d3
--- /dev/null
+++ b/nextgraph/examples/sparql_update.rs
@@ -0,0 +1,103 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
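+
+// NOTE: the wallet file path, the mnemonic words and the document id below are
+// sample values specific to the author's machine; replace them with your own.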
+
+use std::fs::read;
+
+use async_std::stream::StreamExt;
+#[allow(unused_imports)]
+use nextgraph::local_broker::{
+    app_request, app_request_stream, doc_fetch_repo_subscribe, doc_sparql_update,
+    init_local_broker, session_start, session_stop, user_connect, user_disconnect, wallet_close,
+    wallet_create_v0, wallet_get, wallet_get_file, wallet_import, wallet_open_with_mnemonic_words,
+    wallet_read_file, wallet_was_opened, LocalBrokerConfig, SessionConfig,
+};
+use nextgraph::net::types::BootstrapContentV0;
+use nextgraph::repo::errors::NgError;
+use nextgraph::repo::log::*;
+use nextgraph::repo::types::PubKey;
+use nextgraph::wallet::types::CreateWalletV0;
+use nextgraph::wallet::{display_mnemonic, emojis::display_pazzle};
+
+#[async_std::main]
+async fn main() -> std::io::Result<()> {
+    // initialize the local_broker with in-memory config.
+    // all sessions will be lost when the program exits
+    init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await;
+
+    let wallet_file =
+        read("/Users/nl/Downloads/wallet-Hr-UITwGtjE1k6lXBoVGzD4FQMiDkM3T6bSeAi9PXt4A.ngw")
+            .expect("read wallet file");
+
+    let wallet = wallet_read_file(wallet_file).await?;
+
+    let mnemonic_words = vec![
+        "jealous".to_string(),
+        "during".to_string(),
+        "elevator".to_string(),
+        "swallow".to_string(),
+        "pen".to_string(),
+        "phone".to_string(),
+        "like".to_string(),
+        "employ".to_string(),
+        "myth".to_string(),
+        "remember".to_string(),
+        "question".to_string(),
+        "lemon".to_string(),
+    ];
+
+    let opened_wallet = wallet_open_with_mnemonic_words(&wallet, &mnemonic_words, [2, 3, 2, 3])?;
+
+    let user_id = opened_wallet.personal_identity();
+    let wallet_name = opened_wallet.name();
+
+    let client = wallet_import(wallet.clone(), opened_wallet, true).await?;
+
+    let session = session_start(SessionConfig::new_in_memory(&user_id, &wallet_name)).await?;
+
+    // let session = session_start(SessionConfig::new_remote(&user_id, &wallet_name, None)).await?;
+
+    // if the user has internet access, they can now decide to connect to their Server Broker, in order to sync data
+    let status = user_connect(&user_id).await?;
+
+    // the subject and predicate below are placeholder IRIs; use your own
+    let result = doc_sparql_update(
+        session.session_id,
+        "INSERT DATA { <urn:example:subject> <urn:example:predicate> \"An example value10\". }".to_string(),
+        Some("did:ng:o:Dn0QpE9_4jhta1mUWRl_LZh1SbXUkXfOB5eu38PNIk4A:v:Z4ihjV3KMVIqBxzjP6hogVLyjkZunLsb7MMsCR0kizQA".to_string()),
+    )
+    .await;
+
+    log_debug!("{:?}", result);
+
+    // // a session ID has been assigned to you in `session.session_id`; you can use it to fetch a document
+    // let (mut receiver, cancel) = doc_fetch_repo_subscribe(
+    //     session.session_id,
+    //     "did:ng:o:Dn0QpE9_4jhta1mUWRl_LZh1SbXUkXfOB5eu38PNIk4A".to_string(),
+    // )
+    // .await?;
+
+    // cancel();
+
+    // while let Some(app_response) = receiver.next().await {
+    //     let (inserts, removes) =
+    //         nextgraph::verifier::read_triples_in_app_response_from_rust(app_response)?;
+    //     log_debug!("inserts {:?}", inserts);
+    //     log_debug!("removes {:?}", removes);
+    // }
+
+    // Then we should disconnect
+    user_disconnect(&user_id).await?;
+
+    // stop the session
+    session_stop(&user_id).await?;
+
+    // closes the wallet
+    wallet_close(&wallet_name).await?;
+
+    Ok(())
+}
diff --git a/nextgraph/examples/wallet-security-image-demo.png b/nextgraph/examples/wallet-security-image-demo.png
new file mode 100644
index 0000000000000000000000000000000000000000..9a6cee969e8b85d53785ddb53be71d268f224a07
GIT binary patch
literal 21006
[binary image data omitted]
literal 0
HcmV?d00001

diff --git a/nextgraph/examples/wallet-security-image-white.png b/nextgraph/examples/wallet-security-image-white.png
new file mode 100644
index 0000000000000000000000000000000000000000..dd7f476597d73ba90e9c1681c0f094405e2d4503
GIT binary patch
literal 437
[binary image data omitted]
literal 0
HcmV?d00001

diff --git a/nextgraph/src/lib.rs b/nextgraph/src/lib.rs
new file mode 100644
index 0000000..2052b25
--- /dev/null
+++ b/nextgraph/src/lib.rs
@@ -0,0 +1,137 @@
+#![doc(html_logo_url = "https://nextgraph.org/nextgraph-logo-192.png")]
+#![doc(issue_tracker_base_url = "https://git.nextgraph.org/NextGraph/nextgraph-rs/issues")]
+#![doc(html_favicon_url = "https://nextgraph.org/favicon.svg")]
+//! # NextGraph framework client library
+//!
+//! NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs.
+//!
+//! This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers.
+//!
+//! More info here [https://nextgraph.org](https://nextgraph.org). Documentation available here [https://docs.nextgraph.org](https://docs.nextgraph.org).
+//!
+//! ## LocalBroker, the entrypoint to the NextGraph network
+//!
+//! `local_broker` contains the API for controlling the Local Broker, which is a reduced instance of the network Broker.
+//! This is your entrypoint to the NextGraph network.
+//! It runs embedded in your client program, and once configured (by opening a Session), it can keep for you (on disk or in memory):
+//! - the blocks of the repos,
+//! - the connection(s) to your Server Broker,
+//! - the events that you send to the Overlay, if there is no connectivity (Outbox),
+//! - a reference to the Verifier.
+//!
+//! In addition, the API for creating and managing your wallet is provided here.
+//!
+//! The Rust API is used internally in the CLI, and for all the Tauri-based Apps.
+//!
+//! The same API is also made available in JavaScript for the browser (and is used by our webapp) and for nodejs. See the npm package [ng-sdk-js](https://www.npmjs.com/package/ng-sdk-js) or [nextgraph](https://www.npmjs.com/package/nextgraph)
+//!
+//! The library requires `async-std`, minimal version 1.12.0.
+//!
+//! See the [examples](https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/nextgraph/examples) for a quick start.
+//!
+//! ## In-memory
+//!
+//! With this config, no data will be persisted to disk.
+//!
+//! ```
+//! use nextgraph::local_broker::{init_local_broker, LocalBrokerConfig};
+//!
+//! #[async_std::main]
+//! async fn main() -> std::io::Result<()> {
+//!     // initialize the local_broker with in-memory config.
+//!     // all sessions will be lost when the program exits
+//!     init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await;
+//!
+//!     // see https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/nextgraph/examples/in_memory.md
+//!     // for a full example of what the Rust API gives you
+//!
+//!     Ok(())
+//! }
+//! ```
+//!
+//! ## Persistent
+//!
+//! With this config, the encrypted wallet, session information, outbox, and all user data will be saved locally, with encryption at rest.
+//!
+//! ```
+//! use std::env::current_dir;
+//! use std::fs::create_dir_all;
+//!
+//! use nextgraph::local_broker::{init_local_broker, LocalBrokerConfig};
+//!
+//! #[async_std::main]
+//! async fn main() -> std::io::Result<()> {
+//!     // get the current working directory
+//!     let mut current_path = current_dir()?;
+//!     current_path.push(".ng");
+//!     current_path.push("example");
+//!     create_dir_all(current_path.clone())?;
+//!
+//!     // initialize the local_broker with config to save to disk in a folder called `.ng/example` in the current directory
+//!     init_local_broker(Box::new(move || {
+//!         LocalBrokerConfig::BasePath(current_path.clone())
+//!     })).await;
+//!
+//!     // see https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/nextgraph/examples/persistent.md
+//!     // for a full example of what the Rust API gives you
+//!
+//!     Ok(())
+//! }
+//! ```
+
+pub mod local_broker;
+
+pub mod repo {
+    pub use ng_repo::*;
+}
+
+pub mod net {
+    pub use ng_net::*;
+}
+
+pub mod verifier {
+    pub use ng_verifier::site::*;
+    pub use ng_verifier::types::*;
+    pub mod protocol {
+        pub use ng_net::app_protocol::*;
+    }
+    pub use ng_verifier::prepare_app_response_for_js;
+    pub use ng_verifier::read_triples_in_app_response_from_rust;
+    pub use ng_verifier::triples_ser_to_json_string;
+}
+
+pub mod wallet {
+    pub use ng_wallet::*;
+}
+
+pub fn get_device_name() -> String {
+    let mut list: Vec<String> = Vec::with_capacity(3);
+    #[cfg(not(target_arch = "wasm32"))]
+    if let Ok(realname) = whoami::fallible::realname() {
+        list.push(realname);
+    } else {
+        #[cfg(not(target_arch = "wasm32"))]
+        if let Ok(username) = whoami::fallible::username() {
+            list.push(username);
+        }
+    }
+    if let Ok(devicename) = whoami::fallible::devicename() {
+        list.push(devicename);
+    } else {
+        #[cfg(not(target_arch = "wasm32"))]
+        if let Ok(hostname) = whoami::fallible::hostname() {
+            list.push(hostname);
+        } else {
+            if let Ok(distro) = whoami::fallible::distro() {
+                list.push(distro);
+            }
+        }
+    }
+    #[cfg(target_arch = "wasm32")]
+    if let Ok(distro) = whoami::fallible::distro() {
+        list.push(distro.replace("Unknown ", ""));
+    }
+
+    list.join(" ")
+}
+
+#[cfg(debug_assertions)]
+mod local_broker_dev_env;
diff --git a/nextgraph/src/local_broker.rs b/nextgraph/src/local_broker.rs
new file mode 100644
index 0000000..ea6fd6c
--- /dev/null
+++ b/nextgraph/src/local_broker.rs
@@ -0,0 +1,3209 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
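+
+// This module implements the LocalBroker described in the crate docs: it keeps
+// the wallets, the opened sessions, and the connection(s) to the Server Broker.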
+
+use core::fmt;
+use std::collections::{BTreeMap, HashMap};
+use std::fs::{read, remove_file, write};
+use std::path::PathBuf;
+
+use async_once_cell::OnceCell;
+use async_std::prelude::FutureExt;
+use async_std::sync::{Arc, Condvar, Mutex, RwLock};
+use futures::channel::mpsc;
+use futures::{SinkExt, StreamExt};
+use lazy_static::lazy_static;
+use once_cell::sync::Lazy;
+use pdf_writer::{Content, Finish, Name, Pdf, Rect, Ref, Str};
+use qrcode::{render::svg, QrCode};
+use serde_bare::to_vec;
+use serde_json::json;
+use svg2pdf::ConversionOptions;
+use zeroize::Zeroize;
+
+use ng_repo::block_storage::BlockStorage;
+use ng_repo::block_storage::HashMapBlockStorage;
+use ng_repo::errors::{NgError, ProtocolError};
+use ng_repo::log::*;
+use ng_repo::os_info::get_os_info;
+use ng_repo::types::*;
+use ng_repo::utils::{derive_key, encrypt_in_place, generate_keypair};
+
+use ng_net::app_protocol::*;
+use ng_net::broker::*;
+use ng_net::connection::{AppConfig, ClientConfig, IConnect, NoiseFSM, StartConfig};
+use ng_net::types::*;
+use ng_net::utils::{spawn_and_log_error, Receiver, ResultSend, Sender};
+use ng_net::{actor::*, actors::admin::*};
+
+use ng_verifier::types::*;
+use ng_verifier::verifier::Verifier;
+
+use ng_wallet::bip39::encode_mnemonic;
+use ng_wallet::emojis::{display_pazzle, encode_pazzle};
+use ng_wallet::{
+    create_wallet_first_step_v0, create_wallet_second_step_v0, display_mnemonic, types::*,
+};
+
+#[cfg(not(target_family = "wasm"))]
+use ng_client_ws::remote_ws::ConnectionWebSocket;
+#[cfg(target_family = "wasm")]
+use ng_client_ws::remote_ws_wasm::ConnectionWebSocket;
+#[cfg(not(any(target_family = "wasm", docsrs)))]
+use ng_storage_rocksdb::block_storage::RocksDbBlockStorage;
+
+#[doc(hidden)]
+#[derive(Debug, Clone)]
+pub struct HeadlessConfig {
+    // parse_ip_and_port_for(string, "verifier_server")
+    pub server_addr: BindAddress,
+    // decode_key(string)
+    pub server_peer_id: PubKey,
+    // decode_priv_key(string)
+    pub client_peer_key: Option<PrivKey>,
+    pub admin_user_key: Option<PrivKey>,
+}
+
+type JsStorageReadFn = dyn Fn(String) -> Result<String, NgError> + 'static + Sync + Send;
+type JsStorageWriteFn = dyn Fn(String, String) -> Result<(), NgError> + 'static + Sync + Send;
+type JsStorageDelFn = dyn Fn(String) -> Result<(), NgError> + 'static + Sync + Send;
+type JsCallback = dyn Fn() + 'static + Sync + Send;
+
+#[doc(hidden)]
+pub struct JsStorageConfig {
+    pub local_read: Box<JsStorageReadFn>,
+    pub local_write: Box<JsStorageWriteFn>,
+    pub session_read: Arc<Box<JsStorageReadFn>>,
+    pub session_write: Arc<Box<JsStorageWriteFn>>,
+    pub session_del: Arc<Box<JsStorageDelFn>>,
+    pub clear: Arc<Box<JsCallback>>,
+    pub is_browser: bool,
+}
+
+impl JsStorageConfig {
+    fn get_js_storage_config(&self) -> JsSaveSessionConfig {
+        let session_read2 = Arc::clone(&self.session_read);
+        let session_write2 = Arc::clone(&self.session_write);
+        let session_read3 = Arc::clone(&self.session_read);
+        let session_write3 = Arc::clone(&self.session_write);
+        let session_read4 = Arc::clone(&self.session_read);
+        let session_del = Arc::clone(&self.session_del);
+        JsSaveSessionConfig {
+            last_seq_function: Box::new(move |peer_id: PubKey, qty: u16| -> Result<u64, NgError> {
+                let res = (session_read2)(format!("ng_peer_last_seq@{}", peer_id));
+                let val = match res {
+                    Ok(old_str) => {
+                        let decoded = base64_url::decode(&old_str)
+                            .map_err(|_| NgError::SerializationError)?;
+                        match serde_bare::from_slice(&decoded)? {
+                            SessionPeerLastSeq::V0(old_val) => old_val,
+                            _ => unimplemented!(),
+                        }
+                    }
+                    Err(_) => 0,
+                };
+                if qty > 0 {
+                    let new_val = val + qty as u64;
+                    let spls = SessionPeerLastSeq::V0(new_val);
+                    let ser = serde_bare::to_vec(&spls)?;
+                    // saving the new val
+                    let encoded = base64_url::encode(&ser);
+                    (session_write2)(format!("ng_peer_last_seq@{}", peer_id), encoded)?;
+                }
+                Ok(val)
+            }),
+            outbox_write_function: Box::new(
+                move |peer_id: PubKey, seq: u64, event: Vec<u8>| -> Result<(), NgError> {
+                    let seq_str = format!("{}", seq);
+                    let res = (session_read3)(format!("ng_outboxes@{}@start", peer_id));
+                    let start = match res {
+                        Err(_) => {
+                            (session_write3)(format!("ng_outboxes@{}@start", peer_id), seq_str)?;
+                            seq
+                        }
+                        Ok(start_str) => start_str
+                            .parse::<u64>()
+                            .map_err(|_| NgError::InvalidFileFormat)?,
+                    };
+                    let idx = seq - start;
+                    let idx_str = format!("{:05}", idx);
+                    let encoded = base64_url::encode(&event);
+                    (session_write3)(format!("ng_outboxes@{}@{idx_str}", peer_id), encoded)
+                },
+            ),
+            outbox_read_function: Box::new(
+                move |peer_id: PubKey| -> Result<Vec<Vec<u8>>, NgError> {
+                    let start_key = format!("ng_outboxes@{}@start", peer_id);
+                    //log_info!("search start key {}", start_key);
+                    let res = (session_read4)(start_key.clone());
+                    let _start = match res {
+                        Err(_) => return Err(NgError::JsStorageKeyNotFound),
+                        Ok(start_str) => start_str
+                            .parse::<u64>()
+                            .map_err(|_| NgError::InvalidFileFormat)?,
+                    };
+                    let mut idx: u64 = 0;
+                    let mut result = vec![];
+                    loop {
+                        let idx_str = format!("{:05}", idx);
+                        let str = format!("ng_outboxes@{}@{idx_str}", peer_id);
+                        //log_info!("search key {}", str);
+                        let res = (session_read4)(str.clone());
+                        let res = match res {
+                            Err(_) => break,
+                            Ok(res) => res,
+                        };
+                        (session_del)(str)?;
+                        let decoded =
+                            base64_url::decode(&res).map_err(|_| NgError::SerializationError)?;
+                        result.push(decoded);
+                        idx += 1;
+                    }
+                    (session_del)(start_key)?;
+                    Ok(result)
+                },
+            ),
+        }
+    }
+}
+
+impl fmt::Debug for JsStorageConfig {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "JsStorageConfig. is_browser {}", self.is_browser)
+    }
+}
+
+/// Configuration for the LocalBroker. Must be returned by a function or closure passed to [init_local_broker]
+#[derive(Debug)]
+pub enum LocalBrokerConfig {
+    /// The local broker will not save any wallet, session or user data
+    InMemory,
+    /// The local broker will save all wallets, sessions and user data on disk, in the provided `Path`
+    BasePath(PathBuf),
+    #[doc(hidden)]
+    /// used internally for the JS SDK
+    JsStorage(JsStorageConfig),
+    /// Does not handle wallets and will only create remote sessions from credentials.
+    /// Only one websocket connection will be established to a predefined verifier (given in the config)
+    #[doc(hidden)]
+    Headless(HeadlessConfig),
+}
+
+impl LocalBrokerConfig {
+    pub fn is_in_memory(&self) -> bool {
+        match self {
+            Self::InMemory => true,
+            _ => false,
+        }
+    }
+    pub fn is_persistent(&self) -> bool {
+        match self {
+            Self::BasePath(_) => true,
+            _ => false,
+        }
+    }
+    #[doc(hidden)]
+    pub fn is_js(&self) -> bool {
+        match self {
+            Self::JsStorage(_) => true,
+            _ => false,
+        }
+    }
+    #[doc(hidden)]
+    pub fn headless_config(&self) -> &HeadlessConfig {
+        match self {
+            Self::Headless(c) => &c,
+            _ => panic!("don't call headless_config if not in Headless config"),
+        }
+    }
+    #[doc(hidden)]
+    pub fn js_config(&self) -> Option<&JsStorageConfig> {
+        match self {
+            Self::JsStorage(c) => Some(c),
+            _ => None,
+        }
+    }
+    #[cfg(not(target_family = "wasm"))]
+    fn compute_path(&self, dir: &String) -> Result<PathBuf, NgError> {
+        match self {
+            Self::BasePath(path) => {
+                let mut new_path = path.clone();
+                new_path.push(dir);
+                Ok(new_path)
+            }
+            _ => Err(NgError::InvalidArgument),
+        }
+    }
+}
+
+#[derive(Debug)]
+/// used to initiate a session at a local broker V0
+pub struct SessionConfigV0 {
+    pub user_id: UserId,
+    pub wallet_name: String,
+    pub verifier_type: VerifierType,
+}
+
+#[derive(Debug)]
+/// used to initiate a session at a local broker
+pub enum SessionConfig {
+    V0(SessionConfigV0),
+    WithCredentialsV0(WithCredentialsV0),
+    HeadlessV0(UserId),
+}
+
+#[derive(Debug)]
+/// used to initiate a session at a local broker with credentials
+pub struct WithCredentialsV0 {
+    pub credentials: Credentials,
+    pub verifier_type: VerifierType,
+    pub detach: bool, // only used if remote verifier
+}
+
+//trait ISession {}
+
+#[derive(Debug)]
+struct RemoteSession {
+    #[allow(dead_code)]
+    config: SessionConfig,
+    remote_peer_id: DirectPeerId,
+    user_id: UserId,
+}
+
+impl RemoteSession {
+    pub(crate) async fn send_request(&self, req: AppRequest) -> Result<AppResponse, NgError> {
+        match BROKER
+            .read()
+            .await
+            .request::<AppRequest, AppResponse>(
+                &Some(self.user_id),
+                &Some(self.remote_peer_id),
+                req,
+            )
+            .await
+        {
+            Err(e) => Err(e),
+            Ok(SoS::Stream(_)) => Err(NgError::InvalidResponse),
+            Ok(SoS::Single(res)) => Ok(res),
+        }
+    }
+
+    pub(crate) async fn send_request_stream(
+        &self,
+        req: AppRequest,
+    ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
+        match BROKER
+            .read()
+            .await
+            .request::<AppRequest, AppResponse>(
+                &Some(self.user_id),
+                &Some(self.remote_peer_id),
+                req,
+            )
+            .await
+        {
+            Err(e) => Err(e),
+            Ok(SoS::Single(_)) => Err(NgError::InvalidResponse),
+            Ok(SoS::Stream(stream)) => {
+                let fnonce = Box::new(move || {
+                    // stream.close();
+                    //TODO: implement CancelStream in AppRequest
+                });
+                Ok((stream, fnonce))
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+struct HeadlessSession {
+    user_id: UserId,
+}
+
+impl HeadlessSession {}
+
+#[derive(Debug)]
+struct Session {
+    config: SessionConfig,
+    peer_key: PrivKey,
+    #[allow(dead_code)]
+    last_wallet_nonce: u64,
+    verifier: Verifier,
+}
+
+impl SessionConfig {
+    pub fn user_id(&self) -> UserId {
+        match self {
+            Self::V0(v0) => v0.user_id,
+            Self::WithCredentialsV0(creds) => creds.credentials.user_key.to_pub(),
+            Self::HeadlessV0(hl) => hl.clone(),
+        }
+    }
+    pub fn wallet_name(&self) -> String {
+        match self {
+            Self::V0(v0) => v0.wallet_name.clone(),
+            Self::WithCredentialsV0(_) => panic!("don't call wallet_name on a WithCredentialsV0"),
+            Self::HeadlessV0(_) => panic!("don't call wallet_name on a HeadlessV0"),
+        }
+    }
+    pub fn verifier_type(&self) -> &VerifierType {
+        match self {
+            Self::V0(v0) => &v0.verifier_type,
+            Self::WithCredentialsV0(creds) => &creds.verifier_type,
+            Self::HeadlessV0(_) => panic!("don't call verifier_type on a HeadlessV0"),
+        }
+    }
+    pub fn is_remote(&self) -> bool {
+        match self {
+            Self::V0(v0) => v0.verifier_type.is_remote(),
+            Self::WithCredentialsV0(creds) => creds.verifier_type.is_remote(),
+            Self::HeadlessV0(_) => true,
+        }
+    }
+    pub fn set_verifier_type(&mut self, vt: VerifierType) {
+        match self {
+            Self::V0(v0) => v0.verifier_type = vt,
+            Self::WithCredentialsV0(creds) => creds.verifier_type = vt,
+            Self::HeadlessV0(_) => panic!("don't call set_verifier_type on a HeadlessV0"),
+        }
+    }
+
+    pub fn is_with_credentials(&self) -> bool {
+        match self {
+            Self::WithCredentialsV0(_) => true,
+            Self::HeadlessV0(_) | Self::V0(_) => false,
+        }
+    }
+
+    pub fn is_memory(&self) -> bool {
+        match self {
+            Self::V0(v0) => v0.verifier_type.is_memory(),
+            Self::WithCredentialsV0(creds) => creds.verifier_type.is_memory(),
+            Self::HeadlessV0(_) => true,
+        }
+    }
+    /// Creates a new in-memory SessionConfig, from a UserId and a wallet name.
+    ///
+    /// The returned config should be passed to [session_start]
+    pub fn new_in_memory(user_id: &UserId, wallet_name: &String) -> Self {
+        SessionConfig::V0(SessionConfigV0 {
+            user_id: user_id.clone(),
+            wallet_name: wallet_name.clone(),
+            verifier_type: VerifierType::Memory,
+        })
+    }
+
+    /// Creates a new SessionConfig that tentatively saves data and/or session, from a UserId and a wallet name.
+    ///
+    /// The session might be downgraded to in-memory if the wallet was added with the in_memory option.
+    /// The returned config should be passed to [session_start]
+    pub fn new_save(user_id: &UserId, wallet_name: &String) -> Self {
+        SessionConfig::V0(SessionConfigV0 {
+            user_id: user_id.clone(),
+            wallet_name: wallet_name.clone(),
+            verifier_type: VerifierType::Save,
+        })
+    }
+
+    /// Creates a new remote SessionConfig, from a UserId, a wallet name and an optional remote peer_id.
+    ///
+    /// The returned config should be passed to [session_start]
+    pub fn new_remote(
+        user_id: &UserId,
+        wallet_name: &String,
+        remote_verifier_peer_id: Option<DirectPeerId>,
+    ) -> Self {
+        SessionConfig::V0(SessionConfigV0 {
+            user_id: user_id.clone(),
+            wallet_name: wallet_name.clone(),
+            verifier_type: VerifierType::Remote(remote_verifier_peer_id),
+        })
+    }
+
+    #[doc(hidden)]
+    pub fn new_headless(user_id: UserId) -> Self {
+        SessionConfig::HeadlessV0(user_id)
+    }
+
+    fn force_in_memory(&mut self) {
+        match self {
+            Self::V0(v0) => v0.verifier_type = VerifierType::Memory,
+            Self::WithCredentialsV0(_) | Self::HeadlessV0(_) => {
+                panic!("don't call force_in_memory on a WithCredentialsV0 or HeadlessV0")
+            }
+        }
+    }
+
+    pub fn new_for_local_broker_config(
+        user_id: &UserId,
+        wallet_name: &String,
+        local_broker_config: &LocalBrokerConfig,
+        in_memory: bool,
+    ) -> Result<Self, NgError> {
+        Ok(SessionConfig::V0(SessionConfigV0 {
+            user_id: user_id.clone(),
+            wallet_name: wallet_name.clone(),
+            verifier_type: match local_broker_config {
+                LocalBrokerConfig::InMemory => {
+                    if !in_memory {
+                        return Err(NgError::CannotSaveWhenInMemoryConfig);
+                    }
+                    VerifierType::Memory
+                }
+                LocalBrokerConfig::BasePath(_) | LocalBrokerConfig::JsStorage(_) => match in_memory
+                {
+                    true => VerifierType::Memory,
+                    false => VerifierType::Save,
+                },
+                LocalBrokerConfig::Headless(_) => {
+                    panic!("don't call wallet_create on a Headless LocalBroker")
+                }
+            },
+        }))
+    }
+
+    fn valid_verifier_config_for_local_broker_config(
+        &mut self,
+        local_broker_config: &LocalBrokerConfig,
+    ) -> Result<(), NgError> {
+        if match self {
+            Self::HeadlessV0(_) => {
+                panic!("don't call session_start on a Headless LocalBroker")
+            }
+            _ => match local_broker_config {
+                LocalBrokerConfig::InMemory => {
+                    self.set_verifier_type(VerifierType::Memory);
+                    true
+                }
+                LocalBrokerConfig::JsStorage(js_config) => match self.verifier_type() {
+                    VerifierType::Memory | VerifierType::Remote(_) => true,
+                    VerifierType::Save => true,
+                    VerifierType::WebRocksDb => js_config.is_browser,
+                },
+                LocalBrokerConfig::BasePath(_) => match self.verifier_type() {
+                    VerifierType::Save | VerifierType::Remote(_) => true,
+                    VerifierType::Memory => true,
+                    _ => false,
+                },
+                LocalBrokerConfig::Headless(_) => {
+                    panic!("don't call session_start on a Headless LocalBroker")
+                }
+            },
+        } {
+            Ok(())
+        } else {
+            Err(NgError::InvalidArgument)
+        }
+    }
+}
+
+// impl fmt::Debug for SessionConfigV0 {
+//     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+//         write!(
+//             f,
+//             "SessionConfigV0 user={} wallet={}",
+//             self.user_id, self.wallet_name
+//         )
+//     }
+// }
+
+// impl fmt::Debug for SessionConfig {
+//     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+//         match self {
+//             SessionConfig::V0(v0) => v0.fmt(f),
+//         }
+//     }
+// }
+
+struct OpenedWallet {
+    wallet: SensitiveWallet,
+    block_storage: Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>,
+}
+
+impl fmt::Debug for OpenedWallet {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "OpenedWallet.\nwallet {:?}", self.wallet)
+    }
+}
+
+struct LocalBroker {
+    pub config: LocalBrokerConfig,
+
+    pub wallets: HashMap<String, LocalWalletStorageV0>,
+
+    pub opened_wallets: HashMap<String, OpenedWallet>,
+
+    pub sessions: HashMap<UserId, SessionPeerStorageV0>,
+
+    // use even session_ids for remote_session, odd session_ids for opened_sessions
+    pub opened_sessions: HashMap<UserId, u64>,
+
+    pub opened_sessions_list: Vec<Option<Session>>,
+    pub remote_sessions_list: Vec<Option<RemoteSession>>,
+
+    pub headless_sessions: BTreeMap<u64, HeadlessSession>,
+    pub headless_connected_to_remote_broker: bool,
+
+    tauri_streams: HashMap<String, CancelFn>,
+
+    disconnections_sender: Sender<String>,
+    disconnections_receiver: Option<Receiver<String>>,
+    pump_cond: Option<Arc<(Mutex<bool>, Condvar)>>,
+}
+
+impl fmt::Debug for LocalBroker {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(f, "LocalBroker.\nconfig {:?}", self.config)?;
+        writeln!(f, "wallets {:?}", self.wallets)?;
+        writeln!(f, "opened_wallets {:?}", self.opened_wallets)?;
+        writeln!(f, "sessions {:?}", self.sessions)?;
+        writeln!(f, "opened_sessions {:?}", self.opened_sessions)?;
+        writeln!(f, "opened_sessions_list {:?}", self.opened_sessions_list)
+    }
+}
+
+#[doc(hidden)]
+#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))]
+#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)]
+pub trait ILocalBroker: Send + Sync + EActor {
+    async fn deliver(&mut self, event: Event, overlay: OverlayId, user: UserId);
+    async fn user_disconnected(&mut self, user_id: UserId);
+}
+
+// used to deliver events to the verifier on Clients, or on Cores that have Verifiers attached.
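+// deliver() hands an incoming Event for a given overlay to the Verifier of that
+// user's opened session, while user_disconnected() tells that Verifier the
+// connection was lost and notifies the disconnections channel.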
+#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))]
+#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)]
+impl ILocalBroker for LocalBroker {
+    async fn deliver(&mut self, event: Event, overlay: OverlayId, user_id: UserId) {
+        if let Some(session) = self.get_mut_session_for_user(&user_id) {
+            session.verifier.deliver(event, overlay).await;
+        }
+    }
+    async fn user_disconnected(&mut self, user_id: UserId) {
+        if let Some(session) = self.get_mut_session_for_user(&user_id) {
+            session.verifier.connection_lost();
+            let _ = self.disconnections_sender.send(user_id.to_string()).await;
+        }
+    }
+}
+
+// this is used if an Actor does a BROKER.local_broker.respond
+// it happens when a remote peer is doing a request on the verifier
+#[async_trait::async_trait]
+impl EActor for LocalBroker {
+    async fn respond(
+        &mut self,
+        msg: ProtocolMessage,
+        fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> Result<(), ProtocolError> {
+        // search opened_sessions by user_id of fsm
+        let user = fsm.lock().await.user_id()?;
+        let session = self
+            .get_mut_session_for_user(&user)
+            .ok_or(ProtocolError::ActorError)?;
+        session.verifier.respond(msg, fsm).await
+    }
+}
+
+async fn pump(
+    mut reader: Receiver<LocalBrokerMessage>,
+    pair: Arc<(Mutex<bool>, Condvar)>,
+) -> ResultSend<()> {
+    while let Some(message) = reader.next().await {
+        let (lock, cvar) = &*pair;
+        let mut running = lock.lock().await;
+        while !*running {
+            running = cvar.wait(running).await;
+        }
+
+        match message {
+            LocalBrokerMessage::Deliver {
+                event,
+                overlay,
+                user,
+            } => {
+                let mut broker = match LOCAL_BROKER.get() {
+                    None | Some(Err(_)) => return Err(Box::new(NgError::LocalBrokerNotInitialized)),
+                    Some(Ok(broker)) => broker.write().await,
+                };
+                broker.deliver(event, overlay, user).await
+            },
+            LocalBrokerMessage::Inbox { msg, user_id, from_queue } => {
+                async_std::task::spawn_local(async move {
+                    let mut broker = match LOCAL_BROKER.get() {
+                        None | Some(Err(_)) => return Err(Box::new(NgError::LocalBrokerNotInitialized)),
+                        Some(Ok(broker)) => broker.write().await,
+                    };
+                    if let Some(session) = broker.get_mut_session_for_user(&user_id) {
+                        session.verifier.inbox(&msg, from_queue).await;
+                    }
+                    Ok(())
+                }).await?;
+            },
+            LocalBrokerMessage::Disconnected { user_id } => {
+                let mut broker = match LOCAL_BROKER.get() {
+                    None | Some(Err(_)) => return Err(Box::new(NgError::LocalBrokerNotInitialized)),
+                    Some(Ok(broker)) => broker.write().await,
+                };
+                broker.user_disconnected(user_id).await
+            },
+        }
+    }
+
+    log_debug!("END OF PUMP");
+    Ok(())
+}
+
+impl LocalBroker {
+    async fn stop_pump(&self) {
+        let (lock, cvar) = self.pump_cond.as_deref().as_ref().unwrap();
+        let mut running = lock.lock().await;
+        *running = false;
+        cvar.notify_one();
+    }
+
+    async fn start_pump(&self) {
+        let (lock, cvar) = self.pump_cond.as_deref().as_ref().unwrap();
+        let mut running = lock.lock().await;
+        *running = true;
+        cvar.notify_one();
+    }
+
+    fn init_pump(&mut self, broker_pump_receiver: Receiver<LocalBrokerMessage>) {
+        let pair = Arc::new((Mutex::new(false), Condvar::new()));
+        let pair2 = Arc::clone(&pair);
+        self.pump_cond = Some(pair);
+        spawn_and_log_error(pump(broker_pump_receiver, pair2));
+    }
+    // fn storage_path_for_user(&self, user_id: &UserId) -> Option<PathBuf> {
+    //     match &self.config {
+    //         LocalBrokerConfig::InMemory | LocalBrokerConfig::JsStorage(_) => None,
+    //         LocalBrokerConfig::BasePath(base) => {
+    //             let mut path = base.clone();
+    //             path.push(format!("user{}", user_id.to_hash_string()));
+    //             Some(path)
+    //         }
+    //     }
+    // }
+
+    /// helper function to store the sender of a tauri stream in order to be able to cancel it later on
+    /// only used in Tauri, not used in the JS SDK
+    fn tauri_stream_add(&mut self, stream_id: String, cancel: CancelFn) {
+        self.tauri_streams.insert(stream_id, cancel);
+    }
+
+    /// helper function to cancel a tauri stream
+    /// only used in Tauri, not used in the JS SDK
+    fn tauri_stream_cancel(&mut self, stream_id: String) {
+        let s = self.tauri_streams.remove(&stream_id);
+        if let Some(cancel) = s {
+            cancel();
+        }
+    }
+
+    async fn connect_remote_broker(&mut self) -> Result<(), NgError> {
+        self.err_if_not_headless()?;
+
+        if self.headless_connected_to_remote_broker {
+            return Ok(());
+        }
+
+        let info = get_client_info(ClientType::NodeService);
+
+        let config = self.config.headless_config();
+
+        BROKER
+            .write()
+            .await
+            .connect(
+                Arc::new(Box::new(ConnectionWebSocket {})),
+                config.client_peer_key.to_owned().unwrap(),
+                config.client_peer_key.as_ref().unwrap().to_pub(),
+                config.server_peer_id,
+                StartConfig::App(AppConfig {
+                    user_priv: None,
+                    info,
+                    addr: config.server_addr,
+                }),
+            )
+            .await?;
+
+        self.headless_connected_to_remote_broker = true;
+
+        Ok(())
+    }
+
+    pub(crate) async fn send_request_headless<
+        A: Into<ProtocolMessage> + std::fmt::Debug + Sync + Send + 'static,
+        B: TryFrom<ProtocolMessage> + std::fmt::Debug + Sync + Send + 'static,
+    >(
+        &self,
+        req: A,
+    ) -> Result<B, NgError> {
+        self.err_if_not_headless()?;
+
+        match BROKER
+            .read()
+            .await
+            .request::<A, B>(
+                &None,
+                &Some(self.config.headless_config().server_peer_id),
+                req,
+            )
+            .await
+        {
+            Err(e) => Err(e),
+            Ok(SoS::Stream(_)) => Err(NgError::InvalidResponse),
+            Ok(SoS::Single(res)) => Ok(res),
+        }
+    }
+
+    #[allow(dead_code)]
+    pub(crate) async fn send_request_stream_headless<
+        A: Into<ProtocolMessage> + std::fmt::Debug + Sync + Send + 'static,
+        B: TryFrom<ProtocolMessage> + std::fmt::Debug + Sync + Send + 'static,
+    >(
+        &self,
+        req: A,
+    ) -> Result<(Receiver<B>, CancelFn), NgError> {
+        self.err_if_not_headless()?;
+
+        match BROKER
+            .read()
+            .await
+            .request::<A, B>(
+                &None,
+                &Some(self.config.headless_config().server_peer_id),
+                req,
+            )
+            .await
+        {
+            Err(e) => Err(e),
+            Ok(SoS::Single(_)) => Err(NgError::InvalidResponse),
+            Ok(SoS::Stream(stream)) => {
+                let fnonce = Box::new(move || {
+                    // stream.close();
+                    //TODO: implement CancelStream in AppRequest
+                });
+                Ok((stream, fnonce))
+            }
+        }
+    }
+
+    fn err_if_headless(&self) -> Result<(), NgError> {
+        match self.config {
+            LocalBrokerConfig::Headless(_) => Err(NgError::LocalBrokerIsHeadless),
+            _ => Ok(()),
+        }
+    }
+
+    fn err_if_not_headless(&self) -> Result<(), NgError> {
+        match self.config {
+            LocalBrokerConfig::Headless(_) => Ok(()),
+            _ => Err(NgError::LocalBrokerIsHeadless),
+        }
+    }
+
+    fn get_mut_session_for_user(&mut self, user: &UserId) -> Option<&mut Session> {
+        match self.opened_sessions.get(user) {
+            Some(idx) => {
+                let idx = Self::to_real_session_id(*idx);
+                if self.opened_sessions_list.len() > idx as usize {
+                    self.opened_sessions_list[idx as usize].as_mut()
+                } else {
+                    None
+                }
+            }
+            None => None,
+        }
+    }
+
+    fn is_remote_session(session_id: u64) -> bool {
+        (session_id & 1) == 0
+    }
+
+    fn is_local_session(session_id: u64) -> bool {
+        !Self::is_remote_session(session_id)
+    }
+
+    fn to_real_session_id(session_id: u64) -> u64 {
+        (session_id) >> 1
+    }
+
+    #[allow(dead_code)]
+    fn to_external_session_id(session_id: u64, is_remote: bool) -> u64 {
+        let mut ext = (session_id) << 1;
+        if !is_remote {
+            ext += 1;
+        }
+        ext
+    }
+
+    fn user_to_local_session_id_for_mut(&self, user_id: &UserId) -> Result<usize, NgError> {
+        let session_id = self
+            .opened_sessions
+            .get(user_id)
+            .ok_or(NgError::SessionNotFound)?;
+        self.get_local_session_id_for_mut(*session_id)
+    }
+
+    fn get_local_session_id_for_mut(&self, session_id: u64) -> Result<usize, NgError> {
+        let _ = Self::is_local_session(session_id)
+            .then_some(true)
+            .ok_or(NgError::SessionNotFound)?;
+        let session_id = Self::to_real_session_id(session_id) as usize;
+        if session_id >= self.opened_sessions_list.len() {
+            return Err(NgError::InvalidArgument);
+        }
+        Ok(session_id)
+    }
+
+    fn get_real_session_id_for_mut(&self, session_id: u64) -> Result<(usize, bool), NgError> {
+        let is_remote = Self::is_remote_session(session_id);
+        let session_id = Self::to_real_session_id(session_id) as usize;
+        if is_remote {
+            if session_id >= self.remote_sessions_list.len() {
+                return Err(NgError::InvalidArgument);
+            }
+        } else {
+            if session_id >= self.opened_sessions_list.len() {
+                return Err(NgError::InvalidArgument);
+            }
+        }
+        Ok((session_id, is_remote))
+    }
+
+    fn get_session(&self, session_id: u64) -> Result<&Session, NgError> {
+        let _ = Self::is_local_session(session_id)
+            .then_some(true)
+            .ok_or(NgError::SessionNotFound)?;
+        let session_id = Self::to_real_session_id(session_id);
+        if session_id as usize >= self.opened_sessions_list.len() {
+            return Err(NgError::InvalidArgument);
+        }
+        self.opened_sessions_list[session_id as usize]
+            .as_ref()
+            .ok_or(NgError::SessionNotFound)
+    }
+
+    #[allow(dead_code)]
+    fn get_headless_session(&self, session_id: u64) -> Result<&HeadlessSession, NgError> {
+        self.err_if_not_headless()?;
+
+        self.headless_sessions
+            .get(&session_id)
+            .ok_or(NgError::SessionNotFound)
+    }
+
+    #[allow(dead_code)]
+    fn get_headless_session_by_user(&self, user_id: &UserId) -> Result<&HeadlessSession, NgError> {
+        self.err_if_not_headless()?;
+
+        let session_id = self
+            .opened_sessions
+            .get(user_id)
+            .ok_or(NgError::SessionNotFound)?;
+
+        self.get_headless_session(*session_id)
+    }
+
+    fn remove_headless_session(
+        &mut self,
+        user_id: &UserId,
+    ) -> Result<(u64, HeadlessSession), NgError> {
+        self.err_if_not_headless()?;
+
+        let session_id = self
+            .opened_sessions
+            .remove(user_id)
+            .ok_or(NgError::SessionNotFound)?;
+
+        let session = self
+            .headless_sessions
+            .remove(&session_id)
+            .ok_or(NgError::SessionNotFound)?;
+        Ok((session_id, session))
+    }
+
+    #[allow(dead_code)]
+    fn get_remote_session(&self, session_id: u64) -> Result<&RemoteSession, NgError> {
+        let _ = Self::is_remote_session(session_id)
+            .then_some(true)
+            .ok_or(NgError::SessionNotFound)?;
+        let session_id = Self::to_real_session_id(session_id);
+        if session_id as usize >= self.remote_sessions_list.len() {
+            return Err(NgError::InvalidArgument);
+        }
+        self.remote_sessions_list[session_id as usize]
+            .as_ref()
+            .ok_or(NgError::SessionNotFound)
+    }
+
+    pub fn get_site_store_of_session(
+        &self,
+        session: &Session,
+        store_type: SiteStoreType,
+    ) -> Result<PubKey, NgError> {
+        self.err_if_headless()?;
+
+        match self.opened_wallets.get(&session.config.wallet_name()) {
+            Some(opened_wallet) => {
+                let user_id = session.config.user_id();
+                let site = opened_wallet.wallet.site(&user_id)?;
+                Ok(site.get_site_store_id(store_type))
+            }
+            None => Err(NgError::WalletNotFound),
+        }
+    }
+
+    async fn verifier_config_type_from_session_config(
+        &self,
+        config: &SessionConfig,
+    ) -> Result<VerifierConfigType, NgError> {
+        Ok(match config {
+            SessionConfig::HeadlessV0(_) => {
+                panic!("don't call verifier_config_type_from_session_config with a SessionConfig::HeadlessV0");
+            }
+            _ => match (config.verifier_type(), &self.config) {
+                (VerifierType::Memory, LocalBrokerConfig::InMemory) => VerifierConfigType::Memory,
+                (VerifierType::Memory, LocalBrokerConfig::BasePath(_)) => {
+                    VerifierConfigType::Memory
+                }
+                #[cfg(all(not(target_family = "wasm")))]
+                (VerifierType::Save, LocalBrokerConfig::BasePath(base)) => {
+                    let mut path = base.clone();
+                    path.push(format!("user{}", config.user_id().to_hash_string()));
+                    VerifierConfigType::RocksDb(path)
+                }
+                (VerifierType::Remote(to), _) => VerifierConfigType::Remote(*to),
+                (VerifierType::WebRocksDb, _) => VerifierConfigType::WebRocksDb,
+                (VerifierType::Memory, LocalBrokerConfig::JsStorage(_)) => {
+                    VerifierConfigType::Memory
+                }
+                (VerifierType::Save, LocalBrokerConfig::JsStorage(js)) => {
+                    VerifierConfigType::JsSaveSession(js.get_js_storage_config())
+                }
+                (_, _) => panic!("invalid combination in verifier_config_type_from_session_config"),
+            },
+        })
+    }
+
+    fn get_wallet_and_session(
+        &self,
+        user_id: &UserId,
+    ) -> Result<(&SensitiveWallet, &Session), NgError> {
+        let session_idx = self.user_to_local_session_id_for_mut(user_id)?;
+        let session = self.opened_sessions_list[session_idx]
+            .as_ref()
+            .ok_or(NgError::SessionNotFound)?;
+        let wallet = &match &session.config {
+            SessionConfig::WithCredentialsV0(_) | SessionConfig::HeadlessV0(_) => {
+                panic!("don't call get_wallet_and_session on a Headless or WithCredentials config")
+            }
+            SessionConfig::V0(v0) => self
+                .opened_wallets
+                .get(&v0.wallet_name)
+                .ok_or(NgError::WalletNotFound),
+        }?
+        .wallet;
+
+        Ok((wallet, session))
+    }
+
+    fn get_session_mut(&mut self, user_id: &UserId) -> Result<&mut Session, NgError> {
+        let session_idx = self.user_to_local_session_id_for_mut(user_id)?;
+        self.opened_sessions_list[session_idx]
+            .as_mut()
+            .ok_or(NgError::SessionNotFound)
+    }
+
+    async fn disconnect_session(&mut self, user_id: &PubKey) -> Result<(), NgError> {
+        match self.opened_sessions.get(user_id) {
+            Some(session) => {
+                let session = self.get_local_session_id_for_mut(*session)?;
+                // TODO: change the logic here once it will be possible to have several users connected at the same time
+                Broker::close_all_connections().await;
+                let session = self.opened_sessions_list[session]
+                    .as_mut()
+                    .ok_or(NgError::SessionNotFound)?;
+                session.verifier.connection_lost();
+            }
+            None => {}
+        }
+        Ok(())
+    }
+
+    async fn wallet_was_opened(
+        &mut self,
+        mut wallet: SensitiveWallet,
+    ) -> Result<ClientV0, NgError> {
+        let broker = self;
+
+        //log_info!("wallet_was_opened {}", wallet.id());
+
+        match broker.opened_wallets.get(&wallet.id()) {
+            Some(opened_wallet) => {
+                return Ok(opened_wallet.wallet.client().to_owned().unwrap());
+            }
+            None => {} //Err(NgError::WalletAlreadyOpened);
+        }
+        let wallet_id = wallet.id();
+        let lws = match broker.wallets.get(&wallet_id) {
+            Some(lws) => {
+                if wallet.client().is_none() {
+                    // this case happens when the wallet is opened and not when it is imported (as the client is already there)
+                    wallet.set_client(lws.to_client_v0(wallet.privkey())?);
+                }
+                lws
+            }
+            None => {
+                return Err(NgError::WalletNotFound);
+            }
+        };
+        let block_storage = if lws.in_memory {
+            Arc::new(std::sync::RwLock::new(HashMapBlockStorage::new()))
+                as Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>
+        } else {
+            #[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+            {
+                let key_material = wallet
+                    .client()
+                    .as_ref()
+                    .unwrap()
+                    .sensitive_client_storage
+                    .priv_key
+                    .slice();
+                let path = broker.config.compute_path(&format!(
+                    "block{}",
+                    wallet.client().as_ref().unwrap().id.to_hash_string()
+                ))?;
+                let key: [u8; 32] =
+                    derive_key("NextGraph Client BlockStorage BLAKE3 key", key_material);
+
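+                // the on-disk block storage is opened with this derived key, so blocks
+                // at rest are encrypted with key material that never leaves the client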
+                Arc::new(std::sync::RwLock::new(RocksDbBlockStorage::open(
+                    &path, key,
+                )?))
+                    as Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>
+            }
+            #[cfg(any(target_family = "wasm", docsrs))]
+            {
+                Arc::new(std::sync::RwLock::new(HashMapBlockStorage::new()))
+                    as Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>
+            }
+        };
+        let client = wallet.client().to_owned().unwrap();
+        let opened_wallet = OpenedWallet {
+            wallet,
+            block_storage,
+        };
+        //log_info!("inserted wallet_was_opened {}", wallet_id);
+        broker.opened_wallets.insert(wallet_id, opened_wallet);
+        Ok(client)
+    }
+
+    fn add_session(&mut self, session: Session) -> Result<SessionInfo, NgError> {
+        let private_store_id = NuriV0::to_store_nuri_string(
+            &self.get_site_store_of_session(&session, SiteStoreType::Private)?,
+        );
+        let protected_store_id = NuriV0::to_store_nuri_string(
+            &self.get_site_store_of_session(&session, SiteStoreType::Protected)?,
+        );
+        let public_store_id = NuriV0::to_store_nuri_string(
+            &self.get_site_store_of_session(&session, SiteStoreType::Public)?,
+        );
+
+        let user_id = session.config.user_id();
+
+        self.opened_sessions_list.push(Some(session));
+        let mut idx = self.opened_sessions_list.len() - 1;
+        idx = idx << 1;
+        idx += 1;
+        self.opened_sessions.insert(user_id, idx as u64);
+
+        Ok(SessionInfo {
+            session_id: idx as u64,
+            user: user_id,
+            private_store_id,
+            protected_store_id,
+            public_store_id,
+        })
+    }
+
+    fn add_headless_session(&mut self, session: HeadlessSession) -> Result<SessionInfo, NgError> {
+        let user_id = session.user_id;
+
+        let mut first_available: u64 = 0;
+        for sess in self.headless_sessions.keys() {
+            if *sess != first_available + 1 {
+                break;
+            } else {
+                first_available += 1;
+            }
+        }
+        first_available += 1;
+
+        let ret = self.headless_sessions.insert(first_available, session);
+        assert!(ret.is_none());
+
+        self.opened_sessions.insert(user_id, first_available);
+
+        Ok(SessionInfo {
+            session_id: first_available,
+            user: user_id,
+            private_store_id: String::new(), // will be updated when the AppSessionStart reply arrives from broker
+            protected_store_id: String::new(),
+            public_store_id: String::new(),
+        })
+    }
+
+    async fn session_start(
+        &mut self,
+        mut config: SessionConfig,
+        user_priv_key: Option<PrivKey>,
+    ) -> Result<Session, NgError> {
+        let broker = self;
+
+        let wallet_name: String = config.wallet_name();
+
+        {
+            match broker.wallets.get(&wallet_name) {
+                Some(closed_wallet) => {
+                    if closed_wallet.in_memory {
+                        config.force_in_memory();
+                    }
+                }
+                None => return Err(NgError::WalletNotFound),
+            }
+        }
+
+        config.valid_verifier_config_for_local_broker_config(&broker.config)?;
+
+        let wallet_id: PubKey = (*wallet_name).try_into()?;
+        let user_id = config.user_id();
+
+        // log_info!("wallet_name {} {:?}", wallet_name, broker.opened_wallets);
+        match broker.opened_wallets.get(&wallet_name) {
+            None => return Err(NgError::WalletNotFound),
+            Some(opened_wallet) => {
+                let block_storage = Arc::clone(&opened_wallet.block_storage);
+                let credentials = match opened_wallet.wallet.individual_site(&user_id) {
+                    Some(creds) => creds,
+                    None => match user_priv_key {
+                        Some(user_pk) => (user_pk, None, None, None, None),
+                        None => return Err(NgError::NotFound),
+                    },
+                };
+
+                let client_storage_master_key = serde_bare::to_vec(
+                    &opened_wallet
+                        .wallet
+                        .client()
+                        .as_ref()
+                        .unwrap()
+                        .sensitive_client_storage
+                        .storage_master_key,
+                )
+                .unwrap();
+
+                let session = match broker.sessions.get(&user_id) {
+                    Some(session) => session,
+                    None => {
+                        // creating the session now
+                        if config.is_memory() {
+                            let session = SessionPeerStorageV0::new(user_id);
+                            broker.sessions.insert(user_id, session);
+                            broker.sessions.get(&user_id).unwrap()
+                        } else {
+                            // first check if there is a saved SessionWalletStorage
+                            let mut sws = match &broker.config {
+                                LocalBrokerConfig::InMemory => {
+                                    panic!("cannot open saved session")
+                                }
+                                LocalBrokerConfig::JsStorage(js_config) => {
+                                    // read session wallet storage from JsStorage
+                                    let res = (js_config.session_read)(format!(
+                                        "ng_wallet@{}",
+                                        wallet_name
+                                    ));
+                                    match res {
+                                        Ok(string) => {
+                                            let decoded = base64_url::decode(&string)
+                                                .map_err(|_| NgError::SerializationError)?;
+                                            Some(SessionWalletStorageV0::dec_session(
+                                                opened_wallet.wallet.privkey(),
+                                                &decoded,
+                                            )?)
+                                        }
+                                        Err(_) => None,
+                                    }
+                                }
+                                LocalBrokerConfig::BasePath(base_path) => {
+                                    // read session wallet storage from disk
+                                    let mut path = base_path.clone();
+                                    path.push("sessions");
+                                    path.push(format!("session{}", wallet_name.clone()));
+                                    let res = read(path);
+                                    if res.is_ok() {
+                                        Some(SessionWalletStorageV0::dec_session(
+                                            opened_wallet.wallet.privkey(),
+                                            &res.unwrap(),
+                                        )?)
+                                    } else {
+                                        None
+                                    }
+                                }
+                                LocalBrokerConfig::Headless(_) => {
+                                    panic!("don't call session_start on a Headless LocalBroker")
+                                }
+                            };
+                            let (session, new_sws) = match &mut sws {
+                                None => {
+                                    let (s, sws_ser) = SessionWalletStorageV0::create_new_session(
+                                        &wallet_id, user_id,
+                                    )?;
+                                    broker.sessions.insert(user_id, s);
+                                    (broker.sessions.get(&user_id).unwrap(), sws_ser)
+                                }
+                                Some(sws) => {
+                                    match sws.users.get(&user_id.to_string()) {
+                                        Some(sps) => {
+                                            broker.sessions.insert(user_id, sps.clone());
+                                            (broker.sessions.get(&user_id).unwrap(), vec![])
+                                        }
+                                        None => {
+                                            // the user was not found in the SWS. we need to create a SPS, add it, encrypt and serialize the new SWS,
+                                            // add the SPS to broker.sessions, and return the newly created SPS and the new SWS encrypted serialization
+                                            let sps = SessionPeerStorageV0::new(user_id);
+                                            sws.users.insert(user_id.to_string(), sps.clone());
+                                            let encrypted = sws.enc_session(&wallet_id)?;
+                                            broker.sessions.insert(user_id, sps);
+                                            (broker.sessions.get(&user_id).unwrap(), encrypted)
+                                        }
+                                    }
+                                }
+                            };
+                            // save the new sws
+                            if new_sws.len() > 0 {
+                                match &broker.config {
+                                    LocalBrokerConfig::InMemory => {
+                                        panic!("cannot save session when InMemory mode")
+                                    }
+                                    LocalBrokerConfig::JsStorage(js_config) => {
+                                        // save session wallet storage to JsStorage
+                                        let encoded = base64_url::encode(&new_sws);
+                                        (js_config.session_write)(
+                                            format!("ng_wallet@{}", wallet_name),
+                                            encoded,
+                                        )?;
+                                    }
+                                    LocalBrokerConfig::BasePath(base_path) => {
+                                        // save session wallet storage to disk
+                                        let mut path = base_path.clone();
+                                        path.push("sessions");
+                                        std::fs::create_dir_all(path.clone()).unwrap();
+                                        path.push(format!("session{}", wallet_name));
+                                        //log_debug!("{}", path.clone().display());
+                                        write(path.clone(), &new_sws)
+                                            .map_err(|_| NgError::IoError)?;
+                                    }
+                                    LocalBrokerConfig::Headless(_) => {
+                                        panic!("don't call session_start on a Headless LocalBroker")
+                                    }
+                                }
+                            }
+                            session
+                        }
+                    }
+                };
+                let session = session.clone();
+
+                // derive user_master_key from client's storage_master_key
+                let user_id_ser = serde_bare::to_vec(&user_id).unwrap();
+                let mut key_material = [user_id_ser, client_storage_master_key].concat();
+                let mut key: [u8; 32] = derive_key(
+                    "NextGraph user_master_key BLAKE3 key",
+                    key_material.as_slice(),
+                );
+                // log_info!(
+                //     "USER MASTER KEY {user_id} {} {:?}",
+                //     user_id.to_hash_string(),
+                //     key
+                // );
+
+                let locator = if let Ok(site) = opened_wallet.wallet.site(&user_id) {
+                    let core = site.cores[0]; //TODO: cycle the other cores if failure to connect (failover)
+                    let brokers = opened_wallet.wallet.broker(core.0)?;
+                    BrokerInfoV0::vec_into_locator(brokers)
+                } else {
+                    Locator::empty()
+                };
+
+                key_material.zeroize();
+                let mut verifier = Verifier::new(
+                    VerifierConfig {
+                        config_type: broker
+                            .verifier_config_type_from_session_config(&config)
+                            .await?,
+                        user_master_key: key,
+                        peer_priv_key: session.peer_key.clone(),
+                        user_priv_key: credentials.0,
+                        private_store_read_cap: credentials.1,
+                        private_store_id: credentials.2,
+                        protected_store_id: credentials.3,
+                        public_store_id: credentials.4,
+                        locator,
+                    },
+                    block_storage,
+                )?;
+                key.zeroize();
+
+                //load verifier from local_storage (if rocks_db)
+                let _ = verifier.load();
+                let session = Session {
+                    config,
+                    peer_key: session.peer_key.clone(),
+                    last_wallet_nonce: session.last_wallet_nonce,
+                    verifier,
+                };
+                Ok(session)
+            }
+        }
+    }
+
+    pub(crate) fn wallet_save(broker: &mut Self) -> Result<(), NgError> {
+        let wallets_to_be_saved = broker
+            .wallets
+            .iter()
+            .filter(|(_, w)| !w.in_memory)
+            .map(|(a, b)| (a.clone(), b.clone()))
+            .collect();
+        match &broker.config {
+            LocalBrokerConfig::JsStorage(js_config) => {
+                // JS save
+                let lws_ser = LocalWalletStorage::v0_to_vec(&wallets_to_be_saved);
+                let encoded = base64_url::encode(&lws_ser);
+                (js_config.local_write)("ng_wallets".to_string(), encoded)?;
+            }
+            LocalBrokerConfig::BasePath(base_path) => {
+                // save on disk
+                // TODO: use https://lib.rs/crates/keyring instead of AppLocalData on Tauri apps
+                let mut path = base_path.clone();
+                std::fs::create_dir_all(path.clone()).unwrap();
+                path.push("wallets");
+
+                let lws_ser = LocalWalletStorage::v0_to_vec(&wallets_to_be_saved);
+                let r = write(path.clone(), &lws_ser);
+                if r.is_err() {
+                    log_err!("write error {:?} {}", path, r.unwrap_err());
+                    return Err(NgError::IoError);
+                }
+            }
+            _ => return Err(NgError::CannotSaveWhenInMemoryConfig),
+        }
+        Ok(())
+    }
+}
+
+static LOCAL_BROKER: OnceCell<Result<Arc<RwLock<LocalBroker>>, NgError>> = OnceCell::new();
+
+pub type ConfigInitFn = dyn Fn() -> LocalBrokerConfig + 'static + Sync + Send;
+
+async fn init_(config: LocalBrokerConfig) -> Result<Arc<RwLock<LocalBroker>>, NgError> {
+    let wallets = match &config {
+        LocalBrokerConfig::InMemory | LocalBrokerConfig::Headless(_) => HashMap::new(),
+        LocalBrokerConfig::BasePath(base_path) => {
+            // load the wallets and sessions from disk
+            let mut path = base_path.clone();
+            path.push("wallets");
+            let map_ser = read(path.clone());
+            if map_ser.is_ok() {
+                let wallets = LocalWalletStorage::v0_from_vec(&map_ser.unwrap());
+                if wallets.is_err() {
+                    log_err!(
+                        "Load BasePath LocalWalletStorage error: {:?}",
+                        wallets.unwrap_err()
+                    );
+                    let _ = remove_file(path);
+                    HashMap::new()
+                } else {
+                    let LocalWalletStorage::V0(wallets) = wallets.unwrap();
+                    wallets
+                }
+            } else {
+                HashMap::new()
+            }
+        }
+        LocalBrokerConfig::JsStorage(js_storage_config) => {
+            // load the wallets from JsStorage
+            match (js_storage_config.local_read)("ng_wallets".to_string()) {
+                Err(_) => HashMap::new(),
+                Ok(wallets_string) => {
+                    match base64_url::decode(&wallets_string)
+                        .map_err(|_| NgError::SerializationError)
+                    {
+                        Err(e) => {
+                            log_err!("Load wallets error: {:?}", e);
+                            (js_storage_config.clear)();
+                            HashMap::new()
+                        }
+                        Ok(map_ser) => match serde_bare::from_slice(&map_ser) {
+                            Err(e) => {
+                                log_err!("Load JS LocalWalletStorage error: {:?}", e);
+                                (js_storage_config.clear)();
+                                HashMap::new()
+                            }
+                            Ok(wallets) => {
+                                let LocalWalletStorage::V0(v0) = wallets;
+                                v0
+                            }
+                        },
+                    }
+                }
+            }
+        }
+    };
+    let (disconnections_sender, disconnections_receiver) = mpsc::unbounded::<String>();
+
+    let (localbroker_pump_sender, broker_pump_receiver) =
+        mpsc::unbounded::<LocalBrokerMessage>();
+
+    let mut local_broker = LocalBroker {
+        config,
+        wallets,
+        opened_wallets: HashMap::new(),
+        sessions: HashMap::new(),
+        opened_sessions: HashMap::new(),
+        opened_sessions_list: vec![],
+        remote_sessions_list: vec![],
+        headless_sessions: BTreeMap::new(),
+        tauri_streams: HashMap::new(),
+        disconnections_sender,
+        disconnections_receiver: Some(disconnections_receiver),
+        headless_connected_to_remote_broker: false,
+        pump_cond: None,
+    };
+
+    local_broker.init_pump(broker_pump_receiver);
+    //log_debug!("{:?}", &local_broker);
+
+    let broker = Arc::new(RwLock::new(local_broker));
+
+    BROKER
+        .write()
+        .await
+        .set_local_broker(localbroker_pump_sender);
+
+    Ok(broker)
+}
+
+#[doc(hidden)]
+pub async fn init_local_broker_with_lazy(config_fn: &Lazy<Box<ConfigInitFn>>) {
+    LOCAL_BROKER
+        .get_or_init(async {
+            let config = (&*config_fn)();
+            init_(config).await
+        })
+        .await;
+}
+
+#[doc(hidden)]
+pub async fn tauri_stream_add(stream_id: String, cancel: CancelFn) -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    broker.tauri_stream_add(stream_id, cancel);
+    Ok(())
+}
+
+#[doc(hidden)]
+pub async fn tauri_stream_cancel(stream_id: String) -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    broker.tauri_stream_cancel(stream_id);
+    Ok(())
+}
+
+/// Initializes the configuration of your local broker, by passing in a function (or closure) that returns a `LocalBrokerConfig`.
+///
+/// You must call `init_local_broker` at least once before you can start to use the broker.
+/// After the first call, all subsequent calls will have no effect.
+pub async fn init_local_broker(config_fn: Box<ConfigInitFn>) {
+    LOCAL_BROKER
+        .get_or_init(async {
+            let config = (config_fn)();
+            init_(config).await
+        })
+        .await;
+}
+
+/// Retrieves a HashMap of the wallets known to the LocalBroker. The name of the Wallet is used as the key
+pub async fn wallets_get_all() -> Result<HashMap<String, LocalWalletStorageV0>, NgError> {
+    let broker = match LOCAL_BROKER.get() {
+        Some(Err(e)) => {
+            log_err!("LocalBrokerNotInitialized: {}", e);
+            return Err(NgError::LocalBrokerNotInitialized);
+        }
+        None => {
+            log_err!("Not initialized");
+            return Err(NgError::LocalBrokerNotInitialized);
+        }
+        Some(Ok(broker)) => broker.read().await,
+    };
+    Ok(broker.wallets.clone())
+}
+
+/// Creates a new Wallet for the user. Each user should create only one Wallet.
+///
+/// See [CreateWalletV0] for a list of parameters.
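+///
+/// A minimal call sketch (all the `CreateWalletV0` field values are illustrative):
+/// ```ignore
+/// let params = CreateWalletV0 { /* security image and phrase, pin, pazzle length, ... */ };
+/// let result = wallet_create_v0(params).await?;
+/// println!("created wallet {}", result.wallet_name);
+/// ```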
+///
+/// Wallets are transferable to other devices (see [wallet_get_file] and [wallet_import])
+pub async fn wallet_create_v0(params: CreateWalletV0) -> Result<CreateWalletResultV0, NgError> {
+    // TODO: entering sub-block to release the lock asap
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+    if params.local_save && broker.config.is_in_memory() {
+        return Err(NgError::CannotSaveWhenInMemoryConfig);
+    }
+    let in_memory = !params.local_save;
+
+    let intermediate = create_wallet_first_step_v0(params)?;
+    let lws: LocalWalletStorageV0 = (&intermediate).into();
+
+    let wallet_name = intermediate.wallet_name.clone();
+    broker.wallets.insert(wallet_name, lws);
+
+    let sensitive_wallet: SensitiveWallet = (&intermediate).into();
+
+    let _client = broker.wallet_was_opened(sensitive_wallet).await?;
+
+    let session_config = SessionConfig::new_for_local_broker_config(
+        &intermediate.user_privkey.to_pub(),
+        &intermediate.wallet_name,
+        &broker.config,
+        intermediate.in_memory,
+    )?;
+
+    let mut session = broker
+        .session_start(session_config, Some(intermediate.user_privkey.clone()))
+        .await?;
+
+    // let session = broker.opened_sessions_list[session_info.session_id as usize]
+    //     .as_mut()
+    //     .unwrap();
+    let with_pdf = intermediate.pdf;
+    let pin = intermediate.pin;
+    let (mut res, site, brokers) =
+        create_wallet_second_step_v0(intermediate, &mut session.verifier).await?;
+
+    if with_pdf {
+        let wallet_recovery =
+            wallet_to_wallet_recovery(&res.wallet, res.pazzle.clone(), res.mnemonic, pin);
+
+        if let Ok(pdf_buffer) = wallet_recovery_pdf(wallet_recovery, 600).await {
+            res.pdf_file = pdf_buffer;
+        };
+    }
+
+    //log_info!("VERIFIER DUMP {:?}", session.verifier);
+
+    broker.wallets.get_mut(&res.wallet_name).unwrap().wallet = res.wallet.clone();
+    if !in_memory {
+        LocalBroker::wallet_save(&mut broker)?;
+    }
+    broker
+        .opened_wallets
+        .get_mut(&res.wallet_name)
+        .unwrap()
+        .wallet
+        .complete_with_site_and_brokers(site, brokers);
+
+    let session_info = broker.add_session(session)?;
+
+    res.session_id = session_info.session_id;
+    Ok(res)
+}
+
+#[doc(hidden)]
+/// Only used by the JS SDK, when the localStorage changes and the Rust-side copy of the wallets gets out of sync
+pub async fn wallets_reload() -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+    match &broker.config {
+        LocalBrokerConfig::JsStorage(js_config) => {
+            // load the wallets from JsStorage
+            let wallets_string = (js_config.local_read)("ng_wallets".to_string())?;
+            let map_ser =
+                base64_url::decode(&wallets_string).map_err(|_| NgError::SerializationError)?;
+            let wallets: LocalWalletStorage = serde_bare::from_slice(&map_ser)?;
+            let LocalWalletStorage::V0(v0) = wallets;
+            //log_info!("adding wallet {:?}", v0);
+            broker.wallets.extend(v0);
+        }
+        _ => {}
+    }
+    Ok(())
+}
+
+#[doc(hidden)]
+/// This should not be used by programmers. Only here because the JS SDK needs it.
+///
+/// It will throw an error if you use it.
+pub async fn wallet_add(lws: LocalWalletStorageV0) -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+    if !lws.in_memory && broker.config.is_in_memory() {
+        return Err(NgError::CannotSaveWhenInMemoryConfig);
+    }
+    if broker.wallets.get(&lws.wallet.name()).is_some() {
+        return Err(NgError::WalletAlreadyAdded);
+    }
+    let in_memory = lws.in_memory;
+    broker.wallets.insert(lws.wallet.name(), lws);
+    if in_memory {
+        // if broker.config.is_js() {
+        //     (broker.config.js_config().unwrap().wallets_in_mem_changed)();
+        // }
+    } else {
+        LocalBroker::wallet_save(&mut broker)?;
+    }
+    Ok(())
+}
+
+pub fn wallet_to_wallet_recovery(
+    wallet: &Wallet,
+    pazzle: Vec<u8>,
+    mnemonic: [u16; 12],
+    pin: [u8; 4],
+) -> NgQRCodeWalletRecoveryV0 {
+    match wallet {
+        Wallet::V0(v0) => {
+            let mut content = v0.content.clone();
+            content.security_img = vec![];
+            content.security_txt = String::new();
+            NgQRCodeWalletRecoveryV0 {
+                wallet: serde_bare::to_vec(&content).unwrap(),
+                pazzle,
+                mnemonic,
+                pin,
+            }
+        }
+        _ => unimplemented!(),
+    }
+}
+
+/// Generates the Recovery PDF containing the Wallet, PIN, Pazzle and Mnemonic.
+pub async fn wallet_recovery_pdf(
+    recovery: NgQRCodeWalletRecoveryV0,
+    size: u32,
+) -> Result<Vec<u8>, NgError> {
+    let ser = serde_bare::to_vec(&recovery)?;
+    if ser.len() > 2_953 {
+        return Err(NgError::InvalidPayload);
+    }
+    let recovery_str = base64_url::encode(&ser);
+    let wallet_svg = match QrCode::with_error_correction_level(&ser, qrcode::EcLevel::M) {
+        Ok(qr) => {
+            let svg = qr
+                .render()
+                .max_dimensions(size, size)
+                .dark_color(svg::Color("#000000"))
+                .light_color(svg::Color("#ffffff"))
+                .build();
+            svg
+        }
+        Err(_e) => return Err(NgError::BrokerError),
+    };
+
+    let options = svg2pdf::usvg::Options::default();
+    let tree = svg2pdf::usvg::Tree::from_str(&wallet_svg, &options)
+        .map_err(|e| NgError::WalletError(e.to_string()))?;
+
+    let (chunk, qrcode_ref) = svg2pdf::to_chunk(&tree, ConversionOptions::default());
+    //let pdf_buf = svg2pdf::to_pdf(&tree, ConversionOptions::default(), PageOptions::default());
+
+    // Define some indirect reference ids we'll use.
+    let catalog_id = Ref::new(1000);
+    let page_tree_id = Ref::new(1001);
+    let page_id = Ref::new(1002);
+    let font_id = Ref::new(1003);
+    let content_id = Ref::new(1004);
+    let font_name = Name(b"F1");
+    let qrcode_name = Name(b"Im1");
+
+    let chunks = recovery_str
+        .as_bytes()
+        .chunks(92)
+        .collect::<Vec<&[u8]>>();
+
+    let mut content = Content::new();
+
+    for (line, string) in chunks.iter().enumerate() {
+        content.begin_text();
+        content.set_font(font_name, 10.0);
+        content.next_line(20.0, 810.0 - line as f32 * 15.0);
+        content.show(Str(*string));
+        content.end_text();
+    }
+
+    let pazzle: Vec<String> = display_pazzle(&recovery.pazzle)
+        .iter()
+        .map(|p| p.1.to_string())
+        .collect();
+    let mnemonic = display_mnemonic(&recovery.mnemonic);
+
+    let credentials = format!(
+        "PIN:{}{}{}{} PAZZLE:{} MNEMONIC:{}",
+        recovery.pin[0],
+        recovery.pin[1],
+        recovery.pin[2],
+        recovery.pin[3],
+        pazzle.join(" "),
+        mnemonic.join(" ")
+    );
+
+    let chunks = credentials
+        .as_bytes()
+        .chunks(92)
+        .collect::<Vec<&[u8]>>();
+
+    for (line, string) in chunks.iter().enumerate() {
+        content.begin_text();
+        content.set_font(font_name, 10.0);
+        content.next_line(20.0, 630.0 - line as f32 * 15.0);
+        content.show(Str(*string));
+        content.end_text();
+    }
+
+    content.save_state();
+    content.transform([595.0, 0.0, 0.0, 595.0, 0.0, 0.0]);
+    content.x_object(qrcode_name);
+    content.restore_state();
+
+    // Write a document catalog and a page tree with one A4 page.
+    let mut pdf = Pdf::new();
+    pdf.stream(content_id, &content.finish());
+    pdf.catalog(catalog_id).pages(page_tree_id);
+    pdf.pages(page_tree_id).kids([page_id]).count(1);
+    {
+        let mut page = pdf.page(page_id);
+        let mut page_resources = page
+            .parent(page_tree_id)
+            .media_box(Rect::new(0.0, 0.0, 595.0, 842.0))
+            .resources();
+        page_resources.fonts().pair(font_name, font_id);
+        page_resources.x_objects().pair(qrcode_name, qrcode_ref);
+        page_resources.finish();
+
+        page.contents(content_id);
+        page.finish();
+    }
+    pdf.type1_font(font_id).base_font(Name(b"Courier"));
+    pdf.extend(&chunk);
+    let pdf_buf = pdf.finish();
+
+    Ok(pdf_buf)
+}
+
+#[cfg(debug_assertions)]
+lazy_static! {
+    static ref NEXTGRAPH_EU: BrokerServerV0 = BrokerServerV0 {
+        server_type: BrokerServerTypeV0::Localhost(14400),
+        can_verify: false,
+        can_forward: false,
+        peer_id: ng_repo::utils::decode_key({
+            use crate::local_broker_dev_env::PEER_ID;
+            PEER_ID
+        })
+        .unwrap(),
+    };
+}
+
+#[cfg(not(debug_assertions))]
+lazy_static! {
+    static ref NEXTGRAPH_EU: BrokerServerV0 = BrokerServerV0 {
+        server_type: BrokerServerTypeV0::Domain("nextgraph.eu".to_string()),
+        can_verify: false,
+        can_forward: false,
+        peer_id: ng_repo::utils::decode_key("LZn-rQD_NUNxrWT_hBXeHk6cjI6WAy-knRVOdovIjwsA")
+            .unwrap(),
+    };
+}
+
+/// Obtains a Wallet object from a QRCode or a TextCode.
+///
+/// The returned object can be used to import the wallet into a new Device
+/// with the help of the function [wallet_open_with_pazzle_words]
+/// followed by [wallet_import]
+pub async fn wallet_import_from_code(code: String) -> Result<Wallet, NgError> {
+    let qr = NgQRCode::from_code(code.trim().to_string())?;
+    match qr {
+        NgQRCode::WalletTransferV0(NgQRCodeWalletTransferV0 {
+            broker,
+            rendezvous,
+            secret_key,
+            is_rendezvous,
+        }) => {
+            let wallet: ExportedWallet = do_ext_call(
+                &broker,
+                ExtWalletGetExportV0 {
+                    id: rendezvous,
+                    is_rendezvous,
+                },
+            )
+            .await?;
+
+            let mut buf = wallet.0.into_vec();
+            encrypt_in_place(&mut buf, *secret_key.slice(), [0; 12]);
+            let wallet: Wallet = serde_bare::from_slice(&buf)?;
+
+            let broker = match LOCAL_BROKER.get() {
+                None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+                Some(Ok(broker)) => broker.read().await,
+            };
+            // check that the wallet is not already present in local_broker
+            let wallet_name = wallet.name();
+            if broker.wallets.get(&wallet_name).is_none() {
+                Ok(wallet)
+            } else {
+                Err(NgError::WalletAlreadyAdded)
+            }
+        }
+        _ => Err(NgError::IncompatibleQrCode),
+    }
+}
+
+/// Starts a rendez-vous to obtain a wallet from another device.
+///
+/// A rendezvous is used when the device that is importing doesn't have a camera.
+/// The QRCode is displayed on that device, and another device (with a camera, and with the wallet) will scan it.
+///
+/// Returns the QRcode in SVG format, and the code (a string) to be used with [wallet_import_from_code]
+pub async fn wallet_import_rendezvous(size: u32) -> Result<(String, String), NgError> {
+    let code = NgQRCode::WalletTransferV0(NgQRCodeWalletTransferV0 {
+        broker: NEXTGRAPH_EU.clone(),
+        rendezvous: SymKey::random(),
+        secret_key: SymKey::random(),
+        is_rendezvous: true,
+    });
+    let code_string = code.to_code();
+
+    let code_svg = match QrCode::with_error_correction_level(&code_string, qrcode::EcLevel::M) {
+        Ok(qr) => {
+            let svg = qr
+                .render()
+                .max_dimensions(size, size)
+                .dark_color(svg::Color("#000000"))
+                .light_color(svg::Color("#ffffff"))
+                .build();
+            svg
+        }
+        Err(_e) => return Err(NgError::BrokerError),
+    };
+
+    Ok((code_svg, code_string))
+}
+
+/// Gets the TextCode to display in order to export the wallet of the current session ID
+///
+/// The ExportedWallet is valid for 5 min.
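+///
+/// A minimal sketch of the export flow on the device that already holds the wallet
+/// (the session id value is illustrative):
+/// ```ignore
+/// let code = wallet_export_get_textcode(session_id).await?;
+/// // display `code` (or use wallet_export_get_qrcode) so the other device can import it
+/// ```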
+///
+/// Returns the TextCode
+pub async fn wallet_export_get_textcode(session_id: u64) -> Result<String, NgError> {
+    let broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.read().await,
+    };
+
+    match &broker.config {
+        LocalBrokerConfig::Headless(_) => return Err(NgError::LocalBrokerIsHeadless),
+        _ => {
+            let (real_session_id, is_remote) = broker.get_real_session_id_for_mut(session_id)?;
+
+            if is_remote {
+                return Err(NgError::NotImplemented);
+            } else {
+                let session = broker.opened_sessions_list[real_session_id]
+                    .as_ref()
+                    .ok_or(NgError::SessionNotFound)?;
+                let wallet_name = session.config.wallet_name();
+
+                match broker.wallets.get(&wallet_name) {
+                    None => Err(NgError::WalletNotFound),
+                    Some(lws) => {
+                        //let broker = lws.bootstrap.servers().first().unwrap();
+                        let wallet = &lws.wallet;
+                        let secret_key = SymKey::random();
+                        let rendezvous = SymKey::random();
+                        let code = NgQRCode::WalletTransferV0(NgQRCodeWalletTransferV0 {
+                            broker: NEXTGRAPH_EU.clone(),
+                            rendezvous: rendezvous.clone(),
+                            secret_key: secret_key.clone(),
+                            is_rendezvous: false,
+                        });
+                        let code_string = code.to_code();
+                        let mut wallet_ser = serde_bare::to_vec(wallet)?;
+                        encrypt_in_place(&mut wallet_ser, *secret_key.slice(), [0; 12]);
+                        let exported_wallet =
+                            ExportedWallet(serde_bytes::ByteBuf::from(wallet_ser));
+                        match session
+                            .verifier
+                            .client_request::<WalletPutExport, ()>(WalletPutExport::V0(
+                                WalletPutExportV0 {
+                                    wallet: exported_wallet,
+                                    rendezvous_id: rendezvous,
+                                    is_rendezvous: false,
+                                },
+                            ))
+                            .await
+                        {
+                            Err(e) => Err(e),
+                            Ok(SoS::Stream(_)) => Err(NgError::InvalidResponse),
+                            Ok(SoS::Single(_)) => Ok(code_string),
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+
+/// Gets the QRcode to display in order to export a wallet of the current session ID
+///
+/// The ExportedWallet is valid for 5 min.
+///
+/// Returns the QRcode in SVG format
+pub async fn wallet_export_get_qrcode(session_id: u64, size: u32) -> Result<String, NgError> {
+    let code_string = wallet_export_get_textcode(session_id).await?;
+
+    let code_svg = match QrCode::with_error_correction_level(&code_string, qrcode::EcLevel::M) {
+        Ok(qr) => {
+            let svg = qr
+                .render()
+                .max_dimensions(size, size)
+                .dark_color(svg::Color("#000000"))
+                .light_color(svg::Color("#ffffff"))
+                .build();
+            svg
+        }
+        Err(_e) => return Err(NgError::BrokerError),
+    };
+
+    Ok(code_svg)
+}
+
+/// Puts the Wallet to the rendezvous ID that was scanned
+///
+/// The rendezvous ID is valid for 5 min.
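+///
+/// A minimal sketch on the device holding the wallet, after it scanned the
+/// rendezvous QRcode of the importing device (values are illustrative):
+/// ```ignore
+/// wallet_export_rendezvous(session_id, scanned_code).await?;
+/// ```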
+pub async fn wallet_export_rendezvous(session_id: u64, code: String) -> Result<(), NgError> {
+    let qr = NgQRCode::from_code(code)?;
+    match qr {
+        NgQRCode::WalletTransferV0(NgQRCodeWalletTransferV0 {
+            broker: _,
+            rendezvous,
+            secret_key,
+            is_rendezvous,
+        }) => {
+            if !is_rendezvous {
+                return Err(NgError::NotARendezVous);
+            }
+
+            let broker = match LOCAL_BROKER.get() {
+                None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+                Some(Ok(broker)) => broker.read().await,
+            };
+
+            match &broker.config {
+                LocalBrokerConfig::Headless(_) => return Err(NgError::LocalBrokerIsHeadless),
+                _ => {
+                    let (real_session_id, is_remote) =
+                        broker.get_real_session_id_for_mut(session_id)?;
+
+                    if is_remote {
+                        return Err(NgError::NotImplemented);
+                    } else {
+                        let session = broker.opened_sessions_list[real_session_id]
+                            .as_ref()
+                            .ok_or(NgError::SessionNotFound)?;
+                        let wallet_name = session.config.wallet_name();
+
+                        match broker.wallets.get(&wallet_name) {
+                            None => Err(NgError::WalletNotFound),
+                            Some(lws) => {
+                                //let broker = lws.bootstrap.servers().first().unwrap();
+                                let wallet = &lws.wallet;
+
+                                let mut wallet_ser = serde_bare::to_vec(wallet)?;
+                                encrypt_in_place(&mut wallet_ser, *secret_key.slice(), [0; 12]);
+                                let exported_wallet =
+                                    ExportedWallet(serde_bytes::ByteBuf::from(wallet_ser));
+
+                                // TODO: send the WalletPutExport client request to the broker received from the QRcode. For now it is sheer luck that all clients are connected to nextgraph.eu.
+                                // If the user doesn't have an account with nextgraph.eu, their broker should relay the request (core protocol?)
+
+                                match session
+                                    .verifier
+                                    .client_request::<WalletPutExport, ()>(WalletPutExport::V0(
+                                        WalletPutExportV0 {
+                                            wallet: exported_wallet,
+                                            rendezvous_id: rendezvous,
+                                            is_rendezvous: true,
+                                        },
+                                    ))
+                                    .await
+                                {
+                                    Err(e) => Err(e),
+                                    Ok(SoS::Stream(_)) => Err(NgError::InvalidResponse),
+                                    Ok(SoS::Single(_)) => Ok(()),
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        _ => Err(NgError::IncompatibleQrCode),
+    }
+}
+
+/// Reads a binary Wallet File and decodes it to a Wallet object.
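+///
+/// A minimal sketch (`bytes` would typically come from reading a wallet file on disk):
+/// ```ignore
+/// let wallet = wallet_read_file(bytes).await?;
+/// ```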
+///
+/// This object can be used to import the wallet into a new Device
+/// with the help of the function [wallet_open_with_pazzle_words]
+/// followed by [wallet_import]
+pub async fn wallet_read_file(file: Vec<u8>) -> Result<Wallet, NgError> {
+    let ngf: NgFile = file.try_into()?;
+    if let NgFile::V0(NgFileV0::Wallet(wallet)) = ngf {
+        let broker = match LOCAL_BROKER.get() {
+            None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+            Some(Ok(broker)) => broker.read().await,
+        };
+        // check that the wallet is not already present in local_broker
+        let wallet_name = wallet.name();
+        if broker.wallets.get(&wallet_name).is_none() {
+            Ok(wallet)
+        } else {
+            Err(NgError::WalletAlreadyAdded)
+        }
+    } else {
+        Err(NgError::InvalidFileFormat)
+    }
+}
+
+/// Retrieves the Wallet by its name, to be used for opening it
+pub async fn wallet_get(wallet_name: &String) -> Result<Wallet, NgError> {
+    let broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.read().await,
+    };
+    // check that the wallet exists
+    match broker.wallets.get(wallet_name) {
+        None => Err(NgError::WalletNotFound),
+        Some(lws) => Ok(lws.wallet.clone()),
+    }
+}
+
+/// Retrieves the binary content of a Wallet File for the Wallet identified by its name
+pub async fn wallet_get_file(wallet_name: &String) -> Result<Vec<u8>, NgError> {
+    let broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.read().await,
+    };
+    // check that the wallet exists
+    match broker.wallets.get(wallet_name) {
+        None => Err(NgError::WalletNotFound),
+        Some(lws) => Ok(to_vec(&NgFile::V0(NgFileV0::Wallet(lws.wallet.clone()))).unwrap()),
+    }
+}
+
+#[doc(hidden)]
+/// This is a bit hard to use as the pazzle words are encoded in unsigned bytes.
+/// Prefer the function wallet_open_with_pazzle_words
+pub fn wallet_open_with_pazzle(
+    wallet: &Wallet,
+    pazzle: Vec<u8>,
+    pin: [u8; 4],
+) -> Result<SensitiveWallet, NgError> {
+    let opened_wallet = ng_wallet::open_wallet_with_pazzle(wallet, pazzle, pin)?;
+
+    Ok(opened_wallet)
+}
+
+/// Opens a wallet by providing an ordered list of words, and the pin.
+///
+/// If you are opening a wallet that is already known to the LocalBroker, you must then call [wallet_was_opened].
+/// Otherwise, if you are importing, then you must call [wallet_import].
+///
+/// For a list of words, see [list_all_words](crate::wallet::emojis::list_all_words)
+pub fn wallet_open_with_pazzle_words(
+    wallet: &Wallet,
+    pazzle_words: &Vec<String>,
+    pin: [u8; 4],
+) -> Result<SensitiveWallet, NgError> {
+    wallet_open_with_pazzle(wallet, encode_pazzle(pazzle_words)?, pin)
+}
+
+/// Opens a wallet by providing an ordered list of mnemonic words, and the pin.
+///
+/// If you are opening a wallet that is already known to the LocalBroker, you must then call [wallet_was_opened].
+/// Otherwise, if you are importing, then you must call [wallet_import].
+pub fn wallet_open_with_mnemonic_words(
+    wallet: &Wallet,
+    mnemonic: &Vec<String>,
+    pin: [u8; 4],
+) -> Result<SensitiveWallet, NgError> {
+    Ok(ng_wallet::open_wallet_with_mnemonic(
+        wallet,
+        encode_mnemonic(mnemonic)?,
+        pin,
+    )?)
+}
+
+/// Imports a wallet into the LocalBroker so the user can then access its content.
+///
+/// The wallet should have been previously opened with [wallet_open_with_pazzle_words].
+/// Once the import is done, the wallet is already marked as opened, and the user can start a new session right away.
+/// There is no need to call wallet_was_opened.
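+///
+/// A minimal import sketch (file bytes, pazzle words and pin values are illustrative):
+/// ```ignore
+/// let wallet = wallet_read_file(bytes).await?;
+/// let opened = wallet_open_with_pazzle_words(&wallet, &pazzle_words, pin)?;
+/// let client = wallet_import(wallet, opened, false).await?;
+/// ```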
+pub async fn wallet_import(
+    encrypted_wallet: Wallet,
+    mut opened_wallet: SensitiveWallet,
+    in_memory: bool,
+) -> Result<ClientV0, NgError> {
+    {
+        // in a block to release the lock before calling wallet_add
+        let broker = match LOCAL_BROKER.get() {
+            None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+            Some(Ok(broker)) => broker.read().await,
+        };
+
+        let wallet_name = encrypted_wallet.name();
+        if broker.wallets.get(&wallet_name).is_some() {
+            return Err(NgError::WalletAlreadyAdded);
+        }
+    }
+
+    let lws = opened_wallet.import_v0(encrypted_wallet, in_memory)?;
+
+    wallet_add(lws).await?;
+
+    wallet_was_opened(opened_wallet).await
+}
+
+/// Must be called after [wallet_open_with_pazzle_words] if you are not importing.
+///
+/// This is a separate step because in the JS webapp, the opening of a wallet takes time and freezes the GUI.
+/// We need to run it in the background in a WebWorker. But there, the LocalBroker cannot access localStorage...
+/// So a separate function must be called, once the WebWorker is done.
+pub async fn wallet_was_opened(wallet: SensitiveWallet) -> Result<ClientV0, NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    broker.wallet_was_opened(wallet).await
+}
+
+/// Starts a session with the LocalBroker. The type of verifier is selected at this moment.
+///
+/// The session is valid even if there is no internet. The local data will be used in this case.
+/// The return value is the index of the session, which will be used in all the doc_* API calls.
+pub async fn session_start(config: SessionConfig) -> Result<SessionInfo, NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    match &broker.config {
+        LocalBrokerConfig::Headless(_) => {
+            match config {
+                SessionConfig::HeadlessV0(user_id) => {
+                    broker.err_if_not_headless()?;
+                    // establish the connection if not already there?
+                    broker.connect_remote_broker().await?;
+
+                    let session = HeadlessSession { user_id: user_id.clone() };
+                    let mut session_info = broker.add_headless_session(session)?;
+
+                    let request = AppSessionStart::V0(AppSessionStartV0 {
+                        session_id: session_info.session_id,
+                        credentials: None,
+                        user_id,
+                        detach: true,
+                    });
+
+                    let res = broker.send_request_headless(request).await;
+
+                    if res.is_err() {
+                        let _ = broker.remove_headless_session(&session_info.user);
+                        return Err(res.unwrap_err());
+                    }
+
+                    if let Ok(AppResponse::V0(AppResponseV0::SessionStart(
+                        AppSessionStartResponse::V0(response),
+                    ))) = res
+                    {
+                        session_info.private_store_id =
+                            NuriV0::to_store_nuri_string(&response.private_store);
+                        session_info.protected_store_id =
+                            NuriV0::to_store_nuri_string(&response.protected_store);
+                        session_info.public_store_id =
+                            NuriV0::to_store_nuri_string(&response.public_store);
+                    }
+
+                    Ok(session_info)
+                },
+                _ => panic!("don't call session_start with a SessionConfig different from HeadlessV0 when the LocalBroker is configured for Headless")
+            }
+        }
+        // TODO: implement SessionConfig::WithCredentials. VerifierType::Remote => it needs to establish a connection to remote here, then send the AppSessionStart in it.
+        // also, it is using broker.remote_sessions.get
+        _ => {
+            if config.is_remote() || config.is_with_credentials() {
+                unimplemented!();
+            }
+
+            let user_id = config.user_id();
+            match broker.opened_sessions.get(&user_id) {
+                Some(idx) => {
+                    let ses = broker.get_session(*idx);
+                    match ses {
+                        Ok(sess) => {
+                            if !sess.config.is_memory() && config.is_memory() {
+                                return Err(NgError::SessionAlreadyStarted); // already started with a different config.
+                            } else {
+                                return Ok(SessionInfo {
+                                    session_id: *idx,
+                                    user: user_id,
+                                    private_store_id: NuriV0::to_store_nuri_string(
+                                        &broker.get_site_store_of_session(
+                                            sess,
+                                            SiteStoreType::Private,
+                                        )?,
+                                    ),
+                                    protected_store_id: NuriV0::to_store_nuri_string(
+                                        &broker.get_site_store_of_session(
+                                            sess,
+                                            SiteStoreType::Protected,
+                                        )?,
+                                    ),
+                                    public_store_id: NuriV0::to_store_nuri_string(
+                                        &broker.get_site_store_of_session(
+                                            sess,
+                                            SiteStoreType::Public,
+                                        )?,
+                                    ),
+                                });
+                            }
+                        }
+                        Err(_) => {}
+                    }
+                }
+                None => {}
+            };
+
+            let session = broker.session_start(config, None).await?;
+            broker.add_session(session)
+        }
+    }
+}
+
+use web_time::SystemTime;
+fn get_unix_time() -> f64 {
+    SystemTime::now()
+        .duration_since(SystemTime::UNIX_EPOCH)
+        .unwrap()
+        .as_millis() as f64
+}
+
+/// Attempts a TCP connection to the Server Broker of the User.
+///
+/// The configuration about which Server to contact is stored in the Wallet.
+/// The LocalBroker will be in charge of keeping this connection alive,
+/// cycling through optional alternative servers to contact in case of failure,
+/// and will notify the user if the connection is lost permanently.
+/// The result is a list of (user_id, server_id, server_ip, error, since_date) tuples.
+/// If error is None, the connection was successful.
+///
+/// Once the connection is established, the user can sync data, open documents, etc.. with the Verifier API
+///
+/// In a future version, it will be possible to be connected to several brokers at the same time
+/// (for different users/identities opened concurrently on the same Client)
+// TODO: improve this return value
+// TODO: give public access to the API for subscribing to disconnections
+pub async fn user_connect(
+    user_id: &UserId,
+) -> Result<Vec<(String, String, String, Option<String>, f64)>, NgError> {
+    let client_info = get_client_info(ClientType::NativeService);
+    user_connect_with_device_info(client_info, &user_id, None).await
+}
+
+fn get_client_info(client_type: ClientType) -> ClientInfo {
+    let os_info = get_os_info();
+    let info = json!({
+        "platform": {
+            "type": "program",
+            "arch": os_info.get("rust").unwrap().get("arch"),
+            "debug": os_info.get("rust").unwrap().get("debug"),
+            "target": os_info.get("rust").unwrap().get("target"),
+            "arch_uname": os_info.get("uname").unwrap().get("arch"),
+            "bitness": os_info.get("uname").unwrap().get("bitness"),
+            "codename": os_info.get("uname").unwrap().get("codename"),
+            "edition": os_info.get("uname").unwrap().get("edition"),
+        },
+        "os": {
+            "name": os_info.get("uname").unwrap().get("os_name"),
+            "family": os_info.get("rust").unwrap().get("family"),
+            "version": os_info.get("uname").unwrap().get("version"),
+            "name_rust": os_info.get("rust").unwrap().get("os_name"),
+        }
+    });
+
+    ClientInfo::new(
+        client_type,
+        info.to_string(),
+        env!("CARGO_PKG_VERSION").to_string(),
+    )
+}
+
+/// Used internally by JS SDK and Tauri Apps. Do not use "as is". See [user_connect] instead.
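+///
+/// A minimal sketch of the preferred entry point (the user id value is illustrative):
+/// ```ignore
+/// let statuses = user_connect(&user_id).await?;
+/// for (user, server, ip, error, since) in statuses {
+///     // error == None means the connection to that server succeeded
+/// }
+/// ```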
+/// Used internally by the JS SDK and the Tauri apps. Do not use "as is". See [user_connect] instead.
+#[doc(hidden)]
+pub async fn user_connect_with_device_info(
+    info: ClientInfo,
+    original_user_id: &UserId,
+    location: Option<String>,
+) -> Result<Vec<(String, String, String, Option<String>, f64)>, NgError> {
+    // FIXME: should this write lock be released much sooner than at the end of the loop over all the servers we try to connect to?
+    // Or maybe it is good to block, as we don't want concurrent connection attempts, potentially to the same server.
+    let mut local_broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    local_broker.err_if_headless()?;
+
+    let (client, sites, brokers, peer_key) = {
+        let (wallet, session) = local_broker.get_wallet_and_session(original_user_id)?;
+        match wallet {
+            SensitiveWallet::V0(wallet) => (
+                wallet.client.clone().unwrap(),
+                wallet.sites.clone(),
+                wallet.brokers.clone(),
+                session.peer_key.clone(),
+            ),
+        }
+    };
+
+    let mut result: Vec<(String, String, String, Option<String>, f64)> = Vec::new();
+    let arc_cnx: Arc<Box<dyn IConnect>> = Arc::new(Box::new(ConnectionWebSocket {}));
+
+    let client_priv = &client.sensitive_client_storage.priv_key;
+    let client_name = &client.name;
+    let auto_open = &client.auto_open;
+    // log_info!(
+    //     "XXXX {} name={:?} auto_open={:?} {:?}",
+    //     client_id.to_string(),
+    //     client_name,
+    //     auto_open,
+    //     wallet
+    // );
+    for user in auto_open {
+        let user_id = user.to_string();
+        let peer_id = peer_key.to_pub();
+        log_info!(
+            "connecting with local peer_id {} for user {}",
+            peer_id,
+            user_id
+        );
+        let site = sites.get(&user_id);
+        if site.is_none() {
+            result.push((
+                user_id,
+                "".into(),
+                "".into(),
+                Some("Site is missing".into()),
+                get_unix_time(),
+            ));
+            continue;
+        }
+        let site = site.unwrap();
+        let user_priv = site.get_individual_user_priv_key().unwrap();
+        let core = site.cores[0]; //TODO: cycle through the other cores in case of failure to connect (failover)
+        let server_key = core.0;
+        let broker = brokers.get(&core.0.to_string());
+        if broker.is_none() {
+            result.push((
+                user_id,
+                core.0.to_string(),
+                "".into(),
+                Some("Broker is missing".into()),
+                get_unix_time(),
+            ));
+            continue;
+        }
+        let brokers = broker.unwrap();
+        let mut tried: Option<(String, String, String, Option<String>, f64)> = None;
+        //TODO: on tauri (or forward in local broker, or CLI), prefer a Public broker to a Domain one. Domain always comes first though, so we need to reorder the list
+        //TODO: use site.bootstraps to order the list of BrokerInfo.
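+        // the event pump is paused while we attempt the connections below;
+        // it is restarted further down, once a connection is up and the verifier
+        // has successfully processed the opened connection.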
+        local_broker.stop_pump().await;
+        for broker_info in brokers {
+            match broker_info {
+                BrokerInfoV0::ServerV0(server) => {
+                    let url = server.get_ws_url(&location).await;
+                    log_debug!("URL {:?}", url);
+                    // url is an Option<(String, Vec<BindAddress>)>
+                    if url.is_some() {
+                        let url = url.unwrap();
+                        if url.1.is_empty() {
+                            // TODO: deal with Box(Dyn)Public -> tunnel, and on tauri/forward/CLIs, deal with all Box -> direct connections (when url.1.len() is > 0)
+                            let res = BROKER
+                                .write()
+                                .await
+                                .connect(
+                                    arc_cnx.clone(),
+                                    peer_key.clone(),
+                                    peer_id,
+                                    server_key,
+                                    StartConfig::Client(ClientConfig {
+                                        url: url.0.clone(),
+                                        name: client_name.clone(),
+                                        user_priv: user_priv.clone(),
+                                        client_priv: client_priv.clone(),
+                                        info: info.clone(),
+                                        registration: Some(core.1),
+                                    }),
+                                )
+                                .await;
+                            log_debug!("broker.connect : {:?}", res);
+
+                            tried = Some((
+                                user_id.clone(),
+                                core.0.to_string(),
+                                url.0.into(),
+                                match &res {
+                                    Ok(_) => None,
+                                    Err(e) => Some(e.to_string()),
+                                },
+                                get_unix_time(),
+                            ));
+                        }
+                        if tried.is_some() && tried.as_ref().unwrap().3.is_none() {
+                            let res = {
+                                let session = local_broker.get_session_mut(original_user_id)?;
+                                session.verifier.connection_opened(server_key).await
+                            };
+                            if res.is_err() {
+                                let e = res.unwrap_err();
+                                log_err!("got an error while processing the opened connection {:?}", e);
+                                Broker::close_all_connections().await;
+                                tried.as_mut().unwrap().3 = Some(e.to_string());
+                            } else {
+                                local_broker.start_pump().await;
+
+                                // try to pop an inbox msg
+                                let broker = BROKER.read().await;
+                                broker
+                                    .send_client_event(&Some(*user), &Some(server_key), ClientEvent::InboxPopRequest)
+                                    .await?;
+                            }
+                            break;
+                        } else {
+                            log_debug!("Failed connection {:?}", tried);
+                        }
+                    }
+                }
+                // Core broker information is discarded here
+                _ => {}
+            }
+        }
+        if tried.is_none() {
+            tried = Some((
+                user_id,
+                core.0.to_string(),
+                "".into(),
+                Some("No broker found".into()),
+                get_unix_time(),
+            ));
+        }
+        result.push(tried.unwrap());
+    }
+
+    Ok(result)
+}
+
+/// Stops the session, which can be resumed later on. All the local data is flushed from RAM.
+pub async fn session_stop(user_id: &UserId) -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    match broker.config {
+        LocalBrokerConfig::Headless(_) => {
+            let (session_id, _) = broker.remove_headless_session(user_id)?;
+
+            let request = AppSessionStop::V0(AppSessionStopV0 {
+                session_id,
+                force_close: false,
+            });
+
+            broker.send_request_headless::<_, EmptyAppResponse>(request).await?;
+        }
+        _ => {
+            // TODO: implement for Remote
+            match broker.opened_sessions.remove(user_id) {
+                Some(id) => {
+                    let _ = broker.get_session(id)?;
+                    let real_id = LocalBroker::to_real_session_id(id);
+                    broker.opened_sessions_list[real_id as usize].take();
+                    // TODO: change the logic here once it will be possible to have several users connected at the same time
+                    Broker::close_all_connections().await;
+                }
+                None => {}
+            }
+        }
+    }
+
+    Ok(())
+}
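+// Example (sketch): the usual teardown order, as exercised by the tests below:
+// disconnect from the broker, stop the session, then close the wallet.
+//
+//     user_disconnect(&user_id).await?;
+//     session_stop(&user_id).await?;
+//     wallet_close(&wallet_name).await?;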
+/// Stops a headless session, which can be resumed later on. All the local data is flushed from RAM.
+#[doc(hidden)]
+pub async fn session_headless_stop(session_id: u64, force_close: bool) -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    match broker.config {
+        LocalBrokerConfig::Headless(_) => {
+            let session = broker
+                .headless_sessions
+                .remove(&session_id)
+                .ok_or(NgError::SessionNotFound)?;
+
+            let _ = broker
+                .opened_sessions
+                .remove(&session.user_id)
+                .ok_or(NgError::SessionNotFound)?;
+
+            let request = AppSessionStop::V0(AppSessionStopV0 {
+                session_id,
+                force_close,
+            });
+
+            broker.send_request_headless::<_, EmptyAppResponse>(request).await?;
+        }
+        _ => {
+            return Err(NgError::LocalBrokerIsNotHeadless);
+        }
+    }
+
+    Ok(())
+}
+
+/// Disconnects the user from the Server Broker(s), but keeps all the local data opened and ready.
+pub async fn user_disconnect(user_id: &UserId) -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+    broker.err_if_headless()?;
+
+    broker.disconnect_session(user_id).await
+}
+
+/// Closes a wallet, which means that the pazzle will have to be entered again if the user wants to use it.
+pub async fn wallet_close(wallet_name: &String) -> Result<(), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    broker.err_if_headless()?;
+
+    match broker.opened_wallets.remove(wallet_name) {
+        Some(mut opened_wallet) => {
+            for user in opened_wallet.wallet.site_names() {
+                let key: PubKey = (user.as_str()).try_into().unwrap();
+                match broker.opened_sessions.remove(&key) {
+                    Some(id) => {
+                        let session = broker.get_local_session_id_for_mut(id)?;
+                        broker.opened_sessions_list[session].take();
+                    }
+                    None => {}
+                }
+            }
+            opened_wallet.wallet.zeroize();
+        }
+        None => return Err(NgError::WalletNotFound),
+    }
+
+    Broker::close_all_connections().await;
+
+    Ok(())
+}
+
+/// (not implemented yet)
+pub async fn wallet_remove(_wallet_name: String) -> Result<(), NgError> {
+    let _broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    _broker.err_if_headless()?;
+
+    todo!();
+    // should close the wallet, then remove all the saved sessions and remove the wallet
+}
+
+/// Fetches a document's content and subscribes to its updates.
+pub async fn doc_fetch_repo_subscribe(
+    session_id: u64,
+    repo_o: String,
+) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
+    let mut app_req = AppRequest::doc_fetch_repo_subscribe(repo_o)?;
+    app_req.set_session_id(session_id);
+    app_request_stream(app_req).await
+}
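+// Example (sketch): subscribing to a document and draining its update stream.
+// `session_id` comes from session_start(); `nuri` is assumed to be the Nuri
+// string of a known document, and StreamExt::next() is assumed to be in scope.
+//
+//     let (mut receiver, cancel) = doc_fetch_repo_subscribe(session_id, nuri).await?;
+//     while let Some(app_response) = receiver.next().await {
+//         // handle the initial state, then each subsequent live update
+//     }
+//     cancel();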
+// /// fetches the private store home page and subscribes to its updates.
+// pub async fn doc_fetch_private(
+//     session_id: u64,
+// ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
+//     let mut broker = match LOCAL_BROKER.get() {
+//         None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+//         Some(Ok(broker)) => broker.write().await,
+//     };
+//     let session_id = broker.get_local_session_id_for_mut(session_id)?;
+//     let session = broker.opened_sessions_list[session_id]
+//         .as_mut()
+//         .ok_or(NgError::SessionNotFound)?;
+
+//     session.verifier.doc_fetch_private(true).await
+// }
+
+pub async fn doc_sparql_update(
+    session_id: u64,
+    sparql: String,
+    nuri: Option<String>,
+) -> Result<Vec<String>, String> {
+    let (nuri, base) = if let Some(n) = nuri {
+        let nuri = NuriV0::new_from(&n).map_err(|e| e.to_string())?;
+        let b = nuri.repo();
+        (nuri, Some(b))
+    } else {
+        (NuriV0::new_private_store_target(), None)
+    };
+
+    let request = AppRequest::V0(AppRequestV0 {
+        command: AppRequestCommandV0::new_write_query(),
+        nuri,
+        payload: Some(AppRequestPayload::new_sparql_query(sparql, base)),
+        session_id,
+    });
+
+    let res = app_request(request)
+        .await
+        .map_err(|e: NgError| e.to_string())?;
+    match res {
+        AppResponse::V0(AppResponseV0::Error(e)) => Err(e),
+        AppResponse::V0(AppResponseV0::Commits(commits)) => Ok(commits),
+        _ => Err(NgError::InvalidResponse.to_string()),
+    }
+}
+
+pub async fn doc_create(
+    session_id: u64,
+    crdt: String,
+    class_name: String,
+    destination: String,
+    store_type: Option<String>,
+    store_repo: Option<String>,
+) -> Result<String, NgError> {
+    let store_repo = if store_type.is_none() || store_repo.is_none() {
+        None
+    } else {
+        Some(StoreRepo::from_type_and_repo(&store_type.unwrap(), &store_repo.unwrap())?)
+    };
+
+    doc_create_with_store_repo(session_id, crdt, class_name, destination, store_repo).await
+}
+
+pub async fn doc_create_with_store_repo(
+    session_id: u64,
+    crdt: String,
+    class_name: String,
+    destination: String,
+    store_repo: Option<StoreRepo>,
+) -> Result<String, NgError> {
+    let class = BranchCrdt::from(crdt, class_name)?;
+
+    let nuri = if store_repo.is_none() {
+        NuriV0::new_private_store_target()
+    } else {
+        NuriV0::from_store_repo(&store_repo.unwrap())
+    };
+
+    let destination = DocCreateDestination::from(destination)?;
+
+    let request = AppRequest::V0(AppRequestV0 {
+        session_id,
+        command: AppRequestCommandV0::new_create(),
+        nuri,
+        payload: Some(AppRequestPayload::V0(AppRequestPayloadV0::Create(
+            DocCreate {
+                class,
+                destination,
+            },
+        ))),
+    });
+
+    let response = app_request(request).await?;
+
+    if let AppResponse::V0(AppResponseV0::Nuri(nuri)) = response {
+        Ok(nuri)
+    } else {
+        Err(NgError::InvalidResponse)
+    }
+}
+
+/// Processes any type of app request that returns a single value.
+pub async fn app_request(request: AppRequest) -> Result<AppResponse, NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+    match &broker.config {
+        LocalBrokerConfig::Headless(_) => broker.send_request_headless(request).await,
+        _ => {
+            let (real_session_id, is_remote) =
+                broker.get_real_session_id_for_mut(request.session_id())?;
+
+            if is_remote {
+                let session = broker.remote_sessions_list[real_session_id]
+                    .as_ref()
+                    .ok_or(NgError::SessionNotFound)?;
+                session.send_request(request).await
+            } else {
+                let session = broker.opened_sessions_list[real_session_id]
+                    .as_mut()
+                    .ok_or(NgError::SessionNotFound)?;
+                session.verifier.app_request(request).await
+            }
+        }
+    }
+}
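+// Example (sketch): a SPARQL update against the private store (passing None as
+// the Nuri targets the private store, as implemented above). The returned Vec
+// is assumed to contain the commits produced by the update, as strings.
+//
+//     let commits = doc_sparql_update(
+//         session_id,
+//         "INSERT DATA { ... }".to_string(),
+//         None,
+//     ).await?;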
+/// Processes any type of app request that returns a stream of values.
+pub async fn app_request_stream(
+    request: AppRequest,
+) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+    match &broker.config {
+        LocalBrokerConfig::Headless(_) => broker.send_request_stream_headless(request).await,
+        _ => {
+            let (real_session_id, is_remote) =
+                broker.get_real_session_id_for_mut(request.session_id())?;
+
+            if is_remote {
+                let session = broker.remote_sessions_list[real_session_id]
+                    .as_ref()
+                    .ok_or(NgError::SessionNotFound)?;
+                session.send_request_stream(request).await
+            } else {
+                let session = broker.opened_sessions_list[real_session_id]
+                    .as_mut()
+                    .ok_or(NgError::SessionNotFound)?;
+                session.verifier.app_request_stream(request).await
+            }
+        }
+    }
+}
+
+/// Retrieves the ID of one of the 3 stores of the personal Site (3P: public, protected, or private).
+pub async fn personal_site_store(
+    session_id: u64,
+    store_type: SiteStoreType,
+) -> Result<PubKey, NgError> {
+    let broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.read().await,
+    };
+    let session = broker.get_session(session_id)?;
+
+    broker.get_site_store_of_session(session, store_type)
+}
+
+#[doc(hidden)]
+pub async fn take_disconnections_receiver() -> Result<Receiver<String>, NgError> {
+    let mut broker = match LOCAL_BROKER.get() {
+        None | Some(Err(_)) => return Err(NgError::LocalBrokerNotInitialized),
+        Some(Ok(broker)) => broker.write().await,
+    };
+
+    broker
+        .disconnections_receiver
+        .take()
+        .ok_or(NgError::BrokerError)
+}
+
+async fn do_admin_call<
+    A: Into<ProtocolMessage> + Into<AdminRequestContentV0> + std::fmt::Debug + Sync + Send + 'static,
+>(
+    server_peer_id: DirectPeerId,
+    admin_user_key: PrivKey,
+    bind_address: BindAddress,
+    cmd: A,
+) -> Result<AdminResponseContentV0, ProtocolError> {
+    let (peer_privk, peer_pubk) = generate_keypair();
+    BROKER
+        .write()
+        .await
+        .admin(
+            Box::new(ConnectionWebSocket {}),
+            peer_privk,
+            peer_pubk,
+            server_peer_id,
+            admin_user_key.to_pub(),
+            admin_user_key.clone(),
+            bind_address,
+            cmd,
+        )
+        .await
+}
+
+async fn do_ext_call<
+    A: Into<ProtocolMessage> + Into<ExtRequestContentV0> + std::fmt::Debug + Sync + Send + 'static,
+    B: TryFrom<ProtocolMessage, Error = ProtocolError> + std::fmt::Debug + Sync + Send + 'static,
+>(
+    broker_server: &BrokerServerV0,
+    cmd: A,
+) -> Result<B, NgError> {
+    let (peer_privk, peer_pubk) = generate_keypair();
+    Broker::ext(
+        Box::new(ConnectionWebSocket {}),
+        peer_privk,
+        peer_pubk,
+        broker_server.peer_id,
+        broker_server.get_ws_url(&None).await.unwrap().0, // for now we are only connecting to the NextGraph SaaS cloud (nextgraph.eu), so this is safe.
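+        // note: the ext call is made with the freshly generated, anonymous peer
+        // keypair from generate_keypair() above, so it carries no user identity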
+ cmd, + ) + .await +} + +#[doc(hidden)] +pub async fn admin_create_user( + server_peer_id: DirectPeerId, + admin_user_key: PrivKey, + server_addr: BindAddress, +) -> Result { + let res = do_admin_call( + server_peer_id, + admin_user_key, + server_addr, + CreateUser::V0(CreateUserV0 {}), + ) + .await?; + + match res { + AdminResponseContentV0::UserId(id) => Ok(id), + _ => Err(ProtocolError::InvalidValue), + } +} + +#[allow(unused_imports)] +#[cfg(test)] +mod test { + use super::*; + use super::{ + init_local_broker, session_start, session_stop, user_connect, user_disconnect, + wallet_close, wallet_create_v0, wallet_get_file, wallet_import, + wallet_open_with_pazzle_words, wallet_read_file, wallet_was_opened, LocalBrokerConfig, + SessionConfig, + }; + use ng_net::types::BootstrapContentV0; + use ng_wallet::{display_mnemonic, emojis::display_pazzle}; + use std::env::current_dir; + use std::fs::read_to_string; + use std::fs::{create_dir_all, File}; + use std::io::BufReader; + use std::io::Read; + use std::io::Write; + use std::path::Path; + + #[async_std::test] + async fn output_image_for_test_white() { + let f = File::open("examples/wallet-security-image-white.png") + .expect("open of examples/wallet-security-image-white.png"); + let mut reader = BufReader::new(f); + let mut security_img = Vec::new(); + // Read file into vector. + reader + .read_to_end(&mut security_img) + .expect("read of valid_security_image.jpg"); + + log_info!("{:?}", security_img); + } + + #[async_std::test] + async fn gen_wallet_for_test() { + if Path::new("tests/wallet.ngw").exists() { + println!("test files already generated. skipping"); + return; + } + + // loading an image file from disk + let f = File::open("examples/wallet-security-image-demo.png") + .expect("open of examples/wallet-security-image-demo.png"); + let mut reader = BufReader::new(f); + let mut security_img = Vec::new(); + // Read file into vector. + reader + .read_to_end(&mut security_img) + .expect("read of valid_security_image.jpg"); + + init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await; + + //let peer_id = "X0nh-gOTGKSx0yL0LYJviOWRNacyqIzjQW_LKdK6opU"; + let peer_id_of_server_broker = PubKey::nil(); + + let wallet_result = wallet_create_v0(CreateWalletV0 { + security_img, + security_txt: "know yourself".to_string(), + pin: [1, 2, 1, 2], + pazzle_length: 9, + send_bootstrap: false, + send_wallet: false, + result_with_wallet_file: true, + local_save: false, + // we default to localhost:14400. 
this is just for the sake of an example + core_bootstrap: BootstrapContentV0::new_localhost(peer_id_of_server_broker), + core_registration: None, + additional_bootstrap: None, + pdf: false, + device_name: "test".to_string(), + }) + .await + .expect("wallet_create_v0"); + + let pazzle = display_pazzle(&wallet_result.pazzle); + let mut pazzle_words = vec![]; + println!("Your pazzle is: {:?}", wallet_result.pazzle); + for emoji in pazzle { + println!(" {}:\t{}", emoji.0, emoji.1); + pazzle_words.push(emoji.1.to_string()); + } + + create_dir_all("tests").expect("create test file"); + + let mut file = File::create("tests/wallet.pazzle").expect("open for write pazzle file"); + file.write_all(pazzle_words.join(" ").as_bytes()) + .expect("write of pazzle"); + + println!("Your mnemonic is:"); + + let mut mnemonic_words = vec![]; + display_mnemonic(&wallet_result.mnemonic) + .iter() + .for_each(|word| { + mnemonic_words.push(word.clone()); + print!("{} ", word.as_str()); + }); + println!(""); + let mut file = File::create("tests/wallet.mnemonic").expect("open for write mnemonic file"); + file.write_all(mnemonic_words.join(" ").as_bytes()) + .expect("write of mnemonic"); + + let opened_wallet = + wallet_open_with_pazzle_words(&wallet_result.wallet, &pazzle_words, [1, 2, 1, 2]) + .expect("opening of wallet"); + + let mut file = File::create("tests/wallet.ngw").expect("open for write wallet file"); + let ser_wallet = + to_vec(&NgFile::V0(NgFileV0::Wallet(wallet_result.wallet.clone()))).unwrap(); + file.write_all(&ser_wallet).expect("write of wallet file"); + + let mut file = + File::create("tests/opened_wallet.ngw").expect("open for write opened_wallet file"); + let ser = serde_bare::to_vec(&opened_wallet).expect("serialization of opened wallet"); + + file.write_all(&ser).expect("write of opened_wallet file"); + } + + #[async_std::test] + async fn gen_opened_wallet_file_for_test() { + let wallet_file = read("tests/wallet.ngw").expect("read wallet file"); + + init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await; + + let wallet = wallet_read_file(wallet_file) + .await + .expect("wallet_read_file"); + + let pazzle_string = read_to_string("tests/wallet.pazzle").expect("read pazzle file"); + let pazzle_words = pazzle_string.split(' ').map(|s| s.to_string()).collect(); + + let opened_wallet = wallet_open_with_pazzle_words(&wallet, &pazzle_words, [1, 2, 1, 2]) + .expect("opening of wallet"); + + let mut file = + File::create("tests/opened_wallet.ngw").expect("open for write opened_wallet file"); + let ser = serde_bare::to_vec(&opened_wallet).expect("serialization of opened wallet"); + + file.write_all(&ser).expect("write of opened_wallet file"); + } + + #[ignore] + #[async_std::test] + async fn gen_opened_wallet_file_for_test_with_pazzle_array() { + let wallet_file = read("tests/wallet.ngw").expect("read wallet file"); + + init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await; + + let wallet = wallet_read_file(wallet_file) + .await + .expect("wallet_read_file"); + + let pazzle = vec![8, 21, 135, 65, 123, 52, 0, 35, 108]; + let opened_wallet = wallet_open_with_pazzle(&wallet, pazzle, [1, 2, 1, 2]); + + assert_eq!(opened_wallet.unwrap_err(), NgError::EncryptionError); + + // let mut file = + // File::create("tests/opened_wallet.ngw").expect("open for write opened_wallet file"); + // let ser = serde_bare::to_vec(&opened_wallet).expect("serialization of opened wallet"); + + // file.write_all(&ser).expect("write of opened_wallet file"); + } + + #[ignore] + #[async_std::test] + async fn 
import_session_for_test_to_disk() { + let wallet_file = read("tests/wallet.ngw").expect("read wallet file"); + let opened_wallet_file = read("tests/opened_wallet.ngw").expect("read opened_wallet file"); + let opened_wallet: SensitiveWallet = + serde_bare::from_slice(&opened_wallet_file).expect("deserialization of opened_wallet"); + + let mut current_path = current_dir().expect("cur_dir"); + current_path.push(".."); + current_path.push(".ng"); + current_path.push("example"); + create_dir_all(current_path.clone()).expect("create_dir"); + + // initialize the local_broker with config to save to disk in a folder called `.ng/example` in the current directory + init_local_broker(Box::new(move || { + LocalBrokerConfig::BasePath(current_path.clone()) + })) + .await; + + let wallet = wallet_read_file(wallet_file) + .await + .expect("wallet_read_file"); + + let wallet_name = wallet.name(); + let user_id = opened_wallet.personal_identity(); + + let _client = wallet_import(wallet, opened_wallet, false) + .await + .expect("wallet_import"); + + let _session = session_start(SessionConfig::new_in_memory(&user_id, &wallet_name)) + .await + .expect(""); + } + + async fn import_session_for_test() -> (UserId, String) { + let wallet_file = read("tests/wallet.ngw").expect("read wallet file"); + let opened_wallet_file = read("tests/opened_wallet.ngw").expect("read opened_wallet file"); + let opened_wallet: SensitiveWallet = + serde_bare::from_slice(&opened_wallet_file).expect("deserialization of opened_wallet"); + + init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await; + + let wallet = wallet_read_file(wallet_file) + .await + .expect("wallet_read_file"); + + let wallet_name = wallet.name(); + let user_id = opened_wallet.personal_identity(); + + let _client = wallet_import(wallet, opened_wallet, true) + .await + .expect("wallet_import"); + + let _session = session_start(SessionConfig::new_in_memory(&user_id, &wallet_name)) + .await + .expect(""); + + (user_id, wallet_name) + } + + #[async_std::test] + async fn import_wallet() { + let (user_id, wallet_name) = import_session_for_test().await; + + let status = user_connect(&user_id).await.expect("user_connect"); + + let error_reason = status[0].3.as_ref().unwrap(); + assert!(error_reason == "NoiseHandshakeFailed" || error_reason == "ConnectionError"); + + // Then we should disconnect + user_disconnect(&user_id).await.expect("user_disconnect"); + + // stop the session + session_stop(&user_id).await.expect("session_stop"); + + // closes the wallet + wallet_close(&wallet_name).await.expect("wallet_close"); + } + + #[async_std::test] + async fn recovery_pdf() { + let wallet_file = read("tests/wallet.ngw").expect("read wallet file"); + + init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await; + + let wallet = wallet_read_file(wallet_file) + .await + .expect("wallet_read_file"); + + let pazzle_string = read_to_string("tests/wallet.pazzle").expect("read pazzle file"); + let pazzle_words = pazzle_string.split(' ').map(|s| s.to_string()).collect(); + + let mnemonic_string = read_to_string("tests/wallet.mnemonic").expect("read mnemonic file"); + let mnemonic_words = mnemonic_string.split(' ').map(|s| s.to_string()).collect(); + + let pin: [u8; 4] = [1, 2, 1, 2]; + + let pazzle = encode_pazzle(&pazzle_words).expect("encode_pazzle"); + let mnemonic = encode_mnemonic(&mnemonic_words).expect("encode_mnemonic"); + + let wallet_recovery = wallet_to_wallet_recovery(&wallet, pazzle, mnemonic, pin); + let pdf_buffer = wallet_recovery_pdf(wallet_recovery, 600) + 
.await + .expect("wallet_recovery_pdf"); + let mut file = + File::create("tests/recovery.pdf").expect("open for write recovery.pdf file"); + file.write_all(&pdf_buffer).expect("write of recovery.pdf"); + } +} diff --git a/nextgraph/src/local_broker_dev_env.rs b/nextgraph/src/local_broker_dev_env.rs new file mode 100644 index 0000000..1ddaafe --- /dev/null +++ b/nextgraph/src/local_broker_dev_env.rs @@ -0,0 +1 @@ +pub const PEER_ID: &str = "FtdzuDYGewfXWdoPuXIPb0wnd0SAg1WoA2B14S7jW3MA"; diff --git a/ng-broker/Cargo.toml b/ng-broker/Cargo.toml new file mode 100644 index 0000000..2f64ca1 --- /dev/null +++ b/ng-broker/Cargo.toml @@ -0,0 +1,45 @@ +[package] +name = "ng-broker" +version = "0.1.2" +description = "Broker library of NextGraph, a decentralized, secure and local-first web 3.0 ecosystem based on Semantic Web and CRDTs" +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true +homepage.workspace = true +keywords = ["crdt","e2ee","local-first","p2p","pubsub"] +documentation.workspace = true +rust-version.workspace = true + +[badges] +maintenance = { status = "actively-developed" } + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_bare = "0.5.0" +serde_json = "1.0.96" +futures = "0.3.24" +once_cell = "1.17.1" +either = { version = "1.8.1", features=["serde"] } +async-std = { version = "1.12.0", features = ["attributes"] } +async-trait = "0.1.64" +rust-embed= { version = "6.7.0", features=["include-exclude"] } +urlencoding = "2.1.3" +blake3 = "1.3.1" +ng-async-tungstenite = { version = "0.22.2", git = "https://git.nextgraph.org/NextGraph/async-tungstenite.git", branch = "nextgraph", features = ["async-std-runtime"] } +ng-repo = { path = "../ng-repo", version = "0.1.2" } +ng-net = { path = "../ng-net", version = "0.1.2" } +ng-client-ws = { path = "../ng-client-ws", version = "0.1.2" } +ng-verifier = { path = "../ng-verifier", version = "0.1.2" } +ng-storage-rocksdb = { path = "../ng-storage-rocksdb", version = "0.1.2" } + +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.3.3" +features = ["wasm_js"] + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +getrandom = "0.3.3" +netdev = "0.26" + +[dev-dependencies] +tempfile = "3" \ No newline at end of file diff --git a/ng-broker/README.md b/ng-broker/README.md new file mode 100644 index 0000000..e5d52f1 --- /dev/null +++ b/ng-broker/README.md @@ -0,0 +1,56 @@ +# ng-broker + +![MSRV][rustc-image] +[![Apache 2.0 Licensed][license-image]][license-link] +[![MIT Licensed][license-image2]][license-link2] + +Broker library of NextGraph + +This repository is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and in order to join the dev team, please visit the link above and create an account (you can do so with a github account). The [github repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues. + +## NextGraph + +> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs. 
+> +> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers. +> +> More info here [https://nextgraph.org](https://nextgraph.org) + +## Support + +Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org) + +And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org) + +## How to use the library + +NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/). + +This library is used internally by [ngd](../ngd/README.md) the daemon/server of NextGraph. It could potentially be used too by external projects that want to embed the NextGraph daemon in their own program. + +## License + +Licensed under either of + +- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + at your option. + +`SPDX-License-Identifier: Apache-2.0 OR MIT` + +### Contributions license + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you shall be dual licensed as below, without any +additional terms or conditions. + +--- + +NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively. 
+ + +[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg +[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg +[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2 +[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg +[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT diff --git a/ng-broker/build.rs b/ng-broker/build.rs new file mode 100644 index 0000000..bd0dfc1 --- /dev/null +++ b/ng-broker/build.rs @@ -0,0 +1,5 @@ +fn main() { + if std::env::var("DOCS_RS").is_ok() { + println!("cargo:rustc-cfg=docsrs"); + } +} diff --git a/ng-broker/src/actors/mod.rs b/ng-broker/src/actors/mod.rs new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/ng-broker/src/actors/mod.rs @@ -0,0 +1 @@ + diff --git a/ng-broker/src/interfaces.rs b/ng-broker/src/interfaces.rs new file mode 100644 index 0000000..8eff152 --- /dev/null +++ b/ng-broker/src/interfaces.rs @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. + */ +use ng_net::types::{Interface, InterfaceType}; +use ng_net::utils::{is_ipv4_private, is_public_ipv4}; + +#[cfg(not(target_arch = "wasm32"))] +pub fn print_ipv4(ip: &netdev::ip::Ipv4Net) -> String { + format!("{}/{}", ip.addr, ip.prefix_len) +} +#[cfg(not(target_arch = "wasm32"))] +pub fn print_ipv6(ip: &netdev::ip::Ipv6Net) -> String { + format!("{}/{}", ip.addr, ip.prefix_len) +} + +pub fn find_first(list: &Vec, iftype: InterfaceType) -> Option { + for inf in list { + if inf.if_type == iftype { + return Some(inf.clone()); + } + } + None +} + +pub fn find_first_or_name( + list: &Vec, + iftype: InterfaceType, + name: &String, +) -> Option { + for inf in list { + if (name == "default" || *name == inf.name) && inf.if_type == iftype { + return Some(inf.clone()); + } + } + None +} + +pub fn find_name(list: &Vec, name: &String) -> Option { + for inf in list { + if *name == inf.name { + return Some(inf.clone()); + } + } + None +} + +#[cfg(not(target_arch = "wasm32"))] +pub fn get_interface() -> Vec { + let mut res: Vec = vec![]; + let interfaces = netdev::get_interfaces(); + for interface in interfaces { + if interface.ipv4.len() > 0 { + let first_v4 = interface.ipv4[0].addr; + let if_type = if first_v4.is_loopback() { + InterfaceType::Loopback + } else if is_ipv4_private(&first_v4) { + InterfaceType::Private + } else if is_public_ipv4(&first_v4) { + InterfaceType::Public + } else { + continue; + }; + let interf = Interface { + if_type, + name: interface.name, + mac_addr: interface.mac_addr, + ipv4: interface.ipv4, + ipv6: interface.ipv6, + }; + res.push(interf); + } + } + res +} + +pub fn print_interfaces() { + let interfaces = get_interface(); + for interface in interfaces { + println!("{} \t{:?}", interface.name, interface.if_type); + + println!( + "\tIPv4: {}", + interface + .ipv4 + .iter() + .map(|ip| print_ipv4(ip)) + .collect::>() + .join(" ") + ); + println!( + "\tIPv6: {}", + interface + .ipv6 + .iter() + .map(|ip| print_ipv6(ip)) + .collect::>() + .join(" ") + ); + if let Some(mac_addr) = interface.mac_addr { + println!("\tMAC: {}", mac_addr); + } + } +} diff --git a/ng-broker/src/lib.rs b/ng-broker/src/lib.rs new file mode 100644 index 
0000000..aa33148 --- /dev/null +++ b/ng-broker/src/lib.rs @@ -0,0 +1,15 @@ +pub mod types; + +pub mod utils; + +pub mod interfaces; + +pub mod server_broker; + +pub mod server_storage; + +pub mod rocksdb_server_storage; + +pub mod server_ws; + +pub mod actors; diff --git a/ng-broker/src/public/favicon.ico b/ng-broker/src/public/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..dc45c904c8f3dd00635f868a6c80e02b648f8dc4 GIT binary patch literal 36098 zcmeHQ3y>T|8D5N56w#=Nilv@PSx};dAeN;T+y#^D^lUC*@I+B0h60P=Wp^HTCV}L( zP_zgsFeE%;kjvdX_Oj-3MDi#LE7{-^DwLoY2}HO=pe%?G6Y?ULi{C%9n{4)WXXn+s zb2^*a+ka-d|Nj23-`#&t5{c9BZ`QXH`1|q1o+XLI2NH?IdHBx4cO$-ZKmNoF#0%OZU01~Q z`2N&Xgm0~#|J5_cFB6SJ-mhNs&VE(64!r-;l*E75d*F?vardNgs#EvnICDK zMdKr-T(@|!E+>C(D(s~YIj>qrbeZilB(`Qi7N?I7cKD%O-JP0j%4{v#l*7DvucChx zY5x}aaTPxcqp8`LJN;9JD)t$&*bLq)>&2)p3%}8% z$xB18Q5OyZ)3W>zr%VdZ;^=4^=w#<;valg)ypO>nv8N0{tF+;uFi1#Ve`*igK9^6C3RPD<3AkYt3~} zy%@ID>Z|@I+WX49!@OaC6xLiZSEzkZ*D`m+l1duw9aWL;zrD2 z|A@3+!1q|#Y1Y-37BBT)TEmTDD&jqLarztYAF?@S3p7<|t2f@$uP% zZLCD)5$m~s2+_VHXj6YNxM1elVOc`^3Wl0mXeh#-5OJ2(0kJPEtX}nCUqcSdCh)!; zWmWWw!EN}wH086j))^MwGCf<}VhA(bmRW6Y2ZnI!w!*gdr>A@@OnlXJZR}(d&5W=T zxJS$QD=W(mdz&@;FL773w0Pgk+4HP`*x7MYPOZ()={SQo-g5mpC3Rbfc`(xGeND?3 zL1x>6mmy&@wa?|;G)l%A=AFUu+w~=B!@TiDgs2 z<{S=!8DsmqOL&|8tE;K+z?MM$F*tq}zDir*ORy8%_}bStrKI473+xSD5rliheJ*PI ztSFo71KJdL9D;Qe$qoMq_oEekc7wo;@g~K8d;+ zIo~cXdDIVIdM6J06xQiQUwK$6mrlBaKGWc9UY_1~lxZyN#DgZYg?{Y<{{v3CgFZ8` z59JqIbm9qfop>-+@h5(t=uDU6}hE@QbhM@f3D2m!9@TCmt~N|CxWB z;&Lb5VLZcnO5n8w|MrdTQ(fqydG|2(CDs{f_T73Mz`o@!bXFb9(K<*y{Xw4t8;|{! zs`hx|L1C|<{e2SSgLU9=sV0lEo?&0tx@VkAV|urCgTp1B(k+`OU?2THx9Ru|2ito6 z;tL}l#)msGmwOoP1eH_ONDCcWSMnlSGxKXxc|5F}{n#%~(l-Rc)#DDg7M zg*@yk=|ms8z%NdW|JRrF@cKphxZW?maN>y%@Uo5cYO44>-#8Wa4DEUF#J9>GgL+*{ zK8AJqf1&NzhJNrN_y8<7rPLzW>D`x05gy@bAI>05@y$Qh*mH8~%RKRsZuD{bDHs)Y z1pUG;d;fzC;cV+YXrFJPwIQ7Q5@r0r{e zfiRn@Fh49Twe(nf)0yNox|F<{=45#(`Mz3Vo$dqg)>=F}+&_??)5g`80BQflA^mz| zwStq)QF;a&`~29+`z$5e$yI)E72_PEcQ!AHMZ zj7zc3@+aPy$=@*qf9^MNmic<#ScB*X_nyz}W1Z#2AZfadi}HKF*RoL=p3Y|HaDKyW znvD(zy!TdOc5!`_d<=PG?dxP0V!nRJ1)e(X5PKDQc_nYWe)MCm+~vnUdOw<<^t{GF zU88eWj{LfLj(Pij%vTk!>3a>Y&lA2tzL{?0<Q|o1f3lqJ0mnkG=uF*nhj(Ya%^*CwsR?`$p0KF0O`w*FDTP z7Kg-tshqux`fzhSUl=#`SiZvWa|7ouRA4*;pR8Nl=l~X{J&&WvQ}`kaoAF(V_2F;0 z(HUkK@}))l{r3v{N?Zo`6?S8cbtme>rC#xBoyiLfRk#C~oX$s$Vjj7y-u@r0nd6+> z_o3@vguf4X$p7_5Is<|F5sCNt!Ix4*IXM`99$4+vG3@C*gTBT<8`7i8so8i(QU|?P z2>$g#KV(`C_UaXk-DDoNM@45Z&NpE$i+5jO4J~{m{R0nT+qpF|I+@sD9p-wciZy3Sa=Vq*1Ud7uV zHLAQ<;?gqh7v)rj^-7-SSH$Ph?k#PE4{D`-#(Cr3W9cs~m5DHKpIUh~`TpnUH` zT>3vQZm09^VV!{(FI(;slkl_$ayD^BVj`eHB&8?hVOzFLEt4%HCLX3_ZQE?wwg+%+IEV`waBSP0 zWjikP*zggz<)l0bR2A<-^{KLT!^#;e-H}8KC2z|u$0?Ua=mtKvY-ih8+0K8uU*yCJ zUy`%-{Z`ULGI(yWWQtZSBUzHO1oQ}gEEyq(?eGO6I@?nqD3Du56D@^S$iuL-RK{$c z5Fei+XO?VpL#du9KbCBBZ)G5XpiisyAZy7zmRzu8OSF~RWPl-j?6mAmY_Y=tdP$9i V&Q2E35TO_x7ujN}#W)T5`7a874fp^6 literal 0 HcmV?d00001 diff --git a/ng-broker/src/rocksdb_server_storage.rs b/ng-broker/src/rocksdb_server_storage.rs new file mode 100644 index 0000000..efe5395 --- /dev/null +++ b/ng-broker/src/rocksdb_server_storage.rs @@ -0,0 +1,776 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
+ */ + +use std::collections::{HashMap, HashSet}; +use std::fs::{read, File, OpenOptions}; +use std::io::Write; +use std::path::PathBuf; +use std::sync::{Arc, Mutex}; + +use ng_repo::block_storage::{BlockStorage, HashMapBlockStorage}; +use ng_repo::errors::{ProtocolError, ServerError, StorageError}; +use ng_repo::log::*; +use ng_repo::object::Object; +use ng_repo::store::Store; +use ng_repo::types::*; + +use ng_net::types::*; + +use ng_storage_rocksdb::block_storage::RocksDbBlockStorage; +use ng_storage_rocksdb::kcv_storage::RocksDbKCVStorage; + +use crate::server_broker::*; +use crate::server_storage::admin::{account::Account, invitation::Invitation, wallet::Wallet}; +use crate::server_storage::core::*; + +pub(crate) struct RocksDbServerStorage { + #[allow(dead_code)] + wallet_storage: RocksDbKCVStorage, + accounts_storage: RocksDbKCVStorage, + //peers_storage: RocksDbKCVStorage, + peers_last_seq_path: PathBuf, + peers_last_seq: Mutex>, + block_storage: Arc>, + core_storage: RocksDbKCVStorage, +} + +impl RocksDbServerStorage { + pub(crate) fn open( + path: &mut PathBuf, + master_key: SymKey, + admin_invite: Option, + ) -> Result { + // create/open the WALLET + let mut wallet_path = path.clone(); + wallet_path.push("wallet"); + std::fs::create_dir_all(wallet_path.clone()).unwrap(); + log_debug!("opening wallet DB"); + //TODO redo the whole key passing mechanism in RKV so it uses zeroize all the way + let wallet_storage = RocksDbKCVStorage::open(&wallet_path, master_key.slice().clone())?; + let wallet = Wallet::open(&wallet_storage); + + // create/open the ACCOUNTS storage + let mut accounts_path = path.clone(); + let accounts_key; + accounts_path.push("accounts"); + + if admin_invite.is_some() && !accounts_path.exists() && !wallet.exists_accounts_key() { + accounts_key = wallet.create_accounts_key()?; + std::fs::create_dir_all(accounts_path.clone()).unwrap(); + let accounts_storage = + RocksDbKCVStorage::open(&accounts_path, accounts_key.slice().clone())?; + let symkey = SymKey::random(); + let invite_code = InvitationCode::Setup(symkey.clone()); + let _ = Invitation::create( + &invite_code, + 0, + &Some("admin user automatically invited at first startup".to_string()), + &accounts_storage, + )?; + let invitation = ng_net::types::Invitation::V0(InvitationV0 { + code: Some(symkey), + name: Some("your Broker, as admin".into()), + url: None, + bootstrap: admin_invite.unwrap(), + }); + for link in invitation.get_urls() { + println!("The admin invitation link is: {}", link) + } + } else { + if admin_invite.is_some() { + log_warn!("Cannot add an admin invitation anymore, as it is not the first start of the server."); + } + accounts_key = wallet.get_or_create_accounts_key()?; + } + log_debug!("opening accounts DB"); + std::fs::create_dir_all(accounts_path.clone()).unwrap(); + //TODO redo the whole key passing mechanism in RKV so it uses zeroize all the way + let accounts_storage = + RocksDbKCVStorage::open(&accounts_path, accounts_key.slice().clone())?; + + // create/open the PEERS storage + // log_debug!("opening peers DB"); + // let peers_key = wallet.get_or_create_peers_key()?; + // let mut peers_path = path.clone(); + // peers_path.push("peers"); + // std::fs::create_dir_all(peers_path.clone()).unwrap(); + // //TODO redo the whole key passing mechanism in RKV so it uses zeroize all the way + // let peers_storage = RocksDbKCVStorage::open(&peers_path, peers_key.slice().clone())?; + + // creates the path for peers_last_seq + let mut peers_last_seq_path = path.clone(); + 
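+        // each peer's last sequence number is persisted in a separate file inside this
+        // directory; next_seq_for_peer() below rejects any sequence number that does not
+        // strictly increase, so replayed or out-of-order events are refused.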
peers_last_seq_path.push("peers_last_seq"); + std::fs::create_dir_all(peers_last_seq_path.clone()).unwrap(); + + // opening block_storage + let mut blocks_path = path.clone(); + blocks_path.push("blocks"); + std::fs::create_dir_all(blocks_path.clone()).unwrap(); + let blocks_key = wallet.get_or_create_blocks_key()?; + let block_storage = Arc::new(std::sync::RwLock::new(RocksDbBlockStorage::open( + &blocks_path, + *blocks_key.slice(), + )?)); + + // create/open the PEERS storage + log_debug!("opening core DB"); + let core_key = wallet.get_or_create_core_key()?; + let mut core_path = path.clone(); + core_path.push("core"); + std::fs::create_dir_all(core_path.clone()).unwrap(); + //TODO redo the whole key passing mechanism in RKV so it uses zeroize all the way + #[cfg(debug_assertions)] + let mut core_storage = RocksDbKCVStorage::open(&core_path, core_key.slice().clone())?; + #[cfg(not(debug_assertions))] + let core_storage = RocksDbKCVStorage::open(&core_path, core_key.slice().clone())?; + + // check unicity of class prefixes, by storage + #[cfg(debug_assertions)] + { + // TODO: refactor the wallet and accounts with Class and the new OKM mechanism, then include them uncomment the following lines + //log_debug!("CHECKING..."); + // wallet_storage.add_class(&Wallet::CLASS); + // wallet_storage.check_prefixes(); + // accounts_storage.add_class(&Account::CLASS); + // accounts_storage.add_class(&Invitation::CLASS); + // accounts_storage.check_prefixes(); + core_storage.add_class(&TopicStorage::CLASS); + core_storage.add_class(&RepoHashStorage::CLASS); + core_storage.add_class(&OverlayStorage::CLASS); + core_storage.add_class(&CommitStorage::CLASS); + core_storage.add_class(&InboxStorage::CLASS); + core_storage.add_class(&AccountStorage::CLASS); + core_storage.check_prefixes(); + } + + Ok(RocksDbServerStorage { + wallet_storage, + accounts_storage, + //peers_storage, + peers_last_seq_path, + peers_last_seq: Mutex::new(HashMap::new()), + block_storage, + core_storage, + }) + } + + pub(crate) fn get_block_storage( + &self, + ) -> Arc> { + Arc::clone(&self.block_storage) + } + + pub(crate) fn next_seq_for_peer(&self, peer: &PeerId, seq: u64) -> Result<(), ServerError> { + // for now we don't use the hashmap. + // TODO: let's see if the lock is even needed + let _peers_last_seq = self.peers_last_seq.lock(); + + let mut filename = self.peers_last_seq_path.clone(); + filename.push(format!("{}", peer)); + let file = read(filename.clone()); + let mut file_save = match file { + Ok(ser) => { + let last: u64 = serde_bare::from_slice(&ser).map_err(|_| ServerError::FileError)?; + if last >= seq { + return Err(ServerError::SequenceMismatch); + } + OpenOptions::new() + .write(true) + .open(filename) + .map_err(|_| ServerError::FileError)? + } + Err(_) => File::create(filename).map_err(|_| ServerError::FileError)?, + }; + let ser = serde_bare::to_vec(&seq).unwrap(); + file_save + .write_all(&ser) + .map_err(|_| ServerError::FileError)?; + + file_save.sync_data().map_err(|_| ServerError::FileError)?; + Ok(()) + } + + pub(crate) fn get_user(&self, user_id: PubKey) -> Result { + log_debug!("get_user {user_id}"); + Ok(Account::open(&user_id, &self.accounts_storage)?.is_admin()?) + } + pub(crate) fn has_no_user(&self) -> Result { + Ok(!Account::has_users(&self.accounts_storage)?) 
+ } + /// returns the credentials, storage_master_key, and peer_priv_key + pub(crate) fn get_user_credentials( + &self, + user_id: &PubKey, + ) -> Result { + log_debug!("get_user_credentials {user_id}"); + let acc = Account::open(user_id, &self.accounts_storage)?; + Ok(acc.get_credentials()?) + } + pub(crate) fn add_user(&self, user_id: PubKey, is_admin: bool) -> Result<(), ProtocolError> { + log_debug!("add_user {user_id} is admin {is_admin}"); + Account::create(&user_id, is_admin, &self.accounts_storage)?; + Ok(()) + } + pub(crate) fn add_user_credentials( + &self, + user_id: &PubKey, + credentials: &Credentials, + ) -> Result<(), ProtocolError> { + log_debug!("add_user_credentials {user_id}"); + let acc = Account::create(&user_id, false, &self.accounts_storage)?; + acc.add_credentials(credentials)?; + //let storage_key = SymKey::random(); + //let peer_priv_key = PrivKey::random_ed(); + //acc.add_user_keys(&storage_key, &peer_priv_key)?; + Ok(()) + } + pub(crate) fn del_user(&self, user_id: PubKey) -> Result<(), ProtocolError> { + log_debug!("del_user {user_id}"); + let acc = Account::open(&user_id, &self.accounts_storage)?; + acc.del()?; + // TODO: stop the verifier, if any + Ok(()) + } + pub(crate) fn list_users(&self, admins: bool) -> Result, ProtocolError> { + log_debug!("list_users that are admin == {admins}"); + Ok(Account::get_all_users(admins, &self.accounts_storage)?) + } + pub(crate) fn list_invitations( + &self, + admin: bool, + unique: bool, + multi: bool, + ) -> Result)>, ProtocolError> { + log_debug!("list_invitations admin={admin} unique={unique} multi={multi}"); + Ok(Invitation::get_all_invitations( + &self.accounts_storage, + admin, + unique, + multi, + )?) + } + pub(crate) fn add_invitation( + &self, + invite_code: &InvitationCode, + expiry: u32, + memo: &Option, + ) -> Result<(), ProtocolError> { + log_debug!("add_invitation {invite_code} expiry {expiry}"); + Invitation::create(invite_code, expiry, memo, &self.accounts_storage)?; + Ok(()) + } + pub(crate) fn get_invitation_type(&self, invite_code: [u8; 32]) -> Result { + log_debug!("get_invitation_type {:?}", invite_code); + let inv = Invitation::open(&invite_code, &self.accounts_storage)?; + inv.get_type() + } + pub(crate) fn remove_invitation(&self, invite_code: [u8; 32]) -> Result<(), ProtocolError> { + log_debug!("remove_invitation {:?}", invite_code); + let inv = Invitation::open(&invite_code, &self.accounts_storage)?; + inv.del()?; + Ok(()) + } + pub(crate) fn get_inboxes_for_readers(&self, user: &UserId) -> Result,StorageError> { + AccountStorage::load_inboxes(user, &self.core_storage) + } + + pub(crate) fn take_first_msg_from_inbox( + &self, + inbox: &PubKey, + overlay: &OverlayId + ) -> Result { + InboxStorage::take_first_msg(inbox, overlay, &self.core_storage) + } + + pub(crate) fn get_readers_for_inbox( + &self, + inbox: &PubKey, + overlay: &OverlayId + ) -> Result, StorageError> { + InboxStorage::load_readers(inbox, overlay, &self.core_storage) + } + + pub(crate) fn register_inbox_reader(&self, user_id: UserId, inbox_id: PubKey, overlay: OverlayId) -> Result<(), StorageError> { + InboxStorage::register_reader(&inbox_id, &overlay, &user_id, &self.core_storage)?; + AccountStorage::add_inbox(&user_id, inbox_id, overlay, &self.core_storage) + } + + pub(crate) fn enqueue_inbox_msg( + &self, + msg: &InboxMsg + ) -> Result<(), StorageError> { + InboxStorage::open(&msg.body.to_inbox, &msg.body.to_overlay, &self.core_storage)?.enqueue_msg(msg) + } + + pub(crate) fn get_repo_pin_status( + &self, + overlay: 
&OverlayId, + repo: &RepoHash, + user: &UserId, + ) -> Result { + let repo_info = RepoHashStorage::load_for_user(user, repo, overlay, &self.core_storage)?; + let mut topics = vec![]; + for topic in repo_info.topics { + if let Ok(mut model) = TopicStorage::open(&topic, overlay, &self.core_storage) { + match TopicStorage::USERS.get(&mut model, user) { + Err(_) => {} + Ok(publisher) => topics.push(TopicSubRes::new_from_heads( + TopicStorage::get_all_heads(&mut model)?, + publisher, + topic, + TopicStorage::COMMITS_NBR.get(&mut model)?, + )), + } + } + } + if topics.is_empty() { + return Err(ServerError::False); + } + + Ok(RepoPinStatus::V0(RepoPinStatusV0 { + hash: repo.clone(), + expose_outer: repo_info.expose_outer.len() > 0, + topics, + })) + } + + pub(crate) fn pin_repo_write( + &self, + overlay_access: &OverlayAccess, + repo: &RepoHash, + user_id: &UserId, + ro_topics: &Vec, + rw_topics: &Vec, + overlay_root_topic: &Option, + expose_outer: bool, + ) -> Result { + assert!(!overlay_access.is_read_only()); + + // TODO: all the below DB operations should be done inside a single transaction. need refactor of Object-KCV-Mapping to take an optional transaction. + + let inner_overlay = overlay_access.overlay_id_for_client_protocol_purpose(); + let mut inner_overlay_storage = + match OverlayStorage::open(inner_overlay, &self.core_storage) { + Err(StorageError::NotFound) => { + // inner overlay doesn't exist, we need to create it + OverlayStorage::create( + inner_overlay, + &(*overlay_access).into(), + expose_outer, + &self.core_storage, + )? + } + Err(e) => return Err(e.into()), + Ok(os) => os, + }; + // the overlay we use to store all the info is: the outer for a RW access, and the inner for a WO access. + let overlay = match inner_overlay_storage.overlay_type() { + OverlayType::Outer(_) | OverlayType::OuterOnly => { + panic!("shouldnt happen: we are pinning to an inner overlay. 
why is it outer type?") + } + OverlayType::Inner(outer) => outer, + OverlayType::InnerOnly => inner_overlay, + } + .clone(); + + // if an overlay_root_topic was provided, we update it in the DB: + // this information is stored on the inner overlay record, contrary to the rest of the info below, that is stored on the outer (except for WO) + if overlay_root_topic.is_some() { + OverlayStorage::TOPIC.set( + &mut inner_overlay_storage, + overlay_root_topic.as_ref().unwrap(), + )?; + } + + // we now do the pinning : + + let mut result: RepoOpened = vec![]; + let mut repo_info = RepoHashStorage::open(repo, &overlay, &self.core_storage)?; + + if expose_outer { + RepoHashStorage::EXPOSE_OUTER.add(&mut repo_info, user_id)?; + } + + let mut rw_topics_added: HashMap = + HashMap::with_capacity(rw_topics.len()); + for topic in rw_topics { + let topic_id = topic.topic_id(); + let mut topic_storage = + TopicStorage::create(topic_id, &overlay, repo, &self.core_storage, true)?; + + RepoHashStorage::TOPICS.add_lazy(&mut repo_info, topic_id)?; + + let _ = TopicStorage::ADVERT.get_or_set(&mut topic_storage, topic)?; + + TopicStorage::USERS.add_or_change(&mut topic_storage, user_id, &true)?; + + rw_topics_added.insert( + *topic_id, + TopicSubRes::new_from_heads( + TopicStorage::get_all_heads(&mut topic_storage)?, + true, + *topic_id, + TopicStorage::COMMITS_NBR.get(&mut topic_storage)?, + ), + ); + } + + for topic in ro_topics { + if rw_topics_added.contains_key(topic) { + continue; + //we do not want to add again as read_only, a topic that was just opened as RW (publisher) + } + + let mut topic_storage = + TopicStorage::create(topic, &overlay, repo, &self.core_storage, true)?; + + RepoHashStorage::TOPICS.add_lazy(&mut repo_info, topic)?; + + let _ = TopicStorage::USERS.get_or_add(&mut topic_storage, user_id, &false)?; + + result.push(TopicSubRes::new_from_heads( + TopicStorage::get_all_heads(&mut topic_storage)?, + false, + *topic, + TopicStorage::COMMITS_NBR.get(&mut topic_storage)?, + )); + } + result.extend(rw_topics_added.into_values()); + Ok(result) + } + + pub(crate) fn pin_repo_read( + &self, + overlay: &OverlayId, + repo: &RepoHash, + user_id: &UserId, + ro_topics: &Vec, + ) -> Result { + let mut overlay_storage = OverlayStorage::open(overlay, &self.core_storage)?; + match overlay_storage.overlay_type() { + OverlayType::Outer(_) => { + let mut result: RepoOpened = vec![]; + let repo_info = RepoHashStorage::load_topics(repo, overlay, &self.core_storage)?; + for topic in ro_topics { + if repo_info.topics.contains(topic) { + let mut topic_storage = + TopicStorage::open(topic, overlay, &self.core_storage)?; + let _ = + TopicStorage::USERS.get_or_add(&mut topic_storage, user_id, &false)?; + + result.push(TopicSubRes::new_from_heads( + TopicStorage::get_all_heads(&mut topic_storage)?, + false, + *topic, + TopicStorage::COMMITS_NBR.get(&mut topic_storage)?, + )); + } + } + Ok(result) + } + _ => return Err(ServerError::NotFound), + } + } + + fn check_overlay(&self, overlay: &OverlayId) -> Result { + let mut overlay_storage = + OverlayStorage::open(overlay, &self.core_storage).map_err(|e| match e { + StorageError::NotFound => ServerError::OverlayNotFound, + _ => e.into(), + })?; + Ok(match overlay_storage.overlay_type() { + OverlayType::OuterOnly => { + if overlay.is_outer() { + *overlay + } else { + return Err(ServerError::OverlayMismatch); + } + } + OverlayType::Outer(_) => { + if overlay.is_outer() { + *overlay + } else { + return Err(ServerError::OverlayMismatch); + } + } + OverlayType::Inner(outer) => { + 
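+                // an inner overlay record keeps a reference to its outer counterpart:
+                // client requests addressed to the inner overlay are resolved to the
+                // outer one, which is where the shared repo data is actually stored.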
if outer.is_outer() { + *outer + } else { + return Err(ServerError::OverlayMismatch); + } + } + OverlayType::InnerOnly => { + if overlay.is_inner() { + *overlay + } else { + return Err(ServerError::OverlayMismatch); + } + } + }) + } + + pub(crate) fn topic_sub( + &self, + overlay: &OverlayId, + repo: &RepoHash, + topic: &TopicId, + user_id: &UserId, + publisher: Option<&PublisherAdvert>, + ) -> Result { + let overlay = self.check_overlay(overlay)?; + // now we check that the repo was previously pinned. + // if it was opened but not pinned, then this should be dealt with in the ServerBroker, in memory, not here) + + let is_publisher = publisher.is_some(); + // (we already checked that the advert is valid) + + let mut topic_storage = + TopicStorage::create(topic, &overlay, repo, &self.core_storage, true)?; + let _ = TopicStorage::USERS.get_or_add(&mut topic_storage, user_id, &is_publisher)?; + + if is_publisher { + let _ = TopicStorage::ADVERT.get_or_set(&mut topic_storage, publisher.unwrap())?; + } + + let mut repo_info = RepoHashStorage::open(repo, &overlay, &self.core_storage)?; + RepoHashStorage::TOPICS.add_lazy(&mut repo_info, topic)?; + + Ok(TopicSubRes::new_from_heads( + TopicStorage::get_all_heads(&mut topic_storage)?, + is_publisher, + *topic, + TopicStorage::COMMITS_NBR.get(&mut topic_storage)?, + )) + } + + pub(crate) fn get_commit( + &self, + overlay: &OverlayId, + id: &ObjectId, + ) -> Result, ServerError> { + let overlay = self.check_overlay(overlay)?; + + let mut commit_storage = CommitStorage::open(id, &overlay, &self.core_storage)?; + + let event_info = commit_storage + .event() + .as_ref() + .left() + .ok_or(ServerError::NotFound)?; // TODO: for now we do not deal with events that have been removed from storage + + let mut blocks = Vec::with_capacity(event_info.blocks.len()); + for block_id in event_info.blocks.iter() { + let block = self.block_storage.read().unwrap().get(&overlay, block_id)?; + blocks.push(block); + } + + Ok(blocks) + } + + pub(crate) fn has_block( + &self, + overlay: &OverlayId, + block_id: &BlockId, + ) -> Result<(), ServerError> { + let overlay = self.check_overlay(overlay)?; + let overlay = &overlay; + + Ok(self.block_storage.read().unwrap().has(overlay, block_id)?) + } + + pub(crate) fn get_block( + &self, + overlay: &OverlayId, + block_id: &BlockId, + ) -> Result { + let overlay = self.check_overlay(overlay)?; + let overlay = &overlay; + + Ok(self.block_storage.read().unwrap().get(overlay, block_id)?) + } + + pub(crate) fn add_block( + &self, + overlay: &OverlayId, + block: Block, + ) -> Result { + if overlay.is_outer() { + // we don't publish events on the outer overlay! + return Err(ServerError::OverlayMismatch); + } + let overlay = self.check_overlay(overlay)?; + let overlay = &overlay; + + let mut overlay_storage = OverlayStorage::new(overlay, &self.core_storage); + Ok(self.add_block_(overlay, &mut overlay_storage, block)?) + } + + fn add_block_( + &self, + overlay_id: &OverlayId, + overlay_storage: &mut OverlayStorage, + block: Block, + ) -> Result { + let block_id = self + .block_storage + .write() + .unwrap() + .put(overlay_id, &block, true)?; + OverlayStorage::BLOCKS.increment(overlay_storage, &block_id)?; + Ok(block_id) + } + + pub(crate) fn save_event( + &self, + overlay: &OverlayId, + event: Event, + user_id: &UserId, + ) -> Result { + if overlay.is_outer() { + // we don't publish events on the outer overlay! 
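+            // (same rule as in add_block() above: the outer overlay is only read from;
+            // all writes have to come through the inner overlay)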
+ return Err(ServerError::OverlayMismatch); + } + let overlay = self.check_overlay(overlay)?; + let overlay = &overlay; + + // TODO: check that the sequence number is correct + + let topic = *event.topic_id(); + // check that the topic exists and that this user has pinned it as publisher + let mut topic_storage = + TopicStorage::open(&topic, overlay, &self.core_storage).map_err(|e| match e { + StorageError::NotFound => ServerError::TopicNotFound, + _ => e.into(), + })?; + let is_publisher = TopicStorage::USERS + .get(&mut topic_storage, user_id) + .map_err(|e| match e { + StorageError::NotFound => ServerError::AccessDenied, + _ => e.into(), + })?; + if !is_publisher { + return Err(ServerError::AccessDenied); + } + //log_info!("SAVED EVENT in overlay {:?} : {}", overlay, event); + // remove the blocks from inside the event, and save the "dehydrated" event and each block separately. + match event { + Event::V0(mut v0) => { + let mut overlay_storage = OverlayStorage::new(overlay, &self.core_storage); + let mut extracted_blocks_ids = Vec::with_capacity(v0.content.blocks.len()); + let first_block_copy = v0.content.blocks[0].clone(); + let temp_mini_block_storage = HashMapBlockStorage::new(); + for block in v0.content.blocks { + let _ = temp_mini_block_storage.put(overlay, &block, false)?; + extracted_blocks_ids.push(self.add_block_( + overlay, + &mut overlay_storage, + block, + )?); + } + + // creating a temporary store to access the blocks + let temp_store = Store::new_from_overlay_id( + overlay, + Arc::new(std::sync::RwLock::new(temp_mini_block_storage)), + ); + let commit_id = extracted_blocks_ids[0]; + let header = Object::load_header(&first_block_copy, &temp_store).map_err(|_e| { + //log_err!("err : {:?}", e); + ServerError::InvalidHeader + })?; + + v0.content.blocks = vec![]; + let event_info = EventInfo { + event: Event::V0(v0), + blocks: extracted_blocks_ids, + }; + + CommitStorage::create( + &commit_id, + overlay, + event_info, + &header, + true, + &self.core_storage, + )?; + + let past = if header.is_some() { + HashSet::from_iter(header.unwrap().acks_and_nacks()) + } else { + HashSet::new() + }; + let head = HashSet::from([commit_id]); + //TODO: current_heads in TopicInfo in ServerBroker is not updated (but it isn't used so far) + TopicStorage::HEADS.remove_from_set_and_add(&mut topic_storage, past, head)?; + + TopicStorage::COMMITS_NBR.increment(&mut topic_storage)?; + } + } + + Ok(topic) + } + + pub(crate) fn topic_sync_req( + &self, + overlay: &OverlayId, + topic: &TopicId, + known_heads: &Vec, + target_heads: &Vec, + known_commits: &Option, + ) -> Result, ServerError> { + let overlay = self.check_overlay(overlay)?; + // quick solution for now using the Branch::sync_req. 
TODO: use the saved references (ACKS,DEPS) in the server_storage, to have much quicker responses + + let target_heads = if target_heads.is_empty() { + // get the current_heads + let mut topic_storage = TopicStorage::new(topic, &overlay, &self.core_storage); + let heads = TopicStorage::get_all_heads(&mut topic_storage)?; + if heads.is_empty() { + return Err(ServerError::TopicNotFound); + } + Box::new(heads.into_iter()) as Box> + } else { + Box::new(target_heads.iter().cloned()) as Box> + }; + + let store = Store::new_from_overlay_id(&overlay, Arc::clone(&self.block_storage)); + + let commits = Branch::sync_req(target_heads, known_heads, known_commits, &store) + .map_err(|_| ServerError::MalformedBranch)?; + + let mut result = Vec::with_capacity(commits.len()); + + for commit_id in commits { + let commit_storage = CommitStorage::open(&commit_id, &overlay, &self.core_storage)?; + let mut event_info = commit_storage + .take_event() + .left() + .ok_or(ServerError::NotFound)?; // TODO: for now we do not deal with events that have been removed from storage + + // rehydrate the event : + let mut blocks = Vec::with_capacity(event_info.blocks.len()); + for block_id in event_info.blocks { + let block = store.get(&block_id)?; + blocks.push(block); + } + + match event_info.event { + Event::V0(ref mut v0) => { + v0.content.blocks = blocks; + } + } + result.push(TopicSyncRes::V0(TopicSyncResV0::Event(event_info.event))); + } + + Ok(result) + } +} diff --git a/ng-broker/src/server_broker.rs b/ng-broker/src/server_broker.rs new file mode 100644 index 0000000..b7a1771 --- /dev/null +++ b/ng-broker/src/server_broker.rs @@ -0,0 +1,905 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. + */ + +//! 
Implementation of the Server Broker
+
+use std::{
+    collections::{BTreeMap, HashMap, HashSet},
+    path::PathBuf,
+    sync::Arc,
+    time::{Duration, SystemTime},
+};
+
+use async_std::sync::{Mutex, RwLock};
+use either::Either;
+use futures::{channel::mpsc, SinkExt, StreamExt};
+use serde::{Deserialize, Serialize};
+
+use ng_repo::{
+    block_storage::BlockStorage,
+    errors::{NgError, ProtocolError, ServerError},
+    log::*,
+    types::*,
+};
+
+use ng_net::{
+    app_protocol::*,
+    broker::{ClientPeerId, BROKER},
+    connection::NoiseFSM,
+    server_broker::IServerBroker,
+    types::*,
+    utils::{spawn_and_log_error, Receiver, ResultSend, Sender},
+};
+
+use ng_verifier::{
+    site::SiteV0,
+    types::{BrokerPeerId, VerifierConfig, VerifierConfigType},
+    verifier::Verifier,
+};
+
+use crate::rocksdb_server_storage::RocksDbServerStorage;
+
+pub struct TopicInfo {
+    pub repo: RepoHash,
+
+    pub publisher_advert: Option<PublisherAdvert>,
+
+    pub current_heads: HashSet<ObjectId>,
+
+    pub root_commit: Option<ObjectId>,
+
+    /// indicates which users have opened the topic (the boolean tells whether they opened it as a publisher)
+    pub users: HashMap<UserId, bool>,
+}
+
+pub struct RepoInfo {
+    /// set of users that requested the repo to be exposed on the outer overlay
+    /// (only possible if the user is a publisher)
+    pub expose_outer: HashSet<UserId>,
+
+    /// set of topics of this repo
+    pub topics: HashSet<TopicId>,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct EventInfo {
+    pub event: Event,
+    pub blocks: Vec<BlockId>,
+}
+
+pub struct CommitInfo {
+    pub event: Either<EventInfo, TopicId>,
+    pub home_pinned: bool,
+    pub acks: HashSet<ObjectId>,
+    pub deps: HashSet<ObjectId>,
+    pub futures: HashSet<ObjectId>,
+    pub files: HashSet<ObjectId>,
+}
+
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum OverlayType {
+    OuterOnly,
+    Outer(OverlayId), // the ID of the inner overlay corresponding to this outer.
+    Inner(OverlayId), // the ID of the outer overlay corresponding to the inner
+    InnerOnly,
+}
+
+impl OverlayType {
+    pub fn is_inner_get_outer(&self) -> Option<&OverlayId> {
+        match self {
+            Self::Inner(outer) => Some(outer),
+            _ => None,
+        }
+    }
+    pub fn is_outer_to_inner(&self) -> bool {
+        match self {
+            Self::Outer(_) => true,
+            _ => false,
+        }
+    }
+    pub fn is_outer_only(&self) -> bool {
+        match self {
+            Self::OuterOnly => true,
+            _ => false,
+        }
+    }
+}
+
+impl From<OverlayAccess> for OverlayType {
+    fn from(oa: OverlayAccess) -> OverlayType {
+        match oa {
+            OverlayAccess::ReadOnly(_) => {
+                panic!("cannot create an OverlayType from a ReadOnly OverlayAccess")
+            }
+            OverlayAccess::ReadWrite((_inner, outer)) => OverlayType::Inner(outer),
+            OverlayAccess::WriteOnly(_inner) => OverlayType::InnerOnly,
+        }
+    }
+}
+
+#[allow(dead_code)]
+pub(crate) struct OverlayInfo {
+    pub overlay_type: OverlayType,
+    pub overlay_topic: Option<TopicId>,
+    pub topics: HashMap<TopicId, TopicInfo>,
+    pub repos: HashMap<RepoHash, RepoInfo>,
+}
+
+struct DetachableVerifier {
+    detach: bool,
+    attached: Option<(DirectPeerId, u64)>,
+    verifier: Verifier,
+}
+
+pub struct ServerBrokerState {
+    #[allow(dead_code)]
+    overlays: HashMap<OverlayId, OverlayInfo>,
+    #[allow(dead_code)]
+    inner_overlays: HashMap<OverlayId, Option<OverlayId>>,
+
+    local_subscriptions: HashMap<(OverlayId, TopicId), HashMap<PubKey, Option<u64>>>,
+
+    verifiers: HashMap<UserId, Arc<RwLock<DetachableVerifier>>>,
+    remote_apps: HashMap<(DirectPeerId, u64), UserId>,
+
+    wallet_rendezvous: HashMap<SymKey, Sender<ExportedWallet>>,
+    wallet_exports: HashMap<SymKey, ExportedWallet>,
+    wallet_exports_timestamp: BTreeMap<SystemTime, SymKey>,
+}
+
+pub struct ServerBroker {
+    storage: RocksDbServerStorage,
+
+    state: RwLock<ServerBrokerState>,
+
+    path_users: PathBuf,
+
+    master_key: Option<SymKey>,
+}
+
+impl ServerBroker {
+    pub(crate) fn new(
+        storage: RocksDbServerStorage,
+        path_users: PathBuf,
+        master_key: Option<SymKey>,
+    ) -> Self {
+        ServerBroker {
+            storage,
+            state: RwLock::new(ServerBrokerState {
+                overlays: HashMap::new(),
+                inner_overlays: HashMap::new(),
+                local_subscriptions: HashMap::new(),
+                verifiers: HashMap::new(),
+                remote_apps: HashMap::new(),
+                wallet_rendezvous: HashMap::new(),
+                wallet_exports: HashMap::new(),
+                wallet_exports_timestamp: BTreeMap::new(),
+            }),
+            master_key,
+            path_users,
+        }
+    }
+
+    pub fn load(&mut self) -> Result<(), NgError> {
+        Ok(())
+    }
+
+    async fn add_subscription(
+        &self,
+        overlay: OverlayId,
+        topic: TopicId,
+        peer: ClientPeerId,
+    ) -> Result<(), ServerError> {
+        let mut lock = self.state.write().await;
+        let peers_map = lock
+            .local_subscriptions
+            .entry((overlay, topic))
+            .or_insert(HashMap::with_capacity(1));
+
+        log_debug!(
+            "SUBSCRIBING PEER {:?} TOPIC {} OVERLAY {}",
+            peer,
+            topic,
+            overlay
+        );
+
+        if peers_map.insert(*peer.key(), peer.value()).is_some() {
+            //return Err(ServerError::PeerAlreadySubscribed);
+        }
+        Ok(())
+    }
+
+    #[allow(dead_code)]
+    async fn remove_subscription(
+        &self,
+        overlay: &OverlayId,
+        topic: &TopicId,
+        peer: &PubKey,
+    ) -> Result<(), ServerError> {
+        let mut lock = self.state.write().await;
+        let peers_set = lock
+            .local_subscriptions
+            .get_mut(&(*overlay, *topic))
+            .ok_or(ServerError::SubscriptionNotFound)?;
+
+        if peers_set.remove(peer).is_none() {
+            return Err(ServerError::SubscriptionNotFound);
+        }
+        Ok(())
+    }
+
+    async fn new_verifier_from_credentials(
+        &self,
+        user_id: &UserId,
+        credentials: Credentials,
+        local_peer_id: DirectPeerId,
+        partial_credentials: bool,
+    ) -> Result<Verifier, NgError> {
+        let block_storage = self.get_block_storage();
+        let mut path = self.get_path_users();
+        let user_hash: Digest = user_id.into();
+        path.push(user_hash.to_string());
+        std::fs::create_dir_all(path.clone()).unwrap();
+        let peer_id_dh =
credentials.peer_priv_key.to_pub().to_dh_from_ed(); + let mut verifier = Verifier::new( + VerifierConfig { + config_type: VerifierConfigType::RocksDb(path), + user_master_key: *credentials.user_master_key.slice(), + peer_priv_key: credentials.peer_priv_key, + user_priv_key: credentials.user_key, + private_store_read_cap: if partial_credentials { + None + } else { + Some(credentials.read_cap) + }, + private_store_id: if partial_credentials { + None + } else { + Some(credentials.private_store) + }, + protected_store_id: if partial_credentials { + None + } else { + Some(credentials.protected_store) + }, + public_store_id: if partial_credentials { + None + } else { + Some(credentials.public_store) + }, + locator: Locator::empty(), + }, + block_storage, + )?; + if !partial_credentials { + verifier.connected_broker = BrokerPeerId::Local(local_peer_id); + // start the local transport connection + let mut lock = BROKER.write().await; + lock.connect_local(peer_id_dh, *user_id)?; + } + Ok(verifier) + } +} + +use async_std::future::timeout; + +async fn wait_for_wallet( + mut internal_receiver: Receiver, + mut sender: Sender>, + rendezvous: SymKey, +) -> ResultSend<()> { + let wallet_future = internal_receiver.next(); + let _ = sender + .send( + match timeout(Duration::from_millis(5 * 60_000), wallet_future).await { + Err(_) => Err(ServerError::ExportWalletTimeOut), + Ok(Some(w)) => Ok(w), + Ok(None) => Err(ServerError::BrokerError), + }, + ) + .await; + BROKER + .read() + .await + .get_server_broker()? + .read() + .await + .remove_rendezvous(&rendezvous) + .await; + + Ok(()) +} + +//TODO: the purpose of this trait is to have a level of indirection so we can keep some data in memory (cache) and avoid hitting the storage backend (rocksdb) at every call. +//for now this cache is not implemented, but the structs are ready (see above), and it would just require to change slightly the implementation of the trait functions here below. 
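The rendezvous mechanics above reduce to a one-shot channel with a deadline: `wait_for_wallet` parks on the receiving end for at most five minutes, and `put_wallet_at_rendezvous` pushes the wallet into the matching sender if anyone is still waiting. A compact, runnable sketch of that pattern (toy `Wallet` type standing in for `ExportedWallet`; assumes the async-std and futures crates already used throughout this patch):

```rust
use std::time::Duration;

use async_std::future::timeout;
use futures::{channel::mpsc, SinkExt, StreamExt};

// Toy payload standing in for ExportedWallet.
#[derive(Debug, PartialEq)]
struct Wallet(Vec<u8>);

// One-shot rendezvous: the receiver side parks on the channel with a
// deadline (wait_for_wallet uses 5 minutes); the sender side delivers
// the wallet through the same channel if it arrives in time.
fn main() {
    async_std::task::block_on(async {
        let (mut tx, mut rx) = mpsc::unbounded::<Wallet>();

        // uploader arrives and drops the wallet at the rendezvous
        tx.send(Wallet(vec![1, 2, 3])).await.unwrap();

        // downloader waits, but gives up after the deadline
        let got = match timeout(Duration::from_millis(100), rx.next()).await {
            Err(_) => Err("timed out"),        // cf. ServerError::ExportWalletTimeOut
            Ok(None) => Err("channel closed"), // cf. ServerError::BrokerError
            Ok(Some(w)) => Ok(w),
        };
        assert_eq!(got.unwrap(), Wallet(vec![1, 2, 3]));
    });
}
```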
+#[async_trait::async_trait] +impl IServerBroker for ServerBroker { + fn take_master_key(&mut self) -> Result { + match self.master_key.take() { + None => Err(ProtocolError::AccessDenied), + Some(key) => Ok(key), + } + } + async fn remove_rendezvous(&self, rendezvous: &SymKey) { + let mut lock = self.state.write().await; + let _ = lock.wallet_rendezvous.remove(&rendezvous); + } + async fn wait_for_wallet_at_rendezvous( + &self, + rendezvous: SymKey, + ) -> Receiver> { + let (internal_sender, internal_receiver) = mpsc::unbounded(); + let (mut sender, receiver) = mpsc::unbounded(); + { + let mut state = self.state.write().await; + if state.wallet_rendezvous.contains_key(&rendezvous) { + let _ = sender.send(Err(ServerError::BrokerError)).await; + sender.close_channel(); + return receiver; + } else { + let _ = state + .wallet_rendezvous + .insert(rendezvous.clone(), internal_sender); + } + } + spawn_and_log_error(wait_for_wallet(internal_receiver, sender, rendezvous)); + receiver + } + + async fn get_wallet_export(&self, rendezvous: SymKey) -> Result { + let mut state = self.state.write().await; + match state.wallet_exports.remove(&rendezvous) { + Some(wallet) => Ok(wallet), + None => Err(ServerError::NotFound), + } + } + + async fn put_wallet_export(&self, rendezvous: SymKey, export: ExportedWallet) { + let mut state = self.state.write().await; + let _ = state.wallet_exports.insert(rendezvous.clone(), export); + let _ = state + .wallet_exports_timestamp + .insert(SystemTime::now(), rendezvous); + } + + // TODO: periodically (every 5 min) remove entries in wallet_exports_timestamp and wallet_exports + + async fn put_wallet_at_rendezvous( + &self, + rendezvous: SymKey, + export: ExportedWallet, + ) -> Result<(), ServerError> { + let mut state = self.state.write().await; + match state.wallet_rendezvous.remove(&rendezvous) { + None => Err(ServerError::NotFound), + Some(mut sender) => { + let _ = sender.send(export).await; + Ok(()) + } + } + } + + fn get_block_storage( + &self, + ) -> std::sync::Arc> { + self.storage.get_block_storage() + } + + fn get_path_users(&self) -> PathBuf { + self.path_users.clone() + } + + fn has_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result<(), ServerError> { + self.storage.has_block(overlay_id, block_id) + } + + fn get_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result { + self.storage.get_block(overlay_id, block_id) + } + + fn next_seq_for_peer(&self, peer: &PeerId, seq: u64) -> Result<(), ServerError> { + self.storage.next_seq_for_peer(peer, seq) + } + + fn put_block(&self, overlay_id: &OverlayId, block: Block) -> Result<(), ServerError> { + self.storage.add_block(overlay_id, block)?; + Ok(()) + } + async fn create_user(&self, broker_id: &DirectPeerId) -> Result { + let user_privkey = PrivKey::random_ed(); + let user_id = user_privkey.to_pub(); + let mut creds = Credentials::new_partial(&user_privkey); + let mut verifier = self + .new_verifier_from_credentials(&user_id, creds.clone(), *broker_id, true) + .await?; + let _site = SiteV0::create_personal(user_privkey.clone(), &mut verifier) + .await + .map_err(|e| { + log_err!("create_personal failed with {e}"); + ProtocolError::BrokerError + })?; + + // update credentials from config of verifier. 
+ verifier.complement_credentials(&mut creds); + //verifier.close().await; + // save credentials and user + self.add_user_credentials(&user_id, &creds)?; + + verifier.connected_broker = BrokerPeerId::Local(*broker_id); + + // start the local transport connection + { + let mut lock = BROKER.write().await; + let peer_id_dh = creds.peer_priv_key.to_pub().to_dh_from_ed(); + lock.connect_local(peer_id_dh, user_id)?; + } + let _res = verifier.send_outbox().await; + if _res.is_err() { + log_err!("{:?}", _res); + } + + Ok(user_id) + } + + fn get_user(&self, user_id: PubKey) -> Result { + self.storage.get_user(user_id) + } + fn has_no_user(&self) -> Result { + self.storage.has_no_user() + } + fn add_user_credentials( + &self, + user_id: &PubKey, + credentials: &Credentials, + ) -> Result<(), ProtocolError> { + self.storage.add_user_credentials(user_id, credentials) + } + fn get_user_credentials(&self, user_id: &PubKey) -> Result { + self.storage.get_user_credentials(user_id) + } + fn add_user(&self, user_id: PubKey, is_admin: bool) -> Result<(), ProtocolError> { + self.storage.add_user(user_id, is_admin) + } + + fn del_user(&self, user_id: PubKey) -> Result<(), ProtocolError> { + self.storage.del_user(user_id) + } + fn list_users(&self, admins: bool) -> Result, ProtocolError> { + self.storage.list_users(admins) + } + fn list_invitations( + &self, + admin: bool, + unique: bool, + multi: bool, + ) -> Result)>, ProtocolError> { + self.storage.list_invitations(admin, unique, multi) + } + fn add_invitation( + &self, + invite_code: &InvitationCode, + expiry: u32, + memo: &Option, + ) -> Result<(), ProtocolError> { + self.storage.add_invitation(invite_code, expiry, memo) + } + fn get_invitation_type(&self, invite_code: [u8; 32]) -> Result { + self.storage.get_invitation_type(invite_code) + } + fn remove_invitation(&self, invite_code: [u8; 32]) -> Result<(), ProtocolError> { + self.storage.remove_invitation(invite_code) + } + + async fn app_process_request( + &self, + req: AppRequest, + request_id: i64, + fsm: &Mutex, + ) -> Result<(), ServerError> { + // get the session + let remote = { + fsm.lock() + .await + .remote_peer() + .ok_or(ServerError::SessionNotFound)? + }; + + let session_id = (remote, req.session_id()); + let session_lock = { + let lock = self.state.read().await; + let user_id = lock + .remote_apps + .get(&session_id) + .ok_or(ServerError::SessionNotFound)? 
+ .to_owned(); + + Arc::clone( + lock.verifiers + .get(&user_id) + .ok_or(ServerError::SessionNotFound)?, + ) + }; + + let mut session = session_lock.write().await; + + if session.attached.is_none() || session.attached.unwrap() != session_id { + return Err(ServerError::SessionDetached); + } + + if req.command().is_stream() { + let res = session.verifier.app_request_stream(req).await; + + match res { + Err(e) => { + let error: ServerError = e.into(); + let error_res: AppMessage = error.into(); + fsm.lock() + .await + .send_in_reply_to(error_res.into(), request_id) + .await?; + } + Ok((mut receiver, _cancel)) => { + //TODO: implement cancel + let mut some_sent = false; + while let Some(response) = receiver.next().await { + some_sent = true; + let mut msg: AppMessage = response.into(); + msg.set_result(ServerError::PartialContent.into()); + fsm.lock() + .await + .send_in_reply_to(msg.into(), request_id) + .await?; + } + let end: Result = if some_sent { + Err(ServerError::EndOfStream) + } else { + Err(ServerError::EmptyStream) + }; + fsm.lock() + .await + .send_in_reply_to(end.into(), request_id) + .await?; + } + } + } else { + let res = session.verifier.app_request(req).await; + //log_debug!("GOT RES {:?}", res); + let app_message: AppMessage = match res { + Err(e) => { + log_debug!("AppRequest error NgError {e}"); + let server_err: ServerError = e.into(); + server_err.into() + } + Ok(app_res) => app_res.into(), + }; + fsm.lock() + .await + .send_in_reply_to(app_message.into(), request_id) + .await?; + } + + Ok(()) + } + + async fn app_session_start( + &self, + req: AppSessionStart, + remote: DirectPeerId, + local_peer_id: DirectPeerId, + ) -> Result { + let user_id = req.user_id(); + let id = (remote, req.session_id()); + let verifier_lock_res = { + let lock = self.state.read().await; + lock.verifiers.get(user_id).map(|l| Arc::clone(l)) + }; + let verifier_lock = match verifier_lock_res { + Some(session_lock) => { + let mut session = session_lock.write().await; + if let Some((peer_id, session_id)) = session.attached { + if peer_id != remote || session_id == req.session_id() { + // remove the previous session + let mut write_lock = self.state.write().await; + let _ = write_lock.remote_apps.remove(&(peer_id, session_id)); + } + } + session.attached = Some(id); + Arc::clone(&session_lock) + } + None => { + // we create and load a new verifier + + let credentials = if req.credentials().is_none() { + // headless do not have credentials. we fetch them from server_storage + self.storage.get_user_credentials(user_id)? 
+ } else { + req.credentials().clone().unwrap() + }; + + if *user_id != credentials.user_key.to_pub() { + log_debug!("InvalidRequest"); + return Err(ServerError::InvalidRequest); + } + + let verifier = self + .new_verifier_from_credentials(user_id, credentials, local_peer_id, false) + .await; + if verifier.is_err() { + log_err!( + "new_verifier failed with: {:?}", + verifier.as_ref().unwrap_err() + ); + } + let mut verifier = verifier?; + + // TODO : key.zeroize(); + + //load verifier from local_storage + let _ = verifier.load(); + //TODO: save opened_branches in user_storage, so that when we open again the verifier, the syncing can work + verifier.sync().await; + + let session = DetachableVerifier { + detach: true, + attached: Some(id), + verifier, + }; + let mut write_lock = self.state.write().await; + Arc::clone( + write_lock + .verifiers + .entry(*user_id) + .or_insert(Arc::new(RwLock::new(session))), + ) + } + }; + let verifier = &verifier_lock.read().await.verifier; + let res = AppSessionStartResponse::V0(AppSessionStartResponseV0 { + private_store: *verifier.private_store_id(), + protected_store: *verifier.protected_store_id(), + public_store: *verifier.public_store_id(), + }); + let mut write_lock = self.state.write().await; + if let Some(previous_user) = write_lock.remote_apps.insert(id, *user_id) { + // weird. another session was opened for this id. + // we have to stop it otherwise it would be dangling. + if previous_user != *user_id { + if let Some(previous_session_lock) = write_lock + .verifiers + .get(&previous_user) + .map(|v| Arc::clone(v)) + { + let mut previous_session = previous_session_lock.write().await; + if previous_session.detach { + previous_session.attached = None; + } else { + // we stop it and drop it + let verifier = write_lock.verifiers.remove(&previous_user); + verifier.unwrap().read().await.verifier.close().await; + } + } + } + } + Ok(res) + } + + async fn app_session_stop( + &self, + req: AppSessionStop, + remote_peer_id: &DirectPeerId, + ) -> Result { + let id = (*remote_peer_id, req.session_id()); + + let mut write_lock = self.state.write().await; + let must_be_destroyed = { + let session_user = write_lock + .remote_apps + .remove(&id) + .ok_or(ServerError::SessionNotFound)?; + let session = Arc::clone( + write_lock + .verifiers + .get(&session_user) + .ok_or(ServerError::SessionNotFound)?, + ); + let mut verifier_lock = session.write().await; + if !req.is_force_close() && verifier_lock.detach { + verifier_lock.attached = None; + None + } else { + Some(session_user) + } + }; + if let Some(user) = must_be_destroyed { + let verifier = write_lock.verifiers.remove(&user); + verifier.unwrap().read().await.verifier.close().await; + } + Ok(EmptyAppResponse(())) + } + + fn get_repo_pin_status( + &self, + overlay: &OverlayId, + repo: &RepoHash, + user: &UserId, + ) -> Result { + self.storage.get_repo_pin_status(overlay, repo, user) + } + + async fn pin_repo_write( + &self, + overlay: &OverlayAccess, + repo: &RepoHash, + user_id: &UserId, + ro_topics: &Vec, + rw_topics: &Vec, + overlay_root_topic: &Option, + expose_outer: bool, + peer: &ClientPeerId, + ) -> Result { + let res = self.storage.pin_repo_write( + overlay, + repo, + user_id, + ro_topics, + rw_topics, + overlay_root_topic, + expose_outer, + )?; + + for topic in res.iter() { + self.add_subscription( + *overlay.overlay_id_for_client_protocol_purpose(), + *topic.topic_id(), + peer.clone(), + ) + .await?; + } + Ok(res) + } + + async fn pin_repo_read( + &self, + overlay: &OverlayId, + repo: &RepoHash, + 
user_id: &UserId, + ro_topics: &Vec, + peer: &ClientPeerId, + ) -> Result { + let res = self + .storage + .pin_repo_read(overlay, repo, user_id, ro_topics)?; + + for topic in res.iter() { + // TODO: those outer subscriptions are not handled yet. they will not emit events. + self.add_subscription(*overlay, *topic.topic_id(), peer.clone()) + .await?; + } + Ok(res) + } + + async fn topic_sub( + &self, + overlay: &OverlayId, + repo: &RepoHash, + topic: &TopicId, + user: &UserId, + publisher: Option<&PublisherAdvert>, + peer: &ClientPeerId, + ) -> Result { + let res = self + .storage + .topic_sub(overlay, repo, topic, user, publisher)?; + self.add_subscription(*overlay, *topic, peer.clone()) + .await?; + Ok(res) + } + + fn get_commit(&self, overlay: &OverlayId, id: &ObjectId) -> Result, ServerError> { + self.storage.get_commit(overlay, id) + } + + async fn remove_all_subscriptions_of_client(&self, client: &ClientPeerId) { + let remote_peer = client.key(); + let mut lock = self.state.write().await; + for ((overlay, topic), peers) in lock.local_subscriptions.iter_mut() { + if peers.remove(remote_peer).is_some() { + log_debug!( + "subscription of peer {} to topic {} in overlay {} removed", + remote_peer, + topic, + overlay + ); + } + } + } + + async fn inbox_post(&self, post: InboxPost) -> Result<(), ServerError> { + + // TODO: deal with Inbox that is not local to the broker (use Core protocol to dispatch it) + + let users = self.storage.get_readers_for_inbox(&post.msg.body.to_inbox, &post.msg.body.to_overlay)?; + if users.is_empty() { + self.storage.enqueue_inbox_msg(&post.msg)?; + return Ok(()) + } + + let broker = BROKER.read().await; + let not_dispatched = broker + .dispatch_inbox_msg(&users, post.msg) + .await?; + if let Some(msg) = not_dispatched { + self.storage.enqueue_inbox_msg(&msg)?; + } + Ok(()) + } + + fn inbox_register(&self, user_id: UserId, registration: InboxRegister) -> Result<(), ServerError> { + + self.storage.register_inbox_reader(user_id, registration.inbox_id, registration.overlay)?; + Ok(()) + } + + async fn inbox_pop_for_user(&self, user: UserId ) -> Result { + let inboxes = self.storage.get_inboxes_for_readers(&user)?; + + for (inbox,overlay) in inboxes { + match self.storage.take_first_msg_from_inbox(&inbox, &overlay) { + Ok(msg) => { + return Ok(msg) + }, + Err(_) => {} + } + } + Err(ServerError::NotFound) + } + + async fn dispatch_event( + &self, + overlay: &OverlayId, + event: Event, + user_id: &UserId, + remote_peer: &PubKey, + ) -> Result, ServerError> { + let topic = self.storage.save_event(overlay, event, user_id)?; + + // log_debug!( + // "DISPATCH EVENT {} {} {:?}", + // overlay, + // topic, + // self.local_subscriptions + // ); + let lock = self.state.read().await; + let mut map = lock + .local_subscriptions + .get(&(*overlay, topic)) + .map(|map| map.iter().collect()) + .unwrap_or(HashMap::new()); + + map.remove(remote_peer); + Ok(map + .iter() + .map(|(k, v)| ClientPeerId::new_from(k, v)) + .collect()) + } + + fn topic_sync_req( + &self, + overlay: &OverlayId, + topic: &TopicId, + known_heads: &Vec, + target_heads: &Vec, + known_commits: &Option, + ) -> Result, ServerError> { + self.storage + .topic_sync_req(overlay, topic, known_heads, target_heads, known_commits) + } +} diff --git a/ng-broker/src/server_storage/admin/account.rs b/ng-broker/src/server_storage/admin/account.rs new file mode 100644 index 0000000..eb4c524 --- /dev/null +++ b/ng-broker/src/server_storage/admin/account.rs @@ -0,0 +1,355 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le 
Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! User account Storage (Object Key/Col/Value Mapping) + +use std::collections::hash_map::DefaultHasher; +use std::fmt; +use std::hash::Hash; +use std::hash::Hasher; +use std::time::SystemTime; + +use serde_bare::{from_slice, to_vec}; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::KCVStorage; +#[allow(unused_imports)] +use ng_repo::log::*; +use ng_repo::types::UserId; + +use ng_net::types::*; + +pub struct Account<'a> { + /// User ID + id: UserId, + storage: &'a dyn KCVStorage, +} + +impl<'a> fmt::Debug for Account<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Account {}", self.id) + } +} + +impl<'a> Account<'a> { + const PREFIX_ACCOUNT: u8 = b'a'; + const PREFIX_CLIENT: u8 = b'c'; + const PREFIX_CLIENT_PROPERTY: u8 = b'd'; + + // propertie's client suffixes + const INFO: u8 = b'i'; + const LAST_SEEN: u8 = b'l'; + const CREDENTIALS: u8 = b'c'; + //const USER_KEYS: u8 = b'k'; + + const ALL_CLIENT_PROPERTIES: [u8; 3] = [ + Self::INFO, + Self::LAST_SEEN, + Self::CREDENTIALS, + //Self::USER_KEYS, + ]; + + pub fn open(id: &UserId, storage: &'a dyn KCVStorage) -> Result, StorageError> { + let opening = Account { + id: id.clone(), + storage, + }; + if !opening.exists() { + return Err(StorageError::NotFound); + } + Ok(opening) + } + pub fn create( + id: &UserId, + admin: bool, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let acc = Account { + id: id.clone(), + storage, + }; + if acc.exists() { + return Err(StorageError::AlreadyExists); + } + storage.put( + Self::PREFIX_ACCOUNT, + &to_vec(&id)?, + None, + &to_vec(&admin)?, + &None, + )?; + Ok(acc) + } + + #[allow(deprecated)] + pub fn get_all_users( + admins: bool, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let size = to_vec(&UserId::nil())?.len(); + let mut res: Vec = vec![]; + for user in + storage.get_all_keys_and_values(Self::PREFIX_ACCOUNT, size, vec![], None, &None)? + { + let admin: bool = from_slice(&user.1)?; + if admin == admins { + let id: UserId = from_slice(&user.0[1..user.0.len()])?; + res.push(id); + } + } + Ok(res) + } + + pub fn has_users(storage: &'a dyn KCVStorage) -> Result { + let size = to_vec(&UserId::nil())?.len(); + let mut res: Vec = vec![]; + //TODO: fix this. we shouldn't have to fetch all the users to know if there is at least one user. highly inefficient. need to add a storage.has_one_key_value method + Ok(!storage + .get_all_keys_and_values(Self::PREFIX_ACCOUNT, size, vec![], None, &None)? 
+ .is_empty()) + } + + pub fn exists(&self) -> bool { + self.storage + .get( + Self::PREFIX_ACCOUNT, + &to_vec(&self.id).unwrap(), + None, + &None, + ) + .is_ok() + } + pub fn id(&self) -> UserId { + self.id + } + pub fn add_client(&self, client: &ClientId, info: &ClientInfo) -> Result<(), StorageError> { + if !self.exists() { + return Err(StorageError::BackendError); + } + + let mut s = DefaultHasher::new(); + info.hash(&mut s); + let hash = s.finish(); + + let client_key = (client.clone(), hash); + let mut client_key_ser = to_vec(&client_key)?; + + let info_ser = to_vec(info)?; + + self.storage.write_transaction(&mut |tx| { + let mut id_and_client = to_vec(&self.id)?; + id_and_client.append(&mut client_key_ser); + if tx + .has_property_value(Self::PREFIX_CLIENT, &id_and_client, None, &vec![], &None) + .is_err() + { + tx.put(Self::PREFIX_CLIENT, &id_and_client, None, &vec![], &None)?; + } + if tx + .has_property_value( + Self::PREFIX_CLIENT_PROPERTY, + &id_and_client, + Some(Self::INFO), + &info_ser, + &None, + ) + .is_err() + { + tx.put( + Self::PREFIX_CLIENT_PROPERTY, + &id_and_client, + Some(Self::INFO), + &info_ser, + &None, + )?; + } + let now = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_secs(); + tx.replace( + Self::PREFIX_CLIENT_PROPERTY, + &id_and_client, + Some(Self::LAST_SEEN), + &to_vec(&now)?, + &None, + )?; + Ok(()) + }) + } + + pub fn add_credentials(&self, credentials: &Credentials) -> Result<(), StorageError> { + if !self.exists() { + return Err(StorageError::BackendError); + } + self.storage.put( + Self::PREFIX_ACCOUNT, + &to_vec(&self.id)?, + Some(Self::CREDENTIALS), + &to_vec(credentials)?, + &None, + ) + } + + pub fn remove_credentials(&self) -> Result<(), StorageError> { + self.storage.del( + Self::PREFIX_ACCOUNT, + &to_vec(&self.id)?, + Some(Self::CREDENTIALS), + &None, + ) + } + + pub fn get_credentials(&self) -> Result { + Ok(from_slice(&self.storage.get( + Self::PREFIX_ACCOUNT, + &to_vec(&self.id)?, + Some(Self::CREDENTIALS), + &None, + )?)?) + } + + // pub fn add_user_keys( + // &self, + // storage_key: &SymKey, + // peer_priv_key: &PrivKey, + // ) -> Result<(), StorageError> { + // if !self.exists() { + // return Err(StorageError::BackendError); + // } + // self.storage.put( + // Self::PREFIX_ACCOUNT, + // &to_vec(&self.id)?, + // Some(Self::USER_KEYS), + // &to_vec(&(storage_key.clone(), peer_priv_key.clone()))?, + // &None, + // ) + // } + + // pub fn remove_user_keys(&self) -> Result<(), StorageError> { + // self.storage.del( + // Self::PREFIX_ACCOUNT, + // &to_vec(&self.id)?, + // Some(Self::USER_KEYS), + // &None, + // ) + // } + + // pub fn get_user_keys(&self) -> Result<(SymKey, PrivKey), StorageError> { + // Ok(from_slice(&self.storage.get( + // Self::PREFIX_ACCOUNT, + // &to_vec(&self.id)?, + // Some(Self::USER_KEYS), + // &None, + // )?)?) 
+ // } + + // pub fn remove_overlay(&self, overlay: &OverlayId) -> Result<(), StorageError> { + // self.storage.del_property_value( + // Self::PREFIX, + // &to_vec(&self.id)?, + // Some(Self::OVERLAY), + // to_vec(overlay)?, + // ) + // } + + // pub fn has_overlay(&self, overlay: &OverlayId) -> Result<(), StorageError> { + // self.storage.has_property_value( + // Self::PREFIX, + // &to_vec(&self.id)?, + // Some(Self::OVERLAY), + // to_vec(overlay)?, + // ) + // } + + pub fn is_admin(&self) -> Result { + if self + .storage + .has_property_value( + Self::PREFIX_ACCOUNT, + &to_vec(&self.id)?, + None, + &to_vec(&true)?, + &None, + ) + .is_ok() + { + return Ok(true); + } + Ok(false) + } + + pub fn del(&self) -> Result<(), StorageError> { + self.storage.write_transaction(&mut |tx| { + let id = to_vec(&self.id)?; + // let mut id_and_client = to_vec(&self.id)?; + // let client_key = (client.clone(), hash); + // let mut client_key_ser = to_vec(&client_key)?; + #[allow(deprecated)] + let client_key = (ClientId::nil(), 0u64); + let client_key_ser = to_vec(&client_key)?; + let size = client_key_ser.len() + id.len(); + + if let Ok(clients) = + tx.get_all_keys_and_values(Self::PREFIX_CLIENT, size, id, None, &None) + { + for client in clients { + tx.del(Self::PREFIX_CLIENT, &client.0, None, &None)?; + tx.del_all( + Self::PREFIX_CLIENT_PROPERTY, + &client.0, + &Self::ALL_CLIENT_PROPERTIES, + &None, + )?; + } + } + tx.del(Self::PREFIX_ACCOUNT, &to_vec(&self.id)?, None, &None)?; + Ok(()) + }) + } +} + +#[cfg(test)] +mod test { + + use ng_repo::types::*; + use ng_storage_rocksdb::kcv_storage::RocksDbKCVStorage; + use std::fs; + use tempfile::Builder; + + use crate::server_storage::admin::account::Account; + + #[test] + pub fn test_account() { + let path_str = "test-env"; + let root = Builder::new().prefix(path_str).tempdir().unwrap(); + let key: [u8; 32] = [0; 32]; + fs::create_dir_all(root.path()).unwrap(); + println!("{}", root.path().to_str().unwrap()); + let storage = RocksDbKCVStorage::open(root.path(), key).unwrap(); + + let user_id = PubKey::Ed25519PubKey([1; 32]); + + let account = Account::create(&user_id, true, &storage).unwrap(); + println!("account created {}", account.id()); + + let account2 = Account::open(&user_id, &storage).unwrap(); + println!("account opened {}", account2.id()); + + // let client_id = PubKey::Ed25519PubKey([56; 32]); + // let client_id_not_added = PubKey::Ed25519PubKey([57; 32]); + + // account2.add_client(&client_id).unwrap(); + + // assert!(account2.is_admin().unwrap()); + + // account.has_client(&client_id).unwrap(); + // assert!(account.has_client(&client_id_not_added).is_err()); + } +} diff --git a/ng-broker/src/server_storage/admin/invitation.rs b/ng-broker/src/server_storage/admin/invitation.rs new file mode 100644 index 0000000..991442c --- /dev/null +++ b/ng-broker/src/server_storage/admin/invitation.rs @@ -0,0 +1,184 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
User account Storage (Object Key/Col/Value Mapping) + +use serde_bare::from_slice; +use serde_bare::to_vec; + +use ng_repo::errors::ProtocolError; +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::KCVStorage; +use ng_repo::types::SymKey; +use ng_repo::utils::now_timestamp; + +use ng_net::types::*; + +pub struct Invitation<'a> { + /// code + id: [u8; 32], + storage: &'a dyn KCVStorage, +} + +impl<'a> Invitation<'a> { + const PREFIX: u8 = b'i'; + + // propertie's invitation suffixes + const TYPE: u8 = b't'; + //const EXPIRE: u8 = b'e'; + + const ALL_PROPERTIES: [u8; 1] = [Self::TYPE]; + + const SUFFIX_FOR_EXIST_CHECK: u8 = Self::TYPE; + + pub fn open( + id: &[u8; 32], + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let opening = Invitation { + id: id.clone(), + storage, + }; + if !opening.exists() { + return Err(StorageError::NotFound); + } + Ok(opening) + } + pub fn create( + id: &InvitationCode, + expiry: u32, + memo: &Option, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let (code_type, code) = match id { + InvitationCode::Unique(c) => (0u8, c.slice()), + InvitationCode::Multi(c) => (1u8, c.slice()), + InvitationCode::Admin(c) => (2u8, c.slice()), + InvitationCode::Setup(c) => (3u8, c.slice()), + }; + let acc = Invitation { + id: code.clone(), + storage, + }; + if acc.exists() { + return Err(StorageError::BackendError); + } + let value = to_vec(&(code_type, expiry, memo.clone()))?; + storage.write_transaction(&mut |tx| { + tx.put( + Self::PREFIX, + &to_vec(code)?, + Some(Self::TYPE), + &value, + &None, + )?; + Ok(()) + })?; + Ok(acc) + } + + pub fn get_all_invitations( + storage: &'a dyn KCVStorage, + mut admin: bool, + mut unique: bool, + mut multi: bool, + ) -> Result)>, StorageError> { + let size = to_vec(&[0u8; 32])?.len(); + let mut res: Vec<(InvitationCode, u32, Option)> = vec![]; + if !admin && !unique && !multi { + admin = true; + unique = true; + multi = true; + } + for invite in storage.get_all_keys_and_values(Self::PREFIX, size, vec![], None, &None)? 
{ + if invite.0.len() == size + 2 { + let code: [u8; 32] = from_slice(&invite.0[1..invite.0.len() - 1])?; + if invite.0[size + 1] == Self::TYPE { + let code_type: (u8, u32, Option) = from_slice(&invite.1)?; + let inv_code = match code_type { + (0, ex, memo) => { + if unique { + Some((InvitationCode::Unique(SymKey::ChaCha20Key(code)), ex, memo)) + } else { + None + } + } + (1, ex, memo) => { + if multi { + Some((InvitationCode::Multi(SymKey::ChaCha20Key(code)), ex, memo)) + } else { + None + } + } + (2, ex, memo) => { + if admin { + Some((InvitationCode::Admin(SymKey::ChaCha20Key(code)), ex, memo)) + } else { + None + } + } + _ => panic!("invalid code type value"), + }; + if inv_code.is_some() { + res.push(inv_code.unwrap()); + } + } + } + } + Ok(res) + } + + pub fn exists(&self) -> bool { + self.storage + .get( + Self::PREFIX, + &to_vec(&self.id).unwrap(), + Some(Self::SUFFIX_FOR_EXIST_CHECK), + &None, + ) + .is_ok() + } + pub fn id(&self) -> [u8; 32] { + self.id + } + + pub fn get_type(&self) -> Result { + let type_ser = + self.storage + .get(Self::PREFIX, &to_vec(&self.id)?, Some(Self::TYPE), &None)?; + let t: (u8, u32, Option) = from_slice(&type_ser)?; + // if t.1 < now_timestamp() { + // return Err(ProtocolError::Expired); + // } + Ok(t.0) + } + + pub fn is_expired(&self) -> Result { + let expire_ser = + self.storage + .get(Self::PREFIX, &to_vec(&self.id)?, Some(Self::TYPE), &None)?; + let expire: (u8, u32, Option) = from_slice(&expire_ser)?; + if expire.1 < now_timestamp() { + return Ok(true); + } + Ok(false) + } + + pub fn del(&self) -> Result<(), StorageError> { + self.storage.write_transaction(&mut |tx| { + tx.del_all( + Self::PREFIX, + &to_vec(&self.id)?, + &Self::ALL_PROPERTIES, + &None, + )?; + Ok(()) + }) + } +} diff --git a/ng-broker/src/server_storage/admin/mod.rs b/ng-broker/src/server_storage/admin/mod.rs new file mode 100644 index 0000000..802fcdd --- /dev/null +++ b/ng-broker/src/server_storage/admin/mod.rs @@ -0,0 +1,5 @@ +pub mod invitation; + +pub mod wallet; + +pub mod account; diff --git a/ng-broker/src/server_storage/admin/wallet.rs b/ng-broker/src/server_storage/admin/wallet.rs new file mode 100644 index 0000000..bdcb08d --- /dev/null +++ b/ng-broker/src/server_storage/admin/wallet.rs @@ -0,0 +1,123 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Broker Wallet Storage (Object Key/Col/Value Mapping), persists to storage all the SymKeys needed to open other storages + +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::KCVStorage; +use ng_repo::kcv_storage::WriteTransaction; +use ng_repo::log::*; +use ng_repo::types::*; + +pub struct Wallet<'a> { + storage: &'a dyn KCVStorage, +} + +impl<'a> Wallet<'a> { + const PREFIX: u8 = b'w'; + const PREFIX_OVERLAY: u8 = b'o'; + const PREFIX_USER: u8 = b'u'; + + const KEY_ACCOUNTS: [u8; 8] = *b"accounts"; + //const KEY_PEERS: [u8; 5] = *b"peers"; + const KEY_CORE: [u8; 4] = *b"core"; + const KEY_BLOCKS: [u8; 6] = *b"blocks"; + + // propertie's suffixes + const SYM_KEY: u8 = b"s"[0]; + + //const ALL_PROPERTIES: [u8; 1] = [Self::SYM_KEY]; + + const SUFFIX_FOR_EXIST_CHECK: u8 = Self::SYM_KEY; + + pub fn open(storage: &'a dyn KCVStorage) -> Wallet<'a> { + Wallet { storage } + } + pub fn get_or_create_single_key( + &self, + prefix: u8, + key: &Vec, + ) -> Result { + let mut result: Option = None; + self.storage.write_transaction(&mut |tx| { + let got = tx.get(prefix, key, Some(Self::SUFFIX_FOR_EXIST_CHECK), &None); + match got { + Err(e) => { + if e == StorageError::NotFound { + let res = Self::create_single_key(tx, prefix, key)?; + result = Some(res); + } else { + log_debug!("Error while creating single key {}", e); + return Err(StorageError::BackendError); + } + } + Ok(p) => { + let k: SymKey = p + .as_slice() + .try_into() + .map_err(|_| StorageError::BackendError)?; + result = Some(k); + } + } + Ok(()) + })?; + Ok(result.unwrap()) + } + + pub fn get_or_create_user_key(&self, user: &UserId) -> Result { + self.get_or_create_single_key(Self::PREFIX_USER, &to_vec(user)?) + } + + pub fn get_or_create_overlay_key(&self, overlay: &OverlayId) -> Result { + self.get_or_create_single_key(Self::PREFIX_OVERLAY, &to_vec(overlay)?) 
+ } + + pub fn create_single_key( + tx: &mut dyn WriteTransaction, + prefix: u8, + key: &Vec, + ) -> Result { + let symkey = SymKey::random(); + let vec = symkey.slice().to_vec(); + tx.put(prefix, key, Some(Self::SYM_KEY), &vec, &None)?; + Ok(symkey) + } + pub fn exists_single_key(&self, prefix: u8, key: &Vec) -> bool { + self.storage + .get(prefix, key, Some(Self::SUFFIX_FOR_EXIST_CHECK), &None) + .is_ok() + } + + pub fn exists_accounts_key(&self) -> bool { + self.exists_single_key(Self::PREFIX, &Self::KEY_ACCOUNTS.to_vec()) + } + pub fn create_accounts_key(&self) -> Result { + let mut result: Option = None; + self.storage.write_transaction(&mut |tx| { + let res = Self::create_single_key(tx, Self::PREFIX, &Self::KEY_ACCOUNTS.to_vec())?; + result = Some(res); + Ok(()) + })?; + Ok(result.unwrap()) + } + // pub fn get_or_create_peers_key(&self) -> Result { + // self.get_or_create_single_key(Self::PREFIX, &Self::KEY_PEERS.to_vec()) + // } + pub fn get_or_create_blocks_key(&self) -> Result { + self.get_or_create_single_key(Self::PREFIX, &Self::KEY_BLOCKS.to_vec()) + } + pub fn get_or_create_core_key(&self) -> Result { + self.get_or_create_single_key(Self::PREFIX, &Self::KEY_CORE.to_vec()) + } + pub fn get_or_create_accounts_key(&self) -> Result { + self.get_or_create_single_key(Self::PREFIX, &Self::KEY_ACCOUNTS.to_vec()) + } +} diff --git a/ng-broker/src/server_storage/core/account.rs b/ng-broker/src/server_storage/core/account.rs new file mode 100644 index 0000000..79d52bc --- /dev/null +++ b/ng-broker/src/server_storage/core/account.rs @@ -0,0 +1,95 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Account Storage (Object Key/Col/Value Mapping) + +use std::collections::HashSet; +use std::hash::{DefaultHasher, Hash, Hasher}; + +use ng_net::types::InboxMsg; +use ng_repo::utils::now_precise_timestamp; +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::*; +use ng_repo::types::*; + +pub struct AccountStorage<'a> { + key: Vec, + storage: &'a dyn KCVStorage, +} + +impl<'a> IModel for AccountStorage<'a> { + fn key(&self) -> &Vec { + &self.key + } + fn storage(&self) -> &dyn KCVStorage { + self.storage + } + fn class(&self) -> &Class { + &Self::CLASS + } + fn existential(&mut self) -> Option<&mut dyn IExistentialValue> { + None + } +} + +impl<'a> AccountStorage<'a> { + // User <-> Inboxes : list of inboxes a user has registered as reader. + // FIXME: this should be in accounts storage, but because it doesn't implement the ORM yet, it is quicker to implement it here. 
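A `MultiValueColumn` such as `INBOXES` below stores one key/value row per item, with the item serialized into the key suffix, so all values of one model instance sit under a common key prefix and can be range-scanned. A toy sketch of that layout, with a `BTreeMap` standing in for RocksDB's sorted keyspace and fixed-width byte ids instead of serde_bare-encoded keys:

```rust
use std::collections::BTreeMap;

// Toy ids; the real code serializes OverlayId / PubKey with serde_bare::to_vec
// and concatenates them, so all rows of one model share a common key prefix.
fn make_key(model: [u8; 2], item: [u8; 2]) -> Vec<u8> {
    let mut key = Vec::with_capacity(4);
    key.extend_from_slice(&model);
    key.extend_from_slice(&item);
    key
}

fn main() {
    // BTreeMap stands in for RocksDB's sorted keyspace.
    let mut kv: BTreeMap<Vec<u8>, &str> = BTreeMap::new();
    kv.insert(make_key([0, 1], [9, 9]), "inbox A of user 1");
    kv.insert(make_key([0, 1], [7, 7]), "inbox B of user 1");
    kv.insert(make_key([0, 2], [9, 9]), "inbox A of user 2");

    // Prefix scan: everything belonging to model [0, 1] is contiguous
    // (the upper bound is exclusive, which is fine for this sketch).
    let of_user_1: Vec<_> = kv
        .range(make_key([0, 1], [0, 0])..make_key([0, 1], [255, 255]))
        .map(|(_, v)| *v)
        .collect();
    assert_eq!(of_user_1, vec!["inbox B of user 1", "inbox A of user 1"]);
}
```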
+ pub const INBOXES: MultiValueColumn = MultiValueColumn::new(b'k'); + + pub const CLASS: Class<'a> = Class::new( + "Account", + None, + None, + &[], + &[&Self::INBOXES as &dyn IMultiValueColumn], + ); + + pub fn load_inboxes( + user: &UserId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let mut opening = Self::new(user, storage); + Self::INBOXES.get_all(&mut opening) + } + + pub fn new(user: &UserId, storage: &'a dyn KCVStorage) -> Self { + let mut key: Vec = Vec::with_capacity(33); + key.append(&mut to_vec(user).unwrap()); + Self { key, storage } + } + + pub fn open( + user: &UserId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let opening = Self::new(user, storage); + Ok(opening) + } + + pub fn add_inbox( + user: &UserId, + inbox: PubKey, + overlay: OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result<(), StorageError> { + let mut opening = Self::new(user, storage); + Self::INBOXES.add(&mut opening, &(inbox,overlay)) + } + + pub fn create( + user: &UserId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let creating = Self::new(user, storage); + Ok(creating) + } +} diff --git a/ng-broker/src/server_storage/core/commit.rs b/ng-broker/src/server_storage/core/commit.rs new file mode 100644 index 0000000..8867a7f --- /dev/null +++ b/ng-broker/src/server_storage/core/commit.rs @@ -0,0 +1,158 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Commit Storage (Object Key/Col/Value Mapping) + +use either::Either; +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::*; +use ng_repo::types::*; + +use super::OverlayStorage; + +use crate::server_broker::CommitInfo; +use crate::server_broker::EventInfo; + +pub struct CommitStorage<'a> { + key: Vec, + event: ExistentialValue>, + storage: &'a dyn KCVStorage, +} + +impl<'a> IModel for CommitStorage<'a> { + fn key(&self) -> &Vec { + &self.key + } + fn storage(&self) -> &dyn KCVStorage { + self.storage + } + fn class(&self) -> &Class { + &Self::CLASS + } + fn existential(&mut self) -> Option<&mut dyn IExistentialValue> { + Some(&mut self.event) + } +} + +impl<'a> CommitStorage<'a> { + const PREFIX: u8 = b'e'; + + // Topic properties + pub const EVENT: ExistentialValueColumn = ExistentialValueColumn::new(b'e'); + pub const HOME_PINNED: SingleValueColumn = SingleValueColumn::new(b'p'); + + // Commit -> Acks + pub const ACKS: MultiValueColumn = MultiValueColumn::new(b'a'); + // Commit -> Deps + pub const DEPS: MultiValueColumn = MultiValueColumn::new(b'd'); + // Commit -> Files + pub const FILES: MultiValueColumn = MultiValueColumn::new(b'f'); + // Commit -> Causal future commits + pub const FUTURES: MultiValueColumn = MultiValueColumn::new(b'c'); + + pub const CLASS: Class<'a> = Class::new( + "Commit", + Some(Self::PREFIX), + Some(&Self::EVENT), + &[&Self::HOME_PINNED as &dyn ISingleValueColumn], + &[ + &Self::ACKS as &dyn IMultiValueColumn, + &Self::DEPS, + &Self::FILES, + &Self::FUTURES, + ], + ); + + pub fn new(id: &ObjectId, overlay: &OverlayId, storage: &'a dyn KCVStorage) -> Self { + let mut key: Vec = Vec::with_capacity(33 + 33); + key.append(&mut to_vec(overlay).unwrap()); + key.append(&mut to_vec(id).unwrap()); + CommitStorage { + key, + event: 
ExistentialValue::>::new(), + storage, + } + } + + pub fn load( + id: &ObjectId, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result { + let mut opening = CommitStorage::new(id, overlay, storage); + let props = opening.load_props()?; + let existential = col(&Self::EVENT, &props)?; + opening.event.set(&existential)?; + Ok(CommitInfo { + event: existential, + home_pinned: col(&Self::HOME_PINNED, &props).unwrap_or(false), + acks: Self::ACKS.get_all(&mut opening)?, + deps: Self::DEPS.get_all(&mut opening)?, + files: Self::FILES.get_all(&mut opening)?, + futures: Self::FUTURES.get_all(&mut opening)?, + }) + } + + pub fn open( + id: &ObjectId, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let mut opening = CommitStorage::new(id, overlay, storage); + opening.check_exists()?; + Ok(opening) + } + pub fn create( + id: &ObjectId, + overlay: &OverlayId, + event: EventInfo, + header: &Option, + home_pinned: bool, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let mut creating = CommitStorage::new(id, overlay, storage); + if creating.exists() { + return Err(StorageError::AlreadyExists); + } + let event_either = Either::Left(event); + creating.event.set(&event_either)?; + ExistentialValue::save(&creating, &event_either)?; + + if home_pinned { + Self::HOME_PINNED.set(&mut creating, &true)?; + } + if let Some(header) = header { + let mut overlay_storage = OverlayStorage::new(overlay, storage); + // adding all the references + for ack in header.acks() { + Self::ACKS.add(&mut creating, &ack)?; + OverlayStorage::OBJECTS.increment(&mut overlay_storage, &ack)?; + } + for dep in header.deps() { + Self::DEPS.add(&mut creating, &dep)?; + OverlayStorage::OBJECTS.increment(&mut overlay_storage, &dep)?; + } + for file in header.files() { + Self::FILES.add(&mut creating, file)?; + OverlayStorage::OBJECTS.increment(&mut overlay_storage, &file)?; + } + } + + Ok(creating) + } + + pub fn event(&mut self) -> &Either { + self.event.get().unwrap() + } + pub fn take_event(self) -> Either { + self.event.take().unwrap() + } +} diff --git a/ng-broker/src/server_storage/core/inbox.rs b/ng-broker/src/server_storage/core/inbox.rs new file mode 100644 index 0000000..84196f2 --- /dev/null +++ b/ng-broker/src/server_storage/core/inbox.rs @@ -0,0 +1,120 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Inbox Storage (Object Key/Col/Value Mapping) + +use std::collections::HashSet; +use std::hash::{DefaultHasher, Hash, Hasher}; + +use ng_net::types::InboxMsg; +use ng_repo::utils::now_precise_timestamp; +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::*; +use ng_repo::types::*; + +pub struct InboxStorage<'a> { + key: Vec, + storage: &'a dyn KCVStorage, +} + +impl<'a> IModel for InboxStorage<'a> { + fn key(&self) -> &Vec { + &self.key + } + fn storage(&self) -> &dyn KCVStorage { + self.storage + } + fn class(&self) -> &Class { + &Self::CLASS + } + fn existential(&mut self) -> Option<&mut dyn IExistentialValue> { + None + } +} + +// seconds, nanosecs, hash of InboxMsgBody +type MsgKeySuffix = (u64, u32, u64); + +impl<'a> InboxStorage<'a> { + // Inbox <-> Msg : list of incoming messages that will be delivered once a user is online + pub const MSGS: MultiMapColumn = MultiMapColumn::new(b'm'); + // Inbox <-> User : list of users who registered as readers of an inbox + pub const READERS: MultiValueColumn = MultiValueColumn::new(b'i'); + + pub const CLASS: Class<'a> = Class::new( + "Inbox", + None, + None, + &[], + &[&Self::MSGS as &dyn IMultiValueColumn, &Self::READERS], + ); + + pub fn take_first_msg( + inbox: &PubKey, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result { + let mut opening = Self::new(inbox, overlay, storage); + Self::MSGS.take_first_value(&mut opening) + } + + pub fn load_readers( + inbox: &PubKey, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let mut opening = Self::new(inbox, overlay, storage); + Self::READERS.get_all(&mut opening) + } + + pub fn new(inbox: &PubKey, overlay: &OverlayId, storage: &'a dyn KCVStorage) -> Self { + let mut key: Vec = Vec::with_capacity(33 + 33); + key.append(&mut to_vec(overlay).unwrap()); + key.append(&mut to_vec(inbox).unwrap()); + Self { key, storage } + } + + pub fn open( + inbox: &PubKey, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let opening = Self::new(inbox, overlay, storage); + Ok(opening) + } + + pub fn register_reader( + inbox: &PubKey, + overlay: &OverlayId, + reader: &UserId, + storage: &'a dyn KCVStorage, + ) -> Result<(), StorageError> { + let mut opening = Self::new(inbox, overlay, storage); + Self::READERS.add(&mut opening, reader) + } + + pub fn enqueue_msg(&mut self, msg: &InboxMsg) -> Result<(), StorageError> { + let (sec,nano) = now_precise_timestamp(); + let mut hasher = DefaultHasher::new(); + msg.body.hash(&mut hasher); + let key = (sec,nano, hasher.finish()); + Self::MSGS.add(self, &key,msg) + } + + pub fn create( + inbox: &PubKey, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let creating = Self::new(inbox, overlay, storage); + Ok(creating) + } +} diff --git a/ng-broker/src/server_storage/core/mod.rs b/ng-broker/src/server_storage/core/mod.rs new file mode 100644 index 0000000..0fabded --- /dev/null +++ b/ng-broker/src/server_storage/core/mod.rs @@ -0,0 +1,20 @@ +pub mod overlay; +pub use overlay::*; + +pub mod peer; +pub use peer::*; + +pub mod topic; +pub use topic::*; + +pub mod repo; +pub use repo::*; + +pub mod commit; +pub use commit::*; + +pub mod inbox; +pub use inbox::*; + +pub mod account; +pub use account::*; diff --git a/ng-broker/src/server_storage/core/overlay.rs b/ng-broker/src/server_storage/core/overlay.rs new file mode 100644 index 0000000..f6f732e --- /dev/null +++ b/ng-broker/src/server_storage/core/overlay.rs @@ -0,0 
+1,142 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Overlay Storage (Object Key/Col/Value Mapping) + +use std::collections::HashMap; + +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::*; +use ng_repo::types::*; + +use crate::server_broker::OverlayInfo; +use crate::server_broker::OverlayType; + +pub struct OverlayStorage<'a> { + key: Vec, + overlay_type: ExistentialValue, + storage: &'a dyn KCVStorage, +} + +impl<'a> IModel for OverlayStorage<'a> { + fn key(&self) -> &Vec { + &self.key + } + fn storage(&self) -> &dyn KCVStorage { + self.storage + } + fn class(&self) -> &Class { + &Self::CLASS + } + fn existential(&mut self) -> Option<&mut dyn IExistentialValue> { + Some(&mut self.overlay_type) + } +} + +impl<'a> OverlayStorage<'a> { + const PREFIX: u8 = b'o'; + + // Overlay properties + pub const TYPE: ExistentialValueColumn = ExistentialValueColumn::new(b'y'); + /// BE CAREFUL: this property is exceptionally stored on the InnerOverlay + pub const TOPIC: SingleValueColumn = SingleValueColumn::new(b't'); + + // Overlay <-> Block refcount + pub const BLOCKS: MultiCounterColumn = MultiCounterColumn::new(b'b'); + // Overlay <-> Object refcount + pub const OBJECTS: MultiCounterColumn = MultiCounterColumn::new(b'j'); + + pub const CLASS: Class<'a> = Class::new( + "Overlay", + Some(Self::PREFIX), + Some(&Self::TYPE), + &[&Self::TOPIC as &dyn ISingleValueColumn], + &[&Self::BLOCKS as &dyn IMultiValueColumn, &Self::OBJECTS], + ); + + pub fn new(id: &OverlayId, storage: &'a dyn KCVStorage) -> Self { + OverlayStorage { + key: to_vec(id).unwrap(), + overlay_type: ExistentialValue::::new(), + storage, + } + } + + #[allow(dead_code)] + pub(crate) fn load( + id: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result { + let mut opening = OverlayStorage::new(id, storage); + let props = opening.load_props()?; + let existential = col(&Self::TYPE, &props)?; + opening.overlay_type.set(&existential)?; + let loading = OverlayInfo { + overlay_type: existential, + overlay_topic: col(&Self::TOPIC, &props).ok(), + topics: HashMap::new(), + repos: HashMap::new(), + }; + Ok(loading) + } + + pub fn open( + id: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let mut opening = OverlayStorage::new(id, storage); + opening.check_exists()?; + Ok(opening) + } + + pub fn create( + id: &OverlayId, + overlay_type: &OverlayType, + expose_outer: bool, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let mut overlay = OverlayStorage::new(id, storage); + if overlay.exists() { + if !expose_outer + && overlay_type.is_outer_to_inner() + && overlay.overlay_type().is_outer_only() + { + // we are asked to upgrade an OuterOnly to an Outer(). + // let's do it + ExistentialValue::save(&overlay, overlay_type)?; + } + return Err(StorageError::AlreadyExists); + } + overlay.overlay_type.set(overlay_type)?; + ExistentialValue::save(&overlay, overlay_type)?; + + if id.is_inner() { + if let Some(outer) = overlay_type.is_inner_get_outer() { + if expose_outer { + match OverlayStorage::create(outer, &OverlayType::Outer(*id), false, storage) { + Err(StorageError::AlreadyExists) => { + //it is ok if the Outer overlay already exists. 
someone else had pinned it before, in read_only, and the broker had subscribed to it from another broker + // or some other user pinned it before as expose_outer. + } + Err(e) => return Err(e), //TODO: in case of error, remove the existentialvalue that was previously saved (or use a transaction) + Ok(_) => {} + } + } + } + } + + Ok(overlay) + } + + pub fn overlay_type(&mut self) -> &OverlayType { + self.overlay_type.get().unwrap() + } +} diff --git a/ng-broker/src/server_storage/core/peer.rs b/ng-broker/src/server_storage/core/peer.rs new file mode 100644 index 0000000..2c4ffe7 --- /dev/null +++ b/ng-broker/src/server_storage/core/peer.rs @@ -0,0 +1,187 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Peer + +use serde_bare::{from_slice, to_vec}; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::KCVStorage; +use ng_repo::types::*; + +use ng_net::types::*; + +pub struct Peer<'a> { + /// Topic ID + id: PeerId, + storage: &'a dyn KCVStorage, +} + +impl<'a> Peer<'a> { + const PREFIX: u8 = b"p"[0]; + + // propertie's suffixes + const VERSION: u8 = b"v"[0]; + const ADVERT: u8 = b"a"[0]; + + const ALL_PROPERTIES: [u8; 2] = [Self::VERSION, Self::ADVERT]; + + const SUFFIX_FOR_EXIST_CHECK: u8 = Self::VERSION; + + pub fn open(id: &PeerId, storage: &'a dyn KCVStorage) -> Result, StorageError> { + let opening = Peer { + id: id.clone(), + storage, + }; + if !opening.exists() { + return Err(StorageError::NotFound); + } + Ok(opening) + } + pub fn update_or_create( + advert: &PeerAdvert, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let id = advert.peer(); + match Self::open(id, storage) { + Err(e) => { + if e == StorageError::NotFound { + Self::create(advert, storage) + } else { + Err(StorageError::BackendError) + } + } + Ok(p) => { + p.update_advert(advert)?; + Ok(p) + } + } + } + pub fn create( + advert: &PeerAdvert, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let id = advert.peer(); + let acc = Peer { + id: id.clone(), + storage, + }; + if acc.exists() { + return Err(StorageError::BackendError); + } + storage.write_transaction(&mut |tx| { + tx.put( + Self::PREFIX, + &to_vec(&id)?, + Some(Self::VERSION), + &to_vec(&advert.version())?, + &None, + )?; + tx.put( + Self::PREFIX, + &to_vec(&id)?, + Some(Self::ADVERT), + &to_vec(&advert)?, + &None, + )?; + Ok(()) + })?; + Ok(acc) + } + pub fn exists(&self) -> bool { + self.storage + .get( + Self::PREFIX, + &to_vec(&self.id).unwrap(), + Some(Self::SUFFIX_FOR_EXIST_CHECK), + &None, + ) + .is_ok() + } + pub fn id(&self) -> PeerId { + self.id + } + pub fn version(&self) -> Result { + match self + .storage + .get(Self::PREFIX, &to_vec(&self.id)?, Some(Self::VERSION), &None) + { + Ok(ver) => Ok(from_slice::(&ver)?), + Err(e) => Err(e), + } + } + pub fn set_version(&self, version: u32) -> Result<(), StorageError> { + if !self.exists() { + return Err(StorageError::BackendError); + } + self.storage.replace( + Self::PREFIX, + &to_vec(&self.id)?, + Some(Self::VERSION), + &to_vec(&version)?, + &None, + ) + } + pub fn update_advert(&self, advert: &PeerAdvert) -> Result<(), StorageError> { + if advert.peer() != &self.id { + return Err(StorageError::InvalidValue); + } + let current_advert = self.advert().map_err(|_| 
StorageError::BackendError)?; + if current_advert.version() >= advert.version() { + return Ok(()); + } + self.storage.write_transaction(&mut |tx| { + tx.replace( + Self::PREFIX, + &to_vec(&self.id)?, + Some(Self::VERSION), + &to_vec(&advert.version())?, + &None, + )?; + tx.replace( + Self::PREFIX, + &to_vec(&self.id)?, + Some(Self::ADVERT), + &to_vec(&advert)?, + &None, + )?; + Ok(()) + }) + } + pub fn advert(&self) -> Result { + match self + .storage + .get(Self::PREFIX, &to_vec(&self.id)?, Some(Self::ADVERT), &None) + { + Ok(advert) => Ok(from_slice::(&advert)?), + Err(e) => Err(e), + } + } + pub fn set_advert(&self, advert: &PeerAdvert) -> Result<(), StorageError> { + if !self.exists() { + return Err(StorageError::BackendError); + } + self.storage.replace( + Self::PREFIX, + &to_vec(&self.id)?, + Some(Self::ADVERT), + &to_vec(advert)?, + &None, + ) + } + + pub fn del(&self) -> Result<(), StorageError> { + self.storage.del_all( + Self::PREFIX, + &to_vec(&self.id)?, + &Self::ALL_PROPERTIES, + &None, + ) + } +} diff --git a/ng-broker/src/server_storage/core/repo.rs b/ng-broker/src/server_storage/core/repo.rs new file mode 100644 index 0000000..4d4a45d --- /dev/null +++ b/ng-broker/src/server_storage/core/repo.rs @@ -0,0 +1,123 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Repo Storage (Object Key/Col/Value Mapping) + +use std::collections::HashSet; + +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::*; +use ng_repo::types::*; + +use crate::server_broker::RepoInfo; + +pub struct RepoHashStorage<'a> { + key: Vec, + storage: &'a dyn KCVStorage, +} + +impl<'a> IModel for RepoHashStorage<'a> { + fn key(&self) -> &Vec { + &self.key + } + fn storage(&self) -> &dyn KCVStorage { + self.storage + } + fn class(&self) -> &Class { + &Self::CLASS + } + fn existential(&mut self) -> Option<&mut dyn IExistentialValue> { + None + } +} + +impl<'a> RepoHashStorage<'a> { + // RepoHash <-> Topic : list of topics of a repo that was pinned on the broker + pub const TOPICS: MultiValueColumn = MultiValueColumn::new(b'r'); + // RepoHash <-> User : list of users who asked to expose the repo to the outer overlay + pub const EXPOSE_OUTER: MultiValueColumn = MultiValueColumn::new(b'x'); + + pub const CLASS: Class<'a> = Class::new( + "Repo", + None, + None, + &[], + &[&Self::TOPICS as &dyn IMultiValueColumn, &Self::EXPOSE_OUTER], + ); + + pub fn load( + repo: &RepoHash, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result { + let mut opening = Self::new(repo, overlay, storage); + let info = RepoInfo { + topics: Self::TOPICS.get_all(&mut opening)?, + expose_outer: Self::EXPOSE_OUTER.get_all(&mut opening)?, + }; + Ok(info) + } + + pub fn load_topics( + repo: &RepoHash, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result { + let mut opening = Self::new(repo, overlay, storage); + let info = RepoInfo { + topics: Self::TOPICS.get_all(&mut opening)?, + expose_outer: HashSet::new(), + }; + Ok(info) + } + + pub fn load_for_user( + user: &UserId, + repo: &RepoHash, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result { + let mut opening = Self::new(repo, overlay, storage); + let mut expose_outer = HashSet::new(); + if let Ok(()) = 
Self::EXPOSE_OUTER.has(&mut opening, user) { + expose_outer.insert(*user); + } + let info = RepoInfo { + topics: Self::TOPICS.get_all(&mut opening)?, + expose_outer, + }; + Ok(info) + } + + pub fn new(repo: &RepoHash, overlay: &OverlayId, storage: &'a dyn KCVStorage) -> Self { + let mut key: Vec = Vec::with_capacity(33 + 33); + key.append(&mut to_vec(overlay).unwrap()); + key.append(&mut to_vec(repo).unwrap()); + Self { key, storage } + } + + pub fn open( + repo: &RepoHash, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let opening = Self::new(repo, overlay, storage); + Ok(opening) + } + pub fn create( + repo: &RepoHash, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let creating = Self::new(repo, overlay, storage); + Ok(creating) + } +} diff --git a/ng-broker/src/server_storage/core/topic.rs b/ng-broker/src/server_storage/core/topic.rs new file mode 100644 index 0000000..95d75e6 --- /dev/null +++ b/ng-broker/src/server_storage/core/topic.rs @@ -0,0 +1,182 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Topic Storage (Object Key/Col/Value Mapping) + +use std::collections::HashMap; +use std::collections::HashSet; + +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::*; +use ng_repo::types::*; + +use ng_net::types::*; + +use crate::server_broker::TopicInfo; + +pub struct TopicStorage<'a> { + key: Vec, + repo: ExistentialValue, + storage: &'a dyn KCVStorage, +} + +impl<'a> IModel for TopicStorage<'a> { + fn key(&self) -> &Vec { + &self.key + } + fn storage(&self) -> &dyn KCVStorage { + self.storage + } + fn class(&self) -> &Class { + &Self::CLASS + } + fn existential(&mut self) -> Option<&mut dyn IExistentialValue> { + Some(&mut self.repo) + } + // fn name(&self) -> String { + // format_type_of(self) + // } +} + +impl<'a> TopicStorage<'a> { + const PREFIX: u8 = b't'; + + // Topic properties + pub const ADVERT: SingleValueColumn = SingleValueColumn::new(b'a'); + pub const REPO: ExistentialValueColumn = ExistentialValueColumn::new(b'r'); + pub const ROOT_COMMIT: SingleValueColumn = SingleValueColumn::new(b'o'); + pub const COMMITS_NBR: CounterValue = CounterValue::new(b'n'); + + // Topic <-> Users who pinned it (with boolean: R or W) + pub const USERS: MultiMapColumn = MultiMapColumn::new(b'u'); + // Topic <-> heads + pub const HEADS: MultiValueColumn = MultiValueColumn::new(b'h'); + + pub const CLASS: Class<'a> = Class::new( + "Topic", + Some(Self::PREFIX), + Some(&Self::REPO), + &[ + &Self::ADVERT as &dyn ISingleValueColumn, + &Self::ROOT_COMMIT, + &Self::COMMITS_NBR, + ], + &[&Self::USERS as &dyn IMultiValueColumn, &Self::HEADS], + ); + + pub fn new(id: &TopicId, overlay: &OverlayId, storage: &'a dyn KCVStorage) -> Self { + let mut key: Vec = Vec::with_capacity(33 + 33); + key.append(&mut to_vec(overlay).unwrap()); + key.append(&mut to_vec(id).unwrap()); + TopicStorage { + key, + repo: ExistentialValue::::new(), + storage, + } + } + + pub fn load( + id: &TopicId, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result { + let mut opening = TopicStorage::new(id, overlay, storage); + let props = opening.load_props()?; + let existential = col(&Self::REPO, &props)?; + 
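// NOTE: REPO is the "existential" column of a Topic row: loading fails if
+        // the row was never created, and the RepoHash extracted here is what
+        // proves the Topic exists. A minimal, hypothetical usage sketch of
+        // this class (identifiers are illustrative, not from the original patch):
+        //
+        //   let mut topic = TopicStorage::create(&id, &overlay, &repo, storage, true)?;
+        //   topic.add_head(&commit_id)?;     // HEADS multi-value column
+        //   topic.add_user(&user_id, true)?; // USERS multi-map column, publisher=true
+        //   let info = TopicStorage::load(&id, &overlay, storage)?; +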
opening.repo.set(&existential)?; + let ti = TopicInfo { + repo: existential, + publisher_advert: col(&Self::ADVERT, &props).ok(), + root_commit: col(&Self::ROOT_COMMIT, &props).ok(), + users: Self::USERS.get_all(&mut opening)?, + current_heads: Self::HEADS.get_all(&mut opening)?, + }; + Ok(ti) + } + + pub fn open( + id: &TopicId, + overlay: &OverlayId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let mut opening = TopicStorage::new(id, overlay, storage); + opening.check_exists()?; + Ok(opening) + } + pub fn create( + id: &TopicId, + overlay: &OverlayId, + repo: &RepoHash, + storage: &'a dyn KCVStorage, + or_open: bool, + ) -> Result, StorageError> { + let mut topic = TopicStorage::new(id, overlay, storage); + if topic.exists() { + if or_open { + return Ok(topic); + } else { + return Err(StorageError::AlreadyExists); + } + } + topic.repo.set(repo)?; + ExistentialValue::save(&topic, repo)?; + + Ok(topic) + } + + pub fn repo_hash(&mut self) -> &RepoHash { + self.repo.get().unwrap() + } + + pub fn root_commit(&mut self) -> Result { + Self::ROOT_COMMIT.get(self) + } + pub fn set_root_commit(&mut self, commit: &ObjectId) -> Result<(), StorageError> { + Self::ROOT_COMMIT.set(self, commit) + } + + pub fn publisher_advert(&mut self) -> Result { + Self::ADVERT.get(self) + } + pub fn set_publisher_advert(&mut self, advert: &PublisherAdvert) -> Result<(), StorageError> { + Self::ADVERT.set(self, advert) + } + + pub fn add_head(&mut self, head: &ObjectId) -> Result<(), StorageError> { + Self::HEADS.add(self, head) + } + pub fn remove_head(&mut self, head: &ObjectId) -> Result<(), StorageError> { + Self::HEADS.remove(self, head) + } + + pub fn has_head(&mut self, head: &ObjectId) -> Result<(), StorageError> { + Self::HEADS.has(self, head) + } + + pub fn get_all_heads(&mut self) -> Result, StorageError> { + Self::HEADS.get_all(self) + } + + pub fn add_user(&mut self, user: &UserId, publisher: bool) -> Result<(), StorageError> { + Self::USERS.add(self, user, &publisher) + } + pub fn remove_user(&mut self, user: &UserId, publisher: bool) -> Result<(), StorageError> { + Self::USERS.remove(self, user, &publisher) + } + + pub fn has_user(&mut self, user: &UserId, publisher: bool) -> Result<(), StorageError> { + Self::USERS.has(self, user, &publisher) + } + + pub fn get_all_users(&mut self) -> Result, StorageError> { + Self::USERS.get_all(self) + } +} diff --git a/ng-broker/src/server_storage/mod.rs b/ng-broker/src/server_storage/mod.rs new file mode 100644 index 0000000..472b22d --- /dev/null +++ b/ng-broker/src/server_storage/mod.rs @@ -0,0 +1,3 @@ +pub mod admin; + +pub mod core; diff --git a/ng-broker/src/server_ws.rs b/ng-broker/src/server_ws.rs new file mode 100644 index 0000000..eb6cee4 --- /dev/null +++ b/ng-broker/src/server_ws.rs @@ -0,0 +1,950 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. + */ + +//! 
WebSocket implementation of the Broker + +use std::collections::HashMap; +use std::collections::HashSet; +use std::net::IpAddr; +use std::net::SocketAddr; +use std::path::PathBuf; + +use futures::StreamExt; +use ng_async_tungstenite::tungstenite::http::header::REFERER; +use once_cell::sync::OnceCell; +use rust_embed::RustEmbed; +use serde_json::json; +use urlencoding::decode; + +use async_std::net::{TcpListener, TcpStream}; +use ng_async_tungstenite::accept_hdr_async; +use ng_async_tungstenite::tungstenite::handshake::server::{ + Callback, ErrorResponse, Request, Response, +}; +use ng_async_tungstenite::tungstenite::http::{ + header::{CONNECTION, HOST, ORIGIN}, + HeaderValue, Method, StatusCode, Uri, Version, +}; + +use ng_repo::errors::NgError; +use ng_repo::log::*; +use ng_repo::types::{PrivKey, PubKey, SymKey}; + +use ng_net::broker::*; +use ng_net::connection::IAccept; +use ng_net::types::*; +use ng_net::utils::{is_private_ip, is_public_ip}; +use ng_net::NG_BOOTSTRAP_LOCAL_PATH; + +use ng_client_ws::remote_ws::ConnectionWebSocket; + +use crate::interfaces::*; +use crate::rocksdb_server_storage::RocksDbServerStorage; +use crate::server_broker::ServerBroker; +use crate::types::*; + +static LISTENERS_INFO: OnceCell<(HashMap, HashMap)> = + OnceCell::new(); + +static BOOTSTRAP_STRING: OnceCell = OnceCell::new(); + +struct SecurityCallback { + remote_bind_address: BindAddress, + local_bind_address: BindAddress, +} + +impl SecurityCallback { + fn new(remote_bind_address: BindAddress, local_bind_address: BindAddress) -> Self { + Self { + remote_bind_address, + local_bind_address, + } + } +} + +fn make_error(code: StatusCode) -> ErrorResponse { + Response::builder().status(code).body(None).unwrap() +} + +fn check_no_origin(origin: Option<&HeaderValue>) -> Result<(), ErrorResponse> { + match origin { + Some(_) => Err(make_error(StatusCode::FORBIDDEN)), + None => Ok(()), + } +} + +fn check_origin_is_url( + origin: Option<&HeaderValue>, + domains: &Vec, +) -> Result<(), ErrorResponse> { + match origin { + None => Ok(()), + Some(val) => { + for domain in domains { + if val.to_str().unwrap().starts_with(domain.as_str()) { + return Ok(()); + } + } + Err(make_error(StatusCode::FORBIDDEN)) + } + } +} + +fn check_xff_is_public_or_private( + xff: Option<&HeaderValue>, + none_is_ok: bool, + public: bool, +) -> Result<(), ErrorResponse> { + match xff { + None => { + if none_is_ok { + Ok(()) + } else { + Err(make_error(StatusCode::FORBIDDEN)) + } + } + Some(val) => { + let mut ip_str = val + .to_str() + .map_err(|_| make_error(StatusCode::FORBIDDEN))?; + if ip_str.starts_with("::ffff:") { + ip_str = ip_str.strip_prefix("::ffff:").unwrap(); + } + let ip: IpAddr = ip_str + .parse() + .map_err(|_| make_error(StatusCode::FORBIDDEN))?; + if public && !is_public_ip(&ip) || !public && !is_private_ip(&ip) { + Err(make_error(StatusCode::FORBIDDEN)) + } else { + Ok(()) + } + } + } +} + +fn check_no_xff(xff: Option<&HeaderValue>) -> Result<(), ErrorResponse> { + match xff { + None => Ok(()), + Some(_) => Err(make_error(StatusCode::FORBIDDEN)), + } +} + +fn check_host(host: Option<&HeaderValue>, hosts: Vec) -> Result<(), ErrorResponse> { + match host { + None => Err(make_error(StatusCode::FORBIDDEN)), + Some(val) => { + for hos in hosts { + if val.to_str().unwrap().starts_with(&hos) { + return Ok(()); + } + } + Err(make_error(StatusCode::FORBIDDEN)) + } + } +} + +fn check_host_in_addrs( + host: Option<&HeaderValue>, + addrs: &Vec, +) -> Result<(), ErrorResponse> { + match host { + None => 
Err(make_error(StatusCode::FORBIDDEN)), + Some(val) => { + for ba in addrs { + if val.to_str().unwrap().starts_with(&ba.ip.to_string()) { + return Ok(()); + } + } + Err(make_error(StatusCode::FORBIDDEN)) + } + } +} + +fn prepare_domain_url_and_host( + accept_forward_for: &AcceptForwardForV0, +) -> (Vec, Vec) { + let domain_str = accept_forward_for.get_domain(); + let url = ["https://", domain_str].concat(); + let hosts_str = vec![domain_str.to_string()]; + let urls_str = vec![url]; + (hosts_str, urls_str) +} + +fn prepare_urls_from_private_addrs(addrs: &Vec, port: u16) -> Vec { + let port_str = if port != 80 { + [":", &port.to_string()].concat() + } else { + "".to_string() + }; + let mut res: Vec = vec![]; + for addr in addrs { + let url = ["http://", &addr.ip.to_string(), &port_str].concat(); + res.push(url); + } + res +} + +#[derive(RustEmbed)] +#[folder = "../ng-app/dist-file/"] +#[include = "*.sha256"] +#[include = "*.gzip"] +struct App; + +#[derive(RustEmbed)] +#[folder = "../helpers/app-auth/dist/"] +#[include = "*.sha256"] +#[include = "*.gzip"] + +struct AppAuth; + +// #[derive(RustEmbed)] +// #[folder = "./static/app/"] +// #[include = "*.sha256"] +// #[include = "*.gzip"] +// struct App; + +// #[derive(RustEmbed)] +// #[folder = "./static/app-auth/"] +// #[include = "*.sha256"] +// #[include = "*.gzip"] + +// struct AppAuth; + +#[derive(RustEmbed)] +#[folder = "src/public/"] +struct AppPublic; + +static ROBOTS: &str = "User-agent: *\r\nDisallow: /"; + +fn upgrade_ws_or_serve_app( + connection: Option<&HeaderValue>, + remote: IP, + serve_app: bool, + uri: &Uri, + last_etag: Option<&HeaderValue>, + cors: Option<&str>, + referer: Option<&HeaderValue>, +) -> Result<(), ErrorResponse> { + if connection.is_some() + && connection + .unwrap() + .to_str() + .unwrap() + .split(|c| c == ' ' || c == ',') + .any(|p| p.eq_ignore_ascii_case("Upgrade")) + { + return Ok(()); + } + + if serve_app && (remote.is_private() || remote.is_loopback()) { + if uri == "/" { + log_debug!("Serving the app"); + let sha_file = App::get("index.sha256").unwrap(); + let sha = format!( + "\"{}\"", + std::str::from_utf8(sha_file.data.as_ref()).unwrap() + ); + if last_etag.is_some() && last_etag.unwrap().to_str().unwrap() == sha { + // return 304 + let res = Response::builder() + .status(StatusCode::NOT_MODIFIED) + .header("Cache-Control", "max-age=31536000, must-revalidate") + .header("ETag", sha) + .body(None) + .unwrap(); + return Err(res); + } + let file = App::get("index.gzip").unwrap(); + let res = Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "text/html") + .header("Cache-Control", "max-age=31536000, must-revalidate") + .header("Content-Encoding", "gzip") + .header("ETag", sha) + .body(Some(file.data.to_vec())) + .unwrap(); + return Err(res); + } else if uri.path() == "/auth/" { + log_debug!("Serving auth app"); + // if referer.is_none() || referer.unwrap().to_str().is_err() || referer.unwrap().to_str().unwrap() != "https://nextgraph.net/" { + // return Err(make_error(StatusCode::FORBIDDEN)); + // } + let webapp_origin = match uri.query() { + Some(query) => { + if query.starts_with("o=") { + match decode(&query.chars().skip(2).collect::()) { + Err(_) => return Err(make_error(StatusCode::BAD_REQUEST)), + Ok(cow) => { + cow.into_owned() + } + } + } else { + return Err(make_error(StatusCode::BAD_REQUEST)) + } + }, + None => {return Err(make_error(StatusCode::BAD_REQUEST))} + }; + let sha_file = AppAuth::get("index.sha256").unwrap(); + let sha = format!( + "\"{}\"", + 
std::str::from_utf8(sha_file.data.as_ref()).unwrap() + ); + if last_etag.is_some() && last_etag.unwrap().to_str().unwrap() == sha { + // return 304 + let res = Response::builder() + .status(StatusCode::NOT_MODIFIED) + .header("Cache-Control", "max-age=31536000, must-revalidate") + .header("ETag", sha) + .header("Content-Security-Policy", format!("frame-ancestors 'self' https://nextgraph.net {webapp_origin};")) + .header("X-Frame-Options", format!("ALLOW-FROM {webapp_origin}")) + .body(None) + .unwrap(); + return Err(res); + } + let file = AppAuth::get("index.gzip").unwrap(); + let res = Response::builder().status(StatusCode::OK) + .header("Content-Security-Policy", format!("frame-ancestors 'self' https://nextgraph.net {webapp_origin};")) + .header("X-Frame-Options", format!("ALLOW-FROM {webapp_origin}")) + .header("Content-Type", "text/html") + .header("Cache-Control", "max-age=31536000, must-revalidate") + .header("Content-Encoding", "gzip") + .header("ETag", sha) + .body(Some(file.data.to_vec())) + .unwrap(); + return Err(res); + } else if uri == NG_BOOTSTRAP_LOCAL_PATH { + log_debug!("Serving bootstrap"); + + let mut builder = Response::builder().status(StatusCode::OK); + if cors.is_some() { + builder = builder.header("Access-Control-Allow-Origin", cors.unwrap()); + } + let res = builder + .header("Content-Type", "text/json") + .header("Cache-Control", "max-age=0, must-revalidate") + .body(Some(BOOTSTRAP_STRING.get().unwrap().as_bytes().to_vec())) + .unwrap(); + return Err(res); + } else if uri == "/favicon.ico" { + let file = AppPublic::get("favicon.ico").unwrap(); + let res = Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "image/x-icon") + .header("Cache-Control", "max-age=432000, must-revalidate") + .body(Some(file.data.to_vec())) + .unwrap(); + return Err(res); + } else if uri == "/robots.txt" { + let res = Response::builder() + .status(StatusCode::OK) + .header("Content-Type", "text/plain") + .header("Cache-Control", "max-age=3600, must-revalidate") + .body(Some(ROBOTS.as_bytes().to_vec())) + .unwrap(); + return Err(res); + } + } + + Err(make_error(StatusCode::FORBIDDEN)) +} + +impl Callback for SecurityCallback { + fn on_request(self, request: &Request) -> Result<(), ErrorResponse> { + let local_urls = LOCAL_URLS + .to_vec() + .iter() + .map(ToString::to_string) + .collect(); + + let local_hosts = LOCAL_HOSTS + .to_vec() + .iter() + .map(ToString::to_string) + .collect(); + + let (listeners, bind_addresses) = LISTENERS_INFO.get().ok_or( + Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body(None) + .unwrap(), + )?; + + // check that the remote address is allowed to connect on the listener + + let listener_id = bind_addresses + .get(&self.local_bind_address) + .ok_or(make_error(StatusCode::FORBIDDEN))?; + let listener = listeners + .get(listener_id) + .ok_or(make_error(StatusCode::FORBIDDEN))?; + + if request.method() != Method::GET { + return Err(make_error(StatusCode::METHOD_NOT_ALLOWED)); + } + if request.version() != Version::HTTP_11 { + return Err(make_error(StatusCode::HTTP_VERSION_NOT_SUPPORTED)); + } + + let xff = request.headers().get("X-Forwarded-For"); + let connection = request.headers().get(CONNECTION); + let host = request.headers().get(HOST); + let origin = request.headers().get(ORIGIN); + let referer = request.headers().get(REFERER); + let remote = self.remote_bind_address.ip; + let last_etag = request.headers().get("If-None-Match"); + let uri = request.uri(); + + log_debug!( + "connection:{:?} origin:{:?} host:{:?} 
xff:{:?} remote:{:?} local:{:?} uri:{:?}", + connection, + origin, + host, + xff, + remote, + self.local_bind_address, + uri + ); + + match listener.config.if_type { + InterfaceType::Public => { + if !remote.is_public() { + return Err(make_error(StatusCode::FORBIDDEN)); + } + check_no_xff(xff)?; + check_no_origin(origin)?; + // let mut urls_str = vec![]; + // if !listener.config.refuse_clients { + // urls_str.push(NG_APP_URL.to_string()); + // } + // check_origin_is_url(origin, urls_str)?; + + check_host_in_addrs(host, &listener.addrs)?; + log_debug!( + "accepted core with refuse_clients {}", + listener.config.refuse_clients + ); + return upgrade_ws_or_serve_app( + connection, + remote, + listener.config.serve_app && !listener.config.refuse_clients, + uri, + last_etag, + None, + referer + ); + } + InterfaceType::Loopback => { + if !remote.is_loopback() { + return Err(make_error(StatusCode::FORBIDDEN)); + } + + if listener.config.accept_forward_for.is_public_domain() { + let (mut hosts_str, mut urls_str) = + prepare_domain_url_and_host(&listener.config.accept_forward_for); + if listener.config.accept_direct { + hosts_str = [hosts_str, local_hosts].concat(); + // TODO local_urls might need a trailing :port, but it is ok for now as we do starts_with + urls_str = [urls_str, local_urls].concat(); + } + check_origin_is_url(origin, &urls_str)?; + check_host(host, hosts_str)?; + check_xff_is_public_or_private(xff, listener.config.accept_direct, true)?; + log_debug!( + "accepted loopback PUBLIC_DOMAIN with direct {}", + listener.config.accept_direct + ); + return upgrade_ws_or_serve_app( + connection, + remote, + listener.config.serve_app, + uri, + last_etag, + origin.map(|or| or.to_str().unwrap()).and_then(|val| { + if listener.config.refuse_clients { + None + } else { + Some(val) + } + }), + referer + ); + } else if listener.config.accept_forward_for.is_private_domain() { + let (hosts_str, urls_str) = + prepare_domain_url_and_host(&listener.config.accept_forward_for); + check_origin_is_url(origin, &urls_str)?; + check_host(host, hosts_str)?; + check_xff_is_public_or_private(xff, false, false)?; + log_debug!("accepted loopback PRIVATE_DOMAIN"); + return upgrade_ws_or_serve_app( + connection, + remote, + listener.config.serve_app, + uri, + last_etag, + origin.map(|or| or.to_str().unwrap()), + referer + ); + } else if listener.config.accept_forward_for == AcceptForwardForV0::No { + check_host(host, local_hosts)?; + check_no_xff(xff)?; + // TODO local_urls might need a trailing :port, but it is ok for now as we do starts_with + check_origin_is_url(origin, &local_urls)?; + log_debug!("accepted loopback DIRECT"); + return upgrade_ws_or_serve_app( + connection, + remote, + listener.config.serve_app, + uri, + last_etag, + origin.map(|or| or.to_str().unwrap()), + referer + ); + } + } + InterfaceType::Private => { + if listener.config.accept_forward_for.is_public_static() + || listener.config.accept_forward_for.is_public_dyn() + { + if !listener.config.accept_direct && !remote.is_public() + || listener.config.accept_direct + && !remote.is_private() + && !remote.is_public() + { + return Err(make_error(StatusCode::FORBIDDEN)); + } + check_no_xff(xff)?; + + let mut addrs = listener + .config + .accept_forward_for + .get_public_bind_addresses(); + let mut urls_str = vec![]; + // if !listener.config.refuse_clients { + // urls_str.push(NG_APP_URL.to_string()); + // } + if listener.config.accept_direct { + addrs.extend(&listener.addrs); + urls_str = [ + urls_str, + prepare_urls_from_private_addrs(&listener.addrs, 
listener.config.port), + ] + .concat(); + } + check_origin_is_url(origin, &urls_str)?; + check_host_in_addrs(host, &addrs)?; + log_debug!("accepted private PUBLIC_STATIC or PUBLIC_DYN with direct {} with refuse_clients {}",listener.config.accept_direct, listener.config.refuse_clients); + return upgrade_ws_or_serve_app( + connection, + remote, + listener.config.serve_app, + uri, + last_etag, + origin.map(|or| or.to_str().unwrap()), + referer + ); + } else if listener.config.accept_forward_for.is_public_domain() { + if !remote.is_private() { + return Err(make_error(StatusCode::FORBIDDEN)); + } + check_xff_is_public_or_private(xff, listener.config.accept_direct, true)?; + + let (mut hosts_str, mut urls_str) = + prepare_domain_url_and_host(&listener.config.accept_forward_for); + if listener.config.accept_direct { + for addr in listener.addrs.iter() { + let str = addr.ip.to_string(); + hosts_str.push(str); + } + urls_str = [ + urls_str, + prepare_urls_from_private_addrs(&listener.addrs, listener.config.port), + ] + .concat(); + } + check_origin_is_url(origin, &urls_str)?; + check_host(host, hosts_str)?; + log_debug!( + "accepted private PUBLIC_DOMAIN with direct {}", + listener.config.accept_direct + ); + return upgrade_ws_or_serve_app( + connection, + remote, + listener.config.serve_app, + uri, + last_etag, + origin.map(|or| or.to_str().unwrap()).and_then(|val| { + if listener.config.refuse_clients { + None + } else { + Some(val) + } + }), + referer + ); + } else if listener.config.accept_forward_for == AcceptForwardForV0::No { + if !remote.is_private() { + return Err(make_error(StatusCode::FORBIDDEN)); + } + + check_no_xff(xff)?; + + check_host_in_addrs(host, &listener.addrs)?; + let urls_str = + prepare_urls_from_private_addrs(&listener.addrs, listener.config.port); + check_origin_is_url(origin, &urls_str)?; + log_debug!("accepted private DIRECT"); + return upgrade_ws_or_serve_app( + connection, + remote, + listener.config.serve_app, + uri, + last_etag, + origin.map(|or| or.to_str().unwrap()), + referer + ); + } + } + _ => {} + } + + Err(make_error(StatusCode::FORBIDDEN)) + } +} + +pub async fn accept(tcp: TcpStream, peer_priv_key: PrivKey) { + let remote_addr = tcp.peer_addr().unwrap(); + let remote_bind_address: BindAddress = (&remote_addr).into(); + + let local_addr = tcp.local_addr().unwrap(); + let local_bind_address: BindAddress = (&local_addr).into(); + + let ws = accept_hdr_async( + tcp, + SecurityCallback::new(remote_bind_address, local_bind_address), + ) + .await; + if ws.is_err() { + log_debug!("websocket rejected"); + return; + } + + log_debug!("websocket accepted"); + + let cws = ConnectionWebSocket {}; + let base = cws + .accept( + remote_bind_address, + local_bind_address, + peer_priv_key, + ws.unwrap(), + ) + .await + .unwrap(); + + let res = BROKER + .write() + .await + .accept(base, remote_bind_address, local_bind_address) + .await; + if res.is_err() { + log_warn!("Accept error: {:?}", res.unwrap_err()); + } +} + +#[cfg(test)] +pub async fn run_server_accept_one( + addr: &str, + port: u16, + peer_priv_key: PrivKey, + _peer_pub_key: PubKey, +) -> std::io::Result<()> { + let addrs = format!("{}:{}", addr, port); + let _root = tempfile::Builder::new().prefix("ngd").tempdir().unwrap(); + // let master_key: [u8; 32] = [0; 32]; + // std::fs::create_dir_all(root.path()).unwrap(); + // log_debug!("data directory: {}", root.path().to_str().unwrap()); + // let store = RocksDbKCVStorage::open(root.path(), master_key); + + let socket = TcpListener::bind(addrs.as_str()).await?; + 
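// Accept exactly one TCP connection, then hand it to accept() for the
+    // WebSocket upgrade. A test client could reach this endpoint with, e.g.
+    // (hypothetical sketch, mirroring the test in ng-client-ws/src/remote_ws.rs):
+    //
+    //   BROKER.write().await.connect(
+    //       Arc::new(Box::new(ConnectionWebSocket {})),
+    //       client_privk, client_pubk, server_peer_id,
+    //       StartConfig::Client(client_config),
+    //   ).await?; +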
log_debug!("Listening on {}", addrs.as_str()); + let mut connections = socket.incoming(); + + let tcp = connections.next().await.unwrap()?; + + { + //BROKER.write().await.set_my_peer_id(peer_pub_key); + } + + accept(tcp, peer_priv_key).await; + + Ok(()) +} + +pub async fn run_server_v0( + peer_priv_key: PrivKey, + peer_id: PubKey, + wallet_master_key: SymKey, + config: DaemonConfigV0, + mut path: PathBuf, + admin_invite: bool, +) -> Result<(), NgError> { + // check config + + let mut run_core = false; + let mut run_server = false; + for overlay_conf in config.overlays_configs.iter() { + if overlay_conf.core != BrokerOverlayPermission::Nobody { + run_core = true; + } + if overlay_conf.server != BrokerOverlayPermission::Nobody { + run_server = true; + } + } + if !run_core && !run_server { + return Err(NgError::BrokerConfigErrorStr( + "There isn't any overlay_config that should run as core or server. Check your config.", + )); + } + + if run_core && !run_server { + log_warn!("There isn't any overlay_config that should run as server. This is a misconfiguration as a core server that cannot receive client connections is useless"); + } + + let mut listeners: HashSet = HashSet::new(); + for listener in &config.listeners { + let id: String = listener.to_string(); + if !listeners.insert(id.clone()) { + return Err(NgError::BrokerConfigError(format!( + "The listener {} is defined twice. Check your config file.", + id + ))); + } + } + + let interfaces = get_interface(); + log_debug!("interfaces {:?}", interfaces); + let mut listener_infos: HashMap = HashMap::new(); + let mut listeners_addrs: Vec<(Vec, String)> = vec![]; + let mut listeners: Vec = vec![]; + let mut accept_clients = false; + //let mut serve_app = false; + + // TODO: check that there is only one PublicDyn or one PublicStatic or one Core + + let mut servers: Vec = vec![]; + + let registration_url = config.registration_url; + + // Preparing the listeners addrs and infos + for listener in config.listeners { + if !listener.accept_direct && listener.accept_forward_for == AcceptForwardForV0::No { + log_warn!( + "The interface {} does not accept direct connections nor is configured to forward. it is therefor disabled", + listener.interface_name + ); + continue; + } + + match find_name(&interfaces, &listener.interface_name) { + None => { + return Err(NgError::BrokerConfigError(format!( + "The interface {} does not exist on your host. Check your config file.", + listener.interface_name + ))); + } + Some(interface) => { + let mut addrs: Vec = interface + .ipv4 + .iter() + .filter_map(|ip| { + if interface.if_type.is_ipv4_valid_for_type(&ip.addr) { + Some(SocketAddr::new(IpAddr::V4(ip.addr), listener.port)) + } else { + None + } + }) + .collect(); + if addrs.is_empty() { + return Err(NgError::BrokerConfigError(format!( + "The interface {} does not have any IPv4 address.", + listener.interface_name + ))); + } + if listener.ipv6 { + let mut ipv6s: Vec = interface + .ipv6 + .iter() + .filter_map(|ip| { + if interface.if_type.is_ipv6_valid_for_type(&ip.addr) + || listener.should_bind_public_ipv6_to_private_interface(ip.addr) + { + Some(SocketAddr::new(IpAddr::V6(ip.addr), listener.port)) + } else { + None + } + }) + .collect(); + addrs.append(&mut ipv6s); + } + + if !listener.refuse_clients { + accept_clients = true; + } + if listener.refuse_clients && listener.accept_forward_for.is_public_domain() { + log_warn!( + "You have disabled accepting connections from clients on {}. 
This is unusual as --domain and --domain-private listeners are meant to answer to clients only. This will activate the relay_websocket on this listener. Is it really intended?", + listener.interface_name + ); + } + // if listener.serve_app { + // serve_app = true; + // } + + let bind_addresses: Vec = + addrs.iter().map(|addr| addr.into()).collect(); + + let server_types = listener.get_bootstraps(bind_addresses.clone()); + let common_peer_id = listener.accept_forward_for.domain_with_common_peer_id(); + for server_type in server_types { + servers.push(BrokerServerV0 { + peer_id: common_peer_id.unwrap_or(peer_id), + can_verify: false, + can_forward: !run_core, + server_type, + }) + } + + let listener_id: String = listener.to_string(); + + let listener_info = ListenerInfo { + config: listener, + addrs: bind_addresses, + }; + + listener_infos.insert(listener_id, listener_info); + listeners_addrs.push((addrs, interface.name)); + } + } + } + + if listeners_addrs.is_empty() { + return Err(NgError::BrokerConfigErrorStr("No listener configured.")); + } + + if !accept_clients { + log_warn!("There isn't any listener that accept clients. This is a misconfiguration as a core server that cannot receive client connections is useless"); + } + let bootstrap_v0 = BootstrapContentV0 { servers }; + let local_bootstrap_info = LocalBootstrapInfo::V0(LocalBootstrapInfoV0 { + bootstrap: bootstrap_v0.clone(), + registration_url: registration_url.clone(), + }); + BOOTSTRAP_STRING + .set(json!(local_bootstrap_info).to_string()) + .unwrap(); + + // saving the infos in the broker. This needs to happen before we start listening, as new incoming connections can happen anytime after that. + // and we need those infos for permission checking. + { + //let root = tempfile::Builder::new().prefix("ngd").tempdir().unwrap(); + let mut path_users = path.clone(); + path_users.push("users"); + path.push("storage"); + std::fs::create_dir_all(path.clone()).unwrap(); + std::fs::create_dir_all(path_users.clone()).unwrap(); + + // opening the server storage (that contains the encryption keys for each store/overlay ) + let server_storage = RocksDbServerStorage::open( + &mut path, + wallet_master_key.clone(), + if admin_invite { + Some(bootstrap_v0.clone()) + } else { + None + }, + ) + .map_err(|e| { + NgError::BrokerConfigError(format!("Error while opening server storage: {}", e)) + })?; + + let server_broker = ServerBroker::new( + server_storage, + path_users, + if admin_invite { + Some(wallet_master_key) + } else { + None + }, + ); + + let mut broker = BROKER.write().await; + broker.set_server_broker(server_broker); + + LISTENERS_INFO + .set(broker.set_listeners(listener_infos)) + .unwrap(); + let server_config = ServerConfig { + overlays_configs: config.overlays_configs, + registration: config.registration, + admin_user: config.admin_user, + registration_url, + peer_id, + bootstrap: BootstrapContent::V0(bootstrap_v0), + }; + broker.set_server_config(server_config); + } + + // Actually starting the listeners + for addrs in listeners_addrs { + let addrs_string = addrs + .0 + .iter() + .map(SocketAddr::to_string) + .collect::>() + .join(", "); + + for addr in addrs.0 { + let tcp_listener = TcpListener::bind(addr).await.map_err(|e| { + NgError::BrokerConfigError(format!( + "cannot bind to {} with addresses {} : {}", + addrs.1, + addrs_string, + e.to_string() + )) + })?; + listeners.push(tcp_listener); + } + + log_info!("Listening on {} {}", addrs.1, addrs_string); + } + + // select on all listeners + let mut incoming = 
futures::stream::select_all( + listeners + .into_iter() + .map(TcpListener::into_incoming) + .map(Box::pin), + ); + + // Iterate over all incoming connections + + // TODO : select on the shutdown stream too + while let Some(tcp) = incoming.next().await { + // TODO select peer_priv_ket according to config. if --domain-peer present and the connection is for that listener (PublicDomainPeer) then use the peer configured there + let key = peer_priv_key.clone(); + async_std::task::spawn(async move { + accept(tcp.unwrap(), key).await; + }); + } + + Ok(()) +} diff --git a/ng-broker/src/types.rs b/ng-broker/src/types.rs new file mode 100644 index 0000000..0a5e48c --- /dev/null +++ b/ng-broker/src/types.rs @@ -0,0 +1,35 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +use serde::{Deserialize, Serialize}; + +use ng_repo::types::PubKey; + +use ng_net::types::{BrokerOverlayConfigV0, ListenerV0, RegistrationConfig}; + +/// DaemonConfig Version 0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct DaemonConfigV0 { + /// List of listeners for TCP (HTTP) incoming connections + pub listeners: Vec, + + pub overlays_configs: Vec, + + pub registration: RegistrationConfig, + + pub admin_user: Option, + + pub registration_url: Option, +} + +/// Daemon config +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum DaemonConfig { + V0(DaemonConfigV0), +} diff --git a/ng-broker/src/utils.rs b/ng-broker/src/utils.rs new file mode 100644 index 0000000..2666c5a --- /dev/null +++ b/ng-broker/src/utils.rs @@ -0,0 +1,31 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. 
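+
+// The commented-out helper below derives three subkeys from a single master
+// key with BLAKE3 in key-derivation mode, returning the master plus the
+// peer-id, wallet-encryption and config-signature keys. Each derivation uses
+// a distinct hard-coded context string, so the subkeys cannot collide even
+// though they share one master key (domain separation). A minimal sketch of
+// the same idea, assuming the blake3 crate (values are placeholders):
+//
+//   let master = [0u8; 32]; // placeholder master key, for illustration only
+//   let peerid = blake3::derive_key("NextGraph Broker BLAKE3 key PeerId privkey", &master);
+//   let wallet = blake3::derive_key("NextGraph Broker BLAKE3 key wallet encryption", &master);
+//   assert_ne!(peerid, wallet);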
+ +// use ng_repo::log::*; + +// pub fn gen_broker_keys(key: Option<[u8; 32]>) -> [[u8; 32]; 4] { +// let key = match key { +// None => { +// let mut master_key = [0u8; 32]; +// log_warn!("gen_broker_keys: No key provided, generating one"); +// getrandom::fill(&mut master_key).expect("getrandom failed"); +// master_key +// } +// Some(k) => k, +// }; +// let peerid: [u8; 32]; +// let wallet: [u8; 32]; +// let sig: [u8; 32]; + +// peerid = blake3::derive_key("NextGraph Broker BLAKE3 key PeerId privkey", &key); +// wallet = blake3::derive_key("NextGraph Broker BLAKE3 key wallet encryption", &key); +// sig = blake3::derive_key("NextGraph Broker BLAKE3 key config signature", &key); + +// [key, peerid, wallet, sig] +// } diff --git a/ng-client-ws/Cargo.toml b/ng-client-ws/Cargo.toml new file mode 100644 index 0000000..04b7868 --- /dev/null +++ b/ng-client-ws/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "ng-client-ws" +# version = "0.1.0" +description = "Websocket client library of NextGraph, a decentralized, secure and local-first web 3.0 ecosystem based on Semantic Web and CRDTs" +version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true +homepage.workspace = true +keywords = ["crdt","e2ee","local-first","p2p","web3"] +documentation.workspace = true +rust-version.workspace = true + +[dependencies] +serde_bare = "0.5.0" +either = "1.8.1" +futures = "0.3.24" +async-trait = "0.1.64" +async-std = { version = "1.12.0", features = ["attributes","unstable"] } +ng-repo = { path = "../ng-repo", version = "0.1.2" } +ng-net = { path = "../ng-net", version = "0.1.2" } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +wasm-bindgen = "0.2.88" +ws_stream_wasm = "0.7" +pharos = "0.5" + +[dev-dependencies] +wasm-bindgen-test = "^0.3" + +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.3.3" +features = ["wasm_js"] + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +getrandom = "0.3.3" +ng-async-tungstenite = { version = "0.22.2", git = "https://git.nextgraph.org/NextGraph/async-tungstenite.git", branch = "nextgraph", features = ["async-std-runtime"] } diff --git a/ng-client-ws/README.md b/ng-client-ws/README.md new file mode 100644 index 0000000..e907118 --- /dev/null +++ b/ng-client-ws/README.md @@ -0,0 +1,56 @@ +# ng-client-ws + +![MSRV][rustc-image] +[![Apache 2.0 Licensed][license-image]][license-link] +[![MIT Licensed][license-image2]][license-link2] + +Websocket client library of NextGraph + +This repository is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and in order to join the dev team, please visit the link above and create an account (you can do so with a github account). The [github repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues. + +## NextGraph + +> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs. 
+> +> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers. +> +> More info here [https://nextgraph.org](https://nextgraph.org) + +## Support + +Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org) + +And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org) + +## How to use the library + +NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/). + +This library is used internally by [ngcli](../ngcli/README.md), [ng-app](../ng-app/README.md) and by [nextgraph, the Rust client library](../nextgraph/README.md) which you should be using instead. It is not meant to be used by other programs as-is. + +## License + +Licensed under either of + +- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + at your option. + +`SPDX-License-Identifier: Apache-2.0 OR MIT` + +### Contributions license + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you shall be dual licensed as below, without any +additional terms or conditions. + +--- + +NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively. + + +[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg +[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg +[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2 +[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg +[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT diff --git a/ng-client-ws/src/lib.rs b/ng-client-ws/src/lib.rs new file mode 100644 index 0000000..784ed1e --- /dev/null +++ b/ng-client-ws/src/lib.rs @@ -0,0 +1,13 @@ +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. 
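+
+// One connector, two mutually exclusive implementations selected at compile
+// time: `remote_ws` (ng-async-tungstenite over async-std) on native targets,
+// and `remote_ws_wasm` (ws_stream_wasm) when building for wasm32 in the
+// browser. Both expose a `ConnectionWebSocket` type implementing the
+// `IConnect` trait from ng-net.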
+ +#[cfg(not(target_arch = "wasm32"))] +pub mod remote_ws; + +#[cfg(target_arch = "wasm32")] +pub mod remote_ws_wasm; diff --git a/ng-client-ws/src/remote_ws.rs b/ng-client-ws/src/remote_ws.rs new file mode 100644 index 0000000..7d2a8c1 --- /dev/null +++ b/ng-client-ws/src/remote_ws.rs @@ -0,0 +1,394 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +//! WebSocket Remote Connection to a Broker + +use async_std::task; +use either::Either; +use futures::{pin_mut, select, StreamExt}; +use futures::{FutureExt, SinkExt}; +use ng_async_tungstenite::{ + async_std::{connect_async, ConnectStream}, + tungstenite::{protocol::frame::coding::CloseCode, protocol::CloseFrame, Message}, + WebSocketStream, +}; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::*; + +use ng_net::connection::*; +use ng_net::types::*; +use ng_net::utils::{Receiver, Sender}; + +pub struct ConnectionWebSocket {} + +#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] +impl IConnect for ConnectionWebSocket { + async fn open( + &self, + url: String, + peer_privk: PrivKey, + _peer_pubk: PubKey, + remote_peer: DirectPeerId, + config: StartConfig, + ) -> Result { + let mut cnx = ConnectionBase::new(ConnectionDir::Client, TransportProtocol::WS); + + let res = connect_async(url).await; + + match res { + Err(_e) => { + log_debug!("Cannot connect: {:?}", _e); + Err(ProtocolError::ConnectionError) + } + Ok((websocket, _)) => { + cnx.start_read_loop(None, Some(peer_privk), Some(remote_peer)); + let s = cnx.take_sender(); + let r = cnx.take_receiver(); + let mut shutdown = cnx.set_shutdown(); + cnx.release_shutdown(); + + let _join = task::spawn(async move { + log_debug!("START of WS loop"); + + let res = ws_loop(websocket, s, r).await; + + if res.is_err() { + let _ = shutdown.send(Either::Left(res.err().unwrap())).await; + } else { + let _ = shutdown.send(Either::Left(NetError::Closing)).await; + } + log_debug!("END of WS loop"); + }); + + cnx.start(config).await?; + + Ok(cnx) + } + } + } + + async fn probe(&self, ip: IP, port: u16) -> Result, ProtocolError> { + let mut cnx = ConnectionBase::new(ConnectionDir::Client, TransportProtocol::WS); + let url = format!("ws://{}:{}", ip, port); + + let res = connect_async(url).await; + + match res { + Err(_e) => { + log_debug!("Cannot connect: {:?}", _e); + Err(ProtocolError::ConnectionError) + } + Ok((websocket, _)) => { + cnx.start_read_loop(None, None, None); + let s = cnx.take_sender(); + let r = cnx.take_receiver(); + let mut shutdown = cnx.set_shutdown(); + cnx.release_shutdown(); + + let _join = task::spawn(async move { + log_debug!("START of WS loop"); + + let res = ws_loop(websocket, s, r).await; + + if res.is_err() { + let _ = shutdown.send(Either::Left(res.err().unwrap())).await; + } else { + let _ = shutdown.send(Either::Left(NetError::Closing)).await; + } + log_debug!("END of WS loop"); + }); + + cnx.probe().await + } + } + } +} + +#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] +impl IAccept for ConnectionWebSocket { + type Socket = WebSocketStream; + async fn accept( + &self, + 
remote_bind_address: BindAddress, + local_bind_address: BindAddress, + peer_privk: PrivKey, + socket: Self::Socket, + ) -> Result { + let mut cnx = ConnectionBase::new(ConnectionDir::Server, TransportProtocol::WS); + + cnx.start_read_loop( + Some((local_bind_address, remote_bind_address)), + Some(peer_privk), + None, + ); + let s = cnx.take_sender(); + let r = cnx.take_receiver(); + let mut shutdown = cnx.set_shutdown(); + + let _join = task::spawn(async move { + log_debug!("START of WS loop"); + + let res = ws_loop(socket, s, r).await; + + if res.is_err() { + let _ = shutdown.send(Either::Left(res.err().unwrap())).await; + } else { + let _ = shutdown.send(Either::Left(NetError::Closing)).await; + } + log_debug!("END of WS loop"); + }); + Ok(cnx) + } +} + +async fn close_ws( + stream: &mut WebSocketStream, + receiver: &mut Sender, + code: u16, + reason: &str, +) -> Result<(), NetError> { + log_debug!("close_ws {:?}", code); + + let cmd = if code == 1000 { + ConnectionCommand::Close + } else if code < 4000 { + ConnectionCommand::Error(NetError::WsError) + } else if code < 4950 { + ConnectionCommand::ProtocolError(ProtocolError::try_from(code - 4000).unwrap()) + } else { + ConnectionCommand::Error(NetError::try_from(code - 4949).unwrap()) + }; + log_debug!("sending to read loop {:?}", cmd); + let _ = futures::SinkExt::send(receiver, cmd).await; + + stream + .close(Some(CloseFrame { + code: CloseCode::Library(code), + reason: std::borrow::Cow::Borrowed(reason), + })) + .await + .map_err(|_e| NetError::WsError)?; + Ok(()) +} + +async fn ws_loop( + mut ws: WebSocketStream, + sender: Receiver, + mut receiver: Sender, +) -> Result<(), NetError> { + async fn inner_loop( + stream: &mut WebSocketStream, + mut sender: Receiver, + receiver: &mut Sender, + ) -> Result { + //let mut rx_sender = sender.fuse(); + pin_mut!(stream); + loop { + select! { + r = stream.next().fuse() => match r { + Some(Ok(msg)) => { + //log_debug!("GOT MESSAGE {:?}", msg); + + if msg.is_close() { + if let Message::Close(Some(cf)) = msg { + log_debug!("CLOSE from remote with closeframe: {} {}",cf.code, cf.reason); + let last_command = match cf.code { + CloseCode::Normal => + ConnectionCommand::Close, + CloseCode::Library(c) => { + if c < 4950 { + ConnectionCommand::ProtocolError( + ProtocolError::try_from(c - 4000).unwrap(), + ) + } else { + ConnectionCommand::Error(NetError::try_from(c - 4949).unwrap()) + } + }, + _ => ConnectionCommand::Error(NetError::WsError) + }; + let _ = futures::SinkExt::send(receiver, last_command).await; + } + else { + let _ = futures::SinkExt::send(receiver, ConnectionCommand::Close).await; + log_debug!("CLOSE from remote"); + } + return Ok(ProtocolError::Closing); + } else { + futures::SinkExt::send(receiver,ConnectionCommand::Msg(serde_bare::from_slice::(&msg.into_data())?)).await + .map_err(|_e| NetError::IoError)?; + } + }, + Some(Err(_e)) => {log_debug!("GOT ERROR {:?}",_e);return Err(NetError::WsError);}, + None => break + }, + s = sender.next().fuse() => match s { + Some(msg) => { + //log_debug!("SENDING MESSAGE {:?}", msg); + match msg { + ConnectionCommand::Msg(m) => { + futures::SinkExt::send(&mut stream,Message::binary(serde_bare::to_vec(&m)?)).await.map_err(|_e| NetError::IoError)?; + }, + ConnectionCommand::Error(e) => { + return Err(e); + }, + ConnectionCommand::ProtocolError(e) => { + return Ok(e); + }, + ConnectionCommand::Close => { + break; + }, + ConnectionCommand::ReEnter => { + //do nothing. 
loop + } + } + }, + None => break + }, + } + } + Ok(ProtocolError::NoError) + } + match inner_loop(&mut ws, sender, &mut receiver).await { + Ok(proto_err) => { + if proto_err == ProtocolError::Closing { + log_debug!("ProtocolError::Closing"); + let _ = ws.close(None).await; + } else if proto_err == ProtocolError::NoError { + close_ws(&mut ws, &mut receiver, 1000, "").await?; + } else { + let mut code = proto_err.clone() as u16; + if code > 949 { + code = ProtocolError::OtherError as u16; + } + close_ws(&mut ws, &mut receiver, code + 4000, &proto_err.to_string()).await?; + //return Err(NetError::ProtocolError); + } + } + Err(e) => { + close_ws( + &mut ws, + &mut receiver, + e.clone() as u16 + 4949, + &e.to_string(), + ) + .await?; + return Err(e); + } + } + Ok(()) +} + +#[cfg(test)] +mod test { + + use crate::remote_ws::*; + use ng_net::types::IP; + use ng_net::utils::{spawn_and_log_error, ResultSend}; + use ng_net::{broker::*, WS_PORT}; + use ng_repo::errors::NgError; + #[allow(unused_imports)] + use ng_repo::log::*; + use ng_repo::utils::generate_keypair; + use std::net::IpAddr; + use std::str::FromStr; + use std::sync::Arc; + + #[async_std::test] + pub async fn test_ws() -> Result<(), NgError> { + let server_key: PubKey = "ALyGZgFaDDALXLppJZLS2TrMScG0TQIS68RzRcPv99aN".try_into()?; + log_debug!("server_key:{}", server_key); + + let keys = generate_keypair(); + let x_from_ed = keys.1.to_dh_from_ed(); + log_debug!("Pub from X {}", x_from_ed); + + let (client_priv, _client) = generate_keypair(); + let (user_priv, user) = generate_keypair(); + + log_debug!("start connecting"); + { + let res = BROKER + .write() + .await + .connect( + Arc::new(Box::new(ConnectionWebSocket {})), + keys.0, + keys.1, + server_key, + StartConfig::Client(ClientConfig { + url: format!("ws://localhost:{}", WS_PORT), + name: None, + user_priv, + client_priv, + info: ClientInfo::new(ClientType::Cli, "".into(), "".into()), + registration: None, + }), + ) + .await; + log_debug!("broker.connect : {:?}", res); + assert!(res.is_err()); + let err = res.unwrap_err(); + assert!( + ProtocolError::NoLocalBrokerFound == err + || ProtocolError::NoiseHandshakeFailed == err + ); + } + + BROKER.read().await.print_status(); + + async fn timer_close(remote_peer_id: DirectPeerId, user: Option) -> ResultSend<()> { + async move { + sleep!(std::time::Duration::from_secs(3)); + log_debug!("timeout"); + BROKER + .write() + .await + .close_peer_connection(&remote_peer_id, user) + .await; + } + .await; + Ok(()) + } + spawn_and_log_error(timer_close(server_key, Some(user))); + + //Broker::graceful_shutdown().await; + + let _ = Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(5)).await; + Ok(()) + } + + #[async_std::test] + pub async fn probe() -> Result<(), NgError> { + log_debug!("start probe"); + { + let res = BROKER + .write() + .await + .probe( + Box::new(ConnectionWebSocket {}), + IP::try_from(&IpAddr::from_str("127.0.0.1").unwrap()).unwrap(), + WS_PORT, + ) + .await; + log_debug!("broker.probe : {:?}", res); + res.expect("assume the probe succeeds"); + } + + //Broker::graceful_shutdown().await; + + let _ = Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(10)).await; + Ok(()) + } +} diff --git a/ng-client-ws/src/remote_ws_wasm.rs b/ng-client-ws/src/remote_ws_wasm.rs new file mode 100644 index 0000000..adecdb1 --- /dev/null +++ b/ng-client-ws/src/remote_ws_wasm.rs @@ -0,0 +1,216 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. 
+ * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +//! WebSocket for Wasm Remote Connection to a Broker + +use either::Either; +use futures::FutureExt; +use futures::{select, SinkExt, StreamExt}; +use { + pharos::{Observable, ObserveConfig}, + wasm_bindgen::UnwrapThrowExt, + ws_stream_wasm::*, +}; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::*; + +use ng_net::connection::*; +use ng_net::types::*; +use ng_net::utils::*; + +pub struct ConnectionWebSocket {} + +#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] +impl IConnect for ConnectionWebSocket { + async fn open( + &self, + url: String, + peer_privk: PrivKey, + _peer_pubk: PubKey, + remote_peer: DirectPeerId, + config: StartConfig, + ) -> Result { + log_debug!("url {}", url); + let mut cnx = ConnectionBase::new(ConnectionDir::Client, TransportProtocol::WS); + + let (ws, wsio) = WsMeta::connect(url, None).await.map_err(|_e| { + //log_debug!("{:?}", _e); + ProtocolError::ConnectionError + })?; + + cnx.start_read_loop(None, Some(peer_privk), Some(remote_peer)); + let shutdown = cnx.set_shutdown(); + + spawn_and_log_error(ws_loop( + ws, + wsio, + cnx.take_sender(), + cnx.take_receiver(), + shutdown, + )); + + cnx.start(config).await?; + + Ok(cnx) + } + async fn probe(&self, ip: IP, port: u16) -> Result, ProtocolError> { + let mut cnx = ConnectionBase::new(ConnectionDir::Client, TransportProtocol::WS); + let url = format!("ws://{}:{}", ip, port); + + let (ws, wsio) = WsMeta::connect(url, None).await.map_err(|_e| { + //log_debug!("{:?}", _e); + ProtocolError::ConnectionError + })?; + + cnx.start_read_loop(None, None, None); + let shutdown = cnx.set_shutdown(); + + spawn_and_log_error(ws_loop( + ws, + wsio, + cnx.take_sender(), + cnx.take_receiver(), + shutdown, + )); + + cnx.probe().await + } +} + +async fn ws_loop( + mut ws: WsMeta, + mut stream: WsStream, + sender: Receiver, + mut receiver: Sender, + mut shutdown: Sender>, +) -> ResultSend<()> { + async fn inner_loop( + stream: &mut WsStream, + mut sender: Receiver, + mut receiver: Sender, + ) -> Result { + //let mut rx_sender = sender.fuse(); + loop { + select! { + r = stream.next().fuse() => match r { + Some(msg) => { + //log_debug!("GOT MESSAGE {:?}", msg); + if let WsMessage::Binary(b) = msg { + receiver.send(ConnectionCommand::Msg(serde_bare::from_slice::(&b)?)).await + .map_err(|_e| NetError::IoError)?; + } + else { + break; + } + }, + None => break + }, + s = sender.next().fuse() => match s { + Some(msg) => { + //log_debug!("SENDING MESSAGE {:?}", msg); + match msg { + ConnectionCommand::Msg(m) => { + + stream.send(WsMessage::Binary(serde_bare::to_vec(&m)?)).await.map_err(|_e| { log_debug!("{:?}",_e); return NetError::IoError;})?; + + }, + ConnectionCommand::Error(e) => { + return Err(e); + }, + ConnectionCommand::ProtocolError(e) => { + return Ok(e); + }, + ConnectionCommand::Close => { + break; + }, + ConnectionCommand::ReEnter => { + //do nothing. 
loop + } + } + }, + None => break + }, + } + } + Ok(ProtocolError::NoError) + } + log_debug!("START of WS loop"); + let mut events = ws + .observe(ObserveConfig::default()) + //.observe(Filter::Pointer(WsEvent::is_closed).into()) + .await + .expect_throw("observe"); + match inner_loop(&mut stream, sender, receiver.clone()).await { + Ok(proto_err) => { + if proto_err == ProtocolError::NoError { + let _ = ws.close_code(1000).await; //.map_err(|_e| NetError::WsError)?; + log_debug!("CLOSED GRACEFULLY"); + } else { + log_debug!("PROTOCOL ERR"); + let mut code = proto_err.clone() as u16; + if code > 949 { + code = ProtocolError::OtherError as u16; + } + let _ = ws.close_reason(code + 4000, proto_err.to_string()).await; + //.map_err(|_e| NetError::WsError)?; + //return Err(Box::new(proto_err)); + } + } + Err(e) => { + let _ = ws + .close_reason(e.clone() as u16 + 4949, e.to_string()) + .await; + //.map_err(|_e| NetError::WsError)?; + //return Err(Box::new(e)); + log_debug!("ERR {:?}", e); + } + } + + let last_event = events.next().await; + log_debug!("WS closed {:?}", last_event.clone()); + let last_command = match last_event { + None => ConnectionCommand::Close, + Some(WsEvent::Open) => ConnectionCommand::Error(NetError::WsError), // this should never happen + Some(WsEvent::Error) => ConnectionCommand::Error(NetError::ConnectionError), + Some(WsEvent::Closing) => ConnectionCommand::Close, + Some(WsEvent::Closed(ce)) => { + if ce.code == 1000 { + ConnectionCommand::Close + } else if ce.code < 4000 { + ConnectionCommand::Error(NetError::WsError) + } else if ce.code < 4950 { + ConnectionCommand::ProtocolError(ProtocolError::try_from(ce.code - 4000).unwrap()) + } else { + ConnectionCommand::Error(NetError::try_from(ce.code - 4949).unwrap()) + } + } + Some(WsEvent::WsErr(_e)) => ConnectionCommand::Error(NetError::WsError), + }; + if let ConnectionCommand::Error(err) = last_command.clone() { + let _ = shutdown.send(Either::Left(err)).await; + } else { + let _ = shutdown.send(Either::Left(NetError::Closing)).await; + } + // if let ConnectionCommand::ProtocolError(err) = last_command.clone() { + //let _ = shutdown.send(Either::Left(NetError::ProtocolError)).await; + // otherwise, shutdown gracefully (with None). 
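+// Aside: the close codes above form a small wire convention shared by the
+// native and wasm transports: 1000 is a clean close, a ProtocolError travels
+// as 4000 + (err as u16) (collapsed to ProtocolError::OtherError when the
+// discriminant exceeds 949, so it stays below the NetError range), and a
+// NetError travels as 4949 + (err as u16). The WsEvent::Closed arm above
+// inverts the mapping. A minimal round-trip check, sketched with only the
+// `as u16` casts, TryFrom<u16> impls and PartialEq comparisons this crate
+// already uses (illustrative only):
+//
+//     let code = proto_err.clone() as u16 + 4000;
+//     assert_eq!(ProtocolError::try_from(code - 4000).unwrap(), proto_err);
+//     let code = net_err.clone() as u16 + 4949;
+//     assert_eq!(NetError::try_from(code - 4949).unwrap(), net_err);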
it is done automatically during destroy of shutdown + + receiver + .send(last_command) + .await + .map_err(|_e| NetError::IoError)?; + + log_debug!("END of WS loop"); + Ok(()) +} diff --git a/ng-net/Cargo.toml b/ng-net/Cargo.toml new file mode 100644 index 0000000..13c258d --- /dev/null +++ b/ng-net/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "ng-net" +# version = "0.1.0" +description = "Network library of NextGraph, a decentralized, secure and local-first web 3.0 ecosystem based on Semantic Web and CRDTs" +categories = ["network-programming"] +version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true +homepage.workspace = true +keywords = ["crdt","e2ee","local-first","p2p","self-hosted"] +documentation.workspace = true +rust-version.workspace = true + +[badges] +maintenance = { status = "actively-developed" } + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_bare = "0.5.0" +serde_bytes = "0.11.7" +serde_json = "1.0" +lazy_static = "1.4.0" +once_cell = "1.17.1" +either = "1.8.1" +futures = "0.3.24" +async-trait = "0.1.64" +async-recursion = "1.1.1" +async-std = { version = "1.12.0", features = ["attributes","unstable"] } +unique_id = "0.1.5" +noise-protocol = "0.2.0" +noise-rust-crypto = "0.6.2" +ed25519-dalek = "1.0.1" +crypto_box = { version = "0.8.2", features = ["seal"] } +url = "2.4.0" +regex = "1.8.4" +base64-url = "2.0.0" +web-time = "0.2.0" +time = "0.3.41" +zeroize = { version = "1.7.0", features = ["zeroize_derive"] } +ng-repo = { path = "../ng-repo", version = "0.1.2" } +reqwest = { version = "0.11.18", features = ["json","native-tls-vendored"] } + +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.3.3" +features = ["wasm_js"] + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +getrandom = "0.3.3" +netdev = "0.26" + +[target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] +ng-async-tungstenite = { version = "0.22.2", git = "https://git.nextgraph.org/NextGraph/async-tungstenite.git", branch = "nextgraph", features = ["async-std-runtime", "async-native-tls"] } \ No newline at end of file diff --git a/ng-net/README.md b/ng-net/README.md new file mode 100644 index 0000000..d428869 --- /dev/null +++ b/ng-net/README.md @@ -0,0 +1,56 @@ +# ng-net + +![MSRV][rustc-image] +[![Apache 2.0 Licensed][license-image]][license-link] +[![MIT Licensed][license-image2]][license-link2] + +Network library of NextGraph + +This repository is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and in order to join the dev team, please visit the link above and create an account (you can do so with a github account). The [github repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues. + +## NextGraph + +> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs. +> +> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. 
Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers. +> +> More info here [https://nextgraph.org](https://nextgraph.org) + +## Support + +Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org) + +And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org) + +## How to use the library + +NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/). + +This library is used internally by [ngd](../ngd/README.md), [ngcli](../ngcli/README.md), [ng-app](../ng-app/README.md) and by [nextgraph, the Rust client library](../nextgraph/README.md) which you should be using instead. It is not meant to be used by other programs as-is. + +## License + +Licensed under either of + +- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + at your option. + +`SPDX-License-Identifier: Apache-2.0 OR MIT` + +### Contributions license + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you shall be dual licensed as below, without any +additional terms or conditions. + +--- + +NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively. + + +[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg +[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg +[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2 +[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg +[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT diff --git a/ng-net/src/actor.rs b/ng-net/src/actor.rs new file mode 100644 index 0000000..28eebac --- /dev/null +++ b/ng-net/src/actor.rs @@ -0,0 +1,238 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +//! Actor handles messages in the Protocol. 
common types are here
+
+use std::any::TypeId;
+use std::marker::PhantomData;
+use std::sync::Arc;
+
+use async_std::stream::StreamExt;
+use async_std::sync::Mutex;
+use futures::{channel::mpsc, SinkExt};
+
+use ng_repo::errors::{NgError, ProtocolError, ServerError};
+use ng_repo::log::*;
+
+use crate::utils::{spawn_and_log_error, Receiver, ResultSend, Sender};
+use crate::{connection::*, types::ProtocolMessage};
+
+impl TryFrom<ProtocolMessage> for () {
+    type Error = ProtocolError;
+    fn try_from(_msg: ProtocolMessage) -> Result<Self, Self::Error> {
+        Ok(())
+    }
+}
+
+#[doc(hidden)]
+#[async_trait::async_trait]
+pub trait EActor: Send + Sync + std::fmt::Debug {
+    async fn respond(
+        &mut self,
+        msg: ProtocolMessage,
+        fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> Result<(), ProtocolError>;
+
+    fn set_id(&mut self, _id: i64) {}
+}
+
+#[derive(Debug)]
+pub(crate) struct Actor<
+    'a,
+    A: Into<ProtocolMessage> + std::fmt::Debug,
+    B: TryFrom<ProtocolMessage, Error = ProtocolError> + std::fmt::Debug + Sync,
+> {
+    id: i64,
+    phantom_a: PhantomData<&'a A>,
+    phantom_b: PhantomData<&'a B>,
+    receiver: Option<Receiver<ConnectionCommand>>,
+    receiver_tx: Sender<ConnectionCommand>,
+    //initiator: bool,
+}
+
+#[derive(Debug)]
+pub enum SoS<B> {
+    Single(B),
+    Stream(Receiver<B>),
+}
+
+impl<B> SoS<B> {
+    pub fn is_single(&self) -> bool {
+        if let Self::Single(_b) = self {
+            true
+        } else {
+            false
+        }
+    }
+    pub fn is_stream(&self) -> bool {
+        !self.is_single()
+    }
+    pub fn unwrap_single(self) -> B {
+        match self {
+            Self::Single(s) => s,
+            Self::Stream(_s) => {
+                panic!("called `unwrap_single()` on a `Stream` value")
+            }
+        }
+    }
+    pub fn unwrap_stream(self) -> Receiver<B> {
+        match self {
+            Self::Stream(s) => s,
+            Self::Single(_s) => {
+                panic!("called `unwrap_stream()` on a `Single` value")
+            }
+        }
+    }
+}
+
+impl<
+        A: Into<ProtocolMessage> + std::fmt::Debug + 'static,
+        B: TryFrom<ProtocolMessage, Error = ProtocolError> + Sync + Send + std::fmt::Debug + 'static,
+    > Actor<'_, A, B>
+{
+    pub fn new(id: i64, _initiator: bool) -> Self {
+        let (receiver_tx, receiver) = mpsc::unbounded::<ConnectionCommand>();
+        Self {
+            id,
+            receiver: Some(receiver),
+            receiver_tx,
+            phantom_a: PhantomData,
+            phantom_b: PhantomData,
+            //initiator,
+        }
+    }
+
+    // pub fn verify(&self, msg: ProtocolMessage) -> bool {
+    //     self.initiator && msg.type_id() == TypeId::of::<B>()
+    //         || !self.initiator && msg.type_id() == TypeId::of::<A>()
+    // }
+
+    pub fn detach_receiver(&mut self) -> Receiver<ConnectionCommand> {
+        self.receiver.take().unwrap()
+    }
+
+    pub async fn request(
+        &mut self,
+        msg: ProtocolMessage,
+        fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> Result<SoS<B>, NgError> {
+        fsm.lock().await.send(msg).await?;
+        let mut receiver = self.receiver.take().unwrap();
+        match receiver.next().await {
+            Some(ConnectionCommand::Msg(msg)) => {
+                if let Some(bm) = msg.is_streamable() {
+                    if bm.result() == Into::<u16>::into(ServerError::PartialContent)
+                        && TypeId::of::<B>() != TypeId::of::<()>()
+                    {
+                        let (mut b_sender, b_receiver) = mpsc::unbounded::<B>();
+                        let response = msg.try_into().map_err(|e| {
+                            log_err!("msg.try_into {}", e);
+                            ProtocolError::ActorError
+                        })?;
+                        b_sender
+                            .send(response)
+                            .await
+                            .map_err(|_err| ProtocolError::IoError)?;
+                        async fn pump_stream<B: TryFrom<ProtocolMessage, Error = ProtocolError>>(
+                            mut actor_receiver: Receiver<ConnectionCommand>,
+                            mut sos_sender: Sender<B>,
+                            fsm: Arc<Mutex<NoiseFSM>>,
+                            id: i64,
+                        ) -> ResultSend<()> {
+                            async move {
+                                while let Some(ConnectionCommand::Msg(msg)) =
+                                    actor_receiver.next().await
+                                {
+                                    if let Some(bm) = msg.is_streamable() {
+                                        if bm.result()
+                                            == Into::<u16>::into(ServerError::EndOfStream)
+                                        {
+                                            break;
+                                        }
+                                        let response = msg.try_into();
+                                        if response.is_err() {
+                                            // TODO deal with errors.
+                                            break;
+                                        }
+                                        if sos_sender.send(response.unwrap()).await.is_err() {
+                                            break;
+                                        }
+                                    } else {
+                                        // todo deal with error (not a ClientMessage)
+                                        break;
+                                    }
+                                }
+                                fsm.lock().await.remove_actor(id).await;
+                            }
+                            .await;
+                            Ok(())
+                        }
+                        spawn_and_log_error(pump_stream::<B>(
+                            receiver,
+                            b_sender,
+                            Arc::clone(&fsm),
+                            self.id,
+                        ));
+                        return Ok(SoS::<B>::Stream(b_receiver));
+                    }
+                }
+                fsm.lock().await.remove_actor(self.id).await;
+                let server_error: Result<ServerError, ProtocolError> = (&msg).try_into();
+                //log_debug!("server_error {:?}", server_error);
+                if server_error.is_ok() {
+                    return Err(NgError::ServerError(server_error.unwrap()));
+                }
+                let response: B = match msg.try_into() {
+                    Ok(b) => b,
+                    Err(ProtocolError::ServerError) => {
+                        return Err(NgError::ServerError(server_error?));
+                    }
+                    Err(e) => return Err(NgError::ProtocolError(e)),
+                };
+                Ok(SoS::<B>::Single(response))
+            }
+            Some(ConnectionCommand::ProtocolError(e)) => Err(e.into()),
+            Some(ConnectionCommand::Error(e)) => Err(ProtocolError::from(e).into()),
+            Some(ConnectionCommand::Close) => Err(ProtocolError::Closing.into()),
+            _ => Err(ProtocolError::ActorError.into()),
+        }
+    }
+
+    pub fn new_responder(id: i64) -> Box<dyn EActor> {
+        Box::new(Self::new(id, false))
+    }
+
+    pub fn get_receiver_tx(&self) -> Sender<ConnectionCommand> {
+        self.receiver_tx.clone()
+    }
+
+    pub fn id(&self) -> i64 {
+        self.id
+    }
+}
+
+#[cfg(test)]
+mod test {
+
+    use crate::actor::*;
+    use crate::actors::*;
+
+    #[async_std::test]
+    pub async fn test_actor() {
+        let _a = Actor::<Noise, Noise>::new(1, true);
+        // a.handle(ProtocolMessage::Start(StartProtocol::Client(
+        //     ClientHello::Noise3(Noise::V0(NoiseV0 { data: vec![] })),
+        // )))
+        // .await;
+        // a.handle(ProtocolMessage::Noise(Noise::V0(NoiseV0 { data: vec![] })))
+        // .await;
+    }
+}
diff --git a/ng-net/src/actors/admin/add_invitation.rs b/ng-net/src/actors/admin/add_invitation.rs
new file mode 100644
index 0000000..a8c6e27
--- /dev/null
+++ b/ng-net/src/actors/admin/add_invitation.rs
@@ -0,0 +1,145 @@
+/*
+ * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+ * All rights reserved.
+ * Licensed under the Apache License, Version 2.0
+ * <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+ * or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+ * at your option. All files in the project carrying such
+ * notice may not be copied, modified, or distributed except
+ * according to those terms.
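+ *
+ * (Illustrative aside.) On the requesting side, the machinery from actor.rs
+ * above is consumed roughly as follows. This is a hedged sketch, assuming an
+ * established NoiseFSM in `fsm`, a request type R paired with a response
+ * type B, and futures::StreamExt in scope; `handle()` is a hypothetical
+ * callback:
+ *
+ *     let mut actor = Actor::<R, B>::new(id, true);
+ *     match actor.request(req.into(), fsm).await? {
+ *         SoS::Single(b) => handle(b),
+ *         SoS::Stream(mut rx) => {
+ *             while let Some(b) = rx.next().await {
+ *                 handle(b);
+ *             }
+ *         }
+ *     }
+ *
+ * A reply whose result code is ServerError::PartialContent flips the actor
+ * into streaming mode; items then arrive until the broker sends EndOfStream.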
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; +use ng_repo::log::*; + +use super::super::StartProtocol; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +/// Add invitation +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AddInvitationV0 { + pub invite_code: InvitationCode, + pub expiry: u32, + pub memo: Option, + pub tos_url: bool, +} + +/// Add invitation +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AddInvitation { + V0(AddInvitationV0), +} + +impl AddInvitation { + pub fn code(&self) -> &InvitationCode { + match self { + AddInvitation::V0(o) => &o.invite_code, + } + } + pub fn expiry(&self) -> u32 { + match self { + AddInvitation::V0(o) => o.expiry, + } + } + pub fn memo(&self) -> &Option { + match self { + AddInvitation::V0(o) => &o.memo, + } + } + pub fn tos_url(&self) -> bool { + match self { + AddInvitation::V0(o) => o.tos_url, + } + } + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl TryFrom for AddInvitation { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Admin(AdminRequest::V0(AdminRequestV0 { + content: AdminRequestContentV0::AddInvitation(a), + .. + }))) = msg + { + Ok(a) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: AddInvitation) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for AdminRequestContentV0 { + fn from(msg: AddInvitation) -> AdminRequestContentV0 { + AdminRequestContentV0::AddInvitation(msg) + } +} + +impl Actor<'_, AddInvitation, AdminResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, AddInvitation, AdminResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = AddInvitation::try_from(msg)?; + let (url, bootstrap, sb) = { + let broker = BROKER.read().await; + let url = if req.tos_url() { + broker.get_registration_url().map(|s| s.clone()) + } else { + None + }; + ( + url, + broker.get_bootstrap()?.clone(), + broker.get_server_broker()?, + ) + }; + { + sb.read() + .await + .add_invitation(req.code(), req.expiry(), req.memo())?; + } + let invitation = crate::types::Invitation::V0(InvitationV0::new( + bootstrap, + Some(req.code().get_symkey()), + None, + url, + )); + let response: AdminResponseV0 = invitation.into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} + +impl From for AdminResponseV0 { + fn from(res: Invitation) -> AdminResponseV0 { + AdminResponseV0 { + id: 0, + result: 0, + content: AdminResponseContentV0::Invitation(res), + padding: vec![], + } + } +} diff --git a/ng-net/src/actors/admin/add_user.rs b/ng-net/src/actors/admin/add_user.rs new file mode 100644 index 0000000..92ed492 --- /dev/null +++ b/ng-net/src/actors/admin/add_user.rs @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
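+ *
+ * (Illustrative aside.) AddInvitation above shows the template that this file
+ * and the remaining admin actors repeat: a versioned request enum, a
+ * TryFrom<ProtocolMessage> that pattern-matches the request out of the
+ * AdminRequestContentV0 envelope, a From impl wrapping it back in, and an
+ * EActor::respond that does the work and replies through the FSM. A new
+ * admin command would follow the same shape; sketched here with a
+ * hypothetical Ping request, reusing the Result-to-AdminResponseV0
+ * conversions the surrounding actors rely on:
+ *
+ *     impl TryFrom<ProtocolMessage> for Ping { /* unwrap the envelope */ }
+ *     impl From<Ping> for AdminRequestContentV0 { /* wrap the envelope */ }
+ *
+ *     #[async_trait::async_trait]
+ *     impl EActor for Actor<'_, Ping, AdminResponse> {
+ *         async fn respond(
+ *             &mut self,
+ *             msg: ProtocolMessage,
+ *             fsm: Arc<Mutex<NoiseFSM>>,
+ *         ) -> Result<(), ProtocolError> {
+ *             let _req = Ping::try_from(msg)?;
+ *             let response: AdminResponseV0 = Ok(()).into();
+ *             fsm.lock().await.send(response.into()).await?;
+ *             Ok(())
+ *         }
+ *     }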
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::PubKey; + +use super::super::StartProtocol; + +use crate::broker::{ServerConfig, BROKER}; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +/// Add user account +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct AddUserV0 { + /// User pub key + pub user: PubKey, + /// should the newly added user be an admin of the server + pub is_admin: bool, +} + +/// Add user account +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum AddUser { + V0(AddUserV0), +} + +impl AddUser { + pub fn user(&self) -> PubKey { + match self { + AddUser::V0(o) => o.user, + } + } + pub fn is_admin(&self) -> bool { + match self { + AddUser::V0(o) => o.is_admin, + } + } + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl TryFrom for AddUser { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Admin(AdminRequest::V0(AdminRequestV0 { + content: AdminRequestContentV0::AddUser(a), + .. + }))) = msg + { + Ok(a) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: AddUser) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for AdminRequestContentV0 { + fn from(msg: AddUser) -> AdminRequestContentV0 { + AdminRequestContentV0::AddUser(msg) + } +} + +impl Actor<'_, AddUser, AdminResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, AddUser, AdminResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = AddUser::try_from(msg)?; + + let res = { + let mut is_admin = req.is_admin(); + let sb = { + let broker = BROKER.read().await; + if let Some(ServerConfig { + admin_user: Some(admin_user), + .. + }) = broker.get_config() + { + if *admin_user == req.user() { + is_admin = true; + } + } + broker.get_server_broker()? + }; + + let lock = sb.read().await; + lock.add_user(req.user(), is_admin) + }; + let response: AdminResponseV0 = res.into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/admin/create_user.rs b/ng-net/src/actors/admin/create_user.rs new file mode 100644 index 0000000..7ba21dd --- /dev/null +++ b/ng-net/src/actors/admin/create_user.rs @@ -0,0 +1,111 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
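+ *
+ * (Illustrative aside.) The respond() of add_user.rs above quietly widens
+ * privileges: the user configured as admin_user on the server is stored as
+ * an admin even when the request said otherwise. The rule, restated as a
+ * standalone predicate for clarity (sketch only):
+ *
+ *     fn effective_is_admin(requested: bool, config_admin: Option<&UserId>, user: &UserId) -> bool {
+ *         requested || config_admin == Some(user)
+ *     }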
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use ng_repo::types::UserId; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; +use ng_repo::log::*; + +use super::super::StartProtocol; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +/// Create user and keeps credentials in the server (for use with headless API) +#[doc(hidden)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct CreateUserV0 {} + +/// Create user +#[doc(hidden)] +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum CreateUser { + V0(CreateUserV0), +} + +impl TryFrom for CreateUser { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Admin(AdminRequest::V0(AdminRequestV0 { + content: AdminRequestContentV0::CreateUser(a), + .. + }))) = msg + { + Ok(a) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: CreateUser) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for ProtocolMessage { + fn from(_msg: UserId) -> ProtocolMessage { + unimplemented!(); + } +} + +impl TryFrom for UserId { + type Error = ProtocolError; + fn try_from(_msg: ProtocolMessage) -> Result { + unimplemented!(); + } +} + +impl From for AdminRequestContentV0 { + fn from(msg: CreateUser) -> AdminRequestContentV0 { + AdminRequestContentV0::CreateUser(msg) + } +} + +impl CreateUser { + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl Actor<'_, CreateUser, UserId> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, CreateUser, UserId> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let _req = CreateUser::try_from(msg)?; + + let res = { + let (broker_id, sb) = { + let b = BROKER.read().await; + (b.get_server_peer_id(), b.get_server_broker()?) + }; + let lock = sb.read().await; + lock.create_user(&broker_id).await + }; + + let response: AdminResponseV0 = res.into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/admin/del_user.rs b/ng-net/src/actors/admin/del_user.rs new file mode 100644 index 0000000..1869b37 --- /dev/null +++ b/ng-net/src/actors/admin/del_user.rs @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; +use ng_repo::types::PubKey; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +use super::super::StartProtocol; + +/// Delete user account V0 +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct DelUserV0 { + /// User pub key + pub user: PubKey, +} + +/// Delete user account +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum DelUser { + V0(DelUserV0), +} + +impl DelUser { + pub fn user(&self) -> PubKey { + match self { + DelUser::V0(o) => o.user, + } + } + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl TryFrom for DelUser { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Admin(AdminRequest::V0(AdminRequestV0 { + content: AdminRequestContentV0::DelUser(a), + .. + }))) = msg + { + Ok(a) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: DelUser) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for AdminRequestContentV0 { + fn from(msg: DelUser) -> AdminRequestContentV0 { + AdminRequestContentV0::DelUser(msg) + } +} + +impl Actor<'_, DelUser, AdminResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, DelUser, AdminResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = DelUser::try_from(msg)?; + let sb = { BROKER.read().await.get_server_broker()? }; + let res = { sb.read().await.del_user(req.user()) }; + let response: AdminResponseV0 = res.into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/admin/list_invitations.rs b/ng-net/src/actors/admin/list_invitations.rs new file mode 100644 index 0000000..ae33d97 --- /dev/null +++ b/ng-net/src/actors/admin/list_invitations.rs @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; +#[allow(unused_imports)] +use ng_repo::log::*; + +use super::super::StartProtocol; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +/// List invitations registered on this broker +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct ListInvitationsV0 { + /// should list only the admin invitations. + pub admin: bool, + /// should list only the unique invitations. + pub unique: bool, + /// should list only the multi invitations. 
+ pub multi: bool, +} + +/// List invitations registered on this broker +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum ListInvitations { + V0(ListInvitationsV0), +} + +impl ListInvitations { + pub fn admin(&self) -> bool { + match self { + Self::V0(o) => o.admin, + } + } + pub fn unique(&self) -> bool { + match self { + Self::V0(o) => o.unique, + } + } + pub fn multi(&self) -> bool { + match self { + Self::V0(o) => o.multi, + } + } + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl TryFrom for ListInvitations { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Admin(AdminRequest::V0(AdminRequestV0 { + content: AdminRequestContentV0::ListInvitations(a), + .. + }))) = msg + { + Ok(a) + } else { + //log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: ListInvitations) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for AdminRequestContentV0 { + fn from(msg: ListInvitations) -> AdminRequestContentV0 { + AdminRequestContentV0::ListInvitations(msg) + } +} + +impl Actor<'_, ListInvitations, AdminResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, ListInvitations, AdminResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = ListInvitations::try_from(msg)?; + let sb = { BROKER.read().await.get_server_broker()? }; + let res = { + sb.read() + .await + .list_invitations(req.admin(), req.unique(), req.multi()) + }; + let response: AdminResponseV0 = res.into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} + +impl From)>, ProtocolError>> for AdminResponseV0 { + fn from( + res: Result)>, ProtocolError>, + ) -> AdminResponseV0 { + match res { + Err(e) => AdminResponseV0 { + id: 0, + result: e.into(), + content: AdminResponseContentV0::EmptyResponse, + padding: vec![], + }, + Ok(vec) => AdminResponseV0 { + id: 0, + result: 0, + content: AdminResponseContentV0::Invitations(vec), + padding: vec![], + }, + } + } +} diff --git a/ng-net/src/actors/admin/list_users.rs b/ng-net/src/actors/admin/list_users.rs new file mode 100644 index 0000000..b0e2582 --- /dev/null +++ b/ng-net/src/actors/admin/list_users.rs @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; + +use super::super::StartProtocol; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +/// List users registered on this broker +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct ListUsersV0 { + /// should list only the admins. 
if false, admin users will be excluded + pub admins: bool, +} + +/// List users registered on this broker +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum ListUsers { + V0(ListUsersV0), +} + +impl ListUsers { + pub fn admins(&self) -> bool { + match self { + Self::V0(o) => o.admins, + } + } + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl TryFrom for ListUsers { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Admin(AdminRequest::V0(AdminRequestV0 { + content: AdminRequestContentV0::ListUsers(a), + .. + }))) = msg + { + Ok(a) + } else { + //log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: ListUsers) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for AdminRequestContentV0 { + fn from(msg: ListUsers) -> AdminRequestContentV0 { + AdminRequestContentV0::ListUsers(msg) + } +} + +impl Actor<'_, ListUsers, AdminResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, ListUsers, AdminResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = ListUsers::try_from(msg)?; + let sb = { BROKER.read().await.get_server_broker()? }; + let res = { sb.read().await.list_users(req.admins()) }; + + let response: AdminResponseV0 = res.into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/admin/mod.rs b/ng-net/src/actors/admin/mod.rs new file mode 100644 index 0000000..3dcd8c6 --- /dev/null +++ b/ng-net/src/actors/admin/mod.rs @@ -0,0 +1,17 @@ +pub mod add_user; +pub use add_user::*; + +pub mod del_user; +pub use del_user::*; + +pub mod list_users; +pub use list_users::*; + +pub mod add_invitation; +pub use add_invitation::*; + +pub mod list_invitations; +pub use list_invitations::*; + +pub mod create_user; +pub use create_user::*; diff --git a/ng-net/src/actors/app/mod.rs b/ng-net/src/actors/app/mod.rs new file mode 100644 index 0000000..cd1a78a --- /dev/null +++ b/ng-net/src/actors/app/mod.rs @@ -0,0 +1,3 @@ +pub mod request; + +pub mod session; diff --git a/ng-net/src/actors/app/request.rs b/ng-net/src/actors/app/request.rs new file mode 100644 index 0000000..d662608 --- /dev/null +++ b/ng-net/src/actors/app/request.rs @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; + +use crate::app_protocol::*; +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl AppRequest { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for AppRequest { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let AppMessageContentV0::Request(req) = msg.try_into()? 
{ + Ok(req) + } else { + log_debug!("INVALID AppMessageContentV0::Request"); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(request: AppRequest) -> ProtocolMessage { + AppMessageContentV0::Request(request).into() + } +} + +impl From for ProtocolMessage { + fn from(content: AppMessageContentV0) -> ProtocolMessage { + AppMessage::V0(AppMessageV0 { + content, + id: 0, + result: 0, + }) + .into() + } +} + +impl TryFrom for AppResponse { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let AppMessageContentV0::Response(res) = msg.try_into()? { + Ok(res) + } else { + log_err!("INVALID AppMessageContentV0::Response"); + Err(ProtocolError::InvalidValue) + } + } +} + +impl TryFrom for AppMessageContentV0 { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::AppMessage(AppMessage::V0(AppMessageV0 { + content, result, .. + })) = msg + { + let err = ServerError::try_from(result).unwrap(); + if !err.is_err() { + Ok(content) + } else { + Err(ProtocolError::ServerError) + } + } else { + log_err!("INVALID AppMessageContentV0 {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for AppMessage { + fn from(response: AppResponse) -> AppMessage { + AppMessage::V0(AppMessageV0 { + content: AppMessageContentV0::Response(response), + id: 0, + result: 0, + }) + } +} + +impl From for ProtocolMessage { + fn from(response: AppResponse) -> ProtocolMessage { + let app_msg: AppMessage = response.into(); + app_msg.into() + } +} + +impl Actor<'_, AppRequest, AppResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, AppRequest, AppResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = AppRequest::try_from(msg)?; + let res = { + let sb = { BROKER.read().await.get_server_broker()? }; + let lock = sb.read().await; + lock.app_process_request(req, self.id(), &fsm).await + }; + if res.is_err() { + let server_err: ServerError = res.unwrap_err().into(); + let app_message: AppMessage = server_err.into(); + fsm.lock() + .await + .send_in_reply_to(app_message.into(), self.id()) + .await?; + } + Ok(()) + } +} diff --git a/ng-net/src/actors/app/session.rs b/ng-net/src/actors/app/session.rs new file mode 100644 index 0000000..c608fb7 --- /dev/null +++ b/ng-net/src/actors/app/session.rs @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; + +use crate::app_protocol::*; +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl AppSessionStart { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for AppSessionStart { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let AppMessageContentV0::SessionStart(req) = msg.try_into()? 
{ + Ok(req) + } else { + log_debug!("INVALID AppMessageContentV0::SessionStart"); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(request: AppSessionStart) -> ProtocolMessage { + AppMessageContentV0::SessionStart(request).into() + } +} + +impl TryFrom for AppSessionStartResponse { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let AppMessageContentV0::Response(AppResponse::V0(AppResponseV0::SessionStart(res))) = + msg.try_into()? + { + Ok(res) + } else { + log_debug!("INVALID AppSessionStartResponse"); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for AppMessage { + fn from(response: AppSessionStartResponse) -> AppMessage { + AppResponse::V0(AppResponseV0::SessionStart(response)).into() + } +} + +impl From for ProtocolMessage { + fn from(response: AppSessionStartResponse) -> ProtocolMessage { + response.into() + } +} + +impl Actor<'_, AppSessionStart, AppSessionStartResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, AppSessionStart, AppSessionStartResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = AppSessionStart::try_from(msg)?; + let res = { + let lock = fsm.lock().await; + let remote = lock.remote_peer(); + + //TODO: if fsm.get_user_id is some, check that user_priv_key in credentials matches. + //TODO: if no user in fsm (headless), check user in request is allowed + if remote.is_none() { + Err(ServerError::BrokerError) + } else { + let (sb, broker_id) = { + let b = BROKER.read().await; + (b.get_server_broker()?, b.get_server_peer_id()) + }; + let lock = sb.read().await; + lock.app_session_start(req, remote.unwrap(), broker_id) + .await + } + }; + let app_message: AppMessage = match res { + Err(e) => e.into(), + Ok(o) => o.into(), + }; + fsm.lock() + .await + .send_in_reply_to(app_message.into(), self.id()) + .await?; + Ok(()) + } +} + +/////////////////////// + +impl AppSessionStop { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for AppSessionStop { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let AppMessageContentV0::SessionStop(req) = msg.try_into()? { + Ok(req) + } else { + log_debug!("INVALID AppMessageContentV0::SessionStop"); + Err(ProtocolError::InvalidValue) + } + } +} + +impl TryFrom for EmptyAppResponse { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let res: Result = msg.try_into(); + if let AppMessageContentV0::EmptyResponse = res? 
{ + Ok(EmptyAppResponse(())) + } else { + log_debug!("INVALID AppMessageContentV0::EmptyResponse"); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(request: AppSessionStop) -> ProtocolMessage { + AppMessageContentV0::SessionStop(request).into() + } +} + +impl From> for ProtocolMessage { + fn from(res: Result) -> ProtocolMessage { + match res { + Ok(_a) => ServerError::Ok.into(), + Err(err) => AppMessage::V0(AppMessageV0 { + id: 0, + result: err.into(), + content: AppMessageContentV0::EmptyResponse, + }), + } + .into() + } +} + +impl Actor<'_, AppSessionStop, EmptyAppResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, AppSessionStop, EmptyAppResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = AppSessionStop::try_from(msg)?; + let res = { + let lock = fsm.lock().await; + let remote = lock.remote_peer(); + + if remote.is_none() { + Err(ServerError::BrokerError) + } else { + let sb = { BROKER.read().await.get_server_broker()? }; + let lock = sb.read().await; + lock.app_session_stop(req, remote.as_ref().unwrap()).await + } + }; + + fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/client/blocks_exist.rs b/ng-net/src/actors/client/blocks_exist.rs new file mode 100644 index 0000000..65488b8 --- /dev/null +++ b/ng-net/src/actors/client/blocks_exist.rs @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl BlocksExist { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for BlocksExist { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::BlocksExist(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: BlocksExist) -> ProtocolMessage { + let overlay = *msg.overlay(); + ProtocolMessage::from_client_request_v0(ClientRequestContentV0::BlocksExist(msg), overlay) + } +} + +impl TryFrom for BlocksFound { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let res: ClientResponseContentV0 = msg.try_into()?; + if let ClientResponseContentV0::BlocksFound(a) = res { + Ok(a) + } else { + log_debug!("INVALID {:?}", res); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(b: BlocksFound) -> ProtocolMessage { + ClientResponseContentV0::BlocksFound(b).into() + } +} + +impl Actor<'_, BlocksExist, BlocksFound> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, BlocksExist, BlocksFound> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = BlocksExist::try_from(msg)?; + let sb = { BROKER.read().await.get_server_broker()? 
}; + + let overlay = req.overlay().clone(); + let mut found = vec![]; + let mut missing = vec![]; + match req { + BlocksExist::V0(v0) => { + for block_id in v0.blocks { + let r = sb.read().await.has_block(&overlay, &block_id); + if r.is_err() { + missing.push(block_id); + } else { + found.push(block_id); + } + } + } + } + let res = Ok(BlocksFound::V0(BlocksFoundV0 { found, missing })); + + fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/client/blocks_get.rs b/ng-net/src/actors/client/blocks_get.rs new file mode 100644 index 0000000..6691d37 --- /dev/null +++ b/ng-net/src/actors/client/blocks_get.rs @@ -0,0 +1,132 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_recursion::async_recursion; +use async_std::sync::RwLock; +use async_std::sync::{Mutex, MutexGuard}; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::{Block, BlockId, OverlayId}; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::server_broker::IServerBroker; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl BlocksGet { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } + + pub fn overlay(&self) -> &OverlayId { + match self { + Self::V0(v0) => v0.overlay.as_ref().unwrap(), + } + } + pub fn set_overlay(&mut self, overlay: OverlayId) { + match self { + Self::V0(v0) => v0.overlay = Some(overlay), + } + } +} + +impl TryFrom for BlocksGet { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::BlocksGet(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: BlocksGet) -> ProtocolMessage { + let overlay = *msg.overlay(); + ProtocolMessage::from_client_request_v0(ClientRequestContentV0::BlocksGet(msg), overlay) + } +} + +impl Actor<'_, BlocksGet, Block> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, BlocksGet, Block> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = BlocksGet::try_from(msg)?; + let server = { BROKER.read().await.get_server_broker()? 
}; + let mut lock = fsm.lock().await; + let mut something_was_sent = false; + + #[async_recursion] + async fn process_children( + children: &Vec, + server: &RwLock, + overlay: &OverlayId, + lock: &mut MutexGuard<'_, NoiseFSM>, + req_id: i64, + include_children: bool, + something_was_sent: &mut bool, + ) { + for block_id in children { + if let Ok(block) = { server.read().await.get_block(overlay, block_id) } { + let grand_children = block.children().to_vec(); + if let Err(_) = lock.send_in_reply_to(block.into(), req_id).await { + break; + } + *something_was_sent = true; + if include_children { + process_children( + &grand_children, + server, + overlay, + lock, + req_id, + include_children, + something_was_sent, + ) + .await; + } + } + } + } + process_children( + req.ids(), + &server, + req.overlay(), + &mut lock, + self.id(), + req.include_children(), + &mut something_was_sent, + ) + .await; + + if !something_was_sent { + let re: Result<(), ServerError> = Err(ServerError::NotFound); + lock.send_in_reply_to(re.into(), self.id()).await?; + } else { + let re: Result<(), ServerError> = Err(ServerError::EndOfStream); + lock.send_in_reply_to(re.into(), self.id()).await?; + } + + Ok(()) + } +} diff --git a/ng-net/src/actors/client/blocks_put.rs b/ng-net/src/actors/client/blocks_put.rs new file mode 100644 index 0000000..193aae8 --- /dev/null +++ b/ng-net/src/actors/client/blocks_put.rs @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl BlocksPut { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for BlocksPut { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::BlocksPut(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: BlocksPut) -> ProtocolMessage { + let overlay = *msg.overlay(); + ProtocolMessage::from_client_request_v0(ClientRequestContentV0::BlocksPut(msg), overlay) + } +} + +impl Actor<'_, BlocksPut, ()> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, BlocksPut, ()> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = BlocksPut::try_from(msg)?; + let sb = { BROKER.read().await.get_server_broker()? 
}; + let mut res: Result<(), ServerError> = Ok(()); + let overlay = req.overlay().clone(); + match req { + BlocksPut::V0(v0) => { + for block in v0.blocks { + let r = sb.read().await.put_block(&overlay, block); + if r.is_err() { + res = r; + break; + } + } + } + } + + fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/client/client_event.rs b/ng-net/src/actors/client/client_event.rs new file mode 100644 index 0000000..7edda7f --- /dev/null +++ b/ng-net/src/actors/client/client_event.rs @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::OverlayId; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl ClientEvent { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for ClientEvent { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + + if let ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 { + content: ClientMessageContentV0::ClientEvent(e), + .. + })) = msg + { + Ok(e) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(e: ClientEvent) -> ProtocolMessage { + ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 { + content: ClientMessageContentV0::ClientEvent(e), + overlay: OverlayId::nil(), + padding: vec![] + })) + } +} + +impl Actor<'_, ClientEvent, ()> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, ClientEvent, ()> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = ClientEvent::try_from(msg)?; + match req { + ClientEvent::InboxPopRequest => { + let sb = { BROKER.read().await.get_server_broker()? }; + let user = {fsm.lock().await.user_id()?}; + let res: Result = { + sb.read().await.inbox_pop_for_user(user).await + }; + + if let Ok(msg) = res { + let _ = fsm + .lock() + .await + .send(ProtocolMessage::ClientMessage(ClientMessage::V0( + ClientMessageV0 { + overlay: msg.body.to_overlay.clone(), + padding: vec![], + content: ClientMessageContentV0::InboxReceive{msg, from_queue: true}, + }, + ))) + .await; + } + } + } + + Ok(()) + } +} diff --git a/ng-net/src/actors/client/commit_get.rs b/ng-net/src/actors/client/commit_get.rs new file mode 100644 index 0000000..34913a4 --- /dev/null +++ b/ng-net/src/actors/client/commit_get.rs @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
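+ *
+ * (Illustrative aside.) The block actors above compose naturally on the
+ * client side: query BlocksExist first, then upload only what the broker
+ * reports as missing. A hedged sketch, assuming the request types shaped as
+ * in this file set, a prepared BlocksExist value in `exist_req`, and a
+ * Vec<Block> in `blocks`:
+ *
+ *     let found = match actor.request(exist_req.into(), fsm.clone()).await? {
+ *         SoS::Single(BlocksFound::V0(f)) => f,
+ *         _ => return Err(NgError::ProtocolError(ProtocolError::InvalidValue)),
+ *     };
+ *     let to_put: Vec<Block> = blocks
+ *         .into_iter()
+ *         .filter(|b| found.missing.contains(&b.id()))
+ *         .collect();
+ *     // then send a BlocksPut::V0 carrying `to_put`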
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::{Block, OverlayId}; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl CommitGet { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } + + pub fn overlay(&self) -> &OverlayId { + match self { + Self::V0(v0) => v0.overlay.as_ref().unwrap(), + } + } + pub fn set_overlay(&mut self, overlay: OverlayId) { + match self { + Self::V0(v0) => v0.overlay = Some(overlay), + } + } +} + +impl TryFrom for CommitGet { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::CommitGet(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: CommitGet) -> ProtocolMessage { + let overlay = *msg.overlay(); + ProtocolMessage::from_client_request_v0(ClientRequestContentV0::CommitGet(msg), overlay) + } +} + +impl TryFrom for Block { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let res: ClientResponseContentV0 = msg.try_into()?; + if let ClientResponseContentV0::Block(a) = res { + Ok(a) + } else { + log_debug!("INVALID {:?}", res); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(b: Block) -> ProtocolMessage { + let mut cr: ClientResponse = ClientResponseContentV0::Block(b).into(); + cr.set_result(ServerError::PartialContent.into()); + cr.into() + } +} + +impl Actor<'_, CommitGet, Block> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, CommitGet, Block> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = CommitGet::try_from(msg)?; + let broker = { BROKER.read().await.get_server_broker()? }; + let blocks_res = { broker.read().await.get_commit(req.overlay(), req.id()) }; + // IF NEEDED, the get_commit could be changed to return a stream, and then the send_in_reply_to would be also totally async + match blocks_res { + Ok(blocks) => { + if blocks.is_empty() { + let re: Result<(), ServerError> = Err(ServerError::EmptyStream); + fsm.lock() + .await + .send_in_reply_to(re.into(), self.id()) + .await?; + return Ok(()); + } + let mut lock = fsm.lock().await; + + for block in blocks { + lock.send_in_reply_to(block.into(), self.id()).await?; + } + let re: Result<(), ServerError> = Err(ServerError::EndOfStream); + lock.send_in_reply_to(re.into(), self.id()).await?; + } + Err(e) => { + let re: Result<(), ServerError> = Err(e); + fsm.lock() + .await + .send_in_reply_to(re.into(), self.id()) + .await?; + } + } + + Ok(()) + } +} diff --git a/ng-net/src/actors/client/event.rs b/ng-net/src/actors/client/event.rs new file mode 100644 index 0000000..bc187c7 --- /dev/null +++ b/ng-net/src/actors/client/event.rs @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
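+ *
+ * (Illustrative aside.) CommitGet above encodes a three-way streaming
+ * convention on the reply channel: each Block rides in a ClientResponse
+ * whose result code is ServerError::PartialContent, a commit with no blocks
+ * yields ServerError::EmptyStream, and a successful stream is terminated by
+ * ServerError::EndOfStream. Through the Actor machinery this surfaces to the
+ * requester as SoS::Stream (futures::StreamExt in scope), so collecting a
+ * commit is just:
+ *
+ *     let mut blocks = Vec::new();
+ *     if let SoS::Stream(mut rx) = actor.request(commit_get.into(), fsm).await? {
+ *         while let Some(block) = rx.next().await {
+ *             blocks.push(block);
+ *         }
+ *     }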
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::*; + +#[cfg(not(target_arch = "wasm32"))] +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl PublishEvent { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } + + pub fn new(event: Event, overlay: OverlayId) -> PublishEvent { + PublishEvent(event, Some(overlay)) + } + pub fn set_overlay(&mut self, overlay: OverlayId) { + self.1 = Some(overlay); + } + + pub fn overlay(&self) -> &OverlayId { + self.1.as_ref().unwrap() + } + pub fn event(&self) -> &Event { + &self.0 + } + pub fn take_event(self) -> Event { + self.0 + } +} + +impl TryFrom for PublishEvent { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::PublishEvent(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: PublishEvent) -> ProtocolMessage { + let overlay = msg.1.unwrap(); + ProtocolMessage::from_client_request_v0(ClientRequestContentV0::PublishEvent(msg), overlay) + } +} + +impl Actor<'_, PublishEvent, ()> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, PublishEvent, ()> { + async fn respond( + &mut self, + _msg: ProtocolMessage, + _fsm: Arc>, + ) -> Result<(), ProtocolError> { + #[cfg(not(target_arch = "wasm32"))] + { + let req = PublishEvent::try_from(_msg)?; + // send a ProtocolError if invalid signatures (will disconnect the client) + req.event().verify()?; + let overlay = req.overlay().clone(); + let (user_id, remote_peer) = { + let fsm = _fsm.lock().await; + ( + fsm.user_id()?, + fsm.remote_peer().ok_or(ProtocolError::ActorError)?, + ) + }; + let res = { + let broker = BROKER.read().await; + broker + .dispatch_event(&overlay, req.take_event(), &user_id, &remote_peer) + .await + }; + if res.is_err() { + let res: Result<(), ServerError> = Err(res.unwrap_err()); + _fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + } else { + let broker = { BROKER.read().await.get_server_broker()? }; + for client in res.unwrap() { + broker + .read() + .await + .remove_all_subscriptions_of_client(&client) + .await; + } + let finalres: Result<(), ServerError> = Ok(()); + _fsm.lock() + .await + .send_in_reply_to(finalres.into(), self.id()) + .await?; + } + } + Ok(()) + } +} diff --git a/ng-net/src/actors/client/inbox_post.rs b/ng-net/src/actors/client/inbox_post.rs new file mode 100644 index 0000000..1a07fcf --- /dev/null +++ b/ng-net/src/actors/client/inbox_post.rs @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
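+ *
+ * (Design note on event.rs above.) PublishEvent distinguishes two failure
+ * channels: an invalid event signature returns a ProtocolError from
+ * respond(), which tears down the connection, while a failed dispatch is
+ * wrapped as Result<(), ServerError> and sent back in-band, leaving the
+ * connection usable. The Ok value of dispatch_event appears to carry the
+ * clients that could no longer be reached; their subscriptions are then
+ * removed wholesale.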
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::OverlayId; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl InboxPost { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for InboxPost { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::InboxPost(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: InboxPost) -> ProtocolMessage { + ProtocolMessage::from_client_request_v0( + ClientRequestContentV0::InboxPost(msg), + OverlayId::nil(), + ) + } +} + +impl Actor<'_, InboxPost, ()> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, InboxPost, ()> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = InboxPost::try_from(msg)?; + let sb = { BROKER.read().await.get_server_broker()? }; + let res: Result<(), ServerError> = sb + .read() + .await.inbox_post(req).await; + + fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/client/inbox_register.rs b/ng-net/src/actors/client/inbox_register.rs new file mode 100644 index 0000000..7283aba --- /dev/null +++ b/ng-net/src/actors/client/inbox_register.rs @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::OverlayId; +use ng_repo::utils::verify; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl InboxRegister { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for InboxRegister { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::InboxRegister(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: InboxRegister) -> ProtocolMessage { + ProtocolMessage::from_client_request_v0( + ClientRequestContentV0::InboxRegister(msg), + OverlayId::nil(), + ) + } +} + +impl Actor<'_, InboxRegister, ()> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, InboxRegister, ()> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = InboxRegister::try_from(msg)?; + + // verify registration + if verify(&req.challenge, req.sig, req.inbox_id).is_err() { + fsm.lock() + .await + .send_in_reply_to(Result::<(), _>::Err(ServerError::InvalidSignature).into(), self.id()) + .await?; + return Ok(()) + } + + let sb = { BROKER.read().await.get_server_broker()? }; + + let user_id = { + let fsm = fsm.lock().await; + fsm.user_id()? 
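+            // note: this block scopes the FSM lock, so the guard is dropped
+            // here and send_in_reply_to() below can re-lock the FSM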
+ }; + + let res: Result<(), ServerError> = sb + .read() + .await.inbox_register(user_id, req); + + fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/client/mod.rs b/ng-net/src/actors/client/mod.rs new file mode 100644 index 0000000..05f341d --- /dev/null +++ b/ng-net/src/actors/client/mod.rs @@ -0,0 +1,25 @@ +pub mod repo_pin_status; + +pub mod pin_repo; + +pub mod topic_sub; + +pub mod event; + +pub mod commit_get; + +pub mod topic_sync_req; + +pub mod blocks_put; + +pub mod blocks_exist; + +pub mod blocks_get; + +pub mod wallet_put_export; + +pub mod inbox_post; + +pub mod inbox_register; + +pub mod client_event; \ No newline at end of file diff --git a/ng-net/src/actors/client/pin_repo.rs b/ng-net/src/actors/client/pin_repo.rs new file mode 100644 index 0000000..495d89f --- /dev/null +++ b/ng-net/src/actors/client/pin_repo.rs @@ -0,0 +1,230 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::repo::Repo; +use ng_repo::types::*; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl PinRepo { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } + pub fn for_branch(repo: &Repo, branch: &BranchId, broker_id: &DirectPeerId) -> PinRepo { + let overlay = OverlayAccess::new_write_access_from_store(&repo.store); + let mut rw_topics = Vec::with_capacity(1); + let mut ro_topics = vec![]; + let branch = repo.branches.get(branch).unwrap(); + + if let Some(privkey) = &branch.topic_priv_key { + rw_topics.push(PublisherAdvert::new( + branch.topic.unwrap(), + privkey.clone(), + *broker_id, + )); + } else { + ro_topics.push(branch.topic.unwrap()); + } + + PinRepo::V0(PinRepoV0 { + hash: repo.id.into(), + overlay, + // TODO: overlay_root_topic + overlay_root_topic: None, + expose_outer: false, + peers: vec![], + max_peer_count: 0, + //allowed_peers: vec![], + ro_topics, + rw_topics, + }) + } + pub fn from_repo(repo: &Repo, broker_id: &DirectPeerId) -> PinRepo { + let overlay = OverlayAccess::new_write_access_from_store(&repo.store); + let mut rw_topics = Vec::with_capacity(repo.branches.len()); + let mut ro_topics = vec![]; + for (_, branch) in repo.branches.iter() { + if let Some(privkey) = &branch.topic_priv_key { + rw_topics.push(PublisherAdvert::new( + branch.topic.unwrap(), + privkey.clone(), + *broker_id, + )); + } else { + ro_topics.push(branch.topic.unwrap()); + } + } + PinRepo::V0(PinRepoV0 { + hash: repo.id.into(), + overlay, + // TODO: overlay_root_topic + overlay_root_topic: None, + expose_outer: false, + peers: vec![], + max_peer_count: 0, + //allowed_peers: vec![], + ro_topics, + rw_topics, + }) + } +} + +impl TryFrom for PinRepo { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::PinRepo(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: PinRepo) -> ProtocolMessage { + let overlay = 
match msg {
+            PinRepo::V0(ref v0) => v0.overlay.overlay_id_for_client_protocol_purpose().clone(),
+        };
+        ProtocolMessage::from_client_request_v0(ClientRequestContentV0::PinRepo(msg), overlay)
+    }
+}
+
+impl TryFrom<ProtocolMessage> for RepoOpened {
+    type Error = ProtocolError;
+    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
+        let res: ClientResponseContentV0 = msg.try_into()?;
+        if let ClientResponseContentV0::RepoOpened(a) = res {
+            Ok(a)
+        } else {
+            log_debug!("INVALID {:?}", res);
+            Err(ProtocolError::InvalidValue)
+        }
+    }
+}
+
+impl From<RepoOpened> for ProtocolMessage {
+    fn from(res: RepoOpened) -> ProtocolMessage {
+        ClientResponseContentV0::RepoOpened(res).into()
+    }
+}
+
+impl Actor<'_, PinRepo, RepoOpened> {}
+
+#[async_trait::async_trait]
+impl EActor for Actor<'_, PinRepo, RepoOpened> {
+    async fn respond(
+        &mut self,
+        msg: ProtocolMessage,
+        fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> Result<(), ProtocolError> {
+        let req = PinRepo::try_from(msg)?;
+
+        let (sb, server_peer_id) = {
+            let b = BROKER.read().await;
+            (b.get_server_broker()?, b.get_server_peer_id())
+        };
+
+        // check the validity of the PublisherAdvert(s). this will return a ProtocolError (will close the connection)
+        for pub_ad in req.rw_topics() {
+            pub_ad.verify_for_broker(&server_peer_id)?;
+        }
+
+        let (user_id, remote_peer) = {
+            let fsm = fsm.lock().await;
+            (fsm.user_id()?, fsm.get_client_peer_id()?)
+        };
+
+        let result = {
+            match req.overlay_access() {
+                OverlayAccess::ReadOnly(r) => {
+                    if r.is_inner()
+                        || req.overlay() != r
+                        || req.rw_topics().len() > 0
+                        || req.overlay_root_topic().is_some()
+                    {
+                        Err(ServerError::InvalidRequest)
+                    } else {
+                        sb.read()
+                            .await
+                            .pin_repo_read(
+                                req.overlay(),
+                                req.hash(),
+                                &user_id,
+                                req.ro_topics(),
+                                &remote_peer,
+                            )
+                            .await
+                    }
+                }
+                OverlayAccess::ReadWrite((w, r)) => {
+                    if req.overlay() != w
+                        || !w.is_inner()
+                        || r.is_inner()
+                        || req.expose_outer() && req.rw_topics().is_empty()
+                    {
+                        // we do not allow to expose_outer if not a publisher for at least one topic
+                        // TODO add a check on "|| overlay_root_topic.is_none()" because it should be mandatory to have one (not sent by client at the moment)
+                        Err(ServerError::InvalidRequest)
+                    } else {
+                        sb.read()
+                            .await
+                            .pin_repo_write(
+                                req.overlay_access(),
+                                req.hash(),
+                                &user_id,
+                                req.ro_topics(),
+                                req.rw_topics(),
+                                req.overlay_root_topic(),
+                                req.expose_outer(),
+                                &remote_peer,
+                            )
+                            .await
+                    }
+                }
+                OverlayAccess::WriteOnly(w) => {
+                    if !w.is_inner() || req.overlay() != w || req.expose_outer() {
+                        Err(ServerError::InvalidRequest)
+                    } else {
+                        sb.read()
+                            .await
+                            .pin_repo_write(
+                                req.overlay_access(),
+                                req.hash(),
+                                &user_id,
+                                req.ro_topics(),
+                                req.rw_topics(),
+                                req.overlay_root_topic(),
+                                false,
+                                &remote_peer,
+                            )
+                            .await
+                    }
+                }
+            }
+        };
+        fsm.lock()
+            .await
+            .send_in_reply_to(result.into(), self.id())
+            .await?;
+        Ok(())
+    }
+}
diff --git a/ng-net/src/actors/client/repo_pin_status.rs b/ng-net/src/actors/client/repo_pin_status.rs
new file mode 100644
index 0000000..b455c3a
--- /dev/null
+++ b/ng-net/src/actors/client/repo_pin_status.rs
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+ * All rights reserved.
+ * Licensed under the Apache License, Version 2.0
+ * <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+ * or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+ * at your option. All files in the project carrying such
+ * notice may not be copied, modified, or distributed except
+ * according to those terms.
+*/
+
+use std::sync::Arc;
+
+use async_std::sync::Mutex;
+
+use ng_repo::errors::*;
+use ng_repo::log::*;
+
+use crate::broker::BROKER;
+use crate::connection::NoiseFSM;
+use crate::types::*;
+use crate::{actor::*, types::ProtocolMessage};
+
+impl RepoPinStatusReq {
+    pub fn get_actor(&self, id: i64) -> Box<dyn EActor> {
+        Actor::<RepoPinStatusReq, RepoPinStatus>::new_responder(id)
+    }
+}
+
+impl TryFrom<ProtocolMessage> for RepoPinStatusReq {
+    type Error = ProtocolError;
+    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
+        let req: ClientRequestContentV0 = msg.try_into()?;
+        if let ClientRequestContentV0::RepoPinStatusReq(a) = req {
+            Ok(a)
+        } else {
+            log_debug!("INVALID {:?}", req);
+            Err(ProtocolError::InvalidValue)
+        }
+    }
+}
+
+impl From<RepoPinStatusReq> for ProtocolMessage {
+    fn from(msg: RepoPinStatusReq) -> ProtocolMessage {
+        let overlay = *msg.overlay();
+        ProtocolMessage::from_client_request_v0(
+            ClientRequestContentV0::RepoPinStatusReq(msg),
+            overlay,
+        )
+    }
+}
+
+impl TryFrom<ProtocolMessage> for RepoPinStatus {
+    type Error = ProtocolError;
+    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
+        let res: ClientResponseContentV0 = msg.try_into()?;
+        if let ClientResponseContentV0::RepoPinStatus(a) = res {
+            Ok(a)
+        } else {
+            log_debug!("INVALID {:?}", res);
+            Err(ProtocolError::InvalidValue)
+        }
+    }
+}
+
+impl From<RepoPinStatus> for ProtocolMessage {
+    fn from(res: RepoPinStatus) -> ProtocolMessage {
+        ClientResponseContentV0::RepoPinStatus(res).into()
+    }
+}
+
+impl Actor<'_, RepoPinStatusReq, RepoPinStatus> {}
+
+#[async_trait::async_trait]
+impl EActor for Actor<'_, RepoPinStatusReq, RepoPinStatus> {
+    async fn respond(
+        &mut self,
+        msg: ProtocolMessage,
+        fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> Result<(), ProtocolError> {
+        let req = RepoPinStatusReq::try_from(msg)?;
+        let sb = { BROKER.read().await.get_server_broker()? };
+        let res = {
+            sb.read().await.get_repo_pin_status(
+                req.overlay(),
+                req.hash(),
+                &fsm.lock().await.user_id()?,
+            )
+        };
+        fsm.lock()
+            .await
+            .send_in_reply_to(res.into(), self.id())
+            .await?;
+        Ok(())
+    }
+}
diff --git a/ng-net/src/actors/client/topic_sub.rs b/ng-net/src/actors/client/topic_sub.rs
new file mode 100644
index 0000000..aa7b81a
--- /dev/null
+++ b/ng-net/src/actors/client/topic_sub.rs
@@ -0,0 +1,139 @@
+/*
+ * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+ * All rights reserved.
+ * Licensed under the Apache License, Version 2.0
+ * <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+ * or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+ * at your option. All files in the project carrying such
+ * notice may not be copied, modified, or distributed except
+ * according to those terms.
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::repo::{BranchInfo, Repo}; +use ng_repo::types::*; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl TopicSub { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } + /// only set broker_id if you want to be a publisher + pub fn new(repo: &Repo, branch: &BranchInfo, broker_id: Option<&DirectPeerId>) -> TopicSub { + let (overlay, publisher) = if broker_id.is_some() && branch.topic_priv_key.is_some() { + ( + repo.store.inner_overlay(), + Some(PublisherAdvert::new( + branch.topic.unwrap(), + branch.topic_priv_key.to_owned().unwrap(), + *broker_id.unwrap(), + )), + ) + } else { + (repo.store.inner_overlay(), None) + }; + + TopicSub::V0(TopicSubV0 { + repo_hash: repo.id.into(), + overlay: Some(overlay), + topic: branch.topic.unwrap(), + publisher, + }) + } +} + +impl TryFrom for TopicSub { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::TopicSub(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: TopicSub) -> ProtocolMessage { + let overlay = *msg.overlay(); + ProtocolMessage::from_client_request_v0(ClientRequestContentV0::TopicSub(msg), overlay) + } +} + +impl TryFrom for TopicSubRes { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let res: ClientResponseContentV0 = msg.try_into()?; + if let ClientResponseContentV0::TopicSubRes(a) = res { + Ok(a) + } else { + log_debug!("INVALID {:?}", res); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(res: TopicSubRes) -> ProtocolMessage { + ClientResponseContentV0::TopicSubRes(res).into() + } +} + +impl Actor<'_, TopicSub, TopicSubRes> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, TopicSub, TopicSubRes> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = TopicSub::try_from(msg)?; + + let (sb, server_peer_id) = { + let b = BROKER.read().await; + (b.get_server_broker()?, b.get_server_peer_id()) + }; + + // check the validity of the PublisherAdvert. this will return a ProtocolError (will close the connection) + if let Some(advert) = req.publisher() { + advert.verify_for_broker(&server_peer_id)?; + } + + let (user_id, remote_peer) = { + let fsm = fsm.lock().await; + (fsm.user_id()?, fsm.get_client_peer_id()?) + }; + + let res = { + sb.read() + .await + .topic_sub( + req.overlay(), + req.hash(), + req.topic(), + &user_id, + req.publisher(), + &remote_peer, + ) + .await + }; + + fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/client/topic_sync_req.rs b/ng-net/src/actors/client/topic_sync_req.rs new file mode 100644 index 0000000..d358ecf --- /dev/null +++ b/ng-net/src/actors/client/topic_sync_req.rs @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::repo::Repo; +use ng_repo::types::*; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl TopicSyncReq { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } + + pub fn new_empty(topic: TopicId, overlay: &OverlayId) -> Self { + TopicSyncReq::V0(TopicSyncReqV0 { + topic, + known_heads: vec![], + target_heads: vec![], + overlay: Some(*overlay), + known_commits: None, + }) + } + + pub fn new( + repo: &Repo, + topic_id: &TopicId, + known_heads: Vec, + target_heads: Vec, + known_commits: Option, + ) -> TopicSyncReq { + TopicSyncReq::V0(TopicSyncReqV0 { + topic: *topic_id, + known_heads, + target_heads, + overlay: Some(repo.store.get_store_repo().overlay_id_for_read_purpose()), + known_commits, + }) + } +} + +impl TryFrom for TopicSyncReq { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::TopicSyncReq(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: TopicSyncReq) -> ProtocolMessage { + let overlay = *msg.overlay(); + ProtocolMessage::from_client_request_v0(ClientRequestContentV0::TopicSyncReq(msg), overlay) + } +} + +impl TryFrom for TopicSyncRes { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let res: ClientResponseContentV0 = msg.try_into()?; + if let ClientResponseContentV0::TopicSyncRes(a) = res { + Ok(a) + } else { + log_debug!("INVALID {:?}", res); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(b: TopicSyncRes) -> ProtocolMessage { + let mut cr: ClientResponse = ClientResponseContentV0::TopicSyncRes(b).into(); + cr.set_result(ServerError::PartialContent.into()); + cr.into() + } +} + +impl Actor<'_, TopicSyncReq, TopicSyncRes> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, TopicSyncReq, TopicSyncRes> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = TopicSyncReq::try_from(msg)?; + + let sb = { BROKER.read().await.get_server_broker()? 
}; + + let res = { + sb.read().await.topic_sync_req( + req.overlay(), + req.topic(), + req.known_heads(), + req.target_heads(), + req.known_commits(), + ) + }; + + // IF NEEDED, the topic_sync_req could be changed to return a stream, and then the send_in_reply_to would be also totally async + match res { + Ok(blocks) => { + if blocks.is_empty() { + let re: Result<(), ServerError> = Err(ServerError::EmptyStream); + fsm.lock() + .await + .send_in_reply_to(re.into(), self.id()) + .await?; + return Ok(()); + } + let mut lock = fsm.lock().await; + + for block in blocks { + lock.send_in_reply_to(block.into(), self.id()).await?; + } + let re: Result<(), ServerError> = Err(ServerError::EndOfStream); + lock.send_in_reply_to(re.into(), self.id()).await?; + } + Err(e) => { + let re: Result<(), ServerError> = Err(e); + fsm.lock() + .await + .send_in_reply_to(re.into(), self.id()) + .await?; + } + } + Ok(()) + } +} diff --git a/ng-net/src/actors/client/wallet_put_export.rs b/ng-net/src/actors/client/wallet_put_export.rs new file mode 100644 index 0000000..4a52bfc --- /dev/null +++ b/ng-net/src/actors/client/wallet_put_export.rs @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::OverlayId; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl WalletPutExport { + pub fn get_actor(&self, id: i64) -> Box { + Actor::::new_responder(id) + } +} + +impl TryFrom for WalletPutExport { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let req: ClientRequestContentV0 = msg.try_into()?; + if let ClientRequestContentV0::WalletPutExport(a) = req { + Ok(a) + } else { + log_debug!("INVALID {:?}", req); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: WalletPutExport) -> ProtocolMessage { + ProtocolMessage::from_client_request_v0( + ClientRequestContentV0::WalletPutExport(msg), + OverlayId::nil(), + ) + } +} + +impl Actor<'_, WalletPutExport, ()> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, WalletPutExport, ()> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = WalletPutExport::try_from(msg)?; + let sb = { BROKER.read().await.get_server_broker()? }; + let mut res: Result<(), ServerError> = Ok(()); + + match req { + WalletPutExport::V0(v0) => { + if v0.is_rendezvous { + res = sb + .read() + .await + .put_wallet_at_rendezvous(v0.rendezvous_id, v0.wallet) + .await; + } else { + sb.read() + .await + .put_wallet_export(v0.rendezvous_id, v0.wallet) + .await; + } + } + } + + fsm.lock() + .await + .send_in_reply_to(res.into(), self.id()) + .await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/connecting.rs b/ng-net/src/actors/connecting.rs new file mode 100644 index 0000000..46d776a --- /dev/null +++ b/ng-net/src/actors/connecting.rs @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. 
+ * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; + +use crate::connection::NoiseFSM; +use crate::{actor::*, types::ProtocolMessage}; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Connecting(); + +impl From for ProtocolMessage { + fn from(_msg: Connecting) -> ProtocolMessage { + unimplemented!(); + } +} + +impl Actor<'_, Connecting, ()> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, Connecting, ()> { + async fn respond( + &mut self, + _msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + fsm.lock().await.remove_actor(0).await; + Ok(()) + } +} diff --git a/ng-net/src/actors/ext/get.rs b/ng-net/src/actors/ext/get.rs new file mode 100644 index 0000000..40bf50e --- /dev/null +++ b/ng-net/src/actors/ext/get.rs @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::object::Object; +use ng_repo::store::Store; +use ng_repo::types::Block; + +use super::super::StartProtocol; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl ExtObjectGetV0 { + pub fn get_actor(&self) -> Box { + Actor::>::new_responder(0) + } +} + +impl TryFrom for ExtObjectGetV0 { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Ext(ExtRequest::V0(ExtRequestV0 { + content: ExtRequestContentV0::ExtObjectGet(a), + .. + }))) = msg + { + Ok(a) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: ExtObjectGetV0) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for ExtRequestContentV0 { + fn from(msg: ExtObjectGetV0) -> ExtRequestContentV0 { + ExtRequestContentV0::ExtObjectGet(msg) + } +} + +impl TryFrom for Vec { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result, Self::Error> { + let content: ExtResponseContentV0 = msg.try_into()?; + if let ExtResponseContentV0::Blocks(res) = content { + Ok(res) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl Actor<'_, ExtObjectGetV0, Vec> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, ExtObjectGetV0, Vec> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = ExtObjectGetV0::try_from(msg)?; + let sb = { + let broker = BROKER.read().await; + broker.get_server_broker()? + }; + let lock = sb.read().await; + let store = Store::new_from_overlay_id(&req.overlay, lock.get_block_storage()); + let mut blocks = Vec::new(); + for obj_id in req.ids { + // TODO: deal with RandomAccessFiles (or is it just working?) 
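+            // an object that fails to load (missing or corrupt blocks) is
+            // skipped silently: the response simply carries fewer blocks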
+ if let Ok(obj) = Object::load_without_header(obj_id, None, &store) { + blocks.append(&mut obj.into_blocks()); + //TODO: load the obj.files too (if req.include_files) + } + } + let response: ExtResponseV0 = Ok(ExtResponseContentV0::Blocks(blocks)).into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/ext/mod.rs b/ng-net/src/actors/ext/mod.rs new file mode 100644 index 0000000..8f6fcb3 --- /dev/null +++ b/ng-net/src/actors/ext/mod.rs @@ -0,0 +1,3 @@ +pub mod wallet_get_export; + +pub mod get; diff --git a/ng-net/src/actors/ext/wallet_get_export.rs b/ng-net/src/actors/ext/wallet_get_export.rs new file mode 100644 index 0000000..f5fa017 --- /dev/null +++ b/ng-net/src/actors/ext/wallet_get_export.rs @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::stream::StreamExt; +use async_std::sync::Mutex; + +use ng_repo::errors::*; +use ng_repo::log::*; + +use super::super::StartProtocol; + +use crate::broker::BROKER; +use crate::connection::NoiseFSM; +use crate::types::*; +use crate::{actor::*, types::ProtocolMessage}; + +impl ExtWalletGetExportV0 { + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl TryFrom for ExtWalletGetExportV0 { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Ext(ExtRequest::V0(ExtRequestV0 { + content: ExtRequestContentV0::WalletGetExport(a), + .. 
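+            // `..` skips the other ExtRequestV0 fields; only the
+            // WalletGetExport content is used by this responder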
+ }))) = msg + { + Ok(a) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: ExtWalletGetExportV0) -> ProtocolMessage { + unimplemented!(); + } +} + +impl From for ExtRequestContentV0 { + fn from(msg: ExtWalletGetExportV0) -> ExtRequestContentV0 { + ExtRequestContentV0::WalletGetExport(msg) + } +} + +impl TryFrom for ExportedWallet { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + let content: ExtResponseContentV0 = msg.try_into()?; + if let ExtResponseContentV0::Wallet(res) = content { + Ok(res) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl Actor<'_, ExtWalletGetExportV0, ExportedWallet> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, ExtWalletGetExportV0, ExportedWallet> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let req = ExtWalletGetExportV0::try_from(msg)?; + let result = if req.is_rendezvous { + let mut receiver = { + let broker = BROKER.read().await; + let sb = broker.get_server_broker()?; + let lock = sb.read().await; + lock.wait_for_wallet_at_rendezvous(req.id).await + }; + + match receiver.next().await { + None => Err(ServerError::BrokerError), + Some(Err(e)) => Err(e), + Some(Ok(w)) => Ok(ExtResponseContentV0::Wallet(w)), + } + } else { + { + let broker = BROKER.read().await; + let sb = broker.get_server_broker()?; + let lock = sb.read().await; + lock.get_wallet_export(req.id).await + } + .map(|wallet| ExtResponseContentV0::Wallet(wallet)) + }; + let response: ExtResponseV0 = result.into(); + fsm.lock().await.send(response.into()).await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/mod.rs b/ng-net/src/actors/mod.rs new file mode 100644 index 0000000..96c2172 --- /dev/null +++ b/ng-net/src/actors/mod.rs @@ -0,0 +1,25 @@ +//! List of actors, each one for a specific Protocol message + +#[doc(hidden)] +pub mod noise; +pub use noise::*; + +#[doc(hidden)] +pub mod start; +pub use start::*; + +#[doc(hidden)] +pub mod probe; +pub use probe::*; + +#[doc(hidden)] +pub mod connecting; +pub use connecting::*; + +pub mod client; + +pub mod admin; + +pub mod app; + +pub mod ext; diff --git a/ng-net/src/actors/noise.rs b/ng-net/src/actors/noise.rs new file mode 100644 index 0000000..b7e9d51 --- /dev/null +++ b/ng-net/src/actors/noise.rs @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
+*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; + +use crate::{actor::*, connection::NoiseFSM, types::ProtocolMessage}; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct NoiseV0 { + // contains the handshake messages or the encrypted content of a ProtocolMessage + #[serde(with = "serde_bytes")] + pub data: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum Noise { + V0(NoiseV0), +} + +impl Noise { + pub fn data(&self) -> &[u8] { + match self { + Noise::V0(v0) => v0.data.as_slice(), + } + } +} + +impl From for ProtocolMessage { + fn from(msg: Noise) -> ProtocolMessage { + ProtocolMessage::Noise(msg) + } +} + +impl TryFrom for Noise { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Noise(n) = msg { + Ok(n) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl Actor<'_, Noise, Noise> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, Noise, Noise> { + async fn respond( + &mut self, + _msg: ProtocolMessage, + _fsm: Arc>, + ) -> Result<(), ProtocolError> { + Ok(()) + } +} diff --git a/ng-net/src/actors/probe.rs b/ng-net/src/actors/probe.rs new file mode 100644 index 0000000..f673e03 --- /dev/null +++ b/ng-net/src/actors/probe.rs @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; + +use crate::connection::NoiseFSM; +use crate::types::{ProbeResponse, MAGIC_NG_REQUEST}; +use crate::{actor::*, types::ProtocolMessage}; + +/// Send to probe if the server is a NextGraph broker. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Probe {} + +impl TryFrom for ProbeResponse { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::ProbeResponse(res) = msg { + Ok(res) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl TryFrom for Probe { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Probe(magic) = msg { + if magic == MAGIC_NG_REQUEST { + Ok(Probe {}) + } else { + Err(ProtocolError::InvalidValue) + } + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(_msg: Probe) -> ProtocolMessage { + ProtocolMessage::Probe(MAGIC_NG_REQUEST) + } +} + +impl Actor<'_, Probe, ProbeResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, Probe, ProbeResponse> { + async fn respond( + &mut self, + msg: ProtocolMessage, + _fsm: Arc>, + ) -> Result<(), ProtocolError> { + let _req = Probe::try_from(msg)?; + //let res = ProbeResponse(); + //fsm.lock().await.send(res.into()).await?; + Ok(()) + } +} diff --git a/ng-net/src/actors/start.rs b/ng-net/src/actors/start.rs new file mode 100644 index 0000000..46b64f7 --- /dev/null +++ b/ng-net/src/actors/start.rs @@ -0,0 +1,326 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. 
All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +use std::any::{Any, TypeId}; +use std::sync::Arc; + +use async_std::sync::Mutex; +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::UserId; + +use crate::actors::noise::Noise; +use crate::connection::NoiseFSM; +use crate::types::{ + AdminRequest, ClientInfo, CoreBrokerConnect, CoreBrokerConnectResponse, CoreMessage, + CoreMessageV0, CoreResponse, CoreResponseContentV0, CoreResponseV0, ExtRequest, +}; +use crate::{actor::*, types::ProtocolMessage}; + +// pub struct Noise3(Noise); + +/// Start chosen protocol +/// First message sent by the connecting peer +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum StartProtocol { + Client(ClientHello), + Ext(ExtRequest), + Core(CoreHello), + Admin(AdminRequest), + App(AppHello), + AppResponse(AppHelloResponse), +} + +impl StartProtocol { + pub fn type_id(&self) -> TypeId { + match self { + StartProtocol::Client(a) => a.type_id(), + StartProtocol::Core(a) => a.type_id(), + StartProtocol::Ext(a) => a.type_id(), + StartProtocol::Admin(a) => a.type_id(), + StartProtocol::App(a) => a.type_id(), + StartProtocol::AppResponse(a) => a.type_id(), + } + } + pub fn get_actor(&self) -> Box { + match self { + StartProtocol::Client(a) => a.get_actor(), + StartProtocol::Core(a) => a.get_actor(), + StartProtocol::Ext(a) => a.get_actor(), + StartProtocol::Admin(a) => a.get_actor(), + StartProtocol::App(a) => a.get_actor(), + StartProtocol::AppResponse(_) => panic!("AppResponse is not a request"), + } + } +} + +impl From for ProtocolMessage { + fn from(msg: StartProtocol) -> ProtocolMessage { + ProtocolMessage::Start(msg) + } +} + +/// Core Hello (finalizes the Noise handshake and sends CoreConnect) +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreHello { + // contains the 3rd Noise handshake message "s,se" + pub noise: Noise, + + /// Noise encrypted payload (a CoreMessage::CoreRequest::BrokerConnect) + #[serde(with = "serde_bytes")] + pub payload: Vec, +} + +impl CoreHello { + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl TryFrom for CoreBrokerConnectResponse { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::CoreMessage(CoreMessage::V0(CoreMessageV0::Response( + CoreResponse::V0(CoreResponseV0 { + content: CoreResponseContentV0::BrokerConnectResponse(a), + .. 
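+                // a single nested pattern unwraps CoreMessage -> Response -> content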
+ }), + ))) = msg + { + Ok(a) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: CoreHello) -> ProtocolMessage { + ProtocolMessage::Start(StartProtocol::Core(msg)) + } +} + +impl From for ProtocolMessage { + fn from(_msg: CoreBrokerConnect) -> ProtocolMessage { + unimplemented!(); + } +} + +impl Actor<'_, CoreBrokerConnect, CoreBrokerConnectResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, CoreBrokerConnect, CoreBrokerConnectResponse> { + async fn respond( + &mut self, + _msg: ProtocolMessage, + _fsm: Arc>, + ) -> Result<(), ProtocolError> { + //let req = CoreBrokerConnect::try_from(msg)?; + // let res = CoreBrokerConnectResponse::V0(CoreBrokerConnectResponseV0 { + // successes: vec![], + // errors: vec![], + // }); + // fsm.lock().await.send(res.into()).await?; + Ok(()) + } +} + +// /// External Hello (finalizes the Noise handshake and sends first ExtRequest) +// #[derive(Clone, Debug, Serialize, Deserialize)] +// pub struct ExtHello { +// // contains the 3rd Noise handshake message "s,se" +// pub noise: Noise, + +// /// Noise encrypted payload (an ExtRequest) +// #[serde(with = "serde_bytes")] +// pub payload: Vec, +// } + +// impl ExtHello { +// pub fn get_actor(&self) -> Box { +// Actor::::new_responder(0) +// } +// } + +// impl From for ProtocolMessage { +// fn from(msg: ExtHello) -> ProtocolMessage { +// ProtocolMessage::Start(StartProtocol::Ext(msg)) +// } +// } + +/// Client Hello +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientHello { + // contains the 3rd Noise handshake message "s,se" + Noise3(Noise), + Local, +} + +impl ClientHello { + pub fn type_id(&self) -> TypeId { + match self { + ClientHello::Noise3(a) => a.type_id(), + ClientHello::Local => TypeId::of::(), + } + } + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +/// Server hello sent upon a client connection +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ServerHelloV0 { + /// Nonce for ClientAuth + #[serde(with = "serde_bytes")] + pub nonce: Vec, +} + +/// Server hello sent upon a client connection +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ServerHello { + V0(ServerHelloV0), +} + +impl ServerHello { + pub fn nonce(&self) -> &Vec { + match self { + ServerHello::V0(o) => &o.nonce, + } + } +} + +impl From for ProtocolMessage { + fn from(msg: ClientHello) -> ProtocolMessage { + ProtocolMessage::Start(StartProtocol::Client(msg)) + } +} + +impl TryFrom for ClientHello { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::Client(a)) = msg { + Ok(a) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl TryFrom for ServerHello { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::ServerHello(server_hello) = msg { + Ok(server_hello) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl From for ProtocolMessage { + fn from(msg: ServerHello) -> ProtocolMessage { + ProtocolMessage::ServerHello(msg) + } +} + +impl Actor<'_, ClientHello, ServerHello> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, ClientHello, ServerHello> { + async fn respond( + &mut self, + msg: ProtocolMessage, + fsm: Arc>, + ) -> Result<(), ProtocolError> { + let _req = ClientHello::try_from(msg)?; + let res = ServerHello::V0(ServerHelloV0 { nonce: vec![] }); + fsm.lock().await.send(res.into()).await?; + Ok(()) + } +} + +// impl 
Actor<'_, ExtHello, ExtResponse> {} + +// #[async_trait::async_trait] +// impl EActor for Actor<'_, ExtHello, ExtResponse> { +// async fn respond( +// &mut self, +// _msg: ProtocolMessage, +// _fsm: Arc>, +// ) -> Result<(), ProtocolError> { +// Ok(()) +// } +// } + +// ///////////// APP HELLO /////////////// + +/// App Hello (finalizes the Noise handshake and sends info about device, and the user_id. +/// not signing any nonce because anyway, in the next message "AppSessionRequest", the user_priv_key will be sent and checked again) +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppHello { + // contains the 3rd Noise handshake message "s,se" + pub noise: Noise, + + pub user: Option, // None for Headless + + pub info: ClientInfo, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppHelloResponse { + pub result: u16, +} + +impl AppHello { + pub fn get_actor(&self) -> Box { + Actor::::new_responder(0) + } +} + +impl From for ProtocolMessage { + fn from(msg: AppHello) -> ProtocolMessage { + ProtocolMessage::Start(StartProtocol::App(msg)) + } +} + +impl From for ProtocolMessage { + fn from(msg: AppHelloResponse) -> ProtocolMessage { + ProtocolMessage::Start(StartProtocol::AppResponse(msg)) + } +} + +impl TryFrom for AppHelloResponse { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::Start(StartProtocol::AppResponse(res)) = msg { + Ok(res) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl Actor<'_, AppHello, AppHelloResponse> {} + +#[async_trait::async_trait] +impl EActor for Actor<'_, AppHello, AppHelloResponse> { + async fn respond( + &mut self, + _msg: ProtocolMessage, + _fsm: Arc>, + ) -> Result<(), ProtocolError> { + Ok(()) + } +} diff --git a/ng-net/src/app_protocol.rs b/ng-net/src/app_protocol.rs new file mode 100644 index 0000000..bc306cf --- /dev/null +++ b/ng-net/src/app_protocol.rs @@ -0,0 +1,1311 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! App Protocol (between LocalBroker and Verifier) + +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::NgError; +#[allow(unused_imports)] +use ng_repo::log::*; +use ng_repo::repo::CommitInfo; +use ng_repo::types::*; +use ng_repo::utils::{decode_digest, decode_key, decode_sym_key}; +use ng_repo::utils::{decode_overlayid, display_timestamp_local}; + +use crate::types::*; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppFetchContentV0 { + Get, // does not subscribe. 
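+    // like Get, but also keeps the subscription open to receive further updates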
+ Subscribe, + Update, + ReadQuery, + WriteQuery, + RdfDump, + History, + SignatureStatus, + SignatureRequest, + SignedSnapshotRequest, + Header, + CurrentHeads, + //Invoke, +} + +impl AppFetchContentV0 { + pub fn get_or_subscribe(subscribe: bool) -> Self { + if !subscribe { + AppFetchContentV0::Get + } else { + AppFetchContentV0::Subscribe + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum NgAccessV0 { + ReadCap(ReadCap), + Token(Digest), + #[serde(with = "serde_bytes")] + ExtRequest(Vec), + Key(BlockKey), + Inbox(PrivKey), + Topic(PrivKey), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum TargetBranchV0 { + Chat, + Stream, + Comments, + BackLinks, + Context, + BranchId(BranchId), + Named(String), // branch or commit + Commits(Vec), // only possible if access to their branch is given. must belong to the same branch. +} + +impl TargetBranchV0 { + pub fn is_valid_for_sparql_update(&self) -> bool { + match self { + Self::Commits(_) => false, + _ => true, + } + } + pub fn is_valid_for_discrete_update(&self) -> bool { + match self { + Self::BranchId(_) => true, + //TODO: add Named(s) is s is a branch => true + _ => false, + } + } + pub fn branch_id(&self) -> &BranchId { + match self { + Self::BranchId(id) => id, + _ => panic!("not a TargetBranchV0::BranchId"), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum NuriTargetV0 { + UserSite, // targets the whole data set of the user + + PublicProfile, + PublicStore(RepoId), + ProtectedProfile, + ProtectedStore(RepoId), + GroupStore(RepoId), + DialogStore(RepoId), + PrivateStore, + AllDialogs, + Dialog(String), // shortname of a Dialog + AllGroups, + Group(String), // shortname of a Group + + Repo(RepoId), + Inbox(PubKey), + + None, +} + +impl NuriTargetV0 { + pub fn is_valid_for_sparql_update(&self) -> bool { + match self { + Self::UserSite | Self::AllDialogs | Self::AllGroups => false, + _ => true, + } + } + pub fn is_valid_for_discrete_update(&self) -> bool { + match self { + Self::UserSite | Self::AllDialogs | Self::AllGroups | Self::None => false, + _ => true, + } + } + pub fn is_repo_id(&self) -> bool { + match self { + Self::Repo(_) => true, + _ => false, + } + } + pub fn repo_id(&self) -> &RepoId { + match self { + Self::Repo(id) => id, + _ => panic!("not a NuriTargetV0::Repo"), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommitInfoJs { + pub past: Vec, + pub key: String, + pub signature: Option, + pub author: String, + pub timestamp: String, + pub final_consistency: bool, + pub commit_type: CommitType, + pub branch: Option, + pub x: u32, + pub y: u32, +} + +impl From<&CommitInfo> for CommitInfoJs { + fn from(info: &CommitInfo) -> Self { + CommitInfoJs { + past: info.past.iter().map(|objid| objid.to_string()).collect(), + key: info.key.to_string(), + signature: info.signature.as_ref().map(|s| NuriV0::signature_ref(&s)), + author: info.author.clone(), + timestamp: display_timestamp_local(info.timestamp), + final_consistency: info.final_consistency, + commit_type: info.commit_type.clone(), + branch: info.branch.map(|b| b.to_string()), + x: info.x, + y: info.y, + } + } +} + +const DID_PREFIX: &str = "did:ng"; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct NuriV0 { + pub identity: Option, // None for personal identity + pub target: NuriTargetV0, + pub entire_store: bool, // If it is a store, will include all the docs belonging to the store + + pub objects: Vec, // used only for FileGet. // cannot be used for queries. 
only to download an object (file,commit..) + pub signature: Option, + + pub branch: Option, // if None, the main branch is chosen + pub overlay: Option, + + pub access: Vec, + pub topic: Option, + pub locator: Option, +} + +impl NuriV0 { + pub fn new_empty() -> Self { + NuriV0 { + identity: None, + target: NuriTargetV0::None, + entire_store: false, + objects: vec![], + signature: None, + branch: None, + overlay: None, + access: vec![], + topic: None, + locator: None, + } + } + pub fn copy_target_from(&mut self, nuri: &NuriV0) { + self.target = nuri.target.clone(); + } + pub fn commit_graph_name(commit_id: &ObjectId, overlay_id: &OverlayId) -> String { + format!("{DID_PREFIX}:c:{commit_id}:v:{overlay_id}") + } + + pub fn commit_graph_name_from_base64(commit_base64: &String, overlay_id: &OverlayId) -> String { + format!("{DID_PREFIX}:c:{commit_base64}:v:{overlay_id}") + } + + pub fn get_first_commit_ref(&self) -> Result { + let commit_id = match &self.branch { + Some(TargetBranchV0::Commits(commits)) => commits.get(0).ok_or(NgError::CommitNotFound)?, + _ => return Err(NgError::InvalidNuri) + }; + let commit_key = match self.access.get(0) { + Some(NgAccessV0::Key(key)) => key, + _ => return Err(NgError::InvalidNuri) + }; + Ok(ObjectRef::from_id_key(*commit_id, commit_key.clone())) + } + + pub fn from_store_repo(store_repo: &StoreRepo) -> Self { + NuriV0 { + identity: None, + target: NuriTargetV0::Repo(store_repo.repo_id().clone()), + entire_store: false, + objects: vec![], + signature: None, + branch: None, + overlay: None, + access: vec![], + topic: None, + locator: None, + } + } + + pub fn to_store_nuri_string(store_id: &RepoId) -> String { + let overlay_id = OverlayId::outer(store_id); + format!("o:{store_id}:v:{overlay_id}") + } + + pub fn repo_graph_name(repo_id: &RepoId, overlay_id: &OverlayId) -> String { + format!("{DID_PREFIX}:o:{repo_id}:v:{overlay_id}") + } + + pub fn branch_repo_graph_name( + branch_id: &BranchId, + repo_id: &RepoId, + overlay_id: &OverlayId, + ) -> String { + format!("{DID_PREFIX}:o:{repo_id}:v:{overlay_id}:b:{branch_id}") + } + + pub fn repo_skolem( + repo_id: &RepoId, + peer_id: &Vec, + random: u128, + ) -> Result { + let mut arr = Vec::with_capacity(32); + arr.extend_from_slice(peer_id); + arr.extend_from_slice(&random.to_be_bytes()); + let sko: SymKey = arr.as_slice().try_into()?; + Ok(format!("{DID_PREFIX}:o:{repo_id}:u:{sko}")) + } + + pub fn repo(&self) -> String { + Self::repo_id(self.target.repo_id()) + } + + pub fn repo_id(repo_id: &RepoId) -> String { + format!("{DID_PREFIX}:o:{}", repo_id) + } + + pub fn overlay_id(overlay_id: &OverlayId) -> String { + format!("{DID_PREFIX}:v:{overlay_id}") + } + + pub fn topic_id(topic_id: &TopicId) -> String { + format!("{DID_PREFIX}:h:{topic_id}") + } + + pub fn branch_id(branch_id: &BranchId) -> String { + format!("{DID_PREFIX}:b:{branch_id}") + } + + pub fn branch_id_from_base64(branch_base64: &String) -> String { + format!("{DID_PREFIX}:b:{branch_base64}") + } + + pub fn object_ref(obj_ref: &ObjectRef) -> String { + format!("{DID_PREFIX}:{}", obj_ref.object_nuri()) + } + + pub fn signature_ref(obj_ref: &ObjectRef) -> String { + format!("s:{}:k:{}", obj_ref.id, obj_ref.key) + } + + pub fn commit_ref(commit_ref: &ObjectRef) -> String { + format!("c:{}:k:{}", commit_ref.id, commit_ref.key) + } + + pub fn token(token: &Digest) -> String { + format!("{DID_PREFIX}:n:{token}") + } + + pub fn tokenized_commit(repo_id: &RepoId, commit_id: &ObjectId) -> String { + format!("{DID_PREFIX}:o:{repo_id}:t:{commit_id}") + } + + 
pub fn commit(repo_id: &RepoId, commit_id: &ObjectId) -> String { + format!("{DID_PREFIX}:o:{repo_id}:c:{commit_id}") + } + + pub fn inbox(inbox_id: &PubKey) -> String { + format!("{DID_PREFIX}:d:{inbox_id}") + } + + pub fn from_store_repo_string(store_repo: &StoreRepo) -> String { + match store_repo { + StoreRepo::V0(v0) => match v0 { + StoreRepoV0::PublicStore(id) => NuriV0::public_profile(id), + StoreRepoV0::ProtectedStore(id) => NuriV0::protected_profile(id), + StoreRepoV0::PrivateStore(id) => NuriV0::private_store(id), + StoreRepoV0::Group(id) => NuriV0::group_store(id), + StoreRepoV0::Dialog((id,_)) => NuriV0::dialog_store(id), + }, + } + } + + pub fn public_profile(store_id: &PubKey) -> String { + format!("{DID_PREFIX}:a:{store_id}") + } + + pub fn protected_profile(store_id: &PubKey) -> String { + format!("{DID_PREFIX}:b:{store_id}") + } + + pub fn private_store(store_id: &PubKey) -> String { + format!("{DID_PREFIX}:c:{store_id}") + } + + pub fn group_store(store_id: &PubKey) -> String { + format!("{DID_PREFIX}:g:{store_id}") + } + + pub fn dialog_store(store_id: &PubKey) -> String { + format!("{DID_PREFIX}:d:{store_id}") + } + + pub fn locator(locator: &Locator) -> String { + format!("l:{locator}") + } + + pub fn is_branch_identifier(&self) -> bool { + self.locator.is_none() + && self.topic.is_none() + && self.access.is_empty() + && self.overlay.as_ref().map_or(false, |o| o.is_outer()) + && self + .branch + .as_ref() + .map_or(true, |b| b.is_valid_for_sparql_update()) + && self.objects.is_empty() + && self.signature.is_none() + && !self.entire_store + && self.target.is_repo_id() + } + + pub fn is_valid_for_sparql_update(&self) -> bool { + self.objects.is_empty() + && self.signature.is_none() + && self.entire_store == false + && self.target.is_valid_for_sparql_update() + && self + .branch + .as_ref() + .map_or(true, |b| b.is_valid_for_sparql_update()) + } + pub fn is_valid_for_discrete_update(&self) -> bool { + self.objects.is_empty() + && self.signature.is_none() + && self.entire_store == false + && self.target.is_valid_for_discrete_update() + && self + .branch + .as_ref() + .map_or(true, |b| b.is_valid_for_discrete_update()) + } + pub fn new_repo_target_from_string(repo_id_string: String) -> Result { + let repo_id: RepoId = repo_id_string.as_str().try_into()?; + Ok(Self { + identity: None, + target: NuriTargetV0::Repo(repo_id), + entire_store: false, + objects: vec![], + signature: None, + branch: None, + overlay: None, + access: vec![], + topic: None, + locator: None, + }) + } + pub fn new_repo_target_from_id(repo_id: &RepoId) -> Self { + let mut n = Self::new_empty(); + n.target = NuriTargetV0::Repo(*repo_id); + n + } + + pub fn new_from_obj_ref(obj_ref: &ObjectRef) -> Self { + Self { + identity: None, + target: NuriTargetV0::None, + entire_store: false, + objects: vec![obj_ref.clone()], + signature: None, + branch: None, + overlay: None, + access: vec![], + topic: None, + locator: None, + } + } + + pub fn new_private_store_target() -> Self { + let mut n = Self::new_empty(); + n.target = NuriTargetV0::PrivateStore; + n + } + + pub fn new_protected_store_target() -> Self { + let mut n = Self::new_empty(); + n.target = NuriTargetV0::ProtectedProfile; + n + } + + pub fn new_public_store_target() -> Self { + let mut n = Self::new_empty(); + n.target = NuriTargetV0::PublicProfile; + n + } + + pub fn new_entire_user_site() -> Self { + let mut n = Self::new_empty(); + n.target = NuriTargetV0::UserSite; + n + } + pub fn new_for_readcaps(from: &str) -> Result { + let c = 
RE_OBJECTS.captures(from); + if let Some(c) = c { + let target = c.get(1).map_or(NuriTargetV0::None, |repo_match| { + if let Ok(id) = decode_key(repo_match.as_str()) { + NuriTargetV0::Repo(id) + } else { + NuriTargetV0::None + } + }); + let overlay_id = decode_overlayid(c.get(2).ok_or(NgError::InvalidNuri)?.as_str())?; + let read_caps = c.get(3).ok_or(NgError::InvalidNuri)?.as_str(); + let sign_obj_id = c.get(4).map(|c| decode_digest(c.as_str())); + let sign_obj_key = c.get(5).map(|c| decode_sym_key(c.as_str())); + let locator = + TryInto::::try_into(c.get(6).ok_or(NgError::InvalidNuri)?.as_str())?; + let signature = if sign_obj_id.is_some() && sign_obj_key.is_some() { + Some(ObjectRef::from_id_key( + sign_obj_id.unwrap()?, + sign_obj_key.unwrap()?, + )) + } else { + None + }; + + let objects = RE_OBJECT_READ_CAPS + .captures_iter(read_caps) + .map(|c| { + Ok(ObjectRef::from_id_key( + decode_digest(c.get(1).ok_or(NgError::InvalidNuri)?.as_str())?, + decode_sym_key(c.get(2).ok_or(NgError::InvalidNuri)?.as_str())?, + )) + }) + .collect::, NgError>>()?; + + if objects.len() < 1 { + return Err(NgError::InvalidNuri); + } + + Ok(Self { + identity: None, + target, + entire_store: false, + objects, + signature, + branch: None, + overlay: Some(overlay_id.into()), + access: vec![], + topic: None, + locator: Some(locator), + }) + } else { + Err(NgError::InvalidNuri) + } + } + + pub fn from_inbox_into_id(from: &String) -> Result { + let c = RE_INBOX.captures(&from); + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + { + let cap = c.unwrap(); + let d = cap.get(1).unwrap().as_str(); + let to_inbox = decode_key(d)?; + return Ok(to_inbox); + } + Err(NgError::InvalidNuri) + } + + pub fn from_profile_into_overlay_id(from: &String) -> Result { + let c = RE_PROFILE.captures(&from); + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + let to_profile_id = decode_key(o)?; + let to_overlay = OverlayId::outer(&to_profile_id); + return Ok(to_overlay); + } + Err(NgError::InvalidNuri) + } + + pub fn new_from_repo_graph(from: &String) -> Result { + let c = RE_REPO.captures(from); + + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + && c.as_ref().unwrap().get(2).is_some() + { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + let v = cap.get(2).unwrap().as_str(); + let repo_id = decode_key(o)?; + let overlay_id = decode_overlayid(v)?; + + let mut n = Self::new_empty(); + n.target = NuriTargetV0::Repo(repo_id); + n.overlay = Some(overlay_id.into()); + return Ok(n); + } + Err(NgError::InvalidNuri) + } + + + pub fn new_from_repo_nuri(from: &String) -> Result { + let repo_id = Self::from_repo_nuri_to_id(from)?; + let mut n = Self::new_empty(); + n.target = NuriTargetV0::Repo(repo_id); + return Ok(n); + } + + + pub fn new_from_commit(from: &String) -> Result { + + let c = RE_COMMIT.captures(&from); + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + && c.as_ref().unwrap().get(2).is_some() + && c.as_ref().unwrap().get(3).is_some() + { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + let c = cap.get(2).unwrap().as_str(); + let k = cap.get(3).unwrap().as_str(); + let repo_id = decode_key(o)?; + let commit_id = decode_digest(c)?; + let commit_key = decode_sym_key(k)?; + return Ok(Self { + identity: None, + target: NuriTargetV0::Repo(repo_id), + entire_store: false, + objects: vec![], + signature: None, + branch: Some(TargetBranchV0::Commits(vec![commit_id])), + overlay: None, + access: 
vec![NgAccessV0::Key(commit_key)], + topic: None, + locator: None, + }); + } + Err(NgError::InvalidNuri) + } + + pub fn from_repo_nuri_to_id(from: &String) -> Result { + let c = RE_REPO_O.captures(from); + + if c.is_some() && c.as_ref().unwrap().get(1).is_some() { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + + let repo_id = decode_key(o)?; + return Ok(repo_id); + } + Err(NgError::InvalidNuri) + } + + pub fn new_from(from: &String) -> Result { + let c = RE_REPO_O.captures(from); + + if c.is_some() && c.as_ref().unwrap().get(1).is_some() { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + + let repo_id = decode_key(o)?; + Ok(Self { + identity: None, + target: NuriTargetV0::Repo(repo_id), + entire_store: false, + objects: vec![], + signature: None, + branch: None, + overlay: None, + access: vec![], + topic: None, + locator: None, + }) + } else { + let c = RE_FILE_READ_CAP.captures(from); + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + && c.as_ref().unwrap().get(2).is_some() + { + let cap = c.unwrap(); + let j = cap.get(1).unwrap().as_str(); + let k = cap.get(2).unwrap().as_str(); + let id = decode_digest(j)?; + let key = decode_sym_key(k)?; + Ok(Self { + identity: None, + target: NuriTargetV0::None, + entire_store: false, + objects: vec![ObjectRef::from_id_key(id, key)], + signature: None, + branch: None, + overlay: None, + access: vec![], + topic: None, + locator: None, + }) + } else { + + if let Ok(n) = NuriV0::new_from_repo_graph(from) { + Ok(n) + } else { + let c = RE_BRANCH.captures(from); + + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + && c.as_ref().unwrap().get(2).is_some() + && c.as_ref().unwrap().get(3).is_some() + { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + let v = cap.get(2).unwrap().as_str(); + let b = cap.get(3).unwrap().as_str(); + let repo_id = decode_key(o)?; + let overlay_id = decode_overlayid(v)?; + let branch_id = decode_key(b)?; + Ok(Self { + identity: None, + target: NuriTargetV0::Repo(repo_id), + entire_store: false, + objects: vec![], + signature: None, + branch: Some(TargetBranchV0::BranchId(branch_id)), + overlay: Some(overlay_id.into()), + access: vec![], + topic: None, + locator: None, + }) + } else { + Err(NgError::InvalidNuri) + } + } + } + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppRequestCommandV0 { + Fetch(AppFetchContentV0), + Pin, + UnPin, + Delete, + Create, + FileGet, // needs the Nuri of branch/doc/store AND ObjectId + FilePut, // needs the Nuri of branch/doc/store + Header, + InboxPost, + SocialQueryStart, + SocialQueryCancel, + QrCodeProfile, + QrCodeProfileImport, +} + +impl AppRequestCommandV0 { + pub fn is_stream(&self) -> bool { + match self { + Self::Fetch(AppFetchContentV0::Subscribe) | Self::FileGet => true, + _ => false, + } + } + pub fn new_read_query() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::ReadQuery) + } + pub fn new_write_query() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::WriteQuery) + } + pub fn new_update() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::Update) + } + pub fn new_rdf_dump() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::RdfDump) + } + pub fn new_history() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::History) + } + pub fn new_signature_status() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::SignatureStatus) + } + pub fn new_signature_request() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::SignatureRequest) + } + pub fn 
new_signed_snapshot_request() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::SignedSnapshotRequest) + } + pub fn new_create() -> Self { + AppRequestCommandV0::Create + } + pub fn new_header() -> Self { + AppRequestCommandV0::Header + } + pub fn new_qrcode_for_profile() -> Self { + AppRequestCommandV0::QrCodeProfile + } + pub fn new_qrcode_profile_import() -> Self { + AppRequestCommandV0::QrCodeProfileImport + } + pub fn new_fetch_header() -> Self { + AppRequestCommandV0::Fetch(AppFetchContentV0::Header) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppRequestV0 { + pub command: AppRequestCommandV0, + + pub nuri: NuriV0, + + pub payload: Option, + + pub session_id: u64, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppRequest { + V0(AppRequestV0), +} + +impl AppRequest { + pub fn set_session_id(&mut self, session_id: u64) { + match self { + Self::V0(v0) => v0.session_id = session_id, + } + } + pub fn session_id(&self) -> u64 { + match self { + Self::V0(v0) => v0.session_id, + } + } + pub fn command(&self) -> &AppRequestCommandV0 { + match self { + Self::V0(v0) => &v0.command, + } + } + pub fn new( + command: AppRequestCommandV0, + nuri: NuriV0, + payload: Option, + ) -> Self { + AppRequest::V0(AppRequestV0 { + command, + nuri, + payload, + session_id: 0, + }) + } + + pub fn inbox_post(post: InboxPost) -> Self { + AppRequest::new( + AppRequestCommandV0::InboxPost, + NuriV0::new_empty(), + Some(AppRequestPayload::V0(AppRequestPayloadV0::InboxPost(post))), + ) + } + + pub fn social_query_start( + from_profile: NuriV0, + query: NuriV0, + contacts: String, + degree: u16, + ) -> Self { + AppRequest::new( + AppRequestCommandV0::SocialQueryStart, + query, + Some(AppRequestPayload::V0(AppRequestPayloadV0::SocialQueryStart{ + from_profile, + contacts, + degree + })), + ) + } + + pub fn social_query_cancel( + query: NuriV0, + ) -> Self { + AppRequest::new( + AppRequestCommandV0::SocialQueryCancel, + query, + None + ) + } + + pub fn doc_fetch_repo_subscribe(repo_o: String) -> Result { + Ok(AppRequest::new( + AppRequestCommandV0::Fetch(AppFetchContentV0::get_or_subscribe(true)), + NuriV0::new_from(&repo_o)?, + None, + )) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppSessionStopV0 { + pub session_id: u64, + pub force_close: bool, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppSessionStop { + V0(AppSessionStopV0), +} +impl AppSessionStop { + pub fn session_id(&self) -> u64 { + match self { + Self::V0(v0) => v0.session_id, + } + } + pub fn is_force_close(&self) -> bool { + match self { + Self::V0(v0) => v0.force_close, + } + } +} +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppSessionStartV0 { + pub session_id: u64, + + pub credentials: Option, + + pub user_id: UserId, + + pub detach: bool, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppSessionStart { + V0(AppSessionStartV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppSessionStartResponseV0 { + pub private_store: RepoId, + pub protected_store: RepoId, + pub public_store: RepoId, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppSessionStartResponse { + V0(AppSessionStartResponseV0), +} + +impl AppSessionStart { + pub fn session_id(&self) -> u64 { + match self { + Self::V0(v0) => v0.session_id, + } + } + pub fn credentials(&self) -> &Option { + match self { + Self::V0(v0) => &v0.credentials, + } + } + pub fn user_id(&self) -> &UserId { + match self { + Self::V0(v0) => &v0.user_id, 
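For orientation, the request-building pieces above compose as follows. A minimal sketch, assuming the crate paths of this patch; the Nuri string and session id are placeholder inputs:

use ng_net::app_protocol::{AppRequest, AppRequestCommandV0, AppRequestPayload, NuriV0};
use ng_repo::errors::NgError;

fn build_read_query(nuri_str: &String) -> Result<AppRequest, NgError> {
    // parse a repo Nuri (matched by the RE_REPO_O regex above)
    let nuri = NuriV0::new_from(nuri_str)?;
    // wrap a SPARQL SELECT in the V0 query payload; `base` left as None
    let payload = AppRequestPayload::new_sparql_query(
        "SELECT * WHERE { ?s ?p ?o } LIMIT 10".to_string(),
        None,
    );
    let mut req = AppRequest::new(AppRequestCommandV0::new_read_query(), nuri, Some(payload));
    req.set_session_id(42); // normally assigned by the session layer
    Ok(req)
}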
+ } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum DocQuery { + V0 { + sparql: String, + base: Option, + }, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct GraphUpdate { + // serialization of Vec + #[serde(with = "serde_bytes")] + pub inserts: Vec, + // serialization of Vec + #[serde(with = "serde_bytes")] + pub removes: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum DiscreteUpdate { + /// A yrs::Update + #[serde(with = "serde_bytes")] + YMap(Vec), + #[serde(with = "serde_bytes")] + YArray(Vec), + #[serde(with = "serde_bytes")] + YXml(Vec), + #[serde(with = "serde_bytes")] + YText(Vec), + /// An automerge::Change.raw_bytes() + #[serde(with = "serde_bytes")] + Automerge(Vec), +} + +impl DiscreteUpdate { + pub fn from(crdt: String, update: Vec) -> Self { + match crdt.as_str() { + "YMap" => Self::YMap(update), + "YArray" => Self::YArray(update), + "YXml" => Self::YXml(update), + "YText" => Self::YText(update), + "Automerge" => Self::Automerge(update), + _ => panic!("wrong crdt type"), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct DocUpdate { + pub heads: Vec, + pub graph: Option, + pub discrete: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct DocAddFile { + pub filename: Option, + pub object: ObjectRef, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct DocHeader { + pub title: Option, + pub about: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum DocCreateDestination { + Store, + Stream, + MagicCarpet, +} + +impl DocCreateDestination { + pub fn from(s: String) -> Result { + Ok(match s.as_str() { + "store" => Self::Store, + "stream" => Self::Stream, + "mc" => Self::MagicCarpet, + _ => return Err(NgError::InvalidArgument), + }) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct DocCreate { + pub class: BranchCrdt, + pub destination: DocCreateDestination, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct DocDelete { + /// Nuri of doc to delete + nuri: String, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppRequestPayloadV0 { + Create(DocCreate), + Query(DocQuery), + Update(DocUpdate), + AddFile(DocAddFile), + + Delete(DocDelete), + + SmallFilePut(SmallFile), + RandomAccessFilePut(String), // content_type (iana media type) + RandomAccessFilePutChunk((u32, serde_bytes::ByteBuf)), // end the upload with an empty vec + + Header(DocHeader), + + InboxPost(InboxPost), + SocialQueryStart { + from_profile: NuriV0, + contacts: String, + degree: u16, + }, + //RemoveFile + //Invoke(InvokeArguments), + QrCodeProfile(u32), + QrCodeProfileImport(String), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppRequestPayload { + V0(AppRequestPayloadV0), +} + +impl AppRequestPayload { + pub fn new_sparql_query(sparql: String, base: Option) -> Self { + AppRequestPayload::V0(AppRequestPayloadV0::Query(DocQuery::V0 { sparql, base })) + } + pub fn new_header(title: Option, about: Option) -> Self { + AppRequestPayload::V0(AppRequestPayloadV0::Header(DocHeader { title, about })) + } + pub fn new_discrete_update( + head_strings: Vec, + crdt: String, + update: Vec, + ) -> Result { + let mut heads = Vec::with_capacity(head_strings.len()); + for head in head_strings { + heads.push(decode_digest(&head)?); + } + let discrete = Some(DiscreteUpdate::from(crdt, update)); + Ok(AppRequestPayload::V0(AppRequestPayloadV0::Update( + DocUpdate { + heads, + graph: None, + discrete, + }, + ))) + } +} + +#[derive(Clone, 
Debug, Serialize, Deserialize)] +pub enum DiscretePatch { + /// A yrs::Update + #[serde(with = "serde_bytes")] + YMap(Vec), + #[serde(with = "serde_bytes")] + YArray(Vec), + #[serde(with = "serde_bytes")] + YXml(Vec), + #[serde(with = "serde_bytes")] + YText(Vec), + /// An automerge::Change.raw_bytes() or a concatenation of several. + #[serde(with = "serde_bytes")] + Automerge(Vec), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct GraphPatch { + // serialization of Vec + #[serde(with = "serde_bytes")] + pub inserts: Vec, + // serialization of Vec + #[serde(with = "serde_bytes")] + pub removes: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum DiscreteState { + /// A yrs::Update + #[serde(with = "serde_bytes")] + YMap(Vec), + #[serde(with = "serde_bytes")] + YArray(Vec), + #[serde(with = "serde_bytes")] + YXml(Vec), + #[serde(with = "serde_bytes")] + YText(Vec), + // the output of Automerge::save() + #[serde(with = "serde_bytes")] + Automerge(Vec), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct GraphState { + // serialization of Vec + #[serde(with = "serde_bytes")] + pub triples: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppState { + pub heads: Vec, + pub head_keys: Vec, + pub graph: Option, // there is always a graph present in the branch. but it might not have been asked in the request + pub discrete: Option, + pub files: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppHistory { + pub history: Vec<(ObjectId, CommitInfo)>, + pub swimlane_state: Vec>, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppHistoryJs { + pub history: Vec<(String, CommitInfoJs)>, + pub swimlane_state: Vec>, +} + +impl AppHistory { + pub fn to_js(&self) -> AppHistoryJs { + AppHistoryJs { + history: Vec::from_iter( + self.history + .iter() + .map(|(id, info)| (id.to_string(), info.into())), + ), + swimlane_state: Vec::from_iter( + self.swimlane_state + .iter() + .map(|lane| lane.map_or(None, |b| Some(b.to_string()))), + ), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OtherPatch { + FileAdd(FileName), + FileRemove(ObjectId), + AsyncSignature((String, Vec)), + Snapshot(ObjectRef), + Compact(ObjectRef), + Other, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppPatch { + pub commit_id: String, + pub commit_info: CommitInfoJs, + // or graph, or discrete, or both, or other. 
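The opaque byte fields above are serde_bare payloads, per their inline comments. A sketch of unpacking a GraphState into triples, assuming the fork's oxrdf Triple type carries serde support (implied here but not shown in this hunk):

use ng_net::app_protocol::GraphState;
use ng_oxigraph::oxrdf::Triple;

fn triples_of(state: &GraphState) -> Result<Vec<Triple>, serde_bare::error::Error> {
    // `triples` is the serde_bare serialization of a Vec<Triple>
    serde_bare::from_slice::<Vec<Triple>>(&state.triples)
}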
+ pub graph: Option, + pub discrete: Option, + pub other: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct FileName { + pub name: Option, + pub reference: ObjectRef, + pub nuri: String, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct FileMetaV0 { + pub content_type: String, + pub size: u64, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppTabStoreInfo { + pub repo: Option, //+ + pub overlay: Option, //+ + pub has_outer: Option, + pub store_type: Option, //+ + pub readcap: Option, + pub is_member: Option, + pub inner: Option, + pub title: Option, + pub icon: Option, + pub description: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppTabDocInfo { + pub nuri: Option, //+ + pub is_store: Option, //+ + pub is_member: Option, //+ + pub title: Option, + pub icon: Option, + pub description: Option, + pub authors: Option>, + pub inbox: Option, + pub can_edit: Option, //+ + //TODO stream + //TODO live_editors + //TODO branches +} + +impl AppTabDocInfo { + pub fn new() -> Self { + AppTabDocInfo { + nuri: None, + is_store: None, + is_member: None, + title: None, + icon: None, + description: None, + authors: None, + inbox: None, + can_edit: None, + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppTabBranchInfo { + pub id: Option, //+ + pub readcap: Option, //+ + pub comment_branch: Option, + pub class: Option, //+ +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppTabInfo { + pub branch: Option, + pub doc: Option, + pub store: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppHeader { + pub about: Option, + pub title: Option, + pub class: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppResponseV0 { + SessionStart(AppSessionStartResponse), + TabInfo(AppTabInfo), + State(AppState), + Patch(AppPatch), + History(AppHistory), + SignatureStatus(Vec<(String, Option, bool)>), + Text(String), + //File(FileName), + FileUploading(u32), + FileUploaded(ObjectRef), + #[serde(with = "serde_bytes")] + FileBinary(Vec), + FileMeta(FileMetaV0), + #[serde(with = "serde_bytes")] + QueryResult(Vec), // a serialized [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) + #[serde(with = "serde_bytes")] + Graph(Vec), // a serde serialization of a list of triples. can be transformed on the client side to RDF-JS data model, or JSON-LD, or else (Turtle,...) http://rdf.js.org/data-model-spec/ + Ok, + True, + False, + Error(String), + EndOfStream, + Nuri(String), + Header(AppHeader), + Commits(Vec), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppResponse { + V0(AppResponseV0), +} + +impl AppResponse { + pub fn error(err: String) -> Self { + AppResponse::V0(AppResponseV0::Error(err)) + } + pub fn ok() -> Self { + AppResponse::V0(AppResponseV0::Ok) + } + pub fn commits(commits: Vec) -> Self { + AppResponse::V0(AppResponseV0::Commits(commits)) + } + pub fn text(text: String) -> Self { + AppResponse::V0(AppResponseV0::Text(text)) + } +} diff --git a/ng-net/src/broker.rs b/ng-net/src/broker.rs new file mode 100644 index 0000000..5469a40 --- /dev/null +++ b/ng-net/src/broker.rs @@ -0,0 +1,1319 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. 
All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +//! Broker singleton present in every instance of NextGraph (Client, Server, Core node) + +use std::collections::HashMap; +#[cfg(not(target_arch = "wasm32"))] +use std::collections::HashSet; + +use async_std::stream::StreamExt; +#[cfg(not(target_arch = "wasm32"))] +use async_std::sync::Mutex; +use async_std::sync::{Arc, RwLock}; +use either::Either; +use futures::channel::mpsc; +use futures::channel::mpsc::UnboundedSender; +use futures::SinkExt; +use once_cell::sync::Lazy; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::*; + +use crate::actor::SoS; +use crate::connection::*; +use crate::server_broker::IServerBroker; +use crate::types::*; +use crate::utils::spawn_and_log_error; +use crate::utils::{Receiver, ResultSend, Sender}; + +#[doc(hidden)] +#[derive(Debug, Clone)] +pub enum ClientPeerId { + Local((UserId, DirectPeerId)), + Remote(DirectPeerId), +} + +impl ClientPeerId { + pub fn key(&self) -> &DirectPeerId { + match self { + Self::Remote(dpi) => dpi, + Self::Local((_user, dpi)) => dpi, + } + } + pub fn value(&self) -> Option { + match self { + Self::Remote(_) => None, + Self::Local((user, _)) => Some(*user), + } + } + pub fn new_from(peer: &DirectPeerId, local_user: &Option) -> Self { + match local_user { + Some(user) => ClientPeerId::Local((*user, *peer)), + None => ClientPeerId::Remote(*peer), + } + } +} + +#[derive(Debug)] +enum PeerConnection { + Core(BindAddress), + Client(ConnectionBase), + Local(LocalTransport), + NONE, +} + +#[derive(Debug)] +struct LocalTransport { + #[allow(dead_code)] + client_peer_id: DirectPeerId, + client_cnx: ConnectionBase, + server_cnx: ConnectionBase, +} + +impl LocalTransport { + async fn close(&mut self) { + self.client_cnx.close().await; + self.server_cnx.close().await; + } +} + +#[derive(Debug)] +struct BrokerPeerInfo { + #[allow(dead_code)] + last_peer_advert: Option, //FIXME: remove Option + connected: PeerConnection, +} + +#[derive(Debug)] +#[allow(dead_code)] +struct DirectConnection { + addr: BindAddress, + remote_peer_id: X25519PrivKey, + tp: TransportProtocol, + //dir: ConnectionDir, + cnx: ConnectionBase, +} + +#[derive(Debug)] +pub struct ServerConfig { + pub overlays_configs: Vec, + pub registration: RegistrationConfig, + pub admin_user: Option, + pub peer_id: PubKey, + // when creating invitation links, an optional url to redirect the user to can be used, for accepting ToS and making payment, if any. + pub registration_url: Option, + pub bootstrap: BootstrapContent, +} + +pub enum LocalBrokerMessage { + Deliver { + event: Event, + overlay: OverlayId, + user: UserId, + }, + Disconnected { + user_id: UserId, + }, + Inbox{ + user_id: UserId, + msg: InboxMsg, + from_queue: bool, + }, +} + +pub static BROKER: Lazy>> = Lazy::new(|| Arc::new(RwLock::new(Broker::new()))); + +pub struct Broker { + direct_connections: HashMap, + /// tuple of optional userId and peer key in montgomery form. userId is always None on the server side (except for local transport). 
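Everything below hangs off the process-wide BROKER singleton, a Lazy<Arc<RwLock<Broker>>>. A short usage sketch, with the user and peer assumed already known to the caller:

use ng_net::broker::{ClientPeerId, BROKER};
use ng_repo::types::{DirectPeerId, UserId};

async fn inspect(user: UserId, peer: DirectPeerId) {
    // shared read access to the singleton
    BROKER.read().await.print_status();
    // a local-transport client keeps its UserId next to the peer key
    let cpid = ClientPeerId::new_from(&peer, &Some(user));
    assert_eq!(cpid.value(), Some(user));
}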
+ peers: HashMap<(Option<UserId>, Option<X25519PrivKey>), BrokerPeerInfo>, + /// (local,remote) -> ConnectionBase + anonymous_connections: HashMap<(BindAddress, BindAddress), ConnectionBase>, + + config: Option<ServerConfig>, + shutdown: Option<Receiver<ProtocolError>>, + shutdown_sender: Sender<ProtocolError>, + closing: bool, + server_broker: Option<Arc<RwLock<dyn IServerBroker + Send + Sync>>>, + + //local_broker: Option>, + local_broker: Option<UnboundedSender<LocalBrokerMessage>>, + + #[cfg(not(target_arch = "wasm32"))] + listeners: HashMap<String, ListenerInfo>, + #[cfg(not(target_arch = "wasm32"))] + bind_addresses: HashMap<BindAddress, String>, + #[cfg(not(target_arch = "wasm32"))] + users_peers: HashMap<UserId, HashSet<Option<X25519PrivKey>>>, +} + +impl Broker { + // pub fn init_local_broker( + // &mut self, + // base_path: Option, + // in_memory: bool, + // ) -> Result<(), NgError> { + // if in_memory && base_path.is_some() { + // return Err(NgError::InvalidArgument); + // } + // self.base_path = base_path; + // self.in_memory = in_memory; + // Ok(()) + // } + + // pub fn register_last_seq_function(&mut self, function: Box) { + // if self.last_seq_function.is_none() { + // self.last_seq_function = Some(function); + // } + // } + + pub fn get_server_peer_id(&self) -> DirectPeerId { + self.config.as_ref().unwrap().peer_id + } + + pub(crate) fn get_config(&self) -> Option<&ServerConfig> { + self.config.as_ref() + } + + pub(crate) fn get_registration_url(&self) -> Option<&String> { + self.config + .as_ref() + .and_then(|c| c.registration_url.as_ref()) + } + + pub(crate) fn get_bootstrap(&self) -> Result<&BootstrapContent, ProtocolError> { + self.config + .as_ref() + .map(|c| &c.bootstrap) + .ok_or(ProtocolError::BrokerError) + } + + #[doc(hidden)] + pub fn set_server_broker(&mut self, broker: impl IServerBroker + 'static) { + //log_debug!("set_server_broker"); + self.server_broker = Some(Arc::new(RwLock::new(broker))); + } + + #[doc(hidden)] + pub fn set_local_broker(&mut self, pump: UnboundedSender<LocalBrokerMessage>) { + //log_debug!("set_local_broker"); + self.local_broker = Some(pump); + } + + pub fn set_server_config(&mut self, config: ServerConfig) { + self.config = Some(config); + } + + #[cfg(not(target_arch = "wasm32"))] + pub fn set_listeners( + &mut self, + listeners: HashMap<String, ListenerInfo>, + ) -> (HashMap<String, ListenerInfo>, HashMap<BindAddress, String>) { + for entry in listeners.iter() { + for ba in entry.1.addrs.iter() { + self.bind_addresses.insert(ba.clone(), entry.0.clone()); + } + } + self.listeners.extend(listeners); + let mut copy_listeners: HashMap<String, ListenerInfo> = HashMap::new(); + let mut copy_bind_addresses: HashMap<BindAddress, String> = HashMap::new(); + copy_listeners.clone_from(&self.listeners); + copy_bind_addresses.clone_from(&self.bind_addresses); + (copy_listeners, copy_bind_addresses) + } + + #[doc(hidden)] + pub fn get_server_broker( + &self, + ) -> Result<Arc<RwLock<dyn IServerBroker + Send + Sync>>, ProtocolError> { + //log_debug!("GET STORAGE {:?}", self.server_storage); + Ok(Arc::clone( + self.server_broker + .as_ref() + .ok_or(ProtocolError::BrokerError)?, + )) + } + + // pub(crate) fn get_server_broker_mut( + // &mut self, + // ) -> Result<&mut Box, ProtocolError> { + // //log_debug!("GET STORAGE {:?}", self.server_storage); + // self.server_broker + // .as_mut() + // .ok_or(ProtocolError::BrokerError) + // } + + //Option>>, + pub(crate) fn get_local_broker( + &self, + ) -> Result<UnboundedSender<LocalBrokerMessage>, ProtocolError> { + Ok(self + .local_broker + .as_ref() + .ok_or(ProtocolError::NoLocalBrokerFound)?
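A server embedding this broker is expected to install its IServerBroker implementation and the local-broker pump before accepting connections. A hedged sketch; the receiving end of the channel would be drained by the local broker's own event loop, not shown here:

use futures::channel::mpsc;
use ng_net::broker::{LocalBrokerMessage, BROKER};
use ng_net::server_broker::IServerBroker;

async fn wire_up(server: impl IServerBroker + 'static) {
    let (pump, _rx) = mpsc::unbounded::<LocalBrokerMessage>();
    let mut broker = BROKER.write().await;
    broker.set_server_broker(server);
    broker.set_local_broker(pump);
}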
+ .clone()) + } + + #[cfg(not(target_arch = "wasm32"))] + pub(crate) async fn authorize( + &self, + bind_addresses: &(BindAddress, BindAddress), + auth: Authorization, + ) -> Result<(), ProtocolError> { + let listener_id = self + .bind_addresses + .get(&bind_addresses.0) + .ok_or(ProtocolError::BrokerError)?; + let listener = self + .listeners + .get(listener_id) + .ok_or(ProtocolError::BrokerError)?; + match auth { + Authorization::Discover => { + if listener.config.discoverable + && bind_addresses.1.ip.is_private() + && listener.config.accept_forward_for.is_no() + { + Ok(()) + } else { + Err(ProtocolError::AccessDenied) + } + } + Authorization::ExtMessage => Err(ProtocolError::AccessDenied), + Authorization::Client(user_and_registration) => { + if user_and_registration.1.is_some() { + // user wants to register + let lock = self.get_server_broker()?; + { + let storage = lock.read().await; + if storage.get_user(user_and_registration.0).is_ok() { + return Ok(()); + } + } + { + let mut storage = lock.write().await; + if storage.has_no_user()? { + let code = user_and_registration.1.unwrap().unwrap(); + let inv_type = storage.get_invitation_type(code)?; + if inv_type == 3u8 { + // it is a setup invite + // TODO send (return here) master_key to client (so they can save it in their wallet) + let _master_key = storage.take_master_key()?; + // TODO save remote_boot (in server.path) + storage.add_user(user_and_registration.0, true)?; + storage.remove_invitation(code)?; + return Ok(()); + } + return Err(ProtocolError::InvalidState); + } + } + let storage = lock.read().await; + if let Some(ServerConfig { + registration: reg, .. + }) = &self.config + { + return match reg { + RegistrationConfig::Closed => return Err(ProtocolError::AccessDenied), + RegistrationConfig::Invitation => { + // registration is only possible with an invitation code + if user_and_registration.1.unwrap().is_none() { + Err(ProtocolError::InvitationRequired) + } else { + let code = user_and_registration.1.unwrap().unwrap(); + let inv_type = storage.get_invitation_type(code)?; + storage.add_user(user_and_registration.0, inv_type == 2u8)?; + storage.remove_invitation(code)?; + Ok(()) + } + } + RegistrationConfig::Open => { + // registration is open (no need for invitation. anybody can register) + let mut is_admin = false; + if user_and_registration.1.unwrap().is_some() { + // but if there is an invitation code and it says the user should be admin, then we take that into account + let code = user_and_registration.1.unwrap().unwrap(); + let inv_type = storage.get_invitation_type(code)?; + if inv_type == 2u8 { + // admin + is_admin = true; + storage.remove_invitation(code)?; + } else if inv_type == 1u8 { + storage.remove_invitation(code)?; + } + } + storage.add_user(user_and_registration.0, is_admin)?; + Ok(()) + } + }; + } else { + return Err(ProtocolError::BrokerError); + } + } + // if user doesn't want to register, we accept everything, as perms will be checked later on, once the overlayId is known + Ok(()) + } + Authorization::Core => Err(ProtocolError::AccessDenied), + Authorization::Admin(admin_user) => { + if listener.config.accepts_client() { + if let Some(ServerConfig { + admin_user: Some(admin), + .. 
+ }) = self.config + { + if admin == admin_user { + return Ok(()); + } + } + let found = self.get_server_broker()?.read().await.get_user(admin_user); + if found.is_ok() && found.unwrap() { + return Ok(()); + } + } + Err(ProtocolError::AccessDenied) + } + Authorization::OverlayJoin(_) => Err(ProtocolError::AccessDenied), + } + } + + fn reconnecting(&mut self, peer_id: X25519PrivKey, user: Option) { + let peerinfo = self.peers.get_mut(&(user, Some(peer_id))); + match peerinfo { + Some(info) => match &info.connected { + PeerConnection::NONE => {} + PeerConnection::Client(_cb) => { + info.connected = PeerConnection::NONE; + } + PeerConnection::Core(ip) => { + self.direct_connections.remove(&ip); + info.connected = PeerConnection::NONE; + } + PeerConnection::Local(_) => { + panic!("local transport connections cannot disconnect. shouldn't reconnect") + } + }, + None => {} + } + } + + async fn remove_peer_id(&mut self, peer_id: X25519PrivKey, user: Option) { + self.remove_peer_id_(Some(peer_id), user).await + } + + #[allow(dead_code)] + async fn remove_local_transport(&mut self, user: PubKey) { + self.remove_peer_id_(None, Some(user)).await + } + + async fn remove_peer_id_(&mut self, peer_id: Option, user: Option) { + let removed = self.peers.remove(&(user, peer_id)); + match removed { + Some(info) => match info.connected { + PeerConnection::NONE => {} + PeerConnection::Client(mut _cb) => { + #[cfg(not(target_arch = "wasm32"))] + if user.is_none() { + _cb.release_shutdown(); + // server side + if let Some(fsm) = _cb.fsm { + if let Ok(user) = fsm.lock().await.user_id() { + let _ = self + .remove_user_peer(&user, &Some(peer_id.to_owned().unwrap())); + } + } + let peer = PubKey::X25519PubKey(peer_id.unwrap()); + log_debug!("unsubscribing peer {}", peer); + self.get_server_broker() + .unwrap() + .read() + .await + .remove_all_subscriptions_of_client(&ClientPeerId::new_from( + &peer, &user, + )) + .await; + } + } + PeerConnection::Core(ip) => { + self.direct_connections.remove(&ip); + } + PeerConnection::Local(_lt) => { + #[cfg(not(target_arch = "wasm32"))] + if peer_id.is_none() && user.is_some() { + // server side + let _ = self.remove_user_peer(user.as_ref().unwrap(), &None); + + log_debug!("unsubscribing local peer {}", _lt.client_peer_id); + self.get_server_broker() + .unwrap() + .read() + .await + .remove_all_subscriptions_of_client(&ClientPeerId::new_from( + &_lt.client_peer_id, + &user, + )) + .await; + } + } + }, + None => {} + } + } + + #[cfg(not(target_arch = "wasm32"))] + fn remove_anonymous( + &mut self, + remote_bind_address: BindAddress, + local_bind_address: BindAddress, + ) { + let removed = self + .anonymous_connections + .remove(&(local_bind_address, remote_bind_address)); + if removed.is_some() { + removed.unwrap().release_shutdown(); + } + } + + // #[cfg(not(target_arch = "wasm32"))] + // pub fn test_storage(&self, path: PathBuf) { + // use ng_storage_rocksdb::kcv_store::RocksDbKCVStorage; + + // let key: [u8; 32] = [0; 32]; + // let test_storage = RocksDbKCVStorage::open(&path, key); + // match test_storage { + // Err(e) => { + // log_debug!("storage error {}", e); + // } + // Ok(_) => { + // log_debug!("storage ok"); + // } + // } + // } + + fn new() -> Self { + let (shutdown_sender, shutdown_receiver) = mpsc::unbounded::(); + let mut random_buf = [0u8; 4]; + getrandom::fill(&mut random_buf).unwrap(); + + Broker { + anonymous_connections: HashMap::new(), + config: None, + shutdown: Some(shutdown_receiver), + shutdown_sender, + direct_connections: HashMap::new(), + peers: 
HashMap::new(), + closing: false, + server_broker: None, + local_broker: None, + + #[cfg(not(target_arch = "wasm32"))] + listeners: HashMap::new(), + #[cfg(not(target_arch = "wasm32"))] + bind_addresses: HashMap::new(), + #[cfg(not(target_arch = "wasm32"))] + users_peers: HashMap::new(), + } + } + + fn take_shutdown(&mut self) -> Result, ProtocolError> { + self.shutdown.take().ok_or(ProtocolError::BrokerError) + } + + pub async fn join_shutdown() -> Result<(), ProtocolError> { + let mut shutdown_join: Receiver; + { + shutdown_join = BROKER.write().await.take_shutdown()?; + } + match shutdown_join.next().await { + Some(ProtocolError::Closing) => Ok(()), + Some(error) => Err(error), + None => Ok(()), + } + } + + /// Used in tests mostly + pub async fn join_shutdown_with_timeout( + timeout: std::time::Duration, + ) -> Result<(), ProtocolError> { + async fn timer_shutdown(timeout: std::time::Duration) -> ResultSend<()> { + async move { + sleep!(timeout); + log_debug!("timeout for shutdown"); + let _ = BROKER + .write() + .await + .shutdown_sender + .send(ProtocolError::Timeout) + .await; + } + .await; + Ok(()) + } + spawn_and_log_error(timer_shutdown(timeout)); + Broker::join_shutdown().await + } + + pub async fn graceful_shutdown() { + let peer_ids; + let anonymous; + { + let mut broker = BROKER.write().await; + if broker.closing { + return; + } + broker.closing = true; + peer_ids = Vec::from_iter(broker.peers.keys().cloned()); + anonymous = Vec::from_iter(broker.anonymous_connections.keys().cloned()); + } + for peer_id in peer_ids { + BROKER + .write() + .await + .close_peer_connection_x(peer_id.1, peer_id.0) + .await; + } + for anon in anonymous { + BROKER.write().await.close_anonymous(anon.1, anon.0).await; + } + let _ = BROKER + .write() + .await + .shutdown_sender + .send(ProtocolError::Closing) + .await; + } + + pub async fn close_all_connections() { + let peer_ids; + let anonymous; + { + let broker = BROKER.write().await; + if broker.closing { + return; + } + peer_ids = Vec::from_iter(broker.peers.keys().cloned()); + anonymous = Vec::from_iter(broker.anonymous_connections.keys().cloned()); + } + for peer_id in peer_ids { + if peer_id.1.is_some() { + BROKER + .write() + .await + .close_peer_connection_x(peer_id.1, peer_id.0) + .await; + } + } + for anon in anonymous { + BROKER.write().await.close_anonymous(anon.1, anon.0).await; + } + } + + #[allow(dead_code)] + #[cfg(not(target_arch = "wasm32"))] + async fn shutdown(&mut self) { + if self.closing { + return; + } + self.closing = true; + + let _ = self.shutdown_sender.send(ProtocolError::Closing).await; + } + + #[doc(hidden)] + #[cfg(not(target_arch = "wasm32"))] + pub async fn accept( + &mut self, + mut connection: ConnectionBase, + remote_bind_address: BindAddress, + local_bind_address: BindAddress, + ) -> Result<(), NetError> { + if self.closing { + return Err(NetError::Closing); + } + + let join: mpsc::UnboundedReceiver> = + connection.take_shutdown(); + if self + .anonymous_connections + .insert((local_bind_address, remote_bind_address), connection) + .is_some() + { + log_err!( + "internal error. 
duplicate connection {:?} {:?}", + local_bind_address, + remote_bind_address + ); + } + + async fn watch_close( + mut join: Receiver>, + remote_bind_address: BindAddress, + local_bind_address: BindAddress, + ) -> ResultSend<()> { + async move { + let res = join.next().await; + match res { + Some(Either::Right(remote_peer_id)) => { + let _res = join.next().await; + + // if res.is_some() + // && res.as_ref().unwrap().as_ref().unwrap_left() == &NetError::Closing + // { + // return; + // } + log_debug!("SOCKET IS CLOSED {:?} peer_id: {:?}", res, remote_peer_id); + BROKER + .write() + .await + .remove_peer_id(remote_peer_id, None) + .await; + } + _ => { + log_debug!( + "SOCKET IS CLOSED {:?} remote: {:?} local: {:?}", + res, + remote_bind_address, + local_bind_address + ); + BROKER + .write() + .await + .remove_anonymous(remote_bind_address, local_bind_address); + } + } + } + .await; + Ok(()) + } + spawn_and_log_error(watch_close(join, remote_bind_address, local_bind_address)); + + Ok(()) + } + + #[cfg(not(target_arch = "wasm32"))] + fn add_user_peer( + &mut self, + user: UserId, + peer: Option, + ) -> Result<(), ProtocolError> { + let peers_set = self + .users_peers + .entry(user) + .or_insert(HashSet::with_capacity(1)); + + if !peers_set.insert(peer) { + //return Err(ProtocolError::PeerAlreadyConnected); + } + Ok(()) + } + + #[cfg(not(target_arch = "wasm32"))] + fn remove_user_peer( + &mut self, + user: &UserId, + peer: &Option, + ) -> Result<(), ProtocolError> { + let peers_set = self + .users_peers + .get_mut(user) + .ok_or(ProtocolError::UserNotConnected)?; + + if !peers_set.remove(peer) { + return Err(ProtocolError::PeerNotConnected); + } + if peers_set.is_empty() { + let _ = self.users_peers.remove(user); + } + Ok(()) + } + + #[cfg(not(target_arch = "wasm32"))] + pub(crate) async fn attach_and_authorize_app( + &mut self, + remote_bind_address: BindAddress, + local_bind_address: BindAddress, + remote_peer_id: X25519PrivKey, + user: &Option, + _info: &ClientInfo, + ) -> Result<(), ProtocolError> { + let already = self.peers.get(&(None, Some(remote_peer_id))); + if already.is_some() { + match already.unwrap().connected { + PeerConnection::NONE => {} + _ => { + return Err(ProtocolError::PeerAlreadyConnected); + } + }; + } + + //TODO: check permissions for user/remote_bind_address or headless if no user + + //TODO: keep the info + + let mut connection = self + .anonymous_connections + .remove(&(local_bind_address, remote_bind_address)) + .ok_or(ProtocolError::BrokerError)?; + + connection.reset_shutdown(remote_peer_id).await; + + if user.is_some() { + self.add_user_peer(user.unwrap(), Some(remote_peer_id))?; + } + + let connected = PeerConnection::Client(connection); + + let bpi = BrokerPeerInfo { + last_peer_advert: None, + connected, + }; + self.peers.insert((None, Some(remote_peer_id)), bpi); + + Ok(()) + } + + #[cfg(not(target_arch = "wasm32"))] + pub(crate) async fn attach_and_authorize_peer_id( + &mut self, + remote_bind_address: BindAddress, + local_bind_address: BindAddress, + remote_peer_id: X25519PrivKey, + // if client is None it means we are Core mode + client: Option, + fsm: &mut NoiseFSM, + ) -> Result<(), ProtocolError> { + log_debug!("ATTACH PEER_ID {:?}", remote_peer_id); + + let already = self.peers.remove(&(None, Some(remote_peer_id))); + if already.is_some() { + match already.unwrap().connected { + PeerConnection::NONE => {} + PeerConnection::Client(mut cnx) => { + cnx.close_silently().await; + } + _ => {} + }; + } + + // find the listener + let listener_id = self + 
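add_user_peer and remove_user_peer above maintain a user-to-peers multimap in which None stands for the local transport. The invariant they keep, as a self-contained sketch with u8 standing in for the real key types:

use std::collections::{HashMap, HashSet};

fn users_peers_invariant() {
    let mut users_peers: HashMap<u8, HashSet<Option<u8>>> = HashMap::new();
    let user = 1u8;
    // connect: one remote peer and one local transport (None)
    users_peers.entry(user).or_insert_with(|| HashSet::with_capacity(1)).insert(Some(7));
    users_peers.entry(user).or_default().insert(None);
    // disconnect: the user entry is dropped once its last peer is removed
    let set = users_peers.get_mut(&user).unwrap();
    set.remove(&Some(7));
    set.remove(&None);
    if set.is_empty() {
        users_peers.remove(&user);
    }
    assert!(users_peers.is_empty());
}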
.bind_addresses + .get(&local_bind_address) + .ok_or(ProtocolError::AccessDenied)?; + let listener = self + .listeners + .get(listener_id) + .ok_or(ProtocolError::AccessDenied)?; + + // authorize + let is_core = if client.is_none() { + // it is a Core connection + if !listener.config.is_core() { + return Err(ProtocolError::AccessDenied); + } + true + } else { + if !listener.config.accepts_client() { + return Err(ProtocolError::AccessDenied); + } + let client = client.as_ref().unwrap(); + self.authorize( + &(local_bind_address, remote_bind_address), + Authorization::Client((client.user.clone(), client.registration.clone())), + ) + .await?; + + // TODO add client to storage + false + }; + + let mut connection = self + .anonymous_connections + .remove(&(local_bind_address, remote_bind_address)) + .ok_or(ProtocolError::BrokerError)?; + + connection.reset_shutdown(remote_peer_id).await; + let connected = if !is_core { + let user = client.unwrap().user; + fsm.set_user_id(user); + self.add_user_peer(user, Some(remote_peer_id))?; + + PeerConnection::Client(connection) + } else { + let dc = DirectConnection { + addr: remote_bind_address, + remote_peer_id, + tp: connection.transport_protocol(), + cnx: connection, + }; + self.direct_connections.insert(remote_bind_address, dc); + PeerConnection::Core(remote_bind_address) + }; + let bpi = BrokerPeerInfo { + last_peer_advert: None, + connected, + }; + self.peers.insert((None, Some(remote_peer_id)), bpi); + + Ok(()) + } + + pub async fn probe( + &mut self, + cnx: Box, + ip: IP, + port: u16, + ) -> Result, ProtocolError> { + if self.closing { + return Err(ProtocolError::Closing); + } + cnx.probe(ip, port).await + } + + pub async fn admin< + A: Into + + Into + + std::fmt::Debug + + Sync + + Send + + 'static, + >( + &mut self, + cnx: Box, + peer_privk: PrivKey, + peer_pubk: PubKey, + remote_peer_id: DirectPeerId, + user: PubKey, + user_priv: PrivKey, + addr: BindAddress, + request: A, + ) -> Result { + let config = StartConfig::Admin(AdminConfig { + user, + user_priv, + addr, + request: request.into(), + }); + let remote_peer_id_dh = remote_peer_id.to_dh_from_ed(); + + let mut connection = cnx + .open( + config.get_url(), + peer_privk.clone(), + peer_pubk, + remote_peer_id_dh, + config.clone(), + ) + .await?; + + connection.admin::().await + } + + pub async fn ext< + A: Into + Into + std::fmt::Debug + Sync + Send + 'static, + B: TryFrom + std::fmt::Debug + Sync + Send + 'static, + >( + cnx: Box, + peer_privk: PrivKey, + peer_pubk: PubKey, + remote_peer_id: DirectPeerId, + url: String, + request: A, + ) -> Result { + let config = StartConfig::Ext(ExtConfig { + url, + request: request.into(), + }); + let remote_peer_id_dh = remote_peer_id.to_dh_from_ed(); + let mut connection = cnx + .open( + config.get_url(), + peer_privk.clone(), + peer_pubk, + remote_peer_id_dh, + config.clone(), + ) + .await?; + connection.ext::().await + } + + #[doc(hidden)] + pub fn connect_local(&mut self, peer_pubk: PubKey, user: UserId) -> Result<(), ProtocolError> { + if self.closing { + return Err(ProtocolError::Closing); + } + + let (client_cnx, server_cnx) = ConnectionBase::create_local_transport_pipe(user, peer_pubk); + + let bpi = BrokerPeerInfo { + last_peer_advert: None, + connected: PeerConnection::Local(LocalTransport { + client_peer_id: peer_pubk, + client_cnx, + server_cnx, + }), + }; + + self.peers.insert((Some(user), None), bpi); + Ok(()) + } + + pub async fn connect( + &mut self, + cnx: Arc>, + peer_privk: PrivKey, + peer_pubk: PubKey, + remote_peer_id: DirectPeerId, + 
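connect_local above is the in-process twin of connect: no dialing, just a fabricated client/server pipe registered under the key (Some(user), None). A sketch of the embedding side, assuming user and peer_pubk come from the wallet/session layer:

use ng_net::broker::BROKER;
use ng_repo::errors::ProtocolError;
use ng_repo::types::{PubKey, UserId};

async fn attach_local(user: UserId, peer_pubk: PubKey) -> Result<(), ProtocolError> {
    BROKER.write().await.connect_local(peer_pubk, user)?;
    Ok(())
}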
config: StartConfig, + ) -> Result<(), ProtocolError> { + if self.closing { + return Err(ProtocolError::Closing); + } + + log_debug!("CONNECTING"); + let remote_peer_id_dh = remote_peer_id.to_dh_from_ed(); + + // checking if already connected + if config.is_keep_alive() { + let already = self + .peers + .get(&(config.get_user(), Some(*remote_peer_id_dh.slice()))); + if already.is_some() { + match already.unwrap().connected { + PeerConnection::NONE => {} + _ => { + return Err(ProtocolError::PeerAlreadyConnected); + } + }; + } + //TODO, if Core, check that IP is not in self.direct_connections + } + + let mut connection = cnx + .open( + config.get_url(), + peer_privk.clone(), + peer_pubk, + remote_peer_id_dh, + config.clone(), + ) + .await?; + + if !config.is_keep_alive() { + return Ok(()); + } + + let join = connection.take_shutdown(); + + let connected = match &config { + StartConfig::Core(config) => { + let dc = DirectConnection { + addr: config.addr, + remote_peer_id: *remote_peer_id_dh.slice(), + tp: connection.transport_protocol(), + cnx: connection, + }; + self.direct_connections.insert(config.addr, dc); + PeerConnection::Core(config.addr) + } + StartConfig::Client(_) | StartConfig::App(_) => PeerConnection::Client(connection), + _ => unimplemented!(), + }; + + let bpi = BrokerPeerInfo { + last_peer_advert: None, + connected, + }; + + self.peers + .insert((config.get_user(), Some(*remote_peer_id_dh.slice())), bpi); + + async fn watch_close( + mut join: Receiver>, + _cnx: Arc>, + _peer_privk: PrivKey, + _peer_pubkey: PubKey, + remote_peer_id: [u8; 32], + config: StartConfig, + mut local_broker: UnboundedSender, + ) -> ResultSend<()> { + async move { + let res = join.next().await; + log_info!("SOCKET IS CLOSED {:?} {:?}", res, remote_peer_id); + if res.is_some() + && res.as_ref().unwrap().is_left() + && res.unwrap().unwrap_left() != NetError::Closing + { + // we intend to reconnect + let mut broker = BROKER.write().await; + broker.reconnecting(remote_peer_id, config.get_user()); + // TODO: deal with cycle error https://users.rust-lang.org/t/recursive-async-method-causes-cycle-error/84628/5 + // there is async_recursion now. use that + // use a channel and send the reconnect job to it. + // create a spawned loop to read the channel and process the reconnection requests. 
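The TODOs above sketch the intended reconnection policy; a minimal illustration of that incremental backoff, where try_reconnect is a hypothetical stand-in for the future reconnect path:

async fn reconnect_with_backoff() {
    // 5s, then +10s, +20s, +30s, per the TODO above
    for secs in [5u64, 10, 20, 30] {
        sleep!(std::time::Duration::from_secs(secs));
        // try_reconnect() is a hypothetical helper, not part of this patch
        if try_reconnect().await.is_ok() {
            return; // reconnected; stop retrying
        }
    }
    // all attempts failed: fall through to the Disconnected message below
}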
+ // let result = broker + // .connect(cnx, ip, core, peer_pubk, peer_privk, remote_peer_id) + // .await; + // log_debug!("SOCKET RECONNECTION {:?} {:?}", result, &remote_peer_id); + // TODO: deal with error and incremental backoff + + // TODO: incremental reconnections: after 5sec, +10sec, +20sec, +30sec + + // if all attempts fail : + if let Some(user) = config.get_user() { + let _ = local_broker + .send(LocalBrokerMessage::Disconnected { user_id: user }) + .await; + } + } else { + log_debug!("REMOVED"); + BROKER + .write() + .await + .remove_peer_id(remote_peer_id, config.get_user()) + .await; + } + } + .await; + Ok(()) + } + spawn_and_log_error(watch_close( + join, + cnx, + peer_privk, + peer_pubk, + *remote_peer_id_dh.slice(), + config, + self.get_local_broker()?, + )); + Ok(()) + } + + pub async fn send_client_event< + A: Into + std::fmt::Debug + Sync + Send + 'static, + >( + &self, + user: &Option, + remote_peer_id: &Option, // None means local + msg: A, + ) -> Result<(), NgError> { + let bpi = self + .peers + .get(&(*user, remote_peer_id.map(|rpi| rpi.to_dh_slice()))) + .ok_or(NgError::ConnectionNotFound)?; + match &bpi.connected { + PeerConnection::Client(cnx) => cnx.send_client_event(msg).await, + PeerConnection::Local(lt) => lt.client_cnx.send_client_event(msg).await, + _ => Err(NgError::BrokerError), + } + } + + pub async fn request< + A: Into + std::fmt::Debug + Sync + Send + 'static, + B: TryFrom + std::fmt::Debug + Sync + Send + 'static, + >( + &self, + user: &Option, + remote_peer_id: &Option, // None means local + msg: A, + ) -> Result, NgError> { + let bpi = self + .peers + .get(&(*user, remote_peer_id.map(|rpi| rpi.to_dh_slice()))) + .ok_or(NgError::ConnectionNotFound)?; + match &bpi.connected { + PeerConnection::Client(cnx) => cnx.request(msg).await, + PeerConnection::Local(lt) => lt.client_cnx.request(msg).await, + _ => Err(NgError::BrokerError), + } + } + + #[cfg(not(target_arch = "wasm32"))] + fn get_fsm_for_client(&self, client: &ClientPeerId) -> Option>> { + match client { + ClientPeerId::Local((user, _)) => { + if let Some(BrokerPeerInfo { + connected: + PeerConnection::Local(LocalTransport { + server_cnx: ConnectionBase { fsm: Some(fsm), .. }, + .. + }), + .. + }) = self.peers.get(&(Some(*user), None)) + { + Some(Arc::clone(fsm)) + } else { + None + } + } + ClientPeerId::Remote(peer) => { + if let Some(BrokerPeerInfo { + connected: PeerConnection::Client(ConnectionBase { fsm: Some(fsm), .. }), + .. + }) = self.peers.get(&(None, Some(peer.to_dh()))) + { + Some(Arc::clone(fsm)) + } else { + None + } + } + } + } + + #[cfg(not(target_arch = "wasm32"))] + pub(crate) async fn dispatch_event( + &self, + overlay: &OverlayId, + event: Event, + user_id: &UserId, + remote_peer: &PubKey, + ) -> Result, ServerError> { + // TODO: deal with subscriptions on the outer overlay. for now we assume everything is on the inner overlay + + let mut clients_to_remove = vec![]; + + let peers_for_local_dispatch = { + self.get_server_broker()? + .read() + .await + .dispatch_event(overlay, event.clone(), user_id, remote_peer) + .await? 
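request above is generic over any A that converts into a ProtocolMessage and any B that parses back out, returning SoS<B> (single or stream). A hedged call sketch in which ReqT, RespT and my_req are placeholders, not types from this patch:

use ng_net::actor::SoS;
use ng_net::broker::Broker;
use ng_repo::errors::NgError;
use ng_repo::types::{DirectPeerId, UserId};

async fn roundtrip(
    broker: &Broker,
    user: UserId,
    peer: DirectPeerId,
    my_req: ReqT, // placeholder request type
) -> Result<(), NgError> {
    match broker.request::<ReqT, RespT>(&Some(user), &Some(peer), my_req).await? {
        SoS::Single(_reply) => { /* one-shot answer */ }
        SoS::Stream(_rx) => { /* subscription-style stream of replies */ }
    }
    Ok(())
}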
+ }; + + for client in peers_for_local_dispatch { + log_debug!("dispatch_event peer {:?}", client); + if let Some(fsm) = self.get_fsm_for_client(&client) { + log_debug!("ForwardedEvent peer {:?}", client); + let _ = fsm + .lock() + .await + .send(ProtocolMessage::ClientMessage(ClientMessage::V0( + ClientMessageV0 { + overlay: *overlay, + padding: vec![], + content: ClientMessageContentV0::ForwardedEvent(event.clone()), + }, + ))) + .await; + } else { + // we remove the peer from all local_subscriptions + clients_to_remove.push(client); + } + } + + Ok(clients_to_remove) + } + + #[cfg(not(target_arch = "wasm32"))] + pub async fn dispatch_inbox_msg( + &self, + users: &HashSet, + msg: InboxMsg, + ) -> Result, ServerError> { + + for user in users.iter() { + if let Some(peers) = self.users_peers.get(user) { + for peer in peers.iter() { + if peer.is_some() { + if let Some(BrokerPeerInfo { + connected: PeerConnection::Client(ConnectionBase { fsm: Some(fsm), .. }), + .. + }) = self.peers.get(&(None, Some(peer.to_owned().unwrap()))) + { + //let fsm = Arc::clone(fsm); + let _ = fsm + .lock() + .await + .send(ProtocolMessage::ClientMessage(ClientMessage::V0( + ClientMessageV0 { + overlay: msg.body.to_overlay.clone(), + padding: vec![], + content: ClientMessageContentV0::InboxReceive{msg, from_queue: false}, + }, + ))) + .await; + return Ok(None); + } + } + } + } + } + Ok(Some(msg)) + } + + #[doc(hidden)] + pub async fn close_peer_connection_x( + &mut self, + peer_id: Option, + user: Option, + ) { + if let Some(peer) = self.peers.get_mut(&(user, peer_id)) { + match &mut peer.connected { + PeerConnection::Core(_) => { + //TODO + unimplemented!(); + } + PeerConnection::Client(cb) => { + cb.close().await; + } + PeerConnection::NONE => {} + PeerConnection::Local(lt) => { + assert!(peer_id.is_none()); + assert!(user.is_some()); + lt.close().await; + if self.peers.remove(&(user, None)).is_some() { + log_debug!( + "Local transport connection closed ! 
{}", + user.unwrap().to_string() + ); + } + } + } + //self.peers.remove(peer_id); // this is done in the watch_close instead + } + } + + pub async fn close_peer_connection(&mut self, peer_id: &DirectPeerId, user: Option) { + self.close_peer_connection_x(Some(peer_id.to_dh_slice()), user) + .await + } + + async fn close_anonymous( + &mut self, + remote_bind_address: BindAddress, + local_bind_address: BindAddress, + ) { + if let Some(cb) = self + .anonymous_connections + .get_mut(&(local_bind_address, remote_bind_address)) + { + cb.close().await; + } + } + + #[doc(hidden)] + pub fn print_status(&self) { + self.peers.iter().for_each(|(peer_id, peer_info)| { + log_info!("PEER in BROKER {:?} {:?}", peer_id, peer_info); + }); + self.direct_connections.iter().for_each(|(ip, direct_cnx)| { + log_info!("direct_connection in BROKER {:?} {:?}", ip, direct_cnx); + }); + self.anonymous_connections.iter().for_each(|(binds, cb)| { + log_info!( + "ANONYMOUS remote {:?} local {:?} {:?}", + binds.1, + binds.0, + cb + ); + }); + } +} diff --git a/ng-net/src/bsps.rs b/ng-net/src/bsps.rs new file mode 100644 index 0000000..6959252 --- /dev/null +++ b/ng-net/src/bsps.rs @@ -0,0 +1,53 @@ +use time::{Month,Date}; +use std::collections::HashMap; +use lazy_static::lazy_static; + +pub struct BSPDetail<'a> { + pub domain: &'a str, + // ISO-2 code + pub country: &'a str, + // email address of sys admin + pub sysadmin: &'a str, + // owned or rented + pub owned: bool, + pub since: Date, + pub has_free: bool, + pub has_paid: bool, + pub official: bool, + pub description: &'a str, +} + +lazy_static! { + pub static ref BSP_DETAILS: HashMap<&'static str, BSPDetail<'static>> = { + let mut d = HashMap::new(); + + d.insert("https://nextgraph.eu", BSPDetail { + domain: "nextgraph.eu", + country: "de", + sysadmin: "team@nextgraph.org", + owned: false, + since: Date::from_calendar_date(2024, Month::September,2).unwrap(), + has_free: true, + has_paid: false, + official: true, + description: "First official Broker Service Provider from NextGraph.org. Based in Europe." + }); + + assert!(d.insert("https://nextgraph.one", BSPDetail { + domain: "nextgraph.one", + country: "de", + sysadmin: "team@nextgraph.org", + owned: false, + since: Date::from_calendar_date(2025, Month::April,20).unwrap(), + has_free: true, + has_paid: false, + official: true, + description: "Second official Broker Service Provider from NextGraph.org. Based in Europe, but that could change." + }).is_none()); + + d + }; + pub static ref BSP_ORIGINS: Vec<&'static str> = { + BSP_DETAILS.keys().cloned().collect() + }; +} \ No newline at end of file diff --git a/ng-net/src/connection.rs b/ng-net/src/connection.rs new file mode 100644 index 0000000..8c5267f --- /dev/null +++ b/ng-net/src/connection.rs @@ -0,0 +1,1621 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +//! 
Finite State Machine of the connection/protocol/Noise channel + +//static NOISE_CONFIG: &'static str = "Noise_XK_25519_ChaChaPoly_BLAKE2b"; + +use std::any::TypeId; +use std::collections::HashMap; +use std::fmt; +use std::sync::Arc; + +use async_std::stream::StreamExt; +use async_std::sync::Mutex; +use either::Either; +use futures::{channel::mpsc, select, FutureExt, SinkExt}; +use noise_protocol::{patterns::noise_xk, CipherState, HandshakeState}; +use noise_rust_crypto::*; +use serde_bare::from_slice; +use unique_id::sequence::SequenceGenerator; +use unique_id::Generator; +use unique_id::GeneratorFromSeed; + +use ng_repo::errors::*; +use ng_repo::log::*; +use ng_repo::types::{DirectPeerId, PrivKey, PubKey, UserId, X25519PrivKey}; +use ng_repo::utils::sign; +#[cfg(not(target_arch = "wasm32"))] +use ng_repo::utils::verify; + +use crate::actor::{Actor, SoS}; +use crate::actors::*; +use crate::broker::LocalBrokerMessage; +use crate::broker::{ClientPeerId, BROKER}; +use crate::types::*; +use crate::utils::*; + +#[derive(Debug, Clone)] +pub enum ConnectionCommand { + Msg(ProtocolMessage), + Error(NetError), + ProtocolError(ProtocolError), + Close, + ReEnter, +} + +impl ConnectionCommand { + pub fn is_re_enter(&self) -> bool { + match self { + Self::ReEnter => true, + _ => false, + } + } +} + +#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] +pub trait IConnect: Send + Sync { + async fn open( + &self, + url: String, + peer_privk: PrivKey, + peer_pubk: PubKey, + remote_peer: DirectPeerId, + config: StartConfig, + ) -> Result; + + async fn probe(&self, ip: IP, port: u16) -> Result, ProtocolError>; +} + +#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] +pub trait IAccept: Send + Sync { + type Socket; + async fn accept( + &self, + remote_bind_address: BindAddress, + local_bind_address: BindAddress, + peer_privk: PrivKey, + socket: Self::Socket, + ) -> Result; +} + +#[derive(PartialEq, Debug, Clone)] +pub enum ConnectionDir { + Server, + Client, +} + +impl ConnectionDir { + pub fn is_server(&self) -> bool { + *self == ConnectionDir::Server + } +} + +#[derive(Debug, PartialEq)] +pub enum FSMstate { + Local0, + Start, + Probe, + Relay, + Noise0, // unused + Noise1, + Noise2, + Noise3, + AdminRequest, + ExtRequest, + //ExtResponse, + ClientHello, + ServerHello, + ClientAuth, + AuthResult, + AppHello, + AppHello2, + Closing, +} + +pub struct NoiseFSM { + state: FSMstate, + dir: ConnectionDir, + sender: Sender, + + /// first is local, second is remote + #[allow(dead_code)] + bind_addresses: Option<(BindAddress, BindAddress)>, + + actors: Arc>>>, + + noise_handshake_state: Option>, + noise_cipher_state_enc: Option>, + noise_cipher_state_dec: Option>, + + local: Option, + remote: Option, + + #[allow(dead_code)] + nonce_for_hello: Vec, + config: Option, + + user: Option, +} + +impl fmt::Debug for NoiseFSM { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("NoiseFSM") + .field("state", &self.state) + .field("dir", &self.dir) + .finish() + } +} + +pub enum StepReply { + Responder(ProtocolMessage), + Response(ProtocolMessage), + NONE, + CloseNow, + ReEnter, +} + +#[derive(Debug, Clone)] +pub struct ClientConfig { + pub url: String, + //pub user: PubKey, + pub user_priv: PrivKey, + pub client_priv: PrivKey, + pub info: ClientInfo, + pub name: Option, + //pub peer_advert: PeerAdvert, + pub registration: Option>, 
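IConnect above abstracts the dialing transport (native websocket, wasm websocket). A hedged sketch of driving it directly, assuming open yields a ConnectionBase and a ProtocolError-compatible error, as the broker's connect and probe call sites suggest:

use ng_net::connection::{IConnect, StartConfig};
use ng_repo::errors::ProtocolError;
use ng_repo::types::{DirectPeerId, PrivKey, PubKey};

async fn dial(
    cnx: std::sync::Arc<Box<dyn IConnect>>,
    peer_privk: PrivKey,
    peer_pubk: PubKey,
    remote: DirectPeerId,
    config: StartConfig,
) -> Result<(), ProtocolError> {
    let url = config.get_url();
    // BROKER.connect() adds keep-alive bookkeeping around this same call
    let _connection = cnx.open(url, peer_privk, peer_pubk, remote, config).await?;
    Ok(())
}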
+} + +#[derive(Debug, Clone)] +pub struct ExtConfig { + pub url: String, + pub request: ExtRequestContentV0, +} + +#[derive(Debug, Clone)] +pub struct CoreConfig { + pub addr: BindAddress, + //pub interface: String, + pub overlays_config: CoreBrokerConnect, +} + +#[derive(Debug, Clone)] +pub struct AdminConfig { + pub user: PubKey, + pub user_priv: PrivKey, + pub addr: BindAddress, + pub request: AdminRequestContentV0, +} + +#[derive(Debug, Clone)] +pub struct AppConfig { + pub addr: BindAddress, + pub info: ClientInfo, + pub user_priv: Option, +} + +#[derive(Debug, Clone)] +pub enum StartConfig { + Probe, + Relay(BindAddress), + Client(ClientConfig), + Ext(ExtConfig), + Core(CoreConfig), + Admin(AdminConfig), + App(AppConfig), +} + +impl StartConfig { + pub fn get_url(&self) -> String { + match self { + Self::Client(ClientConfig { url, .. }) | Self::Ext(ExtConfig { url, .. }) => { + url.clone() + } + Self::Admin(AdminConfig { addr, .. }) + | Self::Core(CoreConfig { addr, .. }) + | Self::App(AppConfig { addr, .. }) => { + format!("ws://{}:{}", addr.ip, addr.port) + } + _ => unimplemented!(), + } + } + pub fn get_user(&self) -> Option { + match self { + Self::Client(config) => Some(config.user_priv.to_pub()), + Self::App(config) => config.user_priv.as_ref().map(|k| k.to_pub()), + _ => None, + } + } + pub fn is_keep_alive(&self) -> bool { + match self { + StartConfig::Core(_) | StartConfig::Client(_) | StartConfig::App(_) => true, + _ => false, + } + } + pub fn is_oneshot(&self) -> bool { + match self { + StartConfig::Admin(_) | StartConfig::Ext(_) => true, + _ => false, + } + } +} + +impl NoiseFSM { + pub fn new( + bind_addresses: Option<(BindAddress, BindAddress)>, + tp: TransportProtocol, + dir: ConnectionDir, + actors: Arc>>>, + sender: Sender, + local: Option, + remote: Option, + ) -> Self { + Self { + state: if tp == TransportProtocol::Local { + FSMstate::Local0 + } else { + FSMstate::Start + }, + dir, + bind_addresses, + actors, + sender, + noise_handshake_state: None, + noise_cipher_state_enc: None, + noise_cipher_state_dec: None, + local, + remote, + nonce_for_hello: vec![], + config: None, + user: None, + } + } + + pub fn user_id(&self) -> Result { + match &self.config { + Some(start_config) => start_config.get_user().ok_or(ProtocolError::ActorError), + _ => self.user.ok_or(ProtocolError::ActorError), + } + } + + pub fn remote_peer(&self) -> &Option { + &self.remote + } + + pub fn get_client_peer_id(&self) -> Result { + Ok(match self.state { + FSMstate::Local0 => { + ClientPeerId::new_from( + &self.remote_peer().ok_or(ProtocolError::ActorError)?, + &Some(self.user.unwrap()), + ) + // the unwrap and Some is on purpose. to enforce that we do have a user + } + _ => { + ClientPeerId::new_from(&self.remote_peer().ok_or(ProtocolError::ActorError)?, &None) + } + }) + } + + #[cfg(not(target_arch = "wasm32"))] + pub(crate) fn set_user_id(&mut self, user: UserId) { + if self.user.is_none() { + self.user = Some(user); + } + } + + fn decrypt(&mut self, ciphertext: &Noise) -> Result { + let ser = self + .noise_cipher_state_dec + .as_mut() + .unwrap() + .decrypt_vec(ciphertext.data()) + .map_err(|_e| ProtocolError::DecryptionError)?; + + Ok(from_slice::(&ser)?) 
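StartConfig::get_url above splits URL-carrying configs (Client, Ext) from address-carrying ones (Admin, Core, App). A small sketch, with addr and info assumed built by the caller:

use ng_net::connection::{AppConfig, StartConfig};
use ng_net::types::{BindAddress, ClientInfo};

fn app_url(addr: BindAddress, info: ClientInfo) -> String {
    let cfg = StartConfig::App(AppConfig { addr, info, user_priv: None });
    // address-based configs synthesize "ws://ip:port"
    cfg.get_url()
}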
+ } + + fn encrypt(&mut self, plaintext: ProtocolMessage) -> Result { + let ser = serde_bare::to_vec(&plaintext)?; + + let cipher = self + .noise_cipher_state_enc + .as_mut() + .unwrap() + .encrypt_vec(&ser); + + Ok(Noise::V0(NoiseV0 { data: cipher })) + } + + pub async fn remove_actor(&self, id: i64) { + self.actors.lock().await.remove(&id); + } + + pub async fn send(&mut self, msg: ProtocolMessage) -> Result<(), ProtocolError> { + self.send_in_reply_to(msg, 0).await + } + + pub async fn send_in_reply_to( + &mut self, + mut msg: ProtocolMessage, + in_reply_to: i64, + ) -> Result<(), ProtocolError> { + if in_reply_to != 0 { + msg.set_id(in_reply_to); + } + #[cfg(debug_assertions)] + if msg.is_block() { + log_debug!("SENDING BLOCK"); + } else { + log_debug!("SENDING: {:?}", msg); + } + if self.noise_cipher_state_enc.is_some() { + let cipher = self.encrypt(msg)?; + self.sender + .send(ConnectionCommand::Msg(ProtocolMessage::Noise(cipher))) + .await + .map_err(|_e| ProtocolError::IoError)?; + return Ok(()); + } else { + self.sender + .send(ConnectionCommand::Msg(msg)) + .await + .map_err(|_e| ProtocolError::IoError)?; + return Ok(()); + } + } + + // pub async fn receive( + // &mut self, + // msg: ProtocolMessage, + // ) -> Result { + // if self.state == FSMstate::AuthResult && self.noise_cipher_state.is_some() { + // if let ProtocolMessage::Noise(noise) = msg { + // let new = self.decrypt(&noise); + // Ok(new) + // } else { + // Err(ProtocolError::MustBeEncrypted) + // } + // } else { + // Err(ProtocolError::InvalidState) + // } + // } + + async fn process_server_noise0(&mut self, noise: &Noise) -> Result { + let mut handshake = HandshakeState::::new( + noise_xk(), + false, + &[], + Some(sensitive_from_privkey(self.local.take().unwrap().to_dh())), + None, + None, + None, + ); + + let mut payload = handshake.read_message_vec(noise.data()).map_err(|_e| { + log_debug!("{:?}", _e); + ProtocolError::NoiseHandshakeFailed + })?; + + payload = handshake.write_message_vec(&payload).map_err(|_e| { + log_debug!("{:?}", _e); + ProtocolError::NoiseHandshakeFailed + })?; + + let noise = Noise::V0(NoiseV0 { data: payload }); + self.send(noise.into()).await?; + + self.noise_handshake_state = Some(handshake); + + self.state = FSMstate::Noise2; + + return Ok(StepReply::NONE); + } + + #[allow(dead_code)] + fn process_server_noise3(&mut self, noise: &Noise) -> Result<(), ProtocolError> { + let handshake = self.noise_handshake_state.as_mut().unwrap(); + + let _ = handshake + .read_message_vec(noise.data()) + .map_err(|_e| ProtocolError::NoiseHandshakeFailed)?; + + if !handshake.completed() { + return Err(ProtocolError::NoiseHandshakeFailed); + } + let peer_id = handshake.get_rs().unwrap(); + self.remote = Some(PubKey::X25519PubKey(peer_id)); + + let ciphers = handshake.get_ciphers(); + self.noise_cipher_state_enc = Some(ciphers.1); + self.noise_cipher_state_dec = Some(ciphers.0); + + self.noise_handshake_state = None; + + Ok(()) + } + + pub async fn step( + &mut self, + mut msg_opt: Option, + ) -> Result { + if self.noise_cipher_state_dec.is_some() && msg_opt.is_some() { + if let Some(ProtocolMessage::Noise(noise)) = msg_opt.as_ref() { + let new = self.decrypt(noise)?; + msg_opt.replace(new); + } else { + return Err(ProtocolError::MustBeEncrypted); + } + } + if msg_opt.is_some() { + #[cfg(debug_assertions)] + if msg_opt.as_ref().unwrap().is_block() { + log_debug!("RECEIVED BLOCK"); + } else { + log_debug!( + "RECEIVED: {:?} in state {:?}", + msg_opt.as_ref().unwrap(), + self.state + ); + } + } + match self.state 
+        match self.state {
+            FSMstate::Closing => {}
+            // TODO verify that ID is zero
+            // FSMstate::Local0 => {
+            //     // CLIENT LOCAL
+            //     if !self.dir.is_server() && msg_opt.is_none() {
+            //         self.state = FSMstate::ClientHello;
+            //         //Box::new(Actor::::new(0, true));
+            //         return Ok(StepReply::NONE);
+            //     }
+            //     // SERVER LOCAL
+            //     else if let Some(msg) = msg_opt.as_ref() {
+            //         if self.dir.is_server() && msg.type_id() == ClientHello::Local.type_id() {
+            //             self.state = FSMstate::ServerHello;
+            //             //Box::new(Actor::::new(msg.id(), false));
+            //             return Ok(StepReply::NONE);
+            //         }
+            //     }
+            // }
+            FSMstate::Start => {
+                if !self.dir.is_server() && msg_opt.is_none() {
+                    // CLIENT START
+                    match self.config.as_ref().unwrap() {
+                        StartConfig::Probe => {
+                            // PROBE REQUEST
+                            let request = ProtocolMessage::Probe(MAGIC_NG_REQUEST);
+                            self.send(request).await?;
+                            self.state = FSMstate::Probe;
+                            return Ok(StepReply::NONE);
+                        }
+                        StartConfig::Relay(_relay_to) => {
+                            // RELAY REQUEST
+                            //self.state
+                            todo!();
+                        }
+                        _ => {
+                            // CLIENT INITIALIZE NOISE
+                            let mut handshake =
+                                HandshakeState::<X25519, ChaCha20Poly1305, Blake2b>::new(
+                                    noise_xk(),
+                                    true,
+                                    &[],
+                                    Some(sensitive_from_privkey(
+                                        self.local.take().unwrap().to_dh(),
+                                    )),
+                                    None,
+                                    Some(*self.remote.unwrap().slice()),
+                                    None,
+                                );
+
+                            let payload = handshake
+                                .write_message_vec(&[])
+                                .map_err(|_e| ProtocolError::NoiseHandshakeFailed)?;
+
+                            let noise = Noise::V0(NoiseV0 { data: payload });
+                            self.send(noise.into()).await?;
+
+                            self.noise_handshake_state = Some(handshake);
+
+                            self.state = FSMstate::Noise1;
+
+                            return Ok(StepReply::NONE);
+                        }
+                    }
+                } else {
+                    #[cfg(not(target_arch = "wasm32"))]
+                    if let Some(msg) = msg_opt.as_ref() {
+                        if self.dir.is_server() {
+                            // SERVER START
+                            match msg {
+                                ProtocolMessage::Probe(magic) => {
+                                    // PROBE REQUEST
+                                    if *magic != MAGIC_NG_REQUEST {
+                                        return Err(ProtocolError::WhereIsTheMagic);
+                                    }
+                                    let mut probe_response = ProbeResponse {
+                                        magic: MAGIC_NG_RESPONSE.to_vec(),
+                                        peer_id: None,
+                                    };
+                                    if BROKER
+                                        .read()
+                                        .await
+                                        .authorize(
+                                            &self
+                                                .bind_addresses
+                                                .ok_or(ProtocolError::BrokerError)?,
+                                            Authorization::Discover,
+                                        )
+                                        .await
+                                        .is_ok()
+                                    {
+                                        probe_response.peer_id = Some(
+                                            self.local
+                                                .as_ref()
+                                                .ok_or(ProtocolError::BrokerError)?
+ .to_pub(), + ); + } + self.send(ProtocolMessage::ProbeResponse(probe_response)) + .await?; + self.state = FSMstate::Closing; + sleep!(std::time::Duration::from_secs(2)); + return Ok(StepReply::CloseNow); + } + ProtocolMessage::Relay(_) => { + todo!(); + } + ProtocolMessage::Tunnel(_) => { + self.state = FSMstate::Noise1; + todo!(); + } + ProtocolMessage::Noise(noise) => { + // SERVER INITIALIZE NOISE + return self.process_server_noise0(noise).await; + } + _ => return Err(ProtocolError::InvalidState), + } + } + } + } + } + FSMstate::Probe => { + // CLIENT side receiving probe response + if let Some(msg) = msg_opt { + let id = msg.id(); + if id.is_some() { + return Err(ProtocolError::InvalidState); + } + if let ProtocolMessage::ProbeResponse(_probe_res) = &msg { + return Ok(StepReply::Response(msg)); + } + } + } + FSMstate::Relay => {} + + FSMstate::Noise0 => { + if let Some(ProtocolMessage::Noise(noise)) = msg_opt.as_ref() { + if self.dir.is_server() { + return self.process_server_noise0(noise).await; + } + } + } + FSMstate::Noise1 => { + // CLIENT second round NOISE + if let Some(msg) = msg_opt.as_ref() { + if !self.dir.is_server() { + if let ProtocolMessage::Noise(noise) = msg { + let handshake = self.noise_handshake_state.as_mut().unwrap(); + + let mut payload = handshake + .read_message_vec(noise.data()) + .map_err(|_e| ProtocolError::NoiseHandshakeFailed)?; + + payload = handshake.write_message_vec(&payload).map_err(|_e| { + log_debug!("{:?}", _e); + ProtocolError::NoiseHandshakeFailed + })?; + + if !handshake.completed() { + return Err(ProtocolError::NoiseHandshakeFailed); + } + + let ciphers = handshake.get_ciphers(); + + let mut next_step = StepReply::NONE; + match self.config.as_ref().unwrap() { + StartConfig::Client(_client_config) => { + let noise3 = + ClientHello::Noise3(Noise::V0(NoiseV0 { data: payload })); + self.send(noise3.into()).await?; + self.state = FSMstate::ClientHello; + } + StartConfig::Ext(_ext_config) => { + let noise = Noise::V0(NoiseV0 { data: payload }); + self.send(noise.into()).await?; + self.state = FSMstate::Noise3; + next_step = StepReply::ReEnter; + } + StartConfig::Core(_core_config) => { + todo!(); + } + StartConfig::Admin(_) => { + let noise = Noise::V0(NoiseV0 { data: payload }); + self.send(noise.into()).await?; + self.state = FSMstate::Noise3; + next_step = StepReply::ReEnter; + } + StartConfig::App(app_config) => { + let app_hello = AppHello { + noise: Noise::V0(NoiseV0 { data: payload }), + user: app_config.user_priv.as_ref().map(|k| k.to_pub()), + info: app_config.info.clone(), + }; + self.send(app_hello.into()).await?; + self.state = FSMstate::AppHello; + } + _ => return Err(ProtocolError::InvalidState), + } + + self.noise_cipher_state_enc = Some(ciphers.0); + self.noise_cipher_state_dec = Some(ciphers.1); + + self.noise_handshake_state = None; + + return Ok(next_step); + } + } + } + } + FSMstate::AppHello => { + if let Some(msg) = msg_opt.as_ref() { + if !self.dir.is_server() { + if let ProtocolMessage::Start(StartProtocol::AppResponse(hello_response)) = + msg + { + if hello_response.result != 0 { + return Err(ProtocolError::AccessDenied); + } + + self.state = FSMstate::AppHello2; + + log_debug!("AUTHENTICATION SUCCESSFUL ! 
waiting for APP requests on the client side"); + + // we notify the actor "Connecting" that the connection is ready + let mut lock = self.actors.lock().await; + let exists = lock.remove(&0); + match exists { + Some(mut actor_sender) => { + let _ = actor_sender.send(ConnectionCommand::ReEnter).await; + } + _ => {} + } + + return Ok(StepReply::NONE); + } + } + } + } + FSMstate::Noise2 => { + // SERVER second round NOISE + #[cfg(not(target_arch = "wasm32"))] + if let Some(msg) = msg_opt.as_ref() { + if self.dir.is_server() { + if let ProtocolMessage::Start(StartProtocol::Client(ClientHello::Noise3( + noise, + ))) = msg + { + self.process_server_noise3(noise)?; + + let mut nonce_buf = [0u8; 32]; + getrandom::fill(&mut nonce_buf).unwrap(); + + self.nonce_for_hello = nonce_buf.to_vec(); + + let server_hello = ServerHello::V0(ServerHelloV0 { + nonce: self.nonce_for_hello.clone(), + }); + + self.state = FSMstate::ServerHello; + self.send(server_hello.into()).await?; + + return Ok(StepReply::NONE); + } else if let ProtocolMessage::Noise(noise) = msg { + self.process_server_noise3(noise)?; + + self.state = FSMstate::Noise3; + + return Ok(StepReply::NONE); + } else if let ProtocolMessage::Start(StartProtocol::App(app_hello)) = msg { + self.process_server_noise3(&app_hello.noise)?; + + let (local_bind_address, remote_bind_address) = + self.bind_addresses.ok_or(ProtocolError::BrokerError)?; + let result = BROKER + .write() + .await + .attach_and_authorize_app( + remote_bind_address, + local_bind_address, + *self.remote.unwrap().slice(), + &app_hello.user, + &app_hello.info, + ) + .await + .err() + .unwrap_or(ProtocolError::NoError); + + let hello_response = AppHelloResponse { + result: result.clone() as u16, + }; + self.send(hello_response.into()).await?; + + if result.is_err() { + return Err(result); + } + if app_hello.user.is_some() { + self.set_user_id(app_hello.user.unwrap()); + } + + log_debug!("AUTHENTICATION SUCCESSFUL ! 
waiting for APP requests on the server side"); + + self.state = FSMstate::AppHello2; + + return Ok(StepReply::NONE); + } + } + } + } + FSMstate::Noise3 => { + // CLIENT after Noise3, sending StartProtocol + if msg_opt.is_none() && !self.dir.is_server() { + match self.config.as_ref().unwrap() { + StartConfig::Client(_) => { + return Err(ProtocolError::InvalidState); + } + StartConfig::Ext(ext_config) => { + let ext_req = ExtRequestV0 { + content: ext_config.request.clone(), + id: 0, + }; + let protocol_start = StartProtocol::Ext(ExtRequest::V0(ext_req)); + + self.send(protocol_start.into()).await?; + self.state = FSMstate::ExtRequest; + + return Ok(StepReply::NONE); + } + StartConfig::Core(_core_config) => { + todo!(); + } + StartConfig::Admin(admin_config) => { + let ser = serde_bare::to_vec(&admin_config.request)?; + let sig = sign(&admin_config.user_priv, &admin_config.user, &ser)?; + let admin_req = AdminRequestV0 { + content: admin_config.request.clone(), + id: 0, + sig, + admin_user: admin_config.user, + padding: vec![], + }; + let protocol_start = StartProtocol::Admin(AdminRequest::V0(admin_req)); + + self.send(protocol_start.into()).await?; + self.state = FSMstate::AdminRequest; + + return Ok(StepReply::NONE); + } + _ => return Err(ProtocolError::InvalidState), + } + } else if self.dir.is_server() { + // SERVER after Noise3, receives StartProtocol + #[cfg(not(target_arch = "wasm32"))] + if let Some(ProtocolMessage::Start(start_msg)) = msg_opt.as_ref() { + match start_msg { + StartProtocol::Client(_) => { + return Err(ProtocolError::InvalidState); + } + StartProtocol::Ext(_ext_req) => { + self.state = FSMstate::Closing; + return Ok(StepReply::Responder(msg_opt.unwrap())); + } + // StartProtocol::Core(core_config) => { + // todo!(); + // } + StartProtocol::Admin(AdminRequest::V0(req)) => { + { + BROKER + .read() + .await + .authorize( + &self + .bind_addresses + .ok_or(ProtocolError::BrokerError)?, + Authorization::Admin(req.admin_user), + ) + .await?; + } + // PROCESS AdminRequest and send back AdminResponse + let ser = serde_bare::to_vec(&req.content)?; + + let verif = verify(&ser, req.sig, req.admin_user); + if verif.is_err() { + let result: ProtocolError = verif.unwrap_err().into(); + return Err(result); + } else { + self.state = FSMstate::Closing; + return Ok(StepReply::Responder(msg_opt.unwrap())); + } + } + _ => return Err(ProtocolError::InvalidState), + } + } + } + } + FSMstate::AdminRequest => { + // CLIENT side receiving AdminResponse + if let Some(msg) = msg_opt { + if self.dir.is_server() || msg.type_id() != TypeId::of::() { + return Err(ProtocolError::InvalidState); + } + return Ok(StepReply::Response(msg)); + } + } + FSMstate::ExtRequest => { + // CLIENT side receiving ExtResponse + if let Some(msg) = msg_opt { + if self.dir.is_server() || msg.type_id() != TypeId::of::() { + return Err(ProtocolError::InvalidState); + } + return Ok(StepReply::Response(msg)); + } + } + FSMstate::ClientHello => { + if let Some(msg) = msg_opt.as_ref() { + if !self.dir.is_server() { + if let ProtocolMessage::ServerHello(hello) = msg { + if let StartConfig::Client(client_config) = + self.config.as_ref().unwrap() + { + let ClientInfo::V0(info) = &client_config.info; + let user_pub = client_config.user_priv.to_pub(); + let client_pub = client_config.client_priv.to_pub(); + let content = ClientAuthContentV0 { + user: user_pub, + client: client_pub, + // Nonce from ServerHello + nonce: hello.nonce().clone(), + info: info.clone(), + registration: client_config.registration, + }; + let ser = 
serde_bare::to_vec(&content)?; + let sig = sign(&client_config.user_priv, &user_pub, &ser)?; + let client_sig = + sign(&client_config.client_priv, &client_pub, &ser)?; + let client_auth = ClientAuth::V0(ClientAuthV0 { + content, + // Signature by user key + sig, + client_sig, + }); + + self.state = FSMstate::ClientAuth; + self.send(client_auth.into()).await?; + + return Ok(StepReply::NONE); + } + } + } + } + } + FSMstate::ServerHello => { + #[cfg(not(target_arch = "wasm32"))] + if let Some(msg) = msg_opt.as_ref() { + if self.dir.is_server() { + if let ProtocolMessage::ClientAuth(client_auth) = msg { + if *client_auth.nonce() != self.nonce_for_hello { + return Err(ProtocolError::InvalidNonce); + } + + let ser = serde_bare::to_vec(&client_auth.content_v0())?; + + let result; //= ProtocolError::NoError; + let verif = verify(&ser, client_auth.sig(), client_auth.user()); + if verif.is_err() { + result = verif.unwrap_err().into(); + } else { + let (local_bind_address, remote_bind_address) = + self.bind_addresses.ok_or(ProtocolError::BrokerError)?; + result = BROKER + .write() + .await + .attach_and_authorize_peer_id( + remote_bind_address, + local_bind_address, + *self.remote.unwrap().slice(), + Some(client_auth.content_v0()), + self, + ) + .await + .err() + .unwrap_or(ProtocolError::NoError); + } + let auth_result = AuthResult::V0(AuthResultV0 { + result: result.clone() as u16, + metadata: vec![], + }); + self.send(auth_result.into()).await?; + + if result.is_err() { + return Err(result); + } + log_debug!("AUTHENTICATION SUCCESSFUL ! waiting for requests on the server side"); + self.state = FSMstate::AuthResult; + return Ok(StepReply::NONE); + } + } + } + } + FSMstate::ClientAuth => { + if let Some(msg) = msg_opt.as_ref() { + if !self.dir.is_server() { + if let ProtocolMessage::AuthResult(auth_res) = msg { + if let StartConfig::Client(_client_config) = + self.config.as_ref().unwrap() + { + if auth_res.result() != 0 { + return Err(ProtocolError::AccessDenied); + } + + self.state = FSMstate::AuthResult; + + log_debug!("AUTHENTICATION SUCCESSFUL ! waiting for requests on the client side"); + + // we notify the actor "Connecting" that the connection is ready + let mut lock = self.actors.lock().await; + let exists = lock.remove(&0); + match exists { + Some(mut actor_sender) => { + let _ = actor_sender.send(ConnectionCommand::ReEnter).await; + } + _ => {} + } + + return Ok(StepReply::NONE); + } + } + } + } + } + FSMstate::AppHello2 => { + if let Some(msg) = msg_opt { + if msg.type_id() != TypeId::of::() { + return Err(ProtocolError::InvalidState); + } + match msg.id() { + Some(id) => { + if self.dir.is_server() && id > 0 || !self.dir.is_server() && id < 0 { + return Ok(StepReply::Responder(msg)); + } else if id != 0 { + return Ok(StepReply::Response(msg)); + } + } + None => return Err(ProtocolError::InvalidMessage), + } + } + } + FSMstate::AuthResult | FSMstate::Local0 => { + if let Some(msg) = msg_opt { + if msg.type_id() != TypeId::of::() { + return Err(ProtocolError::InvalidState); + } + match msg.id() { + Some(id) => { + if self.dir.is_server() && id > 0 || !self.dir.is_server() && id < 0 { + return Ok(StepReply::Responder(msg)); + } else if id != 0 { + return Ok(StepReply::Response(msg)); + } + } + None => { + if let ProtocolMessage::ClientMessage(cm) = msg { + let overlay = cm.overlay_id(); + match cm { + ClientMessage::V0(o) => match o.content { + ClientMessageContentV0::ForwardedEvent(event) => { + let _ = BROKER + .read() + .await + .get_local_broker()? 
+                                                .send(LocalBrokerMessage::Deliver {
+                                                    event,
+                                                    overlay,
+                                                    user: self.user_id()?,
+                                                })
+                                                .await;
+                                            return Ok(StepReply::NONE);
+                                        },
+                                        ClientMessageContentV0::InboxReceive{msg, from_queue} => {
+                                            let _ = BROKER
+                                                .read()
+                                                .await
+                                                .get_local_broker()?
+                                                .send(LocalBrokerMessage::Inbox {
+                                                    msg,
+                                                    user_id: self.user_id()?,
+                                                    from_queue
+                                                })
+                                                .await;
+                                            return Ok(StepReply::NONE);
+                                        }
+                                        _ => {},
+                                    },
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        log_err!("reached end of FSM");
+        Err(ProtocolError::InvalidState)
+    }
+}
+
+#[derive(Debug)]
+pub struct ConnectionBase {
+    pub(crate) fsm: Option<Arc<Mutex<NoiseFSM>>>,
+
+    sender: Option<Receiver<ConnectionCommand>>,
+    receiver: Option<Sender<ConnectionCommand>>,
+    sender_tx: Option<Sender<ConnectionCommand>>,
+    //receiver_tx: Option<Sender<ConnectionCommand>>,
+    shutdown: Option<Receiver<Either<NetError, X25519PrivKey>>>,
+    shutdown_sender: Option<Sender<Either<NetError, X25519PrivKey>>>,
+    dir: ConnectionDir,
+    next_request_id: SequenceGenerator,
+    tp: TransportProtocol,
+
+    actors: Arc<Mutex<HashMap<i64, Sender<ConnectionCommand>>>>,
+}
+
+impl ConnectionBase {
+    pub fn create_local_transport_pipe(user: UserId, client_peer_id: DirectPeerId) -> (Self, Self) {
+        let mut client_cnx = Self::new(ConnectionDir::Client, TransportProtocol::Local);
+        let mut server_cnx = Self::new(ConnectionDir::Server, TransportProtocol::Local);
+
+        let (sender_tx, sender_rx) = mpsc::unbounded();
+        let (receiver_tx, receiver_rx) = mpsc::unbounded();
+
+        // SETTING UP THE CLIENT
+        client_cnx.sender_tx = Some(sender_tx.clone());
+
+        let fsm = Arc::new(Mutex::new(NoiseFSM::new(
+            None,
+            client_cnx.tp,
+            client_cnx.dir.clone(),
+            Arc::clone(&client_cnx.actors),
+            sender_tx.clone(),
+            None,
+            None,
+        )));
+        client_cnx.fsm = Some(Arc::clone(&fsm));
+
+        spawn_and_log_error(Self::read_loop(
+            receiver_tx.clone(),
+            receiver_rx,
+            sender_tx.clone(),
+            Arc::clone(&client_cnx.actors),
+            fsm,
+        ));
+
+        // SETTING UP THE SERVER
+        server_cnx.sender_tx = Some(receiver_tx.clone());
+
+        let mut fsm_mut = NoiseFSM::new(
+            None,
+            server_cnx.tp,
+            server_cnx.dir.clone(),
+            Arc::clone(&server_cnx.actors),
+            receiver_tx.clone(),
+            None,
+            Some(client_peer_id),
+        );
+        fsm_mut.user = Some(user);
+        let fsm = Arc::new(Mutex::new(fsm_mut));
+        server_cnx.fsm = Some(Arc::clone(&fsm));
+
+        spawn_and_log_error(Self::read_loop(
+            sender_tx,
+            sender_rx,
+            receiver_tx,
+            Arc::clone(&server_cnx.actors),
+            fsm,
+        ));
+
+        (client_cnx, server_cnx)
+    }
+
+    pub fn new(dir: ConnectionDir, tp: TransportProtocol) -> Self {
+        Self {
+            fsm: None,
+            receiver: None,
+            sender: None,
+            sender_tx: None,
+            //receiver_tx: None,
+            shutdown: None,
+            shutdown_sender: None,
+            next_request_id: SequenceGenerator::new(1),
+            dir,
+            tp,
+            actors: Arc::new(Mutex::new(HashMap::new())),
+        }
+    }
+
+    pub fn transport_protocol(&self) -> TransportProtocol {
+        self.tp
+    }
+
+    pub fn take_shutdown(&mut self) -> Receiver<Either<NetError, X25519PrivKey>> {
+        self.shutdown.take().unwrap()
+    }
+
+    pub async fn join_shutdown(&mut self) -> Result<(), NetError> {
+        match self.take_shutdown().next().await {
+            Some(Either::Left(error)) => Err(error),
+            Some(Either::Right(_)) => Ok(()),
+            None => Ok(()),
+        }
+    }
+
+    pub fn release_shutdown(&mut self) {
+        self.shutdown_sender = None;
+    }
+
+    // only used by accept
+    pub async fn reset_shutdown(&mut self, remote_peer_id: X25519PrivKey) {
+        let _ = self
+            .shutdown_sender
+            .as_ref()
+            .unwrap()
+            .send(Either::Right(remote_peer_id))
+            .await;
+    }
+
+    pub fn set_shutdown(&mut self) -> Sender<Either<NetError, X25519PrivKey>> {
+        let (shutdown_sender, shutdown_receiver) =
+            mpsc::unbounded::<Either<NetError, X25519PrivKey>>();
+        self.shutdown = Some(shutdown_receiver);
+        self.shutdown_sender = Some(shutdown_sender.clone());
+        shutdown_sender
+    }
+
+    pub fn take_sender(&mut self) -> Receiver<ConnectionCommand> {
+        self.sender.take().unwrap()
+    }
+
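+    // A minimal client-side bootstrap sketch (illustrative only; it assumes a
+    // transport task that writes out what `take_sender()` yields and feeds incoming
+    // frames into `take_receiver()`, and that `TransportProtocol::WS` plus the
+    // local/remote keys are at hand; the real wiring lives in ng-client-ws):
+    //
+    //     let mut cnx = ConnectionBase::new(ConnectionDir::Client, TransportProtocol::WS);
+    //     cnx.start_read_loop(None, Some(local_privkey), Some(remote_pubkey));
+    //     let to_wire = cnx.take_sender();     // transport writes these to the socket
+    //     let from_wire = cnx.take_receiver(); // transport pushes read frames here
+    //     let peer = cnx.probe().await?;       // or cnx.start(config).await? for a session
+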
+    pub fn take_receiver(&mut self) -> Sender<ConnectionCommand> {
+        self.receiver.take().unwrap()
+    }
+
+    pub fn guard(&mut self, dir: ConnectionDir) -> Result<(), NetError> {
+        if self.dir == dir {
+            Ok(())
+        } else {
+            Err(NetError::DirectionAlreadySet)
+        }
+    }
+
+    async fn read_loop(
+        mut receiver_tx: Sender<ConnectionCommand>,
+        mut receiver: Receiver<ConnectionCommand>,
+        mut sender: Sender<ConnectionCommand>,
+        actors: Arc<Mutex<HashMap<i64, Sender<ConnectionCommand>>>>,
+        fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> ResultSend<()> {
+        while let Some(msg) = receiver.next().await {
+            match msg {
+                ConnectionCommand::Close
+                | ConnectionCommand::Error(_)
+                | ConnectionCommand::ProtocolError(_) => {
+                    log_debug!("EXIT READ LOOP because : {:?}", msg);
+                    let mut lock = actors.lock().await;
+                    for actor in lock.values_mut() {
+                        _ = actor.send(msg.clone()).await;
+                    }
+                    break;
+                }
+                _ => {
+                    let res;
+                    if let ConnectionCommand::Msg(proto_msg) = msg {
+                        {
+                            let mut locked_fsm = fsm.lock().await;
+                            res = locked_fsm.step(Some(proto_msg)).await;
+                        }
+                    } else if msg.is_re_enter() {
+                        {
+                            let mut locked_fsm = fsm.lock().await;
+                            res = locked_fsm.step(None).await;
+                        }
+                    } else {
+                        panic!("shouldn't be here. ConnectionCommand in read_loop can only have 5 different variants")
+                    }
+
+                    match res {
+                        Err(e) => {
+                            if sender
+                                .send(ConnectionCommand::ProtocolError(e))
+                                .await
+                                .is_err()
+                            {
+                                break; //TODO test that sending a ProtocolError effectively closes the connection (with ConnectionCommand::Close)
+                            }
+                        }
+                        Ok(StepReply::CloseNow) => {
+                            let _ = sender.send(ConnectionCommand::Close).await;
+                            break;
+                        }
+                        Ok(StepReply::ReEnter) => {
+                            let _ = receiver_tx.send(ConnectionCommand::ReEnter).await;
+                        }
+                        Ok(StepReply::NONE) => {}
+                        Ok(StepReply::Responder(responder)) => {
+                            let r = responder
+                                .get_actor()
+                                .respond(responder, Arc::clone(&fsm))
+                                .await;
+                            if r.is_err() {
+                                if sender
+                                    .send(ConnectionCommand::ProtocolError(r.unwrap_err()))
+                                    .await
+                                    .is_err()
+                                {
+                                    break;
+                                }
+                            }
+                        }
+                        Ok(StepReply::Response(response)) => {
+                            let mut lock = actors.lock().await;
+                            let exists = lock.get_mut(&response.id().unwrap_or(0));
+                            match exists {
+                                Some(actor_sender) => {
+                                    if actor_sender
+                                        .send(ConnectionCommand::Msg(response))
+                                        .await
+                                        .is_err()
+                                    {
+                                        break;
+                                    }
+                                }
+                                None => {
+                                    if sender
+                                        .send(ConnectionCommand::ProtocolError(
+                                            ProtocolError::ActorError,
+                                        ))
+                                        .await
+                                        .is_err()
+                                    {
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        log_debug!("END OF READ LOOP");
+        let mut lock = actors.lock().await;
+        for actor in lock.drain() {
+            actor.1.close_channel();
+        }
+        Ok(())
+    }
+
+    pub async fn send_client_event<
+        A: Into<ProtocolMessage> + std::fmt::Debug + Sync + Send + 'static,
+    >(&self, msg: A) -> Result<(), NgError> {
+        let proto_msg: ProtocolMessage = msg.into();
+        self.fsm.as_ref().unwrap().lock().await.send(proto_msg).await?;
+        Ok(())
+    }
+
+    pub async fn request<
+        A: Into<ProtocolMessage> + std::fmt::Debug + Sync + Send + 'static,
+        B: TryFrom<ProtocolMessage, Error = ProtocolError> + std::fmt::Debug + Sync + Send + 'static,
+    >(
+        &self,
+        msg: A,
+    ) -> Result<SoS<B>, NgError> {
+        if self.fsm.is_none() {
+            return Err(NgError::ProtocolError(ProtocolError::FsmNotReady));
+        }
+
+        let mut id = self.next_request_id.next_id();
+        if self.dir == ConnectionDir::Server {
+            id = !id + 1;
+        }
+        let mut actor = Box::new(Actor::<A, B>::new(id, true));
+        self.actors.lock().await.insert(id, actor.get_receiver_tx());
+        let mut proto_msg: ProtocolMessage = msg.into();
+        proto_msg.set_id(id);
+        let res = actor
+            .request(proto_msg, Arc::clone(self.fsm.as_ref().unwrap()))
+            .await;
+        res
+    }
+
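+    // Usage sketch for `request` (the request/response type names are hypothetical,
+    // for illustration): one typed round-trip, demultiplexed by the id set above.
+    //
+    //     let res: SoS<RepoPinStatus> = cnx.request::<PinRepo, RepoPinStatus>(pin_req).await?;
+    //
+    // Client-side ids are positive; a server-side connection negates them
+    // (`!id + 1` is two's-complement negation), so the two id spaces never collide.
+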
+    // FIXME: why not use the FSM instead? looks like this is sending messages to the wire, unencrypted.
+    // Only final errors are sent this way, but it looks like even those errors should be encrypted.
+    pub async fn send(&mut self, cmd: ConnectionCommand) {
+        let _ = self.sender_tx.as_mut().unwrap().send(cmd).await;
+    }
+
+    // pub async fn inject(&mut self, cmd: ConnectionCommand) {
+    //     let _ = self.receiver_tx.as_mut().unwrap().send(cmd).await;
+    // }
+
+    // pub async fn close_streams(&mut self) {
+    //     let _ = self.receiver_tx.as_mut().unwrap().close_channel();
+    //     let _ = self.sender_tx.as_mut().unwrap().close_channel();
+    // }
+
+    pub async fn close(&mut self) {
+        log_debug!("closing...");
+        self.send(ConnectionCommand::Close).await;
+    }
+
+    pub async fn close_silently(&mut self) {
+        log_debug!("closing silently...");
+        let _ = self
+            .shutdown_sender
+            .take()
+            .unwrap()
+            .send(Either::Left(NetError::Closing))
+            .await;
+        self.send(ConnectionCommand::Close).await;
+    }
+
+    pub async fn admin<
+        A: Into<ProtocolMessage>
+            + Into<AdminRequestContentV0>
+            + std::fmt::Debug
+            + Sync
+            + Send
+            + 'static,
+    >(
+        &mut self,
+    ) -> Result<AdminResponseContentV0, ProtocolError> {
+        if !self.dir.is_server() {
+            let mut actor = Box::new(Actor::<A, AdminResponse>::new(0, true));
+            self.actors.lock().await.insert(0, actor.get_receiver_tx());
+
+            let mut receiver = actor.detach_receiver();
+            match receiver.next().await {
+                Some(ConnectionCommand::Msg(msg)) => {
+                    self.fsm
+                        .as_ref()
+                        .unwrap()
+                        .lock()
+                        .await
+                        .remove_actor(0)
+                        .await;
+                    let response: AdminResponse = msg.try_into()?;
+                    self.close().await;
+                    if response.result() == 0 {
+                        return Ok(response.content_v0());
+                    }
+                    Err(ProtocolError::try_from(response.result()).unwrap())
+                }
+                Some(ConnectionCommand::ProtocolError(e)) => Err(e),
+                Some(ConnectionCommand::Error(e)) => Err(e.into()),
+                Some(ConnectionCommand::Close) => Err(ProtocolError::Closing),
+                _ => Err(ProtocolError::ActorError),
+            }
+        } else {
+            panic!("cannot call admin on a server-side connection");
+        }
+    }
+
+    pub async fn ext<
+        A: Into<ProtocolMessage> + Into<ExtRequestContentV0> + std::fmt::Debug + Sync + Send + 'static,
+        B: TryFrom<ProtocolMessage, Error = ProtocolError> + std::fmt::Debug + Sync + Send + 'static,
+    >(
+        &mut self,
+    ) -> Result<B, NgError> {
+        if !self.dir.is_server() {
+            let mut actor = Box::new(Actor::<A, B>::new(0, true));
+            self.actors.lock().await.insert(0, actor.get_receiver_tx());
+            let mut receiver = actor.detach_receiver();
+            match receiver.next().await {
+                Some(ConnectionCommand::Msg(msg)) => {
+                    self.fsm
+                        .as_ref()
+                        .unwrap()
+                        .lock()
+                        .await
+                        .remove_actor(0)
+                        .await;
+
+                    let server_error: Result<ServerError, NgError> = (&msg).try_into();
+                    let response: B = match msg.try_into() {
+                        Ok(b) => b,
+                        Err(ProtocolError::ServerError) => {
+                            return Err(NgError::ServerError(server_error?));
+                        }
+                        Err(e) => return Err(NgError::ProtocolError(e)),
+                    };
+                    self.close().await;
+                    Ok(response)
+                }
+                Some(ConnectionCommand::ProtocolError(e)) => Err(e.into()),
+                Some(ConnectionCommand::Error(e)) => Err(ProtocolError::from(e).into()),
+                Some(ConnectionCommand::Close) => Err(ProtocolError::Closing.into()),
+                _ => Err(ProtocolError::ActorError.into()),
+            }
+        } else {
+            panic!("cannot call ext on a server-side connection");
+        }
+    }
+
+    pub async fn probe(&mut self) -> Result<Option<PubKey>, ProtocolError> {
+        if !self.dir.is_server() {
+            let config = StartConfig::Probe;
+            let mut actor = Box::new(Actor::<Probe, ProbeResponse>::new(0, true));
+            self.actors.lock().await.insert(0, actor.get_receiver_tx());
+            let res;
+            {
+                let mut fsm = self.fsm.as_ref().unwrap().lock().await;
+                fsm.config = Some(config);
+                res = fsm.step(None).await;
+            }
+            if let Err(err) = res {
+                self.send(ConnectionCommand::ProtocolError(err.clone()))
+                    .await;
+                return Err(err);
+            }
+            let mut receiver = actor.detach_receiver();
+            let mut shutdown = self.take_shutdown();
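+        // Race the probe response against a 2-second timeout and the connection's
+        // shutdown stream; whichever fires first wins, and the temporary actor 0 is
+        // removed in every branch. A wrong or missing magic yields WhereIsTheMagic.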
select! { + + res = async_std::future::timeout(std::time::Duration::from_secs(2),receiver.next()).fuse() => { + self.fsm + .as_mut() + .unwrap() + .lock() + .await + .remove_actor(0) + .await; + match res { + Ok(Some(ConnectionCommand::Msg(ProtocolMessage::ProbeResponse(res)))) => { + if res.magic == MAGIC_NG_RESPONSE { + self.close().await; + return Ok(res.peer_id); + } + } + Err(_) => {} + _ => {} + } + self.close().await; + return Err(ProtocolError::WhereIsTheMagic); + }, + _r = shutdown.next().fuse() => { + self.fsm + .as_mut() + .unwrap() + .lock() + .await + .remove_actor(0) + .await; + return Err(ProtocolError::Closing); + } + } + } else { + panic!("cannot call probe on a server-side connection"); + } + } + + pub async fn start(&mut self, config: StartConfig) -> Result<(), ProtocolError> { + // BOOTSTRAP the protocol from client-side + if !self.dir.is_server() { + let is_oneshot = config.is_oneshot(); + let res; + { + let mut fsm = self.fsm.as_ref().unwrap().lock().await; + fsm.config = Some(config); + res = fsm.step(None).await; + } + if let Err(err) = res { + self.send(ConnectionCommand::ProtocolError(err.clone())) + .await; + Err(err) + } else if !is_oneshot { + let mut actor = Box::new(Actor::::new(0, true)); + self.actors.lock().await.insert(0, actor.get_receiver_tx()); + + let mut receiver = actor.detach_receiver(); + match receiver.next().await { + Some(ConnectionCommand::ReEnter) => Ok(()), + Some(ConnectionCommand::ProtocolError(e)) => Err(e), + Some(ConnectionCommand::Error(e)) => Err(e.into()), + Some(ConnectionCommand::Close) => Err(ProtocolError::Closing), + _ => Err(ProtocolError::ActorError), + } + } else { + Ok(()) + } + } else { + panic!("cannot call start on a server-side connection"); + } + } + + pub fn start_read_loop( + &mut self, + bind_addresses: Option<(BindAddress, BindAddress)>, + local: Option, + remote: Option, + ) { + let (sender_tx, sender_rx) = mpsc::unbounded(); + let (receiver_tx, receiver_rx) = mpsc::unbounded(); + self.sender = Some(sender_rx); + self.receiver = Some(receiver_tx.clone()); + self.sender_tx = Some(sender_tx.clone()); + //self.receiver_tx = Some(receiver_tx.clone()); + + let fsm = Arc::new(Mutex::new(NoiseFSM::new( + bind_addresses, + self.tp, + self.dir.clone(), + Arc::clone(&self.actors), + sender_tx.clone(), + local, + remote, + ))); + self.fsm = Some(Arc::clone(&fsm)); + + spawn_and_log_error(Self::read_loop( + receiver_tx, + receiver_rx, + sender_tx, + Arc::clone(&self.actors), + fsm, + )); + } +} + +#[cfg(test)] +mod test { + + use crate::actors::*; + + use ng_repo::log::*; + use std::any::{Any, TypeId}; + + #[async_std::test] + pub async fn test_connection() {} + + #[async_std::test] + pub async fn test_typeid() { + log_debug!( + "{:?}", + ClientHello::Noise3(Noise::V0(NoiseV0 { data: vec![] })).type_id() + ); + let a = Noise::V0(NoiseV0 { data: [].to_vec() }); + log_debug!("{:?}", a.type_id()); + log_debug!("{:?}", TypeId::of::()); + log_debug!("{:?}", ClientHello::Local.type_id()); + log_debug!("{:?}", TypeId::of::()); + } +} diff --git a/ng-net/src/lib.rs b/ng-net/src/lib.rs new file mode 100644 index 0000000..2c58b79 --- /dev/null +++ b/ng-net/src/lib.rs @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
+*/
+
+pub mod types;
+
+#[doc(hidden)]
+pub mod app_protocol;
+
+pub mod broker;
+
+pub mod server_broker;
+
+#[doc(hidden)]
+pub mod connection;
+
+pub mod actor;
+
+pub mod actors;
+
+pub mod utils;
+
+#[doc(hidden)]
+pub mod tests;
+
+#[doc(hidden)]
+pub mod bsps;
+
+#[doc(hidden)]
+pub static NG_BOOTSTRAP_LOCAL_PATH: &str = "/.ng_bootstrap";
+
+#[cfg(debug_assertions)]
+#[doc(hidden)]
+pub static WS_PORT: u16 = 14400;
+
+#[cfg(not(debug_assertions))]
+#[doc(hidden)]
+pub static WS_PORT: u16 = 80;
+
+#[doc(hidden)]
+pub static WS_PORT_ALTERNATE: [u16; 4] = [14400, 28800, 43200, 57600];
+
+#[doc(hidden)]
+pub static WS_PORT_ALTERNATE_SUPERUSER: u16 = 144;
+
+#[doc(hidden)]
+pub static WS_PORT_REVERSE_PROXY: u16 = 1440;
diff --git a/ng-net/src/server_broker.rs b/ng-net/src/server_broker.rs
new file mode 100644
index 0000000..74eee21
--- /dev/null
+++ b/ng-net/src/server_broker.rs
@@ -0,0 +1,157 @@
+/*
+ * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+ * All rights reserved.
+ * Licensed under the Apache License, Version 2.0
+ * <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+ * or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+ * at your option. All files in the project carrying such
+ * notice may not be copied, modified, or distributed except
+ * according to those terms.
+*/
+
+//! Trait for ServerBroker
+
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use async_std::sync::Mutex;
+
+use ng_repo::block_storage::BlockStorage;
+use ng_repo::errors::*;
+use ng_repo::types::*;
+
+use crate::app_protocol::{AppRequest, AppSessionStart, AppSessionStartResponse, AppSessionStop};
+use crate::broker::ClientPeerId;
+use crate::connection::NoiseFSM;
+use crate::types::*;
+use crate::utils::Receiver;
+
+#[async_trait::async_trait]
+pub trait IServerBroker: Send + Sync {
+    async fn remove_rendezvous(&self, rendezvous: &SymKey);
+    async fn put_wallet_export(&self, rendezvous: SymKey, export: ExportedWallet);
+    async fn get_wallet_export(&self, rendezvous: SymKey) -> Result<ExportedWallet, ServerError>;
+    async fn put_wallet_at_rendezvous(
+        &self,
+        rendezvous: SymKey,
+        export: ExportedWallet,
+    ) -> Result<(), ServerError>;
+    async fn wait_for_wallet_at_rendezvous(
+        &self,
+        rendezvous: SymKey,
+    ) -> Receiver<Result<ExportedWallet, ServerError>>;
+    async fn inbox_post(&self, post: InboxPost) -> Result<(), ServerError>;
+    fn inbox_register(&self, user_id: UserId, registration: InboxRegister) -> Result<(), ServerError>;
+    async fn inbox_pop_for_user(&self, user: UserId) -> Result<InboxMsg, ServerError>;
+    fn get_path_users(&self) -> PathBuf;
+    fn get_block_storage(&self) -> Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>;
+    fn put_block(&self, overlay_id: &OverlayId, block: Block) -> Result<(), ServerError>;
+    fn has_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result<(), ServerError>;
+    fn get_block(&self, overlay_id: &OverlayId, block_id: &BlockId) -> Result<Block, ServerError>;
+    async fn create_user(&self, broker_id: &DirectPeerId) -> Result<UserId, ProtocolError>;
+    fn get_user(&self, user_id: PubKey) -> Result<bool, ProtocolError>;
+    fn has_no_user(&self) -> Result<bool, ProtocolError>;
+    fn get_user_credentials(&self, user_id: &PubKey) -> Result<Credentials, ProtocolError>;
+    fn add_user_credentials(
+        &self,
+        user_id: &PubKey,
+        credentials: &Credentials,
+    ) -> Result<(), ProtocolError>;
+    fn add_user(&self, user_id: PubKey, is_admin: bool) -> Result<(), ProtocolError>;
+    fn del_user(&self, user_id: PubKey) -> Result<(), ProtocolError>;
+    fn list_users(&self, admins: bool) -> Result<Vec<PubKey>, ProtocolError>;
+    fn list_invitations(
+        &self,
+        admin: bool,
+        unique: bool,
+        multi: bool,
+    ) -> Result<Vec<(InvitationCode, u32, Option<String>)>, ProtocolError>;
+    fn add_invitation(
+        &self,
+        invite_code: &InvitationCode,
+        expiry: u32,
+        memo: &Option<String>,
+    ) -> Result<(), ProtocolError>;
+    fn
get_invitation_type(&self, invite: [u8; 32]) -> Result; + fn remove_invitation(&self, invite: [u8; 32]) -> Result<(), ProtocolError>; + fn take_master_key(&mut self) -> Result; + async fn app_process_request( + &self, + req: AppRequest, + request_id: i64, + fsm: &Mutex, + ) -> Result<(), ServerError>; + + async fn app_session_start( + &self, + req: AppSessionStart, + remote_peer_id: DirectPeerId, + local_peer_id: DirectPeerId, + ) -> Result; + async fn app_session_stop( + &self, + req: AppSessionStop, + remote_peer_id: &DirectPeerId, + ) -> Result; + + fn next_seq_for_peer(&self, peer: &PeerId, seq: u64) -> Result<(), ServerError>; + + fn get_repo_pin_status( + &self, + overlay: &OverlayId, + repo: &RepoHash, + user_id: &UserId, + ) -> Result; + + async fn pin_repo_write( + &self, + overlay: &OverlayAccess, + repo: &RepoHash, + user_id: &UserId, + ro_topics: &Vec, + rw_topics: &Vec, + overlay_root_topic: &Option, + expose_outer: bool, + peer: &ClientPeerId, + ) -> Result; + + async fn pin_repo_read( + &self, + overlay: &OverlayId, + repo: &RepoHash, + user_id: &UserId, + ro_topics: &Vec, + peer: &ClientPeerId, + ) -> Result; + + async fn topic_sub( + &self, + overlay: &OverlayId, + repo: &RepoHash, + topic: &TopicId, + user_id: &UserId, + publisher: Option<&PublisherAdvert>, + peer: &ClientPeerId, + ) -> Result; + + fn get_commit(&self, overlay: &OverlayId, id: &ObjectId) -> Result, ServerError>; + + async fn dispatch_event( + &self, + overlay: &OverlayId, + event: Event, + user_id: &UserId, + remote_peer: &PubKey, + ) -> Result, ServerError>; + + async fn remove_all_subscriptions_of_client(&self, client: &ClientPeerId); + + fn topic_sync_req( + &self, + overlay: &OverlayId, + topic: &TopicId, + known_heads: &Vec, + target_heads: &Vec, + known_commits: &Option, + ) -> Result, ServerError>; +} diff --git a/ng-net/src/tests/file.rs b/ng-net/src/tests/file.rs new file mode 100644 index 0000000..ebfd4a2 --- /dev/null +++ b/ng-net/src/tests/file.rs @@ -0,0 +1,1435 @@ +pub const TEST: [u8; 29487] = [ + 0, 0, 0, 0, 1, 0, 0, 0, 128, 163, 230, 1, 94, 122, 113, 30, 147, 252, 87, 146, 193, 199, 154, + 156, 116, 48, 90, 60, 132, 44, 212, 181, 100, 82, 138, 212, 5, 164, 60, 61, 164, 83, 216, 88, + 3, 20, 128, 34, 22, 228, 163, 35, 113, 184, 99, 134, 170, 154, 0, 227, 0, 173, 138, 120, 9, 50, + 94, 49, 15, 219, 167, 62, 81, 203, 172, 24, 209, 154, 79, 142, 16, 242, 181, 58, 246, 29, 140, + 143, 86, 3, 120, 181, 231, 227, 126, 24, 210, 149, 163, 254, 9, 118, 128, 232, 46, 131, 56, + 229, 128, 52, 247, 9, 158, 95, 178, 254, 67, 21, 142, 239, 11, 153, 171, 199, 137, 69, 177, + 244, 95, 108, 168, 15, 19, 67, 42, 230, 224, 28, 198, 109, 96, 254, 176, 71, 221, 139, 59, 129, + 168, 241, 194, 98, 114, 11, 33, 218, 23, 218, 191, 128, 10, 216, 177, 183, 34, 213, 230, 253, + 138, 211, 143, 85, 206, 31, 167, 157, 78, 171, 215, 64, 101, 25, 55, 157, 249, 204, 114, 116, + 184, 210, 52, 243, 219, 108, 92, 45, 110, 37, 236, 32, 55, 119, 102, 228, 105, 117, 174, 93, + 164, 156, 144, 22, 221, 40, 245, 79, 62, 120, 186, 126, 224, 31, 28, 215, 202, 146, 19, 23, 23, + 12, 175, 24, 21, 210, 148, 159, 228, 81, 188, 121, 122, 41, 164, 68, 0, 220, 240, 79, 87, 21, + 87, 148, 121, 146, 170, 129, 156, 52, 140, 6, 6, 63, 219, 55, 196, 198, 98, 11, 121, 208, 239, + 89, 86, 188, 172, 225, 78, 234, 223, 226, 101, 247, 233, 32, 156, 244, 215, 208, 248, 192, 87, + 193, 211, 226, 18, 172, 11, 34, 122, 134, 39, 55, 85, 243, 206, 123, 219, 249, 38, 52, 73, 150, + 115, 36, 0, 121, 78, 167, 165, 189, 240, 123, 188, 101, 113, 70, 177, 245, 
207, 23, 227, 208, + 123, 38, 52, 150, 168, 128, 52, 6, 174, 201, 220, 76, 201, 31, 58, 55, 191, 12, 152, 126, 143, + 36, 114, 233, 188, 204, 57, 221, 215, 12, 133, 175, 212, 100, 108, 163, 62, 48, 209, 173, 50, + 255, 130, 229, 131, 196, 46, 162, 152, 64, 155, 163, 21, 247, 155, 127, 5, 32, 206, 1, 135, 8, + 194, 171, 147, 39, 168, 159, 219, 12, 202, 13, 138, 160, 9, 240, 172, 51, 21, 116, 128, 129, + 62, 88, 93, 219, 63, 66, 27, 180, 41, 15, 67, 170, 115, 7, 190, 9, 201, 248, 165, 152, 49, 48, + 141, 163, 39, 165, 203, 216, 30, 87, 235, 111, 51, 36, 166, 88, 37, 18, 188, 237, 247, 215, 33, + 66, 9, 218, 54, 72, 65, 94, 49, 146, 130, 119, 14, 32, 253, 165, 56, 135, 166, 172, 108, 65, + 185, 21, 229, 42, 222, 221, 145, 13, 43, 23, 149, 116, 2, 149, 209, 1, 11, 224, 188, 107, 71, + 145, 246, 50, 155, 181, 207, 196, 212, 110, 166, 163, 22, 33, 125, 50, 219, 229, 70, 41, 51, + 114, 72, 178, 16, 239, 117, 162, 91, 191, 9, 138, 175, 9, 12, 207, 37, 110, 163, 217, 217, 47, + 172, 149, 177, 209, 171, 126, 117, 17, 93, 37, 166, 159, 102, 224, 108, 20, 133, 31, 94, 134, + 121, 146, 214, 148, 143, 233, 63, 133, 144, 14, 154, 244, 7, 223, 8, 81, 201, 127, 12, 234, + 252, 4, 248, 102, 120, 200, 247, 55, 201, 128, 253, 194, 15, 180, 60, 34, 10, 150, 101, 92, + 204, 57, 143, 19, 165, 140, 240, 138, 251, 254, 121, 134, 113, 248, 217, 232, 168, 43, 144, + 130, 9, 6, 108, 226, 213, 70, 119, 117, 21, 196, 230, 112, 54, 160, 153, 178, 84, 153, 208, 15, + 73, 191, 236, 194, 28, 118, 179, 31, 222, 73, 37, 76, 232, 123, 77, 83, 201, 164, 161, 188, + 174, 144, 85, 39, 105, 16, 177, 26, 131, 66, 23, 94, 96, 212, 99, 251, 204, 166, 238, 129, 189, + 54, 157, 167, 174, 99, 119, 48, 14, 143, 225, 136, 72, 210, 181, 126, 4, 141, 70, 96, 133, 23, + 182, 50, 156, 0, 158, 52, 247, 3, 247, 42, 163, 93, 11, 158, 178, 109, 207, 140, 156, 210, 5, + 93, 95, 168, 50, 36, 229, 49, 5, 178, 184, 52, 27, 3, 122, 35, 185, 115, 218, 122, 173, 63, 93, + 79, 70, 44, 98, 153, 117, 249, 57, 82, 124, 33, 204, 22, 230, 236, 34, 140, 44, 31, 246, 83, + 21, 24, 195, 131, 151, 189, 40, 76, 5, 243, 178, 155, 92, 244, 22, 99, 187, 125, 156, 179, 153, + 237, 208, 155, 22, 202, 41, 143, 99, 1, 60, 182, 167, 19, 31, 84, 185, 129, 78, 15, 163, 254, + 182, 2, 100, 173, 113, 20, 163, 66, 230, 189, 173, 130, 119, 50, 78, 230, 236, 17, 36, 220, + 135, 42, 100, 193, 43, 151, 81, 7, 170, 18, 82, 116, 160, 68, 80, 136, 95, 106, 37, 166, 53, 5, + 222, 180, 61, 219, 97, 134, 146, 63, 136, 167, 12, 150, 13, 92, 148, 243, 234, 33, 45, 200, + 235, 89, 7, 159, 204, 75, 253, 53, 53, 136, 230, 101, 227, 61, 223, 46, 76, 200, 65, 155, 68, + 97, 15, 153, 88, 12, 191, 211, 8, 3, 128, 221, 253, 52, 244, 240, 36, 60, 210, 65, 16, 63, 161, + 225, 120, 184, 109, 1, 32, 124, 100, 54, 6, 182, 95, 139, 218, 108, 2, 42, 133, 66, 91, 26, + 212, 149, 104, 68, 171, 205, 150, 180, 86, 143, 167, 134, 46, 125, 54, 247, 152, 86, 71, 68, + 193, 196, 49, 74, 184, 43, 159, 165, 251, 209, 116, 51, 192, 145, 203, 14, 97, 117, 96, 78, 26, + 66, 207, 199, 140, 200, 213, 36, 234, 144, 23, 18, 80, 186, 40, 76, 180, 164, 78, 245, 203, + 233, 111, 74, 70, 57, 31, 100, 80, 186, 56, 44, 99, 156, 227, 248, 77, 9, 126, 59, 115, 153, + 222, 185, 217, 62, 31, 124, 183, 235, 100, 142, 92, 86, 38, 19, 241, 21, 188, 138, 115, 136, + 57, 237, 70, 73, 112, 5, 181, 79, 204, 90, 9, 211, 97, 57, 240, 71, 9, 244, 254, 11, 99, 196, + 93, 216, 152, 49, 72, 138, 114, 192, 204, 151, 110, 5, 12, 187, 36, 61, 128, 19, 116, 135, 217, + 24, 4, 220, 253, 43, 128, 136, 141, 202, 191, 204, 16, 58, 54, 101, 225, 200, 
146, 229, 143, + 200, 210, 81, 5, 18, 58, 192, 38, 39, 181, 114, 35, 56, 6, 143, 55, 174, 25, 21, 148, 5, 102, + 87, 200, 30, 233, 152, 210, 191, 70, 248, 82, 234, 170, 171, 217, 252, 23, 202, 219, 137, 237, + 181, 93, 39, 179, 187, 12, 152, 43, 120, 8, 240, 68, 164, 89, 175, 136, 201, 102, 225, 40, 222, + 24, 207, 112, 108, 253, 249, 34, 0, 148, 188, 31, 50, 118, 169, 11, 106, 153, 218, 182, 123, + 228, 215, 224, 34, 142, 201, 31, 180, 52, 46, 103, 161, 243, 56, 117, 0, 69, 113, 208, 6, 255, + 18, 230, 166, 94, 110, 25, 91, 87, 66, 74, 116, 186, 253, 3, 224, 221, 41, 64, 212, 210, 96, + 254, 130, 137, 130, 38, 240, 210, 130, 235, 190, 140, 157, 163, 171, 134, 15, 25, 145, 214, + 154, 0, 252, 186, 5, 227, 19, 78, 124, 37, 252, 168, 101, 85, 184, 128, 240, 82, 41, 31, 130, + 56, 161, 67, 246, 208, 255, 46, 18, 136, 217, 167, 31, 4, 135, 171, 191, 109, 138, 236, 211, 1, + 1, 225, 142, 113, 83, 7, 133, 35, 28, 70, 182, 45, 99, 51, 184, 135, 218, 127, 241, 41, 139, + 200, 58, 117, 122, 49, 214, 6, 94, 121, 1, 196, 106, 124, 95, 130, 75, 65, 28, 221, 107, 155, + 39, 145, 89, 5, 241, 87, 125, 228, 246, 54, 209, 161, 41, 154, 228, 141, 16, 140, 78, 130, 95, + 219, 127, 54, 57, 168, 15, 177, 15, 181, 118, 172, 134, 193, 45, 251, 249, 5, 202, 216, 3, 60, + 35, 76, 136, 201, 28, 47, 19, 118, 246, 3, 202, 58, 120, 207, 49, 219, 159, 64, 110, 83, 66, + 101, 132, 39, 22, 69, 230, 191, 192, 186, 8, 178, 134, 116, 108, 239, 230, 61, 60, 238, 123, + 125, 245, 35, 119, 204, 224, 84, 11, 108, 247, 232, 146, 226, 53, 231, 6, 13, 51, 108, 139, 9, + 48, 5, 214, 188, 38, 219, 159, 142, 49, 112, 251, 178, 205, 116, 67, 47, 11, 17, 148, 187, 92, + 175, 65, 222, 180, 15, 28, 1, 141, 117, 26, 60, 97, 218, 71, 155, 176, 156, 120, 7, 207, 13, + 234, 89, 202, 167, 254, 141, 194, 34, 223, 186, 202, 79, 67, 197, 7, 149, 72, 247, 187, 102, + 28, 230, 92, 132, 239, 106, 133, 228, 211, 129, 124, 46, 240, 85, 148, 160, 56, 237, 232, 108, + 233, 174, 239, 161, 254, 113, 28, 169, 71, 108, 38, 152, 119, 47, 201, 190, 15, 88, 113, 2, 73, + 38, 33, 29, 135, 110, 65, 199, 20, 178, 90, 97, 24, 176, 240, 4, 220, 150, 97, 64, 176, 77, 61, + 25, 23, 49, 140, 55, 193, 202, 186, 125, 221, 191, 84, 49, 11, 120, 13, 29, 119, 199, 126, 186, + 71, 182, 67, 87, 202, 67, 154, 114, 158, 232, 89, 125, 26, 58, 237, 43, 24, 11, 44, 223, 106, + 13, 171, 191, 187, 135, 223, 175, 111, 57, 248, 247, 39, 124, 11, 211, 238, 40, 165, 97, 49, + 76, 223, 40, 167, 204, 254, 138, 28, 46, 106, 218, 3, 47, 210, 158, 10, 39, 14, 111, 239, 235, + 235, 86, 72, 215, 116, 86, 51, 51, 33, 216, 15, 226, 119, 235, 130, 117, 92, 254, 132, 230, 53, + 72, 15, 110, 143, 201, 51, 143, 39, 68, 115, 105, 146, 172, 130, 218, 109, 183, 8, 246, 217, + 178, 156, 168, 154, 37, 49, 251, 29, 0, 219, 131, 201, 185, 21, 134, 80, 152, 157, 114, 98, + 138, 197, 140, 236, 60, 132, 173, 208, 232, 233, 185, 19, 143, 254, 177, 71, 145, 187, 177, + 197, 192, 133, 161, 126, 157, 51, 88, 44, 131, 96, 8, 236, 116, 145, 143, 174, 215, 103, 78, + 103, 214, 3, 169, 17, 156, 127, 235, 52, 18, 210, 31, 179, 227, 46, 92, 106, 14, 66, 187, 86, + 239, 176, 16, 135, 25, 76, 188, 24, 108, 79, 58, 61, 139, 9, 168, 244, 219, 254, 172, 91, 199, + 156, 19, 156, 12, 172, 85, 11, 84, 242, 122, 64, 207, 121, 110, 168, 223, 248, 30, 245, 125, + 153, 125, 51, 201, 119, 228, 134, 59, 39, 156, 50, 78, 82, 96, 83, 221, 119, 57, 139, 143, 215, + 1, 209, 83, 95, 252, 235, 148, 135, 86, 91, 150, 127, 43, 187, 31, 211, 14, 43, 249, 51, 142, + 97, 110, 152, 220, 95, 21, 83, 1, 71, 192, 36, 55, 56, 198, 170, 107, 105, 
57, 111, 111, 113, + 241, 129, 192, 175, 40, 7, 202, 18, 134, 96, 31, 203, 172, 110, 209, 245, 186, 240, 33, 28, 6, + 135, 89, 245, 73, 200, 24, 203, 31, 196, 159, 174, 113, 169, 59, 43, 126, 46, 174, 213, 245, + 224, 200, 186, 92, 120, 36, 177, 195, 121, 165, 195, 238, 20, 111, 19, 222, 216, 176, 94, 35, + 221, 88, 222, 19, 17, 160, 248, 128, 192, 255, 234, 190, 50, 96, 147, 233, 18, 49, 156, 185, + 38, 13, 172, 166, 116, 84, 208, 114, 142, 151, 38, 27, 104, 78, 207, 183, 243, 13, 175, 157, + 215, 63, 241, 89, 152, 25, 32, 255, 168, 8, 66, 60, 41, 42, 170, 107, 166, 185, 118, 50, 22, + 153, 124, 156, 167, 4, 142, 113, 123, 195, 54, 202, 149, 132, 163, 196, 22, 191, 79, 29, 185, + 163, 249, 212, 229, 51, 249, 116, 167, 157, 236, 80, 120, 190, 106, 129, 55, 75, 236, 137, 248, + 53, 205, 125, 129, 251, 148, 63, 255, 21, 118, 23, 66, 78, 212, 7, 147, 157, 187, 224, 197, + 230, 98, 242, 243, 124, 93, 150, 44, 167, 234, 42, 163, 9, 246, 211, 41, 154, 86, 39, 156, 67, + 58, 123, 3, 236, 16, 76, 103, 224, 68, 5, 245, 80, 129, 205, 189, 91, 254, 34, 38, 103, 243, + 112, 180, 81, 149, 181, 172, 5, 63, 211, 247, 1, 107, 239, 48, 140, 139, 230, 222, 173, 144, + 142, 246, 249, 243, 236, 152, 54, 233, 93, 166, 51, 163, 37, 36, 195, 95, 162, 169, 242, 149, + 167, 155, 38, 108, 108, 1, 8, 199, 79, 43, 238, 247, 241, 229, 31, 131, 109, 36, 60, 194, 24, + 161, 164, 231, 181, 247, 114, 100, 148, 196, 115, 80, 61, 239, 63, 237, 140, 18, 165, 242, 243, + 100, 55, 230, 68, 229, 82, 152, 238, 188, 17, 200, 175, 9, 214, 41, 172, 171, 137, 205, 165, + 249, 136, 216, 160, 171, 32, 215, 244, 116, 154, 155, 210, 247, 60, 97, 88, 26, 182, 40, 155, + 230, 254, 211, 205, 136, 185, 13, 201, 21, 81, 27, 9, 27, 181, 191, 70, 72, 234, 241, 35, 234, + 55, 29, 7, 35, 235, 45, 200, 155, 82, 52, 1, 235, 97, 179, 192, 219, 216, 76, 250, 109, 38, + 214, 104, 155, 72, 213, 167, 251, 42, 37, 176, 46, 205, 30, 254, 153, 120, 33, 135, 203, 4, 74, + 205, 72, 221, 42, 108, 143, 122, 172, 196, 37, 97, 166, 51, 132, 120, 134, 138, 14, 135, 10, + 47, 104, 89, 4, 223, 165, 59, 16, 10, 124, 240, 242, 199, 146, 234, 1, 1, 51, 243, 54, 214, 58, + 141, 177, 83, 217, 31, 132, 48, 40, 77, 23, 225, 217, 81, 83, 192, 101, 251, 38, 30, 171, 191, + 78, 151, 190, 30, 239, 110, 51, 165, 133, 130, 216, 202, 97, 39, 150, 88, 182, 28, 33, 84, 154, + 85, 195, 201, 248, 215, 232, 62, 123, 38, 34, 79, 234, 209, 28, 155, 223, 249, 152, 126, 99, 8, + 172, 78, 134, 24, 3, 159, 34, 82, 154, 43, 83, 86, 199, 178, 17, 138, 255, 116, 113, 166, 69, + 170, 126, 32, 188, 181, 47, 159, 248, 43, 255, 19, 245, 7, 196, 233, 131, 84, 242, 165, 97, + 123, 6, 122, 112, 113, 27, 46, 21, 193, 189, 236, 252, 230, 91, 177, 202, 38, 125, 20, 137, 66, + 8, 140, 225, 225, 138, 192, 161, 144, 181, 183, 217, 138, 152, 119, 92, 7, 198, 19, 240, 115, + 188, 198, 154, 232, 193, 93, 201, 142, 254, 77, 87, 159, 118, 103, 193, 139, 243, 58, 184, 195, + 141, 195, 117, 238, 122, 124, 167, 16, 74, 35, 131, 79, 177, 23, 99, 110, 140, 100, 44, 37, + 104, 174, 218, 34, 135, 63, 176, 215, 46, 13, 79, 78, 68, 230, 135, 253, 211, 178, 2, 32, 105, + 165, 14, 5, 163, 231, 74, 147, 187, 45, 1, 164, 186, 57, 4, 16, 132, 21, 60, 75, 213, 67, 214, + 74, 120, 185, 188, 254, 84, 63, 182, 169, 62, 76, 245, 203, 200, 222, 89, 121, 41, 213, 163, + 134, 133, 239, 66, 163, 149, 235, 250, 111, 42, 244, 138, 75, 220, 7, 109, 78, 85, 120, 75, 19, + 254, 125, 48, 89, 220, 2, 58, 235, 243, 126, 228, 49, 178, 122, 54, 212, 223, 105, 232, 0, 184, + 15, 176, 114, 19, 94, 168, 38, 12, 35, 214, 178, 11, 44, 186, 29, 
102, 236, 30, 169, 166, 96, + 189, 37, 54, 218, 61, 88, 193, 142, 74, 39, 237, 184, 249, 41, 60, 162, 231, 186, 169, 76, 229, + 212, 146, 51, 224, 172, 121, 179, 44, 152, 90, 54, 139, 78, 164, 177, 57, 84, 192, 161, 40, 46, + 231, 23, 140, 250, 11, 51, 216, 198, 202, 89, 50, 202, 51, 28, 242, 10, 189, 208, 192, 37, 79, + 123, 10, 139, 136, 16, 235, 116, 184, 221, 187, 66, 215, 168, 95, 7, 224, 55, 251, 3, 6, 209, + 120, 196, 142, 17, 29, 64, 16, 129, 205, 46, 206, 183, 213, 241, 73, 113, 193, 136, 107, 171, + 88, 105, 229, 224, 120, 77, 112, 229, 8, 148, 96, 205, 149, 158, 70, 43, 240, 238, 37, 241, 69, + 76, 157, 114, 89, 8, 212, 241, 86, 207, 193, 146, 152, 209, 42, 71, 181, 104, 224, 132, 108, + 244, 48, 0, 40, 97, 149, 73, 82, 121, 201, 186, 96, 114, 54, 236, 200, 48, 116, 43, 35, 185, + 212, 147, 199, 179, 82, 113, 15, 19, 29, 201, 218, 59, 56, 219, 202, 139, 118, 10, 10, 38, 251, + 177, 72, 13, 228, 138, 28, 196, 212, 122, 230, 189, 72, 121, 33, 175, 4, 17, 234, 188, 84, 2, + 220, 51, 223, 19, 27, 1, 70, 41, 17, 26, 118, 228, 90, 26, 233, 39, 126, 42, 98, 91, 66, 45, + 98, 52, 24, 126, 150, 48, 43, 203, 28, 83, 173, 69, 213, 50, 179, 110, 125, 94, 78, 219, 253, + 173, 107, 233, 107, 197, 214, 195, 32, 172, 78, 107, 97, 64, 212, 106, 111, 41, 255, 240, 91, + 126, 249, 182, 96, 122, 213, 159, 59, 132, 29, 4, 10, 146, 248, 150, 224, 193, 232, 194, 105, + 205, 71, 111, 78, 8, 191, 22, 122, 230, 115, 164, 184, 237, 93, 63, 193, 73, 114, 80, 220, 109, + 43, 143, 94, 190, 165, 48, 87, 71, 236, 78, 180, 114, 232, 76, 72, 201, 92, 170, 159, 228, 112, + 36, 15, 205, 191, 47, 163, 35, 38, 175, 145, 234, 196, 7, 0, 200, 240, 128, 140, 14, 215, 123, + 72, 227, 46, 48, 82, 153, 205, 222, 13, 107, 218, 207, 82, 3, 251, 46, 143, 249, 189, 214, 151, + 107, 241, 203, 191, 44, 117, 162, 156, 182, 239, 204, 140, 253, 121, 95, 42, 48, 191, 75, 63, + 192, 136, 199, 228, 81, 144, 182, 190, 181, 52, 231, 123, 172, 224, 178, 191, 31, 60, 166, 114, + 184, 103, 117, 129, 137, 50, 41, 184, 190, 164, 160, 110, 188, 51, 139, 133, 199, 151, 240, + 214, 52, 132, 135, 235, 19, 113, 12, 39, 241, 125, 202, 63, 37, 194, 89, 254, 192, 180, 110, + 77, 166, 82, 244, 152, 60, 40, 144, 222, 75, 10, 154, 46, 252, 8, 103, 173, 10, 79, 74, 29, 62, + 226, 70, 122, 72, 151, 176, 124, 41, 169, 247, 144, 105, 75, 157, 156, 242, 148, 184, 28, 145, + 132, 81, 237, 136, 187, 82, 46, 161, 158, 230, 57, 218, 122, 70, 63, 99, 7, 7, 28, 225, 124, + 161, 147, 40, 84, 216, 49, 78, 112, 34, 171, 147, 192, 242, 149, 220, 172, 108, 36, 144, 207, + 41, 208, 59, 255, 43, 201, 165, 217, 55, 233, 71, 172, 63, 233, 88, 102, 229, 245, 97, 43, 6, + 172, 156, 254, 162, 212, 149, 105, 86, 132, 72, 55, 132, 161, 157, 177, 127, 29, 197, 229, 55, + 151, 28, 61, 170, 240, 246, 192, 60, 197, 32, 230, 25, 251, 0, 241, 84, 37, 215, 5, 60, 221, + 107, 215, 159, 155, 113, 16, 118, 1, 164, 203, 163, 247, 47, 226, 166, 92, 217, 23, 254, 212, + 114, 124, 116, 64, 3, 8, 38, 143, 71, 51, 144, 171, 79, 247, 214, 253, 206, 128, 130, 246, 7, + 42, 67, 161, 68, 155, 201, 67, 185, 121, 70, 222, 233, 59, 48, 160, 33, 193, 34, 99, 16, 224, + 45, 161, 78, 81, 138, 1, 71, 129, 97, 112, 116, 197, 187, 102, 121, 87, 250, 35, 109, 122, 147, + 15, 71, 118, 57, 49, 89, 49, 32, 60, 202, 46, 42, 80, 19, 2, 146, 50, 40, 168, 234, 160, 110, + 95, 118, 24, 82, 255, 112, 117, 206, 12, 52, 16, 216, 180, 128, 241, 192, 217, 112, 185, 35, + 142, 209, 236, 114, 7, 246, 212, 177, 252, 161, 104, 223, 101, 182, 249, 114, 232, 0, 75, 56, + 146, 216, 237, 61, 222, 58, 137, 78, 51, 248, 107, 
80, 244, 93, 241, 244, 201, 121, 19, 204, + 10, 108, 221, 124, 87, 89, 38, 67, 239, 81, 174, 153, 75, 167, 125, 122, 5, 73, 75, 149, 2, 93, + 58, 139, 39, 89, 105, 106, 111, 152, 195, 83, 182, 173, 32, 152, 132, 28, 137, 2, 30, 140, 188, + 9, 240, 39, 37, 4, 62, 122, 111, 195, 199, 145, 11, 225, 58, 241, 82, 83, 108, 230, 211, 137, + 201, 127, 11, 41, 41, 136, 62, 60, 3, 86, 225, 202, 73, 4, 169, 70, 80, 29, 247, 183, 63, 149, + 242, 201, 205, 187, 234, 144, 171, 97, 221, 124, 250, 1, 194, 230, 97, 252, 146, 253, 238, 254, + 6, 211, 119, 161, 24, 195, 133, 45, 1, 137, 83, 26, 166, 149, 224, 65, 92, 151, 165, 163, 152, + 15, 91, 238, 36, 142, 6, 51, 47, 87, 3, 157, 100, 102, 17, 9, 33, 152, 233, 69, 124, 5, 27, + 104, 123, 184, 155, 230, 167, 71, 112, 82, 130, 83, 139, 4, 209, 38, 195, 170, 71, 189, 170, + 187, 79, 20, 96, 139, 121, 184, 148, 225, 129, 20, 167, 137, 32, 135, 203, 251, 139, 223, 170, + 221, 118, 189, 248, 58, 235, 34, 14, 10, 69, 29, 72, 237, 220, 118, 198, 176, 112, 74, 44, 0, + 69, 36, 113, 23, 26, 219, 205, 189, 118, 182, 183, 106, 134, 26, 246, 193, 111, 11, 123, 239, + 253, 64, 82, 113, 194, 238, 164, 18, 186, 130, 40, 8, 96, 61, 176, 226, 164, 232, 122, 248, + 211, 42, 107, 4, 130, 144, 107, 123, 152, 59, 68, 208, 10, 184, 95, 217, 153, 251, 65, 37, 0, + 79, 140, 43, 233, 235, 76, 246, 162, 192, 47, 191, 95, 60, 78, 118, 184, 205, 243, 130, 181, + 35, 60, 218, 144, 213, 78, 201, 188, 107, 127, 239, 189, 168, 67, 118, 48, 61, 244, 231, 234, + 67, 119, 121, 1, 220, 26, 160, 99, 248, 141, 246, 244, 107, 17, 43, 195, 224, 243, 90, 209, + 121, 89, 156, 158, 113, 116, 244, 78, 47, 170, 130, 126, 238, 99, 208, 113, 4, 254, 106, 14, + 48, 226, 246, 20, 114, 235, 112, 7, 189, 96, 98, 207, 247, 206, 245, 147, 225, 1, 250, 48, 65, + 79, 220, 167, 64, 109, 42, 51, 206, 152, 32, 188, 100, 41, 101, 138, 133, 174, 105, 110, 61, + 236, 10, 156, 131, 146, 86, 154, 25, 170, 234, 9, 232, 108, 56, 222, 255, 152, 80, 156, 160, + 235, 203, 16, 228, 151, 172, 147, 204, 147, 88, 111, 48, 155, 62, 37, 53, 126, 85, 206, 171, + 220, 139, 205, 123, 1, 232, 63, 166, 245, 155, 249, 225, 229, 232, 249, 45, 251, 175, 64, 141, + 163, 51, 111, 48, 1, 138, 164, 213, 101, 120, 211, 160, 48, 93, 226, 174, 214, 163, 11, 47, + 144, 65, 93, 191, 200, 90, 61, 165, 117, 16, 127, 193, 173, 62, 159, 158, 253, 119, 108, 77, + 97, 19, 246, 182, 214, 105, 253, 222, 31, 125, 150, 86, 4, 159, 39, 219, 99, 54, 204, 238, 86, + 242, 238, 80, 150, 224, 175, 231, 82, 67, 76, 32, 77, 227, 233, 31, 91, 106, 169, 203, 130, + 214, 129, 153, 2, 169, 129, 26, 238, 125, 120, 225, 231, 190, 204, 2, 132, 241, 244, 45, 5, 27, + 25, 18, 193, 185, 64, 170, 156, 78, 58, 124, 216, 27, 109, 241, 221, 42, 27, 193, 150, 98, 37, + 114, 159, 198, 45, 89, 161, 63, 38, 168, 223, 117, 65, 193, 213, 166, 246, 78, 240, 58, 200, + 114, 5, 210, 210, 210, 185, 2, 240, 5, 84, 123, 194, 183, 248, 142, 181, 4, 228, 73, 150, 15, + 182, 224, 217, 212, 142, 85, 159, 93, 2, 248, 195, 139, 223, 70, 49, 6, 130, 44, 146, 12, 78, + 138, 202, 44, 141, 55, 115, 102, 182, 33, 9, 129, 32, 64, 70, 251, 72, 11, 84, 127, 117, 253, + 197, 102, 129, 113, 24, 58, 229, 74, 128, 126, 51, 101, 84, 100, 190, 99, 79, 68, 89, 93, 239, + 18, 209, 221, 53, 148, 60, 217, 158, 214, 98, 80, 119, 39, 115, 34, 136, 74, 150, 77, 96, 14, + 35, 166, 17, 178, 96, 227, 5, 128, 155, 3, 234, 161, 115, 46, 80, 197, 241, 231, 215, 125, 36, + 255, 234, 143, 98, 70, 251, 249, 4, 150, 112, 115, 243, 92, 168, 181, 103, 145, 152, 194, 138, + 86, 179, 75, 228, 189, 139, 36, 181, 151, 90, 150, 22, 
+ [… diff continues: several thousand additional comma-separated u8 byte values of an inline binary literal, elided for readability …]
199, 16, 133, 214, 32, 213, 77, 39, 240, 157, 231, 107, 249, 132, 151, 178, 144, 3, 64, 172, + 141, 138, 176, 81, 12, 132, 156, 220, 185, 250, 218, 54, 223, 94, 152, 109, 139, 130, 70, 44, + 102, 188, 185, 81, 187, 188, 47, 12, 98, 28, 102, 23, 115, 91, 16, 174, 250, 37, 121, 240, 85, + 119, 48, 83, 242, 139, 215, 176, 1, 206, 9, 108, 204, 115, 28, 194, 195, 250, 208, 185, 59, + 150, 81, 7, 152, 38, 54, 133, 27, 174, 87, 5, 240, 46, 197, 230, 119, 145, 59, 215, 169, 250, + 148, 93, 78, 76, 27, 180, 6, 151, 53, 118, 60, 232, 247, 123, 158, 119, 200, 229, 239, 108, + 228, 124, 34, 210, 93, 93, 96, 175, 242, 108, 133, 85, 76, 81, 188, 164, 56, 124, 9, 211, 16, + 104, 157, 119, 175, 223, 182, 249, 253, 94, 147, 183, 184, 252, 173, 20, 229, 224, 240, 64, + 163, 103, 24, 224, 77, 164, 10, 5, 106, 169, 151, 180, 182, 104, 191, 9, 243, 66, 121, 18, 126, + 136, 13, 191, 20, 170, 146, 179, 117, 114, 77, 126, 163, 220, 185, 19, 180, 98, 83, 229, 111, + 70, 18, 130, 21, 78, 56, 32, 219, 46, 130, 144, 203, 84, 67, 227, 96, 120, 114, 77, 133, 185, + 123, 246, 131, 184, 198, 40, 17, 99, 62, 21, 167, 156, 122, 238, 238, 74, 79, 112, 18, 177, 10, + 254, 133, 60, 44, 149, 196, 123, 95, 177, 199, 255, 239, 22, 67, 8, 252, 244, 69, 165, 150, 78, + 91, 125, 233, 198, 37, 37, 255, 214, 71, 206, 45, 84, 72, 26, 152, 91, 142, 182, 58, 254, 19, + 131, 191, 59, 163, 75, 54, 239, 46, 110, 226, 9, 44, 67, 70, 104, 235, 49, 9, 117, 147, 120, + 64, 60, 127, 13, 149, 103, 210, 116, 210, 134, 155, 2, 123, 78, 141, 11, 202, 143, 32, 207, 39, + 185, 165, 215, 107, 77, 131, 161, 231, 116, 242, 113, 169, 143, 62, 4, 230, 195, 53, 27, 41, + 58, 59, 16, 117, 114, 210, 115, 48, 203, 249, 124, 137, 29, 246, 7, 238, 109, 241, 122, 117, + 60, 204, 174, 110, 74, 124, 77, 49, 134, 11, 59, 82, 166, 202, 59, 20, 144, 217, 213, 196, 224, + 144, 220, 234, 53, 78, 215, 108, 36, 209, 11, 140, 174, 134, 229, 231, 224, 174, 3, 69, 249, + 215, 111, 184, 15, 131, 89, 148, 98, 11, 128, 246, 244, 73, 138, 138, 230, 5, 97, 11, 38, 167, + 144, 196, 22, 114, 113, 244, 173, 126, 94, 74, 101, 19, 49, 2, 109, 48, 206, 157, 169, 16, 71, + 77, 116, 219, 223, 14, 240, 194, 217, 51, 100, 111, 243, 81, 236, 98, 73, 170, 13, 26, 166, + 174, 138, 114, 229, 166, 76, 240, 169, 109, 248, 118, 97, 94, 154, 106, 13, 190, 5, 196, 84, + 58, 48, 142, 137, 125, 144, 79, 113, 54, 247, 214, 129, 101, 69, 24, 65, 133, 228, 225, 42, 57, + 2, 181, 210, 250, 92, 227, 147, 146, 110, 150, 173, 64, 133, 167, 87, 219, 15, 238, 116, 188, + 243, 141, 93, 186, 181, 92, 90, 107, 117, 32, 93, 252, 110, 248, 21, 114, 86, 248, 175, 122, + 97, 25, 160, 239, 185, 1, 213, 93, 80, 15, 255, 71, 137, 179, 66, 199, 182, 166, 142, 138, 65, + 138, 138, 95, 88, 143, 112, 69, 224, 206, 26, 169, 33, 57, 141, 147, 220, 102, 186, 136, 135, + 252, 15, 126, 40, 98, 218, 192, 220, 118, 54, 218, 51, 177, 58, 148, 244, 126, 15, 0, 206, 81, + 128, 150, 71, 179, 231, 105, 99, 82, 131, 156, 66, 183, 233, 177, 117, 0, 4, 207, 175, 70, 143, + 99, 87, 25, 134, 153, 7, 207, 87, 45, 11, 17, 168, 135, 185, 17, 252, 139, 70, 166, 156, 106, + 108, 116, 171, 29, 95, 89, 196, 109, 16, 90, 194, 6, 234, 203, 176, 168, 14, 229, 219, 172, + 228, 134, 237, 7, 44, 177, 110, 148, 137, 131, 161, 253, 27, 182, 6, 90, 27, 175, 85, 211, 222, + 134, 108, 97, 89, 58, 100, 24, 103, 71, 101, 79, 50, 153, 42, 43, 182, 252, 189, 194, 143, 51, + 10, 236, 165, 192, 47, 42, 125, 74, 171, 223, 192, 25, 115, 95, 199, 217, 75, 201, 194, 8, 41, + 169, 42, 233, 194, 56, 20, 166, 179, 20, 209, 188, 228, 210, 157, 110, 206, 78, 253, 251, 
133, + 78, 218, 69, 58, 229, 116, 71, 127, 233, 7, 196, 10, 203, 192, 60, 192, 228, 213, 231, 169, + 104, 203, 25, 2, 171, 240, 137, 159, 113, 197, 83, 136, 232, 30, 80, 177, 16, 153, 166, 50, 64, + 32, 150, 255, 9, 140, 187, 226, 195, 166, 38, 5, 191, 175, 238, 106, 240, 105, 91, 208, 111, + 110, 107, 67, 174, 79, 155, 45, 87, 177, 12, 39, 197, 7, 229, 108, 158, 35, 93, 175, 119, 87, + 96, 25, 5, 92, 187, 63, 238, 228, 97, 39, 158, 241, 46, 124, 246, 10, 213, 72, 170, 163, 188, + 3, 189, 206, 87, 16, 58, 17, 234, 171, 42, 209, 86, 165, 188, 88, 84, 228, 23, 86, 171, 11, 6, + 95, 143, 128, 244, 7, 46, 1, 46, 231, 159, 124, 103, 162, 228, 181, 243, 57, 41, 100, 102, 219, + 68, 119, 12, 122, 147, 132, 56, 181, 250, 114, 190, 95, 108, 165, 147, 232, 8, 108, 159, 157, + 197, 139, 180, 169, 192, 99, 104, 96, 172, 207, 244, 212, 173, 76, 208, 92, 71, 6, 96, 104, + 113, 112, 62, 101, 131, 3, 32, 29, 17, 225, 100, 19, 46, 50, 87, 167, 158, 239, 214, 245, 234, + 199, 95, 79, 117, 164, 100, 92, 148, 75, 62, 152, 14, 87, 236, 245, 138, 38, 75, 78, 237, 98, + 146, 199, 105, 215, 57, 187, 237, 121, 2, 193, 12, 163, 49, 0, 91, 254, 130, 146, 37, 119, 140, + 22, 86, 153, 124, 39, 188, 40, 146, 158, 190, 136, 168, 133, 204, 116, 159, 221, 59, 199, 51, + 7, 174, 225, 57, 123, 33, 180, 164, 255, 147, 148, 67, 27, 243, 102, 161, 238, 192, 17, 5, 68, + 89, 209, 196, 16, 105, 116, 134, 140, 16, 181, 128, 146, 71, 57, 204, 46, 34, 81, 195, 192, 23, + 130, 7, 98, 188, 101, 87, 121, 80, 79, 183, 48, 130, 118, 67, 98, 60, 183, 86, 252, 145, 40, + 72, 216, 196, 79, 219, 209, 22, 138, 26, 18, 94, 85, 6, 198, 184, 41, 36, 51, 184, 53, 55, 27, + 20, 141, 135, 64, 208, 57, 220, 190, 24, 159, 166, 195, 182, 186, 200, 24, 182, 187, 16, 217, + 147, 118, 38, 233, 158, 182, 85, 36, 10, 155, 208, 32, 62, 44, 152, 253, 149, 212, 142, 5, 200, + 101, 121, 186, 164, 28, 141, 120, 222, 137, 69, 195, 136, 126, 166, 163, 50, 179, 232, 91, 106, + 163, 75, 166, 27, 81, 16, 37, 253, 104, 76, 179, 145, 248, 76, 55, 25, 228, 179, 107, 49, 231, + 47, 185, 11, 234, 237, 94, 223, 195, 254, 68, 118, 32, 124, 74, 196, 74, 50, 66, 0, 23, 20, 88, + 235, 112, 162, 122, 184, 202, 46, 252, 68, 215, 97, 143, 123, 101, 16, 74, 77, 191, 51, 202, 4, + 36, 69, 54, 146, 74, 92, 27, 102, 39, 30, 220, 30, 128, 64, 102, 182, 241, 170, 104, 106, 72, + 186, 133, 185, 91, 129, 142, 246, 222, 227, 189, 108, 39, 135, 155, 243, 153, 55, 240, 236, 34, + 80, 44, 254, 138, 74, 137, 152, 191, 16, 142, 175, 242, 155, 150, 80, 51, 103, 144, 238, 2, + 186, 64, 144, 242, 220, 125, 186, 143, 133, 84, 177, 37, 172, 17, 179, 83, 30, 239, 249, 8, + 243, 208, 149, 221, 87, 242, 68, 27, 90, 119, 12, 186, 34, 71, 97, 191, 91, 115, 255, 84, 220, + 196, 127, 213, 38, 246, 163, 118, 147, 103, 133, 228, 40, 164, 88, 58, 117, 194, 61, 249, 30, + 252, 6, 255, 186, 166, 226, 206, 41, 39, 240, 168, 52, 103, 11, 57, 41, 202, 32, 219, 94, 37, + 87, 223, 23, 168, 65, 250, 253, 103, 114, 239, 94, 26, 46, 216, 193, 139, 143, 40, 213, 67, + 197, 171, 33, 142, 45, 170, 19, 69, 149, 224, 56, 112, 191, 192, 208, 195, 118, 144, 204, 248, + 19, 129, 185, 183, 171, 28, 115, 103, 20, 245, 138, 252, 12, 183, 84, 219, 206, 130, 84, 189, + 88, 150, 125, 154, 198, 18, 106, 125, 75, 92, 226, 112, 98, 223, 174, 212, 106, 158, 99, 3, + 131, 61, 126, 79, 215, 70, 234, 77, 31, 143, 150, 87, 184, 85, 159, 103, 152, 78, 69, 24, 107, + 73, 209, 169, 232, 99, 32, 254, 255, 99, 119, 79, 107, 117, 105, 111, 165, 183, 204, 190, 43, + 214, 100, 156, 19, 109, 135, 206, 10, 200, 140, 67, 131, 226, 185, 158, 142, 
59, 121, 152, 203, + 206, 152, 172, 190, 184, 217, 220, 15, 154, 1, 224, 173, 70, 0, 209, 164, 203, 17, 72, 106, + 115, 43, 9, 211, 131, 240, 135, 72, 13, 231, 57, 225, 140, 119, 243, 228, 143, 101, 165, 118, + 208, 53, 110, 243, 23, 224, 5, 145, 162, 56, 31, 99, 193, 240, 139, 193, 96, 157, 107, 148, 94, + 172, 47, 221, 99, 159, 127, 24, 168, 183, 17, 85, 153, 167, 78, 151, 131, 16, 153, 117, 26, 8, + 159, 252, 91, 242, 213, 89, 102, 4, 121, 226, 144, 64, 195, 15, 239, 65, 247, 81, 83, 187, 167, + 84, 146, 86, 208, 106, 118, 113, 107, 233, 95, 220, 52, 91, 52, 25, 2, 192, 63, 58, 155, 173, + 153, 48, 156, 226, 76, 1, 215, 190, 55, 179, 84, 172, 34, 107, 212, 44, 112, 60, 50, 95, 132, + 225, 91, 123, 209, 16, 46, 15, 33, 86, 122, 169, 43, 19, 236, 32, 170, 145, 232, 51, 44, 100, + 158, 209, 125, 86, 118, 86, 206, 66, 248, 187, 118, 24, 248, 29, 36, 115, 196, 127, 227, 129, + 139, 7, 9, 170, 196, 101, 15, 82, 248, 116, 250, 56, 106, 219, 226, 88, 210, 7, 123, 94, 94, + 10, 109, 183, 241, 38, 210, 182, 44, 167, 196, 111, 235, 98, 243, 142, 248, 105, 248, 0, 4, + 199, 48, 32, 37, 123, 209, 77, 178, 65, 23, 111, 43, 52, 178, 184, 42, 67, 224, 185, 3, 116, + 96, 163, 54, 126, 87, 249, 145, 183, 56, 20, 247, 245, 138, 93, 95, 236, 28, 157, 162, 132, + 218, 24, 199, 177, 61, 222, 197, 159, 144, 252, 62, 17, 239, 118, 240, 86, 18, 65, 199, 75, + 136, 71, 63, 138, 211, 167, 154, 71, 147, 167, 56, 83, 226, 85, 86, 209, 242, 222, 1, 28, 229, + 58, 228, 253, 162, 38, 128, 71, 203, 79, 212, 207, 24, 29, 205, 24, 222, 97, 21, 197, 102, 19, + 22, 191, 151, 223, 63, 152, 115, 95, 131, 243, 15, 148, 247, 154, 151, 112, 247, 230, 183, 160, + 84, 53, 99, 250, 205, 94, 19, 164, 98, 68, 160, 75, 63, 136, 13, 130, 10, 5, 29, 111, 206, 55, + 74, 156, 150, 219, 219, 99, 196, 129, 126, 11, 31, 194, 186, 131, 154, 84, 233, 75, 35, 231, + 65, 1, 222, 6, 185, 27, 77, 218, 167, 50, 251, 81, 175, 31, 168, 123, 10, 163, 208, 4, 103, + 129, 64, 151, 31, 122, 206, 137, 94, 214, 206, 102, 142, 191, 31, 79, 3, 87, 235, 77, 156, 129, + 53, 224, 67, 36, 2, 252, 165, 74, 249, 191, 71, 128, 80, 26, 89, 61, 27, 1, 113, 98, 185, 134, + 114, 102, 36, 198, 193, 43, 94, 167, 135, 146, 10, 113, 186, 239, 23, 52, 180, 207, 186, 213, + 32, 236, 251, 107, 143, 185, 102, 12, 222, 207, 228, 216, 100, 132, 17, 204, 169, 27, 26, 109, + 252, 193, 246, 51, 197, 222, 222, 184, 255, 240, 17, 161, 0, 112, 14, 156, 165, 51, 217, 183, + 143, 151, 164, 8, 42, 77, 170, 172, 131, 44, 150, 85, 91, 58, 131, 54, 177, 102, 75, 141, 9, + 101, 224, 165, 13, 228, 149, 208, 221, 158, 97, 146, 213, 66, 148, 202, 151, 29, 230, 93, 141, + 192, 194, 6, 148, 215, 61, 204, 34, 82, 98, 103, 39, 29, 213, 94, 246, 26, 136, 131, 86, 214, + 241, 252, 64, 118, 92, 195, 234, 232, 12, 100, 178, 171, 232, 108, 139, 12, 155, 139, 174, 185, + 100, 245, 137, 173, 120, 249, 185, 64, 139, 127, 146, 131, 237, 162, 144, 239, 215, 209, 56, + 34, 98, 50, 224, 67, 1, 19, 108, 68, 96, 14, 73, 1, 149, 19, 115, 32, 194, 50, 163, 13, 135, + 106, 27, 131, 65, 126, 79, 235, 51, 130, 173, 91, 62, 62, 212, 206, 179, 200, 212, 71, 225, + 162, 190, 114, 222, 0, 54, 89, 24, 76, 174, 236, 98, 22, 83, 80, 118, 78, 198, 158, 253, 140, + 4, 215, 91, 33, 102, 1, 44, 246, 152, 24, 184, 89, 234, 125, 210, 192, 126, 80, 130, 243, 80, + 170, 176, 47, 179, 92, 138, 209, 111, 252, 161, 126, 254, 132, 134, 193, 89, 230, 39, 197, 87, + 236, 90, 22, 137, 11, 175, 184, 165, 139, 201, 252, 185, 236, 13, 118, 162, 178, 206, 1, 221, + 59, 62, 31, 20, 104, 205, 122, 92, 235, 86, 19, 43, 183, 70, 109, 191, 105, 
226, 63, 213, 137, + 50, 168, 192, 91, 144, 20, 52, 180, 46, 236, 103, 59, 108, 47, 31, 24, 169, 25, 196, 47, 39, + 226, 138, 45, 118, 203, 101, 84, 250, 13, 161, 220, 84, 31, 115, 70, 200, 61, 59, 102, 79, 39, + 175, 168, 196, 193, 236, 101, 144, 206, 101, 201, 1, 230, 160, 239, 174, 117, 94, 108, 245, 54, + 46, 48, 176, 227, 197, 134, 251, 189, 17, 96, 229, 138, 110, 255, 72, 159, 34, 92, 128, 116, + 72, 7, 9, 82, 76, 250, 91, 64, 75, 204, 127, 50, 78, 140, 195, 208, 149, 156, 182, 25, 33, 48, + 150, 104, 87, 179, 220, 73, 26, 157, 148, 1, 227, 150, 242, 124, 209, 81, 12, 35, 134, 213, 68, + 158, 95, 237, 80, 22, 251, 168, 150, 62, 80, 72, 91, 255, 200, 164, 51, 47, 203, 37, 156, 247, + 174, 246, 88, 248, 48, 182, 134, 249, 172, 68, 108, 238, 169, 247, 114, 230, 2, 142, 36, 153, + 110, 113, 106, 144, 26, 156, 158, 49, 107, 109, 71, 245, 182, 11, 132, 100, 170, 110, 119, 113, + 96, 94, 238, 117, 17, 212, 198, 128, 114, 133, 135, 11, 112, 218, 11, 54, 218, 213, 173, 216, + 25, 71, 245, 211, 157, 15, 158, 3, 230, 219, 102, 153, 90, 128, 134, 7, 5, 22, 62, 13, 151, + 119, 141, 116, 207, 1, 147, 80, 78, 145, 45, 146, 66, 171, 156, 207, 17, 192, 69, 246, 9, 220, + 98, 19, 118, 22, 194, 138, 53, 231, 30, 29, 165, 31, 39, 203, 150, 136, 253, 86, 131, 171, 108, + 216, 37, 26, 149, 186, 142, 192, 240, 68, 77, 29, 166, 240, 78, 137, 79, 68, 85, 106, 202, 161, + 106, 184, 33, 235, 205, 201, 49, 135, 164, 61, 153, 86, 115, 228, 74, 186, 250, 150, 121, 224, + 191, 146, 54, 161, 122, 101, 55, 190, 13, 50, 25, 114, 61, 24, 155, 216, 31, 130, 41, 104, 125, + 228, 223, 254, 84, 114, 116, 12, 191, 9, 140, 74, 103, 48, 82, 67, 126, 30, 144, 230, 21, 173, + 97, 77, 92, 213, 61, 172, 223, 143, 218, 251, 241, 197, 124, 150, 214, 60, 62, 170, 9, 154, + 167, 105, 88, 24, 53, 133, 125, 178, 19, 74, 217, 234, 123, 228, 230, 174, 161, 184, 22, 222, + 227, 97, 143, 173, 254, 244, 228, 14, 40, 22, 15, 11, 26, 226, 199, 218, 131, 84, 98, 55, 239, + 180, 233, 231, 30, 182, 36, 17, 23, 111, 60, 24, 172, 241, 9, 85, 167, 42, 137, 80, 97, 190, + 123, 85, 101, 31, 122, 121, 95, 188, 55, 21, 53, 17, 133, 49, 99, 242, 22, 45, 4, 117, 76, 244, + 208, 254, 108, 21, 165, 39, 213, 246, 253, 34, 27, 10, 104, 41, 150, 140, 202, 13, 114, 137, + 145, 147, 30, 152, 137, 69, 210, 156, 55, 88, 196, 72, 79, 36, 226, 219, 132, 176, 223, 16, + 238, 254, 218, 182, 240, 53, 165, 201, 46, 5, 164, 19, 247, 205, 243, 211, 224, 157, 140, 26, + 145, 95, 81, 19, 73, 238, 96, 50, 184, 134, 25, 120, 193, 58, 0, 88, 113, 48, 80, 89, 65, 220, + 33, 208, 20, 184, 7, 171, 160, 56, 36, 20, 186, 244, 11, 33, 86, 254, 99, 35, 121, 252, 236, + 14, 16, 254, 225, 63, 39, 72, 255, 0, 98, 44, 151, 13, 36, 137, 40, 218, 160, 82, 255, 162, 20, + 109, 3, 224, 218, 96, 105, 169, 239, 131, 136, 227, 208, 158, 60, 34, 49, 45, 11, 55, 72, 135, + 167, 2, 246, 1, 162, 183, 247, 251, 72, 99, 47, 23, 128, 214, 255, 171, 77, 132, 112, 235, 215, + 5, 12, 44, 33, 81, 222, 161, 218, 56, 244, 228, 221, 124, 35, 232, 12, 215, 104, 95, 189, 97, + 243, 243, 139, 247, 144, 20, 143, 244, 185, 209, 199, 208, 207, 75, 95, 70, 70, 137, 242, 68, + 5, 85, 229, 223, 49, 56, 242, 163, 65, 157, 143, 73, 96, 218, 202, 114, 114, 173, 13, 143, 33, + 84, 81, 55, 109, 68, 76, 67, 151, 136, 222, 176, 195, 236, 169, 224, 107, 241, 137, 123, 233, + 220, 20, 199, 66, 18, 4, 125, 171, 193, 183, 34, 49, 76, 54, 227, 227, 87, 218, 18, 22, 195, 5, + 144, 156, 67, 118, 223, 238, 89, 75, 109, 215, 31, 132, 86, 181, 75, 139, 31, 83, 120, 89, 106, + 181, 200, 201, 229, 58, 84, 152, 195, 135, 169, 126, 2, 
21, 24, 243, 89, 91, 173, 211, 230, + 184, 0, 42, 22, 15, 59, 210, 164, 25, 199, 233, 147, 178, 15, 36, 93, 1, 63, 242, 75, 26, 16, + 231, 155, 71, 148, 93, 71, 252, 230, 252, 71, 139, 189, 213, 150, 211, 62, 154, 216, 219, 22, + 243, 200, 245, 255, 48, 82, 142, 186, 63, 139, 61, 253, 22, 40, 146, 8, 173, 79, 20, 222, 169, + 161, 170, 254, 94, 136, 78, 136, 114, 140, 212, 143, 21, 226, 98, 107, 75, 221, 240, 254, 65, + 145, 109, 64, 134, 114, 121, 67, 90, 79, 108, 72, 252, 231, 0, 130, 98, 192, 73, 141, 67, 212, + 236, 7, 189, 240, 36, 205, 77, 252, 181, 203, 162, 38, 175, 148, 6, 177, 179, 102, 178, 173, + 180, 228, 198, 191, 239, 76, 2, 176, 218, 227, 197, 172, 216, 210, 130, 27, 123, 151, 152, 37, + 33, 65, 229, 30, 22, 131, 2, 44, 78, 140, 250, 15, 179, 250, 67, 206, 86, 97, 78, 74, 80, 46, + 217, 0, 100, 194, 225, 178, 145, 120, 211, 203, 121, 228, 187, 146, 45, 207, 166, 99, 254, 243, + 60, 100, 112, 159, 196, 161, 181, 14, 226, 226, 190, 185, 184, 169, 182, 59, 149, 24, 182, 255, + 36, 94, 185, 53, 20, 138, 250, 216, 145, 34, 4, 7, 211, 81, 69, 78, 211, 77, 154, 139, 12, 184, + 50, 193, 19, 205, 167, 103, 53, 63, 147, 210, 158, 14, 57, 74, 71, 64, 90, 185, 222, 157, 105, + 98, 184, 238, 55, 72, 88, 11, 195, 32, 139, 194, 217, 125, 184, 80, 66, 150, 230, 187, 248, + 187, 162, 122, 101, 93, 192, 1, 129, 2, 193, 135, 246, 200, 128, 39, 126, 255, 23, 96, 73, 191, + 72, 219, 122, 67, 49, 145, 120, 119, 134, 156, 29, 117, 159, 224, 248, 18, 24, 187, 121, 104, + 252, 184, 83, 170, 57, 200, 180, 83, 58, 40, 158, 38, 6, 226, 74, 0, 222, 224, 229, 210, 25, + 150, 225, 184, 139, 142, 171, 10, 198, 97, 186, 205, 87, 86, 223, 14, 102, 43, 175, 169, 231, + 113, 121, 219, 185, 26, 237, 143, 148, 165, 155, 93, 167, 55, 82, 189, 22, 151, 121, 135, 218, + 78, 209, 12, 51, 210, 192, 211, 91, 19, 63, 209, 132, 54, 90, 57, 110, 75, 37, 7, 138, 224, 0, + 102, 12, 151, 48, 190, 69, 30, 233, 118, 44, 192, 139, 102, 145, 95, 119, 254, 184, 22, 124, + 106, 128, 88, 192, 77, 86, 104, 210, 98, 79, 223, 104, 231, 199, 89, 164, 206, 37, 61, 16, 112, + 184, 248, 134, 143, 32, 126, 179, 127, 35, 26, 166, 181, 222, 69, 2, 207, 186, 107, 183, 118, + 131, 135, 182, 79, 153, 37, 20, 165, 38, 6, 125, 70, 225, 220, 136, 52, 164, 36, 121, 53, 116, + 236, 214, 218, 196, 79, 107, 46, 71, 77, 3, 41, 226, 84, 154, 64, 202, 107, 231, 44, 181, 186, + 137, 125, 41, 151, 93, 9, 40, 67, 148, 229, 205, 22, 71, 6, 161, 11, 123, 195, 162, 232, 147, + 73, 1, 221, 13, 128, 148, 104, 81, 215, 234, 230, 86, 156, 51, 94, 208, 190, 228, 136, 229, + 201, 147, 5, 75, 38, 18, 240, 95, 224, 203, 86, 99, 207, 185, 194, 46, 105, 0, 34, 214, 2, 43, + 255, 104, 241, 182, 240, 63, 194, 10, 5, 162, 166, 224, 12, 40, 75, 96, 70, 175, 36, 224, 231, + 248, 98, 3, 139, 55, 55, 109, 161, 221, 131, 176, 26, 131, 159, 247, 128, 105, 215, 255, 167, + 243, 167, 78, 198, 238, 17, 83, 66, 168, 6, 230, 155, 187, 204, 217, 241, 196, 41, 73, 28, 85, + 160, 9, 215, 194, 77, 29, 180, 158, 149, 74, 137, 185, 158, 237, 204, 43, 176, 199, 167, 33, + 100, 226, 72, 136, 127, 95, 203, 149, 119, 16, 50, 159, 225, 170, 251, 74, 85, 168, 76, 120, + 36, 5, 17, 230, 228, 161, 213, 129, 63, 150, 149, 97, 134, 167, 84, 152, 234, 65, 160, 223, + 190, 91, 107, 202, 31, 25, 36, 109, 43, 199, 254, 91, 24, 171, 160, 206, 1, 2, 232, 226, 15, + 229, 248, 92, 140, 60, 174, 215, 196, 118, 109, 183, 249, 42, 8, 201, 191, 155, 90, 68, 6, 79, + 131, 131, 58, 190, 207, 158, 77, 68, 197, 165, 84, 222, 202, 193, 155, 11, 59, 103, 106, 28, + 174, 133, 98, 106, 60, 205, 71, 254, 210, 148, 171, 
110, 243, 181, 187, 135, 126, 113, 245, + 165, 98, 162, 13, 186, 21, 183, 26, 225, 191, 164, 66, 160, 114, 85, 246, 240, 75, 138, 19, 43, + 151, 25, 163, 237, 135, 67, 187, 5, 237, 26, 151, 77, 103, 115, 74, 186, 7, 239, 5, 183, 240, + 19, 82, 133, 93, 207, 211, 83, 235, 178, 45, 121, 3, 216, 144, 218, 159, 131, 242, 171, 97, + 228, 167, 208, 72, 172, 165, 116, 187, 32, 97, 109, 208, 187, 181, 183, 144, 23, 71, 179, 149, + 190, 71, 18, 80, 157, 226, 177, 214, 148, 70, 209, 93, 12, 151, 223, 249, 172, 75, 176, 192, + 230, 97, 206, 13, 110, 203, 61, 5, 38, 152, 140, 46, 38, 215, 200, 226, 6, 9, 172, 179, 116, + 141, 54, 37, 224, 99, 252, 231, 97, 218, 109, 22, 74, 100, 150, 124, 183, 85, 153, 50, 95, 245, + 65, 167, 253, 0, 15, 155, 111, 217, 227, 120, 154, 61, 231, 225, 146, 234, 176, 116, 126, 65, + 200, 202, 114, 153, 137, 107, 21, 11, 69, 140, 150, 3, 25, 27, 115, 193, 113, 213, 192, 99, + 171, 14, 102, 235, 175, 178, 187, 116, 132, 21, 228, 30, 27, 120, 202, 102, 145, 137, 4, 227, + 1, 223, 191, 102, 223, 128, 71, 56, 97, 224, 31, 54, 161, 29, 105, 9, 143, 209, 104, 117, 175, + 138, 11, 22, 151, 74, 38, 229, 196, 136, 197, 144, 198, 248, 78, 180, 199, 241, 180, 59, 166, + 185, 75, 215, 63, 78, 57, 79, 6, 96, 202, 109, 5, 102, 44, 87, 243, 46, 0, 110, 44, 253, 85, + 225, 76, 29, 233, 211, 197, 2, 150, 86, 170, 111, 72, 143, 129, 182, 8, 163, 27, 90, 183, 152, + 9, 151, 199, 23, 251, 23, 153, 31, 135, 134, 121, 169, 170, 176, 217, 49, 219, 172, 141, 18, + 172, 42, 228, 144, 224, 137, 155, 218, 93, 61, 142, 149, 162, 182, 83, 22, 56, 246, 206, 201, + 225, 100, 188, 158, 212, 210, 179, 50, 174, 177, 136, 131, 37, 217, 173, 93, 220, 61, 201, 32, + 88, 84, 165, 225, 240, 67, 237, 113, 247, 228, 62, 169, 140, 101, 226, 10, 12, 147, 174, 237, + 234, 124, 71, 161, 181, 134, 158, 71, 61, 211, 253, 102, 183, 85, 215, 210, 214, 140, 187, 144, + 46, 62, 168, 7, 247, 55, 18, 75, 143, 238, 40, 102, 92, 215, 227, 241, 255, 231, 184, 238, 35, + 211, 195, 135, 225, 237, 105, 30, 14, 124, 55, 203, 252, 140, 34, 97, 248, 49, 255, 210, 96, + 50, 69, 149, 77, 242, 11, 44, 189, 214, 195, 141, 135, 208, 162, 241, 238, 108, 68, 183, 117, + 253, 105, 81, 252, 140, 97, 37, 185, 9, 240, 244, 68, 79, 24, 170, 171, 98, 110, 40, 50, 105, + 205, 188, 134, 11, 181, 116, 186, 33, 174, 251, 144, 19, 87, 145, 160, 41, 246, 119, 83, 32, + 192, 69, 7, 190, 181, 243, 188, 91, 128, 27, 193, 168, 167, 210, 3, 129, 2, 231, 164, 214, 79, + 234, 71, 66, 213, 25, 159, 57, 176, 103, 58, 173, 163, 251, 229, 106, 232, 0, 111, 159, 147, + 184, 202, 76, 238, 5, 79, 72, 132, 61, 151, 152, 118, 217, 179, 147, 119, 160, 171, 189, 181, + 126, 170, 107, 246, 84, 105, 77, 53, 23, 186, 12, 28, 204, 89, 97, 180, 145, 239, 136, 226, + 234, 86, 220, 64, 61, 177, 195, 87, 96, 169, 146, 109, 195, 121, 35, 94, 81, 91, 2, 70, 15, 36, + 110, 197, 223, 27, 135, 244, 40, 15, 155, 145, 212, 51, 162, 165, 186, 64, 136, 211, 195, 89, + 174, 138, 191, 12, 16, 142, 182, 33, 173, 245, 117, 83, 45, 8, 129, 92, 153, 99, 4, 87, 124, + 173, 60, 59, 145, 215, 38, 91, 7, 244, 78, 199, 175, 212, 5, 85, 184, 170, 182, 12, 139, 197, + 144, 173, 20, 176, 49, 163, 117, 35, 234, 60, 63, 94, 249, 230, 65, 98, 10, 165, 7, 228, 172, + 63, 191, 40, 0, 51, 248, 100, 208, 242, 122, 193, 178, 53, 128, 135, 152, 180, 10, 97, 154, + 151, 168, 63, 140, 30, 146, 65, 56, 222, 0, 172, 85, 250, 158, 32, 179, 228, 215, 240, 106, 37, + 84, 201, 216, 108, 200, 111, 132, 105, 85, 0, 118, 24, 155, 139, 4, 111, 65, 19, 194, 60, 117, + 89, 99, 105, 191, 124, 58, 93, 81, 159, 58, 88, 71, 98, 
200, 103, 13, 141, 153, 240, 42, 40, + 81, 153, 75, 120, 153, 135, 225, 229, 240, 150, 212, 179, 242, 101, 39, 245, 108, 56, 103, 35, + 177, 97, 24, 57, 94, 55, 175, 19, 127, 113, 6, 154, 194, 186, 215, 225, 137, 76, 96, 52, 51, + 149, 93, 11, 5, 102, 60, 188, 50, 210, 130, 148, 177, 247, 131, 138, 12, 72, 236, 2, 199, 4, + 138, 242, 78, 12, 138, 127, 210, 134, 93, 209, 139, 89, 172, 86, 72, 137, 116, 132, 145, 6, + 212, 199, 59, 81, 108, 87, 204, 26, 214, 208, 21, 99, 38, 113, 229, 238, 251, 32, 83, 19, 129, + 226, 67, 133, 60, 78, 33, 248, 215, 135, 133, 53, 109, 63, 175, 95, 159, 163, 0, 160, 74, 213, + 240, 141, 200, 134, 252, 47, 246, 231, 180, 196, 102, 239, 242, 176, 75, 14, 76, 165, 255, 209, + 186, 131, 3, 163, 91, 187, 36, 5, 247, 182, 167, 19, 57, 98, 218, 65, 222, 96, 233, 218, 173, + 91, 213, 53, 183, 251, 2, 134, 147, 107, 70, 97, 24, 223, 40, 104, 74, 154, 158, 206, 198, 136, + 144, 54, 201, 107, 245, 116, 15, 112, 129, 237, 2, 142, 33, 112, 188, 209, 62, 194, 94, 101, + 73, 139, 53, 182, 110, 45, 63, 106, 214, 146, 130, 104, 14, 66, 112, 122, 99, 12, 66, 86, 56, + 89, 125, 196, 177, 22, 48, 167, 211, 29, 243, 194, 165, 113, 42, 24, 90, 202, 100, 229, 123, + 10, 94, 244, 11, 234, 156, 149, 134, 83, 110, 53, 174, 45, 219, 218, 154, 78, 80, 232, 215, + 172, 197, 126, 211, 219, 176, 196, 141, 241, 103, 59, 42, 250, 27, 80, 203, 117, 198, 171, 178, + 98, 91, 162, 254, 38, 183, 85, 59, 5, 108, 143, 47, 247, 133, 79, 21, 242, 14, 0, 131, 45, 122, + 218, 71, 214, 44, 242, 26, 153, 48, 41, 143, 43, 75, 70, 78, 108, 35, 13, 47, 164, 113, 105, + 166, 108, 209, 90, 139, 163, 193, 213, 74, 129, 42, 40, 189, 95, 58, 139, 172, 157, 214, 190, + 225, 149, 241, 215, 186, 126, 92, 230, 31, 148, 136, 221, 114, 10, 61, 193, 123, 156, 140, 111, + 182, 196, 173, 62, 218, 7, 28, 112, 165, 173, 104, 102, 198, 205, 80, 81, 85, 11, 129, 4, 171, + 182, 31, 64, 92, 127, 8, 92, 72, 140, 97, 1, 51, 73, 200, 11, 214, 248, 50, 218, 171, 142, 126, + 34, 41, 227, 209, 29, 73, 4, 181, 115, 251, 6, 182, 158, 73, 154, 118, 252, 196, 106, 250, 236, + 187, 237, 79, 70, 132, 193, 210, 180, 10, 229, 45, 141, 21, 91, 86, 119, 201, 18, 38, 147, 222, + 118, 94, 238, 69, 253, 204, 88, 143, 216, 68, 108, 72, 205, 95, 164, 244, 51, 35, 142, 121, + 195, 228, 168, 179, 208, 174, 175, 0, 193, 124, 185, 163, 178, 65, 117, 9, 87, 74, 25, 161, + 241, 124, 145, 51, 182, 11, 253, 210, 60, 102, 177, 232, 112, 14, 19, 122, 14, 111, 225, 209, + 152, 218, 58, 252, 159, 249, 12, 178, 151, 110, 92, 162, 157, 60, 64, 113, 47, 30, 85, 21, 212, + 209, 188, 249, 236, 83, 9, 216, 90, 194, 216, 183, 195, 86, 103, 198, 117, 172, 146, 154, 189, + 44, 59, 77, 117, 207, 242, 105, 191, 186, 139, 110, 15, 184, 69, 124, 33, 21, 234, 106, 213, + 130, 177, 22, 135, 192, 250, 90, 225, 248, 15, 221, 206, 184, 49, 106, 193, 10, 42, 199, 208, + 106, 204, 54, 40, 28, 136, 37, 83, 182, 75, 178, 64, 112, 234, 161, 122, 209, 141, 212, 32, + 173, 119, 226, 234, 19, 89, 210, 216, 235, 51, 177, 148, 194, 150, 130, 84, 117, 23, 154, 91, + 94, 9, 0, 159, 46, 185, 166, 22, 86, 211, 237, 233, 166, 31, 3, 200, 158, 182, 18, 234, 32, + 160, 74, 139, 184, 246, 0, 14, 220, 47, 244, 165, 51, 123, 236, 195, 183, 150, 38, 41, 123, + 181, 34, 103, 142, 168, 51, 218, 202, 14, 177, 223, 153, 144, 64, 56, 107, 77, 133, 89, 245, + 203, 228, 137, 218, 35, 0, 63, 32, 66, 66, 53, 117, 156, 179, 92, 248, 74, 82, 169, 82, 34, 30, + 172, 212, 33, 188, 116, 241, 135, 209, 139, 61, 181, 100, 45, 145, 112, 34, 180, 231, 199, 85, + 133, 221, 86, 17, 46, 195, 159, 190, 124, 63, 1, 155, 
33, 243, 37, 236, 31, 77, 197, 187, 47, + 230, 66, 77, 142, 113, 254, 252, 149, 228, 190, 203, 235, 117, 225, 226, 84, 85, 61, 159, 5, + 161, 182, 214, 115, 222, 118, 138, 134, 199, 201, 187, 123, 15, 110, 72, 159, 10, 212, 219, + 189, 162, 76, 71, 92, 184, 199, 30, 64, 127, 120, 161, 105, 26, 25, 38, 209, 4, 25, 90, 47, 46, + 123, 179, 233, 152, 118, 157, 30, 69, 71, 207, 9, 145, 17, 29, 249, 243, 84, 216, 215, 185, + 144, 29, 57, 135, 237, 230, 238, 46, 83, 80, 117, 151, 147, 244, 154, 145, 232, 235, 45, 117, + 220, 183, 174, 37, 113, 105, 48, 93, 162, 35, 136, 237, 90, 62, 31, 12, 166, 146, 139, 195, 19, + 128, 24, 43, 114, 92, 237, 34, 205, 145, 208, 252, 35, 162, 28, 4, 190, 18, 106, 123, 224, 224, + 10, 135, 7, 226, 178, 93, 71, 129, 228, 44, 234, 9, 168, 57, 106, 121, 39, 138, 139, 203, 189, + 13, 240, 138, 50, 250, 117, 76, 198, 0, 197, 110, 17, 242, 232, 61, 46, 28, 97, 177, 207, 155, + 133, 84, 97, 231, 210, 16, 167, 198, 49, 220, 59, 101, 129, 58, 148, 126, 131, 155, 141, 9, + 190, 220, 17, 125, 87, 111, 201, 108, 20, 65, 181, 115, 206, 207, 2, 178, 71, 159, 166, 73, + 184, 199, 144, 92, 57, 166, 100, 126, 88, 116, 15, 122, 123, 231, 188, 223, 31, 69, 67, 56, + 242, 192, 175, 179, 210, 59, 42, 111, 73, 215, 170, 94, 26, 125, 72, 159, 200, 37, 144, 84, + 239, 27, 64, 99, 45, 112, 117, 243, 103, 75, 254, 234, 70, 199, 192, 231, 98, 116, 26, 18, 40, + 92, 220, 112, 13, 203, 87, 255, 254, 146, 105, 103, 7, 142, 139, 66, 86, 75, 134, 79, 207, 80, + 27, 49, 16, 68, 38, 125, 136, 108, 63, 144, 4, 55, 109, 150, 50, 179, 144, 29, 54, 158, 52, + 238, 162, 165, 139, 251, 39, 159, 33, 125, 248, 146, 255, 68, 159, 173, 119, 89, 15, 224, 54, + 101, 138, 148, 109, 52, 212, 215, 120, 28, 89, 169, 121, 201, 154, 6, 210, 23, 76, 209, 37, 28, + 159, 138, 150, 68, 245, 124, 194, 84, 173, 210, 20, 100, 81, 115, 51, 182, 223, 102, 145, 144, + 44, 146, 97, 246, 249, 109, 101, 69, 77, 64, 96, 195, 249, 5, 96, 56, 34, 219, 80, 190, 51, + 191, 194, 18, 102, 216, 253, 6, 219, 220, 4, 189, 44, 97, 195, 135, 253, 110, 71, 154, 209, + 146, 51, 149, 90, 145, 218, 28, 118, 150, 51, 1, 27, 190, 176, 253, 171, 42, 101, 143, 105, + 136, 227, 154, 231, 229, 164, 44, 127, 246, 106, 203, 46, 199, 131, 123, 36, 8, 11, 249, 191, + 249, 223, 159, 131, 49, 185, 197, 235, 192, 255, 190, 196, 110, 255, 246, 88, 91, 145, 37, 148, + 236, 88, 55, 229, 242, 103, 140, 74, 103, 245, 19, 31, 52, 158, 253, 205, 221, 77, 128, 43, 37, + 197, 249, 158, 155, 100, 127, 201, 188, 32, 20, 130, 101, 116, 215, 207, 92, 231, 233, 172, + 181, 85, 233, 112, 218, 23, 102, 205, 193, 227, 254, 22, 3, 30, 3, 135, 107, 252, 140, 147, + 135, 239, 76, 13, 119, 31, 28, 128, 234, 78, 173, 63, 19, 253, 184, 86, 118, 140, 3, 148, 10, + 140, 57, 12, 224, 6, 255, 132, 242, 67, 79, 135, 0, 54, 189, 48, 33, 96, 94, 241, 255, 84, 202, + 60, 28, 215, 167, 71, 71, 70, 249, 234, 49, 216, 142, 134, 42, 205, 233, 91, 196, 211, 195, 82, + 170, 202, 115, 44, 237, 161, 238, 44, 202, 184, 251, 0, 118, 94, 181, 91, 161, 48, 120, 134, + 123, 0, 96, 21, 240, 50, 212, 32, 157, 111, 90, 225, 177, 88, 161, 118, 97, 53, 143, 41, 236, + 46, 18, 108, 173, 127, 65, 75, 184, 65, 249, 169, 161, 105, 133, 23, 226, 107, 255, 191, 96, + 176, 7, 169, 71, 17, 128, 70, 26, 133, 35, 190, 113, 185, 212, 23, 38, 63, 190, 231, 27, 49, + 170, 65, 211, 72, 137, 69, 240, 132, 178, 49, 186, 203, 122, 202, 255, 83, 119, 92, 225, 129, + 49, 106, 123, 113, 76, 227, 9, 124, 108, 133, 71, 87, 39, 122, 173, 210, 136, 135, 118, 200, + 48, 253, 82, 148, 166, 3, 177, 95, 79, 176, 224, 149, 100, 143, 
203, 171, 180, 196, 58, 24, 70, + 21, 59, 5, 143, 213, 221, 147, 247, 160, 130, 106, 30, 231, 179, 167, 180, 45, 179, 176, 13, + 163, 140, 184, 143, 86, 235, 111, 176, 236, 149, 181, 0, 102, 76, 189, 66, 43, 163, 24, 221, + 218, 105, 121, 0, 91, 72, 210, 35, 222, 120, 195, 181, 102, 46, 117, 189, 212, 130, 38, 138, + 246, 75, 222, 87, 212, 169, 4, 248, 16, 248, 74, 163, 68, 46, 190, 183, 234, 249, 89, 153, 137, + 154, 221, 112, 87, 15, 185, 188, 166, 93, 147, 210, 226, 98, 133, 46, 247, 105, 199, 115, 34, + 17, 247, 89, 14, 16, 85, 85, 4, 34, 190, 82, 61, 26, 134, 40, 147, 163, 2, 117, 202, 17, 64, + 42, 155, 13, 85, 87, 234, 132, 237, 240, 116, 146, 165, 219, 142, 214, 121, 197, 168, 253, 125, + 211, 80, 214, 42, 157, 204, 96, 51, 162, 54, 162, 20, 203, 174, 166, 209, 70, 12, 224, 23, 127, + 30, 86, 207, 150, 124, 48, 140, 77, 250, 228, 196, 36, 159, 251, 179, 7, 174, 238, 51, 99, 229, + 47, 244, 179, 70, 215, 171, 254, 242, 99, 159, 61, 82, 230, 131, 142, 50, 83, 177, 173, 75, + 120, 4, 1, 134, 108, 51, 9, 6, 228, 170, 65, 239, 178, 232, 95, 23, 17, 102, 134, 248, 247, 56, + 210, 84, 23, 204, 236, 154, 35, 241, 21, 47, 116, 241, 46, 84, 38, 164, 53, 193, 241, 149, 114, + 66, 216, 170, 216, 229, 154, 7, 184, 95, 1, 187, 175, 35, 195, 20, 145, 51, 66, 62, 108, 237, + 136, 79, 146, 159, 107, 60, 163, 206, 77, 8, 86, 73, 214, 187, 72, 251, 82, 153, 197, 117, 144, + 59, 208, 89, 189, 31, 189, 229, 80, 58, 69, 225, 183, 81, 38, 162, 3, 184, 98, 159, 144, 8, 79, + 64, 235, 89, 156, 247, 150, 35, 22, 52, 251, 244, 75, 242, 149, 169, 106, 229, 56, 8, 151, 165, + 12, 177, 105, 65, 245, 51, 110, 247, 7, 245, 24, 207, 83, 39, 140, 175, 118, 109, 252, 188, + 194, 129, 100, 212, 70, 133, 199, 190, 70, 104, 189, 14, 239, 194, 116, 62, 6, 235, 100, 131, + 68, 71, 30, 111, 0, 55, 111, 117, 188, 164, 244, 228, 255, 149, 154, 54, 164, 143, 75, 60, 158, + 29, 133, 165, 105, 76, 250, 56, 196, 63, 126, 211, 250, 204, 4, 200, 235, 76, 227, 157, 192, + 48, 235, 31, 98, 45, 239, 73, 126, 33, 126, 94, 23, 133, 206, 236, 170, 227, 91, 218, 207, 211, + 23, 53, 136, 63, 217, 160, 117, 192, 209, 243, 60, 218, 167, 37, 226, 95, 169, 25, 91, 19, 26, + 109, 142, 42, 91, 88, 221, 186, 242, 87, 91, 31, 7, 207, 164, 118, 109, 125, 199, 78, 64, 97, + 20, 151, 234, 57, 113, 182, 236, 28, 75, 52, 190, 198, 255, 100, 178, 181, 183, 18, 51, 146, + 165, 1, 169, 233, 175, 136, 235, 47, 32, 140, 206, 32, 192, 187, 224, 134, 240, 47, 251, 254, + 217, 65, 107, 199, 159, 242, 170, 3, 63, 202, 55, 250, 73, 193, 160, 152, 148, 221, 87, 112, + 228, 22, 188, 80, 127, 226, 150, 161, 166, 49, 56, 78, 134, 234, 40, 15, 153, 74, 26, 175, 18, + 205, 224, 110, 177, 168, 24, 9, 6, 135, 120, 112, 20, 57, 218, 220, 25, 55, 175, 232, 67, 11, + 124, 238, 218, 138, 18, 93, 1, 77, 192, 240, 119, 17, 70, 144, 3, 94, 117, 173, 229, 172, 44, + 81, 100, 114, 114, 204, 154, 135, 0, 97, 234, 2, 53, 87, 184, 71, 169, 173, 70, 121, 187, 85, + 182, 5, 54, 169, 162, 205, 93, 176, 227, 156, 95, 215, 41, 96, 208, 122, 11, 18, 158, 147, 98, + 189, 189, 17, 163, 143, 102, 254, 157, 155, 76, 167, 193, 102, 62, 95, 217, 66, 30, 159, 27, + 103, 75, 13, 127, 218, 134, 113, 233, 234, 136, 200, 142, 197, 57, 35, 164, 204, 4, 205, 134, + 6, 236, 178, 125, 62, 23, 170, 3, 163, 109, 224, 129, 252, 204, 235, 89, 182, 99, 186, 87, 178, + 183, 39, 83, 17, 92, 227, 254, 190, 157, 226, 149, 2, 223, 17, 204, 196, 55, 77, 22, 45, 147, + 193, 191, 167, 173, 79, 99, 193, 156, 182, 22, 32, 210, 215, 112, 84, 98, 88, 244, 249, 165, + 92, 128, 46, 62, 7, 82, 10, 36, 167, 147, 169, 6, 
250, 165, 207, 155, 162, 245, 212, 246, 141, + 73, 149, 244, 186, 56, 176, 254, 138, 97, 7, 9, 39, 176, 56, 50, 117, 151, 77, 204, 179, 101, + 30, 253, 113, 139, 34, 41, 241, 185, 64, 97, 126, 18, 182, 157, 148, 116, 159, 56, 46, 220, + 138, 193, 208, 196, 35, 216, 136, 73, 219, 196, 178, 221, 111, 157, 157, 75, 9, 59, 33, 125, + 249, 23, 133, 192, 151, 169, 12, 110, 134, 44, 51, 86, 204, 131, 19, 35, 9, 139, 22, 115, 75, + 146, 7, 109, 47, 196, 22, 108, 213, 56, 54, 151, 52, 40, 234, 224, 30, 130, 101, 231, 9, 160, + 174, 44, 67, 243, 105, 81, 176, 251, 95, 218, 98, 85, 62, 96, 1, 209, 99, 64, 49, 242, 191, + 131, 186, 167, 103, 155, 241, 226, 142, 203, 73, 170, 119, 62, 42, 228, 89, 120, 182, 108, 26, + 100, 173, 239, 61, 185, 165, 142, 121, 98, 113, 237, 126, 172, 74, 221, 63, 227, 109, 141, 250, + 189, 74, 245, 220, 10, 97, 73, 152, 153, 30, 82, 99, 230, 127, 198, 161, 86, 170, 8, 146, 101, + 233, 159, 193, 39, 224, 33, 143, 160, 36, 30, 13, 194, 204, 89, 38, 88, 169, 251, 40, 104, 112, + 104, 46, 116, 20, 49, 128, 57, 45, 94, 136, 110, 135, 139, 7, 98, 215, 26, 187, 169, 8, 25, 46, + 56, 23, 110, 228, 179, 201, 226, 75, 103, 214, 179, 225, 81, 159, 235, 172, 236, 124, 230, 139, + 181, 224, 25, 60, 62, 249, 128, 124, 95, 177, 75, 54, 57, 98, 175, 208, 154, 233, 217, 1, 237, + 121, 216, 53, 12, 21, 90, 171, 109, 246, 249, 102, 121, 75, 123, 188, 189, 165, 52, 174, 212, + 195, 37, 19, 81, 247, 221, 254, 3, 125, 232, 212, 236, 250, 4, 92, 233, 48, 94, 216, 243, 122, + 54, 31, 166, 164, 229, 12, 231, 211, 233, 39, 6, 225, 59, 167, 200, 90, 245, 98, 216, 176, 129, + 208, 120, 142, 101, 115, 153, 184, 19, 172, 169, 222, 177, 208, 63, 247, 217, 65, 124, 67, 231, + 159, 107, 88, 56, 249, 66, 21, 207, 154, 124, 192, 165, 91, 73, 219, 177, 97, 25, 30, 69, 57, + 249, 37, 201, 187, 175, 22, 120, 35, 109, 2, 149, 147, 244, 121, 17, 59, 137, 24, 68, 110, 84, + 236, 72, 242, 72, 21, 98, 152, 18, 59, 122, 40, 36, 122, 200, 101, 196, 20, 49, 165, 253, 240, + 13, 252, 196, 55, 114, 62, 33, 59, 2, 132, 91, 46, 212, 253, 149, 93, 120, 199, 7, 148, 12, + 163, 238, 39, 98, 235, 95, 95, 157, 167, 44, 180, 201, 57, 147, 121, 196, 20, 29, 0, 134, 92, + 85, 62, 249, 192, 133, 38, 85, 59, 193, 242, 195, 183, 107, 189, 251, 197, 44, 28, 92, 186, + 255, 222, 217, 85, 232, 222, 33, 193, 161, 44, 2, 107, 238, 30, 123, 245, 139, 106, 162, 76, + 103, 27, 160, 27, 106, 31, 15, 173, 63, 227, 27, 206, 195, 199, 101, 132, 127, 247, 159, 79, + 219, 3, 225, 186, 172, 81, 138, 87, 65, 187, 126, 7, 116, 168, 43, 74, 175, 164, 131, 168, 246, + 114, 77, 85, 157, 133, 161, 127, 19, 238, 83, 255, 229, 43, 92, 205, 220, 17, 33, 194, 115, 51, + 104, 251, 122, 175, 226, 18, 250, 144, 24, 211, 121, 41, 91, 124, 23, 222, 184, 74, 165, 150, + 190, 230, 89, 93, 86, 160, 233, 232, 106, 1, 153, 237, 113, 126, 86, 91, 173, 85, 68, 19, 240, + 119, 151, 242, 99, 29, 250, 142, 32, 34, 124, 168, 119, 171, 167, 45, 237, 63, 126, 76, 110, + 135, 248, 222, 139, 230, 155, 13, 33, 227, 208, 172, 77, 58, 40, 94, 4, 146, 183, 152, 128, + 124, 127, 67, 153, 67, 45, 204, 157, 23, 41, 138, 80, 138, 37, 88, 231, 152, 206, 52, 102, 247, + 215, 32, 78, 16, 44, 70, 129, 16, 172, 181, 167, 167, 133, 172, 75, 41, 181, 143, 207, 86, 92, + 166, 218, 130, 34, 208, 108, 231, 74, 230, 197, 20, 166, 143, 119, 96, 52, 165, 133, 110, 226, + 206, 89, 182, 238, 204, 129, 226, 162, 212, 252, 36, 87, 159, 37, 0, 191, 183, 135, 124, 1, 62, + 110, 85, 153, 97, 216, 46, 241, 226, 97, 215, 93, 66, 232, 172, 133, 141, 229, 83, 228, 84, + 101, 78, 192, 129, 136, 122, 176, 220, 
188, 198, 192, 226, 98, 192, 84, 197, 169, 247, 55, 74, + 13, 178, 182, 47, 219, 60, 178, 17, 85, 137, 0, 222, 119, 37, 212, 48, 254, 253, 39, 3, 251, + 136, 232, 230, 182, 242, 37, 236, 180, 219, 193, 102, 208, 93, 146, 137, 7, 11, 119, 65, 91, + 36, 34, 229, 68, 24, 12, 23, 240, 182, 64, 23, 66, 201, 119, 128, 30, 98, 28, 151, 224, 177, + 177, 150, 17, 150, 73, 33, 220, 147, 17, 131, 171, 224, 18, 72, 240, 34, 140, 149, 143, 231, + 155, 5, 133, 210, 220, 170, 50, 46, 153, 97, 20, 235, 186, 93, 198, 53, 74, 112, 234, 45, 127, + 103, 91, 76, 236, 202, 88, 151, 3, 193, 85, 250, 118, 12, 193, 216, 117, 106, 20, 47, 106, 96, + 189, 215, 153, 134, 187, 240, 44, 213, 206, 69, 123, 253, 242, 103, 102, 220, 35, 147, 24, 16, + 146, 186, 25, 193, 14, 183, 248, 227, 59, 223, 11, 29, 5, 252, 181, 231, 131, 97, 223, 199, + 163, 184, 168, 249, 32, 124, 86, 144, 105, 123, 217, 235, 87, 178, 90, 235, 60, 23, 187, 104, + 178, 210, 64, 144, 246, 152, 234, 99, 241, 144, 198, 12, 185, 100, 174, 47, 15, 48, 59, 69, + 154, 216, 254, 254, 241, 3, 189, 238, 73, 247, 245, 130, 115, 12, 25, 140, 57, 216, 196, 169, + 60, 89, 140, 128, 230, 186, 123, 238, 150, 73, 79, 218, 106, 229, 244, 225, 193, 76, 130, 173, + 67, 207, 237, 77, 156, 15, 122, 212, 4, 6, 48, 222, 231, 174, 199, 140, 190, 155, 96, 1, 69, + 101, 240, 60, 115, 155, 176, 195, 113, 173, 48, 54, 244, 65, 155, 52, 78, 25, 171, 23, 220, + 119, 241, 88, 180, 65, 92, 56, 247, 236, 47, 194, 139, 153, 147, 136, 79, 217, 203, 182, 226, + 87, 213, 208, 255, 253, 30, 54, 243, 136, 126, 255, 176, 40, 255, 154, 180, 145, 255, 19, 47, + 5, 39, 233, 206, 131, 86, 254, 180, 28, 2, 44, 165, 106, 114, 19, 181, 120, 27, 247, 145, 243, + 247, 26, 53, 14, 80, 76, 141, 161, 58, 231, 61, 6, 5, 48, 54, 77, 8, 204, 3, 177, 198, 116, + 120, 12, 252, 15, 175, 22, 143, 8, 174, 61, 72, 82, 202, 10, 117, 192, 6, 73, 15, 11, 161, 199, + 68, 138, 168, 10, 124, 131, 32, 197, 127, 153, 167, 176, 226, 211, 40, 5, 118, 224, 197, 107, + 194, 226, 248, 153, 166, 202, 183, 55, 204, 91, 51, 155, 114, 234, 95, 115, 103, 128, 5, 84, + 222, 7, 29, 114, 90, 228, 59, 26, 137, 251, 170, 213, 43, 23, 129, 219, 100, 220, 66, 251, 186, + 109, 189, 117, 219, 88, 224, 48, 240, 209, 59, 233, 78, 83, 66, 86, 9, 197, 221, 9, 250, 42, + 174, 11, 206, 144, 45, 108, 60, 115, 219, 7, 169, 176, 106, 174, 247, 199, 176, 152, 80, 215, + 133, 213, 169, 131, 248, 13, 110, 224, 134, 160, 102, 4, 242, 207, 173, 199, 255, 118, 4, 189, + 3, 118, 249, 189, 16, 138, 242, 43, 210, 57, 166, 76, 188, 7, 111, 93, 207, 7, 66, 72, 228, + 229, 101, 32, 143, 99, 119, 148, 159, 0, 192, 146, 232, 177, 243, 202, 165, 159, 193, 186, 243, + 65, 89, 105, 49, 154, 79, 19, 52, 154, 251, 64, 252, 148, 122, 189, 167, 59, 211, 249, 93, 129, + 245, 167, 67, 159, 91, 246, 146, 192, 60, 90, 136, 134, 184, 151, 147, 185, 89, 129, 197, 139, + 100, 246, 98, 97, 175, 152, 251, 105, 52, 98, 104, 172, 71, 238, 15, 228, 112, 56, 20, 228, 45, + 203, 68, 182, 115, 58, 209, 139, 20, 45, 219, 133, 62, 44, 81, 17, 94, 177, 63, 147, 22, 72, + 157, 182, 193, 208, 78, 194, 170, 164, 215, 48, 184, 180, 209, 95, 71, 182, 253, 150, 31, 17, + 240, 19, 81, 30, 190, 248, 83, 54, 31, 73, 238, 188, 90, 111, 254, 185, 138, 93, 81, 25, 10, + 136, 80, 208, 234, 63, 189, 26, 9, 158, 147, 182, 163, 68, 175, 91, 185, 139, 122, 223, 125, + 61, 75, 56, 46, 171, 245, 133, 95, 85, 147, 51, 87, 255, 191, 75, 28, 201, 63, 128, 152, 60, + 193, 5, 233, 113, 1, 152, 196, 236, 245, 24, 248, 148, 142, 231, 61, 0, 185, 136, 70, 0, 231, + 207, 220, 246, 16, 59, 124, 136, 244, 92, 
149, 188, 140, 204, 100, 142, 154, 156, 42, 31, 6, + 189, 149, 191, 213, 36, 162, 190, 116, 122, 71, 22, 8, 247, 104, 133, 116, 241, 29, 159, 19, + 122, 31, 158, 230, 243, 169, 224, 103, 248, 68, 7, 119, 86, 87, 131, 61, 165, 101, 75, 76, 75, + 227, 149, 72, 75, 128, 209, 174, 44, 226, 152, 189, 244, 192, 184, 165, 186, 228, 233, 141, 44, + 94, 90, 245, 12, 8, 61, 134, 104, 217, 227, 136, 127, 28, 177, 111, 3, 43, 152, 213, 215, 21, + 79, 115, 188, 14, 77, 187, 178, 110, 215, 22, 73, 32, 133, 148, 95, 40, 226, 226, 77, 120, 167, + 24, 46, 223, 52, 33, 119, 244, 112, 163, 38, 190, 142, 188, 200, 53, 189, 152, 24, 83, 56, 159, + 125, 224, 86, 219, 168, 233, 247, 218, 67, 61, 243, 62, 140, 30, 106, 50, 206, 103, 22, 49, + 203, 247, 102, 45, 238, 132, 8, 176, 217, 239, 109, 109, 243, 90, 91, 217, 196, 248, 163, 124, + 77, 14, 253, 19, 165, 203, 63, 111, 204, 201, 241, 102, 53, 46, 55, 48, 138, 12, 140, 165, 227, + 78, 34, 47, 244, 139, 26, 11, 207, 231, 53, 165, 63, 152, 175, 128, 255, 121, 191, 44, 26, 170, + 165, 129, 86, 196, 184, 48, 20, 85, 13, 187, 94, 18, 180, 222, 10, 130, 89, 161, 131, 56, 49, + 253, 92, 168, 227, 70, 251, 248, 169, 149, 245, 33, 176, 164, 80, 13, 247, 101, 157, 205, 128, + 32, 238, 192, 238, 101, 55, 172, 205, 138, 200, 83, 206, 200, 132, 166, 140, 70, 238, 217, 197, + 214, 193, 110, 171, 18, 123, 236, 143, 162, 171, 250, 207, 244, 63, 134, 17, 211, 140, 104, + 155, 131, 226, 177, 166, 114, 11, 128, 234, 171, 17, 78, 53, 242, 117, 42, 134, 198, 10, 237, + 136, 212, 116, 20, 27, 195, 41, 71, 127, 76, 192, 130, 135, 88, 26, 179, 210, 64, 77, 214, 107, + 244, 244, 15, 187, 135, 89, 16, 220, 12, 58, 215, 170, 102, 46, 243, 244, 85, 165, 211, 83, + 113, 96, 50, 186, 27, 126, 192, 219, 194, 22, 208, 173, 47, 94, 228, 185, 252, 112, 147, 3, 73, + 121, 119, 255, 211, 245, 223, 153, 16, 116, 11, 63, 164, 186, 74, 87, 85, 201, 137, 122, 198, + 235, 61, 127, 193, 175, 183, 172, 251, 80, 153, 81, 8, 237, 4, 221, 25, 234, 193, 100, 198, 14, + 48, 222, 195, 173, 197, 99, 116, 101, 121, 197, 209, 81, 123, 142, 26, 176, 11, 135, 22, 142, + 14, 32, 111, 226, 76, 191, 25, 143, 149, 102, 113, 165, 139, 31, 81, 195, 214, 114, 64, 233, + 144, 246, 142, 223, 125, 169, 198, 42, 220, 251, 63, 136, 223, 198, 193, 231, 171, 210, 44, 66, + 208, 121, 220, 135, 233, 8, 90, 128, 176, 14, 12, 137, 150, 210, 156, 189, 248, 38, 123, 108, + 176, 202, 151, 247, 57, 236, 8, 136, 7, 202, 233, 172, 43, 144, 210, 6, 237, 120, 18, 40, 224, + 61, 134, 241, 86, 206, 213, 232, 242, 218, 175, 166, 155, 24, 63, 149, 245, 232, 201, 49, 92, + 90, 73, 196, 177, 11, 123, 113, 46, 105, 164, 239, 69, 132, 185, 35, 3, 20, 180, 10, 4, 51, 10, + 105, 180, 152, 76, 232, 165, 182, 170, 229, 230, 240, 22, 50, 62, 17, 20, 97, 30, 224, 66, 227, + 82, 119, 105, 124, 247, 181, 170, 192, 203, 146, 168, 20, 161, 35, 231, 205, 34, 123, 134, 21, + 76, 212, 174, 53, 108, 162, 144, 141, 112, 107, 124, 152, 153, 119, 143, 243, 79, 45, 75, 59, + 54, 54, 163, 129, 43, 212, 82, 103, 181, 63, 82, 60, 147, 15, 152, 86, 20, 21, 134, 169, 138, + 205, 114, 147, 142, 149, 34, 50, 43, 177, 250, 211, 36, 232, 113, 83, 234, 117, 229, 230, 191, + 214, 62, 32, 66, 94, 120, 121, 237, 181, 189, 225, 64, 74, 230, 60, 31, 230, 84, 191, 60, 66, + 101, 190, 111, 209, 28, 201, 184, 150, 79, 83, 148, 68, 202, 199, 198, 244, 194, 246, 183, 243, + 192, 136, 98, 33, 221, 101, 80, 86, 243, 131, 196, 138, 98, 6, 13, 206, 26, 137, 83, 31, 224, + 214, 139, 78, 52, 116, 107, 123, 240, 68, 246, 48, 81, 209, 81, 117, 108, 251, 109, 82, 35, + 181, 203, 124, 90, 197, 
143, 107, 32, 241, 51, 184, 148, 195, 228, 177, 63, 77, 235, 122, 130, + 115, 98, 168, 146, 225, 64, 48, 112, 59, 248, 88, 153, 67, 53, 56, 152, 202, 154, 127, 0, 81, + 4, 242, 156, 18, 216, 210, 30, 102, 205, 114, 98, 139, 225, 60, 241, 214, 235, 39, 252, 134, + 237, 211, 235, 165, 30, 8, 115, 177, 110, 147, 91, 48, 230, 71, 153, 6, 77, 228, 192, 185, 30, + 217, 153, 194, 77, 123, 189, 241, 52, 66, 157, 175, 228, 189, 218, 161, 161, 8, 157, 13, 241, + 235, 172, 192, 67, 9, 219, 3, 16, 73, 117, 47, 233, 65, 20, 72, 74, 129, 175, 61, 157, 56, 20, + 104, 17, 27, 225, 169, 166, 220, 150, 89, 100, 233, 65, 75, 182, 91, 69, 89, 178, 58, 65, 10, + 28, 25, 168, 120, 43, 160, 31, 121, 20, 180, 137, 114, 116, 209, 112, 3, 7, 119, 16, 204, 29, + 177, 167, 124, 0, 98, 93, 0, 160, 251, 219, 192, 32, 102, 216, 146, 135, 205, 82, 189, 65, 169, + 164, 166, 16, 54, 206, 65, 245, 236, 80, 238, 170, 94, 115, 135, 231, 76, 16, 207, 66, 34, 121, + 97, 57, 152, 219, 61, 19, 30, 181, 218, 243, 83, 72, 77, 24, 172, 168, 147, 62, 238, 35, 207, + 210, 105, 44, 172, 152, 115, 165, 110, 238, 99, 120, 101, 158, 250, 139, 155, 156, 119, 225, + 35, 228, 157, 209, 67, 56, 133, 136, 23, 2, 179, 43, 18, 79, 43, 77, 233, 216, 206, 108, 162, + 124, 243, 72, 113, 152, 208, 151, 138, 40, 159, 20, 153, 155, 179, 82, 80, 127, 13, 66, 123, + 221, 134, 166, 189, 2, 218, 230, 126, 126, 120, 98, 129, 153, 89, 14, 169, 57, 143, 127, 59, + 148, 34, 173, 24, 238, 117, 196, 78, 168, 30, 200, 50, 70, 106, 68, 60, 199, 77, 255, 230, 35, + 136, 222, 107, 223, 9, 117, 13, 85, 109, 12, 148, 175, 167, 246, 150, 49, 147, 64, 141, 215, + 67, 9, 200, 10, 169, 205, 14, 237, 14, 151, 229, 243, 135, 204, 106, 193, 132, 94, 26, 13, 253, + 147, 121, 212, 227, 54, 177, 216, 139, 117, 70, 33, 37, 153, 3, 31, 106, 15, 59, 142, 51, 158, + 31, 167, 197, 248, 7, 24, 49, 123, 200, 57, 2, 165, 236, 175, 24, 132, 224, 148, 140, 178, 160, + 86, 17, 206, 44, 76, 248, 97, 74, 90, 126, 166, 136, 179, 255, 250, 242, 184, 212, 104, 82, 8, + 149, 88, 25, 40, 198, 52, 122, 245, 154, 122, 88, 74, 173, 195, 65, 246, 168, 63, 217, 176, + 218, 62, 170, 243, 152, 117, 175, 254, 216, 202, 123, 17, 11, 54, 95, 34, 168, 230, 53, 248, + 162, 202, 116, 126, 226, 122, 76, 215, 146, 95, 205, 146, 156, 62, 154, 242, 56, 213, 113, 213, + 28, 214, 124, 182, 168, 161, 119, 89, 92, 23, 96, 103, 200, 14, 14, 178, 117, 140, 75, 3, 82, + 99, 78, 164, 55, 185, 150, 205, 48, 85, 193, 252, 217, 142, 132, 212, 205, 93, 53, 195, 129, 4, + 25, 102, 125, 73, 248, 2, 89, 139, 199, 23, 172, 56, 125, 56, 242, 19, 116, 88, 66, 108, 226, + 124, 16, 35, 108, 72, 4, 26, 213, 108, 215, 92, 100, 237, 99, 212, 105, 89, 155, 45, 158, 5, + 96, 72, 244, 18, 159, 93, 129, 132, 189, 64, 214, 246, 13, 85, 167, 206, 169, 74, 80, 230, 178, + 170, 246, 139, 248, 33, 199, 243, 69, 32, 101, 244, 131, 233, 213, 253, 1, 53, 183, 116, 130, + 192, 97, 79, 132, 110, 72, 123, 182, 218, 197, 118, 98, 65, 211, 129, 79, 17, 99, 244, 9, 161, + 67, 198, 34, 137, 151, 91, 151, 66, 13, 46, 69, 77, 47, 235, 248, 154, 201, 133, 141, 187, 183, + 201, 90, 35, 169, 110, 86, 163, 16, 74, 119, 199, 6, 115, 175, 219, 52, 32, 131, 96, 44, 241, + 163, 91, 65, 147, 138, 111, 16, 39, 188, 97, 184, 228, 83, 181, 242, 184, 103, 31, 101, 111, + 73, 113, 64, 66, 83, 103, 180, 93, 25, 30, 74, 47, 33, 1, 71, 224, 57, 4, 38, 54, 154, 16, 185, + 192, 52, 119, 189, 177, 0, 113, 90, 41, 83, 179, 32, 83, 177, 26, 88, 183, 209, 207, 165, 24, + 110, 44, 40, 144, 235, 238, 69, 47, 237, 216, 194, 17, 171, 75, 222, 97, 235, 88, 161, 52, 31, + 142, 178, 
+];
diff --git a/ng-net/src/tests/mod.rs b/ng-net/src/tests/mod.rs
new file mode 100644
index 0000000..dbdc09f
--- /dev/null
+++ b/ng-net/src/tests/mod.rs
@@ -0,0 +1,2 @@
+#[doc(hidden)]
+pub mod file;
diff --git a/ng-net/src/types.rs b/ng-net/src/types.rs
new file mode 100644
index 0000000..bc12e72
--- /dev/null
+++ b/ng-net/src/types.rs
@@ -0,0 +1,5294 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+//! NextGraph network protocol types
+//!
+//! Corresponds to the BARE schema
+
+use core::fmt;
+use std::collections::HashSet;
+use std::sync::Arc;
+use std::{
+    any::{Any, TypeId},
+    net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr},
+};
+
+use serde::{Deserialize, Serialize};
+use web_time::SystemTime;
+use zeroize::{Zeroize, ZeroizeOnDrop};
+
+use ng_repo::errors::*;
+use ng_repo::log::*;
+use ng_repo::store::Store;
+use ng_repo::types::*;
+use ng_repo::utils::{
+    random_key, sign, verify, decode_digest, decode_key, decode_sym_key, decode_priv_key,
+    decode_overlayid,
+};
+
+use crate::app_protocol::*;
+use crate::utils::{
+    get_domain_without_port_443, is_ipv4_private, is_ipv6_private, is_private_ip, is_public_ip,
+    is_public_ipv4, is_public_ipv6, decode_locator,
+};
+use crate::WS_PORT_ALTERNATE;
+use crate::{actor::EActor, actors::admin::*, actors::*};
+
+use lazy_static::lazy_static;
+use regex::Regex;
+
+lazy_static! {
+    #[doc(hidden)]
+    pub(crate) static ref RE_FILE_READ_CAP: Regex =
+        Regex::new(r"^did:ng:j:([A-Za-z0-9-_]{44}):k:([A-Za-z0-9-_]{44})$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_REPO_O: Regex =
+        Regex::new(r"^did:ng:o:([A-Za-z0-9-_]{44})$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_REPO: Regex =
+        Regex::new(r"^did:ng:o:([A-Za-z0-9-_]{44}):v:([A-Za-z0-9-_]{44})$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_BRANCH: Regex =
+        Regex::new(r"^did:ng:o:([A-Za-z0-9-_]{44}):v:([A-Za-z0-9-_]{44}):b:([A-Za-z0-9-_]{44})$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_NAMED_BRANCH_OR_COMMIT: Regex =
+        Regex::new(r"^did:ng:o:([A-Za-z0-9-_]{44}):v:([A-Za-z0-9-_]{44}):a:([A-Za-z0-9-_%]*)$").unwrap(); // TODO: allow international chars; disallow a digit as first char
+    #[doc(hidden)]
+    pub(crate) static ref RE_OBJECTS: Regex =
+        Regex::new(r"^did:ng(?::o:([A-Za-z0-9-_]{44}))?:v:([A-Za-z0-9-_]{44})((?::[cj]:[A-Za-z0-9-_]{44}:k:[A-Za-z0-9-_]{44})+)(?::s:([A-Za-z0-9-_]{44}):k:([A-Za-z0-9-_]{44}))?:l:([A-Za-z0-9-_]*)$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_OBJECT_READ_CAPS: Regex =
+        Regex::new(r":[cj]:([A-Za-z0-9-_]{44}):k:([A-Za-z0-9-_]{44})").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_FROM_PUBLIC_PROFILE_INBOX: Regex =
+        Regex::new(r"^did:ng:a:([A-Za-z0-9-_]{44}):p:([A-Za-z0-9-_]{44})$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_COMMIT: Regex =
+        Regex::new(r"^did:ng:o:([A-Za-z0-9-_]{44}):c:([A-Za-z0-9-_]{44}):k:([A-Za-z0-9-_]{44})$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_INBOX_OVERLAY: Regex =
+        Regex::new(r"^did:ng:d:([A-Za-z0-9-_]{44}):v:([A-Za-z0-9-_]{44})(:l:([A-Za-z0-9-_]*))?$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_INBOX: Regex =
+        Regex::new(r"^did:ng:d:([A-Za-z0-9-_]{44})$").unwrap();
+    #[doc(hidden)]
+    pub(crate) static ref RE_PROFILE: Regex =
+        Regex::new(r"^did:ng:[ab]:([A-Za-z0-9-_]{44})$").unwrap();
+}
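+
+// Sketch (hypothetical test added for illustration; the ids below are dummy
+// 44-character base64url strings, not real keys): how the `did:ng` patterns
+// above match and capture the identifiers of a repo DID.
+#[cfg(test)]
+mod did_regex_examples {
+    use super::*;
+
+    #[test]
+    fn repo_did_is_matched_and_captured() {
+        let o = "A".repeat(44); // dummy first identifier
+        let v = "B".repeat(44); // dummy second identifier
+        let did = format!("did:ng:o:{}:v:{}", o, v);
+        assert!(RE_REPO.is_match(&did));
+        let caps = RE_REPO.captures(&did).unwrap();
+        assert_eq!(&caps[1], o.as_str());
+        assert_eq!(&caps[2], v.as_str());
+        // the shorter `did:ng:o:` form is matched by RE_REPO_O
+        assert!(RE_REPO_O.is_match(&format!("did:ng:o:{}", o)));
+    }
+}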
+
+/// Used to initiate a session at a local broker (V0)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Credentials {
+    pub user_key: PrivKey,
+    pub read_cap: ReadCap,
+    pub private_store: RepoId,
+    pub protected_store: RepoId,
+    pub public_store: RepoId,
+    pub user_master_key: SymKey,
+    pub peer_priv_key: PrivKey,
+}
+
+impl Credentials {
+    pub fn new_partial(user_priv_key: &PrivKey) -> Self {
+        Credentials {
+            user_key: user_priv_key.clone(),
+            read_cap: ReadCap::nil(),
+            private_store: RepoId::nil(),
+            protected_store: RepoId::nil(),
+            public_store: RepoId::nil(),
+            user_master_key: SymKey::random(),
+            peer_priv_key: PrivKey::random_ed(),
+        }
+    }
+}
+
+//
+// Network common types
+//
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+pub enum InterfaceType {
+    Loopback,
+    Private,
+    Public,
+    Invalid,
+}
+
+impl InterfaceType {
+    pub fn is_ip_valid_for_type(&self, ip: &IP) -> bool {
+        self.is_ipaddr_valid_for_type(&ip.into())
+    }
+
+    pub fn is_ipaddr_valid_for_type(&self, ip: &IpAddr) -> bool {
+        match ip {
+            IpAddr::V4(v4) => self.is_ipv4_valid_for_type(v4),
+            IpAddr::V6(v6) => self.is_ipv6_valid_for_type(v6),
+        }
+    }
+
+    pub fn is_ipv4_valid_for_type(&self, ip: &Ipv4Addr) -> bool {
+        match self {
+            InterfaceType::Loopback => ip.is_loopback(),
+            InterfaceType::Public => is_public_ipv4(ip),
+            // we allow binding to link-local addresses for IPv4
+            InterfaceType::Private => is_ipv4_private(ip),
+            _ => false,
+        }
+    }
+
+    pub fn is_ipv6_valid_for_type(&self, ip: &Ipv6Addr) -> bool {
+        match self {
+            InterfaceType::Loopback => ip.is_loopback(),
+            InterfaceType::Public => is_public_ipv6(ip),
+            // we do NOT allow binding to link-local addresses for IPv6
+            InterfaceType::Private => is_ipv6_private(ip),
+            _ => false,
+        }
+    }
+}
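+
+// Sketch (hypothetical test added for illustration): the classification rules
+// above, exercised with arbitrary example addresses.
+#[cfg(test)]
+mod interface_type_examples {
+    use super::*;
+    use std::net::Ipv4Addr;
+
+    #[test]
+    fn ipv4_validity_per_interface_type() {
+        // loopback addresses are only valid for a Loopback interface
+        assert!(InterfaceType::Loopback.is_ipv4_valid_for_type(&Ipv4Addr::LOCALHOST));
+        // a private LAN address is not valid for a Loopback interface
+        assert!(!InterfaceType::Loopback.is_ipv4_valid_for_type(&Ipv4Addr::new(192, 168, 1, 10)));
+        // Invalid never accepts any address
+        assert!(!InterfaceType::Invalid.is_ipv4_valid_for_type(&Ipv4Addr::LOCALHOST));
+    }
+}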
=> is_public_ipv6(ip),
+            // we do NOT allow binding to link-local for IPv6
+            InterfaceType::Private => is_ipv6_private(ip),
+            _ => false,
+        }
+    }
+}
+
+#[cfg(not(target_arch = "wasm32"))]
+#[derive(Clone, Debug)]
+pub struct Interface {
+    pub if_type: InterfaceType,
+    pub name: String,
+    pub mac_addr: Option<MacAddr>,
+    /// List of Ipv4Net for the network interface
+    pub ipv4: Vec<Ipv4Net>,
+    /// List of Ipv6Net for the network interface
+    pub ipv6: Vec<Ipv6Net>,
+}
+
+/// Bind address
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub struct BindAddress {
+    pub port: u16,
+    pub ip: IP,
+}
+
+impl BindAddress {
+    pub fn to_ws_url(&self) -> String {
+        format!(
+            "ws://{}:{}",
+            self.ip,
+            if self.port == 0 { 80 } else { self.port }
+        )
+    }
+    pub fn new_localhost_with_port(port: u16) -> Self {
+        BindAddress {
+            ip: LOOPBACK_IPV4.clone(),
+            port,
+        }
+    }
+}
+
+impl From<&SocketAddr> for BindAddress {
+    #[inline]
+    fn from(addr: &SocketAddr) -> BindAddress {
+        let ip_addr = addr.ip();
+        let ip = IP::try_from(&ip_addr).unwrap();
+        let port = addr.port();
+        BindAddress { ip, port }
+    }
+}
+
+//
+// BROKER common types
+//
+
+/// Core Broker connection details Version 0
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub struct BrokerCoreV0 {
+    /// peerId of the server
+    pub peer_id: PubKey,
+
+    /// Network addresses of the broker, typically an IPv4 and an optional IPv6 address. A core broker should not be multi-homed.
+    pub addrs: Vec<BindAddress>,
+}
+
+/// Core Broker connection details
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Hash)]
+pub enum BrokerCore {
+    V0(BrokerCoreV0),
+}
+
+/// BrokerServerTypeV0 type
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub enum BrokerServerTypeV0 {
+    Localhost(u16), // optional port number
+    BoxPrivate(Vec<BindAddress>),
+    Public(Vec<BindAddress>),
+    BoxPublicDyn(Vec<BindAddress>), // can be empty
+    Domain(String), // accepts an optional trailing ":port" number
+    //Core(Vec<BindAddress>),
+}
+
+/// BrokerServer details Version 0
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub struct BrokerServerV0 {
+    /// Network addresses
+    pub server_type: BrokerServerTypeV0,
+
+    /// is this server capable of running a verifier
+    pub can_verify: bool,
+
+    /// is this server capable of forwarding client connections to another broker
+    pub can_forward: bool,
+
+    /// peerId of the server
+    pub peer_id: PubKey,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub struct BrokerServerContentV0 {
+    pub servers: Vec<BrokerServerTypeV0>,
+
+    pub version: u32,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub struct BrokerServer {
+    pub content: BrokerServerContentV0,
+
+    /// peerId of the server
+    pub peer_id: PubKey,
+
+    /// optional signature over content by peer_id
+    pub sig: Option<Sig>,
+}
+
+pub type LocatorV0 = Vec<BrokerServer>;
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub enum Locator {
+    V0(LocatorV0),
+}
+
+impl fmt::Display for Locator {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let ser = serde_bare::to_vec(&self).unwrap();
+        write!(f, "{}", base64_url::encode(&ser))
+    }
+}
+
+impl Locator {
+    pub fn empty() -> Self {
+        Self::V0(vec![])
+    }
+    pub fn first_broker_server(&self) -> Result<BrokerServerV0, NgError> {
+        match self {
+            Self::V0(v0) => {
+                let bs = v0.get(0).ok_or(NgError::BrokerNotFound)?;
+                Ok(BrokerServerV0 {
+                    server_type: bs
+                        .content
+                        .servers
+                        .get(0)
+                        .ok_or(NgError::BrokerNotFound)?
+                        .clone(),
+                    can_verify: false,
+                    can_forward: false,
+                    peer_id: bs.peer_id,
+                })
+            }
+        }
+    }
+    pub fn add(&mut self, bs: BrokerServerV0) {
+        match self {
+            Self::V0(v0) => {
+                for b in v0.iter_mut() {
+                    if b.peer_id == bs.peer_id {
+                        b.content.servers.push(bs.server_type);
+                        return;
+                    }
+                }
+                v0.push(BrokerServer {
+                    peer_id: bs.peer_id,
+                    sig: None,
+                    content: BrokerServerContentV0 {
+                        version: 0,
+                        servers: vec![bs.server_type],
+                    },
+                });
+            }
+        }
+    }
+}
+
+impl TryFrom<&str> for Locator {
+    type Error = NgError;
+    fn try_from(string: &str) -> Result<Self, Self::Error> {
+        let vec = base64_url::decode(string).map_err(|_| NgError::InvalidKey)?;
+        Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
+    }
+}
+
+impl From<BrokerServerV0> for Locator {
+    fn from(bs: BrokerServerV0) -> Self {
+        Locator::V0(vec![BrokerServer {
+            peer_id: bs.peer_id,
+            content: BrokerServerContentV0 {
+                version: 0,
+                servers: vec![bs.server_type],
+            },
+            sig: None,
+        }])
+    }
+}
+
+#[doc(hidden)]
+pub const APP_ACCOUNT_REGISTERED_SUFFIX: &str = "/#/user/registered";
+
+#[doc(hidden)]
+pub const NG_NET_URL: &str = "https://nextgraph.net";
+
+#[doc(hidden)]
+pub const NG_APP_URL: &str = "https://nextgraph.app";
+
+#[doc(hidden)]
+pub const APP_NG_WS_URL: &str = "wss://nextgraph.app";
+
+#[allow(dead_code)]
+fn api_dyn_peer_url(peer_id: &PubKey) -> String {
+    format!("https://nextgraph.net/api/v1/dynpeer/{}", peer_id)
+}
+
+#[doc(hidden)]
+pub const LOCAL_HOSTS: [&str; 3] = ["localhost", "127.0.0.1", "[::1]"];
+
+fn local_ws_url(port: &u16) -> String {
+    format!("ws://localhost:{}", if *port == 0 { 80 } else { *port })
+}
+#[doc(hidden)]
+pub(crate) fn local_http_url(port: &u16) -> String {
+    format!("http://localhost:{}", if *port == 0 { 80 } else { *port })
+}
+
+#[doc(hidden)]
+pub const LOCAL_URLS: [&str; 3] = ["http://localhost", "http://127.0.0.1", "http://[::1]"];
+use url::{Host, Url};
+
+impl BrokerServerTypeV0 {
+    pub fn find_first_ipv4(&self) -> Option<&BindAddress> {
+        match self {
+            Self::BoxPrivate(addrs) => {
+                for addr in addrs {
+                    if addr.ip.is_v4() {
+                        return Some(addr);
+                    }
+                }
+                return None;
+            }
+            _ => None,
+        }
+    }
+    pub fn find_first_ipv6(&self) -> Option<&BindAddress> {
+        match self {
+            Self::BoxPrivate(addrs) => {
+                for addr in addrs {
+                    if addr.ip.is_v6() {
+                        return Some(addr);
+                    }
+                }
+                return None;
+            }
+            _ => None,
+        }
+    }
+}
+
+impl BrokerServerV0 {
+    pub fn new_localhost(peer_id: PubKey) -> Self {
+        BrokerServerV0 {
+            server_type: BrokerServerTypeV0::Localhost(WS_PORT_ALTERNATE[0]),
+            can_verify: false,
+            can_forward: true,
+            peer_id,
+        }
+    }
+
+    fn first_ipv4(&self) -> Option<(String, Vec<BindAddress>)> {
+        self.server_type.find_first_ipv4().map_or(None, |bindaddr| {
+            Some((format!("ws://{}:{}", bindaddr.ip, bindaddr.port), vec![]))
+        })
+    }
+
+    fn first_ipv6(&self) -> Option<(String, Vec<BindAddress>)> {
+        self.server_type.find_first_ipv6().map_or(None, |bindaddr| {
+            Some((format!("ws://{}:{}", bindaddr.ip, bindaddr.port), vec![]))
+        })
+    }
+
+    pub fn first_ipv4_http(&self) -> Option<String> {
+        self.server_type.find_first_ipv4().map_or(None, |bindaddr| {
+            Some(format!("http://{}:{}", bindaddr.ip, bindaddr.port))
+        })
+    }
+
+    pub fn first_ipv6_http(&self) -> Option<String> {
+        self.server_type.find_first_ipv6().map_or(None, |bindaddr| {
+            Some(format!("http://{}:{}", bindaddr.ip, bindaddr.port))
+        })
+    }
+
+    fn first_ipv6_or_ipv4(
+        ipv4: bool,
+        ipv6: bool,
+        addrs: &Vec<BindAddress>,
+    ) -> Option<&BindAddress> {
+        if ipv6 {
+            for addr in addrs {
+                if addr.ip.is_v6() {
+                    return Some(addr);
+                }
+            }
+        }
+        if ipv4 {
+            for addr in addrs {
+                if
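A sketch of the `Locator` string round-trip implied by the `Display` and `TryFrom<&str>` impls above, assuming `ng-net` and `ng-repo` as dependencies; `PubKey::nil()` is used purely as a placeholder peer id:

```rust
use ng_net::types::{BrokerServerV0, Locator};
use ng_repo::errors::NgError;
use ng_repo::types::PubKey;

fn main() -> Result<(), NgError> {
    let bs = BrokerServerV0::new_localhost(PubKey::nil());
    let mut locator = Locator::empty();
    locator.add(bs);

    // Display serializes with serde_bare, then base64url-encodes.
    let encoded = locator.to_string();
    // TryFrom<&str> reverses both steps.
    let decoded = Locator::try_from(encoded.as_str())?;
    // first_broker_server() returns the first server entry, with
    // can_verify/can_forward reset to false.
    let first = decoded.first_broker_server()?;
    println!("{:?}", first.peer_id);
    Ok(())
}
```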
addr.ip.is_v4() { + return Some(addr); + } + } + } + return None; + } + + fn ng_app_bootstrap_url(addr: &BindAddress, key: PubKey) -> Option { + let payload = (addr, key); + let payload_ser = serde_bare::to_vec(&payload).ok(); + if payload_ser.is_none() { + return None; + } + Some(format!( + "{}?b={}", + NG_APP_URL, + base64_url::encode(&payload_ser.unwrap()) + )) + } + + fn ng_app_bootstrap_url_with_first_ipv6_or_ipv4( + ipv4: bool, + ipv6: bool, + addrs: &Vec, + key: PubKey, + ) -> Option { + if let Some(addr) = Self::first_ipv6_or_ipv4(ipv4, ipv6, addrs) { + return Self::ng_app_bootstrap_url(addr, key); + } + None + } + + /// set ipv6 only if the browser connected with a remote IPV6. always set ipv4 as a fallback (for now). + pub async fn get_url_for_ngnet(&self, ipv4: bool, ipv6: bool) -> Option { + match &self.server_type { + BrokerServerTypeV0::Public(addrs) => { + Self::ng_app_bootstrap_url_with_first_ipv6_or_ipv4( + ipv4, + ipv6, + addrs, + self.peer_id, + ) + } + BrokerServerTypeV0::BoxPublicDyn(addrs) => { + // let resp = reqwest::get(api_dyn_peer_url(&self.peer_id)).await; + // if resp.is_ok() { + // let resp = resp.unwrap().json::>().await; + // if resp.is_ok() { + // return Self::ng_app_bootstrap_url_with_first_ipv6_or_ipv4( + // ipv4, + // ipv6, + // &resp.unwrap(), + // self.peer_id, + // ); + // } + // } + if addrs.len() > 0 { + Self::ng_app_bootstrap_url_with_first_ipv6_or_ipv4( + ipv4, + ipv6, + &addrs, + self.peer_id, + ) + } else { + None + } + } + BrokerServerTypeV0::Domain(domain) => Some(format!("https://{}", domain)), + BrokerServerTypeV0::Localhost(port) => Some(local_http_url(&port)), + BrokerServerTypeV0::BoxPrivate(_) => { + if ipv6 { + let v6 = self.server_type.find_first_ipv6().map_or(None, |bindaddr| { + Some(format!("http://{}:{}", bindaddr.ip, bindaddr.port)) + }); + if v6.is_some() { + return v6; + } + } + if ipv4 { + self.server_type.find_first_ipv4().map_or(None, |bindaddr| { + Some(format!("http://{}:{}", bindaddr.ip, bindaddr.port)) + }) + } else { + None + } + } + } + } + + pub fn is_public_server(&self) -> bool { + match &self.server_type { + BrokerServerTypeV0::Localhost(_) => false, + BrokerServerTypeV0::BoxPrivate(_) => false, + BrokerServerTypeV0::Public(_) => true, + BrokerServerTypeV0::BoxPublicDyn(_) => true, + BrokerServerTypeV0::Domain(_) => true, + } + } + + pub fn get_domain(&self) -> Option { + if let BrokerServerTypeV0::Domain(domain) = &self.server_type { + Some(domain.clone()) + } else { + None + } + } + + /// on web browser, returns the connection URL and an optional list of BindAddress if a relay is needed + /// filtered by the current location url of the webpage + /// on native apps (do not pass a location), returns or the connection URL without optional BindAddress or an empty string with + /// several BindAddresses to try to connect to with .to_ws_url() + pub async fn get_ws_url( + &self, + location: &Option, + ) -> Option<(String, Vec)> { + if location.is_some() { + let location = location.as_ref().unwrap(); + if location.starts_with(NG_APP_URL) { + match &self.server_type { + BrokerServerTypeV0::Public(addrs) => { + Some((APP_NG_WS_URL.to_string(), addrs.clone())) + } + BrokerServerTypeV0::BoxPublicDyn(addrs) => { + // let resp = reqwest::get(api_dyn_peer_url(&self.peer_id)).await; + // if resp.is_ok() { + // let resp = resp.unwrap().json::>().await; + // if resp.is_ok() { + // return Some((APP_NG_WS_URL.to_string(), resp.unwrap())); + // } + // } + if addrs.len() > 0 { + Some((APP_NG_WS_URL.to_string(), addrs.clone())) + } else 
{ + None + } + } + _ => None, + } + } else if let BrokerServerTypeV0::Domain(domain) = &self.server_type { + let url = format!("https://{}", domain); + if location.starts_with(&url) { + let wss_url = format!("wss://{}", domain); + Some((wss_url, vec![])) + } else { + None + } + } else { + // localhost + if location.starts_with(LOCAL_URLS[0]) + || location.starts_with(LOCAL_URLS[1]) + || location.starts_with(LOCAL_URLS[2]) + { + if let BrokerServerTypeV0::Localhost(port) = self.server_type { + Some((local_ws_url(&port), vec![])) + } else { + None + } + } + // a private address + else if location.starts_with("http://") { + let url = Url::parse(&location).unwrap(); + match url.host() { + Some(Host::Ipv4(ip)) => { + if is_ipv4_private(&ip) { + self.first_ipv4() + } else { + None + } + } + Some(Host::Ipv6(ip)) => { + if is_ipv6_private(&ip) { + self.first_ipv6() + } else { + None + } + } + _ => None, + } + } else { + None + } + } + } else { + // From native / tauri app + match &self.server_type { + //BrokerServerTypeV0::Core(_) => None, + BrokerServerTypeV0::Localhost(port) => Some((local_ws_url(port), vec![])), + BrokerServerTypeV0::BoxPrivate(addrs) => Some((String::new(), addrs.clone())), + BrokerServerTypeV0::Public(addrs) => Some((String::new(), addrs.clone())), + BrokerServerTypeV0::BoxPublicDyn(addrs) => { + // let resp = reqwest::get(api_dyn_peer_url(&self.peer_id)).await; + // if resp.is_ok() { + // let resp = resp.unwrap().json::>().await; + // if resp.is_ok() { + // return Some((String::new(), resp.unwrap())); + // } + // } + if addrs.len() > 0 { + Some((String::new(), addrs.clone())) + } else { + None + } + } + BrokerServerTypeV0::Domain(domain) => Some((format!("wss://{}", domain), vec![])), + } + } + } + + pub fn to_iframe_msg(&self) -> BootstrapIframeMsg { + + match &self.server_type { + BrokerServerTypeV0::Domain(domain) => BootstrapIframeMsg::domain(domain.clone()), + BrokerServerTypeV0::Localhost(port) => BootstrapIframeMsg::local(*port, self.peer_id), + BrokerServerTypeV0::BoxPrivate(addrs) => BootstrapIframeMsg::private(addrs.to_vec(), self.peer_id), + BrokerServerTypeV0::Public(_) | BrokerServerTypeV0::BoxPublicDyn(_) => BootstrapIframeMsg::ngbox(), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BootstrapIframeMsg { + + pub peer_id: Option, + + pub private: Option>, + + pub ngbox: Option, + + pub domain: Option, + + pub localhost: Option, + +} + +impl BootstrapIframeMsg { + fn new() -> Self { + Self { + peer_id:None, + private:None, + ngbox:None, + domain:None, + localhost:None + } + } + + fn domain(domain: String) -> Self { + let mut s = Self::new(); + s.domain = Some(domain); + s + } + + fn ngbox() -> Self { + let mut s = Self::new(); + s.ngbox = Some(true); + s + } + + fn private(addrs: Vec, peer_id: PubKey) -> Self { + let mut s = Self::new(); + s.peer_id = Some(peer_id.to_string()); + s.private = Some(addrs); + s + } + + fn local(port: u16, peer_id: PubKey) -> Self { + let mut s = Self::new(); + s.peer_id = Some(peer_id.to_string()); + s.localhost = Some(port); + s + } +} + +/// Bootstrap content Version 0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BootstrapContentV0 { + /// list of servers, in order of preference + pub servers: Vec, +} + +impl BootstrapContentV0 { + pub fn new_localhost(peer_id: PubKey) -> Self { + BootstrapContentV0 { + servers: vec![BrokerServerV0::new_localhost(peer_id)], + } + } + pub fn new_empty() -> Self { + BootstrapContentV0 { servers: vec![] } + } + pub fn merge(&mut self, with: 
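The native-app branch of `get_ws_url` (location `None`) can be exercised as follows; a sketch assuming the same crates plus `futures` for a blocking executor:

```rust
use futures::executor::block_on;
use ng_net::types::BrokerServerV0;
use ng_repo::types::PubKey;

fn main() {
    let server = BrokerServerV0::new_localhost(PubKey::nil());
    let location: Option<String> = None; // native / tauri app: no page URL
    if let Some((url, fallback)) = block_on(server.get_ws_url(&location)) {
        // For a Localhost server type we get a concrete ws://localhost:<port>
        // URL and no fallback BindAddresses to try.
        assert!(fallback.is_empty());
        println!("connect to {}", url);
    }
}
```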
&BootstrapContentV0) { + 'outer: for server2 in &with.servers { + for server1 in &self.servers { + if *server1 == *server2 { + continue 'outer; + } + } + self.servers.push(server2.clone()); + } + } + pub fn get_first_peer_id(&self) -> Option { + self.servers.first().map(|s| s.peer_id) + } + + pub fn get_domain(&self) -> Option { + for server in self.servers.iter() { + if let BrokerServerTypeV0::Domain(name) = &server.server_type { + return Some(name.clone()); + } + } + None + } + + pub fn to_iframe_msgs(&self) -> Vec { + self.servers.iter().map(|server| server.to_iframe_msg()).collect() + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BootstrapContent { + V0(BootstrapContentV0), +} + +impl BootstrapContent { + pub fn servers(&self) -> &Vec { + match self { + Self::V0(v0) => &v0.servers, + } + } +} + +/// Local Bootstrap info Version 0, served at /.ng_bootstrap +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct LocalBootstrapInfoV0 { + /// list of servers, in order of preference + pub bootstrap: BootstrapContentV0, + + /// optional registration_url for public server that accept to be BSP for new clients + pub registration_url: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum LocalBootstrapInfo { + V0(LocalBootstrapInfoV0), +} + +impl LocalBootstrapInfo { + pub fn servers(&self) -> &Vec { + match self { + Self::V0(v0) => &v0.bootstrap.servers, + } + } +} + +impl From for Invitation { + fn from(value: LocalBootstrapInfo) -> Self { + let LocalBootstrapInfo::V0(info) = value; + let name = info.bootstrap.get_domain(); + let url = info.registration_url.clone(); + Invitation::V0(InvitationV0 { + bootstrap: info.bootstrap, + code: None, + name, + url, + }) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum InvitationCode { + Unique(SymKey), + Admin(SymKey), + Multi(SymKey), + Setup(SymKey), +} + +impl InvitationCode { + pub fn get_symkey(&self) -> SymKey { + match self { + Self::Unique(s) | Self::Admin(s) | Self::Multi(s) | Self::Setup(s) => s.clone(), + } + } +} + +impl fmt::Display for InvitationCode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Unique(k) => write!(f, "unique {}", k), + Self::Admin(k) => write!(f, "admin {}", k), + Self::Multi(k) => write!(f, "multi {}", k), + Self::Setup(k) => write!(f, "setup {}", k), + } + } +} + +/// Invitation to create an account at a broker. Version 0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct InvitationV0 { + /// list of servers, in order of preference + pub bootstrap: BootstrapContentV0, + + pub code: Option, + + /// an optional name to display to the invitee + pub name: Option, + + // an optional url to redirect the user to, for accepting ToS and making payment, if any. 
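The merge above is deduplicating: servers already present (by structural equality) are skipped. A short sketch, with `PubKey::nil()` again as a placeholder:

```rust
use ng_net::types::BootstrapContentV0;
use ng_repo::types::PubKey;

fn main() {
    let peer = PubKey::nil();
    let mut mine = BootstrapContentV0::new_localhost(peer);
    let theirs = BootstrapContentV0::new_localhost(peer);
    // merge() compares servers by equality and only appends the ones
    // not already present, so merging an identical list is a no-op.
    mine.merge(&theirs);
    assert_eq!(mine.servers.len(), 1);
    assert_eq!(mine.get_first_peer_id(), Some(peer));
}
```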
+ pub url: Option, +} + +impl InvitationV0 { + pub fn set_bootstrap(&mut self, content: BootstrapContent) { + match content { + BootstrapContent::V0(v0) => self.bootstrap = v0, + } + } + pub fn empty(name: Option) -> Self { + InvitationV0 { + bootstrap: BootstrapContentV0::new_empty(), + code: None, + name, + url: None, + } + } + pub fn new( + bootstrap_content: BootstrapContent, + code: Option, + name: Option, + url: Option, + ) -> Self { + match bootstrap_content { + BootstrapContent::V0(v0) => InvitationV0 { + bootstrap: v0, + code, + name, + url, + }, + } + } + pub fn append_bootstraps(&mut self, add: &mut Option) { + if add.is_some() { + let add = add.as_mut().unwrap(); + self.bootstrap.servers.append(&mut add.servers); + } + } +} + +impl Invitation { + pub fn new_v0( + bootstrap: BootstrapContentV0, + name: Option, + url: Option, + ) -> Self { + Invitation::V0(InvitationV0 { + bootstrap, + code: Some(SymKey::random()), + name, + url, + }) + } + + pub fn new_v0_free( + bootstrap: BootstrapContentV0, + name: Option, + url: Option, + ) -> Self { + Invitation::V0(InvitationV0 { + bootstrap, + code: None, + name, + url, + }) + } + + pub fn intersects(&self, invite2: Invitation) -> Invitation { + let Invitation::V0(v0) = self; + let mut new_invite = InvitationV0 { + bootstrap: BootstrapContentV0::new_empty(), + code: v0.code.clone(), + name: v0.name.clone(), + url: v0.url.clone(), + }; + for server2 in invite2.get_servers() { + for server1 in &v0.bootstrap.servers { + if *server1 == *server2 { + new_invite.bootstrap.servers.push(server2.clone()); + break; + } + } + } + Invitation::V0(new_invite) + } + + pub fn get_servers(&self) -> &Vec { + match self { + Invitation::V0(v0) => &v0.bootstrap.servers, + } + } + pub fn get_domain(&self) -> Option { + for bootstrap in self.get_servers() { + let res = bootstrap.get_domain(); + if res.is_some() { + return res; + } + } + None + } + + pub fn set_name(&mut self, name: Option) { + if name.is_some() { + match self { + Invitation::V0(v0) => v0.name = Some(name.unwrap()), + } + } + } + + pub fn set_url(&mut self, url: Option<&String>) { + if url.is_some() { + match self { + Invitation::V0(v0) => v0.url = Some(url.unwrap().clone()), + } + } + } + + /// first URL in the list is the ngnet one + pub fn get_urls(&self) -> Vec { + match self { + Invitation::V0(v0) => { + let mut res = vec![]; + let ser = serde_bare::to_vec(&self).unwrap(); + let url_param = base64_url::encode(&ser); + res.push(format!("{}/#/i/{}", NG_NET_URL, url_param)); + for server in &v0.bootstrap.servers { + match &server.server_type { + BrokerServerTypeV0::Domain(domain) => { + res.push(format!("https://{}/#/i/{}", domain, url_param)); + } + BrokerServerTypeV0::BoxPrivate(addrs) => { + for bindaddr in addrs { + res.push(format!( + "http://{}:{}/#/i/{}", + bindaddr.ip, bindaddr.port, url_param + )); + } + } + BrokerServerTypeV0::Localhost(port) => { + res.push(format!("{}/#/i/{}", local_http_url(&port), url_param)); + } + _ => {} + } + } + res + } + } + } +} + +impl fmt::Display for Invitation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let ser = serde_bare::to_vec(&self).unwrap(); + let string = base64_url::encode(&ser); + write!(f, "{}", string) + } +} + +impl TryFrom for Invitation { + type Error = NgError; + fn try_from(value: String) -> Result { + let ser = base64_url::decode(&value).map_err(|_| NgError::InvalidInvitation)?; + let invite: Invitation = + serde_bare::from_slice(&ser).map_err(|_| NgError::InvalidInvitation)?; + Ok(invite) + } +} + +/// Invitation 
to create an account at a broker. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum Invitation { + V0(InvitationV0), +} + +// impl From for Invitation { +// fn from(value: BootstrapContent) -> Self { +// let BootstrapContent::V0(boot) = value; +// let name = boot.get_domain(); +// Invitation::V0(InvitationV0 { +// bootstrap: boot, +// code: None, +// name, +// url: None, +// }) +// } +// } + +/// Create an account at a Broker Service Provider (BSP). +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CreateAccountBSP { + V0(CreateAccountBSPV0), +} + +impl TryFrom for CreateAccountBSP { + type Error = NgError; + fn try_from(value: String) -> Result { + let ser = base64_url::decode(&value).map_err(|_| NgError::InvalidCreateAccount)?; + let invite: CreateAccountBSP = + serde_bare::from_slice(&ser).map_err(|_| NgError::InvalidCreateAccount)?; + Ok(invite) + } +} + +impl CreateAccountBSP { + pub fn encode(&self) -> Option { + let payload_ser = serde_bare::to_vec(self).ok(); + if payload_ser.is_none() { + return None; + } + Some(base64_url::encode(&payload_ser.unwrap())) + } + // pub fn user(&self) -> PubKey { + // match self { + // Self::V0(v0) => v0.user, + // } + // } + pub fn redirect_url(&self) -> &Option { + match self { + Self::V0(v0) => &v0.redirect_url, + } + } + // pub fn invitation(&self) -> &Option { + // match self { + // Self::V0(v0) => &v0.invitation, + // } + // } + // pub fn additional_bootstrap(&mut self) -> &mut Option { + // match self { + // Self::V0(v0) => &mut v0.additional_bootstrap, + // } + // } +} + +/// Create an account at a Broker Service Provider (BSP). Version 0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CreateAccountBSPV0 { + //pub invitation: Option, + + //pub additional_bootstrap: Option, + /// the user asking to create an account + //pub user: PubKey, + + /// signature over serialized invitation code, with user key + // pub sig: Sig, + + /// for web access, will redirect after successful signup. if left empty, it means user was on native app. + pub redirect_url: Option, +} + +/// ListenerInfo +#[cfg(not(target_arch = "wasm32"))] +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ListenerInfo { + pub config: ListenerV0, + + /// list of BindAddresses + pub addrs: Vec, +} + +/// AcceptForwardForV0 type +/// +/// allow answers to connection requests originating from a client behind a reverse proxy +/// Format of last param in the tuple is a list of comma separated hosts or CIDR subnetworks IPv4 and/or IPv6 addresses accepted as X-Forwarded-For +/// Empty string means all addresses are accepted +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub enum AcceptForwardForV0 { + /// X-Forwarded-For not allowed + No, + + /// X-Forwarded-For accepted only for clients with private LAN addresses. First param is the domain of the proxy server + PrivateDomain((String, String)), + + /// X-Forwarded-For accepted only for clients with public addresses. First param is the domain of the proxy server + /// domain can take an option port (trailing `:port`) + PublicDomain((String, String)), + + /// X-Forwarded-For accepted only for clients with public addresses. 
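A round-trip sketch for the invitation string encoding (`Display` produces base64url of the serde_bare bytes, `TryFrom<String>` decodes it back), under the same crate assumptions; the BSP name is made up:

```rust
use ng_net::types::{BootstrapContentV0, Invitation};
use ng_repo::errors::NgError;
use ng_repo::types::PubKey;

fn main() -> Result<(), NgError> {
    let bootstrap = BootstrapContentV0::new_localhost(PubKey::nil());
    // new_v0_free() creates an invitation without a single-use code.
    let invite = Invitation::new_v0_free(bootstrap, Some("Example BSP".to_string()), None);

    let encoded = invite.to_string();
    let decoded = Invitation::try_from(encoded)?;

    // The first URL always points at nextgraph.net, followed by one URL
    // per Domain/BoxPrivate/Localhost server in the bootstrap.
    for url in decoded.get_urls() {
        println!("{}", url);
    }
    Ok(())
}
```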
First param is the domain of the proxy server
+    /// domain can take an optional port (trailing `:port`)
+    /// second param is the privKey of the PeerId of the proxy server, useful when the proxy server is load balancing to several daemons
+    /// that should all use the same PeerId to answer requests
+    PublicDomainPeer((String, PrivKey, String)),
+
+    /// accepts only clients with public addresses that arrive on a LAN address binding. This is used for DMZ and port forwarding configs
+    /// first param is the port, second param in tuple is the interval for periodic probe of the external IP
+    PublicDyn((u16, u32, String)),
+
+    /// accepts only clients with public addresses that arrive on a LAN address binding. This is used for DMZ and port forwarding configs
+    /// First param is the IPv4 bind address of the reverse NAT server (DMZ, port forwarding)
+    /// Second param is an optional IPv6 bind address of the reverse NAT server (DMZ, port forwarding)
+    PublicStatic((BindAddress, Option<BindAddress>, String)),
+}
+
+impl AcceptForwardForV0 {
+    pub fn get_public_bind_addresses(&self) -> Vec<BindAddress> {
+        match self {
+            AcceptForwardForV0::PublicStatic((ipv4, ipv6, _)) => {
+                let mut res = vec![ipv4.clone()];
+                if ipv6.is_some() {
+                    res.push(ipv6.unwrap().clone())
+                }
+                res
+            }
+            AcceptForwardForV0::PublicDyn(_) => {
+                todo!();
+            }
+            _ => panic!("cannot call get_public_bind_addresses"),
+        }
+    }
+
+    pub fn get_public_bind_ipv6_address(&self) -> Option<IP> {
+        match self {
+            AcceptForwardForV0::PublicStatic((_ipv4, ipv6, _)) => {
+                if ipv6.is_some() {
+                    return Some(ipv6.unwrap().ip.clone());
+                } else {
+                    return None;
+                }
+            }
+            AcceptForwardForV0::PublicDyn(_) => {
+                todo!();
+            }
+            _ => None,
+        }
+    }
+
+    pub fn is_public_domain(&self) -> bool {
+        match self {
+            AcceptForwardForV0::PublicDomainPeer(_) => true,
+            AcceptForwardForV0::PublicDomain(_) => true,
+            _ => false,
+        }
+    }
+    pub fn is_public_static(&self) -> bool {
+        match self {
+            AcceptForwardForV0::PublicStatic(_) => true,
+            _ => false,
+        }
+    }
+    pub fn is_no(&self) -> bool {
+        match self {
+            AcceptForwardForV0::No => true,
+            _ => false,
+        }
+    }
+    pub fn is_public_dyn(&self) -> bool {
+        match self {
+            AcceptForwardForV0::PublicDyn(_) => true,
+            _ => false,
+        }
+    }
+    pub fn is_private_domain(&self) -> bool {
+        match self {
+            AcceptForwardForV0::PrivateDomain(_) => true,
+            _ => false,
+        }
+    }
+    pub fn domain_with_common_peer_id(&self) -> Option<PubKey> {
+        match self {
+            AcceptForwardForV0::PublicDomainPeer((_, privkey, _)) => Some(privkey.to_pub()),
+            _ => None,
+        }
+    }
+    pub fn get_domain(&self) -> &str {
+        let domain = get_domain_without_port_443(match self {
+            AcceptForwardForV0::PrivateDomain((d, _)) => d,
+            AcceptForwardForV0::PublicDomain((d, _)) => d,
+            AcceptForwardForV0::PublicDomainPeer((d, _, _)) => d,
+            _ => panic!("cannot call get_domain if AcceptForwardForV0 is not a domain"),
+        });
+        domain
+    }
+}
+
+#[cfg(not(target_arch = "wasm32"))]
+/// DaemonConfig Listener Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ListenerV0 {
+    /// local interface name to bind to
+    /// names of interfaces can be retrieved with the --list-interfaces option
+    pub interface_name: String,
+
+    pub if_type: InterfaceType,
+
+    /// optional number of seconds for an interval of periodic refresh
+    /// of the actual IP(s) of the interface.
Used for dynamic IP interfaces (DHCP) + pub interface_refresh: u32, + + // if to bind to the ipv6 address of the interface + pub ipv6: bool, + + /// local port to listen on + pub port: u16, + + /// force a private or localhost interface to be accepted as a core interface + pub private_core: bool, + + /// should the server serve the app files in HTTP mode (not WS). this setting will be discarded and app will not be served anyway if remote IP is public or listener is public + pub serve_app: bool, + + /// when the box is behind a DMZ, and ipv6 is enabled, the private interface will get the external public IpV6. with this option we allow binding to it + pub bind_public_ipv6: bool, + + /// default to false. Set to true by --core (use --core-with-clients to override to false). only useful for a public IP listener, if the clients should use another listener like --domain or --domain-private. + /// do not set it on a --domain or --domain-private, as this will enable the relay_websocket feature, which should not be used except by nextgraph.app + pub refuse_clients: bool, + + // will answer a probe coming from private LAN and if is_private, with its own peerId, so that guests on the network will be able to connect. + pub discoverable: bool, + + /// Answers to connection requests originating from a direct client, without X-Forwarded-For headers + /// Can be used in combination with a accept_forward_for config, when a local daemon is behind a proxy, and also serves as broker for local apps/webbrowsers + pub accept_direct: bool, + + /// X-Forwarded-For config. only valid if IP/interface is localhost or private + pub accept_forward_for: AcceptForwardForV0, + // impl fn is_private() + // returns false if public IP in interface, or if PublicDyn, PublicStatic + + // an interface with no accept_forward_for and no accept_direct, is de facto, disabled +} + +#[cfg(not(target_arch = "wasm32"))] +impl ListenerV0 { + pub fn should_bind_public_ipv6_to_private_interface(&self, ip: Ipv6Addr) -> bool { + let public_ip = self.accept_forward_for.get_public_bind_ipv6_address(); + if public_ip.is_none() { + return false; + } + let public_ipv6addr: IpAddr = public_ip.as_ref().unwrap().into(); + return if let IpAddr::V6(v6) = public_ipv6addr { + self.bind_public_ipv6 && self.if_type == InterfaceType::Private && ip == v6 + } else { + false + }; + } + + pub fn new_direct(interface: Interface, ipv6: bool, port: u16) -> Self { + Self { + interface_name: interface.name, + if_type: interface.if_type, + interface_refresh: 0, + ipv6, + port, + private_core: false, + discoverable: false, + accept_direct: true, + refuse_clients: false, + serve_app: true, + bind_public_ipv6: false, + accept_forward_for: AcceptForwardForV0::No, + } + } + + pub fn is_core(&self) -> bool { + match self.accept_forward_for { + AcceptForwardForV0::PublicStatic(_) => true, + AcceptForwardForV0::PublicDyn(_) => true, + AcceptForwardForV0::PublicDomain(_) | AcceptForwardForV0::PublicDomainPeer(_) => false, + AcceptForwardForV0::PrivateDomain(_) => false, + AcceptForwardForV0::No => { + self.if_type == InterfaceType::Public + || (self.private_core && self.if_type != InterfaceType::Invalid) + } + } + } + + pub fn accepts_client(&self) -> bool { + match self.accept_forward_for { + AcceptForwardForV0::PublicStatic(_) + | AcceptForwardForV0::PublicDyn(_) + | AcceptForwardForV0::PublicDomain(_) + | AcceptForwardForV0::PublicDomainPeer(_) => self.accept_direct || !self.refuse_clients, + AcceptForwardForV0::PrivateDomain(_) => true, + AcceptForwardForV0::No => { + 
self.if_type == InterfaceType::Public && !self.refuse_clients + || self.if_type != InterfaceType::Public + } + } + } + + pub fn get_bootstraps(&self, addrs: Vec) -> Vec { + let mut res: Vec = vec![]; + match self.accept_forward_for { + AcceptForwardForV0::PublicStatic(_) => { + let pub_addrs = self.accept_forward_for.get_public_bind_addresses(); + //res.push(BrokerServerTypeV0::Core(pub_addrs.clone())); + if !self.refuse_clients { + res.push(BrokerServerTypeV0::Public(pub_addrs)); + } + if self.accept_direct { + res.push(BrokerServerTypeV0::BoxPrivate(addrs)); + } + } + AcceptForwardForV0::PublicDyn(_) => { + let pub_addrs = self.accept_forward_for.get_public_bind_addresses(); + //res.push(BrokerServerTypeV0::Core(pub_addrs.clone())); + if !self.refuse_clients { + res.push(BrokerServerTypeV0::BoxPublicDyn(pub_addrs)); + } + if self.accept_direct { + res.push(BrokerServerTypeV0::BoxPrivate(addrs)); + } + } + AcceptForwardForV0::PublicDomain(_) | AcceptForwardForV0::PublicDomainPeer(_) => { + if !self.refuse_clients { + res.push(BrokerServerTypeV0::Domain( + self.accept_forward_for.get_domain().to_string(), + )); + } + //// this is removed since a server serving domain requests often needs a local interface too (for ngaccount), but does not want to expose this local interface to clients. + // if self.accept_direct { + // if self.if_type == InterfaceType::Private { + // res.push(BrokerServerTypeV0::BoxPrivate(addrs)); + // } else if self.if_type == InterfaceType::Loopback { + // res.push(BrokerServerTypeV0::Localhost(addrs[0].port)); + // } + // } + } + AcceptForwardForV0::PrivateDomain(_) => { + res.push(BrokerServerTypeV0::Domain( + self.accept_forward_for.get_domain().to_string(), + )); + } + AcceptForwardForV0::No => { + if self.if_type == InterfaceType::Loopback { + res.push(BrokerServerTypeV0::Localhost(addrs[0].port)); + } else if self.if_type == InterfaceType::Public { + //res.push(BrokerServerTypeV0::Core(addrs.clone())); + if !self.refuse_clients { + res.push(BrokerServerTypeV0::Public(addrs)); + } + } else if self.if_type == InterfaceType::Private { + res.push(BrokerServerTypeV0::BoxPrivate(addrs)); + } + } + } + res + } +} +#[cfg(not(target_arch = "wasm32"))] +impl fmt::Display for ListenerV0 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut id = self.interface_name.clone(); + id.push('@'); + id.push_str(&self.port.to_string()); + write!(f, "{}", id) + } +} + +/// Broker Overlay Permission +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum BrokerOverlayPermission { + Nobody, + Anybody, + AllRegisteredUser, + UsersList(Vec), +} + +/// Broker Overlay Config +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct BrokerOverlayConfigV0 { + // list of overlays this config applies to. empty array means applying to all + pub overlays: Vec, + // Who can ask to join an overlay on the core + pub core: BrokerOverlayPermission, + // Who can connect as a client to this server + pub server: BrokerOverlayPermission, + // if core == Nobody and server == Nobody then the listeners will not be started + + // are ExtRequest allowed on the server? this requires the core to be ON. + pub allow_read: bool, + + /// an empty list means to forward to the peer known for each overlay. + /// forward and core are mutually exclusive. forward becomes the default when core is disabled (set to Nobody). + /// core always takes precedence. 
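A direct localhost listener assembled field by field (mirroring what `new_direct` does), then queried with the predicates above; all values are illustrative and the crate paths are assumed as before:

```rust
use ng_net::types::{AcceptForwardForV0, BindAddress, InterfaceType, ListenerV0};

fn main() {
    let listener = ListenerV0 {
        interface_name: "lo".to_string(),
        if_type: InterfaceType::Loopback,
        interface_refresh: 0, // static IP: no periodic refresh
        ipv6: false,
        port: 14400, // illustrative port
        private_core: false,
        serve_app: true,
        bind_public_ipv6: false,
        refuse_clients: false,
        discoverable: false,
        accept_direct: true,
        accept_forward_for: AcceptForwardForV0::No, // no reverse proxy
    };
    // Loopback with no forwarding: accepts clients, is not a core listener,
    // and advertises itself as a Localhost bootstrap entry.
    assert!(listener.accepts_client());
    assert!(!listener.is_core());
    let boots = listener.get_bootstraps(vec![BindAddress::new_localhost_with_port(14400)]);
    println!("{:?}", boots);
}
```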
+ pub forward: Vec, +} + +impl BrokerOverlayConfigV0 { + pub fn new() -> Self { + BrokerOverlayConfigV0 { + overlays: vec![], + core: BrokerOverlayPermission::Nobody, + server: BrokerOverlayPermission::Nobody, + allow_read: false, + forward: vec![], + } + } +} + +/// Registration config +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum RegistrationConfig { + Closed, + Invitation, + Open, +} + +/// Overlay Access +/// +/// Used by the Client when opening or pinning a repo. +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum OverlayAccess { + /// The repo will be accessed on the Outer Overlay in Read Only mode + /// This can be used for Public, Protected or Group overlays + /// Value should be an OverlayId::Outer + ReadOnly(OverlayId), + /// The repo will be accessed on the Inner Overlay in Write mode, and the associated Outer overlay is also given + /// This is used for Public, Protected and Group overlays + /// First value in tuple should be the OverlayId::Inner, second the OverlayId::Outer + ReadWrite((OverlayId, OverlayId)), + /// The repo will be accessed on the Inner Overlay in Write mode, and it doesn't have an Outer overlay + /// This is used for Private and Dialog overlays + /// Value should be an OverlayId::Inner + WriteOnly(OverlayId), +} + +impl OverlayAccess { + pub fn is_read_only(&self) -> bool { + match self { + Self::ReadOnly(_) => true, + _ => false, + } + } + pub fn new_write_access_from_store(store: &Store) -> OverlayAccess { + match store.get_store_repo() { + StoreRepo::V0(StoreRepoV0::PrivateStore(_)) | StoreRepo::V0(StoreRepoV0::Dialog(_)) => { + OverlayAccess::WriteOnly(store.inner_overlay()) + } + StoreRepo::V0(StoreRepoV0::ProtectedStore(_)) + | StoreRepo::V0(StoreRepoV0::Group(_)) + | StoreRepo::V0(StoreRepoV0::PublicStore(_)) => { + OverlayAccess::ReadWrite((store.inner_overlay(), store.outer_overlay())) + } + } + } + + pub fn new_read_access_from_store(store: &Store) -> OverlayAccess { + match store.get_store_repo() { + StoreRepo::V0(StoreRepoV0::PrivateStore(_)) | StoreRepo::V0(StoreRepoV0::Dialog(_)) => { + panic!("cannot get read access to a private or dialog store"); + } + StoreRepo::V0(StoreRepoV0::ProtectedStore(_)) + | StoreRepo::V0(StoreRepoV0::Group(_)) + | StoreRepo::V0(StoreRepoV0::PublicStore(_)) => { + OverlayAccess::ReadOnly(store.outer_overlay()) + } + } + } + + pub fn new_ro(outer_overlay: OverlayId) -> Result { + if let OverlayId::Outer(_digest) = outer_overlay { + Ok(OverlayAccess::ReadOnly(outer_overlay)) + } else { + Err(NgError::InvalidArgument) + } + } + pub fn new_rw(inner_overlay: OverlayId, outer_overlay: OverlayId) -> Result { + if let OverlayId::Inner(_digest) = inner_overlay { + if let OverlayId::Outer(_digest) = outer_overlay { + Ok(OverlayAccess::ReadWrite((inner_overlay, outer_overlay))) + } else { + Err(NgError::InvalidArgument) + } + } else { + Err(NgError::InvalidArgument) + } + } + pub fn new_wo(inner_overlay: OverlayId) -> Result { + if let OverlayId::Inner(_digest) = inner_overlay { + Ok(OverlayAccess::WriteOnly(inner_overlay)) + } else { + Err(NgError::InvalidArgument) + } + } + pub fn overlay_id_for_client_protocol_purpose(&self) -> &OverlayId { + match self { + Self::ReadOnly(ro) => ro, + Self::ReadWrite((inner, _outer)) => inner, + Self::WriteOnly(wo) => wo, + } + } +} + +/// Inner Overlay Link +/// +/// Details of the inner overlay of an NgLink +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub struct InnerOverlayLink { + /// overlay public key ID + pub 
id: StoreOverlay, + + /// The store has a special branch called `overlay` that is used to manage access to the InnerOverlay + /// only the ReadCapSecret is needed to access the InnerOverlay + /// the full readcap of this branch is needed in order to subscribe to the topic and decrypt the events. The branchId can be found in the branch Definition + /// it can be useful to subscribe to this topic if the user is a member of the store's repo, so it will be notified of BranchCapRefresh on the overlay + /// if the user is an external user to the store, it will lose access to the InnerOverlay after a BranchCapRefresh of the overlay branch of the store. + pub store_overlay_readcap: ReadCap, +} + +/// Overlay Link +/// +/// Details of the overlay of an NgLink +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum OverlayLink { + Outer(Digest), + InnerLink(InnerOverlayLink), + Inner(Digest), + Inherit, + Public(PubKey), + Global, +} + +impl OverlayLink { + pub fn is_outer(&self) -> bool { + match self { + Self::Outer(_) => true, + _ => false, + } + } + pub fn outer(&self) -> &Digest { + match self { + Self::Outer(o) => o, + _ => panic!("not an outer overlay ID"), + } + } +} + +impl TryFrom for OverlayId { + type Error = NgError; + fn try_from(link: OverlayLink) -> Result { + Ok(match link { + OverlayLink::Inner(Digest::Blake3Digest32(i)) => OverlayId::Inner(i), + OverlayLink::Outer(Digest::Blake3Digest32(i)) => OverlayId::Outer(i), + OverlayLink::Global => OverlayId::Global, + _ => return Err(NgError::InvalidArgument), + }) + } +} + +impl From for OverlayLink { + fn from(id: OverlayId) -> Self { + match id { + OverlayId::Inner(i) => OverlayLink::Inner(Digest::from_slice(i)), + OverlayId::Outer(o) => OverlayLink::Outer(Digest::from_slice(o)), + OverlayId::Global => OverlayLink::Global, + } + } +} + +/// Overlay session ID +/// +/// It is a pubkey used for signing all OverlayMessage sent by the peer. +/// Each peer generates it randomly when (re)joining the overlay network. 
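The `OverlayAccess` constructors enforce the inner/outer distinction at the type level. A validation sketch, building `OverlayId` variants from zeroed placeholder digests and assuming the crate paths used earlier:

```rust
use ng_net::types::OverlayAccess;
use ng_repo::types::OverlayId;

fn main() {
    // new_rw() insists on (Inner, Outer), in that order.
    assert!(OverlayAccess::new_rw(OverlayId::Inner([0u8; 32]), OverlayId::Outer([1u8; 32])).is_ok());
    assert!(OverlayAccess::new_rw(OverlayId::Outer([1u8; 32]), OverlayId::Inner([0u8; 32])).is_err());
    // Read-only access is only meaningful on an Outer overlay.
    assert!(OverlayAccess::new_ro(OverlayId::Outer([1u8; 32])).is_ok());
    assert!(OverlayAccess::new_ro(OverlayId::Inner([0u8; 32])).is_err());
    // Write-only access requires an Inner overlay.
    assert!(OverlayAccess::new_wo(OverlayId::Inner([0u8; 32])).is_ok());
}
```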
+pub type SessionId = PubKey; + +/// Client ID: client of a user +pub type ClientId = PubKey; + +/// IPv4 address +pub type IPv4 = [u8; 4]; + +const LOOPBACK_IPV4: IP = IP::IPv4([127, 0, 0, 1]); + +/// IPv6 address +pub type IPv6 = [u8; 16]; + +/// IP address +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum IP { + IPv4(IPv4), + IPv6(IPv6), +} + +impl IP { + pub fn is_public(&self) -> bool { + is_public_ip(&self.into()) + } + pub fn is_private(&self) -> bool { + is_private_ip(&self.into()) + } + pub fn is_loopback(&self) -> bool { + let t: &IpAddr = &self.into(); + t.is_loopback() + } + pub fn is_v6(&self) -> bool { + if let Self::IPv6(_) = self { + true + } else { + false + } + } + pub fn is_v4(&self) -> bool { + if let Self::IPv4(_) = self { + true + } else { + false + } + } +} + +impl fmt::Display for IP { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let t: IpAddr = self.try_into().unwrap(); + match self { + IP::IPv4(_) => write!(f, "{}", t), + IP::IPv6(_) => write!(f, "[{}]", t), + } + } +} + +impl From<&IpAddr> for IP { + #[inline] + fn from(ip: &IpAddr) -> IP { + match ip { + IpAddr::V4(v4) => IP::IPv4(v4.octets()), + IpAddr::V6(v6) => IP::IPv6(v6.octets()), + } + } +} + +impl From<&IP> for IpAddr { + #[inline] + fn from(ip: &IP) -> IpAddr { + match ip { + IP::IPv4(v4) => IpAddr::from(*v4), + IP::IPv6(v6) => IpAddr::from(*v6), + } + } +} + +/// IP transport protocol +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum TransportProtocol { + WS, + QUIC, + Local, +} + +/// IP transport address +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct IPTransportAddr { + pub ip: IP, + pub port: u16, + pub protocol: TransportProtocol, +} + +/// Network address +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum NetAddr { + IPTransport(IPTransportAddr), +} + +/** +* info : { + type : WEB | NATIVE-IOS | NATIVE-ANDROID | NATIVE-MACOS | NATIVE-LINUX | NATIVE-WIN + NATIVE-SERVICE | NODE-SERVICE | VERIFIER | CLIENT-BROKER | CLI + vendor : (UA, node version, tauri webview, rust version) + os : operating system string + version : version of client + date_install + date_updated : last update + } +*/ + +/// Client Type +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum ClientType { + Web, + NativeIos, + NativeAndroid, + NativeMacOS, + NativeLinux, + NativeWin, + NativeService, + NodeService, + Verifier, + VerifierLocal, + Box, // VerifierBox + Stick, // VerifierStick + WalletMaster, + ClientBroker, + Cli, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub struct ClientInfoV0 { + pub client_type: ClientType, + pub details: String, + pub version: String, + pub timestamp_install: u64, + pub timestamp_updated: u64, +} + +/// Client Info +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum ClientInfo { + V0(ClientInfoV0), +} + +impl ClientInfo { + pub fn new(client_type: ClientType, details: String, version: String) -> ClientInfo { + let timestamp_install = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_secs(); + ClientInfo::V0(ClientInfoV0 { + details, + version, + client_type, + timestamp_install, + timestamp_updated: timestamp_install, + }) + } +} + +// +// OVERLAY MESSAGES +// + +/// Overlay leave request +/// +/// In outerOverlay: informs the broker that the overlay is not needed anymore +/// In innerOverlay: Sent to all connected overlay 
participants to terminate a session +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum OverlayLeave { + V0(), +} + +/// Content of PublisherAdvertV0 +/// +/// the peer is matched with the InnerOverlayMessageV0.Session -> peerId. +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct PublisherAdvertContentV0 { + /// Topic public key + pub topic: TopicId, + + /// Peer public key + pub peer: DirectPeerId, +} + +/// Topic advertisement by a publisher +/// +/// Flooded to all peers in overlay +/// Creates subscription routing table entries +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct PublisherAdvertV0 { + pub content: PublisherAdvertContentV0, + + /// Signature over content by topic key + pub sig: Sig, +} + +/// Topic advertisement by a publisher +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum PublisherAdvert { + V0(PublisherAdvertV0), +} + +impl PublisherAdvert { + pub fn new( + topic_id: TopicId, + topic_key: BranchWriteCapSecret, + broker_peer: DirectPeerId, + ) -> PublisherAdvert { + let content = PublisherAdvertContentV0 { + peer: broker_peer, + topic: topic_id, + }; + let content_ser = serde_bare::to_vec(&content).unwrap(); + let sig = sign(&topic_key, &topic_id, &content_ser).unwrap(); + PublisherAdvert::V0(PublisherAdvertV0 { content, sig }) + } + pub fn topic_id(&self) -> &TopicId { + match self { + Self::V0(v0) => &v0.content.topic, + } + } + + pub fn verify(&self) -> Result<(), NgError> { + match self { + Self::V0(v0) => verify( + &serde_bare::to_vec(&v0.content).unwrap(), + v0.sig, + v0.content.topic, + ), + } + } + + pub fn verify_for_broker(&self, peer_id: &DirectPeerId) -> Result<(), ProtocolError> { + match self { + Self::V0(v0) => { + if v0.content.peer != *peer_id { + return Err(ProtocolError::InvalidPublisherAdvert); + } + } + } + Ok(self.verify()?) + } +} + +/// Topic subscription request by a peer +/// +/// Forwarded towards all publishers along subscription routing table entries +/// that are created by PublisherAdverts +/// Creates event routing table entries along the path +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct SubReqV0 { + /// Topic public key + pub topic: TopicId, + + /// For initial subscription, should be None, + /// When updating a subscription after a new publisher has joined (with a PublisherAdvert), + /// then the target publisher should be entered here. + /// The brokers will only forward the SubscriptionRequest to that publisher (on all available paths) + pub publisher: Option, +} + +/// Topic subscription request by a peer +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum SubReq { + V0(SubReqV0), +} + +/// Topic subscription marker sent by all publishers, back to subscriber +/// +/// Forwarded to all subscribers. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct SubMarkerV0 { + /// The publisher broker that marks its starting cut + /// TODO: that could be omitted, because we can retrieve it with the SessionId + pub publisher: DirectPeerId, + + /// The subscribed topic + pub topic: TopicId, + + /// The subscriber + pub subscriber: DirectPeerId, + + /// Current heads at the broker when receiving the SubReq. 
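A signing/verification sketch for `PublisherAdvert`, assuming `ng_repo::utils::generate_keypair()` returns a `(PrivKey, PubKey)` pair; the topic id is the public half and the write cap secret the private half:

```rust
use ng_net::types::PublisherAdvert;
use ng_repo::utils::generate_keypair;

fn main() {
    // Topic keypair: the topic id is the public key, the write cap its
    // private key, so the advert can be checked against the topic id alone.
    let (topic_priv, topic_id) = generate_keypair();
    let (_, broker_peer) = generate_keypair();

    let advert = PublisherAdvert::new(topic_id, topic_priv, broker_peer);
    assert!(advert.verify().is_ok());
    // verify_for_broker() additionally checks the embedded peer id.
    assert!(advert.verify_for_broker(&broker_peer).is_ok());
}
```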
Can be used to safely do a CoreTopicSyncReq + pub known_heads: Vec, +} + +/// Topic subscription acknowledgement by a publisher +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum SubMarker { + V0(SubMarkerV0), +} + +/// Topic unsubscription request by a subscriber +/// +/// A broker unsubscribes from all publisher brokers in the overlay +/// when it has no more local subscribers left +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct UnsubReqV0 { + /// Topic public key + pub topic: TopicId, +} + +/// Topic unsubscription request by a subscriber +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum UnsubReq { + V0(UnsubReqV0), +} + +/// Object search in a pub/sub topic +/// +/// Sent along the reverse path of a pub/sub topic +/// from a subscriber to one publisher at a time. +/// fanout is always 1 +/// if result is none, tries another path if several paths available locally +/// answered with a stream of BlockResult +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlockSearchTopicV0 { + /// Topic to forward the request in + pub topic: TopicId, + + /// Also search in subscribers + pub search_in_subs: bool, + + /// List of Object IDs to request + pub ids: Vec, + + /// Whether or not to include all children recursively in the response + pub include_children: bool, + + /// List of Peer IDs the request traversed so far + pub path: Vec, +} + +/// Object request by ID to publishers +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BlockSearchTopic { + V0(BlockSearchTopicV0), +} + +/// Block search along a random walk in the overlay +/// +/// fanout is always 1 +/// if result is none, tries another path if several paths available locally +/// answered with a stream BlockResult +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlockSearchRandomV0 { + /// List of Block IDs to request + pub ids: Vec, + + /// Whether or not to include all children recursively in the response + pub include_children: bool, + + /// Number of random nodes to forward the request to at each step + // pub fanout: u8, + // for now fanout is always 1 + + /// List of Broker Peer IDs the request traversed so far + pub path: Vec, +} + +/// Block request by ID using a random walk in the overlay +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BlockSearchRandom { + V0(BlockSearchRandomV0), +} + +/// Response to a BlockSearch* request +/// +/// can be a stream +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlockResultV0 { + /// Resulting Blocks(s) + pub payload: Vec, +} + +/// Response to a BlockSearch* request +/// +/// can be a stream +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BlockResult { + V0(BlockResultV0), +} + +/// Topic synchronization request +/// +/// In response a stream of `TopicSyncRes`s containing the missing Commits or events are sent +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct TopicSyncReqV0 { + /// Topic public key + pub topic: TopicId, + + /// Fully synchronized until these commits + pub known_heads: Vec, + + /// Stop synchronizing when these commits are met. 
+    /// if empty, the local HEAD at the responder is used instead
+    pub target_heads: Vec<ObjectId>,
+
+    /// optional Bloom filter of all the commit IDs present locally (used in case of detected fork)
+    pub known_commits: Option<BloomFilter>,
+
+    #[serde(skip)]
+    pub overlay: Option<OverlayId>,
+}
+
+/// Topic synchronization request
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum TopicSyncReq {
+    V0(TopicSyncReqV0),
+}
+
+impl TopicSyncReq {
+    pub fn overlay(&self) -> &OverlayId {
+        match self {
+            Self::V0(v0) => v0.overlay.as_ref().unwrap(),
+        }
+    }
+    pub fn set_overlay(&mut self, overlay: OverlayId) {
+        match self {
+            Self::V0(v0) => v0.overlay = Some(overlay),
+        }
+    }
+    pub fn topic(&self) -> &TopicId {
+        match self {
+            TopicSyncReq::V0(o) => &o.topic,
+        }
+    }
+    pub fn known_heads(&self) -> &Vec<ObjectId> {
+        match self {
+            TopicSyncReq::V0(o) => &o.known_heads,
+        }
+    }
+    pub fn target_heads(&self) -> &Vec<ObjectId> {
+        match self {
+            TopicSyncReq::V0(o) => &o.target_heads,
+        }
+    }
+    pub fn known_commits(&self) -> &Option<BloomFilter> {
+        match self {
+            TopicSyncReq::V0(o) => &o.known_commits,
+        }
+    }
+}
+
+/// Status of a Forwarded Peer, sent in the Advert
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum PeerStatus {
+    Connected,
+    Disconnected,
+}
+
+/// ForwardedPeerAdvertV0
+///
+/// peer_advert.forwarded_by is matched with sessionid->peerid
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ForwardedPeerAdvertV0 {
+    /// PeerAdvert received from Client
+    // TODO: this could be obfuscated when the user doesn't want to recall events.
+    pub peer_advert: PeerAdvertV0,
+
+    /// Hashed user Id, used to prevent concurrent connections from different brokers
+    /// BLAKE3 keyed hash over the UserId
+    /// - key: BLAKE3 derive_key ("NextGraph UserId Hash Overlay Id ForwardedPeerAdvertV0 BLAKE3 key", overlayId) // will always be an Inner overlay
+    pub user_hash: Digest,
+
+    /// whether the Advert is about a connection or a disconnection
+    pub status: PeerStatus,
+}
+
+/// Forwarded Peer advertisement
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum ForwardedPeerAdvert {
+    V0(ForwardedPeerAdvertV0),
+}
+
+/// ForwardedPeerConflictV0
+///
+/// peer_advert.forwarded_by is matched with sessionid->peerid
+/// When the forwarding broker receives the conflict (or notices it), it sends a notification.
+/// In order to avoid conflicts, the highest version of PeerAdvert wins when the Forwarding Brokers differ.
+/// Disconnect wins over connect for the exact same peer, version and forwarding broker.
+/// A conflict can occur when the same user_hash appears on two different Forwarding Brokers,
+/// or when the same peerId appears on two different Forwarding Brokers.
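An initial synchronization request built by hand; note that `overlay` is `#[serde(skip)]` and only attached locally via `set_overlay` before routing. Placeholders throughout, with the same crate-path assumptions:

```rust
use ng_net::types::{TopicSyncReq, TopicSyncReqV0};
use ng_repo::types::{OverlayId, PubKey};

fn main() {
    // Initial sync: no known heads yet, no fork detected.
    let mut req = TopicSyncReq::V0(TopicSyncReqV0 {
        topic: PubKey::nil(), // placeholder TopicId
        known_heads: vec![],  // nothing synchronized yet
        target_heads: vec![], // empty = use the responder's local HEAD
        known_commits: None,  // Bloom filter only needed after a fork
        overlay: None,        // #[serde(skip)]: set locally, never sent
    });
    // The overlay is attached right before the request is routed.
    req.set_overlay(OverlayId::Inner([0u8; 32])); // placeholder overlay
    assert!(req.known_heads().is_empty());
}
```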
+#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ForwardedPeerConflictV0 { + /// First conflicting PeerAdvert + pub advert_1: ForwardedPeerAdvertV0, + /// Second conflicting PeerAdvert + pub advert_2: ForwardedPeerAdvertV0, + + pub error_code: u16, +} + +/// Forwarded Peer advertisement conflict +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ForwardedPeerConflict { + V0(ForwardedPeerConflictV0), +} + +/// Content of PeerAdvertV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct PeerAdvertContentV0 { + /// Peer ID + pub peer: PeerId, + + /// Id of the broker that is forwarding the peer + pub forwarded_by: Option, + + /// Topic subscriptions + // pub subs: BloomFilter128, + + /// Network addresses, must be empty for forwarded peers + pub address: Vec, + + /// Version number + pub version: u32, + + /// App-specific metadata (profile, cryptographic material, etc) + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +/// Peer advertisement +/// +/// Sent when a peer joins an inner overlay. +/// Used only for forwardedPeer for now. +/// In the future, Core brokers could exchange PeerAdvert on the global overlay, and also do some PeerSearch to search for IPs/newer version of PeerAdvert +/// When the forwarding broker receives a client connection, it checks that the peer isn't +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct PeerAdvertV0 { + /// Peer advertisement content + pub content: PeerAdvertContentV0, + + /// Signature over content by peer's private key + pub sig: Sig, +} + +/// Peer advertisement +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum PeerAdvert { + V0(PeerAdvertV0), +} + +impl PeerAdvert { + pub fn version(&self) -> u32 { + match self { + PeerAdvert::V0(o) => o.content.version, + } + } + pub fn peer(&self) -> &PeerId { + match self { + PeerAdvert::V0(o) => &o.content.peer, + } + } +} + +/// Content of InnerOverlayMessageV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum InnerOverlayMessageContentV0 { + OverlayLeave(OverlayLeave), + ForwardedPeerAdvert(ForwardedPeerAdvert), + ForwardedPeerConflict(ForwardedPeerConflict), + PublisherJoined(PublisherAdvert), + PublisherLeft(PublisherAdvert), + SubReq(SubReq), + SubMarker(SubMarker), + UnsubReq(UnsubReq), + Event(Event), + //InboxPostRequest(InboxPostRequest), + //InboxPostResponse(InboxPostResponse), +} + +/// Inner Overlay message payload V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct InnerOverlayMessagePayloadV0 { + /// Sequence number incremented by peer when sending every overlay message in a session + /// Used to prevent replay attacks inside the overlay + pub seq: u64, + + pub content: InnerOverlayMessageContentV0, +} + +/// Inner Overlay message V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct InnerOverlayMessageV0 { + /// Session ID + pub session: SessionId, + + pub payload: InnerOverlayMessagePayloadV0, + + /// Signature with Session private key, over payload + pub sig: Sig, + + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +/// Inner Overlay message +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum InnerOverlayMessage { + V0(InnerOverlayMessageV0), +} + +/// Overlay Advert Payload V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct OverlayAdvertPayloadV0 { + /// the target Overlay ID (cannot be an Outer overlay) + pub overlay: OverlayId, + + /// the newly generated session ID the peer will use in this overlay + /// All the revoked sessionIDs are kept locally by 
their initiator. + pub session: SessionId, + + /// Current sequence number. For a new session, must be zero. + pub seq: u64, + + /// list of publisher currently registered on the peer. + /// flooded in overlay (with this overlayAdvert) when first joining an overlay, so that subscription routing tables can be updated + /// or sent in an OverlayAdvertMarker, to a specific new peer that just joined the overlay (in the point of view of the emitter) + /// it can be left empty when a CoreBrokerConnect is made on a broker that is known to be already part of the overlay. + pub publishers: Vec, + + /// the previous session ID the peer was using in this overlay. Used to cleanup seq counters maintained in each other peer + /// if the previous session is empty (because it is the first time broker joins this overlay) + /// or if a remote peer doesn't find this session kept locally, it is not an error. + /// In the later case (if broker missed some intermediary sessions), the remote peer can ask the initiator peer if the last known + /// session can be locally revoked with a ConfirmRevokedSession message (answered with yes or no) + pub previous_session: Option, + + /// peer ID of the broker issuing this Advert + pub peer: DirectPeerId, +} + +/// Overlay Advert V0 : used by a broker peer every time it (re)joins an overlay +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct OverlayAdvertV0 { + pub payload: OverlayAdvertPayloadV0, + + /// Signature with peerId private key, over payload + pub sig: Sig, +} + +/// Overlay Advert : used by a broker peer every time it (re)joins an overlay +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OverlayAdvert { + V0(OverlayAdvertV0), +} + +/// CoreBrokerJoinedAdvert V0 +/// +/// Each broker that is already part of an overlay, when receiving the CoreBrokerJoinedAdvert, should answer with one direct message +/// to the joining peer (found in OverlayAdvertPayloadV0.peer) for each overlay, containing an OverlayAdvertMarker containing their current sequence number. +/// This is sent for each path (in case multiple paths arrive to the same broker). Only the first sequence number received by joining peer is kept locally +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreBrokerJoinedAdvertV0 { + /// list of overlays joined by an initiator broker, and that the forwarding broker has also previously joined + /// the forwarding broker keeps the ingress edge and all egress edges in the coreRoutingTable + pub overlays: Vec, +} + +/// CoreBrokerLeftAdvert V0 +/// +/// A broker has disconnected from another broker, and the routes need to be updated +/// this is not used to leave one specific overlay. see OverlayLeave message for that purpose +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreBrokerLeftAdvertV0 { + /// The broker that disconnected from the one that is emitting this advert. 
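The per-session `seq` counters described above imply a simple replay check on the receiving side: a new session starts at zero and every later message must carry a strictly greater number. A self-contained sketch of that rule (the 32-byte array stands in for a session id):

```rust
use std::collections::HashMap;

/// Per-session monotonic sequence check, as implied by
/// InnerOverlayMessagePayloadV0.seq and OverlayAdvertPayloadV0.seq.
struct SeqTracker {
    last_seen: HashMap<[u8; 32], u64>, // session id -> highest seq accepted
}

impl SeqTracker {
    fn accept(&mut self, session: [u8; 32], seq: u64) -> bool {
        match self.last_seen.get_mut(&session) {
            None => {
                // the first message of a new session must use seq 0
                if seq == 0 {
                    self.last_seen.insert(session, 0);
                    true
                } else {
                    false
                }
            }
            Some(last) if seq > *last => {
                *last = seq;
                true
            }
            Some(_) => false, // replayed or out-of-order
        }
    }
}

fn main() {
    let mut tracker = SeqTracker { last_seen: HashMap::new() };
    let session = [7u8; 32]; // placeholder session id
    assert!(tracker.accept(session, 0));
    assert!(tracker.accept(session, 1));
    assert!(!tracker.accept(session, 1)); // replay rejected
}
```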
+ pub disconnected: DirectPeerId, +} + +/// CoreOverlayJoinedAdvert V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreOverlayJoinedAdvertV0 { + /// One additional overlay joined by an initiator broker, and that the forwarding broker has also previously joined + /// the forwarding broker keeps the ingress edge and all egress edges in the coreRoutingTable + pub overlay: OverlayAdvertV0, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreBrokerJoinedAdvert { + V0(CoreBrokerJoinedAdvertV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreBrokerLeftAdvert { + V0(CoreBrokerLeftAdvertV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreOverlayJoinedAdvert { + V0(CoreOverlayJoinedAdvertV0), +} + +/// Content of CoreAdvert V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreAdvertContentV0 { + BrokerJoined(CoreBrokerJoinedAdvert), + BrokerLeft(CoreBrokerLeftAdvert), + OverlayJoined(CoreOverlayJoinedAdvert), +} + +/// CoreAdvert V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreAdvertV0 { + pub content: CoreAdvertContentV0, + + /// list of brokers on the path that was followed to deliver this advert. + /// new entry pushed each time a forward is happening in the core network + pub path: Vec, + + /// content signed by the first broker in the path + pub sig: Sig, + + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +/// OverlayAdvertMarker V0 +/// +/// when receiving a marker, the broker saves the ingress edge and the corresponding remote peer and +/// overlay that can be reached (the OverlayAdvertPayloadV0.peer and .overlay) in the CoreRoutingTable +/// It also saves the sessionId and seq number +/// then a ReturnPathTimingAdvert is sent back +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct OverlayAdvertMarkerV0 { + pub marker: OverlayAdvertV0, + + /// New SessionId that triggered this marker (to avoid replay attacks in the core network) + pub in_reply_to: SessionId, + + /// path from the new broker who started a session, to the broker that is sending the marker + pub path: Vec, + + /// randomly generated nonce used for the reply (a ReturnPathTimingMarker) that will be sent back after this marker has been received on the other end + pub reply_nonce: u64, +} + +/// Core Block Get V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreBlocksGetV0 { + /// Block ID to request + pub ids: Vec, + + /// Whether or not to include all children recursively + pub include_children: bool, + + /// randomly generated number by requester, used for sending reply. + /// the requester keeps track of req_nonce and requested peerid. 
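+// NOTE: a sketch (illustration only) of the requester-side bookkeeping mentioned here:
+// map each req_nonce to the peer that was queried, so that streamed CoreBlockResultV0
+// replies can be matched to their request; `pending`, `target_peer` and the random
+// helper are hypothetical:
+//
+//     use std::collections::HashMap;
+//     let mut pending: HashMap<u64, DirectPeerId> = HashMap::new();
+//     let req_nonce: u64 = random_u64(); // assumption: any CSPRNG will do
+//     pending.insert(req_nonce, target_peer);
+//     // ... on each incoming CoreBlockResultV0 { req_nonce, .. }:
+//     // accept the blocks only if pending.contains_key(&req_nonce)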
+ /// used for handling the stream + pub req_nonce: u64, +} + +/// Core Block Result V0 +/// +/// can be a stream +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreBlockResultV0 { + /// Resulting Object(s) + pub payload: Vec, + + /// randomly generated number by requester, as received in the request + pub req_nonce: u64, +} + +/// ReturnPathTimingAdvertV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ReturnPathTimingAdvertV0 { + /// Signature over nonce, by sessionId + pub sig: Sig, + + /// randomly generated number as received in the OverlayAdvertMarker + pub nonce: u64, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OverlayAdvertMarker { + V0(OverlayAdvertMarkerV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ReturnPathTimingAdvert { + V0(ReturnPathTimingAdvertV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreBlocksGet { + V0(CoreBlocksGetV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreBlockResult { + V0(CoreBlockResultV0), +} + +/// Content of CoreDirectMessage V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreDirectMessageContentV0 { + OverlayAdvertMarker(OverlayAdvertMarker), + ReturnPathTimingAdvert(ReturnPathTimingAdvert), + BlocksGet(CoreBlocksGet), + BlockResult(CoreBlockResult), + //InboxPost, + //PartialSignature, + //ClientDirectMessage //for messages between forwarded or direct peers +} + +/// CoreDirectMessage V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreDirectMessageV0 { + pub content: CoreDirectMessageContentV0, + + /// list of brokers on the path that must be followed to deliver this message, next hop is at the bottom of the list. + /// last entry on the list is popped each time a broker is forwarding upstream + /// when list size is zero, the final destination is reached. 
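+// NOTE: a sketch (illustration only) of the pop-based source routing described in this
+// comment; `deliver_locally` and `send_to` are hypothetical broker-side helpers:
+//
+//     fn forward(mut msg: CoreDirectMessageV0) {
+//         match msg.reverse_path.pop() {
+//             None => deliver_locally(msg),             // list size zero: destination reached
+//             Some(next_hop) => send_to(next_hop, msg), // pop one entry per upstream hop
+//         }
+//     }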
+    /// If only one peer is in the list, and that peer is not found in the local CoreRoutingTable, use the best route to reach it (without popping).
+    pub reverse_path: Vec<DirectPeerId>,
+
+    /// The sender
+    pub from: DirectPeerId,
+
+    /// content signed by the sender
+    pub sig: Sig,
+
+    /// Optional padding
+    #[serde(with = "serde_bytes")]
+    pub padding: Vec<u8>,
+}
+
+/// CoreBrokerConnect V0
+///
+/// replied with CoreBrokerConnectResponse
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct CoreBrokerConnectV0 {
+    pub inner_overlays: Vec<OverlayAdvertV0>,
+    pub outer_overlays: Vec<Digest>,
+}
+
+/// CoreBrokerConnect
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum CoreBrokerConnect {
+    V0(CoreBrokerConnectV0),
+}
+
+/// CoreBrokerConnectResponse
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum CoreBrokerConnectResponse {
+    V0(CoreBrokerConnectResponseV0),
+}
+
+impl CoreBrokerConnect {
+    pub fn core_message(&self, id: i64) -> CoreMessage {
+        match self {
+            CoreBrokerConnect::V0(v0) => {
+                CoreMessage::V0(CoreMessageV0::Request(CoreRequest::V0(CoreRequestV0 {
+                    padding: vec![],
+                    id,
+                    content: CoreRequestContentV0::BrokerConnect(CoreBrokerConnect::V0(v0.clone())),
+                })))
+            }
+        }
+    }
+}
+
+/// Sent to a direct peer just before closing the connection
+pub type CoreBrokerDisconnectV0 = ();
+
+/// Content of CoreOverlayJoin V0
+///
+/// Replied with an EmptyResponse, and an error code if the OverlayId is not present on the remote broker.
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum CoreOverlayJoinV0 {
+    Inner(OverlayAdvert),
+    Outer(Digest),
+}
+
+/// Content of OuterOverlayResponse V0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum OuterOverlayResponseContentV0 {
+    EmptyResponse(()),
+    Block(Block),
+    TopicSyncRes(TopicSyncRes),
+    //InboxPostResponse(InboxPostResponse),
+}
+
+/// Content of OuterOverlayRequest V0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum OuterOverlayRequestContentV0 {
+    TopicSyncReq(TopicSyncReq),
+    OverlayLeave(OverlayLeave),
+    TopicSub(PubKey),
+    TopicUnsub(PubKey),
+    BlocksGet(BlocksGet),
+    //InboxPostRequest(InboxPostRequest),
+}
+
+/// OuterOverlayRequest V0
+///
+/// replied with OuterOverlayResponseV0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct OuterOverlayRequestV0 {
+    pub overlay: Digest,
+    pub content: OuterOverlayRequestContentV0,
+}
+
+/// OuterOverlayResponse V0
+///
+/// reply to an OuterOverlayRequest V0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct OuterOverlayResponseV0 {
+    pub overlay: Digest,
+    pub content: OuterOverlayResponseContentV0,
+}
+
+/// Core Topic synchronization request
+///
+/// Behaves like BlockSearchTopic (primarily searches among the publishers, except if search_in_subs is set to true).
+/// Fanout is 1 for now.
+///
+/// If some target_heads are not found locally, all successors of known_heads are sent anyway,
+/// and then this temporary HEAD is used to propagate/fanout the CoreTopicSyncReq to upstream brokers.
+///
+/// Answered with one or many TopicSyncRes: a stream of `Block`s or `Event`s of the commits.
+/// If the responder has an Event for the commit(s) in its HEAD, it will send the event instead of the plain commit's blocks.
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct CoreTopicSyncReqV0 {
+    /// Topic public key
+    pub topic: TopicId,
+
+    /// Also search in subscribers, in addition to publishers
+    pub search_in_subs: bool,
+
+    /// Fully synchronized until these commits
+    pub known_heads: Vec<ObjectId>,
+
+    /// Stop synchronizing when these commits are met.
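+// NOTE: a sketch (illustration only) of how a requester might fill this struct to catch
+// up with everything the responder has; `topic_id` and `local_heads` are placeholders:
+//
+//     let req = CoreTopicSyncReqV0 {
+//         topic: topic_id,
+//         search_in_subs: false,    // publishers only (the default)
+//         known_heads: local_heads, // commits we are already synchronized up to
+//         target_heads: vec![],     // empty: the responder's local HEAD is used
+//     };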
+ /// if empty, the local HEAD at the responder is used instead + pub target_heads: Vec, +} + +/// Topic synchronization request +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreTopicSyncReq { + V0(CoreTopicSyncReqV0), +} + +/// Topic synchronization response V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum TopicSyncResV0 { + Event(Event), + Block(Block), +} + +/// Topic synchronization response +/// +/// it is a stream of blocks and or events. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum TopicSyncRes { + V0(TopicSyncResV0), +} + +impl TopicSyncRes { + pub fn event(&self) -> &Event { + match self { + Self::V0(TopicSyncResV0::Event(e)) => e, + _ => panic!("this TopicSyncResV0 is not an event"), + } + } +} + +impl fmt::Display for TopicSyncRes { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => match v0 { + TopicSyncResV0::Event(e) => writeln!(f, "====== Event ====== {e}"), + TopicSyncResV0::Block(b) => writeln!(f, "====== Block ID ====== {}", b.id()), + }, + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreBrokerDisconnect { + V0(CoreBrokerDisconnectV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreOverlayJoin { + V0(CoreOverlayJoinV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OuterOverlayRequest { + V0(OuterOverlayRequestV0), +} + +/// Content of CoreRequest V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreRequestContentV0 { + BrokerConnect(CoreBrokerConnect), + BrokerDisconnect(CoreBrokerDisconnect), + OverlayJoin(CoreOverlayJoin), + BlockSearchTopic(BlockSearchTopic), + BlockSearchRandom(BlockSearchRandom), + TopicSyncReq(CoreTopicSyncReq), + OuterOverlayRequest(OuterOverlayRequest), +} + +/// CoreRequest V0 +/// +/// replied with CoreResponse V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreRequestV0 { + /// Request ID + pub id: i64, + pub content: CoreRequestContentV0, + + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +/// Request sent to a broker in the core network +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreRequest { + V0(CoreRequestV0), +} + +/// CoreBrokerConnectResponse V0 +/// +/// reply to a CoreBrokerConnect V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreBrokerConnectResponseV0 { + pub successes: Vec, + pub errors: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OuterOverlayResponse { + V0(OuterOverlayResponseV0), +} + +/// Content CoreResponse V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreResponseContentV0 { + BrokerConnectResponse(CoreBrokerConnectResponse), + BlockResult(BlockResult), + TopicSyncRes(TopicSyncRes), + OuterOverlayResponse(OuterOverlayResponse), + EmptyResponse(()), +} + +/// CoreResponse V0 +/// +/// reply to a CoreRequest V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CoreResponseV0 { + /// Request ID + pub id: i64, + + /// Result + pub result: u16, + pub content: CoreResponseContentV0, + + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +/// Response to a Request sent to a broker in the core network +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreResponse { + V0(CoreResponseV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OuterOverlayMessageContentV0 { + Event(Event), +} + +/// OuterOverlayMessage V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct 
OuterOverlayMessageV0 { + pub overlay: Digest, + + pub content: OuterOverlayMessageContentV0, + + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreAdvert { + V0(CoreAdvertV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreDirectMessage { + V0(CoreDirectMessageV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OuterOverlayMessage { + V0(OuterOverlayMessageV0), +} + +/// CoreMessageV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreMessageV0 { + Request(CoreRequest), + Response(CoreResponse), + Advert(CoreAdvert), + Direct(CoreDirectMessage), + InnerOverlay(InnerOverlayMessage), + OuterOverlay(OuterOverlayMessage), +} + +/// Core message +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CoreMessage { + V0(CoreMessageV0), +} + +/// AppMessageContentV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppMessageContentV0 { + Request(AppRequest), + Response(AppResponse), + SessionStop(AppSessionStop), + SessionStart(AppSessionStart), + EmptyResponse, +} + +/// AppMessageV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AppMessageV0 { + pub content: AppMessageContentV0, + + pub id: i64, + + pub result: u16, +} + +/// App message +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AppMessage { + V0(AppMessageV0), +} + +impl IStreamable for AppMessage { + fn result(&self) -> u16 { + match self { + AppMessage::V0(v0) => v0.result, + } + } + fn set_result(&mut self, result: u16) { + match self { + AppMessage::V0(v0) => v0.result = result, + } + } +} + +impl AppMessage { + pub fn get_actor(&self) -> Box { + match self { + AppMessage::V0(AppMessageV0 { content: o, id, .. }) => match o { + AppMessageContentV0::Request(req) => req.get_actor(*id), + AppMessageContentV0::SessionStop(req) => req.get_actor(*id), + AppMessageContentV0::SessionStart(req) => req.get_actor(*id), + AppMessageContentV0::Response(_) | AppMessageContentV0::EmptyResponse => { + panic!("it is not a request"); + } + }, + } + } + pub fn id(&self) -> Option { + match self { + AppMessage::V0(v0) => Some(v0.id), + } + } + pub fn set_id(&mut self, id: i64) { + match self { + AppMessage::V0(r) => r.id = id, + } + } +} + +impl From for ProtocolMessage { + fn from(msg: AppMessage) -> ProtocolMessage { + ProtocolMessage::AppMessage(msg) + } +} + +// +// ADMIN PROTOCOL +// + +/// Content of `AdminRequestV0` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AdminRequestContentV0 { + AddUser(AddUser), + DelUser(DelUser), + ListUsers(ListUsers), + ListInvitations(ListInvitations), + AddInvitation(AddInvitation), + #[doc(hidden)] + CreateUser(CreateUser), +} +impl AdminRequestContentV0 { + pub fn type_id(&self) -> TypeId { + match self { + Self::AddUser(a) => a.type_id(), + Self::DelUser(a) => a.type_id(), + Self::ListUsers(a) => a.type_id(), + Self::ListInvitations(a) => a.type_id(), + Self::AddInvitation(a) => a.type_id(), + Self::CreateUser(a) => a.type_id(), + } + } + pub fn get_actor(&self) -> Box { + match self { + Self::AddUser(a) => a.get_actor(), + Self::DelUser(a) => a.get_actor(), + Self::ListUsers(a) => a.get_actor(), + Self::ListInvitations(a) => a.get_actor(), + Self::AddInvitation(a) => a.get_actor(), + Self::CreateUser(a) => a.get_actor(), + } + } +} + +/// Admin request +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AdminRequestV0 { + /// Request ID + pub id: i64, + + /// Request content + pub content: AdminRequestContentV0, 
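+// NOTE: a sketch (illustration only, helper names assumed) of how the `sig` field below
+// can be produced: the admin key signs the serialized request content, mirroring the way
+// InboxRegister::new() and InboxMsg::new() use the sign() helper later in this file:
+//
+//     let ser = serde_bare::to_vec(&content)?;            // serialize AdminRequestContentV0
+//     let sig = sign(&admin_privkey, &admin_user, &ser)?; // assumption: same sign() helper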
+ + /// Signature over content by admin key + pub sig: Sig, + + /// THe admin user requesting this operation + pub admin_user: PubKey, + + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +impl AdminRequestV0 { + pub fn get_actor(&self) -> Box { + self.content.get_actor() + } +} + +/// Admin request +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AdminRequest { + V0(AdminRequestV0), +} + +impl AdminRequest { + pub fn id(&self) -> i64 { + match self { + Self::V0(o) => o.id, + } + } + pub fn set_id(&mut self, id: i64) { + match self { + Self::V0(v0) => { + v0.id = id; + } + } + } + pub fn type_id(&self) -> TypeId { + match self { + Self::V0(o) => o.content.type_id(), + } + } + pub fn sig(&self) -> Sig { + match self { + Self::V0(o) => o.sig, + } + } + pub fn admin_user(&self) -> PubKey { + match self { + Self::V0(o) => o.admin_user, + } + } + pub fn get_actor(&self) -> Box { + match self { + Self::V0(a) => a.get_actor(), + } + } +} + +impl From for ProtocolMessage { + fn from(msg: AdminRequest) -> ProtocolMessage { + ProtocolMessage::Start(StartProtocol::Admin(msg)) + } +} + +/// Content of `AdminResponseV0` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AdminResponseContentV0 { + EmptyResponse, + Users(Vec), + Invitations(Vec<(InvitationCode, u32, Option)>), + Invitation(Invitation), + UserId(UserId), +} + +/// Response to an `AdminRequest` V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AdminResponseV0 { + /// Request ID + pub id: i64, + + /// Result (including but not limited to Result) + pub result: u16, + + pub content: AdminResponseContentV0, + + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +/// Response to an `AdminRequest` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AdminResponse { + V0(AdminResponseV0), +} + +impl From> for AdminResponseV0 { + fn from(res: Result<(), ProtocolError>) -> AdminResponseV0 { + AdminResponseV0 { + id: 0, + result: res.map(|_| 0).unwrap_or_else(|e| e.into()), + content: AdminResponseContentV0::EmptyResponse, + padding: vec![], + } + } +} + +impl From> for AdminResponseV0 { + fn from(res: Result) -> AdminResponseV0 { + match res { + Err(e) => AdminResponseV0 { + id: 0, + result: e.into(), + content: AdminResponseContentV0::EmptyResponse, + padding: vec![], + }, + Ok(id) => AdminResponseV0 { + id: 0, + result: 0, + content: AdminResponseContentV0::UserId(id), + padding: vec![], + }, + } + } +} + +impl From, ProtocolError>> for AdminResponseV0 { + fn from(res: Result, ProtocolError>) -> AdminResponseV0 { + match res { + Err(e) => AdminResponseV0 { + id: 0, + result: e.into(), + content: AdminResponseContentV0::EmptyResponse, + padding: vec![], + }, + Ok(vec) => AdminResponseV0 { + id: 0, + result: 0, + content: AdminResponseContentV0::Users(vec), + padding: vec![], + }, + } + } +} + +impl From for ProtocolMessage { + fn from(msg: AdminResponseV0) -> ProtocolMessage { + ProtocolMessage::AdminResponse(AdminResponse::V0(msg)) + } +} + +impl From for ProtocolMessage { + fn from(msg: AdminResponse) -> ProtocolMessage { + ProtocolMessage::AdminResponse(msg) + } +} + +impl TryFrom for AdminResponse { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::AdminResponse(res) = msg { + Ok(res) + } else { + Err(ProtocolError::InvalidValue) + } + } +} + +impl AdminResponse { + pub fn id(&self) -> i64 { + match self { + Self::V0(o) => o.id, + } + } + pub fn set_id(&mut self, id: i64) { + match self { + 
Self::V0(v0) => { + v0.id = id; + } + } + } + pub fn result(&self) -> u16 { + match self { + Self::V0(o) => o.result, + } + } + pub fn content_v0(&self) -> AdminResponseContentV0 { + match self { + Self::V0(o) => o.content.clone(), + } + } +} + +// +// CLIENT PROTOCOL +// + +/// Request to open a repo in a non-durable way (without pinning it). +/// +/// When client will disconnect, the subscriptions and publisherAdvert of the topics will be removed, +/// except if a PinRepo occurred before or after the OpenRepo +/// replied with a RepoOpened +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct OpenRepoV0 { + /// Repo Hash + pub hash: RepoHash, + + // for RW overlay, the overlay that should be used in the clientmessage is the innerOverlay + pub overlay: OverlayAccess, + + /// Broker peers to connect to in order to join the overlay + /// can be empty for private store (the broker will not connect to any other broker) + /// but if the private repo is pinned in other brokers, those brokers should be entered here for syncing. + /// can be empty also when we just created the repo, and there are no other brokers in the overlay + pub peers: Vec, + + /// a list of core brokers that are allowed to connect to the overlay (only valid for an inner (RW/WO) overlay). + /// an empty list means any core broker is allowed. this is the default behaviour. + /// to restrict the overlay to only the current core, its DirectPeerId should be entered here. + // pub allowed_peers: Vec, + + /// Maximum number of peers to connect to for this overlay (only valid for an inner (RW/WO) overlay) + /// 0 means automatic/unlimited + pub max_peer_count: u16, + + /// list of topics that should be subscribed to + pub ro_topics: Vec, + + /// list of topics for which we will be a publisher + /// only possible with inner (RW or WO) overlays. + /// implies also subscribing to it (no need to put it also in ro_topics) + pub rw_topics: Vec, +} + +/// Request to open a repo +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum OpenRepo { + V0(OpenRepoV0), +} + +impl OpenRepo { + pub fn peers(&self) -> &Vec { + match self { + OpenRepo::V0(o) => &o.peers, + } + } +} + +/// Request to pin a repo on the broker. +/// +/// When client will disconnect, the subscriptions and publisherAdvert of the topics will remain active on the broker. +/// replied with a RepoOpened +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct PinRepoV0 { + /// Repo Hash + pub hash: RepoHash, + + /// for RW overlay, the overlay that should be used in the clientmessage is the innerOverlay + pub overlay: OverlayAccess, + + /// Root topic of the overlay, used to listen to overlay refreshes. Only used for inner (RW or WO) overlays + pub overlay_root_topic: Option, + + /// only possible for RW overlays. not allowed for private or dialog overlay + pub expose_outer: bool, + + /// Broker peers to connect to in order to join the overlay + /// If the repo has previously been opened (during the same session) or if it is a private overlay, then peers info can be omitted. + /// If there are no known peers in the overlay yet, vector is left empty (creation of a store, or repo in a store that is owned by user). + pub peers: Vec, + + /// Maximum number of peers to connect to for this overlay (only valid for an inner (RW/WO) overlay) + pub max_peer_count: u16, + + // /// a list of core brokers that are allowed to connect to the overlay (only valid for an inner (RW/WO) overlay). + // /// an empty list means any core broker is allowed. 
this is the default behaviour. + // /// to restrict the overlay to only the current core, its DirectPeerId should be entered here. + // /// not compatible with expose_outer + // this is probably going to be a config in the server itself. + // pub allowed_peers: Vec, + /// list of topics that should be subscribed to + /// If the repo has previously been opened (during the same session) then ro_topics info can be omitted + pub ro_topics: Vec, + + /// list of topics for which we will be a publisher + /// only possible with inner (RW or WO) overlays. + /// If the repo has previously been opened (during the same session) then rw_topics info can be omitted + pub rw_topics: Vec, + // TODO pub inbox_proof + // TODO pub signer_proof +} + +/// Request to pin a repo +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum PinRepo { + V0(PinRepoV0), +} + +impl PinRepo { + pub fn peers(&self) -> &Vec { + match self { + PinRepo::V0(o) => &o.peers, + } + } + pub fn hash(&self) -> &RepoHash { + match self { + PinRepo::V0(o) => &o.hash, + } + } + pub fn ro_topics(&self) -> &Vec { + match self { + PinRepo::V0(o) => &o.ro_topics, + } + } + pub fn rw_topics(&self) -> &Vec { + match self { + PinRepo::V0(o) => &o.rw_topics, + } + } + pub fn overlay(&self) -> &OverlayId { + match self { + PinRepo::V0(o) => &o.overlay.overlay_id_for_client_protocol_purpose(), + } + } + pub fn overlay_access(&self) -> &OverlayAccess { + match self { + PinRepo::V0(o) => &o.overlay, + } + } + + pub fn overlay_root_topic(&self) -> &Option { + match self { + PinRepo::V0(o) => &o.overlay_root_topic, + } + } + + pub fn expose_outer(&self) -> bool { + match self { + PinRepo::V0(o) => o.expose_outer, + } + } +} + +/// Request to refresh the Pinning of a previously pinned repo. +/// +/// it can consist of updating the expose_outer, the list of ro_topics and/or rw_topics, +/// and in case of a ban_member, the broker will effectively flush the topics locally after all local members except the banned one, have refreshed +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct RefreshPinRepoV0 { + /// The new PinRepo info + pub pin: PinRepo, + + /// optional hashed member ID that should be banned + pub ban_member: Option, + + /// when banning, list of topics that are to be flushed (once all the local members have left, except the one to be banned) + /// All the honest local members have to send this list in order for the banned one to be effectively banned + /// for each Topic, a signature over the hashed UserId to ban, by the Topic private key. + /// The banning process on the broker is meant to flush topics that would remain dangling if the malicious member would not unpin them after being removed from members of repo. + /// The userId of banned user is revealed to the local broker where it was attached, which is a breach of privacy deemed acceptable + /// as only a broker that already knew the userid will enforce it, and + /// that broker might be interested to know that the offending user was banned from a repo, as only malicious users are banned. + /// The broker might also discard this information, and just proceed with the flush without much ado. + /// Of course, if the broker is controlled by the malicious user, it might not proceed with the ban/flush. But who cares. That broker will keep old data forever, but it is a malicious broker anyway. 
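+// NOTE: a sketch (illustration only) of how an honest member could assemble the list
+// below; `topic_privkeys` and `hash_user_id` are hypothetical:
+//
+//     let banned_hash = hash_user_id(&banned_user);
+//     let mut flush_topics: Vec<(TopicId, Sig)> = Vec::new();
+//     for (topic_id, topic_privkey) in topic_privkeys {
+//         // signature over the hashed UserId to ban, by the Topic private key
+//         flush_topics.push((topic_id, sign(&topic_privkey, &topic_id, &banned_hash)?));
+//     }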
+    pub flush_topics: Vec<(TopicId, Sig)>,
+}
+
+/// Request to refresh the pinning of a repo
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum RefreshPinRepo {
+    V0(RefreshPinRepoV0),
+}
+
+/// Request to unpin a repo on the broker.
+///
+/// When the client disconnects, the subscriptions and PublisherAdvert of the topics will be removed on the broker
+/// (for that user only; other users might continue to have the repo pinned).
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct UnpinRepoV0 {
+    /// Repo Hash
+    pub hash: RepoHash,
+}
+
+/// Request to unpin a repo
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum UnpinRepo {
+    V0(UnpinRepoV0),
+}
+
+impl UnpinRepo {
+    pub fn hash(&self) -> &RepoHash {
+        match self {
+            UnpinRepo::V0(o) => &o.hash,
+        }
+    }
+}
+
+/// Request the status of pinning for a repo on the broker. V0
+///
+/// Returns an error code if not pinned, otherwise returns a RepoPinStatusV0.
+/// The overlay entered in the ClientMessage is important: if it is the outer overlay, only outer pinning will be checked;
+/// if it is the inner overlay, only inner pinning will be checked.
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct RepoPinStatusReqV0 {
+    /// Repo Hash
+    pub hash: RepoHash,
+
+    #[serde(skip)]
+    pub overlay: Option<OverlayId>,
+}
+
+/// Request the status of pinning for a repo on the broker.
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum RepoPinStatusReq {
+    V0(RepoPinStatusReqV0),
+}
+
+impl RepoPinStatusReq {
+    pub fn hash(&self) -> &RepoHash {
+        match self {
+            RepoPinStatusReq::V0(o) => &o.hash,
+        }
+    }
+    pub fn set_overlay(&mut self, overlay: OverlayId) {
+        match self {
+            Self::V0(v0) => v0.overlay = Some(overlay),
+        }
+    }
+
+    pub fn overlay(&self) -> &OverlayId {
+        match self {
+            Self::V0(v0) => v0.overlay.as_ref().unwrap(),
+        }
+    }
+}
+
+/// Response with the status of pinning for a repo on the broker. V0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct RepoPinStatusV0 {
+    /// Repo Hash
+    pub hash: RepoHash,
+
+    /// only possible for RW overlays
+    pub expose_outer: bool,
+
+    /// list of topics that are subscribed to
+    pub topics: Vec<TopicSubRes>,
+    // TODO pub inbox_proof
+
+    // TODO pub signer_proof
+}
+
+/// Response with the status of pinning for a repo on the broker.
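+// NOTE: a sketch (illustration only) of the client-side pattern these types enable:
+// probe the pin status first, and fall back to PinRepo on error; `send_client_request`
+// is a hypothetical transport call (the overlay field is filled in by the envelope):
+//
+//     let req = RepoPinStatusReqV0 { hash: repo_hash.clone(), overlay: None };
+//     match send_client_request(req).await {
+//         Ok(status) => { /* already pinned: reuse status.topics() */ }
+//         Err(_not_pinned) => { /* send a PinRepo request for this repo */ }
+//     }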
+#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum RepoPinStatus { + V0(RepoPinStatusV0), +} + +impl RepoPinStatus { + pub fn hash(&self) -> &RepoHash { + match self { + RepoPinStatus::V0(o) => &o.hash, + } + } + pub fn is_topic_subscribed_as_publisher(&self, topic: &TopicId) -> bool { + match self { + Self::V0(v0) => { + for sub in &v0.topics { + if sub.topic_id() == topic { + return sub.is_publisher(); + } + } + false + } + } + } + pub fn topics(&self) -> &Vec { + match self { + Self::V0(v0) => &v0.topics, + } + } +} + +/// Request subscription to a `Topic` of an already opened or pinned Repo +/// +/// replied with a TopicSubRes containing the current heads that should be used to do a TopicSync +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct TopicSubV0 { + /// Topic to subscribe + pub topic: TopicId, + + /// Hash of the repo that was previously opened or pinned + pub repo_hash: RepoHash, + + /// Publisher need to provide a signed `PublisherAdvert` for the PeerId of the broker + pub publisher: Option, + + #[serde(skip)] + pub overlay: Option, +} + +/// Request subscription to a `Topic` of an already opened or pinned Repo +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum TopicSub { + V0(TopicSubV0), +} + +impl TopicSub { + pub fn hash(&self) -> &RepoHash { + match self { + Self::V0(o) => &o.repo_hash, + } + } + pub fn topic(&self) -> &TopicId { + match self { + Self::V0(o) => &o.topic, + } + } + pub fn publisher(&self) -> Option<&PublisherAdvert> { + match self { + Self::V0(o) => o.publisher.as_ref(), + } + } + pub fn set_overlay(&mut self, overlay: OverlayId) { + match self { + Self::V0(v0) => v0.overlay = Some(overlay), + } + } + pub fn overlay(&self) -> &OverlayId { + match self { + Self::V0(v0) => v0.overlay.as_ref().unwrap(), + } + } +} + +/// Request unsubscription from a `Topic` of an already opened or pinned Repo +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct TopicUnsubV0 { + /// Topic to unsubscribe + pub topic: PubKey, + + /// Hash of the repo that was previously opened or pinned + pub repo_hash: RepoHash, +} + +/// Request unsubscription from a `Topic` of an already opened or pinned Repo +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum TopicUnsub { + V0(TopicUnsubV0), +} + +/// Request a Block by ID +/// +/// commit_header_key is always set to None in the reply when request is made on OuterOverlay of protected or Group overlays +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlocksGetV0 { + /// Block IDs to request + pub ids: Vec, + + /// Whether or not to include all children recursively + pub include_children: bool, + + /// Topic the object is referenced from, if it is known by the requester. + /// can be used to do a BlockSearchTopic in the core overlay. 
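+// NOTE: the `overlay` fields marked #[serde(skip)] in these client request types never
+// travel on the wire: the overlay is carried once by the enclosing ClientMessageV0 and
+// re-injected on receipt, as done by the TryFrom<ProtocolMessage> impl further down,
+// which ends with content.set_overlay(overlay). A sketch (illustration only):
+//
+//     let mut content: ClientRequestContentV0 = /* deserialized request */;
+//     content.set_overlay(client_message_overlay); // restore the skipped field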
+ pub topic: Option, + + #[serde(skip)] + pub overlay: Option, +} + +/// Request an object by ID +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BlocksGet { + V0(BlocksGetV0), +} + +impl BlocksGet { + pub fn ids(&self) -> &Vec { + match self { + BlocksGet::V0(o) => &o.ids, + } + } + pub fn include_children(&self) -> bool { + match self { + BlocksGet::V0(o) => o.include_children, + } + } + pub fn topic(&self) -> Option { + match self { + BlocksGet::V0(o) => o.topic, + } + } +} + +/// Request a Commit by ID +/// +/// commit_header_key is always set to None in the reply when request is made on OuterOverlay of protected or Group overlays +/// The difference with BlocksGet is that the Broker will try to return all the commit blocks as they were sent in the Pub/Sub Event, if it has it. +/// This will help in having all the blocks (including the header and body blocks), while a BlocksGet would inevitably return only the blocks of the ObjectContent, +/// and not the header nor the body. And the load() would fail with CommitLoadError::MissingBlocks. That's what happens when the Commit is not present in the pubsub, +/// and we need to default to using BlocksGet instead. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CommitGetV0 { + /// Commit ID to request + pub id: ObjectId, + + /// Topic the commit is referenced from, if it is known by the requester. + /// can be used to do a BlockSearchTopic in the core overlay. + pub topic: Option, + + #[serde(skip)] + pub overlay: Option, +} + +/// Request a Commit by ID (see [CommitGetV0] for more details) +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum CommitGet { + V0(CommitGetV0), +} +impl CommitGet { + pub fn id(&self) -> &ObjectId { + match self { + CommitGet::V0(o) => &o.id, + } + } +} + +/// Request to store one or more blocks +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct WalletPutExportV0 { + pub wallet: ExportedWallet, + pub rendezvous_id: SymKey, + pub is_rendezvous: bool, +} + +/// Request to store one or more blocks +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum WalletPutExport { + V0(WalletPutExportV0), +} + +/// Request to store one or more blocks +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlocksPutV0 { + /// Blocks to store + pub blocks: Vec, + + #[serde(skip)] + pub overlay: Option, +} + +/// Request to store one or more blocks +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BlocksPut { + V0(BlocksPutV0), +} + +impl BlocksPut { + pub fn blocks(&self) -> &Vec { + match self { + BlocksPut::V0(o) => &o.blocks, + } + } + pub fn overlay(&self) -> &OverlayId { + match self { + Self::V0(v0) => v0.overlay.as_ref().unwrap(), + } + } + pub fn set_overlay(&mut self, overlay: OverlayId) { + match self { + Self::V0(v0) => v0.overlay = Some(overlay), + } + } +} + +/// Request to know if some blocks are present locally +/// +/// used by client before publishing an event with files, to know what to push +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlocksExistV0 { + /// Ids of Blocks to check + pub blocks: Vec, + + #[serde(skip)] + pub overlay: Option, +} + +/// Request to store one or more blocks +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BlocksExist { + V0(BlocksExistV0), +} + +impl BlocksExist { + pub fn blocks(&self) -> &Vec { + match self { + BlocksExist::V0(o) => &o.blocks, + } + } + pub fn overlay(&self) -> &OverlayId { + match self { + Self::V0(v0) => v0.overlay.as_ref().unwrap(), + } + } + pub fn set_overlay(&mut self, overlay: 
OverlayId) { + match self { + Self::V0(v0) => v0.overlay = Some(overlay), + } + } +} + +/// Request to pin an object +/// +/// Brokers maintain an LRU cache of objects, +/// where old, unused objects might get deleted to free up space for new ones. +/// Pinned objects are retained, regardless of last access. +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct ObjectPinV0 { + pub id: ObjectId, +} + +/// Request to pin an object +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum ObjectPin { + V0(ObjectPinV0), +} + +impl ObjectPin { + pub fn id(&self) -> ObjectId { + match self { + ObjectPin::V0(o) => o.id, + } + } +} + +/// Request to unpin an object +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct ObjectUnpinV0 { + pub id: ObjectId, +} + +/// Request to unpin an object +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum ObjectUnpin { + V0(ObjectUnpinV0), +} + +impl ObjectUnpin { + pub fn id(&self) -> ObjectId { + match self { + ObjectUnpin::V0(o) => o.id, + } + } +} + +/// Request to delete an object +/// +/// only effective if the refcount for this object is zero (basically it removes it from LRU) +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct ObjectDelV0 { + pub id: ObjectId, +} + +/// Request to delete an object +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub enum ObjectDel { + V0(ObjectDelV0), +} + +impl ObjectDel { + pub fn id(&self) -> ObjectId { + match self { + ObjectDel::V0(o) => o.id, + } + } +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct InboxRegister { + pub inbox_id: PubKey, + pub overlay: OverlayId, + // TODO: obtain challenge from Broker + pub challenge: [u8; 32], + // signature of challenge by inbox privkey + pub sig: Sig +} + +impl InboxRegister { + pub fn new(inbox: PrivKey, overlay: OverlayId) -> Result { + let challenge = random_key(); + let inbox_id = inbox.to_pub(); + let sig = sign(&inbox,&inbox_id, &challenge)?; + Ok(Self { + inbox_id, + overlay, + challenge, + sig + }) + } +} + + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct InboxPost { + pub msg: InboxMsg, + /// optional Locator for destination inbox, in case broker doesn't know where to find inbox + pub to_broker: Option, +} + +impl InboxPost { + pub fn new( + to_overlay: OverlayId, + to_inbox: PubKey, + from: Option<(OverlayId,PrivKey)>, + content:&InboxMsgContent, + blocks: Vec, + to_broker: Option + ) -> Result + { + Ok(Self { + msg: InboxMsg::new(to_overlay,to_inbox,from,content,blocks)?, + to_broker + }) + } + + pub fn new_social_query_response( + to_overlay: OverlayId, + to_inbox: PubKey, + from: Option<(OverlayId,PrivKey)>, + query_id: RepoId, + forwarder_id: RepoId, + content: SocialQueryResponseContent + ) -> Result { + let content = InboxMsgContent::SocialQuery(SocialQuery::Response(SocialQueryResponse { query_id, forwarder_id, content })); + Self::new(to_overlay, to_inbox, from, &content, vec![], None) + } + + pub fn new_social_query_response_replying_to( + msg: &InboxMsgBody, + request: &SocialQueryRequest, + content: SocialQueryResponseContent, + inbox_privkey: PrivKey, + ) -> Result { + let to_overlay = msg.from_overlay.ok_or(NgError::InvalidArgument)?; + let to_inbox = msg.from_inbox.ok_or(NgError::InvalidArgument)?; + if msg.to_inbox != inbox_privkey.to_pub() { return Err(NgError::InvalidArgument); } + let from = Some((msg.to_overlay, inbox_privkey)); + let query_id = request.query_id; + let forwarder_id = request.forwarder_id; + let content = 
InboxMsgContent::SocialQuery(SocialQuery::Response(SocialQueryResponse { query_id, forwarder_id, content })); + Self::new(to_overlay, to_inbox, from, &content, vec![], None) + } + + /// to_profile_nuri = did:ng:[ab] + /// to_inbox_nuri = did:ng:d + pub fn new_social_query_request( + from_profile_store_repo: StoreRepo, + from_inbox: PrivKey, + forwarder_id: RepoId, + to_profile_nuri: String, + to_inbox_nuri: String, + to_broker: Option, + query_id: RepoId, + definition_commit_body_ref: ObjectRef, + blocks: Vec, + degree: u16, + ) -> Result { + + // processing to_profile_nuri + let c = RE_PROFILE.captures(&to_profile_nuri); + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + let to_profile_id = decode_key(o)?; + let to_overlay = OverlayId::outer(&to_profile_id); + + // processing to_inbox_nuri + let c = RE_INBOX.captures(&to_inbox_nuri); + if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + { + let cap = c.unwrap(); + let d = cap.get(1).unwrap().as_str(); + let to_inbox = decode_key(d)?; + let from_overlay = from_profile_store_repo.outer_overlay(); + let content = InboxMsgContent::SocialQuery(SocialQuery::Request(SocialQueryRequest{ + query_id, + forwarder_id, + from_profile_store_repo, + degree, + definition_commit_body_ref, + })); + + return Ok(InboxPost::new( + to_overlay, + to_inbox, + Some((from_overlay,from_inbox)), + &content, + blocks, + to_broker + )?); + } + } + Err(NgError::InvalidNuri) + } + + pub fn new_contact_details( + from_profile_store_repo: StoreRepo, + from_inbox: PrivKey, + to_overlay: OverlayId, + to_inbox: PubKey, + to_broker: Option, + with_readcap: bool, + name: String, + email: Option + ) -> Result { + + let from_overlay = from_profile_store_repo.outer_overlay(); + let content = InboxMsgContent::ContactDetails(ContactDetails{ + profile: from_profile_store_repo, + read_cap: if with_readcap {unimplemented!();} else {None}, + name, + email + }); + + return Ok(InboxPost::new( + to_overlay, + to_inbox, + Some((from_overlay,from_inbox)), + &content, + vec![], + to_broker + )?); + } + +} + +/// Request to publish an event in pubsub +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct PublishEvent(pub Event, #[serde(skip)] pub Option); + +/// Content of `ClientRequestV0` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientRequestContentV0 { + OpenRepo(OpenRepo), + PinRepo(PinRepo), + UnpinRepo(UnpinRepo), + RepoPinStatusReq(RepoPinStatusReq), + + // once repo is opened or pinned: + TopicSub(TopicSub), + TopicUnsub(TopicUnsub), + + BlocksExist(BlocksExist), + BlocksGet(BlocksGet), + CommitGet(CommitGet), + TopicSyncReq(TopicSyncReq), + + // For Pinned Repos only : + ObjectPin(ObjectPin), + ObjectUnpin(ObjectUnpin), + ObjectDel(ObjectDel), + + // For InnerOverlay's only : + BlocksPut(BlocksPut), + PublishEvent(PublishEvent), + + WalletPutExport(WalletPutExport), + + InboxRegister(InboxRegister), + InboxPost(InboxPost), +} + +impl ClientRequestContentV0 { + pub fn set_overlay(&mut self, overlay: OverlayId) { + match self { + ClientRequestContentV0::RepoPinStatusReq(a) => a.set_overlay(overlay), + ClientRequestContentV0::TopicSub(a) => a.set_overlay(overlay), + ClientRequestContentV0::PinRepo(_a) => {} + ClientRequestContentV0::InboxRegister(_a) => {} + ClientRequestContentV0::InboxPost(_a) => {} + ClientRequestContentV0::PublishEvent(a) => a.set_overlay(overlay), + ClientRequestContentV0::CommitGet(a) => a.set_overlay(overlay), + ClientRequestContentV0::TopicSyncReq(a) => 
a.set_overlay(overlay), + ClientRequestContentV0::BlocksPut(a) => a.set_overlay(overlay), + ClientRequestContentV0::BlocksExist(a) => a.set_overlay(overlay), + ClientRequestContentV0::BlocksGet(a) => a.set_overlay(overlay), + ClientRequestContentV0::WalletPutExport(_a) => {} + _ => unimplemented!(), + } + } +} + +/// Broker overlay request +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ClientRequestV0 { + /// Request ID + pub id: i64, + + /// Request content + pub content: ClientRequestContentV0, +} + +/// Broker overlay request +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientRequest { + V0(ClientRequestV0), +} + +impl ClientRequest { + pub fn id(&self) -> i64 { + match self { + ClientRequest::V0(o) => o.id, + } + } + pub fn set_id(&mut self, id: i64) { + match self { + ClientRequest::V0(v0) => { + v0.id = id; + } + } + } + pub fn content_v0(&self) -> &ClientRequestContentV0 { + match self { + ClientRequest::V0(o) => &o.content, + } + } + pub fn get_actor(&self) -> Box { + match self { + Self::V0(ClientRequestV0 { content, .. }) => match content { + ClientRequestContentV0::RepoPinStatusReq(r) => r.get_actor(self.id()), + ClientRequestContentV0::PinRepo(r) => r.get_actor(self.id()), + ClientRequestContentV0::TopicSub(r) => r.get_actor(self.id()), + ClientRequestContentV0::PublishEvent(r) => r.get_actor(self.id()), + ClientRequestContentV0::CommitGet(r) => r.get_actor(self.id()), + ClientRequestContentV0::TopicSyncReq(r) => r.get_actor(self.id()), + ClientRequestContentV0::BlocksPut(r) => r.get_actor(self.id()), + ClientRequestContentV0::BlocksExist(r) => r.get_actor(self.id()), + ClientRequestContentV0::BlocksGet(r) => r.get_actor(self.id()), + ClientRequestContentV0::WalletPutExport(r) => r.get_actor(self.id()), + ClientRequestContentV0::InboxRegister(r) => r.get_actor(self.id()), + ClientRequestContentV0::InboxPost(r) => r.get_actor(self.id()), + _ => unimplemented!(), + }, + } + } +} + +impl TryFrom for ClientRequestContentV0 { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 { + overlay, + content: + ClientMessageContentV0::ClientRequest(ClientRequest::V0(ClientRequestV0 { + mut content, + .. + })), + .. + })) = msg + { + content.set_overlay(overlay); + Ok(content) + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +/// Response which blocks have been found locally. V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BlocksFoundV0 { + /// Ids of Blocks that were found locally + pub found: Vec, + + /// Ids of Blocks that were missing locally + pub missing: Vec, +} + +/// Response which blocks have been found locally. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum BlocksFound { + V0(BlocksFoundV0), +} + +impl BlocksFound { + pub fn found(&self) -> &Vec { + match self { + BlocksFound::V0(o) => &o.found, + } + } + pub fn missing(&self) -> &Vec { + match self { + BlocksFound::V0(o) => &o.missing, + } + } +} + +/// Topic subscription response V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct TopicSubResV0 { + /// Topic subscribed + pub topic: TopicId, + pub known_heads: Vec, + pub publisher: bool, + pub commits_nbr: u64, +} + +/// Topic subscription response +/// +/// it is a stream of blocks and or events. 
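+// NOTE: a sketch (illustration only) of the intended subscribe-then-sync flow: the
+// TopicSubRes below returns the broker's known heads, which the client can use as the
+// targets of a follow-up TopicSyncReq; `subscribe` is a hypothetical client call:
+//
+//     let sub_res: TopicSubRes = subscribe(topic_id).await?;
+//     if sub_res.commits_nbr() > 0 {
+//         // catch up by syncing towards the heads the broker reported:
+//         let targets = sub_res.known_heads().clone();
+//     }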
+#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum TopicSubRes { + V0(TopicSubResV0), +} + +impl TopicSubRes { + pub fn topic_id(&self) -> &TopicId { + match self { + Self::V0(v0) => &v0.topic, + } + } + pub fn is_publisher(&self) -> bool { + match self { + Self::V0(v0) => v0.publisher, + } + } + pub fn new_from_heads( + topics: HashSet, + publisher: bool, + topic: TopicId, + commits_nbr: u64, + ) -> Self { + TopicSubRes::V0(TopicSubResV0 { + topic, + known_heads: topics.into_iter().collect(), + publisher, + commits_nbr, + }) + } + pub fn known_heads(&self) -> &Vec { + match self { + Self::V0(v0) => &v0.known_heads, + } + } + pub fn commits_nbr(&self) -> u64 { + match self { + Self::V0(v0) => v0.commits_nbr, + } + } +} + +impl From for TopicSubRes { + fn from(topic: TopicId) -> Self { + TopicSubRes::V0(TopicSubResV0 { + topic, + known_heads: vec![], + publisher: false, + commits_nbr: 0, + }) + } +} + +impl From for TopicSubRes { + fn from(topic: PublisherAdvert) -> Self { + TopicSubRes::V0(TopicSubResV0 { + topic: topic.topic_id().clone(), + known_heads: vec![], + publisher: true, + commits_nbr: 0, + }) + } +} + +pub type RepoOpened = Vec; + +/// Content of `ClientResponseV0` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientResponseContentV0 { + EmptyResponse, + Block(Block), + RepoOpened(RepoOpened), + TopicSubRes(TopicSubRes), + TopicSyncRes(TopicSyncRes), + BlocksFound(BlocksFound), + RepoPinStatus(RepoPinStatus), +} + +/// Response to a `ClientRequest` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ClientResponseV0 { + /// Request ID + pub id: i64, + + /// Result (including but not limited to Result) + pub result: u16, + + /// Response content + pub content: ClientResponseContentV0, +} + +impl ClientResponse { + pub fn set_result(&mut self, res: u16) { + match self { + Self::V0(v0) => v0.result = res, + } + } +} + +/// Response to a `ClientRequest` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientResponse { + V0(ClientResponseV0), +} + +impl From for ClientResponse { + fn from(err: ServerError) -> ClientResponse { + ClientResponse::V0(ClientResponseV0 { + id: 0, + result: err.into(), + content: ClientResponseContentV0::EmptyResponse, + }) + } +} + +#[derive(Debug)] +pub struct EmptyAppResponse(pub ()); + +impl From for AppMessage { + fn from(err: ServerError) -> AppMessage { + AppMessage::V0(AppMessageV0 { + id: 0, + result: err.into(), + content: AppMessageContentV0::EmptyResponse, + }) + } +} + +impl From> for ProtocolMessage +where + A: Into + std::fmt::Debug, +{ + fn from(res: Result) -> ProtocolMessage { + match res { + Ok(a) => a.into(), + Err(e) => ProtocolMessage::from_client_response_err(e), + } + } +} + +impl From<()> for ProtocolMessage { + fn from(_msg: ()) -> ProtocolMessage { + let cm: ClientResponse = ServerError::Ok.into(); + cm.into() + } +} + +impl ClientResponse { + pub fn id(&self) -> i64 { + match self { + ClientResponse::V0(o) => o.id, + } + } + pub fn set_id(&mut self, id: i64) { + match self { + ClientResponse::V0(v0) => { + v0.id = id; + } + } + } + pub fn result(&self) -> u16 { + match self { + ClientResponse::V0(o) => o.result, + } + } + pub fn block(&self) -> Option<&Block> { + match self { + ClientResponse::V0(o) => match &o.content { + ClientResponseContentV0::Block(b) => Some(b), + _ => panic!("this not a block response"), + }, + } + } +} + +impl TryFrom for ClientResponseContentV0 { + type Error = ProtocolError; + fn try_from(msg: ProtocolMessage) -> Result { + if let 
ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 { + content: + ClientMessageContentV0::ClientResponse(ClientResponse::V0(ClientResponseV0 { + content, + result: res, + .. + })), + .. + })) = msg + { + let err = ServerError::try_from(res).unwrap(); + if !err.is_err() { + Ok(content) + } else { + Err(ProtocolError::ServerError) + } + } else { + log_debug!("INVALID {:?}", msg); + Err(ProtocolError::InvalidValue) + } + } +} + +/// Starts a new Social Query +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct SocialQueryRequest { + /// Query ID + pub query_id: RepoId, + + /// Forwarder ID + pub forwarder_id: RepoId, + + /// Profile ID (must match the from_overlay) + pub from_profile_store_repo: StoreRepo, + + /// degree of forwarding in the social network + /// gets decremented at every hop + /// 0 means unlimited + /// 1 means stop here (after processing locally, do not forward) + pub degree: u16, + + /// Definition in RDF. the blocks are added in InboxMsg.blocks + pub definition_commit_body_ref: ObjectRef, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum SocialQueryResponseContent { + True, + False, + Graph(Vec), + QueryResult(Vec), + EndOfReplies, + AlreadyRequested, + Error(u16), +} + +/// Response to a `SocialQueryRequest` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct SocialQueryResponse { + /// Query ID + pub query_id: RepoId, + + /// Forwarder ID + pub forwarder_id: RepoId, + + /// Response content + pub content: SocialQueryResponseContent, +} + +/// ContactDetails sent in reply to scanning a QRcode of a profile +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ContactDetails { + /// Profile Nuri + pub profile: StoreRepo, + + /// optional readcap on the profile, if user wants to share the content of profile + pub read_cap: Option, + + pub name: String, + + pub email: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum SocialQuery { + Request(SocialQueryRequest), + Response(SocialQueryResponse), + Cancel(RepoId), + Delete(RepoId), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum InboxMsgContent { + ContactDetails(ContactDetails), + DialogRequest, + Link, + Patch, + ServiceRequest, + ExtRequest, + RemoteQuery, + SocialQuery(SocialQuery), + //Transaction + //Comment + //BackLink +} + +/// InboxMsgBody +#[derive(Clone, Debug, Serialize, Deserialize, Hash, PartialEq)] +pub struct InboxMsgBody { + + pub to_overlay: OverlayId, + pub to_inbox: PubKey, + + pub from_overlay: Option, + pub from_inbox: Option, + + /// crypto_box_sealed of InboxMsgContent serialization, encrypted to the to_inbox pub key + #[serde(with = "serde_bytes")] + pub msg: Vec, +} + +/// InboxMsg +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct InboxMsg { + + pub body: InboxMsgBody, + + /// optional signature by sender (from_inbox pubkey), over body + pub sig: Option, + + /// optional blocks that should be sent with the message + pub blocks: Vec, +} + +impl InboxMsg { + pub fn new( + to_overlay: OverlayId, + to_inbox: PubKey, + from: Option<(OverlayId,PrivKey)>, + content:&InboxMsgContent, + blocks: Vec + ) -> Result { + let ser = serde_bare::to_vec(content).unwrap(); + let mut rng = crypto_box::aead::OsRng {}; + let msg = crypto_box::seal(&mut rng, &to_inbox.to_dh_slice().into(), &ser) + .map_err(|_| NgError::EncryptionError)?; + let body = InboxMsgBody { + to_overlay, + to_inbox, + from_overlay: from.as_ref().map(|(o,_)|o.clone()), + from_inbox: from.as_ref().map(|(_,i)|i.to_pub()), + msg + }; + let sig 
= match from { + Some((_,inbox)) => { + let ser = serde_bare::to_vec(&body).unwrap(); + Some(sign( + &inbox, + body.from_inbox.as_ref().unwrap(), + &ser, + )?)}, + None=>None + }; + Ok( + Self { + body, + sig, + blocks + } + ) + } + + pub fn get_content(&self, inbox_sk: &PrivKey) -> Result { + let ser = crypto_box::seal_open(&(*inbox_sk.to_dh().slice()).into(), &self.body.msg) + .map_err(|_| NgError::DecryptionError)?; + let content: InboxMsgContent = + serde_bare::from_slice(&ser).map_err(|_| NgError::SerializationError)?; + Ok(content) + } +} + +/// Content of `ClientEvent` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientEvent { + InboxPopRequest, +} + +/// Content of `ClientMessageV0` +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientMessageContentV0 { + ClientRequest(ClientRequest), + ClientResponse(ClientResponse), + ForwardedEvent(Event), + ForwardedBlock(Block), + InboxReceive{ msg: InboxMsg, from_queue: bool }, + ClientEvent(ClientEvent), +} +impl ClientMessageContentV0 { + pub fn is_block(&self) -> bool { + match self { + Self::ClientRequest(ClientRequest::V0(ClientRequestV0 { + content: ClientRequestContentV0::BlocksPut(_), + .. + })) => true, + Self::ClientResponse(ClientResponse::V0(ClientResponseV0 { + content: ClientResponseContentV0::Block(_), + .. + })) => true, + _ => false, + } + } +} + +/// Broker message for an overlay +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ClientMessageV0 { + pub overlay: OverlayId, + pub content: ClientMessageContentV0, + /// Optional padding + #[serde(with = "serde_bytes")] + pub padding: Vec, +} + +pub trait IStreamable { + fn result(&self) -> u16; + fn set_result(&mut self, result: u16); +} + +/// Broker message for an overlay +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientMessage { + V0(ClientMessageV0), +} + +impl IStreamable for ClientMessage { + fn result(&self) -> u16 { + match self { + ClientMessage::V0(o) => match &o.content { + ClientMessageContentV0::ClientResponse(r) => r.result(), + _ => { + panic!("it is not a response"); + } + }, + } + } + fn set_result(&mut self, result: u16) { + match self { + ClientMessage::V0(o) => match &mut o.content { + ClientMessageContentV0::ClientResponse(r) => r.set_result(result), + _ => { + panic!("it is not a response"); + } + }, + } + } +} + +impl ClientMessage { + pub fn content_v0(&self) -> &ClientMessageContentV0 { + match self { + ClientMessage::V0(o) => &o.content, + } + } + pub fn overlay_request(&self) -> &ClientRequest { + match self { + ClientMessage::V0(o) => match &o.content { + ClientMessageContentV0::ClientRequest(r) => &r, + _ => panic!("not an overlay request"), + }, + } + } + + pub fn overlay_id(&self) -> OverlayId { + match self { + ClientMessage::V0(o) => o.overlay, + } + } + pub fn is_request(&self) -> bool { + match self { + ClientMessage::V0(o) => { + matches!(o.content, ClientMessageContentV0::ClientRequest { .. }) + } + } + } + pub fn is_response(&self) -> bool { + match self { + ClientMessage::V0(o) => { + matches!(o.content, ClientMessageContentV0::ClientResponse { .. 
}) + } + } + } + pub fn id(&self) -> Option { + match self { + ClientMessage::V0(o) => match &o.content { + ClientMessageContentV0::ClientResponse(r) => Some(r.id()), + ClientMessageContentV0::ClientRequest(r) => Some(r.id()), + ClientMessageContentV0::ClientEvent(r) => Some(1), + _ => None, + }, + } + } + pub fn set_id(&mut self, id: i64) { + match self { + ClientMessage::V0(o) => match &mut o.content { + ClientMessageContentV0::ClientResponse(ref mut r) => r.set_id(id), + ClientMessageContentV0::ClientRequest(ref mut r) => r.set_id(id), + _ => { + panic!("it is an event") + } + }, + } + } + + pub fn block<'a>(&self) -> Option<&Block> { + match self { + ClientMessage::V0(o) => match &o.content { + ClientMessageContentV0::ClientResponse(r) => r.block(), + _ => { + panic!("it is not a response"); + } + }, + } + } + + pub fn get_actor(&self) -> Box { + match self { + ClientMessage::V0(o) => match &o.content { + ClientMessageContentV0::ClientRequest(req) => req.get_actor(), + ClientMessageContentV0::ClientEvent(req) => req.get_actor(1), + _ => { + panic!("it is not a request"); + } + }, + } + } +} + +// +// EXTERNAL REQUESTS +// + +/// Request object(s) by ID by non-members to a broker +/// +/// The response includes the requested objects and all their children recursively, +/// and optionally all file object dependencies and their children recursively. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExtObjectGetV0 { + /// outer overlayId + pub overlay: OverlayId, + + /// List of Object IDs to request, including their children + pub ids: Vec, + + /// Whether or not to include all files objects + pub include_files: bool, +} + +/// Request object(s) by ID by non-members +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ExtObjectGet { + V0(ExtObjectGetV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExtWalletGetExportV0 { + pub id: SymKey, + pub is_rendezvous: bool, +} + +/// Topic synchronization request +pub type ExtTopicSyncReq = TopicSyncReq; + +/// Content of ExtRequestV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ExtRequestContentV0 { + WalletGetExport(ExtWalletGetExportV0), + ExtObjectGet(ExtObjectGetV0), + ExtTopicSyncReq(ExtTopicSyncReq), + // TODO inbox requests + // TODO subreq ? +} + +impl ExtRequestContentV0 { + pub fn get_actor(&self) -> Box { + match self { + Self::WalletGetExport(a) => a.get_actor(), + Self::ExtObjectGet(a) => a.get_actor(), + _ => unimplemented!(), // Self::ExtTopicSyncReq(a) => a.get_actor(), + } + } +} + +/// External request with its request ID +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ExtRequestV0 { + /// Request ID + pub id: i64, + + /// Request content + pub content: ExtRequestContentV0, +} + +/// External request are made by clients directly to a core broker of their choice. +/// +/// They differ from OuterOverlayRequests in the sense that the broker where the client is attached, is not involved in the request. +/// It is a direct connection that is established between the client and the core broker that will give the response. 
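+// NOTE: a sketch (illustration only) of an anonymous fetch over this direct path, e.g.
+// retrieving public objects from an outer overlay without being a member of it;
+// `outer_overlay_id` and `object_id` are placeholders:
+//
+//     let content = ExtRequestContentV0::ExtObjectGet(ExtObjectGetV0 {
+//         overlay: outer_overlay_id,
+//         ids: vec![object_id],
+//         include_files: true, // also pull the file objects and their children
+//     });
+//     let req = ExtRequest::V0(ExtRequestV0 { id: 0, content }); // id set later via set_id()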
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum ExtRequest {
+    V0(ExtRequestV0),
+}
+
+impl ExtRequest {
+    pub fn id(&self) -> i64 {
+        match self {
+            ExtRequest::V0(v0) => v0.id,
+        }
+    }
+    pub fn set_id(&mut self, id: i64) {
+        match self {
+            ExtRequest::V0(v0) => {
+                v0.id = id;
+            }
+        }
+    }
+    pub fn get_actor(&self) -> Box<dyn EActor> {
+        match self {
+            Self::V0(a) => a.content.get_actor(),
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ExportedWallet(pub serde_bytes::ByteBuf);
+
+/// Content of ExtResponseV0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum ExtResponseContentV0 {
+    EmptyResponse,
+    Block(Block),
+    Blocks(Vec<Block>),
+    Wallet(ExportedWallet),
+    // TODO inbox related replies
+    // TODO event ?
+}
+
+impl TryFrom<ProtocolMessage> for ExtResponseContentV0 {
+    type Error = ProtocolError;
+    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
+        if let ProtocolMessage::ExtResponse(ExtResponse::V0(ExtResponseV0 {
+            content,
+            result,
+            ..
+        })) = msg
+        {
+            let err = ServerError::try_from(result).unwrap();
+            if !err.is_err() {
+                Ok(content)
+            } else {
+                Err(ProtocolError::ServerError)
+            }
+        } else {
+            log_debug!("INVALID {:?}", msg);
+            Err(ProtocolError::InvalidValue)
+        }
+    }
+}
+
+/// Response to an ExtRequest
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ExtResponseV0 {
+    /// Request ID
+    pub id: i64,
+
+    /// Result code
+    pub result: u16,
+
+    /// Response content
+    pub content: ExtResponseContentV0,
+}
+
+/// Response to an ExtRequest
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum ExtResponse {
+    V0(ExtResponseV0),
+}
+
+impl ExtResponse {
+    pub fn id(&self) -> i64 {
+        match self {
+            ExtResponse::V0(v0) => v0.id,
+        }
+    }
+    pub fn set_id(&mut self, id: i64) {
+        match self {
+            ExtResponse::V0(v0) => {
+                v0.id = id;
+            }
+        }
+    }
+    pub fn result(&self) -> u16 {
+        match self {
+            Self::V0(o) => o.result,
+        }
+    }
+    pub fn content_v0(&self) -> ExtResponseContentV0 {
+        match self {
+            Self::V0(o) => o.content.clone(),
+        }
+    }
+}
+
+impl TryFrom<ProtocolMessage> for ExtResponse {
+    type Error = ProtocolError;
+    fn try_from(msg: ProtocolMessage) -> Result<Self, Self::Error> {
+        if let ProtocolMessage::ExtResponse(ext_res) = msg {
+            Ok(ext_res)
+        } else {
+            Err(ProtocolError::InvalidValue)
+        }
+    }
+}
+
+impl From<Result<ExtResponseContentV0, ServerError>> for ExtResponseV0 {
+    fn from(res: Result<ExtResponseContentV0, ServerError>) -> ExtResponseV0 {
+        match res {
+            Err(e) => ExtResponseV0 {
+                id: 0,
+                result: e.into(),
+                content: ExtResponseContentV0::EmptyResponse,
+            },
+            Ok(content) => ExtResponseV0 {
+                id: 0,
+                result: 0,
+                content,
+            },
+        }
+    }
+}
+
+impl From<ExtResponseV0> for ProtocolMessage {
+    fn from(msg: ExtResponseV0) -> ProtocolMessage {
+        ProtocolMessage::ExtResponse(ExtResponse::V0(msg))
+    }
+}
+
+//
+// PROTOCOL MESSAGES
+//
+#[doc(hidden)]
+pub static MAGIC_NG_REQUEST: [u8; 2] = [78u8, 71u8];
+#[doc(hidden)]
+pub static MAGIC_NG_RESPONSE: [u8; 4] = [89u8, 88u8, 78u8, 75u8];
+
+#[derive(Clone, Debug)]
+pub enum Authorization {
+    Discover,
+    ExtMessage,
+    Core,
+    Client((PubKey, Option<Vec<u8>>)),
+    OverlayJoin(PubKey),
+    Admin(PubKey),
+}
+
+/// ProbeResponse
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ProbeResponse {
+    /// Response Magic number
+    #[serde(with = "serde_bytes")]
+    pub magic: Vec<u8>,
+
+    /// Used for discovery of a broker on a private LAN
+    /// (see ListenerV0.discoverable)
+    pub peer_id: Option<PubKey>,
+}
+
+/// RelayRequest
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct RelayRequest {
+    /// The BindAddress of the broker to relay to; it should be of the same IP family as TunnelRequest.remote_addr
+    pub address: BindAddress,
+}
+
+/// RelayResponse
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct RelayResponse {
+    /// Response Magic number
+    #[serde(with = "serde_bytes")]
+    pub magic: Vec<u8>,
+
+    /// Result of the relay request (accept or refuse)
+    pub result: u16,
+}
+
+/// Tunnel Request
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct TunnelRequest {
+    /// Request Magic number
+    #[serde(with = "serde_bytes")]
+    pub magic: Vec<u8>,
+
+    /// Bind address of the client as connected to the relaying broker.
+    pub remote_addr: BindAddress,
+}
+
+/// Tunnel Response
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct TunnelResponse {
+    /// Response Magic number
+    #[serde(with = "serde_bytes")]
+    pub magic: Vec<u8>,
+
+    /// Result of the tunnel request (accept or refuse)
+    pub result: u16,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum ProtocolMessage {
+    Probe([u8; 2]),
+    ProbeResponse(ProbeResponse),
+    Relay(RelayRequest),
+    RelayResponse(RelayResponse),
+    Tunnel(TunnelRequest),
+    TunnelResponse(TunnelResponse),
+    Noise(Noise),
+    Start(StartProtocol),
+    ServerHello(ServerHello),
+    ClientAuth(ClientAuth),
+    AuthResult(AuthResult),
+    ExtRequest(ExtRequest),
+    ExtResponse(ExtResponse),
+    //AdminRequest(AdminRequest),
+    AdminResponse(AdminResponse),
+    ClientMessage(ClientMessage),
+    AppMessage(AppMessage),
+    CoreMessage(CoreMessage),
+}
+
+impl TryFrom<&ProtocolMessage> for ServerError {
+    type Error = NgError;
+    fn try_from(msg: &ProtocolMessage) -> Result<Self, Self::Error> {
+        if let ProtocolMessage::ClientMessage(ref bm) = msg {
+            let res = bm.result();
+            if res != 0 {
+                return Ok(ServerError::try_from(res).unwrap());
+            }
+        }
+        if let ProtocolMessage::ExtResponse(ref bm) = msg {
+            let res = bm.result();
+            if res != 0 {
+                return Ok(ServerError::try_from(res).unwrap());
+            }
+        }
+        if let ProtocolMessage::AppMessage(ref bm) = msg {
+            let res = bm.result();
+            if res != 0 {
+                return Ok(ServerError::try_from(res).unwrap());
+            }
+        }
+        Err(NgError::NotAServerError)
+    }
+}
+
+impl ProtocolMessage {
+    pub fn id(&self) -> Option<i64> {
+        match self {
+            ProtocolMessage::ExtRequest(ext_req) => Some(ext_req.id()),
+            ProtocolMessage::ExtResponse(ext_res) => Some(ext_res.id()),
+            ProtocolMessage::ClientMessage(client_msg) => client_msg.id(),
+            ProtocolMessage::AppMessage(app_msg) => app_msg.id(),
+            _ => None,
+        }
+    }
+    pub fn set_id(&mut self, id: i64) {
+        match self {
+            ProtocolMessage::ExtRequest(ext_req) => ext_req.set_id(id),
+            ProtocolMessage::ExtResponse(ext_res) => ext_res.set_id(id),
+            ProtocolMessage::ClientMessage(client_msg) => client_msg.set_id(id),
+            ProtocolMessage::AppMessage(app_msg) => app_msg.set_id(id),
+            _ => panic!("cannot set ID"),
+        }
+    }
+    pub fn type_id(&self) -> TypeId {
+        match self {
+            ProtocolMessage::Noise(a) => a.type_id(),
+            ProtocolMessage::Start(a) => a.type_id(),
+            ProtocolMessage::ServerHello(a) => a.type_id(),
+            ProtocolMessage::ClientAuth(a) => a.type_id(),
+            ProtocolMessage::AuthResult(a) => a.type_id(),
+            ProtocolMessage::ExtRequest(a) => a.type_id(),
+            ProtocolMessage::ExtResponse(a) => a.type_id(),
+            ProtocolMessage::ClientMessage(a) => a.type_id(),
+            ProtocolMessage::CoreMessage(a) => a.type_id(),
+            ProtocolMessage::AppMessage(a) => a.type_id(),
+            //ProtocolMessage::AdminRequest(a) => a.type_id(),
+            ProtocolMessage::AdminResponse(a) => a.type_id(),
+            ProtocolMessage::Probe(a) => a.type_id(),
+            ProtocolMessage::ProbeResponse(a) => a.type_id(),
+            ProtocolMessage::Relay(a) => a.type_id(),
+            ProtocolMessage::RelayResponse(a) => a.type_id(),
+            ProtocolMessage::Tunnel(a) => a.type_id(),
+
ProtocolMessage::TunnelResponse(a) => a.type_id(), + } + } + + pub(crate) fn is_streamable(&self) -> Option<&dyn IStreamable> { + match self { + ProtocolMessage::ClientMessage(s) => Some(s as &dyn IStreamable), + ProtocolMessage::AppMessage(s) => Some(s as &dyn IStreamable), + _ => None, + } + } + + pub fn get_actor(&self) -> Box { + match self { + //ProtocolMessage::Noise(a) => a.get_actor(), + ProtocolMessage::Start(a) => a.get_actor(), + ProtocolMessage::ClientMessage(a) => a.get_actor(), + ProtocolMessage::AppMessage(a) => a.get_actor(), + // ProtocolMessage::ServerHello(a) => a.get_actor(), + // ProtocolMessage::ClientAuth(a) => a.get_actor(), + // ProtocolMessage::AuthResult(a) => a.get_actor(), + //ProtocolMessage::ExtRequest(a) => a.get_actor(), + //ProtocolMessage::ExtResponse(a) => a.get_actor(), + // ProtocolMessage::BrokerMessage(a) => a.get_actor(), + _ => unimplemented!(), + } + } + + pub fn from_client_response_err(err: ServerError) -> ProtocolMessage { + let res: ClientResponse = err.into(); + res.into() + } + + pub fn from_client_request_v0( + req: ClientRequestContentV0, + overlay: OverlayId, + ) -> ProtocolMessage { + ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 { + overlay, + content: ClientMessageContentV0::ClientRequest(ClientRequest::V0(ClientRequestV0 { + id: 0, + content: req, + })), + padding: vec![], + })) + } + + pub fn is_block(&self) -> bool { + match self { + ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 { + content: c, + .. + })) => c.is_block(), + _ => false, + } + } +} + +impl From for ClientResponse { + fn from(msg: ClientResponseContentV0) -> ClientResponse { + ClientResponse::V0(ClientResponseV0 { + id: 0, + result: 0, + content: msg, + }) + } +} + +impl From for ProtocolMessage { + fn from(msg: ClientResponseContentV0) -> ProtocolMessage { + let client_res = ClientResponse::V0(ClientResponseV0 { + id: 0, + result: 0, + content: msg, + }); + client_res.into() + } +} + +impl From for ProtocolMessage { + fn from(msg: ClientResponse) -> ProtocolMessage { + ProtocolMessage::ClientMessage(ClientMessage::V0(ClientMessageV0 { + overlay: OverlayId::nil(), + content: ClientMessageContentV0::ClientResponse(msg), + padding: vec![], + })) + } +} + +// +// AUTHENTICATION MESSAGES +// + +/// Content of ClientAuthV0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ClientAuthContentV0 { + /// User pub key + pub user: PubKey, + + /// Client pub key + pub client: PubKey, + + pub info: ClientInfoV0, + + pub registration: Option>, + + /// Nonce from ServerHello + #[serde(with = "serde_bytes")] + pub nonce: Vec, +} + +/// Client authentication +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ClientAuthV0 { + /// Authentication data + pub content: ClientAuthContentV0, + + /// Signature by user key + pub sig: Sig, + + /// Signature by client key + pub client_sig: Sig, +} + +/// Client authentication +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ClientAuth { + V0(ClientAuthV0), +} + +impl ClientAuth { + pub fn content_v0(&self) -> ClientAuthContentV0 { + match self { + ClientAuth::V0(o) => o.content.clone(), + } + } + pub fn sig(&self) -> Sig { + match self { + ClientAuth::V0(o) => o.sig, + } + } + pub fn user(&self) -> PubKey { + match self { + ClientAuth::V0(o) => o.content.user, + } + } + pub fn client(&self) -> PubKey { + match self { + ClientAuth::V0(o) => o.content.client, + } + } + pub fn nonce(&self) -> &Vec { + match self { + ClientAuth::V0(o) => &o.content.nonce, + } + } + pub fn 
registration(&self) -> Option> { + match self { + ClientAuth::V0(o) => o.content.registration, + } + } +} + +impl From for ProtocolMessage { + fn from(msg: ClientAuth) -> ProtocolMessage { + ProtocolMessage::ClientAuth(msg) + } +} + +/// Authentication result +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AuthResultV0 { + pub result: u16, + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +/// Authentication result +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum AuthResult { + V0(AuthResultV0), +} + +impl AuthResult { + pub fn result(&self) -> u16 { + match self { + AuthResult::V0(o) => o.result, + } + } + pub fn metadata(&self) -> &Vec { + match self { + AuthResult::V0(o) => &o.metadata, + } + } +} + +impl From for ProtocolMessage { + fn from(msg: AuthResult) -> ProtocolMessage { + ProtocolMessage::AuthResult(msg) + } +} + +// +// LINKS +// + +/// Link to a repository +/// +/// Consists of an identifier (repoid), a ReadCap or WriteCap, and a locator (peers and overlayLink) +/// Those capabilities are not durable: They can be refreshed by the members and previously shared Caps will become obsolete/revoked. +/// As long as the user is a member of the repo and subscribes to the root topic (of the repo, and of the store if needed/applicable), they will receive the updated capabilities. +/// But if they don't subscribe, they will lose access after the refresh. +/// For durable capabilities, see PermaCap. +/// In most cases, the link is shared and then the recipient opens it and subscribes soon afterward, so there is no need for a PermaCap +/// Perma capabilities are needed only when the link is stored on disk and kept there unopened for a long period. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct RepoLinkV0 { + /// Repository ID + pub id: RepoId, + + /// read capability for the whole repo + /// current (at the time of sharing the link) root branch definition commit + pub read_cap: ReadCap, + + /// Write capability secret. Only set for editors. in this case, overlay MUST be set to an InnerOverlay + // pub write_cap_secret: Option, + + /// Current overlay link, used to join the overlay + pub overlay: OverlayLink, + + /// Peer brokers to connect to + pub peers: Vec, +} + +/// Link to a repository +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum RepoLink { + V0(RepoLinkV0), +} + +impl RepoLink { + pub fn id(&self) -> &RepoId { + match self { + RepoLink::V0(o) => &o.id, + } + } + pub fn peers(&self) -> &Vec { + match self { + RepoLink::V0(o) => &o.peers, + } + } +} + +/// Link for a Public Repo +/// +/// The latest ReadCap of the branch (or main branch) will be downloaded from the outerOverlay, if the peer brokers listed below allow it. +/// The snapshot can be downloaded instead +/// This link is durable, because the public site are served differently by brokers. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct PublicRepoLinkV0 { + /// Repository ID + pub repo: RepoId, + + /// optional branchId to access. a specific public branch, + /// if not set, the main branch of the repo will be used. + pub branch: Option, + + /// optional commits of head to access. + /// if not set, the main branch of the repo will be used. + pub heads: Vec, + + /// optional snapshot to download, in order to display the content quicker to end-user. 
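+    /// (fetching it first lets the content display before the full DAG of commits is synced)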
+ pub snapshot: Option, + + /// The public site store + pub public_store: PubKey, + + /// Peer brokers to connect to + pub peers: Vec, +} + +/// Link to a public repository +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum PublicRepoLink { + V0(PublicRepoLinkV0), +} + +/// Read access to a branch of a Public, Protected or Group store. +/// +/// The overlay to join can be the outer or the inner, depending on what was offered in the link. +/// The difference between the two is that in the outer overlay, only one broker is contacted. +/// In the inner overlay, all the publisher's brokers are contacted, so subscription to the pub/sub is more reliable, less prone to outage. +/// This is not a durable link. If the topic has been refreshed, the pubsub won't be able to be subscribed to, +/// but TopicSyncReq will still work (answering the commits up until the moment the topic was refreshed) +/// and the optional heads will always be retrievable +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ReadBranchLinkV0 { + /// Repository ID + pub repo: RepoId, + + pub branch: BranchId, // must match the one in read_cap + + pub topic: TopicId, + + /// an optional list of heads that can be fetched in this branch + /// useful if a specific head is to be shared + pub heads: Vec, + + /// read capability for the branch + /// current (at the time of sharing the link) branch definition commit + pub read_cap: ReadCap, + + /// Current overlay link, used to join the overlay, most of the time, an outerOverlay is preferred + pub overlay: OverlayLink, + + /// Peer brokers to connect to + pub peers: Vec, +} + +/// Link to a repository +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ReadBranchLink { + V0(ReadBranchLinkV0), +} + +/// Obtains one or more objects of a repo (Commit, File) by their ID. +/// +/// On an outerOverlay, the header is always emptied (no way to reconstruct the DAG of commits) except on public overlays or if a topicId is provided +/// If the intent is to share a whole DAG of commits at a definite CommitID/HEAD, then ReadBranchLink should be used instead (or PublicRepoLink if public site) +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ObjectLinkV0 { + /// Repository ID: not used to make the request. but useful for commits, to know which repo they are from without needing to fetch and open the full DAG of commits. + /// (but the one here might be wrong. only when opening the DAG can the real repo be known. also note that on outerOverlay of non public stores, the DAG is not accessible) + /// note that it could be omitted, specially if the objects are files. As files are content-addressable and belong to an overlay but not to a specific repo or topic. + pub repo: Option, + + /// An optional topic that will be used to retrieve the Certificate of a commit, if needed + /// (topic has to be checked with the one inside the commit. the one here might be wrong. it is provided here as an optimization) + /// or can be used to help with BlockSearchTopic. 
+ /// If the topic is provided, a TopicSyncReq can be performed, and the causal past of the commit will appear (by repeated tries while narrowing down on the ancestors), + /// hence defeating the "emptied header" protection + pub topic: Option, + + pub objects: Vec, + + /// Overlay to join + pub overlay: OverlayLink, + + /// Peer brokers to connect to + pub peers: Vec, +} + +/// Link to a specific commit, without its causal past +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum ObjectLink { + V0(ObjectLinkV0), +} + +/// NextGraph Link V0 +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum NgLinkV0 { + Repo(RepoLink), + PublicRepo(PublicRepoLink), + Branch(ReadBranchLink), + Object(ObjectLink), +} + +/// NextGraph Link +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum NgLink { + V0(NgLinkV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct NgQRCodeWalletTransferV0 { + pub broker: BrokerServerV0, + pub rendezvous: SymKey, // Rendez-vous ID + pub secret_key: SymKey, + pub is_rendezvous: bool, +} +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct NgQRCodeProfileSharingV0 { + pub inbox: PubKey, + pub profile: StoreRepo, + pub name: String, + pub email: Option, +} + +#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop, Serialize, Deserialize)] +pub struct NgQRCodeWalletRecoveryV0 { + #[zeroize(skip)] + #[serde(with = "serde_bytes")] + pub wallet: Vec, // a serialized WalletContentV0, //of which security_img and security_text are emptied + pub pazzle: Vec, + pub mnemonic: [u16; 12], + pub pin: [u8; 4], +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum NgQRCode { + WalletTransferV0(NgQRCodeWalletTransferV0), + WalletRecoveryV0(NgQRCodeWalletRecoveryV0), + ProfileSharingV0(NgQRCodeProfileSharingV0), +} + +impl NgQRCode { + pub fn from_code(code: String) -> Result { + let decoded = base64_url::decode(&code).map_err(|_| NgError::SerializationError)?; + Ok(serde_bare::from_slice(&decoded)?) + } + pub fn to_code(&self) -> String { + let ser = serde_bare::to_vec(self).unwrap(); + base64_url::encode(&ser) + } +} + + +// TODO: PermaLinks and InboxPost (and ExtRequests) + +#[cfg(test)] +mod test { + + use crate::types::{BootstrapContentV0, BrokerServerTypeV0, BrokerServerV0, Invitation}; + use ng_repo::types::PubKey; + + #[test] + pub fn invitation() { + let inv = Invitation::new_v0( + BootstrapContentV0 { + servers: vec![BrokerServerV0 { + server_type: BrokerServerTypeV0::Localhost(14400), + can_verify: false, + can_forward: false, + peer_id: PubKey::Ed25519PubKey([ + 95, 73, 225, 250, 3, 147, 24, 164, 177, 211, 34, 244, 45, 130, 111, 136, + 229, 145, 53, 167, 50, 168, 140, 227, 65, 111, 203, 41, 210, 186, 162, 149, + ]), + }], + }, + Some("test invitation".to_string()), + None, + ); + + println!("{:?}", inv.get_urls()); + } +} diff --git a/ng-net/src/utils.rs b/ng-net/src/utils.rs new file mode 100644 index 0000000..88f3a66 --- /dev/null +++ b/ng-net/src/utils.rs @@ -0,0 +1,534 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
+*/ + +use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; + +use async_std::task; +use ed25519_dalek::*; +use futures::{channel::mpsc, Future}; +use lazy_static::lazy_static; +use noise_protocol::U8Array; +use noise_protocol::DH; +use noise_rust_crypto::sensitive::Sensitive; +use regex::Regex; +use url::Host; +use url::Url; + +#[allow(unused_imports)] +use ng_repo::errors::*; +use ng_repo::types::PubKey; +use ng_repo::{log::*, types::PrivKey}; + +use crate::types::*; +use crate::NG_BOOTSTRAP_LOCAL_PATH; +use crate::WS_PORT; + +#[doc(hidden)] +#[cfg(target_arch = "wasm32")] +pub fn spawn_and_log_error(fut: F) -> task::JoinHandle<()> +where + F: Future> + 'static, +{ + task::spawn_local(async move { + if let Err(e) = fut.await { + log_err!("EXCEPTION {}", e) + } + }) +} +#[cfg(target_arch = "wasm32")] +pub type ResultSend = std::result::Result>; + +#[cfg(not(target_arch = "wasm32"))] +pub type ResultSend = std::result::Result>; + +#[doc(hidden)] +#[cfg(not(target_arch = "wasm32"))] +pub fn spawn_and_log_error(fut: F) -> task::JoinHandle<()> +where + F: Future> + Send + 'static, +{ + task::spawn(async move { + if let Err(e) = fut.await { + log_err!("{}", e) + } + }) +} + +#[cfg(target_arch = "wasm32")] +#[cfg(debug_assertions)] +const APP_PREFIX: &str = "http://localhost:14400"; + +#[cfg(target_arch = "wasm32")] +#[cfg(not(debug_assertions))] +const APP_PREFIX: &str = ""; + +pub fn decode_invitation_string(string: String) -> Option { + Invitation::try_from(string).ok() +} + +lazy_static! { + #[doc(hidden)] + static ref RE_IPV6_WITH_PORT: Regex = + Regex::new(r"^\[([0-9a-fA-F:]{3,39})\](\:\d{1,5})?$").unwrap(); +} + +pub fn decode_locator(string: &str) -> Result { + unimplemented!(); +} + +#[doc(hidden)] +pub fn parse_ipv4_and_port_for( + string: String, + for_option: &str, + default_port: u16, +) -> Result<(Ipv4Addr, u16), NgError> { + let parts: Vec<&str> = string.split(":").collect(); + let ipv4 = parts[0].parse::().map_err(|_| { + NgError::ConfigError(format!( + "The value submitted for the {} option is invalid.", + for_option + )) + })?; + + let port = if parts.len() > 1 { + match serde_json::from_str::(parts[1]) { + Err(_) => default_port, + Ok(p) => { + if p == 0 { + default_port + } else { + p + } + } + } + } else { + default_port + }; + Ok((ipv4, port)) +} + +#[doc(hidden)] +pub fn parse_ip_and_port_for(string: String, for_option: &str) -> Result { + let bind = parse_ip_and_port_for_(string, for_option)?; + Ok(BindAddress { + ip: (&bind.0).into(), + port: bind.1, + }) +} + +fn parse_ip_and_port_for_(string: String, for_option: &str) -> Result<(IpAddr, u16), NgError> { + let c = RE_IPV6_WITH_PORT.captures(&string); + let ipv6; + let port; + if c.is_some() && c.as_ref().unwrap().get(1).is_some() { + let cap = c.unwrap(); + let ipv6_str = cap.get(1).unwrap().as_str(); + port = match cap.get(2) { + None => WS_PORT, + Some(p) => { + let mut chars = p.as_str().chars(); + chars.next(); + match serde_json::from_str::(chars.as_str()) { + Err(_) => WS_PORT, + Ok(p) => { + if p == 0 { + WS_PORT + } else { + p + } + } + } + } + }; + let ipv6 = ipv6_str.parse::().map_err(|_| { + NgError::ConfigError(format!( + "The <[IPv6]:PORT> value submitted for the {} option is invalid.", + for_option + )) + })?; + Ok((IpAddr::V6(ipv6), port)) + } else { + // we try just an IPV6 without port + let ipv6_res = string.parse::(); + if ipv6_res.is_err() { + // let's try IPv4 + + parse_ipv4_and_port_for(string, for_option, WS_PORT) + .map(|ipv4| (IpAddr::V4(ipv4.0), ipv4.1)) + } else { + ipv6 = ipv6_res.unwrap(); + port = 
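+            // the string parsed as a plain IPv6 address without a port:
+            // fall back to the default WebSocket port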
WS_PORT; + Ok((IpAddr::V6(ipv6), port)) + } + } +} + +pub fn check_is_local_url(bootstrap: &BrokerServerV0, location: &String) -> Option { + if location.starts_with(NG_APP_URL) { + match &bootstrap.server_type { + BrokerServerTypeV0::Public(_) | BrokerServerTypeV0::BoxPublicDyn(_) => { + return Some(APP_NG_WS_URL.to_string()); + } + _ => {} + } + } else if let BrokerServerTypeV0::Domain(domain) = &bootstrap.server_type { + let url = format!("https://{}", domain); + if location.starts_with(&url) { + return Some(url); + } + } else { + // localhost + if location.starts_with(LOCAL_URLS[0]) + || location.starts_with(LOCAL_URLS[1]) + || location.starts_with(LOCAL_URLS[2]) + { + if let BrokerServerTypeV0::Localhost(port) = bootstrap.server_type { + return Some(local_http_url(&port)); + } + } + // a private address + else if location.starts_with("http://") { + let url = Url::parse(location).unwrap(); + match url.host() { + Some(Host::Ipv4(ip)) => { + if is_ipv4_private(&ip) { + let res = bootstrap.first_ipv4_http(); + if res.is_some() { + return res; + } + } + } + Some(Host::Ipv6(ip)) => { + if is_ipv6_private(&ip) { + let res = bootstrap.first_ipv6_http(); + if res.is_some() { + return res; + } + } + } + _ => {} + } + } + } + None +} + +#[cfg(target_arch = "wasm32")] +async fn retrieve_ng_bootstrap(location: &String) -> Option { + let prefix = if APP_PREFIX == "" { + let url = Url::parse(location).unwrap(); + url.origin().unicode_serialization() + } else { + APP_PREFIX.to_string() + }; + let url = format!("{}{}", prefix, NG_BOOTSTRAP_LOCAL_PATH); + log_info!("url {}", url); + let resp = reqwest::get(url).await; + //log_info!("{:?}", resp); + if resp.is_ok() { + let resp = resp.unwrap().json::().await; + return if resp.is_ok() { + Some(resp.unwrap()) + } else { + None + }; + } else { + //log_info!("err {}", resp.unwrap_err()); + return None; + } +} + +#[cfg(not(target_arch = "wasm32"))] +pub async fn retrieve_ng_bootstrap(location: &String) -> Option { + let url = Url::parse(location).unwrap(); + let prefix = url.origin().unicode_serialization(); + let url = format!("{}{}", prefix, NG_BOOTSTRAP_LOCAL_PATH); + log_info!("url {}", url); + let resp = reqwest::get(url).await; + //log_info!("{:?}", resp); + if resp.is_ok() { + let resp = resp.unwrap().json::().await; + return if resp.is_ok() { + Some(resp.unwrap()) + } else { + None + }; + } else { + //log_info!("err {}", resp.unwrap_err()); + return None; + } +} + +// #[cfg(target_arch = "wasm32")] +// pub async fn retrieve_domain(location: String) -> Option { +// let info = retrieve_ng_bootstrap(&location).await; +// if info.is_none() { +// return None; +// } +// for bootstrap in info.unwrap().servers() { +// let res = bootstrap.get_domain(); +// if res.is_some() { +// return res; +// } +// } +// None +// } + +#[cfg(target_arch = "wasm32")] +pub async fn retrieve_local_url(location: String) -> Option { + let info = retrieve_ng_bootstrap(&location).await; + if info.is_none() { + return None; + } + for bootstrap in info.unwrap().servers() { + let res = check_is_local_url(bootstrap, &location); + if res.is_some() { + return res; + } + } + None +} + +#[cfg(target_arch = "wasm32")] +pub async fn retrieve_local_bootstrap( + location_string: String, + invite_string: Option, + must_be_public: bool, +) -> Option { + let invite1: Option = if invite_string.is_some() { + let invitation: Result = invite_string.clone().unwrap().try_into(); + invitation.ok() + } else { + None + }; + log_debug!("{}", location_string); + log_debug!("invite_string {:?} invite1{:?}", 
invite_string, invite1); + + let invite2: Option = { + let info = retrieve_ng_bootstrap(&location_string).await; + if info.is_none() { + None + } else { + let inv: Invitation = info.unwrap().into(); + Some(inv) + } + }; + + let res = if invite1.is_none() { + invite2 + } else if invite2.is_none() { + invite1 + } else { + invite1.map(|i| i.intersects(invite2.unwrap())) + }; + + if res.is_some() { + for server in res.as_ref().unwrap().get_servers() { + if must_be_public && server.is_public_server() + || !must_be_public && check_is_local_url(server, &location_string).is_some() + { + return res; + } + } + return None; + } + res +} + +pub fn sensitive_from_privkey(privkey: PrivKey) -> Sensitive<[u8; 32]> { + // we copy the key here, because otherwise the 2 zeroize would conflict. as the drop of the PrivKey might be called before the one of Sensitive + let mut bits: [u8; 32] = [0u8; 32]; + bits.copy_from_slice(privkey.slice()); + Sensitive::<[u8; 32]>::from_slice(&bits) +} + +pub fn dh_privkey_from_sensitive(privkey: Sensitive<[u8; 32]>) -> PrivKey { + // we copy the key here, because otherwise the 2 zeroize would conflict. as the drop of the Sensitive might be called before the one of PrivKey + let mut bits: [u8; 32] = [0u8; 32]; + bits.copy_from_slice(privkey.as_slice()); + PrivKey::X25519PrivKey(bits) +} + +pub type Sender = mpsc::UnboundedSender; +pub type Receiver = mpsc::UnboundedReceiver; + +pub fn gen_dh_keys() -> (PrivKey, PubKey) { + let pri = noise_rust_crypto::X25519::genkey(); + let publ = noise_rust_crypto::X25519::pubkey(&pri); + + (dh_privkey_from_sensitive(pri), PubKey::X25519PubKey(publ)) +} + +pub struct Dual25519Keys { + pub x25519_priv: Sensitive<[u8; 32]>, + pub x25519_public: [u8; 32], + pub ed25519_priv: SecretKey, + pub ed25519_pub: PublicKey, +} + +impl Dual25519Keys { + pub fn generate() -> Self { + let mut random = Sensitive::<[u8; 32]>::new(); + getrandom::fill(&mut *random).expect("getrandom failed"); + + let ed25519_priv = SecretKey::from_bytes(&random.as_slice()).unwrap(); + let exp: ExpandedSecretKey = (&ed25519_priv).into(); + let mut exp_bytes = exp.to_bytes(); + let ed25519_pub: PublicKey = (&ed25519_priv).into(); + for byte in &mut exp_bytes[32..] 
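+        // The Ed25519 expanded secret key is SHA-512(seed): this loop zeroes
+        // its upper 32 bytes (the signing-nonce prefix), and the lower 32
+        // bytes are then clamped below (clear the 3 low bits, clear the top
+        // bit, set bit 254) to become the X25519 scalar, so both key pairs
+        // are derived from the same 32-byte seed.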
{ + *byte = 0; + } + let mut bits = Sensitive::<[u8; 32]>::from_slice(&exp_bytes[0..32]); + bits[0] &= 248; + bits[31] &= 127; + bits[31] |= 64; + + let x25519_public = noise_rust_crypto::X25519::pubkey(&bits); + + Self { + x25519_priv: bits, + x25519_public, + ed25519_priv, + ed25519_pub, + } + } +} + +pub fn get_domain_without_port(domain: &String) -> String { + let parts: Vec<&str> = domain.split(':').collect(); + parts[0].to_string() +} + +pub fn get_domain_without_port_443(domain: &str) -> &str { + let parts: Vec<&str> = domain.split(':').collect(); + if parts.len() > 1 && parts[1] == "443" { + return parts[0]; + } + domain +} + +pub fn is_public_ipv4(ip: &Ipv4Addr) -> bool { + // TODO, use core::net::Ipv4Addr.is_global when it will be stable + return is_ipv4_global(ip); +} + +pub fn is_public_ipv6(ip: &Ipv6Addr) -> bool { + // TODO, use core::net::Ipv6Addr.is_global when it will be stable + return is_ipv6_global(ip); +} + +pub fn is_public_ip(ip: &IpAddr) -> bool { + match ip { + IpAddr::V4(v4) => is_public_ipv4(v4), + IpAddr::V6(v6) => is_public_ipv6(v6), + } +} + +pub fn is_private_ip(ip: &IpAddr) -> bool { + match ip { + IpAddr::V4(v4) => is_ipv4_private(v4), + IpAddr::V6(v6) => is_ipv6_private(v6), + } +} + +#[must_use] +#[inline] +pub const fn is_ipv4_shared(addr: &Ipv4Addr) -> bool { + addr.octets()[0] == 100 && (addr.octets()[1] & 0b1100_0000 == 0b0100_0000) +} + +#[must_use] +#[inline] +pub const fn is_ipv4_benchmarking(addr: &Ipv4Addr) -> bool { + addr.octets()[0] == 198 && (addr.octets()[1] & 0xfe) == 18 +} + +#[must_use] +#[inline] +pub const fn is_ipv4_reserved(addr: &Ipv4Addr) -> bool { + addr.octets()[0] & 240 == 240 && !addr.is_broadcast() +} + +#[must_use] +#[inline] +pub const fn is_ipv4_private(addr: &Ipv4Addr) -> bool { + addr.is_private() || addr.is_link_local() +} + +#[must_use] +#[inline] +pub const fn is_ipv4_global(addr: &Ipv4Addr) -> bool { + !(addr.octets()[0] == 0 // "This network" + || addr.is_private() + || is_ipv4_shared(addr) + || addr.is_loopback() + || addr.is_link_local() + // addresses reserved for future protocols (`192.0.0.0/24`) + ||(addr.octets()[0] == 192 && addr.octets()[1] == 0 && addr.octets()[2] == 0) + || addr.is_documentation() + || is_ipv4_benchmarking(addr) + || is_ipv4_reserved(addr) + || addr.is_broadcast()) +} + +#[must_use] +#[inline] +pub const fn is_ipv6_unique_local(addr: &Ipv6Addr) -> bool { + (addr.segments()[0] & 0xfe00) == 0xfc00 +} + +#[must_use] +#[inline] +pub const fn is_ipv6_unicast_link_local(addr: &Ipv6Addr) -> bool { + (addr.segments()[0] & 0xffc0) == 0xfe80 +} + +#[must_use] +#[inline] +pub const fn is_ipv6_documentation(addr: &Ipv6Addr) -> bool { + (addr.segments()[0] == 0x2001) && (addr.segments()[1] == 0xdb8) +} + +#[must_use] +#[inline] +pub const fn is_ipv6_private(addr: &Ipv6Addr) -> bool { + is_ipv6_unique_local(addr) +} + +#[must_use] +#[inline] +pub const fn is_ipv6_global(addr: &Ipv6Addr) -> bool { + !(addr.is_unspecified() + || addr.is_loopback() + // IPv4-mapped Address (`::ffff:0:0/96`) + || matches!(addr.segments(), [0, 0, 0, 0, 0, 0xffff, _, _]) + // IPv4-IPv6 Translat. 
(`64:ff9b:1::/48`) + || matches!(addr.segments(), [0x64, 0xff9b, 1, _, _, _, _, _]) + // Discard-Only Address Block (`100::/64`) + || matches!(addr.segments(), [0x100, 0, 0, 0, _, _, _, _]) + // IETF Protocol Assignments (`2001::/23`) + || (matches!(addr.segments(), [0x2001, b, _, _, _, _, _, _] if b < 0x200) + && !( + // Port Control Protocol Anycast (`2001:1::1`) + u128::from_be_bytes(addr.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0001 + // Traversal Using Relays around NAT Anycast (`2001:1::2`) + || u128::from_be_bytes(addr.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0002 + // AMT (`2001:3::/32`) + || matches!(addr.segments(), [0x2001, 3, _, _, _, _, _, _]) + // AS112-v6 (`2001:4:112::/48`) + || matches!(addr.segments(), [0x2001, 4, 0x112, _, _, _, _, _]) + // ORCHIDv2 (`2001:20::/28`) + || matches!(addr.segments(), [0x2001, b, _, _, _, _, _, _] if b >= 0x20 && b <= 0x2F) + )) + || is_ipv6_documentation(addr) + || is_ipv6_unique_local(addr) + || is_ipv6_unicast_link_local(addr)) +} diff --git a/ng-oxigraph/Cargo.toml b/ng-oxigraph/Cargo.toml new file mode 100644 index 0000000..fdf7339 --- /dev/null +++ b/ng-oxigraph/Cargo.toml @@ -0,0 +1,249 @@ +[package] +name = "ng-oxigraph" +version = "0.4.0-alpha.8-ngalpha" +authors = ["Tpt ", "Niko PLP "] +license = "MIT OR Apache-2.0" +readme = "README.md" +keywords = ["RDF", "SPARQL", "graph-database", "database"] +categories = ["database-implementations"] +repository = "https://git.nextgraph.org/NextGraph/nextgraph-rs" +homepage = "https://nextgraph.org" +documentation = "https://docs.nextgraph.org/" +description = """ +a SPARQL database and RDF toolkit. fork for NextGraph +""" +edition = "2021" +rust-version = "1.70" +build = "build.rs" + +[features] +default = ["rdf-star","sep-0002","sep-0006", "oxsdatatypes"] +js = ["getrandom/wasm_js", "js-sys"] +rdf-star = [] +custom-now = [] +xml = [] +ttl = [] +sep-0002 = [] +sep-0006 = [] +oxsdatatypes = [] + +[dependencies] +lazy_static = "1.4.0" +digest = "0.10" +hex = "0.4" +json-event-parser = "0.2.0-alpha.2" +md-5 = "0.10" +oxilangtag = "0.1" +oxiri = "0.2.3" +rand = "0.8" +regex = "1.8.4" +serde = { version = "1.0.142", features = ["derive"] } +sha1 = "0.10" +sha2 = "0.10" +siphasher = ">=0.3, <2.0" +thiserror = "1.0.50" +quick-xml = ">=0.29, <0.32" +memchr = "2.5" +peg = "0.8" +base64-url = "2.0.0" +ng-repo = { path = "../ng-repo", version = "0.1.2" } + +[target.'cfg(all(not(target_family = "wasm"),not(docsrs)))'.dependencies] +libc = "0.2" +ng-rocksdb = { version = "0.21.0-ngpreview.7", git = "https://git.nextgraph.org/NextGraph/rust-rocksdb.git", branch = "master", features = [ ] } + +[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies] +getrandom = { version = "0.3.3", features = ["wasm_js"] } +js-sys = { version = "0.3.60", optional = true } + +[target.'cfg(not(target_family = "wasm"))'.dev-dependencies] +codspeed-criterion-compat = "2.3.3" +zstd = ">=0.12, <0.14" + + +[lints.rust] +absolute_paths_not_starting_with_crate = "warn" +elided_lifetimes_in_paths = "warn" +explicit_outlives_requirements = "warn" +let_underscore_drop = "warn" +macro_use_extern_crate = "warn" +# TODO missing_docs = "warn" +trivial_casts = "warn" +trivial_numeric_casts = "warn" +unsafe_code = "warn" +unused_import_braces = "warn" +unused_lifetimes = "warn" +unused_macro_rules = "warn" +unused_qualifications = "warn" + +[lints.clippy] +allow_attributes = "warn" +allow_attributes_without_reason = "warn" +as_underscore = "warn" +assertions_on_result_states = "warn" 
+bool_to_int_with_if = "warn" +borrow_as_ptr = "warn" +case_sensitive_file_extension_comparisons = "warn" +cast_lossless = "warn" +cast_possible_truncation = "warn" +cast_possible_wrap = "warn" +cast_precision_loss = "warn" +cast_ptr_alignment = "warn" +cast_sign_loss = "warn" +checked_conversions = "warn" +clone_on_ref_ptr = "warn" +cloned_instead_of_copied = "warn" +copy_iterator = "warn" +create_dir = "warn" +dbg_macro = "warn" +decimal_literal_representation = "warn" +default_trait_access = "warn" +default_union_representation = "warn" +deref_by_slicing = "warn" +disallowed_script_idents = "warn" +doc_link_with_quotes = "warn" +empty_drop = "warn" +empty_enum = "warn" +empty_structs_with_brackets = "warn" +enum_glob_use = "warn" +error_impl_error = "warn" +exit = "warn" +expect_used = "warn" +expl_impl_clone_on_copy = "warn" +explicit_deref_methods = "warn" +explicit_into_iter_loop = "warn" +explicit_iter_loop = "warn" +filetype_is_file = "warn" +filter_map_next = "warn" +flat_map_option = "warn" +fn_params_excessive_bools = "warn" +fn_to_numeric_cast_any = "warn" +format_push_string = "warn" +from_iter_instead_of_collect = "warn" +get_unwrap = "warn" +host_endian_bytes = "warn" +if_not_else = "warn" +if_then_some_else_none = "warn" +ignored_unit_patterns = "warn" +implicit_clone = "warn" +implicit_hasher = "warn" +inconsistent_struct_constructor = "warn" +index_refutable_slice = "warn" +inefficient_to_string = "warn" +infinite_loop = "warn" +inline_always = "warn" +inline_asm_x86_att_syntax = "warn" +inline_asm_x86_intel_syntax = "warn" +into_iter_without_iter = "warn" +invalid_upcast_comparisons = "warn" +items_after_statements = "warn" +iter_not_returning_iterator = "warn" +iter_without_into_iter = "warn" +large_digit_groups = "warn" +large_futures = "warn" +large_include_file = "warn" +large_stack_arrays = "warn" +large_types_passed_by_value = "warn" +let_underscore_must_use = "warn" +let_underscore_untyped = "warn" +linkedlist = "warn" +lossy_float_literal = "warn" +macro_use_imports = "warn" +manual_assert = "warn" +manual_instant_elapsed = "warn" +manual_let_else = "warn" +manual_ok_or = "warn" +manual_string_new = "warn" +many_single_char_names = "warn" +map_unwrap_or = "warn" +match_bool = "warn" +match_on_vec_items = "warn" +match_same_arms = "warn" +match_wild_err_arm = "warn" +match_wildcard_for_single_variants = "warn" +maybe_infinite_iter = "warn" +mem_forget = "warn" +mismatching_type_param_order = "warn" +missing_assert_message = "warn" +missing_asserts_for_indexing = "warn" +missing_fields_in_debug = "warn" +multiple_inherent_impl = "warn" +mut_mut = "warn" +mutex_atomic = "warn" +naive_bytecount = "warn" +needless_bitwise_bool = "warn" +needless_continue = "warn" +needless_for_each = "warn" +needless_pass_by_value = "warn" +needless_raw_string_hashes = "warn" +needless_raw_strings = "warn" +negative_feature_names = "warn" +no_effect_underscore_binding = "warn" +no_mangle_with_rust_abi = "warn" +non_ascii_literal = "warn" +panic = "warn" +panic_in_result_fn = "warn" +partial_pub_fields = "warn" +print_stderr = "warn" +print_stdout = "warn" +ptr_as_ptr = "warn" +ptr_cast_constness = "warn" +pub_without_shorthand = "warn" +range_minus_one = "warn" +range_plus_one = "warn" +rc_buffer = "warn" +rc_mutex = "warn" +redundant_closure_for_method_calls = "warn" +redundant_else = "warn" +redundant_feature_names = "warn" +redundant_type_annotations = "warn" +ref_binding_to_reference = "warn" +ref_option_ref = "warn" +ref_patterns = "warn" +rest_pat_in_fully_bound_structs = "warn" 
+return_self_not_must_use = "warn" +same_functions_in_if_condition = "warn" +same_name_method = "warn" +semicolon_inside_block = "warn" +shadow_same = "warn" +should_panic_without_expect = "warn" +single_match_else = "warn" +stable_sort_primitive = "warn" +str_to_string = "warn" +string_add = "warn" +string_add_assign = "warn" +string_lit_chars_any = "warn" +string_to_string = "warn" +struct_excessive_bools = "warn" +struct_field_names = "warn" +suspicious_xor_used_as_pow = "warn" +tests_outside_test_module = "warn" +todo = "warn" +transmute_ptr_to_ptr = "warn" +trivially_copy_pass_by_ref = "warn" +try_err = "warn" +unchecked_duration_subtraction = "warn" +undocumented_unsafe_blocks = "warn" +unicode_not_nfc = "warn" +unimplemented = "warn" +uninlined_format_args = "warn" +unnecessary_box_returns = "warn" +unnecessary_join = "warn" +unnecessary_safety_comment = "warn" +unnecessary_safety_doc = "warn" +unnecessary_self_imports = "warn" +unnecessary_wraps = "warn" +unneeded_field_pattern = "warn" +unnested_or_patterns = "warn" +unreadable_literal = "warn" +unsafe_derive_deserialize = "warn" +unseparated_literal_suffix = "warn" +unused_async = "warn" +unused_self = "warn" +unwrap_in_result = "warn" +use_debug = "warn" +used_underscore_binding = "warn" +verbose_bit_mask = "warn" +verbose_file_reads = "warn" +wildcard_dependencies = "warn" +zero_sized_map_values = "warn" diff --git a/ng-oxigraph/LICENSE-APACHE b/ng-oxigraph/LICENSE-APACHE new file mode 100644 index 0000000..16fe87b --- /dev/null +++ b/ng-oxigraph/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/ng-oxigraph/LICENSE-MIT b/ng-oxigraph/LICENSE-MIT new file mode 100644 index 0000000..e1609ee --- /dev/null +++ b/ng-oxigraph/LICENSE-MIT @@ -0,0 +1,26 @@ +Copyright (c) 2018 Oxigraph developers +Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/ng-oxigraph/README.md b/ng-oxigraph/README.md new file mode 100644 index 0000000..780e6e5 --- /dev/null +++ b/ng-oxigraph/README.md @@ -0,0 +1,41 @@ +# Oxigraph + +Oxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. +Its author is Thomas Pellissier Tanon thomas@pellissier-tanon.fr + +The official upstream project is here: https://oxigraph.org/ + +https://github.com/oxigraph/oxigraph/ + +https://crates.io/crates/oxigraph + +This package (ng-oxigraph) is a fork used internally by NextGraph.org project. +It mostly adds CRDTs to RDF/SPARQL (and also provides a RocksDB backend with encryption at rest, and OpenBSD support). + +If you are interested to know more about NextGraph: https://nextgraph.org + +https://git.nextgraph.org/NextGraph/nextgraph-rs + +https://crates.io/crates/nextgraph + +## License + +Both OxiGraph and NextGraph are licensed under either of + +- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + at your option. + +`SPDX-License-Identifier: Apache-2.0 OR MIT` + +Copyright is attributed to "Copyright (c) 2018 Oxigraph developers" for all the code corresponding to the commit [427d675c9b4e7f55308825357d8628c612b82a91](https://github.com/oxigraph/oxigraph/commit/427d675c9b4e7f55308825357d8628c612b82a91) of the OxiGraph repository on date Mon Apr 8 09:11:04 2024 +0200. + +All the code added in subsequent commits have a copyright attributed to "Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers". + +## NextGraph + +> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs. +> +> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers. 
+> +> More info here [https://nextgraph.org](https://nextgraph.org) diff --git a/ng-oxigraph/build.rs b/ng-oxigraph/build.rs new file mode 100644 index 0000000..bd0dfc1 --- /dev/null +++ b/ng-oxigraph/build.rs @@ -0,0 +1,5 @@ +fn main() { + if std::env::var("DOCS_RS").is_ok() { + println!("cargo:rustc-cfg=docsrs"); + } +} diff --git a/ng-oxigraph/src/lib.rs b/ng-oxigraph/src/lib.rs new file mode 100644 index 0000000..80993f5 --- /dev/null +++ b/ng-oxigraph/src/lib.rs @@ -0,0 +1,22 @@ +#![doc(test(attr(deny(warnings))))] +#![doc(test(attr(allow(deprecated))))] +#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] +#![doc(html_logo_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] + +pub mod oxigraph; + +pub mod oxrdf; + +pub mod oxrdfio; + +pub mod oxsdatatypes; + +pub mod oxttl; + +pub mod oxrdfxml; + +pub mod sparesults; + +pub mod spargebra; + +pub mod sparopt; diff --git a/ng-oxigraph/src/oxigraph/io/format.rs b/ng-oxigraph/src/oxigraph/io/format.rs new file mode 100644 index 0000000..8268247 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/io/format.rs @@ -0,0 +1,301 @@ +#![allow(deprecated)] + +use crate::oxrdfio::{RdfFormat, RdfParser, RdfSerializer}; + +/// [RDF graph](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-graph) serialization formats. +/// +/// This enumeration is non exhaustive. New formats like JSON-LD will be added in the future. +#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)] +#[non_exhaustive] +#[deprecated(note = "use RdfFormat instead", since = "0.4.0")] +pub enum GraphFormat { + /// [N-Triples](https://www.w3.org/TR/n-triples/) + NTriples, + /// [Turtle](https://www.w3.org/TR/turtle/) + Turtle, + /// [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) + RdfXml, +} + +impl GraphFormat { + /// The format canonical IRI according to the [Unique URIs for file formats registry](https://www.w3.org/ns/formats/). + /// + /// ``` + /// use oxigraph::io::GraphFormat; + /// + /// assert_eq!( + /// GraphFormat::NTriples.iri(), + /// "http://www.w3.org/ns/formats/N-Triples" + /// ) + /// ``` + #[inline] + pub fn iri(self) -> &'static str { + match self { + Self::NTriples => "http://www.w3.org/ns/formats/N-Triples", + Self::Turtle => "http://www.w3.org/ns/formats/Turtle", + Self::RdfXml => "http://www.w3.org/ns/formats/RDF_XML", + } + } + + /// The format [IANA media type](https://tools.ietf.org/html/rfc2046). + /// + /// ``` + /// use oxigraph::io::GraphFormat; + /// + /// assert_eq!(GraphFormat::NTriples.media_type(), "application/n-triples") + /// ``` + #[inline] + pub fn media_type(self) -> &'static str { + match self { + Self::NTriples => "application/n-triples", + Self::Turtle => "text/turtle", + Self::RdfXml => "application/rdf+xml", + } + } + + /// The format [IANA-registered](https://tools.ietf.org/html/rfc2046) file extension. + /// + /// ``` + /// use oxigraph::io::GraphFormat; + /// + /// assert_eq!(GraphFormat::NTriples.file_extension(), "nt") + /// ``` + #[inline] + pub fn file_extension(self) -> &'static str { + match self { + Self::NTriples => "nt", + Self::Turtle => "ttl", + Self::RdfXml => "rdf", + } + } + + /// Looks for a known format from a media type. + /// + /// It supports some media type aliases. + /// For example, "application/xml" is going to return `GraphFormat::RdfXml` even if it is not its canonical media type. 
+    ///
+    /// Example:
+    /// ```
+    /// use oxigraph::io::GraphFormat;
+    ///
+    /// assert_eq!(
+    ///     GraphFormat::from_media_type("text/turtle; charset=utf-8"),
+    ///     Some(GraphFormat::Turtle)
+    /// )
+    /// ```
+    #[inline]
+    pub fn from_media_type(media_type: &str) -> Option<Self> {
+        match media_type.split(';').next()?.trim() {
+            "application/n-triples" | "text/plain" => Some(Self::NTriples),
+            "text/turtle" | "application/turtle" | "application/x-turtle" => Some(Self::Turtle),
+            "application/rdf+xml" | "application/xml" | "text/xml" => Some(Self::RdfXml),
+            _ => None,
+        }
+    }
+
+    /// Looks for a known format from an extension.
+    ///
+    /// It supports some aliases.
+    ///
+    /// Example:
+    /// ```
+    /// use oxigraph::io::GraphFormat;
+    ///
+    /// assert_eq!(
+    ///     GraphFormat::from_extension("nt"),
+    ///     Some(GraphFormat::NTriples)
+    /// )
+    /// ```
+    #[inline]
+    pub fn from_extension(extension: &str) -> Option<Self> {
+        match extension {
+            "nt" | "txt" => Some(Self::NTriples),
+            "ttl" => Some(Self::Turtle),
+            "rdf" | "xml" => Some(Self::RdfXml),
+            _ => None,
+        }
+    }
+}
+
+impl From<GraphFormat> for RdfFormat {
+    #[inline]
+    fn from(format: GraphFormat) -> Self {
+        match format {
+            GraphFormat::NTriples => Self::NTriples,
+            GraphFormat::Turtle => Self::Turtle,
+            GraphFormat::RdfXml => Self::RdfXml,
+        }
+    }
+}
+
+impl From<GraphFormat> for RdfParser {
+    #[inline]
+    fn from(format: GraphFormat) -> Self {
+        RdfFormat::from(format).into()
+    }
+}
+
+impl From<GraphFormat> for RdfSerializer {
+    #[inline]
+    fn from(format: GraphFormat) -> Self {
+        RdfFormat::from(format).into()
+    }
+}
+
+/// [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset) serialization formats.
+///
+/// This enumeration is non exhaustive. New formats like JSON-LD will be added in the future.
+#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
+#[non_exhaustive]
+#[deprecated(note = "use RdfFormat instead", since = "0.4.0")]
+pub enum DatasetFormat {
+    /// [N-Quads](https://www.w3.org/TR/n-quads/)
+    NQuads,
+    /// [TriG](https://www.w3.org/TR/trig/)
+    TriG,
+}
+
+impl DatasetFormat {
+    /// The format canonical IRI according to the [Unique URIs for file formats registry](https://www.w3.org/ns/formats/).
+    ///
+    /// ```
+    /// use oxigraph::io::DatasetFormat;
+    ///
+    /// assert_eq!(
+    ///     DatasetFormat::NQuads.iri(),
+    ///     "http://www.w3.org/ns/formats/N-Quads"
+    /// )
+    /// ```
+    #[inline]
+    pub fn iri(self) -> &'static str {
+        match self {
+            Self::NQuads => "http://www.w3.org/ns/formats/N-Quads",
+            Self::TriG => "http://www.w3.org/ns/formats/TriG",
+        }
+    }
+
+    /// The format [IANA media type](https://tools.ietf.org/html/rfc2046).
+    ///
+    /// ```
+    /// use oxigraph::io::DatasetFormat;
+    ///
+    /// assert_eq!(DatasetFormat::NQuads.media_type(), "application/n-quads")
+    /// ```
+    #[inline]
+    pub fn media_type(self) -> &'static str {
+        match self {
+            Self::NQuads => "application/n-quads",
+            Self::TriG => "application/trig",
+        }
+    }
+
+    /// The format [IANA-registered](https://tools.ietf.org/html/rfc2046) file extension.
+    ///
+    /// ```
+    /// use oxigraph::io::DatasetFormat;
+    ///
+    /// assert_eq!(DatasetFormat::NQuads.file_extension(), "nq")
+    /// ```
+    #[inline]
+    pub fn file_extension(self) -> &'static str {
+        match self {
+            Self::NQuads => "nq",
+            Self::TriG => "trig",
+        }
+    }
+
+    /// Looks for a known format from a media type.
+    ///
+    /// It supports some media type aliases.
+    ///
+    /// Example:
+    /// ```
+    /// use oxigraph::io::DatasetFormat;
+    ///
+    /// assert_eq!(
+    ///     DatasetFormat::from_media_type("application/n-quads; charset=utf-8"),
+    ///     Some(DatasetFormat::NQuads)
+    /// )
+    /// ```
+    #[inline]
+    pub fn from_media_type(media_type: &str) -> Option<Self> {
+        match media_type.split(';').next()?.trim() {
+            "application/n-quads" | "text/x-nquads" | "text/nquads" => Some(Self::NQuads),
+            "application/trig" | "application/x-trig" => Some(Self::TriG),
+            _ => None,
+        }
+    }
+
+    /// Looks for a known format from an extension.
+    ///
+    /// It supports some aliases.
+    ///
+    /// Example:
+    /// ```
+    /// use oxigraph::io::DatasetFormat;
+    ///
+    /// assert_eq!(
+    ///     DatasetFormat::from_extension("nq"),
+    ///     Some(DatasetFormat::NQuads)
+    /// )
+    /// ```
+    #[inline]
+    pub fn from_extension(extension: &str) -> Option<Self> {
+        match extension {
+            "nq" | "txt" => Some(Self::NQuads),
+            "trig" => Some(Self::TriG),
+            _ => None,
+        }
+    }
+}
+
+impl From<DatasetFormat> for RdfFormat {
+    #[inline]
+    fn from(format: DatasetFormat) -> Self {
+        match format {
+            DatasetFormat::NQuads => Self::NQuads,
+            DatasetFormat::TriG => Self::TriG,
+        }
+    }
+}
+
+impl From<DatasetFormat> for RdfParser {
+    #[inline]
+    fn from(format: DatasetFormat) -> Self {
+        RdfFormat::from(format).into()
+    }
+}
+
+impl From<DatasetFormat> for RdfSerializer {
+    #[inline]
+    fn from(format: DatasetFormat) -> Self {
+        RdfFormat::from(format).into()
+    }
+}
+
+impl TryFrom<DatasetFormat> for GraphFormat {
+    type Error = ();
+
+    /// Attempts to find a graph format that is a subset of this [`DatasetFormat`].
+    #[inline]
+    fn try_from(value: DatasetFormat) -> Result<Self, Self::Error> {
+        match value {
+            DatasetFormat::NQuads => Ok(Self::NTriples),
+            DatasetFormat::TriG => Ok(Self::Turtle),
+        }
+    }
+}
+
+impl TryFrom<GraphFormat> for DatasetFormat {
+    type Error = ();
+
+    /// Attempts to find a dataset format that is a superset of this [`GraphFormat`].
+    #[inline]
+    fn try_from(value: GraphFormat) -> Result<Self, Self::Error> {
+        match value {
+            GraphFormat::NTriples => Ok(Self::NQuads),
+            GraphFormat::Turtle => Ok(Self::TriG),
+            GraphFormat::RdfXml => Err(()),
+        }
+    }
+}
diff --git a/ng-oxigraph/src/oxigraph/io/mod.rs b/ng-oxigraph/src/oxigraph/io/mod.rs
new file mode 100644
index 0000000..6c76116
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/io/mod.rs
@@ -0,0 +1,39 @@
+//! Utilities to read and write RDF graphs and datasets using [OxRDF I/O](https://crates.io/crates/oxrdfio).
+//!
+//! The entry points of this module are the two [`RdfParser`] and [`RdfSerializer`] structs.
+//!
+//! Usage example converting a Turtle file to an N-Triples file:
+//! ```
+//! use oxigraph::io::{RdfFormat, RdfParser, RdfSerializer};
+//!
+//! let turtle_file = b"@base <http://example.com/> .
+//! @prefix schema: <http://schema.org/> .
+//! <foo> a schema:Person ;
+//!     schema:name \"Foo\" .
+//! <bar> a schema:Person ;
+//!     schema:name \"Bar\" .";
+//!
+//! let ntriples_file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
+//! <http://example.com/foo> <http://schema.org/name> \"Foo\" .
+//! <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
+//! <http://example.com/bar> <http://schema.org/name> \"Bar\" .
+//! ";
+//!
+//! let mut writer = RdfSerializer::from_format(RdfFormat::NTriples).serialize_to_write(Vec::new());
+//! for quad in RdfParser::from_format(RdfFormat::Turtle).parse_read(turtle_file.as_ref()) {
+//!     writer.write_quad(&quad.unwrap()).unwrap();
+//! }
+//! assert_eq!(writer.finish().unwrap(), ntriples_file);
+//!
``` + +mod format; +pub mod read; +pub mod write; + +#[allow(deprecated)] +pub use self::format::{DatasetFormat, GraphFormat}; +#[allow(deprecated)] +pub use self::read::{DatasetParser, GraphParser}; +#[allow(deprecated)] +pub use self::write::{DatasetSerializer, GraphSerializer}; +pub use crate::oxrdfio::*; diff --git a/ng-oxigraph/src/oxigraph/io/read.rs b/ng-oxigraph/src/oxigraph/io/read.rs new file mode 100644 index 0000000..ca2c62f --- /dev/null +++ b/ng-oxigraph/src/oxigraph/io/read.rs @@ -0,0 +1,199 @@ +#![allow(deprecated)] + +//! Utilities to read RDF graphs and datasets. + +use crate::oxigraph::io::{DatasetFormat, GraphFormat}; +use crate::oxigraph::model::*; +use crate::oxrdfio::{FromReadQuadReader, RdfParseError, RdfParser}; +use std::io::Read; + +/// Parsers for RDF graph serialization formats. +/// +/// It currently supports the following formats: +/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`]) +/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`]) +/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`]) +/// +/// ``` +/// use oxigraph::io::{GraphFormat, GraphParser}; +/// +/// let file = " ."; +/// +/// let parser = GraphParser::from_format(GraphFormat::NTriples); +/// let triples = parser +/// .read_triples(file.as_bytes()) +/// .collect::, _>>()?; +/// +/// assert_eq!(triples.len(), 1); +/// assert_eq!(triples[0].subject.to_string(), ""); +/// # std::io::Result::Ok(()) +/// ``` +#[deprecated(note = "use RdfParser instead", since = "0.4.0")] +pub struct GraphParser { + inner: RdfParser, +} + +impl GraphParser { + /// Builds a parser for the given format. + #[inline] + pub fn from_format(format: GraphFormat) -> Self { + Self { + inner: RdfParser::from_format(format.into()) + .without_named_graphs() + .rename_blank_nodes(), + } + } + + /// Provides an IRI that could be used to resolve the file relative IRIs. + /// + /// ``` + /// use oxigraph::io::{GraphFormat, GraphParser}; + /// + /// let file = "

."; + /// + /// let parser = + /// GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?; + /// let triples = parser + /// .read_triples(file.as_bytes()) + /// .collect::, _>>()?; + /// + /// assert_eq!(triples.len(), 1); + /// assert_eq!(triples[0].subject.to_string(), ""); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[inline] + pub fn with_base_iri(self, base_iri: impl Into) -> Result { + Ok(Self { + inner: self.inner.with_base_iri(base_iri)?, + }) + } + + /// Executes the parsing itself on a [`Read`] implementation and returns an iterator of triples. + pub fn read_triples(self, reader: R) -> TripleReader { + TripleReader { + parser: self.inner.parse_read(reader), + } + } +} + +/// An iterator yielding read triples. +/// Could be built using a [`GraphParser`]. +/// +/// ``` +/// use oxigraph::io::{GraphFormat, GraphParser}; +/// +/// let file = " ."; +/// +/// let parser = GraphParser::from_format(GraphFormat::NTriples); +/// let triples = parser +/// .read_triples(file.as_bytes()) +/// .collect::, _>>()?; +/// +/// assert_eq!(triples.len(), 1); +/// assert_eq!(triples[0].subject.to_string(), ""); +/// # std::io::Result::Ok(()) +/// ``` +#[must_use] +pub struct TripleReader { + parser: FromReadQuadReader, +} + +impl Iterator for TripleReader { + type Item = Result; + + fn next(&mut self) -> Option { + Some(self.parser.next()?.map(Into::into).map_err(Into::into)) + } +} + +/// A parser for RDF dataset serialization formats. +/// +/// It currently supports the following formats: +/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`]) +/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`]) +/// +/// ``` +/// use oxigraph::io::{DatasetFormat, DatasetParser}; +/// +/// let file = " ."; +/// +/// let parser = DatasetParser::from_format(DatasetFormat::NQuads); +/// let quads = parser.read_quads(file.as_bytes()).collect::,_>>()?; +/// +/// assert_eq!(quads.len(), 1); +/// assert_eq!(quads[0].subject.to_string(), ""); +/// # std::io::Result::Ok(()) +/// ``` +#[deprecated(note = "use RdfParser instead", since = "0.4.0")] +pub struct DatasetParser { + inner: RdfParser, +} + +impl DatasetParser { + /// Builds a parser for the given format. + #[inline] + pub fn from_format(format: DatasetFormat) -> Self { + Self { + inner: RdfParser::from_format(format.into()).rename_blank_nodes(), + } + } + + /// Provides an IRI that could be used to resolve the file relative IRIs. + /// + /// ``` + /// use oxigraph::io::{DatasetFormat, DatasetParser}; + /// + /// let file = " {

}"; + /// + /// let parser = + /// DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?; + /// let triples = parser + /// .read_quads(file.as_bytes()) + /// .collect::, _>>()?; + /// + /// assert_eq!(triples.len(), 1); + /// assert_eq!(triples[0].subject.to_string(), ""); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[inline] + pub fn with_base_iri(self, base_iri: impl Into) -> Result { + Ok(Self { + inner: self.inner.with_base_iri(base_iri)?, + }) + } + + /// Executes the parsing itself on a [`Read`] implementation and returns an iterator of quads. + pub fn read_quads(self, reader: R) -> QuadReader { + QuadReader { + parser: self.inner.parse_read(reader), + } + } +} + +/// An iterator yielding read quads. +/// Could be built using a [`DatasetParser`]. +/// +/// ``` +/// use oxigraph::io::{DatasetFormat, DatasetParser}; +/// +/// let file = " ."; +/// +/// let parser = DatasetParser::from_format(DatasetFormat::NQuads); +/// let quads = parser.read_quads(file.as_bytes()).collect::,_>>()?; +/// +/// assert_eq!(quads.len(), 1); +/// assert_eq!(quads[0].subject.to_string(), ""); +/// # std::io::Result::Ok(()) +/// ``` +#[must_use] +pub struct QuadReader { + parser: FromReadQuadReader, +} + +impl Iterator for QuadReader { + type Item = Result; + + fn next(&mut self) -> Option { + Some(self.parser.next()?.map_err(Into::into)) + } +} diff --git a/ng-oxigraph/src/oxigraph/io/write.rs b/ng-oxigraph/src/oxigraph/io/write.rs new file mode 100644 index 0000000..e487720 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/io/write.rs @@ -0,0 +1,185 @@ +#![allow(deprecated)] + +//! Utilities to write RDF graphs and datasets. + +use crate::oxigraph::io::{DatasetFormat, GraphFormat}; +use crate::oxigraph::model::*; +use crate::oxrdfio::{RdfSerializer, ToWriteQuadWriter}; +use std::io::{self, Write}; + +/// A serializer for RDF graph serialization formats. +/// +/// It currently supports the following formats: +/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`]) +/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`]) +/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`]) +/// +/// ``` +/// use oxigraph::io::{GraphFormat, GraphSerializer}; +/// use oxigraph::model::*; +/// +/// let mut buffer = Vec::new(); +/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer); +/// writer.write(&Triple { +/// subject: NamedNode::new("http://example.com/s")?.into(), +/// predicate: NamedNode::new("http://example.com/p")?, +/// object: NamedNode::new("http://example.com/o")?.into(), +/// })?; +/// writer.finish()?; +/// +/// assert_eq!( +/// buffer.as_slice(), +/// " .\n".as_bytes() +/// ); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")] +pub struct GraphSerializer { + inner: RdfSerializer, +} + +impl GraphSerializer { + /// Builds a serializer for the given format + #[inline] + pub fn from_format(format: GraphFormat) -> Self { + Self { + inner: RdfSerializer::from_format(format.into()), + } + } + + /// Returns a [`TripleWriter`] allowing writing triples into the given [`Write`] implementation + pub fn triple_writer(self, write: W) -> TripleWriter { + TripleWriter { + writer: self.inner.serialize_to_write(write), + } + } +} + +/// Allows writing triples. +/// Could be built using a [`GraphSerializer`]. +/// +///
+/// +/// Do not forget to run the [`finish`](TripleWriter::finish()) method to properly write the last bytes of the file.
+///
+/// ```
+/// use oxigraph::io::{GraphFormat, GraphSerializer};
+/// use oxigraph::model::*;
+///
+/// let mut buffer = Vec::new();
+/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
+/// writer.write(&Triple {
+///     subject: NamedNode::new("http://example.com/s")?.into(),
+///     predicate: NamedNode::new("http://example.com/p")?,
+///     object: NamedNode::new("http://example.com/o")?.into(),
+/// })?;
+/// writer.finish()?;
+///
+/// assert_eq!(
+///     buffer.as_slice(),
+///     "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
+/// );
+/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[must_use]
+pub struct TripleWriter<W: Write> {
+    writer: ToWriteQuadWriter<W>,
+}
+
+impl<W: Write> TripleWriter<W> {
+    /// Writes a triple
+    pub fn write<'a>(&mut self, triple: impl Into<TripleRef<'a>>) -> io::Result<()> {
+        self.writer.write_triple(triple)
+    }
+
+    /// Writes the last bytes of the file
+    pub fn finish(self) -> io::Result<()> {
+        self.writer.finish()?.flush()
+    }
+}
+
+/// A serializer for RDF dataset serialization formats.
+///
+/// It currently supports the following formats:
+/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`])
+/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`])
+///
+/// ```
+/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
+/// use oxigraph::model::*;
+///
+/// let mut buffer = Vec::new();
+/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
+/// writer.write(&Quad {
+///     subject: NamedNode::new("http://example.com/s")?.into(),
+///     predicate: NamedNode::new("http://example.com/p")?,
+///     object: NamedNode::new("http://example.com/o")?.into(),
+///     graph_name: NamedNode::new("http://example.com/g")?.into(),
+/// })?;
+/// writer.finish()?;
+///
+/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
+/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")]
+pub struct DatasetSerializer {
+    inner: RdfSerializer,
+}
+
+impl DatasetSerializer {
+    /// Builds a serializer for the given format
+    #[inline]
+    pub fn from_format(format: DatasetFormat) -> Self {
+        Self {
+            inner: RdfSerializer::from_format(format.into()),
+        }
+    }
+
+    /// Returns a [`QuadWriter`] allowing writing quads into the given [`Write`] implementation
+    pub fn quad_writer<W: Write>(self, write: W) -> QuadWriter<W> {
+        QuadWriter {
+            writer: self.inner.serialize_to_write(write),
+        }
+    }
+}
+
+/// Allows writing quads.
+/// Could be built using a [`DatasetSerializer`].
+///
+///
+/// +/// Do not forget to run the [`finish`](QuadWriter::finish()) method to properly write the last bytes of the file.
+///
+/// ```
+/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
+/// use oxigraph::model::*;
+///
+/// let mut buffer = Vec::new();
+/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
+/// writer.write(&Quad {
+///     subject: NamedNode::new("http://example.com/s")?.into(),
+///     predicate: NamedNode::new("http://example.com/p")?,
+///     object: NamedNode::new("http://example.com/o")?.into(),
+///     graph_name: NamedNode::new("http://example.com/g")?.into(),
+/// })?;
+/// writer.finish()?;
+///
+/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
+/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[must_use]
+pub struct QuadWriter<W: Write> {
+    writer: ToWriteQuadWriter<W>,
+}
+
+impl<W: Write> QuadWriter<W> {
+    /// Writes a quad
+    pub fn write<'a>(&mut self, quad: impl Into<QuadRef<'a>>) -> io::Result<()> {
+        self.writer.write_quad(quad)
+    }
+
+    /// Writes the last bytes of the file
+    pub fn finish(self) -> io::Result<()> {
+        self.writer.finish()?.flush()
+    }
+}
diff --git a/ng-oxigraph/src/oxigraph/mod.rs b/ng-oxigraph/src/oxigraph/mod.rs
new file mode 100644
index 0000000..a62babb
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/mod.rs
@@ -0,0 +1,18 @@
+pub mod io;
+pub mod model;
+pub mod sparql;
+mod storage;
+pub mod store;
+
+pub mod storage_ng {
+    pub use super::storage::numeric_encoder;
+    pub use super::storage::ADDED_IN_MAIN;
+    pub use super::storage::ADDED_IN_OTHER;
+    pub use super::storage::BRANCH_PREFIX;
+    pub use super::storage::COMMIT_HAS_GRAPH;
+    pub use super::storage::COMMIT_PREFIX;
+    pub use super::storage::COMMIT_SKIP_NO_GRAPH;
+    pub use super::storage::REMOVED_IN_MAIN;
+    pub use super::storage::REMOVED_IN_OTHER;
+    pub use super::storage::REPO_IN_MAIN;
+}
diff --git a/ng-oxigraph/src/oxigraph/model.rs b/ng-oxigraph/src/oxigraph/model.rs
new file mode 100644
index 0000000..a173d8a
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/model.rs
@@ -0,0 +1,22 @@
+//! Implements data structures for [RDF 1.1 Concepts](https://www.w3.org/TR/rdf11-concepts/) using [OxRDF](https://crates.io/crates/oxrdf).
+//!
+//! Usage example:
+//!
+//! ```
+//! use oxigraph::model::*;
+//!
+//! let mut graph = Graph::default();
+//!
+//! // insertion
+//! let ex = NamedNodeRef::new("http://example.com").unwrap();
+//! let triple = TripleRef::new(ex, ex, ex);
+//! graph.insert(triple);
+//!
+//! // simple filter
+//! let results: Vec<_> = graph.triples_for_subject(ex).collect();
+//! assert_eq!(vec![triple], results);
+//! ```
+
+pub use crate::oxrdf::*;
+
+pub use crate::spargebra::term::GroundQuad;
diff --git a/ng-oxigraph/src/oxigraph/sparql/algebra.rs b/ng-oxigraph/src/oxigraph/sparql/algebra.rs
new file mode 100644
index 0000000..2beb40e
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/sparql/algebra.rs
@@ -0,0 +1,328 @@
+// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// partial Copyright (c) 2018 Oxigraph developers
+// All work licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice or not, may not be copied, modified, or distributed except
+// according to those terms.
+
+//! [SPARQL 1.1 Query Algebra](https://www.w3.org/TR/sparql11-query/#sparqlQuery)
+//!
+//! The root type for SPARQL queries is [`Query`] and the root type for updates is [`Update`].
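+//!
+//! A minimal sketch of both entry points (the query and update strings here are illustrative):
+//! ```
+//! use oxigraph::sparql::{Query, Update};
+//!
+//! let query = Query::parse("SELECT * WHERE { ?s ?p ?o }", None)?;
+//! assert!(query.dataset().is_default_dataset());
+//!
+//! let update = Update::parse("CLEAR ALL", None)?;
+//! assert_eq!(update.using_datasets().count(), 0);
+//! # Ok::<_, oxigraph::sparql::SparqlSyntaxError>(())
+//! ```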
+ +use crate::oxigraph::model::*; +use crate::oxigraph::sparql::eval::Timer; +use crate::oxsdatatypes::DayTimeDuration; +use crate::spargebra; +use crate::spargebra::GraphUpdateOperation; +use std::fmt; +use std::str::FromStr; + +/// A parsed [SPARQL query](https://www.w3.org/TR/sparql11-query/). +/// +/// ``` +/// use oxigraph::model::NamedNode; +/// use oxigraph::sparql::Query; +/// +/// let query_str = "SELECT ?s ?p ?o WHERE { ?s ?p ?o . }"; +/// let mut query = Query::parse(query_str, None)?; +/// +/// assert_eq!(query.to_string(), query_str); +/// +/// // We edit the query dataset specification +/// let default = vec![NamedNode::new("http://example.com")?.into()]; +/// query.dataset_mut().set_default_graph(default.clone()); +/// assert_eq!( +/// query.dataset().default_graph_graphs(), +/// Some(default.as_slice()) +/// ); +/// # Ok::<_, Box>(()) +/// ``` +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct Query { + pub(super) inner: spargebra::Query, + pub(super) dataset: QueryDataset, + pub(super) parsing_duration: Option, +} + +impl Query { + /// Parses a SPARQL query with an optional base IRI to resolve relative IRIs in the query. + pub fn parse( + query: &str, + base_iri: Option<&str>, + ) -> Result { + let start = Timer::now(); + let query = Self::from(spargebra::Query::parse(query, base_iri)?); + Ok(Self { + dataset: query.dataset, + inner: query.inner, + parsing_duration: start.elapsed(), + }) + } + + /// Returns [the query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset) + pub fn dataset(&self) -> &QueryDataset { + &self.dataset + } + + /// Returns [the query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset) + pub fn dataset_mut(&mut self) -> &mut QueryDataset { + &mut self.dataset + } +} + +impl fmt::Display for Query { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.fmt(f) // TODO: override + } +} + +impl FromStr for Query { + type Err = spargebra::SparqlSyntaxError; + + fn from_str(query: &str) -> Result { + Self::parse(query, None) + } +} + +impl TryFrom<&str> for Query { + type Error = spargebra::SparqlSyntaxError; + + fn try_from(query: &str) -> Result { + Self::from_str(query) + } +} + +impl TryFrom<&String> for Query { + type Error = spargebra::SparqlSyntaxError; + + fn try_from(query: &String) -> Result { + Self::from_str(query) + } +} + +impl From for Query { + fn from(query: spargebra::Query) -> Self { + Self { + dataset: QueryDataset::from_algebra(match &query { + spargebra::Query::Select { dataset, .. } + | spargebra::Query::Construct { dataset, .. } + | spargebra::Query::Describe { dataset, .. } + | spargebra::Query::Ask { dataset, .. } => dataset, + }), + inner: query, + parsing_duration: None, + } + } +} + +/// A parsed [SPARQL update](https://www.w3.org/TR/sparql11-update/). +/// +/// ``` +/// use oxigraph::sparql::Update; +/// +/// let update_str = "CLEAR ALL ;"; +/// let update = Update::parse(update_str, None)?; +/// +/// assert_eq!(update.to_string().trim(), update_str); +/// # Ok::<_, oxigraph::sparql::SparqlSyntaxError>(()) +/// ``` +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct Update { + pub(super) inner: spargebra::Update, + pub(super) using_datasets: Vec>, +} + +impl Update { + /// Parses a SPARQL update with an optional base IRI to resolve relative IRIs in the query. 
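+    ///
+    /// A minimal usage sketch (the update string and base IRI here are illustrative):
+    /// ```
+    /// use oxigraph::sparql::Update;
+    ///
+    /// let update = Update::parse("CLEAR SILENT GRAPH <g>", Some("http://example.com/"))?;
+    /// assert_eq!(update.using_datasets().count(), 0);
+    /// # Ok::<_, oxigraph::sparql::SparqlSyntaxError>(())
+    /// ```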
+ pub fn parse( + update: &str, + base_iri: Option<&str>, + ) -> Result { + Ok(spargebra::Update::parse(update, base_iri)?.into()) + } + + /// Returns [the query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset) in [DELETE/INSERT operations](https://www.w3.org/TR/sparql11-update/#deleteInsert). + pub fn using_datasets(&self) -> impl Iterator { + self.using_datasets.iter().filter_map(Option::as_ref) + } + + /// Returns [the query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset) in [DELETE/INSERT operations](https://www.w3.org/TR/sparql11-update/#deleteInsert). + pub fn using_datasets_mut(&mut self) -> impl Iterator { + self.using_datasets.iter_mut().filter_map(Option::as_mut) + } +} + +impl fmt::Display for Update { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.fmt(f) + } +} + +impl FromStr for Update { + type Err = spargebra::SparqlSyntaxError; + + fn from_str(update: &str) -> Result { + Self::parse(update, None) + } +} + +impl TryFrom<&str> for Update { + type Error = spargebra::SparqlSyntaxError; + + fn try_from(update: &str) -> Result { + Self::from_str(update) + } +} + +impl TryFrom<&String> for Update { + type Error = spargebra::SparqlSyntaxError; + + fn try_from(update: &String) -> Result { + Self::from_str(update) + } +} + +impl From for Update { + fn from(update: spargebra::Update) -> Self { + Self { + using_datasets: update + .operations + .iter() + .map(|operation| { + if let GraphUpdateOperation::DeleteInsert { using, .. } = operation { + Some(QueryDataset::from_algebra(using)) + } else { + None + } + }) + .collect(), + inner: update, + } + } +} + +/// A SPARQL query [dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset) +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct QueryDataset { + default: Option>, + named: Option>, +} + +impl QueryDataset { + pub(crate) fn new() -> Self { + Self { + default: None, + named: None, + } + } + + fn from_algebra(inner: &Option) -> Self { + if let Some(inner) = inner { + Self { + default: Some(inner.default.iter().map(|g| g.clone().into()).collect()), + named: inner + .named + .as_ref() + .map(|named| named.iter().map(|g| g.clone().into()).collect()), + } + } else { + Self { + default: Some(vec![GraphName::DefaultGraph]), + named: None, + } + } + } + + /// Checks if this dataset specification is the default one + /// (i.e. the default graph is the store default graph and all the store named graphs are available) + /// + /// ``` + /// use oxigraph::sparql::Query; + /// + /// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)? + /// .dataset() + /// .is_default_dataset()); + /// assert!(!Query::parse( + /// "SELECT ?s ?p ?o FROM WHERE { ?s ?p ?o . }", + /// None + /// )? 
+ /// .dataset() + /// .is_default_dataset()); + /// + /// # Ok::<_, Box>(()) + /// ``` + pub fn is_default_dataset(&self) -> bool { + self.default + .as_ref() + .map_or(false, |t| t == &[GraphName::DefaultGraph]) + && self.named.is_none() + } + + pub fn has_no_default_dataset(&self) -> bool { + self.default + .as_ref() + .map_or(true, |t| t == &[GraphName::DefaultGraph] || t.is_empty()) + } + + /// Returns the list of the store graphs that are available to the query as the default graph or `None` if the union of all graphs is used as the default graph + /// This list is by default only the store default graph + pub fn default_graph_graphs(&self) -> Option<&[GraphName]> { + self.default.as_deref() + } + + /// Sets if the default graph for the query should be the union of all the graphs in the queried store + pub fn set_default_graph_as_union(&mut self) { + self.default = None; + } + + /// Sets the list of graphs the query should consider as being part of the default graph. + /// + /// By default only the store default graph is considered. + /// ``` + /// use oxigraph::model::NamedNode; + /// use oxigraph::sparql::Query; + /// + /// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?; + /// let default = vec![NamedNode::new("http://example.com")?.into()]; + /// query.dataset_mut().set_default_graph(default.clone()); + /// assert_eq!( + /// query.dataset().default_graph_graphs(), + /// Some(default.as_slice()) + /// ); + /// + /// # Ok::<_, Box>(()) + /// ``` + pub fn set_default_graph(&mut self, graphs: Vec) { + self.default = Some(graphs) + } + + /// Returns the list of the available named graphs for the query or `None` if all graphs are available + pub fn available_named_graphs(&self) -> Option<&[NamedOrBlankNode]> { + self.named.as_deref() + } + + /// Sets the list of allowed named graphs in the query. + /// + /// ``` + /// use oxigraph::model::NamedNode; + /// use oxigraph::sparql::Query; + /// + /// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?; + /// let named = vec![NamedNode::new("http://example.com")?.into()]; + /// query + /// .dataset_mut() + /// .set_available_named_graphs(named.clone()); + /// assert_eq!( + /// query.dataset().available_named_graphs(), + /// Some(named.as_slice()) + /// ); + /// + /// # Ok::<_, Box>(()) + /// ``` + pub fn set_available_named_graphs(&mut self, named_graphs: Vec) { + self.named = Some(named_graphs); + } +} diff --git a/ng-oxigraph/src/oxigraph/sparql/dataset.rs b/ng-oxigraph/src/oxigraph/sparql/dataset.rs new file mode 100644 index 0000000..3718318 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/dataset.rs @@ -0,0 +1,274 @@ +// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// partial Copyright (c) 2018 Oxigraph developers +// All work licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice or not, may not be copied, modified, or distributed except +// according to those terms. 
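+
+// `DatasetView` restricts a `StorageReader` to the graphs selected by the
+// query's FROM / FROM NAMED clauses (falling back to the NextGraph default
+// graph when the query names none), and buffers the strings of terms encoded
+// during evaluation in `extra` so they can be decoded later without being
+// persisted to storage.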
+ +use crate::oxigraph::model::TermRef; +use crate::oxigraph::sparql::algebra::QueryDataset; +use crate::oxigraph::sparql::EvaluationError; +use crate::oxigraph::storage::numeric_encoder::{ + insert_term, EncodedQuad, EncodedTerm, StrHash, StrLookup, +}; +use crate::oxigraph::storage::{MatchBy, StorageError, StorageReader}; +use crate::oxigraph::store::CorruptionError; +use crate::oxrdf::{GraphName, NamedNodeRef, NamedOrBlankNode}; +use crate::sparopt::algebra::NamedNode; + +use std::cell::RefCell; +use std::collections::hash_map::Entry; +use std::collections::HashMap; +use std::iter::empty; +pub struct DatasetView { + reader: StorageReader, + extra: RefCell>, + dataset: EncodedDatasetSpec, +} + +struct ErrorIterator { + err: Option>, +} + +impl Iterator for ErrorIterator { + type Item = Result; + fn next(&mut self) -> Option { + self.err.take() + } +} + +impl DatasetView { + pub fn new( + reader: StorageReader, + query_dataset: &QueryDataset, + default_graph: &Option, + ) -> Self { + let dataset = EncodedDatasetSpec { + default: if query_dataset.has_no_default_dataset() && default_graph.is_some() { + Some(vec![GraphName::NamedNode(NamedNode::new_unchecked( + default_graph.to_owned().unwrap(), + )) + .as_ref() + .into()]) + } else { + query_dataset + .default_graph_graphs() + .map(|graphs| graphs.iter().map(|g| g.as_ref().into()).collect::>()) + }, + named: query_dataset + .available_named_graphs() + .map(|graphs| graphs.iter().map(|g| g.as_ref().into()).collect::>()), + }; + let res = Self { + reader, + extra: RefCell::new(HashMap::default()), + dataset, + }; + if let Some(default_graph) = default_graph { + res.encode_term(NamedNodeRef::new_unchecked(default_graph)); + } + if !query_dataset.has_no_default_dataset() { + query_dataset.default_graph_graphs().map(|graphs| { + graphs.iter().for_each(|g| match g { + GraphName::NamedNode(nn) => { + let _a = res.encode_term(nn); + } + _ => {} + }) + }); + } + query_dataset + .available_named_graphs() + .map(|graphs| for nob in graphs { match nob { + NamedOrBlankNode::NamedNode(nn) => { res.encode_term(NamedNodeRef::new_unchecked(nn.as_str())); } + ,_=>{} + } }); + res + } + + fn parse_graph_name(&self, graph_name: &EncodedTerm) -> Result { + match graph_name { + EncodedTerm::NamedNode { iri_id } => { + let graph_name_string = self + .get_str(iri_id)? 
+ .ok_or::(CorruptionError::msg("graph_not_found").into())?; + self.reader + .parse_graph_name(&graph_name_string, Some(*iri_id)) + } + _ => Err(CorruptionError::msg( + "Invalid graph_name (not a NamedNode) in parse_graph_name", + ) + .into()), + } + } + + fn store_encoded_quads_for_pattern<'a>( + &'a self, + subject: Option<&'a EncodedTerm>, + predicate: Option<&'a EncodedTerm>, + object: Option<&'a EncodedTerm>, + graph_name: Option<&'a EncodedTerm>, + ) -> Box>> { + let graph = if let Some(g) = graph_name { + match self.parse_graph_name(g) { + Ok(match_by) => Some(match_by), + Err(e) => { + return Box::new(ErrorIterator { + err: Some(Err(e.into())), + }) + } + } + } else { + None + }; + + Box::new( + self.reader + .quads_for_pattern(subject, predicate, object, graph) + .map(|t| t.map_err(Into::into)), + ) + } + + #[allow(clippy::needless_collect)] + pub fn encoded_quads_for_pattern( + &self, + subject: Option<&EncodedTerm>, + predicate: Option<&EncodedTerm>, + object: Option<&EncodedTerm>, + graph_name: Option<&EncodedTerm>, + ) -> Box>> { + if let Some(graph_name) = graph_name { + if graph_name.is_default_graph() { + if let Some(default_graph_graphs) = &self.dataset.default { + if default_graph_graphs.len() == 1 { + // Single graph optimization + Box::new( + self.store_encoded_quads_for_pattern( + subject, + predicate, + object, + Some(&default_graph_graphs[0]), + ) + .map(|quad| { + let quad = quad?; + Ok(EncodedQuad::new( + quad.subject, + quad.predicate, + quad.object, + EncodedTerm::DefaultGraph, + )) + }), + ) + } else { + let iters = default_graph_graphs + .iter() + .map(|graph_name| { + self.store_encoded_quads_for_pattern( + subject, + predicate, + object, + Some(graph_name), + ) + }) + .collect::>(); + Box::new(iters.into_iter().flatten().map(|quad| { + let quad = quad?; + Ok(EncodedQuad::new( + quad.subject, + quad.predicate, + quad.object, + EncodedTerm::DefaultGraph, + )) + })) + } + } else { + Box::new( + self.store_encoded_quads_for_pattern(subject, predicate, object, None) + .map(|quad| { + let quad = quad?; + Ok(EncodedQuad::new( + quad.subject, + quad.predicate, + quad.object, + EncodedTerm::DefaultGraph, + )) + }), + ) + } + } else if self + .dataset + .named + .as_ref() + .map_or(true, |d| d.contains(graph_name)) + { + Box::new(self.store_encoded_quads_for_pattern( + subject, + predicate, + object, + Some(graph_name), + )) + } else { + Box::new(empty()) + } + } else if let Some(named_graphs) = &self.dataset.named { + let iters = named_graphs + .iter() + .map(|graph_name| { + self.store_encoded_quads_for_pattern( + subject, + predicate, + object, + Some(graph_name), + ) + }) + .collect::>(); + Box::new(iters.into_iter().flatten()) + } else { + Box::new( + // TODO: filter could be removed here as we never return quads with defaultGraph as graph + self.store_encoded_quads_for_pattern(subject, predicate, object, None) + .filter(|quad| match quad { + Err(_) => true, + Ok(quad) => !quad.graph_name.is_default_graph(), + }), + ) + } + } + + pub fn encode_term<'a>(&self, term: impl Into>) -> EncodedTerm { + let term = term.into(); + let encoded = term.into(); + insert_term(term, &encoded, &mut |key, value| { + self.insert_str(key, value); + Ok(()) + }) + .unwrap(); + encoded + } + + pub fn insert_str(&self, key: &StrHash, value: &str) { + if let Entry::Vacant(e) = self.extra.borrow_mut().entry(*key) { + if !matches!(self.reader.contains_str(key), Ok(true)) { + e.insert(value.to_owned()); + } + } + } +} + +impl StrLookup for DatasetView { + fn get_str(&self, key: &StrHash) -> 
Result, StorageError> { + Ok(if let Some(value) = self.extra.borrow().get(key) { + Some(value.clone()) + } else { + self.reader.get_str(key)? + }) + } +} + +struct EncodedDatasetSpec { + default: Option>, + named: Option>, +} diff --git a/ng-oxigraph/src/oxigraph/sparql/error.rs b/ng-oxigraph/src/oxigraph/sparql/error.rs new file mode 100644 index 0000000..8b3f317 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/error.rs @@ -0,0 +1,87 @@ +use crate::oxigraph::io::RdfParseError; +use crate::oxigraph::model::NamedNode; +use crate::oxigraph::sparql::results::QueryResultsParseError as ResultsParseError; +use crate::oxigraph::sparql::SparqlSyntaxError; +use crate::oxigraph::storage::StorageError; +use std::convert::Infallible; +use std::error::Error; +use std::io; + +/// A SPARQL evaluation error. +#[derive(Debug, thiserror::Error)] +#[non_exhaustive] +pub enum EvaluationError { + /// An error in SPARQL parsing. + #[error(transparent)] + Parsing(#[from] SparqlSyntaxError), + /// An error from the storage. + #[error(transparent)] + Storage(#[from] StorageError), + /// An error while parsing an external RDF file. + #[error(transparent)] + GraphParsing(#[from] RdfParseError), + /// An error while parsing an external result file (likely from a federated query). + #[error(transparent)] + ResultsParsing(#[from] ResultsParseError), + /// An error returned during results serialization. + #[error(transparent)] + ResultsSerialization(#[from] io::Error), + /// Error during `SERVICE` evaluation + #[error("{0}")] + Service(#[source] Box), + /// Error when `CREATE` tries to create an already existing graph + #[error("The graph {0} already exists")] + GraphAlreadyExists(NamedNode), + /// Error when `DROP` or `CLEAR` tries to remove a not existing graph + #[error("The graph {0} does not exist")] + GraphDoesNotExist(NamedNode), + /// The variable storing the `SERVICE` name is unbound + #[error("The variable encoding the service name is unbound")] + UnboundService, + /// The given `SERVICE` is not supported + #[error("The service {0} is not supported")] + UnsupportedService(NamedNode), + /// The given content media type returned from an HTTP response is not supported (`SERVICE` and `LOAD`) + #[error("The content media type {0} is not supported")] + UnsupportedContentType(String), + /// The `SERVICE` call has not returns solutions + #[error("The service is not returning solutions but a boolean or a graph")] + ServiceDoesNotReturnSolutions, + /// The results are not a RDF graph + #[error("The query results are not a RDF graph")] + NotAGraph, + #[error("NextGraph cannot add triples to the default graph")] + NoDefaultGraph, +} + +impl From for EvaluationError { + #[inline] + fn from(error: Infallible) -> Self { + match error {} + } +} + +impl From for io::Error { + #[inline] + fn from(error: EvaluationError) -> Self { + match error { + EvaluationError::Parsing(error) => Self::new(io::ErrorKind::InvalidData, error), + EvaluationError::GraphParsing(error) => error.into(), + EvaluationError::ResultsParsing(error) => error.into(), + EvaluationError::ResultsSerialization(error) => error, + EvaluationError::Storage(error) => error.into(), + EvaluationError::Service(error) => match error.downcast() { + Ok(error) => *error, + Err(error) => Self::new(io::ErrorKind::Other, error), + }, + EvaluationError::GraphAlreadyExists(_) + | EvaluationError::GraphDoesNotExist(_) + | EvaluationError::UnboundService + | EvaluationError::UnsupportedService(_) + | EvaluationError::UnsupportedContentType(_) + | 
EvaluationError::ServiceDoesNotReturnSolutions + | EvaluationError::NotAGraph + | EvaluationError::NoDefaultGraph => Self::new(io::ErrorKind::InvalidInput, error), + } + } +} diff --git a/ng-oxigraph/src/oxigraph/sparql/eval.rs b/ng-oxigraph/src/oxigraph/sparql/eval.rs new file mode 100644 index 0000000..d6bd322 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/eval.rs @@ -0,0 +1,5871 @@ +use crate::oxigraph::model::vocab::{rdf, xsd}; +use crate::oxigraph::model::{BlankNode, LiteralRef, NamedNodeRef, Term, Triple}; +use crate::oxigraph::sparql::algebra::{Query, QueryDataset}; +use crate::oxigraph::sparql::dataset::DatasetView; +use crate::oxigraph::sparql::error::EvaluationError; +use crate::oxigraph::sparql::model::*; +use crate::oxigraph::sparql::service::ServiceHandler; +use crate::oxigraph::sparql::CustomFunctionRegistry; +use crate::oxigraph::storage::numeric_encoder::*; +use crate::oxigraph::storage::small_string::SmallString; +use crate::oxrdf::{TermRef, Variable}; +use crate::oxsdatatypes::*; +use crate::spargebra; +use crate::spargebra::algebra::{AggregateFunction, Function, PropertyPathExpression}; +use crate::spargebra::term::{ + GroundSubject, GroundTerm, GroundTermPattern, GroundTriple, NamedNodePattern, TermPattern, + TriplePattern, +}; +use crate::sparopt::algebra::{ + AggregateExpression, Expression, GraphPattern, JoinAlgorithm, LeftJoinAlgorithm, + MinusAlgorithm, OrderExpression, +}; +use digest::Digest; +use json_event_parser::{JsonEvent, ToWriteJsonWriter}; +use md5::Md5; +use oxilangtag::LanguageTag; +use oxiri::Iri; +use rand::random; +use regex::{Regex, RegexBuilder}; +use sha1::Sha1; +use sha2::{Sha256, Sha384, Sha512}; +use std::cell::Cell; +use std::cmp::Ordering; +use std::collections::hash_map::DefaultHasher; +use std::collections::{HashMap, HashSet}; +use std::hash::{Hash, Hasher}; +use std::iter::{empty, once}; +use std::rc::Rc; +use std::sync::Arc; +use std::{fmt, io, str}; + +const REGEX_SIZE_LIMIT: usize = 1_000_000; + +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct EncodedTuple { + inner: Vec>, +} + +impl EncodedTuple { + pub fn with_capacity(capacity: usize) -> Self { + Self { + inner: Vec::with_capacity(capacity), + } + } + + pub fn capacity(&self) -> usize { + self.inner.capacity() + } + + pub fn contains(&self, index: usize) -> bool { + self.inner.get(index).map_or(false, Option::is_some) + } + + pub fn get(&self, index: usize) -> Option<&EncodedTerm> { + self.inner.get(index).unwrap_or(&None).as_ref() + } + + pub fn iter(&self) -> impl Iterator> + '_ { + self.inner.iter().cloned() + } + + pub fn set(&mut self, index: usize, value: EncodedTerm) { + if self.inner.len() <= index { + self.inner.resize(index + 1, None); + } + self.inner[index] = Some(value); + } + + pub fn combine_with(&self, other: &Self) -> Option { + if self.inner.len() < other.inner.len() { + let mut result = other.inner.clone(); + for (key, self_value) in self.inner.iter().enumerate() { + if let Some(self_value) = self_value { + match &other.inner[key] { + Some(other_value) => { + if self_value != other_value { + return None; + } + } + None => result[key] = Some(self_value.clone()), + } + } + } + Some(Self { inner: result }) + } else { + let mut result = self.inner.clone(); + for (key, other_value) in other.inner.iter().enumerate() { + if let Some(other_value) = other_value { + match &self.inner[key] { + Some(self_value) => { + if self_value != other_value { + return None; + } + } + None => result[key] = Some(other_value.clone()), + } + } + } + Some(Self { inner: result }) 
+ } + } +} + +impl IntoIterator for EncodedTuple { + type Item = Option; + type IntoIter = std::vec::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.inner.into_iter() + } +} + +type EncodedTuplesIterator = Box>>; + +#[derive(Clone)] +pub struct SimpleEvaluator { + dataset: Rc, + base_iri: Option>>, + now: DateTime, + service_handler: Arc>, + custom_functions: Arc, + run_stats: bool, +} + +impl SimpleEvaluator { + pub fn new( + dataset: Rc, + base_iri: Option>>, + service_handler: Arc>, + custom_functions: Arc, + run_stats: bool, + ) -> Self { + Self { + dataset, + base_iri, + now: DateTime::now(), + service_handler, + custom_functions, + run_stats, + } + } + + pub fn evaluate_select(&self, pattern: &GraphPattern) -> (QueryResults, Rc) { + let mut variables = Vec::new(); + let (eval, stats) = self.graph_pattern_evaluator(pattern, &mut variables); + let from = EncodedTuple::with_capacity(variables.len()); + ( + QueryResults::Solutions(decode_bindings( + Rc::clone(&self.dataset), + eval(from), + Arc::from(variables), + )), + stats, + ) + } + + pub fn evaluate_ask( + &self, + pattern: &GraphPattern, + ) -> (Result, Rc) { + let mut variables = Vec::new(); + let (eval, stats) = self.graph_pattern_evaluator(pattern, &mut variables); + let from = EncodedTuple::with_capacity(variables.len()); + ( + match eval(from).next() { + Some(Ok(_)) => Ok(QueryResults::Boolean(true)), + Some(Err(error)) => Err(error), + None => Ok(QueryResults::Boolean(false)), + }, + stats, + ) + } + + pub fn evaluate_construct( + &self, + pattern: &GraphPattern, + template: &[TriplePattern], + ) -> (QueryResults, Rc) { + let mut variables = Vec::new(); + let (eval, stats) = self.graph_pattern_evaluator(pattern, &mut variables); + let mut bnodes = Vec::new(); + let template = template + .iter() + .map(|t| TripleTemplate { + subject: self.template_value_from_term_or_variable( + &t.subject, + &mut variables, + &mut bnodes, + ), + predicate: self + .template_value_from_named_node_or_variable(&t.predicate, &mut variables), + object: self.template_value_from_term_or_variable( + &t.object, + &mut variables, + &mut bnodes, + ), + }) + .collect(); + let from = EncodedTuple::with_capacity(variables.len()); + ( + QueryResults::Graph(QueryTripleIter { + iter: Box::new(ConstructIterator { + eval: self.clone(), + iter: eval(from), + template, + buffered_results: Vec::default(), + bnodes: Vec::default(), + }), + }), + stats, + ) + } + + pub fn evaluate_describe( + &self, + pattern: &GraphPattern, + ) -> (QueryResults, Rc) { + let mut variables = Vec::new(); + let (eval, stats) = self.graph_pattern_evaluator(pattern, &mut variables); + let from = EncodedTuple::with_capacity(variables.len()); + ( + QueryResults::Graph(QueryTripleIter { + iter: Box::new(DescribeIterator { + eval: self.clone(), + iter: eval(from), + quads: Box::new(empty()), + }), + }), + stats, + ) + } + + pub fn graph_pattern_evaluator( + &self, + pattern: &GraphPattern, + encoded_variables: &mut Vec, + ) -> ( + Rc EncodedTuplesIterator>, + Rc, + ) { + let mut stat_children = Vec::new(); + let mut evaluator = + self.build_graph_pattern_evaluator(pattern, encoded_variables, &mut stat_children); + let stats = Rc::new(EvalNodeWithStats { + label: eval_node_label(pattern), + children: stat_children, + exec_count: Cell::new(0), + exec_duration: Cell::new(self.run_stats.then(DayTimeDuration::default)), + }); + if self.run_stats { + let stats = Rc::clone(&stats); + evaluator = Rc::new(move |tuple| { + let start = Timer::now(); + let inner = evaluator(tuple); + 
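// Record here the time spent building the child iterator; the wrapping StatsIterator then adds the time spent pulling each individual solution. +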
stats.exec_duration.set( + stats + .exec_duration + .get() + .and_then(|stat| stat.checked_add(start.elapsed()?)), + ); + Box::new(StatsIterator { + inner, + stats: Rc::clone(&stats), + }) + }) + } + (evaluator, stats) + } + + fn build_graph_pattern_evaluator( + &self, + pattern: &GraphPattern, + encoded_variables: &mut Vec, + stat_children: &mut Vec>, + ) -> Rc EncodedTuplesIterator> { + match pattern { + GraphPattern::Values { + variables, + bindings, + } => { + let encoding = variables + .iter() + .map(|v| encode_variable(encoded_variables, v)) + .collect::>(); + let encoded_tuples = bindings + .iter() + .map(|row| { + let mut result = EncodedTuple::with_capacity(variables.len()); + for (key, value) in row.iter().enumerate() { + if let Some(term) = value { + result.set( + encoding[key], + match term { + GroundTerm::NamedNode(node) => self.encode_term(node), + GroundTerm::Literal(literal) => self.encode_term(literal), + GroundTerm::Triple(triple) => self.encode_triple(triple), + }, + ); + } + } + result + }) + .collect::>(); + Rc::new(move |from| { + Box::new( + encoded_tuples + .iter() + .filter_map(move |t| Some(Ok(t.combine_with(&from)?))) + .collect::>() + .into_iter(), + ) + }) + } + GraphPattern::Service { + name, + inner, + silent, + } => { + #[allow(clippy::shadow_same)] + let silent = *silent; + let service_name = + TupleSelector::from_named_node_pattern(name, encoded_variables, &self.dataset); + self.build_graph_pattern_evaluator(inner, encoded_variables, &mut Vec::new()); // We call recursively to fill "encoded_variables" + let graph_pattern = spargebra::algebra::GraphPattern::from(inner.as_ref()); + let variables = Rc::from(encoded_variables.as_slice()); + let eval = self.clone(); + Rc::new(move |from| { + match eval.evaluate_service( + &service_name, + &graph_pattern, + Rc::clone(&variables), + &from, + ) { + Ok(result) => Box::new(result.filter_map(move |binding| { + binding + .map(|binding| binding.combine_with(&from)) + .transpose() + })), + Err(e) => { + if silent { + Box::new(once(Ok(from))) + } else { + Box::new(once(Err(e))) + } + } + } + }) + } + GraphPattern::QuadPattern { + subject, + predicate, + object, + graph_name, + } => { + let subject = TupleSelector::from_ground_term_pattern( + subject, + encoded_variables, + &self.dataset, + ); + let predicate = TupleSelector::from_named_node_pattern( + predicate, + encoded_variables, + &self.dataset, + ); + let object = TupleSelector::from_ground_term_pattern( + object, + encoded_variables, + &self.dataset, + ); + let graph_name = TupleSelector::from_graph_name_pattern( + graph_name, + encoded_variables, + &self.dataset, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |from| { + let iter = dataset.encoded_quads_for_pattern( + subject.get_pattern_value(&from).as_ref(), + predicate.get_pattern_value(&from).as_ref(), + object.get_pattern_value(&from).as_ref(), + graph_name.get_pattern_value(&from).as_ref(), + ); + let subject = subject.clone(); + let predicate = predicate.clone(); + let object = object.clone(); + let graph_name = graph_name.clone(); + Box::new(iter.filter_map(move |quad| match quad { + Ok(quad) => { + let mut new_tuple = from.clone(); + put_pattern_value(&subject, quad.subject, &mut new_tuple)?; + put_pattern_value(&predicate, quad.predicate, &mut new_tuple)?; + put_pattern_value(&object, quad.object, &mut new_tuple)?; + put_pattern_value(&graph_name, quad.graph_name, &mut new_tuple)?; + Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + })) + }) + } + GraphPattern::Path { + subject, + 
path, + object, + graph_name, + } => { + let subject = TupleSelector::from_ground_term_pattern( + subject, + encoded_variables, + &self.dataset, + ); + let path = self.encode_property_path(path); + + let object = TupleSelector::from_ground_term_pattern( + object, + encoded_variables, + &self.dataset, + ); + let graph_name = TupleSelector::from_graph_name_pattern( + graph_name, + encoded_variables, + &self.dataset, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |from| { + let input_subject = subject.get_pattern_value(&from); + let input_object = object.get_pattern_value(&from); + let input_graph_name = graph_name.get_pattern_value(&from); + let path_eval = PathEvaluator { + dataset: Rc::clone(&dataset), + }; + match (input_subject, input_object, input_graph_name) { + (Some(input_subject), Some(input_object), Some(input_graph_name)) => { + match path_eval.eval_closed_in_graph( + &path, + &input_subject, + &input_object, + &input_graph_name, + ) { + Ok(true) => Box::new(once(Ok(from))), + Ok(false) => Box::new(empty()), + Err(e) => Box::new(once(Err(e))), + } + } + (Some(input_subject), None, Some(input_graph_name)) => { + let object = object.clone(); + Box::new( + path_eval + .eval_from_in_graph(&path, &input_subject, &input_graph_name) + .filter_map(move |o| match o { + Ok(o) => { + let mut new_tuple = from.clone(); + put_pattern_value(&object, o, &mut new_tuple)?; + Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + }), + ) + } + (None, Some(input_object), Some(input_graph_name)) => { + let subject = subject.clone(); + Box::new( + path_eval + .eval_to_in_graph(&path, &input_object, &input_graph_name) + .filter_map(move |s| match s { + Ok(s) => { + let mut new_tuple = from.clone(); + put_pattern_value(&subject, s, &mut new_tuple)?; + Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + }), + ) + } + (None, None, Some(input_graph_name)) => { + let subject = subject.clone(); + let object = object.clone(); + Box::new( + path_eval + .eval_open_in_graph(&path, &input_graph_name) + .filter_map(move |so| match so { + Ok((s, o)) => { + let mut new_tuple = from.clone(); + put_pattern_value(&subject, s, &mut new_tuple)?; + put_pattern_value(&object, o, &mut new_tuple)?; + Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + }), + ) + } + (Some(input_subject), Some(input_object), None) => { + let graph_name = graph_name.clone(); + Box::new( + path_eval + .eval_closed_in_unknown_graph( + &path, + &input_subject, + &input_object, + ) + .filter_map(move |r| match r { + Ok(g) => { + let mut new_tuple = from.clone(); + put_pattern_value(&graph_name, g, &mut new_tuple)?; + Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + }), + ) + } + (Some(input_subject), None, None) => { + let object = object.clone(); + let graph_name = graph_name.clone(); + Box::new( + path_eval + .eval_from_in_unknown_graph(&path, &input_subject) + .filter_map(move |r| match r { + Ok((o, g)) => { + let mut new_tuple = from.clone(); + put_pattern_value(&object, o, &mut new_tuple)?; + put_pattern_value(&graph_name, g, &mut new_tuple)?; + Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + }), + ) + } + (None, Some(input_object), None) => { + let subject = subject.clone(); + let graph_name = graph_name.clone(); + Box::new( + path_eval + .eval_to_in_unknown_graph(&path, &input_object) + .filter_map(move |r| match r { + Ok((s, g)) => { + let mut new_tuple = from.clone(); + put_pattern_value(&subject, s, &mut new_tuple)?; + put_pattern_value(&graph_name, g, &mut new_tuple)?; + + 
Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + }), + ) + } + (None, None, None) => { + let subject = subject.clone(); + let object = object.clone(); + let graph_name = graph_name.clone(); + Box::new(path_eval.eval_open_in_unknown_graph(&path).filter_map( + move |r| match r { + Ok((s, o, g)) => { + let mut new_tuple = from.clone(); + put_pattern_value(&subject, s, &mut new_tuple)?; + put_pattern_value(&object, o, &mut new_tuple)?; + put_pattern_value(&graph_name, g, &mut new_tuple)?; + Some(Ok(new_tuple)) + } + Err(error) => Some(Err(error)), + }, + )) + } + } + }) + } + GraphPattern::Join { + left, + right, + algorithm, + } => { + let (left, left_stats) = self.graph_pattern_evaluator(left, encoded_variables); + stat_children.push(left_stats); + let (right, right_stats) = self.graph_pattern_evaluator(right, encoded_variables); + stat_children.push(right_stats); + + match algorithm { + JoinAlgorithm::HashBuildLeftProbeRight { keys } => { + let build = left; + let probe = right; + if keys.is_empty() { + // Cartesian product + Rc::new(move |from| { + let mut errors = Vec::default(); + let build_values = build(from.clone()) + .filter_map(|result| match result { + Ok(result) => Some(result), + Err(error) => { + errors.push(Err(error)); + None + } + }) + .collect::>(); + Box::new(CartesianProductJoinIterator { + probe_iter: probe(from), + built: build_values, + buffered_results: errors, + }) + }) + } else { + // Real hash join + let keys = keys + .iter() + .map(|v| encode_variable(encoded_variables, v)) + .collect::>(); + Rc::new(move |from| { + let mut errors = Vec::default(); + let mut built_values = EncodedTupleSet::new(keys.clone()); + built_values.extend(build(from.clone()).filter_map(|result| { + match result { + Ok(result) => Some(result), + Err(error) => { + errors.push(Err(error)); + None + } + } + })); + Box::new(HashJoinIterator { + probe_iter: probe(from), + built: built_values, + buffered_results: errors, + }) + }) + } + } + } + } + GraphPattern::Lateral { left, right } => { + let (left, left_stats) = self.graph_pattern_evaluator(left, encoded_variables); + stat_children.push(left_stats); + + if let GraphPattern::LeftJoin { + left: nested_left, + right: nested_right, + expression, + .. 
+ } = right.as_ref() + { + if nested_left.is_empty_singleton() { + // We are in a ForLoopLeftJoin + let right = + GraphPattern::filter(nested_right.as_ref().clone(), expression.clone()); + let (right, right_stats) = + self.graph_pattern_evaluator(&right, encoded_variables); + stat_children.push(right_stats); + return Rc::new(move |from| { + Box::new(ForLoopLeftJoinIterator { + right_evaluator: Rc::clone(&right), + left_iter: left(from), + current_right: Box::new(empty()), + }) + }); + } + } + let (right, right_stats) = self.graph_pattern_evaluator(right, encoded_variables); + stat_children.push(right_stats); + Rc::new(move |from| { + let right = Rc::clone(&right); + Box::new(left(from).flat_map(move |t| match t { + Ok(t) => right(t), + Err(e) => Box::new(once(Err(e))), + })) + }) + } + GraphPattern::Minus { + left, + right, + algorithm, + } => { + let (left, left_stats) = self.graph_pattern_evaluator(left, encoded_variables); + stat_children.push(left_stats); + let (right, right_stats) = self.graph_pattern_evaluator(right, encoded_variables); + stat_children.push(right_stats); + + match algorithm { + MinusAlgorithm::HashBuildRightProbeLeft { keys } => { + if keys.is_empty() { + Rc::new(move |from| { + let right: Vec<_> = + right(from.clone()).filter_map(Result::ok).collect(); + Box::new(left(from).filter(move |left_tuple| { + if let Ok(left_tuple) = left_tuple { + !right.iter().any(|right_tuple| { + are_compatible_and_not_disjointed( + left_tuple, + right_tuple, + ) + }) + } else { + true + } + })) + }) + } else { + let keys = keys + .iter() + .map(|v| encode_variable(encoded_variables, v)) + .collect::>(); + Rc::new(move |from| { + let mut right_values = EncodedTupleSet::new(keys.clone()); + right_values.extend(right(from.clone()).filter_map(Result::ok)); + Box::new(left(from).filter(move |left_tuple| { + if let Ok(left_tuple) = left_tuple { + !right_values.get(left_tuple).iter().any(|right_tuple| { + are_compatible_and_not_disjointed( + left_tuple, + right_tuple, + ) + }) + } else { + true + } + })) + }) + } + } + } + } + GraphPattern::LeftJoin { + left, + right, + expression, + algorithm, + } => { + let (left, left_stats) = self.graph_pattern_evaluator(left, encoded_variables); + stat_children.push(left_stats); + let (right, right_stats) = self.graph_pattern_evaluator(right, encoded_variables); + stat_children.push(right_stats); + let expression = + self.expression_evaluator(expression, encoded_variables, stat_children); + + match algorithm { + LeftJoinAlgorithm::HashBuildRightProbeLeft { keys } => { + // Real hash join + let keys = keys + .iter() + .map(|v| encode_variable(encoded_variables, v)) + .collect::>(); + Rc::new(move |from| { + let mut errors = Vec::default(); + let mut right_values = EncodedTupleSet::new(keys.clone()); + right_values.extend(right(from.clone()).filter_map( + |result| match result { + Ok(result) => Some(result), + Err(error) => { + errors.push(Err(error)); + None + } + }, + )); + Box::new(HashLeftJoinIterator { + left_iter: left(from), + right: right_values, + buffered_results: errors, + expression: Rc::clone(&expression), + }) + }) + } + } + } + GraphPattern::Filter { inner, expression } => { + let (child, child_stats) = self.graph_pattern_evaluator(inner, encoded_variables); + stat_children.push(child_stats); + let expression = + self.expression_evaluator(expression, encoded_variables, stat_children); + + Rc::new(move |from| { + let expression = Rc::clone(&expression); + Box::new(child(from).filter(move |tuple| { + match tuple { + Ok(tuple) => 
expression(tuple) + .and_then(|term| to_bool(&term)) + .unwrap_or(false), + Err(_) => true, + } + })) + }) + } + GraphPattern::Union { inner } => { + let children = inner + .iter() + .map(|child| { + let (child, child_stats) = + self.graph_pattern_evaluator(child, encoded_variables); + stat_children.push(child_stats); + child + }) + .collect::>(); + + Rc::new(move |from| { + Box::new(UnionIterator { + plans: children.clone(), + input: from, + current_iterator: Box::new(empty()), + current_plan: 0, + }) + }) + } + GraphPattern::Extend { + inner, + variable, + expression, + } => { + let (child, child_stats) = self.graph_pattern_evaluator(inner, encoded_variables); + stat_children.push(child_stats); + + let position = encode_variable(encoded_variables, variable); + let expression = + self.expression_evaluator(expression, encoded_variables, stat_children); + Rc::new(move |from| { + let expression = Rc::clone(&expression); + Box::new(child(from).map(move |tuple| { + let mut tuple = tuple?; + if let Some(value) = expression(&tuple) { + tuple.set(position, value); + } + Ok(tuple) + })) + }) + } + GraphPattern::OrderBy { inner, expression } => { + let (child, child_stats) = self.graph_pattern_evaluator(inner, encoded_variables); + stat_children.push(child_stats); + let by = expression + .iter() + .map(|comp| match comp { + OrderExpression::Asc(expression) => ComparatorFunction::Asc( + self.expression_evaluator(expression, encoded_variables, stat_children), + ), + OrderExpression::Desc(expression) => ComparatorFunction::Desc( + self.expression_evaluator(expression, encoded_variables, stat_children), + ), + }) + .collect::>(); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |from| { + let mut errors = Vec::default(); + let mut values = child(from) + .filter_map(|result| match result { + Ok(result) => Some(result), + Err(error) => { + errors.push(Err(error)); + None + } + }) + .collect::>(); + values.sort_unstable_by(|a, b| { + for comp in &by { + match comp { + ComparatorFunction::Asc(expression) => { + match cmp_terms( + &dataset, + expression(a).as_ref(), + expression(b).as_ref(), + ) { + Ordering::Greater => return Ordering::Greater, + Ordering::Less => return Ordering::Less, + Ordering::Equal => (), + } + } + ComparatorFunction::Desc(expression) => { + match cmp_terms( + &dataset, + expression(a).as_ref(), + expression(b).as_ref(), + ) { + Ordering::Greater => return Ordering::Less, + Ordering::Less => return Ordering::Greater, + Ordering::Equal => (), + } + } + } + } + Ordering::Equal + }); + Box::new(errors.into_iter().chain(values.into_iter().map(Ok))) + }) + } + GraphPattern::Distinct { inner } => { + let (child, child_stats) = self.graph_pattern_evaluator(inner, encoded_variables); + stat_children.push(child_stats); + Rc::new(move |from| Box::new(hash_deduplicate(child(from)))) + } + GraphPattern::Reduced { inner } => { + let (child, child_stats) = self.graph_pattern_evaluator(inner, encoded_variables); + stat_children.push(child_stats); + Rc::new(move |from| { + Box::new(ConsecutiveDeduplication { + inner: child(from), + current: None, + }) + }) + } + GraphPattern::Slice { + inner, + start, + length, + } => { + let (mut child, child_stats) = + self.graph_pattern_evaluator(inner, encoded_variables); + stat_children.push(child_stats); + #[allow(clippy::shadow_same)] + let start = *start; + if start > 0 { + child = Rc::new(move |from| Box::new(child(from).skip(start))); + } + if let Some(length) = *length { + child = Rc::new(move |from| Box::new(child(from).take(length))); + } + 
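+ // Editorial note, not part of the original patch: the Slice arm above maps
+ // SPARQL OFFSET/LIMIT directly onto iterator adapters, so no solutions are
+ // materialized. The same idea in isolation, with a plain integer range as a
+ // stand-in for a solution iterator:
+ //
+ //     let page: Vec<_> = (0..100).skip(10) // OFFSET 10
+ //         .take(5)                         // LIMIT 5
+ //         .collect();
+ //     assert_eq!(page, vec![10, 11, 12, 13, 14]);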
+                 child
+             }
+             GraphPattern::Project { inner, variables } => {
+                 let mut inner_encoded_variables = variables.clone();
+                 let (child, child_stats) =
+                     self.graph_pattern_evaluator(inner, &mut inner_encoded_variables);
+                 stat_children.push(child_stats);
+                 let mapping = variables
+                     .iter()
+                     .enumerate()
+                     .map(|(new_variable, variable)| {
+                         (new_variable, encode_variable(encoded_variables, variable))
+                     })
+                     .collect::<Rc<[(usize, usize)]>>();
+                 Rc::new(move |from| {
+                     let mapping = Rc::clone(&mapping);
+                     let mut input_tuple = EncodedTuple::with_capacity(mapping.len());
+                     for (input_key, output_key) in &*mapping {
+                         if let Some(value) = from.get(*output_key) {
+                             input_tuple.set(*input_key, value.clone());
+                         }
+                     }
+                     Box::new(child(input_tuple).filter_map(move |tuple| {
+                         match tuple {
+                             Ok(tuple) => {
+                                 let mut output_tuple = from.clone();
+                                 for (input_key, output_key) in &*mapping {
+                                     if let Some(value) = tuple.get(*input_key) {
+                                         if let Some(existing_value) = output_tuple.get(*output_key)
+                                         {
+                                             if existing_value != value {
+                                                 return None; // Conflict
+                                             }
+                                         } else {
+                                             output_tuple.set(*output_key, value.clone());
+                                         }
+                                     }
+                                 }
+                                 Some(Ok(output_tuple))
+                             }
+                             Err(e) => Some(Err(e)),
+                         }
+                     }))
+                 })
+             }
+             GraphPattern::Group {
+                 inner,
+                 aggregates,
+                 variables,
+             } => {
+                 let (child, child_stats) = self.graph_pattern_evaluator(inner, encoded_variables);
+                 stat_children.push(child_stats);
+                 let key_variables = variables
+                     .iter()
+                     .map(|k| encode_variable(encoded_variables, k))
+                     .collect::<Rc<[_]>>();
+                 let aggregate_input_expressions = aggregates
+                     .iter()
+                     .map(|(_, expression)| match expression {
+                         AggregateExpression::CountSolutions { .. } => None,
+                         AggregateExpression::FunctionCall { expr, .. } => {
+                             Some(self.expression_evaluator(expr, encoded_variables, stat_children))
+                         }
+                     })
+                     .collect::<Vec<_>>();
+                 let accumulator_builders = aggregates
+                     .iter()
+                     .map(|(_, aggregate)| Self::accumulator_builder(&self.dataset, aggregate))
+                     .collect::<Vec<_>>();
+                 let accumulator_variables = aggregates
+                     .iter()
+                     .map(|(variable, _)| encode_variable(encoded_variables, variable))
+                     .collect::<Vec<_>>();
+                 Rc::new(move |from| {
+                     let tuple_size = from.capacity();
+                     let key_variables = Rc::clone(&key_variables);
+                     let mut errors = Vec::default();
+                     let mut accumulators_for_group =
+                         HashMap::<Vec<Option<EncodedTerm>>, Vec<Box<dyn Accumulator>>>::default();
+                     if key_variables.is_empty() {
+                         // There is always a single group if there is no GROUP BY
+                         accumulators_for_group.insert(
+                             Vec::new(),
+                             accumulator_builders.iter().map(|c| c()).collect::<Vec<_>>(),
+                         );
+                     }
+                     child(from)
+                         .filter_map(|result| match result {
+                             Ok(result) => Some(result),
+                             Err(error) => {
+                                 errors.push(error);
+                                 None
+                             }
+                         })
+                         .for_each(|tuple| {
+                             // TODO avoid copy for key?
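+ // Editorial note, not part of the original patch: each incoming solution is
+ // bucketed by its GROUP BY key and feeds one accumulator per aggregate in
+ // that bucket, exactly the entry()/or_insert_with() pattern below. In
+ // miniature, with hypothetical i64 keys and a SUM accumulator:
+ //
+ //     use std::collections::HashMap;
+ //     let mut sums: HashMap<i64, i64> = HashMap::new();
+ //     for (key, value) in [(1, 10), (2, 5), (1, 32)] {
+ //         *sums.entry(key).or_insert(0) += value;
+ //     }
+ //     assert_eq!(sums[&1], 42);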
+                             let key = key_variables
+                                 .iter()
+                                 .map(|v| tuple.get(*v).cloned())
+                                 .collect();
+
+                             let key_accumulators =
+                                 accumulators_for_group.entry(key).or_insert_with(|| {
+                                     accumulator_builders.iter().map(|c| c()).collect::<Vec<_>>()
+                                 });
+                             for (accumulator, input_expression) in key_accumulators
+                                 .iter_mut()
+                                 .zip(&aggregate_input_expressions)
+                             {
+                                 accumulator.add(
+                                     input_expression
+                                         .as_ref()
+                                         .and_then(|parameter| parameter(&tuple)),
+                                 );
+                             }
+                         });
+                     let accumulator_variables = accumulator_variables.clone();
+                     Box::new(
+                         errors
+                             .into_iter()
+                             .map(Err)
+                             .chain(accumulators_for_group.into_iter().map(
+                                 move |(key, accumulators)| {
+                                     let mut result = EncodedTuple::with_capacity(tuple_size);
+                                     for (variable, value) in key_variables.iter().zip(key) {
+                                         if let Some(value) = value {
+                                             result.set(*variable, value);
+                                         }
+                                     }
+                                     for (accumulator, variable) in
+                                         accumulators.into_iter().zip(&accumulator_variables)
+                                     {
+                                         if let Some(value) = accumulator.state() {
+                                             result.set(*variable, value);
+                                         }
+                                     }
+                                     Ok(result)
+                                 },
+                             )),
+                     )
+                 })
+             }
+         }
+     }
+
+     fn evaluate_service(
+         &self,
+         service_name: &TupleSelector,
+         graph_pattern: &spargebra::algebra::GraphPattern,
+         variables: Rc<[Variable]>,
+         from: &EncodedTuple,
+     ) -> Result<EncodedTuplesIterator, EvaluationError> {
+         let service_name = service_name
+             .get_pattern_value(from)
+             .ok_or(EvaluationError::UnboundService)?;
+         if let QueryResults::Solutions(iter) = self.service_handler.handle(
+             self.dataset.decode_named_node(&service_name)?,
+             Query {
+                 inner: spargebra::Query::Select {
+                     dataset: None,
+                     pattern: graph_pattern.clone(),
+                     #[allow(clippy::useless_asref)]
+                     base_iri: self.base_iri.as_ref().map(|iri| iri.as_ref().clone()),
+                 },
+                 dataset: QueryDataset::new(),
+                 parsing_duration: None,
+             },
+         )? {
+             Ok(encode_bindings(Rc::clone(&self.dataset), variables, iter))
+         } else {
+             Err(EvaluationError::ServiceDoesNotReturnSolutions)
+         }
+     }
+
+     #[allow(clippy::redundant_closure)] // False positive in 1.60
+     fn accumulator_builder(
+         dataset: &Rc<DatasetView>,
+         expression: &AggregateExpression,
+     ) -> Box<dyn Fn() -> Box<dyn Accumulator>> {
+         let mut accumulator: Box<dyn Fn() -> Box<dyn Accumulator>> = match expression {
+             AggregateExpression::CountSolutions { .. } => {
+                 Box::new(|| Box::<CountAccumulator>::default())
+             }
+             AggregateExpression::FunctionCall { name, .. } => match name {
+                 AggregateFunction::Count => Box::new(|| Box::<CountAccumulator>::default()),
+                 AggregateFunction::Sum => Box::new(|| Box::<SumAccumulator>::default()),
+                 AggregateFunction::Min => {
+                     let dataset = Rc::clone(dataset);
+                     Box::new(move || Box::new(MinAccumulator::new(Rc::clone(&dataset))))
+                 }
+                 AggregateFunction::Max => {
+                     let dataset = Rc::clone(dataset);
+                     Box::new(move || Box::new(MaxAccumulator::new(Rc::clone(&dataset))))
+                 }
+                 AggregateFunction::Avg => Box::new(|| Box::<AvgAccumulator>::default()),
+                 AggregateFunction::Sample => Box::new(|| Box::<SampleAccumulator>::default()),
+                 AggregateFunction::GroupConcat { separator } => {
+                     let dataset = Rc::clone(dataset);
+                     let separator = Rc::from(separator.as_deref().unwrap_or(" "));
+                     Box::new(move || {
+                         Box::new(GroupConcatAccumulator::new(
+                             Rc::clone(&dataset),
+                             Rc::clone(&separator),
+                         ))
+                     })
+                 }
+                 AggregateFunction::Custom(_) => Box::new(|| Box::new(FailingAccumulator)),
+             },
+         };
+         if matches!(
+             expression,
+             AggregateExpression::CountSolutions { distinct: true }
+                 | AggregateExpression::FunctionCall { distinct: true, .. }
+         ) {
+             accumulator = Box::new(move || Box::new(Deduplicate::new(accumulator())));
+         }
+         accumulator
+     }
+
+     fn expression_evaluator(
+         &self,
+         expression: &Expression,
+         encoded_variables: &mut Vec<Variable>,
+         stat_children: &mut Vec<Rc<EvalNodeWithStats>>,
+     ) -> Rc<dyn Fn(&EncodedTuple) -> Option<EncodedTerm>> {
+         match expression {
+             Expression::NamedNode(t) => {
+                 let t = self.encode_term(t);
+                 Rc::new(move |_| Some(t.clone()))
+             }
+             Expression::Literal(t) => {
+                 let t = self.encode_term(t);
+                 Rc::new(move |_| Some(t.clone()))
+             }
+             Expression::Variable(v) => {
+                 let v = encode_variable(encoded_variables, v);
+                 Rc::new(move |tuple| tuple.get(v).cloned())
+             }
+             Expression::Bound(v) => {
+                 let v = encode_variable(encoded_variables, v);
+                 Rc::new(move |tuple| Some(tuple.contains(v).into()))
+             }
+             Expression::Exists(plan) => {
+                 let (eval, stats) = self.graph_pattern_evaluator(plan, encoded_variables);
+                 stat_children.push(stats);
+                 Rc::new(move |tuple| Some(eval(tuple.clone()).next().is_some().into()))
+             }
+             Expression::Or(inner) => {
+                 let children = inner
+                     .iter()
+                     .map(|i| self.expression_evaluator(i, encoded_variables, stat_children))
+                     .collect::<Rc<[_]>>();
+                 Rc::new(move |tuple| {
+                     let mut error = false;
+                     for child in &*children {
+                         match child(tuple).and_then(|v| to_bool(&v)) {
+                             Some(true) => return Some(true.into()),
+                             Some(false) => continue,
+                             None => error = true,
+                         }
+                     }
+                     if error {
+                         None
+                     } else {
+                         Some(false.into())
+                     }
+                 })
+             }
+             Expression::And(inner) => {
+                 let children = inner
+                     .iter()
+                     .map(|i| self.expression_evaluator(i, encoded_variables, stat_children))
+                     .collect::<Rc<[_]>>();
+                 Rc::new(move |tuple| {
+                     let mut error = false;
+                     for child in &*children {
+                         match child(tuple).and_then(|v| to_bool(&v)) {
+                             Some(true) => continue,
+                             Some(false) => return Some(false.into()),
+                             None => error = true,
+                         }
+                     }
+                     if error {
+                         None
+                     } else {
+                         Some(true.into())
+                     }
+                 })
+             }
+             Expression::Equal(a, b) => {
+                 let a = self.expression_evaluator(a, encoded_variables, stat_children);
+                 let b = self.expression_evaluator(b, encoded_variables, stat_children);
+                 Rc::new(move |tuple| equals(&a(tuple)?, &b(tuple)?).map(Into::into))
+             }
+             Expression::SameTerm(a, b) => {
+                 let a = self.expression_evaluator(a, encoded_variables, stat_children);
+                 let b = self.expression_evaluator(b, encoded_variables, stat_children);
+                 Rc::new(move |tuple| Some((a(tuple)? == b(tuple)?).into()))
+             }
+             Expression::Greater(a, b) => {
+                 let a = self.expression_evaluator(a, encoded_variables, stat_children);
+                 let b = self.expression_evaluator(b, encoded_variables, stat_children);
+                 let dataset = Rc::clone(&self.dataset);
+                 Rc::new(move |tuple| {
+                     Some(
+                         (partial_cmp(&dataset, &a(tuple)?, &b(tuple)?)? == Ordering::Greater)
+                             .into(),
+                     )
+                 })
+             }
+             Expression::GreaterOrEqual(a, b) => {
+                 let a = self.expression_evaluator(a, encoded_variables, stat_children);
+                 let b = self.expression_evaluator(b, encoded_variables, stat_children);
+                 let dataset = Rc::clone(&self.dataset);
+                 Rc::new(move |tuple| {
+                     Some(
+                         match partial_cmp(&dataset, &a(tuple)?, &b(tuple)?)? {
+                             Ordering::Greater | Ordering::Equal => true,
+                             Ordering::Less => false,
+                         }
+                         .into(),
+                     )
+                 })
+             }
+             Expression::Less(a, b) => {
+                 let a = self.expression_evaluator(a, encoded_variables, stat_children);
+                 let b = self.expression_evaluator(b, encoded_variables, stat_children);
+                 let dataset = Rc::clone(&self.dataset);
+                 Rc::new(move |tuple| {
+                     Some((partial_cmp(&dataset, &a(tuple)?, &b(tuple)?)?
== Ordering::Less).into()) + }) + } + Expression::LessOrEqual(a, b) => { + let a = self.expression_evaluator(a, encoded_variables, stat_children); + let b = self.expression_evaluator(b, encoded_variables, stat_children); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + Some( + match partial_cmp(&dataset, &a(tuple)?, &b(tuple)?)? { + Ordering::Less | Ordering::Equal => true, + Ordering::Greater => false, + } + .into(), + ) + }) + } + Expression::Add(a, b) => { + let a = self.expression_evaluator(a, encoded_variables, stat_children); + let b = self.expression_evaluator(b, encoded_variables, stat_children); + Rc::new( + move |tuple| match NumericBinaryOperands::new(a(tuple)?, b(tuple)?)? { + NumericBinaryOperands::Float(v1, v2) => Some((v1 + v2).into()), + NumericBinaryOperands::Double(v1, v2) => Some((v1 + v2).into()), + NumericBinaryOperands::Integer(v1, v2) => Some(v1.checked_add(v2)?.into()), + NumericBinaryOperands::Decimal(v1, v2) => Some(v1.checked_add(v2)?.into()), + NumericBinaryOperands::Duration(v1, v2) => Some(v1.checked_add(v2)?.into()), + NumericBinaryOperands::YearMonthDuration(v1, v2) => { + Some(v1.checked_add(v2)?.into()) + } + NumericBinaryOperands::DayTimeDuration(v1, v2) => { + Some(v1.checked_add(v2)?.into()) + } + NumericBinaryOperands::DateTimeDuration(v1, v2) => { + Some(v1.checked_add_duration(v2)?.into()) + } + NumericBinaryOperands::DateTimeYearMonthDuration(v1, v2) => { + Some(v1.checked_add_year_month_duration(v2)?.into()) + } + NumericBinaryOperands::DateTimeDayTimeDuration(v1, v2) => { + Some(v1.checked_add_day_time_duration(v2)?.into()) + } + NumericBinaryOperands::DateDuration(v1, v2) => { + Some(v1.checked_add_duration(v2)?.into()) + } + NumericBinaryOperands::DateYearMonthDuration(v1, v2) => { + Some(v1.checked_add_year_month_duration(v2)?.into()) + } + NumericBinaryOperands::DateDayTimeDuration(v1, v2) => { + Some(v1.checked_add_day_time_duration(v2)?.into()) + } + NumericBinaryOperands::TimeDuration(v1, v2) => { + Some(v1.checked_add_duration(v2)?.into()) + } + NumericBinaryOperands::TimeDayTimeDuration(v1, v2) => { + Some(v1.checked_add_day_time_duration(v2)?.into()) + } + NumericBinaryOperands::DateTime(_, _) + | NumericBinaryOperands::Time(_, _) + | NumericBinaryOperands::Date(_, _) => None, + }, + ) + } + Expression::Subtract(a, b) => { + let a = self.expression_evaluator(a, encoded_variables, stat_children); + let b = self.expression_evaluator(b, encoded_variables, stat_children); + Rc::new(move |tuple| { + Some(match NumericBinaryOperands::new(a(tuple)?, b(tuple)?)? 
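+ // Editorial note, not part of the original patch: all integer/decimal branches
+ // in these arithmetic arms use the checked_* family, so overflow and division
+ // by zero surface as None (a SPARQL evaluation error) instead of a panic:
+ //
+ //     assert_eq!(i64::MAX.checked_add(1), None); // overflow -> None
+ //     assert_eq!(10i64.checked_div(0), None);    // division by zero -> None
+ //     assert_eq!((-5i64).checked_neg(), Some(5));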
{ + NumericBinaryOperands::Float(v1, v2) => (v1 - v2).into(), + NumericBinaryOperands::Double(v1, v2) => (v1 - v2).into(), + NumericBinaryOperands::Integer(v1, v2) => v1.checked_sub(v2)?.into(), + NumericBinaryOperands::Decimal(v1, v2) => v1.checked_sub(v2)?.into(), + NumericBinaryOperands::DateTime(v1, v2) => v1.checked_sub(v2)?.into(), + NumericBinaryOperands::Date(v1, v2) => v1.checked_sub(v2)?.into(), + NumericBinaryOperands::Time(v1, v2) => v1.checked_sub(v2)?.into(), + NumericBinaryOperands::Duration(v1, v2) => v1.checked_sub(v2)?.into(), + NumericBinaryOperands::YearMonthDuration(v1, v2) => { + v1.checked_sub(v2)?.into() + } + NumericBinaryOperands::DayTimeDuration(v1, v2) => { + v1.checked_sub(v2)?.into() + } + NumericBinaryOperands::DateTimeDuration(v1, v2) => { + v1.checked_sub_duration(v2)?.into() + } + NumericBinaryOperands::DateTimeYearMonthDuration(v1, v2) => { + v1.checked_sub_year_month_duration(v2)?.into() + } + NumericBinaryOperands::DateTimeDayTimeDuration(v1, v2) => { + v1.checked_sub_day_time_duration(v2)?.into() + } + NumericBinaryOperands::DateDuration(v1, v2) => { + v1.checked_sub_duration(v2)?.into() + } + NumericBinaryOperands::DateYearMonthDuration(v1, v2) => { + v1.checked_sub_year_month_duration(v2)?.into() + } + NumericBinaryOperands::DateDayTimeDuration(v1, v2) => { + v1.checked_sub_day_time_duration(v2)?.into() + } + NumericBinaryOperands::TimeDuration(v1, v2) => { + v1.checked_sub_duration(v2)?.into() + } + NumericBinaryOperands::TimeDayTimeDuration(v1, v2) => { + v1.checked_sub_day_time_duration(v2)?.into() + } + }) + }) + } + Expression::Multiply(a, b) => { + let a = self.expression_evaluator(a, encoded_variables, stat_children); + let b = self.expression_evaluator(b, encoded_variables, stat_children); + Rc::new( + move |tuple| match NumericBinaryOperands::new(a(tuple)?, b(tuple)?)? { + NumericBinaryOperands::Float(v1, v2) => Some((v1 * v2).into()), + NumericBinaryOperands::Double(v1, v2) => Some((v1 * v2).into()), + NumericBinaryOperands::Integer(v1, v2) => Some(v1.checked_mul(v2)?.into()), + NumericBinaryOperands::Decimal(v1, v2) => Some(v1.checked_mul(v2)?.into()), + _ => None, + }, + ) + } + Expression::Divide(a, b) => { + let a = self.expression_evaluator(a, encoded_variables, stat_children); + let b = self.expression_evaluator(b, encoded_variables, stat_children); + Rc::new( + move |tuple| match NumericBinaryOperands::new(a(tuple)?, b(tuple)?)? { + NumericBinaryOperands::Float(v1, v2) => Some((v1 / v2).into()), + NumericBinaryOperands::Double(v1, v2) => Some((v1 / v2).into()), + NumericBinaryOperands::Integer(v1, v2) => { + Some(Decimal::from(v1).checked_div(v2)?.into()) + } + NumericBinaryOperands::Decimal(v1, v2) => Some(v1.checked_div(v2)?.into()), + _ => None, + }, + ) + } + Expression::UnaryPlus(e) => { + let e = self.expression_evaluator(e, encoded_variables, stat_children); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::FloatLiteral(value) => Some(value.into()), + EncodedTerm::DoubleLiteral(value) => Some(value.into()), + EncodedTerm::IntegerLiteral(value) => Some(value.into()), + EncodedTerm::DecimalLiteral(value) => Some(value.into()), + EncodedTerm::DurationLiteral(value) => Some(value.into()), + EncodedTerm::YearMonthDurationLiteral(value) => Some(value.into()), + EncodedTerm::DayTimeDurationLiteral(value) => Some(value.into()), + _ => None, + }) + } + Expression::UnaryMinus(e) => { + let e = self.expression_evaluator(e, encoded_variables, stat_children); + Rc::new(move |tuple| match e(tuple)? 
{ + EncodedTerm::FloatLiteral(value) => Some((-value).into()), + EncodedTerm::DoubleLiteral(value) => Some((-value).into()), + EncodedTerm::IntegerLiteral(value) => Some(value.checked_neg()?.into()), + EncodedTerm::DecimalLiteral(value) => Some(value.checked_neg()?.into()), + EncodedTerm::DurationLiteral(value) => Some(value.checked_neg()?.into()), + EncodedTerm::YearMonthDurationLiteral(value) => { + Some(value.checked_neg()?.into()) + } + EncodedTerm::DayTimeDurationLiteral(value) => Some(value.checked_neg()?.into()), + _ => None, + }) + } + Expression::Not(e) => { + let e = self.expression_evaluator(e, encoded_variables, stat_children); + Rc::new(move |tuple| to_bool(&e(tuple)?).map(|v| (!v).into())) + } + Expression::Coalesce(l) => { + let l: Vec<_> = l + .iter() + .map(|e| self.expression_evaluator(e, encoded_variables, stat_children)) + .collect(); + Rc::new(move |tuple| { + for e in &l { + if let Some(result) = e(tuple) { + return Some(result); + } + } + None + }) + } + Expression::If(a, b, c) => { + let a = self.expression_evaluator(a, encoded_variables, stat_children); + let b = self.expression_evaluator(b, encoded_variables, stat_children); + let c = self.expression_evaluator(c, encoded_variables, stat_children); + Rc::new(move |tuple| { + if to_bool(&a(tuple)?)? { + b(tuple) + } else { + c(tuple) + } + }) + } + Expression::FunctionCall(function, parameters) => { + match function { + Function::Str => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + Some(build_string_literal_from_id(to_string_id( + &dataset, + &e(tuple)?, + )?)) + }) + } + Function::Lang => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::SmallSmallLangStringLiteral { language, .. } + | EncodedTerm::BigSmallLangStringLiteral { language, .. } => { + Some(build_string_literal_from_id(language.into())) + } + EncodedTerm::SmallBigLangStringLiteral { language_id, .. } + | EncodedTerm::BigBigLangStringLiteral { language_id, .. 
} => { + Some(build_string_literal_from_id(language_id.into())) + } + e if e.is_literal() => Some(build_string_literal(&dataset, "")), + _ => None, + }) + } + Function::LangMatches => { + let language_tag = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let language_range = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let mut language_tag = + to_simple_string(&dataset, &language_tag(tuple)?)?; + language_tag.make_ascii_lowercase(); + let mut language_range = + to_simple_string(&dataset, &language_range(tuple)?)?; + language_range.make_ascii_lowercase(); + Some( + if &*language_range == "*" { + !language_tag.is_empty() + } else { + !ZipLongest::new( + language_range.split('-'), + language_tag.split('-'), + ) + .any(|parts| match parts { + (Some(range_subtag), Some(language_subtag)) => { + range_subtag != language_subtag + } + (Some(_), None) => true, + (None, _) => false, + }) + } + .into(), + ) + }) + } + Function::Datatype => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| datatype(&dataset, &e(tuple)?)) + } + Function::Iri => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + let base_iri = self.base_iri.clone(); + Rc::new(move |tuple| { + let e = e(tuple)?; + if e.is_named_node() { + Some(e) + } else { + let iri = to_simple_string(&dataset, &e)?; + Some(build_named_node( + &dataset, + &if let Some(base_iri) = &base_iri { + base_iri.resolve(&iri) + } else { + Iri::parse(iri) + } + .ok()? + .into_inner(), + )) + } + }) + } + Function::BNode => match parameters.first() { + Some(id) => { + let id = + self.expression_evaluator(id, encoded_variables, stat_children); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + Some( + dataset.encode_term( + BlankNode::new(to_simple_string(&dataset, &id(tuple)?)?) + .ok()? + .as_ref(), + ), + ) + }) + } + None => Rc::new(|_| { + Some(EncodedTerm::NumericalBlankNode { + id: random::(), + }) + }), + }, + Function::Rand => Rc::new(|_| Some(random::().into())), + Function::Abs => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::IntegerLiteral(value) => Some(value.checked_abs()?.into()), + EncodedTerm::DecimalLiteral(value) => Some(value.checked_abs()?.into()), + EncodedTerm::FloatLiteral(value) => Some(value.abs().into()), + EncodedTerm::DoubleLiteral(value) => Some(value.abs().into()), + _ => None, + }) + } + Function::Ceil => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::IntegerLiteral(value) => Some(value.into()), + EncodedTerm::DecimalLiteral(value) => { + Some(value.checked_ceil()?.into()) + } + EncodedTerm::FloatLiteral(value) => Some(value.ceil().into()), + EncodedTerm::DoubleLiteral(value) => Some(value.ceil().into()), + _ => None, + }) + } + Function::Floor => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? 
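+ // Editorial note, not part of the original patch: the LangMatches arm above
+ // implements RFC 4647 "basic filtering": "*" matches any non-empty tag, and
+ // otherwise the range must match the tag subtag-by-subtag, case-insensitively:
+ //
+ //     langMatches("en-US", "en")    -> true  (range is a subtag prefix)
+ //     langMatches("en-US", "en-GB") -> false
+ //     langMatches("fr", "*")        -> true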
{ + EncodedTerm::IntegerLiteral(value) => Some(value.into()), + EncodedTerm::DecimalLiteral(value) => { + Some(value.checked_floor()?.into()) + } + EncodedTerm::FloatLiteral(value) => Some(value.floor().into()), + EncodedTerm::DoubleLiteral(value) => Some(value.floor().into()), + _ => None, + }) + } + Function::Round => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::IntegerLiteral(value) => Some(value.into()), + EncodedTerm::DecimalLiteral(value) => { + Some(value.checked_round()?.into()) + } + EncodedTerm::FloatLiteral(value) => Some(value.round().into()), + EncodedTerm::DoubleLiteral(value) => Some(value.round().into()), + _ => None, + }) + } + Function::Concat => { + let l: Vec<_> = parameters + .iter() + .map(|e| self.expression_evaluator(e, encoded_variables, stat_children)) + .collect(); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let mut result = String::default(); + let mut language = None; + for e in &l { + let (value, e_language) = + to_string_and_language(&dataset, &e(tuple)?)?; + if let Some(lang) = language { + if lang != e_language { + language = Some(None) + } + } else { + language = Some(e_language) + } + result += &value + } + Some(build_plain_literal( + &dataset, + &result, + language.and_then(|v| v), + )) + }) + } + Function::SubStr => { + let source = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let starting_loc = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let length = parameters.get(2).map(|l| { + self.expression_evaluator(l, encoded_variables, stat_children) + }); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (source, language) = + to_string_and_language(&dataset, &source(tuple)?)?; + + let starting_location: usize = + if let EncodedTerm::IntegerLiteral(v) = starting_loc(tuple)? { + i64::from(v).try_into().ok()? + } else { + return None; + }; + let length: Option = if let Some(length) = &length { + if let EncodedTerm::IntegerLiteral(v) = length(tuple)? { + Some(i64::from(v).try_into().ok()?) + } else { + return None; + } + } else { + None + }; + + // We want to slice on char indices, not byte indices + let mut start_iter = source + .char_indices() + .skip(starting_location.checked_sub(1)?) + .peekable(); + let result = + if let Some((start_position, _)) = start_iter.peek().copied() { + if let Some(length) = length { + let mut end_iter = start_iter.skip(length).peekable(); + if let Some((end_position, _)) = end_iter.peek() { + &source[start_position..*end_position] + } else { + &source[start_position..] + } + } else { + &source[start_position..] + } + } else { + "" + }; + Some(build_plain_literal(&dataset, result, language)) + }) + } + Function::StrLen => { + let arg = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + Some( + i64::try_from(to_string(&dataset, &arg(tuple)?)?.chars().count()) + .ok()? 
+ .into(), + ) + }) + } + Function::Replace => { + let arg = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let replacement = self.expression_evaluator( + ¶meters[2], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + if let Some(regex) = + compile_static_pattern_if_exists(¶meters[1], parameters.get(3)) + { + Rc::new(move |tuple| { + let (text, language) = + to_string_and_language(&dataset, &arg(tuple)?)?; + let replacement = to_simple_string(&dataset, &replacement(tuple)?)?; + Some(build_plain_literal( + &dataset, + ®ex.replace_all(&text, replacement.as_str()), + language, + )) + }) + } else { + let pattern = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let flags = parameters.get(3).map(|flags| { + self.expression_evaluator(flags, encoded_variables, stat_children) + }); + Rc::new(move |tuple| { + let pattern = to_simple_string(&dataset, &pattern(tuple)?)?; + let options = if let Some(flags) = &flags { + Some(to_simple_string(&dataset, &flags(tuple)?)?) + } else { + None + }; + let regex = compile_pattern(&pattern, options.as_deref())?; + let (text, language) = + to_string_and_language(&dataset, &arg(tuple)?)?; + let replacement = to_simple_string(&dataset, &replacement(tuple)?)?; + Some(build_plain_literal( + &dataset, + ®ex.replace_all(&text, replacement.as_str()), + language, + )) + }) + } + } + Function::UCase => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (value, language) = to_string_and_language(&dataset, &e(tuple)?)?; + Some(build_plain_literal( + &dataset, + &value.to_uppercase(), + language, + )) + }) + } + Function::LCase => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (value, language) = to_string_and_language(&dataset, &e(tuple)?)?; + Some(build_plain_literal( + &dataset, + &value.to_lowercase(), + language, + )) + }) + } + Function::StrStarts => { + let arg1 = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let arg2 = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (arg1, arg2, _) = to_argument_compatible_strings( + &dataset, + &arg1(tuple)?, + &arg2(tuple)?, + )?; + Some(arg1.starts_with(arg2.as_str()).into()) + }) + } + Function::EncodeForUri => { + let ltrl = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let ltlr = to_string(&dataset, <rl(tuple)?)?; + let mut result = Vec::with_capacity(ltlr.len()); + for c in ltlr.bytes() { + match c { + b'A'..=b'Z' + | b'a'..=b'z' + | b'0'..=b'9' + | b'-' + | b'_' + | b'.' 
+ | b'~' => result.push(c), + _ => { + result.push(b'%'); + let high = c / 16; + let low = c % 16; + result.push(if high < 10 { + b'0' + high + } else { + b'A' + (high - 10) + }); + result.push(if low < 10 { + b'0' + low + } else { + b'A' + (low - 10) + }); + } + } + } + Some(build_string_literal( + &dataset, + str::from_utf8(&result).ok()?, + )) + }) + } + Function::StrEnds => { + let arg1 = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let arg2 = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (arg1, arg2, _) = to_argument_compatible_strings( + &dataset, + &arg1(tuple)?, + &arg2(tuple)?, + )?; + Some(arg1.ends_with(arg2.as_str()).into()) + }) + } + Function::Contains => { + let arg1 = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let arg2 = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (arg1, arg2, _) = to_argument_compatible_strings( + &dataset, + &arg1(tuple)?, + &arg2(tuple)?, + )?; + Some(arg1.contains(arg2.as_str()).into()) + }) + } + Function::StrBefore => { + let arg1 = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let arg2 = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (arg1, arg2, language) = to_argument_compatible_strings( + &dataset, + &arg1(tuple)?, + &arg2(tuple)?, + )?; + Some(if let Some(position) = arg1.find(arg2.as_str()) { + build_plain_literal(&dataset, &arg1[..position], language) + } else { + build_string_literal(&dataset, "") + }) + }) + } + Function::StrAfter => { + let arg1 = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let arg2 = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let (arg1, arg2, language) = to_argument_compatible_strings( + &dataset, + &arg1(tuple)?, + &arg2(tuple)?, + )?; + Some(if let Some(position) = arg1.find(arg2.as_str()) { + build_plain_literal( + &dataset, + &arg1[position + arg2.len()..], + language, + ) + } else { + build_string_literal(&dataset, "") + }) + }) + } + Function::Year => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DateTimeLiteral(date_time) => { + Some(date_time.year().into()) + } + EncodedTerm::DateLiteral(date) => Some(date.year().into()), + EncodedTerm::GYearMonthLiteral(year_month) => { + Some(year_month.year().into()) + } + EncodedTerm::GYearLiteral(year) => Some(year.year().into()), + _ => None, + }) + } + Function::Month => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? 
{ + EncodedTerm::DateTimeLiteral(date_time) => { + Some(date_time.month().into()) + } + EncodedTerm::DateLiteral(date) => Some(date.month().into()), + EncodedTerm::GYearMonthLiteral(year_month) => { + Some(year_month.month().into()) + } + EncodedTerm::GMonthDayLiteral(month_day) => { + Some(month_day.month().into()) + } + EncodedTerm::GMonthLiteral(month) => Some(month.month().into()), + _ => None, + }) + } + Function::Day => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DateTimeLiteral(date_time) => Some(date_time.day().into()), + EncodedTerm::DateLiteral(date) => Some(date.day().into()), + EncodedTerm::GMonthDayLiteral(month_day) => { + Some(month_day.day().into()) + } + EncodedTerm::GDayLiteral(day) => Some(day.day().into()), + _ => None, + }) + } + Function::Hours => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DateTimeLiteral(date_time) => { + Some(date_time.hour().into()) + } + EncodedTerm::TimeLiteral(time) => Some(time.hour().into()), + _ => None, + }) + } + Function::Minutes => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DateTimeLiteral(date_time) => { + Some(date_time.minute().into()) + } + EncodedTerm::TimeLiteral(time) => Some(time.minute().into()), + _ => None, + }) + } + Function::Seconds => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DateTimeLiteral(date_time) => { + Some(date_time.second().into()) + } + EncodedTerm::TimeLiteral(time) => Some(time.second().into()), + _ => None, + }) + } + Function::Timezone => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| { + Some( + match e(tuple)? { + EncodedTerm::DateTimeLiteral(date_time) => date_time.timezone(), + EncodedTerm::TimeLiteral(time) => time.timezone(), + EncodedTerm::DateLiteral(date) => date.timezone(), + EncodedTerm::GYearMonthLiteral(year_month) => { + year_month.timezone() + } + EncodedTerm::GYearLiteral(year) => year.timezone(), + EncodedTerm::GMonthDayLiteral(month_day) => { + month_day.timezone() + } + EncodedTerm::GDayLiteral(day) => day.timezone(), + EncodedTerm::GMonthLiteral(month) => month.timezone(), + _ => None, + }? + .into(), + ) + }) + } + Function::Tz => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let timezone_offset = match e(tuple)? 
{ + EncodedTerm::DateTimeLiteral(date_time) => { + date_time.timezone_offset() + } + EncodedTerm::TimeLiteral(time) => time.timezone_offset(), + EncodedTerm::DateLiteral(date) => date.timezone_offset(), + EncodedTerm::GYearMonthLiteral(year_month) => { + year_month.timezone_offset() + } + EncodedTerm::GYearLiteral(year) => year.timezone_offset(), + EncodedTerm::GMonthDayLiteral(month_day) => { + month_day.timezone_offset() + } + EncodedTerm::GDayLiteral(day) => day.timezone_offset(), + EncodedTerm::GMonthLiteral(month) => month.timezone_offset(), + _ => return None, + }; + Some(match timezone_offset { + Some(timezone_offset) => { + build_string_literal(&dataset, &timezone_offset.to_string()) + } + None => build_string_literal(&dataset, ""), + }) + }) + } + Function::Adjust => { + let dt = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let tz = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| { + let timezone_offset = Some( + match tz(tuple)? { + EncodedTerm::DayTimeDurationLiteral(tz) => { + TimezoneOffset::try_from(tz) + } + EncodedTerm::DurationLiteral(tz) => { + TimezoneOffset::try_from(tz) + } + _ => return None, + } + .ok()?, + ); + Some(match dt(tuple)? { + EncodedTerm::DateTimeLiteral(date_time) => { + date_time.adjust(timezone_offset)?.into() + } + EncodedTerm::TimeLiteral(time) => { + time.adjust(timezone_offset)?.into() + } + EncodedTerm::DateLiteral(date) => { + date.adjust(timezone_offset)?.into() + } + EncodedTerm::GYearMonthLiteral(year_month) => { + year_month.adjust(timezone_offset)?.into() + } + EncodedTerm::GYearLiteral(year) => { + year.adjust(timezone_offset)?.into() + } + EncodedTerm::GMonthDayLiteral(month_day) => { + month_day.adjust(timezone_offset)?.into() + } + EncodedTerm::GDayLiteral(day) => { + day.adjust(timezone_offset)?.into() + } + EncodedTerm::GMonthLiteral(month) => { + month.adjust(timezone_offset)?.into() + } + _ => return None, + }) + }) + } + Function::Now => { + let now = self.now; + Rc::new(move |_| Some(now.into())) + } + Function::Uuid => { + let dataset = Rc::clone(&self.dataset); + Rc::new(move |_| { + let mut buffer = String::with_capacity(44); + buffer.push_str("urn:uuid:"); + generate_uuid(&mut buffer); + Some(build_named_node(&dataset, &buffer)) + }) + } + Function::StrUuid => { + let dataset = Rc::clone(&self.dataset); + Rc::new(move |_| { + let mut buffer = String::with_capacity(36); + generate_uuid(&mut buffer); + Some(build_string_literal(&dataset, &buffer)) + }) + } + Function::Md5 => self.hash::(parameters, encoded_variables, stat_children), + Function::Sha1 => { + self.hash::(parameters, encoded_variables, stat_children) + } + Function::Sha256 => { + self.hash::(parameters, encoded_variables, stat_children) + } + Function::Sha384 => { + self.hash::(parameters, encoded_variables, stat_children) + } + Function::Sha512 => { + self.hash::(parameters, encoded_variables, stat_children) + } + Function::StrLang => { + let lexical_form = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let lang_tag = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + Some(build_lang_string_literal_from_id( + to_simple_string_id(&lexical_form(tuple)?)?, + build_language_id(&dataset, &lang_tag(tuple)?)?, + )) + }) + } + Function::StrDt => { + let lexical_form = self.expression_evaluator( + ¶meters[0], + encoded_variables, + 
stat_children, + ); + let datatype = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let value = to_simple_string(&dataset, &lexical_form(tuple)?)?; + let datatype = + if let EncodedTerm::NamedNode { iri_id } = datatype(tuple)? { + dataset.get_str(&iri_id).ok()? + } else { + None + }?; + Some(dataset.encode_term(LiteralRef::new_typed_literal( + &value, + NamedNodeRef::new_unchecked(&datatype), + ))) + }) + } + Function::IsIri => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| Some(e(tuple)?.is_named_node().into())) + } + Function::IsBlank => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| Some(e(tuple)?.is_blank_node().into())) + } + Function::IsLiteral => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| Some(e(tuple)?.is_literal().into())) + } + Function::IsNumeric => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| { + Some( + matches!( + e(tuple)?, + EncodedTerm::FloatLiteral(_) + | EncodedTerm::DoubleLiteral(_) + | EncodedTerm::IntegerLiteral(_) + | EncodedTerm::DecimalLiteral(_) + ) + .into(), + ) + }) + } + Function::Regex => { + let text = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + if let Some(regex) = + compile_static_pattern_if_exists(¶meters[1], parameters.get(2)) + { + Rc::new(move |tuple| { + let text = to_string(&dataset, &text(tuple)?)?; + Some(regex.is_match(&text).into()) + }) + } else { + let pattern = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let flags = parameters.get(2).map(|flags| { + self.expression_evaluator(flags, encoded_variables, stat_children) + }); + Rc::new(move |tuple| { + let pattern = to_simple_string(&dataset, &pattern(tuple)?)?; + let options = if let Some(flags) = &flags { + Some(to_simple_string(&dataset, &flags(tuple)?)?) + } else { + None + }; + let regex = compile_pattern(&pattern, options.as_deref())?; + let text = to_string(&dataset, &text(tuple)?)?; + Some(regex.is_match(&text).into()) + }) + } + } + Function::Triple => { + let s = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let p = self.expression_evaluator( + ¶meters[1], + encoded_variables, + stat_children, + ); + let o = self.expression_evaluator( + ¶meters[2], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| { + let s = s(tuple)?; + let p = p(tuple)?; + let o = o(tuple)?; + (!s.is_literal() + && !s.is_default_graph() + && p.is_named_node() + && !o.is_default_graph()) + .then(|| EncodedTriple::new(s, p, o).into()) + }) + } + Function::Subject => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| { + if let EncodedTerm::Triple(t) = e(tuple)? { + Some(t.subject.clone()) + } else { + None + } + }) + } + Function::Predicate => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| { + if let EncodedTerm::Triple(t) = e(tuple)? 
{ + Some(t.predicate.clone()) + } else { + None + } + }) + } + Function::Object => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| { + if let EncodedTerm::Triple(t) = e(tuple)? { + Some(t.object.clone()) + } else { + None + } + }) + } + Function::IsTriple => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| Some(e(tuple)?.is_triple().into())) + } + Function::Custom(function_name) => { + if let Some(function) = self.custom_functions.get(function_name).cloned() { + let args = parameters + .iter() + .map(|e| { + self.expression_evaluator(e, encoded_variables, stat_children) + }) + .collect::>(); + let dataset = Rc::clone(&self.dataset); + return Rc::new(move |tuple| { + let args = args + .iter() + .map(|f| dataset.decode_term(&f(tuple)?).ok()) + .collect::>>()?; + Some(dataset.encode_term(&function(&args)?)) + }); + } + match function_name.as_ref() { + xsd::STRING => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + Some(build_string_literal_from_id(to_string_id( + &dataset, + &e(tuple)?, + )?)) + }) + } + xsd::BOOLEAN => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::BooleanLiteral(value) => Some(value.into()), + EncodedTerm::FloatLiteral(value) => { + Some(Boolean::from(value).into()) + } + EncodedTerm::DoubleLiteral(value) => { + Some(Boolean::from(value).into()) + } + EncodedTerm::IntegerLiteral(value) => { + Some(Boolean::from(value).into()) + } + EncodedTerm::DecimalLiteral(value) => { + Some(Boolean::from(value).into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_boolean_str(&value) + } + _ => None, + }) + } + xsd::DOUBLE => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::FloatLiteral(value) => { + Some(Double::from(value).into()) + } + EncodedTerm::DoubleLiteral(value) => Some(value.into()), + EncodedTerm::IntegerLiteral(value) => { + Some(Double::from(value).into()) + } + EncodedTerm::DecimalLiteral(value) => { + Some(Double::from(value).into()) + } + EncodedTerm::BooleanLiteral(value) => { + Some(Double::from(value).into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_double_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_double_str(&dataset.get_str(&value_id).ok()??) + } + _ => None, + }) + } + xsd::FLOAT => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::FloatLiteral(value) => Some(value.into()), + EncodedTerm::DoubleLiteral(value) => { + Some(Float::from(value).into()) + } + EncodedTerm::IntegerLiteral(value) => { + Some(Float::from(value).into()) + } + EncodedTerm::DecimalLiteral(value) => { + Some(Float::from(value).into()) + } + EncodedTerm::BooleanLiteral(value) => { + Some(Float::from(value).into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_float_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_float_str(&dataset.get_str(&value_id).ok()??) 
+ } + _ => None, + }) + } + xsd::INTEGER => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::FloatLiteral(value) => { + Some(Integer::try_from(value).ok()?.into()) + } + EncodedTerm::DoubleLiteral(value) => { + Some(Integer::try_from(value).ok()?.into()) + } + EncodedTerm::IntegerLiteral(value) => Some(value.into()), + EncodedTerm::DecimalLiteral(value) => { + Some(Integer::try_from(value).ok()?.into()) + } + EncodedTerm::BooleanLiteral(value) => { + Some(Integer::from(value).into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_integer_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_integer_str(&dataset.get_str(&value_id).ok()??) + } + _ => None, + }) + } + xsd::DECIMAL => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::FloatLiteral(value) => { + Some(Decimal::try_from(value).ok()?.into()) + } + EncodedTerm::DoubleLiteral(value) => { + Some(Decimal::try_from(value).ok()?.into()) + } + EncodedTerm::IntegerLiteral(value) => { + Some(Decimal::from(value).into()) + } + EncodedTerm::DecimalLiteral(value) => Some(value.into()), + EncodedTerm::BooleanLiteral(value) => { + Some(Decimal::from(value).into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_decimal_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_decimal_str(&dataset.get_str(&value_id).ok()??) + } + _ => None, + }) + } + xsd::DATE => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DateLiteral(value) => Some(value.into()), + EncodedTerm::DateTimeLiteral(value) => { + Some(Date::try_from(value).ok()?.into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_date_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_date_str(&dataset.get_str(&value_id).ok()??) + } + _ => None, + }) + } + xsd::TIME => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::TimeLiteral(value) => Some(value.into()), + EncodedTerm::DateTimeLiteral(value) => { + Some(Time::from(value).into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_time_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_time_str(&dataset.get_str(&value_id).ok()??) + } + _ => None, + }) + } + xsd::DATE_TIME => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DateTimeLiteral(value) => Some(value.into()), + EncodedTerm::DateLiteral(value) => { + Some(DateTime::try_from(value).ok()?.into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_date_time_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_date_time_str(&dataset.get_str(&value_id).ok()??) + } + _ => None, + }) + } + xsd::DURATION => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? 
{ + EncodedTerm::DurationLiteral(value) => Some(value.into()), + EncodedTerm::YearMonthDurationLiteral(value) => { + Some(Duration::from(value).into()) + } + EncodedTerm::DayTimeDurationLiteral(value) => { + Some(Duration::from(value).into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_duration_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_duration_str(&dataset.get_str(&value_id).ok()??) + } + _ => None, + }) + } + xsd::YEAR_MONTH_DURATION => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DurationLiteral(value) => { + Some(YearMonthDuration::try_from(value).ok()?.into()) + } + EncodedTerm::YearMonthDurationLiteral(value) => { + Some(value.into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_year_month_duration_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_year_month_duration_str( + &dataset.get_str(&value_id).ok()??, + ) + } + _ => None, + }) + } + xsd::DAY_TIME_DURATION => { + let e = self.expression_evaluator( + ¶meters[0], + encoded_variables, + stat_children, + ); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| match e(tuple)? { + EncodedTerm::DurationLiteral(value) => { + Some(DayTimeDuration::try_from(value).ok()?.into()) + } + EncodedTerm::DayTimeDurationLiteral(value) => { + Some(value.into()) + } + EncodedTerm::SmallStringLiteral(value) => { + parse_day_time_duration_str(&value) + } + EncodedTerm::BigStringLiteral { value_id } => { + parse_day_time_duration_str( + &dataset.get_str(&value_id).ok()??, + ) + } + _ => None, + }) + } + _ => Rc::new(|_| None), + } + } + } + } + } + } + + fn hash( + &self, + parameters: &[Expression], + encoded_variables: &mut Vec, + stat_children: &mut Vec>, + ) -> Rc Option> { + let arg = self.expression_evaluator(¶meters[0], encoded_variables, stat_children); + let dataset = Rc::clone(&self.dataset); + Rc::new(move |tuple| { + let input = to_simple_string(&dataset, &arg(tuple)?)?; + let hash = hex::encode(H::new().chain_update(input.as_str()).finalize()); + Some(build_string_literal(&dataset, &hash)) + }) + } + + fn encode_term<'b>(&self, term: impl Into>) -> EncodedTerm { + self.dataset.encode_term(term) + } + + fn encode_triple(&self, triple: &GroundTriple) -> EncodedTerm { + EncodedTriple::new( + match &triple.subject { + GroundSubject::NamedNode(node) => self.encode_term(node), + GroundSubject::Triple(triple) => self.encode_triple(triple), + }, + self.encode_term(&triple.predicate), + match &triple.object { + GroundTerm::NamedNode(node) => self.encode_term(node), + GroundTerm::Literal(literal) => self.encode_term(literal), + GroundTerm::Triple(triple) => self.encode_triple(triple), + }, + ) + .into() + } + + fn encode_property_path(&self, path: &PropertyPathExpression) -> Rc { + Rc::new(match path { + PropertyPathExpression::NamedNode(node) => PropertyPath::Path(self.encode_term(node)), + PropertyPathExpression::Reverse(p) => { + PropertyPath::Reverse(self.encode_property_path(p)) + } + PropertyPathExpression::Sequence(a, b) => { + PropertyPath::Sequence(self.encode_property_path(a), self.encode_property_path(b)) + } + PropertyPathExpression::Alternative(a, b) => PropertyPath::Alternative( + self.encode_property_path(a), + self.encode_property_path(b), + ), + PropertyPathExpression::ZeroOrMore(p) => { + PropertyPath::ZeroOrMore(self.encode_property_path(p)) + } + PropertyPathExpression::OneOrMore(p) => { 
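+ // Editorial note, not part of the original patch: the hash() helper above is
+ // generic over the digest crate's Digest trait, which is how MD5/SHA1/SHA256/
+ // SHA384/SHA512 share a single implementation. The same call shape in
+ // isolation (digest 0.10-style API, sha2 crate assumed):
+ //
+ //     use sha2::{Digest, Sha256};
+ //     let hex = hex::encode(Sha256::new().chain_update("abc").finalize());
+ //     assert_eq!(&hex[..8], "ba7816bf");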
+ PropertyPath::OneOrMore(self.encode_property_path(p)) + } + PropertyPathExpression::ZeroOrOne(p) => { + PropertyPath::ZeroOrOne(self.encode_property_path(p)) + } + PropertyPathExpression::NegatedPropertySet(ps) => { + PropertyPath::NegatedPropertySet(ps.iter().map(|p| self.encode_term(p)).collect()) + } + }) + } + + fn template_value_from_term_or_variable( + &self, + term_or_variable: &TermPattern, + variables: &mut Vec, + bnodes: &mut Vec, + ) -> TripleTemplateValue { + match term_or_variable { + TermPattern::Variable(variable) => { + TripleTemplateValue::Variable(encode_variable(variables, variable)) + } + TermPattern::NamedNode(node) => TripleTemplateValue::Constant(self.encode_term(node)), + TermPattern::BlankNode(bnode) => { + TripleTemplateValue::BlankNode(bnode_key(bnodes, bnode)) + } + TermPattern::Literal(literal) => { + TripleTemplateValue::Constant(self.encode_term(literal)) + } + TermPattern::Triple(triple) => match ( + self.template_value_from_term_or_variable(&triple.subject, variables, bnodes), + self.template_value_from_named_node_or_variable(&triple.predicate, variables), + self.template_value_from_term_or_variable(&triple.object, variables, bnodes), + ) { + ( + TripleTemplateValue::Constant(subject), + TripleTemplateValue::Constant(predicate), + TripleTemplateValue::Constant(object), + ) => TripleTemplateValue::Constant( + EncodedTriple { + subject, + predicate, + object, + } + .into(), + ), + (subject, predicate, object) => { + TripleTemplateValue::Triple(Box::new(TripleTemplate { + subject, + predicate, + object, + })) + } + }, + } + } + + fn template_value_from_named_node_or_variable( + &self, + named_node_or_variable: &NamedNodePattern, + variables: &mut Vec, + ) -> TripleTemplateValue { + match named_node_or_variable { + NamedNodePattern::Variable(variable) => { + TripleTemplateValue::Variable(encode_variable(variables, variable)) + } + NamedNodePattern::NamedNode(term) => { + TripleTemplateValue::Constant(self.encode_term(term)) + } + } + } +} + +fn to_bool(term: &EncodedTerm) -> Option { + match term { + EncodedTerm::BooleanLiteral(value) => Some((*value).into()), + EncodedTerm::SmallStringLiteral(value) => Some(!value.is_empty()), + EncodedTerm::BigStringLiteral { .. } => { + Some(false) // A big literal can't be empty + } + EncodedTerm::FloatLiteral(value) => Some(Boolean::from(*value).into()), + EncodedTerm::DoubleLiteral(value) => Some(Boolean::from(*value).into()), + EncodedTerm::IntegerLiteral(value) => Some(Boolean::from(*value).into()), + EncodedTerm::DecimalLiteral(value) => Some(Boolean::from(*value).into()), + _ => None, + } +} + +fn to_string_id(dataset: &DatasetView, term: &EncodedTerm) -> Option { + match term { + EncodedTerm::NamedNode { iri_id } => Some( + if let Ok(value) = SmallString::try_from(dataset.get_str(iri_id).ok()??.as_str()) { + value.into() + } else { + SmallStringOrId::Big(*iri_id) + }, + ), + EncodedTerm::DefaultGraph + | EncodedTerm::NumericalBlankNode { .. } + | EncodedTerm::SmallBlankNode { .. } + | EncodedTerm::BigBlankNode { .. } + | EncodedTerm::Triple(_) => None, + EncodedTerm::SmallStringLiteral(value) + | EncodedTerm::SmallSmallLangStringLiteral { value, .. } + | EncodedTerm::SmallBigLangStringLiteral { value, .. } + | EncodedTerm::SmallTypedLiteral { value, .. } => Some((*value).into()), + EncodedTerm::BigStringLiteral { value_id } + | EncodedTerm::BigSmallLangStringLiteral { value_id, .. } + | EncodedTerm::BigBigLangStringLiteral { value_id, .. } + | EncodedTerm::BigTypedLiteral { value_id, .. 
} => Some((*value_id).into()), + EncodedTerm::BooleanLiteral(value) => Some(build_string_id( + dataset, + if bool::from(*value) { "true" } else { "false" }, + )), + EncodedTerm::FloatLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::DoubleLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::IntegerLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::DecimalLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::DateTimeLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::TimeLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::DateLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::GYearMonthLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::GYearLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::GMonthDayLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::GDayLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::GMonthLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::DurationLiteral(value) => Some(build_string_id(dataset, &value.to_string())), + EncodedTerm::YearMonthDurationLiteral(value) => { + Some(build_string_id(dataset, &value.to_string())) + } + EncodedTerm::DayTimeDurationLiteral(value) => { + Some(build_string_id(dataset, &value.to_string())) + } + } +} + +fn to_simple_string(dataset: &DatasetView, term: &EncodedTerm) -> Option { + match term { + EncodedTerm::SmallStringLiteral(value) => Some((*value).into()), + EncodedTerm::BigStringLiteral { value_id } => dataset.get_str(value_id).ok()?, + _ => None, + } +} + +fn to_simple_string_id(term: &EncodedTerm) -> Option { + match term { + EncodedTerm::SmallStringLiteral(value) => Some((*value).into()), + EncodedTerm::BigStringLiteral { value_id } => Some((*value_id).into()), + _ => None, + } +} + +fn to_string(dataset: &DatasetView, term: &EncodedTerm) -> Option { + match term { + EncodedTerm::SmallStringLiteral(value) + | EncodedTerm::SmallSmallLangStringLiteral { value, .. } + | EncodedTerm::SmallBigLangStringLiteral { value, .. } => Some((*value).into()), + EncodedTerm::BigStringLiteral { value_id } + | EncodedTerm::BigSmallLangStringLiteral { value_id, .. } + | EncodedTerm::BigBigLangStringLiteral { value_id, .. } => { + dataset.get_str(value_id).ok()? 
+ } + _ => None, + } +} + +fn to_string_and_language( + dataset: &DatasetView, + term: &EncodedTerm, +) -> Option<(String, Option)> { + match term { + EncodedTerm::SmallStringLiteral(value) => Some(((*value).into(), None)), + EncodedTerm::BigStringLiteral { value_id } => { + Some((dataset.get_str(value_id).ok()??, None)) + } + EncodedTerm::SmallSmallLangStringLiteral { value, language } => { + Some(((*value).into(), Some((*language).into()))) + } + EncodedTerm::SmallBigLangStringLiteral { value, language_id } => { + Some(((*value).into(), Some((*language_id).into()))) + } + EncodedTerm::BigSmallLangStringLiteral { value_id, language } => { + Some((dataset.get_str(value_id).ok()??, Some((*language).into()))) + } + EncodedTerm::BigBigLangStringLiteral { + value_id, + language_id, + } => Some(( + dataset.get_str(value_id).ok()??, + Some((*language_id).into()), + )), + _ => None, + } +} + +fn build_named_node(dataset: &DatasetView, iri: &str) -> EncodedTerm { + dataset.encode_term(NamedNodeRef::new_unchecked(iri)) +} + +fn encode_named_node(dataset: &DatasetView, node: NamedNodeRef<'_>) -> EncodedTerm { + dataset.encode_term(node) +} + +fn build_string_literal(dataset: &DatasetView, value: &str) -> EncodedTerm { + build_string_literal_from_id(build_string_id(dataset, value)) +} + +fn build_string_literal_from_id(id: SmallStringOrId) -> EncodedTerm { + match id { + SmallStringOrId::Small(value) => EncodedTerm::SmallStringLiteral(value), + SmallStringOrId::Big(value_id) => EncodedTerm::BigStringLiteral { value_id }, + } +} + +fn build_lang_string_literal( + dataset: &DatasetView, + value: &str, + language_id: SmallStringOrId, +) -> EncodedTerm { + build_lang_string_literal_from_id(build_string_id(dataset, value), language_id) +} + +fn build_lang_string_literal_from_id( + value_id: SmallStringOrId, + language_id: SmallStringOrId, +) -> EncodedTerm { + match (value_id, language_id) { + (SmallStringOrId::Small(value), SmallStringOrId::Small(language)) => { + EncodedTerm::SmallSmallLangStringLiteral { value, language } + } + (SmallStringOrId::Small(value), SmallStringOrId::Big(language_id)) => { + EncodedTerm::SmallBigLangStringLiteral { value, language_id } + } + (SmallStringOrId::Big(value_id), SmallStringOrId::Small(language)) => { + EncodedTerm::BigSmallLangStringLiteral { value_id, language } + } + (SmallStringOrId::Big(value_id), SmallStringOrId::Big(language_id)) => { + EncodedTerm::BigBigLangStringLiteral { + value_id, + language_id, + } + } + } +} + +fn build_plain_literal( + dataset: &DatasetView, + value: &str, + language: Option, +) -> EncodedTerm { + if let Some(language_id) = language { + build_lang_string_literal(dataset, value, language_id) + } else { + build_string_literal(dataset, value) + } +} + +fn build_string_id(dataset: &DatasetView, value: &str) -> SmallStringOrId { + if let Ok(value) = SmallString::try_from(value) { + value.into() + } else { + let id = StrHash::new(value); + dataset.insert_str(&id, value); + SmallStringOrId::Big(id) + } +} + +fn build_language_id(dataset: &DatasetView, value: &EncodedTerm) -> Option { + let mut language = to_simple_string(dataset, value)?; + language.make_ascii_lowercase(); + Some(build_string_id( + dataset, + LanguageTag::parse(language).ok()?.as_str(), + )) +} + +fn to_argument_compatible_strings( + dataset: &DatasetView, + arg1: &EncodedTerm, + arg2: &EncodedTerm, +) -> Option<(String, String, Option)> { + let (value1, language1) = to_string_and_language(dataset, arg1)?; + let (value2, language2) = to_string_and_language(dataset, arg2)?; 
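+ // Note: the check below implements the SPARQL argument-compatibility rule: + // the second literal must be a simple/xsd:string literal or carry the same + // language tag as the first; any other pairing yields None (an error).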
+ (language2.is_none() || language1 == language2).then_some((value1, value2, language1)) +} + +fn compile_static_pattern_if_exists( + pattern: &Expression, + options: Option<&Expression>, +) -> Option { + let static_pattern = if let Expression::Literal(pattern) = pattern { + (pattern.datatype() == xsd::STRING).then(|| pattern.value()) + } else { + None + }; + let static_options = if let Some(options) = options { + if let Expression::Literal(options) = options { + (options.datatype() == xsd::STRING).then(|| Some(options.value())) + } else { + None + } + } else { + Some(None) + }; + if let (Some(static_pattern), Some(static_options)) = (static_pattern, static_options) { + compile_pattern(static_pattern, static_options) + } else { + None + } +} + +pub(super) fn compile_pattern(pattern: &str, flags: Option<&str>) -> Option { + let mut regex_builder = RegexBuilder::new(pattern); + regex_builder.size_limit(REGEX_SIZE_LIMIT); + if let Some(flags) = flags { + for flag in flags.chars() { + match flag { + 's' => { + regex_builder.dot_matches_new_line(true); + } + 'm' => { + regex_builder.multi_line(true); + } + 'i' => { + regex_builder.case_insensitive(true); + } + 'x' => { + regex_builder.ignore_whitespace(true); + } + _ => (), // TODO: implement q + } + } + } + regex_builder.build().ok() +} + +fn decode_bindings( + dataset: Rc, + iter: EncodedTuplesIterator, + variables: Arc<[Variable]>, +) -> QuerySolutionIter { + let tuple_size = variables.len(); + QuerySolutionIter::new( + variables, + Box::new(iter.map(move |values| { + let mut result = vec![None; tuple_size]; + for (i, value) in values?.iter().enumerate() { + if let Some(term) = value { + result[i] = Some(dataset.decode_term(&term)?) + } + } + Ok(result) + })), + ) +} + +// this is used to encode results from a BindingIterator into an EncodedTuplesIterator. This happens when SERVICE clauses are evaluated +fn encode_bindings( + dataset: Rc, + variables: Rc<[Variable]>, + iter: QuerySolutionIter, +) -> EncodedTuplesIterator { + Box::new(iter.map(move |solution| { + let mut encoded_terms = EncodedTuple::with_capacity(variables.len()); + for (variable, term) in solution?.iter() { + put_variable_value( + variable, + &variables, + dataset.encode_term(term), + &mut encoded_terms, + ) + } + Ok(encoded_terms) + })) +} + +fn equals(a: &EncodedTerm, b: &EncodedTerm) -> Option { + match a { + EncodedTerm::DefaultGraph + | EncodedTerm::NamedNode { .. } + | EncodedTerm::NumericalBlankNode { .. } + | EncodedTerm::SmallBlankNode { .. } + | EncodedTerm::BigBlankNode { .. } + | EncodedTerm::SmallSmallLangStringLiteral { .. } + | EncodedTerm::SmallBigLangStringLiteral { .. } + | EncodedTerm::BigSmallLangStringLiteral { .. } + | EncodedTerm::BigBigLangStringLiteral { .. } => Some(a == b), + EncodedTerm::SmallStringLiteral(a) => match b { + EncodedTerm::SmallStringLiteral(b) => Some(a == b), + EncodedTerm::SmallTypedLiteral { .. } | EncodedTerm::BigTypedLiteral { .. } => None, + _ => Some(false), + }, + EncodedTerm::BigStringLiteral { value_id: a } => match b { + EncodedTerm::BigStringLiteral { value_id: b } => Some(a == b), + EncodedTerm::SmallTypedLiteral { .. } | EncodedTerm::BigTypedLiteral { .. } => None, + _ => Some(false), + }, + EncodedTerm::SmallTypedLiteral { .. } => match b { + EncodedTerm::SmallTypedLiteral { .. } if a == b => Some(true), + EncodedTerm::NamedNode { .. } + | EncodedTerm::NumericalBlankNode { .. } + | EncodedTerm::SmallBlankNode { .. } + | EncodedTerm::BigBlankNode { .. } + | EncodedTerm::SmallSmallLangStringLiteral { .. 
} + | EncodedTerm::SmallBigLangStringLiteral { .. } + | EncodedTerm::BigSmallLangStringLiteral { .. } + | EncodedTerm::BigBigLangStringLiteral { .. } + | EncodedTerm::BigTypedLiteral { .. } => Some(false), + _ => None, + }, + EncodedTerm::BigTypedLiteral { .. } => match b { + EncodedTerm::BigTypedLiteral { .. } if a == b => Some(true), + EncodedTerm::NamedNode { .. } + | EncodedTerm::NumericalBlankNode { .. } + | EncodedTerm::SmallBlankNode { .. } + | EncodedTerm::BigBlankNode { .. } + | EncodedTerm::SmallSmallLangStringLiteral { .. } + | EncodedTerm::SmallBigLangStringLiteral { .. } + | EncodedTerm::BigSmallLangStringLiteral { .. } + | EncodedTerm::BigBigLangStringLiteral { .. } + | EncodedTerm::SmallTypedLiteral { .. } => Some(false), + _ => None, + }, + EncodedTerm::BooleanLiteral(a) => match b { + EncodedTerm::BooleanLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::FloatLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => Some(a == b), + EncodedTerm::DoubleLiteral(b) => Some(Double::from(*a) == *b), + EncodedTerm::IntegerLiteral(b) => Some(*a == (*b).into()), + EncodedTerm::DecimalLiteral(b) => Some(*a == (*b).into()), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::DoubleLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => Some(*a == (*b).into()), + EncodedTerm::DoubleLiteral(b) => Some(a == b), + EncodedTerm::IntegerLiteral(b) => Some(*a == (*b).into()), + EncodedTerm::DecimalLiteral(b) => Some(*a == (*b).into()), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::IntegerLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => Some(Float::from(*a) == *b), + EncodedTerm::DoubleLiteral(b) => Some(Double::from(*a) == *b), + EncodedTerm::IntegerLiteral(b) => Some(a == b), + EncodedTerm::DecimalLiteral(b) => Some(Decimal::from(*a) == *b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::DecimalLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => Some(Float::from(*a) == *b), + EncodedTerm::DoubleLiteral(b) => Some(Double::from(*a) == *b), + EncodedTerm::IntegerLiteral(b) => Some(*a == (*b).into()), + EncodedTerm::DecimalLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::DateTimeLiteral(a) => match b { + EncodedTerm::DateTimeLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::TimeLiteral(a) => match b { + EncodedTerm::TimeLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::DateLiteral(a) => match b { + EncodedTerm::DateLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::GYearMonthLiteral(a) => match b { + EncodedTerm::GYearMonthLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::GYearLiteral(a) => match b { + EncodedTerm::GYearLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::GMonthDayLiteral(a) => match b { + EncodedTerm::GMonthDayLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::GDayLiteral(a) => match b { + EncodedTerm::GDayLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::GMonthLiteral(a) => match b { + 
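// Note: as with the other date/time types above, values of the same type + // compare by value, unknown datatypes stay indeterminate (None), and any + // other combination is plainly unequal. +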
EncodedTerm::GMonthLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::DurationLiteral(a) => match b { + EncodedTerm::DurationLiteral(b) => Some(a == b), + EncodedTerm::YearMonthDurationLiteral(b) => Some(a == b), + EncodedTerm::DayTimeDurationLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::YearMonthDurationLiteral(a) => match b { + EncodedTerm::DurationLiteral(b) => Some(a == b), + EncodedTerm::YearMonthDurationLiteral(b) => Some(a == b), + EncodedTerm::DayTimeDurationLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::DayTimeDurationLiteral(a) => match b { + EncodedTerm::DurationLiteral(b) => Some(a == b), + EncodedTerm::YearMonthDurationLiteral(b) => Some(a == b), + EncodedTerm::DayTimeDurationLiteral(b) => Some(a == b), + _ if b.is_unknown_typed_literal() => None, + _ => Some(false), + }, + EncodedTerm::Triple(a) => { + if let EncodedTerm::Triple(b) = b { + Some( + equals(&a.subject, &b.subject)? + && equals(&a.predicate, &b.predicate)? + && equals(&a.object, &b.object)?, + ) + } else { + Some(false) + } + } + } +} + +fn cmp_terms(dataset: &DatasetView, a: Option<&EncodedTerm>, b: Option<&EncodedTerm>) -> Ordering { + match (a, b) { + (Some(a), Some(b)) => match a { + EncodedTerm::SmallBlankNode(a) => match b { + EncodedTerm::SmallBlankNode(b) => a.cmp(b), + EncodedTerm::BigBlankNode { id_id: b } => { + compare_str_str_id(dataset, a, b).unwrap_or(Ordering::Equal) + } + EncodedTerm::NumericalBlankNode { id: b } => { + a.as_str().cmp(BlankNode::new_from_unique_id(*b).as_str()) + } + _ => Ordering::Less, + }, + EncodedTerm::BigBlankNode { id_id: a } => match b { + EncodedTerm::SmallBlankNode(b) => { + compare_str_id_str(dataset, a, b).unwrap_or(Ordering::Equal) + } + EncodedTerm::BigBlankNode { id_id: b } => { + compare_str_ids(dataset, a, b).unwrap_or(Ordering::Equal) + } + EncodedTerm::NumericalBlankNode { id: b } => { + compare_str_id_str(dataset, a, BlankNode::new_from_unique_id(*b).as_str()) + .unwrap_or(Ordering::Equal) + } + _ => Ordering::Less, + }, + EncodedTerm::NumericalBlankNode { id: a } => { + let a = BlankNode::new_from_unique_id(*a); + match b { + EncodedTerm::SmallBlankNode(b) => a.as_str().cmp(b), + EncodedTerm::BigBlankNode { id_id: b } => { + compare_str_str_id(dataset, a.as_str(), b).unwrap_or(Ordering::Equal) + } + EncodedTerm::NumericalBlankNode { id: b } => { + a.as_str().cmp(BlankNode::new_from_unique_id(*b).as_str()) + } + _ => Ordering::Less, + } + } + EncodedTerm::NamedNode { iri_id: a } => match b { + EncodedTerm::NamedNode { iri_id: b } => { + compare_str_ids(dataset, a, b).unwrap_or(Ordering::Equal) + } + _ if b.is_blank_node() => Ordering::Greater, + _ => Ordering::Less, + }, + EncodedTerm::Triple(a) => match b { + EncodedTerm::Triple(b) => { + match cmp_terms(dataset, Some(&a.subject), Some(&b.subject)) { + Ordering::Equal => { + match cmp_terms(dataset, Some(&a.predicate), Some(&b.predicate)) { + Ordering::Equal => { + cmp_terms(dataset, Some(&a.object), Some(&b.object)) + } + o => o, + } + } + o => o, + } + } + _ => Ordering::Greater, + }, + _ => match b { + _ if b.is_named_node() || b.is_blank_node() => Ordering::Greater, + _ if b.is_triple() => Ordering::Less, + _ => { + if let Some(ord) = partial_cmp_literals(dataset, a, b) { + ord + } else if let (Ok(Term::Literal(a)), Ok(Term::Literal(b))) = + (dataset.decode_term(a), dataset.decode_term(b)) + { + (a.value(), 
a.datatype(), a.language()).cmp(&( + b.value(), + b.datatype(), + b.language(), + )) + } else { + Ordering::Equal // Should never happen + } + } + }, + }, + (Some(_), None) => Ordering::Greater, + (None, Some(_)) => Ordering::Less, + (None, None) => Ordering::Equal, + } +} + +fn partial_cmp(dataset: &DatasetView, a: &EncodedTerm, b: &EncodedTerm) -> Option { + if a == b { + Some(Ordering::Equal) + } else if let EncodedTerm::Triple(a) = a { + if let EncodedTerm::Triple(b) = b { + match partial_cmp(dataset, &a.subject, &b.subject) { + Some(Ordering::Equal) => match partial_cmp(dataset, &a.predicate, &b.predicate) { + Some(Ordering::Equal) => partial_cmp(dataset, &a.object, &b.object), + o => o, + }, + o => o, + } + } else { + None + } + } else { + partial_cmp_literals(dataset, a, b) + } +} + +fn partial_cmp_literals( + dataset: &DatasetView, + a: &EncodedTerm, + b: &EncodedTerm, +) -> Option { + match a { + EncodedTerm::SmallStringLiteral(a) => match b { + EncodedTerm::SmallStringLiteral(b) => a.partial_cmp(b), + EncodedTerm::BigStringLiteral { value_id: b } => compare_str_str_id(dataset, a, b), + _ => None, + }, + EncodedTerm::BigStringLiteral { value_id: a } => match b { + EncodedTerm::SmallStringLiteral(b) => compare_str_id_str(dataset, a, b), + EncodedTerm::BigStringLiteral { value_id: b } => compare_str_ids(dataset, a, b), + _ => None, + }, + EncodedTerm::SmallSmallLangStringLiteral { + value: a, + language: la, + } => match b { + EncodedTerm::SmallSmallLangStringLiteral { + value: b, + language: lb, + } if la == lb => a.partial_cmp(b), + EncodedTerm::BigSmallLangStringLiteral { + value_id: b, + language: lb, + } if la == lb => compare_str_str_id(dataset, a, b), + _ => None, + }, + EncodedTerm::SmallBigLangStringLiteral { + value: a, + language_id: la, + } => match b { + EncodedTerm::SmallBigLangStringLiteral { + value: b, + language_id: lb, + } if la == lb => a.partial_cmp(b), + EncodedTerm::BigBigLangStringLiteral { + value_id: b, + language_id: lb, + } if la == lb => compare_str_str_id(dataset, a, b), + _ => None, + }, + EncodedTerm::BigSmallLangStringLiteral { + value_id: a, + language: la, + } => match b { + EncodedTerm::SmallSmallLangStringLiteral { + value: b, + language: lb, + } if la == lb => compare_str_id_str(dataset, a, b), + EncodedTerm::BigSmallLangStringLiteral { + value_id: b, + language: lb, + } if la == lb => compare_str_ids(dataset, a, b), + _ => None, + }, + EncodedTerm::BigBigLangStringLiteral { + value_id: a, + language_id: la, + } => match b { + EncodedTerm::SmallBigLangStringLiteral { + value: b, + language_id: lb, + } if la == lb => compare_str_id_str(dataset, a, b), + EncodedTerm::BigBigLangStringLiteral { + value_id: b, + language_id: lb, + } if la == lb => compare_str_ids(dataset, a, b), + _ => None, + }, + EncodedTerm::FloatLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => a.partial_cmp(b), + EncodedTerm::DoubleLiteral(b) => Double::from(*a).partial_cmp(b), + EncodedTerm::IntegerLiteral(b) => a.partial_cmp(&Float::from(*b)), + EncodedTerm::DecimalLiteral(b) => a.partial_cmp(&(*b).into()), + _ => None, + }, + EncodedTerm::DoubleLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => a.partial_cmp(&(*b).into()), + EncodedTerm::DoubleLiteral(b) => a.partial_cmp(b), + EncodedTerm::IntegerLiteral(b) => a.partial_cmp(&Double::from(*b)), + EncodedTerm::DecimalLiteral(b) => a.partial_cmp(&(*b).into()), + _ => None, + }, + EncodedTerm::IntegerLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => Float::from(*a).partial_cmp(b), + 
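// Note: mixed numeric comparisons promote toward the wider type (integer and + // decimal widen to float/double), mirroring the XPath operator mapping. +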
EncodedTerm::DoubleLiteral(b) => Double::from(*a).partial_cmp(b), + EncodedTerm::IntegerLiteral(b) => a.partial_cmp(b), + EncodedTerm::DecimalLiteral(b) => Decimal::from(*a).partial_cmp(b), + _ => None, + }, + EncodedTerm::DecimalLiteral(a) => match b { + EncodedTerm::FloatLiteral(b) => Float::from(*a).partial_cmp(b), + EncodedTerm::DoubleLiteral(b) => Double::from(*a).partial_cmp(b), + EncodedTerm::IntegerLiteral(b) => a.partial_cmp(&Decimal::from(*b)), + EncodedTerm::DecimalLiteral(b) => a.partial_cmp(b), + _ => None, + }, + EncodedTerm::DateTimeLiteral(a) => { + if let EncodedTerm::DateTimeLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::TimeLiteral(a) => { + if let EncodedTerm::TimeLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::DateLiteral(a) => { + if let EncodedTerm::DateLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::GYearMonthLiteral(a) => { + if let EncodedTerm::GYearMonthLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::GYearLiteral(a) => { + if let EncodedTerm::GYearLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::GMonthDayLiteral(a) => { + if let EncodedTerm::GMonthDayLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::GDayLiteral(a) => { + if let EncodedTerm::GDayLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::GMonthLiteral(a) => { + if let EncodedTerm::GMonthLiteral(b) = b { + a.partial_cmp(b) + } else { + None + } + } + EncodedTerm::DurationLiteral(a) => match b { + EncodedTerm::DurationLiteral(b) => a.partial_cmp(b), + EncodedTerm::YearMonthDurationLiteral(b) => a.partial_cmp(b), + EncodedTerm::DayTimeDurationLiteral(b) => a.partial_cmp(b), + _ => None, + }, + EncodedTerm::YearMonthDurationLiteral(a) => match b { + EncodedTerm::DurationLiteral(b) => a.partial_cmp(b), + EncodedTerm::YearMonthDurationLiteral(b) => a.partial_cmp(b), + EncodedTerm::DayTimeDurationLiteral(b) => a.partial_cmp(b), + _ => None, + }, + EncodedTerm::DayTimeDurationLiteral(a) => match b { + EncodedTerm::DurationLiteral(b) => a.partial_cmp(b), + EncodedTerm::YearMonthDurationLiteral(b) => a.partial_cmp(b), + EncodedTerm::DayTimeDurationLiteral(b) => a.partial_cmp(b), + _ => None, + }, + _ => None, + } +} + +fn compare_str_ids(dataset: &DatasetView, a: &StrHash, b: &StrHash) -> Option { + Some(dataset.get_str(a).ok()??.cmp(&dataset.get_str(b).ok()??)) +} + +fn compare_str_id_str(dataset: &DatasetView, a: &StrHash, b: &str) -> Option { + Some(dataset.get_str(a).ok()??.as_str().cmp(b)) +} + +fn compare_str_str_id(dataset: &DatasetView, a: &str, b: &StrHash) -> Option { + Some(a.cmp(dataset.get_str(b).ok()??.as_str())) +} + +fn datatype(dataset: &DatasetView, value: &EncodedTerm) -> Option { + // TODO: optimize? + match value { + EncodedTerm::NamedNode { .. } + | EncodedTerm::SmallBlankNode { .. } + | EncodedTerm::BigBlankNode { .. } + | EncodedTerm::NumericalBlankNode { .. } + | EncodedTerm::DefaultGraph + | EncodedTerm::Triple(_) => None, + EncodedTerm::SmallStringLiteral(_) | EncodedTerm::BigStringLiteral { .. } => { + Some(encode_named_node(dataset, xsd::STRING)) + } + EncodedTerm::SmallSmallLangStringLiteral { .. } + | EncodedTerm::SmallBigLangStringLiteral { .. } + | EncodedTerm::BigSmallLangStringLiteral { .. } + | EncodedTerm::BigBigLangStringLiteral { .. } => { + Some(encode_named_node(dataset, rdf::LANG_STRING)) + } + EncodedTerm::SmallTypedLiteral { datatype_id, .. 
} + | EncodedTerm::BigTypedLiteral { datatype_id, .. } => Some(EncodedTerm::NamedNode { + iri_id: *datatype_id, + }), + EncodedTerm::BooleanLiteral(..) => Some(encode_named_node(dataset, xsd::BOOLEAN)), + EncodedTerm::FloatLiteral(..) => Some(encode_named_node(dataset, xsd::FLOAT)), + EncodedTerm::DoubleLiteral(..) => Some(encode_named_node(dataset, xsd::DOUBLE)), + EncodedTerm::IntegerLiteral(..) => Some(encode_named_node(dataset, xsd::INTEGER)), + EncodedTerm::DecimalLiteral(..) => Some(encode_named_node(dataset, xsd::DECIMAL)), + EncodedTerm::DateTimeLiteral(..) => Some(encode_named_node(dataset, xsd::DATE_TIME)), + EncodedTerm::TimeLiteral(..) => Some(encode_named_node(dataset, xsd::TIME)), + EncodedTerm::DateLiteral(..) => Some(encode_named_node(dataset, xsd::DATE)), + EncodedTerm::GYearMonthLiteral(..) => Some(encode_named_node(dataset, xsd::G_YEAR_MONTH)), + EncodedTerm::GYearLiteral(..) => Some(encode_named_node(dataset, xsd::G_YEAR)), + EncodedTerm::GMonthDayLiteral(..) => Some(encode_named_node(dataset, xsd::G_MONTH_DAY)), + EncodedTerm::GDayLiteral(..) => Some(encode_named_node(dataset, xsd::G_DAY)), + EncodedTerm::GMonthLiteral(..) => Some(encode_named_node(dataset, xsd::G_MONTH)), + EncodedTerm::DurationLiteral(..) => Some(encode_named_node(dataset, xsd::DURATION)), + EncodedTerm::YearMonthDurationLiteral(..) => { + Some(encode_named_node(dataset, xsd::YEAR_MONTH_DURATION)) + } + EncodedTerm::DayTimeDurationLiteral(..) => { + Some(encode_named_node(dataset, xsd::DAY_TIME_DURATION)) + } + } +} + +enum NumericBinaryOperands { + Float(Float, Float), + Double(Double, Double), + Integer(Integer, Integer), + Decimal(Decimal, Decimal), + Duration(Duration, Duration), + YearMonthDuration(YearMonthDuration, YearMonthDuration), + DayTimeDuration(DayTimeDuration, DayTimeDuration), + DateTime(DateTime, DateTime), + Time(Time, Time), + Date(Date, Date), + DateTimeDuration(DateTime, Duration), + DateTimeYearMonthDuration(DateTime, YearMonthDuration), + DateTimeDayTimeDuration(DateTime, DayTimeDuration), + DateDuration(Date, Duration), + DateYearMonthDuration(Date, YearMonthDuration), + DateDayTimeDuration(Date, DayTimeDuration), + TimeDuration(Time, Duration), + TimeDayTimeDuration(Time, DayTimeDuration), +} + +impl NumericBinaryOperands { + fn new(a: EncodedTerm, b: EncodedTerm) -> Option { + match (a, b) { + (EncodedTerm::FloatLiteral(v1), EncodedTerm::FloatLiteral(v2)) => { + Some(Self::Float(v1, v2)) + } + (EncodedTerm::FloatLiteral(v1), EncodedTerm::DoubleLiteral(v2)) => { + Some(Self::Double(v1.into(), v2)) + } + (EncodedTerm::FloatLiteral(v1), EncodedTerm::IntegerLiteral(v2)) => { + Some(Self::Float(v1, v2.into())) + } + (EncodedTerm::FloatLiteral(v1), EncodedTerm::DecimalLiteral(v2)) => { + Some(Self::Float(v1, v2.into())) + } + (EncodedTerm::DoubleLiteral(v1), EncodedTerm::FloatLiteral(v2)) => { + Some(Self::Double(v1, v2.into())) + } + (EncodedTerm::DoubleLiteral(v1), EncodedTerm::DoubleLiteral(v2)) => { + Some(Self::Double(v1, v2)) + } + (EncodedTerm::DoubleLiteral(v1), EncodedTerm::IntegerLiteral(v2)) => { + Some(Self::Double(v1, v2.into())) + } + (EncodedTerm::DoubleLiteral(v1), EncodedTerm::DecimalLiteral(v2)) => { + Some(Self::Double(v1, v2.into())) + } + (EncodedTerm::IntegerLiteral(v1), EncodedTerm::FloatLiteral(v2)) => { + Some(Self::Float(v1.into(), v2)) + } + (EncodedTerm::IntegerLiteral(v1), EncodedTerm::DoubleLiteral(v2)) => { + Some(Self::Double(v1.into(), v2)) + } + (EncodedTerm::IntegerLiteral(v1), EncodedTerm::IntegerLiteral(v2)) => { + Some(Self::Integer(v1, v2)) 
+ } + (EncodedTerm::IntegerLiteral(v1), EncodedTerm::DecimalLiteral(v2)) => { + Some(Self::Decimal(v1.into(), v2)) + } + (EncodedTerm::DecimalLiteral(v1), EncodedTerm::FloatLiteral(v2)) => { + Some(Self::Float(v1.into(), v2)) + } + (EncodedTerm::DecimalLiteral(v1), EncodedTerm::DoubleLiteral(v2)) => { + Some(Self::Double(v1.into(), v2)) + } + (EncodedTerm::DecimalLiteral(v1), EncodedTerm::IntegerLiteral(v2)) => { + Some(Self::Decimal(v1, v2.into())) + } + (EncodedTerm::DecimalLiteral(v1), EncodedTerm::DecimalLiteral(v2)) => { + Some(Self::Decimal(v1, v2)) + } + (EncodedTerm::DurationLiteral(v1), EncodedTerm::DurationLiteral(v2)) => { + Some(Self::Duration(v1, v2)) + } + (EncodedTerm::DurationLiteral(v1), EncodedTerm::YearMonthDurationLiteral(v2)) => { + Some(Self::Duration(v1, v2.into())) + } + (EncodedTerm::DurationLiteral(v1), EncodedTerm::DayTimeDurationLiteral(v2)) => { + Some(Self::Duration(v1, v2.into())) + } + (EncodedTerm::YearMonthDurationLiteral(v1), EncodedTerm::DurationLiteral(v2)) => { + Some(Self::Duration(v1.into(), v2)) + } + ( + EncodedTerm::YearMonthDurationLiteral(v1), + EncodedTerm::YearMonthDurationLiteral(v2), + ) => Some(Self::YearMonthDuration(v1, v2)), + ( + EncodedTerm::YearMonthDurationLiteral(v1), + EncodedTerm::DayTimeDurationLiteral(v2), + ) => Some(Self::Duration(v1.into(), v2.into())), + (EncodedTerm::DayTimeDurationLiteral(v1), EncodedTerm::DurationLiteral(v2)) => { + Some(Self::Duration(v1.into(), v2)) + } + ( + EncodedTerm::DayTimeDurationLiteral(v1), + EncodedTerm::YearMonthDurationLiteral(v2), + ) => Some(Self::Duration(v1.into(), v2.into())), + (EncodedTerm::DayTimeDurationLiteral(v1), EncodedTerm::DayTimeDurationLiteral(v2)) => { + Some(Self::DayTimeDuration(v1, v2)) + } + (EncodedTerm::DateTimeLiteral(v1), EncodedTerm::DateTimeLiteral(v2)) => { + Some(Self::DateTime(v1, v2)) + } + (EncodedTerm::DateLiteral(v1), EncodedTerm::DateLiteral(v2)) => { + Some(Self::Date(v1, v2)) + } + (EncodedTerm::TimeLiteral(v1), EncodedTerm::TimeLiteral(v2)) => { + Some(Self::Time(v1, v2)) + } + (EncodedTerm::DateTimeLiteral(v1), EncodedTerm::DurationLiteral(v2)) => { + Some(Self::DateTimeDuration(v1, v2)) + } + (EncodedTerm::DateTimeLiteral(v1), EncodedTerm::YearMonthDurationLiteral(v2)) => { + Some(Self::DateTimeYearMonthDuration(v1, v2)) + } + (EncodedTerm::DateTimeLiteral(v1), EncodedTerm::DayTimeDurationLiteral(v2)) => { + Some(Self::DateTimeDayTimeDuration(v1, v2)) + } + (EncodedTerm::DateLiteral(v1), EncodedTerm::DurationLiteral(v2)) => { + Some(Self::DateDuration(v1, v2)) + } + (EncodedTerm::DateLiteral(v1), EncodedTerm::YearMonthDurationLiteral(v2)) => { + Some(Self::DateYearMonthDuration(v1, v2)) + } + (EncodedTerm::DateLiteral(v1), EncodedTerm::DayTimeDurationLiteral(v2)) => { + Some(Self::DateDayTimeDuration(v1, v2)) + } + (EncodedTerm::TimeLiteral(v1), EncodedTerm::DurationLiteral(v2)) => { + Some(Self::TimeDuration(v1, v2)) + } + (EncodedTerm::TimeLiteral(v1), EncodedTerm::DayTimeDurationLiteral(v2)) => { + Some(Self::TimeDayTimeDuration(v1, v2)) + } + _ => None, + } + } +} + +#[derive(Clone)] +enum TupleSelector { + Constant(EncodedTerm), + Variable(usize), + TriplePattern(Rc), +} + +impl TupleSelector { + fn from_ground_term_pattern( + term_pattern: &GroundTermPattern, + variables: &mut Vec, + dataset: &DatasetView, + ) -> Self { + match term_pattern { + GroundTermPattern::Variable(variable) => { + Self::Variable(encode_variable(variables, variable)) + } + GroundTermPattern::NamedNode(term) => Self::Constant(dataset.encode_term(term)), + 
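// Note: literals are handled like named nodes just below, and a quoted + // (RDF-star) triple pattern whose three components are all constants is + // folded into a single constant EncodedTriple. +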
GroundTermPattern::Literal(term) => Self::Constant(dataset.encode_term(term)), + GroundTermPattern::Triple(triple) => { + match ( + Self::from_ground_term_pattern(&triple.subject, variables, dataset), + Self::from_named_node_pattern(&triple.predicate, variables, dataset), + Self::from_ground_term_pattern(&triple.object, variables, dataset), + ) { + ( + Self::Constant(subject), + Self::Constant(predicate), + Self::Constant(object), + ) => Self::Constant( + EncodedTriple { + subject, + predicate, + object, + } + .into(), + ), + (subject, predicate, object) => { + Self::TriplePattern(Rc::new(TripleTupleSelector { + subject, + predicate, + object, + })) + } + } + } + } + } + + fn from_named_node_pattern( + named_node_pattern: &NamedNodePattern, + variables: &mut Vec, + dataset: &DatasetView, + ) -> Self { + match named_node_pattern { + NamedNodePattern::Variable(variable) => { + Self::Variable(encode_variable(variables, variable)) + } + NamedNodePattern::NamedNode(term) => Self::Constant(dataset.encode_term(term)), + } + } + + fn from_graph_name_pattern( + graph_name_pattern: &Option, + variables: &mut Vec, + dataset: &DatasetView, + ) -> Self { + if let Some(graph_name_pattern) = graph_name_pattern { + Self::from_named_node_pattern(graph_name_pattern, variables, dataset) + } else { + Self::Constant(EncodedTerm::DefaultGraph) + } + } + + fn get_pattern_value(&self, tuple: &EncodedTuple) -> Option { + match self { + Self::Constant(c) => Some(c.clone()), + Self::Variable(v) => tuple.get(*v).cloned(), + Self::TriplePattern(triple) => Some( + EncodedTriple { + subject: triple.subject.get_pattern_value(tuple)?, + predicate: triple.predicate.get_pattern_value(tuple)?, + object: triple.object.get_pattern_value(tuple)?, + } + .into(), + ), + } + } +} + +struct TripleTupleSelector { + subject: TupleSelector, + predicate: TupleSelector, + object: TupleSelector, +} + +fn put_pattern_value( + selector: &TupleSelector, + value: EncodedTerm, + tuple: &mut EncodedTuple, +) -> Option<()> { + match selector { + TupleSelector::Constant(c) => (*c == value).then_some(()), + TupleSelector::Variable(v) => { + if let Some(old) = tuple.get(*v) { + (value == *old).then_some(()) + } else { + tuple.set(*v, value); + Some(()) + } + } + TupleSelector::TriplePattern(triple) => { + if let EncodedTerm::Triple(value) = value { + put_pattern_value(&triple.subject, value.subject.clone(), tuple)?; + put_pattern_value(&triple.predicate, value.predicate.clone(), tuple)?; + put_pattern_value(&triple.object, value.object.clone(), tuple) + } else { + None + } + } + } +} + +fn put_variable_value( + selector: &Variable, + variables: &[Variable], + value: EncodedTerm, + tuple: &mut EncodedTuple, +) { + for (i, v) in variables.iter().enumerate() { + if selector == v { + tuple.set(i, value); + break; + } + } +} + +pub fn are_compatible_and_not_disjointed(a: &EncodedTuple, b: &EncodedTuple) -> bool { + let mut found_intersection = false; + for (a_value, b_value) in a.iter().zip(b.iter()) { + if let (Some(a_value), Some(b_value)) = (a_value, b_value) { + if a_value != b_value { + return false; + } + found_intersection = true; + } + } + found_intersection +} + +pub enum PropertyPath { + Path(EncodedTerm), + Reverse(Rc), + Sequence(Rc, Rc), + Alternative(Rc, Rc), + ZeroOrMore(Rc), + OneOrMore(Rc), + ZeroOrOne(Rc), + NegatedPropertySet(Rc<[EncodedTerm]>), +} + +#[derive(Clone)] +struct PathEvaluator { + dataset: Rc, +} + +impl PathEvaluator { + fn eval_closed_in_graph( + &self, + path: &PropertyPath, + start: &EncodedTerm, + end: &EncodedTerm, 
+ graph_name: &EncodedTerm, + ) -> Result { + Ok(match path { + PropertyPath::Path(p) => self + .dataset + .encoded_quads_for_pattern(Some(start), Some(p), Some(end), Some(graph_name)) + .next() + .transpose()? + .is_some(), + PropertyPath::Reverse(p) => self.eval_closed_in_graph(p, end, start, graph_name)?, + PropertyPath::Sequence(a, b) => self + .eval_from_in_graph(a, start, graph_name) + .find_map(|middle| { + middle + .and_then(|middle| { + Ok(self + .eval_closed_in_graph(b, &middle, end, graph_name)? + .then_some(())) + }) + .transpose() + }) + .transpose()? + .is_some(), + PropertyPath::Alternative(a, b) => { + self.eval_closed_in_graph(a, start, end, graph_name)? + || self.eval_closed_in_graph(b, start, end, graph_name)? + } + PropertyPath::ZeroOrMore(p) => { + if start == end { + self.is_subject_or_object_in_graph(start, graph_name)? + } else { + look_in_transitive_closure( + self.eval_from_in_graph(p, start, graph_name), + move |e| self.eval_from_in_graph(p, &e, graph_name), + end, + )? + } + } + PropertyPath::OneOrMore(p) => look_in_transitive_closure( + self.eval_from_in_graph(p, start, graph_name), + move |e| self.eval_from_in_graph(p, &e, graph_name), + end, + )?, + PropertyPath::ZeroOrOne(p) => { + if start == end { + self.is_subject_or_object_in_graph(start, graph_name) + } else { + self.eval_closed_in_graph(p, start, end, graph_name) + }? + } + PropertyPath::NegatedPropertySet(ps) => self + .dataset + .encoded_quads_for_pattern(Some(start), None, Some(end), Some(graph_name)) + .find_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok(())) + } + } + Err(e) => Some(Err(e)), + }) + .transpose()? + .is_some(), + }) + } + + fn eval_closed_in_unknown_graph( + &self, + path: &PropertyPath, + start: &EncodedTerm, + end: &EncodedTerm, + ) -> Box>> { + match path { + PropertyPath::Path(p) => Box::new( + self.dataset + .encoded_quads_for_pattern(Some(start), Some(p), Some(end), None) + .map(|t| Ok(t?.graph_name)), + ), + PropertyPath::Reverse(p) => self.eval_closed_in_unknown_graph(p, end, start), + PropertyPath::Sequence(a, b) => { + let eval = self.clone(); + let b = Rc::clone(b); + let end = end.clone(); + Box::new(self.eval_from_in_unknown_graph(a, start).flat_map_ok( + move |(middle, graph_name)| { + eval.eval_closed_in_graph(&b, &middle, &end, &graph_name) + .map(|is_found| is_found.then_some(graph_name)) + .transpose() + }, + )) + } + PropertyPath::Alternative(a, b) => Box::new(hash_deduplicate( + self.eval_closed_in_unknown_graph(a, start, end) + .chain(self.eval_closed_in_unknown_graph(b, start, end)), + )), + PropertyPath::ZeroOrMore(p) => { + let eval = self.clone(); + let start2 = start.clone(); + let end = end.clone(); + let p = Rc::clone(p); + self.run_if_term_is_a_dataset_node(start, move |graph_name| { + look_in_transitive_closure( + Some(Ok(start2.clone())), + |e| eval.eval_from_in_graph(&p, &e, &graph_name), + &end, + ) + .map(|is_found| is_found.then_some(graph_name)) + .transpose() + }) + } + PropertyPath::OneOrMore(p) => { + let eval = self.clone(); + let end = end.clone(); + let p = Rc::clone(p); + Box::new( + self.eval_from_in_unknown_graph(&p, start) + .filter_map(move |r| { + r.and_then(|(start, graph_name)| { + look_in_transitive_closure( + Some(Ok(start)), + |e| eval.eval_from_in_graph(&p, &e, &graph_name), + &end, + ) + .map(|is_found| is_found.then_some(graph_name)) + }) + .transpose() + }), + ) + } + PropertyPath::ZeroOrOne(p) => { + if start == end { + self.run_if_term_is_a_dataset_node(start, 
|graph_name| Some(Ok(graph_name))) + } else { + let eval = self.clone(); + let start2 = start.clone(); + let end = end.clone(); + let p = Rc::clone(p); + self.run_if_term_is_a_dataset_node(start, move |graph_name| { + eval.eval_closed_in_graph(&p, &start2, &end, &graph_name) + .map(|is_found| is_found.then_some(graph_name)) + .transpose() + }) + } + } + PropertyPath::NegatedPropertySet(ps) => { + let ps = Rc::clone(ps); + Box::new( + self.dataset + .encoded_quads_for_pattern(Some(start), None, Some(end), None) + .filter_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok(t.graph_name)) + } + } + Err(e) => Some(Err(e)), + }), + ) + } + } + } + + fn eval_from_in_graph( + &self, + path: &PropertyPath, + start: &EncodedTerm, + graph_name: &EncodedTerm, + ) -> Box>> { + match path { + PropertyPath::Path(p) => Box::new( + self.dataset + .encoded_quads_for_pattern(Some(start), Some(p), None, Some(graph_name)) + .map(|t| Ok(t?.object)), + ), + PropertyPath::Reverse(p) => self.eval_to_in_graph(p, start, graph_name), + PropertyPath::Sequence(a, b) => { + let eval = self.clone(); + let b = Rc::clone(b); + let graph_name2 = graph_name.clone(); + Box::new( + self.eval_from_in_graph(a, start, graph_name) + .flat_map_ok(move |middle| { + eval.eval_from_in_graph(&b, &middle, &graph_name2) + }), + ) + } + PropertyPath::Alternative(a, b) => Box::new(hash_deduplicate( + self.eval_from_in_graph(a, start, graph_name) + .chain(self.eval_from_in_graph(b, start, graph_name)), + )), + PropertyPath::ZeroOrMore(p) => { + self.run_if_term_is_a_graph_node(start, graph_name, || { + let eval = self.clone(); + let p = Rc::clone(p); + let graph_name2 = graph_name.clone(); + transitive_closure(Some(Ok(start.clone())), move |e| { + eval.eval_from_in_graph(&p, &e, &graph_name2) + }) + }) + } + PropertyPath::OneOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + let graph_name2 = graph_name.clone(); + Box::new(transitive_closure( + self.eval_from_in_graph(&p, start, graph_name), + move |e| eval.eval_from_in_graph(&p, &e, &graph_name2), + )) + } + PropertyPath::ZeroOrOne(p) => { + self.run_if_term_is_a_graph_node(start, graph_name, || { + hash_deduplicate( + once(Ok(start.clone())) + .chain(self.eval_from_in_graph(p, start, graph_name)), + ) + }) + } + PropertyPath::NegatedPropertySet(ps) => { + let ps = Rc::clone(ps); + Box::new( + self.dataset + .encoded_quads_for_pattern(Some(start), None, None, Some(graph_name)) + .filter_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok(t.object)) + } + } + Err(e) => Some(Err(e)), + }), + ) + } + } + } + + fn eval_from_in_unknown_graph( + &self, + path: &PropertyPath, + start: &EncodedTerm, + ) -> Box>> { + match path { + PropertyPath::Path(p) => Box::new( + self.dataset + .encoded_quads_for_pattern(Some(start), Some(p), None, None) + .map(|t| { + let t = t?; + Ok((t.object, t.graph_name)) + }), + ), + PropertyPath::Reverse(p) => self.eval_to_in_unknown_graph(p, start), + PropertyPath::Sequence(a, b) => { + let eval = self.clone(); + let b = Rc::clone(b); + Box::new(self.eval_from_in_unknown_graph(a, start).flat_map_ok( + move |(middle, graph_name)| { + eval.eval_from_in_graph(&b, &middle, &graph_name) + .map(move |end| Ok((end?, graph_name.clone()))) + }, + )) + } + PropertyPath::Alternative(a, b) => Box::new(hash_deduplicate( + self.eval_from_in_unknown_graph(a, start) + .chain(self.eval_from_in_unknown_graph(b, start)), + )), + PropertyPath::ZeroOrMore(p) => 
{ + let start2 = start.clone(); + let eval = self.clone(); + let p = Rc::clone(p); + self.run_if_term_is_a_dataset_node(start, move |graph_name| { + let eval = eval.clone(); + let p = Rc::clone(&p); + let graph_name2 = graph_name.clone(); + transitive_closure(Some(Ok(start2.clone())), move |e| { + eval.eval_from_in_graph(&p, &e, &graph_name2) + }) + .map(move |e| Ok((e?, graph_name.clone()))) + }) + } + PropertyPath::OneOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + Box::new(transitive_closure( + self.eval_from_in_unknown_graph(&p, start), + move |(e, graph_name)| { + eval.eval_from_in_graph(&p, &e, &graph_name) + .map(move |e| Ok((e?, graph_name.clone()))) + }, + )) + } + PropertyPath::ZeroOrOne(p) => { + let eval = self.clone(); + let start2 = start.clone(); + let p = Rc::clone(p); + self.run_if_term_is_a_dataset_node(start, move |graph_name| { + hash_deduplicate(once(Ok(start2.clone())).chain(eval.eval_from_in_graph( + &p, + &start2, + &graph_name, + ))) + .map(move |e| Ok((e?, graph_name.clone()))) + }) + } + PropertyPath::NegatedPropertySet(ps) => { + let ps = Rc::clone(ps); + Box::new( + self.dataset + .encoded_quads_for_pattern(Some(start), None, None, None) + .filter_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok((t.object, t.graph_name))) + } + } + Err(e) => Some(Err(e)), + }), + ) + } + } + } + + fn eval_to_in_graph( + &self, + path: &PropertyPath, + end: &EncodedTerm, + graph_name: &EncodedTerm, + ) -> Box>> { + match path { + PropertyPath::Path(p) => Box::new( + self.dataset + .encoded_quads_for_pattern(None, Some(p), Some(end), Some(graph_name)) + .map(|t| Ok(t?.subject)), + ), + PropertyPath::Reverse(p) => self.eval_from_in_graph(p, end, graph_name), + PropertyPath::Sequence(a, b) => { + let eval = self.clone(); + let a = Rc::clone(a); + let graph_name2 = graph_name.clone(); + Box::new( + self.eval_to_in_graph(b, end, graph_name) + .flat_map_ok(move |middle| { + eval.eval_to_in_graph(&a, &middle, &graph_name2) + }), + ) + } + PropertyPath::Alternative(a, b) => Box::new(hash_deduplicate( + self.eval_to_in_graph(a, end, graph_name) + .chain(self.eval_to_in_graph(b, end, graph_name)), + )), + PropertyPath::ZeroOrMore(p) => { + self.run_if_term_is_a_graph_node(end, graph_name, || { + let eval = self.clone(); + let p = Rc::clone(p); + let graph_name2 = graph_name.clone(); + transitive_closure(Some(Ok(end.clone())), move |e| { + eval.eval_to_in_graph(&p, &e, &graph_name2) + }) + }) + } + PropertyPath::OneOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + let graph_name2 = graph_name.clone(); + Box::new(transitive_closure( + self.eval_to_in_graph(&p, end, graph_name), + move |e| eval.eval_to_in_graph(&p, &e, &graph_name2), + )) + } + PropertyPath::ZeroOrOne(p) => self.run_if_term_is_a_graph_node(end, graph_name, || { + hash_deduplicate( + once(Ok(end.clone())).chain(self.eval_to_in_graph(p, end, graph_name)), + ) + }), + PropertyPath::NegatedPropertySet(ps) => { + let ps = Rc::clone(ps); + Box::new( + self.dataset + .encoded_quads_for_pattern(None, None, Some(end), Some(graph_name)) + .filter_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok(t.subject)) + } + } + Err(e) => Some(Err(e)), + }), + ) + } + } + } + + fn eval_to_in_unknown_graph( + &self, + path: &PropertyPath, + end: &EncodedTerm, + ) -> Box>> { + match path { + PropertyPath::Path(p) => Box::new( + self.dataset + .encoded_quads_for_pattern(None, Some(p), Some(end), 
None) + .map(|t| { + let t = t?; + Ok((t.subject, t.graph_name)) + }), + ), + PropertyPath::Reverse(p) => self.eval_from_in_unknown_graph(p, end), + PropertyPath::Sequence(a, b) => { + let eval = self.clone(); + let a = Rc::clone(a); + Box::new(self.eval_to_in_unknown_graph(b, end).flat_map_ok( + move |(middle, graph_name)| { + eval.eval_from_in_graph(&a, &middle, &graph_name) + .map(move |start| Ok((start?, graph_name.clone()))) + }, + )) + } + PropertyPath::Alternative(a, b) => Box::new(hash_deduplicate( + self.eval_to_in_unknown_graph(a, end) + .chain(self.eval_to_in_unknown_graph(b, end)), + )), + PropertyPath::ZeroOrMore(p) => { + let end2 = end.clone(); + let eval = self.clone(); + let p = Rc::clone(p); + self.run_if_term_is_a_dataset_node(end, move |graph_name| { + let eval = eval.clone(); + let p = Rc::clone(&p); + let graph_name2 = graph_name.clone(); + transitive_closure(Some(Ok(end2.clone())), move |e| { + eval.eval_to_in_graph(&p, &e, &graph_name2) + }) + .map(move |e| Ok((e?, graph_name.clone()))) + }) + } + PropertyPath::OneOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + Box::new(transitive_closure( + self.eval_to_in_unknown_graph(&p, end), + move |(e, graph_name)| { + eval.eval_to_in_graph(&p, &e, &graph_name) + .map(move |e| Ok((e?, graph_name.clone()))) + }, + )) + } + PropertyPath::ZeroOrOne(p) => { + let eval = self.clone(); + let end2 = end.clone(); + let p = Rc::clone(p); + self.run_if_term_is_a_dataset_node(end, move |graph_name| { + hash_deduplicate(once(Ok(end2.clone())).chain(eval.eval_to_in_graph( + &p, + &end2, + &graph_name, + ))) + .map(move |e| Ok((e?, graph_name.clone()))) + }) + } + PropertyPath::NegatedPropertySet(ps) => { + let ps = Rc::clone(ps); + Box::new( + self.dataset + .encoded_quads_for_pattern(Some(end), None, None, None) + .filter_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok((t.subject, t.graph_name))) + } + } + Err(e) => Some(Err(e)), + }), + ) + } + } + } + + fn eval_open_in_graph( + &self, + path: &PropertyPath, + graph_name: &EncodedTerm, + ) -> Box>> { + match path { + PropertyPath::Path(p) => Box::new( + self.dataset + .encoded_quads_for_pattern(None, Some(p), None, Some(graph_name)) + .map(|t| t.map(|t| (t.subject, t.object))), + ), + PropertyPath::Reverse(p) => Box::new( + self.eval_open_in_graph(p, graph_name) + .map(|t| t.map(|(s, o)| (o, s))), + ), + PropertyPath::Sequence(a, b) => { + let eval = self.clone(); + let b = Rc::clone(b); + let graph_name2 = graph_name.clone(); + Box::new(self.eval_open_in_graph(a, graph_name).flat_map_ok( + move |(start, middle)| { + eval.eval_from_in_graph(&b, &middle, &graph_name2) + .map(move |end| Ok((start.clone(), end?))) + }, + )) + } + PropertyPath::Alternative(a, b) => Box::new(hash_deduplicate( + self.eval_open_in_graph(a, graph_name) + .chain(self.eval_open_in_graph(b, graph_name)), + )), + PropertyPath::ZeroOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + let graph_name2 = graph_name.clone(); + Box::new(transitive_closure( + self.get_subject_or_object_identity_pairs_in_graph(graph_name), + move |(start, middle)| { + eval.eval_from_in_graph(&p, &middle, &graph_name2) + .map(move |end| Ok((start.clone(), end?))) + }, + )) + } + PropertyPath::OneOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + let graph_name2 = graph_name.clone(); + Box::new(transitive_closure( + self.eval_open_in_graph(&p, graph_name), + move |(start, middle)| { + eval.eval_from_in_graph(&p, &middle, 
&graph_name2) + .map(move |end| Ok((start.clone(), end?))) + }, + )) + } + PropertyPath::ZeroOrOne(p) => Box::new(hash_deduplicate( + self.get_subject_or_object_identity_pairs_in_graph(graph_name) + .chain(self.eval_open_in_graph(p, graph_name)), + )), + PropertyPath::NegatedPropertySet(ps) => { + let ps = Rc::clone(ps); + Box::new( + self.dataset + .encoded_quads_for_pattern(None, None, None, Some(graph_name)) + .filter_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok((t.subject, t.object))) + } + } + Err(e) => Some(Err(e)), + }), + ) + } + } + } + + fn eval_open_in_unknown_graph( + &self, + path: &PropertyPath, + ) -> Box>> + { + match path { + PropertyPath::Path(p) => Box::new( + self.dataset + .encoded_quads_for_pattern(None, Some(p), None, None) + .map(|t| t.map(|t| (t.subject, t.object, t.graph_name))), + ), + PropertyPath::Reverse(p) => Box::new( + self.eval_open_in_unknown_graph(p) + .map(|t| t.map(|(s, o, g)| (o, s, g))), + ), + PropertyPath::Sequence(a, b) => { + let eval = self.clone(); + let b = Rc::clone(b); + Box::new(self.eval_open_in_unknown_graph(a).flat_map_ok( + move |(start, middle, graph_name)| { + eval.eval_from_in_graph(&b, &middle, &graph_name) + .map(move |end| Ok((start.clone(), end?, graph_name.clone()))) + }, + )) + } + PropertyPath::Alternative(a, b) => Box::new(hash_deduplicate( + self.eval_open_in_unknown_graph(a) + .chain(self.eval_open_in_unknown_graph(b)), + )), + PropertyPath::ZeroOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + Box::new(transitive_closure( + self.get_subject_or_object_identity_pairs_in_dataset(), + move |(start, middle, graph_name)| { + eval.eval_from_in_graph(&p, &middle, &graph_name) + .map(move |end| Ok((start.clone(), end?, graph_name.clone()))) + }, + )) + } + PropertyPath::OneOrMore(p) => { + let eval = self.clone(); + let p = Rc::clone(p); + Box::new(transitive_closure( + self.eval_open_in_unknown_graph(&p), + move |(start, middle, graph_name)| { + eval.eval_from_in_graph(&p, &middle, &graph_name) + .map(move |end| Ok((start.clone(), end?, graph_name.clone()))) + }, + )) + } + PropertyPath::ZeroOrOne(p) => Box::new(hash_deduplicate( + self.get_subject_or_object_identity_pairs_in_dataset() + .chain(self.eval_open_in_unknown_graph(p)), + )), + PropertyPath::NegatedPropertySet(ps) => { + let ps = Rc::clone(ps); + Box::new( + self.dataset + .encoded_quads_for_pattern(None, None, None, None) + .filter_map(move |t| match t { + Ok(t) => { + if ps.iter().any(|p| *p == t.predicate) { + None + } else { + Some(Ok((t.subject, t.object, t.graph_name))) + } + } + Err(e) => Some(Err(e)), + }), + ) + } + } + } + + fn get_subject_or_object_identity_pairs_in_graph( + &self, + graph_name: &EncodedTerm, + ) -> impl Iterator> { + self.dataset + .encoded_quads_for_pattern(None, None, None, Some(graph_name)) + .flat_map_ok(|t| { + [ + Ok((t.subject.clone(), t.subject)), + Ok((t.object.clone(), t.object)), + ] + }) + } + + fn get_subject_or_object_identity_pairs_in_dataset( + &self, + ) -> impl Iterator> + { + self.dataset + .encoded_quads_for_pattern(None, None, None, None) + .flat_map_ok(|t| { + [ + Ok((t.subject.clone(), t.subject, t.graph_name.clone())), + Ok((t.object.clone(), t.object, t.graph_name)), + ] + }) + } + + fn run_if_term_is_a_graph_node< + T: 'static, + I: Iterator> + 'static, + >( + &self, + term: &EncodedTerm, + graph_name: &EncodedTerm, + f: impl FnOnce() -> I, + ) -> Box>> { + match self.is_subject_or_object_in_graph(term, graph_name) { + Ok(true) => 
Box::new(f()), + Ok(false) => { + Box::new(empty()) // Not in the database + } + Err(error) => Box::new(once(Err(error))), + } + } + + fn is_subject_or_object_in_graph( + &self, + term: &EncodedTerm, + graph_name: &EncodedTerm, + ) -> Result { + Ok(self + .dataset + .encoded_quads_for_pattern(Some(term), None, None, Some(graph_name)) + .next() + .transpose()? + .is_some() + || self + .dataset + .encoded_quads_for_pattern(None, None, Some(term), Some(graph_name)) + .next() + .transpose()? + .is_some()) + } + + fn run_if_term_is_a_dataset_node< + T: 'static, + I: IntoIterator> + 'static, + >( + &self, + term: &EncodedTerm, + f: impl FnMut(EncodedTerm) -> I + 'static, + ) -> Box>> { + match self + .find_graphs_where_the_node_is_in(term) + .collect::, _>>() + { + Ok(graph_names) => Box::new(graph_names.into_iter().flat_map(f)), + Err(error) => Box::new(once(Err(error))), + } + } + + fn find_graphs_where_the_node_is_in( + &self, + term: &EncodedTerm, + ) -> impl Iterator> { + self.dataset + .encoded_quads_for_pattern(Some(term), None, None, None) + .chain( + self.dataset + .encoded_quads_for_pattern(None, None, Some(term), None), + ) + .map(|q| Ok(q?.graph_name)) + } +} + +struct CartesianProductJoinIterator { + probe_iter: EncodedTuplesIterator, + built: Vec, + buffered_results: Vec>, +} + +impl Iterator for CartesianProductJoinIterator { + type Item = Result; + + fn next(&mut self) -> Option { + loop { + if let Some(result) = self.buffered_results.pop() { + return Some(result); + } + let probe_tuple = match self.probe_iter.next()? { + Ok(probe_tuple) => probe_tuple, + Err(error) => return Some(Err(error)), + }; + for built_tuple in &self.built { + if let Some(result_tuple) = probe_tuple.combine_with(built_tuple) { + self.buffered_results.push(Ok(result_tuple)) + } + } + } + } + + fn size_hint(&self) -> (usize, Option) { + let (min, max) = self.probe_iter.size_hint(); + ( + min.saturating_mul(self.built.len()), + max.map(|v| v.saturating_mul(self.built.len())), + ) + } +} + +struct HashJoinIterator { + probe_iter: EncodedTuplesIterator, + built: EncodedTupleSet, + buffered_results: Vec>, +} + +impl Iterator for HashJoinIterator { + type Item = Result; + + fn next(&mut self) -> Option { + loop { + if let Some(result) = self.buffered_results.pop() { + return Some(result); + } + let probe_tuple = match self.probe_iter.next()? { + Ok(probe_tuple) => probe_tuple, + Err(error) => return Some(Err(error)), + }; + self.buffered_results.extend( + self.built + .get(&probe_tuple) + .iter() + .filter_map(|built_tuple| probe_tuple.combine_with(built_tuple).map(Ok)), + ) + } + } + + fn size_hint(&self) -> (usize, Option) { + ( + 0, + self.probe_iter + .size_hint() + .1 + .map(|v| v.saturating_mul(self.built.len())), + ) + } +} + +struct HashLeftJoinIterator { + left_iter: EncodedTuplesIterator, + right: EncodedTupleSet, + buffered_results: Vec>, + expression: Rc Option>, +} + +impl Iterator for HashLeftJoinIterator { + type Item = Result; + + fn next(&mut self) -> Option { + loop { + if let Some(result) = self.buffered_results.pop() { + return Some(result); + } + let left_tuple = match self.left_iter.next()? 
{ + Ok(left_tuple) => left_tuple, + Err(error) => return Some(Err(error)), + }; + self.buffered_results.extend( + self.right + .get(&left_tuple) + .iter() + .filter_map(|right_tuple| left_tuple.combine_with(right_tuple)) + .filter(|tuple| { + (self.expression)(tuple) + .and_then(|term| to_bool(&term)) + .unwrap_or(false) + }) + .map(Ok), + ); + if self.buffered_results.is_empty() { + // We have not managed to join with anything + return Some(Ok(left_tuple)); + } + } + } + + fn size_hint(&self) -> (usize, Option<usize>) { + ( + 0, + self.left_iter + .size_hint() + .1 + .map(|v| v.saturating_mul(self.right.len())), + ) + } +} + +struct ForLoopLeftJoinIterator { + right_evaluator: Rc<dyn Fn(EncodedTuple) -> EncodedTuplesIterator>, + left_iter: EncodedTuplesIterator, + current_right: EncodedTuplesIterator, +} + +impl Iterator for ForLoopLeftJoinIterator { + type Item = Result<EncodedTuple, EvaluationError>; + + fn next(&mut self) -> Option<Self::Item> { + if let Some(tuple) = self.current_right.next() { + return Some(tuple); + } + let left_tuple = match self.left_iter.next()? { + Ok(left_tuple) => left_tuple, + Err(error) => return Some(Err(error)), + }; + self.current_right = (self.right_evaluator)(left_tuple.clone()); + if let Some(right_tuple) = self.current_right.next() { + Some(right_tuple) + } else { + Some(Ok(left_tuple)) + } + } +} + +struct UnionIterator { + plans: Vec<Rc<dyn Fn(EncodedTuple) -> EncodedTuplesIterator>>, + input: EncodedTuple, + current_iterator: EncodedTuplesIterator, + current_plan: usize, +} + +impl Iterator for UnionIterator { + type Item = Result<EncodedTuple, EvaluationError>; + + fn next(&mut self) -> Option<Self::Item> { + loop { + if let Some(tuple) = self.current_iterator.next() { + return Some(tuple); + } + if self.current_plan >= self.plans.len() { + return None; + } + self.current_iterator = self.plans[self.current_plan](self.input.clone()); + self.current_plan += 1; + } + } +} + +struct ConsecutiveDeduplication { + inner: EncodedTuplesIterator, + current: Option<EncodedTuple>, +} + +impl Iterator for ConsecutiveDeduplication { + type Item = Result<EncodedTuple, EvaluationError>; + + fn next(&mut self) -> Option<Self::Item> { + // Basic idea: we buffer the previous result and only emit it once we know the next one (or reach the end) + loop { + if let Some(next) = self.inner.next() { + match next { + Ok(next) => match self.current.take() { + Some(current) if current != next => { + // We found a relevant value + self.current = Some(next); + return Some(Ok(current)); + } + _ => { + // We discard the value and move to the next one + self.current = Some(next); + } + }, + Err(error) => return Some(Err(error)), // The error is emitted ahead of the buffered value, but that swap is fine: it's an error. + } + } else { + return self.current.take().map(Ok); + } + } + } + + fn size_hint(&self) -> (usize, Option<usize>) { + let (min, max) = self.inner.size_hint(); + ((min != 0).into(), max) + } +} + +struct ConstructIterator { + eval: SimpleEvaluator, + iter: EncodedTuplesIterator, + template: Vec<TripleTemplate>, + buffered_results: Vec<Result<Triple, EvaluationError>>, + bnodes: Vec<EncodedTerm>, +} + +impl Iterator for ConstructIterator { + type Item = Result<Triple, EvaluationError>; + + fn next(&mut self) -> Option<Self::Item> { + loop { + if let Some(result) = self.buffered_results.pop() { + return Some(result); + } + { + let tuple = match self.iter.next()?
{ + Ok(tuple) => tuple, + Err(error) => return Some(Err(error)), + }; + for template in &self.template { + if let (Some(subject), Some(predicate), Some(object)) = ( + get_triple_template_value(&template.subject, &tuple, &mut self.bnodes), + get_triple_template_value(&template.predicate, &tuple, &mut self.bnodes), + get_triple_template_value(&template.object, &tuple, &mut self.bnodes), + ) { + self.buffered_results.push(decode_triple( + &*self.eval.dataset, + &subject, + &predicate, + &object, + )); + } + } + self.bnodes.clear(); // We do not reuse old bnodes + } + } + } + + fn size_hint(&self) -> (usize, Option) { + let (min, max) = self.iter.size_hint(); + ( + min.saturating_mul(self.template.len()), + max.map(|v| v.saturating_mul(self.template.len())), + ) + } +} + +pub struct TripleTemplate { + pub subject: TripleTemplateValue, + pub predicate: TripleTemplateValue, + pub object: TripleTemplateValue, +} + +pub enum TripleTemplateValue { + Constant(EncodedTerm), + BlankNode(usize), + Variable(usize), + Triple(Box), +} + +fn get_triple_template_value<'a>( + selector: &'a TripleTemplateValue, + tuple: &'a EncodedTuple, + bnodes: &'a mut Vec, +) -> Option { + match selector { + TripleTemplateValue::Constant(term) => Some(term.clone()), + TripleTemplateValue::Variable(v) => tuple.get(*v).cloned(), + TripleTemplateValue::BlankNode(bnode) => { + if *bnode >= bnodes.len() { + bnodes.resize_with(*bnode + 1, new_bnode) + } + Some(bnodes[*bnode].clone()) + } + TripleTemplateValue::Triple(triple) => Some( + EncodedTriple { + subject: get_triple_template_value(&triple.subject, tuple, bnodes)?, + predicate: get_triple_template_value(&triple.predicate, tuple, bnodes)?, + object: get_triple_template_value(&triple.object, tuple, bnodes)?, + } + .into(), + ), + } +} + +fn new_bnode() -> EncodedTerm { + EncodedTerm::NumericalBlankNode { id: random() } +} + +fn decode_triple( + decoder: &D, + subject: &EncodedTerm, + predicate: &EncodedTerm, + object: &EncodedTerm, +) -> Result { + Ok(Triple::new( + decoder.decode_subject(subject)?, + decoder.decode_named_node(predicate)?, + decoder.decode_term(object)?, + )) +} + +struct DescribeIterator { + eval: SimpleEvaluator, + iter: EncodedTuplesIterator, + quads: Box>>, +} + +impl Iterator for DescribeIterator { + type Item = Result; + + fn next(&mut self) -> Option { + loop { + if let Some(quad) = self.quads.next() { + return Some(match quad { + Ok(quad) => self + .eval + .dataset + .decode_quad(&quad) + .map(Into::into) + .map_err(Into::into), + Err(error) => Err(error), + }); + } + let tuple = match self.iter.next()? 
{ + Ok(tuple) => tuple, + Err(error) => return Some(Err(error)), + }; + let eval = self.eval.clone(); + self.quads = Box::new(tuple.into_iter().flatten().flat_map(move |subject| { + eval.dataset + .encoded_quads_for_pattern( + Some(&subject), + None, + None, + Some(&EncodedTerm::DefaultGraph), + ) + .chain( + eval.dataset + .encoded_quads_for_pattern(Some(&subject), None, None, None), + ) + })); + } + } +} + +struct ZipLongest, I2: Iterator> { + a: I1, + b: I2, +} + +impl, I2: Iterator> ZipLongest { + fn new(a: I1, b: I2) -> Self { + Self { a, b } + } +} + +impl, I2: Iterator> Iterator + for ZipLongest +{ + type Item = (Option, Option); + + fn next(&mut self) -> Option { + match (self.a.next(), self.b.next()) { + (None, None) => None, + r => Some(r), + } + } +} + +fn transitive_closure>>( + start: impl IntoIterator>, + mut next: impl FnMut(T) -> NI, +) -> impl Iterator> { + let mut errors = Vec::new(); + let mut todo = start + .into_iter() + .filter_map(|e| match e { + Ok(e) => Some(e), + Err(e) => { + errors.push(e); + None + } + }) + .collect::>(); + let mut all = todo.iter().cloned().collect::>(); + while let Some(e) = todo.pop() { + for e in next(e) { + match e { + Ok(e) => { + if all.insert(e.clone()) { + todo.push(e) + } + } + Err(e) => errors.push(e), + } + } + } + errors.into_iter().map(Err).chain(all.into_iter().map(Ok)) +} + +fn look_in_transitive_closure< + T: Clone + Eq + Hash, + NI: Iterator>, +>( + start: impl IntoIterator>, + mut next: impl FnMut(T) -> NI, + target: &T, +) -> Result { + let mut todo = start.into_iter().collect::, _>>()?; + let mut all = todo.iter().cloned().collect::>(); + while let Some(e) = todo.pop() { + if e == *target { + return Ok(true); + } + for e in next(e) { + let e = e?; + if all.insert(e.clone()) { + todo.push(e); + } + } + } + Ok(false) +} + +fn hash_deduplicate( + iter: impl Iterator>, +) -> impl Iterator> { + let mut already_seen = HashSet::with_capacity(iter.size_hint().0); + iter.filter(move |e| { + if let Ok(e) = e { + if already_seen.contains(e) { + false + } else { + already_seen.insert(e.clone()); + true + } + } else { + true + } + }) +} + +trait ResultIterator: Iterator> + Sized { + fn flat_map_ok U, U: IntoIterator>>( + self, + f: F, + ) -> FlatMapOk; +} + +impl> + Sized> ResultIterator for I { + fn flat_map_ok U, U: IntoIterator>>( + self, + f: F, + ) -> FlatMapOk { + FlatMapOk { + inner: self, + f, + current: None, + } + } +} + +struct FlatMapOk< + T, + O, + I: Iterator>, + F: FnMut(T) -> U, + U: IntoIterator>, +> { + inner: I, + f: F, + current: Option, +} + +impl< + T, + O, + I: Iterator>, + F: FnMut(T) -> U, + U: IntoIterator>, + > Iterator for FlatMapOk +{ + type Item = Result; + + fn next(&mut self) -> Option { + loop { + if let Some(current) = &mut self.current { + if let Some(next) = current.next() { + return Some(next); + } + } + self.current = None; + match self.inner.next()? 
{ + Ok(e) => self.current = Some((self.f)(e).into_iter()), + Err(error) => return Some(Err(error)), + } + } + } +} + +trait Accumulator { + fn add(&mut self, element: Option); + + fn state(&self) -> Option; +} + +struct Deduplicate { + seen: HashSet>, + inner: Box, +} + +impl Deduplicate { + fn new(inner: Box) -> Self { + Self { + seen: HashSet::default(), + inner, + } + } +} + +impl Accumulator for Deduplicate { + fn add(&mut self, element: Option) { + if self.seen.insert(element.clone()) { + self.inner.add(element) + } + } + + fn state(&self) -> Option { + self.inner.state() + } +} + +#[derive(Default, Debug)] +struct CountAccumulator { + count: i64, +} + +impl Accumulator for CountAccumulator { + fn add(&mut self, _element: Option) { + self.count += 1; + } + + fn state(&self) -> Option { + Some(self.count.into()) + } +} + +struct SumAccumulator { + sum: Option, +} + +impl Default for SumAccumulator { + fn default() -> Self { + Self { + sum: Some(0.into()), + } + } +} + +impl Accumulator for SumAccumulator { + fn add(&mut self, element: Option) { + if let Some(sum) = &self.sum { + if let Some(operands) = element.and_then(|e| NumericBinaryOperands::new(sum.clone(), e)) + { + // TODO: unify with addition? + self.sum = match operands { + NumericBinaryOperands::Float(v1, v2) => Some((v1 + v2).into()), + NumericBinaryOperands::Double(v1, v2) => Some((v1 + v2).into()), + NumericBinaryOperands::Integer(v1, v2) => v1.checked_add(v2).map(Into::into), + NumericBinaryOperands::Decimal(v1, v2) => v1.checked_add(v2).map(Into::into), + NumericBinaryOperands::Duration(v1, v2) => v1.checked_add(v2).map(Into::into), + NumericBinaryOperands::YearMonthDuration(v1, v2) => { + v1.checked_add(v2).map(Into::into) + } + NumericBinaryOperands::DayTimeDuration(v1, v2) => { + v1.checked_add(v2).map(Into::into) + } + _ => None, + }; + } else { + self.sum = None; + } + } + } + + fn state(&self) -> Option { + self.sum.clone() + } +} + +#[derive(Default)] +struct AvgAccumulator { + sum: SumAccumulator, + count: i64, +} + +impl Accumulator for AvgAccumulator { + fn add(&mut self, element: Option) { + self.sum.add(element); + self.count += 1; + } + + fn state(&self) -> Option { + let sum = self.sum.state()?; + if self.count == 0 { + Some(0.into()) + } else { + // TODO: deduplicate? + // TODO: duration? 
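+            // AVG is Sum/Count with the usual XSD numeric promotion applied
+            // below: float and double sums divide directly, while an integer
+            // sum is promoted to decimal so that the division stays exact.
+            // A checked_div returning None surfaces as an unbound result
+            // rather than a panic.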
+ let count = Integer::from(self.count); + match sum { + EncodedTerm::FloatLiteral(sum) => Some((sum / Float::from(count)).into()), + EncodedTerm::DoubleLiteral(sum) => Some((sum / Double::from(count)).into()), + EncodedTerm::IntegerLiteral(sum) => { + Some(Decimal::from(sum).checked_div(count)?.into()) + } + EncodedTerm::DecimalLiteral(sum) => Some(sum.checked_div(count)?.into()), + _ => None, + } + } + } +} + +#[allow(clippy::option_option)] +struct MinAccumulator { + dataset: Rc, + min: Option>, +} + +impl MinAccumulator { + fn new(dataset: Rc) -> Self { + Self { dataset, min: None } + } +} + +impl Accumulator for MinAccumulator { + fn add(&mut self, element: Option) { + if let Some(min) = &self.min { + if cmp_terms(&self.dataset, element.as_ref(), min.as_ref()) == Ordering::Less { + self.min = Some(element) + } + } else { + self.min = Some(element) + } + } + + fn state(&self) -> Option { + self.min.clone().and_then(|v| v) + } +} + +#[allow(clippy::option_option)] +struct MaxAccumulator { + dataset: Rc, + max: Option>, +} + +impl MaxAccumulator { + fn new(dataset: Rc) -> Self { + Self { dataset, max: None } + } +} + +impl Accumulator for MaxAccumulator { + fn add(&mut self, element: Option) { + if let Some(max) = &self.max { + if cmp_terms(&self.dataset, element.as_ref(), max.as_ref()) == Ordering::Greater { + self.max = Some(element) + } + } else { + self.max = Some(element) + } + } + + fn state(&self) -> Option { + self.max.clone().and_then(|v| v) + } +} + +#[derive(Default)] +struct SampleAccumulator { + value: Option, +} + +impl Accumulator for SampleAccumulator { + fn add(&mut self, element: Option) { + if element.is_some() { + self.value = element + } + } + + fn state(&self) -> Option { + self.value.clone() + } +} + +#[allow(clippy::option_option)] +struct GroupConcatAccumulator { + dataset: Rc, + concat: Option, + language: Option>, + separator: Rc, +} + +impl GroupConcatAccumulator { + fn new(dataset: Rc, separator: Rc) -> Self { + Self { + dataset, + concat: Some(String::new()), + language: None, + separator, + } + } +} + +impl Accumulator for GroupConcatAccumulator { + fn add(&mut self, element: Option) { + if let Some(concat) = self.concat.as_mut() { + if let Some(element) = element { + if let Some((value, e_language)) = to_string_and_language(&self.dataset, &element) { + if let Some(lang) = self.language { + if lang != e_language { + self.language = Some(None) + } + concat.push_str(&self.separator); + } else { + self.language = Some(e_language) + } + concat.push_str(&value); + } + } + } + } + + fn state(&self) -> Option { + self.concat + .as_ref() + .map(|result| build_plain_literal(&self.dataset, result, self.language.and_then(|v| v))) + } +} + +struct FailingAccumulator; + +impl Accumulator for FailingAccumulator { + fn add(&mut self, _: Option) {} + + fn state(&self) -> Option { + None + } +} + +fn encode_variable(variables: &mut Vec, variable: &Variable) -> usize { + if let Some(key) = slice_key(variables, variable) { + key + } else { + variables.push(variable.clone()); + variables.len() - 1 + } +} + +fn bnode_key(blank_nodes: &mut Vec, blank_node: &BlankNode) -> usize { + if let Some(key) = slice_key(blank_nodes, blank_node) { + key + } else { + blank_nodes.push(blank_node.clone()); + blank_nodes.len() - 1 + } +} + +fn slice_key(slice: &[T], element: &T) -> Option { + for (i, item) in slice.iter().enumerate() { + if item == element { + return Some(i); + } + } + None +} + +fn generate_uuid(buffer: &mut String) { + let mut uuid = random::().to_le_bytes(); + uuid[6] = 
(uuid[6] & 0x0F) | 0x40; + uuid[8] = (uuid[8] & 0x3F) | 0x80; + + write_hexa_bytes(&uuid[0..4], buffer); + buffer.push('-'); + write_hexa_bytes(&uuid[4..6], buffer); + buffer.push('-'); + write_hexa_bytes(&uuid[6..8], buffer); + buffer.push('-'); + write_hexa_bytes(&uuid[8..10], buffer); + buffer.push('-'); + write_hexa_bytes(&uuid[10..16], buffer); +} + +fn write_hexa_bytes(bytes: &[u8], buffer: &mut String) { + for b in bytes { + let high = b / 16; + buffer.push(char::from(if high < 10 { + b'0' + high + } else { + b'a' + (high - 10) + })); + let low = b % 16; + buffer.push(char::from(if low < 10 { + b'0' + low + } else { + b'a' + (low - 10) + })); + } +} + +#[derive(Eq, PartialEq, Clone, Copy)] +enum SmallStringOrId { + Small(SmallString), + Big(StrHash), +} + +impl From for SmallStringOrId { + fn from(value: SmallString) -> Self { + Self::Small(value) + } +} + +impl From for SmallStringOrId { + fn from(value: StrHash) -> Self { + Self::Big(value) + } +} + +pub enum ComparatorFunction { + Asc(Rc Option>), + Desc(Rc Option>), +} + +struct EncodedTupleSet { + key: Vec, + map: HashMap>, + len: usize, +} + +impl EncodedTupleSet { + fn new(key: Vec) -> Self { + Self { + key, + map: HashMap::new(), + len: 0, + } + } + + fn insert(&mut self, tuple: EncodedTuple) { + self.map + .entry(self.tuple_key(&tuple)) + .or_default() + .push(tuple); + self.len += 1; + } + + fn get(&self, tuple: &EncodedTuple) -> &[EncodedTuple] { + self.map.get(&self.tuple_key(tuple)).map_or(&[], |v| v) + } + + fn tuple_key(&self, tuple: &EncodedTuple) -> u64 { + let mut hasher = DefaultHasher::default(); + for v in &self.key { + if let Some(val) = tuple.get(*v) { + val.hash(&mut hasher); + } + } + hasher.finish() + } + + fn len(&self) -> usize { + self.len + } +} + +impl Extend for EncodedTupleSet { + fn extend>(&mut self, iter: T) { + let iter = iter.into_iter(); + self.map.reserve(iter.size_hint().0); + for tuple in iter { + self.insert(tuple); + } + } +} + +struct StatsIterator { + inner: EncodedTuplesIterator, + stats: Rc, +} + +impl Iterator for StatsIterator { + type Item = Result; + + fn next(&mut self) -> Option { + let start = Timer::now(); + let result = self.inner.next(); + self.stats.exec_duration.set( + self.stats + .exec_duration + .get() + .and_then(|stat| stat.checked_add(start.elapsed()?)), + ); + if matches!(result, Some(Ok(_))) { + self.stats.exec_count.set(self.stats.exec_count.get() + 1); + } + result + } +} + +pub struct EvalNodeWithStats { + pub label: String, + pub children: Vec>, + pub exec_count: Cell, + pub exec_duration: Cell>, +} + +impl EvalNodeWithStats { + pub fn json_node( + &self, + writer: &mut ToWriteJsonWriter, + with_stats: bool, + ) -> io::Result<()> { + writer.write_event(JsonEvent::StartObject)?; + writer.write_event(JsonEvent::ObjectKey("name".into()))?; + writer.write_event(JsonEvent::String((&self.label).into()))?; + if with_stats { + writer.write_event(JsonEvent::ObjectKey("number of results".into()))?; + writer.write_event(JsonEvent::Number(self.exec_count.get().to_string().into()))?; + if let Some(duration) = self.exec_duration.get() { + writer.write_event(JsonEvent::ObjectKey("duration in seconds".into()))?; + writer.write_event(JsonEvent::Number(duration.as_seconds().to_string().into()))?; + } + } + writer.write_event(JsonEvent::ObjectKey("children".into()))?; + writer.write_event(JsonEvent::StartArray)?; + for child in &self.children { + child.json_node(writer, with_stats)?; + } + writer.write_event(JsonEvent::EndArray)?; + writer.write_event(JsonEvent::EndObject) + } +} 
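`EncodedTupleSet` above is the build side of the hash joins driven by `HashJoinIterator`, `HashLeftJoinIterator`, and the hash-based minus: tuples are bucketed by a hash of their bindings at the shared key positions, a probe scans only its own bucket, and `EncodedTuple::combine_with` then does the exact compatibility check, so hash collisions are harmless. A minimal standalone sketch of the same bucketing idea, using `Vec<Option<u64>>` as a stand-in for `EncodedTuple` (the names below are illustrative, not part of this patch):

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

type Tuple = Vec<Option<u64>>; // stand-in for EncodedTuple

struct TupleSet {
    key: Vec<usize>,               // variable positions shared by both join sides
    map: HashMap<u64, Vec<Tuple>>, // one bucket per key hash
}

impl TupleSet {
    fn key_of(&self, tuple: &Tuple) -> u64 {
        let mut hasher = DefaultHasher::new();
        for &position in &self.key {
            // Unbound positions contribute nothing, as in tuple_key() above.
            if let Some(value) = tuple.get(position).copied().flatten() {
                value.hash(&mut hasher);
            }
        }
        hasher.finish()
    }

    fn insert(&mut self, tuple: Tuple) {
        let key = self.key_of(&tuple);
        self.map.entry(key).or_default().push(tuple);
    }

    fn candidates(&self, probe: &Tuple) -> &[Tuple] {
        self.map.get(&self.key_of(probe)).map_or(&[], Vec::as_slice)
    }
}

fn main() {
    let mut built = TupleSet { key: vec![0], map: HashMap::new() };
    built.insert(vec![Some(1), Some(10)]);
    built.insert(vec![Some(2), Some(20)]);
    // Probing with variable 0 bound to 1 scans a single bucket.
    let probe = vec![Some(1), None];
    assert_eq!(built.candidates(&probe).len(), 1);
}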
+ +impl fmt::Debug for EvalNodeWithStats { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut obj = f.debug_struct("Node"); + obj.field("name", &self.label); + if let Some(exec_duration) = self.exec_duration.get() { + obj.field("number of results", &self.exec_count.get()); + obj.field( + "duration in seconds", + &f32::from(Float::from(exec_duration.as_seconds())), + ); + } + if !self.children.is_empty() { + obj.field("children", &self.children); + } + obj.finish() + } +} + +fn eval_node_label(node: &GraphPattern) -> String { + match node { + GraphPattern::Distinct { .. } => "Distinct(Hash)".to_owned(), + GraphPattern::Extend { + expression, + variable, + .. + } => format!( + "Extend({} -> {variable})", + spargebra::algebra::Expression::from(expression) + ), + GraphPattern::Filter { expression, .. } => format!( + "Filter({})", + spargebra::algebra::Expression::from(expression) + ), + GraphPattern::Group { + variables, + aggregates, + .. + } => { + format!( + "Aggregate({})", + format_list(variables.iter().map(ToString::to_string).chain( + aggregates.iter().map(|(v, agg)| format!( + "{} -> {v}", + spargebra::algebra::AggregateExpression::from(agg) + )) + )) + ) + } + GraphPattern::Join { algorithm, .. } => match algorithm { + JoinAlgorithm::HashBuildLeftProbeRight { keys } => format!( + "LeftJoin(HashBuildLeftProbeRight, keys = {})", + format_list(keys) + ), + }, + GraphPattern::Lateral { right, .. } => { + if let GraphPattern::LeftJoin { + left: nested_left, + expression, + .. + } = right.as_ref() + { + if nested_left.is_empty_singleton() { + // We are in a ForLoopLeftJoin + return format!( + "ForLoopLeftJoin(expression = {})", + spargebra::algebra::Expression::from(expression) + ); + } + } + "Lateral".to_owned() + } + GraphPattern::LeftJoin { + algorithm, + expression, + .. + } => match algorithm { + LeftJoinAlgorithm::HashBuildRightProbeLeft { keys } => format!( + "LeftJoin(HashBuildRightProbeLeft, keys = {}, expression = {})", + format_list(keys), + spargebra::algebra::Expression::from(expression) + ), + }, + GraphPattern::Minus { algorithm, .. } => match algorithm { + MinusAlgorithm::HashBuildRightProbeLeft { keys } => format!( + "AntiJoin(HashBuildRightProbeLeft, keys = {})", + format_list(keys) + ), + }, + GraphPattern::OrderBy { expression, .. } => { + format!( + "Sort({})", + format_list( + expression + .iter() + .map(spargebra::algebra::OrderExpression::from) + ) + ) + } + GraphPattern::Path { + subject, + path, + object, + graph_name, + } => { + if let Some(graph_name) = graph_name { + format!("Path({subject} {path} {object} {graph_name})") + } else { + format!("Path({subject} {path} {object})") + } + } + GraphPattern::Project { variables, .. } => { + format!("Project({})", format_list(variables)) + } + GraphPattern::QuadPattern { + subject, + predicate, + object, + graph_name, + } => { + if let Some(graph_name) = graph_name { + format!("QuadPattern({subject} {predicate} {object} {graph_name})") + } else { + format!("QuadPattern({subject} {predicate} {object})") + } + } + GraphPattern::Reduced { .. } => "Reduced".to_owned(), + GraphPattern::Service { name, silent, .. } => { + if *silent { + format!("Service({name}, Silent)") + } else { + format!("Service({name})") + } + } + GraphPattern::Slice { start, length, .. } => { + if let Some(length) = length { + format!("Slice(start = {start}, length = {length})") + } else { + format!("Slice(start = {start})") + } + } + GraphPattern::Union { .. } => "Union".to_owned(), + GraphPattern::Values { variables, .. 
} => { + format!("StaticBindings({})", format_list(variables)) + } + } +} + +fn format_list(values: impl IntoIterator) -> String { + values + .into_iter() + .map(|v| v.to_string()) + .collect::>() + .join(", ") +} + +pub struct Timer { + start: DateTime, +} + +impl Timer { + pub fn now() -> Self { + Self { + start: DateTime::now(), + } + } + + pub fn elapsed(&self) -> Option { + DateTime::now().checked_sub(self.start) + } +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + + #[test] + fn uuid() { + let mut buffer = String::default(); + generate_uuid(&mut buffer); + assert!( + Regex::new("^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$") + .unwrap() + .is_match(&buffer), + "{buffer} is not a valid UUID" + ); + } +} diff --git a/ng-oxigraph/src/oxigraph/sparql/http/dummy.rs b/ng-oxigraph/src/oxigraph/sparql/http/dummy.rs new file mode 100644 index 0000000..7b3a551 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/http/dummy.rs @@ -0,0 +1,34 @@ +//! Simple HTTP client + +use std::io::{Empty, Error, ErrorKind, Result}; +use std::time::Duration; + +pub struct Client; + +impl Client { + pub fn new(_timeout: Option, _redirection_limit: usize) -> Self { + Self + } + + #[allow(clippy::unused_self)] + pub fn get(&self, _url: &str, _accept: &'static str) -> Result<(String, Empty)> { + Err(Error::new( + ErrorKind::Unsupported, + "HTTP client is not available. Enable the feature 'http-client'", + )) + } + + #[allow(clippy::unused_self, clippy::needless_pass_by_value)] + pub fn post( + &self, + _url: &str, + _payload: Vec, + _content_type: &'static str, + _accept: &'static str, + ) -> Result<(String, Empty)> { + Err(Error::new( + ErrorKind::Unsupported, + "HTTP client is not available. Enable the feature 'http-client'", + )) + } +} diff --git a/ng-oxigraph/src/oxigraph/sparql/http/mod.rs b/ng-oxigraph/src/oxigraph/sparql/http/mod.rs new file mode 100644 index 0000000..b309cf5 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/http/mod.rs @@ -0,0 +1,9 @@ +#[cfg(not(feature = "http-client"))] +mod dummy; +#[cfg(feature = "http-client")] +mod simple; + +#[cfg(not(feature = "http-client"))] +pub use dummy::Client; +#[cfg(feature = "http-client")] +pub use simple::Client; diff --git a/ng-oxigraph/src/oxigraph/sparql/http/simple.rs b/ng-oxigraph/src/oxigraph/sparql/http/simple.rs new file mode 100644 index 0000000..bd81d7c --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/http/simple.rs @@ -0,0 +1,90 @@ +use oxhttp::model::{Body, HeaderName, Method, Request}; +use std::io::{Error, ErrorKind, Result}; +use std::time::Duration; + +pub struct Client { + client: oxhttp::Client, +} + +impl Client { + pub fn new(timeout: Option, redirection_limit: usize) -> Self { + let mut client = oxhttp::Client::new() + .with_redirection_limit(redirection_limit) + .with_user_agent(concat!("Oxigraph/", env!("CARGO_PKG_VERSION"))) + .unwrap(); + if let Some(timeout) = timeout { + client = client.with_global_timeout(timeout); + } + Self { client } + } + + pub fn get(&self, url: &str, accept: &'static str) -> Result<(String, Body)> { + let request = Request::builder(Method::GET, url.parse().map_err(invalid_input_error)?) + .with_header(HeaderName::ACCEPT, accept) + .map_err(invalid_input_error)? + .build(); + let response = self.client.request(request)?; + let status = response.status(); + if !status.is_successful() { + return Err(Error::new( + ErrorKind::Other, + format!( + "Error {} returned by {} with payload:\n{}", + status, + url, + response.into_body().to_string()? 
+ ), + )); + } + let content_type = response + .header(&HeaderName::CONTENT_TYPE) + .ok_or_else(|| invalid_data_error(format!("No Content-Type returned by {url}")))? + .to_str() + .map_err(invalid_data_error)? + .to_owned(); + Ok((content_type, response.into_body())) + } + + pub fn post( + &self, + url: &str, + payload: Vec, + content_type: &'static str, + accept: &'static str, + ) -> Result<(String, Body)> { + let request = Request::builder(Method::POST, url.parse().map_err(invalid_input_error)?) + .with_header(HeaderName::ACCEPT, accept) + .map_err(invalid_input_error)? + .with_header(HeaderName::CONTENT_TYPE, content_type) + .map_err(invalid_input_error)? + .with_body(payload); + let response = self.client.request(request)?; + let status = response.status(); + if !status.is_successful() { + return Err(Error::new( + ErrorKind::Other, + format!( + "Error {} returned by {} with payload:\n{}", + status, + url, + response.into_body().to_string()? + ), + )); + } + let content_type = response + .header(&HeaderName::CONTENT_TYPE) + .ok_or_else(|| invalid_data_error(format!("No Content-Type returned by {url}")))? + .to_str() + .map_err(invalid_data_error)? + .to_owned(); + Ok((content_type, response.into_body())) + } +} + +fn invalid_data_error(error: impl Into>) -> Error { + Error::new(ErrorKind::InvalidData, error) +} + +fn invalid_input_error(error: impl Into>) -> Error { + Error::new(ErrorKind::InvalidInput, error) +} diff --git a/ng-oxigraph/src/oxigraph/sparql/mod.rs b/ng-oxigraph/src/oxigraph/sparql/mod.rs new file mode 100644 index 0000000..3d8c066 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/mod.rs @@ -0,0 +1,354 @@ +// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// partial Copyright (c) 2018 Oxigraph developers +// All work licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice or not, may not be copied, modified, or distributed except +// according to those terms. + +//! [SPARQL](https://www.w3.org/TR/sparql11-overview/) implementation. +//! +//! Stores execute SPARQL. See [`Store`](crate::oxigraph::store::Store::query()) for an example. 
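Concretely, a query runs end to end through this module as follows; a minimal sketch in the style of the doc-tests elsewhere in this patch, assuming the crate is consumed through the upstream-style `oxigraph` paths those doc-tests use (the store contents and IRI are illustrative):

use oxigraph::model::{GraphNameRef, NamedNodeRef, QuadRef};
use oxigraph::sparql::QueryResults;
use oxigraph::store::Store;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let store = Store::new()?;
    let ex = NamedNodeRef::new("http://example.com")?;
    store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;

    // evaluate_query() below dispatches on the query form: SELECT yields
    // QueryResults::Solutions, ASK yields Boolean, CONSTRUCT/DESCRIBE yield Graph.
    if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
        assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into()));
    }
    Ok(())
}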
+ +mod algebra; +mod dataset; +mod error; +mod eval; +mod http; +mod model; +pub mod results; +mod service; +mod update; + +use super::model::{NamedNode, Term}; +pub use super::sparql::algebra::{Query, QueryDataset, Update}; +use super::sparql::dataset::DatasetView; +pub use super::sparql::error::EvaluationError; +use super::sparql::eval::{EvalNodeWithStats, SimpleEvaluator, Timer}; +pub use super::sparql::model::{QueryResults, QuerySolution, QuerySolutionIter, QueryTripleIter}; +pub use super::sparql::service::ServiceHandler; +use super::sparql::service::{EmptyServiceHandler, ErrorConversionServiceHandler}; +pub(super) use super::sparql::update::evaluate_update; +use super::storage::StorageReader; +pub use crate::oxrdf::{Variable, VariableNameParseError}; +use crate::oxsdatatypes::{DayTimeDuration, Float}; +use crate::spargebra; +pub use crate::spargebra::SparqlSyntaxError; +use crate::sparopt::algebra::GraphPattern; +use crate::sparopt::Optimizer; +use json_event_parser::{JsonEvent, ToWriteJsonWriter}; +use std::collections::HashMap; +use std::rc::Rc; +use std::sync::Arc; +use std::time::Duration; +use std::{fmt, io}; + +#[allow(clippy::needless_pass_by_value)] +pub(crate) fn evaluate_query( + reader: StorageReader, + query: impl TryInto>, + options: QueryOptions, + run_stats: bool, +) -> Result<(Result, QueryExplanation), EvaluationError> { + let query = query.try_into().map_err(Into::into)?; + let dataset = DatasetView::new(reader, &query.dataset, options.get_default_graph()); + let start_planning = Timer::now(); + let (results, plan_node_with_stats, planning_duration) = match query.inner { + spargebra::Query::Select { + pattern, base_iri, .. + } => { + let mut pattern = GraphPattern::from(&pattern); + if !options.without_optimizations { + pattern = Optimizer::optimize_graph_pattern(pattern); + } + let planning_duration = start_planning.elapsed(); + let (results, explanation) = SimpleEvaluator::new( + Rc::new(dataset), + base_iri.map(Rc::new), + options.service_handler(), + Arc::new(options.custom_functions), + run_stats, + ) + .evaluate_select(&pattern); + (Ok(results), explanation, planning_duration) + } + spargebra::Query::Ask { + pattern, base_iri, .. + } => { + let mut pattern = GraphPattern::from(&pattern); + if !options.without_optimizations { + pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced { + inner: Box::new(pattern), + }); + } + let planning_duration = start_planning.elapsed(); + let (results, explanation) = SimpleEvaluator::new( + Rc::new(dataset), + base_iri.map(Rc::new), + options.service_handler(), + Arc::new(options.custom_functions), + run_stats, + ) + .evaluate_ask(&pattern); + (results, explanation, planning_duration) + } + spargebra::Query::Construct { + template, + pattern, + base_iri, + .. + } => { + let mut pattern = GraphPattern::from(&pattern); + if !options.without_optimizations { + pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced { + inner: Box::new(pattern), + }); + } + let planning_duration = start_planning.elapsed(); + let (results, explanation) = SimpleEvaluator::new( + Rc::new(dataset), + base_iri.map(Rc::new), + options.service_handler(), + Arc::new(options.custom_functions), + run_stats, + ) + .evaluate_construct(&pattern, &template); + (Ok(results), explanation, planning_duration) + } + spargebra::Query::Describe { + pattern, base_iri, .. 
+ } => { + let mut pattern = GraphPattern::from(&pattern); + if !options.without_optimizations { + pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced { + inner: Box::new(pattern), + }); + } + let planning_duration = start_planning.elapsed(); + let (results, explanation) = SimpleEvaluator::new( + Rc::new(dataset), + base_iri.map(Rc::new), + options.service_handler(), + Arc::new(options.custom_functions), + run_stats, + ) + .evaluate_describe(&pattern); + (Ok(results), explanation, planning_duration) + } + }; + let explanation = QueryExplanation { + inner: plan_node_with_stats, + with_stats: run_stats, + parsing_duration: query.parsing_duration, + planning_duration, + }; + Ok((results, explanation)) +} + +/// Options for SPARQL query evaluation. +/// +/// +/// If the `"http-client"` optional feature is enabled, +/// a simple HTTP 1.1 client is used to execute [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/) SERVICE calls. +/// +/// Usage example disabling the federated query support: +/// ``` +/// use oxigraph::sparql::QueryOptions; +/// use oxigraph::store::Store; +/// +/// let store = Store::new()?; +/// store.query_opt( +/// "SELECT * WHERE { SERVICE {} }", +/// QueryOptions::default().without_service_handler(), +/// )?; +/// # Result::<_,Box>::Ok(()) +/// ``` +#[derive(Clone, Default)] +pub struct QueryOptions { + service_handler: Option>>, + custom_functions: CustomFunctionRegistry, + http_timeout: Option, + http_redirection_limit: usize, + without_optimizations: bool, + default_graph: Option, +} + +pub(crate) type CustomFunctionRegistry = + HashMap Option) + Send + Sync>>; + +impl QueryOptions { + /// Use a given [`ServiceHandler`] to execute [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/) SERVICE calls. + #[inline] + #[must_use] + pub fn with_service_handler(mut self, service_handler: impl ServiceHandler + 'static) -> Self { + self.service_handler = Some(Arc::new(ErrorConversionServiceHandler::wrap( + service_handler, + ))); + self + } + + pub fn set_default_graph(&mut self, dg: Option) { + self.default_graph = dg; + } + + pub fn get_default_graph(&self) -> &Option { + &self.default_graph + } + + /// Disables the `SERVICE` calls + #[inline] + #[must_use] + pub fn without_service_handler(mut self) -> Self { + self.service_handler = Some(Arc::new(EmptyServiceHandler)); + self + } + + /// Sets a timeout for HTTP requests done during SPARQL evaluation. + #[cfg(feature = "http-client")] + #[inline] + #[must_use] + pub fn with_http_timeout(mut self, timeout: Duration) -> Self { + self.http_timeout = Some(timeout); + self + } + + /// Sets an upper bound of the number of HTTP redirection followed per HTTP request done during SPARQL evaluation. + /// + /// By default this value is `0`. + #[cfg(feature = "http-client")] + #[inline] + #[must_use] + pub fn with_http_redirection_limit(mut self, redirection_limit: usize) -> Self { + self.http_redirection_limit = redirection_limit; + self + } + + /// Adds a custom SPARQL evaluation function. 
+ /// + /// Example with a function serializing terms to N-Triples: + /// ``` + /// use oxigraph::model::*; + /// use oxigraph::sparql::{QueryOptions, QueryResults}; + /// use oxigraph::store::Store; + /// + /// let store = Store::new()?; + /// + /// if let QueryResults::Solutions(mut solutions) = store.query_opt( + /// "SELECT ((1) AS ?nt) WHERE {}", + /// QueryOptions::default().with_custom_function( + /// NamedNode::new("http://www.w3.org/ns/formats/N-Triples")?, + /// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()), + /// ), + /// )? { + /// assert_eq!( + /// solutions.next().unwrap()?.get("nt"), + /// Some(&Literal::from("\"1\"^^").into()) + /// ); + /// } + /// # Result::<_,Box>::Ok(()) + /// ``` + #[inline] + #[must_use] + pub fn with_custom_function( + mut self, + name: NamedNode, + evaluator: impl Fn(&[Term]) -> Option + Send + Sync + 'static, + ) -> Self { + self.custom_functions.insert(name, Arc::new(evaluator)); + self + } + + fn service_handler(&self) -> Arc> { + self.service_handler.clone().unwrap_or_else(|| { + if cfg!(feature = "http-client") { + Arc::new(service::SimpleServiceHandler::new( + self.http_timeout, + self.http_redirection_limit, + )) + } else { + Arc::new(EmptyServiceHandler) + } + }) + } + + #[doc(hidden)] + #[inline] + #[must_use] + pub fn without_optimizations(mut self) -> Self { + self.without_optimizations = true; + self + } +} + +/// Options for SPARQL update evaluation. +#[derive(Clone, Default)] +pub struct UpdateOptions { + query_options: QueryOptions, +} + +impl From for UpdateOptions { + #[inline] + fn from(query_options: QueryOptions) -> Self { + Self { query_options } + } +} + +impl UpdateOptions { + pub fn set_default_graph(&mut self, dg: Option) { + self.query_options.set_default_graph(dg); + } +} + +/// The explanation of a query. +#[derive(Clone)] +pub struct QueryExplanation { + inner: Rc, + with_stats: bool, + parsing_duration: Option, + planning_duration: Option, +} + +impl QueryExplanation { + /// Writes the explanation as JSON. 
+ pub fn write_in_json(&self, write: impl io::Write) -> io::Result<()> { + let mut writer = ToWriteJsonWriter::new(write); + writer.write_event(JsonEvent::StartObject)?; + if let Some(parsing_duration) = self.parsing_duration { + writer.write_event(JsonEvent::ObjectKey("parsing duration in seconds".into()))?; + writer.write_event(JsonEvent::Number( + parsing_duration.as_seconds().to_string().into(), + ))?; + } + if let Some(planning_duration) = self.planning_duration { + writer.write_event(JsonEvent::ObjectKey("planning duration in seconds".into()))?; + writer.write_event(JsonEvent::Number( + planning_duration.as_seconds().to_string().into(), + ))?; + } + writer.write_event(JsonEvent::ObjectKey("plan".into()))?; + self.inner.json_node(&mut writer, self.with_stats)?; + writer.write_event(JsonEvent::EndObject) + } +} + +impl fmt::Debug for QueryExplanation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut obj = f.debug_struct("QueryExplanation"); + if let Some(parsing_duration) = self.parsing_duration { + obj.field( + "parsing duration in seconds", + &f32::from(Float::from(parsing_duration.as_seconds())), + ); + } + if let Some(planning_duration) = self.planning_duration { + obj.field( + "planning duration in seconds", + &f32::from(Float::from(planning_duration.as_seconds())), + ); + } + obj.field("tree", &self.inner); + obj.finish_non_exhaustive() + } +} diff --git a/ng-oxigraph/src/oxigraph/sparql/model.rs b/ng-oxigraph/src/oxigraph/sparql/model.rs new file mode 100644 index 0000000..7352cf9 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/model.rs @@ -0,0 +1,372 @@ +use crate::oxigraph::io::{RdfFormat, RdfSerializer}; +use crate::oxigraph::model::*; +use crate::oxigraph::sparql::error::EvaluationError; +use crate::oxigraph::sparql::results::{ + FromReadQueryResultsReader, FromReadSolutionsReader, QueryResultsFormat, + QueryResultsParseError, QueryResultsParser, QueryResultsSerializer, +}; +pub use crate::sparesults::QuerySolution; +use std::io::{Read, Write}; +use std::sync::Arc; + +/// Results of a [SPARQL query](https://www.w3.org/TR/sparql11-query/). +pub enum QueryResults { + /// Results of a [SELECT](https://www.w3.org/TR/sparql11-query/#select) query. + Solutions(QuerySolutionIter), + /// Result of a [ASK](https://www.w3.org/TR/sparql11-query/#ask) query. + Boolean(bool), + /// Results of a [CONSTRUCT](https://www.w3.org/TR/sparql11-query/#construct) or [DESCRIBE](https://www.w3.org/TR/sparql11-query/#describe) query. + Graph(QueryTripleIter), +} + +impl QueryResults { + /// Reads a SPARQL query results serialization. + pub fn read( + read: impl Read + 'static, + format: QueryResultsFormat, + ) -> Result { + Ok(QueryResultsParser::from_format(format) + .parse_read(read)? + .into()) + } + + /// Writes the query results (solutions or boolean). + /// + /// This method fails if it is called on the `Graph` results. 
+ /// + /// ``` + /// use oxigraph::store::Store; + /// use oxigraph::model::*; + /// use oxigraph::sparql::results::QueryResultsFormat; + /// + /// let store = Store::new()?; + /// let ex = NamedNodeRef::new("http://example.com")?; + /// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; + /// + /// let results = store.query("SELECT ?s WHERE { ?s ?p ?o }")?; + /// assert_eq!( + /// results.write(Vec::new(), QueryResultsFormat::Json)?, + /// r#"{"head":{"vars":["s"]},"results":{"bindings":[{"s":{"type":"uri","value":"http://example.com"}}]}}"#.as_bytes() + /// ); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn write( + self, + write: W, + format: QueryResultsFormat, + ) -> Result { + let serializer = QueryResultsSerializer::from_format(format); + match self { + Self::Boolean(value) => serializer.serialize_boolean_to_write(write, value), + Self::Solutions(solutions) => { + let mut writer = serializer + .serialize_solutions_to_write(write, solutions.variables().to_vec()) + .map_err(EvaluationError::ResultsSerialization)?; + for solution in solutions { + writer + .write(&solution?) + .map_err(EvaluationError::ResultsSerialization)?; + } + writer.finish() + } + Self::Graph(triples) => { + let s = VariableRef::new_unchecked("subject"); + let p = VariableRef::new_unchecked("predicate"); + let o = VariableRef::new_unchecked("object"); + let mut writer = serializer + .serialize_solutions_to_write( + write, + vec![s.into_owned(), p.into_owned(), o.into_owned()], + ) + .map_err(EvaluationError::ResultsSerialization)?; + for triple in triples { + let triple = triple?; + writer + .write([ + (s, &triple.subject.into()), + (p, &triple.predicate.into()), + (o, &triple.object), + ]) + .map_err(EvaluationError::ResultsSerialization)?; + } + writer.finish() + } + } + .map_err(EvaluationError::ResultsSerialization) + } + + /// Writes the graph query results. + /// + /// This method fails if it is called on the `Solution` or `Boolean` results. + /// + /// ``` + /// use oxigraph::io::RdfFormat; + /// use oxigraph::model::*; + /// use oxigraph::store::Store; + /// + /// let graph = " .\n"; + /// + /// let store = Store::new()?; + /// store.load_graph( + /// graph.as_bytes(), + /// RdfFormat::NTriples, + /// GraphName::DefaultGraph, + /// None, + /// )?; + /// + /// let results = store.query("CONSTRUCT WHERE { ?s ?p ?o }")?; + /// assert_eq!( + /// results.write_graph(Vec::new(), RdfFormat::NTriples)?, + /// graph.as_bytes() + /// ); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn write_graph( + self, + write: W, + format: impl Into, + ) -> Result { + if let Self::Graph(triples) = self { + let mut writer = RdfSerializer::from_format(format.into()).serialize_to_write(write); + for triple in triples { + writer + .write_triple(&triple?) + .map_err(EvaluationError::ResultsSerialization)?; + } + writer + .finish() + .map_err(EvaluationError::ResultsSerialization) + } else { + Err(EvaluationError::NotAGraph) + } + } +} + +impl From for QueryResults { + #[inline] + fn from(value: QuerySolutionIter) -> Self { + Self::Solutions(value) + } +} + +impl From> for QueryResults { + fn from(reader: FromReadQueryResultsReader) -> Self { + match reader { + FromReadQueryResultsReader::Solutions(s) => Self::Solutions(s.into()), + FromReadQueryResultsReader::Boolean(v) => Self::Boolean(v), + } + } +} + +/// An iterator over [`QuerySolution`]s. 
+/// +/// ``` +/// use oxigraph::sparql::QueryResults; +/// use oxigraph::store::Store; +/// +/// let store = Store::new()?; +/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? { +/// for solution in solutions { +/// println!("{:?}", solution?.get("s")); +/// } +/// } +/// # Result::<_,Box>::Ok(()) +/// ``` +pub struct QuerySolutionIter { + variables: Arc<[Variable]>, + iter: Box>>, +} + +impl QuerySolutionIter { + /// Construct a new iterator of solution from an ordered list of solution variables and an iterator of solution tuples + /// (each tuple using the same ordering as the variable list such that tuple element 0 is the value for the variable 0...) + pub fn new( + variables: Arc<[Variable]>, + iter: impl Iterator>, EvaluationError>> + 'static, + ) -> Self { + Self { + variables: Arc::clone(&variables), + iter: Box::new( + iter.map(move |t| t.map(|values| (Arc::clone(&variables), values).into())), + ), + } + } + + /// The variables used in the solutions. + /// + /// ``` + /// use oxigraph::sparql::{QueryResults, Variable}; + /// use oxigraph::store::Store; + /// + /// let store = Store::new()?; + /// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }")? { + /// assert_eq!( + /// solutions.variables(), + /// &[Variable::new("s")?, Variable::new("o")?] + /// ); + /// } + /// # Result::<_,Box>::Ok(()) + /// ``` + #[inline] + pub fn variables(&self) -> &[Variable] { + &self.variables + } +} + +impl From> for QuerySolutionIter { + fn from(reader: FromReadSolutionsReader) -> Self { + Self { + variables: reader.variables().into(), + iter: Box::new(reader.map(|t| t.map_err(EvaluationError::from))), + } + } +} + +impl Iterator for QuerySolutionIter { + type Item = Result; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +/// An iterator over the triples that compose a graph solution. +/// +/// ``` +/// use oxigraph::sparql::QueryResults; +/// use oxigraph::store::Store; +/// +/// let store = Store::new()?; +/// if let QueryResults::Graph(triples) = store.query("CONSTRUCT WHERE { ?s ?p ?o }")? 
{ +/// for triple in triples { +/// println!("{}", triple?); +/// } +/// } +/// # Result::<_,Box>::Ok(()) +/// ``` +pub struct QueryTripleIter { + pub(crate) iter: Box>>, +} + +impl Iterator for QueryTripleIter { + type Item = Result; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } + + #[inline] + fn fold(self, init: Acc, g: G) -> Acc + where + G: FnMut(Acc, Self::Item) -> Acc, + { + self.iter.fold(init, g) + } +} + +#[cfg(feature = "rdf-star")] +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + use std::io::Cursor; + + #[test] + fn test_serialization_roundtrip() -> Result<(), EvaluationError> { + use std::str; + + for format in [ + QueryResultsFormat::Json, + QueryResultsFormat::Xml, + QueryResultsFormat::Tsv, + ] { + let results = vec![ + QueryResults::Boolean(true), + QueryResults::Boolean(false), + QueryResults::Solutions(QuerySolutionIter::new( + [ + Variable::new_unchecked("foo"), + Variable::new_unchecked("bar"), + ] + .as_ref() + .into(), + Box::new( + vec![ + Ok(vec![None, None]), + Ok(vec![ + Some(NamedNode::new_unchecked("http://example.com").into()), + None, + ]), + Ok(vec![ + None, + Some(NamedNode::new_unchecked("http://example.com").into()), + ]), + Ok(vec![ + Some(BlankNode::new_unchecked("foo").into()), + Some(BlankNode::new_unchecked("bar").into()), + ]), + Ok(vec![Some(Literal::new_simple_literal("foo").into()), None]), + Ok(vec![ + Some( + Literal::new_language_tagged_literal_unchecked("foo", "fr") + .into(), + ), + None, + ]), + Ok(vec![ + Some(Literal::from(1).into()), + Some(Literal::from(true).into()), + ]), + Ok(vec![ + Some(Literal::from(1.33).into()), + Some(Literal::from(false).into()), + ]), + Ok(vec![ + Some( + Triple::new( + NamedNode::new_unchecked("http://example.com/s"), + NamedNode::new_unchecked("http://example.com/p"), + Triple::new( + NamedNode::new_unchecked("http://example.com/os"), + NamedNode::new_unchecked("http://example.com/op"), + NamedNode::new_unchecked("http://example.com/oo"), + ), + ) + .into(), + ), + None, + ]), + ] + .into_iter(), + ), + )), + ]; + + for ex in results { + let mut buffer = Vec::new(); + ex.write(&mut buffer, format)?; + let ex2 = QueryResults::read(Cursor::new(buffer.clone()), format)?; + let mut buffer2 = Vec::new(); + ex2.write(&mut buffer2, format)?; + assert_eq!( + str::from_utf8(&buffer).unwrap(), + str::from_utf8(&buffer2).unwrap() + ); + } + } + + Ok(()) + } +} diff --git a/ng-oxigraph/src/oxigraph/sparql/results.rs b/ng-oxigraph/src/oxigraph/sparql/results.rs new file mode 100644 index 0000000..6dea288 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/results.rs @@ -0,0 +1,44 @@ +//! Utilities to read and write RDF results formats using [sparesults](https://crates.io/crates/sparesults). +//! +//! It supports [SPARQL Query Results XML Format (Second Edition)](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/). +//! +//! Usage example converting a JSON result file into a TSV result file: +//! +//! ``` +//! use oxigraph::sparql::results::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader, QueryResultsSerializer}; +//! use std::io::Result; +//! +//! fn convert_json_to_tsv(json_file: &[u8]) -> Result> { +//! let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json); +//! 
let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv); +//! // We start to read the JSON file and see which kind of results it is +//! match json_parser.parse_read(json_file)? { +//! FromReadQueryResultsReader::Boolean(value) => { +//! // it's a boolean result, we copy it in TSV to the output buffer +//! tsv_serializer.serialize_boolean_to_write(Vec::new(), value) +//! } +//! FromReadQueryResultsReader::Solutions(solutions_reader) => { +//! // it's a set of solutions, we create a writer and we write to it while reading in streaming from the JSON file +//! let mut serialize_solutions_to_write = tsv_serializer.serialize_solutions_to_write(Vec::new(), solutions_reader.variables().to_vec())?; +//! for solution in solutions_reader { +//! serialize_solutions_to_write.write(&solution?)?; +//! } +//! serialize_solutions_to_write.finish() +//! } +//! } +//! } +//! +//! // Let's test with a boolean +//! assert_eq!( +//! convert_json_to_tsv(br#"{"boolean":true}"#.as_slice()).unwrap(), +//! b"true" +//! ); +//! +//! // And with a set of solutions +//! assert_eq!( +//! convert_json_to_tsv(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice()).unwrap(), +//! b"?foo\t?bar\n\"test\"\t\n" +//! ); +//! ``` + +pub use crate::sparesults::*; diff --git a/ng-oxigraph/src/oxigraph/sparql/service.rs b/ng-oxigraph/src/oxigraph/sparql/service.rs new file mode 100644 index 0000000..40e9aad --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/service.rs @@ -0,0 +1,124 @@ +use crate::oxigraph::model::NamedNode; +use crate::oxigraph::sparql::algebra::Query; +use crate::oxigraph::sparql::error::EvaluationError; +use crate::oxigraph::sparql::http::Client; +use crate::oxigraph::sparql::model::QueryResults; +use crate::oxigraph::sparql::results::QueryResultsFormat; +use std::error::Error; +use std::time::Duration; + +/// Handler for [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/) SERVICE. +/// +/// Should be given to [`QueryOptions`](super::QueryOptions::with_service_handler()) +/// before evaluating a SPARQL query that uses SERVICE calls. +/// +/// ``` +/// use oxigraph::model::*; +/// use oxigraph::sparql::{EvaluationError, Query, QueryOptions, QueryResults, ServiceHandler}; +/// use oxigraph::store::Store; +/// +/// struct TestServiceHandler { +/// store: Store, +/// } +/// +/// impl ServiceHandler for TestServiceHandler { +/// type Error = EvaluationError; +/// +/// fn handle( +/// &self, +/// service_name: NamedNode, +/// query: Query, +/// ) -> Result { +/// if service_name == "http://example.com/service" { +/// self.store.query(query) +/// } else { +/// panic!() +/// } +/// } +/// } +/// +/// let store = Store::new()?; +/// let service = TestServiceHandler { +/// store: Store::new()?, +/// }; +/// let ex = NamedNodeRef::new("http://example.com")?; +/// service +/// .store +/// .insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; +/// +/// if let QueryResults::Solutions(mut solutions) = store.query_opt( +/// "SELECT ?s WHERE { SERVICE { ?s ?p ?o } }", +/// QueryOptions::default().with_service_handler(service), +/// )? { +/// assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into())); +/// } +/// # Result::<_,Box>::Ok(()) +/// ``` +pub trait ServiceHandler: Send + Sync { + /// The service evaluation error. + type Error: Error + Send + Sync + 'static; + + /// Evaluates a [`Query`] against a given service identified by a [`NamedNode`]. 
+ fn handle(&self, service_name: NamedNode, query: Query) -> Result; +} + +pub struct EmptyServiceHandler; + +impl ServiceHandler for EmptyServiceHandler { + type Error = EvaluationError; + + fn handle(&self, name: NamedNode, _: Query) -> Result { + Err(EvaluationError::UnsupportedService(name)) + } +} + +pub struct ErrorConversionServiceHandler { + handler: S, +} + +impl ErrorConversionServiceHandler { + pub fn wrap(handler: S) -> Self { + Self { handler } + } +} + +impl ServiceHandler for ErrorConversionServiceHandler { + type Error = EvaluationError; + + fn handle(&self, service_name: NamedNode, query: Query) -> Result { + self.handler + .handle(service_name, query) + .map_err(|e| EvaluationError::Service(Box::new(e))) + } +} + +pub struct SimpleServiceHandler { + client: Client, +} + +impl SimpleServiceHandler { + pub fn new(http_timeout: Option, http_redirection_limit: usize) -> Self { + Self { + client: Client::new(http_timeout, http_redirection_limit), + } + } +} + +impl ServiceHandler for SimpleServiceHandler { + type Error = EvaluationError; + + fn handle(&self, service_name: NamedNode, query: Query) -> Result { + let (content_type, body) = self + .client + .post( + service_name.as_str(), + query.to_string().into_bytes(), + "application/sparql-query", + "application/sparql-results+json, application/sparql-results+xml", + ) + .map_err(|e| EvaluationError::Service(Box::new(e)))?; + let format = QueryResultsFormat::from_media_type(&content_type) + .ok_or_else(|| EvaluationError::UnsupportedContentType(content_type))?; + Ok(QueryResults::read(body, format)?) + } +} diff --git a/ng-oxigraph/src/oxigraph/sparql/update.rs b/ng-oxigraph/src/oxigraph/sparql/update.rs new file mode 100644 index 0000000..08a6414 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/sparql/update.rs @@ -0,0 +1,612 @@ +// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// partial Copyright (c) 2018 Oxigraph developers +// All work licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice or not, may not be copied, modified, or distributed except +// according to those terms. 
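Everything in this file sits behind `Store::update()`; for context, a minimal usage sketch, assuming this fork keeps the upstream-style public API (within the patch itself the operations are applied through a `CommitWriter` transaction, as `evaluate_update` below shows):

use oxigraph::store::Store;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let store = Store::new()?;
    // Each GraphUpdateOperation of the update is evaluated in order,
    // within a single transaction.
    store.update(
        "INSERT DATA { <http://example.com/s> <http://example.com/p> <http://example.com/o> }",
    )?;
    assert_eq!(store.len()?, 1);
    Ok(())
}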
+
+use crate::oxigraph::io::{RdfFormat, RdfParser};
+use crate::oxigraph::model::{GraphName as OxGraphName, GraphNameRef, Quad as OxQuad};
+use crate::oxigraph::sparql::algebra::QueryDataset;
+use crate::oxigraph::sparql::dataset::DatasetView;
+use crate::oxigraph::sparql::eval::{EncodedTuple, SimpleEvaluator};
+use crate::oxigraph::sparql::http::Client;
+use crate::oxigraph::sparql::{EvaluationError, Update, UpdateOptions};
+use crate::oxigraph::storage::numeric_encoder::{Decoder, EncodedTerm};
+use crate::oxigraph::storage::CommitWriter;
+use crate::oxrdf::NamedNodeRef;
+use crate::spargebra::algebra::{GraphPattern, GraphTarget};
+use crate::spargebra::term::{
+    BlankNode, GraphName, GraphNamePattern, GroundQuad, GroundQuadPattern, GroundSubject,
+    GroundTerm, GroundTermPattern, GroundTriple, GroundTriplePattern, NamedNode, NamedNodePattern,
+    Quad, QuadPattern, Subject, Term, TermPattern, Triple, TriplePattern, Variable,
+};
+use crate::spargebra::GraphUpdateOperation;
+use crate::sparopt;
+use crate::sparopt::Optimizer;
+use oxiri::Iri;
+use std::collections::HashMap;
+use std::io;
+use std::rc::Rc;
+use std::sync::Arc;
+
+pub fn evaluate_update<'a, 'b: 'a>(
+    transaction: &'a mut CommitWriter<'b>,
+    update: &Update,
+    options: &UpdateOptions,
+) -> Result<(), EvaluationError> {
+    SimpleUpdateEvaluator {
+        transaction,
+        base_iri: update.inner.base_iri.clone().map(Rc::new),
+        options: options.clone(),
+        client: Client::new(
+            options.query_options.http_timeout,
+            options.query_options.http_redirection_limit,
+        ),
+    }
+    .eval_all(&update.inner.operations, &update.using_datasets)
+}
+
+struct SimpleUpdateEvaluator<'a, 'b> {
+    transaction: &'a mut CommitWriter<'b>,
+    base_iri: Option<Rc<Iri<String>>>,
+    options: UpdateOptions,
+    client: Client,
+}
+
+impl<'a, 'b: 'a> SimpleUpdateEvaluator<'a, 'b> {
+    fn eval_all(
+        &mut self,
+        updates: &[GraphUpdateOperation],
+        using_datasets: &[Option<QueryDataset>],
+    ) -> Result<(), EvaluationError> {
+        for (update, using_dataset) in updates.iter().zip(using_datasets) {
+            self.eval(update, using_dataset)?;
+        }
+        Ok(())
+    }
+
+    fn eval(
+        &mut self,
+        update: &GraphUpdateOperation,
+        using_dataset: &Option<QueryDataset>,
+    ) -> Result<(), EvaluationError> {
+        match update {
+            GraphUpdateOperation::InsertData { data } => self.eval_insert_data(data),
+            GraphUpdateOperation::DeleteData { data } => self.eval_delete_data(data),
+            GraphUpdateOperation::DeleteInsert {
+                delete,
+                insert,
+                pattern,
+                ..
+ } => self.eval_delete_insert( + delete, + insert, + using_dataset.as_ref().unwrap_or(&QueryDataset::new()), + pattern, + ), + GraphUpdateOperation::Load { + silent, + source, + destination, + } => { + if let Err(error) = self.eval_load(source, destination) { + if *silent { + Ok(()) + } else { + Err(error) + } + } else { + Ok(()) + } + } + GraphUpdateOperation::Clear { graph, silent } => self.eval_clear(graph, *silent), + GraphUpdateOperation::Create { graph, silent } => self.eval_create(graph, *silent), + GraphUpdateOperation::Drop { graph, silent } => self.eval_drop(graph, *silent), + } + } + + fn eval_insert_data(&mut self, data: &[Quad]) -> Result<(), EvaluationError> { + let mut bnodes = HashMap::new(); + for quad in data { + let mut quad = Self::convert_quad(quad, &mut bnodes); + self.set_default_graph_if_needed(&mut quad); + self.transaction.insert(quad.as_ref())?; + } + Ok(()) + } + + fn eval_delete_data(&mut self, data: &[GroundQuad]) -> Result<(), EvaluationError> { + for quad in data { + let mut quad = Self::convert_ground_quad(quad); + self.set_default_graph_if_needed(&mut quad); + self.transaction.remove(quad.as_ref())?; + } + Ok(()) + } + + fn set_default_graph_if_needed(&self, quad: &mut crate::oxrdf::Quad) { + if quad.graph_name.is_default_graph() { + if let Some(default_graph) = &self.options.query_options.default_graph { + quad.graph_name = crate::oxrdf::GraphName::NamedNode(NamedNode::new_unchecked( + default_graph.clone(), + )); + } + } + } + + fn eval_delete_insert( + &mut self, + delete: &[GroundQuadPattern], + insert: &[QuadPattern], + using: &QueryDataset, + algebra: &GraphPattern, + ) -> Result<(), EvaluationError> { + let dataset = Rc::new(DatasetView::new( + self.transaction.reader(), + using, + self.options.query_options.get_default_graph(), + )); + let mut pattern = sparopt::algebra::GraphPattern::from(algebra); + if !self.options.query_options.without_optimizations { + pattern = Optimizer::optimize_graph_pattern(sparopt::algebra::GraphPattern::Reduced { + inner: Box::new(pattern), + }); + } + let evaluator = SimpleEvaluator::new( + Rc::clone(&dataset), + self.base_iri.clone(), + self.options.query_options.service_handler(), + Arc::new(self.options.query_options.custom_functions.clone()), + false, + ); + let mut variables = Vec::new(); + let mut bnodes = HashMap::new(); + let (eval, _) = evaluator.graph_pattern_evaluator(&pattern, &mut variables); + let tuples = + eval(EncodedTuple::with_capacity(variables.len())).collect::, _>>()?; // TODO: would be much better to stream + for tuple in tuples { + for quad in delete { + if let Some(mut quad) = + Self::convert_ground_quad_pattern(quad, &variables, &tuple, &dataset)? + { + self.set_default_graph_if_needed(&mut quad); + self.transaction.remove(quad.as_ref())?; + } + } + for quad in insert { + if let Some(mut quad) = + Self::convert_quad_pattern(quad, &variables, &tuple, &dataset, &mut bnodes)? 
+ { + self.set_default_graph_if_needed(&mut quad); + self.transaction.insert(quad.as_ref())?; + } + } + bnodes.clear(); + } + Ok(()) + } + + /*if quad.graph_name.is_default_graph() { + if let Some(default_graph) = &self.options.query_options.default_graph { + crate::oxrdf::GraphName::NamedNode(NamedNode::new_unchecked( + default_graph.clone(), + )).into() + } else { + return Err(EvaluationError); + } + } */ + + fn eval_load(&mut self, from: &NamedNode, to: &GraphName) -> Result<(), EvaluationError> { + let (content_type, body) = self + .client + .get( + from.as_str(), + "application/n-triples, text/turtle, application/rdf+xml", + ) + .map_err(|e| EvaluationError::Service(Box::new(e)))?; + let format = RdfFormat::from_media_type(&content_type) + .ok_or_else(|| EvaluationError::UnsupportedContentType(content_type))?; + let to_graph_name = match to { + GraphName::NamedNode(graph_name) => graph_name.into(), + GraphName::DefaultGraph => { + if let Some(default_graph) = &self.options.query_options.default_graph { + GraphNameRef::NamedNode(NamedNodeRef::new_unchecked(&default_graph)) + } else { + return Err(EvaluationError::NoDefaultGraph); + } + } + }; + let mut parser = RdfParser::from_format(format) + .rename_blank_nodes() + .without_named_graphs() + .with_default_graph(to_graph_name); + parser = parser.with_base_iri(from.as_str()).map_err(|e| { + EvaluationError::Service(Box::new(io::Error::new( + io::ErrorKind::InvalidInput, + format!("Invalid URL: {from}: {e}"), + ))) + })?; + for q in parser.parse_read(body) { + self.transaction.insert(q?.as_ref())?; + } + Ok(()) + } + + fn eval_create(&mut self, graph_name: &NamedNode, silent: bool) -> Result<(), EvaluationError> { + if self.transaction.insert_named_graph(graph_name.into())? || silent { + Ok(()) + } else { + Err(EvaluationError::GraphAlreadyExists(graph_name.clone())) + } + } + + fn eval_clear(&mut self, graph: &GraphTarget, silent: bool) -> Result<(), EvaluationError> { + unimplemented!(); + // match graph { + // GraphTarget::NamedNode(graph_name) => { + // if self + // .transaction + // .reader() + // .contains_named_graph(&graph_name.as_ref().into())? + // { + // Ok(self.transaction.clear_graph(graph_name.into())?) + // } else if silent { + // Ok(()) + // } else { + // Err(EvaluationError::GraphDoesNotExist(graph_name.clone())) + // } + // } + // GraphTarget::DefaultGraph => { + // self.transaction.clear_graph(GraphNameRef::DefaultGraph)?; + // Ok(()) + // } + // GraphTarget::NamedGraphs => Ok(self.transaction.clear_all_named_graphs()?), + // GraphTarget::AllGraphs => Ok(self.transaction.clear_all_graphs()?), + // } + } + + fn eval_drop(&mut self, graph: &GraphTarget, silent: bool) -> Result<(), EvaluationError> { + match graph { + GraphTarget::NamedNode(graph_name) => { + if self.transaction.remove_named_graph(graph_name.into())? || silent { + Ok(()) + } else { + Err(EvaluationError::GraphDoesNotExist(graph_name.clone())) + } + } + GraphTarget::DefaultGraph => { + Ok(self.transaction.clear_graph(GraphNameRef::DefaultGraph)?) 
+            }
+            GraphTarget::NamedGraphs => Ok(self.transaction.remove_all_named_graphs()?),
+            GraphTarget::AllGraphs => Ok(self.transaction.clear()?),
+        }
+    }
+
+    fn convert_quad(quad: &Quad, bnodes: &mut HashMap<BlankNode, BlankNode>) -> OxQuad {
+        OxQuad {
+            subject: match &quad.subject {
+                Subject::NamedNode(subject) => subject.clone().into(),
+                Subject::BlankNode(subject) => Self::convert_blank_node(subject, bnodes).into(),
+                Subject::Triple(subject) => Self::convert_triple(subject, bnodes).into(),
+            },
+            predicate: quad.predicate.clone(),
+            object: match &quad.object {
+                Term::NamedNode(object) => object.clone().into(),
+                Term::BlankNode(object) => Self::convert_blank_node(object, bnodes).into(),
+                Term::Literal(object) => object.clone().into(),
+                Term::Triple(subject) => Self::convert_triple(subject, bnodes).into(),
+            },
+            graph_name: match &quad.graph_name {
+                GraphName::NamedNode(graph_name) => graph_name.clone().into(),
+                GraphName::DefaultGraph => OxGraphName::DefaultGraph,
+            },
+        }
+    }
+
+    fn convert_triple(triple: &Triple, bnodes: &mut HashMap<BlankNode, BlankNode>) -> Triple {
+        Triple {
+            subject: match &triple.subject {
+                Subject::NamedNode(subject) => subject.clone().into(),
+                Subject::BlankNode(subject) => Self::convert_blank_node(subject, bnodes).into(),
+                Subject::Triple(subject) => Self::convert_triple(subject, bnodes).into(),
+            },
+            predicate: triple.predicate.clone(),
+            object: match &triple.object {
+                Term::NamedNode(object) => object.clone().into(),
+                Term::BlankNode(object) => Self::convert_blank_node(object, bnodes).into(),
+                Term::Literal(object) => object.clone().into(),
+                Term::Triple(subject) => Self::convert_triple(subject, bnodes).into(),
+            },
+        }
+    }
+
+    fn convert_blank_node(
+        node: &BlankNode,
+        bnodes: &mut HashMap<BlankNode, BlankNode>,
+    ) -> BlankNode {
+        bnodes.entry(node.clone()).or_default().clone()
+    }
+
+    fn convert_ground_quad(quad: &GroundQuad) -> OxQuad {
+        OxQuad {
+            subject: match &quad.subject {
+                GroundSubject::NamedNode(subject) => subject.clone().into(),
+                GroundSubject::Triple(subject) => Self::convert_ground_triple(subject).into(),
+            },
+            predicate: quad.predicate.clone(),
+            object: match &quad.object {
+                GroundTerm::NamedNode(object) => object.clone().into(),
+                GroundTerm::Literal(object) => object.clone().into(),
+                GroundTerm::Triple(subject) => Self::convert_ground_triple(subject).into(),
+            },
+            graph_name: match &quad.graph_name {
+                GraphName::NamedNode(graph_name) => graph_name.clone().into(),
+                GraphName::DefaultGraph => OxGraphName::DefaultGraph,
+            },
+        }
+    }
+
+    fn convert_ground_triple(triple: &GroundTriple) -> Triple {
+        Triple {
+            subject: match &triple.subject {
+                GroundSubject::NamedNode(subject) => subject.clone().into(),
+                GroundSubject::Triple(subject) => Self::convert_ground_triple(subject).into(),
+            },
+            predicate: triple.predicate.clone(),
+            object: match &triple.object {
+                GroundTerm::NamedNode(object) => object.clone().into(),
+                GroundTerm::Literal(object) => object.clone().into(),
+                GroundTerm::Triple(subject) => Self::convert_ground_triple(subject).into(),
+            },
+        }
+    }
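+
+    // Note on blank nodes: following SPARQL 1.1 Update semantics, a blank node
+    // in an INSERT template stands for a fresh node, not for a node already in
+    // the store. `convert_blank_node` above implements this with the `bnodes`
+    // map: `entry(..).or_default()` mints a brand new random `BlankNode` the
+    // first time a template label is seen and reuses it for later occurrences,
+    // and `eval_delete_insert` calls `bnodes.clear()` after each solution so
+    // every solution gets its own set of fresh nodes. Illustrative sketch
+    // (simplified, not part of this file):
+    //
+    //     let mut bnodes: HashMap<BlankNode, BlankNode> = HashMap::new();
+    //     let a1 = bnodes.entry(b.clone()).or_default().clone();
+    //     let a2 = bnodes.entry(b.clone()).or_default().clone();
+    //     assert_eq!(a1, a2); // same label, same fresh node...
+    //     bnodes.clear(); // ...until the next solution
+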
+    fn convert_quad_pattern(
+        quad: &QuadPattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+        bnodes: &mut HashMap<BlankNode, BlankNode>,
+    ) -> Result<Option<OxQuad>, EvaluationError> {
+        Ok(Some(OxQuad {
+            subject: match Self::convert_term_or_var(
+                &quad.subject,
+                variables,
+                values,
+                dataset,
+                bnodes,
+            )? {
+                Some(Term::NamedNode(node)) => node.into(),
+                Some(Term::BlankNode(node)) => node.into(),
+                Some(Term::Triple(triple)) => triple.into(),
+                Some(Term::Literal(_)) | None => return Ok(None),
+            },
+            predicate: if let Some(predicate) =
+                Self::convert_named_node_or_var(&quad.predicate, variables, values, dataset)?
+            {
+                predicate
+            } else {
+                return Ok(None);
+            },
+            object: if let Some(object) =
+                Self::convert_term_or_var(&quad.object, variables, values, dataset, bnodes)?
+            {
+                object
+            } else {
+                return Ok(None);
+            },
+            graph_name: if let Some(graph_name) =
+                Self::convert_graph_name_or_var(&quad.graph_name, variables, values, dataset)?
+            {
+                graph_name
+            } else {
+                return Ok(None);
+            },
+        }))
+    }
+
+    fn convert_term_or_var(
+        term: &TermPattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+        bnodes: &mut HashMap<BlankNode, BlankNode>,
+    ) -> Result<Option<Term>, EvaluationError> {
+        Ok(match term {
+            TermPattern::NamedNode(term) => Some(term.clone().into()),
+            TermPattern::BlankNode(bnode) => Some(Self::convert_blank_node(bnode, bnodes).into()),
+            TermPattern::Literal(term) => Some(term.clone().into()),
+            TermPattern::Triple(triple) => {
+                Self::convert_triple_pattern(triple, variables, values, dataset, bnodes)?
+                    .map(Into::into)
+            }
+            TermPattern::Variable(v) => Self::lookup_variable(v, variables, values)
+                .map(|node| dataset.decode_term(&node))
+                .transpose()?,
+        })
+    }
+
+    fn convert_named_node_or_var(
+        term: &NamedNodePattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+    ) -> Result<Option<NamedNode>, EvaluationError> {
+        Ok(match term {
+            NamedNodePattern::NamedNode(term) => Some(term.clone()),
+            NamedNodePattern::Variable(v) => Self::lookup_variable(v, variables, values)
+                .map(|node| dataset.decode_named_node(&node))
+                .transpose()?,
+        })
+    }
+
+    fn convert_graph_name_or_var(
+        term: &GraphNamePattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+    ) -> Result<Option<OxGraphName>, EvaluationError> {
+        match term {
+            GraphNamePattern::NamedNode(term) => Ok(Some(term.clone().into())),
+            GraphNamePattern::DefaultGraph => Ok(Some(OxGraphName::DefaultGraph)),
+            GraphNamePattern::Variable(v) => Self::lookup_variable(v, variables, values)
+                .map(|node| {
+                    Ok(if node == EncodedTerm::DefaultGraph {
+                        OxGraphName::DefaultGraph
+                    } else {
+                        dataset.decode_named_node(&node)?.into()
+                    })
+                })
+                .transpose(),
+        }
+    }
+
+    fn convert_triple_pattern(
+        triple: &TriplePattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+        bnodes: &mut HashMap<BlankNode, BlankNode>,
+    ) -> Result<Option<Triple>, EvaluationError> {
+        Ok(Some(Triple {
+            subject: match Self::convert_term_or_var(
+                &triple.subject,
+                variables,
+                values,
+                dataset,
+                bnodes,
+            )? {
+                Some(Term::NamedNode(node)) => node.into(),
+                Some(Term::BlankNode(node)) => node.into(),
+                Some(Term::Triple(triple)) => triple.into(),
+                Some(Term::Literal(_)) | None => return Ok(None),
+            },
+            predicate: if let Some(predicate) =
+                Self::convert_named_node_or_var(&triple.predicate, variables, values, dataset)?
+            {
+                predicate
+            } else {
+                return Ok(None);
+            },
+            object: if let Some(object) =
+                Self::convert_term_or_var(&triple.object, variables, values, dataset, bnodes)?
+            {
+                object
+            } else {
+                return Ok(None);
+            },
+        }))
+    }
+
+    fn convert_ground_quad_pattern(
+        quad: &GroundQuadPattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+    ) -> Result<Option<OxQuad>, EvaluationError> {
+        Ok(Some(OxQuad {
+            subject: match Self::convert_ground_term_or_var(
+                &quad.subject,
+                variables,
+                values,
+                dataset,
+            )? {
+                Some(Term::NamedNode(node)) => node.into(),
+                Some(Term::BlankNode(node)) => node.into(),
+                Some(Term::Triple(triple)) => triple.into(),
+                Some(Term::Literal(_)) | None => return Ok(None),
+            },
+            predicate: if let Some(predicate) =
+                Self::convert_named_node_or_var(&quad.predicate, variables, values, dataset)?
+            {
+                predicate
+            } else {
+                return Ok(None);
+            },
+            object: if let Some(object) =
+                Self::convert_ground_term_or_var(&quad.object, variables, values, dataset)?
+            {
+                object
+            } else {
+                return Ok(None);
+            },
+            graph_name: if let Some(graph_name) =
+                Self::convert_graph_name_or_var(&quad.graph_name, variables, values, dataset)?
+            {
+                graph_name
+            } else {
+                return Ok(None);
+            },
+        }))
+    }
+
+    fn convert_ground_term_or_var(
+        term: &GroundTermPattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+    ) -> Result<Option<Term>, EvaluationError> {
+        Ok(match term {
+            GroundTermPattern::NamedNode(term) => Some(term.clone().into()),
+            GroundTermPattern::Literal(term) => Some(term.clone().into()),
+            GroundTermPattern::Triple(triple) => {
+                Self::convert_ground_triple_pattern(triple, variables, values, dataset)?
+                    .map(Into::into)
+            }
+            GroundTermPattern::Variable(v) => Self::lookup_variable(v, variables, values)
+                .map(|node| dataset.decode_term(&node))
+                .transpose()?,
+        })
+    }
+
+    fn convert_ground_triple_pattern(
+        triple: &GroundTriplePattern,
+        variables: &[Variable],
+        values: &EncodedTuple,
+        dataset: &DatasetView,
+    ) -> Result<Option<Triple>, EvaluationError> {
+        Ok(Some(Triple {
+            subject: match Self::convert_ground_term_or_var(
+                &triple.subject,
+                variables,
+                values,
+                dataset,
+            )? {
+                Some(Term::NamedNode(node)) => node.into(),
+                Some(Term::BlankNode(node)) => node.into(),
+                Some(Term::Triple(triple)) => triple.into(),
+                Some(Term::Literal(_)) | None => return Ok(None),
+            },
+            predicate: if let Some(predicate) =
+                Self::convert_named_node_or_var(&triple.predicate, variables, values, dataset)?
+            {
+                predicate
+            } else {
+                return Ok(None);
+            },
+            object: if let Some(object) =
+                Self::convert_ground_term_or_var(&triple.object, variables, values, dataset)?
+            {
+                object
+            } else {
+                return Ok(None);
+            },
+        }))
+    }
+
+    fn lookup_variable(
+        v: &Variable,
+        variables: &[Variable],
+        values: &EncodedTuple,
+    ) -> Option<EncodedTerm> {
+        variables
+            .iter()
+            .position(|v2| v == v2)
+            .and_then(|i| values.get(i))
+            .cloned()
+    }
+}
diff --git a/ng-oxigraph/src/oxigraph/storage/backend/fallback.rs b/ng-oxigraph/src/oxigraph/storage/backend/fallback.rs
new file mode 100644
index 0000000..519497c
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/storage/backend/fallback.rs
@@ -0,0 +1,420 @@
+// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// partial Copyright (c) 2018 Oxigraph developers
+// All work licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice or not, may not be copied, modified, or distributed except
+// according to those terms.
+
+//! TODO: This storage is dramatically naive.
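+//!
+//! All data lives in one `BTreeMap<Vec<u8>, Vec<u8>>` per column family behind
+//! a single `RwLock`, so a transaction holds the global write lock for its
+//! whole duration. Rollback is journal based: `Transaction::insert`/`remove`
+//! record the pre-image of every touched key in the `inserts`/`removes` maps,
+//! and `rollback` replays those pre-images. A minimal sketch of the journal
+//! idea, with simplified types (illustration only, not this module's API):
+//!
+//! ```ignore
+//! use std::collections::BTreeMap;
+//! let mut map: BTreeMap<Vec<u8>, Vec<u8>> = BTreeMap::new();
+//! let mut journal: Vec<(Vec<u8>, Option<Vec<u8>>)> = Vec::new();
+//! let prev = map.insert(b"k".to_vec(), b"v".to_vec());
+//! journal.push((b"k".to_vec(), prev)); // remember the pre-image
+//! // on rollback, restore pre-images in reverse order:
+//! for (k, prev) in journal.drain(..).rev() {
+//!     match prev {
+//!         Some(v) => { map.insert(k, v); }
+//!         None => { map.remove(&k); }
+//!     }
+//! }
+//! ```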
+
+use super::super::numeric_encoder::StrHash;
+use crate::oxigraph::storage::StorageError;
+use crate::oxigraph::store::CorruptionError;
+use std::cell::RefCell;
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::error::Error;
+use std::mem::transmute;
+use std::rc::{Rc, Weak};
+use std::sync::{Arc, RwLock, RwLockWriteGuard};
+
+pub struct ColumnFamilyDefinition {
+    pub name: &'static str,
+    pub use_iter: bool,
+    pub min_prefix_size: usize,
+    pub unordered_writes: bool,
+}
+
+#[derive(Clone)]
+pub struct Db {
+    db: Arc<RwLock<HashMap<ColumnFamily, BTreeMap<Vec<u8>, Vec<u8>>>>>,
+    pub past_commits_cache: Arc<RwLock<HashMap<StrHash, Arc<HashSet<StrHash>>>>>,
+}
+
+impl Db {
+    pub(crate) fn past_commits_cache(
+        &self,
+    ) -> Arc<RwLock<HashMap<StrHash, Arc<HashSet<StrHash>>>>> {
+        Arc::clone(&self.past_commits_cache)
+    }
+
+    #[allow(clippy::unnecessary_wraps)]
+    pub fn new(column_families: Vec<ColumnFamilyDefinition>) -> Result<Self, StorageError> {
+        let mut trees = HashMap::new();
+        for cf in column_families {
+            trees.insert(ColumnFamily(cf.name), BTreeMap::default());
+        }
+        trees.entry(ColumnFamily("default")).or_default(); // We make sure that "default" key exists.
+        Ok(Self {
+            db: Arc::new(RwLock::new(trees)),
+            past_commits_cache: Arc::new(RwLock::new(HashMap::new())),
+        })
+    }
+
+    #[allow(clippy::unwrap_in_result)]
+    pub fn column_family(&self, name: &'static str) -> Result<ColumnFamily, StorageError> {
+        let column_family = ColumnFamily(name);
+        if self.db.read().unwrap().contains_key(&column_family) {
+            Ok(column_family)
+        } else {
+            Err(CorruptionError::from_missing_column_family_name(name).into())
+        }
+    }
+
+    #[must_use]
+    pub fn snapshot(&self) -> Reader {
+        Reader(InnerReader::Simple(Arc::clone(&self.db)))
+    }
+
+    #[allow(clippy::unwrap_in_result)]
+    pub fn transaction<'a, 'b: 'a, T, E: Error + 'static + From<StorageError>>(
+        &'b self,
+        f: impl Fn(Transaction<'a>) -> Result<T, E>,
+    ) -> Result<T, E> {
+        let mut t = Transaction::new(Rc::new(RefCell::new(self.db.write().unwrap())));
+        let res = f(t.clone());
+        t.rollback();
+        res
+    }
+
+    pub fn ng_transaction<'a, 'b: 'a, T, E: Error + 'static + From<StorageError>>(
+        &'b self,
+        mut f: impl FnMut(Transaction<'a>) -> Result<T, E>,
+    ) -> Result<T, E> {
+        let mut t = Transaction::new(Rc::new(RefCell::new(self.db.write().unwrap())));
+        let res = f(t.clone());
+        if res.is_err() {
+            t.rollback();
+        }
+        res
+    }
+}
+
+#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct ColumnFamily(&'static str);
+
+#[derive(Clone)]
+pub struct Reader(InnerReader);
+
+#[derive(Clone)]
+enum InnerReader {
+    Simple(Arc<RwLock<HashMap<ColumnFamily, BTreeMap<Vec<u8>, Vec<u8>>>>>),
+    Transaction(
+        Weak<RefCell<RwLockWriteGuard<'static, HashMap<ColumnFamily, BTreeMap<Vec<u8>, Vec<u8>>>>>>,
+    ),
+}
+
+impl Reader {
+    #[allow(clippy::unwrap_in_result)]
+    pub fn get(
+        &self,
+        column_family: &ColumnFamily,
+        key: &[u8],
+    ) -> Result<Option<Vec<u8>>, StorageError> {
+        match &self.0 {
+            InnerReader::Simple(reader) => Ok(reader
+                .read()
+                .unwrap()
+                .get(column_family)
+                .and_then(|cf| cf.get(key).cloned())),
+            InnerReader::Transaction(reader) => {
+                if let Some(reader) = reader.upgrade() {
+                    Ok((*reader)
+                        .borrow()
+                        .get(column_family)
+                        .and_then(|cf| cf.get(key).cloned()))
+                } else {
+                    Err(StorageError::Other(
+                        "The transaction is already ended".into(),
+                    ))
+                }
+            }
+        }
+    }
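+
+    // A `Reader` created from a `Transaction` only keeps a `Weak` reference to
+    // the transaction's state; once the transaction has ended, `upgrade()`
+    // fails and we report "The transaction is already ended" rather than
+    // touching freed state. The same pattern repeats in all accessors below.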
+
+    #[allow(clippy::unwrap_in_result)]
+    pub fn contains_key(
+        &self,
+        column_family: &ColumnFamily,
+        key: &[u8],
+    ) -> Result<bool, StorageError> {
+        match &self.0 {
+            InnerReader::Simple(reader) => Ok(reader
+                .read()
+                .unwrap()
+                .get(column_family)
+                .map_or(false, |cf| cf.contains_key(key))),
+            InnerReader::Transaction(reader) => {
+                if let Some(reader) = reader.upgrade() {
+                    Ok((*reader)
+                        .borrow()
+                        .get(column_family)
+                        .map_or(false, |cf| cf.contains_key(key)))
+                } else {
+                    Err(StorageError::Other(
+                        "The transaction is already ended".into(),
+                    ))
+                }
+            }
+        }
+    }
+
+    #[allow(clippy::iter_not_returning_iterator)]
+    pub fn iter(&self, column_family: &ColumnFamily) -> Result<Iter, StorageError> {
+        self.scan_prefix(column_family, &[])
+    }
+
+    #[allow(clippy::unwrap_in_result)]
+    pub fn scan_prefix(
+        &self,
+        column_family: &ColumnFamily,
+        prefix: &[u8],
+    ) -> Result<Iter, StorageError> {
+        let data: Vec<_> = match &self.0 {
+            InnerReader::Simple(reader) => {
+                let trees = reader.read().unwrap();
+                let Some(tree) = trees.get(column_family) else {
+                    return Ok(Iter {
+                        iter: Vec::new().into_iter(),
+                        current: None,
+                    });
+                };
+                if prefix.is_empty() {
+                    tree.iter().map(|(k, v)| (k.clone(), v.clone())).collect()
+                } else {
+                    tree.range(prefix.to_vec()..)
+                        .take_while(|(k, _)| k.starts_with(prefix))
+                        .map(|(k, v)| (k.clone(), v.clone()))
+                        .collect()
+                }
+            }
+            InnerReader::Transaction(reader) => {
+                let Some(reader) = reader.upgrade() else {
+                    return Err(StorageError::Other(
+                        "The transaction is already ended".into(),
+                    ));
+                };
+                let trees = (*reader).borrow();
+                let Some(tree) = trees.get(column_family) else {
+                    return Ok(Iter {
+                        iter: Vec::new().into_iter(),
+                        current: None,
+                    });
+                };
+                if prefix.is_empty() {
+                    tree.iter().map(|(k, v)| (k.clone(), v.clone())).collect()
+                } else {
+                    tree.range(prefix.to_vec()..)
+                        .take_while(|(k, _)| k.starts_with(prefix))
+                        .map(|(k, v)| (k.clone(), v.clone()))
+                        .collect()
+                }
+            }
+        };
+        let mut iter = data.into_iter();
+        let current = iter.next();
+        Ok(Iter { iter, current })
+    }
+
+    #[allow(clippy::unwrap_in_result)]
+    pub fn len(&self, column_family: &ColumnFamily) -> Result<usize, StorageError> {
+        match &self.0 {
+            InnerReader::Simple(reader) => Ok(reader
+                .read()
+                .unwrap()
+                .get(column_family)
+                .map_or(0, BTreeMap::len)),
+            InnerReader::Transaction(reader) => {
+                if let Some(reader) = reader.upgrade() {
+                    Ok((*reader)
+                        .borrow()
+                        .get(column_family)
+                        .map_or(0, BTreeMap::len))
+                } else {
+                    Err(StorageError::Other(
+                        "The transaction is already ended".into(),
+                    ))
+                }
+            }
+        }
+    }
+
+    #[allow(clippy::unwrap_in_result)]
+    pub fn is_empty(&self, column_family: &ColumnFamily) -> Result<bool, StorageError> {
+        match &self.0 {
+            InnerReader::Simple(reader) => Ok(reader
+                .read()
+                .unwrap()
+                .get(column_family)
+                .map_or(true, BTreeMap::is_empty)),
+            InnerReader::Transaction(reader) => {
+                if let Some(reader) = reader.upgrade() {
+                    Ok((*reader)
+                        .borrow()
+                        .get(column_family)
+                        .map_or(true, BTreeMap::is_empty))
+                } else {
+                    Err(StorageError::Other(
+                        "The transaction is already ended".into(),
+                    ))
+                }
+            }
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct Transaction<'a> {
+    db: Rc<RefCell<RwLockWriteGuard<'a, HashMap<ColumnFamily, BTreeMap<Vec<u8>, Vec<u8>>>>>>,
+    inserts: Rc<RwLock<HashMap<(ColumnFamily, Vec<u8>), Option<Vec<u8>>>>>,
+    removes: Rc<RwLock<HashMap<(ColumnFamily, Vec<u8>), Vec<u8>>>>,
+}
+
+impl<'a> Transaction<'a> {
+    fn new(
+        db: Rc<RefCell<RwLockWriteGuard<'a, HashMap<ColumnFamily, BTreeMap<Vec<u8>, Vec<u8>>>>>>,
+    ) -> Self {
+        Transaction {
+            db,
+            inserts: Rc::new(RwLock::new(HashMap::new())),
+            removes: Rc::new(RwLock::new(HashMap::new())),
+        }
+    }
+
+    #[allow(unsafe_code, clippy::useless_transmute)]
+    pub fn reader(&self) -> Reader {
+        // SAFETY: This transmute is safe because we take a weak reference and the only Rc reference used is guarded by the lifetime.
+        Reader(InnerReader::Transaction(Rc::downgrade(unsafe {
+            transmute(&self.db)
+        })))
+    }
+
+    #[allow(clippy::unnecessary_wraps)]
+    pub fn contains_key_for_update(
+        &self,
+        column_family: &ColumnFamily,
+        key: &[u8],
+    ) -> Result<bool, StorageError> {
+        Ok((*self.db)
+            .borrow()
+            .get(column_family)
+            .map_or(false, |cf| cf.contains_key(key)))
+    }
+
+    fn rollback(&mut self) {
+        let inserts = self.inserts.read().unwrap();
+        for ((column_family, key), val) in inserts.iter() {
+            if val.is_some() {
+                // restore the original value
+                self.db
+                    .borrow_mut()
+                    .get_mut(&column_family)
+                    .unwrap()
+                    .insert(key.to_vec(), val.as_ref().unwrap().to_vec());
+            } else {
+                // the key did not exist before: we remove it
+                self.db
+                    .borrow_mut()
+                    .get_mut(&column_family)
+                    .unwrap()
+                    .remove(key.into());
+            }
+        }
+        let removes = self.removes.read().unwrap();
+        for ((column_family, key), val) in removes.iter() {
+            // restore the original value
+            self.db
+                .borrow_mut()
+                .get_mut(&column_family)
+                .unwrap()
+                .insert(key.to_vec(), val.to_vec());
+        }
+    }
+
+    #[allow(clippy::unnecessary_wraps, clippy::unwrap_in_result)]
+    pub fn insert(
+        &mut self,
+        column_family: &ColumnFamily,
+        key: &[u8],
+        value: &[u8],
+    ) -> Result<(), StorageError> {
+        let mut previous_val = self
+            .db
+            .borrow_mut()
+            .get_mut(column_family)
+            .unwrap()
+            .insert(key.into(), value.into());
+        let key = (column_family.clone(), key.to_vec());
+        let previous_val2 = self.removes.write().unwrap().remove(&key);
+        if previous_val.is_none() && previous_val2.is_some() {
+            previous_val = previous_val2;
+        }
+        let mut inserts = self.inserts.write().unwrap();
+        if !inserts.contains_key(&key) {
+            inserts.insert(key, previous_val);
+        }
+
+        Ok(())
+    }
+
+    pub fn insert_empty(
+        &mut self,
+        column_family: &ColumnFamily,
+        key: &[u8],
+    ) -> Result<(), StorageError> {
+        self.insert(column_family, key, &[])
+    }
+
+    #[allow(clippy::unnecessary_wraps, clippy::unwrap_in_result)]
+    pub fn remove(&mut self, column_family: &ColumnFamily, key: &[u8]) -> Result<(), StorageError> {
+        let mut val = self
+            .db
+            .borrow_mut()
+            .get_mut(column_family)
+            .unwrap()
+            .remove(key);
+        let val2 = self
+            .inserts
+            .write()
+            .unwrap()
+            .remove(&(column_family.clone(), key.to_vec()));
+        if val2.is_some() {
+            // we prefer the value in inserts as it may contain the original value after several inserts on the same key.
+            val = val2.unwrap();
+        }
+        if let Some(val) = val {
+            self.removes
+                .write()
+                .unwrap()
+                .insert((column_family.clone(), key.to_vec()), val.to_vec());
+        }
+        Ok(())
+    }
+}
+
+pub struct Iter {
+    iter: std::vec::IntoIter<(Vec<u8>, Vec<u8>)>,
+    current: Option<(Vec<u8>, Vec<u8>)>,
+}
+
+impl Iter {
+    pub fn key(&self) -> Option<&[u8]> {
+        Some(&self.current.as_ref()?.0)
+    }
+
+    #[allow(dead_code)]
+    pub fn value(&self) -> Option<&[u8]> {
+        Some(&self.current.as_ref()?.1)
+    }
+
+    pub fn next(&mut self) {
+        self.current = self.iter.next();
+    }
+
+    #[allow(clippy::unnecessary_wraps, clippy::unused_self)]
+    pub fn status(&self) -> Result<(), StorageError> {
+        Ok(())
+    }
+}
diff --git a/ng-oxigraph/src/oxigraph/storage/backend/mod.rs b/ng-oxigraph/src/oxigraph/storage/backend/mod.rs
new file mode 100644
index 0000000..f42e21a
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/storage/backend/mod.rs
@@ -0,0 +1,12 @@
+//! A storage backend:
+//! RocksDB when available, otherwise a naive in-memory fallback.
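+//!
+//! The concrete backend is chosen at compile time below: the RocksDB
+//! implementation on native targets, the in-memory fallback on `wasm`
+//! targets and for docs.rs builds.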
+
+#[cfg(any(target_family = "wasm", docsrs))]
+pub use fallback::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction};
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+pub use oxi_rocksdb::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction};
+
+#[cfg(any(target_family = "wasm", docsrs))]
+mod fallback;
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+mod oxi_rocksdb;
diff --git a/ng-oxigraph/src/oxigraph/storage/backend/oxi_rocksdb.rs b/ng-oxigraph/src/oxigraph/storage/backend/oxi_rocksdb.rs
new file mode 100644
index 0000000..fa7ad83
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/storage/backend/oxi_rocksdb.rs
@@ -0,0 +1,1567 @@
+// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// partial Copyright (c) 2018 Oxigraph developers
+// All work licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice or not, may not be copied, modified, or distributed except
+// according to those terms.
+
+//! Code inspired by [Rust RocksDB](https://github.com/rust-rocksdb/rust-rocksdb) under Apache License 2.0.
+
+#![allow(
+    unsafe_code,
+    trivial_casts,
+    clippy::undocumented_unsafe_blocks,
+    clippy::panic_in_result_fn,
+    clippy::unwrap_in_result
+)]
+
+use super::super::numeric_encoder::StrHash;
+use crate::oxigraph::storage::error::{CorruptionError, StorageError};
+use libc::{c_char, c_void};
+use ng_rocksdb::ffi::*;
+use rand::random;
+use std::borrow::Borrow;
+#[cfg(unix)]
+use std::cmp::min;
+use std::collections::{HashMap, HashSet};
+use std::env::temp_dir;
+use std::error::Error;
+use std::ffi::{CStr, CString};
+use std::fs::remove_dir_all;
+use std::marker::PhantomData;
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
+use std::rc::{Rc, Weak};
+use std::sync::{Arc, RwLock};
+use std::thread::{available_parallelism, yield_now};
+use std::{fmt, io, ptr, slice};
+
+pub fn opt_bytes_to_ptr<T: AsRef<[u8]>>(opt: Option<T>) -> *const c_char {
+    match opt {
+        Some(v) => v.as_ref().as_ptr() as *const c_char,
+        None => ptr::null(),
+    }
+}
+
+macro_rules! ffi_result {
+    ( $($function:ident)::*( $arg1:expr $(, $arg:expr)* $(,)?
) ) => {{ + let mut status = rocksdb_status_t { + code: rocksdb_status_code_t_rocksdb_status_code_ok, + subcode: rocksdb_status_subcode_t_rocksdb_status_subcode_none, + severity: rocksdb_status_severity_t_rocksdb_status_severity_none, + string: ptr::null() + }; + let result = $($function)::*($arg1 $(, $arg)* , &mut status); + if status.code == rocksdb_status_code_t_rocksdb_status_code_ok { + Ok(result) + } else { + Err(ErrorStatus(status)) + } + }} +} + +pub struct ColumnFamilyDefinition { + pub name: &'static str, + pub use_iter: bool, + pub min_prefix_size: usize, + pub unordered_writes: bool, +} + +#[derive(Clone)] +pub struct Db { + inner: DbKind, +} + +#[derive(Clone)] +enum DbKind { + ReadOnly(Arc), + ReadWrite(Arc), +} + +impl Db { + pub(crate) fn past_commits_cache( + &self, + ) -> Arc>>>> { + match &self.inner { + DbKind::ReadWrite(rw) => Arc::clone(&rw.past_commits_cache), + _ => panic!("rw not implemented for read only DbKind"), + } + } +} + +struct RwDbHandler { + db: *mut rocksdb_transactiondb_t, + env: UnsafeEnv, + options: *mut rocksdb_options_t, + transaction_options: *mut rocksdb_transaction_options_t, + transactiondb_options: *mut rocksdb_transactiondb_options_t, + read_options: *mut rocksdb_readoptions_t, + write_options: *mut rocksdb_writeoptions_t, + flush_options: *mut rocksdb_flushoptions_t, + env_options: *mut rocksdb_envoptions_t, + ingest_external_file_options: *mut rocksdb_ingestexternalfileoptions_t, + compaction_options: *mut rocksdb_compactoptions_t, + block_based_table_options: *mut rocksdb_block_based_table_options_t, + column_family_names: Vec<&'static str>, + cf_handles: Vec<*mut rocksdb_column_family_handle_t>, + cf_options: Vec<*mut rocksdb_options_t>, + in_memory: bool, + path: PathBuf, + past_commits_cache: Arc>>>>, +} + +unsafe impl Send for RwDbHandler {} + +unsafe impl Sync for RwDbHandler {} + +impl Drop for RwDbHandler { + fn drop(&mut self) { + unsafe { + for cf_handle in &self.cf_handles { + rocksdb_column_family_handle_destroy(*cf_handle); + } + rocksdb_transactiondb_close(self.db); + for cf_option in &self.cf_options { + rocksdb_options_destroy(*cf_option); + } + rocksdb_readoptions_destroy(self.read_options); + rocksdb_writeoptions_destroy(self.write_options); + rocksdb_flushoptions_destroy(self.flush_options); + rocksdb_envoptions_destroy(self.env_options); + rocksdb_ingestexternalfileoptions_destroy(self.ingest_external_file_options); + rocksdb_compactoptions_destroy(self.compaction_options); + rocksdb_transaction_options_destroy(self.transaction_options); + rocksdb_transactiondb_options_destroy(self.transactiondb_options); + rocksdb_options_destroy(self.options); + rocksdb_block_based_options_destroy(self.block_based_table_options); + rocksdb_env_destroy(self.env.0); + } + if self.in_memory { + drop(remove_dir_all(&self.path)); + } + } +} + +struct RoDbHandler { + db: *mut rocksdb_t, + env: UnsafeEnv, + options: *mut rocksdb_options_t, + read_options: *mut rocksdb_readoptions_t, + column_family_names: Vec<&'static str>, + cf_handles: Vec<*mut rocksdb_column_family_handle_t>, + cf_options: Vec<*mut rocksdb_options_t>, + is_secondary: bool, + path_to_remove: Option, +} + +unsafe impl Send for RoDbHandler {} + +unsafe impl Sync for RoDbHandler {} + +impl Drop for RoDbHandler { + fn drop(&mut self) { + unsafe { + for cf_handle in &self.cf_handles { + rocksdb_column_family_handle_destroy(*cf_handle); + } + rocksdb_close(self.db); + for cf_option in &self.cf_options { + rocksdb_options_destroy(*cf_option); + } + 
rocksdb_readoptions_destroy(self.read_options); + rocksdb_options_destroy(self.options); + rocksdb_env_destroy(self.env.0); + } + if let Some(path) = &self.path_to_remove { + drop(remove_dir_all(path)); + } + } +} + +impl Db { + pub fn new(column_families: Vec) -> Result { + Self::open_read_write(None, column_families, None) + } + + pub fn open_read_write( + path: Option<&Path>, + column_families: Vec, + key: Option<[u8; 32]>, + ) -> Result { + let (path, in_memory) = if let Some(path) = path { + (path.to_path_buf(), false) + } else { + (tmp_path(), true) + }; + let c_path = path_to_cstring(&path)?; + unsafe { + let unsafe_env = Self::create_env(in_memory, key); + let options = Self::db_options(true, &unsafe_env)?; + rocksdb_options_set_create_if_missing(options, 1); + rocksdb_options_set_create_missing_column_families(options, 1); + rocksdb_options_set_compression( + options, + if in_memory { + rocksdb_no_compression + } else { + rocksdb_lz4_compression + } + .try_into() + .unwrap(), + ); + let block_based_table_options = rocksdb_block_based_options_create(); + assert!( + !block_based_table_options.is_null(), + "rocksdb_block_based_options_create returned null" + ); + rocksdb_block_based_options_set_format_version(block_based_table_options, 5); + rocksdb_block_based_options_set_index_block_restart_interval( + block_based_table_options, + 16, + ); + rocksdb_options_set_block_based_table_factory(options, block_based_table_options); + #[cfg(feature = "rocksdb-debug")] + { + rocksdb_options_set_info_log_level(options, 0); + rocksdb_options_enable_statistics(options); + rocksdb_options_set_stats_dump_period_sec(options, 60); + } + + let (column_family_names, c_column_family_names, cf_options) = + Self::column_families_names_and_options(column_families, options); + let mut cf_handles: Vec<*mut rocksdb_column_family_handle_t> = + vec![ptr::null_mut(); column_family_names.len()]; + let c_num_column_families = c_column_family_names.len().try_into().unwrap(); + + let transactiondb_options = rocksdb_transactiondb_options_create(); + assert!( + !transactiondb_options.is_null(), + "rocksdb_transactiondb_options_create returned null" + ); + + let db = ffi_result!(rocksdb_transactiondb_open_column_families_with_status( + options, + transactiondb_options, + c_path.as_ptr(), + c_num_column_families, + c_column_family_names + .iter() + .map(|cf| cf.as_ptr()) + .collect::>() + .as_ptr(), + cf_options.as_ptr().cast(), + cf_handles.as_mut_ptr(), + )) + .map_err(|e| { + rocksdb_transactiondb_options_destroy(transactiondb_options); + for cf_option in &cf_options { + rocksdb_options_destroy(*cf_option); + } + rocksdb_options_destroy(options); + rocksdb_block_based_options_destroy(block_based_table_options); + e + })?; + assert!(!db.is_null(), "rocksdb_create returned null"); + for handle in &cf_handles { + assert!( + !handle.is_null(), + "rocksdb_readoptions_create returned a null column family" + ); + } + + let read_options = rocksdb_readoptions_create(); + assert!( + !read_options.is_null(), + "rocksdb_readoptions_create returned null" + ); + + let write_options = rocksdb_writeoptions_create(); + assert!( + !write_options.is_null(), + "rocksdb_writeoptions_create returned null" + ); + + if in_memory { + rocksdb_writeoptions_disable_WAL(write_options, 1); // No need for WAL + } else { + rocksdb_writeoptions_set_sync(write_options, 1); + } + + let transaction_options = rocksdb_transaction_options_create(); + assert!( + !transaction_options.is_null(), + "rocksdb_transaction_options_create returned null" + ); + 
rocksdb_transaction_options_set_set_snapshot(transaction_options, 1); + + let flush_options = rocksdb_flushoptions_create(); + assert!( + !flush_options.is_null(), + "rocksdb_flushoptions_create returned null" + ); + + let env_options = rocksdb_envoptions_create(); + assert!( + !env_options.is_null(), + "rocksdb_envoptions_create returned null" + ); + + let ingest_external_file_options = rocksdb_ingestexternalfileoptions_create(); + assert!( + !ingest_external_file_options.is_null(), + "rocksdb_ingestexternalfileoptions_create returned null" + ); + + let compaction_options = rocksdb_compactoptions_create(); + assert!( + !compaction_options.is_null(), + "rocksdb_compactoptions_create returned null" + ); + + Ok(Self { + inner: DbKind::ReadWrite(Arc::new(RwDbHandler { + db, + env: unsafe_env, + options, + transaction_options, + transactiondb_options, + read_options, + write_options, + flush_options, + env_options, + ingest_external_file_options, + compaction_options, + block_based_table_options, + column_family_names, + cf_handles, + cf_options, + in_memory, + path, + past_commits_cache: Arc::new(RwLock::new(HashMap::new())), + })), + }) + } + } + + // pub fn open_secondary( + // primary_path: &Path, + // secondary_path: Option<&Path>, + // column_families: Vec, + // ) -> Result { + // let c_primary_path = path_to_cstring(primary_path)?; + // let (secondary_path, in_memory) = if let Some(path) = secondary_path { + // (path.to_path_buf(), false) + // } else { + // (tmp_path(), true) + // }; + // let c_secondary_path = path_to_cstring(&secondary_path)?; + // unsafe { + // let options = Self::db_options(false, false)?; + // let (column_family_names, c_column_family_names, cf_options) = + // Self::column_families_names_and_options(column_families, options); + // let mut cf_handles: Vec<*mut rocksdb_column_family_handle_t> = + // vec![ptr::null_mut(); column_family_names.len()]; + // let c_num_column_families = c_column_family_names.len().try_into().unwrap(); + // let db = ffi_result!(rocksdb_open_as_secondary_column_families_with_status( + // options, + // c_primary_path.as_ptr(), + // c_secondary_path.as_ptr(), + // c_num_column_families, + // c_column_family_names + // .iter() + // .map(|cf| cf.as_ptr()) + // .collect::>() + // .as_ptr(), + // cf_options.as_ptr().cast(), + // cf_handles.as_mut_ptr(), + // )) + // .map_err(|e| { + // for cf_option in &cf_options { + // rocksdb_options_destroy(*cf_option); + // } + // rocksdb_options_destroy(options); + // e + // })?; + // assert!( + // !db.is_null(), + // "rocksdb_open_for_read_only_column_families_with_status returned null" + // ); + // for handle in &cf_handles { + // assert!( + // !handle.is_null(), + // "rocksdb_open_for_read_only_column_families_with_status returned a null column family" + // ); + // } + // let read_options = rocksdb_readoptions_create(); + // assert!( + // !read_options.is_null(), + // "rocksdb_readoptions_create returned null" + // ); + // Ok(Self { + // inner: DbKind::ReadOnly(Arc::new(RoDbHandler { + // db, + // options, + // read_options, + // column_family_names, + // cf_handles, + // cf_options, + // is_secondary: true, + // path_to_remove: in_memory.then_some(secondary_path), + // })), + // }) + // } + // } + + pub fn open_read_only( + path: &Path, + column_families: Vec, + key: Option<[u8; 32]>, + ) -> Result { + unsafe { + let c_path = path_to_cstring(path)?; + let unsafe_env = Self::create_env(false, key); + let options = Self::db_options(true, &unsafe_env)?; + let (column_family_names, c_column_family_names, 
cf_options) = + Self::column_families_names_and_options(column_families, options); + let mut cf_handles: Vec<*mut rocksdb_column_family_handle_t> = + vec![ptr::null_mut(); column_family_names.len()]; + let c_num_column_families = c_column_family_names.len().try_into().unwrap(); + let db = ffi_result!(rocksdb_open_for_read_only_column_families_with_status( + options, + c_path.as_ptr(), + c_num_column_families, + c_column_family_names + .iter() + .map(|cf| cf.as_ptr()) + .collect::>() + .as_ptr(), + cf_options.as_ptr().cast(), + cf_handles.as_mut_ptr(), + 0, // false + )) + .map_err(|e| { + for cf_option in &cf_options { + rocksdb_options_destroy(*cf_option); + } + rocksdb_options_destroy(options); + e + })?; + assert!( + !db.is_null(), + "rocksdb_open_for_read_only_column_families_with_status returned null" + ); + for handle in &cf_handles { + assert!( + !handle.is_null(), + "rocksdb_open_for_read_only_column_families_with_status returned a null column family" + ); + } + let read_options = rocksdb_readoptions_create(); + assert!( + !read_options.is_null(), + "rocksdb_readoptions_create returned null" + ); + + Ok(Self { + inner: DbKind::ReadOnly(Arc::new(RoDbHandler { + db, + env: unsafe_env, + options, + read_options, + column_family_names, + cf_handles, + cf_options, + is_secondary: false, + path_to_remove: None, + })), + }) + } + } + + fn create_env(in_memory: bool, key: Option<[u8; 32]>) -> UnsafeEnv { + unsafe { + if in_memory { + let env = rocksdb_create_mem_env(); + assert!(!env.is_null(), "rocksdb_create_mem_env returned null"); + UnsafeEnv(env) + } else { + let env = match key { + Some(_) => rocksdb_create_encrypted_env(opt_bytes_to_ptr(key.as_ref())), + None => rocksdb_create_default_env(), + }; + assert!(!env.is_null(), "rocksdb_create_encrypted_env returned null"); + UnsafeEnv(env) + } + } + } + + fn db_options( + limit_max_open_files: bool, + unsafe_env: &UnsafeEnv, + ) -> Result<*mut rocksdb_options_t, StorageError> { + unsafe { + let options = rocksdb_options_create(); + assert!(!options.is_null(), "rocksdb_options_create returned null"); + rocksdb_options_optimize_level_style_compaction(options, 512 * 1024 * 1024); + rocksdb_options_increase_parallelism( + options, + available_parallelism()?.get().try_into().unwrap(), + ); + if limit_max_open_files { + if let Some(available_fd) = available_file_descriptors()? { + if available_fd < 96 { + rocksdb_options_destroy(options); + return Err(io::Error::new( + io::ErrorKind::Other, + format!( + "Oxigraph needs at least 96 file descriptors, \ + only {available_fd} allowed. \ + Run e.g. 
`ulimit -n 512` to allow 512 opened files" + ), + ) + .into()); + } + rocksdb_options_set_max_open_files( + options, + (available_fd - 48).try_into().unwrap(), + ) + } + } else { + rocksdb_options_set_max_open_files(options, -1); + } + rocksdb_options_set_info_log_level(options, 2); // We only log warnings + rocksdb_options_set_max_log_file_size(options, 1024 * 1024); // Only 1MB log size + rocksdb_options_set_recycle_log_file_num(options, 10); // We do not keep more than 10 log files + rocksdb_options_set_env(options, unsafe_env.0); + Ok(options) + } + } + + fn column_families_names_and_options( + mut column_families: Vec, + base_options: *mut rocksdb_options_t, + ) -> (Vec<&'static str>, Vec, Vec<*mut rocksdb_options_t>) { + if !column_families.iter().any(|c| c.name == "default") { + column_families.push(ColumnFamilyDefinition { + name: "default", + use_iter: true, + min_prefix_size: 0, + unordered_writes: false, + }) + } + let column_family_names = column_families.iter().map(|c| c.name).collect::>(); + let c_column_family_names = column_family_names + .iter() + .map(|name| CString::new(*name).unwrap()) + .collect(); + + let cf_options = column_families + .into_iter() + .map(|cf| unsafe { + let options = rocksdb_options_create_copy(base_options); + if !cf.use_iter { + rocksdb_options_optimize_for_point_lookup(options, 128); + } + if cf.min_prefix_size > 0 { + rocksdb_options_set_prefix_extractor( + options, + rocksdb_slicetransform_create_fixed_prefix(cf.min_prefix_size), + ); + } + if cf.unordered_writes { + rocksdb_options_set_unordered_write(options, 1); + } + options + }) + .collect::>(); + (column_family_names, c_column_family_names, cf_options) + } + + pub fn column_family(&self, name: &'static str) -> Result { + let (column_family_names, cf_handles) = match &self.inner { + DbKind::ReadOnly(db) => (&db.column_family_names, &db.cf_handles), + DbKind::ReadWrite(db) => (&db.column_family_names, &db.cf_handles), + }; + for (cf, cf_handle) in column_family_names.iter().zip(cf_handles) { + if *cf == name { + return Ok(ColumnFamily(*cf_handle)); + } + } + Err(CorruptionError::from_missing_column_family_name(name).into()) + } + + #[must_use] + pub fn snapshot(&self) -> Reader { + unsafe { + match &self.inner { + DbKind::ReadOnly(db) => { + if db.is_secondary { + // We try to refresh (and ignore the errors) + drop(ffi_result!(rocksdb_try_catch_up_with_primary_with_status( + db.db + ))); + } + let options = rocksdb_readoptions_create_copy(db.read_options); + Reader { + inner: InnerReader::PlainDb(Arc::clone(db)), + options, + } + } + DbKind::ReadWrite(db) => { + let options = rocksdb_readoptions_create_copy(db.read_options); + let snapshot = rocksdb_transactiondb_create_snapshot(db.db); + assert!( + !snapshot.is_null(), + "rocksdb_transactiondb_create_snapshot returned null" + ); + rocksdb_readoptions_set_snapshot(options, snapshot); + Reader { + inner: InnerReader::TransactionalSnapshot(Rc::new(TransactionalSnapshot { + db: Arc::clone(db), + snapshot, + })), + options, + } + } + } + } + } + + pub fn transaction<'a, 'b: 'a, T, E: Error + 'static + From>( + &'b self, + f: impl Fn(Transaction<'a>) -> Result, + ) -> Result { + if let DbKind::ReadWrite(db) = &self.inner { + loop { + let transaction = unsafe { + let transaction = rocksdb_transaction_begin( + db.db, + db.write_options, + db.transaction_options, + ptr::null_mut(), + ); + assert!( + !transaction.is_null(), + "rocksdb_transaction_begin returned null" + ); + transaction + }; + let (read_options, snapshot) = unsafe { + let options = 
rocksdb_readoptions_create_copy(db.read_options); + let snapshot = rocksdb_transaction_get_snapshot(transaction); + rocksdb_readoptions_set_snapshot(options, snapshot); + (options, snapshot) + }; + let result = f(Transaction { + inner: Rc::new(transaction), + read_options, + _lifetime: PhantomData, + }); + match result { + Ok(result) => { + unsafe { + let r = + ffi_result!(rocksdb_transaction_rollback_with_status(transaction)); + rocksdb_transaction_destroy(transaction); + rocksdb_readoptions_destroy(read_options); + rocksdb_free(snapshot as *mut c_void); + r.map_err(StorageError::from)?; // We make sure to also run destructors if the commit fails + } + return Ok(result); + } + Err(e) => { + unsafe { + let r = + ffi_result!(rocksdb_transaction_rollback_with_status(transaction)); + rocksdb_transaction_destroy(transaction); + rocksdb_readoptions_destroy(read_options); + rocksdb_free(snapshot as *mut c_void); + r.map_err(StorageError::from)?; // We make sure to also run destructors if the commit fails + } + // We look for the root error + let mut error: &(dyn Error + 'static) = &e; + while let Some(e) = error.source() { + error = e; + } + let is_conflict_error = + error.downcast_ref::().map_or(false, |e| { + e.0.code == rocksdb_status_code_t_rocksdb_status_code_busy + || e.0.code + == rocksdb_status_code_t_rocksdb_status_code_timed_out + || e.0.code + == rocksdb_status_code_t_rocksdb_status_code_try_again + }); + if is_conflict_error { + // We give a chance to the OS to do something else before retrying in order to help avoiding another conflict + yield_now(); + } else { + // We raise the error + return Err(e); + } + } + } + } + } else { + Err( + StorageError::Other("Transaction are only possible on read-write instances".into()) + .into(), + ) + } + } + + pub fn ng_transaction<'a, 'b: 'a, T, E: Error + 'static + From>( + &'b self, + mut f: impl FnMut(Transaction<'a>) -> Result, + ) -> Result { + if let DbKind::ReadWrite(db) = &self.inner { + loop { + let transaction = unsafe { + let transaction = rocksdb_transaction_begin( + db.db, + db.write_options, + db.transaction_options, + ptr::null_mut(), + ); + assert!( + !transaction.is_null(), + "rocksdb_transaction_begin returned null" + ); + transaction + }; + let (read_options, snapshot) = unsafe { + let options = rocksdb_readoptions_create_copy(db.read_options); + let snapshot = rocksdb_transaction_get_snapshot(transaction); + rocksdb_readoptions_set_snapshot(options, snapshot); + (options, snapshot) + }; + let result = f(Transaction { + inner: Rc::new(transaction), + read_options, + _lifetime: PhantomData, + }); + match result { + Ok(result) => { + unsafe { + let r = + ffi_result!(rocksdb_transaction_commit_with_status(transaction)); + rocksdb_transaction_destroy(transaction); + rocksdb_readoptions_destroy(read_options); + rocksdb_free(snapshot as *mut c_void); + r.map_err(StorageError::from)?; // We make sure to also run destructors if the commit fails + } + return Ok(result); + } + Err(e) => { + unsafe { + let r = + ffi_result!(rocksdb_transaction_rollback_with_status(transaction)); + rocksdb_transaction_destroy(transaction); + rocksdb_readoptions_destroy(read_options); + rocksdb_free(snapshot as *mut c_void); + r.map_err(StorageError::from)?; // We make sure to also run destructors if the commit fails + } + // We look for the root error + let mut error: &(dyn Error + 'static) = &e; + while let Some(e) = error.source() { + error = e; + } + let is_conflict_error = + error.downcast_ref::().map_or(false, |e| { + e.0.code == 
rocksdb_status_code_t_rocksdb_status_code_busy + || e.0.code + == rocksdb_status_code_t_rocksdb_status_code_timed_out + || e.0.code + == rocksdb_status_code_t_rocksdb_status_code_try_again + }); + if is_conflict_error { + // We give a chance to the OS to do something else before retrying in order to help avoiding another conflict + yield_now(); + } else { + // We raise the error + return Err(e); + } + } + } + } + } else { + Err( + StorageError::Other("Transaction are only possible on read-write instances".into()) + .into(), + ) + } + } + + pub fn get( + &self, + column_family: &ColumnFamily, + key: &[u8], + ) -> Result, StorageError> { + unsafe { + let slice = match &self.inner { + DbKind::ReadOnly(db) => { + ffi_result!(rocksdb_get_pinned_cf_with_status( + db.db, + db.read_options, + column_family.0, + key.as_ptr().cast(), + key.len(), + )) + } + DbKind::ReadWrite(db) => { + ffi_result!(rocksdb_transactiondb_get_pinned_cf_with_status( + db.db, + db.read_options, + column_family.0, + key.as_ptr().cast(), + key.len() + )) + } + }?; + Ok(if slice.is_null() { + None + } else { + Some(PinnableSlice(slice)) + }) + } + } + + pub fn contains_key( + &self, + column_family: &ColumnFamily, + key: &[u8], + ) -> Result { + Ok(self.get(column_family, key)?.is_some()) // TODO: optimize + } + + pub fn insert( + &self, + column_family: &ColumnFamily, + key: &[u8], + value: &[u8], + ) -> Result<(), StorageError> { + if let DbKind::ReadWrite(db) = &self.inner { + unsafe { + ffi_result!(rocksdb_transactiondb_put_cf_with_status( + db.db, + db.write_options, + column_family.0, + key.as_ptr().cast(), + key.len(), + value.as_ptr().cast(), + value.len(), + )) + }?; + Ok(()) + } else { + Err(StorageError::Other( + "Inserts are only possible on read-write instances".into(), + )) + } + } + + pub fn flush(&self) -> Result<(), StorageError> { + if let DbKind::ReadWrite(db) = &self.inner { + unsafe { + ffi_result!(rocksdb_transactiondb_flush_cfs_with_status( + db.db, + db.flush_options, + db.cf_handles.as_ptr().cast_mut(), + db.cf_handles.len().try_into().unwrap() + )) + }?; + Ok(()) + } else { + Err(StorageError::Other( + "Flush is only possible on read-write instances".into(), + )) + } + } + + pub fn compact(&self, column_family: &ColumnFamily) -> Result<(), StorageError> { + if let DbKind::ReadWrite(db) = &self.inner { + unsafe { + ffi_result!(rocksdb_transactiondb_compact_range_cf_opt_with_status( + db.db, + column_family.0, + db.compaction_options, + ptr::null(), + 0, + ptr::null(), + 0, + )) + }?; + Ok(()) + } else { + Err(StorageError::Other( + "Compaction is only possible on read-write instances".into(), + )) + } + } + + pub fn new_sst_file(&self) -> Result { + if let DbKind::ReadWrite(db) = &self.inner { + let path = db.path.join(random::().to_string()); + unsafe { + let writer = rocksdb_sstfilewriter_create(db.env_options, db.options); + ffi_result!(rocksdb_sstfilewriter_open_with_status( + writer, + path_to_cstring(&path)?.as_ptr() + )) + .map_err(|e| { + rocksdb_sstfilewriter_destroy(writer); + e + })?; + Ok(SstFileWriter { writer, path }) + } + } else { + Err(StorageError::Other( + "SST creation is only possible on read-write instances".into(), + )) + } + } + + pub fn insert_stt_files( + &self, + ssts_for_cf: &[(&ColumnFamily, PathBuf)], + ) -> Result<(), StorageError> { + if ssts_for_cf.is_empty() { + return Ok(()); // Rocksdb does not support empty lists + } + if let DbKind::ReadWrite(db) = &self.inner { + let mut paths_by_cf = HashMap::<_, Vec<_>>::new(); + for (cf, path) in ssts_for_cf { + paths_by_cf 
+ .entry(*cf) + .or_default() + .push(path_to_cstring(path)?); + } + let cpaths_by_cf = paths_by_cf + .iter() + .map(|(cf, paths)| (*cf, paths.iter().map(|p| p.as_ptr()).collect::>())) + .collect::>(); + let args = cpaths_by_cf + .iter() + .map(|(cf, p)| rocksdb_ingestexternalfilearg_t { + column_family: cf.0, + external_files: p.as_ptr(), + external_files_len: p.len(), + options: db.ingest_external_file_options, + }) + .collect::>(); + unsafe { + ffi_result!(rocksdb_transactiondb_ingest_external_files_with_status( + db.db, + args.as_ptr(), + args.len() + ))?; + } + Ok(()) + } else { + Err(StorageError::Other( + "SST ingestion is only possible on read-write instances".into(), + )) + } + } + + pub fn backup(&self, target_directory: &Path) -> Result<(), StorageError> { + let path = path_to_cstring(target_directory)?; + match &self.inner { + DbKind::ReadOnly(db) => unsafe { + if db.is_secondary { + ffi_result!(rocksdb_try_catch_up_with_primary_with_status(db.db))?; + } + ffi_result!(rocksdb_create_checkpoint_with_status(db.db, path.as_ptr())) + }, + DbKind::ReadWrite(db) => { + if db.in_memory { + return Err(StorageError::Other( + "It is not possible to backup an in-memory database".into(), + )); + } + unsafe { + ffi_result!(rocksdb_transactiondb_create_checkpoint_with_status( + db.db, + path.as_ptr() + )) + } + } + }?; + Ok(()) + } +} + +// It is fine to not keep a lifetime: there is no way to use this type without the database being still in scope. +// So, no use after free possible. +#[derive(Clone, Eq, PartialEq, Hash)] +pub struct ColumnFamily(*mut rocksdb_column_family_handle_t); + +unsafe impl Send for ColumnFamily {} +unsafe impl Sync for ColumnFamily {} + +pub struct Reader { + inner: InnerReader, + options: *mut rocksdb_readoptions_t, +} + +#[derive(Clone)] +enum InnerReader { + TransactionalSnapshot(Rc), + Transaction(Weak<*mut rocksdb_transaction_t>), + PlainDb(Arc), +} + +struct TransactionalSnapshot { + db: Arc, + snapshot: *const rocksdb_snapshot_t, +} + +impl Drop for TransactionalSnapshot { + fn drop(&mut self) { + unsafe { rocksdb_transactiondb_release_snapshot(self.db.db, self.snapshot) } + } +} + +impl Clone for Reader { + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + options: unsafe { rocksdb_readoptions_create_copy(self.options) }, + } + } +} + +impl Drop for Reader { + fn drop(&mut self) { + unsafe { rocksdb_readoptions_destroy(self.options) } + } +} + +impl Reader { + pub fn get( + &self, + column_family: &ColumnFamily, + key: &[u8], + ) -> Result, StorageError> { + unsafe { + let slice = match &self.inner { + InnerReader::TransactionalSnapshot(inner) => { + ffi_result!(rocksdb_transactiondb_get_pinned_cf_with_status( + inner.db.db, + self.options, + column_family.0, + key.as_ptr().cast(), + key.len() + )) + } + InnerReader::Transaction(inner) => { + let Some(inner) = inner.upgrade() else { + return Err(StorageError::Other( + "The transaction is already ended".into(), + )); + }; + ffi_result!(rocksdb_transaction_get_pinned_cf_with_status( + *inner, + self.options, + column_family.0, + key.as_ptr().cast(), + key.len() + )) + } + InnerReader::PlainDb(inner) => { + ffi_result!(rocksdb_get_pinned_cf_with_status( + inner.db, + self.options, + column_family.0, + key.as_ptr().cast(), + key.len() + )) + } + }?; + Ok(if slice.is_null() { + None + } else { + Some(PinnableSlice(slice)) + }) + } + } + + pub fn contains_key( + &self, + column_family: &ColumnFamily, + key: &[u8], + ) -> Result { + Ok(self.get(column_family, key)?.is_some()) // TODO: optimize + } 
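+
+    // `scan_prefix` below derives an exclusive upper bound for the iterator:
+    // increment the rightmost byte of the prefix that is < 0xFF and keep the
+    // tail, e.g. [0x01, 0x02] -> [0x01, 0x03] and [0x01, 0xFF] -> [0x02, 0xFF];
+    // an all-0xFF prefix has no bound. Equivalent helper, for illustration only:
+    //
+    //     fn upper_bound(prefix: &[u8]) -> Option<Vec<u8>> {
+    //         let mut bound = prefix.to_vec();
+    //         for c in bound.iter_mut().rev() {
+    //             if *c < u8::MAX {
+    //                 *c += 1;
+    //                 return Some(bound);
+    //             }
+    //         }
+    //         None
+    //     }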
+ + #[allow(clippy::iter_not_returning_iterator)] + pub fn iter(&self, column_family: &ColumnFamily) -> Result { + self.scan_prefix(column_family, &[]) + } + + pub fn scan_prefix( + &self, + column_family: &ColumnFamily, + prefix: &[u8], + ) -> Result { + // We generate the upper bound + let upper_bound = { + let mut bound = prefix.to_vec(); + let mut found = false; + for c in bound.iter_mut().rev() { + if *c < u8::MAX { + *c += 1; + found = true; + break; + } + } + found.then_some(bound) + }; + + unsafe { + let options = rocksdb_readoptions_create_copy(self.options); + assert!( + !options.is_null(), + "rocksdb_readoptions_create returned null" + ); + if let Some(upper_bound) = &upper_bound { + rocksdb_readoptions_set_iterate_upper_bound( + options, + upper_bound.as_ptr().cast(), + upper_bound.len(), + ); + } + let iter = match &self.inner { + InnerReader::TransactionalSnapshot(inner) => { + rocksdb_transactiondb_create_iterator_cf(inner.db.db, options, column_family.0) + } + InnerReader::Transaction(inner) => { + let Some(inner) = inner.upgrade() else { + return Err(StorageError::Other( + "The transaction is already ended".into(), + )); + }; + rocksdb_transaction_create_iterator_cf(*inner, options, column_family.0) + } + InnerReader::PlainDb(inner) => { + rocksdb_create_iterator_cf(inner.db, options, column_family.0) + } + }; + assert!(!iter.is_null(), "rocksdb_create_iterator returned null"); + if prefix.is_empty() { + rocksdb_iter_seek_to_first(iter); + } else { + rocksdb_iter_seek(iter, prefix.as_ptr().cast(), prefix.len()); + } + let is_currently_valid = rocksdb_iter_valid(iter) != 0; + Ok(Iter { + inner: iter, + options, + _upper_bound: upper_bound, + _reader: self.clone(), + is_currently_valid, + }) + } + } + + pub fn len(&self, column_family: &ColumnFamily) -> Result { + let mut count = 0; + let mut iter = self.iter(column_family)?; + while iter.is_valid() { + count += 1; + iter.next(); + } + iter.status()?; // We makes sure there is no read problem + Ok(count) + } + + pub fn is_empty(&self, column_family: &ColumnFamily) -> Result { + let iter = self.iter(column_family)?; + iter.status()?; // We makes sure there is no read problem + Ok(!iter.is_valid()) + } +} + +pub struct Transaction<'a> { + inner: Rc<*mut rocksdb_transaction_t>, + read_options: *mut rocksdb_readoptions_t, + _lifetime: PhantomData<&'a ()>, +} + +impl Transaction<'_> { + pub fn reader(&self) -> Reader { + Reader { + inner: InnerReader::Transaction(Rc::downgrade(&self.inner)), + options: unsafe { rocksdb_readoptions_create_copy(self.read_options) }, + } + } + + pub fn get_for_update( + &self, + column_family: &ColumnFamily, + key: &[u8], + ) -> Result, StorageError> { + unsafe { + let slice = ffi_result!(rocksdb_transaction_get_for_update_pinned_cf_with_status( + *self.inner, + self.read_options, + column_family.0, + key.as_ptr().cast(), + key.len() + ))?; + Ok(if slice.is_null() { + None + } else { + Some(PinnableSlice(slice)) + }) + } + } + + pub fn contains_key_for_update( + &self, + column_family: &ColumnFamily, + key: &[u8], + ) -> Result { + Ok(self.get_for_update(column_family, key)?.is_some()) // TODO: optimize + } + + pub fn insert( + &mut self, + column_family: &ColumnFamily, + key: &[u8], + value: &[u8], + ) -> Result<(), StorageError> { + unsafe { + ffi_result!(rocksdb_transaction_put_cf_with_status( + *self.inner, + column_family.0, + key.as_ptr().cast(), + key.len(), + value.as_ptr().cast(), + value.len(), + ))?; + } + Ok(()) + } + + pub fn insert_empty( + &mut self, + column_family: &ColumnFamily, + 
key: &[u8], + ) -> Result<(), StorageError> { + self.insert(column_family, key, &[]) + } + + pub fn remove(&mut self, column_family: &ColumnFamily, key: &[u8]) -> Result<(), StorageError> { + unsafe { + ffi_result!(rocksdb_transaction_delete_cf_with_status( + *self.inner, + column_family.0, + key.as_ptr().cast(), + key.len(), + ))?; + } + Ok(()) + } +} + +pub struct PinnableSlice(*mut rocksdb_pinnableslice_t); + +impl Drop for PinnableSlice { + fn drop(&mut self) { + unsafe { + rocksdb_pinnableslice_destroy(self.0); + } + } +} + +impl Deref for PinnableSlice { + type Target = [u8]; + + fn deref(&self) -> &Self::Target { + unsafe { + let mut len = 0; + let val = rocksdb_pinnableslice_value(self.0, &mut len); + slice::from_raw_parts(val.cast(), len) + } + } +} + +impl AsRef<[u8]> for PinnableSlice { + fn as_ref(&self) -> &[u8] { + self + } +} + +impl Borrow<[u8]> for PinnableSlice { + fn borrow(&self) -> &[u8] { + self + } +} + +impl From for Vec { + fn from(value: PinnableSlice) -> Self { + value.to_vec() + } +} + +pub struct Buffer { + base: *mut u8, + len: usize, +} + +impl Drop for Buffer { + fn drop(&mut self) { + unsafe { + rocksdb_free(self.base.cast()); + } + } +} + +impl Deref for Buffer { + type Target = [u8]; + + fn deref(&self) -> &Self::Target { + unsafe { slice::from_raw_parts(self.base, self.len) } + } +} + +impl AsRef<[u8]> for Buffer { + fn as_ref(&self) -> &[u8] { + self + } +} + +impl Borrow<[u8]> for Buffer { + fn borrow(&self) -> &[u8] { + self + } +} + +impl From for Vec { + fn from(value: Buffer) -> Self { + value.to_vec() + } +} + +pub struct Iter { + inner: *mut rocksdb_iterator_t, + is_currently_valid: bool, + _upper_bound: Option>, + _reader: Reader, // needed to ensure that DB still lives while iter is used + options: *mut rocksdb_readoptions_t, /* needed to ensure that options still lives while iter is used */ +} + +impl Drop for Iter { + fn drop(&mut self) { + unsafe { + rocksdb_iter_destroy(self.inner); + rocksdb_readoptions_destroy(self.options); + } + } +} + +#[allow(clippy::non_send_fields_in_send_ty)] +unsafe impl Send for Iter {} + +unsafe impl Sync for Iter {} + +impl Iter { + pub fn is_valid(&self) -> bool { + self.is_currently_valid + } + + pub fn status(&self) -> Result<(), StorageError> { + unsafe { + ffi_result!(rocksdb_iter_get_status(self.inner))?; + } + Ok(()) + } + + pub fn next(&mut self) { + unsafe { + rocksdb_iter_next(self.inner); + self.is_currently_valid = rocksdb_iter_valid(self.inner) != 0; + } + } + + pub fn key(&self) -> Option<&[u8]> { + if self.is_valid() { + unsafe { + let mut len = 0; + let val = rocksdb_iter_key(self.inner, &mut len); + Some(slice::from_raw_parts(val.cast(), len)) + } + } else { + None + } + } + + pub fn value(&self) -> Option<&[u8]> { + if self.is_valid() { + unsafe { + let mut len = 0; + let val = rocksdb_iter_value(self.inner, &mut len); + Some(slice::from_raw_parts(val.cast(), len)) + } + } else { + None + } + } +} + +pub struct SstFileWriter { + writer: *mut rocksdb_sstfilewriter_t, + path: PathBuf, +} + +impl Drop for SstFileWriter { + fn drop(&mut self) { + unsafe { + rocksdb_sstfilewriter_destroy(self.writer); + } + } +} + +impl SstFileWriter { + pub fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<(), StorageError> { + unsafe { + ffi_result!(rocksdb_sstfilewriter_put_with_status( + self.writer, + key.as_ptr().cast(), + key.len(), + value.as_ptr().cast(), + value.len(), + ))?; + } + Ok(()) + } + + pub fn insert_empty(&mut self, key: &[u8]) -> Result<(), StorageError> { + self.insert(key, &[]) + } + 
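+    // Typical bulk-load flow suggested by this API: create a writer with
+    // `Db::new_sst_file()`, call `insert` with keys in ascending order (SST
+    // files require sorted input), `finish()` to obtain the file path, then
+    // pass the paths to `Db::insert_stt_files` for ingestion into the live DB.
+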
+ pub fn finish(self) -> Result { + unsafe { + ffi_result!(rocksdb_sstfilewriter_finish_with_status(self.writer))?; + } + Ok(self.path.clone()) + } +} + +#[derive(thiserror::Error)] +#[error("{}", self.message())] +struct ErrorStatus(rocksdb_status_t); + +unsafe impl Send for ErrorStatus {} +unsafe impl Sync for ErrorStatus {} + +impl Drop for ErrorStatus { + fn drop(&mut self) { + if !self.0.string.is_null() { + unsafe { + rocksdb_free(self.0.string as *mut c_void); + } + } + } +} + +impl ErrorStatus { + fn message(&self) -> &str { + if self.0.string.is_null() { + "Unknown error" + } else { + unsafe { CStr::from_ptr(self.0.string) } + .to_str() + .unwrap_or("Invalid error message") + } + } +} + +impl fmt::Debug for ErrorStatus { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ErrorStatus") + .field("code", &self.0.code) + .field("subcode", &self.0.subcode) + .field("severity", &self.0.severity) + .field("message", &self.message()) + .finish() + } +} + +impl From for StorageError { + fn from(status: ErrorStatus) -> Self { + if status.0.code == rocksdb_status_code_t_rocksdb_status_code_io_error { + let kind = + if status.0.subcode == rocksdb_status_subcode_t_rocksdb_status_subcode_no_space { + io::ErrorKind::Other // TODO ErrorKind::StorageFull + } else if status.0.subcode + == rocksdb_status_subcode_t_rocksdb_status_subcode_path_not_found + { + io::ErrorKind::NotFound + } else { + io::ErrorKind::Other + }; + Self::Io(io::Error::new(kind, status)) + } else if status.0.code == rocksdb_status_code_t_rocksdb_status_code_corruption { + Self::Corruption(CorruptionError::new(status)) + } else { + Self::Other(Box::new(status)) + } + } +} + +struct UnsafeEnv(*mut rocksdb_env_t); + +// Hack for OnceCell. OK because only written in OnceCell and used in a thread-safe way by RocksDB +unsafe impl Send for UnsafeEnv {} +unsafe impl Sync for UnsafeEnv {} + +fn path_to_cstring(path: &Path) -> Result { + Ok(CString::new(path.to_str().ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + "The DB path is not valid UTF-8", + ) + })?) + .map_err(|e| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("The DB path contains null bytes: {e}"), + ) + })?) +} + +#[cfg(unix)] +fn available_file_descriptors() -> io::Result> { + let mut rlimit = libc::rlimit { + rlim_cur: 0, + rlim_max: 0, + }; + if unsafe { libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlimit) } == 0 { + Ok(Some(min(rlimit.rlim_cur, rlimit.rlim_max))) + } else { + Err(io::Error::last_os_error()) + } +} + +#[cfg(windows)] +fn available_file_descriptors() -> io::Result> { + Ok(Some(512)) // https://docs.microsoft.com/en-us/cpp/c-runtime-library/file-handling +} + +#[cfg(not(any(unix, windows)))] +fn available_file_descriptors() -> io::Result> { + Ok(None) +} + +fn tmp_path() -> PathBuf { + if cfg!(target_os = "linux") { + "/dev/shm/".into() + } else { + temp_dir() + } + .join(format!("oxigraph-rocksdb-{}", random::())) +} diff --git a/ng-oxigraph/src/oxigraph/storage/binary_encoder.rs b/ng-oxigraph/src/oxigraph/storage/binary_encoder.rs new file mode 100644 index 0000000..36d0455 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/storage/binary_encoder.rs @@ -0,0 +1,787 @@ +// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// partial Copyright (c) 2018 Oxigraph developers +// All work licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. 
All files in the project carrying such +// notice or not, may not be copied, modified, or distributed except +// according to those terms. + +use crate::oxigraph::storage::error::{CorruptionError, StorageError}; +use crate::oxigraph::storage::numeric_encoder::{EncodedQuad, EncodedTerm, EncodedTriple, StrHash}; +use crate::oxigraph::storage::small_string::SmallString; +use crate::oxsdatatypes::*; +use std::io::Read; +use std::mem::size_of; + +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +pub const LATEST_STORAGE_VERSION: u64 = 1; +pub const WRITTEN_TERM_MAX_SIZE: usize = size_of::() + 2 * size_of::(); + +// Encoded term type blocks +// 1-7: usual named nodes (except prefixes c.f. later) +// 8-15: blank nodes +// 16-47: literals +// 48-55: triples +// 56-64: future use +// 64-127: default named node prefixes +// 128-255: custom named node prefixes +const TYPE_NAMED_NODE_ID: u8 = 1; +const TYPE_NUMERICAL_BLANK_NODE_ID: u8 = 8; +const TYPE_SMALL_BLANK_NODE_ID: u8 = 9; +const TYPE_BIG_BLANK_NODE_ID: u8 = 10; +const TYPE_SMALL_STRING_LITERAL: u8 = 16; +const TYPE_BIG_STRING_LITERAL: u8 = 17; +const TYPE_SMALL_SMALL_LANG_STRING_LITERAL: u8 = 20; +const TYPE_SMALL_BIG_LANG_STRING_LITERAL: u8 = 21; +const TYPE_BIG_SMALL_LANG_STRING_LITERAL: u8 = 22; +const TYPE_BIG_BIG_LANG_STRING_LITERAL: u8 = 23; +const TYPE_SMALL_TYPED_LITERAL: u8 = 24; +const TYPE_BIG_TYPED_LITERAL: u8 = 25; +const TYPE_BOOLEAN_LITERAL_TRUE: u8 = 28; +const TYPE_BOOLEAN_LITERAL_FALSE: u8 = 29; +const TYPE_FLOAT_LITERAL: u8 = 30; +const TYPE_DOUBLE_LITERAL: u8 = 31; +const TYPE_INTEGER_LITERAL: u8 = 32; +const TYPE_DECIMAL_LITERAL: u8 = 33; +const TYPE_DATE_TIME_LITERAL: u8 = 34; +const TYPE_TIME_LITERAL: u8 = 35; +const TYPE_DATE_LITERAL: u8 = 36; +const TYPE_G_YEAR_MONTH_LITERAL: u8 = 37; +const TYPE_G_YEAR_LITERAL: u8 = 38; +const TYPE_G_MONTH_DAY_LITERAL: u8 = 39; +const TYPE_G_DAY_LITERAL: u8 = 40; +const TYPE_G_MONTH_LITERAL: u8 = 41; +const TYPE_DURATION_LITERAL: u8 = 42; +const TYPE_YEAR_MONTH_DURATION_LITERAL: u8 = 43; +const TYPE_DAY_TIME_DURATION_LITERAL: u8 = 44; +const TYPE_TRIPLE: u8 = 48; + +#[derive(Clone, Copy)] +pub enum QuadEncoding { + Spog, + Posg, + Ospg, + Gspo, + Gpos, + Gosp, + Dspo, + Dpos, + Dosp, +} + +impl QuadEncoding { + pub fn decode(self, mut buffer: &[u8]) -> Result { + match self { + Self::Spog => buffer.read_spog_quad(), + Self::Posg => buffer.read_posg_quad(), + Self::Ospg => buffer.read_ospg_quad(), + Self::Gspo => buffer.read_gspo_quad(), + Self::Gpos => buffer.read_gpos_quad(), + Self::Gosp => buffer.read_gosp_quad(), + Self::Dspo => buffer.read_dspo_quad(), + Self::Dpos => buffer.read_dpos_quad(), + Self::Dosp => buffer.read_dosp_quad(), + } + } +} + +pub fn decode_term(mut buffer: &[u8]) -> Result { + buffer.read_term() +} + +pub trait TermReader { + fn read_term(&mut self) -> Result; + + fn read_spog_quad(&mut self) -> Result { + let subject = self.read_term()?; + let predicate = self.read_term()?; + let object = self.read_term()?; + let graph_name = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name, + }) + } + + fn read_posg_quad(&mut self) -> Result { + let predicate = self.read_term()?; + let object = self.read_term()?; + let subject = self.read_term()?; + let graph_name = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name, + }) + } + + fn read_ospg_quad(&mut self) -> Result { + let object = self.read_term()?; + let subject = self.read_term()?; + let predicate = self.read_term()?; + let graph_name = 
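Two notes on the encoding above. `WRITTEN_TERM_MAX_SIZE` is, in the upstream Oxigraph source, `size_of::<u8>() + 2 * size_of::<StrHash>()`: one type-tag byte plus at most two 16-byte components, 33 bytes in total. And `QuadEncoding` exists because every quad column family stores the same quad under a different key permutation. A sketch of how a caller might pick the decoder for a known column family; the `decode_for_cf` helper is hypothetical, the `*_CF` names match those used later in `storage/mod.rs`:

```rust
// Hypothetical helper: route a raw key from a known column family
// through the matching permutation decoder.
fn decode_for_cf(cf_name: &str, key: &[u8]) -> Result<EncodedQuad, StorageError> {
    let encoding = match cf_name {
        "spog" => QuadEncoding::Spog,
        "posg" => QuadEncoding::Posg,
        "ospg" => QuadEncoding::Ospg,
        "gspo" => QuadEncoding::Gspo,
        "gpos" => QuadEncoding::Gpos,
        "gosp" => QuadEncoding::Gosp,
        "dspo" => QuadEncoding::Dspo,
        "dpos" => QuadEncoding::Dpos,
        "dosp" => QuadEncoding::Dosp,
        _ => return Err(CorruptionError::msg("unknown quad column family").into()),
    };
    encoding.decode(key)
}
```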
self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name, + }) + } + + fn read_gspo_quad(&mut self) -> Result { + let graph_name = self.read_term()?; + let subject = self.read_term()?; + let predicate = self.read_term()?; + let object = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name, + }) + } + + fn read_gpos_quad(&mut self) -> Result { + let graph_name = self.read_term()?; + let predicate = self.read_term()?; + let object = self.read_term()?; + let subject = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name, + }) + } + + fn read_gosp_quad(&mut self) -> Result { + let graph_name = self.read_term()?; + let object = self.read_term()?; + let subject = self.read_term()?; + let predicate = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name, + }) + } + + fn read_dspo_quad(&mut self) -> Result { + let subject = self.read_term()?; + let predicate = self.read_term()?; + let object = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name: EncodedTerm::DefaultGraph, + }) + } + + fn read_dpos_quad(&mut self) -> Result { + let predicate = self.read_term()?; + let object = self.read_term()?; + let subject = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name: EncodedTerm::DefaultGraph, + }) + } + + fn read_dosp_quad(&mut self) -> Result { + let object = self.read_term()?; + let subject = self.read_term()?; + let predicate = self.read_term()?; + Ok(EncodedQuad { + subject, + predicate, + object, + graph_name: EncodedTerm::DefaultGraph, + }) + } +} + +impl TermReader for R { + fn read_term(&mut self) -> Result { + let mut type_buffer = [0]; + self.read_exact(&mut type_buffer)?; + match type_buffer[0] { + TYPE_NAMED_NODE_ID => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(EncodedTerm::NamedNode { + iri_id: StrHash::from_be_bytes(buffer), + }) + } + TYPE_NUMERICAL_BLANK_NODE_ID => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(EncodedTerm::NumericalBlankNode { + id: u128::from_be_bytes(buffer), + }) + } + TYPE_SMALL_BLANK_NODE_ID => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(EncodedTerm::SmallBlankNode( + SmallString::from_be_bytes(buffer).map_err(CorruptionError::new)?, + )) + } + TYPE_BIG_BLANK_NODE_ID => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(EncodedTerm::BigBlankNode { + id_id: StrHash::from_be_bytes(buffer), + }) + } + TYPE_SMALL_SMALL_LANG_STRING_LITERAL => { + let mut language_buffer = [0; 16]; + self.read_exact(&mut language_buffer)?; + let mut value_buffer = [0; 16]; + self.read_exact(&mut value_buffer)?; + Ok(EncodedTerm::SmallSmallLangStringLiteral { + value: SmallString::from_be_bytes(value_buffer) + .map_err(CorruptionError::new)?, + language: SmallString::from_be_bytes(language_buffer) + .map_err(CorruptionError::new)?, + }) + } + TYPE_SMALL_BIG_LANG_STRING_LITERAL => { + let mut language_buffer = [0; 16]; + self.read_exact(&mut language_buffer)?; + let mut value_buffer = [0; 16]; + self.read_exact(&mut value_buffer)?; + Ok(EncodedTerm::SmallBigLangStringLiteral { + value: SmallString::from_be_bytes(value_buffer) + .map_err(CorruptionError::new)?, + language_id: StrHash::from_be_bytes(language_buffer), + }) + } + TYPE_BIG_SMALL_LANG_STRING_LITERAL => { + let mut language_buffer = [0; 16]; + self.read_exact(&mut language_buffer)?; + let mut value_buffer = [0; 16]; + self.read_exact(&mut value_buffer)?; + 
Ok(EncodedTerm::BigSmallLangStringLiteral { + value_id: StrHash::from_be_bytes(value_buffer), + language: SmallString::from_be_bytes(language_buffer) + .map_err(CorruptionError::new)?, + }) + } + TYPE_BIG_BIG_LANG_STRING_LITERAL => { + let mut language_buffer = [0; 16]; + self.read_exact(&mut language_buffer)?; + let mut value_buffer = [0; 16]; + self.read_exact(&mut value_buffer)?; + Ok(EncodedTerm::BigBigLangStringLiteral { + value_id: StrHash::from_be_bytes(value_buffer), + language_id: StrHash::from_be_bytes(language_buffer), + }) + } + TYPE_SMALL_TYPED_LITERAL => { + let mut datatype_buffer = [0; 16]; + self.read_exact(&mut datatype_buffer)?; + let mut value_buffer = [0; 16]; + self.read_exact(&mut value_buffer)?; + Ok(EncodedTerm::SmallTypedLiteral { + datatype_id: StrHash::from_be_bytes(datatype_buffer), + value: SmallString::from_be_bytes(value_buffer) + .map_err(CorruptionError::new)?, + }) + } + TYPE_BIG_TYPED_LITERAL => { + let mut datatype_buffer = [0; 16]; + self.read_exact(&mut datatype_buffer)?; + let mut value_buffer = [0; 16]; + self.read_exact(&mut value_buffer)?; + Ok(EncodedTerm::BigTypedLiteral { + datatype_id: StrHash::from_be_bytes(datatype_buffer), + value_id: StrHash::from_be_bytes(value_buffer), + }) + } + TYPE_SMALL_STRING_LITERAL => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(EncodedTerm::SmallStringLiteral( + SmallString::from_be_bytes(buffer).map_err(CorruptionError::new)?, + )) + } + TYPE_BIG_STRING_LITERAL => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(EncodedTerm::BigStringLiteral { + value_id: StrHash::from_be_bytes(buffer), + }) + } + TYPE_BOOLEAN_LITERAL_TRUE => Ok(true.into()), + TYPE_BOOLEAN_LITERAL_FALSE => Ok(false.into()), + TYPE_FLOAT_LITERAL => { + let mut buffer = [0; 4]; + self.read_exact(&mut buffer)?; + Ok(Float::from_be_bytes(buffer).into()) + } + TYPE_DOUBLE_LITERAL => { + let mut buffer = [0; 8]; + self.read_exact(&mut buffer)?; + Ok(Double::from_be_bytes(buffer).into()) + } + TYPE_INTEGER_LITERAL => { + let mut buffer = [0; 8]; + self.read_exact(&mut buffer)?; + Ok(Integer::from_be_bytes(buffer).into()) + } + TYPE_DECIMAL_LITERAL => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(Decimal::from_be_bytes(buffer).into()) + } + TYPE_DATE_TIME_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(DateTime::from_be_bytes(buffer).into()) + } + TYPE_TIME_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(Time::from_be_bytes(buffer).into()) + } + TYPE_DATE_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(Date::from_be_bytes(buffer).into()) + } + TYPE_G_YEAR_MONTH_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(GYearMonth::from_be_bytes(buffer).into()) + } + TYPE_G_YEAR_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(GYear::from_be_bytes(buffer).into()) + } + TYPE_G_MONTH_DAY_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(GMonthDay::from_be_bytes(buffer).into()) + } + TYPE_G_DAY_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(GDay::from_be_bytes(buffer).into()) + } + TYPE_G_MONTH_LITERAL => { + let mut buffer = [0; 18]; + self.read_exact(&mut buffer)?; + Ok(GMonth::from_be_bytes(buffer).into()) + } + TYPE_DURATION_LITERAL => { + let mut buffer = [0; 24]; + self.read_exact(&mut buffer)?; + Ok(Duration::from_be_bytes(buffer).into()) + } + TYPE_YEAR_MONTH_DURATION_LITERAL => { + 
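The payload widths in these arms are fixed by the `oxsdatatypes` binary layouts: `Float` is 4 bytes, `Double` and `Integer` 8, `Decimal` and `DayTimeDuration` 16, the date/time family 18 (a 16-byte seconds `Decimal` plus a 2-byte timezone offset), and a full `Duration` 24 (an 8-byte `YearMonthDuration` followed by a 16-byte `DayTimeDuration`). A sketch of that invariant, assuming those `to_be_bytes` layouts:

```rust
// Sketch: the reader must allocate exactly the width the writer produced.
const FLOAT_PAYLOAD: usize = 4;      // Float::to_be_bytes()  -> [u8; 4]
const DOUBLE_PAYLOAD: usize = 8;     // Double::to_be_bytes() -> [u8; 8]
const INTEGER_PAYLOAD: usize = 8;    // i64
const DECIMAL_PAYLOAD: usize = 16;   // i128
const DATE_TIME_PAYLOAD: usize = 18; // 16-byte Decimal seconds + 2-byte timezone offset
const DURATION_PAYLOAD: usize = 24;  // 8-byte YearMonthDuration + 16-byte DayTimeDuration
```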
let mut buffer = [0; 8]; + self.read_exact(&mut buffer)?; + Ok(YearMonthDuration::from_be_bytes(buffer).into()) + } + TYPE_DAY_TIME_DURATION_LITERAL => { + let mut buffer = [0; 16]; + self.read_exact(&mut buffer)?; + Ok(DayTimeDuration::from_be_bytes(buffer).into()) + } + TYPE_TRIPLE => Ok(EncodedTriple { + subject: self.read_term()?, + predicate: self.read_term()?, + object: self.read_term()?, + } + .into()), + _ => Err(CorruptionError::msg("the term buffer has an invalid type id").into()), + } + } +} + +pub fn write_spog_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.subject); + write_term(sink, &quad.predicate); + write_term(sink, &quad.object); + write_term(sink, &quad.graph_name); +} + +pub fn write_posg_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.predicate); + write_term(sink, &quad.object); + write_term(sink, &quad.subject); + write_term(sink, &quad.graph_name); +} + +pub fn write_ospg_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.object); + write_term(sink, &quad.subject); + write_term(sink, &quad.predicate); + write_term(sink, &quad.graph_name); +} + +pub fn write_gspo_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.graph_name); + write_term(sink, &quad.subject); + write_term(sink, &quad.predicate); + write_term(sink, &quad.object); +} + +pub fn write_gpos_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.graph_name); + write_term(sink, &quad.predicate); + write_term(sink, &quad.object); + write_term(sink, &quad.subject); +} + +pub fn write_gosp_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.graph_name); + write_term(sink, &quad.object); + write_term(sink, &quad.subject); + write_term(sink, &quad.predicate); +} + +pub fn write_spo_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.subject); + write_term(sink, &quad.predicate); + write_term(sink, &quad.object); +} + +pub fn write_pos_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.predicate); + write_term(sink, &quad.object); + write_term(sink, &quad.subject); +} + +pub fn write_osp_quad(sink: &mut Vec, quad: &EncodedQuad) { + write_term(sink, &quad.object); + write_term(sink, &quad.subject); + write_term(sink, &quad.predicate); +} + +pub fn encode_term(t: &EncodedTerm) -> Vec { + let mut vec = Vec::with_capacity(WRITTEN_TERM_MAX_SIZE); + write_term(&mut vec, t); + vec +} + +pub fn encode_graph(t1: StrHash) -> Vec { + let mut vec = Vec::with_capacity(17); + write_term(&mut vec, &EncodedTerm::NamedNode { iri_id: t1 }); + vec +} + +pub fn encode_term_pair(t1: &EncodedTerm, t2: &EncodedTerm) -> Vec { + let mut vec = Vec::with_capacity(2 * WRITTEN_TERM_MAX_SIZE); + write_term(&mut vec, t1); + write_term(&mut vec, t2); + vec +} + +pub fn encode_graph_term(t1: StrHash, t2: EncodedTerm) -> Vec { + let mut vec = Vec::with_capacity(WRITTEN_TERM_MAX_SIZE + 17); + write_term(&mut vec, &EncodedTerm::NamedNode { iri_id: t1 }); + write_term(&mut vec, &t2); + vec +} + +pub fn encode_term_triple(t1: &EncodedTerm, t2: &EncodedTerm, t3: &EncodedTerm) -> Vec { + let mut vec = Vec::with_capacity(3 * WRITTEN_TERM_MAX_SIZE); + write_term(&mut vec, t1); + write_term(&mut vec, t2); + write_term(&mut vec, t3); + vec +} + +pub fn encode_term_graph_pair(t1: StrHash, t2: EncodedTerm, t3: EncodedTerm) -> Vec { + let mut vec = Vec::with_capacity(2 * WRITTEN_TERM_MAX_SIZE + 17); + write_term(&mut vec, &EncodedTerm::NamedNode { iri_id: t1 }); + write_term(&mut vec, &t2); + write_term(&mut vec, &t3); + 
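These `encode_*` helpers work because the quad indexes are plain concatenations of term encodings: finding all quads with a given subject, or a given subject and predicate, is just a RocksDB prefix scan over the SPOG family. A sketch of that idea; the `spog_prefix` helper is hypothetical, the real callers are the `quads_for_*` methods in `storage/mod.rs`:

```rust
// Hypothetical helper: build an SPOG prefix for (subject) or (subject, predicate).
fn spog_prefix(subject: &EncodedTerm, predicate: Option<&EncodedTerm>) -> Vec<u8> {
    let mut prefix = Vec::with_capacity(2 * WRITTEN_TERM_MAX_SIZE);
    write_term(&mut prefix, subject);
    if let Some(predicate) = predicate {
        write_term(&mut prefix, predicate);
    }
    prefix // every key starting with these bytes matches the pattern
}
```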
vec +} + +pub fn encode_term_quad( + t1: &EncodedTerm, + t2: &EncodedTerm, + t3: &EncodedTerm, + t4: &EncodedTerm, +) -> Vec { + let mut vec = Vec::with_capacity(4 * WRITTEN_TERM_MAX_SIZE); + write_term(&mut vec, t1); + write_term(&mut vec, t2); + write_term(&mut vec, t3); + write_term(&mut vec, t4); + vec +} + +pub fn encode_term_graph_triple( + t1: StrHash, + t2: EncodedTerm, + t3: EncodedTerm, + t4: EncodedTerm, +) -> Vec { + let mut vec = Vec::with_capacity(3 * WRITTEN_TERM_MAX_SIZE + 17); + write_term(&mut vec, &EncodedTerm::NamedNode { iri_id: t1 }); + write_term(&mut vec, &t2); + write_term(&mut vec, &t3); + write_term(&mut vec, &t4); + vec +} + +pub fn write_term(sink: &mut Vec, term: &EncodedTerm) { + match term { + EncodedTerm::DefaultGraph => (), + EncodedTerm::NamedNode { iri_id } => { + sink.push(TYPE_NAMED_NODE_ID); + sink.extend_from_slice(&iri_id.to_be_bytes()); + } + EncodedTerm::NumericalBlankNode { id } => { + sink.push(TYPE_NUMERICAL_BLANK_NODE_ID); + sink.extend_from_slice(&id.to_be_bytes()) + } + EncodedTerm::SmallBlankNode(id) => { + sink.push(TYPE_SMALL_BLANK_NODE_ID); + sink.extend_from_slice(&id.to_be_bytes()) + } + EncodedTerm::BigBlankNode { id_id } => { + sink.push(TYPE_BIG_BLANK_NODE_ID); + sink.extend_from_slice(&id_id.to_be_bytes()); + } + EncodedTerm::SmallStringLiteral(value) => { + sink.push(TYPE_SMALL_STRING_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::BigStringLiteral { value_id } => { + sink.push(TYPE_BIG_STRING_LITERAL); + sink.extend_from_slice(&value_id.to_be_bytes()); + } + EncodedTerm::SmallSmallLangStringLiteral { value, language } => { + sink.push(TYPE_SMALL_SMALL_LANG_STRING_LITERAL); + sink.extend_from_slice(&language.to_be_bytes()); + sink.extend_from_slice(&value.to_be_bytes()); + } + EncodedTerm::SmallBigLangStringLiteral { value, language_id } => { + sink.push(TYPE_SMALL_BIG_LANG_STRING_LITERAL); + sink.extend_from_slice(&language_id.to_be_bytes()); + sink.extend_from_slice(&value.to_be_bytes()); + } + EncodedTerm::BigSmallLangStringLiteral { value_id, language } => { + sink.push(TYPE_BIG_SMALL_LANG_STRING_LITERAL); + sink.extend_from_slice(&language.to_be_bytes()); + sink.extend_from_slice(&value_id.to_be_bytes()); + } + EncodedTerm::BigBigLangStringLiteral { + value_id, + language_id, + } => { + sink.push(TYPE_BIG_BIG_LANG_STRING_LITERAL); + sink.extend_from_slice(&language_id.to_be_bytes()); + sink.extend_from_slice(&value_id.to_be_bytes()); + } + EncodedTerm::SmallTypedLiteral { value, datatype_id } => { + sink.push(TYPE_SMALL_TYPED_LITERAL); + sink.extend_from_slice(&datatype_id.to_be_bytes()); + sink.extend_from_slice(&value.to_be_bytes()); + } + EncodedTerm::BigTypedLiteral { + value_id, + datatype_id, + } => { + sink.push(TYPE_BIG_TYPED_LITERAL); + sink.extend_from_slice(&datatype_id.to_be_bytes()); + sink.extend_from_slice(&value_id.to_be_bytes()); + } + EncodedTerm::BooleanLiteral(value) => sink.push(if bool::from(*value) { + TYPE_BOOLEAN_LITERAL_TRUE + } else { + TYPE_BOOLEAN_LITERAL_FALSE + }), + EncodedTerm::FloatLiteral(value) => { + sink.push(TYPE_FLOAT_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::DoubleLiteral(value) => { + sink.push(TYPE_DOUBLE_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::IntegerLiteral(value) => { + sink.push(TYPE_INTEGER_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::DecimalLiteral(value) => { + sink.push(TYPE_DECIMAL_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + 
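Note the first arm of `write_term` above: `EncodedTerm::DefaultGraph` writes no bytes at all. This is why the default graph gets its own `dspo`/`dpos`/`dosp` column families later in `storage/mod.rs`: a graph term that encodes to zero bytes cannot take part in a key permutation, so default-graph quads are stored as bare triples. A small sketch of the consequence:

```rust
// Sketch: encoding the default graph term contributes nothing to a key.
fn default_graph_is_zero_bytes() {
    let mut sink = Vec::new();
    write_term(&mut sink, &EncodedTerm::DefaultGraph);
    assert!(sink.is_empty());
}
```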
EncodedTerm::DateTimeLiteral(value) => { + sink.push(TYPE_DATE_TIME_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::TimeLiteral(value) => { + sink.push(TYPE_TIME_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::DurationLiteral(value) => { + sink.push(TYPE_DURATION_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::DateLiteral(value) => { + sink.push(TYPE_DATE_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::GYearMonthLiteral(value) => { + sink.push(TYPE_G_YEAR_MONTH_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::GYearLiteral(value) => { + sink.push(TYPE_G_YEAR_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::GMonthDayLiteral(value) => { + sink.push(TYPE_G_MONTH_DAY_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::GDayLiteral(value) => { + sink.push(TYPE_G_DAY_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::GMonthLiteral(value) => { + sink.push(TYPE_G_MONTH_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::YearMonthDurationLiteral(value) => { + sink.push(TYPE_YEAR_MONTH_DURATION_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::DayTimeDurationLiteral(value) => { + sink.push(TYPE_DAY_TIME_DURATION_LITERAL); + sink.extend_from_slice(&value.to_be_bytes()) + } + EncodedTerm::Triple(value) => { + sink.push(TYPE_TRIPLE); + write_term(sink, &value.subject); + write_term(sink, &value.predicate); + write_term(sink, &value.object); + } + } +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + use crate::oxigraph::model::TermRef; + use crate::oxigraph::storage::numeric_encoder::*; + use std::cell::RefCell; + use std::collections::HashMap; + + #[derive(Default)] + struct MemoryStrStore { + id2str: RefCell>, + } + + impl StrLookup for MemoryStrStore { + fn get_str(&self, key: &StrHash) -> Result, StorageError> { + Ok(self.id2str.borrow().get(key).cloned()) + } + } + + impl MemoryStrStore { + fn insert_term(&self, term: TermRef<'_>, encoded: &EncodedTerm) { + insert_term(term, encoded, &mut |h, v| { + self.insert_str(h, v); + Ok(()) + }) + .unwrap(); + } + + fn insert_str(&self, key: &StrHash, value: &str) { + self.id2str + .borrow_mut() + .entry(*key) + .or_insert_with(|| value.to_owned()); + } + } + + #[test] + fn test_encoding() { + use crate::oxigraph::model::vocab::xsd; + use crate::oxigraph::model::*; + + let store = MemoryStrStore::default(); + let terms: Vec = vec![ + NamedNode::new_unchecked("http://foo.com").into(), + NamedNode::new_unchecked("http://bar.com").into(), + NamedNode::new_unchecked("http://foo.com").into(), + BlankNode::default().into(), + BlankNode::new_unchecked("bnode").into(), + BlankNode::new_unchecked("foo-bnode-thisisaverylargeblanknode").into(), + Literal::new_simple_literal("literal").into(), + BlankNode::new_unchecked("foo-literal-thisisaverylargestringliteral").into(), + Literal::from(true).into(), + Literal::from(1.2).into(), + Literal::from(1).into(), + Literal::from("foo-string").into(), + Literal::new_language_tagged_literal_unchecked("foo-fr", "fr").into(), + Literal::new_language_tagged_literal_unchecked( + "foo-fr-literal-thisisaverylargelanguagetaggedstringliteral", + "fr", + ) + .into(), + Literal::new_language_tagged_literal_unchecked( + "foo-big", + "fr-FR-Latn-x-foo-bar-baz-bat-aaaa-bbbb-cccc", + ) + .into(), + Literal::new_language_tagged_literal_unchecked( + 
"foo-big-literal-thisisaverylargelanguagetaggedstringliteral", + "fr-FR-Latn-x-foo-bar-baz-bat-aaaa-bbbb-cccc", + ) + .into(), + Literal::new_typed_literal("-1.32", xsd::DECIMAL).into(), + Literal::new_typed_literal("2020-01-01T01:01:01Z", xsd::DATE_TIME).into(), + Literal::new_typed_literal("2020-01-01", xsd::DATE).into(), + Literal::new_typed_literal("01:01:01Z", xsd::TIME).into(), + Literal::new_typed_literal("2020-01", xsd::G_YEAR_MONTH).into(), + Literal::new_typed_literal("2020", xsd::G_YEAR).into(), + Literal::new_typed_literal("--01-01", xsd::G_MONTH_DAY).into(), + Literal::new_typed_literal("--01", xsd::G_MONTH).into(), + Literal::new_typed_literal("---01", xsd::G_DAY).into(), + Literal::new_typed_literal("PT1S", xsd::DURATION).into(), + Literal::new_typed_literal("PT1S", xsd::DAY_TIME_DURATION).into(), + Literal::new_typed_literal("P1Y", xsd::YEAR_MONTH_DURATION).into(), + Literal::new_typed_literal("-foo", NamedNode::new_unchecked("http://foo.com")).into(), + Literal::new_typed_literal( + "-foo-thisisaverybigtypedliteralwiththefoodatatype", + NamedNode::new_unchecked("http://foo.com"), + ) + .into(), + Triple::new( + NamedNode::new_unchecked("http://foo.com"), + NamedNode::new_unchecked("http://bar.com"), + Literal::from(true), + ) + .into(), + ]; + for term in terms { + let encoded = term.as_ref().into(); + store.insert_term(term.as_ref(), &encoded); + assert_eq!(encoded, term.as_ref().into()); + assert_eq!(term, store.decode_term(&encoded).unwrap()); + + let mut buffer = Vec::new(); + write_term(&mut buffer, &encoded); + assert_eq!(encoded, buffer.as_slice().read_term().unwrap()); + } + } +} diff --git a/ng-oxigraph/src/oxigraph/storage/error.rs b/ng-oxigraph/src/oxigraph/storage/error.rs new file mode 100644 index 0000000..b974683 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/storage/error.rs @@ -0,0 +1,139 @@ +use crate::oxigraph::io::{RdfFormat, RdfParseError}; +use crate::oxigraph::storage::numeric_encoder::EncodedTerm; +use crate::oxrdf::TermRef; +use oxiri::IriParseError; +use std::error::Error; +use std::io; + +/// An error related to storage operations (reads, writes...). +#[derive(Debug, thiserror::Error)] +#[non_exhaustive] +pub enum StorageError { + /// Error from the OS I/O layer. + #[error(transparent)] + Io(#[from] io::Error), + /// Error related to data corruption. + #[error(transparent)] + Corruption(#[from] CorruptionError), + #[doc(hidden)] + #[error("{0}")] + Other(#[source] Box), +} + +impl From for io::Error { + #[inline] + fn from(error: StorageError) -> Self { + match error { + StorageError::Io(error) => error, + StorageError::Corruption(error) => error.into(), + StorageError::Other(error) => Self::new(io::ErrorKind::Other, error), + } + } +} + +/// An error return if some content in the database is corrupted. +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct CorruptionError(#[from] CorruptionErrorKind); + +/// An error return if some content in the database is corrupted. +#[derive(Debug, thiserror::Error)] +enum CorruptionErrorKind { + #[error("{0}")] + Msg(String), + #[error("{0}")] + Other(#[source] Box), +} + +impl CorruptionError { + /// Builds an error from a printable error message. 
+ #[inline] + pub(crate) fn new(error: impl Into>) -> Self { + Self(CorruptionErrorKind::Other(error.into())) + } + + #[inline] + pub(crate) fn from_encoded_term(encoded: &EncodedTerm, term: &TermRef<'_>) -> Self { + // TODO: eventually use a dedicated error enum value + Self::msg(format!("Invalid term encoding {encoded:?} for {term}")) + } + + #[inline] + pub(crate) fn from_missing_column_family_name(name: &'static str) -> Self { + // TODO: eventually use a dedicated error enum value + Self::msg(format!("Column family {name} does not exist")) + } + + /// Builds an error from a printable error message. + #[inline] + pub(crate) fn msg(msg: impl Into) -> Self { + Self(CorruptionErrorKind::Msg(msg.into())) + } +} + +impl From for io::Error { + #[inline] + fn from(error: CorruptionError) -> Self { + Self::new(io::ErrorKind::InvalidData, error) + } +} + +/// An error raised while loading a file into a [`Store`](crate::oxigraph::store::Store). +#[derive(Debug, thiserror::Error)] +pub enum LoaderError { + /// An error raised while reading the file. + #[error(transparent)] + Parsing(#[from] RdfParseError), + /// An error raised during the insertion in the store. + #[error(transparent)] + Storage(#[from] StorageError), + /// The base IRI is invalid. + #[error("Invalid base IRI '{iri}': {error}")] + InvalidBaseIri { + /// The IRI itself. + iri: String, + /// The parsing error. + #[source] + error: IriParseError, + }, +} + +impl From for io::Error { + #[inline] + fn from(error: LoaderError) -> Self { + match error { + LoaderError::Storage(error) => error.into(), + LoaderError::Parsing(error) => error.into(), + LoaderError::InvalidBaseIri { .. } => { + Self::new(io::ErrorKind::InvalidInput, error.to_string()) + } + } + } +} + +/// An error raised while writing a file from a [`Store`](crate::oxigraph::store::Store). +#[derive(Debug, thiserror::Error)] +pub enum SerializerError { + /// An error raised while writing the content. + #[error(transparent)] + Io(#[from] io::Error), + /// An error raised during the lookup in the store. + #[error(transparent)] + Storage(#[from] StorageError), + /// A format compatible with [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset) is required. + #[error("A RDF format supporting datasets was expected, {0} found")] + DatasetFormatExpected(RdfFormat), +} + +impl From for io::Error { + #[inline] + fn from(error: SerializerError) -> Self { + match error { + SerializerError::Storage(error) => error.into(), + SerializerError::Io(error) => error, + SerializerError::DatasetFormatExpected(_) => { + Self::new(io::ErrorKind::InvalidInput, error.to_string()) + } + } + } +} diff --git a/ng-oxigraph/src/oxigraph/storage/mod.rs b/ng-oxigraph/src/oxigraph/storage/mod.rs new file mode 100644 index 0000000..b70aa86 --- /dev/null +++ b/ng-oxigraph/src/oxigraph/storage/mod.rs @@ -0,0 +1,2912 @@ +// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// partial Copyright (c) 2018 Oxigraph developers +// All work licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice or not, may not be copied, modified, or distributed except +// according to those terms. 
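Before the module body, a note on consuming the error types defined above in `error.rs`: `StorageError` is `#[non_exhaustive]`, so downstream code must keep a catch-all arm even though only three variants exist today. A minimal sketch:

```rust
// Sketch: matching on a #[non_exhaustive] enum requires a wildcard arm.
fn describe(error: &StorageError) -> &'static str {
    match error {
        StorageError::Io(_) => "I/O failure",
        StorageError::Corruption(_) => "database corruption",
        _ => "other storage error", // future variants land here
    }
}
```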
+ +#![allow(clippy::same_name_method)] +use crate::oxigraph::model::Quad; +use crate::oxigraph::model::{GraphNameRef, NamedOrBlankNodeRef, QuadRef, TermRef}; +use crate::oxigraph::storage::backend::{Reader, Transaction}; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use crate::oxigraph::storage::binary_encoder::LATEST_STORAGE_VERSION; +use crate::oxigraph::storage::binary_encoder::{ + decode_term, encode_graph, encode_graph_term, encode_term, encode_term_graph_pair, + encode_term_graph_triple, encode_term_pair, encode_term_quad, encode_term_triple, + write_gosp_quad, write_gpos_quad, write_gspo_quad, write_osp_quad, write_ospg_quad, + write_pos_quad, write_posg_quad, write_spo_quad, write_spog_quad, write_term, QuadEncoding, + WRITTEN_TERM_MAX_SIZE, +}; +pub use crate::oxigraph::storage::error::{ + CorruptionError, LoaderError, SerializerError, StorageError, +}; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use crate::oxigraph::storage::numeric_encoder::Decoder; +use crate::oxigraph::storage::numeric_encoder::{ + insert_term, EncodedQuad, EncodedTerm, StrHash, StrLookup, +}; +use crate::oxrdf::NamedNodeRef; +use backend::{ColumnFamily, ColumnFamilyDefinition, Db, Iter}; +use lazy_static::lazy_static; +use regex::Regex; + +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::collections::VecDeque; +use std::collections::{HashMap, HashSet}; +use std::error::Error; +use std::io::Read; +use std::iter::Peekable; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::mem::{swap, take}; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::path::{Path, PathBuf}; +use std::sync::Arc; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::sync::Mutex; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::{io, thread}; + +use self::numeric_encoder::EncodedTriple; + +use ng_repo::log::*; + +mod backend; +mod binary_encoder; +mod error; +pub mod numeric_encoder; +pub mod small_string; + +const ID2STR_CF: &str = "id2str"; +const SPOG_CF: &str = "spog"; +const POSG_CF: &str = "posg"; +const OSPG_CF: &str = "ospg"; +const GSPO_CF: &str = "gspo"; +const GPOS_CF: &str = "gpos"; +const GOSP_CF: &str = "gosp"; +const DSPO_CF: &str = "dspo"; //TODO: remove all the DXXX as we don't use the default graph anymore +const DPOS_CF: &str = "dpos"; +const DOSP_CF: &str = "dosp"; +const HEADS_CF: &str = "heads"; +const PAST_CF: &str = "past"; +const REMOVED_CF: &str = "removed"; +const BRANCHES_CF: &str = "branches"; +const STORES_CF: &str = "stores"; +const NAMES_CF: &str = "names"; +//const GRAPHS_CF: &str = "graphs"; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +const DEFAULT_CF: &str = "default"; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +const DEFAULT_BULK_LOAD_BATCH_SIZE: usize = 1_000_000; + +const DID_PREFIX: &str = "did:ng"; + +lazy_static! { + #[doc(hidden)] + static ref RE_REPO: Regex = + Regex::new(r"^did:ng:o:([A-Za-z0-9-_]*):v:([A-Za-z0-9-_]*)$").unwrap(); + #[doc(hidden)] + static ref RE_BRANCH: Regex = + Regex::new(r"^:b:([A-Za-z0-9-_]*)$").unwrap(); + #[doc(hidden)] + static ref RE_COMMITS: Regex = + Regex::new(r":c:([A-Za-z0-9-_]*)").unwrap(); + #[doc(hidden)] + static ref RE_NAMED_BRANCH_OR_COMMIT: Regex = + Regex::new(r"^:a:([A-Za-z0-9-_%]*)$").unwrap(); //TODO: allow international chars. 
disallow digit as first char +} + +pub(crate) enum MatchBy { + Repos(RepoIterator), + Commits { + heads: HashSet, + at_current_heads: bool, + original_graph_name: StrHash, + }, +} + +/// Low level storage primitives +#[derive(Clone)] +pub struct Storage { + db: Db, + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + default_cf: ColumnFamily, + id2str_cf: ColumnFamily, + spog_cf: ColumnFamily, + posg_cf: ColumnFamily, + ospg_cf: ColumnFamily, + gspo_cf: ColumnFamily, + gpos_cf: ColumnFamily, + gosp_cf: ColumnFamily, + dspo_cf: ColumnFamily, + dpos_cf: ColumnFamily, + dosp_cf: ColumnFamily, + //graphs_cf: ColumnFamily, + heads_cf: ColumnFamily, + past_cf: ColumnFamily, + removed_cf: ColumnFamily, + branches_cf: ColumnFamily, + stores_cf: ColumnFamily, + names_cf: ColumnFamily, +} + +impl Storage { + pub fn new() -> Result { + Self::setup(Db::new(Self::column_families())?) + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + pub fn open(path: &Path, key: Option<[u8; 32]>) -> Result { + Self::setup(Db::open_read_write( + Some(path), + Self::column_families(), + key, + )?) + } + + // #[cfg(all(not(target_family = "wasm"),not(docsrs)))] + // pub fn open_secondary(primary_path: &Path) -> Result { + // Self::setup(Db::open_secondary( + // primary_path, + // None, + // Self::column_families(), + // )?) + // } + + // #[cfg(all(not(target_family = "wasm"),not(docsrs)))] + // pub fn open_persistent_secondary( + // primary_path: &Path, + // secondary_path: &Path, + // ) -> Result { + // Self::setup(Db::open_secondary( + // primary_path, + // Some(secondary_path), + // Self::column_families(), + // )?) + // } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + pub fn open_read_only(path: &Path, key: Option<[u8; 32]>) -> Result { + Self::setup(Db::open_read_only(path, Self::column_families(), key)?) + } + + fn column_families() -> Vec { + vec![ + ColumnFamilyDefinition { + name: ID2STR_CF, + use_iter: false, + min_prefix_size: 0, + unordered_writes: true, + }, + ColumnFamilyDefinition { + name: SPOG_CF, + use_iter: true, + min_prefix_size: 17, // named or blank node start + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: POSG_CF, + use_iter: true, + min_prefix_size: 17, // named node start + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: OSPG_CF, + use_iter: true, + min_prefix_size: 0, // There are small literals... + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: GSPO_CF, + use_iter: true, + min_prefix_size: 17, // named or blank node start + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: GPOS_CF, + use_iter: true, + min_prefix_size: 17, // named or blank node start + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: GOSP_CF, + use_iter: true, + min_prefix_size: 17, // named or blank node start + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: DSPO_CF, + use_iter: true, + min_prefix_size: 17, // named or blank node start + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: DPOS_CF, + use_iter: true, + min_prefix_size: 17, // named or blank node start + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: DOSP_CF, + use_iter: true, + min_prefix_size: 0, // There are small literals... 
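The `min_prefix_size` values in these definitions encode the key layout: quad families whose keys start with a named or blank node can assume at least 17 bytes (one type-tag byte plus a 16-byte `StrHash` or blank-node id), while object-first families (`ospg`, `dosp`) may start with a small inline literal and so guarantee nothing. The NextGraph-specific families use raw hash prefixes instead (16 to 33 bytes). A sketch of the 17-byte bound, assuming the encodings from `binary_encoder.rs`:

```rust
// Sketch: a node-led key always starts with a type tag followed by a 16-byte id.
const NODE_LED_KEY_MIN: usize = 1 /* type tag */ + 16 /* StrHash or blank node id */; // == 17
```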
+ unordered_writes: false, + }, + // ColumnFamilyDefinition { + // name: GRAPHS_CF, + // use_iter: true, + // min_prefix_size: 17, // named or blank node start + // unordered_writes: false, + // }, + ColumnFamilyDefinition { + name: HEADS_CF, + use_iter: true, + min_prefix_size: 32, + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: PAST_CF, + use_iter: true, + min_prefix_size: 16, + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: REMOVED_CF, + use_iter: true, + min_prefix_size: 17, + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: BRANCHES_CF, + use_iter: false, + min_prefix_size: 33, + unordered_writes: true, + }, + ColumnFamilyDefinition { + name: STORES_CF, + use_iter: true, + min_prefix_size: 16, + unordered_writes: false, + }, + ColumnFamilyDefinition { + name: NAMES_CF, + use_iter: false, + min_prefix_size: 16, + unordered_writes: true, + }, + ] + } + + fn setup(db: Db) -> Result { + let this = Self { + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + default_cf: db.column_family(DEFAULT_CF)?, + id2str_cf: db.column_family(ID2STR_CF)?, + spog_cf: db.column_family(SPOG_CF)?, + posg_cf: db.column_family(POSG_CF)?, + ospg_cf: db.column_family(OSPG_CF)?, + gspo_cf: db.column_family(GSPO_CF)?, + gpos_cf: db.column_family(GPOS_CF)?, + gosp_cf: db.column_family(GOSP_CF)?, + dspo_cf: db.column_family(DSPO_CF)?, + dpos_cf: db.column_family(DPOS_CF)?, + dosp_cf: db.column_family(DOSP_CF)?, + //graphs_cf: db.column_family(GRAPHS_CF)?, + heads_cf: db.column_family(HEADS_CF)?, + past_cf: db.column_family(PAST_CF)?, + removed_cf: db.column_family(REMOVED_CF)?, + branches_cf: db.column_family(BRANCHES_CF)?, + stores_cf: db.column_family(STORES_CF)?, + names_cf: db.column_family(NAMES_CF)?, + db, + }; + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + this.migrate()?; + Ok(this) + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + fn migrate(&self) -> Result<(), StorageError> { + let mut version = self.ensure_version()?; + // if version == 0 { + // // We migrate to v1 + // let mut graph_names = HashSet::new(); + // for quad in self.snapshot().quads() { + // let quad = quad?; + // if !quad.graph_name.is_default_graph() { + // graph_names.insert(quad.graph_name); + // } + // } + // let mut graph_names = graph_names + // .into_iter() + // .map(|g| encode_term(&g)) + // .collect::>(); + // graph_names.sort_unstable(); + // let mut stt_file = self.db.new_sst_file()?; + // for k in graph_names { + // stt_file.insert_empty(&k)?; + // } + // self.db + // .insert_stt_files(&[(&self.graphs_cf, stt_file.finish()?)])?; + // version = 1; + // self.update_version(version)?; + // } + + match version { + _ if version < LATEST_STORAGE_VERSION => Err(CorruptionError::msg(format!( + "The RocksDB database is using the outdated encoding version {version}. Automated migration is not supported, please dump the store dataset using a compatible Oxigraph version and load it again using the current version" + + )).into()), + LATEST_STORAGE_VERSION => Ok(()), + _ => Err(CorruptionError::msg(format!( + "The RocksDB database is using the too recent version {version}. Upgrade to the latest Oxigraph version to load this database" + + )).into()) + } + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + fn ensure_version(&self) -> Result { + Ok( + if let Some(version) = self.db.get(&self.default_cf, b"oxversion")? 
{ + u64::from_be_bytes(version.as_ref().try_into().map_err(|e| { + CorruptionError::new(format!("Error while parsing the version key: {e}")) + })?) + } else { + self.update_version(LATEST_STORAGE_VERSION)?; + LATEST_STORAGE_VERSION + }, + ) + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + fn update_version(&self, version: u64) -> Result<(), StorageError> { + self.db + .insert(&self.default_cf, b"oxversion", &version.to_be_bytes())?; + self.db.flush() + } + + pub fn snapshot(&self) -> StorageReader { + StorageReader { + reader: self.db.snapshot(), + storage: self.clone(), + } + } + + pub fn ng_transaction<'a, 'b: 'a, T, E: Error + 'static + From>( + &'b self, + mut f: impl FnMut(StorageWriter<'a>) -> Result, + ) -> Result { + self.db.ng_transaction(|transaction| { + f(StorageWriter { + buffer: Vec::new(), + transaction, + storage: self, + }) + }) + } + + pub fn transaction<'a, 'b: 'a, T, E: Error + 'static + From>( + &'b self, + f: impl Fn(CommitWriter<'a>) -> Result, + ) -> Result { + self.db.transaction(|transaction| { + f(CommitWriter { + inserts: HashSet::new(), + removes: HashSet::new(), + transaction, + storage: self, + }) + }) + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + pub fn flush(&self) -> Result<(), StorageError> { + self.db.flush() + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + pub fn compact(&self) -> Result<(), StorageError> { + self.db.compact(&self.default_cf)?; + self.db.compact(&self.gspo_cf)?; + self.db.compact(&self.gpos_cf)?; + self.db.compact(&self.gosp_cf)?; + self.db.compact(&self.spog_cf)?; + self.db.compact(&self.posg_cf)?; + self.db.compact(&self.ospg_cf)?; + self.db.compact(&self.dspo_cf)?; + self.db.compact(&self.dpos_cf)?; + self.db.compact(&self.dosp_cf)?; + self.db.compact(&self.id2str_cf) + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + pub fn backup(&self, target_directory: &Path) -> Result<(), StorageError> { + self.db.backup(target_directory) + } +} + +pub struct StorageReader { + reader: Reader, + storage: Storage, +} + +// fn encode_one_hash(sh: &StrHash) -> &[u8] { +// &sh.to_be_bytes() +// } + +fn encode_two_hashes(sh1: &StrHash, sh2: &StrHash) -> Vec { + let mut vec = Vec::with_capacity(32); + vec.extend_from_slice(&sh1.to_be_bytes()); + vec.extend_from_slice(&sh2.to_be_bytes()); + vec +} + +fn encode_three_hashes(sh1: &StrHash, sh2: &StrHash, sh3: &StrHash) -> Vec { + let mut vec = Vec::with_capacity(48); + vec.extend_from_slice(&sh1.to_be_bytes()); + vec.extend_from_slice(&sh2.to_be_bytes()); + vec.extend_from_slice(&sh3.to_be_bytes()); + vec +} + +/*impl Iterator for DecodingGraphIterator { + type Item = Result; + + fn next(&mut self) -> Option { + if let Err(e) = self.iter.status() { + return Some(Err(e)); + } + let term: Result = decode_term(self.iter.key()?); + self.iter.next(); + Some(term) + } +} */ + +pub(crate) struct RepoIterator { + pub iter: Option, + pub single: Option, +} + +impl Iterator for RepoIterator { + type Item = StrHash; + + fn next(&mut self) -> Option { + if let Some(iter) = self.iter.as_mut() { + if let Err(e) = iter.status() { + return None; + } + let key = iter.key()?; + let mut buffer = [0; 16]; + (&key[16..32]).read_exact(&mut buffer).ok()?; + let res = StrHash::from_be_bytes(buffer); + iter.next(); + Some(res) + } else { + self.single.take() + } + } +} + +impl From for Box>> { + fn from(e: StorageError) -> Self { + Box::new(std::iter::once(Err(e))) + } +} + +struct NgRepoQuadIterator { + reader: StorageReader, + iter: RepoIterator, +} + +impl 
NgRepoQuadIterator { + fn new(storage_reader: &StorageReader, iter: RepoIterator) -> Self { + NgRepoQuadIterator { + reader: StorageReader { + reader: storage_reader.reader.clone(), + storage: storage_reader.storage.clone(), + }, + iter, + } + } + fn into_iter( + self, + f: impl Fn(&StorageReader, StrHash) -> DecodingQuadIterator + 'static, + ) -> Box>> { + Box::new(self.iter.map(move |repo| f(&self.reader, repo)).flatten()) + } +} + +struct NgPastQuadIterator<'a> { + reader: StorageReader, + iter: std::collections::hash_set::IntoIter, + current: Option, + f: Box DecodingNgQuadIterator + 'a>, +} + +impl<'a> Iterator for NgPastQuadIterator<'a> { + type Item = Result<(EncodedQuad, u8), StorageError>; + + fn next(&mut self) -> Option { + loop { + let next = self.find_next(); + if next.is_ok() { + return next.unwrap(); + } + } + } +} + +impl<'a> NgPastQuadIterator<'a> { + fn find_next(&mut self) -> Result>, bool> { + if self.current.is_none() { + let i = self.iter.next(); + if i.is_none() { + return Ok(None); + } + self.current = Some((self.f)(&self.reader, i.unwrap())); + } + let next = self.current.as_mut().unwrap().next(); + if next.is_none() { + self.current = None; + return Err(false); + } + Ok(next) + } + + fn new( + storage_reader: &StorageReader, + past: Arc>, + f: impl Fn(&StorageReader, StrHash) -> DecodingNgQuadIterator + 'a, + ) -> NgPastQuadIterator<'a> { + NgPastQuadIterator { + current: None, + reader: StorageReader { + reader: storage_reader.reader.clone(), + storage: storage_reader.storage.clone(), + }, + f: Box::new(f), + //TODO: avoid the copy of past (store a Vec instead of HashSet in cache) + // https://smallcultfollowing.com/babysteps/blog/2018/09/02/rust-pattern-iterating-an-over-a-rc-vec-t/ + iter: past.as_ref().clone().into_iter(), + } + } +} + +struct NgCommitQuadIterator { + iter: Peekable>>>, + at_current_heads: bool, + original_graph_name: StrHash, + current: Option, + current_add_is_removed: Option>, + current_is_added: bool, + reader: StorageReader, + past: Arc>, + skip_check_is_in_past: bool, +} +impl NgCommitQuadIterator { + fn end_of_triple(&mut self) -> Option> { + let mut ret = None; + if let Some(cur) = self.current.as_ref() { + if !self.current_is_added && !self.at_current_heads { + if let Some(removed) = &self.current_add_is_removed { + let removed_in = HashSet::from_iter( + removed + .iter() + .map(|c| { + if let Ok(res) = self.reader.ng_get_removed( + &cur.subject, + &cur.predicate, + &cur.object, + c, + ) { + res.into_iter() + } else { + HashSet::new().into_iter() + } + }) + .flatten(), + ); + // if at least one of removed_in is not in past, then we add it. 
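The subset check that follows is the heart of commit-pinned reads, so it is worth restating: when reading at a set of commits that is not the current heads, a triple whose add has since been removed must still be shown if at least one of the removing commits lies outside the requested causal past. Read as a predicate, a sketch with names matching the fields above:

```rust
// Sketch of the visibility rule NgCommitQuadIterator implements.
fn still_visible(
    at_current_heads: bool,
    removed_in: &HashSet<StrHash>, // commits that removed this (s, p, o)
    past: &HashSet<StrHash>,       // causal past of the requested heads
) -> bool {
    // At current heads a removal always wins; at a pinned past state the
    // removal only counts if every removing commit is inside that past.
    !at_current_heads && !removed_in.is_subset(past)
}
```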
+ if !removed_in.is_subset(&self.past) { + let cur = self.current.take().unwrap(); + ret = Some(Ok(EncodedQuad::new( + cur.subject, + cur.predicate, + cur.object, + EncodedTerm::NamedNode { + iri_id: self.original_graph_name, + }, + ))); + } + } + } + } + self.current_is_added = false; + self.current_add_is_removed = None; + self.current = None; + ret + } + + fn find_next(&mut self) -> Result>, bool> { + match self.iter.peek() { + None | Some(Err(_)) => match self.end_of_triple() { + Some(res) => Ok(Some(res)), + None => match self.iter.next() { + None => Ok(None), + Some(Err(e)) => Ok(Some(Err(e))), + _ => { + panic!(""); + } + }, + }, + Some(Ok((quad, value))) => { + if let EncodedTerm::NamedNode { iri_id } = quad.graph_name { + if self.skip_check_is_in_past || self.past.contains(&iri_id) { + let triple = EncodedTriple::new( + quad.subject.clone(), + quad.predicate.clone(), + quad.object.clone(), + ); + if let Some(cur) = self.current.as_ref() { + if &triple != cur { + match self.end_of_triple() { + Some(res) => return Ok(Some(res)), + None => { + self.current = Some(triple); + } + } + } + } else { + self.current = Some(triple); + } + let (q, value) = self.iter.next().unwrap().unwrap(); + if is_added(value) { + if self.current_is_added { + return Err(false); + } + self.current_is_added = true; + self.current_add_is_removed = None; + return Ok(Some(Ok(EncodedQuad::new( + q.subject, + q.predicate, + q.object, + EncodedTerm::NamedNode { + iri_id: self.original_graph_name, + }, + )))); + } else if is_removed(value) { + if !self.at_current_heads && !self.current_is_added { + if self.current_add_is_removed.is_none() { + self.current_add_is_removed = Some(HashSet::new()); + } + self.current_add_is_removed.as_mut().unwrap().insert(iri_id); + } + return Err(false); + } + } else { + self.iter.next(); + return Err(false); + } + } + Ok(None) + } + } + } +} + +impl Iterator for NgCommitQuadIterator { + type Item = Result; + + fn next(&mut self) -> Option { + loop { + let res = self.find_next(); + if let Ok(found) = res { + return found; + } + // if res.err().unwrap() { + // return None; + // } + } + } +} + +impl StorageReader { + pub(crate) fn parse_graph_name( + &self, + graph_name_string: &String, + iri_id: Option, + ) -> Result { + //log_info!("{}", graph_name_string); + let graph_name_string_len = graph_name_string.len(); + // TODO: deal with <:v> and <:v:n> + if graph_name_string_len < 100 { + return Err( + CorruptionError::msg("Invalid graph_name (too short) in parse_graph_name").into(), + ); + } + let (repo_part, other_part) = graph_name_string.split_at(100); + + let c = RE_REPO.captures(repo_part); + //log_info!("{:?}", c); + let (_repo, overlay) = if c.is_some() + && c.as_ref().unwrap().get(1).is_some() + && c.as_ref().unwrap().get(2).is_some() + { + let cap = c.unwrap(); + let o = cap.get(1).unwrap().as_str(); + let v = cap.get(2).unwrap().as_str(); + (o, v) + } else { + return Err(CorruptionError::msg( + "Invalid graph_name (does not start with did:ng:o:v) in parse_graph_name", + ) + .into()); + }; + + let ov_hash = StrHash::new(repo_part); + + let overlay_hash = StrHash::new(&format!("{DID_PREFIX}:v:{overlay}")); + + // we check that did:ng:o:v is present in dataset + self.get_str(&ov_hash)?.ok_or::( + CorruptionError::msg(format!("Graph {} not found in dataset", graph_name_string)) + .into(), + )?; + + Ok(if graph_name_string_len > 100 && iri_id.is_some() { + if graph_name_string_len < 104 { + return Err(CorruptionError::msg( + "Invalid graph_name (second part is too short) in 
parse_graph_name", + ) + .into()); + } + let original_graph_name = iri_id.unwrap(); + + match &other_part[0..3] { + ":b:" => { + RE_BRANCH.find(other_part).ok_or::( + CorruptionError::msg( + "Invalid graph_name (invalid branch part) in parse_graph_name", + ) + .into(), + )?; + let branch_hash = StrHash::new(&format!("{DID_PREFIX}{other_part}")); + let topic_hash = self.ng_get_branch_topic(&branch_hash, &overlay_hash)?; + let heads = self.ng_get_heads(&topic_hash, &overlay_hash)?; + MatchBy::Commits { + heads, + at_current_heads: true, + original_graph_name, + } + } + ":a:" => { + RE_NAMED_BRANCH_OR_COMMIT + .find(other_part) + .ok_or::( + CorruptionError::msg( + "Invalid graph_name (invalid named part) in parse_graph_name", + ) + .into(), + )?; + unimplemented!(); + } + ":c:" => { + let commits: Vec<&str> = RE_COMMITS + .find_iter(other_part) + .map(|m| m.as_str()) + .collect(); + if commits.is_empty() { + return Err(CorruptionError::msg( + "Invalid graph_name (invalid commit IDs) in parse_graph_name", + ) + .into()); + } + // TODO: check that all the commits are from the same branch + // TODO: if commits are exactly like current heads of branch, set at_current_heads = true (or if it is the main branch, use MatchBy::Repos) + MatchBy::Commits { + heads: HashSet::from_iter( + commits + .into_iter() + .map(|c| { let s = format!("{DID_PREFIX}{c}:v:{overlay}"); StrHash::new(&s) }), + ), + at_current_heads: false, + original_graph_name, + } + } + ":n:" => { + unimplemented!() + } + _ => { + return Err(CorruptionError::msg( + "Invalid graph_name (unknown second part) in parse_graph_name", + ) + .into()) + } + } + } else { + MatchBy::Repos(RepoIterator { + iter: None, + single: Some(ov_hash), + }) + }) + } + + pub fn ng_get_heads( + &self, + topic: &StrHash, + overlay: &StrHash, + ) -> Result, StorageError> { + let prefix = encode_two_hashes(topic, overlay); + let mut iter = self.reader.scan_prefix(&self.storage.heads_cf, &prefix)?; + let mut set: HashSet = HashSet::new(); + while let Some(key) = iter.key() { + let mut buffer = [0; 16]; + (&key[32..48]).read_exact(&mut buffer)?; + set.insert(StrHash::from_be_bytes(buffer)); + iter.next(); + } + if let Err(e) = iter.status() { + return Err(e); + } + Ok(set) + } + + pub fn ng_get_branch_topic( + &self, + branch: &StrHash, + overlay: &StrHash, + ) -> Result { + let mut key = Vec::with_capacity(33); + key.push(BRANCH_PREFIX); + key.extend_from_slice(&branch.to_be_bytes()); + key.extend_from_slice(&overlay.to_be_bytes()); + + let val = self + .reader + .get(&self.storage.branches_cf, &key)? 
+ .ok_or(CorruptionError::msg("Branch not found"))?; + + if val[0] == 1 { + return Err(CorruptionError::msg("This is a store branch").into()); + } + + let mut buffer = [0; 16]; + (&val[1..17]).read_exact(&mut buffer)?; + + Ok(StrHash::from_be_bytes(buffer)) + } + + pub fn ng_get_removed( + &self, + subject: &EncodedTerm, + predicate: &EncodedTerm, + object: &EncodedTerm, + commit: &StrHash, + ) -> Result, StorageError> { + let mut prefix = Vec::with_capacity(3 * WRITTEN_TERM_MAX_SIZE + 16); + write_term(&mut prefix, subject); + write_term(&mut prefix, predicate); + write_term(&mut prefix, object); + prefix.extend_from_slice(&commit.to_be_bytes()); + + let mut iter = self.reader.scan_prefix(&self.storage.removed_cf, &prefix)?; + let mut set: HashSet = HashSet::new(); + let prefix_len = prefix.len(); + while let Some(key) = iter.key() { + let mut buffer = [0; 16]; + (&key[prefix_len..prefix_len + 16]).read_exact(&mut buffer)?; + set.insert(StrHash::from_be_bytes(buffer)); + iter.next(); + } + if let Err(e) = iter.status() { + return Err(e); + } + Ok(set) + } + + fn ng_get_past(&self, commit: &StrHash) -> Result, bool)>, StorageError> { + let mut res = Vec::with_capacity(1); + let mut iter = self + .reader + .scan_prefix(&self.storage.past_cf, &commit.to_be_bytes())?; + let mut skip = false; + while let Some(key) = iter.key() { + let mut buffer = [0; 16]; + (&key[16..32]).read_exact(&mut buffer)?; + res.push(StrHash::from_be_bytes(buffer)); + if !skip && iter.value().unwrap()[0] == COMMIT_SKIP_NO_GRAPH { + skip = true; + } + iter.next(); + } + if let Err(e) = iter.status() { + return Err(e); + } + if res.is_empty() { + Ok(None) + } else { + Ok(Some((res, skip))) + } + } + + fn aggregate_causal_past( + &self, + aggregate: &mut HashMap, + current: StrHash, + cache: &HashMap>>, + ) -> Result<(), StorageError> { + if aggregate.contains_key(¤t) { + return Ok(()); + } + + if let Some(found_in_cache) = cache.get(¤t) { + aggregate.extend(found_in_cache.iter().map(|c| (*c, false))); + } else { + if let Some((past, skip)) = self.ng_get_past(¤t)? 
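`aggregate_causal_past` (continuing below) is a memoised depth-first walk over the commit DAG: the `aggregate` map doubles as the visited set, and the shared cache short-circuits subgraphs already expanded for a previous head. Recursion depth therefore tracks the longest commit chain; an equivalent iterative walk with an explicit stack is sketched here, should that ever become a concern (cache handling omitted for brevity):

```rust
// Sketch: iterative equivalent of the recursive walk (cache lookups omitted).
fn aggregate_causal_past_iter(
    reader: &StorageReader,
    head: StrHash,
    aggregate: &mut HashMap<StrHash, bool>,
) -> Result<(), StorageError> {
    let mut stack = vec![head];
    while let Some(current) = stack.pop() {
        if aggregate.contains_key(&current) {
            continue;
        }
        if let Some((past, skip)) = reader.ng_get_past(&current)? {
            aggregate.insert(current, skip);
            stack.extend(past);
        } else {
            // First commit of the branch holding content: keep it.
            aggregate.insert(current, false);
        }
    }
    Ok(())
}
```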
{ + aggregate.insert(current, skip); + + for next in past { + self.aggregate_causal_past(aggregate, next, cache)?; + } + } else { + // we add the last one (that doesnt have past) as it must be the first commit in branch that holds content + aggregate.insert(current, false); + } + } + Ok(()) + } + + pub fn past_for_heads( + &self, + heads: &HashSet, + ) -> Result>, StorageError> { + let mut res: HashSet = HashSet::new(); + let mut missing: Vec<&StrHash> = Vec::new(); + let mut ready: Vec> = Vec::new(); + { + let past_commits_cache = self.storage.db.past_commits_cache(); + let cache = past_commits_cache.read().unwrap(); + for head in heads { + if let Some(past) = cache.get(head) { + if heads.len() == 1 { + return Ok(Arc::clone(past)); + } + res.extend(past.iter()); + } else { + missing.push(head); + } + } + + for head in missing.iter() { + let mut aggregate: HashMap = HashMap::with_capacity(1); + + self.aggregate_causal_past(&mut aggregate, **head, &cache)?; + + ready.push(HashSet::from_iter( + aggregate + .into_iter() + .filter_map(|(c, skip)| (!skip).then_some(c)), + )); + } + } + + let past_commits_cache = self.storage.db.past_commits_cache(); + let mut cache = past_commits_cache.write().unwrap(); + + for (head, past) in missing.into_iter().zip(ready) { + let past = cache.entry(*head).or_insert(Arc::new(past)); + if heads.len() == 1 { + return Ok(Arc::clone(past)); + } + res.extend(past.iter()); + } + + Ok(Arc::new(res)) + } + + pub fn len(&self) -> Result { + Ok(self.reader.len(&self.storage.gspo_cf)? + self.reader.len(&self.storage.dspo_cf)?) + } + + pub fn is_empty(&self) -> Result { + Ok(self.reader.is_empty(&self.storage.gspo_cf)? + && self.reader.is_empty(&self.storage.dspo_cf)?) + } + + pub fn contains(&self, quad: &EncodedQuad) -> Result { + let mut buffer = Vec::with_capacity(4 * WRITTEN_TERM_MAX_SIZE); + if quad.graph_name.is_default_graph() { + write_spo_quad(&mut buffer, quad); + Ok(self.reader.contains_key(&self.storage.dspo_cf, &buffer)?) + } else { + write_gspo_quad(&mut buffer, quad); + Ok(self.reader.contains_key(&self.storage.gspo_cf, &buffer)?) 
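The `quads_for_pattern` dispatch that follows routes each combination of bound positions to the index whose key order turns the pattern into a prefix scan. The mapping, in brief (S/P/O bound, `?` free; the graph dimension is handled separately through `MatchBy`):

```rust
// Sketch of the routing implemented below:
//   S??  -> SPOG      SP?  -> SPOG      SPO  -> SPOG
//   ?P?  -> POSG      ?PO  -> POSG
//   ??O  -> OSPG      S?O  -> OSPG (object first, subject as second key component)
//   nothing bound     -> full SPOG scan
```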
+ } + } + + pub(crate) fn quads_for_pattern( + &self, + subject: Option<&EncodedTerm>, + predicate: Option<&EncodedTerm>, + object: Option<&EncodedTerm>, + graph_name: Option, + ) -> Box>> { + match subject { + Some(subject) => match predicate { + Some(predicate) => match object { + Some(object) => match graph_name { + Some(graph_name) => self.quads_for_subject_predicate_object_graph( + subject, predicate, object, graph_name, + ), + None => self.quads_for_subject_predicate_object(subject, predicate, object), + }, + None => match graph_name { + Some(graph_name) => { + self.quads_for_subject_predicate_graph(subject, predicate, graph_name) + } + None => self.quads_for_subject_predicate(subject, predicate), + }, + }, + None => match object { + Some(object) => match graph_name { + Some(graph_name) => { + self.quads_for_subject_object_graph(subject, object, graph_name) + } + None => self.quads_for_subject_object(subject, object), + }, + None => match graph_name { + Some(graph_name) => self.quads_for_subject_graph(subject, graph_name), + None => self.quads_for_subject(subject), + }, + }, + }, + None => match predicate { + Some(predicate) => match object { + Some(object) => match graph_name { + Some(graph_name) => { + self.quads_for_predicate_object_graph(predicate, object, graph_name) + } + None => self.quads_for_predicate_object(predicate, object), + }, + None => match graph_name { + Some(graph_name) => self.quads_for_predicate_graph(predicate, graph_name), + None => self.quads_for_predicate(predicate), + }, + }, + None => match object { + Some(object) => match graph_name { + Some(graph_name) => self.quads_for_object_graph(object, graph_name), + None => self.quads_for_object(object), + }, + None => match graph_name { + Some(graph_name) => self.quads_for_graph(graph_name), + None => self.quads(), + }, + }, + }, + } + } + + pub fn quads(&self) -> Box>> { + self.ng_spog_quads(&[]).into_dataset_iter() + } + + fn quads_in_named_graph(&self) -> DecodingQuadIterator { + self.gspo_quads(&[]) + } + + fn quads_for_subject( + &self, + subject: &EncodedTerm, + ) -> Box>> { + self.ng_spog_quads(&encode_term(subject)) + .into_dataset_iter() + } + + fn quads_for_subject_predicate( + &self, + subject: &EncodedTerm, + predicate: &EncodedTerm, + ) -> Box>> { + self.ng_spog_quads(&encode_term_pair(subject, predicate)) + .into_dataset_iter() + } + + fn quads_for_subject_predicate_object( + &self, + subject: &EncodedTerm, + predicate: &EncodedTerm, + object: &EncodedTerm, + ) -> Box>> { + self.ng_spog_quads(&encode_term_triple(subject, predicate, object)) + .into_dataset_iter() + } + + fn quads_for_subject_object( + &self, + subject: &EncodedTerm, + object: &EncodedTerm, + ) -> Box>> { + self.ng_ospg_quads(&encode_term_pair(object, subject)) + .into_dataset_iter() + } + + fn quads_for_predicate( + &self, + predicate: &EncodedTerm, + ) -> Box>> { + self.ng_posg_quads(&encode_term(predicate)) + .into_dataset_iter() + } + + fn quads_for_predicate_object( + &self, + predicate: &EncodedTerm, + object: &EncodedTerm, + ) -> Box>> { + self.ng_posg_quads(&encode_term_pair(predicate, object)) + .into_dataset_iter() + } + + fn quads_for_object( + &self, + object: &EncodedTerm, + ) -> Box>> { + self.ng_ospg_quads(&encode_term(object)).into_dataset_iter() + } + + fn quads_for_graph( + &self, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => NgRepoQuadIterator::new(self, repo_iter) + .into_iter(|reader, repo| reader.gspo_quads(&encode_graph(repo))), + MatchBy::Commits { + heads, + 
at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_graph_heads(heads, at_current_heads, original_graph_name), + } + } + + fn quads_for_subject_graph( + &self, + subject: &EncodedTerm, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => { + let subject = subject.clone(); + NgRepoQuadIterator::new(self, repo_iter).into_iter(move |reader, repo| { + reader.gspo_quads(&encode_graph_term(repo, subject.clone())) + }) + } + + MatchBy::Commits { + heads, + at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_heads( + self.ng_spog_quads(&encode_term(subject)), + heads, + at_current_heads, + original_graph_name, + ), + } + } + + fn quads_for_subject_predicate_graph( + &self, + subject: &EncodedTerm, + predicate: &EncodedTerm, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => { + let subject = subject.clone(); + let predicate = predicate.clone(); + NgRepoQuadIterator::new(self, repo_iter).into_iter(move |reader, repo| { + reader.gspo_quads(&encode_term_graph_pair( + repo, + subject.clone(), + predicate.clone(), + )) + }) + } + MatchBy::Commits { + heads, + at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_heads( + self.ng_spog_quads(&encode_term_pair(subject, predicate)), + heads, + at_current_heads, + original_graph_name, + ), + } + } + + fn repos_in_store(&self, store: &StrHash) -> RepoIterator { + RepoIterator { + single: None, + iter: Some( + self.reader + .scan_prefix(&self.storage.stores_cf, &store.to_be_bytes()) + .unwrap(), + ), // TODO: propagate error? + } + } + + fn single_repo(&self, repo: StrHash) -> RepoIterator { + RepoIterator { + single: Some(repo), + iter: None, + } + } + + fn quads_for_subject_predicate_object_graph( + &self, + subject: &EncodedTerm, + predicate: &EncodedTerm, + object: &EncodedTerm, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => { + let subject = subject.clone(); + let predicate = predicate.clone(); + let object = object.clone(); + NgRepoQuadIterator::new(self, repo_iter).into_iter(move |reader, repo| { + reader.gspo_quads(&encode_term_graph_triple( + repo, + subject.clone(), + predicate.clone(), + object.clone(), + )) + }) + } + MatchBy::Commits { + heads, + at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_heads( + self.ng_spog_quads(&encode_term_triple(subject, predicate, object)), + heads, + at_current_heads, + original_graph_name, + ), + } + } + + pub fn quads_for_ng_iter_graph_heads( + &self, + heads: HashSet, + at_current_heads: bool, + original_graph_name: StrHash, + ) -> Box>> { + let past = self.past_for_heads(&heads); + if past.is_err() { + return past.unwrap_err().into(); + } + let past = past.unwrap(); + + let j = NgPastQuadIterator::new(self, Arc::clone(&past), |reader, c| { + reader.ng_gspo_quads(&encode_graph(c)) + }); + let i: Box>> = Box::new(j); + + Box::new(NgCommitQuadIterator { + iter: i.peekable(), + at_current_heads, + original_graph_name, + current: None, + current_is_added: false, + current_add_is_removed: None, + reader: StorageReader { + reader: self.reader.clone(), + storage: self.storage.clone(), + }, + past: Arc::clone(&past), + skip_check_is_in_past: true, + }) + } + + pub fn quads_for_ng_iter_heads( + &self, + iter: DecodingNgQuadIterator, + heads: HashSet, + at_current_heads: bool, + original_graph_name: StrHash, + ) -> Box>> { + let past = self.past_for_heads(&heads); + if past.is_err() { + return past.unwrap_err().into(); + } + let 
past = past.unwrap(); + let i: Box>> = Box::new(iter); + Box::new(NgCommitQuadIterator { + iter: i.peekable(), + at_current_heads, + original_graph_name, + current: None, + current_is_added: false, + current_add_is_removed: None, + reader: StorageReader { + reader: self.reader.clone(), + storage: self.storage.clone(), + }, + past, + skip_check_is_in_past: false, + }) + } + + pub fn quads_for_subject_predicate_object_heads( + &self, + subject: &EncodedTerm, + predicate: &EncodedTerm, + object: &EncodedTerm, + heads: &HashSet, + at_current_heads: bool, + ) -> Result, StorageError> { + let past = self.past_for_heads(heads)?; + + let iter = self.ng_spog_quads(&encode_term_triple(subject, predicate, object)); + + Ok(HashSet::from_iter(iter.filter_map(|q| match q { + Err(_) => None, + Ok((quad, value)) => { + if let EncodedTerm::NamedNode { iri_id } = quad.graph_name { + if past.contains(&iri_id) { + if is_added(value) { + return Some(iri_id); + } else if is_removed(value) && !at_current_heads { + let removed_in = self + .ng_get_removed(subject, predicate, object, &iri_id) + .ok()?; + if removed_in.is_disjoint(&past) { + return Some(iri_id); + } + } + } + } + None + } + }))) + } + + fn quads_for_subject_object_graph( + &self, + subject: &EncodedTerm, + object: &EncodedTerm, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => { + let subject = subject.clone(); + let object = object.clone(); + NgRepoQuadIterator::new(self, repo_iter).into_iter(move |reader, repo| { + reader.gosp_quads(&encode_term_graph_pair( + repo, + object.clone(), + subject.clone(), + )) + }) + } + MatchBy::Commits { + heads, + at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_heads( + self.ng_ospg_quads(&encode_term_pair(object, subject)), + heads, + at_current_heads, + original_graph_name, + ), + } + } + + fn quads_for_predicate_graph( + &self, + predicate: &EncodedTerm, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => { + let predicate = predicate.clone(); + + NgRepoQuadIterator::new(self, repo_iter).into_iter(move |reader, repo| { + reader.gpos_quads(&encode_graph_term(repo, predicate.clone())) + }) + } + MatchBy::Commits { + heads, + at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_heads( + self.ng_posg_quads(&encode_term(predicate)), + heads, + at_current_heads, + original_graph_name, + ), + } + } + + fn quads_for_predicate_object_graph( + &self, + predicate: &EncodedTerm, + object: &EncodedTerm, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => { + let predicate = predicate.clone(); + let object = object.clone(); + NgRepoQuadIterator::new(self, repo_iter).into_iter(move |reader, repo| { + reader.gpos_quads(&encode_term_graph_pair( + repo, + predicate.clone(), + object.clone(), + )) + }) + } + + MatchBy::Commits { + heads, + at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_heads( + self.ng_posg_quads(&encode_term_pair(predicate, object)), + heads, + at_current_heads, + original_graph_name, + ), + } + } + + fn quads_for_object_graph( + &self, + object: &EncodedTerm, + graph_name: MatchBy, + ) -> Box>> { + match graph_name { + MatchBy::Repos(repo_iter) => { + let object = object.clone(); + NgRepoQuadIterator::new(self, repo_iter).into_iter(move |reader, repo| { + reader.gosp_quads(&encode_graph_term(repo, object.clone())) + }) + } + MatchBy::Commits { + heads, + at_current_heads, + original_graph_name, + } => self.quads_for_ng_iter_heads( + 
self.ng_ospg_quads(&encode_term(object)), + heads, + at_current_heads, + original_graph_name, + ), + } + } + + // pub fn named_graphs(&self) -> DecodingGraphIterator { + // DecodingGraphIterator { + // iter: self.reader.iter(&self.storage.graphs_cf).unwrap(), // TODO: propagate error? + // } + // } + + // pub fn contains_named_graph(&self, graph_name: &EncodedTerm) -> Result { + // self.reader + // .contains_key(&self.storage.graphs_cf, &encode_term(graph_name)) + // } + + fn ng_spog_quads(&self, prefix: &[u8]) -> DecodingNgQuadIterator { + self.ng_inner_quads(&self.storage.spog_cf, prefix, QuadEncoding::Spog) + } + + fn ng_posg_quads(&self, prefix: &[u8]) -> DecodingNgQuadIterator { + self.ng_inner_quads(&self.storage.posg_cf, prefix, QuadEncoding::Posg) + } + + fn ng_ospg_quads(&self, prefix: &[u8]) -> DecodingNgQuadIterator { + self.ng_inner_quads(&self.storage.ospg_cf, prefix, QuadEncoding::Ospg) + } + + fn ng_gspo_quads(&self, prefix: &[u8]) -> DecodingNgQuadIterator { + self.ng_inner_quads(&self.storage.gspo_cf, prefix, QuadEncoding::Gspo) + } + + fn ng_gpos_quads(&self, prefix: &[u8]) -> DecodingNgQuadIterator { + self.ng_inner_quads(&self.storage.gpos_cf, prefix, QuadEncoding::Gpos) + } + + fn ng_gosp_quads(&self, prefix: &[u8]) -> DecodingNgQuadIterator { + self.ng_inner_quads(&self.storage.gosp_cf, prefix, QuadEncoding::Gosp) + } + + fn ng_inner_quads( + &self, + column_family: &ColumnFamily, + prefix: &[u8], + encoding: QuadEncoding, + ) -> DecodingNgQuadIterator { + DecodingNgQuadIterator { + iter: self.reader.scan_prefix(column_family, prefix).unwrap(), // TODO: propagate error? + encoding, + } + } + + fn spog_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.spog_cf, prefix, QuadEncoding::Spog) + } + + fn posg_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.posg_cf, prefix, QuadEncoding::Posg) + } + + fn ospg_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.ospg_cf, prefix, QuadEncoding::Ospg) + } + + fn gspo_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.gspo_cf, prefix, QuadEncoding::Gspo) + } + + fn gpos_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.gpos_cf, prefix, QuadEncoding::Gpos) + } + + fn gosp_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.gosp_cf, prefix, QuadEncoding::Gosp) + } + + fn dspo_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.dspo_cf, prefix, QuadEncoding::Dspo) + } + + fn dpos_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.dpos_cf, prefix, QuadEncoding::Dpos) + } + + fn dosp_quads(&self, prefix: &[u8]) -> DecodingQuadIterator { + self.inner_quads(&self.storage.dosp_cf, prefix, QuadEncoding::Dosp) + } + + fn inner_quads( + &self, + column_family: &ColumnFamily, + prefix: &[u8], + encoding: QuadEncoding, + ) -> DecodingQuadIterator { + DecodingQuadIterator { + iter: self.reader.scan_prefix(column_family, prefix).unwrap(), // TODO: propagate error? + encoding, + } + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + pub fn get_str(&self, key: &StrHash) -> Result, StorageError> { + Ok(self + .storage + .db + .get(&self.storage.id2str_cf, &key.to_be_bytes())? + .map(|v| String::from_utf8(v.into())) + .transpose() + .map_err(CorruptionError::new)?) 
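+        // NOTE (illustrative summary, not part of the build): each ng_*_quads
+        // helper above scans one column family whose key order puts the bound
+        // terms first, so a prefix scan yields exactly the matching quads:
+        //
+        //   bound pattern     column family   prefix
+        //   (s, ?, ?, ?)      spog_cf         encode_term(s)
+        //   (s, p, ?, ?)      spog_cf         encode_term_pair(s, p)
+        //   (?, p, ?, ?)      posg_cf         encode_term(p)
+        //   (?, ?, o, ?)      ospg_cf         encode_term(o)
+        //   (?, ?, ?, g)      gspo_cf         encode_graph(g)
+        //   (?, ?, ?, ?)      spog_cf         empty prefix (full scan)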
+    }
+
+    #[cfg(any(target_family = "wasm", docsrs))]
+    pub fn get_str(&self, key: &StrHash) -> Result<Option<String>, StorageError> {
+        Ok(self
+            .reader
+            .get(&self.storage.id2str_cf, &key.to_be_bytes())?
+            .map(String::from_utf8)
+            .transpose()
+            .map_err(CorruptionError::new)?)
+    }
+
+    #[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+    pub fn contains_str(&self, key: &StrHash) -> Result<bool, StorageError> {
+        self.storage
+            .db
+            .contains_key(&self.storage.id2str_cf, &key.to_be_bytes())
+    }
+
+    #[cfg(any(target_family = "wasm", docsrs))]
+    pub fn contains_str(&self, key: &StrHash) -> Result<bool, StorageError> {
+        self.reader
+            .contains_key(&self.storage.id2str_cf, &key.to_be_bytes())
+    }
+
+    /// Validates that all the storage invariants hold in the data
+    #[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+    pub fn validate(&self) -> Result<(), StorageError> {
+        // triples
+        let dspo_size = self.dspo_quads(&[]).count();
+        if dspo_size != self.dpos_quads(&[]).count() || dspo_size != self.dosp_quads(&[]).count() {
+            return Err(CorruptionError::new(
+                "Not the same number of triples in dspo, dpos and dosp",
+            )
+            .into());
+        }
+        for spo in self.dspo_quads(&[]) {
+            let spo = spo?;
+            self.decode_quad(&spo)?; // We ensure that the quad is readable
+            if !self.storage.db.contains_key(
+                &self.storage.dpos_cf,
+                &encode_term_triple(&spo.predicate, &spo.object, &spo.subject),
+            )? {
+                return Err(CorruptionError::new("Quad in dspo and not in dpos").into());
+            }
+            if !self.storage.db.contains_key(
+                &self.storage.dosp_cf,
+                &encode_term_triple(&spo.object, &spo.subject, &spo.predicate),
+            )? {
+                return Err(CorruptionError::new("Quad in dspo and not in dosp").into());
+            }
+        }
+
+        // quads
+        let gspo_size = self.gspo_quads(&[]).count();
+        if gspo_size != self.gpos_quads(&[]).count()
+            || gspo_size != self.gosp_quads(&[]).count()
+            || gspo_size != self.spog_quads(&[]).count()
+            || gspo_size != self.posg_quads(&[]).count()
+            || gspo_size != self.ospg_quads(&[]).count()
+        {
+            return Err(CorruptionError::new(
+                "Not the same number of quads in gspo, gpos, gosp, spog, posg, and ospg",
+            )
+            .into());
+        }
+        for gspo in self.gspo_quads(&[]) {
+            let gspo = gspo?;
+            self.decode_quad(&gspo)?; // We ensure that the quad is readable
+            if !self.storage.db.contains_key(
+                &self.storage.gpos_cf,
+                &encode_term_quad(
+                    &gspo.graph_name,
+                    &gspo.predicate,
+                    &gspo.object,
+                    &gspo.subject,
+                ),
+            )? {
+                return Err(CorruptionError::new("Quad in gspo and not in gpos").into());
+            }
+            if !self.storage.db.contains_key(
+                &self.storage.gosp_cf,
+                &encode_term_quad(
+                    &gspo.graph_name,
+                    &gspo.object,
+                    &gspo.subject,
+                    &gspo.predicate,
+                ),
+            )? {
+                return Err(CorruptionError::new("Quad in gspo and not in gosp").into());
+            }
+            if !self.storage.db.contains_key(
+                &self.storage.spog_cf,
+                &encode_term_quad(
+                    &gspo.subject,
+                    &gspo.predicate,
+                    &gspo.object,
+                    &gspo.graph_name,
+                ),
+            )? {
+                return Err(CorruptionError::new("Quad in gspo and not in spog").into());
+            }
+            if !self.storage.db.contains_key(
+                &self.storage.posg_cf,
+                &encode_term_quad(
+                    &gspo.predicate,
+                    &gspo.object,
+                    &gspo.subject,
+                    &gspo.graph_name,
+                ),
+            )? {
+                return Err(CorruptionError::new("Quad in gspo and not in posg").into());
+            }
+            if !self.storage.db.contains_key(
+                &self.storage.ospg_cf,
+                &encode_term_quad(
+                    &gspo.object,
+                    &gspo.subject,
+                    &gspo.predicate,
+                    &gspo.graph_name,
+                ),
+            )? {
+                return Err(CorruptionError::new("Quad in gspo and not in ospg").into());
+            }
+            // if !self
+            //     .storage
+            //     .db
+            //     .contains_key(&self.storage.graphs_cf, &encode_term(&gspo.graph_name))?
+ // { + // return Err( + // CorruptionError::new("Quad graph name in gspo and not in graphs").into(), + // ); + // } + } + Ok(()) + } + + /// Validates that all the storage invariants held in the data + #[cfg(any(target_family = "wasm", docsrs))] + #[allow(clippy::unused_self, clippy::unnecessary_wraps)] + pub fn validate(&self) -> Result<(), StorageError> { + Ok(()) // TODO + } +} + +// pub struct ChainedDecodingQuadIterator { +// first: DecodingQuadIterator, +// second: Option, +// } + +// impl ChainedDecodingQuadIterator { +// fn new(first: DecodingQuadIterator) -> Self { +// Self { +// first, +// second: None, +// } +// } + +// fn pair(first: DecodingQuadIterator, second: DecodingQuadIterator) -> Self { +// Self { +// first, +// second: Some(second), +// } +// } +// } + +// impl Iterator for ChainedDecodingQuadIterator { +// type Item = Result; + +// fn next(&mut self) -> Option { +// if let Some(result) = self.first.next() { +// Some(result) +// } else if let Some(second) = self.second.as_mut() { +// second.next() +// } else { +// None +// } +// } +// } + +pub struct DecodingQuadIterator { + iter: Iter, + encoding: QuadEncoding, +} + +impl Iterator for DecodingQuadIterator { + type Item = Result; + + fn next(&mut self) -> Option { + if let Err(e) = self.iter.status() { + return Some(Err(e)); + } + let term = self.encoding.decode(self.iter.key()?); + self.iter.next(); + Some(term) + } +} + +pub struct DecodingNgQuadIterator { + iter: Iter, + encoding: QuadEncoding, +} + +impl Iterator for DecodingNgQuadIterator { + type Item = Result<(EncodedQuad, u8), StorageError>; + + fn next(&mut self) -> Option { + if let Err(e) = self.iter.status() { + return Some(Err(e)); + } + let term = self.encoding.decode(self.iter.key()?); + match term { + Ok(term) => { + let val = self.iter.value()?[0]; + self.iter.next(); + Some(Ok((term, val))) + } + Err(e) => { + self.iter.next(); + Some(Err(e)) + } + } + } +} + +impl DecodingNgQuadIterator { + fn into_dataset_iter(self) -> Box>> { + Box::new(self.filter_map(|q| match q { + Err(e) => Some(Err(e)), + Ok((q, val)) => { + if val == REPO_IN_MAIN { + Some(Ok(q)) + } else { + None + } + } + })) + } +} + +pub struct DecodingGraphIterator { + iter: Iter, +} + +impl Iterator for DecodingGraphIterator { + type Item = Result; + + fn next(&mut self) -> Option { + if let Err(e) = self.iter.status() { + return Some(Err(e)); + } + let term: Result = decode_term(self.iter.key()?); + self.iter.next(); + Some(term) + } +} + +impl StrLookup for StorageReader { + fn get_str(&self, key: &StrHash) -> Result, StorageError> { + self.get_str(key) + } +} + +pub struct CommitWriter<'a> { + inserts: HashSet, + removes: HashSet, + transaction: Transaction<'a>, + storage: &'a Storage, +} + +impl<'a> CommitWriter<'a> { + pub fn reader(&self) -> StorageReader { + StorageReader { + reader: self.transaction.reader(), + storage: self.storage.clone(), + } + } + + pub fn get_update(self) -> (HashSet, HashSet) { + (self.inserts, self.removes) + } + + pub fn insert(&mut self, quad: QuadRef<'_>) -> Result { + if quad.graph_name.is_default_graph() { + Err(StorageError::Other( + "NextGraph cannot insert triples in default graph".into(), + )) + } else { + let quad = quad.into_owned(); + Ok(self.inserts.insert(quad)) + } + } + + pub fn insert_named_graph( + &mut self, + _graph_name: NamedOrBlankNodeRef<'_>, + ) -> Result { + Err(StorageError::Other( + "NextGraph cannot insert named graph".into(), + )) + } + + pub fn remove(&mut self, quad: QuadRef<'_>) -> Result { + if 
quad.graph_name.is_default_graph() { + Err(StorageError::Other( + "NextGraph cannot remove triples in default graph".into(), + )) + } else { + let quad = quad.into_owned(); + Ok(self.removes.insert(quad)) + } + } + + pub fn clear_graph(&mut self, graph_name: GraphNameRef<'_>) -> Result<(), StorageError> { + if graph_name.is_default_graph() { + Err(StorageError::Other( + "NextGraph cannot clear the default graph".into(), + )) + } else { + unimplemented!(); + } + } + + pub fn clear_all_named_graphs(&mut self) -> Result<(), StorageError> { + unimplemented!(); + } + + pub fn clear_all_graphs(&mut self) -> Result<(), StorageError> { + unimplemented!(); + } + + pub fn remove_named_graph( + &mut self, + _graph_name: NamedOrBlankNodeRef<'_>, + ) -> Result { + unimplemented!(); + } + + pub fn remove_all_named_graphs(&mut self) -> Result<(), StorageError> { + unimplemented!(); + } + + pub fn clear(&mut self) -> Result<(), StorageError> { + unimplemented!(); + } +} + +pub struct StorageWriter<'a> { + buffer: Vec, + transaction: Transaction<'a>, + storage: &'a Storage, +} + +pub const ADDED_IN_MAIN: u8 = 3; +pub const ADDED_IN_OTHER: u8 = 2; +pub const REMOVED_IN_MAIN: u8 = 1; +pub const REMOVED_IN_OTHER: u8 = 0; +pub const REPO_IN_MAIN: u8 = 4; +pub const COMMIT_SKIP_NO_GRAPH: u8 = 1; +pub const COMMIT_HAS_GRAPH: u8 = 0; + +const MASK_ADDED: u8 = 2; +const MASK_REMOVED: u8 = 6; + +pub const BRANCH_PREFIX: u8 = 0; +const TOKEN_PREFIX: u8 = 1; + +pub const COMMIT_PREFIX: u8 = 1; + +#[inline] +fn is_added(val: u8) -> bool { + (val & MASK_ADDED) == MASK_ADDED +} + +#[inline] +fn is_removed(val: u8) -> bool { + (val & MASK_REMOVED) == 0 +} + +#[inline] +fn is_added_in_main(val: u8) -> bool { + val == ADDED_IN_MAIN +} + +impl<'a> StorageWriter<'a> { + pub fn reader(&self) -> StorageReader { + StorageReader { + reader: self.transaction.reader(), + storage: self.storage.clone(), + } + } + + pub fn named_commit_or_branch( + &mut self, + ov_graph_name: NamedNodeRef<'_>, + name: &String, + // if None: remove + value: &Option>, + ) -> Result<(), StorageError> { + let encoded: EncodedTerm = ov_graph_name.into(); + if value.is_some() { + self.insert_term(ov_graph_name.into(), &encoded)?; + } + if let EncodedTerm::NamedNode { iri_id } = encoded { + let mut key = Vec::with_capacity(16 + name.len()); + key.extend_from_slice(&iri_id.to_be_bytes()); + key.extend_from_slice(name.as_bytes()); + if value.is_none() { + self.transaction.remove(&self.storage.names_cf, &key)?; + } else { + self.transaction + .insert(&self.storage.names_cf, &key, value.as_ref().unwrap())?; + } + } else { + panic!("should be an EncodedTerm::NamedNode"); + } + Ok(()) + } + + pub fn doc_in_store( + &mut self, + graph_name: NamedNodeRef<'_>, + overlay: &StrHash, + remove: bool, + ) -> Result<(), StorageError> { + let encoded: EncodedTerm = graph_name.into(); + if !remove { + self.insert_term(graph_name.into(), &encoded)?; + } else { + // TODO: remove term? 
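+            // NOTE (illustrative summary, not part of the build) of the quad-value
+            // flags defined above, assuming the constants are as declared:
+            //
+            //   value                 bits   is_added  is_removed
+            //   REMOVED_IN_OTHER = 0  0b000  false     true
+            //   REMOVED_IN_MAIN  = 1  0b001  false     true
+            //   ADDED_IN_OTHER   = 2  0b010  true      false
+            //   ADDED_IN_MAIN    = 3  0b011  true      false
+            //   REPO_IN_MAIN     = 4  0b100  false     false
+            //
+            // is_added(v)   <=> (v & MASK_ADDED)   == MASK_ADDED  (bit 1 set)
+            // is_removed(v) <=> (v & MASK_REMOVED) == 0           (bits 1 and 2 clear)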
self.insert_term(graph_name.into(), &encoded)?;
+        }
+        if let EncodedTerm::NamedNode { iri_id } = encoded {
+            let mut key = Vec::with_capacity(32);
+            key.extend_from_slice(&overlay.to_be_bytes());
+            key.extend_from_slice(&iri_id.to_be_bytes());
+            if remove {
+                self.transaction.remove(&self.storage.stores_cf, &key)?;
+            } else {
+                self.transaction
+                    .insert_empty(&self.storage.stores_cf, &key)?;
+            }
+        } else {
+            panic!("should be an EncodedTerm::NamedNode");
+        }
+        Ok(())
+    }
+
+    pub fn update_branch_and_token(
+        &mut self,
+        overlay_encoded: &StrHash,
+        branch_encoded: &StrHash,
+        topic_encoded: &StrHash,
+        token_encoded: &StrHash,
+    ) -> Result<(), StorageError> {
+        let mut key = Vec::with_capacity(33);
+        key.push(BRANCH_PREFIX);
+        key.extend_from_slice(&branch_encoded.to_be_bytes());
+        key.extend_from_slice(&overlay_encoded.to_be_bytes());
+
+        let mut topic = Vec::with_capacity(17);
+        topic.push(0); // 1 would mean there is no topic hash and this is the "store" branch of a store.
+        topic.extend_from_slice(&topic_encoded.to_be_bytes());
+
+        let reader = self.transaction.reader();
+        if match reader.get(&self.storage.branches_cf, &key)? {
+            Some(val) => val.to_vec() != topic,
+            None => true,
+        } {
+            self.transaction
+                .insert(&self.storage.branches_cf, &key, &topic)?;
+        }
+
+        key.clear();
+        key.push(TOKEN_PREFIX);
+        key.extend_from_slice(&token_encoded.to_be_bytes());
+        key.extend_from_slice(&overlay_encoded.to_be_bytes());
+
+        let mut token = Vec::with_capacity(32);
+        token.extend_from_slice(&topic_encoded.to_be_bytes());
+        token.extend_from_slice(&branch_encoded.to_be_bytes());
+
+        if match reader.get(&self.storage.branches_cf, &key)? {
+            Some(val) => val.to_vec() != token,
+            None => true,
+        } {
+            self.transaction
+                .insert(&self.storage.branches_cf, &key, &token)?;
+        }
+        Ok(())
+    }
+
+    pub fn ng_update_heads(
+        &mut self,
+        topic: &StrHash,
+        overlay: &StrHash,
+        commit: &StrHash,
+        direct_causal_past: &HashSet<StrHash>,
+    ) -> Result<(), StorageError> {
+        let mut buffer = Vec::with_capacity(48);
+        buffer.extend_from_slice(&topic.to_be_bytes());
+        buffer.extend_from_slice(&overlay.to_be_bytes());
+
+        for removing in direct_causal_past {
+            buffer.truncate(32);
+            buffer.extend_from_slice(&removing.to_be_bytes());
+            self.transaction.remove(&self.storage.heads_cf, &buffer)?
+        }
+
+        buffer.truncate(32);
+        buffer.extend_from_slice(&commit.to_be_bytes());
+        self.transaction
+            .insert_empty(&self.storage.heads_cf, &buffer)?;
+
+        Ok(())
+    }
+
+    pub fn ng_update_past(
+        &mut self,
+        commit: &StrHash,
+        direct_causal_past: &HashSet<StrHash>,
+        skip_has_no_graph: bool,
+    ) -> Result<(), StorageError> {
+        let mut buffer = Vec::with_capacity(32);
+        buffer.extend_from_slice(&commit.to_be_bytes());
+
+        let value = [if skip_has_no_graph {
+            COMMIT_SKIP_NO_GRAPH
+        } else {
+            COMMIT_HAS_GRAPH
+        }];
+
+        for adding in direct_causal_past {
+            buffer.truncate(16);
+            buffer.extend_from_slice(&adding.to_be_bytes());
+            self.transaction
+                .insert(&self.storage.past_cf, &buffer, &value)?
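+            // NOTE (illustrative summary, not part of the build) of the fixed-width
+            // key layouts built above, one 16-byte StrHash per field:
+            //
+            //   branches_cf  [BRANCH_PREFIX | branch (16) | overlay (16)] -> [flag (1) | topic (16)]
+            //   branches_cf  [TOKEN_PREFIX  | token  (16) | overlay (16)] -> [topic (16) | branch (16)]
+            //   heads_cf     [topic (16) | overlay (16) | commit (16)]    -> (empty)
+            //   past_cf      [commit (16) | parent (16)]                  -> one byte: COMMIT_SKIP_NO_GRAPH or COMMIT_HAS_GRAPH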
+ } + + Ok(()) + } + + pub fn ng_remove(&mut self, quad: &EncodedQuad, commit: &StrHash) -> Result<(), StorageError> { + let mut key = Vec::with_capacity(3 * WRITTEN_TERM_MAX_SIZE + 2 * 16); + write_term(&mut key, &quad.subject); + write_term(&mut key, &quad.predicate); + write_term(&mut key, &quad.object); + if let EncodedTerm::NamedNode { iri_id } = quad.graph_name { + key.extend_from_slice(&iri_id.to_be_bytes()); + key.extend_from_slice(&commit.to_be_bytes()); + self.transaction + .insert_empty(&self.storage.removed_cf, &key) + } else { + Err(CorruptionError::msg("invalid quad").into()) + } + } + + pub fn ng_insert( + &mut self, + quad: QuadRef<'_>, + value: u8, + cv: bool, + ) -> Result<(), StorageError> { + let encoded = quad.into(); + if self.ng_insert_encoded(&encoded, value, cv)? { + self.insert_term(quad.subject.into(), &encoded.subject)?; + self.insert_term(quad.predicate.into(), &encoded.predicate)?; + self.insert_term(quad.object, &encoded.object)?; + + // self.buffer.clear(); + // write_term(&mut self.buffer, &encoded.graph_name); + // if !self + // .transaction + // .contains_key_for_update(&self.storage.graphs_cf, &self.buffer)? + // { + // self.transaction + // .insert_empty(&self.storage.graphs_cf, &self.buffer)?; + self.insert_graph_name(quad.graph_name, &encoded.graph_name)?; + //} + } + Ok(()) + } + + pub fn ng_insert_encoded( + &mut self, + encoded: &EncodedQuad, + value: u8, + cv: bool, + ) -> Result { + let value = [value]; + self.buffer.clear(); + write_spog_quad(&mut self.buffer, encoded); + let result = + // if self + // .transaction + // .contains_key_for_update(&self.storage.spog_cf, &self.buffer)? + // { + // false + // } else + { + self.transaction + .insert(&self.storage.spog_cf, &self.buffer, &value)?; + + self.buffer.clear(); + write_posg_quad(&mut self.buffer, encoded); + self.transaction + .insert(&self.storage.posg_cf, &self.buffer, &value)?; + + self.buffer.clear(); + write_ospg_quad(&mut self.buffer, encoded); + self.transaction + .insert(&self.storage.ospg_cf, &self.buffer, &value)?; + + self.buffer.clear(); + write_gspo_quad(&mut self.buffer, encoded); + self.transaction + .insert(&self.storage.gspo_cf, &self.buffer, &value)?; + + if !cv { + self.buffer.clear(); + write_gpos_quad(&mut self.buffer, encoded); + self.transaction + .insert(&self.storage.gpos_cf, &self.buffer, &value)?; + + self.buffer.clear(); + write_gosp_quad(&mut self.buffer, encoded); + self.transaction + .insert(&self.storage.gosp_cf, &self.buffer, &value)?; + } + true + }; + Ok(result) + } + + // pub fn insert(&mut self, quad: QuadRef<'_>) -> Result { + // let encoded = quad.into(); + // self.buffer.clear(); + // let result = if quad.graph_name.is_default_graph() { + // write_spo_quad(&mut self.buffer, &encoded); + // if self + // .transaction + // .contains_key_for_update(&self.storage.dspo_cf, &self.buffer)? 
+ // { + // false + // } else { + // self.transaction + // .insert_empty(&self.storage.dspo_cf, &self.buffer)?; + + // self.buffer.clear(); + // write_pos_quad(&mut self.buffer, &encoded); + // self.transaction + // .insert_empty(&self.storage.dpos_cf, &self.buffer)?; + + // self.buffer.clear(); + // write_osp_quad(&mut self.buffer, &encoded); + // self.transaction + // .insert_empty(&self.storage.dosp_cf, &self.buffer)?; + + // self.insert_term(quad.subject.into(), &encoded.subject)?; + // self.insert_term(quad.predicate.into(), &encoded.predicate)?; + // self.insert_term(quad.object, &encoded.object)?; + // true + // } + // } else { + // write_spog_quad(&mut self.buffer, &encoded); + // if self + // .transaction + // .contains_key_for_update(&self.storage.spog_cf, &self.buffer)? + // { + // false + // } else { + // self.transaction + // .insert_empty(&self.storage.spog_cf, &self.buffer)?; + + // self.buffer.clear(); + // write_posg_quad(&mut self.buffer, &encoded); + // self.transaction + // .insert_empty(&self.storage.posg_cf, &self.buffer)?; + + // self.buffer.clear(); + // write_ospg_quad(&mut self.buffer, &encoded); + // self.transaction + // .insert_empty(&self.storage.ospg_cf, &self.buffer)?; + + // self.buffer.clear(); + // write_gspo_quad(&mut self.buffer, &encoded); + // self.transaction + // .insert_empty(&self.storage.gspo_cf, &self.buffer)?; + + // self.buffer.clear(); + // write_gpos_quad(&mut self.buffer, &encoded); + // self.transaction + // .insert_empty(&self.storage.gpos_cf, &self.buffer)?; + + // self.buffer.clear(); + // write_gosp_quad(&mut self.buffer, &encoded); + // self.transaction + // .insert_empty(&self.storage.gosp_cf, &self.buffer)?; + + // self.insert_term(quad.subject.into(), &encoded.subject)?; + // self.insert_term(quad.predicate.into(), &encoded.predicate)?; + // self.insert_term(quad.object, &encoded.object)?; + + // self.buffer.clear(); + // write_term(&mut self.buffer, &encoded.graph_name); + // if !self + // .transaction + // .contains_key_for_update(&self.storage.graphs_cf, &self.buffer)? + // { + // self.transaction + // .insert_empty(&self.storage.graphs_cf, &self.buffer)?; + // self.insert_graph_name(quad.graph_name, &encoded.graph_name)?; + // } + // true + // } + // }; + // Ok(result) + // } + + pub fn insert_named_graph( + &mut self, + graph_name: NamedOrBlankNodeRef<'_>, + ) -> Result { + unimplemented!(); + // let encoded_graph_name = graph_name.into(); + + // self.buffer.clear(); + // write_term(&mut self.buffer, &encoded_graph_name); + // let result = if self + // .transaction + // .contains_key_for_update(&self.storage.graphs_cf, &self.buffer)? 
+ // { + // false + // } else { + // self.transaction + // .insert_empty(&self.storage.graphs_cf, &self.buffer)?; + // self.insert_term(graph_name.into(), &encoded_graph_name)?; + // true + // }; + // Ok(result) + } + + fn insert_term( + &mut self, + term: TermRef<'_>, + encoded: &EncodedTerm, + ) -> Result<(), StorageError> { + insert_term(term, encoded, &mut |key, value| self.insert_str(key, value)) + } + + fn insert_graph_name( + &mut self, + graph_name: GraphNameRef<'_>, + encoded: &EncodedTerm, + ) -> Result<(), StorageError> { + match graph_name { + GraphNameRef::NamedNode(graph_name) => self.insert_term(graph_name.into(), encoded), + GraphNameRef::BlankNode(graph_name) => self.insert_term(graph_name.into(), encoded), + GraphNameRef::DefaultGraph => Ok(()), + } + } + + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + fn insert_str(&mut self, key: &StrHash, value: &str) -> Result<(), StorageError> { + if self + .storage + .db + .contains_key(&self.storage.id2str_cf, &key.to_be_bytes())? + { + return Ok(()); + } + self.storage.db.insert( + &self.storage.id2str_cf, + &key.to_be_bytes(), + value.as_bytes(), + ) + } + + #[cfg(any(target_family = "wasm", docsrs))] + fn insert_str(&mut self, key: &StrHash, value: &str) -> Result<(), StorageError> { + self.transaction.insert( + &self.storage.id2str_cf, + &key.to_be_bytes(), + value.as_bytes(), + ) + } + + pub fn remove(&mut self, quad: QuadRef<'_>) -> Result { + self.remove_encoded(&quad.into()) + } + + fn remove_encoded(&mut self, quad: &EncodedQuad) -> Result { + self.buffer.clear(); + let result = if quad.graph_name.is_default_graph() { + write_spo_quad(&mut self.buffer, quad); + + if self + .transaction + .contains_key_for_update(&self.storage.dspo_cf, &self.buffer)? + { + self.transaction + .remove(&self.storage.dspo_cf, &self.buffer)?; + + self.buffer.clear(); + write_pos_quad(&mut self.buffer, quad); + self.transaction + .remove(&self.storage.dpos_cf, &self.buffer)?; + + self.buffer.clear(); + write_osp_quad(&mut self.buffer, quad); + self.transaction + .remove(&self.storage.dosp_cf, &self.buffer)?; + true + } else { + false + } + } else { + write_spog_quad(&mut self.buffer, quad); + + if self + .transaction + .contains_key_for_update(&self.storage.spog_cf, &self.buffer)? + { + self.transaction + .remove(&self.storage.spog_cf, &self.buffer)?; + + self.buffer.clear(); + write_posg_quad(&mut self.buffer, quad); + self.transaction + .remove(&self.storage.posg_cf, &self.buffer)?; + + self.buffer.clear(); + write_ospg_quad(&mut self.buffer, quad); + self.transaction + .remove(&self.storage.ospg_cf, &self.buffer)?; + + self.buffer.clear(); + write_gspo_quad(&mut self.buffer, quad); + self.transaction + .remove(&self.storage.gspo_cf, &self.buffer)?; + + self.buffer.clear(); + write_gpos_quad(&mut self.buffer, quad); + self.transaction + .remove(&self.storage.gpos_cf, &self.buffer)?; + + self.buffer.clear(); + write_gosp_quad(&mut self.buffer, quad); + self.transaction + .remove(&self.storage.gosp_cf, &self.buffer)?; + true + } else { + false + } + }; + Ok(result) + } + + pub fn clear_graph(&mut self, graph_name: GraphNameRef<'_>) -> Result<(), StorageError> { + unimplemented!(); + // if graph_name.is_default_graph() { + // for quad in self.reader().quads_for_graph(&EncodedTerm::DefaultGraph) { + // self.remove_encoded(&quad?)?; + // } + // } else { + // self.buffer.clear(); + // write_term(&mut self.buffer, &graph_name.into()); + // if self + // .transaction + // .contains_key_for_update(&self.storage.graphs_cf, &self.buffer)? 
+        //     {
+        //         // The condition is useful to lock the graph itself and ensure no quad is inserted at the same time
+        //         for quad in self.reader().quads_for_graph(&graph_name.into()) {
+        //             self.remove_encoded(&quad?)?;
+        //         }
+        //     }
+        // }
+        // Ok(())
+    }
+
+    pub fn clear_all_named_graphs(&mut self) -> Result<(), StorageError> {
+        for quad in self.reader().quads_in_named_graph() {
+            self.remove_encoded(&quad?)?;
+        }
+        Ok(())
+    }
+
+    pub fn clear_all_graphs(&mut self) -> Result<(), StorageError> {
+        for quad in self.reader().quads() {
+            self.remove_encoded(&quad?)?;
+        }
+        Ok(())
+    }
+
+    pub fn remove_named_graph(
+        &mut self,
+        graph_name: NamedOrBlankNodeRef<'_>,
+    ) -> Result<bool, StorageError> {
+        self.remove_encoded_named_graph(&graph_name.into())
+    }
+
+    fn remove_encoded_named_graph(
+        &mut self,
+        graph_name: &EncodedTerm,
+    ) -> Result<bool, StorageError> {
+        unimplemented!();
+        // self.buffer.clear();
+        // write_term(&mut self.buffer, graph_name);
+        // let result = if self
+        //     .transaction
+        //     .contains_key_for_update(&self.storage.graphs_cf, &self.buffer)?
+        // {
+        //     // The condition is done ASAP to lock the graph itself
+        //     for quad in self.reader().quads_for_graph(graph_name) {
+        //         self.remove_encoded(&quad?)?;
+        //     }
+        //     self.buffer.clear();
+        //     write_term(&mut self.buffer, graph_name);
+        //     self.transaction
+        //         .remove(&self.storage.graphs_cf, &self.buffer)?;
+        //     true
+        // } else {
+        //     false
+        // };
+        // Ok(result)
+    }
+
+    pub fn remove_all_named_graphs(&mut self) -> Result<(), StorageError> {
+        unimplemented!();
+        // for graph_name in self.reader().named_graphs() {
+        //     self.remove_encoded_named_graph(&graph_name?)?;
+        // }
+        // Ok(())
+    }
+
+    pub fn clear(&mut self) -> Result<(), StorageError> {
+        // for graph_name in self.reader().named_graphs() {
+        //     self.remove_encoded_named_graph(&graph_name?)?;
+        // }
+        for quad in self.reader().quads() {
+            self.remove_encoded(&quad?)?;
+        }
+        Ok(())
+    }
+}
+
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+#[must_use]
+pub struct StorageBulkLoader {
+    storage: Storage,
+    hooks: Vec<Box<dyn Fn(u64)>>,
+    num_threads: Option<usize>,
+    max_memory_size: Option<usize>,
+}
+
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+impl StorageBulkLoader {
+    pub fn new(storage: Storage) -> Self {
+        Self {
+            storage,
+            hooks: Vec::new(),
+            num_threads: None,
+            max_memory_size: None,
+        }
+    }
+
+    pub fn with_num_threads(mut self, num_threads: usize) -> Self {
+        self.num_threads = Some(num_threads);
+        self
+    }
+
+    pub fn with_max_memory_size_in_megabytes(mut self, max_memory_size: usize) -> Self {
+        self.max_memory_size = Some(max_memory_size);
+        self
+    }
+
+    pub fn on_progress(mut self, callback: impl Fn(u64) + 'static) -> Self {
+        self.hooks.push(Box::new(callback));
+        self
+    }
+
+    #[allow(clippy::trait_duplication_in_bounds)]
+    pub fn load<EI, EO: From<StorageError> + From<EI>>(
+        &self,
+        quads: impl IntoIterator<Item = Result<Quad, EI>>,
+    ) -> Result<(), EO> {
+        let num_threads = self.num_threads.unwrap_or(2);
+        if num_threads < 2 {
+            return Err(
+                StorageError::Other("The bulk loader needs at least 2 threads".into()).into(),
+            );
+        }
+        let batch_size = if let Some(max_memory_size) = self.max_memory_size {
+            max_memory_size * 1000 / num_threads
+        } else {
+            DEFAULT_BULK_LOAD_BATCH_SIZE
+        };
+        if batch_size < 10_000 {
+            return Err(StorageError::Other(
+                "The bulk loader memory bound is too low.
It needs at least 100MB".into(),
+            )
+            .into());
+        }
+        let done_counter = Mutex::new(0);
+        let mut done_and_displayed_counter = 0;
+        thread::scope(|thread_scope| {
+            let mut threads = VecDeque::with_capacity(num_threads - 1);
+            let mut buffer = Vec::with_capacity(batch_size);
+            for quad in quads {
+                let quad = quad?;
+                buffer.push(quad);
+                if buffer.len() >= batch_size {
+                    self.spawn_load_thread(
+                        &mut buffer,
+                        &mut threads,
+                        thread_scope,
+                        &done_counter,
+                        &mut done_and_displayed_counter,
+                        num_threads,
+                        batch_size,
+                    )?;
+                }
+            }
+            self.spawn_load_thread(
+                &mut buffer,
+                &mut threads,
+                thread_scope,
+                &done_counter,
+                &mut done_and_displayed_counter,
+                num_threads,
+                batch_size,
+            )?;
+            for thread in threads {
+                map_thread_result(thread.join()).map_err(StorageError::Io)??;
+                self.on_possible_progress(&done_counter, &mut done_and_displayed_counter)?;
+            }
+            Ok(())
+        })
+    }
+
+    fn spawn_load_thread<'scope>(
+        &'scope self,
+        buffer: &mut Vec<Quad>,
+        threads: &mut VecDeque<thread::ScopedJoinHandle<'scope, Result<(), StorageError>>>,
+        thread_scope: &'scope thread::Scope<'scope, '_>,
+        done_counter: &'scope Mutex<u64>,
+        done_and_displayed_counter: &mut u64,
+        num_threads: usize,
+        batch_size: usize,
+    ) -> Result<(), StorageError> {
+        self.on_possible_progress(done_counter, done_and_displayed_counter)?;
+        // We avoid having too many threads
+        if threads.len() >= num_threads {
+            if let Some(thread) = threads.pop_front() {
+                map_thread_result(thread.join()).map_err(StorageError::Io)??;
+                self.on_possible_progress(done_counter, done_and_displayed_counter)?;
+            }
+        }
+        let mut buffer_to_load = Vec::with_capacity(batch_size);
+        swap(buffer, &mut buffer_to_load);
+        let storage = &self.storage;
+        threads.push_back(thread_scope.spawn(move || {
+            FileBulkLoader::new(storage, batch_size).load(buffer_to_load, done_counter)
+        }));
+        Ok(())
+    }
+
+    fn on_possible_progress(
+        &self,
+        done: &Mutex<u64>,
+        done_and_displayed: &mut u64,
+    ) -> Result<(), StorageError> {
+        let new_counter = *done
+            .lock()
+            .map_err(|_| io::Error::new(io::ErrorKind::Other, "Mutex poisoned"))?;
+        let display_step = DEFAULT_BULK_LOAD_BATCH_SIZE as u64;
+        if new_counter / display_step > *done_and_displayed / display_step {
+            for hook in &self.hooks {
+                hook(new_counter);
+            }
+        }
+        *done_and_displayed = new_counter;
+        Ok(())
+    }
+}
+
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+struct FileBulkLoader<'a> {
+    storage: &'a Storage,
+    id2str: HashMap<StrHash, Box<str>>,
+    quads: HashSet<EncodedQuad>,
+    triples: HashSet<EncodedQuad>,
+    graphs: HashSet<EncodedTerm>,
+}
+
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+impl<'a> FileBulkLoader<'a> {
+    fn new(storage: &'a Storage, batch_size: usize) -> Self {
+        Self {
+            storage,
+            id2str: HashMap::with_capacity(3 * batch_size),
+            quads: HashSet::with_capacity(batch_size),
+            triples: HashSet::with_capacity(batch_size),
+            graphs: HashSet::default(),
+        }
+    }
+
+    fn load(&mut self, quads: Vec<Quad>, counter: &Mutex<u64>) -> Result<(), StorageError> {
+        self.encode(quads)?;
+        let size = self.triples.len() + self.quads.len();
+        self.save()?;
+        *counter
+            .lock()
+            .map_err(|_| io::Error::new(io::ErrorKind::Other, "Mutex poisoned"))?
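+            // NOTE (illustrative usage, not part of the build): driving the bulk
+            // loader defined above; the `storage` handle, `quads` vector and the
+            // chosen numbers are assumptions:
+            //
+            // let loader = StorageBulkLoader::new(storage)
+            //     .with_num_threads(4)
+            //     // batch_size = 1_000 * 1000 / 4 = 250_000 quads per thread batch
+            //     .with_max_memory_size_in_megabytes(1_000)
+            //     .on_progress(|done| println!("{done} quads loaded"));
+            // loader.load::<StorageError, StorageError>(quads.into_iter().map(Ok))?;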
+= size.try_into().unwrap_or(u64::MAX);
+        Ok(())
+    }
+
+    fn encode(&mut self, quads: Vec<Quad>) -> Result<(), StorageError> {
+        for quad in quads {
+            let encoded = EncodedQuad::from(quad.as_ref());
+            if quad.graph_name.is_default_graph() {
+                if self.triples.insert(encoded.clone()) {
+                    self.insert_term(quad.subject.as_ref().into(), &encoded.subject)?;
+                    self.insert_term(quad.predicate.as_ref().into(), &encoded.predicate)?;
+                    self.insert_term(quad.object.as_ref(), &encoded.object)?;
+                }
+            } else if self.quads.insert(encoded.clone()) {
+                self.insert_term(quad.subject.as_ref().into(), &encoded.subject)?;
+                self.insert_term(quad.predicate.as_ref().into(), &encoded.predicate)?;
+                self.insert_term(quad.object.as_ref(), &encoded.object)?;
+
+                if self.graphs.insert(encoded.graph_name.clone()) {
+                    self.insert_term(
+                        match quad.graph_name.as_ref() {
+                            GraphNameRef::NamedNode(n) => n.into(),
+                            GraphNameRef::BlankNode(n) => n.into(),
+                            GraphNameRef::DefaultGraph => {
+                                return Err(CorruptionError::new(
+                                    "The default graph is not a named graph",
+                                )
+                                .into())
+                            }
+                        },
+                        &encoded.graph_name,
+                    )?;
+                }
+            }
+        }
+        Ok(())
+    }
+
+    fn save(&mut self) -> Result<(), StorageError> {
+        let mut to_load = Vec::new();
+
+        // id2str
+        if !self.id2str.is_empty() {
+            let mut id2str = take(&mut self.id2str)
+                .into_iter()
+                .map(|(k, v)| (k.to_be_bytes(), v))
+                .collect::<Vec<_>>();
+            id2str.sort_unstable();
+            let mut id2str_sst = self.storage.db.new_sst_file()?;
+            for (k, v) in id2str {
+                id2str_sst.insert(&k, v.as_bytes())?;
+            }
+            to_load.push((&self.storage.id2str_cf, id2str_sst.finish()?));
+        }
+
+        if !self.triples.is_empty() {
+            to_load.push((
+                &self.storage.dspo_cf,
+                self.build_sst_for_keys(
+                    self.triples.iter().map(|quad| {
+                        encode_term_triple(&quad.subject, &quad.predicate, &quad.object)
+                    }),
+                )?,
+            ));
+            to_load.push((
+                &self.storage.dpos_cf,
+                self.build_sst_for_keys(
+                    self.triples.iter().map(|quad| {
+                        encode_term_triple(&quad.predicate, &quad.object, &quad.subject)
+                    }),
+                )?,
+            ));
+            to_load.push((
+                &self.storage.dosp_cf,
+                self.build_sst_for_keys(
+                    self.triples.iter().map(|quad| {
+                        encode_term_triple(&quad.object, &quad.subject, &quad.predicate)
+                    }),
+                )?,
+            ));
+            self.triples.clear();
+        }
+
+        if !self.quads.is_empty() {
+            // to_load.push((
+            //     &self.storage.graphs_cf,
+            //     self.build_sst_for_keys(self.graphs.iter().map(encode_term))?,
+            // ));
+            self.graphs.clear();
+
+            to_load.push((
+                &self.storage.gspo_cf,
+                self.build_sst_for_keys(self.quads.iter().map(|quad| {
+                    encode_term_quad(
+                        &quad.graph_name,
+                        &quad.subject,
+                        &quad.predicate,
+                        &quad.object,
+                    )
+                }))?,
+            ));
+            to_load.push((
+                &self.storage.gpos_cf,
+                self.build_sst_for_keys(self.quads.iter().map(|quad| {
+                    encode_term_quad(
+                        &quad.graph_name,
+                        &quad.predicate,
+                        &quad.object,
+                        &quad.subject,
+                    )
+                }))?,
+            ));
+            to_load.push((
+                &self.storage.gosp_cf,
+                self.build_sst_for_keys(self.quads.iter().map(|quad| {
+                    encode_term_quad(
+                        &quad.graph_name,
+                        &quad.object,
+                        &quad.subject,
+                        &quad.predicate,
+                    )
+                }))?,
+            ));
+            to_load.push((
+                &self.storage.spog_cf,
+                self.build_sst_for_keys(self.quads.iter().map(|quad| {
+                    encode_term_quad(
+                        &quad.subject,
+                        &quad.predicate,
+                        &quad.object,
+                        &quad.graph_name,
+                    )
+                }))?,
+            ));
+            to_load.push((
+                &self.storage.posg_cf,
+                self.build_sst_for_keys(self.quads.iter().map(|quad| {
+                    encode_term_quad(
+                        &quad.predicate,
+                        &quad.object,
+                        &quad.subject,
+                        &quad.graph_name,
+                    )
+                }))?,
+            ));
+            to_load.push((
+                &self.storage.ospg_cf,
+                self.build_sst_for_keys(self.quads.iter().map(|quad| {
encode_term_quad(
+                        &quad.object,
+                        &quad.subject,
+                        &quad.predicate,
+                        &quad.graph_name,
+                    )
+                }))?,
+            ));
+            self.quads.clear();
+        }
+
+        self.storage.db.insert_stt_files(&to_load)
+    }
+
+    fn insert_term(
+        &mut self,
+        term: TermRef<'_>,
+        encoded: &EncodedTerm,
+    ) -> Result<(), StorageError> {
+        insert_term(term, encoded, &mut |key, value| {
+            self.id2str.entry(*key).or_insert_with(|| value.into());
+            Ok(())
+        })
+    }
+
+    fn build_sst_for_keys(
+        &self,
+        values: impl Iterator<Item = Vec<u8>>,
+    ) -> Result<PathBuf, StorageError> {
+        let mut values = values.collect::<Vec<_>>();
+        values.sort_unstable();
+        let mut sst = self.storage.db.new_sst_file()?;
+        for value in values {
+            sst.insert_empty(&value)?;
+        }
+        sst.finish()
+    }
+}
+
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+fn map_thread_result<R>(result: thread::Result<R>) -> io::Result<R> {
+    result.map_err(|e| {
+        io::Error::new(
+            io::ErrorKind::Other,
+            if let Ok(e) = e.downcast::<&dyn std::fmt::Display>() {
+                format!("A loader process crashed with {e}")
+            } else {
+                "A loader process crashed with an unknown error".into()
+            },
+        )
+    })
+}
diff --git a/ng-oxigraph/src/oxigraph/storage/numeric_encoder.rs b/ng-oxigraph/src/oxigraph/storage/numeric_encoder.rs
new file mode 100644
index 0000000..81fa52c
--- /dev/null
+++ b/ng-oxigraph/src/oxigraph/storage/numeric_encoder.rs
@@ -0,0 +1,1031 @@
+#![allow(clippy::unreadable_literal)]
+
+use crate::oxigraph::model::*;
+use crate::oxigraph::storage::error::{CorruptionError, StorageError};
+use crate::oxigraph::storage::small_string::SmallString;
+use crate::oxsdatatypes::*;
+use siphasher::sip128::{Hasher128, SipHasher24};
+use std::fmt::Debug;
+use std::hash::{Hash, Hasher};
+use std::str;
+use std::sync::Arc;
+
+#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
+#[repr(transparent)]
+pub struct StrHash {
+    hash: u128,
+}
+
+impl StrHash {
+    pub fn new(value: &str) -> Self {
+        let mut hasher = SipHasher24::new();
+        hasher.write(value.as_bytes());
+        Self {
+            hash: hasher.finish128().into(),
+        }
+    }
+
+    #[inline]
+    pub fn from_be_bytes(bytes: [u8; 16]) -> Self {
+        Self {
+            hash: u128::from_be_bytes(bytes),
+        }
+    }
+
+    #[inline]
+    pub fn to_be_bytes(self) -> [u8; 16] {
+        self.hash.to_be_bytes()
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum EncodedTerm {
+    DefaultGraph,
+    NamedNode {
+        iri_id: StrHash,
+    },
+    NumericalBlankNode {
+        id: u128,
+    },
+    SmallBlankNode(SmallString),
+    BigBlankNode {
+        id_id: StrHash,
+    },
+    SmallStringLiteral(SmallString),
+    BigStringLiteral {
+        value_id: StrHash,
+    },
+    SmallSmallLangStringLiteral {
+        value: SmallString,
+        language: SmallString,
+    },
+    SmallBigLangStringLiteral {
+        value: SmallString,
+        language_id: StrHash,
+    },
+    BigSmallLangStringLiteral {
+        value_id: StrHash,
+        language: SmallString,
+    },
+    BigBigLangStringLiteral {
+        value_id: StrHash,
+        language_id: StrHash,
+    },
+    SmallTypedLiteral {
+        value: SmallString,
+        datatype_id: StrHash,
+    },
+    BigTypedLiteral {
+        value_id: StrHash,
+        datatype_id: StrHash,
+    },
+    BooleanLiteral(Boolean),
+    FloatLiteral(Float),
+    DoubleLiteral(Double),
+    IntegerLiteral(Integer),
+    DecimalLiteral(Decimal),
+    DateTimeLiteral(DateTime),
+    TimeLiteral(Time),
+    DateLiteral(Date),
+    GYearMonthLiteral(GYearMonth),
+    GYearLiteral(GYear),
+    GMonthDayLiteral(GMonthDay),
+    GDayLiteral(GDay),
+    GMonthLiteral(GMonth),
+    DurationLiteral(Duration),
+    YearMonthDurationLiteral(YearMonthDuration),
+    DayTimeDurationLiteral(DayTimeDuration),
+    Triple(Arc<EncodedTriple>),
+}
+
+impl PartialEq for EncodedTerm {
+    fn eq(&self, other: &Self) -> bool {
+        match (self, other) {
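+            // NOTE (illustrative sketch, not part of the build): how `StrHash`
+            // round-trips through its big-endian byte form, which is what the
+            // storage layer above uses as RocksDB keys:
+            //
+            // let h = StrHash::new("http://example.com/s"); // 128-bit SipHash-2-4
+            // let key: [u8; 16] = h.to_be_bytes();          // usable as a CF key
+            // assert_eq!(StrHash::from_be_bytes(key), h);   // lossless round-trip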
(Self::DefaultGraph, Self::DefaultGraph) => true, + (Self::NamedNode { iri_id: iri_id_a }, Self::NamedNode { iri_id: iri_id_b }) => { + iri_id_a == iri_id_b + } + (Self::NumericalBlankNode { id: id_a }, Self::NumericalBlankNode { id: id_b }) => { + id_a == id_b + } + (Self::SmallBlankNode(id_a), Self::SmallBlankNode(id_b)) => id_a == id_b, + (Self::BigBlankNode { id_id: id_a }, Self::BigBlankNode { id_id: id_b }) => { + id_a == id_b + } + (Self::SmallStringLiteral(a), Self::SmallStringLiteral(b)) => a == b, + ( + Self::BigStringLiteral { + value_id: value_id_a, + }, + Self::BigStringLiteral { + value_id: value_id_b, + }, + ) => value_id_a == value_id_b, + ( + Self::SmallSmallLangStringLiteral { + value: value_a, + language: language_a, + }, + Self::SmallSmallLangStringLiteral { + value: value_b, + language: language_b, + }, + ) => value_a == value_b && language_a == language_b, + ( + Self::SmallBigLangStringLiteral { + value: value_a, + language_id: language_id_a, + }, + Self::SmallBigLangStringLiteral { + value: value_b, + language_id: language_id_b, + }, + ) => value_a == value_b && language_id_a == language_id_b, + ( + Self::BigSmallLangStringLiteral { + value_id: value_id_a, + language: language_a, + }, + Self::BigSmallLangStringLiteral { + value_id: value_id_b, + language: language_b, + }, + ) => value_id_a == value_id_b && language_a == language_b, + ( + Self::BigBigLangStringLiteral { + value_id: value_id_a, + language_id: language_id_a, + }, + Self::BigBigLangStringLiteral { + value_id: value_id_b, + language_id: language_id_b, + }, + ) => value_id_a == value_id_b && language_id_a == language_id_b, + ( + Self::SmallTypedLiteral { + value: value_a, + datatype_id: datatype_id_a, + }, + Self::SmallTypedLiteral { + value: value_b, + datatype_id: datatype_id_b, + }, + ) => value_a == value_b && datatype_id_a == datatype_id_b, + ( + Self::BigTypedLiteral { + value_id: value_id_a, + datatype_id: datatype_id_a, + }, + Self::BigTypedLiteral { + value_id: value_id_b, + datatype_id: datatype_id_b, + }, + ) => value_id_a == value_id_b && datatype_id_a == datatype_id_b, + (Self::BooleanLiteral(a), Self::BooleanLiteral(b)) => a == b, + (Self::FloatLiteral(a), Self::FloatLiteral(b)) => a.is_identical_with(*b), + (Self::DoubleLiteral(a), Self::DoubleLiteral(b)) => a.is_identical_with(*b), + (Self::IntegerLiteral(a), Self::IntegerLiteral(b)) => a.is_identical_with(*b), + (Self::DecimalLiteral(a), Self::DecimalLiteral(b)) => a.is_identical_with(*b), + (Self::DateTimeLiteral(a), Self::DateTimeLiteral(b)) => a.is_identical_with(*b), + (Self::TimeLiteral(a), Self::TimeLiteral(b)) => a.is_identical_with(*b), + (Self::DateLiteral(a), Self::DateLiteral(b)) => a.is_identical_with(*b), + (Self::GYearMonthLiteral(a), Self::GYearMonthLiteral(b)) => a.is_identical_with(*b), + (Self::GYearLiteral(a), Self::GYearLiteral(b)) => a.is_identical_with(*b), + (Self::GMonthDayLiteral(a), Self::GMonthDayLiteral(b)) => a.is_identical_with(*b), + (Self::GMonthLiteral(a), Self::GMonthLiteral(b)) => a.is_identical_with(*b), + (Self::GDayLiteral(a), Self::GDayLiteral(b)) => a.is_identical_with(*b), + (Self::DurationLiteral(a), Self::DurationLiteral(b)) => a.is_identical_with(*b), + (Self::YearMonthDurationLiteral(a), Self::YearMonthDurationLiteral(b)) => { + a.is_identical_with(*b) + } + (Self::DayTimeDurationLiteral(a), Self::DayTimeDurationLiteral(b)) => { + a.is_identical_with(*b) + } + (Self::Triple(a), Self::Triple(b)) => a == b, + (_, _) => false, + } + } +} + +impl Eq for EncodedTerm {} + +impl Hash for EncodedTerm { 
+ fn hash(&self, state: &mut H) { + match self { + Self::NamedNode { iri_id } => iri_id.hash(state), + Self::NumericalBlankNode { id } => id.hash(state), + Self::SmallBlankNode(id) => id.hash(state), + Self::BigBlankNode { id_id } => id_id.hash(state), + Self::DefaultGraph => (), + Self::SmallStringLiteral(value) => value.hash(state), + Self::BigStringLiteral { value_id } => value_id.hash(state), + Self::SmallSmallLangStringLiteral { value, language } => { + value.hash(state); + language.hash(state); + } + Self::SmallBigLangStringLiteral { value, language_id } => { + value.hash(state); + language_id.hash(state); + } + Self::BigSmallLangStringLiteral { value_id, language } => { + value_id.hash(state); + language.hash(state); + } + Self::BigBigLangStringLiteral { + value_id, + language_id, + } => { + value_id.hash(state); + language_id.hash(state); + } + Self::SmallTypedLiteral { value, datatype_id } => { + value.hash(state); + datatype_id.hash(state); + } + Self::BigTypedLiteral { + value_id, + datatype_id, + } => { + value_id.hash(state); + datatype_id.hash(state); + } + Self::BooleanLiteral(value) => value.hash(state), + Self::FloatLiteral(value) => value.to_be_bytes().hash(state), + Self::DoubleLiteral(value) => value.to_be_bytes().hash(state), + Self::IntegerLiteral(value) => value.hash(state), + Self::DecimalLiteral(value) => value.hash(state), + Self::DateTimeLiteral(value) => value.hash(state), + Self::TimeLiteral(value) => value.hash(state), + Self::DateLiteral(value) => value.hash(state), + Self::GYearMonthLiteral(value) => value.hash(state), + Self::GYearLiteral(value) => value.hash(state), + Self::GMonthDayLiteral(value) => value.hash(state), + Self::GDayLiteral(value) => value.hash(state), + Self::GMonthLiteral(value) => value.hash(state), + Self::DurationLiteral(value) => value.hash(state), + Self::YearMonthDurationLiteral(value) => value.hash(state), + Self::DayTimeDurationLiteral(value) => value.hash(state), + Self::Triple(value) => value.hash(state), + } + } +} + +impl EncodedTerm { + pub fn is_named_node(&self) -> bool { + matches!(self, Self::NamedNode { .. }) + } + + pub fn is_blank_node(&self) -> bool { + matches!( + self, + Self::NumericalBlankNode { .. } + | Self::SmallBlankNode { .. } + | Self::BigBlankNode { .. } + ) + } + + pub fn is_literal(&self) -> bool { + matches!( + self, + Self::SmallStringLiteral { .. } + | Self::BigStringLiteral { .. } + | Self::SmallSmallLangStringLiteral { .. } + | Self::SmallBigLangStringLiteral { .. } + | Self::BigSmallLangStringLiteral { .. } + | Self::BigBigLangStringLiteral { .. } + | Self::SmallTypedLiteral { .. } + | Self::BigTypedLiteral { .. } + | Self::BooleanLiteral(_) + | Self::FloatLiteral(_) + | Self::DoubleLiteral(_) + | Self::IntegerLiteral(_) + | Self::DecimalLiteral(_) + | Self::DateTimeLiteral(_) + | Self::TimeLiteral(_) + | Self::DateLiteral(_) + | Self::GYearMonthLiteral(_) + | Self::GYearLiteral(_) + | Self::GMonthDayLiteral(_) + | Self::GDayLiteral(_) + | Self::GMonthLiteral(_) + | Self::DurationLiteral(_) + | Self::YearMonthDurationLiteral(_) + | Self::DayTimeDurationLiteral(_) + ) + } + + pub fn is_unknown_typed_literal(&self) -> bool { + matches!( + self, + Self::SmallTypedLiteral { .. } | Self::BigTypedLiteral { .. } + ) + } + + pub fn is_default_graph(&self) -> bool { + matches!(self, Self::DefaultGraph) + } + + pub fn is_triple(&self) -> bool { + matches!(self, Self::Triple { .. 
}) + } +} + +impl From for EncodedTerm { + fn from(value: bool) -> Self { + Self::BooleanLiteral(value.into()) + } +} + +impl From for EncodedTerm { + fn from(value: i64) -> Self { + Self::IntegerLiteral(value.into()) + } +} + +impl From for EncodedTerm { + fn from(value: i32) -> Self { + Self::IntegerLiteral(value.into()) + } +} + +impl From for EncodedTerm { + fn from(value: u32) -> Self { + Self::IntegerLiteral(value.into()) + } +} + +impl From for EncodedTerm { + fn from(value: u8) -> Self { + Self::IntegerLiteral(value.into()) + } +} + +impl From for EncodedTerm { + fn from(value: f32) -> Self { + Self::FloatLiteral(value.into()) + } +} + +impl From for EncodedTerm { + fn from(value: Float) -> Self { + Self::FloatLiteral(value) + } +} + +impl From for EncodedTerm { + fn from(value: f64) -> Self { + Self::DoubleLiteral(value.into()) + } +} + +impl From for EncodedTerm { + fn from(value: Boolean) -> Self { + Self::BooleanLiteral(value) + } +} + +impl From for EncodedTerm { + fn from(value: Double) -> Self { + Self::DoubleLiteral(value) + } +} + +impl From for EncodedTerm { + fn from(value: Integer) -> Self { + Self::IntegerLiteral(value) + } +} + +impl From for EncodedTerm { + fn from(value: Decimal) -> Self { + Self::DecimalLiteral(value) + } +} + +impl From for EncodedTerm { + fn from(value: DateTime) -> Self { + Self::DateTimeLiteral(value) + } +} + +impl From

."; + /// + /// let parser = RdfParser::from_format(RdfFormat::Turtle).with_base_iri("http://example.com")?; + /// let quads = parser + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; + /// + /// assert_eq!(quads.len(), 1); + /// assert_eq!(quads[0].subject.to_string(), ""); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[inline] + pub fn with_base_iri(mut self, base_iri: impl Into) -> Result { + self.inner = match self.inner { + RdfParserKind::N3(p) => RdfParserKind::N3(p), + RdfParserKind::NTriples(p) => RdfParserKind::NTriples(p), + RdfParserKind::NQuads(p) => RdfParserKind::NQuads(p), + RdfParserKind::RdfXml(p) => RdfParserKind::RdfXml(p.with_base_iri(base_iri)?), + RdfParserKind::TriG(p) => RdfParserKind::TriG(p.with_base_iri(base_iri)?), + RdfParserKind::Turtle(p) => RdfParserKind::Turtle(p.with_base_iri(base_iri)?), + }; + Ok(self) + } + + /// Provides the name graph name that should replace the default graph in the returned quads. + /// + /// ``` + /// use oxrdf::NamedNode; + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// let file = " ."; + /// + /// let parser = RdfParser::from_format(RdfFormat::Turtle) + /// .with_default_graph(NamedNode::new("http://example.com/g")?); + /// let quads = parser + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; + /// + /// assert_eq!(quads.len(), 1); + /// assert_eq!(quads[0].graph_name.to_string(), ""); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[inline] + pub fn with_default_graph(mut self, default_graph: impl Into) -> Self { + self.default_graph = default_graph.into(); + self + } + + /// Sets that the parser must fail if parsing a named graph. + /// + /// This function restricts the parser to only parse a single [RDF graph](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-graph) and not an [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset). + /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// let file = " ."; + /// + /// let parser = RdfParser::from_format(RdfFormat::NQuads).without_named_graphs(); + /// assert!(parser.parse_read(file.as_bytes()).next().unwrap().is_err()); + /// ``` + #[inline] + pub fn without_named_graphs(mut self) -> Self { + self.without_named_graphs = true; + self + } + + /// Renames the blank nodes ids from the ones set in the serialization to random ids. + /// + /// This allows to avoid id conflicts when merging graphs together. + /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// let file = "_:a ."; + /// + /// let result1 = RdfParser::from_format(RdfFormat::NQuads) + /// .rename_blank_nodes() + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; + /// let result2 = RdfParser::from_format(RdfFormat::NQuads) + /// .rename_blank_nodes() + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; + /// assert_ne!(result1, result2); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[inline] + pub fn rename_blank_nodes(mut self) -> Self { + self.rename_blank_nodes = true; + self + } + + /// Assumes the file is valid to make parsing faster. + /// + /// It will skip some validations. + /// + /// Note that if the file is actually not valid, then broken RDF might be emitted by the parser. 
+ #[inline] + pub fn unchecked(mut self) -> Self { + self.inner = match self.inner { + RdfParserKind::N3(p) => RdfParserKind::N3(p.unchecked()), + RdfParserKind::NTriples(p) => RdfParserKind::NTriples(p.unchecked()), + RdfParserKind::NQuads(p) => RdfParserKind::NQuads(p.unchecked()), + RdfParserKind::RdfXml(p) => RdfParserKind::RdfXml(p.unchecked()), + RdfParserKind::TriG(p) => RdfParserKind::TriG(p.unchecked()), + RdfParserKind::Turtle(p) => RdfParserKind::Turtle(p.unchecked()), + }; + self + } + + /// Parses from a [`Read`] implementation and returns an iterator of quads. + /// + /// Reads are buffered. + /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// let file = " ."; + /// + /// let parser = RdfParser::from_format(RdfFormat::NTriples); + /// let quads = parser + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; + /// + /// assert_eq!(quads.len(), 1); + /// assert_eq!(quads[0].subject.to_string(), ""); + /// # std::io::Result::Ok(()) + /// ``` + pub fn parse_read(self, reader: R) -> FromReadQuadReader { + FromReadQuadReader { + parser: match self.inner { + RdfParserKind::N3(p) => FromReadQuadReaderKind::N3(p.parse_read(reader)), + RdfParserKind::NQuads(p) => FromReadQuadReaderKind::NQuads(p.parse_read(reader)), + RdfParserKind::NTriples(p) => { + FromReadQuadReaderKind::NTriples(p.parse_read(reader)) + } + RdfParserKind::RdfXml(p) => FromReadQuadReaderKind::RdfXml(p.parse_read(reader)), + RdfParserKind::TriG(p) => FromReadQuadReaderKind::TriG(p.parse_read(reader)), + RdfParserKind::Turtle(p) => FromReadQuadReaderKind::Turtle(p.parse_read(reader)), + }, + mapper: QuadMapper { + default_graph: self.default_graph.clone(), + without_named_graphs: self.without_named_graphs, + blank_node_map: self.rename_blank_nodes.then(HashMap::new), + }, + } + } + + /// Parses from a Tokio [`AsyncRead`] implementation and returns an async iterator of quads. + /// + /// Reads are buffered. 
+ /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxrdfio::RdfParseError> { + /// let file = " ."; + /// + /// let parser = RdfParser::from_format(RdfFormat::NTriples); + /// let mut reader = parser.parse_tokio_async_read(file.as_bytes()); + /// if let Some(quad) = reader.next().await { + /// assert_eq!(quad?.subject.to_string(), ""); + /// } + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn parse_tokio_async_read( + self, + reader: R, + ) -> FromTokioAsyncReadQuadReader { + FromTokioAsyncReadQuadReader { + parser: match self.inner { + RdfParserKind::N3(p) => { + FromTokioAsyncReadQuadReaderKind::N3(p.parse_tokio_async_read(reader)) + } + RdfParserKind::NQuads(p) => { + FromTokioAsyncReadQuadReaderKind::NQuads(p.parse_tokio_async_read(reader)) + } + RdfParserKind::NTriples(p) => { + FromTokioAsyncReadQuadReaderKind::NTriples(p.parse_tokio_async_read(reader)) + } + RdfParserKind::RdfXml(p) => { + FromTokioAsyncReadQuadReaderKind::RdfXml(p.parse_tokio_async_read(reader)) + } + RdfParserKind::TriG(p) => { + FromTokioAsyncReadQuadReaderKind::TriG(p.parse_tokio_async_read(reader)) + } + RdfParserKind::Turtle(p) => { + FromTokioAsyncReadQuadReaderKind::Turtle(p.parse_tokio_async_read(reader)) + } + }, + mapper: QuadMapper { + default_graph: self.default_graph.clone(), + without_named_graphs: self.without_named_graphs, + blank_node_map: self.rename_blank_nodes.then(HashMap::new), + }, + } + } +} + +impl From for RdfParser { + fn from(format: RdfFormat) -> Self { + Self::from_format(format) + } +} + +/// Parses a RDF file from a [`Read`] implementation. Can be built using [`RdfParser::parse_read`]. +/// +/// Reads are buffered. +/// +/// ``` +/// use oxrdfio::{RdfFormat, RdfParser}; +/// +/// let file = " ."; +/// +/// let parser = RdfParser::from_format(RdfFormat::NTriples); +/// let quads = parser +/// .parse_read(file.as_bytes()) +/// .collect::, _>>()?; +/// +/// assert_eq!(quads.len(), 1); +/// assert_eq!(quads[0].subject.to_string(), ""); +/// # std::io::Result::Ok(()) +/// ``` +#[must_use] +pub struct FromReadQuadReader { + parser: FromReadQuadReaderKind, + mapper: QuadMapper, +} + +enum FromReadQuadReaderKind { + N3(FromReadN3Reader), + NQuads(FromReadNQuadsReader), + NTriples(FromReadNTriplesReader), + RdfXml(FromReadRdfXmlReader), + TriG(FromReadTriGReader), + Turtle(FromReadTurtleReader), +} + +impl Iterator for FromReadQuadReader { + type Item = Result; + + fn next(&mut self) -> Option { + Some(match &mut self.parser { + FromReadQuadReaderKind::N3(parser) => match parser.next()? { + Ok(quad) => self.mapper.map_n3_quad(quad), + Err(e) => Err(e.into()), + }, + FromReadQuadReaderKind::NQuads(parser) => match parser.next()? { + Ok(quad) => self.mapper.map_quad(quad), + Err(e) => Err(e.into()), + }, + FromReadQuadReaderKind::NTriples(parser) => match parser.next()? { + Ok(triple) => Ok(self.mapper.map_triple_to_quad(triple)), + Err(e) => Err(e.into()), + }, + FromReadQuadReaderKind::RdfXml(parser) => match parser.next()? { + Ok(triple) => Ok(self.mapper.map_triple_to_quad(triple)), + Err(e) => Err(e.into()), + }, + FromReadQuadReaderKind::TriG(parser) => match parser.next()? { + Ok(quad) => self.mapper.map_quad(quad), + Err(e) => Err(e.into()), + }, + FromReadQuadReaderKind::Turtle(parser) => match parser.next()? 
{ + Ok(triple) => Ok(self.mapper.map_triple_to_quad(triple)), + Err(e) => Err(e.into()), + }, + }) + } +} + +impl FromReadQuadReader { + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). + /// + /// An empty iterator is return if the format does not support prefixes. + /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = RdfParser::from_format(RdfFormat::Turtle).parse_read(file.as_slice()); + /// assert!(reader.prefixes().collect::>().is_empty()); // No prefix at the beginning + /// + /// reader.next().unwrap()?; // We read the first triple + /// assert_eq!( + /// reader.prefixes().collect::>(), + /// [("schema", "http://schema.org/")] + /// ); // There are now prefixes + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn prefixes(&self) -> PrefixesIter<'_> { + PrefixesIter { + inner: match &self.parser { + FromReadQuadReaderKind::N3(p) => PrefixesIterKind::N3(p.prefixes()), + FromReadQuadReaderKind::TriG(p) => PrefixesIterKind::TriG(p.prefixes()), + FromReadQuadReaderKind::Turtle(p) => PrefixesIterKind::Turtle(p.prefixes()), + FromReadQuadReaderKind::NQuads(_) + | FromReadQuadReaderKind::NTriples(_) + | FromReadQuadReaderKind::RdfXml(_) => PrefixesIterKind::None, /* TODO: implement for RDF/XML */ + }, + } + } + + /// The base IRI considered at the current step of the parsing. + /// + /// `None` is returned if no base IRI is set or the format does not support base IRIs. + /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = RdfParser::from_format(RdfFormat::Turtle).parse_read(file.as_slice()); + /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. + /// + /// reader.next().unwrap()?; // We read the first triple + /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI. + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn base_iri(&self) -> Option<&str> { + match &self.parser { + FromReadQuadReaderKind::N3(p) => p.base_iri(), + FromReadQuadReaderKind::TriG(p) => p.base_iri(), + FromReadQuadReaderKind::Turtle(p) => p.base_iri(), + FromReadQuadReaderKind::NQuads(_) + | FromReadQuadReaderKind::NTriples(_) + | FromReadQuadReaderKind::RdfXml(_) => None, // TODO: implement for RDF/XML + } + } +} + +/// Parses a RDF file from a Tokio [`AsyncRead`] implementation. Can be built using [`RdfParser::parse_tokio_async_read`]. +/// +/// Reads are buffered. 
+/// +/// ``` +/// use oxrdfio::{RdfFormat, RdfParser}; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), oxrdfio::RdfParseError> { +/// let file = " ."; +/// +/// let parser = RdfParser::from_format(RdfFormat::NTriples); +/// let mut reader = parser.parse_tokio_async_read(file.as_bytes()); +/// if let Some(quad) = reader.next().await { +/// assert_eq!(quad?.subject.to_string(), ""); +/// } +/// # Ok(()) +/// # } +/// ``` +#[must_use] +#[cfg(feature = "async-tokio")] +pub struct FromTokioAsyncReadQuadReader { + parser: FromTokioAsyncReadQuadReaderKind, + mapper: QuadMapper, +} + +#[cfg(feature = "async-tokio")] +enum FromTokioAsyncReadQuadReaderKind { + N3(FromTokioAsyncReadN3Reader), + NQuads(FromTokioAsyncReadNQuadsReader), + NTriples(FromTokioAsyncReadNTriplesReader), + RdfXml(FromTokioAsyncReadRdfXmlReader), + TriG(FromTokioAsyncReadTriGReader), + Turtle(FromTokioAsyncReadTurtleReader), +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadQuadReader { + pub async fn next(&mut self) -> Option> { + Some(match &mut self.parser { + FromTokioAsyncReadQuadReaderKind::N3(parser) => match parser.next().await? { + Ok(quad) => self.mapper.map_n3_quad(quad), + Err(e) => Err(e.into()), + }, + FromTokioAsyncReadQuadReaderKind::NQuads(parser) => match parser.next().await? { + Ok(quad) => self.mapper.map_quad(quad), + Err(e) => Err(e.into()), + }, + FromTokioAsyncReadQuadReaderKind::NTriples(parser) => match parser.next().await? { + Ok(triple) => Ok(self.mapper.map_triple_to_quad(triple)), + Err(e) => Err(e.into()), + }, + FromTokioAsyncReadQuadReaderKind::RdfXml(parser) => match parser.next().await? { + Ok(triple) => Ok(self.mapper.map_triple_to_quad(triple)), + Err(e) => Err(e.into()), + }, + FromTokioAsyncReadQuadReaderKind::TriG(parser) => match parser.next().await? { + Ok(quad) => self.mapper.map_quad(quad), + Err(e) => Err(e.into()), + }, + FromTokioAsyncReadQuadReaderKind::Turtle(parser) => match parser.next().await? { + Ok(triple) => Ok(self.mapper.map_triple_to_quad(triple)), + Err(e) => Err(e.into()), + }, + }) + } + + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). + /// + /// An empty iterator is return if the format does not support prefixes. + /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfParser}; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#"@base . + /// @prefix schema: . 
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader =
+    ///     RdfParser::from_format(RdfFormat::Turtle).parse_tokio_async_read(file.as_slice());
+    /// assert_eq!(reader.prefixes().collect::<Vec<_>>(), []); // No prefix at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(
+    ///     reader.prefixes().collect::<Vec<_>>(),
+    ///     [("schema", "http://schema.org/")]
+    /// ); // There are now prefixes
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn prefixes(&self) -> PrefixesIter<'_> {
+        PrefixesIter {
+            inner: match &self.parser {
+                FromTokioAsyncReadQuadReaderKind::N3(p) => PrefixesIterKind::N3(p.prefixes()),
+                FromTokioAsyncReadQuadReaderKind::TriG(p) => PrefixesIterKind::TriG(p.prefixes()),
+                FromTokioAsyncReadQuadReaderKind::Turtle(p) => {
+                    PrefixesIterKind::Turtle(p.prefixes())
+                }
+                FromTokioAsyncReadQuadReaderKind::NQuads(_)
+                | FromTokioAsyncReadQuadReaderKind::NTriples(_)
+                | FromTokioAsyncReadQuadReaderKind::RdfXml(_) => PrefixesIterKind::None, /* TODO: implement for RDF/XML */
+            },
+        }
+    }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// `None` is returned if no base IRI is set or if the format does not support base IRIs.
+    ///
+    /// ```
+    /// use oxrdfio::{RdfFormat, RdfParser};
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::TurtleParseError> {
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader =
+    ///     RdfParser::from_format(RdfFormat::Turtle).parse_tokio_async_read(file.as_slice());
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        match &self.parser {
+            FromTokioAsyncReadQuadReaderKind::N3(p) => p.base_iri(),
+            FromTokioAsyncReadQuadReaderKind::TriG(p) => p.base_iri(),
+            FromTokioAsyncReadQuadReaderKind::Turtle(p) => p.base_iri(),
+            FromTokioAsyncReadQuadReaderKind::NQuads(_)
+            | FromTokioAsyncReadQuadReaderKind::NTriples(_)
+            | FromTokioAsyncReadQuadReaderKind::RdfXml(_) => None, // TODO: implement for RDF/XML
+        }
+    }
+}
+
+/// Iterator on the file prefixes.
+///
+/// See [`FromReadQuadReader::prefixes`].
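+///
+/// A short usage sketch (it relies only on the `prefixes()` accessors shown above; the
+/// Turtle content is illustrative):
+///
+/// ```
+/// use oxrdfio::{RdfFormat, RdfParser};
+///
+/// let file = b"@prefix schema: <http://schema.org/> . <http://example.com/foo> a schema:Person .";
+/// let mut reader = RdfParser::from_format(RdfFormat::Turtle).parse_read(file.as_slice());
+/// reader.next().unwrap()?; // prefixes are only known once parsing has advanced
+/// for (prefix, iri) in reader.prefixes() {
+///     println!("PREFIX {prefix}: <{iri}>");
+/// }
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```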
+pub struct PrefixesIter<'a> { + inner: PrefixesIterKind<'a>, +} + +enum PrefixesIterKind<'a> { + Turtle(TurtlePrefixesIter<'a>), + TriG(TriGPrefixesIter<'a>), + N3(N3PrefixesIter<'a>), + None, +} + +impl<'a> Iterator for PrefixesIter<'a> { + type Item = (&'a str, &'a str); + + #[inline] + fn next(&mut self) -> Option { + match &mut self.inner { + PrefixesIterKind::Turtle(iter) => iter.next(), + PrefixesIterKind::TriG(iter) => iter.next(), + PrefixesIterKind::N3(iter) => iter.next(), + PrefixesIterKind::None => None, + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + match &self.inner { + PrefixesIterKind::Turtle(iter) => iter.size_hint(), + PrefixesIterKind::TriG(iter) => iter.size_hint(), + PrefixesIterKind::N3(iter) => iter.size_hint(), + PrefixesIterKind::None => (0, Some(0)), + } + } +} + +struct QuadMapper { + default_graph: GraphName, + without_named_graphs: bool, + blank_node_map: Option>, +} + +impl QuadMapper { + fn map_blank_node(&mut self, node: BlankNode) -> BlankNode { + if let Some(blank_node_map) = &mut self.blank_node_map { + blank_node_map + .entry(node) + .or_insert_with(BlankNode::default) + .clone() + } else { + node + } + } + + fn map_subject(&mut self, node: Subject) -> Subject { + match node { + Subject::NamedNode(node) => node.into(), + Subject::BlankNode(node) => self.map_blank_node(node).into(), + #[cfg(feature = "rdf-star")] + Subject::Triple(triple) => self.map_triple(*triple).into(), + } + } + + fn map_term(&mut self, node: Term) -> Term { + match node { + Term::NamedNode(node) => node.into(), + Term::BlankNode(node) => self.map_blank_node(node).into(), + Term::Literal(literal) => literal.into(), + #[cfg(feature = "rdf-star")] + Term::Triple(triple) => self.map_triple(*triple).into(), + } + } + + fn map_triple(&mut self, triple: Triple) -> Triple { + Triple { + subject: self.map_subject(triple.subject), + predicate: triple.predicate, + object: self.map_term(triple.object), + } + } + + fn map_graph_name(&mut self, graph_name: GraphName) -> Result { + match graph_name { + GraphName::NamedNode(node) => { + if self.without_named_graphs { + Err(RdfParseError::msg("Named graphs are not allowed")) + } else { + Ok(node.into()) + } + } + GraphName::BlankNode(node) => { + if self.without_named_graphs { + Err(RdfParseError::msg("Named graphs are not allowed")) + } else { + Ok(self.map_blank_node(node).into()) + } + } + GraphName::DefaultGraph => Ok(self.default_graph.clone()), + } + } + + fn map_quad(&mut self, quad: Quad) -> Result { + Ok(Quad { + subject: self.map_subject(quad.subject), + predicate: quad.predicate, + object: self.map_term(quad.object), + graph_name: self.map_graph_name(quad.graph_name)?, + }) + } + + fn map_triple_to_quad(&mut self, triple: Triple) -> Quad { + self.map_triple(triple).in_graph(self.default_graph.clone()) + } + + fn map_n3_quad(&mut self, quad: N3Quad) -> Result { + Ok(Quad { + subject: match quad.subject { + N3Term::NamedNode(s) => Ok(s.into()), + N3Term::BlankNode(s) => Ok(self.map_blank_node(s).into()), + N3Term::Literal(_) => Err(RdfParseError::msg( + "literals are not allowed in regular RDF subjects", + )), + #[cfg(feature = "rdf-star")] + N3Term::Triple(s) => Ok(self.map_triple(*s).into()), + N3Term::Variable(_) => Err(RdfParseError::msg( + "variables are not allowed in regular RDF subjects", + )), + }?, + predicate: match quad.predicate { + N3Term::NamedNode(p) => Ok(p), + N3Term::BlankNode(_) => Err(RdfParseError::msg( + "blank nodes are not allowed in regular RDF predicates", + )), + N3Term::Literal(_) => 
Err(RdfParseError::msg( + "literals are not allowed in regular RDF predicates", + )), + #[cfg(feature = "rdf-star")] + N3Term::Triple(_) => Err(RdfParseError::msg( + "quoted triples are not allowed in regular RDF predicates", + )), + N3Term::Variable(_) => Err(RdfParseError::msg( + "variables are not allowed in regular RDF predicates", + )), + }?, + object: match quad.object { + N3Term::NamedNode(o) => Ok(o.into()), + N3Term::BlankNode(o) => Ok(self.map_blank_node(o).into()), + N3Term::Literal(o) => Ok(o.into()), + #[cfg(feature = "rdf-star")] + N3Term::Triple(o) => Ok(self.map_triple(*o).into()), + N3Term::Variable(_) => Err(RdfParseError::msg( + "variables are not allowed in regular RDF objects", + )), + }?, + graph_name: self.map_graph_name(quad.graph_name)?, + }) + } +} diff --git a/ng-oxigraph/src/oxrdfio/serializer.rs b/ng-oxigraph/src/oxrdfio/serializer.rs new file mode 100644 index 0000000..6e3d439 --- /dev/null +++ b/ng-oxigraph/src/oxrdfio/serializer.rs @@ -0,0 +1,412 @@ +//! Utilities to write RDF graphs and datasets. + +use crate::oxrdf::{GraphNameRef, IriParseError, QuadRef, TripleRef}; +use crate::oxrdfio::format::RdfFormat; + +#[cfg(feature = "async-tokio")] +use crate::oxrdfxml::ToTokioAsyncWriteRdfXmlWriter; +use crate::oxrdfxml::{RdfXmlSerializer, ToWriteRdfXmlWriter}; + +#[cfg(feature = "async-tokio")] +use crate::oxttl::nquads::ToTokioAsyncWriteNQuadsWriter; +use crate::oxttl::nquads::{NQuadsSerializer, ToWriteNQuadsWriter}; +#[cfg(feature = "async-tokio")] +use crate::oxttl::ntriples::ToTokioAsyncWriteNTriplesWriter; +use crate::oxttl::ntriples::{NTriplesSerializer, ToWriteNTriplesWriter}; +#[cfg(feature = "async-tokio")] +use crate::oxttl::trig::ToTokioAsyncWriteTriGWriter; +use crate::oxttl::trig::{ToWriteTriGWriter, TriGSerializer}; +#[cfg(feature = "async-tokio")] +use crate::oxttl::turtle::ToTokioAsyncWriteTurtleWriter; +use crate::oxttl::turtle::{ToWriteTurtleWriter, TurtleSerializer}; +use std::io::{self, Write}; +#[cfg(feature = "async-tokio")] +use tokio::io::AsyncWrite; + +/// A serializer for RDF serialization formats. 
+/// +/// It currently supports the following formats: +/// * [N3](https://w3c.github.io/N3/spec/) ([`RdfFormat::N3`]) +/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`RdfFormat::NQuads`]) +/// * [canonical](https://www.w3.org/TR/n-triples/#canonical-ntriples) [N-Triples](https://www.w3.org/TR/n-triples/) ([`RdfFormat::NTriples`]) +/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`RdfFormat::RdfXml`]) +/// * [TriG](https://www.w3.org/TR/trig/) ([`RdfFormat::TriG`]) +/// * [Turtle](https://www.w3.org/TR/turtle/) ([`RdfFormat::Turtle`]) +/// +/// ``` +/// use oxrdfio::{RdfFormat, RdfSerializer}; +/// use oxrdf::{Quad, NamedNode}; +/// +/// let mut writer = RdfSerializer::from_format(RdfFormat::NQuads).serialize_to_write(Vec::new()); +/// writer.write_quad(&Quad { +/// subject: NamedNode::new("http://example.com/s")?.into(), +/// predicate: NamedNode::new("http://example.com/p")?, +/// object: NamedNode::new("http://example.com/o")?.into(), +/// graph_name: NamedNode::new("http://example.com/g")?.into() +/// })?; +/// assert_eq!(writer.finish()?, b" .\n"); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct RdfSerializer { + inner: RdfSerializerKind, +} + +enum RdfSerializerKind { + NQuads(NQuadsSerializer), + NTriples(NTriplesSerializer), + RdfXml(RdfXmlSerializer), + TriG(TriGSerializer), + Turtle(TurtleSerializer), +} + +impl RdfSerializer { + /// Builds a serializer for the given format + #[inline] + pub fn from_format(format: RdfFormat) -> Self { + Self { + inner: match format { + RdfFormat::NQuads => RdfSerializerKind::NQuads(NQuadsSerializer::new()), + RdfFormat::NTriples => RdfSerializerKind::NTriples(NTriplesSerializer::new()), + RdfFormat::RdfXml => RdfSerializerKind::RdfXml(RdfXmlSerializer::new()), + RdfFormat::TriG => RdfSerializerKind::TriG(TriGSerializer::new()), + RdfFormat::Turtle | RdfFormat::N3 => { + RdfSerializerKind::Turtle(TurtleSerializer::new()) + } + }, + } + } + + /// The format the serializer serializes to. + /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfSerializer}; + /// + /// assert_eq!( + /// RdfSerializer::from_format(RdfFormat::Turtle).format(), + /// RdfFormat::Turtle + /// ); + /// ``` + pub fn format(&self) -> RdfFormat { + match &self.inner { + RdfSerializerKind::NQuads(_) => RdfFormat::NQuads, + RdfSerializerKind::NTriples(_) => RdfFormat::NTriples, + RdfSerializerKind::RdfXml(_) => RdfFormat::RdfXml, + RdfSerializerKind::TriG(_) => RdfFormat::TriG, + RdfSerializerKind::Turtle(_) => RdfFormat::Turtle, + } + } + + /// If the format supports it, sets a prefix. + /// + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::{NamedNodeRef, TripleRef}; + /// use oxrdfio::{RdfFormat, RdfSerializer}; + /// + /// let mut writer = RdfSerializer::from_format(RdfFormat::Turtle) + /// .with_prefix("schema", "http://schema.org/")? 
+    ///     .serialize_to_write(Vec::new());
+    /// writer.write_triple(TripleRef {
+    ///     subject: NamedNodeRef::new("http://example.com/s")?.into(),
+    ///     predicate: rdf::TYPE.into(),
+    ///     object: NamedNodeRef::new("http://schema.org/Person")?.into(),
+    /// })?;
+    /// assert_eq!(
+    ///     writer.finish()?,
+    ///     b"@prefix schema: <http://schema.org/> .\n<http://example.com/s> a schema:Person .\n"
+    /// );
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    #[inline]
+    pub fn with_prefix(
+        mut self,
+        prefix_name: impl Into<String>,
+        prefix_iri: impl Into<String>,
+    ) -> Result<Self, IriParseError> {
+        self.inner = match self.inner {
+            RdfSerializerKind::NQuads(s) => RdfSerializerKind::NQuads(s),
+            RdfSerializerKind::NTriples(s) => RdfSerializerKind::NTriples(s),
+            RdfSerializerKind::RdfXml(s) => {
+                RdfSerializerKind::RdfXml(s.with_prefix(prefix_name, prefix_iri)?)
+            }
+            RdfSerializerKind::TriG(s) => {
+                RdfSerializerKind::TriG(s.with_prefix(prefix_name, prefix_iri)?)
+            }
+            RdfSerializerKind::Turtle(s) => {
+                RdfSerializerKind::Turtle(s.with_prefix(prefix_name, prefix_iri)?)
+            }
+        };
+        Ok(self)
+    }
+
+    /// Writes to a [`Write`] implementation.
+    ///
+    /// <div class="warning">
+    ///
+    /// Do not forget to run the [`finish`](ToWriteQuadWriter::finish()) method to properly write the last bytes of the file.</div>
+    ///
+    /// <div class="warning">
+    ///
+    /// This writer does unbuffered writes. You might want to use [`BufWriter`](io::BufWriter) to avoid that.</div>
+ /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfSerializer}; + /// use oxrdf::{Quad, NamedNode}; + /// + /// let mut writer = RdfSerializer::from_format(RdfFormat::NQuads).serialize_to_write(Vec::new()); + /// writer.write_quad(&Quad { + /// subject: NamedNode::new("http://example.com/s")?.into(), + /// predicate: NamedNode::new("http://example.com/p")?, + /// object: NamedNode::new("http://example.com/o")?.into(), + /// graph_name: NamedNode::new("http://example.com/g")?.into() + /// })?; + /// assert_eq!(writer.finish()?, b" .\n"); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn serialize_to_write(self, write: W) -> ToWriteQuadWriter { + ToWriteQuadWriter { + formatter: match self.inner { + RdfSerializerKind::NQuads(s) => { + ToWriteQuadWriterKind::NQuads(s.serialize_to_write(write)) + } + RdfSerializerKind::NTriples(s) => { + ToWriteQuadWriterKind::NTriples(s.serialize_to_write(write)) + } + RdfSerializerKind::RdfXml(s) => { + ToWriteQuadWriterKind::RdfXml(s.serialize_to_write(write)) + } + RdfSerializerKind::TriG(s) => { + ToWriteQuadWriterKind::TriG(s.serialize_to_write(write)) + } + RdfSerializerKind::Turtle(s) => { + ToWriteQuadWriterKind::Turtle(s.serialize_to_write(write)) + } + }, + } + } + + /// Writes to a Tokio [`AsyncWrite`] implementation. + /// + ///
+    ///
+    /// Do not forget to run the [`finish`](ToTokioAsyncWriteQuadWriter::finish()) method to properly write the last bytes of the file.</div>
+    ///
+    /// <div class="warning">
+    ///
+    /// This writer does unbuffered writes. You might want to use [`BufWriter`](tokio::io::BufWriter) to avoid that.</div>
+ /// + /// ``` + /// use oxrdfio::{RdfFormat, RdfSerializer}; + /// use oxrdf::{Quad, NamedNode}; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> std::io::Result<()> { + /// let mut writer = RdfSerializer::from_format(RdfFormat::NQuads).serialize_to_tokio_async_write(Vec::new()); + /// writer.write_quad(&Quad { + /// subject: NamedNode::new_unchecked("http://example.com/s").into(), + /// predicate: NamedNode::new_unchecked("http://example.com/p"), + /// object: NamedNode::new_unchecked("http://example.com/o").into(), + /// graph_name: NamedNode::new_unchecked("http://example.com/g").into() + /// }).await?; + /// assert_eq!(writer.finish().await?, " .\n"); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn serialize_to_tokio_async_write( + self, + write: W, + ) -> ToTokioAsyncWriteQuadWriter { + ToTokioAsyncWriteQuadWriter { + formatter: match self.inner { + RdfSerializerKind::NQuads(s) => { + ToTokioAsyncWriteQuadWriterKind::NQuads(s.serialize_to_tokio_async_write(write)) + } + RdfSerializerKind::NTriples(s) => ToTokioAsyncWriteQuadWriterKind::NTriples( + s.serialize_to_tokio_async_write(write), + ), + RdfSerializerKind::RdfXml(s) => { + ToTokioAsyncWriteQuadWriterKind::RdfXml(s.serialize_to_tokio_async_write(write)) + } + RdfSerializerKind::TriG(s) => { + ToTokioAsyncWriteQuadWriterKind::TriG(s.serialize_to_tokio_async_write(write)) + } + RdfSerializerKind::Turtle(s) => { + ToTokioAsyncWriteQuadWriterKind::Turtle(s.serialize_to_tokio_async_write(write)) + } + }, + } + } +} + +impl From for RdfSerializer { + fn from(format: RdfFormat) -> Self { + Self::from_format(format) + } +} + +/// Writes quads or triples to a [`Write`] implementation. +/// +/// Can be built using [`RdfSerializer::serialize_to_write`]. +/// +///
+///
+/// Do not forget to run the [`finish`](ToWriteQuadWriter::finish()) method to properly write the last bytes of the file.</div>
+///
+/// <div class="warning">
+///
+/// This writer does unbuffered writes. You might want to use [`BufWriter`](io::BufWriter) to avoid that.</div>
+/// +/// ``` +/// use oxrdfio::{RdfFormat, RdfSerializer}; +/// use oxrdf::{Quad, NamedNode}; +/// +/// let mut writer = RdfSerializer::from_format(RdfFormat::NQuads).serialize_to_write(Vec::new()); +/// writer.write_quad(&Quad { +/// subject: NamedNode::new("http://example.com/s")?.into(), +/// predicate: NamedNode::new("http://example.com/p")?, +/// object: NamedNode::new("http://example.com/o")?.into(), +/// graph_name: NamedNode::new("http://example.com/g")?.into(), +/// })?; +/// assert_eq!(writer.finish()?, b" .\n"); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct ToWriteQuadWriter { + formatter: ToWriteQuadWriterKind, +} + +enum ToWriteQuadWriterKind { + NQuads(ToWriteNQuadsWriter), + NTriples(ToWriteNTriplesWriter), + RdfXml(ToWriteRdfXmlWriter), + TriG(ToWriteTriGWriter), + Turtle(ToWriteTurtleWriter), +} + +impl ToWriteQuadWriter { + /// Writes a [`QuadRef`] + pub fn write_quad<'a>(&mut self, quad: impl Into>) -> io::Result<()> { + match &mut self.formatter { + ToWriteQuadWriterKind::NQuads(writer) => writer.write_quad(quad), + ToWriteQuadWriterKind::NTriples(writer) => writer.write_triple(to_triple(quad)?), + ToWriteQuadWriterKind::RdfXml(writer) => writer.write_triple(to_triple(quad)?), + ToWriteQuadWriterKind::TriG(writer) => writer.write_quad(quad), + ToWriteQuadWriterKind::Turtle(writer) => writer.write_triple(to_triple(quad)?), + } + } + + /// Writes a [`TripleRef`] + pub fn write_triple<'a>(&mut self, triple: impl Into>) -> io::Result<()> { + self.write_quad(triple.into().in_graph(GraphNameRef::DefaultGraph)) + } + + /// Writes the last bytes of the file + /// + /// Note that this function does not flush the writer. You need to do that if you are using a [`BufWriter`](io::BufWriter). + pub fn finish(self) -> io::Result { + Ok(match self.formatter { + ToWriteQuadWriterKind::NQuads(writer) => writer.finish(), + ToWriteQuadWriterKind::NTriples(writer) => writer.finish(), + ToWriteQuadWriterKind::RdfXml(writer) => writer.finish()?, + ToWriteQuadWriterKind::TriG(writer) => writer.finish()?, + ToWriteQuadWriterKind::Turtle(writer) => writer.finish()?, + }) + } +} + +/// Writes quads or triples to a [`Write`] implementation. +/// +/// Can be built using [`RdfSerializer::serialize_to_write`]. +/// +///
+///
+/// Do not forget to run the [`finish`](ToTokioAsyncWriteQuadWriter::finish()) method to properly write the last bytes of the file.</div>
+///
+/// <div class="warning">
+///
+/// This writer does unbuffered writes. You might want to use [`BufWriter`](tokio::io::BufWriter) to avoid that.</div>
+/// +/// ``` +/// use oxrdfio::{RdfFormat, RdfSerializer}; +/// use oxrdf::{Quad, NamedNode}; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> std::io::Result<()> { +/// let mut writer = RdfSerializer::from_format(RdfFormat::NQuads).serialize_to_tokio_async_write(Vec::new()); +/// writer.write_quad(&Quad { +/// subject: NamedNode::new_unchecked("http://example.com/s").into(), +/// predicate: NamedNode::new_unchecked("http://example.com/p"), +/// object: NamedNode::new_unchecked("http://example.com/o").into(), +/// graph_name: NamedNode::new_unchecked("http://example.com/g").into() +/// }).await?; +/// assert_eq!(writer.finish().await?, " .\n"); +/// # Ok(()) +/// # } +/// ``` +#[must_use] +#[cfg(feature = "async-tokio")] +pub struct ToTokioAsyncWriteQuadWriter { + formatter: ToTokioAsyncWriteQuadWriterKind, +} + +#[cfg(feature = "async-tokio")] +enum ToTokioAsyncWriteQuadWriterKind { + NQuads(ToTokioAsyncWriteNQuadsWriter), + NTriples(ToTokioAsyncWriteNTriplesWriter), + RdfXml(ToTokioAsyncWriteRdfXmlWriter), + TriG(ToTokioAsyncWriteTriGWriter), + Turtle(ToTokioAsyncWriteTurtleWriter), +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteQuadWriter { + /// Writes a [`QuadRef`] + pub async fn write_quad<'a>(&mut self, quad: impl Into>) -> io::Result<()> { + match &mut self.formatter { + ToTokioAsyncWriteQuadWriterKind::NQuads(writer) => writer.write_quad(quad).await, + ToTokioAsyncWriteQuadWriterKind::NTriples(writer) => { + writer.write_triple(to_triple(quad)?).await + } + ToTokioAsyncWriteQuadWriterKind::RdfXml(writer) => { + writer.write_triple(to_triple(quad)?).await + } + ToTokioAsyncWriteQuadWriterKind::TriG(writer) => writer.write_quad(quad).await, + ToTokioAsyncWriteQuadWriterKind::Turtle(writer) => { + writer.write_triple(to_triple(quad)?).await + } + } + } + + /// Writes a [`TripleRef`] + pub async fn write_triple<'a>(&mut self, triple: impl Into>) -> io::Result<()> { + self.write_quad(triple.into().in_graph(GraphNameRef::DefaultGraph)) + .await + } + + /// Writes the last bytes of the file + /// + /// Note that this function does not flush the writer. You need to do that if you are using a [`BufWriter`](io::BufWriter). 
+    pub async fn finish(self) -> io::Result<W> {
+        Ok(match self.formatter {
+            ToTokioAsyncWriteQuadWriterKind::NQuads(writer) => writer.finish(),
+            ToTokioAsyncWriteQuadWriterKind::NTriples(writer) => writer.finish(),
+            ToTokioAsyncWriteQuadWriterKind::RdfXml(writer) => writer.finish().await?,
+            ToTokioAsyncWriteQuadWriterKind::TriG(writer) => writer.finish().await?,
+            ToTokioAsyncWriteQuadWriterKind::Turtle(writer) => writer.finish().await?,
+        })
+    }
+}
+
+fn to_triple<'a>(quad: impl Into<QuadRef<'a>>) -> io::Result<TripleRef<'a>> {
+    let quad = quad.into();
+    if quad.graph_name.is_default_graph() {
+        Ok(quad.into())
+    } else {
+        Err(io::Error::new(
+            io::ErrorKind::InvalidInput,
+            "Only quads in the default graph can be serialized to an RDF graph format",
+        ))
+    }
+}
diff --git a/ng-oxigraph/src/oxrdfxml/README.md b/ng-oxigraph/src/oxrdfxml/README.md
new file mode 100644
index 0000000..29ebb4c
--- /dev/null
+++ b/ng-oxigraph/src/oxrdfxml/README.md
@@ -0,0 +1,56 @@
+OxRDF/XML
+=========
+
+[![Latest Version](https://img.shields.io/crates/v/oxrdfxml.svg)](https://crates.io/crates/oxrdfxml)
+[![Released API docs](https://docs.rs/oxrdfxml/badge.svg)](https://docs.rs/oxrdfxml)
+[![Crates.io downloads](https://img.shields.io/crates/d/oxrdfxml)](https://crates.io/crates/oxrdfxml)
+[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions)
+[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community)
+
+OxRdfXml is a parser and serializer for [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/).
+
+The entry points of this library are the two [`RdfXmlParser`] and [`RdfXmlSerializer`] structs.
+
+Usage example counting the number of people in an RDF/XML file:
+
+```rust
+use oxrdf::{NamedNodeRef, vocab::rdf};
+use oxrdfxml::RdfXmlParser;
+
+fn main() {
+    let file = br#"<?xml version="1.0"?>
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:schema="http://schema.org/">
+  <rdf:Description rdf:about="http://example.com/foo">
+    <rdf:type rdf:resource="http://schema.org/Person" />
+    <schema:name>Foo</schema:name>
+  </rdf:Description>
+  <schema:Person rdf:about="http://example.com/bar" schema:name="Bar" />
+</rdf:RDF>"#;
+
+    let schema_person = NamedNodeRef::new("http://schema.org/Person").unwrap();
+    let mut count = 0;
+    for triple in RdfXmlParser::new().parse_read(file.as_ref()) {
+        let triple = triple.unwrap();
+        if triple.predicate == rdf::TYPE && triple.object == schema_person.into() {
+            count += 1;
+        }
+    }
+    assert_eq!(2, count);
+}
+```
+
+## License
+
+This project is licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or
+  `<http://www.apache.org/licenses/LICENSE-2.0>`)
+* MIT license ([LICENSE-MIT](../LICENSE-MIT) or
+  `<http://opensource.org/licenses/MIT>`)
+
+at your option.
+
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
diff --git a/ng-oxigraph/src/oxrdfxml/error.rs b/ng-oxigraph/src/oxrdfxml/error.rs
new file mode 100644
index 0000000..9a59a76
--- /dev/null
+++ b/ng-oxigraph/src/oxrdfxml/error.rs
@@ -0,0 +1,89 @@
+use oxilangtag::LanguageTagParseError;
+use oxiri::IriParseError;
+use std::io;
+use std::sync::Arc;
+
+/// Error returned during RDF/XML parsing.
+#[derive(Debug, thiserror::Error)]
+pub enum RdfXmlParseError {
+    /// I/O error during parsing (file not found...).
+    #[error(transparent)]
+    Io(#[from] io::Error),
+    /// An error in the file syntax.
+ #[error(transparent)] + Syntax(#[from] RdfXmlSyntaxError), +} + +impl From for io::Error { + #[inline] + fn from(error: RdfXmlParseError) -> Self { + match error { + RdfXmlParseError::Io(error) => error, + RdfXmlParseError::Syntax(error) => error.into(), + } + } +} + +impl From for RdfXmlParseError { + #[inline] + fn from(error: quick_xml::Error) -> Self { + match error { + quick_xml::Error::Io(error) => { + Self::Io(Arc::try_unwrap(error).unwrap_or_else(|e| io::Error::new(e.kind(), e))) + } + _ => Self::Syntax(RdfXmlSyntaxError(SyntaxErrorKind::Xml(error))), + } + } +} + +/// An error in the syntax of the parsed file. +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct RdfXmlSyntaxError(#[from] pub(crate) SyntaxErrorKind); + +#[derive(Debug, thiserror::Error)] +pub enum SyntaxErrorKind { + #[error(transparent)] + Xml(#[from] quick_xml::Error), + #[error("error while parsing IRI '{iri}': {error}")] + InvalidIri { + iri: String, + #[source] + error: IriParseError, + }, + #[error("error while parsing language tag '{tag}': {error}")] + InvalidLanguageTag { + tag: String, + #[source] + error: LanguageTagParseError, + }, + #[error("{0}")] + Msg(String), +} + +impl RdfXmlSyntaxError { + /// Builds an error from a printable error message. + #[inline] + pub(crate) fn msg(msg: impl Into) -> Self { + Self(SyntaxErrorKind::Msg(msg.into())) + } +} + +impl From for io::Error { + #[inline] + fn from(error: RdfXmlSyntaxError) -> Self { + match error.0 { + SyntaxErrorKind::Xml(error) => match error { + quick_xml::Error::Io(error) => { + Arc::try_unwrap(error).unwrap_or_else(|e| Self::new(e.kind(), e)) + } + quick_xml::Error::UnexpectedEof(error) => { + Self::new(io::ErrorKind::UnexpectedEof, error) + } + _ => Self::new(io::ErrorKind::InvalidData, error), + }, + SyntaxErrorKind::Msg(msg) => Self::new(io::ErrorKind::InvalidData, msg), + _ => Self::new(io::ErrorKind::InvalidData, error), + } + } +} diff --git a/ng-oxigraph/src/oxrdfxml/mod.rs b/ng-oxigraph/src/oxrdfxml/mod.rs new file mode 100644 index 0000000..bbd0f21 --- /dev/null +++ b/ng-oxigraph/src/oxrdfxml/mod.rs @@ -0,0 +1,8 @@ +mod error; +mod parser; +mod serializer; +mod utils; + +pub use error::{RdfXmlParseError, RdfXmlSyntaxError}; +pub use parser::{FromReadRdfXmlReader, RdfXmlParser}; +pub use serializer::{RdfXmlSerializer, ToWriteRdfXmlWriter}; diff --git a/ng-oxigraph/src/oxrdfxml/parser.rs b/ng-oxigraph/src/oxrdfxml/parser.rs new file mode 100644 index 0000000..6bdf76d --- /dev/null +++ b/ng-oxigraph/src/oxrdfxml/parser.rs @@ -0,0 +1,1237 @@ +use crate::oxrdf::vocab::rdf; +use crate::oxrdf::{BlankNode, Literal, NamedNode, Subject, Term, Triple}; +use crate::oxrdfxml::error::{RdfXmlParseError, RdfXmlSyntaxError, SyntaxErrorKind}; +use crate::oxrdfxml::utils::*; +use oxilangtag::LanguageTag; +use oxiri::{Iri, IriParseError}; +use quick_xml::escape::unescape_with; +use quick_xml::events::attributes::Attribute; +use quick_xml::events::*; +use quick_xml::name::{LocalName, QName, ResolveResult}; +use quick_xml::{Error, NsReader, Writer}; +use std::collections::{HashMap, HashSet}; +use std::io::{BufReader, Read}; +use std::str; +#[cfg(feature = "async-tokio")] +use tokio::io::{AsyncRead, BufReader as AsyncBufReader}; + +/// A [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) streaming parser. +/// +/// It reads the file in streaming. +/// It does not keep data in memory except a stack for handling nested XML tags, and a set of all +/// seen `rdf:ID`s to detect duplicate ids and fail according to the specification. 
+/// +/// Its performances are not optimized yet and hopefully could be significantly enhanced by reducing the +/// number of allocations and copies done by the parser. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; +/// use oxrdfxml::RdfXmlParser; +/// +/// let file = br#" +/// +/// +/// +/// Foo +/// +/// +/// "#; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// for triple in RdfXmlParser::new().parse_read(file.as_ref()) { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[derive(Default)] +#[must_use] +pub struct RdfXmlParser { + unchecked: bool, + base: Option>, +} + +impl RdfXmlParser { + /// Builds a new [`RdfXmlParser`]. + #[inline] + pub fn new() -> Self { + Self::default() + } + + /// Assumes the file is valid to make parsing faster. + /// + /// It will skip some validations. + /// + /// Note that if the file is actually not valid, then broken RDF might be emitted by the parser. + #[inline] + pub fn unchecked(mut self) -> Self { + self.unchecked = true; + self + } + + #[inline] + pub fn with_base_iri(mut self, base_iri: impl Into) -> Result { + self.base = Some(Iri::parse(base_iri.into())?); + Ok(self) + } + + /// Parses a RDF/XML file from a [`Read`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; + /// use oxrdfxml::RdfXmlParser; + /// + /// let file = br#" + /// + /// + /// + /// Foo + /// + /// + /// "#; + /// + /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; + /// let mut count = 0; + /// for triple in RdfXmlParser::new().parse_read(file.as_ref()) { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn parse_read(self, read: R) -> FromReadRdfXmlReader { + FromReadRdfXmlReader { + results: Vec::new(), + reader: self.parse(BufReader::new(read)), + reader_buffer: Vec::default(), + } + } + + /// Parses a RDF/XML file from a [`AsyncRead`] implementation. 
+ /// + /// Count the number of people: + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; + /// use oxrdfxml::RdfXmlParser; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxrdfxml::RdfXmlParseError> { + /// let file = br#" + /// + /// + /// + /// Foo + /// + /// + /// "#; + /// + /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); + /// let mut count = 0; + /// let mut parser = RdfXmlParser::new().parse_tokio_async_read(file.as_ref()); + /// while let Some(triple) = parser.next().await { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn parse_tokio_async_read( + self, + read: R, + ) -> FromTokioAsyncReadRdfXmlReader { + FromTokioAsyncReadRdfXmlReader { + results: Vec::new(), + reader: self.parse(AsyncBufReader::new(read)), + reader_buffer: Vec::default(), + } + } + + fn parse(&self, reader: T) -> RdfXmlReader { + let mut reader = NsReader::from_reader(reader); + reader.expand_empty_elements(true); + RdfXmlReader { + reader, + state: vec![RdfXmlState::Doc { + base_iri: self.base.clone(), + }], + custom_entities: HashMap::default(), + in_literal_depth: 0, + known_rdf_id: HashSet::default(), + is_end: false, + unchecked: self.unchecked, + } + } +} + +/// Parses a RDF/XML file from a [`Read`] implementation. Can be built using [`RdfXmlParser::parse_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; +/// use oxrdfxml::RdfXmlParser; +/// +/// let file = br#" +/// +/// +/// +/// Foo +/// +/// +/// "#; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// for triple in RdfXmlParser::new().parse_read(file.as_ref()) { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct FromReadRdfXmlReader { + results: Vec, + reader: RdfXmlReader>, + reader_buffer: Vec, +} + +impl Iterator for FromReadRdfXmlReader { + type Item = Result; + + fn next(&mut self) -> Option { + loop { + if let Some(triple) = self.results.pop() { + return Some(Ok(triple)); + } else if self.reader.is_end { + return None; + } + if let Err(e) = self.parse_step() { + return Some(Err(e)); + } + } + } +} + +impl FromReadRdfXmlReader { + /// The current byte position in the input data. + pub fn buffer_position(&self) -> usize { + self.reader.reader.buffer_position() + } + + fn parse_step(&mut self) -> Result<(), RdfXmlParseError> { + self.reader_buffer.clear(); + let event = self + .reader + .reader + .read_event_into(&mut self.reader_buffer)?; + self.reader.parse_event(event, &mut self.results) + } +} + +/// Parses a RDF/XML file from a [`AsyncRead`] implementation. Can be built using [`RdfXmlParser::parse_tokio_async_read`]. 
+/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; +/// use oxrdfxml::RdfXmlParser; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), oxrdfxml::RdfXmlParseError> { +/// let file = br#" +/// +/// +/// +/// Foo +/// +/// +/// "#; +/// +/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); +/// let mut count = 0; +/// let mut parser = RdfXmlParser::new().parse_tokio_async_read(file.as_ref()); +/// while let Some(triple) = parser.next().await { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct FromTokioAsyncReadRdfXmlReader { + results: Vec, + reader: RdfXmlReader>, + reader_buffer: Vec, +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadRdfXmlReader { + /// Reads the next triple or returns `None` if the file is finished. + pub async fn next(&mut self) -> Option> { + loop { + if let Some(triple) = self.results.pop() { + return Some(Ok(triple)); + } else if self.reader.is_end { + return None; + } + if let Err(e) = self.parse_step().await { + return Some(Err(e)); + } + } + } + + /// The current byte position in the input data. + pub fn buffer_position(&self) -> usize { + self.reader.reader.buffer_position() + } + + async fn parse_step(&mut self) -> Result<(), RdfXmlParseError> { + self.reader_buffer.clear(); + let event = self + .reader + .reader + .read_event_into_async(&mut self.reader_buffer) + .await?; + self.reader.parse_event(event, &mut self.results) + } +} + +const RDF_ABOUT: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#about"; +const RDF_ABOUT_EACH: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#aboutEach"; +const RDF_ABOUT_EACH_PREFIX: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#aboutEachPrefix"; +const RDF_BAG_ID: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#bagID"; +const RDF_DATATYPE: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#datatype"; +const RDF_DESCRIPTION: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#Description"; +const RDF_ID: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#ID"; +const RDF_LI: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#li"; +const RDF_NODE_ID: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#nodeID"; +const RDF_PARSE_TYPE: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#parseType"; +const RDF_RDF: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#RDF"; +const RDF_RESOURCE: &str = "http://www.w3.org/1999/02/22-rdf-syntax-ns#resource"; + +const RESERVED_RDF_ELEMENTS: [&str; 11] = [ + RDF_ABOUT, + RDF_ABOUT_EACH, + RDF_ABOUT_EACH_PREFIX, + RDF_BAG_ID, + RDF_DATATYPE, + RDF_ID, + RDF_LI, + RDF_NODE_ID, + RDF_PARSE_TYPE, + RDF_RDF, + RDF_RESOURCE, +]; +const RESERVED_RDF_ATTRIBUTES: [&str; 5] = [ + RDF_ABOUT_EACH, + RDF_ABOUT_EACH_PREFIX, + RDF_LI, + RDF_RDF, + RDF_RESOURCE, +]; + +#[derive(Clone, Debug)] +enum NodeOrText { + Node(Subject), + Text(String), +} + +enum RdfXmlState { + Doc { + base_iri: Option>, + }, + Rdf { + base_iri: Option>, + language: Option, + }, + NodeElt { + base_iri: Option>, + language: Option, + subject: Subject, + li_counter: u64, + }, + PropertyElt { + // Resource, Literal or Empty property element + iri: NamedNode, + base_iri: Option>, + language: Option, + subject: Subject, + object: Option, + id_attr: Option, + datatype_attr: Option, + }, + 
ParseTypeCollectionPropertyElt { + iri: NamedNode, + base_iri: Option>, + language: Option, + subject: Subject, + objects: Vec, + id_attr: Option, + }, + ParseTypeLiteralPropertyElt { + iri: NamedNode, + base_iri: Option>, + language: Option, + subject: Subject, + writer: Writer>, + id_attr: Option, + emit: bool, // false for parseTypeOtherPropertyElt support + }, +} + +impl RdfXmlState { + fn base_iri(&self) -> Option<&Iri> { + match self { + Self::Doc { base_iri, .. } + | Self::Rdf { base_iri, .. } + | Self::NodeElt { base_iri, .. } + | Self::PropertyElt { base_iri, .. } + | Self::ParseTypeCollectionPropertyElt { base_iri, .. } + | Self::ParseTypeLiteralPropertyElt { base_iri, .. } => base_iri.as_ref(), + } + } + + fn language(&self) -> Option<&String> { + match self { + Self::Doc { .. } => None, + Self::Rdf { language, .. } + | Self::NodeElt { language, .. } + | Self::PropertyElt { language, .. } + | Self::ParseTypeCollectionPropertyElt { language, .. } + | Self::ParseTypeLiteralPropertyElt { language, .. } => language.as_ref(), + } + } +} + +struct RdfXmlReader { + reader: NsReader, + state: Vec, + custom_entities: HashMap, + in_literal_depth: usize, + known_rdf_id: HashSet, + is_end: bool, + unchecked: bool, +} + +impl RdfXmlReader { + fn parse_event( + &mut self, + event: Event<'_>, + results: &mut Vec, + ) -> Result<(), RdfXmlParseError> { + match event { + Event::Start(event) => self.parse_start_event(&event, results), + Event::End(event) => self.parse_end_event(&event, results), + Event::Empty(_) => Err(RdfXmlSyntaxError::msg( + "The expand_empty_elements option must be enabled", + ) + .into()), + Event::Text(event) => self.parse_text_event(&event), + Event::CData(event) => self.parse_text_event(&event.escape()?), + Event::Comment(_) | Event::PI(_) => Ok(()), + Event::Decl(decl) => { + if let Some(encoding) = decl.encoding() { + if !is_utf8(&encoding?) { + return Err(RdfXmlSyntaxError::msg( + "Only UTF-8 is supported by the RDF/XML parser", + ) + .into()); + } + } + Ok(()) + } + Event::DocType(dt) => self.parse_doctype(&dt), + Event::Eof => { + self.is_end = true; + Ok(()) + } + } + } + + fn parse_doctype(&mut self, dt: &BytesText<'_>) -> Result<(), RdfXmlParseError> { + // we extract entities + for input in self + .reader + .decoder() + .decode(dt.as_ref())? + .split('<') + .skip(1) + { + if let Some(input) = input.strip_prefix("!ENTITY") { + let input = input.trim_start().strip_prefix('%').unwrap_or(input); + let (entity_name, input) = input.trim_start().split_once(|c: char| c.is_ascii_whitespace()).ok_or_else(|| { + RdfXmlSyntaxError::msg( + "').ok_or_else(|| { + RdfXmlSyntaxError::msg("") + })?; + + // Resolves custom entities within the current entity definition. + let entity_value = + unescape_with(entity_value, |e| self.resolve_entity(e)).map_err(Error::from)?; + self.custom_entities + .insert(entity_name.to_owned(), entity_value.to_string()); + } + } + Ok(()) + } + + fn parse_start_event( + &mut self, + event: &BytesStart<'_>, + results: &mut Vec, + ) -> Result<(), RdfXmlParseError> { + #[derive(PartialEq, Eq)] + enum RdfXmlParseType { + Default, + Collection, + Literal, + Resource, + Other, + } + + #[derive(PartialEq, Eq)] + enum RdfXmlNextProduction { + Rdf, + NodeElt, + PropertyElt { subject: Subject }, + } + + // Literal case + if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. }) = self.state.last_mut() + { + let mut clean_event = BytesStart::new( + self.reader + .decoder() + .decode(event.name().as_ref())? 
+ .to_string(), + ); + for attr in event.attributes() { + clean_event.push_attribute(attr.map_err(Error::InvalidAttr)?); + } + writer.write_event(Event::Start(clean_event))?; + self.in_literal_depth += 1; + return Ok(()); + } + + let tag_name = self.resolve_tag_name(event.name())?; + + // We read attributes + let (mut language, mut base_iri) = if let Some(current_state) = self.state.last() { + ( + current_state.language().cloned(), + current_state.base_iri().cloned(), + ) + } else { + (None, None) + }; + + let mut id_attr = None; + let mut node_id_attr = None; + let mut about_attr = None; + let mut property_attrs = Vec::default(); + let mut resource_attr = None; + let mut datatype_attr = None; + let mut parse_type = RdfXmlParseType::Default; + let mut type_attr = None; + + for attribute in event.attributes() { + let attribute = attribute.map_err(Error::InvalidAttr)?; + if attribute.key.as_ref().starts_with(b"xml") { + if attribute.key.as_ref() == b"xml:lang" { + let tag = self.convert_attribute(&attribute)?.to_ascii_lowercase(); + language = Some(if self.unchecked { + tag + } else { + LanguageTag::parse(tag.to_ascii_lowercase()) + .map_err(|error| { + RdfXmlSyntaxError(SyntaxErrorKind::InvalidLanguageTag { + tag, + error, + }) + })? + .into_inner() + }); + } else if attribute.key.as_ref() == b"xml:base" { + let iri = self.convert_attribute(&attribute)?; + base_iri = Some(if self.unchecked { + Iri::parse_unchecked(iri.clone()) + } else { + Iri::parse(iri.clone()).map_err(|error| { + RdfXmlSyntaxError(SyntaxErrorKind::InvalidIri { iri, error }) + })? + }) + } else { + // We ignore other xml attributes + } + } else { + let attribute_url = self.resolve_attribute_name(attribute.key)?; + if *attribute_url == *RDF_ID { + let mut id = self.convert_attribute(&attribute)?; + if !is_nc_name(&id) { + return Err(RdfXmlSyntaxError::msg(format!( + "{id} is not a valid rdf:ID value" + )) + .into()); + } + id.insert(0, '#'); + id_attr = Some(id); + } else if *attribute_url == *RDF_BAG_ID { + let bag_id = self.convert_attribute(&attribute)?; + if !is_nc_name(&bag_id) { + return Err(RdfXmlSyntaxError::msg(format!( + "{bag_id} is not a valid rdf:bagID value" + )) + .into()); + } + } else if *attribute_url == *RDF_NODE_ID { + let id = self.convert_attribute(&attribute)?; + if !is_nc_name(&id) { + return Err(RdfXmlSyntaxError::msg(format!( + "{id} is not a valid rdf:nodeID value" + )) + .into()); + } + node_id_attr = Some(BlankNode::new_unchecked(id)); + } else if *attribute_url == *RDF_ABOUT { + about_attr = Some(attribute); + } else if *attribute_url == *RDF_RESOURCE { + resource_attr = Some(attribute); + } else if *attribute_url == *RDF_DATATYPE { + datatype_attr = Some(attribute); + } else if *attribute_url == *RDF_PARSE_TYPE { + parse_type = match attribute.value.as_ref() { + b"Collection" => RdfXmlParseType::Collection, + b"Literal" => RdfXmlParseType::Literal, + b"Resource" => RdfXmlParseType::Resource, + _ => RdfXmlParseType::Other, + }; + } else if attribute_url == rdf::TYPE.as_str() { + type_attr = Some(attribute); + } else if RESERVED_RDF_ATTRIBUTES.contains(&&*attribute_url) { + return Err(RdfXmlSyntaxError::msg(format!( + "{attribute_url} is not a valid attribute" + )) + .into()); + } else { + property_attrs.push(( + self.parse_iri(attribute_url)?, + self.convert_attribute(&attribute)?, + )); + } + } + } + + // Parsing with the base URI + let id_attr = match id_attr { + Some(iri) => { + let iri = self.resolve_iri(&base_iri, iri)?; + if self.known_rdf_id.contains(iri.as_str()) { + return 
Err(RdfXmlSyntaxError::msg(format!( + "{iri} has already been used as rdf:ID value" + )) + .into()); + } + self.known_rdf_id.insert(iri.as_str().into()); + Some(iri) + } + None => None, + }; + let about_attr = match about_attr { + Some(attr) => Some(self.convert_iri_attribute(&base_iri, &attr)?), + None => None, + }; + let resource_attr = match resource_attr { + Some(attr) => Some(self.convert_iri_attribute(&base_iri, &attr)?), + None => None, + }; + let datatype_attr = match datatype_attr { + Some(attr) => Some(self.convert_iri_attribute(&base_iri, &attr)?), + None => None, + }; + let type_attr = match type_attr { + Some(attr) => Some(self.convert_iri_attribute(&base_iri, &attr)?), + None => None, + }; + + let expected_production = match self.state.last() { + Some(RdfXmlState::Doc { .. }) => RdfXmlNextProduction::Rdf, + Some( + RdfXmlState::Rdf { .. } + | RdfXmlState::PropertyElt { .. } + | RdfXmlState::ParseTypeCollectionPropertyElt { .. }, + ) => RdfXmlNextProduction::NodeElt, + Some(RdfXmlState::NodeElt { subject, .. }) => RdfXmlNextProduction::PropertyElt { + subject: subject.clone(), + }, + Some(RdfXmlState::ParseTypeLiteralPropertyElt { .. }) => { + return Err( + RdfXmlSyntaxError::msg("ParseTypeLiteralPropertyElt production children should never be considered as a RDF/XML content").into() + ); + } + None => { + return Err(RdfXmlSyntaxError::msg( + "No state in the stack: the XML is not balanced", + ) + .into()); + } + }; + + let new_state = match expected_production { + RdfXmlNextProduction::Rdf => { + if *tag_name == *RDF_RDF { + RdfXmlState::Rdf { base_iri, language } + } else if RESERVED_RDF_ELEMENTS.contains(&&*tag_name) { + return Err(RdfXmlSyntaxError::msg(format!( + "Invalid node element tag name: {tag_name}" + )) + .into()); + } else { + Self::build_node_elt( + self.parse_iri(tag_name)?, + base_iri, + language, + id_attr, + node_id_attr, + about_attr, + type_attr, + property_attrs, + results, + )? + } + } + RdfXmlNextProduction::NodeElt => { + if RESERVED_RDF_ELEMENTS.contains(&&*tag_name) { + return Err(RdfXmlSyntaxError::msg(format!( + "Invalid property element tag name: {tag_name}" + )) + .into()); + } + Self::build_node_elt( + self.parse_iri(tag_name)?, + base_iri, + language, + id_attr, + node_id_attr, + about_attr, + type_attr, + property_attrs, + results, + )? + } + RdfXmlNextProduction::PropertyElt { subject } => { + let iri = if *tag_name == *RDF_LI { + let Some(RdfXmlState::NodeElt { li_counter, .. }) = self.state.last_mut() + else { + return Err(RdfXmlSyntaxError::msg(format!( + "Invalid property element tag name: {tag_name}" + )) + .into()); + }; + *li_counter += 1; + NamedNode::new_unchecked(format!( + "http://www.w3.org/1999/02/22-rdf-syntax-ns#_{li_counter}" + )) + } else if RESERVED_RDF_ELEMENTS.contains(&&*tag_name) + || *tag_name == *RDF_DESCRIPTION + { + return Err(RdfXmlSyntaxError::msg(format!( + "Invalid property element tag name: {tag_name}" + )) + .into()); + } else { + self.parse_iri(tag_name)? 
+ }; + match parse_type { + RdfXmlParseType::Default => { + if resource_attr.is_some() + || node_id_attr.is_some() + || !property_attrs.is_empty() + { + let object = match (resource_attr, node_id_attr) + { + (Some(resource_attr), None) => Subject::from(resource_attr), + (None, Some(node_id_attr)) => node_id_attr.into(), + (None, None) => BlankNode::default().into(), + (Some(_), Some(_)) => return Err(RdfXmlSyntaxError::msg("Not both rdf:resource and rdf:nodeID could be set at the same time").into()) + }; + Self::emit_property_attrs(&object, property_attrs, &language, results); + if let Some(type_attr) = type_attr { + results.push(Triple::new(object.clone(), rdf::TYPE, type_attr)); + } + RdfXmlState::PropertyElt { + iri, + base_iri, + language, + subject, + object: Some(NodeOrText::Node(object)), + id_attr, + datatype_attr, + } + } else { + RdfXmlState::PropertyElt { + iri, + base_iri, + language, + subject, + object: None, + id_attr, + datatype_attr, + } + } + } + RdfXmlParseType::Literal => RdfXmlState::ParseTypeLiteralPropertyElt { + iri, + base_iri, + language, + subject, + writer: Writer::new(Vec::default()), + id_attr, + emit: true, + }, + RdfXmlParseType::Resource => Self::build_parse_type_resource_property_elt( + iri, base_iri, language, subject, id_attr, results, + ), + RdfXmlParseType::Collection => RdfXmlState::ParseTypeCollectionPropertyElt { + iri, + base_iri, + language, + subject, + objects: Vec::default(), + id_attr, + }, + RdfXmlParseType::Other => RdfXmlState::ParseTypeLiteralPropertyElt { + iri, + base_iri, + language, + subject, + writer: Writer::new(Vec::default()), + id_attr, + emit: false, + }, + } + } + }; + self.state.push(new_state); + Ok(()) + } + + fn parse_end_event( + &mut self, + event: &BytesEnd<'_>, + results: &mut Vec, + ) -> Result<(), RdfXmlParseError> { + // Literal case + if self.in_literal_depth > 0 { + if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. }) = + self.state.last_mut() + { + writer.write_event(Event::End(BytesEnd::new( + self.reader.decoder().decode(event.name().as_ref())?, + )))?; + self.in_literal_depth -= 1; + return Ok(()); + } + } + + if let Some(current_state) = self.state.pop() { + self.end_state(current_state, results)?; + } + Ok(()) + } + + fn parse_text_event(&mut self, event: &BytesText<'_>) -> Result<(), RdfXmlParseError> { + let text = event.unescape_with(|e| self.resolve_entity(e))?.to_string(); + match self.state.last_mut() { + Some(RdfXmlState::PropertyElt { object, .. }) => { + if !event.iter().copied().all(is_whitespace) { + *object = Some(NodeOrText::Text(text)); + } + Ok(()) + } + Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. 
}) => { + writer.write_event(Event::Text(BytesText::new(&text)))?; + Ok(()) + } + _ => { + if event.iter().copied().all(is_whitespace) { + Ok(()) + } else { + Err(RdfXmlSyntaxError::msg(format!("Unexpected text event: '{text}'")).into()) + } + } + } + } + + fn resolve_tag_name(&self, qname: QName<'_>) -> Result { + let (namespace, local_name) = self.reader.resolve_element(qname); + self.resolve_ns_name(namespace, local_name) + } + + fn resolve_attribute_name(&self, qname: QName<'_>) -> Result { + let (namespace, local_name) = self.reader.resolve_attribute(qname); + self.resolve_ns_name(namespace, local_name) + } + + fn resolve_ns_name( + &self, + namespace: ResolveResult<'_>, + local_name: LocalName<'_>, + ) -> Result { + match namespace { + ResolveResult::Bound(ns) => { + let mut value = Vec::with_capacity(ns.as_ref().len() + local_name.as_ref().len()); + value.extend_from_slice(ns.as_ref()); + value.extend_from_slice(local_name.as_ref()); + Ok(unescape_with(&self.reader.decoder().decode(&value)?, |e| { + self.resolve_entity(e) + }) + .map_err(Error::from)? + .to_string()) + } + ResolveResult::Unbound => { + Err(RdfXmlSyntaxError::msg("XML namespaces are required in RDF/XML").into()) + } + ResolveResult::Unknown(v) => Err(RdfXmlSyntaxError::msg(format!( + "Unknown prefix {}:", + self.reader.decoder().decode(&v)? + )) + .into()), + } + } + + #[allow(clippy::too_many_arguments)] + fn build_node_elt( + iri: NamedNode, + base_iri: Option>, + language: Option, + id_attr: Option, + node_id_attr: Option, + about_attr: Option, + type_attr: Option, + property_attrs: Vec<(NamedNode, String)>, + results: &mut Vec, + ) -> Result { + let subject = match (id_attr, node_id_attr, about_attr) { + (Some(id_attr), None, None) => Subject::from(id_attr), + (None, Some(node_id_attr), None) => node_id_attr.into(), + (None, None, Some(about_attr)) => about_attr.into(), + (None, None, None) => BlankNode::default().into(), + (Some(_), Some(_), _) => { + return Err(RdfXmlSyntaxError::msg( + "Not both rdf:ID and rdf:nodeID could be set at the same time", + )) + } + (_, Some(_), Some(_)) => { + return Err(RdfXmlSyntaxError::msg( + "Not both rdf:nodeID and rdf:resource could be set at the same time", + )) + } + (Some(_), _, Some(_)) => { + return Err(RdfXmlSyntaxError::msg( + "Not both rdf:ID and rdf:resource could be set at the same time", + )) + } + }; + + Self::emit_property_attrs(&subject, property_attrs, &language, results); + + if let Some(type_attr) = type_attr { + results.push(Triple::new(subject.clone(), rdf::TYPE, type_attr)); + } + + if iri != *RDF_DESCRIPTION { + results.push(Triple::new(subject.clone(), rdf::TYPE, iri)); + } + Ok(RdfXmlState::NodeElt { + base_iri, + language, + subject, + li_counter: 0, + }) + } + + fn build_parse_type_resource_property_elt( + iri: NamedNode, + base_iri: Option>, + language: Option, + subject: Subject, + id_attr: Option, + results: &mut Vec, + ) -> RdfXmlState { + let object = BlankNode::default(); + let triple = Triple::new(subject, iri, object.clone()); + if let Some(id_attr) = id_attr { + Self::reify(triple.clone(), id_attr, results); + } + results.push(triple); + RdfXmlState::NodeElt { + base_iri, + language, + subject: object.into(), + li_counter: 0, + } + } + + fn end_state( + &mut self, + state: RdfXmlState, + results: &mut Vec, + ) -> Result<(), RdfXmlSyntaxError> { + match state { + RdfXmlState::PropertyElt { + iri, + language, + subject, + id_attr, + datatype_attr, + object, + .. 
+ } => { + let object = match object { + Some(NodeOrText::Node(node)) => Term::from(node), + Some(NodeOrText::Text(text)) => { + Self::new_literal(text, language, datatype_attr).into() + } + None => Self::new_literal(String::new(), language, datatype_attr).into(), + }; + let triple = Triple::new(subject, iri, object); + if let Some(id_attr) = id_attr { + Self::reify(triple.clone(), id_attr, results); + } + results.push(triple); + } + RdfXmlState::ParseTypeCollectionPropertyElt { + iri, + subject, + id_attr, + objects, + .. + } => { + let mut current_node = Subject::from(rdf::NIL); + for object in objects.into_iter().rev() { + let subject = Subject::from(BlankNode::default()); + results.push(Triple::new(subject.clone(), rdf::FIRST, object)); + results.push(Triple::new(subject.clone(), rdf::REST, current_node)); + current_node = subject; + } + let triple = Triple::new(subject, iri, current_node); + if let Some(id_attr) = id_attr { + Self::reify(triple.clone(), id_attr, results); + } + results.push(triple); + } + RdfXmlState::ParseTypeLiteralPropertyElt { + iri, + subject, + id_attr, + writer, + emit, + .. + } => { + if emit { + let object = writer.into_inner(); + if object.is_empty() { + return Err(RdfXmlSyntaxError::msg(format!( + "No value found for rdf:XMLLiteral value of property {iri}" + ))); + } + let triple = Triple::new( + subject, + iri, + Literal::new_typed_literal( + str::from_utf8(&object).map_err(|_| { + RdfXmlSyntaxError::msg( + "The XML literal is not in valid UTF-8".to_owned(), + ) + })?, + rdf::XML_LITERAL, + ), + ); + if let Some(id_attr) = id_attr { + Self::reify(triple.clone(), id_attr, results); + } + results.push(triple); + } + } + RdfXmlState::NodeElt { subject, .. } => match self.state.last_mut() { + Some(RdfXmlState::PropertyElt { object, .. }) => { + *object = Some(NodeOrText::Node(subject)) + } + Some(RdfXmlState::ParseTypeCollectionPropertyElt { objects, .. }) => { + objects.push(subject) + } + _ => (), + }, + _ => (), + } + Ok(()) + } + + fn new_literal( + value: String, + language: Option, + datatype: Option, + ) -> Literal { + if let Some(datatype) = datatype { + Literal::new_typed_literal(value, datatype) + } else if let Some(language) = language { + Literal::new_language_tagged_literal_unchecked(value, language) + } else { + Literal::new_simple_literal(value) + } + } + + fn reify(triple: Triple, statement_id: NamedNode, results: &mut Vec) { + results.push(Triple::new(statement_id.clone(), rdf::TYPE, rdf::STATEMENT)); + results.push(Triple::new( + statement_id.clone(), + rdf::SUBJECT, + triple.subject, + )); + results.push(Triple::new( + statement_id.clone(), + rdf::PREDICATE, + triple.predicate, + )); + results.push(Triple::new(statement_id, rdf::OBJECT, triple.object)); + } + + fn emit_property_attrs( + subject: &Subject, + literal_attributes: Vec<(NamedNode, String)>, + language: &Option, + results: &mut Vec, + ) { + for (literal_predicate, literal_value) in literal_attributes { + results.push(Triple::new( + subject.clone(), + literal_predicate, + if let Some(language) = language.clone() { + Literal::new_language_tagged_literal_unchecked(literal_value, language) + } else { + Literal::new_simple_literal(literal_value) + }, + )); + } + } + + fn convert_attribute(&self, attribute: &Attribute<'_>) -> Result { + Ok(attribute + .decode_and_unescape_value_with(&self.reader, |e| self.resolve_entity(e))? 
+        .into_owned())
+    }
+
+    fn convert_iri_attribute(
+        &self,
+        base_iri: &Option<Iri<String>>,
+        attribute: &Attribute<'_>,
+    ) -> Result<NamedNode, RdfXmlParseError> {
+        Ok(self.resolve_iri(base_iri, self.convert_attribute(attribute)?)?)
+    }
+
+    fn resolve_iri(
+        &self,
+        base_iri: &Option<Iri<String>>,
+        relative_iri: String,
+    ) -> Result<NamedNode, RdfXmlSyntaxError> {
+        if let Some(base_iri) = base_iri {
+            Ok(NamedNode::new_unchecked(
+                if self.unchecked {
+                    base_iri.resolve_unchecked(&relative_iri)
+                } else {
+                    base_iri.resolve(&relative_iri).map_err(|error| {
+                        RdfXmlSyntaxError(SyntaxErrorKind::InvalidIri {
+                            iri: relative_iri,
+                            error,
+                        })
+                    })?
+                }
+                .into_inner(),
+            ))
+        } else {
+            self.parse_iri(relative_iri)
+        }
+    }
+
+    fn parse_iri(&self, relative_iri: String) -> Result<NamedNode, RdfXmlSyntaxError> {
+        Ok(NamedNode::new_unchecked(if self.unchecked {
+            relative_iri
+        } else {
+            Iri::parse(relative_iri.clone())
+                .map_err(|error| {
+                    RdfXmlSyntaxError(SyntaxErrorKind::InvalidIri {
+                        iri: relative_iri,
+                        error,
+                    })
+                })?
+                .into_inner()
+        }))
+    }
+
+    fn resolve_entity(&self, e: &str) -> Option<&str> {
+        self.custom_entities.get(e).map(String::as_str)
+    }
+}
+
+fn is_nc_name(name: &str) -> bool {
+    // Name - (Char* ':' Char*)
+    is_name(name) && name.chars().all(|c| c != ':')
+}
+
+fn is_name(name: &str) -> bool {
+    // NameStartChar (NameChar)*
+    let mut c = name.chars();
+    if !c.next().map_or(false, is_name_start_char) {
+        return false;
+    }
+    c.all(is_name_char)
+}
+
+fn is_whitespace(c: u8) -> bool {
+    matches!(c, b' ' | b'\t' | b'\n' | b'\r')
+}
+
+fn is_utf8(encoding: &[u8]) -> bool {
+    matches!(
+        encoding.to_ascii_lowercase().as_slice(),
+        b"unicode-1-1-utf-8"
+            | b"unicode11utf8"
+            | b"unicode20utf8"
+            | b"utf-8"
+            | b"utf8"
+            | b"x-unicode20utf8"
+    )
+}
diff --git a/ng-oxigraph/src/oxrdfxml/serializer.rs b/ng-oxigraph/src/oxrdfxml/serializer.rs
new file mode 100644
index 0000000..f23e4f3
--- /dev/null
+++ b/ng-oxigraph/src/oxrdfxml/serializer.rs
@@ -0,0 +1,461 @@
+use crate::oxrdf::vocab::rdf;
+use crate::oxrdf::{NamedNodeRef, Subject, SubjectRef, TermRef, TripleRef};
+use crate::oxrdfxml::utils::*;
+use oxiri::{Iri, IriParseError};
+use quick_xml::events::{BytesDecl, BytesEnd, BytesStart, BytesText, Event};
+use quick_xml::Writer;
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::io;
+use std::io::Write;
+use std::sync::Arc;
+#[cfg(feature = "async-tokio")]
+use tokio::io::AsyncWrite;
+
+/// An [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) serializer.
+///
+/// ```
+/// use oxrdf::{LiteralRef, NamedNodeRef, TripleRef};
+/// use oxrdfxml::RdfXmlSerializer;
+///
+/// let mut writer = RdfXmlSerializer::new().with_prefix("schema", "http://schema.org/")?.serialize_to_write(Vec::new());
+/// writer.write_triple(TripleRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+/// ))?;
+/// writer.write_triple(TripleRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://schema.org/name")?,
+///     LiteralRef::new_language_tagged_literal_unchecked("Foo Bar", "en"),
+/// ))?;
+/// assert_eq!(
+///     b"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<rdf:RDF xmlns:schema=\"http://schema.org/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\n\t<schema:Person rdf:about=\"http://example.com#me\">\n\t\t<schema:name xml:lang=\"en\">Foo Bar</schema:name>\n\t</schema:Person>\n</rdf:RDF>",
+///     writer.finish()?.as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[derive(Default)]
+#[must_use]
+pub struct RdfXmlSerializer {
+    prefixes: BTreeMap<String, String>,
+}
+
+impl RdfXmlSerializer {
+    /// Builds a new [`RdfXmlSerializer`].
+    #[inline]
+    pub fn new() -> Self {
+        Self {
+            prefixes: BTreeMap::new(),
+        }
+    }
+
+    #[inline]
+    pub fn with_prefix(
+        mut self,
+        prefix_name: impl Into<String>,
+        prefix_iri: impl Into<String>,
+    ) -> Result<Self, IriParseError> {
+        self.prefixes.insert(
+            Iri::parse(prefix_iri.into())?.into_inner(),
+            prefix_name.into(),
+        );
+        Ok(self)
+    }
+
+    /// Writes an RDF/XML file to a [`Write`] implementation.
+    ///
+    /// This writer does unbuffered writes.
+    ///
+    /// ```
+    /// use oxrdf::{LiteralRef, NamedNodeRef, TripleRef};
+    /// use oxrdfxml::RdfXmlSerializer;
+    ///
+    /// let mut writer = RdfXmlSerializer::new().with_prefix("schema", "http://schema.org/")?.serialize_to_write(Vec::new());
+    /// writer.write_triple(TripleRef::new(
+    ///     NamedNodeRef::new("http://example.com#me")?,
+    ///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+    ///     NamedNodeRef::new("http://schema.org/Person")?,
+    /// ))?;
+    /// writer.write_triple(TripleRef::new(
+    ///     NamedNodeRef::new("http://example.com#me")?,
+    ///     NamedNodeRef::new("http://schema.org/name")?,
+    ///     LiteralRef::new_language_tagged_literal_unchecked("Foo Bar", "en"),
+    /// ))?;
+    /// assert_eq!(
+    ///     b"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<rdf:RDF xmlns:schema=\"http://schema.org/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\n\t<schema:Person rdf:about=\"http://example.com#me\">\n\t\t<schema:name xml:lang=\"en\">Foo Bar</schema:name>\n\t</schema:Person>\n</rdf:RDF>",
+    ///     writer.finish()?.as_slice()
+    /// );
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    #[allow(clippy::unused_self)]
+    pub fn serialize_to_write<W: Write>(self, write: W) -> ToWriteRdfXmlWriter<W> {
+        ToWriteRdfXmlWriter {
+            writer: Writer::new_with_indent(write, b'\t', 1),
+            inner: self.inner_writer(),
+        }
+    }
+
+    /// Writes an RDF/XML file to an [`AsyncWrite`] implementation.
+    ///
+    /// This writer does unbuffered writes.
+    ///
+    /// ```
+    /// use oxrdf::{NamedNodeRef, TripleRef, LiteralRef};
+    /// use oxrdfxml::RdfXmlSerializer;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    /// let mut writer = RdfXmlSerializer::new().with_prefix("schema", "http://schema.org/")?.serialize_to_tokio_async_write(Vec::new());
+    /// writer.write_triple(TripleRef::new(
+    ///     NamedNodeRef::new("http://example.com#me")?,
+    ///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+    ///     NamedNodeRef::new("http://schema.org/Person")?,
+    /// )).await?;
+    /// writer.write_triple(TripleRef::new(
+    ///     NamedNodeRef::new("http://example.com#me")?,
+    ///     NamedNodeRef::new("http://schema.org/name")?,
+    ///     LiteralRef::new_language_tagged_literal_unchecked("Foo Bar", "en"),
+    /// )).await?;
+    /// assert_eq!(
+    ///     b"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<rdf:RDF xmlns:schema=\"http://schema.org/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\n\t<schema:Person rdf:about=\"http://example.com#me\">\n\t\t<schema:name xml:lang=\"en\">Foo Bar</schema:name>\n\t</schema:Person>\n</rdf:RDF>",
+    ///     writer.finish().await?.as_slice()
+    /// );
+    /// # Ok(())
+    /// # }
+    /// ```
+    #[allow(clippy::unused_self)]
+    #[cfg(feature = "async-tokio")]
+    pub fn serialize_to_tokio_async_write<W: AsyncWrite + Unpin>(
+        self,
+        write: W,
+    ) -> ToTokioAsyncWriteRdfXmlWriter<W> {
+        ToTokioAsyncWriteRdfXmlWriter {
+            writer: Writer::new_with_indent(write, b'\t', 1),
+            inner: self.inner_writer(),
+        }
+    }
+
+    fn inner_writer(mut self) -> InnerRdfXmlWriter {
+        self.prefixes.insert(
+            "http://www.w3.org/1999/02/22-rdf-syntax-ns#".into(),
+            "rdf".into(),
+        );
+        InnerRdfXmlWriter {
+            current_subject: None,
+            current_resource_tag: None,
+            prefixes: self.prefixes,
+        }
+    }
+}
+
+/// Writes an RDF/XML file to a [`Write`] implementation. Can be built using [`RdfXmlSerializer::serialize_to_write`].
+///
+/// ```
+/// use oxrdf::{LiteralRef, NamedNodeRef, TripleRef};
+/// use oxrdfxml::RdfXmlSerializer;
+///
+/// let mut writer = RdfXmlSerializer::new().with_prefix("schema", "http://schema.org/")?.serialize_to_write(Vec::new());
+/// writer.write_triple(TripleRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+/// ))?;
+/// writer.write_triple(TripleRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://schema.org/name")?,
+///     LiteralRef::new_language_tagged_literal_unchecked("Foo Bar", "en"),
+/// ))?;
+/// assert_eq!(
+///     b"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<rdf:RDF xmlns:schema=\"http://schema.org/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\n\t<schema:Person rdf:about=\"http://example.com#me\">\n\t\t<schema:name xml:lang=\"en\">Foo Bar</schema:name>\n\t</schema:Person>\n</rdf:RDF>",
+///     writer.finish()?.as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[must_use]
+pub struct ToWriteRdfXmlWriter<W: Write> {
+    writer: Writer<W>,
+    inner: InnerRdfXmlWriter,
+}
+
+impl<W: Write> ToWriteRdfXmlWriter<W> {
+    /// Writes an extra triple.
+    #[allow(clippy::match_wildcard_for_single_variants, unreachable_patterns)]
+    pub fn write_triple<'a>(&mut self, t: impl Into<TripleRef<'a>>) -> io::Result<()> {
+        let mut buffer = Vec::new();
+        self.inner.write_triple(t, &mut buffer)?;
+        self.flush_buffer(&mut buffer)
+    }
+
+    /// Ends the write process and returns the underlying [`Write`].
+    pub fn finish(mut self) -> io::Result<W> {
+        let mut buffer = Vec::new();
+        self.inner.finish(&mut buffer);
+        self.flush_buffer(&mut buffer)?;
+        Ok(self.writer.into_inner())
+    }
+
+    fn flush_buffer(&mut self, buffer: &mut Vec<Event<'_>>) -> io::Result<()> {
+        for event in buffer.drain(0..) {
+            self.writer.write_event(event).map_err(map_err)?;
+        }
+        Ok(())
+    }
+}
+
+/// Writes an RDF/XML file to an [`AsyncWrite`] implementation. Can be built using [`RdfXmlSerializer::serialize_to_tokio_async_write`].
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, TripleRef, LiteralRef};
+/// use oxrdfxml::RdfXmlSerializer;
+///
+/// # #[tokio::main(flavor = "current_thread")]
+/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
+/// let mut writer = RdfXmlSerializer::new().with_prefix("schema", "http://schema.org/")?.serialize_to_tokio_async_write(Vec::new());
+/// writer.write_triple(TripleRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+/// )).await?;
+/// writer.write_triple(TripleRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://schema.org/name")?,
+///     LiteralRef::new_language_tagged_literal_unchecked("Foo Bar", "en"),
+/// )).await?;
+/// assert_eq!(
+///     b"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<rdf:RDF xmlns:schema=\"http://schema.org/\" xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">\n\t<schema:Person rdf:about=\"http://example.com#me\">\n\t\t<schema:name xml:lang=\"en\">Foo Bar</schema:name>\n\t</schema:Person>\n</rdf:RDF>",
+///     writer.finish().await?.as_slice()
+/// );
+/// # Ok(())
+/// # }
+/// ```
+#[cfg(feature = "async-tokio")]
+#[must_use]
+pub struct ToTokioAsyncWriteRdfXmlWriter<W: AsyncWrite + Unpin> {
+    writer: Writer<W>,
+    inner: InnerRdfXmlWriter,
+}
+
+#[cfg(feature = "async-tokio")]
+impl<W: AsyncWrite + Unpin> ToTokioAsyncWriteRdfXmlWriter<W> {
+    /// Writes an extra triple.
+    #[allow(clippy::match_wildcard_for_single_variants, unreachable_patterns)]
+    pub async fn write_triple<'a>(&mut self, t: impl Into<TripleRef<'a>>) -> io::Result<()> {
+        let mut buffer = Vec::new();
+        self.inner.write_triple(t, &mut buffer)?;
+        self.flush_buffer(&mut buffer).await
+    }
+
+    /// Ends the write process and returns the underlying [`AsyncWrite`].
+ pub async fn finish(mut self) -> io::Result { + let mut buffer = Vec::new(); + self.inner.finish(&mut buffer); + self.flush_buffer(&mut buffer).await?; + Ok(self.writer.into_inner()) + } + + async fn flush_buffer(&mut self, buffer: &mut Vec>) -> io::Result<()> { + for event in buffer.drain(0..) { + self.writer + .write_event_async(event) + .await + .map_err(map_err)?; + } + Ok(()) + } +} + +pub struct InnerRdfXmlWriter { + current_subject: Option, + current_resource_tag: Option, + prefixes: BTreeMap, +} + +impl InnerRdfXmlWriter { + #[allow(clippy::match_wildcard_for_single_variants, unreachable_patterns)] + fn write_triple<'a>( + &mut self, + t: impl Into>, + output: &mut Vec>, + ) -> io::Result<()> { + if self.current_subject.is_none() { + self.write_start(output); + } + + let triple = t.into(); + // We open a new rdf:Description if useful + if self.current_subject.as_ref().map(Subject::as_ref) != Some(triple.subject) { + if self.current_subject.is_some() { + output.push(Event::End( + self.current_resource_tag + .take() + .map_or_else(|| BytesEnd::new("rdf:Description"), BytesEnd::new), + )); + } + self.current_subject = Some(triple.subject.into_owned()); + + let (mut description_open, with_type_tag) = if triple.predicate == rdf::TYPE { + if let TermRef::NamedNode(t) = triple.object { + let (prop_qname, prop_xmlns) = self.uri_to_qname_and_xmlns(t); + let mut description_open = BytesStart::new(prop_qname.clone()); + if let Some(prop_xmlns) = prop_xmlns { + description_open.push_attribute(prop_xmlns); + } + self.current_resource_tag = Some(prop_qname.into_owned()); + (description_open, true) + } else { + (BytesStart::new("rdf:Description"), false) + } + } else { + (BytesStart::new("rdf:Description"), false) + }; + match triple.subject { + SubjectRef::NamedNode(node) => { + description_open.push_attribute(("rdf:about", node.as_str())) + } + SubjectRef::BlankNode(node) => { + description_open.push_attribute(("rdf:nodeID", node.as_str())) + } + _ => { + return Err(io::Error::new( + io::ErrorKind::InvalidInput, + "RDF/XML only supports named or blank subject", + )) + } + } + output.push(Event::Start(description_open)); + if with_type_tag { + return Ok(()); // No need for a value + } + } + + let (prop_qname, prop_xmlns) = self.uri_to_qname_and_xmlns(triple.predicate); + let mut property_open = BytesStart::new(prop_qname.clone()); + if let Some(prop_xmlns) = prop_xmlns { + property_open.push_attribute(prop_xmlns); + } + let content = match triple.object { + TermRef::NamedNode(node) => { + property_open.push_attribute(("rdf:resource", node.as_str())); + None + } + TermRef::BlankNode(node) => { + property_open.push_attribute(("rdf:nodeID", node.as_str())); + None + } + TermRef::Literal(literal) => { + if let Some(language) = literal.language() { + property_open.push_attribute(("xml:lang", language)); + } else if !literal.is_plain() { + property_open.push_attribute(("rdf:datatype", literal.datatype().as_str())); + } + Some(literal.value()) + } + _ => { + return Err(io::Error::new( + io::ErrorKind::InvalidInput, + "RDF/XML only supports named, blank or literal object", + )) + } + }; + if let Some(content) = content { + output.push(Event::Start(property_open)); + output.push(Event::Text(BytesText::new(content))); + output.push(Event::End(BytesEnd::new(prop_qname))); + } else { + output.push(Event::Empty(property_open)); + } + Ok(()) + } + + fn write_start(&self, output: &mut Vec>) { + output.push(Event::Decl(BytesDecl::new("1.0", Some("UTF-8"), None))); + let mut rdf_open = 
BytesStart::new("rdf:RDF"); + for (prefix_value, prefix_name) in &self.prefixes { + rdf_open.push_attribute(( + format!("xmlns:{prefix_name}").as_str(), + prefix_value.as_str(), + )); + } + output.push(Event::Start(rdf_open)) + } + + fn finish(&mut self, output: &mut Vec>) { + if self.current_subject.is_some() { + output.push(Event::End( + self.current_resource_tag + .take() + .map_or_else(|| BytesEnd::new("rdf:Description"), BytesEnd::new), + )); + } else { + self.write_start(output); + } + output.push(Event::End(BytesEnd::new("rdf:RDF"))); + } + + fn uri_to_qname_and_xmlns<'a>( + &self, + uri: NamedNodeRef<'a>, + ) -> (Cow<'a, str>, Option<(&'a str, &'a str)>) { + let (prop_prefix, prop_value) = split_iri(uri.as_str()); + if let Some(prop_prefix) = self.prefixes.get(prop_prefix) { + ( + if prop_prefix.is_empty() { + Cow::Borrowed(prop_value) + } else { + Cow::Owned(format!("{prop_prefix}:{prop_value}")) + }, + None, + ) + } else if prop_prefix == "http://www.w3.org/2000/xmlns/" { + (Cow::Owned(format!("xmlns:{prop_value}")), None) + } else if prop_value.is_empty() { + (Cow::Borrowed("p:"), Some(("xmlns:p", prop_prefix))) + } else { + (Cow::Borrowed(prop_value), Some(("xmlns", prop_prefix))) + } + } +} + +fn map_err(error: quick_xml::Error) -> io::Error { + if let quick_xml::Error::Io(error) = error { + Arc::try_unwrap(error).unwrap_or_else(|error| io::Error::new(error.kind(), error)) + } else { + io::Error::new(io::ErrorKind::Other, error) + } +} + +fn split_iri(iri: &str) -> (&str, &str) { + if let Some(position_base) = iri.rfind(|c| !is_name_char(c) || c == ':') { + if let Some(position_add) = iri[position_base..].find(|c| is_name_start_char(c) && c != ':') + { + ( + &iri[..position_base + position_add], + &iri[position_base + position_add..], + ) + } else { + (iri, "") + } + } else { + (iri, "") + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_split_iri() { + assert_eq!( + split_iri("http://schema.org/Person"), + ("http://schema.org/", "Person") + ); + assert_eq!(split_iri("http://schema.org/"), ("http://schema.org/", "")); + assert_eq!( + split_iri("http://schema.org#foo"), + ("http://schema.org#", "foo") + ); + assert_eq!(split_iri("urn:isbn:foo"), ("urn:isbn:", "foo")); + } +} diff --git a/ng-oxigraph/src/oxrdfxml/utils.rs b/ng-oxigraph/src/oxrdfxml/utils.rs new file mode 100644 index 0000000..0483488 --- /dev/null +++ b/ng-oxigraph/src/oxrdfxml/utils.rs @@ -0,0 +1,26 @@ +pub fn is_name_start_char(c: char) -> bool { + // ":" | [A-Z] | "_" | [a-z] | [#xC0-#xD6] | [#xD8-#xF6] | [#xF8-#x2FF] | [#x370-#x37D] | [#x37F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF] + matches!(c, + ':' + | 'A'..='Z' + | '_' + | 'a'..='z' + | '\u{00C0}'..='\u{00D6}' + | '\u{00D8}'..='\u{00F6}' + | '\u{00F8}'..='\u{02FF}' + | '\u{0370}'..='\u{037D}' + | '\u{037F}'..='\u{1FFF}' + | '\u{200C}'..='\u{200D}' + | '\u{2070}'..='\u{218F}' + | '\u{2C00}'..='\u{2FEF}' + | '\u{3001}'..='\u{D7FF}' + | '\u{F900}'..='\u{FDCF}' + | '\u{FDF0}'..='\u{FFFD}' + | '\u{10000}'..='\u{EFFFF}') +} + +pub fn is_name_char(c: char) -> bool { + // NameStartChar | "-" | "." | [0-9] | #xB7 | [#x0300-#x036F] | [#x203F-#x2040] + is_name_start_char(c) + || matches!(c, '-' | '.' 
| '0'..='9' | '\u{B7}' | '\u{0300}'..='\u{036F}' | '\u{203F}'..='\u{2040}')
+}
diff --git a/ng-oxigraph/src/oxsdatatypes/README.md b/ng-oxigraph/src/oxsdatatypes/README.md
new file mode 100644
index 0000000..1c3b2c3
--- /dev/null
+++ b/ng-oxigraph/src/oxsdatatypes/README.md
@@ -0,0 +1,65 @@
+oxsdatatypes
+============
+
+[![Latest Version](https://img.shields.io/crates/v/oxsdatatypes.svg)](https://crates.io/crates/oxsdatatypes)
+[![Released API docs](https://docs.rs/oxsdatatypes/badge.svg)](https://docs.rs/oxsdatatypes)
+[![Crates.io downloads](https://img.shields.io/crates/d/oxsdatatypes)](https://crates.io/crates/oxsdatatypes)
+[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions)
+[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community)
+
+oxsdatatypes is an implementation of some [XML Schema Definition Language Datatypes](https://www.w3.org/TR/xmlschema11-2/).
+Its main aim is to ease the implementation of SPARQL and XPath.
+
+Usage example:
+
+```rust
+use std::str::FromStr;
+use oxsdatatypes::Decimal;
+
+assert!(Decimal::from_str("22.2").unwrap() > Decimal::from_str("21").unwrap());
+```
+
+Each datatype is represented by a Rust struct.
+
+Each datatype provides:
+* a `FromStr` implementation to parse a datatype string serialization following its [lexical mapping](https://www.w3.org/TR/xmlschema11-2/#dt-lexical-mapping),
+* a `Display` implementation to serialize a datatype following its [canonical mapping](https://www.w3.org/TR/xmlschema11-2/#dt-canonical-mapping),
+* an `is_identical_with` method following its [identity relation](https://www.w3.org/TR/xmlschema11-2/#identity),
+* `PartialEq`, and `Eq` if possible, implementations following its [equality relation](https://www.w3.org/TR/xmlschema11-2/#equality),
+* `PartialOrd`, and `Ord` if possible, implementations following its [order relation](https://www.w3.org/TR/xmlschema11-2/#order),
+* `From` and `TryFrom` implementations to implement [XPath casting](https://www.w3.org/TR/xpath-functions-31/#casting),
+* various methods implementing [XPath functions](https://www.w3.org/TR/xpath-functions-31/),
+* `from_be_bytes` and `to_be_bytes` methods for serialization.
+
+
+### `DateTime::now` behavior
+
+The `DateTime::now()` function needs special OS support.
+Currently:
+- If the `custom-now` feature is enabled, a function computing `now` must be set:
+  ```rust
+  use oxsdatatypes::Duration;
+
+  #[no_mangle]
+  fn custom_ox_now() -> Duration {
+      unimplemented!("now implementation")
+  }
+  ```
+- For `wasm32-unknown-unknown`, if the `js` feature is enabled, the `Date.now()` ECMAScript API is used.
+- For all other targets, `SystemTime::now()` is used.
+
+## License
+
+This project is licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or
+  <http://www.apache.org/licenses/LICENSE-2.0>)
+* MIT license ([LICENSE-MIT](../LICENSE-MIT) or
+  <http://opensource.org/licenses/MIT>)
+
+at your option.
+
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
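
The trait checklist above maps directly onto code. Here is a minimal sketch of how those pieces compose, assuming the `oxsdatatypes::` import paths from the usage example above (the concrete error type returned by `FromStr` varies per datatype):

```rust
use std::str::FromStr;
use oxsdatatypes::{Boolean, DateTime, Decimal, Integer};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // FromStr follows the lexical mapping, Display the canonical mapping:
    // leading and trailing zeros are dropped on output.
    let d = Decimal::from_str("022.20")?;
    assert_eq!(d.to_string(), "22.2");

    // XPath casting via From: any non-zero number casts to true.
    assert!(bool::from(Boolean::from(Integer::from(2))));

    // is_identical_with is the XSD identity relation; identical values
    // are also equal under PartialEq.
    let dt = DateTime::from_str("2024-01-01T00:00:00Z")?;
    assert!(dt.is_identical_with(dt));

    // to_be_bytes/from_be_bytes round-trip the fixed-size binary encoding.
    assert!(DateTime::from_be_bytes(dt.to_be_bytes()).is_identical_with(dt));
    Ok(())
}
```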
diff --git a/ng-oxigraph/src/oxsdatatypes/boolean.rs b/ng-oxigraph/src/oxsdatatypes/boolean.rs new file mode 100644 index 0000000..94510d4 --- /dev/null +++ b/ng-oxigraph/src/oxsdatatypes/boolean.rs @@ -0,0 +1,134 @@ +use crate::oxsdatatypes::{Decimal, Double, Float, Integer}; +use serde::{Deserialize, Serialize}; +use std::fmt; +use std::str::{FromStr, ParseBoolError}; + +/// [XML Schema `boolean` datatype](https://www.w3.org/TR/xmlschema11-2/#boolean) +/// +/// Uses internally a [`bool`]. +#[derive( + Debug, Clone, Copy, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, +)] +#[repr(transparent)] +pub struct Boolean { + value: bool, +} + +impl Boolean { + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self == other + } +} + +impl From for Boolean { + #[inline] + fn from(value: bool) -> Self { + Self { value } + } +} + +impl From for Boolean { + #[inline] + fn from(value: Integer) -> Self { + (value != Integer::from(0)).into() + } +} + +impl From for Boolean { + #[inline] + fn from(value: Decimal) -> Self { + (value != Decimal::from(0)).into() + } +} + +impl From for Boolean { + #[inline] + fn from(value: Float) -> Self { + (value != Float::from(0.) && !value.is_nan()).into() + } +} + +impl From for Boolean { + #[inline] + fn from(value: Double) -> Self { + (value != Double::from(0.) && !value.is_nan()).into() + } +} + +impl From for bool { + #[inline] + fn from(value: Boolean) -> Self { + value.value + } +} + +impl FromStr for Boolean { + type Err = ParseBoolError; + + #[inline] + fn from_str(input: &str) -> Result { + Ok(match input { + "true" | "1" => true, + "false" | "0" => false, + _ => bool::from_str(input)?, + } + .into()) + } +} + +impl fmt::Display for Boolean { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.value.fmt(f) + } +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + + #[test] + fn from_str() -> Result<(), ParseBoolError> { + assert_eq!(Boolean::from_str("true")?.to_string(), "true"); + assert_eq!(Boolean::from_str("1")?.to_string(), "true"); + assert_eq!(Boolean::from_str("false")?.to_string(), "false"); + assert_eq!(Boolean::from_str("0")?.to_string(), "false"); + Ok(()) + } + + #[test] + fn from_integer() { + assert_eq!(Boolean::from(false), Integer::from(0).into()); + assert_eq!(Boolean::from(true), Integer::from(1).into()); + assert_eq!(Boolean::from(true), Integer::from(2).into()); + } + + #[test] + fn from_decimal() { + assert_eq!(Boolean::from(false), Decimal::from(0).into()); + assert_eq!(Boolean::from(true), Decimal::from(1).into()); + assert_eq!(Boolean::from(true), Decimal::from(2).into()); + } + + #[test] + fn from_float() { + assert_eq!(Boolean::from(false), Float::from(0.).into()); + assert_eq!(Boolean::from(true), Float::from(1.).into()); + assert_eq!(Boolean::from(true), Float::from(2.).into()); + assert_eq!(Boolean::from(false), Float::from(f32::NAN).into()); + assert_eq!(Boolean::from(true), Float::from(f32::INFINITY).into()); + } + + #[test] + fn from_double() { + assert_eq!(Boolean::from(false), Double::from(0.).into()); + assert_eq!(Boolean::from(true), Double::from(1.).into()); + assert_eq!(Boolean::from(true), Double::from(2.).into()); + assert_eq!(Boolean::from(false), Double::from(f64::NAN).into()); + assert_eq!(Boolean::from(true), Double::from(f64::INFINITY).into()); + } +} diff --git a/ng-oxigraph/src/oxsdatatypes/date_time.rs 
b/ng-oxigraph/src/oxsdatatypes/date_time.rs new file mode 100644 index 0000000..95aad7a --- /dev/null +++ b/ng-oxigraph/src/oxsdatatypes/date_time.rs @@ -0,0 +1,3187 @@ +#![allow(clippy::expect_used)] + +use crate::oxsdatatypes::{DayTimeDuration, Decimal, Duration, YearMonthDuration}; +use serde::{Deserialize, Serialize}; +use std::cmp::{min, Ordering}; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::str::FromStr; + +/// [XML Schema `dateTime` datatype](https://www.w3.org/TR/xmlschema11-2/#dateTime) +/// +/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`] +/// and an optional timezone offset in minutes. +#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash, Serialize, Deserialize)] +pub struct DateTime { + timestamp: Timestamp, +} + +impl DateTime { + pub const MAX: Self = Self { + timestamp: Timestamp::MAX, + }; + pub const MIN: Self = Self { + timestamp: Timestamp::MIN, + }; + + #[inline] + pub(super) fn new( + year: i64, + month: u8, + day: u8, + hour: u8, + minute: u8, + second: Decimal, + timezone_offset: Option, + ) -> Result { + Ok(Self { + timestamp: Timestamp::new(&DateTimeSevenPropertyModel { + year: Some(year), + month: Some(month), + day: Some(day), + hour: Some(hour), + minute: Some(minute), + second: Some(second), + timezone_offset, + })?, + }) + } + + /// [fn:current-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-current-dateTime) + #[inline] + pub fn now() -> Self { + Self { + timestamp: Timestamp::now(), + } + } + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + timestamp: Timestamp::from_be_bytes(bytes), + } + } + + /// [fn:year-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-year-from-dateTime) + #[inline] + #[must_use] + pub fn year(self) -> i64 { + self.timestamp.year() + } + + /// [fn:month-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-month-from-dateTime) + #[inline] + #[must_use] + pub fn month(self) -> u8 { + self.timestamp.month() + } + + /// [fn:day-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-day-from-dateTime) + #[inline] + #[must_use] + pub fn day(self) -> u8 { + self.timestamp.day() + } + + /// [fn:hour-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-hours-from-dateTime) + #[inline] + #[must_use] + pub fn hour(self) -> u8 { + self.timestamp.hour() + } + + /// [fn:minute-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-minutes-from-dateTime) + #[inline] + #[must_use] + pub fn minute(self) -> u8 { + self.timestamp.minute() + } + + /// [fn:second-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-seconds-from-dateTime) + #[inline] + #[must_use] + pub fn second(self) -> Decimal { + self.timestamp.second() + } + + /// [fn:timezone-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-timezone-from-dateTime) + #[inline] + #[must_use] + pub fn timezone(self) -> Option { + Some(self.timezone_offset()?.into()) + } + + #[inline] + #[must_use] + pub fn timezone_offset(self) -> Option { + self.timestamp.timezone_offset() + } + + #[inline] + fn properties(self) -> DateTimeSevenPropertyModel { + DateTimeSevenPropertyModel { + year: Some(self.year()), + month: Some(self.month()), + day: Some(self.day()), + hour: Some(self.hour()), + minute: Some(self.minute()), + second: Some(self.second()), + timezone_offset: self.timezone_offset(), + } + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 18] { + self.timestamp.to_be_bytes() + } + + /// 
[op:subtract-dateTimes](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dateTimes) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_sub(self, rhs: impl Into) -> Option { + self.timestamp.checked_sub(rhs.into().timestamp) + } + + /// [op:add-yearMonthDuration-to-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-add-yearMonthDuration-to-dateTime) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_add_year_month_duration( + self, + rhs: impl Into, + ) -> Option { + self.checked_add_duration(Duration::from(rhs.into())) + } + + /// [op:add-dayTimeDuration-to-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDuration-to-dateTime) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_add_day_time_duration(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Some(Self { + timestamp: self.timestamp.checked_add_seconds(rhs.all_seconds())?, + }) + } + + /// [op:add-yearMonthDuration-to-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-add-yearMonthDuration-to-dateTime) and [op:add-dayTimeDuration-to-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDuration-to-dateTime) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_add_duration(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + if let Ok(rhs) = DayTimeDuration::try_from(rhs) { + self.checked_add_day_time_duration(rhs) + } else { + Some(Self { + timestamp: Timestamp::new(&date_time_plus_duration(rhs, &self.properties())?) + .ok()?, + }) + } + } + + /// [op:subtract-yearMonthDuration-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-subtract-yearMonthDuration-from-dateTime) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_sub_year_month_duration( + self, + rhs: impl Into, + ) -> Option { + self.checked_sub_duration(Duration::from(rhs.into())) + } + + /// [op:subtract-dayTimeDuration-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDuration-from-dateTime) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_sub_day_time_duration(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Some(Self { + timestamp: self.timestamp.checked_sub_seconds(rhs.as_seconds())?, + }) + } + + /// [op:subtract-yearMonthDuration-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-subtract-yearMonthDuration-from-dateTime) and [op:subtract-dayTimeDuration-from-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDuration-from-dateTime) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). 
+ #[inline] + #[must_use] + pub fn checked_sub_duration(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + if let Ok(rhs) = DayTimeDuration::try_from(rhs) { + self.checked_sub_day_time_duration(rhs) + } else { + Some(Self { + timestamp: Timestamp::new(&date_time_plus_duration( + rhs.checked_neg()?, + &self.properties(), + )?) + .ok()?, + }) + } + } + + /// [fn:adjust-dateTime-to-timezone](https://www.w3.org/TR/xpath-functions-31/#func-adjust-dateTime-to-timezone) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn adjust(self, timezone_offset: Option) -> Option { + Some(Self { + timestamp: self.timestamp.adjust(timezone_offset)?, + }) + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.timestamp.is_identical_with(other.timestamp) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). +impl TryFrom for DateTime { + type Error = DateTimeOverflowError; + + #[inline] + fn try_from(date: Date) -> Result { + Self::new( + date.year(), + date.month(), + date.day(), + 0, + 0, + Decimal::default(), + date.timezone_offset(), + ) + } +} + +impl FromStr for DateTime { + type Err = ParseDateTimeError; + + fn from_str(input: &str) -> Result { + ensure_complete(input, date_time_lexical_rep) + } +} + +impl fmt::Display for DateTime { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let year = self.year(); + if year < 0 { + f.write_str("-")?; + } + let second = self.second(); + write!( + f, + "{:04}-{:02}-{:02}T{:02}:{:02}:{}{}", + year.abs(), + self.month(), + self.day(), + self.hour(), + self.minute(), + if Decimal::from(-10) < second && second < Decimal::from(10) { + "0" + } else { + "" + }, + second + )?; + if let Some(timezone_offset) = self.timezone_offset() { + write!(f, "{timezone_offset}")?; + } + Ok(()) + } +} + +/// [XML Schema `time` datatype](https://www.w3.org/TR/xmlschema11-2/#time) +/// +/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`], +/// when combined with the date 1972-12-31, and an optional timezone offset in minutes. 
+#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash)] +pub struct Time { + timestamp: Timestamp, +} + +impl Time { + #[cfg(test)] + const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(62_230_255_200), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + #[cfg(test)] + const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(62_230_154_400), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + + #[inline] + fn new( + mut hour: u8, + minute: u8, + second: Decimal, + timezone_offset: Option, + ) -> Result { + if hour == 24 && minute == 0 && second == Decimal::default() { + hour = 0; + } + Ok(Self { + timestamp: Timestamp::new(&DateTimeSevenPropertyModel { + year: None, + month: None, + day: None, + hour: Some(hour), + minute: Some(minute), + second: Some(second), + timezone_offset, + })?, + }) + } + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + timestamp: Timestamp::from_be_bytes(bytes), + } + } + + /// [fn:current-time](https://www.w3.org/TR/xpath-functions-31/#func-current-time) + #[inline] + pub fn now() -> Self { + Self { + timestamp: Timestamp::now(), + } + } + + /// [fn:hour-from-time](https://www.w3.org/TR/xpath-functions-31/#func-hours-from-time) + #[inline] + #[must_use] + pub fn hour(self) -> u8 { + self.timestamp.hour() + } + + /// [fn:minute-from-time](https://www.w3.org/TR/xpath-functions-31/#func-minutes-from-time) + #[inline] + #[must_use] + pub fn minute(self) -> u8 { + self.timestamp.minute() + } + + /// [fn:second-from-time](https://www.w3.org/TR/xpath-functions-31/#func-seconds-from-time) + #[inline] + #[must_use] + pub fn second(self) -> Decimal { + self.timestamp.second() + } + + /// [fn:timezone-from-time](https://www.w3.org/TR/xpath-functions-31/#func-timezone-from-time) + #[inline] + #[must_use] + pub fn timezone(self) -> Option { + Some(self.timezone_offset()?.into()) + } + + #[inline] + #[must_use] + pub fn timezone_offset(self) -> Option { + self.timestamp.timezone_offset() + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 18] { + self.timestamp.to_be_bytes() + } + + /// [op:subtract-times](https://www.w3.org/TR/xpath-functions-31/#func-subtract-times) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_sub(self, rhs: impl Into) -> Option { + self.timestamp.checked_sub(rhs.into().timestamp) + } + + /// [op:add-dayTimeDuration-to-time](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDuration-to-time) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_add_day_time_duration(self, rhs: impl Into) -> Option { + self.checked_add_duration(Duration::from(rhs.into())) + } + + /// [op:add-dayTimeDuration-to-time](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDuration-to-time) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_add_duration(self, rhs: impl Into) -> Option { + Some( + DateTime::new( + 1972, + 12, + 31, + self.hour(), + self.minute(), + self.second(), + self.timezone_offset(), + ) + .ok()? + .checked_add_duration(rhs)? 
+            .into(),
+        )
+    }
+
+    /// [op:subtract-dayTimeDuration-from-time](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDuration-from-time)
+    ///
+    /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)).
+    #[inline]
+    #[must_use]
+    pub fn checked_sub_day_time_duration(self, rhs: impl Into<DayTimeDuration>) -> Option<Self> {
+        self.checked_sub_duration(Duration::from(rhs.into()))
+    }
+
+    /// [op:subtract-dayTimeDuration-from-time](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDuration-from-time)
+    ///
+    /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)).
+    #[inline]
+    #[must_use]
+    pub fn checked_sub_duration(self, rhs: impl Into<Duration>) -> Option<Self> {
+        Some(
+            DateTime::new(
+                1972,
+                12,
+                31,
+                self.hour(),
+                self.minute(),
+                self.second(),
+                self.timezone_offset(),
+            )
+            .ok()?
+            .checked_sub_duration(rhs)?
+            .into(),
+        )
+    }
+
+    /// [fn:adjust-time-to-timezone](https://www.w3.org/TR/xpath-functions-31/#func-adjust-time-to-timezone)
+    #[inline]
+    #[must_use]
+    pub fn adjust(self, timezone_offset: Option<TimezoneOffset>) -> Option<Self> {
+        Some(
+            DateTime::new(
+                1972,
+                12,
+                31,
+                self.hour(),
+                self.minute(),
+                self.second(),
+                self.timezone_offset(),
+            )
+            .ok()?
+            .adjust(timezone_offset)?
+            .into(),
+        )
+    }
+
+    /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity).
+    #[inline]
+    #[must_use]
+    pub fn is_identical_with(self, other: Self) -> bool {
+        self.timestamp.is_identical_with(other.timestamp)
+    }
+}
+
+/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
+impl From<DateTime> for Time {
+    #[inline]
+    fn from(date_time: DateTime) -> Self {
+        Self::new(
+            date_time.hour(),
+            date_time.minute(),
+            date_time.second(),
+            date_time.timezone_offset(),
+        )
+        .expect("Casting from xsd:dateTime to xsd:time can't fail")
+    }
+}
+
+impl FromStr for Time {
+    type Err = ParseDateTimeError;
+
+    fn from_str(input: &str) -> Result<Self, Self::Err> {
+        ensure_complete(input, time_lexical_rep)
+    }
+}
+
+impl fmt::Display for Time {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let second = self.second();
+        write!(
+            f,
+            "{:02}:{:02}:{}{}",
+            self.hour(),
+            self.minute(),
+            if Decimal::from(-10) < second && second < Decimal::from(10) {
+                "0"
+            } else {
+                ""
+            },
+            second
+        )?;
+        if let Some(timezone_offset) = self.timezone_offset() {
+            write!(f, "{timezone_offset}")?;
+        }
+        Ok(())
+    }
+}
+
+/// [XML Schema `date` datatype](https://www.w3.org/TR/xmlschema11-2/#date)
+///
+/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`],
+/// when combined with the time 00:00:00, and an optional timezone offset in minutes.
+#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash)] +pub struct Date { + timestamp: Timestamp, +} + +impl Date { + pub const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + pub const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(-170_141_183_460_469_216_800), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + + #[inline] + fn new( + year: i64, + month: u8, + day: u8, + timezone_offset: Option, + ) -> Result { + Ok(Self { + timestamp: Timestamp::new(&DateTimeSevenPropertyModel { + year: Some(year), + month: Some(month), + day: Some(day), + hour: None, + minute: None, + second: None, + timezone_offset, + })?, + }) + } + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + timestamp: Timestamp::from_be_bytes(bytes), + } + } + + /// [fn:current-date](https://www.w3.org/TR/xpath-functions-31/#func-current-date) + #[inline] + pub fn now() -> Self { + DateTime::now() + .try_into() + .expect("The current time seems way in the future, it's strange") + } + + /// [fn:year-from-date](https://www.w3.org/TR/xpath-functions-31/#func-year-from-date) + #[inline] + #[must_use] + pub fn year(self) -> i64 { + self.timestamp.year() + } + + /// [fn:month-from-date](https://www.w3.org/TR/xpath-functions-31/#func-month-from-date) + #[inline] + #[must_use] + pub fn month(self) -> u8 { + self.timestamp.month() + } + + /// [fn:day-from-date](https://www.w3.org/TR/xpath-functions-31/#func-day-from-date) + #[inline] + #[must_use] + pub fn day(self) -> u8 { + self.timestamp.day() + } + + /// [fn:timezone-from-date](https://www.w3.org/TR/xpath-functions-31/#func-timezone-from-date) + #[inline] + #[must_use] + pub fn timezone(self) -> Option { + Some(self.timezone_offset()?.into()) + } + + #[inline] + #[must_use] + pub fn timezone_offset(self) -> Option { + self.timestamp.timezone_offset() + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 18] { + self.timestamp.to_be_bytes() + } + + /// [op:subtract-dates](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dates) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_sub(self, rhs: impl Into) -> Option { + self.timestamp.checked_sub(rhs.into().timestamp) + } + + /// [op:add-yearMonthDuration-to-date](https://www.w3.org/TR/xpath-functions-31/#func-add-yearMonthDuration-to-date) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_add_year_month_duration( + self, + rhs: impl Into, + ) -> Option { + self.checked_add_duration(Duration::from(rhs.into())) + } + + /// [op:add-dayTimeDuration-to-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDuration-to-date) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). 
+ #[inline] + #[must_use] + pub fn checked_add_day_time_duration(self, rhs: impl Into) -> Option { + self.checked_add_duration(Duration::from(rhs.into())) + } + + /// [op:add-yearMonthDuration-to-date](https://www.w3.org/TR/xpath-functions-31/#func-add-yearMonthDuration-to-date) and [op:add-dayTimeDuration-to-dateTime](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDuration-to-date) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_add_duration(self, rhs: impl Into) -> Option { + DateTime::try_from(self) + .ok()? + .checked_add_duration(rhs)? + .try_into() + .ok() + } + + /// [op:subtract-yearMonthDuration-from-date](https://www.w3.org/TR/xpath-functions-31/#func-subtract-yearMonthDuration-from-date) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_sub_year_month_duration( + self, + rhs: impl Into, + ) -> Option { + self.checked_sub_duration(Duration::from(rhs.into())) + } + + /// [op:subtract-dayTimeDuration-from-date](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDuration-from-date) + /// + /// Returns `None` in case of overflow ([`FODT0001`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001)). + #[inline] + #[must_use] + pub fn checked_sub_day_time_duration(self, rhs: impl Into) -> Option { + self.checked_sub_duration(Duration::from(rhs.into())) + } + + /// [op:subtract-yearMonthDuration-from-date](https://www.w3.org/TR/xpath-functions-31/#func-subtract-yearMonthDuration-from-date) and [op:subtract-dayTimeDuration-from-date](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDuration-from-date) + #[inline] + #[must_use] + pub fn checked_sub_duration(self, rhs: impl Into) -> Option { + DateTime::try_from(self) + .ok()? + .checked_sub_duration(rhs)? + .try_into() + .ok() + } + + // [fn:adjust-date-to-timezone](https://www.w3.org/TR/xpath-functions-31/#func-adjust-date-to-timezone) + #[inline] + #[must_use] + pub fn adjust(self, timezone_offset: Option) -> Option { + DateTime::new( + self.year(), + self.month(), + self.day(), + 0, + 0, + Decimal::default(), + self.timezone_offset(), + ) + .ok()? + .adjust(timezone_offset)? + .try_into() + .ok() + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.timestamp.is_identical_with(other.timestamp) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). 
+impl TryFrom for Date { + type Error = DateTimeOverflowError; + + #[inline] + fn try_from(date_time: DateTime) -> Result { + Self::new( + date_time.year(), + date_time.month(), + date_time.day(), + date_time.timezone_offset(), + ) + } +} + +impl FromStr for Date { + type Err = ParseDateTimeError; + + fn from_str(input: &str) -> Result { + ensure_complete(input, date_lexical_rep) + } +} + +impl fmt::Display for Date { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let year = self.year(); + if year < 0 { + f.write_str("-")?; + } + write!(f, "{:04}-{:02}-{:02}", year.abs(), self.month(), self.day())?; + if let Some(timezone_offset) = self.timezone_offset() { + write!(f, "{timezone_offset}")?; + } + Ok(()) + } +} + +/// [XML Schema `gYearMonth` datatype](https://www.w3.org/TR/xmlschema11-2/#gYearMonth) +/// +/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`], +/// when combined with the day-time 31T00:00:00, and an optional timezone offset in minutes. +#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash)] +pub struct GYearMonth { + timestamp: Timestamp, +} + +impl GYearMonth { + pub const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + pub const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(-170_141_183_460_466_970_400), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + + #[inline] + fn new( + year: i64, + month: u8, + timezone_offset: Option, + ) -> Result { + Ok(Self { + timestamp: Timestamp::new(&DateTimeSevenPropertyModel { + year: Some(year), + month: Some(month), + day: None, + hour: None, + minute: None, + second: None, + timezone_offset, + })?, + }) + } + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + timestamp: Timestamp::from_be_bytes(bytes), + } + } + + #[inline] + #[must_use] + pub fn year(self) -> i64 { + self.timestamp.year() + } + + #[inline] + #[must_use] + pub fn month(self) -> u8 { + self.timestamp.month() + } + + #[inline] + #[must_use] + pub fn timezone(self) -> Option { + Some(self.timezone_offset()?.into()) + } + + #[inline] + #[must_use] + pub fn timezone_offset(self) -> Option { + self.timestamp.timezone_offset() + } + + #[inline] + #[must_use] + pub fn adjust(self, timezone_offset: Option) -> Option { + Some(Self { + timestamp: self.timestamp.adjust(timezone_offset)?, + }) + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 18] { + self.timestamp.to_be_bytes() + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.timestamp.is_identical_with(other.timestamp) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). +impl TryFrom for GYearMonth { + type Error = DateTimeOverflowError; + + #[inline] + fn try_from(date_time: DateTime) -> Result { + Self::new( + date_time.year(), + date_time.month(), + date_time.timezone_offset(), + ) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). 
+impl From for GYearMonth { + #[inline] + fn from(date: Date) -> Self { + Self::new(date.year(), date.month(), date.timezone_offset()) + .expect("Casting from xsd:date to xsd:gYearMonth can't fail") + } +} + +impl FromStr for GYearMonth { + type Err = ParseDateTimeError; + + fn from_str(input: &str) -> Result { + ensure_complete(input, g_year_month_lexical_rep) + } +} + +impl fmt::Display for GYearMonth { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let year = self.year(); + if year < 0 { + f.write_str("-")?; + } + write!(f, "{:04}-{:02}", year.abs(), self.month())?; + if let Some(timezone_offset) = self.timezone_offset() { + write!(f, "{timezone_offset}")?; + } + Ok(()) + } +} + +/// [XML Schema `gYear` datatype](https://www.w3.org/TR/xmlschema11-2/#gYear) +/// +/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`], +/// when combined with the month-day-time 12-31T00:00:00, and an optional timezone offset in minutes. +#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash)] +pub struct GYear { + timestamp: Timestamp, +} + +impl GYear { + pub const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(170_141_183_460_461_440_800), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + pub const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(-170_141_183_460_461_700_000), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + + #[inline] + fn new( + year: i64, + timezone_offset: Option, + ) -> Result { + Ok(Self { + timestamp: Timestamp::new(&DateTimeSevenPropertyModel { + year: Some(year), + month: None, + day: None, + hour: None, + minute: None, + second: None, + timezone_offset, + })?, + }) + } + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + timestamp: Timestamp::from_be_bytes(bytes), + } + } + + #[inline] + #[must_use] + pub fn year(self) -> i64 { + self.timestamp.year() + } + + #[inline] + #[must_use] + pub fn timezone(self) -> Option { + Some(self.timezone_offset()?.into()) + } + + #[inline] + #[must_use] + pub fn timezone_offset(self) -> Option { + self.timestamp.timezone_offset() + } + + #[inline] + #[must_use] + pub fn adjust(self, timezone_offset: Option) -> Option { + Some(Self { + timestamp: self.timestamp.adjust(timezone_offset)?, + }) + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 18] { + self.timestamp.to_be_bytes() + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.timestamp.is_identical_with(other.timestamp) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). +impl TryFrom for GYear { + type Error = DateTimeOverflowError; + + #[inline] + fn try_from(date_time: DateTime) -> Result { + Self::new(date_time.year(), date_time.timezone_offset()) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). 
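+// For instance (illustrative values), only the year and offset survive the cast:
+//
+//     assert_eq!(
+//         GYear::try_from(DateTime::from_str("1999-05-31T13:20:00Z")?)?,
+//         GYear::from_str("1999Z")?
+//     );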
+impl TryFrom for GYear { + type Error = DateTimeOverflowError; + + #[inline] + fn try_from(date: Date) -> Result { + Self::new(date.year(), date.timezone_offset()) + } +} + +impl TryFrom for GYear { + type Error = DateTimeOverflowError; + + #[inline] + fn try_from(year_month: GYearMonth) -> Result { + Self::new(year_month.year(), year_month.timezone_offset()) + } +} + +impl FromStr for GYear { + type Err = ParseDateTimeError; + + fn from_str(input: &str) -> Result { + ensure_complete(input, g_year_lexical_rep) + } +} + +impl fmt::Display for GYear { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let year = self.year(); + if year < 0 { + f.write_str("-")?; + } + write!(f, "{:04}", year.abs())?; + if let Some(timezone_offset) = self.timezone_offset() { + write!(f, "{timezone_offset}")?; + } + Ok(()) + } +} + +/// [XML Schema `gMonthDay` datatype](https://www.w3.org/TR/xmlschema11-2/#gMonthDay) +/// +/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`], +/// when combined with the year 1972 and the time 31T00:00:00, and an optional timezone offset in minutes. +#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash)] +pub struct GMonthDay { + timestamp: Timestamp, +} + +impl GMonthDay { + #[inline] + fn new( + month: u8, + day: u8, + timezone_offset: Option, + ) -> Result { + Ok(Self { + timestamp: Timestamp::new(&DateTimeSevenPropertyModel { + year: None, + month: Some(month), + day: Some(day), + hour: None, + minute: None, + second: None, + timezone_offset, + })?, + }) + } + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + timestamp: Timestamp::from_be_bytes(bytes), + } + } + + #[inline] + #[must_use] + pub fn month(&self) -> u8 { + self.timestamp.month() + } + + #[inline] + #[must_use] + pub fn day(&self) -> u8 { + self.timestamp.day() + } + + #[inline] + #[must_use] + pub fn timezone(&self) -> Option { + Some(self.timezone_offset()?.into()) + } + + #[inline] + #[must_use] + pub fn timezone_offset(&self) -> Option { + self.timestamp.timezone_offset() + } + + #[inline] + #[must_use] + pub fn adjust(&self, timezone_offset: Option) -> Option { + Some(Self { + timestamp: self.timestamp.adjust(timezone_offset)?, + }) + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 18] { + self.timestamp.to_be_bytes() + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.timestamp.is_identical_with(other.timestamp) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). +impl From for GMonthDay { + #[inline] + fn from(date_time: DateTime) -> Self { + Self::new( + date_time.month(), + date_time.day(), + date_time.timezone_offset(), + ) + .expect("Casting from xsd:dateTime to xsd:gMonthDay can't fail") + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). 
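+// For instance (illustrative values), the recurring month-day is extracted:
+//
+//     assert_eq!(
+//         GMonthDay::from(Date::from_str("2004-12-25+07:00")?),
+//         GMonthDay::from_str("--12-25+07:00")?
+//     );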
+impl From for GMonthDay { + #[inline] + fn from(date: Date) -> Self { + Self::new(date.month(), date.day(), date.timezone_offset()) + .expect("Casting from xsd:date to xsd:gMonthDay can't fail") + } +} + +impl FromStr for GMonthDay { + type Err = ParseDateTimeError; + + fn from_str(input: &str) -> Result { + ensure_complete(input, g_month_day_lexical_rep) + } +} + +impl fmt::Display for GMonthDay { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "--{:02}-{:02}", self.month(), self.day())?; + if let Some(timezone_offset) = self.timezone_offset() { + write!(f, "{timezone_offset}")?; + } + Ok(()) + } +} + +/// [XML Schema `gMonth` datatype](https://www.w3.org/TR/xmlschema11-2/#gMonth) +/// +/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`], +/// when combined with the year 1972 and the day-time 31T00:00:00, and an optional timezone offset in minutes. +#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash)] +pub struct GMonth { + timestamp: Timestamp, +} + +impl GMonth { + #[inline] + fn new( + month: u8, + timezone_offset: Option, + ) -> Result { + Ok(Self { + timestamp: Timestamp::new(&DateTimeSevenPropertyModel { + year: None, + month: Some(month), + day: None, + hour: None, + minute: None, + second: None, + timezone_offset, + })?, + }) + } + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + timestamp: Timestamp::from_be_bytes(bytes), + } + } + + #[inline] + #[must_use] + pub fn month(&self) -> u8 { + self.timestamp.month() + } + + #[inline] + #[must_use] + pub fn timezone(&self) -> Option { + Some(self.timezone_offset()?.into()) + } + + #[inline] + #[must_use] + pub fn timezone_offset(&self) -> Option { + self.timestamp.timezone_offset() + } + + #[inline] + #[must_use] + pub fn adjust(&self, timezone_offset: Option) -> Option { + Some(Self { + timestamp: self.timestamp.adjust(timezone_offset)?, + }) + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 18] { + self.timestamp.to_be_bytes() + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.timestamp.is_identical_with(other.timestamp) + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). +impl From for GMonth { + #[inline] + fn from(date_time: DateTime) -> Self { + Self::new(date_time.month(), date_time.timezone_offset()) + .expect("Casting from xsd:dateTime to xsd:gMonth can't fail") + } +} + +/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). 
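+// For instance (illustrative values):
+//
+//     assert_eq!(
+//         GMonth::from(Date::from_str("1999-05-31")?),
+//         GMonth::from_str("--05")?
+//     );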
+impl From<Date> for GMonth {
+    #[inline]
+    fn from(date: Date) -> Self {
+        Self::new(date.month(), date.timezone_offset())
+            .expect("Casting from xsd:date to xsd:gMonth can't fail")
+    }
+}
+
+impl From<GYearMonth> for GMonth {
+    #[inline]
+    fn from(year_month: GYearMonth) -> Self {
+        Self::new(year_month.month(), year_month.timezone_offset())
+            .expect("Casting from xsd:gYearMonth to xsd:gMonth can't fail")
+    }
+}
+
+impl From<GMonthDay> for GMonth {
+    #[inline]
+    fn from(month_day: GMonthDay) -> Self {
+        Self::new(month_day.month(), month_day.timezone_offset())
+            .expect("Casting from xsd:gMonthDay to xsd:gMonth can't fail")
+    }
+}
+
+impl FromStr for GMonth {
+    type Err = ParseDateTimeError;
+
+    fn from_str(input: &str) -> Result<Self, Self::Err> {
+        ensure_complete(input, g_month_lexical_rep)
+    }
+}
+
+impl fmt::Display for GMonth {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "--{:02}", self.month())?;
+        if let Some(timezone_offset) = self.timezone_offset() {
+            write!(f, "{timezone_offset}")?;
+        }
+        Ok(())
+    }
+}
+
+/// [XML Schema `gDay` datatype](https://www.w3.org/TR/xmlschema11-2/#gDay)
+///
+/// It encodes the value using a number of seconds from the Gregorian calendar era using a [`Decimal`],
+/// when combined with the year-month 1972-12 and the time 00:00:00, and an optional timezone offset in minutes.
+#[derive(Eq, PartialEq, PartialOrd, Debug, Clone, Copy, Hash)]
+pub struct GDay {
+    timestamp: Timestamp,
+}
+
+impl GDay {
+    #[inline]
+    fn new(
+        day: u8,
+        timezone_offset: Option<TimezoneOffset>,
+    ) -> Result<Self, DateTimeOverflowError> {
+        Ok(Self {
+            timestamp: Timestamp::new(&DateTimeSevenPropertyModel {
+                year: None,
+                month: None,
+                day: Some(day),
+                hour: None,
+                minute: None,
+                second: None,
+                timezone_offset,
+            })?,
+        })
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn from_be_bytes(bytes: [u8; 18]) -> Self {
+        Self {
+            timestamp: Timestamp::from_be_bytes(bytes),
+        }
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn day(&self) -> u8 {
+        self.timestamp.day()
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn timezone(&self) -> Option<DayTimeDuration> {
+        Some(self.timezone_offset()?.into())
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn timezone_offset(&self) -> Option<TimezoneOffset> {
+        self.timestamp.timezone_offset()
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn adjust(&self, timezone_offset: Option<TimezoneOffset>) -> Option<Self> {
+        Some(Self {
+            timestamp: self.timestamp.adjust(timezone_offset)?,
+        })
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn to_be_bytes(self) -> [u8; 18] {
+        self.timestamp.to_be_bytes()
+    }
+
+    /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity).
+    #[inline]
+    #[must_use]
+    pub fn is_identical_with(self, other: Self) -> bool {
+        self.timestamp.is_identical_with(other.timestamp)
+    }
+}
+
+/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
+impl From<DateTime> for GDay {
+    #[inline]
+    fn from(date_time: DateTime) -> Self {
+        Self::new(date_time.day(), date_time.timezone_offset())
+            .expect("Casting from xsd:dateTime to xsd:gDay can't fail")
+    }
+}
+
+/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
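+// For instance (illustrative values), only the day and offset survive the cast:
+//
+//     assert_eq!(
+//         GDay::from(Date::from_str("2000-01-15+02:00")?),
+//         GDay::from_str("---15+02:00")?
+//     );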
+impl From<Date> for GDay {
+    #[inline]
+    fn from(date: Date) -> Self {
+        Self::new(date.day(), date.timezone_offset())
+            .expect("Casting from xsd:date to xsd:gDay can't fail")
+    }
+}
+
+impl From<GMonthDay> for GDay {
+    #[inline]
+    fn from(month_day: GMonthDay) -> Self {
+        Self::new(month_day.day(), month_day.timezone_offset())
+            .expect("Casting from xsd:gMonthDay to xsd:gDay can't fail")
+    }
+}
+
+impl FromStr for GDay {
+    type Err = ParseDateTimeError;
+
+    fn from_str(input: &str) -> Result<Self, Self::Err> {
+        ensure_complete(input, g_day_lexical_rep)
+    }
+}
+
+impl fmt::Display for GDay {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "---{:02}", self.day())?;
+        if let Some(timezone_offset) = self.timezone_offset() {
+            write!(f, "{timezone_offset}")?;
+        }
+        Ok(())
+    }
+}
+
+/// A timezone offset with respect to UTC.
+///
+/// It is encoded as a number of minutes between -PT14H and PT14H.
+#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash, Serialize, Deserialize)]
+pub struct TimezoneOffset {
+    offset: i16, // in minutes with respect to UTC
+}
+
+impl TimezoneOffset {
+    pub const MAX: Self = Self { offset: 14 * 60 };
+    pub const MIN: Self = Self { offset: -14 * 60 };
+    pub const UTC: Self = Self { offset: 0 };
+
+    /// From an offset in minutes with respect to UTC
+    #[inline]
+    pub fn new(offset_in_minutes: i16) -> Result<Self, InvalidTimezoneError> {
+        let value = Self {
+            offset: offset_in_minutes,
+        };
+        if Self::MIN <= value && value <= Self::MAX {
+            Ok(value)
+        } else {
+            Err(InvalidTimezoneError {
+                offset_in_minutes: offset_in_minutes.into(),
+            })
+        }
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn from_be_bytes(bytes: [u8; 2]) -> Self {
+        Self {
+            offset: i16::from_be_bytes(bytes),
+        }
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn to_be_bytes(self) -> [u8; 2] {
+        self.offset.to_be_bytes()
+    }
+}
+
+impl TryFrom<DayTimeDuration> for TimezoneOffset {
+    type Error = InvalidTimezoneError;
+
+    #[inline]
+    fn try_from(value: DayTimeDuration) -> Result<Self, Self::Error> {
+        let offset_in_minutes = value.minutes() + value.hours() * 60;
+        let result = Self::new(
+            offset_in_minutes
+                .try_into()
+                .map_err(|_| Self::Error { offset_in_minutes })?,
+        )?;
+        if DayTimeDuration::from(result) == value {
+            Ok(result)
+        } else {
+            // The value is not an integral number of minutes, or it overflowed
+            Err(Self::Error { offset_in_minutes })
+        }
+    }
+}
+
+impl TryFrom<Duration> for TimezoneOffset {
+    type Error = InvalidTimezoneError;
+
+    #[inline]
+    fn try_from(value: Duration) -> Result<Self, Self::Error> {
+        DayTimeDuration::try_from(value)
+            .map_err(|_| Self::Error {
+                offset_in_minutes: 0,
+            })?
+ .try_into() + } +} + +impl From for DayTimeDuration { + #[inline] + fn from(value: TimezoneOffset) -> Self { + Self::new(i64::from(value.offset) * 60) + } +} + +impl From for Duration { + #[inline] + fn from(value: TimezoneOffset) -> Self { + DayTimeDuration::from(value).into() + } +} + +impl fmt::Display for TimezoneOffset { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.offset { + 0 => f.write_str("Z"), + offset if offset < 0 => write!(f, "-{:02}:{:02}", -offset / 60, -offset % 60), + offset => write!(f, "+{:02}:{:02}", offset / 60, offset % 60), + } + } +} + +/// [The Date/time Seven-property model](https://www.w3.org/TR/xmlschema11-2/#dt-dt-7PropMod) +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +struct DateTimeSevenPropertyModel { + year: Option, + month: Option, + day: Option, + hour: Option, + minute: Option, + second: Option, + timezone_offset: Option, +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +struct Timestamp { + value: Decimal, + timezone_offset: Option, +} + +impl PartialEq for Timestamp { + #[inline] + fn eq(&self, other: &Self) -> bool { + match (self.timezone_offset, other.timezone_offset) { + (Some(_), Some(_)) | (None, None) => self.value.eq(&other.value), + _ => false, // TODO: implicit timezone + } + } +} + +impl Eq for Timestamp {} + +impl PartialOrd for Timestamp { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + match (self.timezone_offset, other.timezone_offset) { + (Some(_), Some(_)) | (None, None) => self.value.partial_cmp(&other.value), + (Some(_), None) => { + let plus_result = self + .value + .partial_cmp(&(other.value.checked_add(14 * 3600)?)); + let minus_result = self + .value + .partial_cmp(&(other.value.checked_sub(14 * 3600)?)); + if plus_result == minus_result { + plus_result + } else { + None + } + } + (None, Some(_)) => { + let plus_result = self.value.checked_add(14 * 3600)?.partial_cmp(&other.value); + let minus_result = self.value.checked_sub(14 * 3600)?.partial_cmp(&other.value); + if plus_result == minus_result { + plus_result + } else { + None + } + } + } + } +} + +impl Hash for Timestamp { + #[inline] + fn hash(&self, state: &mut H) { + self.value.hash(state) + } +} + +impl Timestamp { + pub const MAX: Self = Self { + value: Decimal::MAX, + timezone_offset: Some(TimezoneOffset::MAX), + }; + pub const MIN: Self = Self { + value: Decimal::MIN, + timezone_offset: Some(TimezoneOffset::MIN), + }; + + #[inline] + fn new(props: &DateTimeSevenPropertyModel) -> Result { + Ok(Self { + timezone_offset: props.timezone_offset, + value: time_on_timeline(props).ok_or(DateTimeOverflowError)?, + }) + } + + #[inline] + fn now() -> Self { + Self::new( + &date_time_plus_duration( + since_unix_epoch(), + &DateTimeSevenPropertyModel { + year: Some(1970), + month: Some(1), + day: Some(1), + hour: Some(0), + minute: Some(0), + second: Some(Decimal::default()), + timezone_offset: Some(TimezoneOffset::UTC), + }, + ) + .expect("The current time seems way in the future, it's strange"), + ) + .expect("The current time seems way in the future, it's strange") + } + + #[inline] + fn from_be_bytes(bytes: [u8; 18]) -> Self { + Self { + value: Decimal::from_be_bytes(bytes[0..16].try_into().unwrap()), + timezone_offset: if bytes[16..18] == [u8::MAX; 2] { + None + } else { + Some(TimezoneOffset::from_be_bytes( + bytes[16..18].try_into().unwrap(), + )) + }, + } + } + + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + #[inline] + #[must_use] + fn year_month_day(&self) -> (i64, u8, u8) { + let 
mut days = (self.value.as_i128() + + i128::from(self.timezone_offset.unwrap_or(TimezoneOffset::UTC).offset) * 60) + .div_euclid(86400) + + 366; + + // Make days positive + let shift = if days < 0 { + let shift = days / 146_097 - 1; + days -= shift * 146_097; + shift * 400 + } else { + 0 + }; + + let year_mul_400 = days / 146_097; + days -= year_mul_400 * 146_097; + + days -= 1; + let year_mul_100 = days / 36524; + days -= year_mul_100 * 36524; + + days += 1; + let year_mul_4 = days / 1461; + days -= year_mul_4 * 1461; + + days -= 1; + let year_mod_4 = days / 365; + days -= year_mod_4 * 365; + + let year = + (400 * year_mul_400 + 100 * year_mul_100 + 4 * year_mul_4 + year_mod_4 + shift) as i64; + + let is_leap_year = (year_mul_100 == 0 || year_mul_4 != 0) && year_mod_4 == 0; + days += i128::from(is_leap_year); + + let mut month = 0; + for month_i in 1..=12 { + let days_in_month = i128::from(days_in_month(Some(year), month_i)); + if days_in_month > days { + month = month_i; + break; + } + days -= days_in_month + } + let day = days as u8 + 1; + + (year, month, day) + } + + #[inline] + #[must_use] + fn year(&self) -> i64 { + let (year, _, _) = self.year_month_day(); + year + } + + #[inline] + #[must_use] + fn month(&self) -> u8 { + let (_, month, _) = self.year_month_day(); + month + } + + #[inline] + #[must_use] + fn day(&self) -> u8 { + let (_, _, day) = self.year_month_day(); + day + } + + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + #[inline] + #[must_use] + fn hour(&self) -> u8 { + (((self.value.as_i128() + + i128::from(self.timezone_offset.unwrap_or(TimezoneOffset::UTC).offset) * 60) + .rem_euclid(86400)) + / 3600) as u8 + } + + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + #[inline] + #[must_use] + fn minute(&self) -> u8 { + (((self.value.as_i128() + + i128::from(self.timezone_offset.unwrap_or(TimezoneOffset::UTC).offset) * 60) + .rem_euclid(3600)) + / 60) as u8 + } + + #[inline] + #[must_use] + fn second(&self) -> Decimal { + self.value + .checked_rem_euclid(60) + .unwrap() + .checked_abs() + .unwrap() + } + + #[inline] + #[must_use] + const fn timezone_offset(&self) -> Option { + self.timezone_offset + } + + #[inline] + #[must_use] + fn checked_add_seconds(&self, seconds: impl Into) -> Option { + Some(Self { + value: self.value.checked_add(seconds.into())?, + timezone_offset: self.timezone_offset, + }) + } + + #[inline] + #[must_use] + fn checked_sub(&self, rhs: Self) -> Option { + match (self.timezone_offset, rhs.timezone_offset) { + (Some(_), Some(_)) | (None, None) => { + Some(DayTimeDuration::new(self.value.checked_sub(rhs.value)?)) + } + _ => None, // TODO: implicit timezone + } + } + + #[inline] + #[must_use] + fn checked_sub_seconds(&self, seconds: Decimal) -> Option { + Some(Self { + value: self.value.checked_sub(seconds)?, + timezone_offset: self.timezone_offset, + }) + } + + #[inline] + #[must_use] + fn adjust(&self, timezone_offset: Option) -> Option { + Some(if let Some(from_timezone) = self.timezone_offset { + if let Some(to_timezone) = timezone_offset { + Self { + value: self.value, // We keep the timestamp + timezone_offset: Some(to_timezone), + } + } else { + Self { + value: self + .value + .checked_add(i64::from(from_timezone.offset) * 60)?, /* We keep the literal value */ + timezone_offset: None, + } + } + } else if let Some(to_timezone) = timezone_offset { + Self { + value: self.value.checked_sub(i64::from(to_timezone.offset) * 60)?, /* We keep the literal value */ + timezone_offset: Some(to_timezone), + } + } else { + 
Self { + value: self.value, + timezone_offset: None, + } + }) + } + + #[inline] + #[must_use] + fn to_be_bytes(self) -> [u8; 18] { + let mut bytes = [0; 18]; + bytes[0..16].copy_from_slice(&self.value.to_be_bytes()); + bytes[16..18].copy_from_slice(&match &self.timezone_offset { + Some(timezone_offset) => timezone_offset.to_be_bytes(), + None => [u8::MAX; 2], + }); + bytes + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.value == other.value && self.timezone_offset == other.timezone_offset + } +} + +#[cfg(feature = "custom-now")] +#[allow(unsafe_code)] +pub fn since_unix_epoch() -> Duration { + extern "Rust" { + fn custom_ox_now() -> Duration; + } + + // SAFETY: Must be defined, if not compilation fails + unsafe { custom_ox_now() } +} + +#[cfg(all( + feature = "js", + not(feature = "custom-now"), + target_family = "wasm", + target_os = "unknown" +))] +fn since_unix_epoch() -> Duration { + DayTimeDuration::new( + Decimal::try_from(crate::oxsdatatypes::Double::from( + js_sys::Date::now() / 1000., + )) + .expect("The current time seems way in the future, it's strange"), + ) + .into() +} + +#[cfg(not(any( + feature = "custom-now", + all(feature = "js", target_family = "wasm", target_os = "unknown") +)))] +fn since_unix_epoch() -> Duration { + use std::time::SystemTime; + + SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("System time before UNIX epoch") + .try_into() + .expect("The current time seems way in the future, it's strange") +} + +/// The [normalizeMonth](https://www.w3.org/TR/xmlschema11-2/#f-dt-normMo) function +fn normalize_month(yr: i64, mo: i64) -> Option<(i64, u8)> { + if mo >= 0 { + let yr = yr.checked_add(mo.checked_sub(1)?.checked_div(12)?)?; + let mo = u8::try_from(mo.checked_sub(1)?.checked_rem(12)?.abs().checked_add(1)?).ok()?; + Some((yr, mo)) + } else { + // Needed to make it work with negative durations + let yr = yr.checked_add(mo.checked_sub(1)?.checked_div(12)?.checked_sub(1)?)?; + let mo = u8::try_from( + 12_i64 + .checked_add(mo.checked_sub(1)?.checked_rem(12)?)? 
+ .checked_add(1)?, + ) + .ok()?; + Some((yr, mo)) + } +} + +/// The [normalizeDa](https://www.w3.org/TR/xmlschema11-2/#f-dt-normDa) function +fn normalize_day(yr: i64, mo: i64, mut da: i64) -> Option<(i64, u8, u8)> { + let (mut yr, mut mo) = normalize_month(yr, mo)?; + loop { + if da <= 0 { + let (yr2, mo2) = normalize_month(yr, i64::from(mo).checked_sub(1)?)?; + yr = yr2; + mo = mo2; + da = da.checked_add(days_in_month(Some(yr), mo).into())?; + } else if da > days_in_month(Some(yr), mo).into() { + da = da.checked_sub(days_in_month(Some(yr), mo).into())?; + let (yr2, mo2) = normalize_month(yr, i64::from(mo).checked_add(1)?)?; + yr = yr2; + mo = mo2; + } else { + return Some((yr, mo, u8::try_from(da).ok()?)); + }; + } +} + +/// The [normalizeMinute](https://www.w3.org/TR/xmlschema11-2/#f-dt-normMi) function +fn normalize_minute(yr: i64, mo: i64, da: i64, hr: i64, mi: i64) -> Option<(i64, u8, u8, u8, u8)> { + let hr = hr.checked_add(mi.checked_div(60)?)?; + let mi = mi.checked_rem(60)?; + let da = da.checked_add(hr.checked_div(24)?)?; + let hr = hr.checked_rem(24)?; + let (yr, mo, da) = normalize_day(yr, mo, da)?; + Some((yr, mo, da, u8::try_from(hr).ok()?, u8::try_from(mi).ok()?)) +} + +/// The [normalizeSecond](https://www.w3.org/TR/xmlschema11-2/#f-dt-normSe) function +fn normalize_second( + yr: i64, + mo: i64, + da: i64, + hr: i64, + mi: i64, + se: Decimal, +) -> Option<(i64, u8, u8, u8, u8, Decimal)> { + let mi = mi.checked_add(i64::try_from(se.as_i128().checked_div(60)?).ok()?)?; // TODO: good idea? + let se = se.checked_rem(60)?; + let (yr, mo, da, hr, mi) = normalize_minute(yr, mo, da, hr, mi)?; + Some((yr, mo, da, hr, mi, se)) +} + +/// The [daysInMonth](https://www.w3.org/TR/xmlschema11-2/#f-daysInMonth) function +fn days_in_month(y: Option, m: u8) -> u8 { + match m { + 2 => { + if let Some(y) = y { + if y % 4 != 0 || (y % 100 == 0 && y % 400 != 0) { + 28 + } else { + 29 + } + } else { + 28 + } + } + 4 | 6 | 9 | 11 => 30, + _ => 31, + } +} + +/// The [dateTimePlusDuration](https://www.w3.org/TR/xmlschema11-2/#vp-dt-dateTimePlusDuration) function +fn date_time_plus_duration( + du: Duration, + dt: &DateTimeSevenPropertyModel, +) -> Option { + let yr = dt.year.unwrap_or(1); + let mo = dt.month.unwrap_or(1); + let da = dt.day.unwrap_or(1); + let hr = dt.hour.unwrap_or(0); + let mi = dt.minute.unwrap_or(0); + let se = dt.second.unwrap_or_default(); + let mo = i64::from(mo).checked_add(du.all_months())?; + let (yr, mo) = normalize_month(yr, mo)?; + let da = min(da, days_in_month(Some(yr), mo)); + let se = se.checked_add(du.all_seconds())?; + let (yr, mo, da, hr, mi, se) = + normalize_second(yr, mo.into(), da.into(), hr.into(), mi.into(), se)?; + + Some(DateTimeSevenPropertyModel { + year: dt.year.map(|_| yr), + month: dt.month.map(|_| mo), + day: dt.day.map(|_| da), + hour: dt.hour.map(|_| hr), + minute: dt.minute.map(|_| mi), + second: dt.second.map(|_| se), + timezone_offset: dt.timezone_offset, + }) +} + +/// The [timeOnTimeline](https://www.w3.org/TR/xmlschema11-2/#vp-dt-timeOnTimeline) function +fn time_on_timeline(props: &DateTimeSevenPropertyModel) -> Option { + let yr = props.year.map_or(1971, |y| y - 1); + let mo = props.month.unwrap_or(12); + let da = props + .day + .map_or_else(|| days_in_month(Some(yr + 1), mo) - 1, |d| d - 1); + let hr = props.hour.unwrap_or(0); + let mi = i128::from(props.minute.unwrap_or(0)) + - i128::from(props.timezone_offset.unwrap_or(TimezoneOffset::UTC).offset); + let se = props.second.unwrap_or_default(); + + Decimal::try_from( + 31_536_000 * 
i128::from(yr)
+            + 86400 * i128::from(yr.div_euclid(400) - yr.div_euclid(100) + yr.div_euclid(4))
+            + 86400
+                * (1..mo)
+                    .map(|m| i128::from(days_in_month(Some(yr + 1), m)))
+                    .sum::<i128>()
+            + 86400 * i128::from(da)
+            + 3600 * i128::from(hr)
+            + 60 * mi,
+    )
+    .ok()?
+    .checked_add(se)
+}
+
+/// A parsing error
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub struct ParseDateTimeError(#[from] ParseDateTimeErrorKind);
+
+#[derive(Debug, Clone, thiserror::Error)]
+enum ParseDateTimeErrorKind {
+    #[error("{day} is not a valid day of {month}")]
+    InvalidDayOfMonth { day: u8, month: u8 },
+    #[error(transparent)]
+    Overflow(#[from] DateTimeOverflowError),
+    #[error(transparent)]
+    InvalidTimezone(InvalidTimezoneError),
+    #[error("{0}")]
+    Message(&'static str),
+}
+
+impl ParseDateTimeError {
+    const fn msg(message: &'static str) -> Self {
+        Self(ParseDateTimeErrorKind::Message(message))
+    }
+}
+
+// [16] dateTimeLexicalRep ::= yearFrag '-' monthFrag '-' dayFrag 'T' ((hourFrag ':' minuteFrag ':' secondFrag) | endOfDayFrag) timezoneFrag?
+fn date_time_lexical_rep(input: &str) -> Result<(DateTime, &str), ParseDateTimeError> {
+    let (year, input) = year_frag(input)?;
+    let input = expect_char(input, '-', "The year and month must be separated by '-'")?;
+    let (month, input) = month_frag(input)?;
+    let input = expect_char(input, '-', "The month and day must be separated by '-'")?;
+    let (day, input) = day_frag(input)?;
+    let input = expect_char(input, 'T', "The date and time must be separated by 'T'")?;
+    let (hour, input) = hour_frag(input)?;
+    let input = expect_char(input, ':', "The hours and minutes must be separated by ':'")?;
+    let (minute, input) = minute_frag(input)?;
+    let input = expect_char(
+        input,
+        ':',
+        "The minutes and seconds must be separated by ':'",
+    )?;
+    let (second, input) = second_frag(input)?;
+    // We validate 24:00:00: hour 24 is only allowed for the end-of-day value
+    if hour == 24 && (minute != 0 || second != Decimal::from(0)) {
+        return Err(ParseDateTimeError::msg(
+            "Times are not allowed to be after 24:00:00",
+        ));
+    }
+    let (timezone_offset, input) = optional_end(input, timezone_frag)?;
+    validate_day_of_month(Some(year), month, day)?;
+    Ok((
+        DateTime::new(year, month, day, hour, minute, second, timezone_offset)?,
+        input,
+    ))
+}
+
+// [17] timeLexicalRep ::= ((hourFrag ':' minuteFrag ':' secondFrag) | endOfDayFrag) timezoneFrag?
+fn time_lexical_rep(input: &str) -> Result<(Time, &str), ParseDateTimeError> {
+    let (hour, input) = hour_frag(input)?;
+    let input = expect_char(input, ':', "The hours and minutes must be separated by ':'")?;
+    let (minute, input) = minute_frag(input)?;
+    let input = expect_char(
+        input,
+        ':',
+        "The minutes and seconds must be separated by ':'",
+    )?;
+    let (second, input) = second_frag(input)?;
+    // We validate 24:00:00: hour 24 is only allowed for the end-of-day value
+    if hour == 24 && (minute != 0 || second != Decimal::from(0)) {
+        return Err(ParseDateTimeError::msg(
+            "Times are not allowed to be after 24:00:00",
+        ));
+    }
+    let (timezone_offset, input) = optional_end(input, timezone_frag)?;
+    Ok((Time::new(hour, minute, second, timezone_offset)?, input))
+}
+
+// [18] dateLexicalRep ::= yearFrag '-' monthFrag '-' dayFrag timezoneFrag?
Constraint: Day-of-month Representations +fn date_lexical_rep(input: &str) -> Result<(Date, &str), ParseDateTimeError> { + let (year, input) = year_frag(input)?; + let input = expect_char(input, '-', "The year and month must be separated by '-'")?; + let (month, input) = month_frag(input)?; + let input = expect_char(input, '-', "The month and day must be separated by '-'")?; + let (day, input) = day_frag(input)?; + let (timezone_offset, input) = optional_end(input, timezone_frag)?; + validate_day_of_month(Some(year), month, day)?; + Ok((Date::new(year, month, day, timezone_offset)?, input)) +} + +// [19] gYearMonthLexicalRep ::= yearFrag '-' monthFrag timezoneFrag? +fn g_year_month_lexical_rep(input: &str) -> Result<(GYearMonth, &str), ParseDateTimeError> { + let (year, input) = year_frag(input)?; + let input = expect_char(input, '-', "The year and month must be separated by '-'")?; + let (month, input) = month_frag(input)?; + let (timezone_offset, input) = optional_end(input, timezone_frag)?; + Ok((GYearMonth::new(year, month, timezone_offset)?, input)) +} + +// [20] gYearLexicalRep ::= yearFrag timezoneFrag? +fn g_year_lexical_rep(input: &str) -> Result<(GYear, &str), ParseDateTimeError> { + let (year, input) = year_frag(input)?; + let (timezone_offset, input) = optional_end(input, timezone_frag)?; + Ok((GYear::new(year, timezone_offset)?, input)) +} + +// [21] gMonthDayLexicalRep ::= '--' monthFrag '-' dayFrag timezoneFrag? Constraint: Day-of-month Representations +fn g_month_day_lexical_rep(input: &str) -> Result<(GMonthDay, &str), ParseDateTimeError> { + let input = expect_char(input, '-', "gMonthDay values must start with '--'")?; + let input = expect_char(input, '-', "gMonthDay values must start with '--'")?; + let (month, input) = month_frag(input)?; + let input = expect_char(input, '-', "The month and day must be separated by '-'")?; + let (day, input) = day_frag(input)?; + let (timezone_offset, input) = optional_end(input, timezone_frag)?; + validate_day_of_month(None, month, day)?; + Ok((GMonthDay::new(month, day, timezone_offset)?, input)) +} + +// [22] gDayLexicalRep ::= '---' dayFrag timezoneFrag? +fn g_day_lexical_rep(input: &str) -> Result<(GDay, &str), ParseDateTimeError> { + let input = expect_char(input, '-', "gDay values must start with '---'")?; + let input = expect_char(input, '-', "gDay values must start with '---'")?; + let input = expect_char(input, '-', "gDay values must start with '---'")?; + let (day, input) = day_frag(input)?; + let (timezone_offset, input) = optional_end(input, timezone_frag)?; + Ok((GDay::new(day, timezone_offset)?, input)) +} + +// [23] gMonthLexicalRep ::= '--' monthFrag timezoneFrag? +fn g_month_lexical_rep(input: &str) -> Result<(GMonth, &str), ParseDateTimeError> { + let input = expect_char(input, '-', "gMonth values must start with '--'")?; + let input = expect_char(input, '-', "gMonth values must start with '--'")?; + let (month, input) = month_frag(input)?; + let (timezone_offset, input) = optional_end(input, timezone_frag)?; + Ok((GMonth::new(month, timezone_offset)?, input)) +} + +// [56] yearFrag ::= '-'? 
(([1-9] digit digit digit+)) | ('0' digit digit digit)) +fn year_frag(input: &str) -> Result<(i64, &str), ParseDateTimeError> { + let (sign, input) = if let Some(left) = input.strip_prefix('-') { + (-1, left) + } else { + (1, input) + }; + let (number_str, input) = integer_prefix(input); + if number_str.len() < 4 { + return Err(ParseDateTimeError::msg( + "The year should be encoded on 4 digits", + )); + } + if number_str.len() > 4 && number_str.starts_with('0') { + return Err(ParseDateTimeError::msg( + "The years value must not start with 0 if it can be encoded in at least 4 digits", + )); + } + let number = i64::from_str(number_str).expect("valid integer"); + Ok((sign * number, input)) +} + +// [57] monthFrag ::= ('0' [1-9]) | ('1' [0-2]) +fn month_frag(input: &str) -> Result<(u8, &str), ParseDateTimeError> { + let (number_str, input) = integer_prefix(input); + if number_str.len() != 2 { + return Err(ParseDateTimeError::msg( + "Month must be encoded with two digits", + )); + } + let number = u8::from_str(number_str).expect("valid integer"); + if !(1..=12).contains(&number) { + return Err(ParseDateTimeError::msg("Month must be between 01 and 12")); + } + Ok((number, input)) +} + +// [58] dayFrag ::= ('0' [1-9]) | ([12] digit) | ('3' [01]) +fn day_frag(input: &str) -> Result<(u8, &str), ParseDateTimeError> { + let (number_str, input) = integer_prefix(input); + if number_str.len() != 2 { + return Err(ParseDateTimeError::msg( + "Day must be encoded with two digits", + )); + } + let number = u8::from_str(number_str).expect("valid integer"); + if !(1..=31).contains(&number) { + return Err(ParseDateTimeError::msg("Day must be between 01 and 31")); + } + Ok((number, input)) +} + +// [59] hourFrag ::= ([01] digit) | ('2' [0-3]) +// We also allow 24 for ease of parsing +fn hour_frag(input: &str) -> Result<(u8, &str), ParseDateTimeError> { + let (number_str, input) = integer_prefix(input); + if number_str.len() != 2 { + return Err(ParseDateTimeError::msg( + "Hours must be encoded with two digits", + )); + } + let number = u8::from_str(number_str).expect("valid integer"); + if !(0..=24).contains(&number) { + return Err(ParseDateTimeError::msg("Hours must be between 00 and 24")); + } + Ok((number, input)) +} + +// [60] minuteFrag ::= [0-5] digit +fn minute_frag(input: &str) -> Result<(u8, &str), ParseDateTimeError> { + let (number_str, input) = integer_prefix(input); + if number_str.len() != 2 { + return Err(ParseDateTimeError::msg( + "Minutes must be encoded with two digits", + )); + } + let number = u8::from_str(number_str).expect("valid integer"); + if !(0..=59).contains(&number) { + return Err(ParseDateTimeError::msg("Minutes must be between 00 and 59")); + } + Ok((number, input)) +} + +// [61] secondFrag ::= ([0-5] digit) ('.' digit+)? 
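+// Observable through the public API (illustrative sketch; a single-digit
+// whole-seconds part is rejected, and the decimal fraction is preserved):
+//
+//     assert_eq!(
+//         Time::from_str("01:02:03.5")?.second(),
+//         Decimal::from_str("3.5")?
+//     );
+//     assert!(Time::from_str("01:02:3.5").is_err());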
+fn second_frag(input: &str) -> Result<(Decimal, &str), ParseDateTimeError> {
+    let (number_str, input) = decimal_prefix(input);
+    let (before_dot_str, _) = number_str.split_once('.').unwrap_or((number_str, ""));
+    if before_dot_str.len() != 2 {
+        return Err(ParseDateTimeError::msg(
+            "Seconds must be encoded with two digits",
+        ));
+    }
+    let number = Decimal::from_str(number_str)
+        .map_err(|_| ParseDateTimeError::msg("The second precision is too large"))?;
+    if number < Decimal::from(0) || number >= Decimal::from(60) {
+        return Err(ParseDateTimeError::msg("Seconds must be between 00 and 60"));
+    }
+    if number_str.ends_with('.') {
+        return Err(ParseDateTimeError::msg(
+            "Seconds are not allowed to end with a dot",
+        ));
+    }
+    Ok((number, input))
+}
+
+// [63] timezoneFrag ::= 'Z' | ('+' | '-') (('0' digit | '1' [0-3]) ':' minuteFrag | '14:00')
+fn timezone_frag(input: &str) -> Result<(TimezoneOffset, &str), ParseDateTimeError> {
+    if let Some(left) = input.strip_prefix('Z') {
+        return Ok((TimezoneOffset::UTC, left));
+    }
+    let (sign, input) = if let Some(left) = input.strip_prefix('-') {
+        (-1, left)
+    } else if let Some(left) = input.strip_prefix('+') {
+        (1, left)
+    } else {
+        (1, input)
+    };
+
+    let (hour_str, input) = integer_prefix(input);
+    if hour_str.len() != 2 {
+        return Err(ParseDateTimeError::msg(
+            "The timezone hours must be encoded with two digits",
+        ));
+    }
+    let hours = i16::from_str(hour_str).expect("valid integer");
+
+    let input = expect_char(
+        input,
+        ':',
+        "The timezone hours and minutes must be separated by ':'",
+    )?;
+    let (minutes, input) = minute_frag(input)?;
+
+    if hours > 13 && !(hours == 14 && minutes == 0) {
+        return Err(ParseDateTimeError::msg(
+            "The timezone hours must be between 00 and 13, or exactly 14:00",
+        ));
+    }
+
+    Ok((
+        TimezoneOffset::new(sign * (hours * 60 + i16::from(minutes)))
+            .map_err(|e| ParseDateTimeError(ParseDateTimeErrorKind::InvalidTimezone(e)))?,
+        input,
+    ))
+}
+
+fn ensure_complete<T>(
+    input: &str,
+    parse: impl FnOnce(&str) -> Result<(T, &str), ParseDateTimeError>,
+) -> Result<T, ParseDateTimeError> {
+    let (result, left) = parse(input)?;
+    if !left.is_empty() {
+        return Err(ParseDateTimeError::msg("Unrecognized value suffix"));
+    }
+    Ok(result)
+}
+
+fn expect_char<'a>(
+    input: &'a str,
+    constant: char,
+    error_message: &'static str,
+) -> Result<&'a str, ParseDateTimeError> {
+    if let Some(left) = input.strip_prefix(constant) {
+        Ok(left)
+    } else {
+        Err(ParseDateTimeError::msg(error_message))
+    }
+}
+
+fn integer_prefix(input: &str) -> (&str, &str) {
+    let mut end = input.len();
+    for (i, c) in input.char_indices() {
+        if !c.is_ascii_digit() {
+            end = i;
+            break;
+        }
+    }
+    input.split_at(end)
+}
+
+fn decimal_prefix(input: &str) -> (&str, &str) {
+    let mut end = input.len();
+    let mut dot_seen = false;
+    for (i, c) in input.char_indices() {
+        if c.is_ascii_digit() {
+            // Ok
+        } else if c == '.'
&& !dot_seen { + dot_seen = true; + } else { + end = i; + break; + } + } + input.split_at(end) +} + +fn optional_end( + input: &str, + parse: impl FnOnce(&str) -> Result<(T, &str), ParseDateTimeError>, +) -> Result<(Option, &str), ParseDateTimeError> { + Ok(if input.is_empty() { + (None, input) + } else { + let (result, input) = parse(input)?; + (Some(result), input) + }) +} + +fn validate_day_of_month(year: Option, month: u8, day: u8) -> Result<(), ParseDateTimeError> { + // Constraint: Day-of-month Values + if day > days_in_month(year, month) { + return Err(ParseDateTimeError( + ParseDateTimeErrorKind::InvalidDayOfMonth { day, month }, + )); + } + Ok(()) +} + +/// An overflow during [`DateTime`]-related operations. +/// +/// Matches XPath [`FODT0001` error](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0001). +#[derive(Debug, Clone, Copy, thiserror::Error)] +#[error("overflow during xsd:dateTime computation")] +pub struct DateTimeOverflowError; + +impl From for ParseDateTimeError { + fn from(error: DateTimeOverflowError) -> Self { + Self(ParseDateTimeErrorKind::Overflow(error)) + } +} + +/// The value provided as timezone is not valid. +/// +/// Matches XPath [`FODT0003` error](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0003). +#[derive(Debug, Clone, Copy, thiserror::Error)] +#[error("invalid timezone offset {}:{}", + self.offset_in_minutes / 60, + self.offset_in_minutes.abs() % 60)] +pub struct InvalidTimezoneError { + offset_in_minutes: i64, +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + use std::error::Error; + + #[test] + fn from_str() -> Result<(), ParseDateTimeError> { + assert_eq!(Time::from_str("00:00:00Z")?.to_string(), "00:00:00Z"); + assert_eq!(Time::from_str("00:00:00+00:00")?.to_string(), "00:00:00Z"); + assert_eq!(Time::from_str("00:00:00-00:00")?.to_string(), "00:00:00Z"); + assert_eq!(Time::from_str("00:00:00")?.to_string(), "00:00:00"); + assert_eq!( + Time::from_str("00:00:00+02:00")?.to_string(), + "00:00:00+02:00" + ); + assert_eq!( + Time::from_str("00:00:00+14:00")?.to_string(), + "00:00:00+14:00" + ); + assert_eq!(Time::from_str("24:00:00")?.to_string(), "00:00:00"); + assert_eq!(Time::from_str("24:00:00.00")?.to_string(), "00:00:00"); + assert_eq!( + Time::from_str("23:59:59.9999999999")?.to_string(), + "23:59:59.9999999999" + ); + + assert_eq!(Date::from_str("0001-01-01Z")?.to_string(), "0001-01-01Z"); + assert_eq!(Date::from_str("0001-01-01")?.to_string(), "0001-01-01"); + assert_eq!( + DateTime::from_str("0001-01-01T00:00:00Z")?.to_string(), + "0001-01-01T00:00:00Z" + ); + assert_eq!( + DateTime::from_str("0001-01-01T00:00:00")?.to_string(), + "0001-01-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("1000000000-01-01T00:00:00")?.to_string(), + "1000000000-01-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("2001-12-31T23:59:59")?.to_string(), + "2001-12-31T23:59:59" + ); + assert_eq!( + DateTime::from_str("2004-12-31T23:59:59")?.to_string(), + "2004-12-31T23:59:59" + ); + assert_eq!( + DateTime::from_str("1900-12-31T23:59:59")?.to_string(), + "1900-12-31T23:59:59" + ); + assert_eq!( + DateTime::from_str("2000-12-31T23:59:59")?.to_string(), + "2000-12-31T23:59:59", + ); + assert_eq!( + DateTime::from_str("1899-12-31T23:59:59")?.to_string(), + "1899-12-31T23:59:59" + ); + + assert_eq!( + DateTime::from_str("2001-02-28T23:59:59")?.to_string(), + "2001-02-28T23:59:59" + ); + assert_eq!( + DateTime::from_str("2004-02-29T23:59:59")?.to_string(), + "2004-02-29T23:59:59" + ); + assert_eq!( + 
DateTime::from_str("1900-02-28T23:59:59")?.to_string(), + "1900-02-28T23:59:59" + ); + assert_eq!( + DateTime::from_str("2000-02-29T23:59:59")?.to_string(), + "2000-02-29T23:59:59", + ); + assert_eq!( + DateTime::from_str("1899-02-28T23:59:59")?.to_string(), + "1899-02-28T23:59:59" + ); + assert_eq!( + DateTime::from_str("2001-03-01T00:00:00")?.to_string(), + "2001-03-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("2004-03-01T00:00:00")?.to_string(), + "2004-03-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("1900-03-01T00:00:00")?.to_string(), + "1900-03-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("2000-03-01T00:00:00")?.to_string(), + "2000-03-01T00:00:00", + ); + assert_eq!( + DateTime::from_str("1899-03-01T00:00:00")?.to_string(), + "1899-03-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("-0899-03-01T00:00:00")?.to_string(), + "-0899-03-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("2000-01-01T00:00:00.1234567")?.to_string(), + "2000-01-01T00:00:00.1234567" + ); + assert_eq!( + DateTime::from_str("2000-01-01T00:00:12.1234567")?.to_string(), + "2000-01-01T00:00:12.1234567" + ); + assert_eq!( + Time::from_str("01:02:03.1234567")?.to_string(), + "01:02:03.1234567" + ); + assert_eq!( + Time::from_str("01:02:13.1234567")?.to_string(), + "01:02:13.1234567" + ); + + assert_eq!( + DateTime::from_str("-1000000000-01-01T00:00:00")?.to_string(), + "-1000000000-01-01T00:00:00" + ); + assert_eq!( + DateTime::from_str("-2001-12-31T23:59:59")?.to_string(), + "-2001-12-31T23:59:59" + ); + assert_eq!( + DateTime::from_str("-2004-12-31T23:59:59")?.to_string(), + "-2004-12-31T23:59:59" + ); + assert_eq!( + DateTime::from_str("-1900-12-31T23:59:59")?.to_string(), + "-1900-12-31T23:59:59" + ); + assert_eq!( + DateTime::from_str("-2000-12-31T23:59:59")?.to_string(), + "-2000-12-31T23:59:59" + ); + assert_eq!( + DateTime::from_str("-1899-12-31T23:59:59")?.to_string(), + "-1899-12-31T23:59:59" + ); + + assert_eq!( + GYearMonth::from_str("-1899-12+01:00")?.to_string(), + "-1899-12+01:00" + ); + assert_eq!(GYearMonth::from_str("-1899-12")?.to_string(), "-1899-12"); + assert_eq!(GYear::from_str("-1899+01:00")?.to_string(), "-1899+01:00"); + assert_eq!(GYear::from_str("-1899")?.to_string(), "-1899"); + assert_eq!( + GMonthDay::from_str("--01-01+01:00")?.to_string(), + "--01-01+01:00" + ); + assert_eq!(GMonthDay::from_str("--01-01")?.to_string(), "--01-01"); + assert_eq!(GDay::from_str("---01+01:00")?.to_string(), "---01+01:00"); + assert_eq!(GDay::from_str("---01")?.to_string(), "---01"); + assert_eq!(GMonth::from_str("--01+01:00")?.to_string(), "--01+01:00"); + assert_eq!(GMonth::from_str("--01")?.to_string(), "--01"); + + GYear::from_str("02020").unwrap_err(); + GYear::from_str("+2020").unwrap_err(); + GYear::from_str("33").unwrap_err(); + + assert_eq!(Time::from_str("00:00:00+14:00")?, Time::MIN); + assert_eq!(Time::from_str("24:00:00-14:00")?, Time::MAX); + Ok(()) + } + + #[test] + fn to_be_bytes() -> Result<(), ParseDateTimeError> { + assert_eq!( + DateTime::from_be_bytes(DateTime::MIN.to_be_bytes()), + DateTime::MIN + ); + assert_eq!( + DateTime::from_be_bytes(DateTime::MAX.to_be_bytes()), + DateTime::MAX + ); + assert_eq!( + DateTime::from_be_bytes(DateTime::from_str("2022-01-03T01:02:03")?.to_be_bytes()), + DateTime::from_str("2022-01-03T01:02:03")? 
+ ); + assert_eq!(Date::from_be_bytes(Date::MIN.to_be_bytes()), Date::MIN); + assert_eq!(Date::from_be_bytes(Date::MAX.to_be_bytes()), Date::MAX); + assert_eq!( + Date::from_be_bytes(Date::from_str("2022-01-03")?.to_be_bytes()), + Date::from_str("2022-01-03")? + ); + assert_eq!(Time::from_be_bytes(Time::MIN.to_be_bytes()), Time::MIN); + assert_eq!(Time::from_be_bytes(Time::MAX.to_be_bytes()), Time::MAX); + assert_eq!( + Time::from_be_bytes(Time::from_str("01:02:03")?.to_be_bytes()), + Time::from_str("01:02:03")? + ); + assert_eq!( + Time::from_be_bytes(Time::from_str("01:02:03")?.to_be_bytes()), + Time::from_str("01:02:03")? + ); + assert_eq!( + GYearMonth::from_be_bytes(GYearMonth::MIN.to_be_bytes()), + GYearMonth::MIN + ); + assert_eq!( + GYearMonth::from_be_bytes(GYearMonth::MAX.to_be_bytes()), + GYearMonth::MAX + ); + assert_eq!(GYear::from_be_bytes(GYear::MIN.to_be_bytes()), GYear::MIN); + assert_eq!(GYear::from_be_bytes(GYear::MAX.to_be_bytes()), GYear::MAX); + Ok(()) + } + + #[test] + fn equals() -> Result<(), ParseDateTimeError> { + assert_eq!( + DateTime::from_str("2002-04-02T12:00:00-01:00")?, + DateTime::from_str("2002-04-02T17:00:00+04:00")? + ); + assert_eq!( + DateTime::from_str("2002-04-02T12:00:00-05:00")?, + DateTime::from_str("2002-04-02T23:00:00+06:00")? + ); + assert_ne!( + DateTime::from_str("2002-04-02T12:00:00-05:00")?, + DateTime::from_str("2002-04-02T17:00:00-05:00")? + ); + assert_eq!( + DateTime::from_str("2002-04-02T12:00:00-05:00")?, + DateTime::from_str("2002-04-02T12:00:00-05:00")? + ); + assert_eq!( + DateTime::from_str("2002-04-02T23:00:00-04:00")?, + DateTime::from_str("2002-04-03T02:00:00-01:00")? + ); + assert_eq!( + DateTime::from_str("1999-12-31T24:00:00-05:00")?, + DateTime::from_str("2000-01-01T00:00:00-05:00")? + ); + assert_ne!( + DateTime::from_str("2005-04-04T24:00:00-05:00")?, + DateTime::from_str("2005-04-04T00:00:00-05:00")? + ); + + assert_ne!( + Date::from_str("2004-12-25Z")?, + Date::from_str("2004-12-25+07:00")? + ); + assert_eq!( + Date::from_str("2004-12-25-12:00")?, + Date::from_str("2004-12-26+12:00")? + ); + + assert_ne!( + Time::from_str("08:00:00+09:00")?, + Time::from_str("17:00:00-06:00")? + ); + assert_eq!( + Time::from_str("21:30:00+10:30")?, + Time::from_str("06:00:00-05:00")? + ); + assert_eq!( + Time::from_str("24:00:00+01:00")?, + Time::from_str("00:00:00+01:00")? + ); + + assert_eq!( + Time::from_str("05:00:00-03:00")?, + Time::from_str("10:00:00+02:00")? + ); + assert_ne!( + Time::from_str("23:00:00-03:00")?, + Time::from_str("02:00:00Z")? + ); + + assert_ne!( + GYearMonth::from_str("1986-02")?, + GYearMonth::from_str("1986-03")? + ); + assert_ne!( + GYearMonth::from_str("1978-03")?, + GYearMonth::from_str("1978-03Z")? + ); + + assert_ne!( + GYear::from_str("2005-12:00")?, + GYear::from_str("2005+12:00")? + ); + assert_ne!(GYear::from_str("1976-05:00")?, GYear::from_str("1976")?); + + assert_eq!( + GMonthDay::from_str("--12-25-14:00")?, + GMonthDay::from_str("--12-26+10:00")? + ); + assert_ne!( + GMonthDay::from_str("--12-25")?, + GMonthDay::from_str("--12-26Z")? + ); + + assert_ne!( + GMonth::from_str("--12-14:00")?, + GMonth::from_str("--12+10:00")? + ); + assert_ne!(GMonth::from_str("--12")?, GMonth::from_str("--12Z")?); + + assert_ne!( + GDay::from_str("---25-14:00")?, + GDay::from_str("---25+10:00")? 
+ ); + assert_ne!(GDay::from_str("---12")?, GDay::from_str("---12Z")?); + Ok(()) + } + + #[test] + #[allow(clippy::neg_cmp_op_on_partial_ord)] + fn cmp() -> Result<(), ParseDateTimeError> { + assert!(Date::from_str("2004-12-25Z")? < Date::from_str("2004-12-25-05:00")?); + assert!(!(Date::from_str("2004-12-25-12:00")? < Date::from_str("2004-12-26+12:00")?)); + + assert!(Date::from_str("2004-12-25Z")? > Date::from_str("2004-12-25+07:00")?); + assert!(!(Date::from_str("2004-12-25-12:00")? > Date::from_str("2004-12-26+12:00")?)); + + assert!(!(Time::from_str("12:00:00")? < Time::from_str("23:00:00+06:00")?)); + assert!(Time::from_str("11:00:00-05:00")? < Time::from_str("17:00:00Z")?); + assert!(!(Time::from_str("23:59:59")? < Time::from_str("24:00:00")?)); + + assert!(!(Time::from_str("08:00:00+09:00")? > Time::from_str("17:00:00-06:00")?)); + + assert!(GMonthDay::from_str("--12-12+13:00")? < GMonthDay::from_str("--12-12+11:00")?); + assert!(GDay::from_str("---15")? < GDay::from_str("---16")?); + assert!(GDay::from_str("---15-13:00")? > GDay::from_str("---16+13:00")?); + assert_eq!( + GDay::from_str("---15-11:00")?, + GDay::from_str("---16+13:00")? + ); + assert!(GDay::from_str("---15-13:00")? + .partial_cmp(&GDay::from_str("---16")?) + .is_none()); + Ok(()) + } + + #[test] + fn year() -> Result<(), ParseDateTimeError> { + assert_eq!( + DateTime::from_str("1999-05-31T13:20:00-05:00")?.year(), + 1999 + ); + assert_eq!( + DateTime::from_str("1999-05-31T21:30:00-05:00")?.year(), + 1999 + ); + assert_eq!(DateTime::from_str("1999-12-31T19:20:00")?.year(), 1999); + assert_eq!(DateTime::from_str("1999-12-31T24:00:00")?.year(), 2000); + assert_eq!(DateTime::from_str("-0002-06-06T00:00:00")?.year(), -2); + + assert_eq!(Date::from_str("1999-05-31")?.year(), 1999); + assert_eq!(Date::from_str("2000-01-01+05:00")?.year(), 2000); + assert_eq!(Date::from_str("-0002-06-01")?.year(), -2); + + assert_eq!(GYear::from_str("-0002")?.year(), -2); + assert_eq!(GYearMonth::from_str("-0002-02")?.year(), -2); + Ok(()) + } + + #[test] + fn month() -> Result<(), ParseDateTimeError> { + assert_eq!(DateTime::from_str("1999-05-31T13:20:00-05:00")?.month(), 5); + assert_eq!(DateTime::from_str("1999-12-31T19:20:00-05:00")?.month(), 12); + + assert_eq!(Date::from_str("1999-05-31-05:00")?.month(), 5); + assert_eq!(Date::from_str("2000-01-01+05:00")?.month(), 1); + + assert_eq!(GMonth::from_str("--02")?.month(), 2); + assert_eq!(GYearMonth::from_str("-0002-02")?.month(), 2); + assert_eq!(GMonthDay::from_str("--02-03")?.month(), 2); + Ok(()) + } + + #[test] + fn day() -> Result<(), ParseDateTimeError> { + assert_eq!(DateTime::from_str("1999-05-31T13:20:00-05:00")?.day(), 31); + assert_eq!(DateTime::from_str("1999-12-31T20:00:00-05:00")?.day(), 31); + + assert_eq!(Date::from_str("1999-05-31-05:00")?.day(), 31); + assert_eq!(Date::from_str("2000-01-01+05:00")?.day(), 1); + + assert_eq!(GDay::from_str("---03")?.day(), 3); + assert_eq!(GMonthDay::from_str("--02-03")?.day(), 3); + Ok(()) + } + + #[test] + fn hour() -> Result<(), ParseDateTimeError> { + assert_eq!(DateTime::from_str("1999-05-31T08:20:00-05:00")?.hour(), 8); + assert_eq!(DateTime::from_str("1999-12-31T21:20:00-05:00")?.hour(), 21); + assert_eq!(DateTime::from_str("1999-12-31T12:00:00")?.hour(), 12); + assert_eq!(DateTime::from_str("1999-12-31T24:00:00")?.hour(), 0); + + assert_eq!(Time::from_str("11:23:00-05:00")?.hour(), 11); + assert_eq!(Time::from_str("21:23:00-05:00")?.hour(), 21); + assert_eq!(Time::from_str("01:23:00+05:00")?.hour(), 1); + 
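// 24:00:00 parses but is normalized to 00:00:00 of the following day,
+        // so its hour reads back as 0:
+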
assert_eq!(Time::from_str("24:00:00")?.hour(), 0); + Ok(()) + } + + #[test] + fn minute() -> Result<(), ParseDateTimeError> { + assert_eq!( + DateTime::from_str("1999-05-31T13:20:00-05:00")?.minute(), + 20 + ); + assert_eq!( + DateTime::from_str("1999-05-31T13:30:00+05:30")?.minute(), + 30 + ); + + assert_eq!(Time::from_str("13:00:00Z")?.minute(), 0); + Ok(()) + } + + #[test] + fn second() -> Result<(), Box> { + assert_eq!( + DateTime::from_str("1999-05-31T13:20:00-05:00")?.second(), + Decimal::from(0) + ); + + assert_eq!( + Time::from_str("13:20:10.5")?.second(), + Decimal::from_str("10.5")? + ); + Ok(()) + } + + #[test] + fn timezone() -> Result<(), Box> { + assert_eq!( + DateTime::from_str("1999-05-31T13:20:00-05:00")?.timezone(), + Some(DayTimeDuration::from_str("-PT5H")?) + ); + assert_eq!( + DateTime::from_str("2000-06-12T13:20:00Z")?.timezone(), + Some(DayTimeDuration::from_str("PT0S")?) + ); + assert_eq!(DateTime::from_str("2004-08-27T00:00:00")?.timezone(), None); + + assert_eq!( + Date::from_str("1999-05-31-05:00")?.timezone(), + Some(DayTimeDuration::from_str("-PT5H")?) + ); + assert_eq!( + Date::from_str("2000-06-12Z")?.timezone(), + Some(DayTimeDuration::from_str("PT0S")?) + ); + + assert_eq!( + Time::from_str("13:20:00-05:00")?.timezone(), + Some(DayTimeDuration::from_str("-PT5H")?) + ); + assert_eq!(Time::from_str("13:20:00")?.timezone(), None); + Ok(()) + } + + #[test] + fn sub() -> Result<(), Box> { + assert_eq!( + DateTime::from_str("2000-10-30T06:12:00-05:00")? + .checked_sub(DateTime::from_str("1999-11-28T09:00:00Z")?), + Some(DayTimeDuration::from_str("P337DT2H12M")?) + ); + + assert_eq!( + Date::from_str("2000-10-30")?.checked_sub(Date::from_str("1999-11-28")?), + Some(DayTimeDuration::from_str("P337D")?) + ); + assert_eq!( + Date::from_str("2000-10-30+05:00")?.checked_sub(Date::from_str("1999-11-28Z")?), + Some(DayTimeDuration::from_str("P336DT19H")?) + ); + assert_eq!( + Date::from_str("2000-10-15-05:00")?.checked_sub(Date::from_str("2000-10-10+02:00")?), + Some(DayTimeDuration::from_str("P5DT7H")?) + ); + + assert_eq!( + Time::from_str("11:12:00Z")?.checked_sub(Time::from_str("04:00:00-05:00")?), + Some(DayTimeDuration::from_str("PT2H12M")?) + ); + assert_eq!( + Time::from_str("11:00:00-05:00")?.checked_sub(Time::from_str("21:30:00+05:30")?), + Some(DayTimeDuration::from_str("PT0S")?) + ); + assert_eq!( + Time::from_str("17:00:00-06:00")?.checked_sub(Time::from_str("08:00:00+09:00")?), + Some(DayTimeDuration::from_str("P1D")?) + ); + assert_eq!( + Time::from_str("24:00:00")?.checked_sub(Time::from_str("23:59:59")?), + Some(DayTimeDuration::from_str("-PT23H59M59S")?) + ); + Ok(()) + } + + #[test] + fn add_duration() -> Result<(), Box> { + assert_eq!( + DateTime::from_str("2000-01-12T12:13:14Z")? + .checked_add_duration(Duration::from_str("P1Y3M5DT7H10M3.3S")?), + Some(DateTime::from_str("2001-04-17T19:23:17.3Z")?) + ); + assert_eq!( + Date::from_str("2000-01-01")?.checked_add_duration(Duration::from_str("-P3M")?), + Some(Date::from_str("1999-10-01")?) + ); + assert_eq!( + Date::from_str("2000-01-12")?.checked_add_duration(Duration::from_str("PT33H")?), + Some(Date::from_str("2000-01-13")?) + ); + assert_eq!( + Date::from_str("2000-03-30")?.checked_add_duration(Duration::from_str("P1D")?), + Some(Date::from_str("2000-03-31")?) + ); + assert_eq!( + Date::from_str("2000-03-31")?.checked_add_duration(Duration::from_str("P1M")?), + Some(Date::from_str("2000-04-30")?) 
+ ); + assert_eq!( + Date::from_str("2000-03-30")?.checked_add_duration(Duration::from_str("P1M")?), + Some(Date::from_str("2000-04-30")?) + ); + assert_eq!( + Date::from_str("2000-04-30")?.checked_add_duration(Duration::from_str("P1D")?), + Some(Date::from_str("2000-05-01")?) + ); + + assert_eq!( + DateTime::from_str("2000-10-30T11:12:00")? + .checked_add_duration(Duration::from_str("P1Y2M")?), + Some(DateTime::from_str("2001-12-30T11:12:00")?) + ); + assert_eq!( + DateTime::from_str("2000-10-30T11:12:00")? + .checked_add_duration(Duration::from_str("P3DT1H15M")?), + Some(DateTime::from_str("2000-11-02T12:27:00")?) + ); + + assert_eq!( + Date::from_str("2000-10-30")?.checked_add_duration(Duration::from_str("P1Y2M")?), + Some(Date::from_str("2001-12-30")?) + ); + assert_eq!( + Date::from_str("2004-10-30Z")?.checked_add_duration(Duration::from_str("P2DT2H30M0S")?), + Some(Date::from_str("2004-11-01Z")?) + ); + + assert_eq!( + Time::from_str("11:12:00")?.checked_add_duration(Duration::from_str("P3DT1H15M")?), + Some(Time::from_str("12:27:00")?) + ); + assert_eq!( + Time::from_str("23:12:00+03:00")? + .checked_add_duration(Duration::from_str("P1DT3H15M")?), + Some(Time::from_str("02:27:00+03:00")?) + ); + Ok(()) + } + + #[test] + fn sub_duration() -> Result<(), Box> { + assert_eq!( + DateTime::from_str("2000-10-30T11:12:00")? + .checked_sub_duration(Duration::from_str("P1Y2M")?), + Some(DateTime::from_str("1999-08-30T11:12:00")?) + ); + assert_eq!( + DateTime::from_str("2000-10-30T11:12:00")? + .checked_sub_duration(Duration::from_str("P3DT1H15M")?), + Some(DateTime::from_str("2000-10-27T09:57:00")?) + ); + + assert_eq!( + Date::from_str("2000-10-30")?.checked_sub_duration(Duration::from_str("P1Y2M")?), + Some(Date::from_str("1999-08-30")?) + ); + assert_eq!( + Date::from_str("2000-02-29Z")?.checked_sub_duration(Duration::from_str("P1Y")?), + Some(Date::from_str("1999-02-28Z")?) + ); + assert_eq!( + Date::from_str("2000-10-31-05:00")?.checked_sub_duration(Duration::from_str("P1Y1M")?), + Some(Date::from_str("1999-09-30-05:00")?) + ); + assert_eq!( + Date::from_str("2000-10-30")?.checked_sub_duration(Duration::from_str("P3DT1H15M")?), + Some(Date::from_str("2000-10-26")?) + ); + + assert_eq!( + Time::from_str("11:12:00")?.checked_sub_duration(Duration::from_str("P3DT1H15M")?), + Some(Time::from_str("09:57:00")?) + ); + assert_eq!( + Time::from_str("08:20:00-05:00")? + .checked_sub_duration(Duration::from_str("P23DT10H10M")?), + Some(Time::from_str("22:10:00-05:00")?) + ); + Ok(()) + } + + #[test] + fn adjust() -> Result<(), Box> { + assert_eq!( + DateTime::from_str("2002-03-07T10:00:00-07:00")? + .adjust(Some(DayTimeDuration::from_str("PT10H")?.try_into()?)), + Some(DateTime::from_str("2002-03-08T03:00:00+10:00")?) + ); + assert_eq!( + DateTime::from_str("2002-03-07T00:00:00+01:00")? + .adjust(Some(DayTimeDuration::from_str("-PT8H")?.try_into()?)), + Some(DateTime::from_str("2002-03-06T15:00:00-08:00")?) + ); + assert_eq!( + DateTime::from_str("2002-03-07T10:00:00")?.adjust(None), + Some(DateTime::from_str("2002-03-07T10:00:00")?) + ); + assert_eq!( + DateTime::from_str("2002-03-07T10:00:00-07:00")?.adjust(None), + Some(DateTime::from_str("2002-03-07T10:00:00")?) + ); + + assert_eq!( + Date::from_str("2002-03-07")? + .adjust(Some(DayTimeDuration::from_str("-PT10H")?.try_into()?)), + Some(Date::from_str("2002-03-07-10:00")?) + ); + assert_eq!( + Date::from_str("2002-03-07-07:00")? + .adjust(Some(DayTimeDuration::from_str("-PT10H")?.try_into()?)), + Some(Date::from_str("2002-03-06-10:00")?) 
+ ); + assert_eq!( + Date::from_str("2002-03-07")?.adjust(None), + Some(Date::from_str("2002-03-07")?) + ); + assert_eq!( + Date::from_str("2002-03-07-07:00")?.adjust(None), + Some(Date::from_str("2002-03-07")?) + ); + + assert_eq!( + Time::from_str("10:00:00")? + .adjust(Some(DayTimeDuration::from_str("-PT10H")?.try_into()?)), + Some(Time::from_str("10:00:00-10:00")?) + ); + assert_eq!( + Time::from_str("10:00:00-07:00")? + .adjust(Some(DayTimeDuration::from_str("-PT10H")?.try_into()?)), + Some(Time::from_str("07:00:00-10:00")?) + ); + assert_eq!( + Time::from_str("10:00:00")?.adjust(None), + Some(Time::from_str("10:00:00")?) + ); + assert_eq!( + Time::from_str("10:00:00-07:00")?.adjust(None), + Some(Time::from_str("10:00:00")?) + ); + assert_eq!( + Time::from_str("10:00:00-07:00")? + .adjust(Some(DayTimeDuration::from_str("PT10H")?.try_into()?)), + Some(Time::from_str("03:00:00+10:00")?) + ); + Ok(()) + } + + #[test] + fn time_from_datetime() -> Result<(), ParseDateTimeError> { + assert_eq!( + Time::from(DateTime::MIN), + Time::from_str("19:51:08.312696284115894272-14:00")? + ); + assert_eq!( + Time::from(DateTime::MAX), + Time::from_str("04:08:51.687303715884105727+14:00")? + ); + Ok(()) + } + + #[test] + fn date_from_datetime() -> Result<(), Box> { + assert_eq!( + Date::try_from( + DateTime::MIN + .checked_add_day_time_duration(DayTimeDuration::from_str("P1D")?) + .unwrap() + )?, + Date::MIN + ); + assert_eq!(Date::try_from(DateTime::MAX)?, Date::MAX); + Ok(()) + } + + #[test] + fn g_year_month_from_date() { + assert_eq!(GYearMonth::from(Date::MIN), GYearMonth::MIN); + assert_eq!(GYearMonth::from(Date::MAX), GYearMonth::MAX); + } + + #[test] + fn g_year_from_g_year_month() -> Result<(), ParseDateTimeError> { + assert_eq!(GYear::try_from(GYearMonth::MIN)?, GYear::MIN); + assert_eq!( + GYear::try_from(GYearMonth::from_str("5391559471918-12+14:00")?)?, + GYear::MAX + ); + Ok(()) + } + + #[cfg(feature = "custom-now")] + #[test] + fn custom_now() { + #[allow(unsafe_code)] + #[no_mangle] + fn custom_ox_now() -> Duration { + Duration::default() + } + DateTime::now(); + } + + #[cfg(not(feature = "custom-now"))] + #[test] + fn now() -> Result<(), ParseDateTimeError> { + let now = DateTime::now(); + assert!(DateTime::from_str("2022-01-01T00:00:00Z")? < now); + assert!(now < DateTime::from_str("2100-01-01T00:00:00Z")?); + Ok(()) + } + + #[test] + fn minimally_conformant() -> Result<(), ParseDateTimeError> { + // All minimally conforming processors must support nonnegative year values less than 10000 + // (i.e., those expressible with four digits) in all datatypes which + // use the seven-property model defined in The Seven-property Model (§D.2.1) + // and have a non-absent value for year (i.e. dateTime, dateTimeStamp, date, gYearMonth, and gYear). + assert_eq!(GYear::from_str("9999")?.to_string(), "9999"); + assert_eq!( + DateTime::from_str("9999-12-31T23:59:59Z")?.to_string(), + "9999-12-31T23:59:59Z" + ); + + // All minimally conforming processors must support second values to milliseconds + // (i.e. those expressible with three fraction digits) in all datatypes + // which use the seven-property model defined in The Seven-property Model (§D.2.1) + // and have a non-absent value for second (i.e. dateTime, dateTimeStamp, and time). 
+        assert_eq!(
+            Time::from_str("00:00:00.678Z")?.to_string(),
+            "00:00:00.678Z"
+        );
+        assert_eq!(
+            DateTime::from_str("2000-01-01T00:00:00.678Z")?.to_string(),
+            "2000-01-01T00:00:00.678Z"
+        );
+        Ok(())
+    }
+}
diff --git a/ng-oxigraph/src/oxsdatatypes/decimal.rs b/ng-oxigraph/src/oxsdatatypes/decimal.rs
new file mode 100644
index 0000000..1516fc6
--- /dev/null
+++ b/ng-oxigraph/src/oxsdatatypes/decimal.rs
@@ -0,0 +1,1099 @@
+use crate::oxsdatatypes::{Boolean, Double, Float, Integer, TooLargeForIntegerError};
+use serde::{Deserialize, Serialize};
+use std::fmt;
+use std::fmt::Write;
+use std::str::FromStr;
+
+const DECIMAL_PART_DIGITS: u32 = 18;
+const DECIMAL_PART_POW: i128 = 1_000_000_000_000_000_000;
+const DECIMAL_PART_POW_MINUS_ONE: i128 = 100_000_000_000_000_000;
+
+/// [XML Schema `decimal` datatype](https://www.w3.org/TR/xmlschema11-2/#decimal)
+///
+/// It stores the decimal in a fixed-point encoding allowing nearly 18 digits before and 18 digits after ".".
+///
+/// It stores the value in an [`i128`] integer after multiplying it by 10¹⁸.
+#[derive(
+    Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash, Default, Serialize, Deserialize,
+)]
+pub struct Decimal {
+    value: i128, // value * 10^18
+}
+
+impl Decimal {
+    pub const MAX: Self = Self { value: i128::MAX };
+    pub const MIN: Self = Self { value: i128::MIN };
+    #[cfg(test)]
+    pub const STEP: Self = Self { value: 1 };
+
+    /// Constructs the decimal i / 10^n
+    #[inline]
+    pub const fn new(i: i128, n: u32) -> Result<Self, TooLargeForDecimalError> {
+        let Some(shift) = DECIMAL_PART_DIGITS.checked_sub(n) else {
+            return Err(TooLargeForDecimalError);
+        };
+        let Some(value) = i.checked_mul(10_i128.pow(shift)) else {
+            return Err(TooLargeForDecimalError);
+        };
+        Ok(Self { value })
+    }
+
+    pub(crate) const fn new_from_i128_unchecked(value: i128) -> Self {
+        Self {
+            value: value * DECIMAL_PART_POW,
+        }
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn from_be_bytes(bytes: [u8; 16]) -> Self {
+        Self {
+            value: i128::from_be_bytes(bytes),
+        }
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn to_be_bytes(self) -> [u8; 16] {
+        self.value.to_be_bytes()
+    }
+
+    /// [op:numeric-add](https://www.w3.org/TR/xpath-functions-31/#func-numeric-add)
+    ///
+    /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_add(self, rhs: impl Into<Self>) -> Option<Self> {
+        Some(Self {
+            value: self.value.checked_add(rhs.into().value)?,
+        })
+    }
+
+    /// [op:numeric-subtract](https://www.w3.org/TR/xpath-functions-31/#func-numeric-subtract)
+    ///
+    /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_sub(self, rhs: impl Into<Self>) -> Option<Self> {
+        Some(Self {
+            value: self.value.checked_sub(rhs.into().value)?,
+        })
+    }
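Since a `Decimal` is just `digits × 10⁻¹⁸` stored in an `i128`, addition and subtraction reduce to checked integer operations, while the multiplication and division below must compensate for the squared or cancelled 10¹⁸ scale factor. A minimal self-contained sketch of that rescaling (the `SCALE`, `encode`, and `naive_mul` names are illustrative, not crate API):

```rust
// Sketch only: mirrors the fixed-point idea above, not the crate's exact algorithm.
const SCALE: i128 = 1_000_000_000_000_000_000; // 10^18, like DECIMAL_PART_POW

/// Encode `units.tenths` as a scaled integer, e.g. encode(2, 5) == 2.5.
fn encode(units: i128, tenths: i128) -> i128 {
    units * SCALE + tenths * (SCALE / 10)
}

/// Naive fixed-point multiply: (a * b) / SCALE. The intermediate product
/// overflows i128 for large inputs, which is why the real `checked_mul`
/// first strips trailing decimal zeros from both operands.
fn naive_mul(a: i128, b: i128) -> Option<i128> {
    a.checked_mul(b)?.checked_div(SCALE)
}

fn main() {
    let a = encode(2, 5); // 2.5
    let b = encode(0, 4); // 0.4
    assert_eq!(naive_mul(a, b), Some(encode(1, 0))); // 2.5 * 0.4 = 1.0
}
```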
+
+    /// [op:numeric-multiply](https://www.w3.org/TR/xpath-functions-31/#func-numeric-multiply)
+    ///
+    /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_mul(self, rhs: impl Into<Self>) -> Option<Self> {
+        // Idea: we shift right as much as possible to keep as much precision as possible
+        // Do the multiplication and do the required left shift
+        let mut left = self.value;
+        let mut shift_left = 0_u32;
+        if left != 0 {
+            while left % 10 == 0 {
+                left /= 10;
+                shift_left += 1;
+            }
+        }
+
+        let mut right = rhs.into().value;
+        let mut shift_right = 0_u32;
+        if right != 0 {
+            while right % 10 == 0 {
+                right /= 10;
+                shift_right += 1;
+            }
+        }
+
+        // We do multiplication + shift
+        let shift = (shift_left + shift_right).checked_sub(DECIMAL_PART_DIGITS)?;
+        Some(Self {
+            value: left
+                .checked_mul(right)?
+                .checked_mul(10_i128.checked_pow(shift)?)?,
+        })
+    }
+
+    /// [op:numeric-divide](https://www.w3.org/TR/xpath-functions-31/#func-numeric-divide)
+    ///
+    /// Returns `None` in case of division by 0 ([FOAR0001](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0001)) or overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_div(self, rhs: impl Into<Self>) -> Option<Self> {
+        // Idea: we shift the dividend left as much as possible to keep as much precision as possible
+        // And we shift right the divisor as much as possible
+        // Do the division and do the required shift
+        let mut left = self.value;
+        let mut shift_left = 0_u32;
+        if left != 0 {
+            while let Some(r) = left.checked_mul(10) {
+                left = r;
+                shift_left += 1;
+            }
+        }
+        let mut right = rhs.into().value;
+        let mut shift_right = 0_u32;
+        if right != 0 {
+            while right % 10 == 0 {
+                right /= 10;
+                shift_right += 1;
+            }
+        }
+
+        // We do division + shift
+        let shift = (shift_left + shift_right).checked_sub(DECIMAL_PART_DIGITS)?;
+        Some(Self {
+            value: left
+                .checked_div(right)?
+                .checked_div(10_i128.checked_pow(shift)?)?,
+        })
+    }
+
+    /// [op:numeric-mod](https://www.w3.org/TR/xpath-functions-31/#func-numeric-mod)
+    ///
+    /// Returns `None` in case of division by 0 ([FOAR0001](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0001)) or overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_rem(self, rhs: impl Into<Self>) -> Option<Self> {
+        Some(Self {
+            value: self.value.checked_rem(rhs.into().value)?,
+        })
+    }
+
+    /// Euclidean remainder
+    ///
+    /// Returns `None` in case of division by 0 ([FOAR0001](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0001)) or overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_rem_euclid(self, rhs: impl Into<Self>) -> Option<Self> {
+        Some(Self {
+            value: self.value.checked_rem_euclid(rhs.into().value)?,
+        })
+    }
+
+    /// [op:numeric-unary-minus](https://www.w3.org/TR/xpath-functions-31/#func-numeric-unary-minus)
+    ///
+    /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_neg(self) -> Option<Self> {
+        Some(Self {
+            value: self.value.checked_neg()?,
+        })
+    }
+
+    /// [fn:abs](https://www.w3.org/TR/xpath-functions-31/#func-abs)
+    ///
+    /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
+    #[inline]
+    #[must_use]
+    pub fn checked_abs(self) -> Option<Self> {
+        Some(Self {
+            value: self.value.checked_abs()?,
+        })
+    }
+
+    /// [fn:round](https://www.w3.org/TR/xpath-functions-31/#func-round)
+    ///
+    /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
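The `checked_round` implementation that follows encodes XPath's fn:round tie-breaking rule: halves round toward positive infinity, so 2.5 becomes 3 but -2.5 becomes -2 (hence the asymmetric `>= 5` / `> 5` tests). A standalone illustration over tenths (hypothetical helper, not crate API):

```rust
// Round tenths to whole units with ties toward +infinity (XPath fn:round).
// Hypothetical helper mirroring the arithmetic of `checked_round`.
fn round_tenths(tenths: i128) -> i128 {
    if tenths >= 0 {
        tenths / 10 + i128::from(tenths % 10 >= 5)
    } else {
        tenths / 10 - i128::from(-tenths % 10 > 5)
    }
}

fn main() {
    assert_eq!(round_tenths(25), 3); // 2.5  -> 3
    assert_eq!(round_tenths(-25), -2); // -2.5 -> -2
    assert_eq!(round_tenths(-26), -3); // -2.6 -> -3
}
```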
+ #[inline] + #[must_use] + pub fn checked_round(self) -> Option { + let value = self.value / DECIMAL_PART_POW_MINUS_ONE; + Some(Self { + value: if value >= 0 { + value / 10 + i128::from(value % 10 >= 5) + } else { + value / 10 - i128::from(-value % 10 > 5) + } + .checked_mul(DECIMAL_PART_POW)?, + }) + } + + /// [fn:ceiling](https://www.w3.org/TR/xpath-functions-31/#func-ceiling) + /// + /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_ceil(self) -> Option { + Some(Self { + value: if self.value > 0 && self.value % DECIMAL_PART_POW != 0 { + self.value / DECIMAL_PART_POW + 1 + } else { + self.value / DECIMAL_PART_POW + } + .checked_mul(DECIMAL_PART_POW)?, + }) + } + + /// [fn:floor](https://www.w3.org/TR/xpath-functions-31/#func-floor) + /// + /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_floor(self) -> Option { + Some(Self { + value: if self.value >= 0 || self.value % DECIMAL_PART_POW == 0 { + self.value / DECIMAL_PART_POW + } else { + self.value / DECIMAL_PART_POW - 1 + } + .checked_mul(DECIMAL_PART_POW)?, + }) + } + + #[inline] + #[must_use] + pub const fn is_negative(self) -> bool { + self.value < 0 + } + + #[inline] + #[must_use] + pub const fn is_positive(self) -> bool { + self.value > 0 + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self == other + } + + #[inline] + #[must_use] + pub(super) const fn as_i128(self) -> i128 { + self.value / DECIMAL_PART_POW + } +} + +impl From for Decimal { + #[inline] + fn from(value: bool) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: i8) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: i16) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: i32) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: i64) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: u8) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: u16) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: u32) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: u64) -> Self { + Self { + value: i128::from(value) * DECIMAL_PART_POW, + } + } +} + +impl From for Decimal { + #[inline] + fn from(value: Integer) -> Self { + i64::from(value).into() + } +} + +impl TryFrom for Decimal { + type Error = TooLargeForDecimalError; + + #[inline] + fn try_from(value: i128) -> Result { + Ok(Self { + value: value + .checked_mul(DECIMAL_PART_POW) + .ok_or(TooLargeForDecimalError)?, + }) + } +} + +impl TryFrom for Decimal { + type Error = TooLargeForDecimalError; + + #[inline] + fn try_from(value: u128) -> Result { + Ok(Self { + value: i128::try_from(value) + .map_err(|_| TooLargeForDecimalError)? 
+ .checked_mul(DECIMAL_PART_POW) + .ok_or(TooLargeForDecimalError)?, + }) + } +} + +impl From for Decimal { + #[inline] + fn from(value: Boolean) -> Self { + bool::from(value).into() + } +} + +impl TryFrom for Decimal { + type Error = TooLargeForDecimalError; + + #[inline] + fn try_from(value: Float) -> Result { + Double::from(value).try_into() + } +} + +impl TryFrom for Decimal { + type Error = TooLargeForDecimalError; + + #[inline] + #[allow(clippy::cast_precision_loss, clippy::cast_possible_truncation)] + fn try_from(value: Double) -> Result { + let shifted = f64::from(value) * (DECIMAL_PART_POW as f64); + if (i128::MIN as f64) <= shifted && shifted <= (i128::MAX as f64) { + Ok(Self { + value: shifted as i128, + }) + } else { + Err(TooLargeForDecimalError) + } + } +} + +impl From for Float { + #[inline] + #[allow(clippy::cast_precision_loss)] + fn from(value: Decimal) -> Self { + Double::from(value).into() + } +} + +impl From for Double { + #[inline] + #[allow(clippy::cast_precision_loss)] + fn from(value: Decimal) -> Self { + let mut value = value.value; + let mut shift = DECIMAL_PART_POW; + + // Hack to improve precision + if value != 0 { + while shift != 1 && value % 10 == 0 { + value /= 10; + shift /= 10; + } + } + + ((value as f64) / (shift as f64)).into() + } +} + +impl TryFrom for Integer { + type Error = TooLargeForIntegerError; + + #[inline] + fn try_from(value: Decimal) -> Result { + Ok(i64::try_from( + value + .value + .checked_div(DECIMAL_PART_POW) + .ok_or(TooLargeForIntegerError)?, + ) + .map_err(|_| TooLargeForIntegerError)? + .into()) + } +} + +impl FromStr for Decimal { + type Err = ParseDecimalError; + + /// Parses decimals lexical mapping + fn from_str(input: &str) -> Result { + // (\+|-)?([0-9]+(\.[0-9]*)?|\.[0-9]+) + let input = input.as_bytes(); + if input.is_empty() { + return Err(PARSE_UNEXPECTED_END); + } + + let (sign, mut input) = match input.first() { + Some(b'+') => (1_i128, &input[1..]), + Some(b'-') => (-1_i128, &input[1..]), + _ => (1, input), + }; + + let mut value = 0_i128; + let with_before_dot = input.first().map_or(false, u8::is_ascii_digit); + while let Some(c) = input.first() { + if c.is_ascii_digit() { + value = value + .checked_mul(10) + .ok_or(PARSE_OVERFLOW)? + .checked_add(sign * i128::from(*c - b'0')) + .ok_or(PARSE_OVERFLOW)?; + input = &input[1..]; + } else { + break; + } + } + + let mut exp = DECIMAL_PART_POW; + if let Some(c) = input.first() { + if *c != b'.' { + return Err(PARSE_UNEXPECTED_CHAR); + } + input = &input[1..]; + if input.is_empty() && !with_before_dot { + // We only have a dot + return Err(PARSE_UNEXPECTED_END); + } + while input.last() == Some(&b'0') { + // Hack to avoid underflows + input = &input[..input.len() - 1]; + } + while let Some(c) = input.first() { + if c.is_ascii_digit() { + exp /= 10; + value = value + .checked_mul(10) + .ok_or(PARSE_OVERFLOW)? + .checked_add(sign * i128::from(*c - b'0')) + .ok_or(PARSE_OVERFLOW)?; + input = &input[1..]; + } else { + return Err(PARSE_UNEXPECTED_CHAR); + } + } + if exp == 0 { + // Underflow + return Err(PARSE_UNDERFLOW); + } + } else if !with_before_dot { + // It's empty + return Err(PARSE_UNEXPECTED_END); + } + + Ok(Self { + value: value.checked_mul(exp).ok_or(PARSE_OVERFLOW)?, + }) + } +} + +impl fmt::Display for Decimal { + /// Formats the decimal following its canonical representation. 
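For reference, the canonical form produced by this `Display` implementation never uses an exponent and drops the explicit `+` sign, leading integer zeros, and trailing fractional zeros. A sketch of the expected round-trips, written as it would appear in this crate's own tests (module path assumed):

```rust
use std::error::Error;
use std::str::FromStr;

use crate::oxsdatatypes::Decimal; // path assumed

fn canonical_form_examples() -> Result<(), Box<dyn Error>> {
    // Explicit '+', leading integer zeros and trailing fractional zeros all vanish.
    assert_eq!(Decimal::from_str("+100000.00")?.to_string(), "100000");
    assert_eq!(Decimal::from_str("01.10")?.to_string(), "1.1");
    assert_eq!(Decimal::from_str("-0")?.to_string(), "0");
    Ok(())
}
```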
+ #[allow(clippy::cast_possible_truncation)] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.value == 0 { + return if let Some(width) = f.width() { + for _ in 0..width { + f.write_char('0')?; + } + Ok(()) + } else { + f.write_char('0') + }; + } + + let mut value = self.value; + if self.value.is_negative() { + f.write_char('-')?; + } + + let mut digits = [b'0'; 40]; + let mut i = 0; + while value != 0 { + digits[i] = b'0' + ((value % 10).unsigned_abs() as u8); + value /= 10; + i += 1; + } + + let last_non_zero = i - 1; + let first_non_zero = digits + .iter() + .copied() + .enumerate() + .find_map(|(i, v)| if v == b'0' { None } else { Some(i) }) + .unwrap_or(40); + + let decimal_part_digits = usize::try_from(DECIMAL_PART_DIGITS).map_err(|_| fmt::Error)?; + if last_non_zero >= decimal_part_digits { + let end = if let Some(mut width) = f.width() { + if self.value.is_negative() { + width -= 1; + } + if last_non_zero - decimal_part_digits + 1 < width { + decimal_part_digits + width + } else { + last_non_zero + 1 + } + } else { + last_non_zero + 1 + }; + for c in digits[decimal_part_digits..end].iter().rev() { + f.write_char(char::from(*c))?; + } + } else { + f.write_char('0')? + } + if decimal_part_digits > first_non_zero { + f.write_char('.')?; + let start = if let Some(precision) = f.precision() { + if decimal_part_digits - first_non_zero > precision { + decimal_part_digits - precision + } else { + first_non_zero + } + } else { + first_non_zero + }; + for c in digits[start..decimal_part_digits].iter().rev() { + f.write_char(char::from(*c))?; + } + } + + Ok(()) + } +} + +/// An error when parsing a [`Decimal`]. +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct ParseDecimalError(#[from] DecimalParseErrorKind); + +#[derive(Debug, Clone, thiserror::Error)] +enum DecimalParseErrorKind { + #[error("Value overflow")] + Overflow, + #[error("Value underflow")] + Underflow, + #[error("Unexpected character")] + UnexpectedChar, + #[error("Unexpected end of string")] + UnexpectedEnd, +} + +const PARSE_OVERFLOW: ParseDecimalError = ParseDecimalError(DecimalParseErrorKind::Overflow); +const PARSE_UNDERFLOW: ParseDecimalError = ParseDecimalError(DecimalParseErrorKind::Underflow); +const PARSE_UNEXPECTED_CHAR: ParseDecimalError = + ParseDecimalError(DecimalParseErrorKind::UnexpectedChar); +const PARSE_UNEXPECTED_END: ParseDecimalError = + ParseDecimalError(DecimalParseErrorKind::UnexpectedEnd); + +impl From for ParseDecimalError { + fn from(_: TooLargeForDecimalError) -> Self { + Self(DecimalParseErrorKind::Overflow) + } +} + +/// The input is too large to fit into a [`Decimal`]. +/// +/// Matches XPath [`FOCA0001` error](https://www.w3.org/TR/xpath-functions-31/#ERRFOCA0001). 
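The same error also guards the fallible conversions above: a source value whose scaled form cannot fit the `i128` payload is rejected rather than silently truncated. A crate-internal sketch (paths assumed):

```rust
use crate::oxsdatatypes::{Decimal, Double}; // paths assumed

fn overflow_examples() {
    // i128::MAX * 10^18 cannot fit back into the i128 payload.
    assert!(Decimal::try_from(i128::MAX).is_err());
    // Non-finite doubles have no xsd:decimal value at all.
    assert!(Decimal::try_from(Double::from(f64::NAN)).is_err());
    assert!(Decimal::try_from(Double::from(f64::INFINITY)).is_err());
}
```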
+#[derive(Debug, Clone, Copy, thiserror::Error)] +#[error("Value too large for xsd:decimal internal representation")] +pub struct TooLargeForDecimalError; + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + + #[test] + fn new() -> Result<(), ParseDecimalError> { + assert_eq!(Decimal::new(1, 0)?.to_string(), "1"); + assert_eq!(Decimal::new(1, 1)?.to_string(), "0.1"); + assert_eq!(Decimal::new(10, 0)?.to_string(), "10"); + assert_eq!(Decimal::new(10, 1)?.to_string(), "1"); + assert_eq!(Decimal::new(10, 2)?.to_string(), "0.1"); + Ok(()) + } + + #[test] + fn from_str() -> Result<(), ParseDecimalError> { + Decimal::from_str("").unwrap_err(); + Decimal::from_str("+").unwrap_err(); + Decimal::from_str("-").unwrap_err(); + Decimal::from_str(".").unwrap_err(); + Decimal::from_str("+.").unwrap_err(); + Decimal::from_str("-.").unwrap_err(); + Decimal::from_str("a").unwrap_err(); + Decimal::from_str(".a").unwrap_err(); + assert_eq!(Decimal::from_str("210")?.to_string(), "210"); + assert_eq!(Decimal::from_str("1000")?.to_string(), "1000"); + assert_eq!(Decimal::from_str("-1.23")?.to_string(), "-1.23"); + assert_eq!( + Decimal::from_str("12678967.543233")?.to_string(), + "12678967.543233" + ); + assert_eq!(Decimal::from_str("+100000.00")?.to_string(), "100000"); + assert_eq!(Decimal::from_str("0.1220")?.to_string(), "0.122"); + assert_eq!(Decimal::from_str(".12200")?.to_string(), "0.122"); + assert_eq!(Decimal::from_str("1.")?.to_string(), "1"); + assert_eq!(Decimal::from_str("1.0")?.to_string(), "1"); + assert_eq!(Decimal::from_str("01.0")?.to_string(), "1"); + assert_eq!(Decimal::from_str("0")?.to_string(), "0"); + assert_eq!(Decimal::from_str("-0")?.to_string(), "0"); + assert_eq!(Decimal::from_str(&Decimal::MAX.to_string())?, Decimal::MAX); + assert_eq!(Decimal::from_str(&Decimal::MIN.to_string())?, Decimal::MIN); + Decimal::from_str("0.0000000000000000001").unwrap_err(); + Decimal::from_str("1000000000000000000000").unwrap_err(); + assert_eq!( + Decimal::from_str("0.100000000000000000000000000").unwrap(), + Decimal::from_str("0.1").unwrap() + ); + Ok(()) + } + + #[test] + fn format() { + assert_eq!(format!("{}", Decimal::from(0)), "0"); + assert_eq!(format!("{}", Decimal::from(1)), "1"); + assert_eq!(format!("{}", Decimal::from(10)), "10"); + assert_eq!(format!("{}", Decimal::from(100)), "100"); + assert_eq!(format!("{}", Decimal::from(-1)), "-1"); + assert_eq!(format!("{}", Decimal::from(-10)), "-10"); + + assert_eq!(format!("{:02}", Decimal::from(0)), "00"); + assert_eq!(format!("{:02}", Decimal::from(1)), "01"); + assert_eq!(format!("{:02}", Decimal::from(10)), "10"); + assert_eq!(format!("{:02}", Decimal::from(100)), "100"); + assert_eq!(format!("{:02}", Decimal::from(-1)), "-1"); + assert_eq!(format!("{:02}", Decimal::from(-10)), "-10"); + } + + #[test] + fn add() { + assert!(Decimal::MIN.checked_add(Decimal::STEP).is_some()); + assert!(Decimal::MAX.checked_add(Decimal::STEP).is_none()); + assert_eq!( + Decimal::MAX.checked_add(Decimal::MIN), + Decimal::STEP.checked_neg() + ); + } + + #[test] + fn sub() { + assert!(Decimal::MIN.checked_sub(Decimal::STEP).is_none()); + assert!(Decimal::MAX.checked_sub(Decimal::STEP).is_some()); + } + + #[test] + fn mul() -> Result<(), ParseDecimalError> { + assert_eq!(Decimal::from(1).checked_mul(-1), Some(Decimal::from(-1))); + assert_eq!( + Decimal::from(1000).checked_mul(1000), + Some(Decimal::from(1_000_000)) + ); + assert_eq!( + Decimal::from_str("0.1")?.checked_mul(Decimal::from_str("0.01")?), + 
Some(Decimal::from_str("0.001")?) + ); + assert_eq!(Decimal::from(0).checked_mul(1), Some(Decimal::from(0))); + assert_eq!(Decimal::from(1).checked_mul(0), Some(Decimal::from(0))); + assert_eq!(Decimal::MAX.checked_mul(1), Some(Decimal::MAX)); + assert_eq!(Decimal::MIN.checked_mul(1), Some(Decimal::MIN)); + assert_eq!( + Decimal::from(1).checked_mul(Decimal::MAX), + Some(Decimal::MAX) + ); + assert_eq!( + Decimal::from(1).checked_mul(Decimal::MIN), + Some(Decimal::MIN) + ); + assert_eq!( + Decimal::MAX.checked_mul(-1), + Some(Decimal::MIN.checked_add(Decimal::STEP).unwrap()) + ); + assert_eq!(Decimal::MIN.checked_mul(-1), None); + assert_eq!( + Decimal::MIN + .checked_add(Decimal::STEP) + .unwrap() + .checked_mul(-1), + Some(Decimal::MAX) + ); + Ok(()) + } + + #[test] + fn div() -> Result<(), ParseDecimalError> { + assert_eq!(Decimal::from(1).checked_div(1), Some(Decimal::from(1))); + assert_eq!(Decimal::from(100).checked_div(10), Some(Decimal::from(10))); + assert_eq!( + Decimal::from(10).checked_div(100), + Some(Decimal::from_str("0.1")?) + ); + assert_eq!(Decimal::from(1).checked_div(0), None); + assert_eq!(Decimal::from(0).checked_div(1), Some(Decimal::from(0))); + assert_eq!(Decimal::MAX.checked_div(1), Some(Decimal::MAX)); + assert_eq!(Decimal::MIN.checked_div(1), Some(Decimal::MIN)); + assert_eq!( + Decimal::MAX.checked_div(-1), + Some(Decimal::MIN.checked_add(Decimal::STEP).unwrap()) + ); + assert_eq!(Decimal::MIN.checked_div(-1), None); + assert_eq!( + Decimal::MIN + .checked_add(Decimal::STEP) + .unwrap() + .checked_div(-1), + Some(Decimal::MAX) + ); + Ok(()) + } + + #[test] + fn rem() -> Result<(), ParseDecimalError> { + assert_eq!(Decimal::from(10).checked_rem(3), Some(Decimal::from(1))); + assert_eq!(Decimal::from(6).checked_rem(-2), Some(Decimal::from(0))); + assert_eq!( + Decimal::from_str("4.5")?.checked_rem(Decimal::from_str("1.2")?), + Some(Decimal::from_str("0.9")?) + ); + assert_eq!(Decimal::from(1).checked_rem(0), None); + assert_eq!( + Decimal::MAX.checked_rem(1), + Some(Decimal::from_str("0.687303715884105727")?) + ); + assert_eq!( + Decimal::MIN.checked_rem(1), + Some(Decimal::from_str("-0.687303715884105728")?) + ); + assert_eq!( + Decimal::MAX.checked_rem(Decimal::STEP), + Some(Decimal::default()) + ); + assert_eq!( + Decimal::MIN.checked_rem(Decimal::STEP), + Some(Decimal::default()) + ); + assert_eq!( + Decimal::MAX.checked_rem(Decimal::MAX), + Some(Decimal::default()) + ); + assert_eq!( + Decimal::MIN.checked_rem(Decimal::MIN), + Some(Decimal::default()) + ); + Ok(()) + } + + #[test] + fn round() -> Result<(), ParseDecimalError> { + assert_eq!(Decimal::from(10).checked_round(), Some(Decimal::from(10))); + assert_eq!(Decimal::from(-10).checked_round(), Some(Decimal::from(-10))); + assert_eq!( + Decimal::from(i64::MIN).checked_round(), + Some(Decimal::from(i64::MIN)) + ); + assert_eq!( + Decimal::from(i64::MAX).checked_round(), + Some(Decimal::from(i64::MAX)) + ); + assert_eq!( + Decimal::from_str("2.5")?.checked_round(), + Some(Decimal::from(3)) + ); + assert_eq!( + Decimal::from_str("2.4999")?.checked_round(), + Some(Decimal::from(2)) + ); + assert_eq!( + Decimal::from_str("-2.5")?.checked_round(), + Some(Decimal::from(-2)) + ); + assert_eq!(Decimal::MAX.checked_round(), None); + assert_eq!( + Decimal::MAX + .checked_sub(Decimal::from_str("0.5")?) + .unwrap() + .checked_round(), + Some(Decimal::from_str("170141183460469231731")?) + ); + assert_eq!(Decimal::MIN.checked_round(), None); + assert_eq!( + Decimal::MIN + .checked_add(Decimal::from_str("0.5")?) 
+ .unwrap() + .checked_round(), + Some(Decimal::from_str("-170141183460469231731")?) + ); + Ok(()) + } + + #[test] + fn ceil() -> Result<(), ParseDecimalError> { + assert_eq!(Decimal::from(10).checked_ceil(), Some(Decimal::from(10))); + assert_eq!(Decimal::from(-10).checked_ceil(), Some(Decimal::from(-10))); + assert_eq!( + Decimal::from_str("10.5")?.checked_ceil(), + Some(Decimal::from(11)) + ); + assert_eq!( + Decimal::from_str("-10.5")?.checked_ceil(), + Some(Decimal::from(-10)) + ); + assert_eq!( + Decimal::from(i64::MIN).checked_ceil(), + Some(Decimal::from(i64::MIN)) + ); + assert_eq!( + Decimal::from(i64::MAX).checked_ceil(), + Some(Decimal::from(i64::MAX)) + ); + assert_eq!(Decimal::MAX.checked_ceil(), None); + assert_eq!( + Decimal::MAX + .checked_sub(Decimal::from(1)) + .unwrap() + .checked_ceil(), + Some(Decimal::from_str("170141183460469231731")?) + ); + assert_eq!( + Decimal::MIN.checked_ceil(), + Some(Decimal::from_str("-170141183460469231731")?) + ); + Ok(()) + } + + #[test] + fn floor() -> Result<(), ParseDecimalError> { + assert_eq!(Decimal::from(10).checked_floor(), Some(Decimal::from(10))); + assert_eq!(Decimal::from(-10).checked_floor(), Some(Decimal::from(-10))); + assert_eq!( + Decimal::from_str("10.5")?.checked_floor(), + Some(Decimal::from(10)) + ); + assert_eq!( + Decimal::from_str("-10.5")?.checked_floor(), + Some(Decimal::from(-11)) + ); + assert_eq!( + Decimal::from(i64::MIN).checked_floor(), + Some(Decimal::from(i64::MIN)) + ); + assert_eq!( + Decimal::from(i64::MAX).checked_floor(), + Some(Decimal::from(i64::MAX)) + ); + assert_eq!( + Decimal::MAX.checked_floor(), + Some(Decimal::from_str("170141183460469231731")?) + ); + assert_eq!(Decimal::MIN.checked_floor(), None); + assert_eq!( + Decimal::MIN + .checked_add(Decimal::from_str("1")?) + .unwrap() + .checked_floor(), + Some(Decimal::from_str("-170141183460469231731")?) + ); + Ok(()) + } + + #[test] + fn to_be_bytes() -> Result<(), ParseDecimalError> { + assert_eq!( + Decimal::from_be_bytes(Decimal::MIN.to_be_bytes()), + Decimal::MIN + ); + assert_eq!( + Decimal::from_be_bytes(Decimal::MAX.to_be_bytes()), + Decimal::MAX + ); + assert_eq!( + Decimal::from_be_bytes(Decimal::from(i64::MIN).to_be_bytes()), + Decimal::from(i64::MIN) + ); + assert_eq!( + Decimal::from_be_bytes(Decimal::from(i64::MAX).to_be_bytes()), + Decimal::from(i64::MAX) + ); + assert_eq!( + Decimal::from_be_bytes(Decimal::from(0).to_be_bytes()), + Decimal::from(0) + ); + assert_eq!( + Decimal::from_be_bytes(Decimal::from(0).to_be_bytes()), + Decimal::from(0) + ); + assert_eq!( + Decimal::from_be_bytes(Decimal::from_str("0.01")?.to_be_bytes()), + Decimal::from_str("0.01")? + ); + Ok(()) + } + + #[test] + fn from_bool() { + assert_eq!(Decimal::from(false), Decimal::from(0_u8)); + assert_eq!(Decimal::from(true), Decimal::from(1_u8)); + } + + #[test] + fn from_float() -> Result<(), ParseDecimalError> { + assert_eq!( + Decimal::try_from(Float::from(0.)).ok(), + Some(Decimal::from(0)) + ); + assert_eq!( + Decimal::try_from(Float::from(-0.)).ok(), + Some(Decimal::from(0)) + ); + assert_eq!( + Decimal::try_from(Float::from(-123.5)).ok(), + Some(Decimal::from_str("-123.5")?) 
+ ); + Decimal::try_from(Float::from(f32::NAN)).unwrap_err(); + Decimal::try_from(Float::from(f32::INFINITY)).unwrap_err(); + Decimal::try_from(Float::from(f32::NEG_INFINITY)).unwrap_err(); + Decimal::try_from(Float::from(f32::MIN)).unwrap_err(); + Decimal::try_from(Float::from(f32::MAX)).unwrap_err(); + assert!( + Decimal::try_from(Float::from(1_672_507_300_000.)) + .unwrap() + .checked_sub(Decimal::from(1_672_507_293_696_i64)) + .unwrap() + .checked_abs() + .unwrap() + < Decimal::from(1) + ); + Ok(()) + } + + #[test] + fn from_double() -> Result<(), ParseDecimalError> { + assert_eq!( + Decimal::try_from(Double::from(0.)).ok(), + Some(Decimal::from(0)) + ); + assert_eq!( + Decimal::try_from(Double::from(-0.)).ok(), + Some(Decimal::from(0)) + ); + assert_eq!( + Decimal::try_from(Double::from(-123.1)).ok(), + Some(Decimal::from_str("-123.1")?) + ); + assert!( + Decimal::try_from(Double::from(1_672_507_302_466.)) + .unwrap() + .checked_sub(Decimal::from(1_672_507_302_466_i64)) + .unwrap() + .checked_abs() + .unwrap() + < Decimal::from(1) + ); + Decimal::try_from(Double::from(f64::NAN)).unwrap_err(); + Decimal::try_from(Double::from(f64::INFINITY)).unwrap_err(); + Decimal::try_from(Double::from(f64::NEG_INFINITY)).unwrap_err(); + Decimal::try_from(Double::from(f64::MIN)).unwrap_err(); + Decimal::try_from(Double::from(f64::MAX)).unwrap_err(); + Ok(()) + } + + #[test] + fn to_float() -> Result<(), ParseDecimalError> { + assert_eq!(Float::from(Decimal::from(0)), Float::from(0.)); + assert_eq!(Float::from(Decimal::from(1)), Float::from(1.)); + assert_eq!(Float::from(Decimal::from(10)), Float::from(10.)); + assert_eq!(Float::from(Decimal::from_str("0.1")?), Float::from(0.1)); + assert!((Float::from(Decimal::MAX) - Float::from(1.701_412e20)).abs() < Float::from(1.)); + assert!((Float::from(Decimal::MIN) - Float::from(-1.701_412e20)).abs() < Float::from(1.)); + Ok(()) + } + + #[test] + fn to_double() -> Result<(), ParseDecimalError> { + assert_eq!(Double::from(Decimal::from(0)), Double::from(0.)); + assert_eq!(Double::from(Decimal::from(1)), Double::from(1.)); + assert_eq!(Double::from(Decimal::from(10)), Double::from(10.)); + assert_eq!(Double::from(Decimal::from_str("0.1")?), Double::from(0.1)); + assert!( + (Double::from(Decimal::MAX) - Double::from(1.701_411_834_604_692_4e20)).abs() + < Double::from(1.) + ); + assert!( + (Double::from(Decimal::MIN) - Double::from(-1.701_411_834_604_692_4e20)).abs() + < Double::from(1.) + ); + Ok(()) + } + + #[test] + fn minimally_conformant() -> Result<(), ParseDecimalError> { + // All minimally conforming processors must support decimal values whose absolute value can be expressed as i / 10^k, + // where i and k are nonnegative integers such that i < 10^16 and k ≤ 16 (i.e., those expressible with sixteen total digits). 
+ assert_eq!( + Decimal::from_str("1234567890123456")?.to_string(), + "1234567890123456" + ); + assert_eq!( + Decimal::from_str("-1234567890123456")?.to_string(), + "-1234567890123456" + ); + assert_eq!( + Decimal::from_str("0.1234567890123456")?.to_string(), + "0.1234567890123456" + ); + assert_eq!( + Decimal::from_str("-0.1234567890123456")?.to_string(), + "-0.1234567890123456" + ); + Ok(()) + } +} diff --git a/ng-oxigraph/src/oxsdatatypes/double.rs b/ng-oxigraph/src/oxsdatatypes/double.rs new file mode 100644 index 0000000..48e0022 --- /dev/null +++ b/ng-oxigraph/src/oxsdatatypes/double.rs @@ -0,0 +1,326 @@ +use crate::oxsdatatypes::{Boolean, Float, Integer}; +use serde::{Deserialize, Serialize}; +use std::cmp::Ordering; +use std::fmt; +use std::num::ParseFloatError; +use std::ops::{Add, Div, Mul, Neg, Sub}; +use std::str::FromStr; + +/// [XML Schema `double` datatype](https://www.w3.org/TR/xmlschema11-2/#double) +/// +/// Uses internally a [`f64`]. +/// +///
<div class="warning">Serialization does not follow the canonical mapping.</div>
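The distinction between equality and identity matters for `Double` because IEEE 754 comparison and XSD identity disagree in exactly two places, which is why `is_identical_with` below compares bit patterns instead of using `==`: NaN is identical to itself but not equal, and -0.0 is equal to +0.0 but not identical. Both facts are plain `f64` behavior:

```rust
fn main() {
    // IEEE 754 equality: NaN != NaN, and -0.0 == +0.0 ...
    assert!(f64::NAN != f64::NAN);
    assert!(-0.0_f64 == 0.0_f64);
    // ... while bitwise identity gives the opposite answers on both.
    assert_eq!(f64::NAN.to_bits(), f64::NAN.to_bits());
    assert_ne!((-0.0_f64).to_bits(), (0.0_f64).to_bits());
}
```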
+#[derive(Debug, Clone, Copy, Default, PartialEq, Serialize, Deserialize)] +#[repr(transparent)] +pub struct Double { + value: f64, +} + +impl Double { + pub const INFINITY: Self = Self { + value: f64::INFINITY, + }; + pub const MAX: Self = Self { value: f64::MAX }; + pub const MIN: Self = Self { value: f64::MIN }; + pub const NAN: Self = Self { value: f64::NAN }; + pub const NEG_INFINITY: Self = Self { + value: f64::NEG_INFINITY, + }; + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 8]) -> Self { + Self { + value: f64::from_be_bytes(bytes), + } + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 8] { + self.value.to_be_bytes() + } + + /// [fn:abs](https://www.w3.org/TR/xpath-functions-31/#func-abs) + #[inline] + #[must_use] + pub fn abs(self) -> Self { + self.value.abs().into() + } + + /// [fn:ceiling](https://www.w3.org/TR/xpath-functions-31/#func-ceiling) + #[inline] + #[must_use] + pub fn ceil(self) -> Self { + self.value.ceil().into() + } + + /// [fn:floor](https://www.w3.org/TR/xpath-functions-31/#func-floor) + #[inline] + #[must_use] + pub fn floor(self) -> Self { + self.value.floor().into() + } + + /// [fn:round](https://www.w3.org/TR/xpath-functions-31/#func-round) + #[inline] + #[must_use] + pub fn round(self) -> Self { + self.value.round().into() + } + + #[inline] + #[must_use] + pub fn is_nan(self) -> bool { + self.value.is_nan() + } + + #[inline] + #[must_use] + pub fn is_finite(self) -> bool { + self.value.is_finite() + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.value.to_bits() == other.value.to_bits() + } +} + +impl From for f64 { + #[inline] + fn from(value: Double) -> Self { + value.value + } +} + +impl From for Double { + #[inline] + fn from(value: f64) -> Self { + Self { value } + } +} + +impl From for Double { + #[inline] + fn from(value: i8) -> Self { + Self { + value: value.into(), + } + } +} + +impl From for Double { + #[inline] + fn from(value: i16) -> Self { + Self { + value: value.into(), + } + } +} + +impl From for Double { + #[inline] + fn from(value: i32) -> Self { + Self { + value: value.into(), + } + } +} + +impl From for Double { + #[inline] + fn from(value: u8) -> Self { + Self { + value: value.into(), + } + } +} + +impl From for Double { + #[inline] + fn from(value: u16) -> Self { + Self { + value: value.into(), + } + } +} + +impl From for Double { + #[inline] + fn from(value: u32) -> Self { + Self { + value: value.into(), + } + } +} + +impl From for Double { + #[inline] + fn from(value: Float) -> Self { + Self { + value: value.into(), + } + } +} + +impl From for Double { + #[inline] + fn from(value: Boolean) -> Self { + f64::from(bool::from(value)).into() + } +} + +impl From for Double { + #[inline] + #[allow(clippy::cast_precision_loss)] + fn from(value: Integer) -> Self { + (i64::from(value) as f64).into() + } +} + +impl FromStr for Double { + type Err = ParseFloatError; + + #[inline] + fn from_str(input: &str) -> Result { + Ok(f64::from_str(input)?.into()) + } +} + +impl fmt::Display for Double { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.value == f64::INFINITY { + f.write_str("INF") + } else if self.value == f64::NEG_INFINITY { + f.write_str("-INF") + } else { + self.value.fmt(f) + } + } +} + +impl PartialOrd for Double { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + self.value.partial_cmp(&other.value) + } +} + +impl Neg 
for Double { + type Output = Self; + + #[inline] + fn neg(self) -> Self { + (-self.value).into() + } +} + +impl Add for Double { + type Output = Self; + + #[inline] + fn add(self, rhs: Self) -> Self { + (self.value + rhs.value).into() + } +} + +impl Sub for Double { + type Output = Self; + + #[inline] + fn sub(self, rhs: Self) -> Self { + (self.value - rhs.value).into() + } +} + +impl Mul for Double { + type Output = Self; + + #[inline] + fn mul(self, rhs: Self) -> Self { + (self.value * rhs.value).into() + } +} + +impl Div for Double { + type Output = Self; + + #[inline] + fn div(self, rhs: Self) -> Self { + (self.value / rhs.value).into() + } +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + + #[test] + fn eq() { + assert_eq!(Double::from(0_f64), Double::from(0_f64)); + assert_ne!(Double::NAN, Double::NAN); + assert_eq!(Double::from(-0.), Double::from(0.)); + } + + #[test] + fn cmp() { + assert_eq!( + Double::from(0.).partial_cmp(&Double::from(0.)), + Some(Ordering::Equal) + ); + assert_eq!( + Double::INFINITY.partial_cmp(&Double::MAX), + Some(Ordering::Greater) + ); + assert_eq!( + Double::NEG_INFINITY.partial_cmp(&Double::MIN), + Some(Ordering::Less) + ); + assert_eq!(Double::NAN.partial_cmp(&Double::from(0.)), None); + assert_eq!(Double::NAN.partial_cmp(&Double::NAN), None); + assert_eq!( + Double::from(0.).partial_cmp(&Double::from(-0.)), + Some(Ordering::Equal) + ); + } + + #[test] + fn is_identical_with() { + assert!(Double::from(0.).is_identical_with(Double::from(0.))); + assert!(Double::NAN.is_identical_with(Double::NAN)); + assert!(!Double::from(-0.).is_identical_with(Double::from(0.))); + } + + #[test] + fn from_str() -> Result<(), ParseFloatError> { + assert_eq!(Double::from_str("NaN")?.to_string(), "NaN"); + assert_eq!(Double::from_str("INF")?.to_string(), "INF"); + assert_eq!(Double::from_str("+INF")?.to_string(), "INF"); + assert_eq!(Double::from_str("-INF")?.to_string(), "-INF"); + assert_eq!(Double::from_str("0.0E0")?.to_string(), "0"); + assert_eq!(Double::from_str("-0.0E0")?.to_string(), "-0"); + assert_eq!(Double::from_str("0.1e1")?.to_string(), "1"); + assert_eq!(Double::from_str("-0.1e1")?.to_string(), "-1"); + assert_eq!(Double::from_str("1.e1")?.to_string(), "10"); + assert_eq!(Double::from_str("-1.e1")?.to_string(), "-10"); + assert_eq!(Double::from_str("1")?.to_string(), "1"); + assert_eq!(Double::from_str("-1")?.to_string(), "-1"); + assert_eq!(Double::from_str("1.")?.to_string(), "1"); + assert_eq!(Double::from_str("-1.")?.to_string(), "-1"); + assert_eq!( + Double::from_str(&f64::MIN.to_string()).unwrap(), + Double::MIN + ); + assert_eq!( + Double::from_str(&f64::MAX.to_string()).unwrap(), + Double::MAX + ); + Ok(()) + } +} diff --git a/ng-oxigraph/src/oxsdatatypes/duration.rs b/ng-oxigraph/src/oxsdatatypes/duration.rs new file mode 100644 index 0000000..d8f5eb0 --- /dev/null +++ b/ng-oxigraph/src/oxsdatatypes/duration.rs @@ -0,0 +1,1249 @@ +use crate::oxsdatatypes::{DateTime, Decimal}; +use serde::{Deserialize, Serialize}; +use std::cmp::Ordering; +use std::fmt; +use std::str::FromStr; +use std::time::Duration as StdDuration; + +/// [XML Schema `duration` datatype](https://www.w3.org/TR/xmlschema11-2/#duration) +/// +/// It stores the duration using a pair of a [`YearMonthDuration`] and a [`DayTimeDuration`]. 
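Keeping the two components separate is what makes the type faithful to XSD: a month has no fixed length in seconds, so the month total and the second total can never be merged into one number. The accessors then re-split the stored month count into calendar parts. A usage sketch against this file's API (module path assumed):

```rust
use std::error::Error;
use std::str::FromStr;

use crate::oxsdatatypes::Duration; // path assumed

fn components_example() -> Result<(), Box<dyn Error>> {
    // -P18M is stored as -18 months; the accessors re-split it into calendar parts.
    let d = Duration::from_str("-P18M")?;
    assert_eq!(d.years(), -1);
    assert_eq!(d.months(), -6);
    assert_eq!(d.days(), 0); // the day-time component stays untouched
    assert_eq!(d.to_string(), "-P1Y6M"); // canonical serialization
    Ok(())
}
```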
+#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash, Default, Serialize, Deserialize)]
+pub struct Duration {
+    year_month: YearMonthDuration,
+    day_time: DayTimeDuration,
+}
+
+impl Duration {
+    pub const MAX: Self = Self {
+        year_month: YearMonthDuration::MAX,
+        day_time: DayTimeDuration::MAX,
+    };
+    pub const MIN: Self = Self {
+        year_month: YearMonthDuration::MIN,
+        day_time: DayTimeDuration::MIN,
+    };
+
+    #[inline]
+    pub fn new(
+        months: impl Into<i64>,
+        seconds: impl Into<Decimal>,
+    ) -> Result<Self, OppositeSignInDurationComponentsError> {
+        Self::construct(
+            YearMonthDuration::new(months),
+            DayTimeDuration::new(seconds),
+        )
+    }
+
+    #[inline]
+    fn construct(
+        year_month: YearMonthDuration,
+        day_time: DayTimeDuration,
+    ) -> Result<Self, OppositeSignInDurationComponentsError> {
+        if (year_month > YearMonthDuration::default() && day_time < DayTimeDuration::default())
+            || (year_month < YearMonthDuration::default() && day_time > DayTimeDuration::default())
+        {
+            return Err(OppositeSignInDurationComponentsError);
+        }
+        Ok(Self {
+            year_month,
+            day_time,
+        })
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn from_be_bytes(bytes: [u8; 24]) -> Self {
+        Self {
+            year_month: YearMonthDuration::from_be_bytes(bytes[0..8].try_into().unwrap()),
+            day_time: DayTimeDuration::from_be_bytes(bytes[8..24].try_into().unwrap()),
+        }
+    }
+
+    /// [fn:years-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-years-from-duration)
+    #[inline]
+    #[must_use]
+    pub fn years(self) -> i64 {
+        self.year_month.years()
+    }
+
+    /// [fn:months-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-months-from-duration)
+    #[inline]
+    #[must_use]
+    pub fn months(self) -> i64 {
+        self.year_month.months()
+    }
+
+    /// [fn:days-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-days-from-duration)
+    #[inline]
+    #[must_use]
+    pub fn days(self) -> i64 {
+        self.day_time.days()
+    }
+
+    /// [fn:hours-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-hours-from-duration)
+    #[inline]
+    #[must_use]
+    pub fn hours(self) -> i64 {
+        self.day_time.hours()
+    }
+
+    /// [fn:minutes-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-minutes-from-duration)
+    #[inline]
+    #[must_use]
+    pub fn minutes(self) -> i64 {
+        self.day_time.minutes()
+    }
+
+    /// [fn:seconds-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-seconds-from-duration)
+    #[inline]
+    #[must_use]
+    pub fn seconds(self) -> Decimal {
+        self.day_time.seconds()
+    }
+
+    #[inline]
+    #[must_use]
+    pub(crate) const fn all_months(self) -> i64 {
+        self.year_month.all_months()
+    }
+
+    #[inline]
+    #[must_use]
+    pub(crate) const fn all_seconds(self) -> Decimal {
+        self.day_time.as_seconds()
+    }
+
+    #[inline]
+    #[must_use]
+    pub fn to_be_bytes(self) -> [u8; 24] {
+        let mut bytes = [0; 24];
+        bytes[0..8].copy_from_slice(&self.year_month.to_be_bytes());
+        bytes[8..24].copy_from_slice(&self.day_time.to_be_bytes());
+        bytes
+    }
+
+    /// [op:add-yearMonthDurations](https://www.w3.org/TR/xpath-functions-31/#func-add-yearMonthDurations) and [op:add-dayTimeDurations](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDurations)
+    ///
+    /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)).
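Because `checked_add` and `checked_sub` below route their results through `construct`, an addition can fail even without numeric overflow: it is enough that the resulting year-month and day-time parts end up with opposite signs. A sketch (module path assumed):

```rust
use std::error::Error;
use std::str::FromStr;

use crate::oxsdatatypes::Duration; // path assumed

fn mixed_sign_examples() -> Result<(), Box<dyn Error>> {
    // Rejected at construction time: +1 month combined with -1 second...
    assert!(Duration::new(1, -1).is_err());
    // ...and when an addition would produce such a value:
    let p1m = Duration::from_str("P1M")?;
    let minus_one_second = Duration::from_str("-PT1S")?;
    assert_eq!(p1m.checked_add(minus_one_second), None);
    Ok(())
}
```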
+ #[inline] + #[must_use] + pub fn checked_add(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Self::construct( + self.year_month.checked_add(rhs.year_month)?, + self.day_time.checked_add(rhs.day_time)?, + ) + .ok() + } + + /// [op:subtract-yearMonthDurations](https://www.w3.org/TR/xpath-functions-31/#func-subtract-yearMonthDurations) and [op:subtract-dayTimeDurations](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDurations) + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + #[must_use] + pub fn checked_sub(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Self::construct( + self.year_month.checked_sub(rhs.year_month)?, + self.day_time.checked_sub(rhs.day_time)?, + ) + .ok() + } + + /// Unary negation. + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + #[must_use] + pub fn checked_neg(self) -> Option { + Some(Self { + year_month: self.year_month.checked_neg()?, + day_time: self.day_time.checked_neg()?, + }) + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self == other + } +} + +impl TryFrom for Duration { + type Error = DurationOverflowError; + + #[inline] + fn try_from(value: StdDuration) -> Result { + Ok(DayTimeDuration::try_from(value)?.into()) + } +} + +impl FromStr for Duration { + type Err = ParseDurationError; + + fn from_str(input: &str) -> Result { + let parts = ensure_complete(input, duration_parts)?; + if parts.year_month.is_none() && parts.day_time.is_none() { + return Err(Self::Err::msg("Empty duration")); + } + Ok(Self::new( + parts.year_month.unwrap_or(0), + parts.day_time.unwrap_or_default(), + )?) + } +} + +impl fmt::Display for Duration { + #[allow(clippy::many_single_char_names)] + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let ym = self.year_month.months; + let ss = self.day_time.seconds; + + if (ym < 0 && ss > 0.into()) || (ym > 0 && ss < 0.into()) { + return Err(fmt::Error); // Not able to format with only a part of the duration that is negative + } + if ym < 0 || ss < 0.into() { + f.write_str("-")?; + } + f.write_str("P")?; + + if ym == 0 && ss == 0.into() { + return f.write_str("T0S"); + } + + { + let y = ym / 12; + let m = ym % 12; + + if y != 0 { + if m == 0 { + write!(f, "{}Y", y.abs())?; + } else { + write!(f, "{}Y{}M", y.abs(), m.abs())?; + } + } else if m != 0 || ss == 0.into() { + write!(f, "{}M", m.abs())?; + } + } + + { + let s_int = ss.as_i128(); + let d = s_int / 86400; + let h = (s_int % 86400) / 3600; + let m = (s_int % 3600) / 60; + let s = ss + .checked_sub( + Decimal::try_from(d * 86400 + h * 3600 + m * 60).map_err(|_| fmt::Error)?, + ) + .ok_or(fmt::Error)?; + + if d != 0 { + write!(f, "{}D", d.abs())?; + } + + if h != 0 || m != 0 || s != 0.into() { + f.write_str("T")?; + if h != 0 { + write!(f, "{}H", h.abs())?; + } + if m != 0 { + write!(f, "{}M", m.abs())?; + } + if s != 0.into() { + write!(f, "{}S", s.checked_abs().ok_or(fmt::Error)?)?; + } + } + } + Ok(()) + } +} + +impl PartialOrd for Duration { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + let first = DateTime::new(1969, 9, 1, 0, 0, 0.into(), None).ok()?; + let first_result = first + .checked_add_duration(*self)? 
+ .partial_cmp(&first.checked_add_duration(*other)?); + let second = DateTime::new(1697, 2, 1, 0, 0, 0.into(), None).ok()?; + let second_result = second + .checked_add_duration(*self)? + .partial_cmp(&second.checked_add_duration(*other)?); + let third = DateTime::new(1903, 3, 1, 0, 0, 0.into(), None).ok()?; + let third_result = third + .checked_add_duration(*self)? + .partial_cmp(&third.checked_add_duration(*other)?); + let fourth = DateTime::new(1903, 7, 1, 0, 0, 0.into(), None).ok()?; + let fourth_result = fourth + .checked_add_duration(*self)? + .partial_cmp(&fourth.checked_add_duration(*other)?); + if first_result == second_result + && second_result == third_result + && third_result == fourth_result + { + first_result + } else { + None + } + } +} + +/// [XML Schema `yearMonthDuration` datatype](https://www.w3.org/TR/xmlschema11-2/#yearMonthDuration) +/// +/// It stores the duration as a number of months encoded using a [`i64`]. +#[derive( + Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash, Default, Serialize, Deserialize, +)] +pub struct YearMonthDuration { + months: i64, +} + +impl YearMonthDuration { + pub const MAX: Self = Self { months: i64::MAX }; + pub const MIN: Self = Self { months: i64::MIN }; + + #[inline] + pub fn new(months: impl Into) -> Self { + Self { + months: months.into(), + } + } + + #[inline] + pub fn from_be_bytes(bytes: [u8; 8]) -> Self { + Self { + months: i64::from_be_bytes(bytes), + } + } + + /// [fn:years-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-years-from-duration) + #[inline] + pub fn years(self) -> i64 { + self.months / 12 + } + + /// [fn:months-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-months-from-duration) + #[inline] + pub fn months(self) -> i64 { + self.months % 12 + } + + #[inline] + pub(crate) const fn all_months(self) -> i64 { + self.months + } + + #[inline] + pub fn to_be_bytes(self) -> [u8; 8] { + self.months.to_be_bytes() + } + + /// [op:add-yearMonthDurations](https://www.w3.org/TR/xpath-functions-31/#func-add-yearMonthDurations) + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + pub fn checked_add(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Some(Self { + months: self.months.checked_add(rhs.months)?, + }) + } + + /// [op:subtract-yearMonthDurations](https://www.w3.org/TR/xpath-functions-31/#func-subtract-yearMonthDurations) + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + pub fn checked_sub(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Some(Self { + months: self.months.checked_sub(rhs.months)?, + }) + } + + /// Unary negation. + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + pub fn checked_neg(self) -> Option { + Some(Self { + months: self.months.checked_neg()?, + }) + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). 
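`partial_cmp` for `Duration` above follows the XPath recipe: add both durations to four reference dateTimes chosen to vary month lengths and leap years, and return an ordering only when all four probes agree. That is why classic pairs like P1M and P30D are incomparable. A sketch (module path assumed):

```rust
use std::cmp::Ordering;
use std::error::Error;
use std::str::FromStr;

use crate::oxsdatatypes::Duration; // path assumed

fn partial_order_examples() -> Result<(), Box<dyn Error>> {
    // 1969-09-01 + P1M lands exactly 30 days later, but 1697-02-01 + P1M
    // only 28 days later: the probes disagree, so no ordering exists.
    let one_month = Duration::from_str("P1M")?;
    let thirty_days = Duration::from_str("P30D")?;
    assert_eq!(one_month.partial_cmp(&thirty_days), None);
    // Unambiguous pairs still order normally.
    assert_eq!(
        Duration::from_str("P1Y")?.partial_cmp(&Duration::from_str("P1M")?),
        Some(Ordering::Greater)
    );
    Ok(())
}
```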
+ #[inline] + pub fn is_identical_with(self, other: Self) -> bool { + self == other + } +} + +impl From for Duration { + #[inline] + fn from(value: YearMonthDuration) -> Self { + Self { + year_month: value, + day_time: DayTimeDuration::default(), + } + } +} + +impl TryFrom for YearMonthDuration { + type Error = DurationOverflowError; + + #[inline] + fn try_from(value: Duration) -> Result { + if value.day_time == DayTimeDuration::default() { + Ok(value.year_month) + } else { + Err(DurationOverflowError) + } + } +} + +impl FromStr for YearMonthDuration { + type Err = ParseDurationError; + + fn from_str(input: &str) -> Result { + let parts = ensure_complete(input, duration_parts)?; + if parts.day_time.is_some() { + return Err(Self::Err::msg( + "There must not be any day or time component in a yearMonthDuration", + )); + } + Ok(Self::new( + parts + .year_month + .ok_or(Self::Err::msg("No year and month values found"))?, + )) + } +} + +impl fmt::Display for YearMonthDuration { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.months == 0 { + f.write_str("P0M") + } else { + Duration::from(*self).fmt(f) + } + } +} + +impl PartialEq for YearMonthDuration { + #[inline] + fn eq(&self, other: &Duration) -> bool { + Duration::from(*self).eq(other) + } +} + +impl PartialEq for Duration { + #[inline] + fn eq(&self, other: &YearMonthDuration) -> bool { + self.eq(&Self::from(*other)) + } +} + +impl PartialOrd for YearMonthDuration { + #[inline] + fn partial_cmp(&self, other: &Duration) -> Option { + Duration::from(*self).partial_cmp(other) + } +} + +impl PartialOrd for Duration { + #[inline] + fn partial_cmp(&self, other: &YearMonthDuration) -> Option { + self.partial_cmp(&Self::from(*other)) + } +} + +/// [XML Schema `dayTimeDuration` datatype](https://www.w3.org/TR/xmlschema11-2/#dayTimeDuration) +/// +/// It stores the duration as a number of seconds encoded using a [`Decimal`]. +#[derive( + Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash, Default, Serialize, Deserialize, +)] +pub struct DayTimeDuration { + seconds: Decimal, +} + +impl DayTimeDuration { + pub const MAX: Self = Self { + seconds: Decimal::MAX, + }; + pub const MIN: Self = Self { + seconds: Decimal::MIN, + }; + + #[inline] + pub fn new(seconds: impl Into) -> Self { + Self { + seconds: seconds.into(), + } + } + + #[inline] + pub fn from_be_bytes(bytes: [u8; 16]) -> Self { + Self { + seconds: Decimal::from_be_bytes(bytes), + } + } + + /// [fn:days-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-days-from-duration) + #[allow(clippy::cast_possible_truncation)] + #[inline] + pub fn days(self) -> i64 { + (self.seconds.as_i128() / 86400) as i64 + } + + /// [fn:hours-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-hours-from-duration) + #[allow(clippy::cast_possible_truncation)] + #[inline] + pub fn hours(self) -> i64 { + ((self.seconds.as_i128() % 86400) / 3600) as i64 + } + + /// [fn:minutes-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-minutes-from-duration) + #[allow(clippy::cast_possible_truncation)] + #[inline] + pub fn minutes(self) -> i64 { + ((self.seconds.as_i128() % 3600) / 60) as i64 + } + + /// [fn:seconds-from-duration](https://www.w3.org/TR/xpath-functions-31/#func-seconds-from-duration) + #[inline] + pub fn seconds(self) -> Decimal { + self.seconds.checked_rem(60).unwrap() + } + + /// The duration in seconds. 
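A `DayTimeDuration` is nothing but one `Decimal` number of seconds, as `as_seconds` just below exposes directly; `days`, `hours`, and `minutes` are integer divisions of that single value, and `seconds` is the remainder modulo 60. A sketch (module paths assumed):

```rust
use std::error::Error;
use std::str::FromStr;

use crate::oxsdatatypes::{DayTimeDuration, Decimal}; // paths assumed

fn seconds_encoding_example() -> Result<(), Box<dyn Error>> {
    // P1DT2H3M4.5S is stored as the single decimal 93784.5 seconds.
    let d = DayTimeDuration::from_str("P1DT2H3M4.5S")?;
    assert_eq!(d.as_seconds(), Decimal::from_str("93784.5")?);
    assert_eq!((d.days(), d.hours(), d.minutes()), (1, 2, 3));
    assert_eq!(d.seconds(), Decimal::from_str("4.5")?);
    Ok(())
}
```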
+ #[inline] + pub const fn as_seconds(self) -> Decimal { + self.seconds + } + + #[inline] + pub fn to_be_bytes(self) -> [u8; 16] { + self.seconds.to_be_bytes() + } + + /// [op:add-dayTimeDurations](https://www.w3.org/TR/xpath-functions-31/#func-add-dayTimeDurations) + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + pub fn checked_add(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Some(Self { + seconds: self.seconds.checked_add(rhs.seconds)?, + }) + } + + /// [op:subtract-dayTimeDurations](https://www.w3.org/TR/xpath-functions-31/#func-subtract-dayTimeDurations) + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + pub fn checked_sub(self, rhs: impl Into) -> Option { + let rhs = rhs.into(); + Some(Self { + seconds: self.seconds.checked_sub(rhs.seconds)?, + }) + } + + /// Unary negation. + /// + /// Returns `None` in case of overflow ([`FODT0002`](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002)). + #[inline] + pub fn checked_neg(self) -> Option { + Some(Self { + seconds: self.seconds.checked_neg()?, + }) + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + pub fn is_identical_with(self, other: Self) -> bool { + self == other + } +} + +impl From for Duration { + #[inline] + fn from(value: DayTimeDuration) -> Self { + Self { + year_month: YearMonthDuration::default(), + day_time: value, + } + } +} + +impl TryFrom for DayTimeDuration { + type Error = DurationOverflowError; + + #[inline] + fn try_from(value: Duration) -> Result { + if value.year_month == YearMonthDuration::default() { + Ok(value.day_time) + } else { + Err(DurationOverflowError) + } + } +} + +impl TryFrom for DayTimeDuration { + type Error = DurationOverflowError; + + #[inline] + fn try_from(value: StdDuration) -> Result { + Ok(Self { + seconds: Decimal::new( + i128::try_from(value.as_nanos()).map_err(|_| DurationOverflowError)?, + 9, + ) + .map_err(|_| DurationOverflowError)?, + }) + } +} + +impl TryFrom for StdDuration { + type Error = DurationOverflowError; + + #[inline] + fn try_from(value: DayTimeDuration) -> Result { + if value.seconds.is_negative() { + return Err(DurationOverflowError); + } + let secs = value.seconds.checked_floor().ok_or(DurationOverflowError)?; + let nanos = value + .seconds + .checked_sub(secs) + .ok_or(DurationOverflowError)? + .checked_mul(1_000_000_000) + .ok_or(DurationOverflowError)? 
+ .checked_floor() + .ok_or(DurationOverflowError)?; + Ok(Self::new( + secs.as_i128() + .try_into() + .map_err(|_| DurationOverflowError)?, + nanos + .as_i128() + .try_into() + .map_err(|_| DurationOverflowError)?, + )) + } +} + +impl FromStr for DayTimeDuration { + type Err = ParseDurationError; + + fn from_str(input: &str) -> Result { + let parts = ensure_complete(input, duration_parts)?; + if parts.year_month.is_some() { + return Err(Self::Err::msg( + "There must not be any year or month component in a dayTimeDuration", + )); + } + Ok(Self::new( + parts + .day_time + .ok_or(Self::Err::msg("No day or time values found"))?, + )) + } +} + +impl fmt::Display for DayTimeDuration { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Duration::from(*self).fmt(f) + } +} + +impl PartialEq for DayTimeDuration { + #[inline] + fn eq(&self, other: &Duration) -> bool { + Duration::from(*self).eq(other) + } +} + +impl PartialEq for Duration { + #[inline] + fn eq(&self, other: &DayTimeDuration) -> bool { + self.eq(&Self::from(*other)) + } +} + +impl PartialEq for DayTimeDuration { + #[inline] + fn eq(&self, other: &YearMonthDuration) -> bool { + Duration::from(*self).eq(&Duration::from(*other)) + } +} + +impl PartialEq for YearMonthDuration { + #[inline] + fn eq(&self, other: &DayTimeDuration) -> bool { + Duration::from(*self).eq(&Duration::from(*other)) + } +} + +impl PartialOrd for DayTimeDuration { + #[inline] + fn partial_cmp(&self, other: &Duration) -> Option { + Duration::from(*self).partial_cmp(other) + } +} + +impl PartialOrd for Duration { + #[inline] + fn partial_cmp(&self, other: &DayTimeDuration) -> Option { + self.partial_cmp(&Self::from(*other)) + } +} + +impl PartialOrd for DayTimeDuration { + #[inline] + fn partial_cmp(&self, other: &YearMonthDuration) -> Option { + Duration::from(*self).partial_cmp(&Duration::from(*other)) + } +} + +impl PartialOrd for YearMonthDuration { + #[inline] + fn partial_cmp(&self, other: &DayTimeDuration) -> Option { + Duration::from(*self).partial_cmp(&Duration::from(*other)) + } +} + +// [6] duYearFrag ::= unsignedNoDecimalPtNumeral 'Y' +// [7] duMonthFrag ::= unsignedNoDecimalPtNumeral 'M' +// [8] duDayFrag ::= unsignedNoDecimalPtNumeral 'D' +// [9] duHourFrag ::= unsignedNoDecimalPtNumeral 'H' +// [10] duMinuteFrag ::= unsignedNoDecimalPtNumeral 'M' +// [11] duSecondFrag ::= (unsignedNoDecimalPtNumeral | unsignedDecimalPtNumeral) 'S' +// [12] duYearMonthFrag ::= (duYearFrag duMonthFrag?) | duMonthFrag +// [13] duTimeFrag ::= 'T' ((duHourFrag duMinuteFrag? duSecondFrag?) | (duMinuteFrag duSecondFrag?) | duSecondFrag) +// [14] duDayTimeFrag ::= (duDayFrag duTimeFrag?) | duTimeFrag +// [15] durationLexicalRep ::= '-'? 'P' ((duYearMonthFrag duDayTimeFrag?) 
| duDayTimeFrag) +struct DurationParts { + year_month: Option<i64>, + day_time: Option<Decimal>, +} + +fn duration_parts(input: &str) -> Result<(DurationParts, &str), ParseDurationError> { + // States + const START: u32 = 0; + const AFTER_YEAR: u32 = 1; + const AFTER_MONTH: u32 = 2; + const AFTER_DAY: u32 = 3; + const AFTER_T: u32 = 4; + const AFTER_HOUR: u32 = 5; + const AFTER_MINUTE: u32 = 6; + const AFTER_SECOND: u32 = 7; + + let (is_negative, input) = if let Some(left) = input.strip_prefix('-') { + (true, left) + } else { + (false, input) + }; + let mut input = expect_char(input, 'P', "Durations must start with 'P'")?; + let mut state = START; + let mut year_month: Option<i64> = None; + let mut day_time: Option<Decimal> = None; + while !input.is_empty() { + if let Some(left) = input.strip_prefix('T') { + if state >= AFTER_T { + return Err(ParseDurationError::msg("Duplicated time separator 'T'")); + } + state = AFTER_T; + input = left; + } else { + let (number_str, left) = decimal_prefix(input); + match left.chars().next() { + Some('Y') if state < AFTER_YEAR => { + year_month = Some( + year_month + .unwrap_or_default() + .checked_add( + apply_i64_neg( + i64::from_str(number_str).map_err(|_| OVERFLOW_ERROR)?, + is_negative, + )? + .checked_mul(12) + .ok_or(OVERFLOW_ERROR)?, + ) + .ok_or(OVERFLOW_ERROR)?, + ); + state = AFTER_YEAR; + } + Some('M') if state < AFTER_MONTH => { + year_month = Some( + year_month + .unwrap_or_default() + .checked_add(apply_i64_neg( + i64::from_str(number_str).map_err(|_| OVERFLOW_ERROR)?, + is_negative, + )?) + .ok_or(OVERFLOW_ERROR)?, + ); + state = AFTER_MONTH; + } + Some('D') if state < AFTER_DAY => { + if number_str.contains('.') { + return Err(ParseDurationError::msg( + "Decimal numbers are not allowed for days", + )); + } + day_time = Some( + day_time + .unwrap_or_default() + .checked_add( + apply_decimal_neg( + Decimal::from_str(number_str).map_err(|_| OVERFLOW_ERROR)?, + is_negative, + )? + .checked_mul(86400) + .ok_or(OVERFLOW_ERROR)?, + ) + .ok_or(OVERFLOW_ERROR)?, + ); + state = AFTER_DAY; + } + Some('H') if state == AFTER_T => { + if number_str.contains('.') { + return Err(ParseDurationError::msg( + "Decimal numbers are not allowed for hours", + )); + } + day_time = Some( + day_time + .unwrap_or_default() + .checked_add( + apply_decimal_neg( + Decimal::from_str(number_str).map_err(|_| OVERFLOW_ERROR)?, + is_negative, + )? + .checked_mul(3600) + .ok_or(OVERFLOW_ERROR)?, + ) + .ok_or(OVERFLOW_ERROR)?, + ); + state = AFTER_HOUR; + } + Some('M') if (AFTER_T..AFTER_MINUTE).contains(&state) => { + if number_str.contains('.') { + return Err(ParseDurationError::msg( + "Decimal numbers are not allowed for minutes", + )); + } + day_time = Some( + day_time + .unwrap_or_default() + .checked_add( + apply_decimal_neg( + Decimal::from_str(number_str).map_err(|_| OVERFLOW_ERROR)?, + is_negative, + )? + .checked_mul(60) + .ok_or(OVERFLOW_ERROR)?, + ) + .ok_or(OVERFLOW_ERROR)?, + ); + state = AFTER_MINUTE; + } + Some('S') if (AFTER_T..AFTER_SECOND).contains(&state) => { + day_time = Some( + day_time + .unwrap_or_default() + .checked_add(apply_decimal_neg( + Decimal::from_str(number_str).map_err(|_| OVERFLOW_ERROR)?, + is_negative, + )?)
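+ // Unlike days, hours and minutes, seconds may carry a decimal fraction, so the value is added without unit scaling.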
+ .ok_or(OVERFLOW_ERROR)?, + ); + state = AFTER_SECOND; + } + Some(_) => return Err(ParseDurationError::msg("Unexpected type character")), + None => { + return Err(ParseDurationError::msg( + "Numbers in durations must be followed by a type character", + )) + } + } + input = &left[1..]; + } + } + + Ok(( + DurationParts { + year_month, + day_time, + }, + input, + )) +} + +fn apply_i64_neg(value: i64, is_negative: bool) -> Result<i64, ParseDurationError> { + if is_negative { + value.checked_neg().ok_or(OVERFLOW_ERROR) + } else { + Ok(value) + } +} + +fn apply_decimal_neg(value: Decimal, is_negative: bool) -> Result<Decimal, ParseDurationError> { + if is_negative { + value.checked_neg().ok_or(OVERFLOW_ERROR) + } else { + Ok(value) + } +} + +fn ensure_complete<T>( + input: &str, + parse: impl FnOnce(&str) -> Result<(T, &str), ParseDurationError>, +) -> Result<T, ParseDurationError> { + let (result, left) = parse(input)?; + if !left.is_empty() { + return Err(ParseDurationError::msg("Unrecognized value suffix")); + } + Ok(result) +} + +fn expect_char<'a>( + input: &'a str, + constant: char, + error_message: &'static str, +) -> Result<&'a str, ParseDurationError> { + if let Some(left) = input.strip_prefix(constant) { + Ok(left) + } else { + Err(ParseDurationError::msg(error_message)) + } +} + +fn decimal_prefix(input: &str) -> (&str, &str) { + let mut end = input.len(); + let mut dot_seen = false; + for (i, c) in input.char_indices() { + if c.is_ascii_digit() { + // Ok + } else if c == '.' && !dot_seen { + dot_seen = true; + } else { + end = i; + break; + } + } + input.split_at(end) +} + +/// A parsing error +#[derive(Debug, Clone, thiserror::Error)] +#[error("{msg}")] +pub struct ParseDurationError { + msg: &'static str, +} + +const OVERFLOW_ERROR: ParseDurationError = ParseDurationError { + msg: "Overflow error", +}; + +impl ParseDurationError { + const fn msg(msg: &'static str) -> Self { + Self { msg } + } +} + +/// An overflow during [`Duration`]-related operations. +/// +/// Matches XPath [`FODT0002` error](https://www.w3.org/TR/xpath-functions-31/#ERRFODT0002). +#[derive(Debug, Clone, Copy, thiserror::Error)] +#[error("overflow during xsd:duration computation")] +pub struct DurationOverflowError; + +/// The year-month and the day-time components of a [`Duration`] have an opposite sign.
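+/// For example, `Duration::new(1, Decimal::from(-1))` (one month minus one second) is rejected with this error.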
+#[derive(Debug, Clone, Copy, thiserror::Error)] +#[error("The xsd:yearMonthDuration and xsd:dayTimeDuration components of a xsd:duration can't have opposite sign")] +pub struct OppositeSignInDurationComponentsError; + +impl From<OppositeSignInDurationComponentsError> for ParseDurationError { + #[inline] + fn from(_: OppositeSignInDurationComponentsError) -> Self { + Self { + msg: "The xsd:yearMonthDuration and xsd:dayTimeDuration components of a xsd:duration can't have opposite sign" + } + } +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + use std::error::Error; + + #[test] + fn from_str() -> Result<(), ParseDurationError> { + let min = Duration::new(i64::MIN, Decimal::MIN)?; + let max = Duration::new(i64::MAX, Decimal::MAX)?; + + assert_eq!(YearMonthDuration::from_str("P1Y")?.to_string(), "P1Y"); + assert_eq!(Duration::from_str("P1Y")?.to_string(), "P1Y"); + assert_eq!(YearMonthDuration::from_str("P1M")?.to_string(), "P1M"); + assert_eq!(Duration::from_str("P1M")?.to_string(), "P1M"); + assert_eq!(DayTimeDuration::from_str("P1D")?.to_string(), "P1D"); + assert_eq!(Duration::from_str("P1D")?.to_string(), "P1D"); + assert_eq!(DayTimeDuration::from_str("PT1H")?.to_string(), "PT1H"); + assert_eq!(Duration::from_str("PT1H")?.to_string(), "PT1H"); + assert_eq!(DayTimeDuration::from_str("PT1M")?.to_string(), "PT1M"); + assert_eq!(Duration::from_str("PT1M")?.to_string(), "PT1M"); + assert_eq!(DayTimeDuration::from_str("PT1.1S")?.to_string(), "PT1.1S"); + assert_eq!(Duration::from_str("PT1.1S")?.to_string(), "PT1.1S"); + assert_eq!(YearMonthDuration::from_str("-P1Y")?.to_string(), "-P1Y"); + assert_eq!(Duration::from_str("-P1Y")?.to_string(), "-P1Y"); + assert_eq!(YearMonthDuration::from_str("-P1M")?.to_string(), "-P1M"); + assert_eq!(Duration::from_str("-P1M")?.to_string(), "-P1M"); + assert_eq!(DayTimeDuration::from_str("-P1D")?.to_string(), "-P1D"); + assert_eq!(Duration::from_str("-P1D")?.to_string(), "-P1D"); + assert_eq!(DayTimeDuration::from_str("-PT1H")?.to_string(), "-PT1H"); + assert_eq!(Duration::from_str("-PT1H")?.to_string(), "-PT1H"); + assert_eq!(DayTimeDuration::from_str("-PT1M")?.to_string(), "-PT1M"); + assert_eq!(Duration::from_str("-PT1M")?.to_string(), "-PT1M"); + assert_eq!(DayTimeDuration::from_str("-PT1S")?.to_string(), "-PT1S"); + assert_eq!(Duration::from_str("-PT1S")?.to_string(), "-PT1S"); + assert_eq!(DayTimeDuration::from_str("-PT1.1S")?.to_string(), "-PT1.1S"); + assert_eq!(Duration::from_str("-PT1.1S")?.to_string(), "-PT1.1S"); + assert_eq!(Duration::from_str(&max.to_string())?, max); + assert_eq!(Duration::from_str(&min.to_string())?, min); + assert_eq!(Duration::from_str("PT0H")?.to_string(), "PT0S"); + assert_eq!(Duration::from_str("-PT0H")?.to_string(), "PT0S"); + assert_eq!(YearMonthDuration::from_str("P0Y")?.to_string(), "P0M"); + assert_eq!(DayTimeDuration::from_str("PT0H")?.to_string(), "PT0S"); + Ok(()) + } + + #[test] + fn from_std() -> Result<(), DurationOverflowError> { + assert_eq!( + Duration::try_from(StdDuration::new(10, 10))?.to_string(), + "PT10.00000001S" + ); + Ok(()) + } + + #[test] + fn to_std() -> Result<(), Box<dyn Error>> { + let duration = StdDuration::try_from(DayTimeDuration::from_str("PT10.00000001S")?)?; + assert_eq!(duration.as_secs(), 10); + assert_eq!(duration.subsec_nanos(), 10); + Ok(()) + } + + #[test] + fn to_be_bytes() { + assert_eq!( + Duration::from_be_bytes(Duration::MIN.to_be_bytes()), + Duration::MIN + ); + assert_eq!( + Duration::from_be_bytes(Duration::MAX.to_be_bytes()), + Duration::MAX + ); + assert_eq!(
YearMonthDuration::from_be_bytes(YearMonthDuration::MIN.to_be_bytes()), + YearMonthDuration::MIN + ); + assert_eq!( + YearMonthDuration::from_be_bytes(YearMonthDuration::MAX.to_be_bytes()), + YearMonthDuration::MAX + ); + assert_eq!( + DayTimeDuration::from_be_bytes(DayTimeDuration::MIN.to_be_bytes()), + DayTimeDuration::MIN + ); + assert_eq!( + DayTimeDuration::from_be_bytes(DayTimeDuration::MAX.to_be_bytes()), + DayTimeDuration::MAX + ); + } + + #[test] + fn equals() -> Result<(), ParseDurationError> { + assert_eq!( + YearMonthDuration::from_str("P1Y")?, + YearMonthDuration::from_str("P12M")? + ); + assert_eq!( + YearMonthDuration::from_str("P1Y")?, + Duration::from_str("P12M")? + ); + assert_eq!( + Duration::from_str("P1Y")?, + YearMonthDuration::from_str("P12M")? + ); + assert_eq!(Duration::from_str("P1Y")?, Duration::from_str("P12M")?); + assert_eq!( + DayTimeDuration::from_str("PT24H")?, + DayTimeDuration::from_str("P1D")? + ); + assert_eq!( + DayTimeDuration::from_str("PT24H")?, + Duration::from_str("P1D")? + ); + assert_eq!( + Duration::from_str("PT24H")?, + DayTimeDuration::from_str("P1D")? + ); + assert_eq!(Duration::from_str("PT24H")?, Duration::from_str("P1D")?); + assert_ne!(Duration::from_str("P1Y")?, Duration::from_str("P365D")?); + assert_eq!(Duration::from_str("P0Y")?, Duration::from_str("P0D")?); + assert_ne!(Duration::from_str("P1Y")?, Duration::from_str("P365D")?); + assert_eq!(Duration::from_str("P2Y")?, Duration::from_str("P24M")?); + assert_eq!(Duration::from_str("P10D")?, Duration::from_str("PT240H")?); + assert_eq!( + Duration::from_str("P2Y0M0DT0H0M0S")?, + Duration::from_str("P24M")? + ); + assert_eq!( + Duration::from_str("P0Y0M10D")?, + Duration::from_str("PT240H")? + ); + assert_ne!(Duration::from_str("P1M")?, Duration::from_str("P30D")?); + Ok(()) + } + + #[test] + #[allow(clippy::neg_cmp_op_on_partial_ord)] + fn cmp() -> Result<(), ParseDurationError> { + assert!(Duration::from_str("P1Y1D")? < Duration::from_str("P13MT25H")?); + assert!(YearMonthDuration::from_str("P1Y")? < YearMonthDuration::from_str("P13M")?); + assert!(Duration::from_str("P1Y")? < YearMonthDuration::from_str("P13M")?); + assert!(YearMonthDuration::from_str("P1Y")? < Duration::from_str("P13M")?); + assert!(DayTimeDuration::from_str("P1D")? < DayTimeDuration::from_str("PT25H")?); + assert!(DayTimeDuration::from_str("PT1H")? < DayTimeDuration::from_str("PT61M")?); + assert!(DayTimeDuration::from_str("PT1M")? < DayTimeDuration::from_str("PT61S")?); + assert!(Duration::from_str("PT1H")? < DayTimeDuration::from_str("PT61M")?); + assert!(DayTimeDuration::from_str("PT1H")? < Duration::from_str("PT61M")?); + assert!(YearMonthDuration::from_str("P1M")? < DayTimeDuration::from_str("P40D")?); + assert!(DayTimeDuration::from_str("P25D")? 
< YearMonthDuration::from_str("P1M")?); + Ok(()) + } + + #[test] + fn years() -> Result<(), ParseDurationError> { + assert_eq!(Duration::from_str("P20Y15M")?.years(), 21); + assert_eq!(Duration::from_str("-P15M")?.years(), -1); + assert_eq!(Duration::from_str("-P2DT15H")?.years(), 0); + Ok(()) + } + + #[test] + fn months() -> Result<(), ParseDurationError> { + assert_eq!(Duration::from_str("P20Y15M")?.months(), 3); + assert_eq!(Duration::from_str("-P20Y18M")?.months(), -6); + assert_eq!(Duration::from_str("-P2DT15H0M0S")?.months(), 0); + Ok(()) + } + + #[test] + fn days() -> Result<(), ParseDurationError> { + assert_eq!(Duration::from_str("P3DT10H")?.days(), 3); + assert_eq!(Duration::from_str("P3DT55H")?.days(), 5); + assert_eq!(Duration::from_str("P3Y5M")?.days(), 0); + Ok(()) + } + + #[test] + fn hours() -> Result<(), ParseDurationError> { + assert_eq!(Duration::from_str("P3DT10H")?.hours(), 10); + assert_eq!(Duration::from_str("P3DT12H32M12S")?.hours(), 12); + assert_eq!(Duration::from_str("PT123H")?.hours(), 3); + assert_eq!(Duration::from_str("-P3DT10H")?.hours(), -10); + Ok(()) + } + + #[test] + fn minutes() -> Result<(), ParseDurationError> { + assert_eq!(Duration::from_str("P3DT10H")?.minutes(), 0); + assert_eq!(Duration::from_str("-P5DT12H30M")?.minutes(), -30); + Ok(()) + } + + #[test] + fn seconds() -> Result<(), Box> { + assert_eq!( + Duration::from_str("P3DT10H12.5S")?.seconds(), + Decimal::from_str("12.5")? + ); + assert_eq!( + Duration::from_str("-PT256S")?.seconds(), + Decimal::from_str("-16.0")? + ); + Ok(()) + } + + #[test] + fn add() -> Result<(), ParseDurationError> { + assert_eq!( + Duration::from_str("P2Y11M")?.checked_add(Duration::from_str("P3Y3M")?), + Some(Duration::from_str("P6Y2M")?) + ); + assert_eq!( + Duration::from_str("P2DT12H5M")?.checked_add(Duration::from_str("P5DT12H")?), + Some(Duration::from_str("P8DT5M")?) + ); + assert_eq!( + Duration::from_str("P1M2D")?.checked_add(Duration::from_str("-P3D")?), + None + ); + assert_eq!( + Duration::from_str("P1M2D")?.checked_add(Duration::from_str("-P2M")?), + None + ); + Ok(()) + } + + #[test] + fn sub() -> Result<(), ParseDurationError> { + assert_eq!( + Duration::from_str("P2Y11M")?.checked_sub(Duration::from_str("P3Y3M")?), + Some(Duration::from_str("-P4M")?) + ); + assert_eq!( + Duration::from_str("P2DT12H")?.checked_sub(Duration::from_str("P1DT10H30M")?), + Some(Duration::from_str("P1DT1H30M")?) + ); + assert_eq!( + Duration::from_str("P1M2D")?.checked_sub(Duration::from_str("P3D")?), + None + ); + assert_eq!( + Duration::from_str("P1M2D")?.checked_sub(Duration::from_str("P2M")?), + None + ); + Ok(()) + } + + #[test] + fn minimally_conformant() -> Result<(), ParseDurationError> { + // All minimally conforming processors must support fractional-second duration values + // to milliseconds (i.e. those expressible with three fraction digits). + assert_eq!(Duration::from_str("PT0.001S")?.to_string(), "PT0.001S"); + assert_eq!(Duration::from_str("-PT0.001S")?.to_string(), "-PT0.001S"); + + // All minimally conforming processors must support duration values with months values + // in the range −119999 to 119999 months (9999 years and 11 months) + // and seconds values in the range −31622400 to 31622400 seconds (one leap-year). 
+ assert_eq!( + Duration::from_str("P119999MT31622400S")?.to_string(), + "P9999Y11M366D" + ); + assert_eq!( + Duration::from_str("-P119999MT31622400S")?.to_string(), + "-P9999Y11M366D" + ); + Ok(()) + } +} diff --git a/ng-oxigraph/src/oxsdatatypes/float.rs b/ng-oxigraph/src/oxsdatatypes/float.rs new file mode 100644 index 0000000..45c3f64 --- /dev/null +++ b/ng-oxigraph/src/oxsdatatypes/float.rs @@ -0,0 +1,310 @@ +use crate::oxsdatatypes::{Boolean, Double, Integer}; +use serde::{Deserialize, Serialize}; +use std::cmp::Ordering; +use std::fmt; +use std::num::ParseFloatError; +use std::ops::{Add, Div, Mul, Neg, Sub}; +use std::str::FromStr; + +/// [XML Schema `float` datatype](https://www.w3.org/TR/xmlschema11-2/#float) +/// +/// Uses internally a [`f32`]. +/// +///
<div class="warning">Serialization does not follow the canonical mapping.</div>
+#[derive(Debug, Clone, Copy, Default, PartialEq, Serialize, Deserialize)] +#[repr(transparent)] +pub struct Float { + value: f32, +} + +impl Float { + pub const INFINITY: Self = Self { + value: f32::INFINITY, + }; + pub const MAX: Self = Self { value: f32::MAX }; + pub const MIN: Self = Self { value: f32::MIN }; + pub const NAN: Self = Self { value: f32::NAN }; + pub const NEG_INFINITY: Self = Self { + value: f32::NEG_INFINITY, + }; + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 4]) -> Self { + Self { + value: f32::from_be_bytes(bytes), + } + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 4] { + self.value.to_be_bytes() + } + + /// [fn:abs](https://www.w3.org/TR/xpath-functions-31/#func-abs) + #[inline] + #[must_use] + pub fn abs(self) -> Self { + self.value.abs().into() + } + + /// [fn:ceiling](https://www.w3.org/TR/xpath-functions-31/#func-ceiling) + #[inline] + #[must_use] + pub fn ceil(self) -> Self { + self.value.ceil().into() + } + + /// [fn:floor](https://www.w3.org/TR/xpath-functions-31/#func-floor) + #[inline] + #[must_use] + pub fn floor(self) -> Self { + self.value.floor().into() + } + + /// [fn:round](https://www.w3.org/TR/xpath-functions-31/#func-round) + #[inline] + #[must_use] + pub fn round(self) -> Self { + self.value.round().into() + } + + #[inline] + #[must_use] + pub fn is_nan(self) -> bool { + self.value.is_nan() + } + + #[inline] + #[must_use] + pub fn is_finite(self) -> bool { + self.value.is_finite() + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self.value.to_bits() == other.value.to_bits() + } +} + +impl From<Float> for f32 { + #[inline] + fn from(value: Float) -> Self { + value.value + } +} + +impl From<Float> for f64 { + #[inline] + fn from(value: Float) -> Self { + value.value.into() + } +} + +impl From<f32> for Float { + #[inline] + fn from(value: f32) -> Self { + Self { value } + } +} + +impl From<i8> for Float { + #[inline] + fn from(value: i8) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<i16> for Float { + #[inline] + fn from(value: i16) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<u8> for Float { + #[inline] + fn from(value: u8) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<u16> for Float { + #[inline] + fn from(value: u16) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<Boolean> for Float { + #[inline] + fn from(value: Boolean) -> Self { + f32::from(bool::from(value)).into() + } +} + +impl From<Integer> for Float { + #[inline] + #[allow(clippy::cast_precision_loss)] + fn from(value: Integer) -> Self { + (i64::from(value) as f32).into() + } +} + +impl From<Double> for Float { + #[inline] + #[allow(clippy::cast_possible_truncation)] + fn from(value: Double) -> Self { + Self { + value: f64::from(value) as f32, + } + } +} + +impl FromStr for Float { + type Err = ParseFloatError; + + #[inline] + fn from_str(input: &str) -> Result<Self, Self::Err> { + Ok(f32::from_str(input)?.into()) + } +} + +impl fmt::Display for Float { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.value == f32::INFINITY { + f.write_str("INF") + } else if self.value == f32::NEG_INFINITY { + f.write_str("-INF") + } else { + self.value.fmt(f) + } + } +} + +impl PartialOrd for Float { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + self.value.partial_cmp(&other.value) + } +} + +impl Neg for Float { + type Output = Self; + + #[inline] + fn neg(self) -> Self { +
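+ // IEEE 754 negation only flips the sign bit, so it also maps 0.0 to -0.0 and negates NaN.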
(-self.value).into() + } +} + +impl Add for Float { + type Output = Self; + + #[inline] + fn add(self, rhs: Self) -> Self { + (self.value + rhs.value).into() + } +} + +impl Sub for Float { + type Output = Self; + + #[inline] + fn sub(self, rhs: Self) -> Self { + (self.value - rhs.value).into() + } +} + +impl Mul for Float { + type Output = Self; + + #[inline] + fn mul(self, rhs: Self) -> Self { + (self.value * rhs.value).into() + } +} + +impl Div for Float { + type Output = Self; + + #[inline] + fn div(self, rhs: Self) -> Self { + (self.value / rhs.value).into() + } +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + + #[test] + fn eq() { + assert_eq!(Float::from(0.), Float::from(0.)); + assert_ne!(Float::NAN, Float::NAN); + assert_eq!(Float::from(-0.), Float::from(0.)); + } + + #[test] + fn cmp() { + assert_eq!( + Float::from(0.).partial_cmp(&Float::from(0.)), + Some(Ordering::Equal) + ); + assert_eq!( + Float::INFINITY.partial_cmp(&Float::MAX), + Some(Ordering::Greater) + ); + assert_eq!( + Float::NEG_INFINITY.partial_cmp(&Float::MIN), + Some(Ordering::Less) + ); + assert_eq!(Float::NAN.partial_cmp(&Float::from(0.)), None); + assert_eq!(Float::NAN.partial_cmp(&Float::NAN), None); + assert_eq!( + Float::from(0.).partial_cmp(&Float::from(-0.)), + Some(Ordering::Equal) + ); + } + + #[test] + fn is_identical_with() { + assert!(Float::from(0.).is_identical_with(Float::from(0.))); + assert!(Float::NAN.is_identical_with(Float::NAN)); + assert!(!Float::from(-0.).is_identical_with(Float::from(0.))); + } + + #[test] + fn from_str() -> Result<(), ParseFloatError> { + assert_eq!(Float::from_str("NaN")?.to_string(), "NaN"); + assert_eq!(Float::from_str("INF")?.to_string(), "INF"); + assert_eq!(Float::from_str("+INF")?.to_string(), "INF"); + assert_eq!(Float::from_str("-INF")?.to_string(), "-INF"); + assert_eq!(Float::from_str("0.0E0")?.to_string(), "0"); + assert_eq!(Float::from_str("-0.0E0")?.to_string(), "-0"); + assert_eq!(Float::from_str("0.1e1")?.to_string(), "1"); + assert_eq!(Float::from_str("-0.1e1")?.to_string(), "-1"); + assert_eq!(Float::from_str("1.e1")?.to_string(), "10"); + assert_eq!(Float::from_str("-1.e1")?.to_string(), "-10"); + assert_eq!(Float::from_str("1")?.to_string(), "1"); + assert_eq!(Float::from_str("-1")?.to_string(), "-1"); + assert_eq!(Float::from_str("1.")?.to_string(), "1"); + assert_eq!(Float::from_str("-1.")?.to_string(), "-1"); + assert_eq!(Float::from_str(&f32::MIN.to_string())?, Float::MIN); + assert_eq!(Float::from_str(&f32::MAX.to_string())?, Float::MAX); + Ok(()) + } +} diff --git a/ng-oxigraph/src/oxsdatatypes/integer.rs b/ng-oxigraph/src/oxsdatatypes/integer.rs new file mode 100644 index 0000000..c23f9ed --- /dev/null +++ b/ng-oxigraph/src/oxsdatatypes/integer.rs @@ -0,0 +1,400 @@ +use crate::oxsdatatypes::{Boolean, Decimal, Double, Float}; +use serde::{Deserialize, Serialize}; +use std::fmt; +use std::num::ParseIntError; +use std::str::FromStr; + +/// [XML Schema `integer` datatype](https://www.w3.org/TR/xmlschema11-2/#integer) +/// +/// Uses internally a [`i64`]. 
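+/// Note that `xsd:integer` itself is unbounded: values outside the `i64` range fail to parse or convert here.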
+#[derive( + Debug, Clone, Copy, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, +)] +#[repr(transparent)] +pub struct Integer { + value: i64, +} + +impl Integer { + pub const MAX: Self = Self { value: i64::MAX }; + pub const MIN: Self = Self { value: i64::MIN }; + + #[inline] + #[must_use] + pub fn from_be_bytes(bytes: [u8; 8]) -> Self { + Self { + value: i64::from_be_bytes(bytes), + } + } + + #[inline] + #[must_use] + pub fn to_be_bytes(self) -> [u8; 8] { + self.value.to_be_bytes() + } + + /// [op:numeric-add](https://www.w3.org/TR/xpath-functions-31/#func-numeric-add) + /// + /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_add(self, rhs: impl Into<Self>) -> Option<Self> { + Some(Self { + value: self.value.checked_add(rhs.into().value)?, + }) + } + + /// [op:numeric-subtract](https://www.w3.org/TR/xpath-functions-31/#func-numeric-subtract) + /// + /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_sub(self, rhs: impl Into<Self>) -> Option<Self> { + Some(Self { + value: self.value.checked_sub(rhs.into().value)?, + }) + } + + /// [op:numeric-multiply](https://www.w3.org/TR/xpath-functions-31/#func-numeric-multiply) + /// + /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_mul(self, rhs: impl Into<Self>) -> Option<Self> { + Some(Self { + value: self.value.checked_mul(rhs.into().value)?, + }) + } + + /// [op:numeric-integer-divide](https://www.w3.org/TR/xpath-functions-31/#func-numeric-integer-divide) + /// + /// Returns `None` in case of division by 0 ([FOAR0001](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0001)) or overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_div(self, rhs: impl Into<Self>) -> Option<Self> { + Some(Self { + value: self.value.checked_div(rhs.into().value)?, + }) + } + + /// [op:numeric-mod](https://www.w3.org/TR/xpath-functions-31/#func-numeric-mod) + /// + /// Returns `None` in case of division by 0 ([FOAR0001](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0001)) or overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_rem(self, rhs: impl Into<Self>) -> Option<Self> { + Some(Self { + value: self.value.checked_rem(rhs.into().value)?, + }) + } + + /// Euclidean remainder + /// + /// Returns `None` in case of division by 0 ([FOAR0001](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0001)) or overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_rem_euclid(self, rhs: impl Into<Self>) -> Option<Self> { + Some(Self { + value: self.value.checked_rem_euclid(rhs.into().value)?, + }) + } + + /// [op:numeric-unary-minus](https://www.w3.org/TR/xpath-functions-31/#func-numeric-unary-minus) + /// + /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)). + #[inline] + #[must_use] + pub fn checked_neg(self) -> Option<Self> { + Some(Self { + value: self.value.checked_neg()?, + }) + } + + /// [fn:abs](https://www.w3.org/TR/xpath-functions-31/#func-abs) + /// + /// Returns `None` in case of overflow ([FOAR0002](https://www.w3.org/TR/xpath-functions-31/#ERRFOAR0002)).
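+ /// For example, `Integer::MIN.checked_abs()` returns `None`, since the absolute value of `i64::MIN` does not fit in an `i64`.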
+ #[inline] + #[must_use] + pub fn checked_abs(self) -> Option<Self> { + Some(Self { + value: self.value.checked_abs()?, + }) + } + + #[inline] + #[must_use] + pub const fn is_negative(self) -> bool { + self.value < 0 + } + + #[inline] + #[must_use] + pub const fn is_positive(self) -> bool { + self.value > 0 + } + + /// Checks if the two values are [identical](https://www.w3.org/TR/xmlschema11-2/#identity). + #[inline] + #[must_use] + pub fn is_identical_with(self, other: Self) -> bool { + self == other + } +} + +impl From<bool> for Integer { + #[inline] + fn from(value: bool) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<i8> for Integer { + #[inline] + fn from(value: i8) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<i16> for Integer { + #[inline] + fn from(value: i16) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<i32> for Integer { + #[inline] + fn from(value: i32) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<i64> for Integer { + #[inline] + fn from(value: i64) -> Self { + Self { value } + } +} + +impl From<u8> for Integer { + #[inline] + fn from(value: u8) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<u16> for Integer { + #[inline] + fn from(value: u16) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<u32> for Integer { + #[inline] + fn from(value: u32) -> Self { + Self { + value: value.into(), + } + } +} + +impl From<Boolean> for Integer { + #[inline] + fn from(value: Boolean) -> Self { + bool::from(value).into() + } +} + +impl From<Integer> for i64 { + #[inline] + fn from(value: Integer) -> Self { + value.value + } +} + +impl FromStr for Integer { + type Err = ParseIntError; + + #[inline] + fn from_str(input: &str) -> Result<Self, Self::Err> { + Ok(i64::from_str(input)?.into()) + } +} + +impl fmt::Display for Integer { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.value.fmt(f) + } +} + +impl TryFrom<Float> for Integer { + type Error = TooLargeForIntegerError; + + #[inline] + fn try_from(value: Float) -> Result<Self, Self::Error> { + Decimal::try_from(value) + .map_err(|_| TooLargeForIntegerError)? + .try_into() + } +} + +impl TryFrom<Double> for Integer { + type Error = TooLargeForIntegerError; + + #[inline] + fn try_from(value: Double) -> Result<Self, Self::Error> { + Decimal::try_from(value) + .map_err(|_| TooLargeForIntegerError)? + .try_into() + } +} + +/// The input is too large to fit into an [`Integer`]. +/// +/// Matches XPath [`FOCA0003` error](https://www.w3.org/TR/xpath-functions-31/#ERRFOCA0003). +#[derive(Debug, Clone, Copy, thiserror::Error)] +#[error("Value too large for xsd:integer internal representation")] +pub struct TooLargeForIntegerError; + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + + #[test] + fn from_str() -> Result<(), ParseIntError> { + assert_eq!(Integer::from_str("0")?.to_string(), "0"); + assert_eq!(Integer::from_str("-0")?.to_string(), "0"); + assert_eq!(Integer::from_str("123")?.to_string(), "123"); + assert_eq!(Integer::from_str("-123")?.to_string(), "-123"); + Integer::from_str("123456789123456789123456789123456789123456789").unwrap_err(); + Ok(()) + } + + #[test] + fn from_float() -> Result<(), ParseIntError> { + assert_eq!( + Integer::try_from(Float::from(0.)).ok(), + Some(Integer::from_str("0")?) + ); + assert_eq!( + Integer::try_from(Float::from(-0.)).ok(), + Some(Integer::from_str("0")?) + ); + assert_eq!( + Integer::try_from(Float::from(-123.1)).ok(), + Some(Integer::from_str("-123")?
+ ); + Integer::try_from(Float::from(f32::NAN)).unwrap_err(); + Integer::try_from(Float::from(f32::INFINITY)).unwrap_err(); + Integer::try_from(Float::from(f32::NEG_INFINITY)).unwrap_err(); + Integer::try_from(Float::from(f32::MIN)).unwrap_err(); + Integer::try_from(Float::from(f32::MAX)).unwrap_err(); + assert!( + Integer::try_from(Float::from(1_672_507_300_000.)) + .unwrap() + .checked_sub(Integer::from_str("1672507300000")?) + .unwrap() + .checked_abs() + .unwrap() + < Integer::from(1_000_000) + ); + Ok(()) + } + + #[test] + fn from_double() -> Result<(), ParseIntError> { + assert_eq!( + Integer::try_from(Double::from(0.0)).ok(), + Some(Integer::from_str("0")?) + ); + assert_eq!( + Integer::try_from(Double::from(-0.0)).ok(), + Some(Integer::from_str("0")?) + ); + assert_eq!( + Integer::try_from(Double::from(-123.1)).ok(), + Some(Integer::from_str("-123")?) + ); + assert!( + Integer::try_from(Double::from(1_672_507_300_000.)) + .unwrap() + .checked_sub(Integer::from_str("1672507300000").unwrap()) + .unwrap() + .checked_abs() + .unwrap() + < Integer::from(10) + ); + Integer::try_from(Double::from(f64::NAN)).unwrap_err(); + Integer::try_from(Double::from(f64::INFINITY)).unwrap_err(); + Integer::try_from(Double::from(f64::NEG_INFINITY)).unwrap_err(); + Integer::try_from(Double::from(f64::MIN)).unwrap_err(); + Integer::try_from(Double::from(f64::MAX)).unwrap_err(); + Ok(()) + } + + #[test] + fn from_decimal() -> Result<(), ParseIntError> { + assert_eq!( + Integer::try_from(Decimal::from(0)).ok(), + Some(Integer::from_str("0")?) + ); + assert_eq!( + Integer::try_from(Decimal::from_str("-123.1").unwrap()).ok(), + Some(Integer::from_str("-123")?) + ); + Integer::try_from(Decimal::MIN).unwrap_err(); + Integer::try_from(Decimal::MAX).unwrap_err(); + Ok(()) + } + + #[test] + fn add() { + assert_eq!( + Integer::MIN.checked_add(1), + Some(Integer::from(i64::MIN + 1)) + ); + assert_eq!(Integer::MAX.checked_add(1), None); + } + + #[test] + fn sub() { + assert_eq!(Integer::MIN.checked_sub(1), None); + assert_eq!( + Integer::MAX.checked_sub(1), + Some(Integer::from(i64::MAX - 1)) + ); + } + + #[test] + fn mul() { + assert_eq!(Integer::MIN.checked_mul(2), None); + assert_eq!(Integer::MAX.checked_mul(2), None); + } + + #[test] + fn div() { + assert_eq!(Integer::from(1).checked_div(0), None); + } + + #[test] + fn rem() { + assert_eq!(Integer::from(10).checked_rem(3), Some(Integer::from(1))); + assert_eq!(Integer::from(6).checked_rem(-2), Some(Integer::from(0))); + assert_eq!(Integer::from(1).checked_rem(0), None); + } +} diff --git a/ng-oxigraph/src/oxsdatatypes/mod.rs b/ng-oxigraph/src/oxsdatatypes/mod.rs new file mode 100644 index 0000000..00e0aa4 --- /dev/null +++ b/ng-oxigraph/src/oxsdatatypes/mod.rs @@ -0,0 +1,21 @@ +mod boolean; +mod date_time; +mod decimal; +mod double; +mod duration; +mod float; +mod integer; + +pub use self::boolean::Boolean; +pub use self::date_time::{ + Date, DateTime, DateTimeOverflowError, GDay, GMonth, GMonthDay, GYear, GYearMonth, + InvalidTimezoneError, ParseDateTimeError, Time, TimezoneOffset, +}; +pub use self::decimal::{Decimal, ParseDecimalError, TooLargeForDecimalError}; +pub use self::double::Double; +pub use self::duration::{ + DayTimeDuration, Duration, DurationOverflowError, OppositeSignInDurationComponentsError, + ParseDurationError, YearMonthDuration, +}; +pub use self::float::Float; +pub use self::integer::{Integer, TooLargeForIntegerError}; diff --git a/ng-oxigraph/src/oxttl/README.md b/ng-oxigraph/src/oxttl/README.md new file mode 100644 index 0000000..47ec03e 
--- /dev/null +++ b/ng-oxigraph/src/oxttl/README.md @@ -0,0 +1,54 @@ +OxTTL +===== + +[![Latest Version](https://img.shields.io/crates/v/oxttl.svg)](https://crates.io/crates/oxttl) +[![Released API docs](https://docs.rs/oxttl/badge.svg)](https://docs.rs/oxttl) +[![Crates.io downloads](https://img.shields.io/crates/d/oxttl)](https://crates.io/crates/oxttl) +[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) +[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community) + +Oxttl is a set of parsers and serializers for [Turtle](https://www.w3.org/TR/turtle/), [TriG](https://www.w3.org/TR/trig/), [N-Triples](https://www.w3.org/TR/n-triples/), [N-Quads](https://www.w3.org/TR/n-quads/) and [N3](https://w3c.github.io/N3/spec/). + +Support for [RDF-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html) is also available behind the `rdf-star` feature for all languages but N3 ([Turtle-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#turtle-star), [TriG-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#trig-star), [N-Triples-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-triples-star) and [N-Quads-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-quads-star)). + +It is designed as a low-level parser compatible with both synchronous and asynchronous I/O (a second usage sketch is appended at the end of this README). + +Usage example counting the number of people in a Turtle file: +```rust +use oxrdf::{NamedNodeRef, vocab::rdf}; +use oxttl::TurtleParser; + +let file = b"@base <http://example.com/> . +@prefix schema: <http://schema.org/> . +<foo> a schema:Person ; + schema:name \"Foo\" . +<bar> a schema:Person ; + schema:name \"Bar\" ."; + +let schema_person = NamedNodeRef::new("http://schema.org/Person").unwrap(); +let mut count = 0; +for triple in TurtleParser::new().parse_read(file.as_ref()) { + let triple = triple.unwrap(); + if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + count += 1; + } +} +assert_eq!(2, count); +``` + + +## License + +This project is licensed under either of + +* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or + `<http://www.apache.org/licenses/LICENSE-2.0>`) +* MIT license ([LICENSE-MIT](../LICENSE-MIT) or + `<http://opensource.org/licenses/MIT>`) + +at your option. + + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
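+
+### Going further
+
+As a further sketch (assuming the `NQuadsParser` type exported by this crate, which follows the same `parse_read` pattern as `TurtleParser` above), counting the distinct graph names used in an N-Quads document could look like:
+```rust
+use oxttl::NQuadsParser;
+use std::collections::HashSet;
+
+let file = b"<http://example.com/foo> <http://schema.org/name> \"Foo\" <http://example.com/g1> .
+<http://example.com/foo> <http://schema.org/name> \"Bar\" <http://example.com/g2> .";
+
+let mut graphs = HashSet::new();
+for quad in NQuadsParser::new().parse_read(file.as_ref()) {
+    // Each item is a parsing result wrapping an oxrdf Quad.
+    graphs.insert(quad.unwrap().graph_name);
+}
+assert_eq!(2, graphs.len());
+```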
diff --git a/ng-oxigraph/src/oxttl/lexer.rs b/ng-oxigraph/src/oxttl/lexer.rs new file mode 100644 index 0000000..dba6fc7 --- /dev/null +++ b/ng-oxigraph/src/oxttl/lexer.rs @@ -0,0 +1,977 @@ +use crate::oxrdf::NamedNode; +use crate::oxttl::toolkit::{TokenRecognizer, TokenRecognizerError}; +use memchr::{memchr, memchr2}; +use oxilangtag::LanguageTag; +use oxiri::Iri; +use std::borrow::Cow; +use std::cmp::min; +use std::collections::HashMap; +use std::ops::Range; +use std::str; + +#[derive(Debug, PartialEq, Eq)] +pub enum N3Token<'a> { + IriRef(String), + PrefixedName { + prefix: &'a str, + local: Cow<'a, str>, + might_be_invalid_iri: bool, + }, + Variable(Cow<'a, str>), + BlankNodeLabel(&'a str), + String(String), + Integer(&'a str), + Decimal(&'a str), + Double(&'a str), + LangTag(&'a str), + Punctuation(&'a str), + PlainKeyword(&'a str), +} + +#[derive(Eq, PartialEq)] +pub enum N3LexerMode { + NTriples, + Turtle, + N3, +} + +#[derive(Default)] +pub struct N3LexerOptions { + pub base_iri: Option<Iri<String>>, +} + +pub struct N3Lexer { + mode: N3LexerMode, + unchecked: bool, +} + +// TODO: there are a lot of 'None' (missing data) returned even if the stream is ending!!! +// TODO: simplify by not giving is_end and fail with an "unexpected eof" if none is returned when is_end=true? + +impl TokenRecognizer for N3Lexer { + type Token<'a> = N3Token<'a>; + type Options = N3LexerOptions; + + fn recognize_next_token<'a>( + &mut self, + data: &'a [u8], + is_ending: bool, + options: &N3LexerOptions, + ) -> Option<(usize, Result<N3Token<'a>, TokenRecognizerError>)> { + match *data.first()? { + b'<' => match *data.get(1)? { + b'<' => Some((2, Ok(N3Token::Punctuation("<<")))), + b'=' if self.mode == N3LexerMode::N3 => { + if let Some((consumed, result)) = self.recognize_iri(data, options) { + Some(if let Ok(result) = result { + (consumed, Ok(result)) + } else { + (2, Ok(N3Token::Punctuation("<="))) + }) + } else if is_ending { + Some((2, Ok(N3Token::Punctuation("<=")))) + } else { + None + } + } + b'-' if self.mode == N3LexerMode::N3 => { + if let Some((consumed, result)) = self.recognize_iri(data, options) { + Some(if let Ok(result) = result { + (consumed, Ok(result)) + } else { + (2, Ok(N3Token::Punctuation("<-"))) + }) + } else if is_ending { + Some((2, Ok(N3Token::Punctuation("<-")))) + } else { + None + } + } + _ => self.recognize_iri(data, options), + }, + b'>' => { + if *data.get(1)? == b'>' { + Some((2, Ok(N3Token::Punctuation(">>")))) + } else { + Some((1, Ok(N3Token::Punctuation(">")))) + } + } + b'_' => match data.get(1)? { + b':' => Self::recognize_blank_node_label(data), + c => Some(( + 1, + Err((0, format!("Unexpected character '{}'", char::from(*c))).into()), + )), + }, + b'"' => { + if self.mode != N3LexerMode::NTriples + && *data.get(1)? == b'"' + && *data.get(2)? == b'"' + { + Self::recognize_long_string(data, b'"') + } else { + Self::recognize_string(data, b'"') + } + } + b'\'' if self.mode != N3LexerMode::NTriples => { + if *data.get(1)? == b'\'' && *data.get(2)? == b'\'' { + Self::recognize_long_string(data, b'\'') + } else { + Self::recognize_string(data, b'\'') + } + } + b'@' => self.recognize_lang_tag(data), + b'.' => match data.get(1) { + Some(b'0'..=b'9') => Self::recognize_number(data), + Some(_) => Some((1, Ok(N3Token::Punctuation(".")))), + None => is_ending.then_some((1, Ok(N3Token::Punctuation(".")))), + }, + b'^' => { + if *data.get(1)?
== b'^' { + Some((2, Ok(N3Token::Punctuation("^^")))) + } else { + Some((1, Ok(N3Token::Punctuation("^")))) + } + } + b'(' => Some((1, Ok(N3Token::Punctuation("(")))), + b')' => Some((1, Ok(N3Token::Punctuation(")")))), + b'[' => Some((1, Ok(N3Token::Punctuation("[")))), + b']' => Some((1, Ok(N3Token::Punctuation("]")))), + b'{' => { + if *data.get(1)? == b'|' { + Some((2, Ok(N3Token::Punctuation("{|")))) + } else { + Some((1, Ok(N3Token::Punctuation("{")))) + } + } + b'}' => Some((1, Ok(N3Token::Punctuation("}")))), + b',' => Some((1, Ok(N3Token::Punctuation(",")))), + b';' => Some((1, Ok(N3Token::Punctuation(";")))), + b'!' => Some((1, Ok(N3Token::Punctuation("!")))), + b'|' => { + if *data.get(1)? == b'}' { + Some((2, Ok(N3Token::Punctuation("|}")))) + } else { + Some((1, Ok(N3Token::Punctuation("|")))) + } + } + b'=' => { + if *data.get(1)? == b'>' { + Some((2, Ok(N3Token::Punctuation("=>")))) + } else { + Some((1, Ok(N3Token::Punctuation("=")))) + } + } + b'0'..=b'9' | b'+' | b'-' => Self::recognize_number(data), + b'?' => self.recognize_variable(data, is_ending), + _ => self.recognize_pname_or_keyword(data, is_ending), + } + } +} + +impl N3Lexer { + pub fn new(mode: N3LexerMode, unchecked: bool) -> Self { + Self { mode, unchecked } + } + + fn recognize_iri( + &self, + data: &[u8], + options: &N3LexerOptions, + ) -> Option<(usize, Result<N3Token<'static>, TokenRecognizerError>)> { + // [18] IRIREF ::= '<' ([^#x00-#x20<>"{}|^`\] | UCHAR)* '>' /* #x00=NULL #01-#x1F=control codes #x20=space */ + let mut string = Vec::new(); + let mut i = 1; + loop { + let end = memchr2(b'>', b'\\', &data[i..])?; + string.extend_from_slice(&data[i..i + end]); + i += end; + match data[i] { + b'>' => { + #[allow(clippy::range_plus_one)] + return Some((i + 1, self.parse_iri(string, 0..i + 1, options))); + } + b'\\' => { + let (additional, c) = Self::recognize_escape(&data[i..], i, false)?; + i += additional + 1; + match c { + Ok(c) => { + let mut buf = [0; 4]; + string.extend_from_slice(c.encode_utf8(&mut buf).as_bytes()); + } + Err(e) => return Some((i, Err(e))), + } + } + _ => unreachable!(), + } + } + } + + fn parse_iri( + &self, + iri: Vec<u8>, + position: Range<usize>, + options: &N3LexerOptions, + ) -> Result<N3Token<'static>, TokenRecognizerError> { + let iri = string_from_utf8(iri, position.clone())?; + Ok(N3Token::IriRef( + if let Some(base_iri) = options.base_iri.as_ref() { + if self.unchecked { + base_iri.resolve_unchecked(&iri) + } else { + base_iri + .resolve(&iri) + .map_err(|e| (position, e.to_string()))? + } + .into_inner() + } else if self.unchecked { + iri + } else { + Iri::parse(iri) + .map_err(|e| (position, e.to_string()))? + .into_inner() + }, + )) + } + + fn recognize_pname_or_keyword<'a>( + &self, + data: &'a [u8], + is_ending: bool, + ) -> Option<(usize, Result<N3Token<'a>, TokenRecognizerError>)> { + // [139s] PNAME_NS ::= PN_PREFIX? ':' + // [140s] PNAME_LN ::= PNAME_NS PN_LOCAL + // [167s] PN_PREFIX ::= PN_CHARS_BASE ((PN_CHARS | '.')* PN_CHARS)? + let mut i = 0; + loop { + if let Some(r) = Self::recognize_unicode_char(&data[i..], i) { + match r { + Ok((c, consumed)) => { + if c == ':' { + i += consumed; + break; + } else if i == 0 { + if !Self::is_possible_pn_chars_base(c) { + return Some(( + consumed, + Err(( + 0..consumed, + format!( + "'{c}' is not allowed at the beginning of a prefix name" + ), + ) + .into()), + )); + } + i += consumed; + } else if Self::is_possible_pn_chars(c) || c == '.'
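+ // A '.' is accepted tentatively here; dots that end the name are backed out below, since PN_PREFIX must not end with '.'.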
{ + i += consumed; + } else { + while data[..i].ends_with(b".") { + i -= 1; + } + return Some(( + i, + str_from_utf8(&data[..i], 0..i).map(N3Token::PlainKeyword), + )); + } + } + Err(e) => return Some((e.location.end, Err(e))), + } + } else if is_ending { + while data[..i].ends_with(b".") { + i -= 1; + } + return Some(if i == 0 { + ( + 1, + Err((0..1, format!("Unexpected byte {}", data[0])).into()), + ) + } else { + ( + i, + str_from_utf8(&data[..i], 0..i).map(N3Token::PlainKeyword), + ) + }); + } else { + return None; + } + } + let pn_prefix = match str_from_utf8(&data[..i - 1], 0..i - 1) { + Ok(pn_prefix) => pn_prefix, + Err(e) => return Some((i, Err(e))), + }; + if pn_prefix.ends_with('.') { + return Some(( + i, + Err(( + 0..i, + format!( + "'{pn_prefix}' is not a valid prefix: prefixes are not allowed to end with '.'"), + ) + .into()), + )); + } + + let (consumed, pn_local_result) = + self.recognize_optional_pn_local(&data[i..], is_ending)?; + Some(( + consumed + i, + pn_local_result.map(|(local, might_be_invalid_iri)| N3Token::PrefixedName { + prefix: pn_prefix, + local, + might_be_invalid_iri, + }), + )) + } + + fn recognize_variable<'a>( + &self, + data: &'a [u8], + is_ending: bool, + ) -> Option<(usize, Result, TokenRecognizerError>)> { + // [36] QUICK_VAR_NAME ::= "?" PN_LOCAL + let (consumed, result) = self.recognize_optional_pn_local(&data[1..], is_ending)?; + Some(( + consumed + 1, + result.and_then(|(name, _)| { + if name.is_empty() { + Err((0..consumed, "A variable name is not allowed to be empty").into()) + } else { + Ok(N3Token::Variable(name)) + } + }), + )) + } + + fn recognize_optional_pn_local<'a>( + &self, + data: &'a [u8], + is_ending: bool, + ) -> Option<(usize, Result<(Cow<'a, str>, bool), TokenRecognizerError>)> { + // [168s] PN_LOCAL ::= (PN_CHARS_U | ':' | [0-9] | PLX) ((PN_CHARS | '.' | ':' | PLX)* (PN_CHARS | ':' | PLX))? + let mut i = 0; + let mut buffer = None; // Buffer if there are some escaped characters + let mut position_that_is_already_in_buffer = 0; + let mut might_be_invalid_iri = false; + let mut ends_with_unescaped_dot = 0; + loop { + if let Some(r) = Self::recognize_unicode_char(&data[i..], i) { + match r { + Ok((c, consumed)) => { + if c == '%' { + i += 1; + let a = char::from(*data.get(i)?); + i += 1; + let b = char::from(*data.get(i)?); + if !a.is_ascii_hexdigit() || !b.is_ascii_hexdigit() { + return Some((i + 1, Err(( + i - 2..=i, format!("escapes in IRIs should be % followed by two hexadecimal characters, found '%{a}{b}'") + ).into()))); + } + i += 1; + ends_with_unescaped_dot = 0; + } else if c == '\\' { + i += 1; + let a = char::from(*data.get(i)?); + if self.unchecked + || matches!( + a, + '_' | '~' + | '.' + | '-' + | '!' + | '$' + | '&' + | '\'' + | '(' + | ')' + | '*' + | '+' + | ',' + | ';' + | '=' + ) + { + // ok to escape + } else if matches!(a, '/' | '?' 
| '#' | '@' | '%') { + // ok to escape but requires IRI validation + might_be_invalid_iri = true; + } else { + return Some((i + 1, Err(( + i..=i, format!("The character that are allowed to be escaped in IRIs are _~.-!$&'()*+,;=/?#@%, found '{a}'") + ).into()))); + } + let buffer = buffer.get_or_insert_with(String::new); + // We add the missing bytes + if i - position_that_is_already_in_buffer > 1 { + buffer.push_str( + match str_from_utf8( + &data[position_that_is_already_in_buffer..i - 1], + position_that_is_already_in_buffer..i - 1, + ) { + Ok(data) => data, + Err(e) => return Some((i, Err(e))), + }, + ) + } + buffer.push(a); + i += 1; + position_that_is_already_in_buffer = i; + ends_with_unescaped_dot = 0; + } else if i == 0 { + if !(Self::is_possible_pn_chars_u(c) || c == ':' || c.is_ascii_digit()) + { + return Some((0, Ok((Cow::Borrowed(""), false)))); + } + if !self.unchecked { + might_be_invalid_iri |= + Self::is_possible_pn_chars_base_but_not_valid_iri(c) + || c == ':'; + } + i += consumed; + } else if Self::is_possible_pn_chars(c) || c == ':' { + if !self.unchecked { + might_be_invalid_iri |= + Self::is_possible_pn_chars_base_but_not_valid_iri(c) + || c == ':'; + } + i += consumed; + ends_with_unescaped_dot = 0; + } else if c == '.' { + i += consumed; + ends_with_unescaped_dot += 1; + } else { + let buffer = if let Some(mut buffer) = buffer { + buffer.push_str( + match str_from_utf8( + &data[position_that_is_already_in_buffer..i], + position_that_is_already_in_buffer..i, + ) { + Ok(data) => data, + Err(e) => return Some((i, Err(e))), + }, + ); + // We do not include the last dots + for _ in 0..ends_with_unescaped_dot { + buffer.pop(); + } + i -= ends_with_unescaped_dot; + Cow::Owned(buffer) + } else { + let mut data = match str_from_utf8(&data[..i], 0..i) { + Ok(data) => data, + Err(e) => return Some((i, Err(e))), + }; + // We do not include the last dots + data = &data[..data.len() - ends_with_unescaped_dot]; + i -= ends_with_unescaped_dot; + Cow::Borrowed(data) + }; + return Some((i, Ok((buffer, might_be_invalid_iri)))); + } + } + Err(e) => return Some((e.location.end, Err(e))), + } + } else if is_ending { + let buffer = if let Some(mut buffer) = buffer { + // We do not include the last dot + while buffer.ends_with('.') { + buffer.pop(); + i -= 1; + } + Cow::Owned(buffer) + } else { + let mut data = match str_from_utf8(&data[..i], 0..i) { + Ok(data) => data, + Err(e) => return Some((i, Err(e))), + }; + // We do not include the last dot + while let Some(d) = data.strip_suffix('.') { + data = d; + i -= 1; + } + Cow::Borrowed(data) + }; + return Some((i, Ok((buffer, might_be_invalid_iri)))); + } else { + return None; + } + } + } + + fn recognize_blank_node_label( + data: &[u8], + ) -> Option<(usize, Result<N3Token<'_>, TokenRecognizerError>)> { + // [141s] BLANK_NODE_LABEL ::= '_:' (PN_CHARS_U | [0-9]) ((PN_CHARS | '.')* PN_CHARS)? + let mut i = 2; + loop { + match Self::recognize_unicode_char(&data[i..], i)? { + Ok((c, consumed)) => { + if (i == 2 && (Self::is_possible_pn_chars_u(c) || c.is_ascii_digit())) + || (i > 2 && Self::is_possible_pn_chars(c)) + { + // Ok + } else if i > 2 && c == '.' { + if data[i - 1] == b'.' { + i -= 1; + return Some(( + i, + str_from_utf8(&data[2..i], 2..i).map(N3Token::BlankNodeLabel), + )); + } + } else if i == 0 { + return Some(( + i, + Err((0..i, "A blank node ID should not be empty").into()), + )); + } else if data[i - 1] == b'.'
{ + i -= 1; + return Some(( + i, + str_from_utf8(&data[2..i], 2..i).map(N3Token::BlankNodeLabel), + )); + } else { + return Some(( + i, + str_from_utf8(&data[2..i], 2..i).map(N3Token::BlankNodeLabel), + )); + } + i += consumed; + } + Err(e) => return Some((e.location.end, Err(e))), + } + } + } + + fn recognize_lang_tag<'a>( + &self, + data: &'a [u8], + ) -> Option<(usize, Result<N3Token<'a>, TokenRecognizerError>)> { + // [144s] LANGTAG ::= '@' [a-zA-Z]+ ('-' [a-zA-Z0-9]+)* + let mut is_last_block_empty = true; + for (i, c) in data[1..].iter().enumerate() { + if c.is_ascii_alphabetic() { + is_last_block_empty = false; + } else if i == 0 { + return Some(( + 1, + Err((1..2, "A language code should always start with a letter").into()), + )); + } else if is_last_block_empty { + return Some((i, self.parse_lang_tag(&data[1..i], 1..i - 1))); + } else if *c == b'-' { + is_last_block_empty = true; + } else { + return Some((i + 1, self.parse_lang_tag(&data[1..=i], 1..i))); + } + } + None + } + + fn parse_lang_tag<'a>( + &self, + lang_tag: &'a [u8], + position: Range<usize>, + ) -> Result<N3Token<'a>, TokenRecognizerError> { + let lang_tag = str_from_utf8(lang_tag, position.clone())?; + Ok(N3Token::LangTag(if self.unchecked { + lang_tag + } else { + LanguageTag::parse(lang_tag) + .map_err(|e| (position.clone(), e.to_string()))? + .into_inner() + })) + } + + fn recognize_string( + data: &[u8], + delimiter: u8, + ) -> Option<(usize, Result<N3Token<'static>, TokenRecognizerError>)> { + // [22] STRING_LITERAL_QUOTE ::= '"' ([^#x22#x5C#xA#xD] | ECHAR | UCHAR)* '"' /* #x22=" #x5C=\ #xA=new line #xD=carriage return */ + // [23] STRING_LITERAL_SINGLE_QUOTE ::= "'" ([^#x27#x5C#xA#xD] | ECHAR | UCHAR)* "'" /* #x27=' #x5C=\ #xA=new line #xD=carriage return */ + let mut string = String::new(); + let mut i = 1; + loop { + let end = memchr2(delimiter, b'\\', &data[i..])?; + match str_from_utf8(&data[i..i + end], i..i + end) { + Ok(s) => string.push_str(s), + Err(e) => return Some((end, Err(e))), + }; + i += end; + match data[i] { + c if c == delimiter => { + return Some((i + 1, Ok(N3Token::String(string)))); + } + b'\\' => { + let (additional, c) = Self::recognize_escape(&data[i..], i, true)?; + i += additional + 1; + match c { + Ok(c) => { + string.push(c); + } + Err(e) => { + // We read until the end of string char + let end = memchr(delimiter, &data[i..])?; + return Some((i + end + 1, Err(e))); + } + } + } + _ => unreachable!(), + } + } + } + + fn recognize_long_string( + data: &[u8], + delimiter: u8, + ) -> Option<(usize, Result<N3Token<'static>, TokenRecognizerError>)> { + // [24] STRING_LITERAL_LONG_SINGLE_QUOTE ::= "'''" (("'" | "''")? ([^'\] | ECHAR | UCHAR))* "'''" + // [25] STRING_LITERAL_LONG_QUOTE ::= '"""' (('"' | '""')? ([^"\] | ECHAR | UCHAR))* '"""' + let mut string = String::new(); + let mut i = 3; + loop { + let end = memchr2(delimiter, b'\\', &data[i..])?; + match str_from_utf8(&data[i..i + end], i..i + end) { + Ok(s) => string.push_str(s), + Err(e) => return Some((end, Err(e))), + }; + i += end; + match data[i] { + c if c == delimiter => { + if *data.get(i + 1)? == delimiter && *data.get(i + 2)?
== delimiter { + return Some((i + 3, Ok(N3Token::String(string)))); + } + i += 1; + string.push(char::from(delimiter)); + } + b'\\' => { + let (additional, c) = Self::recognize_escape(&data[i..], i, true)?; + i += additional + 1; + match c { + Ok(c) => { + string.push(c); + } + Err(e) => return Some((i, Err(e))), + } + } + _ => unreachable!(), + } + } + } + + fn recognize_number(data: &[u8]) -> Option<(usize, Result<N3Token<'_>, TokenRecognizerError>)> { + // [19] INTEGER ::= [+-]? [0-9]+ + // [20] DECIMAL ::= [+-]? [0-9]* '.' [0-9]+ + // [21] DOUBLE ::= [+-]? ([0-9]+ '.' [0-9]* EXPONENT | '.' [0-9]+ EXPONENT | [0-9]+ EXPONENT) + // [154s] EXPONENT ::= [eE] [+-]? [0-9]+ + let mut i = 0; + let c = *data.first()?; + if matches!(c, b'+' | b'-') { + i += 1; + } + // We read the digits before . + let mut count_before: usize = 0; + loop { + let c = *data.get(i)?; + if c.is_ascii_digit() { + i += 1; + count_before += 1; + } else { + break; + } + } + + // We read the digits after . + #[allow(clippy::if_then_some_else_none)] + let count_after = if *data.get(i)? == b'.' { + i += 1; + + let mut count_after = 0; + loop { + let c = *data.get(i)?; + if c.is_ascii_digit() { + i += 1; + count_after += 1; + } else { + break; + } + } + Some(count_after) + } else { + None + }; + + // End + let c = *data.get(i)?; + if matches!(c, b'e' | b'E') { + i += 1; + + let c = *data.get(i)?; + if matches!(c, b'+' | b'-') { + i += 1; + } + + let mut found = false; + loop { + let c = *data.get(i)?; + if c.is_ascii_digit() { + i += 1; + found = true; + } else { + break; + } + } + Some(( + i, + if !found { + Err((0..i, "A double exponent cannot be empty").into()) + } else if count_before == 0 && count_after.unwrap_or(0) == 0 { + Err((0..i, "A double should not be empty").into()) + } else { + str_from_utf8(&data[..i], 0..i).map(N3Token::Double) + }, + )) + } else if let Some(count_after) = count_after { + if count_after == 0 { + // We do not consume the '.' after all + i -= 1; + Some(( + i, + if count_before == 0 { + Err((0..i, "An integer should not be empty").into()) + } else { + str_from_utf8(&data[..i], 0..i).map(N3Token::Integer) + }, + )) + } else { + Some((i, str_from_utf8(&data[..i], 0..i).map(N3Token::Decimal))) + } + } else { + Some(( + i, + if count_before == 0 { + Err((0..i, "An integer should not be empty").into()) + } else { + str_from_utf8(&data[..i], 0..i).map(N3Token::Integer) + }, + )) + } + } + + fn recognize_escape( + data: &[u8], + position: usize, + with_echar: bool, + ) -> Option<(usize, Result<char, TokenRecognizerError>)> { + // [26] UCHAR ::= '\u' HEX HEX HEX HEX | '\U' HEX HEX HEX HEX HEX HEX HEX HEX + // [159s] ECHAR ::= '\' [tbnrf"'\] + match *data.get(1)?
{ + b'u' => match Self::recognize_hex_char(&data[2..], 4, 'u', position) { + Ok(c) => Some((5, Ok(c?))), + Err(e) => Some((5, Err(e))), + }, + b'U' => match Self::recognize_hex_char(&data[2..], 8, 'U', position) { + Ok(c) => Some((9, Ok(c?))), + Err(e) => Some((9, Err(e))), + }, + b't' if with_echar => Some((1, Ok('\t'))), + b'b' if with_echar => Some((1, Ok('\x08'))), + b'n' if with_echar => Some((1, Ok('\n'))), + b'r' if with_echar => Some((1, Ok('\r'))), + b'f' if with_echar => Some((1, Ok('\x0C'))), + b'"' if with_echar => Some((1, Ok('"'))), + b'\'' if with_echar => Some((1, Ok('\''))), + b'\\' if with_echar => Some((1, Ok('\\'))), + c => Some(( + 1, + Err(( + position..position + 2, + format!("Unexpected escape character '\\{}'", char::from(c)), + ) + .into()), + )), // TODO: read until end of string + } + } + + fn recognize_hex_char( + data: &[u8], + len: usize, + escape_char: char, + position: usize, + ) -> Result<Option<char>, TokenRecognizerError> { + if data.len() < len { + return Ok(None); + } + let val = str_from_utf8(&data[..len], position..position + len + 2)?; + let codepoint = u32::from_str_radix(val, 16).map_err(|e| { + ( + position..position + len + 2, + format!( + "The escape sequence '\\{escape_char}{val}' is not a valid hexadecimal string: {e}" + ), + ) + })?; + let c = char::from_u32(codepoint).ok_or_else(|| { + ( + position..position + len + 2, + format!( + "The escape sequence '\\{escape_char}{val}' is encoding {codepoint:X} that is not a valid unicode character", + ), + ) + })?; + Ok(Some(c)) + } + + fn recognize_unicode_char( + data: &[u8], + position: usize, + ) -> Option<Result<(char, usize), TokenRecognizerError>> { + let mut code_point: u32; + let bytes_needed: usize; + let mut lower_boundary = 0x80; + let mut upper_boundary = 0xBF; + + let byte = *data.first()?; + match byte { + 0x00..=0x7F => return Some(Ok((char::from(byte), 1))), + 0xC2..=0xDF => { + bytes_needed = 1; + code_point = u32::from(byte) & 0x1F; + } + 0xE0..=0xEF => { + if byte == 0xE0 { + lower_boundary = 0xA0; + } + if byte == 0xED { + upper_boundary = 0x9F; + } + bytes_needed = 2; + code_point = u32::from(byte) & 0xF; + } + 0xF0..=0xF4 => { + if byte == 0xF0 { + lower_boundary = 0x90; + } + if byte == 0xF4 { + upper_boundary = 0x8F; + } + bytes_needed = 3; + code_point = u32::from(byte) & 0x7; + } + _ => { + return Some(Err(( + position..=position, + "Invalid UTF-8 character encoding", + ) + .into())) + } + } + + for i in 1..=bytes_needed { + let byte = *data.get(i)?; + if byte < lower_boundary || upper_boundary < byte { + return Some(Err(( + position..=position + i, + "Invalid UTF-8 character encoding", + ) + .into())); + } + lower_boundary = 0x80; + upper_boundary = 0xBF; + code_point = (code_point << 6) | (u32::from(byte) & 0x3F); + } + + Some( + char::from_u32(code_point) + .map(|c| (c, bytes_needed + 1)) + .ok_or_else(|| { + ( + position..=position + bytes_needed, + format!("The codepoint {code_point:X} is not a valid unicode character"), + ) + .into() + }), + ) + } + + // [157s] PN_CHARS_BASE ::= [A-Z] | [a-z] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | [#x00F8-#x02FF] | [#x0370-#x037D] | [#x037F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF] + fn is_possible_pn_chars_base(c: char) -> bool { + matches!(c, + 'A'..='Z' + | 'a'..='z' + | '\u{00C0}'..='\u{00D6}' + | '\u{00D8}'..='\u{00F6}' + | '\u{00F8}'..='\u{02FF}' + | '\u{0370}'..='\u{037D}' + | '\u{037F}'..='\u{1FFF}' + | '\u{200C}'..='\u{200D}' + | '\u{2070}'..='\u{218F}' + | '\u{2C00}'..='\u{2FEF}' |
'\u{3001}'..='\u{D7FF}' + | '\u{F900}'..='\u{FDCF}' + | '\u{FDF0}'..='\u{FFFD}' + | '\u{10000}'..='\u{EFFFF}') + } + + // [158s] PN_CHARS_U ::= PN_CHARS_BASE | '_' | ':' + pub(super) fn is_possible_pn_chars_u(c: char) -> bool { + Self::is_possible_pn_chars_base(c) || c == '_' + } + + // [160s] PN_CHARS ::= PN_CHARS_U | '-' | [0-9] | #x00B7 | [#x0300-#x036F] | [#x203F-#x2040] + pub(crate) fn is_possible_pn_chars(c: char) -> bool { + Self::is_possible_pn_chars_u(c) + || matches!(c, + '-' | '0'..='9' | '\u{00B7}' | '\u{0300}'..='\u{036F}' | '\u{203F}'..='\u{2040}') + } + + fn is_possible_pn_chars_base_but_not_valid_iri(c: char) -> bool { + matches!(c, '\u{FFF0}'..='\u{FFFD}') + || u32::from(c) % u32::from('\u{FFFE}') == 0 + || u32::from(c) % u32::from('\u{FFFF}') == 0 + } +} + +pub fn resolve_local_name( + prefix: &str, + local: &str, + might_be_invalid_iri: bool, + prefixes: &HashMap>, +) -> Result { + if let Some(start) = prefixes.get(prefix) { + let iri = format!("{start}{local}"); + if might_be_invalid_iri || start.path().is_empty() { + // We validate again. We always validate if the local part might be the IRI authority. + if let Err(e) = Iri::parse(iri.as_str()) { + return Err(format!( + "The prefixed name {prefix}:{local} builds IRI {iri} that is invalid: {e}" + )); + } + } + Ok(NamedNode::new_unchecked(iri)) + } else { + Err(format!("The prefix {prefix}: has not been declared")) + } +} + +fn str_from_utf8(data: &[u8], range: Range) -> Result<&str, TokenRecognizerError> { + str::from_utf8(data).map_err(|e| { + ( + range.start + e.valid_up_to()..min(range.end, range.start + e.valid_up_to() + 4), + format!("Invalid UTF-8: {e}"), + ) + .into() + }) +} + +fn string_from_utf8(data: Vec, range: Range) -> Result { + String::from_utf8(data).map_err(|e| { + ( + range.start + e.utf8_error().valid_up_to() + ..min(range.end, range.start + e.utf8_error().valid_up_to() + 4), + format!("Invalid UTF-8: {e}"), + ) + .into() + }) +} diff --git a/ng-oxigraph/src/oxttl/line_formats.rs b/ng-oxigraph/src/oxttl/line_formats.rs new file mode 100644 index 0000000..ead06b7 --- /dev/null +++ b/ng-oxigraph/src/oxttl/line_formats.rs @@ -0,0 +1,314 @@ +//! Shared parser implementation for N-Triples and N-Quads. 
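+//!
+//! A rough sketch of how the public line-based parsers are expected to drive
+//! the shared recognizer below (the flags mirror [`NQuadsRecognizer::new_parser`];
+//! this is an illustration, not additional API):
+//!
+//! ```ignore
+//! // N-Triples: no graph name is allowed after the object.
+//! let ntriples = NQuadsRecognizer::new_parser(
+//!     false, // with_graph_name
+//!     #[cfg(feature = "rdf-star")]
+//!     false, // with_quoted_triples
+//!     false, // unchecked
+//! );
+//! // N-Quads: an optional graph name may follow the object.
+//! let nquads = NQuadsRecognizer::new_parser(
+//!     true,
+//!     #[cfg(feature = "rdf-star")]
+//!     true,
+//!     false,
+//! );
+//! ```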
+ +#[cfg(feature = "rdf-star")] +use crate::oxrdf::Triple; +use crate::oxrdf::{BlankNode, GraphName, Literal, NamedNode, Quad, Subject, Term}; +use crate::oxttl::lexer::{N3Lexer, N3LexerMode, N3LexerOptions, N3Token}; +use crate::oxttl::toolkit::{Lexer, Parser, RuleRecognizer, RuleRecognizerError}; +use crate::oxttl::{MAX_BUFFER_SIZE, MIN_BUFFER_SIZE}; + +pub struct NQuadsRecognizer { + stack: Vec, + subjects: Vec, + predicates: Vec, + objects: Vec, +} +pub struct NQuadsRecognizerContext { + with_graph_name: bool, + #[cfg(feature = "rdf-star")] + with_quoted_triples: bool, + lexer_options: N3LexerOptions, +} + +enum NQuadsState { + ExpectSubject, + ExpectPredicate, + ExpectedObject, + ExpectPossibleGraphOrEndOfQuotedTriple, + ExpectDot, + ExpectLiteralAnnotationOrGraphNameOrDot { + value: String, + }, + ExpectLiteralDatatype { + value: String, + }, + #[cfg(feature = "rdf-star")] + AfterQuotedSubject, + #[cfg(feature = "rdf-star")] + AfterQuotedObject, +} + +impl RuleRecognizer for NQuadsRecognizer { + type TokenRecognizer = N3Lexer; + type Output = Quad; + type Context = NQuadsRecognizerContext; + + fn error_recovery_state(mut self) -> Self { + self.stack.clear(); + self.subjects.clear(); + self.predicates.clear(); + self.objects.clear(); + self + } + + fn recognize_next( + mut self, + token: N3Token<'_>, + context: &mut NQuadsRecognizerContext, + results: &mut Vec, + errors: &mut Vec, + ) -> Self { + if let Some(state) = self.stack.pop() { + match state { + NQuadsState::ExpectSubject => match token { + N3Token::IriRef(s) => { + self.subjects + .push(NamedNode::new_unchecked(s).into()); + self.stack.push(NQuadsState::ExpectPredicate); + self + } + N3Token::BlankNodeLabel(s) => { + self.subjects.push(BlankNode::new_unchecked(s).into()); + self.stack.push(NQuadsState::ExpectPredicate); + self + } + #[cfg(feature = "rdf-star")] + N3Token::Punctuation("<<") if context.with_quoted_triples => { + self.stack.push(NQuadsState::AfterQuotedSubject); + self.stack.push(NQuadsState::ExpectSubject); + self + } + _ => self.error( + errors, + "The subject of a triple should be an IRI or a blank node, TOKEN found", + ), + }, + NQuadsState::ExpectPredicate => match token { + N3Token::IriRef(p) => { + self.predicates + .push(NamedNode::new_unchecked(p)); + self.stack.push(NQuadsState::ExpectedObject); + self + } + _ => self.error( + errors, + "The predicate of a triple should be an IRI, TOKEN found", + ), + }, + NQuadsState::ExpectedObject => match token { + N3Token::IriRef(o) => { + self.objects + .push(NamedNode::new_unchecked(o).into()); + self.stack + .push(NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple); + self + } + N3Token::BlankNodeLabel(o) => { + self.objects.push(BlankNode::new_unchecked(o).into()); + self.stack + .push(NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple); + self + } + N3Token::String(value) => { + self.stack + .push(NQuadsState::ExpectLiteralAnnotationOrGraphNameOrDot { value }); + self + } + #[cfg(feature = "rdf-star")] + N3Token::Punctuation("<<") if context.with_quoted_triples => { + self.stack.push(NQuadsState::AfterQuotedObject); + self.stack.push(NQuadsState::ExpectSubject); + self + } + _ => self.error( + errors, + "The object of a triple should be an IRI, a blank node or a literal, TOKEN found", + ), + }, + NQuadsState::ExpectLiteralAnnotationOrGraphNameOrDot { value } => match token { + N3Token::LangTag(lang_tag) => { + self.objects.push( + Literal::new_language_tagged_literal_unchecked( + value, + lang_tag.to_ascii_lowercase(), + ) + .into(), + ); + self.stack + 
.push(NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple); + self + } + N3Token::Punctuation("^^") => { + self.stack + .push(NQuadsState::ExpectLiteralDatatype { value }); + self + } + _ => { + self.objects.push(Literal::new_simple_literal(value).into()); + self.stack + .push(NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple); + self.recognize_next(token, context, results, errors) + } + }, + NQuadsState::ExpectLiteralDatatype { value } => match token { + N3Token::IriRef(d) => { + self.objects.push( + Literal::new_typed_literal( + value, + NamedNode::new_unchecked(d) + ) + .into(), + ); + self.stack + .push(NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple); + self + } + _ => self.error(errors, "A literal datatype must be an IRI, found TOKEN"), + }, + NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple => { + if self.stack.is_empty() { + match token { + N3Token::IriRef(g) if context.with_graph_name => { + self.emit_quad( + results, + NamedNode::new_unchecked(g).into(), + ); + self.stack.push(NQuadsState::ExpectDot); + self + } + N3Token::BlankNodeLabel(g) if context.with_graph_name => { + self.emit_quad(results, BlankNode::new_unchecked(g).into()); + self.stack.push(NQuadsState::ExpectDot); + self + } + _ => { + self.emit_quad(results, GraphName::DefaultGraph); + self.stack.push(NQuadsState::ExpectDot); + self.recognize_next(token, context, results, errors) + } + } + } else if token == N3Token::Punctuation(">>") { + self + } else { + self.error(errors, "Expecting the end of a quoted triple '>>'") + } + } + NQuadsState::ExpectDot => if let N3Token::Punctuation(".") = token { + self.stack.push(NQuadsState::ExpectSubject); + self + } else { + errors.push("Quads should be followed by a dot".into()); + self.stack.push(NQuadsState::ExpectSubject); + self.recognize_next(token, context, results, errors) + }, + #[cfg(feature = "rdf-star")] + NQuadsState::AfterQuotedSubject => { + let triple = Triple { + subject: self.subjects.pop().unwrap(), + predicate: self.predicates.pop().unwrap(), + object: self.objects.pop().unwrap(), + }; + self.subjects.push(triple.into()); + self.stack.push(NQuadsState::ExpectPredicate); + self.recognize_next(token,context, results, errors) + } + #[cfg(feature = "rdf-star")] + NQuadsState::AfterQuotedObject => { + let triple = Triple { + subject: self.subjects.pop().unwrap(), + predicate: self.predicates.pop().unwrap(), + object: self.objects.pop().unwrap(), + }; + self.objects.push(triple.into()); + self.stack + .push(NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple); + self.recognize_next(token, context, results, errors) + } + } + } else if token == N3Token::Punctuation(".") { + self.stack.push(NQuadsState::ExpectSubject); + self + } else { + self + } + } + + fn recognize_end( + mut self, + _context: &mut NQuadsRecognizerContext, + results: &mut Vec, + errors: &mut Vec, + ) { + match &*self.stack { + [NQuadsState::ExpectSubject] | [] => (), + [NQuadsState::ExpectDot] => errors.push("Triples should be followed by a dot".into()), + [NQuadsState::ExpectPossibleGraphOrEndOfQuotedTriple] => { + self.emit_quad(results, GraphName::DefaultGraph); + errors.push("Triples should be followed by a dot".into()) + } + [NQuadsState::ExpectLiteralAnnotationOrGraphNameOrDot { value }] => { + self.objects.push(Literal::new_simple_literal(value).into()); + self.emit_quad(results, GraphName::DefaultGraph); + errors.push("Triples should be followed by a dot".into()) + } + _ => errors.push("Unexpected end".into()), // TODO + } + } + + fn lexer_options(context: &NQuadsRecognizerContext) 
-> &N3LexerOptions {
+        &context.lexer_options
+    }
+}
+
+impl NQuadsRecognizer {
+    pub fn new_parser(
+        with_graph_name: bool,
+        #[cfg(feature = "rdf-star")] with_quoted_triples: bool,
+        unchecked: bool,
+    ) -> Parser<Self> {
+        Parser::new(
+            Lexer::new(
+                N3Lexer::new(N3LexerMode::NTriples, unchecked),
+                MIN_BUFFER_SIZE,
+                MAX_BUFFER_SIZE,
+                true,
+                Some(b"#"),
+            ),
+            Self {
+                stack: vec![NQuadsState::ExpectSubject],
+                subjects: Vec::new(),
+                predicates: Vec::new(),
+                objects: Vec::new(),
+            },
+            NQuadsRecognizerContext {
+                with_graph_name,
+                #[cfg(feature = "rdf-star")]
+                with_quoted_triples,
+                lexer_options: N3LexerOptions::default(),
+            },
+        )
+    }
+
+    #[must_use]
+    fn error(
+        mut self,
+        errors: &mut Vec<RuleRecognizerError>,
+        msg: impl Into<RuleRecognizerError>,
+    ) -> Self {
+        errors.push(msg.into());
+        self.stack.clear();
+        self.subjects.clear();
+        self.predicates.clear();
+        self.objects.clear();
+        self
+    }
+
+    fn emit_quad(&mut self, results: &mut Vec<Quad>, graph_name: GraphName) {
+        results.push(Quad {
+            subject: self.subjects.pop().unwrap(),
+            predicate: self.predicates.pop().unwrap(),
+            object: self.objects.pop().unwrap(),
+            graph_name,
+        })
+    }
+}
diff --git a/ng-oxigraph/src/oxttl/mod.rs b/ng-oxigraph/src/oxttl/mod.rs
new file mode 100644
index 0000000..5ab210d
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/mod.rs
@@ -0,0 +1,19 @@
+mod lexer;
+mod line_formats;
+pub mod n3;
+pub mod nquads;
+pub mod ntriples;
+mod terse;
+mod toolkit;
+pub mod trig;
+pub mod turtle;
+
+pub use crate::oxttl::n3::N3Parser;
+pub use crate::oxttl::nquads::{NQuadsParser, NQuadsSerializer};
+pub use crate::oxttl::ntriples::{NTriplesParser, NTriplesSerializer};
+pub use crate::oxttl::toolkit::{TextPosition, TurtleParseError, TurtleSyntaxError};
+pub use crate::oxttl::trig::{TriGParser, TriGSerializer};
+pub use crate::oxttl::turtle::{TurtleParser, TurtleSerializer};
+
+pub(crate) const MIN_BUFFER_SIZE: usize = 4096;
+pub(crate) const MAX_BUFFER_SIZE: usize = 4096 * 4096;
diff --git a/ng-oxigraph/src/oxttl/n3.rs b/ng-oxigraph/src/oxttl/n3.rs
new file mode 100644
index 0000000..2e16a78
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/n3.rs
@@ -0,0 +1,1326 @@
+//! A [N3](https://w3c.github.io/N3/spec/) streaming parser implemented by [`N3Parser`].
+
+use crate::oxrdf::vocab::{rdf, xsd};
+#[cfg(feature = "rdf-star")]
+use crate::oxrdf::Triple;
+use crate::oxrdf::{
+    BlankNode, GraphName, Literal, NamedNode, NamedNodeRef, NamedOrBlankNode, Quad, Subject, Term,
+    Variable,
+};
+use crate::oxttl::lexer::{resolve_local_name, N3Lexer, N3LexerMode, N3LexerOptions, N3Token};
+#[cfg(feature = "async-tokio")]
+use crate::oxttl::toolkit::FromTokioAsyncReadIterator;
+use crate::oxttl::toolkit::{
+    FromReadIterator, Lexer, Parser, RuleRecognizer, RuleRecognizerError, TurtleSyntaxError,
+};
+use crate::oxttl::{TurtleParseError, MAX_BUFFER_SIZE, MIN_BUFFER_SIZE};
+use oxiri::{Iri, IriParseError};
+use std::collections::hash_map::Iter;
+use std::collections::HashMap;
+use std::fmt;
+use std::io::Read;
+#[cfg(feature = "async-tokio")]
+use tokio::io::AsyncRead;
+
+/// A N3 term, i.e. an RDF `Term` or a `Variable`.
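+///
+/// A minimal sketch of building terms through the `From` conversions defined
+/// below (the IRI and variable name are placeholders):
+///
+/// ```ignore
+/// use oxrdf::{Literal, NamedNode, Variable};
+/// use oxttl::n3::N3Term;
+///
+/// let iri: N3Term = NamedNode::new("http://example.com/s")?.into();
+/// let lit: N3Term = Literal::new_simple_literal("hello").into();
+/// let var: N3Term = Variable::new("x")?.into();
+/// assert!(iri != lit && lit != var);
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```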
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum N3Term {
+    NamedNode(NamedNode),
+    BlankNode(BlankNode),
+    Literal(Literal),
+    #[cfg(feature = "rdf-star")]
+    Triple(Box<Triple>),
+    Variable(Variable),
+}
+
+impl fmt::Display for N3Term {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::NamedNode(term) => term.fmt(f),
+            Self::BlankNode(term) => term.fmt(f),
+            Self::Literal(term) => term.fmt(f),
+            #[cfg(feature = "rdf-star")]
+            Self::Triple(term) => term.fmt(f),
+            Self::Variable(term) => term.fmt(f),
+        }
+    }
+}
+
+impl From<NamedNode> for N3Term {
+    #[inline]
+    fn from(node: NamedNode) -> Self {
+        Self::NamedNode(node)
+    }
+}
+
+impl From<NamedNodeRef<'_>> for N3Term {
+    #[inline]
+    fn from(node: NamedNodeRef<'_>) -> Self {
+        Self::NamedNode(node.into_owned())
+    }
+}
+
+impl From<BlankNode> for N3Term {
+    #[inline]
+    fn from(node: BlankNode) -> Self {
+        Self::BlankNode(node)
+    }
+}
+
+impl From<Literal> for N3Term {
+    #[inline]
+    fn from(literal: Literal) -> Self {
+        Self::Literal(literal)
+    }
+}
+
+#[cfg(feature = "rdf-star")]
+impl From<Triple> for N3Term {
+    #[inline]
+    fn from(triple: Triple) -> Self {
+        Self::Triple(Box::new(triple))
+    }
+}
+
+#[cfg(feature = "rdf-star")]
+impl From<Box<Triple>> for N3Term {
+    #[inline]
+    fn from(node: Box<Triple>) -> Self {
+        Self::Triple(node)
+    }
+}
+
+impl From<NamedOrBlankNode> for N3Term {
+    #[inline]
+    fn from(node: NamedOrBlankNode) -> Self {
+        match node {
+            NamedOrBlankNode::NamedNode(node) => node.into(),
+            NamedOrBlankNode::BlankNode(node) => node.into(),
+        }
+    }
+}
+
+impl From<Subject> for N3Term {
+    #[inline]
+    fn from(node: Subject) -> Self {
+        match node {
+            Subject::NamedNode(node) => node.into(),
+            Subject::BlankNode(node) => node.into(),
+            #[cfg(feature = "rdf-star")]
+            Subject::Triple(triple) => Self::Triple(triple),
+        }
+    }
+}
+
+impl From<Term> for N3Term {
+    #[inline]
+    fn from(node: Term) -> Self {
+        match node {
+            Term::NamedNode(node) => node.into(),
+            Term::BlankNode(node) => node.into(),
+            Term::Literal(node) => node.into(),
+            #[cfg(feature = "rdf-star")]
+            Term::Triple(triple) => Self::Triple(triple),
+        }
+    }
+}
+
+impl From<Variable> for N3Term {
+    #[inline]
+    fn from(variable: Variable) -> Self {
+        Self::Variable(variable)
+    }
+}
+
+/// A N3 quad, i.e. a quad composed of [`N3Term`]s.
+///
+/// The `graph_name` is used to encode the formula the triple is in;
+/// the formula itself is encoded by a blank node.
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct N3Quad {
+    /// The [subject](https://www.w3.org/TR/rdf11-concepts/#dfn-subject) of this triple.
+    pub subject: N3Term,
+
+    /// The [predicate](https://www.w3.org/TR/rdf11-concepts/#dfn-predicate) of this triple.
+    pub predicate: N3Term,
+
+    /// The [object](https://www.w3.org/TR/rdf11-concepts/#dfn-object) of this triple.
+    pub object: N3Term,
+
+    /// The name of the RDF [graph](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-graph) in which the triple is.
+    pub graph_name: GraphName,
+}
+
+impl fmt::Display for N3Quad {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.graph_name == GraphName::DefaultGraph {
+            write!(f, "{} {} {}", self.subject, self.predicate, self.object)
+        } else {
+            write!(
+                f,
+                "{} {} {} {}",
+                self.subject, self.predicate, self.object, self.graph_name
+            )
+        }
+    }
+}
+
+impl From<Quad> for N3Quad {
+    fn from(quad: Quad) -> Self {
+        Self {
+            subject: quad.subject.into(),
+            predicate: quad.predicate.into(),
+            object: quad.object.into(),
+            graph_name: quad.graph_name,
+        }
+    }
+}
+
+/// A [N3](https://w3c.github.io/N3/spec/) streaming parser.
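+///
+/// Formulas (`{ ... }`) are returned as plain quads whose `graph_name` is a
+/// fresh blank node standing for the enclosing formula (see [`N3Quad`]). A
+/// sketch, with placeholder IRIs:
+///
+/// ```ignore
+/// use oxttl::n3::N3Parser;
+///
+/// let file = br#"{ <http://example.com/s> <http://example.com/p> <http://example.com/o> . }
+///     a <http://example.com/Formula> ."#;
+/// for quad in N3Parser::new().parse_read(file.as_ref()) {
+///     // The inner triple carries the blank-node graph name; the outer triple
+///     // uses that same blank node as its subject.
+///     let _quad = quad?;
+/// }
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```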
+/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; +/// use oxttl::n3::{N3Parser, N3Term}; +/// +/// let file = br#"@base . +/// @prefix schema: . +/// a schema:Person ; +/// schema:name "Foo" . +/// a schema:Person ; +/// schema:name "Bar" ."#; +/// +/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); +/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); +/// let mut count = 0; +/// for triple in N3Parser::new().parse_read(file.as_ref()) { +/// let triple = triple?; +/// if triple.predicate == rdf_type && triple.object == schema_person { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[derive(Default)] +#[must_use] +pub struct N3Parser { + unchecked: bool, + base: Option>, + prefixes: HashMap>, +} + +impl N3Parser { + /// Builds a new [`N3Parser`]. + #[inline] + pub fn new() -> Self { + Self::default() + } + + /// Assumes the file is valid to make parsing faster. + /// + /// It will skip some validations. + /// + /// Note that if the file is actually not valid, then broken RDF might be emitted by the parser. + #[inline] + pub fn unchecked(mut self) -> Self { + self.unchecked = true; + self + } + + #[inline] + pub fn with_base_iri(mut self, base_iri: impl Into) -> Result { + self.base = Some(Iri::parse(base_iri.into())?); + Ok(self) + } + + #[inline] + pub fn with_prefix( + mut self, + prefix_name: impl Into, + prefix_iri: impl Into, + ) -> Result { + self.prefixes + .insert(prefix_name.into(), Iri::parse(prefix_iri.into())?); + Ok(self) + } + + /// Parses a N3 file from a [`Read`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::NamedNode; + /// use oxttl::n3::{N3Parser, N3Term}; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" . + /// a schema:Person ; + /// schema:name "Bar" ."#; + /// + /// let rdf_type = N3Term::NamedNode(NamedNode::new( + /// "http://www.w3.org/1999/02/22-rdf-syntax-ns#type", + /// )?); + /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); + /// let mut count = 0; + /// for triple in N3Parser::new().parse_read(file.as_ref()) { + /// let triple = triple?; + /// if triple.predicate == rdf_type && triple.object == schema_person { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn parse_read(self, read: R) -> FromReadN3Reader { + FromReadN3Reader { + inner: self.parse().parser.parse_read(read), + } + } + + /// Parses a N3 file from a [`AsyncRead`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNode; + /// use oxttl::n3::{N3Parser, N3Term}; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" . 
+ /// a schema:Person ; + /// schema:name "Bar" ."#; + /// + /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); + /// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person")); + /// let mut count = 0; + /// let mut parser = N3Parser::new().parse_tokio_async_read(file.as_ref()); + /// while let Some(triple) = parser.next().await { + /// let triple = triple?; + /// if triple.predicate == rdf_type && triple.object == schema_person { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn parse_tokio_async_read( + self, + read: R, + ) -> FromTokioAsyncReadN3Reader { + FromTokioAsyncReadN3Reader { + inner: self.parse().parser.parse_tokio_async_read(read), + } + } + + /// Allows to parse a N3 file by using a low-level API. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNode; + /// use oxttl::n3::{N3Parser, N3Term}; + /// + /// let file: [&[u8]; 5] = [ + /// b"@base ", + /// b". @prefix schema: .", + /// b" a schema:Person", + /// b" ; schema:name \"Foo\" . ", + /// b" a schema:Person ; schema:name \"Bar\" .", + /// ]; + /// + /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); + /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); + /// let mut count = 0; + /// let mut parser = N3Parser::new().parse(); + /// let mut file_chunks = file.iter(); + /// while !parser.is_end() { + /// // We feed more data to the parser + /// if let Some(chunk) = file_chunks.next() { + /// parser.extend_from_slice(chunk); + /// } else { + /// parser.end(); // It's finished + /// } + /// // We read as many triples from the parser as possible + /// while let Some(triple) = parser.read_next() { + /// let triple = triple?; + /// if triple.predicate == rdf_type && triple.object == schema_person { + /// count += 1; + /// } + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn parse(self) -> LowLevelN3Reader { + LowLevelN3Reader { + parser: N3Recognizer::new_parser(self.unchecked, self.base, self.prefixes), + } + } +} + +/// Parses a N3 file from a [`Read`] implementation. Can be built using [`N3Parser::parse_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; +/// use oxttl::n3::{N3Parser, N3Term}; +/// +/// let file = br#"@base . +/// @prefix schema: . +/// a schema:Person ; +/// schema:name "Foo" . +/// a schema:Person ; +/// schema:name "Bar" ."#; +/// +/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); +/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); +/// let mut count = 0; +/// for triple in N3Parser::new().parse_read(file.as_ref()) { +/// let triple = triple?; +/// if triple.predicate == rdf_type && triple.object == schema_person { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct FromReadN3Reader { + inner: FromReadIterator, +} + +impl FromReadN3Reader { + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). 
+ /// + /// ``` + /// use oxttl::N3Parser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = N3Parser::new().parse_read(file.as_ref()); + /// assert_eq!(reader.prefixes().collect::>(), []); // No prefix at the beginning + /// + /// reader.next().unwrap()?; // We read the first triple + /// assert_eq!( + /// reader.prefixes().collect::>(), + /// [("schema", "http://schema.org/")] + /// ); // There are now prefixes + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn prefixes(&self) -> N3PrefixesIter<'_> { + N3PrefixesIter { + inner: self.inner.parser.context.prefixes.iter(), + } + } + + /// The base IRI considered at the current step of the parsing. + /// + /// ``` + /// use oxttl::N3Parser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = N3Parser::new().parse_read(file.as_ref()); + /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. + /// + /// reader.next().unwrap()?; // We read the first triple + /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI. + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn base_iri(&self) -> Option<&str> { + self.inner + .parser + .context + .lexer_options + .base_iri + .as_ref() + .map(Iri::as_str) + } +} + +impl Iterator for FromReadN3Reader { + type Item = Result; + + fn next(&mut self) -> Option { + self.inner.next() + } +} + +/// Parses a N3 file from a [`AsyncRead`] implementation. Can be built using [`N3Parser::parse_tokio_async_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; +/// use oxttl::n3::{N3Parser, N3Term}; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), oxttl::TurtleParseError> { +/// let file = br#"@base . +/// @prefix schema: . +/// a schema:Person ; +/// schema:name "Foo" . +/// a schema:Person ; +/// schema:name "Bar" ."#; +/// +/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); +/// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person")); +/// let mut count = 0; +/// let mut parser = N3Parser::new().parse_tokio_async_read(file.as_ref()); +/// while let Some(triple) = parser.next().await { +/// let triple = triple?; +/// if triple.predicate == rdf_type && triple.object == schema_person { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct FromTokioAsyncReadN3Reader { + inner: FromTokioAsyncReadIterator, +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadN3Reader { + /// Reads the next triple or returns `None` if the file is finished. + pub async fn next(&mut self) -> Option> { + Some(self.inner.next().await?.map(Into::into)) + } + + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). + /// + /// ``` + /// use oxttl::N3Parser; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#"@base . + /// @prefix schema: . 
+ /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref()); + /// assert_eq!(reader.prefixes().collect::>(), []); // No prefix at the beginning + /// + /// reader.next().await.unwrap()?; // We read the first triple + /// assert_eq!( + /// reader.prefixes().collect::>(), + /// [("schema", "http://schema.org/")] + /// ); // There are now prefixes + /// # Ok(()) + /// # } + /// ``` + pub fn prefixes(&self) -> N3PrefixesIter<'_> { + N3PrefixesIter { + inner: self.inner.parser.context.prefixes.iter(), + } + } + + /// The base IRI considered at the current step of the parsing. + /// + /// ``` + /// use oxttl::N3Parser; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref()); + /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning + /// + /// reader.next().await.unwrap()?; // We read the first triple + /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI + /// # Ok(()) + /// # } + /// ``` + pub fn base_iri(&self) -> Option<&str> { + self.inner + .parser + .context + .lexer_options + .base_iri + .as_ref() + .map(Iri::as_str) + } +} + +/// Parses a N3 file by using a low-level API. Can be built using [`N3Parser::parse`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; +/// use oxttl::n3::{N3Parser, N3Term}; +/// +/// let file: [&[u8]; 5] = [ +/// b"@base ", +/// b". @prefix schema: .", +/// b" a schema:Person", +/// b" ; schema:name \"Foo\" . ", +/// b" a schema:Person ; schema:name \"Bar\" .", +/// ]; +/// +/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); +/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); +/// let mut count = 0; +/// let mut parser = N3Parser::new().parse(); +/// let mut file_chunks = file.iter(); +/// while !parser.is_end() { +/// // We feed more data to the parser +/// if let Some(chunk) = file_chunks.next() { +/// parser.extend_from_slice(chunk); +/// } else { +/// parser.end(); // It's finished +/// } +/// // We read as many triples from the parser as possible +/// while let Some(triple) = parser.read_next() { +/// let triple = triple?; +/// if triple.predicate == rdf_type && triple.object == schema_person { +/// count += 1; +/// } +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +pub struct LowLevelN3Reader { + parser: Parser, +} + +impl LowLevelN3Reader { + /// Adds some extra bytes to the parser. Should be called when [`read_next`](Self::read_next) returns [`None`] and there is still unread data. + pub fn extend_from_slice(&mut self, other: &[u8]) { + self.parser.extend_from_slice(other) + } + + /// Tell the parser that the file is finished. + /// + /// This triggers the parsing of the final bytes and might lead [`read_next`](Self::read_next) to return some extra values. + pub fn end(&mut self) { + self.parser.end() + } + + /// Returns if the parsing is finished i.e. [`end`](Self::end) has been called and [`read_next`](Self::read_next) is always going to return `None`. + pub fn is_end(&self) -> bool { + self.parser.is_end() + } + + /// Attempt to parse a new quad from the already provided data. 
+ /// + /// Returns [`None`] if the parsing is finished or more data is required. + /// If it is the case more data should be fed using [`extend_from_slice`](Self::extend_from_slice). + pub fn read_next(&mut self) -> Option> { + self.parser.read_next() + } + + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). + /// + /// ``` + /// use oxttl::N3Parser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = N3Parser::new().parse(); + /// reader.extend_from_slice(file); + /// assert_eq!(reader.prefixes().collect::>(), []); // No prefix at the beginning + /// + /// reader.read_next().unwrap()?; // We read the first triple + /// assert_eq!( + /// reader.prefixes().collect::>(), + /// [("schema", "http://schema.org/")] + /// ); // There are now prefixes + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn prefixes(&self) -> N3PrefixesIter<'_> { + N3PrefixesIter { + inner: self.parser.context.prefixes.iter(), + } + } + + /// The base IRI considered at the current step of the parsing. + /// + /// ``` + /// use oxttl::N3Parser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = N3Parser::new().parse(); + /// reader.extend_from_slice(file); + /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning + /// + /// reader.read_next().unwrap()?; // We read the first triple + /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn base_iri(&self) -> Option<&str> { + self.parser + .context + .lexer_options + .base_iri + .as_ref() + .map(Iri::as_str) + } +} + +#[derive(Clone)] +enum Predicate { + Regular(N3Term), + Inverted(N3Term), +} + +struct N3Recognizer { + stack: Vec, + terms: Vec, + predicates: Vec, + contexts: Vec, +} + +struct N3RecognizerContext { + lexer_options: N3LexerOptions, + prefixes: HashMap>, +} + +impl RuleRecognizer for N3Recognizer { + type TokenRecognizer = N3Lexer; + type Output = N3Quad; + type Context = N3RecognizerContext; + + fn error_recovery_state(mut self) -> Self { + self.stack.clear(); + self.terms.clear(); + self.predicates.clear(); + self.contexts.clear(); + self + } + + fn recognize_next( + mut self, + token: N3Token<'_>, + context: &mut N3RecognizerContext, + results: &mut Vec, + errors: &mut Vec, + ) -> Self { + while let Some(rule) = self.stack.pop() { + match rule { + // [1] n3Doc ::= ( ( n3Statement ".") | sparqlDirective) * + // [2] n3Statement ::= n3Directive | triples + // [3] n3Directive ::= prefixID | base + // [4] sparqlDirective ::= sparqlBase | sparqlPrefix + // [5] sparqlBase ::= BASE IRIREF + // [6] sparqlPrefix ::= PREFIX PNAME_NS IRIREF + // [7] prefixID ::= "@prefix" PNAME_NS IRIREF + // [8] base ::= "@base" IRIREF + N3State::N3Doc => { + self.stack.push(N3State::N3Doc); + match token { + N3Token::PlainKeyword(k) if k.eq_ignore_ascii_case("base") => { + self.stack.push(N3State::BaseExpectIri); + return self; + } + N3Token::PlainKeyword(k) if k.eq_ignore_ascii_case("prefix") => { + self.stack.push(N3State::PrefixExpectPrefix); + return self; + } + N3Token::LangTag("prefix") 
=> { + self.stack.push(N3State::N3DocExpectDot); + self.stack.push(N3State::PrefixExpectPrefix); + return self; + } + N3Token::LangTag("base") => { + self.stack.push(N3State::N3DocExpectDot); + self.stack.push(N3State::BaseExpectIri); + return self; + } + _ => { + self.stack.push(N3State::N3DocExpectDot); + self.stack.push(N3State::Triples); + } + } + } + N3State::N3DocExpectDot => { + if token == N3Token::Punctuation(".") { + return self; + } + errors.push("A dot is expected at the end of N3 statements".into()); + } + N3State::BaseExpectIri => return if let N3Token::IriRef(iri) = token { + context.lexer_options.base_iri = Some(Iri::parse_unchecked(iri)); + self + } else { + self.error(errors, "The BASE keyword should be followed by an IRI") + }, + N3State::PrefixExpectPrefix => return match token { + N3Token::PrefixedName { prefix, local, .. } if local.is_empty() => { + self.stack.push(N3State::PrefixExpectIri { name: prefix.to_owned() }); + self + } + _ => { + self.error(errors, "The PREFIX keyword should be followed by a prefix like 'ex:'") + } + }, + N3State::PrefixExpectIri { name } => return if let N3Token::IriRef(iri) = token { + context.prefixes.insert(name, Iri::parse_unchecked(iri)); + self + } else { self.error(errors, "The PREFIX declaration should be followed by a prefix and its value as an IRI") + }, + // [9] triples ::= subject predicateObjectList? + N3State::Triples => { + self.stack.push(N3State::TriplesMiddle); + self.stack.push(N3State::Path); + } + N3State::TriplesMiddle => if matches!(token, N3Token::Punctuation("." | "]" | "}" | ")")) {} else { + self.stack.push(N3State::TriplesEnd); + self.stack.push(N3State::PredicateObjectList); + }, + N3State::TriplesEnd => { + self.terms.pop(); + } + // [10] predicateObjectList ::= verb objectList ( ";" ( verb objectList) ? ) * + N3State::PredicateObjectList => { + self.stack.push(N3State::PredicateObjectListEnd); + self.stack.push(N3State::ObjectsList); + self.stack.push(N3State::Verb); + } + N3State::PredicateObjectListEnd => { + self.predicates.pop(); + if token == N3Token::Punctuation(";") { + self.stack.push(N3State::PredicateObjectListPossibleContinuation); + return self; + } + } + N3State::PredicateObjectListPossibleContinuation => if token == N3Token::Punctuation(";") { + self.stack.push(N3State::PredicateObjectListPossibleContinuation); + return self; + } else if matches!(token, N3Token::Punctuation(";" | "." 
| "}" | "]" | ")")) {} else { + self.stack.push(N3State::PredicateObjectListEnd); + self.stack.push(N3State::ObjectsList); + self.stack.push(N3State::Verb); + }, + // [11] objectList ::= object ( "," object) * + N3State::ObjectsList => { + self.stack.push(N3State::ObjectsListEnd); + self.stack.push(N3State::Path); + } + N3State::ObjectsListEnd => { + let object = self.terms.pop().unwrap(); + let subject = self.terms.last().unwrap().clone(); + results.push(match self.predicates.last().unwrap().clone() { + Predicate::Regular(predicate) => self.quad( + subject, + predicate, + object, + ), + Predicate::Inverted(predicate) => self.quad( + object, + predicate, + subject, + ) + }); + if token == N3Token::Punctuation(",") { + self.stack.push(N3State::ObjectsListEnd); + self.stack.push(N3State::Path); + return self; + } + } + // [12] verb ::= predicate | "a" | ( "has" expression) | ( "is" expression "of") | "=" | "<=" | "=>" + // [14] predicate ::= expression | ( "<-" expression) + N3State::Verb => match token { + N3Token::PlainKeyword("a") => { + self.predicates.push(Predicate::Regular(rdf::TYPE.into())); + return self; + } + N3Token::PlainKeyword("has") => { + self.stack.push(N3State::AfterRegularVerb); + self.stack.push(N3State::Path); + return self; + } + N3Token::PlainKeyword("is") => { + self.stack.push(N3State::AfterVerbIs); + self.stack.push(N3State::Path); + return self; + } + N3Token::Punctuation("=") => { + self.predicates.push(Predicate::Regular(NamedNode::new_unchecked("http://www.w3.org/2002/07/owl#sameAs").into())); + return self; + } + N3Token::Punctuation("=>") => { + self.predicates.push(Predicate::Regular(NamedNode::new_unchecked("http://www.w3.org/2000/10/swap/log#implies").into())); + return self; + } + N3Token::Punctuation("<=") => { + self.predicates.push(Predicate::Inverted(NamedNode::new_unchecked("http://www.w3.org/2000/10/swap/log#implies").into())); + return self; + } + N3Token::Punctuation("<-") => { + self.stack.push(N3State::AfterInvertedVerb); + self.stack.push(N3State::Path); + return self; + } + _ => { + self.stack.push(N3State::AfterRegularVerb); + self.stack.push(N3State::Path); + } + } + N3State::AfterRegularVerb => { + self.predicates.push(Predicate::Regular(self.terms.pop().unwrap())); + } + N3State::AfterInvertedVerb => { + self.predicates.push(Predicate::Inverted(self.terms.pop().unwrap())); + } + N3State::AfterVerbIs => return match token { + N3Token::PlainKeyword("of") => { + self.predicates.push(Predicate::Inverted(self.terms.pop().unwrap())); + self + } + _ => { + self.error(errors, "The keyword 'is' should be followed by a predicate then by the keyword 'of'") + } + }, + // [13] subject ::= expression + // [15] object ::= expression + // [16] expression ::= path + // [17] path ::= pathItem ( ( "!" path) | ( "^" path) ) ? 
+ N3State::Path => { + self.stack.push(N3State::PathFollowUp); + self.stack.push(N3State::PathItem); + } + N3State::PathFollowUp => match token { + N3Token::Punctuation("!") => { + self.stack.push(N3State::PathAfterIndicator { is_inverse: false }); + self.stack.push(N3State::PathItem); + return self; + } + N3Token::Punctuation("^") => { + self.stack.push(N3State::PathAfterIndicator { is_inverse: true }); + self.stack.push(N3State::PathItem); + return self; + } + _ => () + }, + N3State::PathAfterIndicator { is_inverse } => { + let predicate = self.terms.pop().unwrap(); + let previous = self.terms.pop().unwrap(); + let current = BlankNode::default(); + results.push(if is_inverse { self.quad(current.clone(), predicate, previous) } else { self.quad(previous, predicate, current.clone()) }); + self.terms.push(current.into()); + self.stack.push(N3State::PathFollowUp); + } + // [18] pathItem ::= iri | blankNode | quickVar | collection | blankNodePropertyList | iriPropertyList | literal | formula + // [19] literal ::= rdfLiteral | numericLiteral | BOOLEAN_LITERAL + // [20] blankNodePropertyList ::= "[" predicateObjectList "]" + // [21] iriPropertyList ::= IPLSTART iri predicateObjectList "]" + // [22] collection ::= "(" object* ")" + // [23] formula ::= "{" formulaContent? "}" + // [25] numericLiteral ::= DOUBLE | DECIMAL | INTEGER + // [26] rdfLiteral ::= STRING ( LANGTAG | ( "^^" iri) ) ? + // [27] iri ::= IRIREF | prefixedName + // [28] prefixedName ::= PNAME_LN | PNAME_NS + // [29] blankNode ::= BLANK_NODE_LABEL | ANON + // [30] quickVar ::= QUICK_VAR_NAME + N3State::PathItem => { + return match token { + N3Token::IriRef(iri) => { + self.terms.push(NamedNode::new_unchecked(iri).into()); + self + } + N3Token::PrefixedName { prefix, local, might_be_invalid_iri } => match resolve_local_name(prefix, &local, might_be_invalid_iri, &context.prefixes) { + Ok(t) => { + self.terms.push(t.into()); + self + } + Err(e) => self.error(errors, e) + } + N3Token::BlankNodeLabel(bnode) => { + self.terms.push(BlankNode::new_unchecked(bnode).into()); + self + } + N3Token::Variable(name) => { + self.terms.push(Variable::new_unchecked(name).into()); + self + } + N3Token::Punctuation("[") => { + self.stack.push(N3State::PropertyListMiddle); + self + } + N3Token::Punctuation("(") => { + self.stack.push(N3State::CollectionBeginning); + self + } + N3Token::String(value) => { + self.stack.push(N3State::LiteralPossibleSuffix { value }); + self + } + N3Token::Integer(v) => { + self.terms.push(Literal::new_typed_literal(v, xsd::INTEGER).into()); + self + } + N3Token::Decimal(v) => { + self.terms.push(Literal::new_typed_literal(v, xsd::DECIMAL).into()); + self + } + N3Token::Double(v) => { + self.terms.push(Literal::new_typed_literal(v, xsd::DOUBLE).into()); + self + } + N3Token::PlainKeyword("true") => { + self.terms.push(Literal::new_typed_literal("true", xsd::BOOLEAN).into()); + self + } + N3Token::PlainKeyword("false") => { + self.terms.push(Literal::new_typed_literal("false", xsd::BOOLEAN).into()); + self + } + N3Token::Punctuation("{") => { + self.contexts.push(BlankNode::default()); + self.stack.push(N3State::FormulaContent); + self + } + _ => + self.error(errors, "TOKEN is not a valid RDF value") + + } + } + N3State::PropertyListMiddle => match token { + N3Token::Punctuation("]") => { + self.terms.push(BlankNode::default().into()); + return self; + } + N3Token::PlainKeyword("id") => { + self.stack.push(N3State::IriPropertyList); + return self; + } + _ => { + self.terms.push(BlankNode::default().into()); + 
self.stack.push(N3State::PropertyListEnd); + self.stack.push(N3State::PredicateObjectList); + } + } + N3State::PropertyListEnd => if token == N3Token::Punctuation("]") { + return self; + } else { + errors.push("blank node property lists should end with a ']'".into()); + } + N3State::IriPropertyList => return match token { + N3Token::IriRef(id) => { + self.terms.push(NamedNode::new_unchecked(id).into()); + self.stack.push(N3State::PropertyListEnd); + self.stack.push(N3State::PredicateObjectList); + self + } + N3Token::PrefixedName { prefix, local, might_be_invalid_iri } => match resolve_local_name(prefix, &local, might_be_invalid_iri, &context.prefixes) { + Ok(t) => { + self.terms.push(t.into()); + self.stack.push(N3State::PropertyListEnd); + self.stack.push(N3State::PredicateObjectList); + self + } + Err(e) => { + self.error(errors, e) + } + } + _ => { + self.error(errors, "The '[ id' construction should be followed by an IRI") + } + }, + N3State::CollectionBeginning => if let N3Token::Punctuation(")") = token { + self.terms.push(rdf::NIL.into()); + return self; + } else { + let root = BlankNode::default(); + self.terms.push(root.clone().into()); + self.terms.push(root.into()); + self.stack.push(N3State::CollectionPossibleEnd); + self.stack.push(N3State::Path); + }, + N3State::CollectionPossibleEnd => { + let value = self.terms.pop().unwrap(); + let old = self.terms.pop().unwrap(); + results.push(self.quad( + old.clone(), + rdf::FIRST, + value, + )); + if let N3Token::Punctuation(")") = token { + results.push(self.quad( + old, + rdf::REST, + rdf::NIL, + )); + return self; + } + let new = BlankNode::default(); + results.push(self.quad( + old, + rdf::REST, + new.clone(), + )); + self.terms.push(new.into()); + self.stack.push(N3State::CollectionPossibleEnd); + self.stack.push(N3State::Path); + } + N3State::LiteralPossibleSuffix { value } => { + match token { + N3Token::LangTag(lang) => { + self.terms.push(Literal::new_language_tagged_literal_unchecked(value, lang.to_ascii_lowercase()).into()); + return self; + } + N3Token::Punctuation("^^") => { + self.stack.push(N3State::LiteralExpectDatatype { value }); + return self; + } + _ => { + self.terms.push(Literal::new_simple_literal(value).into()); + } + } + } + N3State::LiteralExpectDatatype { value } => { + match token { + N3Token::IriRef(datatype) => { + self.terms.push(Literal::new_typed_literal(value, NamedNode::new_unchecked(datatype)).into()); + return self; + } + N3Token::PrefixedName { prefix, local, might_be_invalid_iri } => match resolve_local_name(prefix, &local, might_be_invalid_iri, &context.prefixes) { + Ok(datatype) => { + self.terms.push(Literal::new_typed_literal(value, datatype).into()); + return self; + } + Err(e) => { + return self.error(errors, e); + } + } + _ => { + errors.push("Expecting a datatype IRI after '^^, found TOKEN".into()); + self.stack.clear(); + } + } + } + // [24] formulaContent ::= ( n3Statement ( "." formulaContent? ) ? ) | ( sparqlDirective formulaContent? 
) + N3State::FormulaContent => { + match token { + N3Token::Punctuation("}") => { + self.terms.push(self.contexts.pop().unwrap().into()); + return self; + } + N3Token::PlainKeyword(k)if k.eq_ignore_ascii_case("base") => { + self.stack.push(N3State::FormulaContent); + self.stack.push(N3State::BaseExpectIri); + return self; + } + N3Token::PlainKeyword(k)if k.eq_ignore_ascii_case("prefix") => { + self.stack.push(N3State::FormulaContent); + self.stack.push(N3State::PrefixExpectPrefix); + return self; + } + N3Token::LangTag("prefix") => { + self.stack.push(N3State::FormulaContentExpectDot); + self.stack.push(N3State::PrefixExpectPrefix); + return self; + } + N3Token::LangTag("base") => { + self.stack.push(N3State::FormulaContentExpectDot); + self.stack.push(N3State::BaseExpectIri); + return self; + } + _ => { + self.stack.push(N3State::FormulaContentExpectDot); + self.stack.push(N3State::Triples); + } + } + } + N3State::FormulaContentExpectDot => { + match token { + N3Token::Punctuation("}") => { + self.terms.push(self.contexts.pop().unwrap().into()); + return self; + } + N3Token::Punctuation(".") => { + self.stack.push(N3State::FormulaContent); + return self; + } + _ => { + errors.push("A dot is expected at the end of N3 statements".into()); + self.stack.push(N3State::FormulaContent); + } + } + } + } + } + // Empty stack + if token == N3Token::Punctuation(".") { + self.stack.push(N3State::N3Doc); + self + } else { + self + } + } + + fn recognize_end( + self, + _state: &mut N3RecognizerContext, + _results: &mut Vec, + errors: &mut Vec, + ) { + match &*self.stack { + [] | [N3State::N3Doc] => (), + _ => errors.push("Unexpected end".into()), // TODO + } + } + + fn lexer_options(context: &N3RecognizerContext) -> &N3LexerOptions { + &context.lexer_options + } +} + +impl N3Recognizer { + pub fn new_parser( + unchecked: bool, + base_iri: Option>, + prefixes: HashMap>, + ) -> Parser { + Parser::new( + Lexer::new( + N3Lexer::new(N3LexerMode::N3, unchecked), + MIN_BUFFER_SIZE, + MAX_BUFFER_SIZE, + true, + Some(b"#"), + ), + Self { + stack: vec![N3State::N3Doc], + terms: Vec::new(), + predicates: Vec::new(), + contexts: Vec::new(), + }, + N3RecognizerContext { + lexer_options: N3LexerOptions { base_iri }, + prefixes, + }, + ) + } + + #[must_use] + fn error( + mut self, + errors: &mut Vec, + msg: impl Into, + ) -> Self { + errors.push(msg.into()); + self.stack.clear(); + self + } + + fn quad( + &self, + subject: impl Into, + predicate: impl Into, + object: impl Into, + ) -> N3Quad { + N3Quad { + subject: subject.into(), + predicate: predicate.into(), + object: object.into(), + graph_name: self + .contexts + .last() + .map_or(GraphName::DefaultGraph, |g| g.clone().into()), + } + } +} + +#[derive(Debug)] +enum N3State { + N3Doc, + N3DocExpectDot, + BaseExpectIri, + PrefixExpectPrefix, + PrefixExpectIri { name: String }, + Triples, + TriplesMiddle, + TriplesEnd, + PredicateObjectList, + PredicateObjectListEnd, + PredicateObjectListPossibleContinuation, + ObjectsList, + ObjectsListEnd, + Verb, + AfterRegularVerb, + AfterInvertedVerb, + AfterVerbIs, + Path, + PathFollowUp, + PathAfterIndicator { is_inverse: bool }, + PathItem, + PropertyListMiddle, + PropertyListEnd, + IriPropertyList, + CollectionBeginning, + CollectionPossibleEnd, + LiteralPossibleSuffix { value: String }, + LiteralExpectDatatype { value: String }, + FormulaContent, + FormulaContentExpectDot, +} + +/// Iterator on the file prefixes. +/// +/// See [`LowLevelN3Reader::prefixes`]. 
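+///
+/// A short usage sketch (the prefix IRI is a placeholder):
+///
+/// ```ignore
+/// use oxttl::N3Parser;
+///
+/// let mut reader = N3Parser::new().parse();
+/// reader.extend_from_slice(br#"@prefix schema: <http://schema.org/> ."#);
+/// reader.end();
+/// while reader.read_next().is_some() {}
+/// for (name, iri) in reader.prefixes() {
+///     assert_eq!((name, iri), ("schema", "http://schema.org/"));
+/// }
+/// ```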
+pub struct N3PrefixesIter<'a> { + inner: Iter<'a, String, Iri>, +} + +impl<'a> Iterator for N3PrefixesIter<'a> { + type Item = (&'a str, &'a str); + + #[inline] + fn next(&mut self) -> Option { + let (key, value) = self.inner.next()?; + Some((key.as_str(), value.as_str())) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} diff --git a/ng-oxigraph/src/oxttl/nquads.rs b/ng-oxigraph/src/oxttl/nquads.rs new file mode 100644 index 0000000..8b62367 --- /dev/null +++ b/ng-oxigraph/src/oxttl/nquads.rs @@ -0,0 +1,564 @@ +//! A [N-Quads](https://www.w3.org/TR/n-quads/) streaming parser implemented by [`NQuadsParser`] +//! and a serializer implemented by [`NQuadsSerializer`]. + +use crate::oxrdf::{Quad, QuadRef}; +use crate::oxttl::line_formats::NQuadsRecognizer; +#[cfg(feature = "async-tokio")] +use crate::oxttl::toolkit::FromTokioAsyncReadIterator; +use crate::oxttl::toolkit::{FromReadIterator, Parser, TurtleParseError, TurtleSyntaxError}; +use std::io::{self, Read, Write}; +#[cfg(feature = "async-tokio")] +use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt}; + +/// A [N-Quads](https://www.w3.org/TR/n-quads/) streaming parser. +/// +/// Support for [N-Quads-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-quads-star) is available behind the `rdf-star` feature and the [`NQuadsParser::with_quoted_triples`] option. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxttl::NQuadsParser; +/// +/// let file = br#" . +/// "Foo" . +/// . +/// "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// for quad in NQuadsParser::new().parse_read(file.as_ref()) { +/// let quad = quad?; +/// if quad.predicate == rdf::TYPE && quad.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[derive(Default)] +#[must_use] +pub struct NQuadsParser { + unchecked: bool, + #[cfg(feature = "rdf-star")] + with_quoted_triples: bool, +} + +impl NQuadsParser { + /// Builds a new [`NQuadsParser`]. + #[inline] + pub fn new() -> Self { + Self::default() + } + + /// Assumes the file is valid to make parsing faster. + /// + /// It will skip some validations. + /// + /// Note that if the file is actually not valid, then broken RDF might be emitted by the parser. + #[inline] + pub fn unchecked(mut self) -> Self { + self.unchecked = true; + self + } + + /// Enables [N-Quads-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-quads-star). + #[cfg(feature = "rdf-star")] + #[inline] + pub fn with_quoted_triples(mut self) -> Self { + self.with_quoted_triples = true; + self + } + + /// Parses a N-Quads file from a [`Read`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxttl::NQuadsParser; + /// + /// let file = br#" . + /// "Foo" . + /// . 
+ /// "Bar" ."#; + /// + /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; + /// let mut count = 0; + /// for quad in NQuadsParser::new().parse_read(file.as_ref()) { + /// let quad = quad?; + /// if quad.predicate == rdf::TYPE && quad.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn parse_read(self, read: R) -> FromReadNQuadsReader { + FromReadNQuadsReader { + inner: self.parse().parser.parse_read(read), + } + } + + /// Parses a N-Quads file from a [`AsyncRead`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxttl::NQuadsParser; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#" . + /// "Foo" . + /// . + /// "Bar" ."#; + /// + /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); + /// let mut count = 0; + /// let mut parser = NQuadsParser::new().parse_tokio_async_read(file.as_ref()); + /// while let Some(triple) = parser.next().await { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn parse_tokio_async_read( + self, + read: R, + ) -> FromTokioAsyncReadNQuadsReader { + FromTokioAsyncReadNQuadsReader { + inner: self.parse().parser.parse_tokio_async_read(read), + } + } + + /// Allows to parse a N-Quads file by using a low-level API. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxttl::NQuadsParser; + /// + /// let file: [&[u8]; 4] = [ + /// b" .\n", + /// b" \"Foo\" .\n", + /// b" .\n", + /// b" \"Bar\" .\n" + /// ]; + /// + /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; + /// let mut count = 0; + /// let mut parser = NQuadsParser::new().parse(); + /// let mut file_chunks = file.iter(); + /// while !parser.is_end() { + /// // We feed more data to the parser + /// if let Some(chunk) = file_chunks.next() { + /// parser.extend_from_slice(chunk); + /// } else { + /// parser.end(); // It's finished + /// } + /// // We read as many quads from the parser as possible + /// while let Some(quad) = parser.read_next() { + /// let quad = quad?; + /// if quad.predicate == rdf::TYPE && quad.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[allow(clippy::unused_self)] + pub fn parse(self) -> LowLevelNQuadsReader { + LowLevelNQuadsReader { + parser: NQuadsRecognizer::new_parser( + true, + #[cfg(feature = "rdf-star")] + self.with_quoted_triples, + self.unchecked, + ), + } + } +} + +/// Parses a N-Quads file from a [`Read`] implementation. Can be built using [`NQuadsParser::parse_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxttl::NQuadsParser; +/// +/// let file = br#" . +/// "Foo" . +/// . 
+/// "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// for quad in NQuadsParser::new().parse_read(file.as_ref()) { +/// let quad = quad?; +/// if quad.predicate == rdf::TYPE && quad.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct FromReadNQuadsReader { + inner: FromReadIterator, +} + +impl Iterator for FromReadNQuadsReader { + type Item = Result; + + fn next(&mut self) -> Option { + self.inner.next() + } +} + +/// Parses a N-Quads file from a [`AsyncRead`] implementation. Can be built using [`NQuadsParser::parse_tokio_async_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxttl::NQuadsParser; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), oxttl::TurtleParseError> { +/// let file = br#" . +/// "Foo" . +/// . +/// "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); +/// let mut count = 0; +/// let mut parser = NQuadsParser::new().parse_tokio_async_read(file.as_ref()); +/// while let Some(triple) = parser.next().await { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct FromTokioAsyncReadNQuadsReader { + inner: FromTokioAsyncReadIterator, +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadNQuadsReader { + /// Reads the next triple or returns `None` if the file is finished. + pub async fn next(&mut self) -> Option> { + Some(self.inner.next().await?.map(Into::into)) + } +} + +/// Parses a N-Quads file by using a low-level API. Can be built using [`NQuadsParser::parse`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxttl::NQuadsParser; +/// +/// let file: [&[u8]; 4] = [ +/// b" .\n", +/// b" \"Foo\" .\n", +/// b" .\n", +/// b" \"Bar\" .\n" +/// ]; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// let mut parser = NQuadsParser::new().parse(); +/// let mut file_chunks = file.iter(); +/// while !parser.is_end() { +/// // We feed more data to the parser +/// if let Some(chunk) = file_chunks.next() { +/// parser.extend_from_slice(chunk); +/// } else { +/// parser.end(); // It's finished +/// } +/// // We read as many quads from the parser as possible +/// while let Some(quad) = parser.read_next() { +/// let quad = quad?; +/// if quad.predicate == rdf::TYPE && quad.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +pub struct LowLevelNQuadsReader { + parser: Parser, +} + +impl LowLevelNQuadsReader { + /// Adds some extra bytes to the parser. Should be called when [`read_next`](Self::read_next) returns [`None`] and there is still unread data. + pub fn extend_from_slice(&mut self, other: &[u8]) { + self.parser.extend_from_slice(other) + } + + /// Tell the parser that the file is finished. + /// + /// This triggers the parsing of the final bytes and might lead [`read_next`](Self::read_next) to return some extra values. + pub fn end(&mut self) { + self.parser.end() + } + + /// Returns if the parsing is finished i.e. 
+    /// Returns if the parsing is finished i.e. [`end`](Self::end) has been called and [`read_next`](Self::read_next) is always going to return `None`.
+    pub fn is_end(&self) -> bool {
+        self.parser.is_end()
+    }
+
+    /// Attempt to parse a new quad from the already provided data.
+    ///
+    /// Returns [`None`] if the parsing is finished or more data is required.
+    /// If it is the case more data should be fed using [`extend_from_slice`](Self::extend_from_slice).
+    pub fn read_next(&mut self) -> Option<Result<Quad, TurtleSyntaxError>> {
+        self.parser.read_next()
+    }
+}
+
+/// A [N-Quads](https://www.w3.org/TR/n-quads/) serializer.
+///
+/// Support for [N-Quads-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-quads-star) is available behind the `rdf-star` feature.
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::NQuadsSerializer;
+///
+/// let mut writer = NQuadsSerializer::new().serialize_to_write(Vec::new());
+/// writer.write_quad(QuadRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+///     NamedNodeRef::new("http://example.com")?,
+/// ))?;
+/// assert_eq!(
+///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> <http://example.com> .\n",
+///     writer.finish().as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[derive(Default)]
+#[must_use]
+pub struct NQuadsSerializer;
+
+impl NQuadsSerializer {
+    /// Builds a new [`NQuadsSerializer`].
+    #[inline]
+    pub fn new() -> Self {
+        Self
+    }
+
+    /// Writes a N-Quads file to a [`Write`] implementation.
+    ///
+    /// ```
+    /// use oxrdf::{NamedNodeRef, QuadRef};
+    /// use oxttl::NQuadsSerializer;
+    ///
+    /// let mut writer = NQuadsSerializer::new().serialize_to_write(Vec::new());
+    /// writer.write_quad(QuadRef::new(
+    ///     NamedNodeRef::new("http://example.com#me")?,
+    ///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+    ///     NamedNodeRef::new("http://schema.org/Person")?,
+    ///     NamedNodeRef::new("http://example.com")?,
+    /// ))?;
+    /// assert_eq!(
+    ///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> <http://example.com> .\n",
+    ///     writer.finish().as_slice()
+    /// );
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn serialize_to_write<W: Write>(self, write: W) -> ToWriteNQuadsWriter<W> {
+        ToWriteNQuadsWriter {
+            write,
+            writer: self.serialize(),
+        }
+    }
+
+    /// Writes a N-Quads file to a [`AsyncWrite`] implementation.
+    ///
+    /// ```
+    /// use oxrdf::{NamedNodeRef, QuadRef};
+    /// use oxttl::NQuadsSerializer;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> std::io::Result<()> {
+    /// let mut writer = NQuadsSerializer::new().serialize_to_tokio_async_write(Vec::new());
+    /// writer.write_quad(QuadRef::new(
+    ///     NamedNodeRef::new_unchecked("http://example.com#me"),
+    ///     NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
+    ///     NamedNodeRef::new_unchecked("http://schema.org/Person"),
+    ///     NamedNodeRef::new_unchecked("http://example.com"),
+    /// )).await?;
+    /// assert_eq!(
+    ///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> <http://example.com> .\n",
+    ///     writer.finish().as_slice()
+    /// );
+    /// # Ok(())
+    /// # }
+    /// ```
+    #[cfg(feature = "async-tokio")]
+    pub fn serialize_to_tokio_async_write<W: AsyncWrite + Unpin>(
+        self,
+        write: W,
+    ) -> ToTokioAsyncWriteNQuadsWriter<W> {
+        ToTokioAsyncWriteNQuadsWriter {
+            write,
+            writer: self.serialize(),
+            buffer: Vec::new(),
+        }
+    }
+
+    /// Builds a low-level N-Quads writer.
+    ///
+    /// ```
+    /// use oxrdf::{NamedNodeRef, QuadRef};
+    /// use oxttl::NQuadsSerializer;
+    ///
+    /// let mut buf = Vec::new();
+    /// let mut writer = NQuadsSerializer::new().serialize();
+    /// writer.write_quad(QuadRef::new(
+    ///     NamedNodeRef::new("http://example.com#me")?,
+    ///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+    ///     NamedNodeRef::new("http://schema.org/Person")?,
+    ///     NamedNodeRef::new("http://example.com")?,
+    /// ), &mut buf)?;
+    /// assert_eq!(
+    ///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> <http://example.com> .\n",
+    ///     buf.as_slice()
+    /// );
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    #[allow(clippy::unused_self)]
+    pub fn serialize(self) -> LowLevelNQuadsWriter {
+        LowLevelNQuadsWriter
+    }
+}
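+// A sketch of the low-level serialization path exposed by `serialize()` above:
+// the writer itself is stateless, so a single scratch buffer can be reused
+// across quads. The quad below is hypothetical sample data.
+#[cfg(test)]
+mod low_level_writer_sketch {
+    use super::*;
+    use crate::oxrdf::NamedNode;
+
+    #[test]
+    fn reuse_one_buffer_for_many_quads() {
+        let quad = Quad::new(
+            NamedNode::new("http://example.com/s").unwrap(),
+            NamedNode::new("http://example.com/p").unwrap(),
+            NamedNode::new("http://example.com/o").unwrap(),
+            NamedNode::new("http://example.com/g").unwrap(),
+        );
+        let mut writer = NQuadsSerializer::new().serialize();
+        let mut buffer = Vec::new();
+        writer.write_quad(&quad, &mut buffer).unwrap();
+        assert!(buffer.ends_with(b"<http://example.com/g> .\n"));
+        buffer.clear(); // ready for the next quad without reallocating
+    }
+}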
+/// Writes a N-Quads file to a [`Write`] implementation. Can be built using [`NQuadsSerializer::serialize_to_write`].
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::NQuadsSerializer;
+///
+/// let mut writer = NQuadsSerializer::new().serialize_to_write(Vec::new());
+/// writer.write_quad(QuadRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+///     NamedNodeRef::new("http://example.com")?,
+/// ))?;
+/// assert_eq!(
+///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> <http://example.com> .\n",
+///     writer.finish().as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[must_use]
+pub struct ToWriteNQuadsWriter<W: Write> {
+    write: W,
+    writer: LowLevelNQuadsWriter,
+}
+
+impl<W: Write> ToWriteNQuadsWriter<W> {
+    /// Writes an extra quad.
+    pub fn write_quad<'a>(&mut self, q: impl Into<QuadRef<'a>>) -> io::Result<()> {
+        self.writer.write_quad(q, &mut self.write)
+    }
+
+    /// Ends the write process and returns the underlying [`Write`].
+    pub fn finish(self) -> W {
+        self.write
+    }
+}
+
+/// Writes a N-Quads file to a [`AsyncWrite`] implementation. Can be built using [`NQuadsSerializer::serialize_to_tokio_async_write`].
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::NQuadsSerializer;
+///
+/// # #[tokio::main(flavor = "current_thread")]
+/// # async fn main() -> std::io::Result<()> {
+/// let mut writer = NQuadsSerializer::new().serialize_to_tokio_async_write(Vec::new());
+/// writer.write_quad(QuadRef::new(
+///     NamedNodeRef::new_unchecked("http://example.com#me"),
+///     NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
+///     NamedNodeRef::new_unchecked("http://schema.org/Person"),
+///     NamedNodeRef::new_unchecked("http://example.com"),
+/// )).await?;
+/// assert_eq!(
+///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> <http://example.com> .\n",
+///     writer.finish().as_slice()
+/// );
+/// # Ok(())
+/// # }
+/// ```
+#[cfg(feature = "async-tokio")]
+#[must_use]
+pub struct ToTokioAsyncWriteNQuadsWriter<W: AsyncWrite + Unpin> {
+    write: W,
+    writer: LowLevelNQuadsWriter,
+    buffer: Vec<u8>,
+}
+
+#[cfg(feature = "async-tokio")]
+impl<W: AsyncWrite + Unpin> ToTokioAsyncWriteNQuadsWriter<W> {
+    /// Writes an extra quad.
+    pub async fn write_quad<'a>(&mut self, q: impl Into<QuadRef<'a>>) -> io::Result<()> {
+        self.writer.write_quad(q, &mut self.buffer)?;
+        self.write.write_all(&self.buffer).await?;
+        self.buffer.clear();
+        Ok(())
+    }
+
+    /// Ends the write process and returns the underlying [`Write`].
+    pub fn finish(self) -> W {
+        self.write
+    }
+}
+/// Writes a N-Quads file by using a low-level API. Can be built using [`NQuadsSerializer::serialize`].
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::NQuadsSerializer;
+///
+/// let mut buf = Vec::new();
+/// let mut writer = NQuadsSerializer::new().serialize();
+/// writer.write_quad(QuadRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+///     NamedNodeRef::new("http://example.com")?,
+/// ), &mut buf)?;
+/// assert_eq!(
+///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> <http://example.com> .\n",
+///     buf.as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+pub struct LowLevelNQuadsWriter;
+
+impl LowLevelNQuadsWriter {
+    /// Writes an extra quad.
+    #[allow(clippy::unused_self)]
+    pub fn write_quad<'a>(
+        &mut self,
+        q: impl Into<QuadRef<'a>>,
+        mut write: impl Write,
+    ) -> io::Result<()> {
+        writeln!(write, "{} .", q.into())
+    }
+}
diff --git a/ng-oxigraph/src/oxttl/ntriples.rs b/ng-oxigraph/src/oxttl/ntriples.rs
new file mode 100644
index 0000000..271b920
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/ntriples.rs
@@ -0,0 +1,580 @@
+//! A [N-Triples](https://www.w3.org/TR/n-triples/) streaming parser implemented by [`NTriplesParser`]
+//! and a serializer implemented by [`NTriplesSerializer`].
+
+use crate::oxrdf::{Triple, TripleRef};
+use crate::oxttl::line_formats::NQuadsRecognizer;
+#[cfg(feature = "async-tokio")]
+use crate::oxttl::toolkit::FromTokioAsyncReadIterator;
+use crate::oxttl::toolkit::{FromReadIterator, Parser, TurtleParseError, TurtleSyntaxError};
+use std::io::{self, Read, Write};
+#[cfg(feature = "async-tokio")]
+use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
+
+/// A [N-Triples](https://www.w3.org/TR/n-triples/) streaming parser.
+///
+/// Support for [N-Triples-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-triples-star) is available behind the `rdf-star` feature and the [`NTriplesParser::with_quoted_triples`] option.
+///
+/// Count the number of people:
+/// ```
+/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxttl::NTriplesParser;
+///
+/// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
+/// <http://example.com/foo> <http://schema.org/name> "Foo" .
+/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
+/// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
+///
+/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
+/// let mut count = 0;
+/// for triple in NTriplesParser::new().parse_read(file.as_ref()) {
+///     let triple = triple?;
+///     if triple.predicate == rdf::TYPE && triple.object == schema_person.into() {
+///         count += 1;
+///     }
+/// }
+/// assert_eq!(2, count);
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[derive(Default)]
+#[must_use]
+pub struct NTriplesParser {
+    unchecked: bool,
+    #[cfg(feature = "rdf-star")]
+    with_quoted_triples: bool,
+}
+
+impl NTriplesParser {
+    /// Builds a new [`NTriplesParser`].
+    #[inline]
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Assumes the file is valid to make parsing faster.
+    ///
+    /// It will skip some validations.
+    ///
+    /// Note that if the file is actually not valid, then broken RDF might be emitted by the parser.
+    #[inline]
+    pub fn unchecked(mut self) -> Self {
+        self.unchecked = true;
+        self
+    }
+
+    /// Enables [N-Triples-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-triples-star).
+    #[cfg(feature = "rdf-star")]
+    #[inline]
+    pub fn with_quoted_triples(mut self) -> Self {
+        self.with_quoted_triples = true;
+        self
+    }
+
+    /// Parses a N-Triples file from a [`Read`] implementation.
+    ///
+    /// Count the number of people:
+    /// ```
+    /// use oxrdf::{NamedNodeRef, vocab::rdf};
+    /// use oxttl::NTriplesParser;
+    ///
+    /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
+    /// <http://example.com/foo> <http://schema.org/name> "Foo" .
+    /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
+ /// "Bar" ."#; + /// + /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; + /// let mut count = 0; + /// for triple in NTriplesParser::new().parse_read(file.as_ref()) { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn parse_read(self, read: R) -> FromReadNTriplesReader { + FromReadNTriplesReader { + inner: self.parse().parser.parse_read(read), + } + } + + /// Parses a N-Triples file from a [`AsyncRead`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxttl::NTriplesParser; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#" . + /// "Foo" . + /// . + /// "Bar" ."#; + /// + /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); + /// let mut count = 0; + /// let mut parser = NTriplesParser::new().parse_tokio_async_read(file.as_ref()); + /// while let Some(triple) = parser.next().await { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn parse_tokio_async_read( + self, + read: R, + ) -> FromTokioAsyncReadNTriplesReader { + FromTokioAsyncReadNTriplesReader { + inner: self.parse().parser.parse_tokio_async_read(read), + } + } + + /// Allows to parse a N-Triples file by using a low-level API. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxttl::NTriplesParser; + /// + /// let file: [&[u8]; 4] = [ + /// b" .\n", + /// b" \"Foo\" .\n", + /// b" .\n", + /// b" \"Bar\" .\n" + /// ]; + /// + /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; + /// let mut count = 0; + /// let mut parser = NTriplesParser::new().parse(); + /// let mut file_chunks = file.iter(); + /// while !parser.is_end() { + /// // We feed more data to the parser + /// if let Some(chunk) = file_chunks.next() { + /// parser.extend_from_slice(chunk); + /// } else { + /// parser.end(); // It's finished + /// } + /// // We read as many triples from the parser as possible + /// while let Some(triple) = parser.read_next() { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[allow(clippy::unused_self)] + pub fn parse(self) -> LowLevelNTriplesReader { + LowLevelNTriplesReader { + parser: NQuadsRecognizer::new_parser( + false, + #[cfg(feature = "rdf-star")] + self.with_quoted_triples, + self.unchecked, + ), + } + } +} + +/// Parses a N-Triples file from a [`Read`] implementation. Can be built using [`NTriplesParser::parse_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxttl::NTriplesParser; +/// +/// let file = br#" . +/// "Foo" . +/// . 
+/// "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// for triple in NTriplesParser::new().parse_read(file.as_ref()) { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct FromReadNTriplesReader { + inner: FromReadIterator, +} + +impl Iterator for FromReadNTriplesReader { + type Item = Result; + + fn next(&mut self) -> Option { + Some(self.inner.next()?.map(Into::into)) + } +} + +/// Parses a N-Triples file from a [`AsyncRead`] implementation. Can be built using [`NTriplesParser::parse_tokio_async_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxttl::NTriplesParser; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), oxttl::TurtleParseError> { +/// let file = br#" . +/// "Foo" . +/// . +/// "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); +/// let mut count = 0; +/// let mut parser = NTriplesParser::new().parse_tokio_async_read(file.as_ref()); +/// while let Some(triple) = parser.next().await { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct FromTokioAsyncReadNTriplesReader { + inner: FromTokioAsyncReadIterator, +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadNTriplesReader { + /// Reads the next triple or returns `None` if the file is finished. + pub async fn next(&mut self) -> Option> { + Some(self.inner.next().await?.map(Into::into)) + } +} + +/// Parses a N-Triples file by using a low-level API. Can be built using [`NTriplesParser::parse`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxttl::NTriplesParser; +/// +/// let file: [&[u8]; 4] = [ +/// b" .\n", +/// b" \"Foo\" .\n", +/// b" .\n", +/// b" \"Bar\" .\n" +/// ]; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// let mut parser = NTriplesParser::new().parse(); +/// let mut file_chunks = file.iter(); +/// while !parser.is_end() { +/// // We feed more data to the parser +/// if let Some(chunk) = file_chunks.next() { +/// parser.extend_from_slice(chunk); +/// } else { +/// parser.end(); // It's finished +/// } +/// // We read as many triples from the parser as possible +/// while let Some(triple) = parser.read_next() { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +pub struct LowLevelNTriplesReader { + parser: Parser, +} + +impl LowLevelNTriplesReader { + /// Adds some extra bytes to the parser. Should be called when [`read_next`](Self::read_next) returns [`None`] and there is still unread data. + pub fn extend_from_slice(&mut self, other: &[u8]) { + self.parser.extend_from_slice(other) + } + + /// Tell the parser that the file is finished. + /// + /// This triggers the parsing of the final bytes and might lead [`read_next`](Self::read_next) to return some extra values. 
+ pub fn end(&mut self) { + self.parser.end() + } + + /// Returns if the parsing is finished i.e. [`end`](Self::end) has been called and [`read_next`](Self::read_next) is always going to return `None`. + pub fn is_end(&self) -> bool { + self.parser.is_end() + } + + /// Attempt to parse a new triple from the already provided data. + /// + /// Returns [`None`] if the parsing is finished or more data is required. + /// If it is the case more data should be fed using [`extend_from_slice`](Self::extend_from_slice). + pub fn read_next(&mut self) -> Option> { + Some(self.parser.read_next()?.map(Into::into)) + } +} + +/// A [canonical](https://www.w3.org/TR/n-triples/#canonical-ntriples) [N-Triples](https://www.w3.org/TR/n-triples/) serializer. +/// +/// Support for [N-Triples-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-triples-star) is available behind the `rdf-star` feature. +/// +/// ``` +/// use oxrdf::{NamedNodeRef, TripleRef}; +/// use oxttl::NTriplesSerializer; +/// +/// let mut writer = NTriplesSerializer::new().serialize_to_write(Vec::new()); +/// writer.write_triple(TripleRef::new( +/// NamedNodeRef::new("http://example.com#me")?, +/// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, +/// NamedNodeRef::new("http://schema.org/Person")?, +/// ))?; +/// assert_eq!( +/// b" .\n", +/// writer.finish().as_slice() +/// ); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[derive(Default)] +#[must_use] +pub struct NTriplesSerializer; + +impl NTriplesSerializer { + /// Builds a new [`NTriplesSerializer`]. + #[inline] + pub fn new() -> Self { + Self + } + + /// Writes a N-Triples file to a [`Write`] implementation. + /// + /// ``` + /// use oxrdf::{NamedNodeRef, TripleRef}; + /// use oxttl::NTriplesSerializer; + /// + /// let mut writer = NTriplesSerializer::new().serialize_to_write(Vec::new()); + /// writer.write_triple(TripleRef::new( + /// NamedNodeRef::new("http://example.com#me")?, + /// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, + /// NamedNodeRef::new("http://schema.org/Person")?, + /// ))?; + /// assert_eq!( + /// b" .\n", + /// writer.finish().as_slice() + /// ); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn serialize_to_write(self, write: W) -> ToWriteNTriplesWriter { + ToWriteNTriplesWriter { + write, + writer: self.serialize(), + } + } + + /// Writes a N-Triples file to a [`AsyncWrite`] implementation. + /// + /// ``` + /// use oxrdf::{NamedNodeRef, TripleRef}; + /// use oxttl::NTriplesSerializer; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> std::io::Result<()> { + /// let mut writer = NTriplesSerializer::new().serialize_to_tokio_async_write(Vec::new()); + /// writer.write_triple(TripleRef::new( + /// NamedNodeRef::new_unchecked("http://example.com#me"), + /// NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + /// NamedNodeRef::new_unchecked("http://schema.org/Person"), + /// )).await?; + /// assert_eq!( + /// b" .\n", + /// writer.finish().as_slice() + /// ); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn serialize_to_tokio_async_write( + self, + write: W, + ) -> ToTokioAsyncWriteNTriplesWriter { + ToTokioAsyncWriteNTriplesWriter { + write, + writer: self.serialize(), + buffer: Vec::new(), + } + } + + /// Builds a low-level N-Triples writer. 
+ /// + /// ``` + /// use oxrdf::{NamedNodeRef, TripleRef}; + /// use oxttl::NTriplesSerializer; + /// + /// let mut buf = Vec::new(); + /// let mut writer = NTriplesSerializer::new().serialize(); + /// writer.write_triple(TripleRef::new( + /// NamedNodeRef::new("http://example.com#me")?, + /// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, + /// NamedNodeRef::new("http://schema.org/Person")?, + /// ), &mut buf)?; + /// assert_eq!( + /// b" .\n", + /// buf.as_slice() + /// ); + /// # Result::<_,Box>::Ok(()) + /// ``` + #[allow(clippy::unused_self)] + pub fn serialize(self) -> LowLevelNTriplesWriter { + LowLevelNTriplesWriter + } +} + +/// Writes a N-Triples file to a [`Write`] implementation. Can be built using [`NTriplesSerializer::serialize_to_write`]. +/// +/// ``` +/// use oxrdf::{NamedNodeRef, TripleRef}; +/// use oxttl::NTriplesSerializer; +/// +/// let mut writer = NTriplesSerializer::new().serialize_to_write(Vec::new()); +/// writer.write_triple(TripleRef::new( +/// NamedNodeRef::new("http://example.com#me")?, +/// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, +/// NamedNodeRef::new("http://schema.org/Person")?, +/// ))?; +/// assert_eq!( +/// b" .\n", +/// writer.finish().as_slice() +/// ); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct ToWriteNTriplesWriter { + write: W, + writer: LowLevelNTriplesWriter, +} + +impl ToWriteNTriplesWriter { + /// Writes an extra triple. + pub fn write_triple<'a>(&mut self, t: impl Into>) -> io::Result<()> { + self.writer.write_triple(t, &mut self.write) + } + + /// Ends the write process and returns the underlying [`Write`]. + pub fn finish(self) -> W { + self.write + } +} + +/// Writes a N-Triples file to a [`AsyncWrite`] implementation. Can be built using [`NTriplesSerializer::serialize_to_tokio_async_write`]. +/// +/// ``` +/// use oxrdf::{NamedNodeRef, TripleRef}; +/// use oxttl::NTriplesSerializer; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> std::io::Result<()> { +/// let mut writer = NTriplesSerializer::new().serialize_to_tokio_async_write(Vec::new()); +/// writer.write_triple(TripleRef::new( +/// NamedNodeRef::new_unchecked("http://example.com#me"), +/// NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), +/// NamedNodeRef::new_unchecked("http://schema.org/Person") +/// )).await?; +/// assert_eq!( +/// b" .\n", +/// writer.finish().as_slice() +/// ); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct ToTokioAsyncWriteNTriplesWriter { + write: W, + writer: LowLevelNTriplesWriter, + buffer: Vec, +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteNTriplesWriter { + /// Writes an extra triple. + pub async fn write_triple<'a>(&mut self, t: impl Into>) -> io::Result<()> { + self.writer.write_triple(t, &mut self.buffer)?; + self.write.write_all(&self.buffer).await?; + self.buffer.clear(); + Ok(()) + } + + /// Ends the write process and returns the underlying [`Write`]. + pub fn finish(self) -> W { + self.write + } +} + +/// Writes a N-Triples file by using a low-level API. Can be built using [`NTriplesSerializer::serialize`]. 
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, TripleRef};
+/// use oxttl::NTriplesSerializer;
+///
+/// let mut buf = Vec::new();
+/// let mut writer = NTriplesSerializer::new().serialize();
+/// writer.write_triple(TripleRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+/// ), &mut buf)?;
+/// assert_eq!(
+///     b"<http://example.com#me> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .\n",
+///     buf.as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+pub struct LowLevelNTriplesWriter;
+
+impl LowLevelNTriplesWriter {
+    /// Writes an extra triple.
+    #[allow(clippy::unused_self)]
+    pub fn write_triple<'a>(
+        &mut self,
+        t: impl Into<TripleRef<'a>>,
+        mut write: impl Write,
+    ) -> io::Result<()> {
+        writeln!(write, "{} .", t.into())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::oxrdf::{Literal, NamedNode};
+
+    #[test]
+    fn unchecked_parsing() {
+        let triples = NTriplesParser::new()
+            .unchecked()
+            .parse_read(r#"<foo> <bar> "baz"@toolonglangtag ."#.as_bytes())
+            .collect::<Result<Vec<_>, _>>()
+            .unwrap();
+        assert_eq!(
+            triples,
+            [Triple::new(
+                NamedNode::new_unchecked("foo"),
+                NamedNode::new_unchecked("bar"),
+                Literal::new_language_tagged_literal_unchecked("baz", "toolonglangtag"),
+            )]
+        )
+    }
+}
diff --git a/ng-oxigraph/src/oxttl/terse.rs b/ng-oxigraph/src/oxttl/terse.rs
new file mode 100644
index 0000000..205f348
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/terse.rs
@@ -0,0 +1,1072 @@
+//! Shared parser implementation for Turtle and TriG.
+
+use crate::oxrdf::vocab::{rdf, xsd};
+#[cfg(feature = "rdf-star")]
+use crate::oxrdf::Triple;
+use crate::oxrdf::{
+    BlankNode, GraphName, Literal, NamedNode, NamedOrBlankNode, Quad, Subject, Term,
+};
+use crate::oxttl::lexer::{resolve_local_name, N3Lexer, N3LexerMode, N3LexerOptions, N3Token};
+use crate::oxttl::toolkit::{Lexer, Parser, RuleRecognizer, RuleRecognizerError};
+use crate::oxttl::{MAX_BUFFER_SIZE, MIN_BUFFER_SIZE};
+use oxiri::Iri;
+use std::collections::hash_map::Iter;
+use std::collections::HashMap;
+
+pub struct TriGRecognizer {
+    stack: Vec<TriGState>,
+    cur_subject: Vec<Subject>,
+    cur_predicate: Vec<NamedNode>,
+    cur_object: Vec<Term>,
+    cur_graph: GraphName,
+}
+
+#[allow(clippy::partial_pub_fields)]
+pub struct TriGRecognizerContext {
+    pub lexer_options: N3LexerOptions,
+    pub with_graph_name: bool,
+    #[cfg(feature = "rdf-star")]
+    pub with_quoted_triples: bool,
+    prefixes: HashMap<String, Iri<String>>,
+}
+
+impl TriGRecognizerContext {
+    pub fn prefixes(&self) -> Iter<'_, String, Iri<String>> {
+        self.prefixes.iter()
+    }
+}
+
+impl RuleRecognizer for TriGRecognizer {
+    type TokenRecognizer = N3Lexer;
+    type Output = Quad;
+    type Context = TriGRecognizerContext;
+
+    fn error_recovery_state(mut self) -> Self {
+        self.stack.clear();
+        self.cur_subject.clear();
+        self.cur_predicate.clear();
+        self.cur_object.clear();
+        self.cur_graph = GraphName::DefaultGraph;
+        self
+    }
+
+    fn recognize_next(
+        mut self,
+        token: N3Token<'_>,
+        context: &mut TriGRecognizerContext,
+        results: &mut Vec<Quad>,
+        errors: &mut Vec<RuleRecognizerError>,
+    ) -> Self {
+        if let Some(rule) = self.stack.pop() {
+            match rule {
+                // [1g] trigDoc ::= (directive | block)*
+                // [2g] block ::= triplesOrGraph | wrappedGraph | triples2 | "GRAPH" labelOrSubject wrappedGraph
+                // [3] directive ::= prefixID | base | sparqlPrefix | sparqlBase
+                // [4] prefixID ::= '@prefix' PNAME_NS IRIREF '.'
+                // [5] base ::= '@base' IRIREF '.'
+ // [5s] sparqlPrefix ::= "PREFIX" PNAME_NS IRIREF + // [6s] sparqlBase ::= "BASE" IRIREF + TriGState::TriGDoc => { + self.cur_graph = GraphName::DefaultGraph; + self.stack.push(TriGState::TriGDoc); + match token { + N3Token::PlainKeyword(k) if k.eq_ignore_ascii_case("base") => { + self.stack.push(TriGState::BaseExpectIri); + self + } + N3Token::PlainKeyword(k) if k.eq_ignore_ascii_case("prefix") => { + self.stack.push(TriGState::PrefixExpectPrefix); + self + } + N3Token::LangTag("prefix") => { + self.stack.push(TriGState::ExpectDot); + self.stack.push(TriGState::PrefixExpectPrefix); + self + } + N3Token::LangTag("base") => { + self.stack.push(TriGState::ExpectDot); + self.stack.push(TriGState::BaseExpectIri); + self + } + N3Token::PlainKeyword(k) + if k.eq_ignore_ascii_case("graph") && context.with_graph_name => + { + self.stack.push(TriGState::WrappedGraph); + self.stack.push(TriGState::GraphName); + self + } + N3Token::Punctuation("{") if context.with_graph_name => { + self.stack.push(TriGState::WrappedGraph); + self.recognize_next(token, context, results, errors) + } + _ => { + self.stack.push(TriGState::TriplesOrGraph); + self.recognize_next(token, context, results, errors) + } + } + } + TriGState::ExpectDot => { + self.cur_subject.pop(); + if token == N3Token::Punctuation(".") { + self + } else { + errors.push("A dot is expected at the end of statements".into()); + self.recognize_next(token, context, results, errors) + } + } + TriGState::BaseExpectIri => { + if let N3Token::IriRef(iri) = token { + context.lexer_options.base_iri = Some(Iri::parse_unchecked(iri)); + self + } else { + self.error(errors, "The BASE keyword should be followed by an IRI") + } + } + TriGState::PrefixExpectPrefix => match token { + N3Token::PrefixedName { prefix, local, .. } if local.is_empty() => { + self.stack.push(TriGState::PrefixExpectIri { + name: prefix.to_owned(), + }); + self + } + _ => self.error( + errors, + "The PREFIX keyword should be followed by a prefix like 'ex:'", + ), + }, + TriGState::PrefixExpectIri { name } => { + if let N3Token::IriRef(iri) = token { + context.prefixes.insert(name, Iri::parse_unchecked(iri)); + self + } else { + self.error(errors, "The PREFIX declaration should be followed by a prefix and its value as an IRI") + } + } + // [3g] triplesOrGraph ::= labelOrSubject ( wrappedGraph | predicateObjectList '.' ) | quotedTriple predicateObjectList '.' + // [4g] triples2 ::= blankNodePropertyList predicateObjectList? '.' | collection predicateObjectList '.' 
+ TriGState::TriplesOrGraph => match token { + N3Token::IriRef(iri) => { + self.stack + .push(TriGState::WrappedGraphOrPredicateObjectList { + term: NamedNode::new_unchecked(iri).into(), + }); + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.stack + .push(TriGState::WrappedGraphOrPredicateObjectList { + term: t.into(), + }); + self + } + Err(e) => self.error(errors, e), + }, + N3Token::BlankNodeLabel(label) => { + self.stack + .push(TriGState::WrappedGraphOrPredicateObjectList { + term: BlankNode::new_unchecked(label).into(), + }); + self + } + N3Token::Punctuation("[") => { + self.stack + .push(TriGState::WrappedGraphBlankNodePropertyListCurrent); + self + } + N3Token::Punctuation("(") => { + self.stack.push(TriGState::ExpectDot); + self.stack.push(TriGState::PredicateObjectList); + self.stack.push(TriGState::SubjectCollectionBeginning); + self + } + #[cfg(feature = "rdf-star")] + N3Token::Punctuation("<<") if context.with_quoted_triples => { + self.stack.push(TriGState::ExpectDot); + self.stack.push(TriGState::PredicateObjectList); + self.stack.push(TriGState::SubjectQuotedTripleEnd); + self.stack.push(TriGState::QuotedObject); + self.stack.push(TriGState::Verb); + self.stack.push(TriGState::QuotedSubject); + self + } + _ => self.error(errors, "TOKEN is not a valid subject or graph name"), + }, + TriGState::WrappedGraphOrPredicateObjectList { term } => { + if token == N3Token::Punctuation("{") && context.with_graph_name { + self.cur_graph = term.into(); + self.stack.push(TriGState::WrappedGraph); + } else { + self.cur_subject.push(term.into()); + self.stack.push(TriGState::ExpectDot); + self.stack.push(TriGState::PredicateObjectList); + } + self.recognize_next(token, context, results, errors) + } + TriGState::WrappedGraphBlankNodePropertyListCurrent => { + if token == N3Token::Punctuation("]") { + self.stack + .push(TriGState::WrappedGraphOrPredicateObjectList { + term: BlankNode::default().into(), + }); + self + } else { + self.cur_subject.push(BlankNode::default().into()); + self.stack.push(TriGState::ExpectDot); + self.stack.push(TriGState::SubjectBlankNodePropertyListEnd); + self.stack.push(TriGState::PredicateObjectList); + self.recognize_next(token, context, results, errors) + } + } + TriGState::SubjectBlankNodePropertyListEnd => { + if token == N3Token::Punctuation("]") { + self.stack + .push(TriGState::SubjectBlankNodePropertyListAfter); + self + } else { + errors.push("blank node property lists should end with a ']'".into()); + self.stack + .push(TriGState::SubjectBlankNodePropertyListAfter); + self.recognize_next(token, context, results, errors) + } + } + TriGState::SubjectBlankNodePropertyListAfter => { + if matches!(token, N3Token::Punctuation("." 
| "}")) { + self.recognize_next(token, context, results, errors) + } else { + self.stack.push(TriGState::PredicateObjectList); + self.recognize_next(token, context, results, errors) + } + } + TriGState::SubjectCollectionBeginning => { + if let N3Token::Punctuation(")") = token { + self.cur_subject.push(rdf::NIL.into()); + self + } else { + let root = BlankNode::default(); + self.cur_subject.push(root.clone().into()); + self.cur_subject.push(root.into()); + self.cur_predicate.push(rdf::FIRST.into()); + self.stack.push(TriGState::SubjectCollectionPossibleEnd); + self.stack.push(TriGState::Object); + self.recognize_next(token, context, results, errors) + } + } + TriGState::SubjectCollectionPossibleEnd => { + let old = self.cur_subject.pop().unwrap(); + self.cur_object.pop(); + if let N3Token::Punctuation(")") = token { + self.cur_predicate.pop(); + results.push(Quad::new(old, rdf::REST, rdf::NIL, self.cur_graph.clone())); + self + } else { + let new = BlankNode::default(); + results.push(Quad::new( + old, + rdf::REST, + new.clone(), + self.cur_graph.clone(), + )); + self.cur_subject.push(new.into()); + self.stack.push(TriGState::ObjectCollectionPossibleEnd); + self.stack.push(TriGState::Object); + self.recognize_next(token, context, results, errors) + } + } + // [5g] wrappedGraph ::= '{' triplesBlock? '}' + // [6g] triplesBlock ::= triples ('.' triplesBlock?)? + TriGState::WrappedGraph => { + if token == N3Token::Punctuation("{") { + self.stack.push(TriGState::WrappedGraphPossibleEnd); + self.stack.push(TriGState::Triples); + self + } else { + self.error(errors, "The GRAPH keyword should be followed by a graph name and a value in '{'") + } + } + TriGState::WrappedGraphPossibleEnd => { + self.cur_subject.pop(); + match token { + N3Token::Punctuation("}") => self, + N3Token::Punctuation(".") => { + self.stack.push(TriGState::WrappedGraphPossibleEnd); + self.stack.push(TriGState::Triples); + self + } + _ => { + errors.push( + "A '}' or a '.' is expected at the end of a graph block".into(), + ); + self.recognize_next(token, context, results, errors) + } + } + } + // [6] triples ::= subject predicateObjectList | blankNodePropertyList predicateObjectList? 
+ // [10] subject ::= iri | BlankNode | collection | quotedTriple + TriGState::Triples => match token { + N3Token::Punctuation("}") => { + self.recognize_next(token, context, results, errors) // Early end + } + N3Token::Punctuation("[") => { + self.cur_subject.push(BlankNode::default().into()); + self.stack + .push(TriGState::TriplesBlankNodePropertyListCurrent); + self + } + N3Token::IriRef(iri) => { + self.cur_subject.push(NamedNode::new_unchecked(iri).into()); + self.stack.push(TriGState::PredicateObjectList); + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.cur_subject.push(t.into()); + self.stack.push(TriGState::PredicateObjectList); + self + } + Err(e) => self.error(errors, e), + }, + N3Token::BlankNodeLabel(label) => { + self.cur_subject + .push(BlankNode::new_unchecked(label).into()); + self.stack.push(TriGState::PredicateObjectList); + self + } + N3Token::Punctuation("(") => { + self.stack.push(TriGState::PredicateObjectList); + self.stack.push(TriGState::SubjectCollectionBeginning); + self + } + #[cfg(feature = "rdf-star")] + N3Token::Punctuation("<<") if context.with_quoted_triples => { + self.stack.push(TriGState::PredicateObjectList); + self.stack.push(TriGState::SubjectQuotedTripleEnd); + self.stack.push(TriGState::QuotedObject); + self.stack.push(TriGState::Verb); + self.stack.push(TriGState::QuotedSubject); + self + } + _ => self.error(errors, "TOKEN is not a valid RDF subject"), + }, + TriGState::TriplesBlankNodePropertyListCurrent => { + if token == N3Token::Punctuation("]") { + self.stack.push(TriGState::PredicateObjectList); + self + } else { + self.stack.push(TriGState::SubjectBlankNodePropertyListEnd); + self.stack.push(TriGState::PredicateObjectList); + self.recognize_next(token, context, results, errors) + } + } + // [7g] labelOrSubject ::= iri | BlankNode + TriGState::GraphName => match token { + N3Token::IriRef(iri) => { + self.cur_graph = NamedNode::new_unchecked(iri).into(); + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.cur_graph = t.into(); + self + } + Err(e) => self.error(errors, e), + }, + N3Token::BlankNodeLabel(label) => { + self.cur_graph = BlankNode::new_unchecked(label).into(); + self + } + N3Token::Punctuation("[") => { + self.stack.push(TriGState::GraphNameAnonEnd); + self + } + _ => self.error(errors, "TOKEN is not a valid graph name"), + }, + TriGState::GraphNameAnonEnd => { + if token == N3Token::Punctuation("]") { + self.cur_graph = BlankNode::default().into(); + self + } else { + self.error(errors, "Anonymous blank node with a property list are not allowed as graph name") + } + } + // [7] predicateObjectList ::= verb objectList (';' (verb objectList)?)* + TriGState::PredicateObjectList => { + self.stack.push(TriGState::PredicateObjectListEnd); + self.stack.push(TriGState::ObjectsList); + self.stack.push(TriGState::Verb); + self.recognize_next(token, context, results, errors) + } + TriGState::PredicateObjectListEnd => { + self.cur_predicate.pop(); + if token == N3Token::Punctuation(";") { + self.stack + .push(TriGState::PredicateObjectListPossibleContinuation); + self + } else { + self.recognize_next(token, context, results, errors) + } + } + TriGState::PredicateObjectListPossibleContinuation => { + if token == N3Token::Punctuation(";") { + 
self.stack + .push(TriGState::PredicateObjectListPossibleContinuation); + self + } else if matches!(token, N3Token::Punctuation("." | "}" | "]")) { + self.recognize_next(token, context, results, errors) + } else { + self.stack.push(TriGState::PredicateObjectListEnd); + self.stack.push(TriGState::ObjectsList); + self.stack.push(TriGState::Verb); + self.recognize_next(token, context, results, errors) + } + } + // [8] objectList ::= object annotation? ( ',' object annotation? )* + // [30t] annotation ::= '{|' predicateObjectList '|}' + TriGState::ObjectsList => { + self.stack.push(TriGState::ObjectsListEnd); + self.stack.push(TriGState::Object); + self.recognize_next(token, context, results, errors) + } + TriGState::ObjectsListEnd => match token { + N3Token::Punctuation(",") => { + self.cur_object.pop(); + self.stack.push(TriGState::ObjectsListEnd); + self.stack.push(TriGState::Object); + self + } + #[cfg(feature = "rdf-star")] + N3Token::Punctuation("{|") => { + let triple = Triple::new( + self.cur_subject.last().unwrap().clone(), + self.cur_predicate.last().unwrap().clone(), + self.cur_object.pop().unwrap(), + ); + self.cur_subject.push(triple.into()); + self.stack.push(TriGState::AnnotationEnd); + self.stack.push(TriGState::PredicateObjectList); + self + } + _ => { + self.cur_object.pop(); + self.recognize_next(token, context, results, errors) + } + }, + #[cfg(feature = "rdf-star")] + TriGState::AnnotationEnd => { + self.cur_subject.pop(); + self.stack.push(TriGState::ObjectsListAfterAnnotation); + if token == N3Token::Punctuation("|}") { + self + } else { + self.error(errors, "Annotations should end with '|}'") + } + } + #[cfg(feature = "rdf-star")] + TriGState::ObjectsListAfterAnnotation => { + if token == N3Token::Punctuation(",") { + self.stack.push(TriGState::ObjectsListEnd); + self.stack.push(TriGState::Object); + self + } else { + self.recognize_next(token, context, results, errors) + } + } + // [9] verb ::= predicate | 'a' + // [11] predicate ::= iri + TriGState::Verb => match token { + N3Token::PlainKeyword("a") => { + self.cur_predicate.push(rdf::TYPE.into()); + self + } + N3Token::IriRef(iri) => { + self.cur_predicate.push(NamedNode::new_unchecked(iri)); + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.cur_predicate.push(t); + self + } + Err(e) => self.error(errors, e), + }, + _ => self.error(errors, "TOKEN is not a valid predicate"), + }, + // [12] object ::= iri | BlankNode | collection | blankNodePropertyList | literal | quotedTriple + // [13] literal ::= RDFLiteral | NumericLiteral | BooleanLiteral + // [14] blank ::= BlankNode | collection + // [15] blankNodePropertyList ::= '[' predicateObjectList ']' + // [16] collection ::= '(' object* ')' + // [17] NumericLiteral ::= INTEGER | DECIMAL | DOUBLE + // [128s] RDFLiteral ::= String (LANGTAG | '^^' iri)? 
+ // [133s] BooleanLiteral ::= 'true' | 'false' + // [18] String ::= STRING_LITERAL_QUOTE | STRING_LITERAL_SINGLE_QUOTE | STRING_LITERAL_LONG_SINGLE_QUOTE | STRING_LITERAL_LONG_QUOTE + // [135s] iri ::= IRIREF | PrefixedName + // [136s] PrefixedName ::= PNAME_LN | PNAME_NS + // [137s] BlankNode ::= BLANK_NODE_LABEL | ANON + TriGState::Object => match token { + N3Token::IriRef(iri) => { + self.cur_object.push(NamedNode::new_unchecked(iri).into()); + self.emit_quad(results); + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.cur_object.push(t.into()); + self.emit_quad(results); + self + } + Err(e) => self.error(errors, e), + }, + N3Token::BlankNodeLabel(label) => { + self.cur_object.push(BlankNode::new_unchecked(label).into()); + self.emit_quad(results); + self + } + N3Token::Punctuation("[") => { + self.stack + .push(TriGState::ObjectBlankNodePropertyListCurrent); + self + } + N3Token::Punctuation("(") => { + self.stack.push(TriGState::ObjectCollectionBeginning); + self + } + N3Token::String(value) => { + self.stack + .push(TriGState::LiteralPossibleSuffix { value, emit: true }); + self + } + N3Token::Integer(v) => { + self.cur_object + .push(Literal::new_typed_literal(v, xsd::INTEGER).into()); + self.emit_quad(results); + self + } + N3Token::Decimal(v) => { + self.cur_object + .push(Literal::new_typed_literal(v, xsd::DECIMAL).into()); + self.emit_quad(results); + self + } + N3Token::Double(v) => { + self.cur_object + .push(Literal::new_typed_literal(v, xsd::DOUBLE).into()); + self.emit_quad(results); + self + } + N3Token::PlainKeyword("true") => { + self.cur_object + .push(Literal::new_typed_literal("true", xsd::BOOLEAN).into()); + self.emit_quad(results); + self + } + N3Token::PlainKeyword("false") => { + self.cur_object + .push(Literal::new_typed_literal("false", xsd::BOOLEAN).into()); + self.emit_quad(results); + self + } + #[cfg(feature = "rdf-star")] + N3Token::Punctuation("<<") if context.with_quoted_triples => { + self.stack + .push(TriGState::ObjectQuotedTripleEnd { emit: true }); + self.stack.push(TriGState::QuotedObject); + self.stack.push(TriGState::Verb); + self.stack.push(TriGState::QuotedSubject); + self + } + _ => self.error(errors, "TOKEN is not a valid RDF object"), + }, + TriGState::ObjectBlankNodePropertyListCurrent => { + if token == N3Token::Punctuation("]") { + self.cur_object.push(BlankNode::default().into()); + self.emit_quad(results); + self + } else { + self.cur_subject.push(BlankNode::default().into()); + self.stack.push(TriGState::ObjectBlankNodePropertyListEnd); + self.stack.push(TriGState::PredicateObjectList); + self.recognize_next(token, context, results, errors) + } + } + TriGState::ObjectBlankNodePropertyListEnd => { + if token == N3Token::Punctuation("]") { + self.cur_object.push(self.cur_subject.pop().unwrap().into()); + self.emit_quad(results); + self + } else { + self.error(errors, "blank node property lists should end with a ']'") + } + } + TriGState::ObjectCollectionBeginning => { + if let N3Token::Punctuation(")") = token { + self.cur_object.push(rdf::NIL.into()); + self.emit_quad(results); + self + } else { + let root = BlankNode::default(); + self.cur_object.push(root.clone().into()); + self.emit_quad(results); + self.cur_subject.push(root.into()); + self.cur_predicate.push(rdf::FIRST.into()); + self.stack.push(TriGState::ObjectCollectionPossibleEnd); + self.stack.push(TriGState::Object); + 
self.recognize_next(token, context, results, errors) + } + } + TriGState::ObjectCollectionPossibleEnd => { + let old = self.cur_subject.pop().unwrap(); + self.cur_object.pop(); + if let N3Token::Punctuation(")") = token { + self.cur_predicate.pop(); + results.push(Quad::new(old, rdf::REST, rdf::NIL, self.cur_graph.clone())); + self + } else { + let new = BlankNode::default(); + results.push(Quad::new( + old, + rdf::REST, + new.clone(), + self.cur_graph.clone(), + )); + self.cur_subject.push(new.into()); + self.stack.push(TriGState::ObjectCollectionPossibleEnd); + self.stack.push(TriGState::Object); + self.recognize_next(token, context, results, errors) + } + } + TriGState::LiteralPossibleSuffix { value, emit } => match token { + N3Token::LangTag(lang) => { + self.cur_object.push( + Literal::new_language_tagged_literal_unchecked( + value, + lang.to_ascii_lowercase(), + ) + .into(), + ); + if emit { + self.emit_quad(results); + } + self + } + N3Token::Punctuation("^^") => { + self.stack + .push(TriGState::LiteralExpectDatatype { value, emit }); + self + } + _ => { + self.cur_object + .push(Literal::new_simple_literal(value).into()); + if emit { + self.emit_quad(results); + } + self.recognize_next(token, context, results, errors) + } + }, + TriGState::LiteralExpectDatatype { value, emit } => match token { + N3Token::IriRef(datatype) => { + self.cur_object.push( + Literal::new_typed_literal(value, NamedNode::new_unchecked(datatype)) + .into(), + ); + if emit { + self.emit_quad(results); + } + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.cur_object + .push(Literal::new_typed_literal(value, t).into()); + if emit { + self.emit_quad(results); + } + self + } + Err(e) => self.error(errors, e), + }, + _ => self + .error(errors, "Expecting a datatype IRI after ^^, found TOKEN") + .recognize_next(token, context, results, errors), + }, + // [27t] quotedTriple ::= '<<' qtSubject verb qtObject '>>' + #[cfg(feature = "rdf-star")] + TriGState::SubjectQuotedTripleEnd => { + let triple = Triple::new( + self.cur_subject.pop().unwrap(), + self.cur_predicate.pop().unwrap(), + self.cur_object.pop().unwrap(), + ); + self.cur_subject.push(triple.into()); + if token == N3Token::Punctuation(">>") { + self + } else { + self.error( + errors, + "Expecting '>>' to close a quoted triple, found TOKEN", + ) + } + } + #[cfg(feature = "rdf-star")] + TriGState::ObjectQuotedTripleEnd { emit } => { + let triple = Triple::new( + self.cur_subject.pop().unwrap(), + self.cur_predicate.pop().unwrap(), + self.cur_object.pop().unwrap(), + ); + self.cur_object.push(triple.into()); + if emit { + self.emit_quad(results); + } + if token == N3Token::Punctuation(">>") { + self + } else { + self.error( + errors, + "Expecting '>>' to close a quoted triple, found TOKEN", + ) + } + } + // [28t] qtSubject ::= iri | BlankNode | quotedTriple + #[cfg(feature = "rdf-star")] + TriGState::QuotedSubject => match token { + N3Token::Punctuation("[") => { + self.cur_subject.push(BlankNode::default().into()); + self.stack.push(TriGState::QuotedAnonEnd); + self + } + N3Token::IriRef(iri) => { + self.cur_subject.push(NamedNode::new_unchecked(iri).into()); + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.cur_subject.push(t.into()); + self + } + Err(e) => 
self.error(errors, e), + }, + N3Token::BlankNodeLabel(label) => { + self.cur_subject + .push(BlankNode::new_unchecked(label).into()); + self + } + N3Token::Punctuation("<<") => { + self.stack.push(TriGState::SubjectQuotedTripleEnd); + self.stack.push(TriGState::QuotedObject); + self.stack.push(TriGState::Verb); + self.stack.push(TriGState::QuotedSubject); + self + } + _ => self.error( + errors, + "TOKEN is not a valid RDF quoted triple subject: TOKEN", + ), + }, + // [29t] qtObject ::= iri | BlankNode | literal | quotedTriple + #[cfg(feature = "rdf-star")] + TriGState::QuotedObject => match token { + N3Token::Punctuation("[") => { + self.cur_object.push(BlankNode::default().into()); + self.stack.push(TriGState::QuotedAnonEnd); + self + } + N3Token::IriRef(iri) => { + self.cur_object.push(NamedNode::new_unchecked(iri).into()); + self + } + N3Token::PrefixedName { + prefix, + local, + might_be_invalid_iri, + } => match resolve_local_name( + prefix, + &local, + might_be_invalid_iri, + &context.prefixes, + ) { + Ok(t) => { + self.cur_object.push(t.into()); + self + } + Err(e) => self.error(errors, e), + }, + N3Token::BlankNodeLabel(label) => { + self.cur_object.push(BlankNode::new_unchecked(label).into()); + self + } + N3Token::String(value) => { + self.stack + .push(TriGState::LiteralPossibleSuffix { value, emit: false }); + self + } + N3Token::Integer(v) => { + self.cur_object + .push(Literal::new_typed_literal(v, xsd::INTEGER).into()); + self + } + N3Token::Decimal(v) => { + self.cur_object + .push(Literal::new_typed_literal(v, xsd::DECIMAL).into()); + self + } + N3Token::Double(v) => { + self.cur_object + .push(Literal::new_typed_literal(v, xsd::DOUBLE).into()); + self + } + N3Token::PlainKeyword("true") => { + self.cur_object + .push(Literal::new_typed_literal("true", xsd::BOOLEAN).into()); + self + } + N3Token::PlainKeyword("false") => { + self.cur_object + .push(Literal::new_typed_literal("false", xsd::BOOLEAN).into()); + self + } + N3Token::Punctuation("<<") => { + self.stack + .push(TriGState::ObjectQuotedTripleEnd { emit: false }); + self.stack.push(TriGState::QuotedObject); + self.stack.push(TriGState::Verb); + self.stack.push(TriGState::QuotedSubject); + self + } + _ => self.error(errors, "TOKEN is not a valid RDF quoted triple object"), + }, + #[cfg(feature = "rdf-star")] + TriGState::QuotedAnonEnd => { + if token == N3Token::Punctuation("]") { + self + } else { + self.error(errors, "Anonymous blank node with a property list are not allowed in quoted triples") + } + } + } + } else if token == N3Token::Punctuation(".") || token == N3Token::Punctuation("}") { + // TODO: be smarter depending if we are in '{' or not + self.stack.push(TriGState::TriGDoc); + self + } else { + self + } + } + + fn recognize_end( + mut self, + _context: &mut TriGRecognizerContext, + results: &mut Vec, + errors: &mut Vec, + ) { + match &*self.stack { + [] | [TriGState::TriGDoc] => { + debug_assert!( + self.cur_subject.is_empty(), + "The cur_subject stack must be empty if the state stack is empty" + ); + debug_assert!( + self.cur_predicate.is_empty(), + "The cur_predicate stack must be empty if the state stack is empty" + ); + debug_assert!( + self.cur_object.is_empty(), + "The cur_object stack must be empty if the state stack is empty" + ); + } + [.., TriGState::LiteralPossibleSuffix { value, emit: true }] => { + self.cur_object + .push(Literal::new_simple_literal(value).into()); + self.emit_quad(results); + errors.push("Triples should be followed by a dot".into()) + } + _ => errors.push("Unexpected 
end".into()), // TODO + } + } + + fn lexer_options(context: &TriGRecognizerContext) -> &N3LexerOptions { + &context.lexer_options + } +} + +impl TriGRecognizer { + pub fn new_parser( + with_graph_name: bool, + #[cfg(feature = "rdf-star")] with_quoted_triples: bool, + unchecked: bool, + base_iri: Option>, + prefixes: HashMap>, + ) -> Parser { + Parser::new( + Lexer::new( + N3Lexer::new(N3LexerMode::Turtle, unchecked), + MIN_BUFFER_SIZE, + MAX_BUFFER_SIZE, + true, + Some(b"#"), + ), + Self { + stack: vec![TriGState::TriGDoc], + cur_subject: Vec::new(), + cur_predicate: Vec::new(), + cur_object: Vec::new(), + cur_graph: GraphName::DefaultGraph, + }, + TriGRecognizerContext { + with_graph_name, + #[cfg(feature = "rdf-star")] + with_quoted_triples, + prefixes, + lexer_options: N3LexerOptions { base_iri }, + }, + ) + } + + #[must_use] + fn error( + mut self, + errors: &mut Vec, + msg: impl Into, + ) -> Self { + errors.push(msg.into()); + self.stack.clear(); + self.cur_subject.clear(); + self.cur_predicate.clear(); + self.cur_object.clear(); + self.cur_graph = GraphName::DefaultGraph; + self + } + + fn emit_quad(&mut self, results: &mut Vec) { + results.push(Quad::new( + self.cur_subject.last().unwrap().clone(), + self.cur_predicate.last().unwrap().clone(), + self.cur_object.last().unwrap().clone(), + self.cur_graph.clone(), + )); + } +} + +#[derive(Debug)] +enum TriGState { + TriGDoc, + ExpectDot, + BaseExpectIri, + PrefixExpectPrefix, + PrefixExpectIri { + name: String, + }, + TriplesOrGraph, + WrappedGraphBlankNodePropertyListCurrent, + SubjectBlankNodePropertyListEnd, + SubjectBlankNodePropertyListAfter, + SubjectCollectionBeginning, + SubjectCollectionPossibleEnd, + WrappedGraphOrPredicateObjectList { + term: NamedOrBlankNode, + }, + WrappedGraph, + WrappedGraphPossibleEnd, + GraphName, + GraphNameAnonEnd, + Triples, + TriplesBlankNodePropertyListCurrent, + PredicateObjectList, + PredicateObjectListEnd, + PredicateObjectListPossibleContinuation, + ObjectsList, + ObjectsListEnd, + #[cfg(feature = "rdf-star")] + AnnotationEnd, + #[cfg(feature = "rdf-star")] + ObjectsListAfterAnnotation, + Verb, + Object, + ObjectBlankNodePropertyListCurrent, + ObjectBlankNodePropertyListEnd, + ObjectCollectionBeginning, + ObjectCollectionPossibleEnd, + LiteralPossibleSuffix { + value: String, + emit: bool, + }, + LiteralExpectDatatype { + value: String, + emit: bool, + }, + #[cfg(feature = "rdf-star")] + SubjectQuotedTripleEnd, + #[cfg(feature = "rdf-star")] + ObjectQuotedTripleEnd { + emit: bool, + }, + #[cfg(feature = "rdf-star")] + QuotedSubject, + #[cfg(feature = "rdf-star")] + QuotedObject, + #[cfg(feature = "rdf-star")] + QuotedAnonEnd, +} diff --git a/ng-oxigraph/src/oxttl/toolkit/error.rs b/ng-oxigraph/src/oxttl/toolkit/error.rs new file mode 100644 index 0000000..083adef --- /dev/null +++ b/ng-oxigraph/src/oxttl/toolkit/error.rs @@ -0,0 +1,97 @@ +use std::ops::Range; +use std::{fmt, io}; + +/// A position in a text i.e. a `line` number starting from 0, a `column` number starting from 0 (in number of code points) and a global file `offset` starting from 0 (in number of bytes). +#[derive(Eq, PartialEq, Debug, Clone, Copy)] +pub struct TextPosition { + pub line: u64, + pub column: u64, + pub offset: u64, +} + +/// An error in the syntax of the parsed file. +/// +/// It is composed of a message and a byte range in the input. 
+#[derive(Debug, thiserror::Error)]
+pub struct TurtleSyntaxError {
+    pub(super) location: Range<TextPosition>,
+    pub(super) message: String,
+}
+
+impl TurtleSyntaxError {
+    /// The location of the error inside of the file.
+    #[inline]
+    pub fn location(&self) -> Range<TextPosition> {
+        self.location.clone()
+    }
+
+    /// The error message.
+    #[inline]
+    pub fn message(&self) -> &str {
+        &self.message
+    }
+}
+
+impl fmt::Display for TurtleSyntaxError {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.location.start.offset + 1 >= self.location.end.offset {
+            write!(
+                f,
+                "Parser error at line {} column {}: {}",
+                self.location.start.line + 1,
+                self.location.start.column + 1,
+                self.message
+            )
+        } else if self.location.start.line == self.location.end.line {
+            write!(
+                f,
+                "Parser error at line {} between columns {} and {}: {}",
+                self.location.start.line + 1,
+                self.location.start.column + 1,
+                self.location.end.column + 1,
+                self.message
+            )
+        } else {
+            write!(
+                f,
+                "Parser error between line {} column {} and line {} column {}: {}",
+                self.location.start.line + 1,
+                self.location.start.column + 1,
+                self.location.end.line + 1,
+                self.location.end.column + 1,
+                self.message
+            )
+        }
+    }
+}
+
+impl From<TurtleSyntaxError> for io::Error {
+    #[inline]
+    fn from(error: TurtleSyntaxError) -> Self {
+        Self::new(io::ErrorKind::InvalidData, error)
+    }
+}
+
+/// A parsing error.
+///
+/// It is the union of [`TurtleSyntaxError`] and [`io::Error`].
+#[derive(Debug, thiserror::Error)]
+pub enum TurtleParseError {
+    /// I/O error during parsing (file not found...).
+    #[error(transparent)]
+    Io(#[from] io::Error),
+    /// An error in the file syntax.
+    #[error(transparent)]
+    Syntax(#[from] TurtleSyntaxError),
+}
+
+impl From<TurtleParseError> for io::Error {
+    #[inline]
+    fn from(error: TurtleParseError) -> Self {
+        match error {
+            TurtleParseError::Syntax(e) => e.into(),
+            TurtleParseError::Io(e) => e,
+        }
+    }
+}
diff --git a/ng-oxigraph/src/oxttl/toolkit/lexer.rs b/ng-oxigraph/src/oxttl/toolkit/lexer.rs
new file mode 100644
index 0000000..b1835e9
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/toolkit/lexer.rs
@@ -0,0 +1,432 @@
+use crate::oxttl::toolkit::error::{TextPosition, TurtleSyntaxError};
+use memchr::{memchr2, memchr2_iter};
+use std::borrow::Cow;
+use std::cmp::min;
+use std::io::{self, Read};
+use std::ops::{Range, RangeInclusive};
+use std::str;
+#[cfg(feature = "async-tokio")]
+use tokio::io::{AsyncRead, AsyncReadExt};
+
+pub trait TokenRecognizer {
+    type Token<'a>
+    where
+        Self: 'a;
+    type Options: Default;
+
+    fn recognize_next_token<'a>(
+        &mut self,
+        data: &'a [u8],
+        is_ending: bool,
+        config: &Self::Options,
+    ) -> Option<(usize, Result<Self::Token<'a>, TokenRecognizerError>)>;
+}
+
+pub struct TokenRecognizerError {
+    pub location: Range<usize>,
+    pub message: String,
+}
+
+impl<S: Into<String>> From<(Range<usize>, S)> for TokenRecognizerError {
+    fn from((location, message): (Range<usize>, S)) -> Self {
+        Self {
+            location,
+            message: message.into(),
+        }
+    }
+}
+
+#[allow(clippy::range_plus_one)]
+impl<S: Into<String>> From<(RangeInclusive<usize>, S)> for TokenRecognizerError {
+    fn from((location, message): (RangeInclusive<usize>, S)) -> Self {
+        (*location.start()..*location.end() + 1, message).into()
+    }
+}
+
+impl<S: Into<String>> From<(usize, S)> for TokenRecognizerError {
+    fn from((location, message): (usize, S)) -> Self {
+        (location..=location, message).into()
+    }
+}
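+// An illustrative `TokenRecognizer` (a hypothetical sketch, not used by this
+// crate): it yields runs of non-whitespace bytes as tokens. Real recognizers
+// such as `N3Lexer` follow the same contract: consume a prefix of `data`, or
+// return `None` to ask the `Lexer` below for more bytes.
+#[cfg(test)]
+mod word_recognizer_sketch {
+    use super::*;
+
+    struct WordRecognizer;
+
+    impl TokenRecognizer for WordRecognizer {
+        type Token<'a> = &'a str where Self: 'a;
+        type Options = ();
+
+        fn recognize_next_token<'a>(
+            &mut self,
+            data: &'a [u8],
+            is_ending: bool,
+            _options: &(),
+        ) -> Option<(usize, Result<&'a str, TokenRecognizerError>)> {
+            // The `Lexer` has already skipped whitespace, so `data` starts on a token.
+            let end = match data.iter().position(|b| b.is_ascii_whitespace()) {
+                Some(end) => end,
+                None if is_ending && !data.is_empty() => data.len(),
+                None => return None, // the token might continue: ask for more data
+            };
+            Some((
+                end,
+                str::from_utf8(&data[..end]).map_err(|e| (0..end, e.to_string()).into()),
+            ))
+        }
+    }
+
+    #[test]
+    fn words_are_recognized() {
+        let mut lexer = Lexer::new(WordRecognizer, 64, 1024, true, None);
+        lexer.extend_from_slice(b"one two");
+        lexer.end();
+        assert_eq!(lexer.read_next(&()).unwrap().unwrap(), "one");
+        assert_eq!(lexer.read_next(&()).unwrap().unwrap(), "two");
+        assert!(lexer.read_next(&()).is_none());
+    }
+}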
diff --git a/ng-oxigraph/src/oxttl/toolkit/lexer.rs b/ng-oxigraph/src/oxttl/toolkit/lexer.rs
new file mode 100644
index 0000000..b1835e9
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/toolkit/lexer.rs
@@ -0,0 +1,432 @@
+use crate::oxttl::toolkit::error::{TextPosition, TurtleSyntaxError};
+use memchr::{memchr2, memchr2_iter};
+use std::borrow::Cow;
+use std::cmp::min;
+use std::io::{self, Read};
+use std::ops::{Range, RangeInclusive};
+use std::str;
+#[cfg(feature = "async-tokio")]
+use tokio::io::{AsyncRead, AsyncReadExt};
+
+pub trait TokenRecognizer {
+    type Token<'a>
+    where
+        Self: 'a;
+    type Options: Default;
+
+    fn recognize_next_token<'a>(
+        &mut self,
+        data: &'a [u8],
+        is_ending: bool,
+        config: &Self::Options,
+    ) -> Option<(usize, Result<Self::Token<'a>, TokenRecognizerError>)>;
+}
+
+pub struct TokenRecognizerError {
+    pub location: Range<usize>,
+    pub message: String,
+}
+
+impl<S: Into<String>> From<(Range<usize>, S)> for TokenRecognizerError {
+    fn from((location, message): (Range<usize>, S)) -> Self {
+        Self {
+            location,
+            message: message.into(),
+        }
+    }
+}
+
+#[allow(clippy::range_plus_one)]
+impl<S: Into<String>> From<(RangeInclusive<usize>, S)> for TokenRecognizerError {
+    fn from((location, message): (RangeInclusive<usize>, S)) -> Self {
+        (*location.start()..*location.end() + 1, message).into()
+    }
+}
+
+impl<S: Into<String>> From<(usize, S)> for TokenRecognizerError {
+    fn from((location, message): (usize, S)) -> Self {
+        (location..=location, message).into()
+    }
+}
+
+pub struct Lexer<R: TokenRecognizer> {
+    parser: R,
+    data: Vec<u8>,
+    position: Position,
+    previous_position: Position, // Lexer position before the last emitted token
+    is_ending: bool,
+    min_buffer_size: usize,
+    max_buffer_size: usize,
+    is_line_jump_whitespace: bool,
+    line_comment_start: Option<&'static [u8]>,
+}
+
+#[derive(Clone, Copy)]
+struct Position {
+    line_start_buffer_offset: usize,
+    buffer_offset: usize,
+    global_offset: u64,
+    global_line: u64,
+}
+
+impl<R: TokenRecognizer> Lexer<R> {
+    pub fn new(
+        parser: R,
+        min_buffer_size: usize,
+        max_buffer_size: usize,
+        is_line_jump_whitespace: bool,
+        line_comment_start: Option<&'static [u8]>,
+    ) -> Self {
+        Self {
+            parser,
+            data: Vec::new(),
+            position: Position {
+                line_start_buffer_offset: 0,
+                buffer_offset: 0,
+                global_offset: 0,
+                global_line: 0,
+            },
+            previous_position: Position {
+                line_start_buffer_offset: 0,
+                buffer_offset: 0,
+                global_offset: 0,
+                global_line: 0,
+            },
+            is_ending: false,
+            min_buffer_size,
+            max_buffer_size,
+            is_line_jump_whitespace,
+            line_comment_start,
+        }
+    }
+
+    pub fn extend_from_slice(&mut self, other: &[u8]) {
+        self.shrink_data();
+        self.data.extend_from_slice(other);
+    }
+
+    #[inline]
+    pub fn end(&mut self) {
+        self.is_ending = true;
+    }
+
+    pub fn extend_from_read(&mut self, read: &mut impl Read) -> io::Result<()> {
+        self.shrink_data();
+        if self.data.len() == self.max_buffer_size {
+            return Err(io::Error::new(
+                io::ErrorKind::OutOfMemory,
+                format!(
+                    "Reached the buffer maximal size of {}",
+                    self.max_buffer_size
+                ),
+            ));
+        }
+        let min_end = min(self.data.len() + self.min_buffer_size, self.max_buffer_size);
+        let new_start = self.data.len();
+        self.data.resize(min_end, 0);
+        if self.data.len() < self.data.capacity() {
+            // We keep extending to have as much space as available without reallocation
+            self.data.resize(self.data.capacity(), 0);
+        }
+        let read = read.read(&mut self.data[new_start..])?;
+        self.data.truncate(new_start + read);
+        self.is_ending = read == 0;
+        Ok(())
+    }
+
+    #[cfg(feature = "async-tokio")]
+    pub async fn extend_from_tokio_async_read(
+        &mut self,
+        read: &mut (impl AsyncRead + Unpin),
+    ) -> io::Result<()> {
+        self.shrink_data();
+        if self.data.len() == self.max_buffer_size {
+            return Err(io::Error::new(
+                io::ErrorKind::OutOfMemory,
+                format!(
+                    "Reached the buffer maximal size of {}",
+                    self.max_buffer_size
+                ),
+            ));
+        }
+        let min_end = min(self.data.len() + self.min_buffer_size, self.max_buffer_size);
+        let new_start = self.data.len();
+        self.data.resize(min_end, 0);
+        if self.data.len() < self.data.capacity() {
+            // We keep extending to have as much space as available without reallocation
+            self.data.resize(self.data.capacity(), 0);
+        }
+        let read = read.read(&mut self.data[new_start..]).await?;
+        self.data.truncate(new_start + read);
+        self.is_ending = read == 0;
+        Ok(())
+    }
+
+    #[allow(clippy::unwrap_in_result)]
+    pub fn read_next(
+        &mut self,
+        options: &R::Options,
+    ) -> Option<Result<R::Token<'_>, TurtleSyntaxError>> {
+        self.skip_whitespaces_and_comments()?;
+        self.previous_position = self.position;
+        let Some((consumed, result)) = self.parser.recognize_next_token(
+            &self.data[self.position.buffer_offset..],
+            self.is_ending,
+            options,
+        ) else {
+            return if self.is_ending {
+                if self.position.buffer_offset == self.data.len() {
+                    None // We have finished
+                } else {
+                    let (new_line_jumps, new_line_start) =
+                        Self::find_number_of_line_jumps_and_start_of_last_line(
+                            &self.data[self.position.buffer_offset..],
+                        );
+                    if new_line_jumps > 0 {
+                        self.position.line_start_buffer_offset =
+                            self.position.buffer_offset + new_line_start;
+                    }
+                    self.position.global_offset +=
+                        u64::try_from(self.data.len() - self.position.buffer_offset).unwrap();
+                    self.position.buffer_offset = self.data.len();
+                    self.position.global_line += new_line_jumps;
+                    let new_position = TextPosition {
+                        line: self.position.global_line,
+                        column: Self::column_from_bytes(
+                            &self.data[self.position.line_start_buffer_offset..],
+                        ),
+                        offset: self.position.global_offset,
+                    };
+                    let error = TurtleSyntaxError {
+                        location: new_position..new_position,
+                        message: "Unexpected end of file".into(),
+                    };
+                    self.position.buffer_offset = self.data.len(); // We consume everything
+                    Some(Err(error))
+                }
+            } else {
+                None
+            };
+        };
+        debug_assert!(
+            consumed > 0,
+            "The lexer must consume at least one byte each time"
+        );
+        debug_assert!(
+            self.position.buffer_offset + consumed <= self.data.len(),
+            "The lexer tried to consume {consumed} bytes but only {} bytes are readable",
+            self.data.len() - self.position.buffer_offset
+        );
+        let (new_line_jumps, new_line_start) =
+            Self::find_number_of_line_jumps_and_start_of_last_line(
+                &self.data[self.position.buffer_offset..self.position.buffer_offset + consumed],
+            );
+        if new_line_jumps > 0 {
+            self.position.line_start_buffer_offset = self.position.buffer_offset + new_line_start;
+        }
+        self.position.buffer_offset += consumed;
+        self.position.global_offset += u64::try_from(consumed).unwrap();
+        self.position.global_line += new_line_jumps;
+        Some(result.map_err(|e| TurtleSyntaxError {
+            location: self.location_from_buffer_offset_range(e.location),
+            message: e.message,
+        }))
+    }
+
+    pub fn location_from_buffer_offset_range(
+        &self,
+        offset_range: Range<usize>,
+    ) -> Range<TextPosition> {
+        let start_offset = self.previous_position.buffer_offset + offset_range.start;
+        let (start_extra_line_jumps, start_line_start) =
+            Self::find_number_of_line_jumps_and_start_of_last_line(
+                &self.data[self.previous_position.buffer_offset..start_offset],
+            );
+        let start_line_start = if start_extra_line_jumps > 0 {
+            start_line_start + self.previous_position.buffer_offset
+        } else {
+            self.previous_position.line_start_buffer_offset
+        };
+        let end_offset = self.previous_position.buffer_offset + offset_range.end;
+        let (end_extra_line_jumps, end_line_start) =
+            Self::find_number_of_line_jumps_and_start_of_last_line(
+                &self.data[self.previous_position.buffer_offset..end_offset],
+            );
+        let end_line_start = if end_extra_line_jumps > 0 {
+            end_line_start + self.previous_position.buffer_offset
+        } else {
+            self.previous_position.line_start_buffer_offset
+        };
+        TextPosition {
+            line: self.previous_position.global_line + start_extra_line_jumps,
+            column: Self::column_from_bytes(&self.data[start_line_start..start_offset]),
+            offset: self.previous_position.global_offset
+                + u64::try_from(offset_range.start).unwrap(),
+        }..TextPosition {
+            line: self.previous_position.global_line + end_extra_line_jumps,
+            column: Self::column_from_bytes(&self.data[end_line_start..end_offset]),
+            offset: self.previous_position.global_offset + u64::try_from(offset_range.end).unwrap(),
+        }
+    }
+
+    pub fn last_token_location(&self) -> Range<TextPosition> {
+        TextPosition {
+            line: self.previous_position.global_line,
+            column: Self::column_from_bytes(
+                &self.data[self.previous_position.line_start_buffer_offset
+                    ..self.previous_position.buffer_offset],
+            ),
+            offset: self.previous_position.global_offset,
+        }..TextPosition {
+            line: self.position.global_line,
+            column: Self::column_from_bytes(
+                &self.data[self.position.line_start_buffer_offset..self.position.buffer_offset],
+            ),
+            offset: self.position.global_offset,
+        }
+    }
+
+    pub fn last_token_source(&self) -> Cow<'_, str> {
+        String::from_utf8_lossy(
+            &self.data[self.previous_position.buffer_offset..self.position.buffer_offset],
+ ) + } + + pub fn is_end(&self) -> bool { + self.is_ending && self.data.len() == self.position.buffer_offset + } + + #[allow(clippy::unwrap_in_result)] + fn skip_whitespaces_and_comments(&mut self) -> Option<()> { + loop { + self.skip_whitespaces()?; + + let buf = &self.data[self.position.buffer_offset..]; + if let Some(line_comment_start) = self.line_comment_start { + if buf.starts_with(line_comment_start) { + // Comment + if let Some(end) = memchr2(b'\r', b'\n', &buf[line_comment_start.len()..]) { + let mut end_position = line_comment_start.len() + end; + if buf.get(end_position).copied() == Some(b'\r') { + // We look for \n for Windows line end style + if let Some(c) = buf.get(end_position + 1) { + if *c == b'\n' { + end_position += 1; + } + } else if !self.is_ending { + return None; // We need to read more + } + } + let comment_size = end_position + 1; + self.position.buffer_offset += comment_size; + self.position.line_start_buffer_offset = self.position.buffer_offset; + self.position.global_offset += u64::try_from(comment_size).unwrap(); + self.position.global_line += 1; + continue; + } + if self.is_ending { + self.position.buffer_offset = self.data.len(); // EOF + return Some(()); + } + return None; // We need more data + } + } + return Some(()); + } + } + + fn skip_whitespaces(&mut self) -> Option<()> { + if self.is_line_jump_whitespace { + let mut i = self.position.buffer_offset; + while let Some(c) = self.data.get(i) { + match c { + b' ' | b'\t' => { + self.position.buffer_offset += 1; + self.position.global_offset += 1; + } + b'\r' => { + // We look for \n for Windows line end style + let mut increment: u8 = 1; + if let Some(c) = self.data.get(i + 1) { + if *c == b'\n' { + increment += 1; + i += 1; + } + } else if !self.is_ending { + return None; // We need to read more + } + self.position.buffer_offset += usize::from(increment); + self.position.line_start_buffer_offset = self.position.buffer_offset; + self.position.global_offset += u64::from(increment); + self.position.global_line += 1; + } + b'\n' => { + self.position.buffer_offset += 1; + self.position.line_start_buffer_offset = self.position.buffer_offset; + self.position.global_offset += 1; + self.position.global_line += 1; + } + _ => return Some(()), + } + i += 1; + // TODO: SIMD + } + } else { + for c in &self.data[self.position.buffer_offset..] 
+            {
+                if matches!(c, b' ' | b'\t') {
+                    self.position.buffer_offset += 1;
+                    self.position.global_offset += 1;
+                } else {
+                    return Some(());
+                }
+                // TODO: SIMD
+            }
+        }
+        Some(())
+    }
+
+    fn shrink_data(&mut self) {
+        if self.position.line_start_buffer_offset > 0 {
+            self.data
+                .copy_within(self.position.line_start_buffer_offset.., 0);
+            self.data
+                .truncate(self.data.len() - self.position.line_start_buffer_offset);
+            self.position.buffer_offset -= self.position.line_start_buffer_offset;
+            self.position.line_start_buffer_offset = 0;
+            self.previous_position = self.position;
+        }
+    }
+
+    fn find_number_of_line_jumps_and_start_of_last_line(bytes: &[u8]) -> (u64, usize) {
+        let mut num_of_jumps = 0;
+        let mut last_jump_pos = 0;
+        let mut previous_cr = 0;
+        for pos in memchr2_iter(b'\r', b'\n', bytes) {
+            if bytes[pos] == b'\r' {
+                previous_cr = pos;
+                num_of_jumps += 1;
+                last_jump_pos = pos + 1;
+            } else {
+                if previous_cr < pos - 1 {
+                    // We count \r\n as a single line jump
+                    num_of_jumps += 1;
+                }
+                last_jump_pos = pos + 1;
+            }
+        }
+        (num_of_jumps, last_jump_pos)
+    }
+
+    fn column_from_bytes(bytes: &[u8]) -> u64 {
+        match str::from_utf8(bytes) {
+            Ok(s) => u64::try_from(s.chars().count()).unwrap(),
+            Err(e) => {
+                if e.valid_up_to() == 0 {
+                    0
+                } else {
+                    Self::column_from_bytes(&bytes[..e.valid_up_to()])
+                }
+            }
+        }
+    }
+}
diff --git a/ng-oxigraph/src/oxttl/toolkit/mod.rs b/ng-oxigraph/src/oxttl/toolkit/mod.rs
new file mode 100644
index 0000000..10c4216
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/toolkit/mod.rs
@@ -0,0 +1,13 @@
+//! oxttl parsing toolkit.
+//!
+//! Provides the basic code to write plain Rust lexers and parsers able to read files chunk by chunk.
+
+mod error;
+mod lexer;
+mod parser;
+
+pub use self::error::{TextPosition, TurtleParseError, TurtleSyntaxError};
+pub use self::lexer::{Lexer, TokenRecognizer, TokenRecognizerError};
+#[cfg(feature = "async-tokio")]
+pub use self::parser::FromTokioAsyncReadIterator;
+pub use self::parser::{FromReadIterator, Parser, RuleRecognizer, RuleRecognizerError};
diff --git a/ng-oxigraph/src/oxttl/toolkit/parser.rs b/ng-oxigraph/src/oxttl/toolkit/parser.rs
new file mode 100644
index 0000000..e406096
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/toolkit/parser.rs
@@ -0,0 +1,183 @@
+use crate::oxttl::toolkit::error::{TurtleParseError, TurtleSyntaxError};
+use crate::oxttl::toolkit::lexer::{Lexer, TokenRecognizer};
+use std::io::Read;
+#[cfg(feature = "async-tokio")]
+use tokio::io::AsyncRead;
+
+pub trait RuleRecognizer: Sized {
+    type TokenRecognizer: TokenRecognizer;
+    type Output;
+    type Context;
+
+    fn error_recovery_state(self) -> Self;
+
+    fn recognize_next(
+        self,
+        token: <Self::TokenRecognizer as TokenRecognizer>::Token<'_>,
+        context: &mut Self::Context,
+        results: &mut Vec<Self::Output>,
+        errors: &mut Vec<RuleRecognizerError>,
+    ) -> Self;
+
+    fn recognize_end(
+        self,
+        context: &mut Self::Context,
+        results: &mut Vec<Self::Output>,
+        errors: &mut Vec<RuleRecognizerError>,
+    );
+
+    fn lexer_options(
+        context: &Self::Context,
+    ) -> &<Self::TokenRecognizer as TokenRecognizer>::Options;
+}
+
+pub struct RuleRecognizerError {
+    pub message: String,
+}
+
+impl<S: Into<String>> From<S> for RuleRecognizerError {
+    fn from(message: S) -> Self {
+        Self {
+            message: message.into(),
+        }
+    }
+}
+
+#[allow(clippy::partial_pub_fields)]
+pub struct Parser<RR: RuleRecognizer> {
+    lexer: Lexer<RR::TokenRecognizer>,
+    state: Option<RR>,
+    pub context: RR::Context,
+    results: Vec<RR::Output>,
+    errors: Vec<RuleRecognizerError>,
+}
+
+impl<RR: RuleRecognizer> Parser<RR> {
+    pub fn new(lexer: Lexer<RR::TokenRecognizer>, recognizer: RR, context: RR::Context) -> Self {
+        Self {
+            lexer,
+            state: Some(recognizer),
+            context,
+            results: vec![],
+            errors: vec![],
+        }
+    }
+
+    pub fn extend_from_slice(&mut self, other: &[u8]) {
+        self.lexer.extend_from_slice(other)
+    }
+
+    #[inline]
+    pub fn end(&mut self) {
+        self.lexer.end()
+    }
+
+    #[inline]
+    pub fn is_end(&self) -> bool {
+        self.state.is_none() && self.results.is_empty() && self.errors.is_empty()
+    }
+
+    pub fn read_next(&mut self) -> Option<Result<RR::Output, TurtleSyntaxError>> {
+        loop {
+            if let Some(error) = self.errors.pop() {
+                return Some(Err(TurtleSyntaxError {
+                    location: self.lexer.last_token_location(),
+                    message: error
+                        .message
+                        .replace("TOKEN", &self.lexer.last_token_source()),
+                }));
+            }
+            if let Some(result) = self.results.pop() {
+                return Some(Ok(result));
+            }
+            if let Some(result) = self.lexer.read_next(RR::lexer_options(&self.context)) {
+                match result {
+                    Ok(token) => {
+                        self.state = self.state.take().map(|state| {
+                            state.recognize_next(
+                                token,
+                                &mut self.context,
+                                &mut self.results,
+                                &mut self.errors,
+                            )
+                        });
+                        continue;
+                    }
+                    Err(e) => {
+                        self.state = self.state.take().map(RR::error_recovery_state);
+                        return Some(Err(e));
+                    }
+                }
+            }
+            if self.lexer.is_end() {
+                self.state.take()?.recognize_end(
+                    &mut self.context,
+                    &mut self.results,
+                    &mut self.errors,
+                )
+            } else {
+                return None;
+            }
+        }
+    }
+
+    pub fn parse_read<R: Read>(self, read: R) -> FromReadIterator<R, RR> {
+        FromReadIterator { read, parser: self }
+    }
+
+    #[cfg(feature = "async-tokio")]
+    pub fn parse_tokio_async_read<R: AsyncRead + Unpin>(
+        self,
+        read: R,
+    ) -> FromTokioAsyncReadIterator<R, RR> {
+        FromTokioAsyncReadIterator { read, parser: self }
+    }
+}
+
+#[allow(clippy::partial_pub_fields)]
+pub struct FromReadIterator<R: Read, RR: RuleRecognizer> {
+    read: R,
+    pub parser: Parser<RR>,
+}
+
+impl<R: Read, RR: RuleRecognizer> Iterator for FromReadIterator<R, RR> {
+    type Item = Result<RR::Output, TurtleParseError>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        while !self.parser.is_end() {
+            if let Some(result) = self.parser.read_next() {
+                return Some(result.map_err(TurtleParseError::Syntax));
+            }
+            if let Err(e) = self.parser.lexer.extend_from_read(&mut self.read) {
+                return Some(Err(e.into()));
+            }
+        }
+        None
+    }
+}
+
+#[cfg(feature = "async-tokio")]
+pub struct FromTokioAsyncReadIterator<R: AsyncRead + Unpin, RR: RuleRecognizer> {
+    pub read: R,
+    pub parser: Parser<RR>,
+}
+
+#[cfg(feature = "async-tokio")]
+impl<R: AsyncRead + Unpin, RR: RuleRecognizer> FromTokioAsyncReadIterator<R, RR> {
+    pub async fn next(&mut self) -> Option<Result<RR::Output, TurtleParseError>> {
+        while !self.parser.is_end() {
+            if let Some(result) = self.parser.read_next() {
+                return Some(result.map_err(TurtleParseError::Syntax));
+            }
+            if let Err(e) = self
+                .parser
+                .lexer
+                .extend_from_tokio_async_read(&mut self.read)
+                .await
+            {
+                return Some(Err(e.into()));
+            }
+        }
+        None
+    }
+}
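+
+// Editor's sketch (not part of the original patch): how a `TokenRecognizer`
+// plugs into `Lexer`. `WordRecognizer` is a hypothetical recognizer that
+// yields space-separated words; only the trait and the `Lexer` API above are
+// assumed:
+//
+//     struct WordRecognizer;
+//
+//     impl TokenRecognizer for WordRecognizer {
+//         type Token<'a> = &'a str;
+//         type Options = ();
+//
+//         fn recognize_next_token<'a>(
+//             &mut self,
+//             data: &'a [u8],
+//             is_ending: bool,
+//             _options: &(),
+//         ) -> Option<(usize, Result<&'a str, TokenRecognizerError>)> {
+//             let end = match data.iter().position(|b| *b == b' ') {
+//                 Some(i) => i,
+//                 None if is_ending => data.len(), // last word of the input
+//                 None => return None,             // ask the lexer for more data
+//             };
+//             (end > 0).then(|| {
+//                 (
+//                     end,
+//                     std::str::from_utf8(&data[..end])
+//                         .map_err(|_| (0..end, "invalid UTF-8").into()),
+//                 )
+//             })
+//         }
+//     }
+//
+//     // min buffer 64 B, max 4 KiB, line jumps are whitespace, no line comments:
+//     let mut lexer = Lexer::new(WordRecognizer, 64, 4096, true, None);
+//     lexer.extend_from_slice(b"hello world");
+//     lexer.end();
+//     while let Some(word) = lexer.read_next(&()) { /* "hello", then "world" */ }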
diff --git a/ng-oxigraph/src/oxttl/trig.rs b/ng-oxigraph/src/oxttl/trig.rs
new file mode 100644
index 0000000..7a51396
--- /dev/null
+++ b/ng-oxigraph/src/oxttl/trig.rs
@@ -0,0 +1,1252 @@
+//! A [TriG](https://www.w3.org/TR/trig/) streaming parser implemented by [`TriGParser`]
+//! and a serializer implemented by [`TriGSerializer`].
+
+use crate::oxrdf::vocab::{rdf, xsd};
+use crate::oxrdf::{
+    GraphName, GraphNameRef, LiteralRef, NamedNode, NamedNodeRef, Quad, QuadRef, Subject, TermRef,
+};
+use crate::oxttl::lexer::N3Lexer;
+use crate::oxttl::terse::TriGRecognizer;
+#[cfg(feature = "async-tokio")]
+use crate::oxttl::toolkit::FromTokioAsyncReadIterator;
+use crate::oxttl::toolkit::{FromReadIterator, Parser, TurtleParseError, TurtleSyntaxError};
+use oxiri::{Iri, IriParseError};
+use std::collections::hash_map::Iter;
+use std::collections::{BTreeMap, HashMap};
+use std::fmt;
+use std::io::{self, Read, Write};
+#[cfg(feature = "async-tokio")]
+use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
+
+/// A [TriG](https://www.w3.org/TR/trig/) streaming parser.
+///
+/// Support for [TriG-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#trig-star) is available behind the `rdf-star` feature and the [`TriGParser::with_quoted_triples`] option.
+///
+/// Count the number of people:
+/// ```
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
+/// use oxttl::TriGParser;
+///
+/// let file = br#"@base <http://example.com/> .
+/// @prefix schema: <http://schema.org/> .
+/// <foo> a schema:Person ;
+///     schema:name "Foo" .
+/// <bar> a schema:Person ;
+///     schema:name "Bar" ."#;
+///
+/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
+/// let mut count = 0;
+/// for quad in TriGParser::new().parse_read(file.as_ref()) {
+///     let quad = quad?;
+///     if quad.predicate == rdf::TYPE && quad.object == schema_person.into() {
+///         count += 1;
+///     }
+/// }
+/// assert_eq!(2, count);
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[derive(Default)]
+#[must_use]
+pub struct TriGParser {
+    unchecked: bool,
+    base: Option<Iri<String>>,
+    prefixes: HashMap<String, Iri<String>>,
+    #[cfg(feature = "rdf-star")]
+    with_quoted_triples: bool,
+}
+
+impl TriGParser {
+    /// Builds a new [`TriGParser`].
+    #[inline]
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Assumes the file is valid to make parsing faster.
+    ///
+    /// It will skip some validations.
+    ///
+    /// Note that if the file is actually not valid, then broken RDF might be emitted by the parser.
+    #[inline]
+    pub fn unchecked(mut self) -> Self {
+        self.unchecked = true;
+        self
+    }
+
+    #[inline]
+    pub fn with_base_iri(mut self, base_iri: impl Into<String>) -> Result<Self, IriParseError> {
+        self.base = Some(Iri::parse(base_iri.into())?);
+        Ok(self)
+    }
+
+    #[inline]
+    pub fn with_prefix(
+        mut self,
+        prefix_name: impl Into<String>,
+        prefix_iri: impl Into<String>,
+    ) -> Result<Self, IriParseError> {
+        self.prefixes
+            .insert(prefix_name.into(), Iri::parse(prefix_iri.into())?);
+        Ok(self)
+    }
+
+    /// Enables [TriG-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#trig-star).
+    #[cfg(feature = "rdf-star")]
+    #[inline]
+    pub fn with_quoted_triples(mut self) -> Self {
+        self.with_quoted_triples = true;
+        self
+    }
+
+    /// Parses a TriG file from a [`Read`] implementation.
+    ///
+    /// Count the number of people:
+    /// ```
+    /// use oxrdf::vocab::rdf;
+    /// use oxrdf::NamedNodeRef;
+    /// use oxttl::TriGParser;
+    ///
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" .
+    /// <bar> a schema:Person ;
+    ///     schema:name "Bar" ."#;
+    ///
+    /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
+    /// let mut count = 0;
+    /// for quad in TriGParser::new().parse_read(file.as_ref()) {
+    ///     let quad = quad?;
+    ///     if quad.predicate == rdf::TYPE && quad.object == schema_person.into() {
+    ///         count += 1;
+    ///     }
+    /// }
+    /// assert_eq!(2, count);
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn parse_read<R: Read>(self, read: R) -> FromReadTriGReader<R> {
+        FromReadTriGReader {
+            inner: self.parse().parser.parse_read(read),
+        }
+    }
+
+    /// Parses a TriG file from a [`AsyncRead`] implementation.
+    ///
+    /// Count the number of people:
+    /// ```
+    /// use oxrdf::vocab::rdf;
+    /// use oxrdf::NamedNodeRef;
+    /// use oxttl::TriGParser;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::TurtleParseError> {
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" .
+    /// <bar> a schema:Person ;
+    ///     schema:name "Bar" ."#;
+    ///
+    /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
+    /// let mut count = 0;
+    /// let mut parser = TriGParser::new().parse_tokio_async_read(file.as_ref());
+    /// while let Some(quad) = parser.next().await {
+    ///     let quad = quad?;
+    ///     if quad.predicate == rdf::TYPE && quad.object == schema_person.into() {
+    ///         count += 1;
+    ///     }
+    /// }
+    /// assert_eq!(2, count);
+    /// # Ok(())
+    /// # }
+    /// ```
+    #[cfg(feature = "async-tokio")]
+    pub fn parse_tokio_async_read<R: AsyncRead + Unpin>(
+        self,
+        read: R,
+    ) -> FromTokioAsyncReadTriGReader<R> {
+        FromTokioAsyncReadTriGReader {
+            inner: self.parse().parser.parse_tokio_async_read(read),
+        }
+    }
+
+    /// Allows parsing a TriG file by using a low-level API.
+    ///
+    /// Count the number of people:
+    /// ```
+    /// use oxrdf::vocab::rdf;
+    /// use oxrdf::NamedNodeRef;
+    /// use oxttl::TriGParser;
+    ///
+    /// let file: [&[u8]; 5] = [
+    ///     b"@base <http://example.com/>",
+    ///     b". @prefix schema: <http://schema.org/> .",
+    ///     b"<foo> a schema:Person",
+    ///     b" ; schema:name \"Foo\" . <bar>",
+    ///     b" a schema:Person ; schema:name \"Bar\" .",
+    /// ];
+    ///
+    /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
+    /// let mut count = 0;
+    /// let mut parser = TriGParser::new().parse();
+    /// let mut file_chunks = file.iter();
+    /// while !parser.is_end() {
+    ///     // We feed more data to the parser
+    ///     if let Some(chunk) = file_chunks.next() {
+    ///         parser.extend_from_slice(chunk);
+    ///     } else {
+    ///         parser.end(); // It's finished
+    ///     }
+    ///     // We read as many quads from the parser as possible
+    ///     while let Some(quad) = parser.read_next() {
+    ///         let quad = quad?;
+    ///         if quad.predicate == rdf::TYPE && quad.object == schema_person.into() {
+    ///             count += 1;
+    ///         }
+    ///     }
+    /// }
+    /// assert_eq!(2, count);
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn parse(self) -> LowLevelTriGReader {
+        LowLevelTriGReader {
+            parser: TriGRecognizer::new_parser(
+                true,
+                #[cfg(feature = "rdf-star")]
+                self.with_quoted_triples,
+                self.unchecked,
+                self.base,
+                self.prefixes,
+            ),
+        }
+    }
+}
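+
+// Editor's sketch (not part of the original patch): `with_base_iri` and
+// `with_prefix` above pre-load the resolution context, so input may omit its
+// own `@base`/`@prefix` directives. A minimal, hedged usage example:
+//
+//     let parser = TriGParser::new()
+//         .with_base_iri("http://example.com/")?
+//         .with_prefix("schema", "http://schema.org/")?;
+//     for quad in parser.parse_read(b"<s> a schema:Person .".as_ref()) {
+//         // <s> resolves to <http://example.com/s> against the base IRI.
+//         let _quad = quad?;
+//     }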
+
+/// Parses a TriG file from a [`Read`] implementation. Can be built using [`TriGParser::parse_read`].
+///
+/// Count the number of people:
+/// ```
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
+/// use oxttl::TriGParser;
+///
+/// let file = br#"@base <http://example.com/> .
+/// @prefix schema: <http://schema.org/> .
+/// <foo> a schema:Person ;
+///     schema:name "Foo" .
+/// <bar> a schema:Person ;
+///     schema:name "Bar" ."#;
+///
+/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
+/// let mut count = 0;
+/// for quad in TriGParser::new().parse_read(file.as_ref()) {
+///     let quad = quad?;
+///     if quad.predicate == rdf::TYPE && quad.object == schema_person.into() {
+///         count += 1;
+///     }
+/// }
+/// assert_eq!(2, count);
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[must_use]
+pub struct FromReadTriGReader<R: Read> {
+    inner: FromReadIterator<R, TriGRecognizer>,
+}
+
+impl<R: Read> FromReadTriGReader<R> {
+    /// The list of IRI prefixes considered at the current step of the parsing.
+    ///
+    /// This method returns (prefix name, prefix value) tuples.
+    /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered.
+    /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned).
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TriGParser::new().parse_read(file.as_ref());
+    /// assert_eq!(reader.prefixes().collect::<Vec<_>>(), []); // No prefix at the beginning
+    ///
+    /// reader.next().unwrap()?; // We read the first triple
+    /// assert_eq!(
+    ///     reader.prefixes().collect::<Vec<_>>(),
+    ///     [("schema", "http://schema.org/")]
+    /// ); // There are now prefixes
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn prefixes(&self) -> TriGPrefixesIter<'_> {
+        TriGPrefixesIter {
+            inner: self.inner.parser.context.prefixes(),
+        }
+    }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TriGParser::new().parse_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
+    ///
+    /// reader.next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
+}
+
+impl<R: Read> Iterator for FromReadTriGReader<R> {
+    type Item = Result<Quad, TurtleParseError>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner.next()
+    }
+}
+
+/// Parses a TriG file from a [`AsyncRead`] implementation. Can be built using [`TriGParser::parse_tokio_async_read`].
+///
+/// Count the number of people:
+/// ```
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
+/// use oxttl::TriGParser;
+///
+/// # #[tokio::main(flavor = "current_thread")]
+/// # async fn main() -> Result<(), oxttl::TurtleParseError> {
+/// let file = br#"@base <http://example.com/> .
+/// @prefix schema: <http://schema.org/> .
+/// <foo> a schema:Person ;
+///     schema:name "Foo" .
+/// <bar> a schema:Person ;
+///     schema:name "Bar" ."#;
+///
+/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
+/// let mut count = 0;
+/// let mut parser = TriGParser::new().parse_tokio_async_read(file.as_ref());
+/// while let Some(quad) = parser.next().await {
+///     let quad = quad?;
+///     if quad.predicate == rdf::TYPE && quad.object == schema_person.into() {
+///         count += 1;
+///     }
+/// }
+/// assert_eq!(2, count);
+/// # Ok(())
+/// # }
+/// ```
+#[cfg(feature = "async-tokio")]
+#[must_use]
+pub struct FromTokioAsyncReadTriGReader<R: AsyncRead + Unpin> {
+    inner: FromTokioAsyncReadIterator<R, TriGRecognizer>,
+}
+
+#[cfg(feature = "async-tokio")]
+impl<R: AsyncRead + Unpin> FromTokioAsyncReadTriGReader<R> {
+    /// Reads the next quad or returns `None` if the file is finished.
+    pub async fn next(&mut self) -> Option<Result<Quad, TurtleParseError>> {
+        Some(self.inner.next().await?.map(Into::into))
+    }
+
+    /// The list of IRI prefixes considered at the current step of the parsing.
+    ///
+    /// This method returns (prefix name, prefix value) tuples.
+    /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered.
+    /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned).
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::TurtleParseError> {
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref());
+    /// assert_eq!(reader.prefixes().collect::<Vec<_>>(), []); // No prefix at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(
+    ///     reader.prefixes().collect::<Vec<_>>(),
+    ///     [("schema", "http://schema.org/")]
+    /// ); // There are now prefixes
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn prefixes(&self) -> TriGPrefixesIter<'_> {
+        TriGPrefixesIter {
+            inner: self.inner.parser.context.prefixes(),
+        }
+    }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::TurtleParseError> {
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
+}
+
+/// Parses a TriG file by using a low-level API. Can be built using [`TriGParser::parse`].
+///
+/// Count the number of people:
+/// ```
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
+/// use oxttl::TriGParser;
+///
+/// let file: [&[u8]; 5] = [
+///     b"@base <http://example.com/>",
+///     b". @prefix schema: <http://schema.org/> .",
+///     b"<foo> a schema:Person",
+///     b" ; schema:name \"Foo\" . <bar>",
+///     b" a schema:Person ; schema:name \"Bar\" .",
+/// ];
+///
+/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
+/// let mut count = 0;
+/// let mut parser = TriGParser::new().parse();
+/// let mut file_chunks = file.iter();
+/// while !parser.is_end() {
+///     // We feed more data to the parser
+///     if let Some(chunk) = file_chunks.next() {
+///         parser.extend_from_slice(chunk);
+///     } else {
+///         parser.end(); // It's finished
+///     }
+///     // We read as many quads from the parser as possible
+///     while let Some(quad) = parser.read_next() {
+///         let quad = quad?;
+///         if quad.predicate == rdf::TYPE && quad.object == schema_person.into() {
+///             count += 1;
+///         }
+///     }
+/// }
+/// assert_eq!(2, count);
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+pub struct LowLevelTriGReader {
+    parser: Parser<TriGRecognizer>,
+}
+
+impl LowLevelTriGReader {
+    /// Adds some extra bytes to the parser. Should be called when [`read_next`](Self::read_next) returns [`None`] and there is still unread data.
+    pub fn extend_from_slice(&mut self, other: &[u8]) {
+        self.parser.extend_from_slice(other)
+    }
+
+    /// Tell the parser that the file is finished.
+    ///
+    /// This triggers the parsing of the final bytes and might lead [`read_next`](Self::read_next) to return some extra values.
+    pub fn end(&mut self) {
+        self.parser.end()
+    }
+
+    /// Returns if the parsing is finished i.e. [`end`](Self::end) has been called and [`read_next`](Self::read_next) is always going to return `None`.
+    pub fn is_end(&self) -> bool {
+        self.parser.is_end()
+    }
+
+    /// Attempts to parse a new quad from the already provided data.
+    ///
+    /// Returns [`None`] if the parsing is finished or more data is required.
+    /// If it is the case more data should be fed using [`extend_from_slice`](Self::extend_from_slice).
+    pub fn read_next(&mut self) -> Option<Result<Quad, TurtleSyntaxError>> {
+        self.parser.read_next()
+    }
+
+    /// The list of IRI prefixes considered at the current step of the parsing.
+    ///
+    /// This method returns (prefix name, prefix value) tuples.
+    /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered.
+    /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned).
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TriGParser::new().parse();
+    /// reader.extend_from_slice(file);
+    /// assert_eq!(reader.prefixes().collect::<Vec<_>>(), []); // No prefix at the beginning
+    ///
+    /// reader.read_next().unwrap()?; // We read the first triple
+    /// assert_eq!(
+    ///     reader.prefixes().collect::<Vec<_>>(),
+    ///     [("schema", "http://schema.org/")]
+    /// ); // There are now prefixes
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn prefixes(&self) -> TriGPrefixesIter<'_> {
+        TriGPrefixesIter {
+            inner: self.parser.context.prefixes(),
+        }
+    }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TriGParser::new().parse();
+    /// reader.extend_from_slice(file);
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.read_next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
+}
+
+/// Iterator on the file prefixes.
+///
+/// See [`LowLevelTriGReader::prefixes`].
+pub struct TriGPrefixesIter<'a> {
+    inner: Iter<'a, String, Iri<String>>,
+}
+
+impl<'a> Iterator for TriGPrefixesIter<'a> {
+    type Item = (&'a str, &'a str);
+
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        let (key, value) = self.inner.next()?;
+        Some((key.as_str(), value.as_str()))
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+/// A [TriG](https://www.w3.org/TR/trig/) serializer.
+///
+/// Support for [TriG-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#trig-star) is available behind the `rdf-star` feature.
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::TriGSerializer;
+///
+/// let mut writer = TriGSerializer::new()
+///     .with_prefix("schema", "http://schema.org/")?
+///     .serialize_to_write(Vec::new());
+/// writer.write_quad(QuadRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+///     NamedNodeRef::new("http://example.com")?,
+/// ))?;
+/// assert_eq!(
+///     b"@prefix schema: <http://schema.org/> .\n<http://example.com> {\n\t<http://example.com#me> a schema:Person .\n}\n",
+///     writer.finish()?.as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[derive(Default)]
+#[must_use]
+pub struct TriGSerializer {
+    prefixes: BTreeMap<String, String>,
+}
+
+impl TriGSerializer {
+    /// Builds a new [`TriGSerializer`].
+    #[inline]
+    pub fn new() -> Self {
+        Self {
+            prefixes: BTreeMap::new(),
+        }
+    }
+
+    #[inline]
+    pub fn with_prefix(
+        mut self,
+        prefix_name: impl Into<String>,
+        prefix_iri: impl Into<String>,
+    ) -> Result<Self, IriParseError> {
+        self.prefixes.insert(
+            Iri::parse(prefix_iri.into())?.into_inner(),
+            prefix_name.into(),
+        );
+        Ok(self)
+    }
+
+    /// Writes a TriG file to a [`Write`] implementation.
+    ///
+    /// ```
+    /// use oxrdf::{NamedNodeRef, QuadRef};
+    /// use oxttl::TriGSerializer;
+    ///
+    /// let mut writer = TriGSerializer::new()
+    ///     .with_prefix("schema", "http://schema.org/")?
+    ///     .serialize_to_write(Vec::new());
+    /// writer.write_quad(QuadRef::new(
+    ///     NamedNodeRef::new("http://example.com#me")?,
+    ///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+    ///     NamedNodeRef::new("http://schema.org/Person")?,
+    ///     NamedNodeRef::new("http://example.com")?,
+    /// ))?;
+    /// assert_eq!(
+    ///     b"@prefix schema: <http://schema.org/> .\n<http://example.com> {\n\t<http://example.com#me> a schema:Person .\n}\n",
+    ///     writer.finish()?.as_slice()
+    /// );
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn serialize_to_write<W: Write>(self, write: W) -> ToWriteTriGWriter<W> {
+        ToWriteTriGWriter {
+            write,
+            writer: self.serialize(),
+        }
+    }
+
+    /// Writes a TriG file to a [`AsyncWrite`] implementation.
+    ///
+    /// ```
+    /// use oxrdf::{NamedNodeRef, QuadRef};
+    /// use oxttl::TriGSerializer;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    /// let mut writer = TriGSerializer::new()
+    ///     .with_prefix("schema", "http://schema.org/")?
+    ///     .serialize_to_tokio_async_write(Vec::new());
+    /// writer
+    ///     .write_quad(QuadRef::new(
+    ///         NamedNodeRef::new_unchecked("http://example.com#me"),
+    ///         NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
+    ///         NamedNodeRef::new_unchecked("http://schema.org/Person"),
+    ///         NamedNodeRef::new_unchecked("http://example.com"),
+    ///     ))
+    ///     .await?;
+    /// assert_eq!(
+    ///     b"@prefix schema: <http://schema.org/> .\n<http://example.com> {\n\t<http://example.com#me> a schema:Person .\n}\n",
+    ///     writer.finish().await?.as_slice()
+    /// );
+    /// # Ok(())
+    /// # }
+    /// ```
+    #[cfg(feature = "async-tokio")]
+    pub fn serialize_to_tokio_async_write<W: AsyncWrite + Unpin>(
+        self,
+        write: W,
+    ) -> ToTokioAsyncWriteTriGWriter<W> {
+        ToTokioAsyncWriteTriGWriter {
+            write,
+            writer: self.serialize(),
+            buffer: Vec::new(),
+        }
+    }
+
+    /// Builds a low-level TriG writer.
+    ///
+    /// ```
+    /// use oxrdf::{NamedNodeRef, QuadRef};
+    /// use oxttl::TriGSerializer;
+    ///
+    /// let mut buf = Vec::new();
+    /// let mut writer = TriGSerializer::new()
+    ///     .with_prefix("schema", "http://schema.org/")?
+    ///     .serialize();
+    /// writer.write_quad(
+    ///     QuadRef::new(
+    ///         NamedNodeRef::new("http://example.com#me")?,
+    ///         NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+    ///         NamedNodeRef::new("http://schema.org/Person")?,
+    ///         NamedNodeRef::new("http://example.com")?,
+    ///     ),
+    ///     &mut buf,
+    /// )?;
+    /// writer.finish(&mut buf)?;
+    /// assert_eq!(
+    ///     b"@prefix schema: <http://schema.org/> .\n<http://example.com> {\n\t<http://example.com#me> a schema:Person .\n}\n",
+    ///     buf.as_slice()
+    /// );
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn serialize(self) -> LowLevelTriGWriter {
+        LowLevelTriGWriter {
+            prefixes: self.prefixes,
+            prelude_written: false,
+            current_graph_name: GraphName::DefaultGraph,
+            current_subject_predicate: None,
+        }
+    }
+}
+
+/// Writes a TriG file to a [`Write`] implementation. Can be built using [`TriGSerializer::serialize_to_write`].
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::TriGSerializer;
+///
+/// let mut writer = TriGSerializer::new()
+///     .with_prefix("schema", "http://schema.org/")?
+///     .serialize_to_write(Vec::new());
+/// writer.write_quad(QuadRef::new(
+///     NamedNodeRef::new("http://example.com#me")?,
+///     NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///     NamedNodeRef::new("http://schema.org/Person")?,
+///     NamedNodeRef::new("http://example.com")?,
+/// ))?;
+/// assert_eq!(
+///     b"@prefix schema: <http://schema.org/> .\n<http://example.com> {\n\t<http://example.com#me> a schema:Person .\n}\n",
+///     writer.finish()?.as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+#[must_use]
+pub struct ToWriteTriGWriter<W: Write> {
+    write: W,
+    writer: LowLevelTriGWriter,
+}
+
+impl<W: Write> ToWriteTriGWriter<W> {
+    /// Writes an extra quad.
+    pub fn write_quad<'a>(&mut self, q: impl Into<QuadRef<'a>>) -> io::Result<()> {
+        self.writer.write_quad(q, &mut self.write)
+    }
+
+    /// Ends the write process and returns the underlying [`Write`].
+    pub fn finish(mut self) -> io::Result<W> {
+        self.writer.finish(&mut self.write)?;
+        Ok(self.write)
+    }
+}
+
+/// Writes a TriG file to a [`AsyncWrite`] implementation. Can be built using [`TriGSerializer::serialize_to_tokio_async_write`].
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::TriGSerializer;
+///
+/// # #[tokio::main(flavor = "current_thread")]
+/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {
+/// let mut writer = TriGSerializer::new()
+///     .with_prefix("schema", "http://schema.org/")?
+///     .serialize_to_tokio_async_write(Vec::new());
+/// writer
+///     .write_quad(QuadRef::new(
+///         NamedNodeRef::new_unchecked("http://example.com#me"),
+///         NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
+///         NamedNodeRef::new_unchecked("http://schema.org/Person"),
+///         NamedNodeRef::new_unchecked("http://example.com"),
+///     ))
+///     .await?;
+/// assert_eq!(
+///     b"@prefix schema: <http://schema.org/> .\n<http://example.com> {\n\t<http://example.com#me> a schema:Person .\n}\n",
+///     writer.finish().await?.as_slice()
+/// );
+/// # Ok(())
+/// # }
+/// ```
+#[cfg(feature = "async-tokio")]
+#[must_use]
+pub struct ToTokioAsyncWriteTriGWriter<W: AsyncWrite + Unpin> {
+    write: W,
+    writer: LowLevelTriGWriter,
+    buffer: Vec<u8>,
+}
+
+#[cfg(feature = "async-tokio")]
+impl<W: AsyncWrite + Unpin> ToTokioAsyncWriteTriGWriter<W> {
+    /// Writes an extra quad.
+    pub async fn write_quad<'a>(&mut self, q: impl Into<QuadRef<'a>>) -> io::Result<()> {
+        self.writer.write_quad(q, &mut self.buffer)?;
+        self.write.write_all(&self.buffer).await?;
+        self.buffer.clear();
+        Ok(())
+    }
+
+    /// Ends the write process and returns the underlying [`Write`].
+    pub async fn finish(mut self) -> io::Result<W> {
+        self.writer.finish(&mut self.buffer)?;
+        self.write.write_all(&self.buffer).await?;
+        self.buffer.clear();
+        Ok(self.write)
+    }
+}
+
+/// Writes a TriG file by using a low-level API. Can be built using [`TriGSerializer::serialize`].
+///
+/// ```
+/// use oxrdf::{NamedNodeRef, QuadRef};
+/// use oxttl::TriGSerializer;
+///
+/// let mut buf = Vec::new();
+/// let mut writer = TriGSerializer::new()
+///     .with_prefix("schema", "http://schema.org/")?
+///     .serialize();
+/// writer.write_quad(
+///     QuadRef::new(
+///         NamedNodeRef::new("http://example.com#me")?,
+///         NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?,
+///         NamedNodeRef::new("http://schema.org/Person")?,
+///         NamedNodeRef::new("http://example.com")?,
+///     ),
+///     &mut buf,
+/// )?;
+/// writer.finish(&mut buf)?;
+/// assert_eq!(
+///     b"@prefix schema: <http://schema.org/> .\n<http://example.com> {\n\t<http://example.com#me> a schema:Person .\n}\n",
+///     buf.as_slice()
+/// );
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+pub struct LowLevelTriGWriter {
+    prefixes: BTreeMap<String, String>,
+    prelude_written: bool,
+    current_graph_name: GraphName,
+    current_subject_predicate: Option<(Subject, NamedNode)>,
+}
+
+impl LowLevelTriGWriter {
+    /// Writes an extra quad.
+    pub fn write_quad<'a>(
+        &mut self,
+        q: impl Into<QuadRef<'a>>,
+        mut write: impl Write,
+    ) -> io::Result<()> {
+        if !self.prelude_written {
+            self.prelude_written = true;
+            for (prefix_iri, prefix_name) in &self.prefixes {
+                writeln!(write, "@prefix {prefix_name}: <{prefix_iri}> .")?;
+            }
+        }
+        let q = q.into();
+        if q.graph_name == self.current_graph_name.as_ref() {
+            if let Some((current_subject, current_predicate)) =
+                self.current_subject_predicate.take()
+            {
+                if q.subject == current_subject.as_ref() {
+                    if q.predicate == current_predicate {
+                        self.current_subject_predicate = Some((current_subject, current_predicate));
+                        write!(write, " , {}", self.term(q.object))
+                    } else {
+                        self.current_subject_predicate =
+                            Some((current_subject, q.predicate.into_owned()));
+                        writeln!(write, " ;")?;
+                        if !self.current_graph_name.is_default_graph() {
+                            write!(write, "\t")?;
+                        }
+                        write!(
+                            write,
+                            "\t{} {}",
+                            self.predicate(q.predicate),
+                            self.term(q.object)
+                        )
+                    }
+                } else {
+                    self.current_subject_predicate =
+                        Some((q.subject.into_owned(), q.predicate.into_owned()));
+                    writeln!(write, " .")?;
+                    if !self.current_graph_name.is_default_graph() {
+                        write!(write, "\t")?;
+                    }
+                    write!(
+                        write,
+                        "{} {} {}",
+                        self.term(q.subject),
+                        self.predicate(q.predicate),
+                        self.term(q.object)
+                    )
+                }
+            } else {
+                self.current_subject_predicate =
+                    Some((q.subject.into_owned(), q.predicate.into_owned()));
+                if !self.current_graph_name.is_default_graph() {
+                    write!(write, "\t")?;
+                }
+                write!(
+                    write,
+                    "{} {} {}",
+                    self.term(q.subject),
+                    self.predicate(q.predicate),
+                    self.term(q.object)
+                )
+            }
+        } else {
+            if self.current_subject_predicate.is_some() {
+                writeln!(write, " .")?;
+            }
+            if !self.current_graph_name.is_default_graph() {
+                writeln!(write, "}}")?;
+            }
+            self.current_graph_name = q.graph_name.into_owned();
+            self.current_subject_predicate =
+                Some((q.subject.into_owned(), q.predicate.into_owned()));
+            match self.current_graph_name.as_ref() {
+                GraphNameRef::NamedNode(g) => {
+                    writeln!(write, "{} {{", self.term(g))?;
+                    write!(write, "\t")?;
+                }
+                GraphNameRef::BlankNode(g) => {
+                    writeln!(write, "{} {{", self.term(g))?;
+                    write!(write, "\t")?;
+                }
+                GraphNameRef::DefaultGraph => (),
+            }
+
+            write!(
+                write,
+                "{} {} {}",
+                self.term(q.subject),
+                self.predicate(q.predicate),
+                self.term(q.object)
+            )
+        }
+    }
+
+    fn predicate<'a>(&'a self, named_node: impl Into<NamedNodeRef<'a>>) -> TurtlePredicate<'a> {
+        TurtlePredicate {
+            named_node: named_node.into(),
+            prefixes: &self.prefixes,
+        }
+    }
+
+    fn term<'a>(&'a self, term: impl Into<TermRef<'a>>) -> TurtleTerm<'a> {
+        TurtleTerm {
+            term: term.into(),
+            prefixes: &self.prefixes,
+        }
+    }
+
+    /// Finishes writing the file.
+    pub fn finish(&mut self, mut write: impl Write) -> io::Result<()> {
+        if self.current_subject_predicate.is_some() {
+            writeln!(write, " .")?;
+        }
+        if !self.current_graph_name.is_default_graph() {
+            writeln!(write, "}}")?;
+        }
+        Ok(())
+    }
+}
+
+struct TurtlePredicate<'a> {
+    named_node: NamedNodeRef<'a>,
+    prefixes: &'a BTreeMap<String, String>,
+}
+
+impl<'a> fmt::Display for TurtlePredicate<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.named_node == rdf::TYPE {
+            f.write_str("a")
+        } else {
+            TurtleTerm {
+                term: self.named_node.into(),
+                prefixes: self.prefixes,
+            }
+            .fmt(f)
+        }
+    }
+}
+
+struct TurtleTerm<'a> {
+    term: TermRef<'a>,
+    prefixes: &'a BTreeMap<String, String>,
+}
+
+impl<'a> fmt::Display for TurtleTerm<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.term {
+            TermRef::NamedNode(v) => {
+                for (prefix_iri, prefix_name) in self.prefixes {
+                    if let Some(local_name) = v.as_str().strip_prefix(prefix_iri) {
+                        if let Some(escaped_local_name) = escape_local_name(local_name) {
+                            return write!(f, "{prefix_name}:{escaped_local_name}");
+                        }
+                    }
+                }
+                write!(f, "{v}")
+            }
+            TermRef::BlankNode(v) => write!(f, "{v}"),
+            TermRef::Literal(v) => {
+                let value = v.value();
+                let inline = match v.datatype() {
+                    xsd::BOOLEAN => is_turtle_boolean(value),
+                    xsd::INTEGER => is_turtle_integer(value),
+                    xsd::DECIMAL => is_turtle_decimal(value),
+                    xsd::DOUBLE => is_turtle_double(value),
+                    _ => false,
+                };
+                if inline {
+                    f.write_str(value)
+                } else if v.is_plain() {
+                    write!(f, "{v}")
+                } else {
+                    write!(
+                        f,
+                        "{}^^{}",
+                        LiteralRef::new_simple_literal(v.value()),
+                        TurtleTerm {
+                            term: v.datatype().into(),
+                            prefixes: self.prefixes
+                        }
+                    )
+                }
+            }
+            #[cfg(feature = "rdf-star")]
+            TermRef::Triple(t) => {
+                write!(
+                    f,
+                    "<< {} {} {} >>",
+                    TurtleTerm {
+                        term: t.subject.as_ref().into(),
+                        prefixes: self.prefixes
+                    },
+                    TurtleTerm {
+                        term: t.predicate.as_ref().into(),
+                        prefixes: self.prefixes
+                    },
+                    TurtleTerm {
+                        term: t.object.as_ref(),
+                        prefixes: self.prefixes
+                    }
+                )
+            }
+        }
+    }
+}
+
+fn is_turtle_boolean(value: &str) -> bool {
+    matches!(value, "true" | "false")
+}
+
+fn is_turtle_integer(value: &str) -> bool {
+    // [19] INTEGER ::= [+-]? [0-9]+
+    let mut value = value.as_bytes();
+    if let Some(v) = value.strip_prefix(b"+") {
+        value = v;
+    } else if let Some(v) = value.strip_prefix(b"-") {
+        value = v;
+    }
+    !value.is_empty() && value.iter().all(u8::is_ascii_digit)
+}
+
+fn is_turtle_decimal(value: &str) -> bool {
+    // [20] DECIMAL ::= [+-]? [0-9]* '.' [0-9]+
+    let mut value = value.as_bytes();
+    if let Some(v) = value.strip_prefix(b"+") {
+        value = v;
+    } else if let Some(v) = value.strip_prefix(b"-") {
+        value = v;
+    }
+    while value.first().map_or(false, u8::is_ascii_digit) {
+        value = &value[1..];
+    }
+    let Some(value) = value.strip_prefix(b".") else {
+        return false;
+    };
+    !value.is_empty() && value.iter().all(u8::is_ascii_digit)
+}
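+
+// Editor's sketch (not part of the original patch): concrete values accepted
+// by the two validators above, per the Turtle grammar rules they quote. A
+// literal that fails these checks is serialized in the quoted
+// `"..."^^<datatype>` form instead of being inlined:
+//
+//     assert!(is_turtle_integer("42") && is_turtle_integer("+42"));
+//     assert!(!is_turtle_integer("4e2")); // exponents are not INTEGER
+//     assert!(is_turtle_decimal(".5") && is_turtle_decimal("-0.5"));
+//     assert!(!is_turtle_decimal("5.")); // DECIMAL requires digits after '.'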
+
+fn is_turtle_double(value: &str) -> bool {
+    // [21] DOUBLE ::= [+-]? ([0-9]+ '.' [0-9]* EXPONENT | '.' [0-9]+ EXPONENT | [0-9]+ EXPONENT)
+    // [154s] EXPONENT ::= [eE] [+-]? [0-9]+
+    let mut value = value.as_bytes();
+    if let Some(v) = value.strip_prefix(b"+") {
+        value = v;
+    } else if let Some(v) = value.strip_prefix(b"-") {
+        value = v;
+    }
+    let mut with_before = false;
+    while value.first().map_or(false, u8::is_ascii_digit) {
+        value = &value[1..];
+        with_before = true;
+    }
+    let mut with_after = false;
+    if let Some(v) = value.strip_prefix(b".") {
+        value = v;
+        while value.first().map_or(false, u8::is_ascii_digit) {
+            value = &value[1..];
+            with_after = true;
+        }
+    }
+    if let Some(v) = value.strip_prefix(b"e") {
+        value = v;
+    } else if let Some(v) = value.strip_prefix(b"E") {
+        value = v;
+    } else {
+        return false;
+    }
+    if let Some(v) = value.strip_prefix(b"+") {
+        value = v;
+    } else if let Some(v) = value.strip_prefix(b"-") {
+        value = v;
+    }
+    (with_before || with_after) && !value.is_empty() && value.iter().all(u8::is_ascii_digit)
+}
+
+fn escape_local_name(value: &str) -> Option<String> {
+    // TODO: PLX
+    // [168s] PN_LOCAL ::= (PN_CHARS_U | ':' | [0-9] | PLX) ((PN_CHARS | '.' | ':' | PLX)* (PN_CHARS | ':' | PLX))?
+    let mut output = String::with_capacity(value.len());
+    let mut chars = value.chars();
+    let first = chars.next()?;
+    if N3Lexer::is_possible_pn_chars_u(first) || first == ':' || first.is_ascii_digit() {
+        output.push(first);
+    } else if can_be_escaped_in_local_name(first) {
+        output.push('\\');
+        output.push(first);
+    } else {
+        return None;
+    }
+
+    while let Some(c) = chars.next() {
+        if N3Lexer::is_possible_pn_chars(c) || c == ':' || (c == '.' && !chars.as_str().is_empty())
+        {
+            output.push(c);
+        } else if can_be_escaped_in_local_name(c) {
+            output.push('\\');
+            output.push(c);
+        } else {
+            return None;
+        }
+    }
+
+    Some(output)
+}
+
+fn can_be_escaped_in_local_name(c: char) -> bool {
+    matches!(
+        c,
+        '_' | '~'
+            | '.'
+            | '-'
+            | '!'
+            | '$'
+            | '&'
+            | '\''
+            | '('
+            | ')'
+            | '*'
+            | '+'
+            | ','
+            | ';'
+            | '='
+            | '/'
+            | '?'
+            | '#'
+            | '@'
+            | '%'
+    )
+}
+
+#[cfg(test)]
+#[allow(clippy::panic_in_result_fn)]
+mod tests {
+    use super::*;
+    use crate::oxrdf::BlankNodeRef;
+
+    #[test]
+    fn test_write() -> io::Result<()> {
+        let mut writer = TriGSerializer::new()
+            .with_prefix("ex", "http://example.com/")
+            .unwrap()
+            .serialize_to_write(Vec::new());
+        writer.write_quad(QuadRef::new(
+            NamedNodeRef::new_unchecked("http://example.com/s"),
+            NamedNodeRef::new_unchecked("http://example.com/p"),
+            NamedNodeRef::new_unchecked("http://example.com/o."),
+            NamedNodeRef::new_unchecked("http://example.com/g"),
+        ))?;
+        writer.write_quad(QuadRef::new(
+            NamedNodeRef::new_unchecked("http://example.com/s"),
+            NamedNodeRef::new_unchecked("http://example.com/p"),
+            NamedNodeRef::new_unchecked("http://example.com/o{o}"),
+            NamedNodeRef::new_unchecked("http://example.com/g"),
+        ))?;
+        writer.write_quad(QuadRef::new(
+            NamedNodeRef::new_unchecked("http://example.com/s"),
+            NamedNodeRef::new_unchecked("http://example.com/p"),
+            LiteralRef::new_simple_literal("foo"),
+            NamedNodeRef::new_unchecked("http://example.com/g"),
+        ))?;
+        writer.write_quad(QuadRef::new(
+            NamedNodeRef::new_unchecked("http://example.com/s"),
+            NamedNodeRef::new_unchecked("http://example.com/p2"),
+            LiteralRef::new_language_tagged_literal_unchecked("foo", "en"),
+            NamedNodeRef::new_unchecked("http://example.com/g"),
+        ))?;
+        writer.write_quad(QuadRef::new(
+            BlankNodeRef::new_unchecked("b"),
+            NamedNodeRef::new_unchecked("http://example.com/p2"),
+            BlankNodeRef::new_unchecked("b2"),
+            NamedNodeRef::new_unchecked("http://example.com/g"),
+        ))?;
+        writer.write_quad(QuadRef::new(
+            BlankNodeRef::new_unchecked("b"),
+            NamedNodeRef::new_unchecked("http://example.com/p2"),
+            LiteralRef::new_typed_literal("true", xsd::BOOLEAN),
+            GraphNameRef::DefaultGraph,
+        ))?;
+        writer.write_quad(QuadRef::new(
+            BlankNodeRef::new_unchecked("b"),
+            NamedNodeRef::new_unchecked("http://example.org/p2"),
+            LiteralRef::new_typed_literal("false", xsd::BOOLEAN),
+            NamedNodeRef::new_unchecked("http://example.com/g2"),
+        ))?;
+        assert_eq!(
+            String::from_utf8(writer.finish()?).unwrap(),
+            "@prefix ex: <http://example.com/> .\nex:g {\n\tex:s ex:p ex:o\\. , <http://example.com/o{o}> , \"foo\" ;\n\t\tex:p2 \"foo\"@en .\n\t_:b ex:p2 _:b2 .\n}\n_:b ex:p2 true .\nex:g2 {\n\t_:b <http://example.org/p2> false .\n}\n"
+        );
+        Ok(())
+    }
+}
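+
+// Editor's sketch (not part of the original patch): the serializer's literal
+// inlining and prefix handling demonstrated through the public API. An
+// `xsd:double` value that passes `is_turtle_double` above is written bare,
+// and the subject/predicate are compacted against the registered prefix:
+//
+//     let mut writer = TriGSerializer::new()
+//         .with_prefix("ex", "http://example.com/")?
+//         .serialize_to_write(Vec::new());
+//     writer.write_quad(QuadRef::new(
+//         NamedNodeRef::new_unchecked("http://example.com/s"),
+//         NamedNodeRef::new_unchecked("http://example.com/p"),
+//         LiteralRef::new_typed_literal("4.2e1", xsd::DOUBLE),
+//         GraphNameRef::DefaultGraph,
+//     ))?;
+//     // => "@prefix ex: <http://example.com/> .\nex:s ex:p 4.2e1 .\n"
+//     let _bytes = writer.finish()?;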
+/// +/// Support for [Turtle-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#turtle-star) is available behind the `rdf-star` feature and the [`TurtleParser::with_quoted_triples`] option. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; +/// use oxttl::TurtleParser; +/// +/// let file = br#"@base . +/// @prefix schema: . +/// a schema:Person ; +/// schema:name "Foo" . +/// a schema:Person ; +/// schema:name "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// for triple in TurtleParser::new().parse_read(file.as_ref()) { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[derive(Default)] +#[must_use] +pub struct TurtleParser { + unchecked: bool, + base: Option>, + prefixes: HashMap>, + #[cfg(feature = "rdf-star")] + with_quoted_triples: bool, +} + +impl TurtleParser { + /// Builds a new [`TurtleParser`]. + #[inline] + pub fn new() -> Self { + Self::default() + } + + /// Assumes the file is valid to make parsing faster. + /// + /// It will skip some validations. + /// + /// Note that if the file is actually not valid, then broken RDF might be emitted by the parser. + #[inline] + pub fn unchecked(mut self) -> Self { + self.unchecked = true; + self + } + + #[inline] + pub fn with_base_iri(mut self, base_iri: impl Into) -> Result { + self.base = Some(Iri::parse(base_iri.into())?); + Ok(self) + } + + #[inline] + pub fn with_prefix( + mut self, + prefix_name: impl Into, + prefix_iri: impl Into, + ) -> Result { + self.prefixes + .insert(prefix_name.into(), Iri::parse(prefix_iri.into())?); + Ok(self) + } + + /// Enables [Turtle-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#turtle-star). + #[cfg(feature = "rdf-star")] + #[inline] + pub fn with_quoted_triples(mut self) -> Self { + self.with_quoted_triples = true; + self + } + + /// Parses a Turtle file from a [`Read`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; + /// use oxttl::TurtleParser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" . + /// a schema:Person ; + /// schema:name "Bar" ."#; + /// + /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; + /// let mut count = 0; + /// for triple in TurtleParser::new().parse_read(file.as_ref()) { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn parse_read(self, read: R) -> FromReadTurtleReader { + FromReadTurtleReader { + inner: self.parse().parser.parse_read(read), + } + } + + /// Parses a Turtle file from a [`AsyncRead`] implementation. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; + /// use oxttl::TurtleParser; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" . 
+ /// a schema:Person ; + /// schema:name "Bar" ."#; + /// + /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); + /// let mut count = 0; + /// let mut parser = TurtleParser::new().parse_tokio_async_read(file.as_ref()); + /// while let Some(triple) = parser.next().await { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// assert_eq!(2, count); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn parse_tokio_async_read( + self, + read: R, + ) -> FromTokioAsyncReadTurtleReader { + FromTokioAsyncReadTurtleReader { + inner: self.parse().parser.parse_tokio_async_read(read), + } + } + + /// Allows to parse a Turtle file by using a low-level API. + /// + /// Count the number of people: + /// ``` + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; + /// use oxttl::TurtleParser; + /// + /// let file: [&[u8]; 5] = [ + /// b"@base ", + /// b". @prefix schema: .", + /// b" a schema:Person", + /// b" ; schema:name \"Foo\" . ", + /// b" a schema:Person ; schema:name \"Bar\" .", + /// ]; + /// + /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; + /// let mut count = 0; + /// let mut parser = TurtleParser::new().parse(); + /// let mut file_chunks = file.iter(); + /// while !parser.is_end() { + /// // We feed more data to the parser + /// if let Some(chunk) = file_chunks.next() { + /// parser.extend_from_slice(chunk); + /// } else { + /// parser.end(); // It's finished + /// } + /// // We read as many triples from the parser as possible + /// while let Some(triple) = parser.read_next() { + /// let triple = triple?; + /// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + /// count += 1; + /// } + /// } + /// } + /// assert_eq!(2, count); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn parse(self) -> LowLevelTurtleReader { + LowLevelTurtleReader { + parser: TriGRecognizer::new_parser( + false, + #[cfg(feature = "rdf-star")] + self.with_quoted_triples, + self.unchecked, + self.base, + self.prefixes, + ), + } + } +} + +/// Parses a Turtle file from a [`Read`] implementation. Can be built using [`TurtleParser::parse_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; +/// use oxttl::TurtleParser; +/// +/// let file = br#"@base . +/// @prefix schema: . +/// a schema:Person ; +/// schema:name "Foo" . +/// a schema:Person ; +/// schema:name "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; +/// let mut count = 0; +/// for triple in TurtleParser::new().parse_read(file.as_ref()) { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct FromReadTurtleReader { + inner: FromReadIterator, +} + +impl FromReadTurtleReader { + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). + /// + /// ``` + /// use oxttl::TurtleParser; + /// + /// let file = br#"@base . + /// @prefix schema: . 
+ /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = TurtleParser::new().parse_read(file.as_ref()); + /// assert!(reader.prefixes().collect::>().is_empty()); // No prefix at the beginning + /// + /// reader.next().unwrap()?; // We read the first triple + /// assert_eq!( + /// reader.prefixes().collect::>(), + /// [("schema", "http://schema.org/")] + /// ); // There are now prefixes + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn prefixes(&self) -> TurtlePrefixesIter<'_> { + TurtlePrefixesIter { + inner: self.inner.parser.context.prefixes(), + } + } + + /// The base IRI considered at the current step of the parsing. + /// + /// ``` + /// use oxttl::TurtleParser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = TurtleParser::new().parse_read(file.as_ref()); + /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. + /// + /// reader.next().unwrap()?; // We read the first triple + /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI. + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn base_iri(&self) -> Option<&str> { + self.inner + .parser + .context + .lexer_options + .base_iri + .as_ref() + .map(Iri::as_str) + } +} + +impl Iterator for FromReadTurtleReader { + type Item = Result; + + fn next(&mut self) -> Option { + Some(self.inner.next()?.map(Into::into)) + } +} + +/// Parses a Turtle file from a [`AsyncRead`] implementation. Can be built using [`TurtleParser::parse_tokio_async_read`]. +/// +/// Count the number of people: +/// ``` +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; +/// use oxttl::TurtleParser; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), oxttl::TurtleParseError> { +/// let file = br#"@base . +/// @prefix schema: . +/// a schema:Person ; +/// schema:name "Foo" . +/// a schema:Person ; +/// schema:name "Bar" ."#; +/// +/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); +/// let mut count = 0; +/// let mut parser = TurtleParser::new().parse_tokio_async_read(file.as_ref()); +/// while let Some(triple) = parser.next().await { +/// let triple = triple?; +/// if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { +/// count += 1; +/// } +/// } +/// assert_eq!(2, count); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct FromTokioAsyncReadTurtleReader { + inner: FromTokioAsyncReadIterator, +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadTurtleReader { + /// Reads the next triple or returns `None` if the file is finished. + pub async fn next(&mut self) -> Option> { + Some(self.inner.next().await?.map(Into::into)) + } + + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). + /// + /// ``` + /// use oxttl::TurtleParser; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), oxttl::TurtleParseError> { + /// let file = br#"@base . + /// @prefix schema: . 
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref());
+    /// assert_eq!(reader.prefixes().collect::<Vec<_>>(), []); // No prefix at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(
+    ///     reader.prefixes().collect::<Vec<_>>(),
+    ///     [("schema", "http://schema.org/")]
+    /// ); // There are now prefixes
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn prefixes(&self) -> TurtlePrefixesIter<'_> {
+        TurtlePrefixesIter {
+            inner: self.inner.parser.context.prefixes(),
+        }
+    }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TurtleParser;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::TurtleParseError> {
+    /// let file = br#"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name "Foo" ."#;
+    ///
+    /// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
+}
+
+/// Parses a Turtle file using a low-level API. Can be built using [`TurtleParser::parse`].
+///
+/// Count the number of people:
+/// ```
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
+/// use oxttl::TurtleParser;
+///
+/// let file: [&[u8]; 5] = [
+///     b"@base <http://example.com/>",
+///     b". @prefix schema: <http://schema.org/> .",
+///     b"<foo> a schema:Person",
+///     b" ; schema:name \"Foo\" . <bar>",
+///     b" a schema:Person ; schema:name \"Bar\" .",
+/// ];
+///
+/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
+/// let mut count = 0;
+/// let mut parser = TurtleParser::new().parse();
+/// let mut file_chunks = file.iter();
+/// while !parser.is_end() {
+///     // We feed more data to the parser
+///     if let Some(chunk) = file_chunks.next() {
+///         parser.extend_from_slice(chunk);
+///     } else {
+///         parser.end(); // It's finished
+///     }
+///     // We read as many triples from the parser as possible
+///     while let Some(triple) = parser.read_next() {
+///         let triple = triple?;
+///         if triple.predicate == rdf::TYPE && triple.object == schema_person.into() {
+///             count += 1;
+///         }
+///     }
+/// }
+/// assert_eq!(2, count);
+/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+/// ```
+pub struct LowLevelTurtleReader {
+    parser: Parser<TriGRecognizer>,
+}
+
+impl LowLevelTurtleReader {
+    /// Adds some extra bytes to the parser. Should be called when [`read_next`](Self::read_next) returns [`None`] and there is still unread data.
+    pub fn extend_from_slice(&mut self, other: &[u8]) {
+        self.parser.extend_from_slice(other)
+    }
+
+    /// Tells the parser that the file is finished.
+    ///
+    /// This triggers the parsing of the final bytes and might lead [`read_next`](Self::read_next) to return some extra values.
+    pub fn end(&mut self) {
+        self.parser.end()
+    }
+
+    /// Returns whether the parsing is finished, i.e. whether [`end`](Self::end) has been called and [`read_next`](Self::read_next) is always going to return `None`.
+    pub fn is_end(&self) -> bool {
+        self.parser.is_end()
+    }
+
+    /// Attempts to parse a new triple from the already provided data.
+    ///
+    /// Returns [`None`] if the parsing is finished or more data is required.
+ /// If it is the case more data should be fed using [`extend_from_slice`](Self::extend_from_slice). + pub fn read_next(&mut self) -> Option> { + Some(self.parser.read_next()?.map(Into::into)) + } + + /// The list of IRI prefixes considered at the current step of the parsing. + /// + /// This method returns (prefix name, prefix value) tuples. + /// It is empty at the beginning of the parsing and gets updated when prefixes are encountered. + /// It should be full at the end of the parsing (but if a prefix is overridden, only the latest version will be returned). + /// + /// ``` + /// use oxttl::TurtleParser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = TurtleParser::new().parse(); + /// reader.extend_from_slice(file); + /// assert_eq!(reader.prefixes().collect::>(), []); // No prefix at the beginning + /// + /// reader.read_next().unwrap()?; // We read the first triple + /// assert_eq!( + /// reader.prefixes().collect::>(), + /// [("schema", "http://schema.org/")] + /// ); // There are now prefixes + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn prefixes(&self) -> TurtlePrefixesIter<'_> { + TurtlePrefixesIter { + inner: self.parser.context.prefixes(), + } + } + + /// The base IRI considered at the current step of the parsing. + /// + /// ``` + /// use oxttl::TurtleParser; + /// + /// let file = br#"@base . + /// @prefix schema: . + /// a schema:Person ; + /// schema:name "Foo" ."#; + /// + /// let mut reader = TurtleParser::new().parse(); + /// reader.extend_from_slice(file); + /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning + /// + /// reader.read_next().unwrap()?; // We read the first triple + /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn base_iri(&self) -> Option<&str> { + self.parser + .context + .lexer_options + .base_iri + .as_ref() + .map(Iri::as_str) + } +} + +/// Iterator on the file prefixes. +/// +/// See [`LowLevelTurtleReader::prefixes`]. +pub struct TurtlePrefixesIter<'a> { + inner: Iter<'a, String, Iri>, +} + +impl<'a> Iterator for TurtlePrefixesIter<'a> { + type Item = (&'a str, &'a str); + + #[inline] + fn next(&mut self) -> Option { + let (key, value) = self.inner.next()?; + Some((key.as_str(), value.as_str())) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +/// A [Turtle](https://www.w3.org/TR/turtle/) serializer. +/// +/// Support for [Turtle-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#turtle-star) is available behind the `rdf-star` feature. +/// +/// ``` +/// use oxrdf::{NamedNodeRef, TripleRef}; +/// use oxttl::TurtleSerializer; +/// +/// let mut writer = TurtleSerializer::new() +/// .with_prefix("schema", "http://schema.org/")? +/// .serialize_to_write(Vec::new()); +/// writer.write_triple(TripleRef::new( +/// NamedNodeRef::new("http://example.com#me")?, +/// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, +/// NamedNodeRef::new("http://schema.org/Person")?, +/// ))?; +/// assert_eq!( +/// b"@prefix schema: .\n a schema:Person .\n", +/// writer.finish()?.as_slice() +/// ); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[derive(Default)] +#[must_use] +pub struct TurtleSerializer { + inner: TriGSerializer, +} + +impl TurtleSerializer { + /// Builds a new [`TurtleSerializer`]. 
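+    ///
+    /// A minimal illustrative sketch (not from the upstream docs): with no prefix
+    /// configured, the serializer writes plain full-IRI Turtle.
+    /// ```
+    /// use oxrdf::{NamedNodeRef, TripleRef};
+    /// use oxttl::TurtleSerializer;
+    ///
+    /// let mut writer = TurtleSerializer::new().serialize_to_write(Vec::new());
+    /// writer.write_triple(TripleRef::new(
+    ///     NamedNodeRef::new("http://example.com/s")?,
+    ///     NamedNodeRef::new("http://example.com/p")?,
+    ///     NamedNodeRef::new("http://example.com/o")?,
+    /// ))?;
+    /// assert_eq!(
+    ///     b"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
+    ///     writer.finish()?.as_slice()
+    /// );
+    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
+    /// ```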
+ #[inline] + pub fn new() -> Self { + Self::default() + } + + #[inline] + pub fn with_prefix( + mut self, + prefix_name: impl Into, + prefix_iri: impl Into, + ) -> Result { + self.inner = self.inner.with_prefix(prefix_name, prefix_iri)?; + Ok(self) + } + + /// Writes a Turtle file to a [`Write`] implementation. + /// + /// ``` + /// use oxrdf::{NamedNodeRef, TripleRef}; + /// use oxttl::TurtleSerializer; + /// + /// let mut writer = TurtleSerializer::new() + /// .with_prefix("schema", "http://schema.org/")? + /// .serialize_to_write(Vec::new()); + /// writer.write_triple(TripleRef::new( + /// NamedNodeRef::new("http://example.com#me")?, + /// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, + /// NamedNodeRef::new("http://schema.org/Person")?, + /// ))?; + /// assert_eq!( + /// b"@prefix schema: .\n a schema:Person .\n", + /// writer.finish()?.as_slice() + /// ); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn serialize_to_write(self, write: W) -> ToWriteTurtleWriter { + ToWriteTurtleWriter { + inner: self.inner.serialize_to_write(write), + } + } + + /// Writes a Turtle file to a [`AsyncWrite`] implementation. + /// + /// ``` + /// use oxrdf::{NamedNodeRef, TripleRef}; + /// use oxttl::TurtleSerializer; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(),Box> { + /// let mut writer = TurtleSerializer::new() + /// .with_prefix("schema", "http://schema.org/")? + /// .serialize_to_tokio_async_write(Vec::new()); + /// writer + /// .write_triple(TripleRef::new( + /// NamedNodeRef::new_unchecked("http://example.com#me"), + /// NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + /// NamedNodeRef::new_unchecked("http://schema.org/Person"), + /// )) + /// .await?; + /// assert_eq!( + /// b"@prefix schema: .\n a schema:Person .\n", + /// writer.finish().await?.as_slice() + /// ); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub fn serialize_to_tokio_async_write( + self, + write: W, + ) -> ToTokioAsyncWriteTurtleWriter { + ToTokioAsyncWriteTurtleWriter { + inner: self.inner.serialize_to_tokio_async_write(write), + } + } + + /// Builds a low-level Turtle writer. + /// + /// ``` + /// use oxrdf::{NamedNodeRef, TripleRef}; + /// use oxttl::TurtleSerializer; + /// + /// let mut buf = Vec::new(); + /// let mut writer = TurtleSerializer::new() + /// .with_prefix("schema", "http://schema.org/")? + /// .serialize(); + /// writer.write_triple( + /// TripleRef::new( + /// NamedNodeRef::new("http://example.com#me")?, + /// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, + /// NamedNodeRef::new("http://schema.org/Person")?, + /// ), + /// &mut buf, + /// )?; + /// writer.finish(&mut buf)?; + /// assert_eq!( + /// b"@prefix schema: .\n a schema:Person .\n", + /// buf.as_slice() + /// ); + /// # Result::<_,Box>::Ok(()) + /// ``` + pub fn serialize(self) -> LowLevelTurtleWriter { + LowLevelTurtleWriter { + inner: self.inner.serialize(), + } + } +} + +/// Writes a Turtle file to a [`Write`] implementation. Can be built using [`TurtleSerializer::serialize_to_write`]. +/// +/// ``` +/// use oxrdf::{NamedNodeRef, TripleRef}; +/// use oxttl::TurtleSerializer; +/// +/// let mut writer = TurtleSerializer::new() +/// .with_prefix("schema", "http://schema.org/")? 
+/// .serialize_to_write(Vec::new()); +/// writer.write_triple(TripleRef::new( +/// NamedNodeRef::new("http://example.com#me")?, +/// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, +/// NamedNodeRef::new("http://schema.org/Person")?, +/// ))?; +/// assert_eq!( +/// b"@prefix schema: .\n a schema:Person .\n", +/// writer.finish()?.as_slice() +/// ); +/// # Result::<_,Box>::Ok(()) +/// ``` +#[must_use] +pub struct ToWriteTurtleWriter { + inner: ToWriteTriGWriter, +} + +impl ToWriteTurtleWriter { + /// Writes an extra triple. + pub fn write_triple<'a>(&mut self, t: impl Into>) -> io::Result<()> { + self.inner + .write_quad(t.into().in_graph(GraphNameRef::DefaultGraph)) + } + + /// Ends the write process and returns the underlying [`Write`]. + pub fn finish(self) -> io::Result { + self.inner.finish() + } +} + +/// Writes a Turtle file to a [`AsyncWrite`] implementation. Can be built using [`TurtleSerializer::serialize_to_tokio_async_write`]. +/// +/// ``` +/// use oxrdf::{NamedNodeRef, TripleRef}; +/// use oxttl::TurtleSerializer; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), Box> { +/// let mut writer = TurtleSerializer::new() +/// .with_prefix("schema", "http://schema.org/")? +/// .serialize_to_tokio_async_write(Vec::new()); +/// writer +/// .write_triple(TripleRef::new( +/// NamedNodeRef::new_unchecked("http://example.com#me"), +/// NamedNodeRef::new_unchecked("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), +/// NamedNodeRef::new_unchecked("http://schema.org/Person"), +/// )) +/// .await?; +/// assert_eq!( +/// b"@prefix schema: .\n a schema:Person .\n", +/// writer.finish().await?.as_slice() +/// ); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct ToTokioAsyncWriteTurtleWriter { + inner: ToTokioAsyncWriteTriGWriter, +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteTurtleWriter { + /// Writes an extra triple. + pub async fn write_triple<'a>(&mut self, t: impl Into>) -> io::Result<()> { + self.inner + .write_quad(t.into().in_graph(GraphNameRef::DefaultGraph)) + .await + } + + /// Ends the write process and returns the underlying [`Write`]. + pub async fn finish(self) -> io::Result { + self.inner.finish().await + } +} + +/// Writes a Turtle file by using a low-level API. Can be built using [`TurtleSerializer::serialize`]. +/// +/// ``` +/// use oxrdf::{NamedNodeRef, TripleRef}; +/// use oxttl::TurtleSerializer; +/// +/// let mut buf = Vec::new(); +/// let mut writer = TurtleSerializer::new() +/// .with_prefix("schema", "http://schema.org/")? +/// .serialize(); +/// writer.write_triple( +/// TripleRef::new( +/// NamedNodeRef::new("http://example.com#me")?, +/// NamedNodeRef::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?, +/// NamedNodeRef::new("http://schema.org/Person")?, +/// ), +/// &mut buf, +/// )?; +/// writer.finish(&mut buf)?; +/// assert_eq!( +/// b"@prefix schema: .\n a schema:Person .\n", +/// buf.as_slice() +/// ); +/// # Result::<_,Box>::Ok(()) +/// ``` +pub struct LowLevelTurtleWriter { + inner: LowLevelTriGWriter, +} + +impl LowLevelTurtleWriter { + /// Writes an extra triple. + pub fn write_triple<'a>( + &mut self, + t: impl Into>, + write: impl Write, + ) -> io::Result<()> { + self.inner + .write_quad(t.into().in_graph(GraphNameRef::DefaultGraph), write) + } + + /// Finishes to write the file. 
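+    ///
+    /// Note (explanatory, based on the writer's grouping behaviour): triples sharing
+    /// a subject are batched into a single statement, so the closing ` .` and final
+    /// newline are only emitted here; skipping `finish` truncates the output.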
+    pub fn finish(&mut self, write: impl Write) -> io::Result<()> {
+        self.inner.finish(write)
+    }
+}
+
+#[cfg(test)]
+#[allow(clippy::panic_in_result_fn)]
+mod tests {
+    use super::*;
+    use crate::oxrdf::{BlankNodeRef, LiteralRef, NamedNodeRef};
+
+    #[test]
+    fn test_write() -> io::Result<()> {
+        let mut writer = TurtleSerializer::new().serialize_to_write(Vec::new());
+        writer.write_triple(TripleRef::new(
+            NamedNodeRef::new_unchecked("http://example.com/s"),
+            NamedNodeRef::new_unchecked("http://example.com/p"),
+            NamedNodeRef::new_unchecked("http://example.com/o"),
+        ))?;
+        writer.write_triple(TripleRef::new(
+            NamedNodeRef::new_unchecked("http://example.com/s"),
+            NamedNodeRef::new_unchecked("http://example.com/p"),
+            LiteralRef::new_simple_literal("foo"),
+        ))?;
+        writer.write_triple(TripleRef::new(
+            NamedNodeRef::new_unchecked("http://example.com/s"),
+            NamedNodeRef::new_unchecked("http://example.com/p2"),
+            LiteralRef::new_language_tagged_literal_unchecked("foo", "en"),
+        ))?;
+        writer.write_triple(TripleRef::new(
+            BlankNodeRef::new_unchecked("b"),
+            NamedNodeRef::new_unchecked("http://example.com/p2"),
+            BlankNodeRef::new_unchecked("b2"),
+        ))?;
+        assert_eq!(
+            String::from_utf8(writer.finish()?).unwrap(),
+            "<http://example.com/s> <http://example.com/p> <http://example.com/o> , \"foo\" ;\n\t<http://example.com/p2> \"foo\"@en .\n_:b <http://example.com/p2> _:b2 .\n"
+        );
+        Ok(())
+    }
+}
diff --git a/ng-oxigraph/src/sparesults/README.md b/ng-oxigraph/src/sparesults/README.md
new file mode 100644
index 0000000..df5a0fb
--- /dev/null
+++ b/ng-oxigraph/src/sparesults/README.md
@@ -0,0 +1,72 @@
+Sparesults
+==========
+
+[![Latest Version](https://img.shields.io/crates/v/sparesults.svg)](https://crates.io/crates/sparesults)
+[![Released API docs](https://docs.rs/sparesults/badge.svg)](https://docs.rs/sparesults)
+[![Crates.io downloads](https://img.shields.io/crates/d/sparesults)](https://crates.io/crates/sparesults)
+[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions)
+[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community)
+
+Sparesults is a set of parsers and serializers for [SPARQL](https://www.w3.org/TR/sparql11-overview/) query results formats.
+
+It supports [SPARQL Query Results XML Format (Second Edition)](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/).
+
+Support for [SPARQL-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#query-result-formats) is also available behind the `rdf-star` feature.
+
+This crate is intended to be a building block for SPARQL client and server implementations in Rust like [Oxigraph](https://oxigraph.org).
+
+The entry points of this library are the two [`QueryResultsParser`] and [`QueryResultsSerializer`] structs.
+
+Usage example converting a JSON result file into a TSV result file:
+```rust
+use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader, QueryResultsSerializer};
+use std::io::Result;
+
+fn convert_json_to_tsv(json_file: &[u8]) -> Result<Vec<u8>> {
+    let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json);
+    let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
+    // Start reading the JSON file to find out which kind of results it contains
+    match json_parser.parse_read(json_file)?
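+    // The two arms below correspond to the two kinds of SPARQL results:
+    // a single boolean (ASK queries) or a stream of solutions (SELECT queries).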
{ + FromReadQueryResultsReader::Boolean(value) => { + // it's a boolean result, we copy it in TSV to the output buffer + tsv_serializer.serialize_boolean_to_write(Vec::new(), value) + }, + FromReadQueryResultsReader::Solutions(solutions_reader) => { + // it's a set of solutions, we create a writer and we write to it while reading in streaming from the JSON file + let mut serialize_solutions_to_write = tsv_serializer.serialize_solutions_to_write(Vec::new(), solutions_reader.variables().to_vec())?; + for solution in solutions_reader { + serialize_solutions_to_write.write(&solution?)?; + } + serialize_solutions_to_write.finish() + } + } +} + +// Let's test with a boolean +assert_eq!( + convert_json_to_tsv(b"{\"boolean\":true}".as_slice()).unwrap(), + b"true" +); + +// And with a set of solutions +assert_eq!( + convert_json_to_tsv(b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}".as_slice()).unwrap(), + b"?foo\t?bar\n\"test\"\t\n" +); +``` + +## License + +This project is licensed under either of + +* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or + ``) +* MIT license ([LICENSE-MIT](../LICENSE-MIT) or + ``) + +at your option. + + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/ng-oxigraph/src/sparesults/csv.rs b/ng-oxigraph/src/sparesults/csv.rs new file mode 100644 index 0000000..11138f1 --- /dev/null +++ b/ng-oxigraph/src/sparesults/csv.rs @@ -0,0 +1,948 @@ +//! Implementation of [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/) + +use crate::oxrdf::vocab::xsd; +use crate::oxrdf::*; +use crate::sparesults::error::{ + QueryResultsParseError, QueryResultsSyntaxError, SyntaxErrorKind, TextPosition, +}; +use memchr::memchr; +use std::io::{self, Read, Write}; +use std::str::{self, FromStr}; +#[cfg(feature = "async-tokio")] +use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt}; + +const MAX_BUFFER_SIZE: usize = 4096 * 4096; + +pub fn write_boolean_csv_result(mut write: W, value: bool) -> io::Result { + write.write_all(if value { b"true" } else { b"false" })?; + Ok(write) +} + +#[cfg(feature = "async-tokio")] +pub async fn tokio_async_write_boolean_csv_result( + mut write: W, + value: bool, +) -> io::Result { + write + .write_all(if value { b"true" } else { b"false" }) + .await?; + Ok(write) +} + +pub struct ToWriteCsvSolutionsWriter { + inner: InnerCsvSolutionsWriter, + write: W, + buffer: String, +} + +impl ToWriteCsvSolutionsWriter { + pub fn start(mut write: W, variables: Vec) -> io::Result { + let mut buffer = String::new(); + let inner = InnerCsvSolutionsWriter::start(&mut buffer, variables); + write.write_all(buffer.as_bytes())?; + buffer.clear(); + Ok(Self { + inner, + write, + buffer, + }) + } + + pub fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + self.inner.write(&mut self.buffer, solution); + self.write.write_all(self.buffer.as_bytes())?; + self.buffer.clear(); + Ok(()) + } + + pub fn finish(self) -> W { + self.write + } +} + +#[cfg(feature = "async-tokio")] +pub struct ToTokioAsyncWriteCsvSolutionsWriter { + inner: InnerCsvSolutionsWriter, + write: W, + buffer: String, +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteCsvSolutionsWriter { + pub async fn 
start(mut write: W, variables: Vec) -> io::Result { + let mut buffer = String::new(); + let inner = InnerCsvSolutionsWriter::start(&mut buffer, variables); + write.write_all(buffer.as_bytes()).await?; + buffer.clear(); + Ok(Self { + inner, + write, + buffer, + }) + } + + pub async fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + self.inner.write(&mut self.buffer, solution); + self.write.write_all(self.buffer.as_bytes()).await?; + self.buffer.clear(); + Ok(()) + } + + pub fn finish(self) -> W { + self.write + } +} + +struct InnerCsvSolutionsWriter { + variables: Vec, +} + +impl InnerCsvSolutionsWriter { + fn start(output: &mut String, variables: Vec) -> Self { + let mut start_vars = true; + for variable in &variables { + if start_vars { + start_vars = false; + } else { + output.push(','); + } + output.push_str(variable.as_str()); + } + output.push_str("\r\n"); + Self { variables } + } + + fn write<'a>( + &self, + output: &mut String, + solution: impl IntoIterator, TermRef<'a>)>, + ) { + let mut values = vec![None; self.variables.len()]; + for (variable, value) in solution { + if let Some(position) = self.variables.iter().position(|v| *v == variable) { + values[position] = Some(value); + } + } + let mut start_binding = true; + for value in values { + if start_binding { + start_binding = false; + } else { + output.push(','); + } + if let Some(value) = value { + write_csv_term(output, value); + } + } + output.push_str("\r\n"); + } +} + +fn write_csv_term<'a>(output: &mut String, term: impl Into>) { + match term.into() { + TermRef::NamedNode(uri) => output.push_str(uri.as_str()), + TermRef::BlankNode(bnode) => { + output.push_str("_:"); + output.push_str(bnode.as_str()) + } + TermRef::Literal(literal) => write_escaped_csv_string(output, literal.value()), + #[cfg(feature = "rdf-star")] + TermRef::Triple(triple) => { + write_csv_term(output, &triple.subject); + output.push(' '); + write_csv_term(output, &triple.predicate); + output.push(' '); + write_csv_term(output, &triple.object) + } + } +} + +fn write_escaped_csv_string(output: &mut String, s: &str) { + if s.bytes().any(|c| matches!(c, b'"' | b',' | b'\n' | b'\r')) { + output.push('"'); + for c in s.chars() { + if c == '"' { + output.push('"'); + output.push('"'); + } else { + output.push(c) + }; + } + output.push('"'); + } else { + output.push_str(s) + } +} + +pub struct ToWriteTsvSolutionsWriter { + inner: InnerTsvSolutionsWriter, + write: W, + buffer: String, +} + +impl ToWriteTsvSolutionsWriter { + pub fn start(mut write: W, variables: Vec) -> io::Result { + let mut buffer = String::new(); + let inner = InnerTsvSolutionsWriter::start(&mut buffer, variables); + write.write_all(buffer.as_bytes())?; + buffer.clear(); + Ok(Self { + inner, + write, + buffer, + }) + } + + pub fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + self.inner.write(&mut self.buffer, solution); + self.write.write_all(self.buffer.as_bytes())?; + self.buffer.clear(); + Ok(()) + } + + pub fn finish(self) -> W { + self.write + } +} + +#[cfg(feature = "async-tokio")] +pub struct ToTokioAsyncWriteTsvSolutionsWriter { + inner: InnerTsvSolutionsWriter, + write: W, + buffer: String, +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteTsvSolutionsWriter { + pub async fn start(mut write: W, variables: Vec) -> io::Result { + let mut buffer = String::new(); + let inner = InnerTsvSolutionsWriter::start(&mut buffer, variables); + write.write_all(buffer.as_bytes()).await?; + 
buffer.clear(); + Ok(Self { + inner, + write, + buffer, + }) + } + + pub async fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + self.inner.write(&mut self.buffer, solution); + self.write.write_all(self.buffer.as_bytes()).await?; + self.buffer.clear(); + Ok(()) + } + + pub fn finish(self) -> W { + self.write + } +} + +struct InnerTsvSolutionsWriter { + variables: Vec, +} + +impl InnerTsvSolutionsWriter { + fn start(output: &mut String, variables: Vec) -> Self { + let mut start_vars = true; + for variable in &variables { + if start_vars { + start_vars = false; + } else { + output.push('\t'); + } + output.push('?'); + output.push_str(variable.as_str()); + } + output.push('\n'); + Self { variables } + } + + fn write<'a>( + &self, + output: &mut String, + solution: impl IntoIterator, TermRef<'a>)>, + ) { + let mut values = vec![None; self.variables.len()]; + for (variable, value) in solution { + if let Some(position) = self.variables.iter().position(|v| *v == variable) { + values[position] = Some(value); + } + } + let mut start_binding = true; + for value in values { + if start_binding { + start_binding = false; + } else { + output.push('\t'); + } + if let Some(value) = value { + write_tsv_term(output, value); + } + } + output.push('\n'); + } +} + +fn write_tsv_term<'a>(output: &mut String, term: impl Into>) { + match term.into() { + TermRef::NamedNode(node) => { + output.push('<'); + output.push_str(node.as_str()); + output.push('>'); + } + TermRef::BlankNode(node) => { + output.push_str("_:"); + output.push_str(node.as_str()); + } + TermRef::Literal(literal) => { + let value = literal.value(); + if let Some(language) = literal.language() { + write_tsv_quoted_str(output, value); + output.push('@'); + output.push_str(language); + } else { + match literal.datatype() { + xsd::BOOLEAN if is_turtle_boolean(value) => output.push_str(value), + xsd::INTEGER if is_turtle_integer(value) => output.push_str(value), + xsd::DECIMAL if is_turtle_decimal(value) => output.push_str(value), + xsd::DOUBLE if is_turtle_double(value) => output.push_str(value), + xsd::STRING => write_tsv_quoted_str(output, value), + datatype => { + write_tsv_quoted_str(output, value); + output.push_str("^^"); + write_tsv_term(output, datatype); + } + } + } + } + #[cfg(feature = "rdf-star")] + TermRef::Triple(triple) => { + output.push_str("<< "); + write_tsv_term(output, &triple.subject); + output.push(' '); + write_tsv_term(output, &triple.predicate); + output.push(' '); + write_tsv_term(output, &triple.object); + output.push_str(" >>"); + } + } +} + +fn write_tsv_quoted_str(output: &mut String, string: &str) { + output.push('"'); + for c in string.chars() { + match c { + '\t' => output.push_str("\\t"), + '\n' => output.push_str("\\n"), + '\r' => output.push_str("\\r"), + '"' => output.push_str("\\\""), + '\\' => output.push_str("\\\\"), + _ => output.push(c), + }; + } + output.push('"'); +} + +fn is_turtle_boolean(value: &str) -> bool { + matches!(value, "true" | "false") +} + +fn is_turtle_integer(value: &str) -> bool { + // [19] INTEGER ::= [+-]? [0-9]+ + let mut value = value.as_bytes(); + if let Some(v) = value.strip_prefix(b"+") { + value = v; + } else if let Some(v) = value.strip_prefix(b"-") { + value = v; + } + !value.is_empty() && value.iter().all(u8::is_ascii_digit) +} + +fn is_turtle_decimal(value: &str) -> bool { + // [20] DECIMAL ::= [+-]? [0-9]* '.' 
[0-9]+ + let mut value = value.as_bytes(); + if let Some(v) = value.strip_prefix(b"+") { + value = v; + } else if let Some(v) = value.strip_prefix(b"-") { + value = v; + } + while value.first().map_or(false, u8::is_ascii_digit) { + value = &value[1..]; + } + let Some(value) = value.strip_prefix(b".") else { + return false; + }; + !value.is_empty() && value.iter().all(u8::is_ascii_digit) +} + +fn is_turtle_double(value: &str) -> bool { + // [21] DOUBLE ::= [+-]? ([0-9]+ '.' [0-9]* EXPONENT | '.' [0-9]+ EXPONENT | [0-9]+ EXPONENT) + // [154s] EXPONENT ::= [eE] [+-]? [0-9]+ + let mut value = value.as_bytes(); + if let Some(v) = value.strip_prefix(b"+") { + value = v; + } else if let Some(v) = value.strip_prefix(b"-") { + value = v; + } + let mut with_before = false; + while value.first().map_or(false, u8::is_ascii_digit) { + value = &value[1..]; + with_before = true; + } + let mut with_after = false; + if let Some(v) = value.strip_prefix(b".") { + value = v; + while value.first().map_or(false, u8::is_ascii_digit) { + value = &value[1..]; + with_after = true; + } + } + if let Some(v) = value.strip_prefix(b"e") { + value = v; + } else if let Some(v) = value.strip_prefix(b"E") { + value = v; + } else { + return false; + } + if let Some(v) = value.strip_prefix(b"+") { + value = v; + } else if let Some(v) = value.strip_prefix(b"-") { + value = v; + } + (with_before || with_after) && !value.is_empty() && value.iter().all(u8::is_ascii_digit) +} + +pub enum FromReadTsvQueryResultsReader { + Solutions { + variables: Vec, + solutions: FromReadTsvSolutionsReader, + }, + Boolean(bool), +} + +impl FromReadTsvQueryResultsReader { + pub fn read(mut read: R) -> Result { + let mut reader = LineReader::new(); + let mut buffer = Vec::new(); + let line = reader.next_line(&mut buffer, &mut read)?; + Ok(match inner_read_first_line(reader, line)? { + TsvInnerQueryResults::Solutions { + variables, + solutions, + } => Self::Solutions { + variables, + solutions: FromReadTsvSolutionsReader { + read, + inner: solutions, + buffer, + }, + }, + TsvInnerQueryResults::Boolean(value) => Self::Boolean(value), + }) + } +} + +pub struct FromReadTsvSolutionsReader { + read: R, + inner: TsvInnerSolutionsReader, + buffer: Vec, +} + +impl FromReadTsvSolutionsReader { + pub fn read_next(&mut self) -> Result>>, QueryResultsParseError> { + let line = self + .inner + .reader + .next_line(&mut self.buffer, &mut self.read)?; + self.inner.read_next(line) + } +} + +#[cfg(feature = "async-tokio")] +pub enum FromTokioAsyncReadTsvQueryResultsReader { + Solutions { + variables: Vec, + solutions: FromTokioAsyncReadTsvSolutionsReader, + }, + Boolean(bool), +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadTsvQueryResultsReader { + pub async fn read(mut read: R) -> Result { + let mut reader = LineReader::new(); + let mut buffer = Vec::new(); + let line = reader.next_line_tokio_async(&mut buffer, &mut read).await?; + Ok(match inner_read_first_line(reader, line)? 
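+        // Explanatory note: the first line alone decides the result kind. A bare
+        // `true`/`false` is a boolean result; anything else is parsed as the
+        // tab-separated variable header of a solutions stream.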
{
+            TsvInnerQueryResults::Solutions {
+                variables,
+                solutions,
+            } => Self::Solutions {
+                variables,
+                solutions: FromTokioAsyncReadTsvSolutionsReader {
+                    read,
+                    inner: solutions,
+                    buffer,
+                },
+            },
+            TsvInnerQueryResults::Boolean(value) => Self::Boolean(value),
+        })
+    }
+}
+
+#[cfg(feature = "async-tokio")]
+pub struct FromTokioAsyncReadTsvSolutionsReader<R: AsyncRead + Unpin> {
+    read: R,
+    inner: TsvInnerSolutionsReader,
+    buffer: Vec<u8>,
+}
+
+#[cfg(feature = "async-tokio")]
+impl<R: AsyncRead + Unpin> FromTokioAsyncReadTsvSolutionsReader<R> {
+    pub async fn read_next(&mut self) -> Result<Option<Vec<Option<Term>>>, QueryResultsParseError> {
+        let line = self
+            .inner
+            .reader
+            .next_line_tokio_async(&mut self.buffer, &mut self.read)
+            .await?;
+        self.inner.read_next(line)
+    }
+}
+
+enum TsvInnerQueryResults {
+    Solutions {
+        variables: Vec<Variable>,
+        solutions: TsvInnerSolutionsReader,
+    },
+    Boolean(bool),
+}
+
+fn inner_read_first_line(
+    reader: LineReader,
+    line: &str,
+) -> Result<TsvInnerQueryResults, QueryResultsParseError> {
+    let line = line.trim_matches(|c| matches!(c, ' ' | '\r' | '\n'));
+    if line.eq_ignore_ascii_case("true") {
+        return Ok(TsvInnerQueryResults::Boolean(true));
+    }
+    if line.eq_ignore_ascii_case("false") {
+        return Ok(TsvInnerQueryResults::Boolean(false));
+    }
+    let mut variables = Vec::new();
+    if !line.is_empty() {
+        for v in line.split('\t') {
+            let v = v.trim();
+            if v.is_empty() {
+                return Err(QueryResultsSyntaxError::msg("Empty column on the first row. The first row should be a list of variables like ?foo or $bar").into());
+            }
+            let variable = Variable::from_str(v).map_err(|e| {
+                QueryResultsSyntaxError::msg(format!("Invalid variable declaration '{v}': {e}"))
+            })?;
+            if variables.contains(&variable) {
+                return Err(QueryResultsSyntaxError::msg(format!(
+                    "The variable {variable} is declared twice"
+                ))
+                .into());
+            }
+            variables.push(variable);
+        }
+    }
+    let column_len = variables.len();
+    Ok(TsvInnerQueryResults::Solutions {
+        variables,
+        solutions: TsvInnerSolutionsReader { reader, column_len },
+    })
+}
+
+struct TsvInnerSolutionsReader {
+    reader: LineReader,
+    column_len: usize,
+}
+
+impl TsvInnerSolutionsReader {
+    #[allow(clippy::unwrap_in_result)]
+    pub fn read_next(
+        &self,
+        line: &str,
+    ) -> Result<Option<Vec<Option<Term>>>, QueryResultsParseError> {
+        if line.is_empty() {
+            return Ok(None); // EOF
+        }
+        let elements = line
+            .split('\t')
+            .enumerate()
+            .map(|(i, v)| {
+                let v = v.trim();
+                if v.is_empty() {
+                    Ok(None)
+                } else {
+                    Ok(Some(Term::from_str(v).map_err(|e| {
+                        let start_position_char = line
+                            .split('\t')
+                            .take(i)
+                            .map(|c| c.chars().count() + 1)
+                            .sum::<usize>();
+                        let start_position_bytes =
+                            line.split('\t').take(i).map(|c| c.len() + 1).sum::<usize>();
+                        QueryResultsSyntaxError(SyntaxErrorKind::Term {
+                            error: e,
+                            term: v.into(),
+                            location: TextPosition {
+                                line: self.reader.line_count - 1,
+                                column: start_position_char.try_into().unwrap(),
+                                offset: self.reader.last_line_start
+                                    + u64::try_from(start_position_bytes).unwrap(),
+                            }..TextPosition {
+                                line: self.reader.line_count - 1,
+                                column: (start_position_char + v.chars().count())
+                                    .try_into()
+                                    .unwrap(),
+                                offset: self.reader.last_line_start
+                                    + u64::try_from(start_position_bytes + v.len()).unwrap(),
+                            },
+                        })
+                    })?))
+                }
+            })
+            .collect::<Result<Vec<_>, QueryResultsParseError>>()?;
+        if elements.len() == self.column_len {
+            Ok(Some(elements))
+        } else if self.column_len == 0 && elements == [None] {
+            Ok(Some(Vec::new())) // Zero columns case
+        } else {
+            Err(QueryResultsSyntaxError::located_message(
+                format!(
+                    "This TSV file has {} columns but we found a row on line {} with {} columns: {}",
+                    self.column_len,
self.reader.line_count - 1, + elements.len(), + line + ), + TextPosition { + line: self.reader.line_count - 1, + column: 0, + offset: self.reader.last_line_start, + }..TextPosition { + line: self.reader.line_count - 1, + column: line.chars().count().try_into().unwrap(), + offset: self.reader.last_line_end, + }, + ) + .into()) + } + } +} + +struct LineReader { + buffer_start: usize, + buffer_end: usize, + line_count: u64, + last_line_start: u64, + last_line_end: u64, +} + +impl LineReader { + fn new() -> Self { + Self { + buffer_start: 0, + buffer_end: 0, + line_count: 0, + last_line_start: 0, + last_line_end: 0, + } + } + + #[allow(clippy::unwrap_in_result)] + fn next_line<'a>( + &mut self, + buffer: &'a mut Vec, + read: &mut impl Read, + ) -> io::Result<&'a str> { + let line_end = loop { + if let Some(eol) = memchr(b'\n', &buffer[self.buffer_start..self.buffer_end]) { + break self.buffer_start + eol + 1; + } + if self.buffer_start > 0 { + buffer.copy_within(self.buffer_start..self.buffer_end, 0); + self.buffer_end -= self.buffer_start; + self.buffer_start = 0; + } + if self.buffer_end + 1024 > buffer.len() { + if self.buffer_end + 1024 > MAX_BUFFER_SIZE { + return Err(io::Error::new( + io::ErrorKind::OutOfMemory, + format!("Reached the buffer maximal size of {MAX_BUFFER_SIZE}"), + )); + } + buffer.resize(self.buffer_end + 1024, b'\0'); + } + let read = read.read(&mut buffer[self.buffer_end..])?; + if read == 0 { + break self.buffer_end; + } + self.buffer_end += read; + }; + let result = str::from_utf8(&buffer[self.buffer_start..line_end]).map_err(|e| { + io::Error::new( + io::ErrorKind::InvalidData, + format!("Invalid UTF-8 in the TSV file: {e}"), + ) + }); + self.line_count += 1; + self.last_line_start = self.last_line_end; + self.last_line_end += u64::try_from(line_end - self.buffer_start).unwrap(); + self.buffer_start = line_end; + result + } + + #[cfg(feature = "async-tokio")] + #[allow(clippy::unwrap_in_result)] + async fn next_line_tokio_async<'a>( + &mut self, + buffer: &'a mut Vec, + read: &mut (impl AsyncRead + Unpin), + ) -> io::Result<&'a str> { + let line_end = loop { + if let Some(eol) = memchr(b'\n', &buffer[self.buffer_start..self.buffer_end]) { + break self.buffer_start + eol + 1; + } + if self.buffer_start > 0 { + buffer.copy_within(self.buffer_start..self.buffer_end, 0); + self.buffer_end -= self.buffer_start; + self.buffer_start = 0; + } + if self.buffer_end + 1024 > buffer.len() { + if self.buffer_end + 1024 > MAX_BUFFER_SIZE { + return Err(io::Error::new( + io::ErrorKind::OutOfMemory, + format!("Reached the buffer maximal size of {MAX_BUFFER_SIZE}"), + )); + } + buffer.resize(self.buffer_end + 1024, b'\0'); + } + let read = read.read(&mut buffer[self.buffer_end..]).await?; + if read == 0 { + break self.buffer_end; + } + self.buffer_end += read; + }; + let result = str::from_utf8(&buffer[self.buffer_start..line_end]).map_err(|e| { + io::Error::new( + io::ErrorKind::InvalidData, + format!("Invalid UTF-8 in the TSV file: {e}"), + ) + }); + self.line_count += 1; + self.last_line_start = self.last_line_end; + self.last_line_end += u64::try_from(line_end - self.buffer_start).unwrap(); + self.buffer_start = line_end; + result + } +} + +#[cfg(test)] +#[allow(clippy::panic_in_result_fn)] +mod tests { + use super::*; + use std::error::Error; + + fn build_example() -> (Vec, Vec>>) { + ( + vec![ + Variable::new_unchecked("x"), + Variable::new_unchecked("literal"), + ], + vec![ + vec![ + Some(NamedNode::new_unchecked("http://example/x").into()), + 
Some(Literal::new_simple_literal("String").into()), + ], + vec![ + Some(NamedNode::new_unchecked("http://example/x").into()), + Some(Literal::new_simple_literal("String-with-dquote\"").into()), + ], + vec![ + Some(BlankNode::new_unchecked("b0").into()), + Some(Literal::new_simple_literal("Blank node").into()), + ], + vec![ + None, + Some(Literal::new_simple_literal("Missing 'x'").into()), + ], + vec![None, None], + vec![ + Some(NamedNode::new_unchecked("http://example/x").into()), + None, + ], + vec![ + Some(BlankNode::new_unchecked("b1").into()), + Some( + Literal::new_language_tagged_literal_unchecked("String-with-lang", "en") + .into(), + ), + ], + vec![ + Some(BlankNode::new_unchecked("b1").into()), + Some(Literal::new_typed_literal("123", xsd::INTEGER).into()), + ], + vec![ + None, + Some(Literal::new_simple_literal("escape,\t\r\n").into()), + ], + ], + ) + } + + #[test] + fn test_csv_serialization() { + let (variables, solutions) = build_example(); + let mut buffer = String::new(); + let writer = InnerCsvSolutionsWriter::start(&mut buffer, variables.clone()); + for solution in solutions { + writer.write( + &mut buffer, + variables + .iter() + .zip(&solution) + .filter_map(|(v, s)| s.as_ref().map(|s| (v.as_ref(), s.as_ref()))), + ); + } + assert_eq!(buffer, "x,literal\r\nhttp://example/x,String\r\nhttp://example/x,\"String-with-dquote\"\"\"\r\n_:b0,Blank node\r\n,Missing 'x'\r\n,\r\nhttp://example/x,\r\n_:b1,String-with-lang\r\n_:b1,123\r\n,\"escape,\t\r\n\"\r\n"); + } + + #[test] + fn test_tsv_roundtrip() -> Result<(), Box> { + let (variables, solutions) = build_example(); + + // Write + let mut buffer = String::new(); + let writer = InnerTsvSolutionsWriter::start(&mut buffer, variables.clone()); + for solution in &solutions { + writer.write( + &mut buffer, + variables + .iter() + .zip(solution) + .filter_map(|(v, s)| s.as_ref().map(|s| (v.as_ref(), s.as_ref()))), + ); + } + assert_eq!(buffer, "?x\t?literal\n\t\"String\"\n\t\"String-with-dquote\\\"\"\n_:b0\t\"Blank node\"\n\t\"Missing 'x'\"\n\t\n\t\n_:b1\t\"String-with-lang\"@en\n_:b1\t123\n\t\"escape,\\t\\r\\n\"\n"); + + // Read + if let FromReadTsvQueryResultsReader::Solutions { + solutions: mut solutions_iter, + variables: actual_variables, + } = FromReadTsvQueryResultsReader::read(buffer.as_bytes())? + { + assert_eq!(actual_variables.as_slice(), variables.as_slice()); + let mut rows = Vec::new(); + while let Some(row) = solutions_iter.read_next()? { + rows.push(row); + } + assert_eq!(rows, solutions); + } else { + unreachable!() + } + + Ok(()) + } + + #[test] + fn test_bad_tsv() { + let mut bad_tsvs = vec![ + "?", + "?p", + "?p?o", + "?p\n<", + "?p\n_", + "?p\n_:", + "?p\n\"", + "?p\n<<", + "?p\n1\t2\n", + "?p\n\n", + ]; + let a_lot_of_strings = format!("?p\n{}\n", "<".repeat(100_000)); + bad_tsvs.push(&a_lot_of_strings); + for bad_tsv in bad_tsvs { + if let Ok(FromReadTsvQueryResultsReader::Solutions { mut solutions, .. 
}) = + FromReadTsvQueryResultsReader::read(bad_tsv.as_bytes()) + { + while let Ok(Some(_)) = solutions.read_next() {} + } + } + } + + #[test] + fn test_no_columns_csv_serialization() { + let mut buffer = String::new(); + let writer = InnerCsvSolutionsWriter::start(&mut buffer, Vec::new()); + writer.write(&mut buffer, []); + assert_eq!(buffer, "\r\n\r\n"); + } + + #[test] + fn test_no_columns_tsv_serialization() { + let mut buffer = String::new(); + let writer = InnerTsvSolutionsWriter::start(&mut buffer, Vec::new()); + writer.write(&mut buffer, []); + assert_eq!(buffer, "\n\n"); + } + + #[test] + fn test_no_columns_tsv_parsing() -> io::Result<()> { + if let FromReadTsvQueryResultsReader::Solutions { + mut solutions, + variables, + } = FromReadTsvQueryResultsReader::read(b"\n\n".as_slice())? + { + assert_eq!(variables, Vec::::new()); + assert_eq!(solutions.read_next()?, Some(Vec::new())); + assert_eq!(solutions.read_next()?, None); + } else { + unreachable!() + } + Ok(()) + } + + #[test] + fn test_no_results_csv_serialization() { + let mut buffer = String::new(); + InnerCsvSolutionsWriter::start(&mut buffer, vec![Variable::new_unchecked("a")]); + assert_eq!(buffer, "a\r\n"); + } + + #[test] + fn test_no_results_tsv_serialization() { + let mut buffer = String::new(); + InnerTsvSolutionsWriter::start(&mut buffer, vec![Variable::new_unchecked("a")]); + assert_eq!(buffer, "?a\n"); + } + + #[test] + fn test_no_results_tsv_parsing() -> io::Result<()> { + if let FromReadTsvQueryResultsReader::Solutions { + mut solutions, + variables, + } = FromReadTsvQueryResultsReader::read(b"?a\n".as_slice())? + { + assert_eq!(variables, vec![Variable::new_unchecked("a")]); + assert_eq!(solutions.read_next()?, None); + } else { + unreachable!() + } + Ok(()) + } +} diff --git a/ng-oxigraph/src/sparesults/error.rs b/ng-oxigraph/src/sparesults/error.rs new file mode 100644 index 0000000..d015f71 --- /dev/null +++ b/ng-oxigraph/src/sparesults/error.rs @@ -0,0 +1,157 @@ +use crate::oxrdf::TermParseError; +use std::io; +use std::ops::Range; +use std::sync::Arc; + +/// Error returned during SPARQL result formats format parsing. +#[derive(Debug, thiserror::Error)] +pub enum QueryResultsParseError { + /// I/O error during parsing (file not found...). + #[error(transparent)] + Io(#[from] io::Error), + /// An error in the file syntax. + #[error(transparent)] + Syntax(#[from] QueryResultsSyntaxError), +} + +impl From for io::Error { + #[inline] + fn from(error: QueryResultsParseError) -> Self { + match error { + QueryResultsParseError::Io(error) => error, + QueryResultsParseError::Syntax(error) => error.into(), + } + } +} + +impl From for QueryResultsParseError { + fn from(error: json_event_parser::ParseError) -> Self { + match error { + json_event_parser::ParseError::Syntax(error) => { + QueryResultsSyntaxError::from(error).into() + } + json_event_parser::ParseError::Io(error) => error.into(), + } + } +} + +impl From for QueryResultsParseError { + #[inline] + fn from(error: quick_xml::Error) -> Self { + match error { + quick_xml::Error::Io(error) => { + Self::Io(Arc::try_unwrap(error).unwrap_or_else(|e| io::Error::new(e.kind(), e))) + } + _ => Self::Syntax(QueryResultsSyntaxError(SyntaxErrorKind::Xml(error))), + } + } +} + +impl From for QueryResultsParseError { + #[inline] + fn from(error: quick_xml::escape::EscapeError) -> Self { + quick_xml::Error::from(error).into() + } +} +/// An error in the syntax of the parsed file. 
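+///
+/// A sketch of how such an error typically surfaces, assuming the TSV parser from
+/// this crate (the exact message is not part of the API contract):
+/// ```
+/// use sparesults::{QueryResultsFormat, QueryResultsParser, QueryResultsParseError};
+///
+/// let parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv);
+/// // "?a?b" is not a valid header line: the two variables are not tab-separated.
+/// let result = parser.parse_read(b"?a?b\n".as_slice());
+/// assert!(matches!(result, Err(QueryResultsParseError::Syntax(_))));
+/// ```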
+#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct QueryResultsSyntaxError(#[from] pub(crate) SyntaxErrorKind); + +#[derive(Debug, thiserror::Error)] +pub(crate) enum SyntaxErrorKind { + #[error(transparent)] + Json(#[from] json_event_parser::SyntaxError), + #[error(transparent)] + Xml(#[from] quick_xml::Error), + #[error("Error {error} on '{term}' in line {}", location.start.line + 1)] + Term { + #[source] + error: TermParseError, + term: String, + location: Range, + }, + #[error("{msg}")] + Msg { + msg: String, + location: Option>, + }, +} + +impl QueryResultsSyntaxError { + /// Builds an error from a printable error message. + #[inline] + pub(crate) fn msg(msg: impl Into) -> Self { + Self(SyntaxErrorKind::Msg { + msg: msg.into(), + location: None, + }) + } + + /// Builds an error from a printable error message and a location + #[inline] + pub(crate) fn located_message(msg: impl Into, location: Range) -> Self { + Self(SyntaxErrorKind::Msg { + msg: msg.into(), + location: Some(location), + }) + } + + /// The location of the error inside of the file. + #[inline] + pub fn location(&self) -> Option> { + match &self.0 { + SyntaxErrorKind::Json(e) => { + let location = e.location(); + Some( + TextPosition { + line: location.start.line, + column: location.start.column, + offset: location.start.offset, + }..TextPosition { + line: location.end.line, + column: location.end.column, + offset: location.end.offset, + }, + ) + } + SyntaxErrorKind::Term { location, .. } => Some(location.clone()), + SyntaxErrorKind::Msg { location, .. } => location.clone(), + SyntaxErrorKind::Xml(_) => None, + } + } +} + +impl From for io::Error { + #[inline] + fn from(error: QueryResultsSyntaxError) -> Self { + match error.0 { + SyntaxErrorKind::Json(error) => Self::new(io::ErrorKind::InvalidData, error), + SyntaxErrorKind::Xml(error) => match error { + quick_xml::Error::Io(error) => { + Arc::try_unwrap(error).unwrap_or_else(|e| Self::new(e.kind(), e)) + } + quick_xml::Error::UnexpectedEof(error) => { + Self::new(io::ErrorKind::UnexpectedEof, error) + } + _ => Self::new(io::ErrorKind::InvalidData, error), + }, + SyntaxErrorKind::Term { .. } => Self::new(io::ErrorKind::InvalidData, error), + SyntaxErrorKind::Msg { msg, .. } => Self::new(io::ErrorKind::InvalidData, msg), + } + } +} + +impl From for QueryResultsSyntaxError { + fn from(error: json_event_parser::SyntaxError) -> Self { + Self(SyntaxErrorKind::Json(error)) + } +} + +/// A position in a text i.e. a `line` number starting from 0, a `column` number starting from 0 (in number of code points) and a global file `offset` starting from 0 (in number of bytes). +#[derive(Eq, PartialEq, Debug, Clone, Copy)] +pub struct TextPosition { + pub line: u64, + pub column: u64, + pub offset: u64, +} diff --git a/ng-oxigraph/src/sparesults/format.rs b/ng-oxigraph/src/sparesults/format.rs new file mode 100644 index 0000000..982ff11 --- /dev/null +++ b/ng-oxigraph/src/sparesults/format.rs @@ -0,0 +1,176 @@ +use std::fmt; + +/// [SPARQL query](https://www.w3.org/TR/sparql11-query/) results serialization formats. 
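+///
+/// For example (an illustrative lookup using the helpers defined on this type):
+/// ```
+/// use sparesults::QueryResultsFormat;
+///
+/// assert_eq!(
+///     QueryResultsFormat::from_extension("srx"),
+///     Some(QueryResultsFormat::Xml)
+/// );
+/// ```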
+#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)] +#[non_exhaustive] +pub enum QueryResultsFormat { + /// [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/) + Xml, + /// [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) + Json, + /// [SPARQL Query Results CSV Format](https://www.w3.org/TR/sparql11-results-csv-tsv/) + Csv, + /// [SPARQL Query Results TSV Format](https://www.w3.org/TR/sparql11-results-csv-tsv/) + Tsv, +} + +impl QueryResultsFormat { + /// The format canonical IRI according to the [Unique URIs for file formats registry](https://www.w3.org/ns/formats/). + /// + /// ``` + /// use sparesults::QueryResultsFormat; + /// + /// assert_eq!( + /// QueryResultsFormat::Json.iri(), + /// "http://www.w3.org/ns/formats/SPARQL_Results_JSON" + /// ) + /// ``` + #[inline] + pub fn iri(self) -> &'static str { + match self { + Self::Xml => "http://www.w3.org/ns/formats/SPARQL_Results_XML", + Self::Json => "http://www.w3.org/ns/formats/SPARQL_Results_JSON", + Self::Csv => "http://www.w3.org/ns/formats/SPARQL_Results_CSV", + Self::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV", + } + } + + /// The format [IANA media type](https://tools.ietf.org/html/rfc2046). + /// + /// ``` + /// use sparesults::QueryResultsFormat; + /// + /// assert_eq!( + /// QueryResultsFormat::Json.media_type(), + /// "application/sparql-results+json" + /// ) + /// ``` + #[inline] + pub fn media_type(self) -> &'static str { + match self { + Self::Xml => "application/sparql-results+xml", + Self::Json => "application/sparql-results+json", + Self::Csv => "text/csv; charset=utf-8", + Self::Tsv => "text/tab-separated-values; charset=utf-8", + } + } + + /// The format [IANA-registered](https://tools.ietf.org/html/rfc2046) file extension. + /// + /// ``` + /// use sparesults::QueryResultsFormat; + /// + /// assert_eq!(QueryResultsFormat::Json.file_extension(), "srj") + /// ``` + #[inline] + pub fn file_extension(self) -> &'static str { + match self { + Self::Xml => "srx", + Self::Json => "srj", + Self::Csv => "csv", + Self::Tsv => "tsv", + } + } + + /// The format name. + /// + /// ``` + /// use sparesults::QueryResultsFormat; + /// + /// assert_eq!(QueryResultsFormat::Json.name(), "SPARQL Results in JSON") + /// ``` + #[inline] + pub const fn name(self) -> &'static str { + match self { + Self::Xml => "SPARQL Results in XML", + Self::Json => "SPARQL Results in JSON", + Self::Csv => "SPARQL Results in CSV", + Self::Tsv => "SPARQL Results in TSV", + } + } + + /// Looks for a known format from a media type. + /// + /// It supports some media type aliases. + /// For example, "application/xml" is going to return `Xml` even if it is not its canonical media type. 
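+    ///
+    /// A couple of extra illustrative cases derived from the alias table below:
+    /// ```
+    /// use sparesults::QueryResultsFormat;
+    ///
+    /// assert_eq!(
+    ///     QueryResultsFormat::from_media_type("text/tsv"),
+    ///     Some(QueryResultsFormat::Tsv)
+    /// );
+    /// assert_eq!(QueryResultsFormat::from_media_type("application/unknown"), None);
+    /// ```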
+ /// + /// Example: + /// ``` + /// use sparesults::QueryResultsFormat; + /// + /// assert_eq!( + /// QueryResultsFormat::from_media_type("application/sparql-results+json; charset=utf-8"), + /// Some(QueryResultsFormat::Json) + /// ) + /// ``` + #[inline] + pub fn from_media_type(media_type: &str) -> Option { + const MEDIA_SUBTYPES: [(&str, QueryResultsFormat); 8] = [ + ("csv", QueryResultsFormat::Csv), + ("json", QueryResultsFormat::Json), + ("plain", QueryResultsFormat::Csv), + ("sparql-results+json", QueryResultsFormat::Json), + ("sparql-results+xml", QueryResultsFormat::Xml), + ("tab-separated-values", QueryResultsFormat::Tsv), + ("tsv", QueryResultsFormat::Tsv), + ("xml", QueryResultsFormat::Xml), + ]; + + let (r#type, subtype) = media_type + .split_once(';') + .unwrap_or((media_type, "")) + .0 + .trim() + .split_once('/')?; + let r#type = r#type.trim(); + if !r#type.eq_ignore_ascii_case("application") && !r#type.eq_ignore_ascii_case("text") { + return None; + } + let subtype = subtype.trim(); + let subtype = subtype.strip_prefix("x-").unwrap_or(subtype); + for (candidate_subtype, candidate_id) in MEDIA_SUBTYPES { + if candidate_subtype.eq_ignore_ascii_case(subtype) { + return Some(candidate_id); + } + } + None + } + + /// Looks for a known format from an extension. + /// + /// It supports some aliases. + /// + /// Example: + /// ``` + /// use sparesults::QueryResultsFormat; + /// + /// assert_eq!( + /// QueryResultsFormat::from_extension("json"), + /// Some(QueryResultsFormat::Json) + /// ) + /// ``` + #[inline] + pub fn from_extension(extension: &str) -> Option { + const MEDIA_TYPES: [(&str, QueryResultsFormat); 7] = [ + ("csv", QueryResultsFormat::Csv), + ("json", QueryResultsFormat::Json), + ("srj", QueryResultsFormat::Json), + ("srx", QueryResultsFormat::Xml), + ("tsv", QueryResultsFormat::Tsv), + ("txt", QueryResultsFormat::Csv), + ("xml", QueryResultsFormat::Xml), + ]; + for (candidate_extension, candidate_id) in MEDIA_TYPES { + if candidate_extension.eq_ignore_ascii_case(extension) { + return Some(candidate_id); + } + } + None + } +} + +impl fmt::Display for QueryResultsFormat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.name()) + } +} diff --git a/ng-oxigraph/src/sparesults/json.rs b/ng-oxigraph/src/sparesults/json.rs new file mode 100644 index 0000000..8ebefb7 --- /dev/null +++ b/ng-oxigraph/src/sparesults/json.rs @@ -0,0 +1,1101 @@ +//! 
Implementation of [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) + +use crate::oxrdf::vocab::rdf; +use crate::oxrdf::*; +use crate::sparesults::error::{QueryResultsParseError, QueryResultsSyntaxError}; +use json_event_parser::{FromReadJsonReader, JsonEvent, ToWriteJsonWriter}; +#[cfg(feature = "async-tokio")] +use json_event_parser::{FromTokioAsyncReadJsonReader, ToTokioAsyncWriteJsonWriter}; +use std::collections::BTreeMap; +use std::io::{self, Read, Write}; +use std::mem::take; +#[cfg(feature = "async-tokio")] +use tokio::io::{AsyncRead, AsyncWrite}; + +pub fn write_boolean_json_result(write: W, value: bool) -> io::Result { + let mut writer = ToWriteJsonWriter::new(write); + for event in inner_write_boolean_json_result(value) { + writer.write_event(event)?; + } + writer.finish() +} + +#[cfg(feature = "async-tokio")] +pub async fn tokio_async_write_boolean_json_result( + write: W, + value: bool, +) -> io::Result { + let mut writer = ToTokioAsyncWriteJsonWriter::new(write); + for event in inner_write_boolean_json_result(value) { + writer.write_event(event).await?; + } + writer.finish() +} + +fn inner_write_boolean_json_result(value: bool) -> [JsonEvent<'static>; 7] { + [ + JsonEvent::StartObject, + JsonEvent::ObjectKey("head".into()), + JsonEvent::StartObject, + JsonEvent::EndObject, + JsonEvent::ObjectKey("boolean".into()), + JsonEvent::Boolean(value), + JsonEvent::EndObject, + ] +} + +pub struct ToWriteJsonSolutionsWriter { + inner: InnerJsonSolutionsWriter, + writer: ToWriteJsonWriter, +} + +impl ToWriteJsonSolutionsWriter { + pub fn start(write: W, variables: &[Variable]) -> io::Result { + let mut writer = ToWriteJsonWriter::new(write); + let mut buffer = Vec::with_capacity(48); + let inner = InnerJsonSolutionsWriter::start(&mut buffer, variables); + Self::do_write(&mut writer, buffer)?; + Ok(Self { inner, writer }) + } + + pub fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + let mut buffer = Vec::with_capacity(48); + self.inner.write(&mut buffer, solution); + Self::do_write(&mut self.writer, buffer) + } + + pub fn finish(mut self) -> io::Result { + let mut buffer = Vec::with_capacity(4); + self.inner.finish(&mut buffer); + Self::do_write(&mut self.writer, buffer)?; + self.writer.finish() + } + + fn do_write(writer: &mut ToWriteJsonWriter, output: Vec>) -> io::Result<()> { + for event in output { + writer.write_event(event)?; + } + Ok(()) + } +} + +#[cfg(feature = "async-tokio")] +pub struct ToTokioAsyncWriteJsonSolutionsWriter { + inner: InnerJsonSolutionsWriter, + writer: ToTokioAsyncWriteJsonWriter, +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteJsonSolutionsWriter { + pub async fn start(write: W, variables: &[Variable]) -> io::Result { + let mut writer = ToTokioAsyncWriteJsonWriter::new(write); + let mut buffer = Vec::with_capacity(48); + let inner = InnerJsonSolutionsWriter::start(&mut buffer, variables); + Self::do_write(&mut writer, buffer).await?; + Ok(Self { inner, writer }) + } + + pub async fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + let mut buffer = Vec::with_capacity(48); + self.inner.write(&mut buffer, solution); + Self::do_write(&mut self.writer, buffer).await + } + + pub async fn finish(mut self) -> io::Result { + let mut buffer = Vec::with_capacity(4); + self.inner.finish(&mut buffer); + Self::do_write(&mut self.writer, buffer).await?; + self.writer.finish() + } + + async fn do_write( + writer: &mut 
ToTokioAsyncWriteJsonWriter, + output: Vec>, + ) -> io::Result<()> { + for event in output { + writer.write_event(event).await?; + } + Ok(()) + } +} + +struct InnerJsonSolutionsWriter; + +impl InnerJsonSolutionsWriter { + fn start<'a>(output: &mut Vec>, variables: &'a [Variable]) -> Self { + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("head".into())); + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("vars".into())); + output.push(JsonEvent::StartArray); + for variable in variables { + output.push(JsonEvent::String(variable.as_str().into())); + } + output.push(JsonEvent::EndArray); + output.push(JsonEvent::EndObject); + output.push(JsonEvent::ObjectKey("results".into())); + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("bindings".into())); + output.push(JsonEvent::StartArray); + Self {} + } + + #[allow(clippy::unused_self)] + fn write<'a>( + &self, + output: &mut Vec>, + solution: impl IntoIterator, TermRef<'a>)>, + ) { + output.push(JsonEvent::StartObject); + for (variable, value) in solution { + output.push(JsonEvent::ObjectKey(variable.as_str().into())); + write_json_term(output, value); + } + output.push(JsonEvent::EndObject); + } + + #[allow(clippy::unused_self)] + fn finish(self, output: &mut Vec>) { + output.push(JsonEvent::EndArray); + output.push(JsonEvent::EndObject); + output.push(JsonEvent::EndObject); + } +} + +fn write_json_term<'a>(output: &mut Vec>, term: TermRef<'a>) { + match term { + TermRef::NamedNode(uri) => { + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("type".into())); + output.push(JsonEvent::String("uri".into())); + output.push(JsonEvent::ObjectKey("value".into())); + output.push(JsonEvent::String(uri.as_str().into())); + output.push(JsonEvent::EndObject); + } + TermRef::BlankNode(bnode) => { + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("type".into())); + output.push(JsonEvent::String("bnode".into())); + output.push(JsonEvent::ObjectKey("value".into())); + output.push(JsonEvent::String(bnode.as_str().into())); + output.push(JsonEvent::EndObject); + } + TermRef::Literal(literal) => { + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("type".into())); + output.push(JsonEvent::String("literal".into())); + output.push(JsonEvent::ObjectKey("value".into())); + output.push(JsonEvent::String(literal.value().into())); + if let Some(language) = literal.language() { + output.push(JsonEvent::ObjectKey("xml:lang".into())); + output.push(JsonEvent::String(language.into())); + } else if !literal.is_plain() { + output.push(JsonEvent::ObjectKey("datatype".into())); + output.push(JsonEvent::String(literal.datatype().as_str().into())); + } + output.push(JsonEvent::EndObject); + } + #[cfg(feature = "rdf-star")] + TermRef::Triple(triple) => { + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("type".into())); + output.push(JsonEvent::String("triple".into())); + output.push(JsonEvent::ObjectKey("value".into())); + output.push(JsonEvent::StartObject); + output.push(JsonEvent::ObjectKey("subject".into())); + write_json_term(output, triple.subject.as_ref().into()); + output.push(JsonEvent::ObjectKey("predicate".into())); + write_json_term(output, triple.predicate.as_ref().into()); + output.push(JsonEvent::ObjectKey("object".into())); + write_json_term(output, triple.object.as_ref()); + output.push(JsonEvent::EndObject); + output.push(JsonEvent::EndObject); + } + } +} + +pub enum FromReadJsonQueryResultsReader { 
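+    // A SPARQL JSON results document has exactly one of two top-level shapes:
+    // a "results"/"bindings" member carrying solutions, or a "boolean" member
+    // (the answer to an ASK query); the two variants below mirror that split.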
+ Solutions { + variables: Vec, + solutions: FromReadJsonSolutionsReader, + }, + Boolean(bool), +} + +impl FromReadJsonQueryResultsReader { + pub fn read(read: R) -> Result { + let mut reader = FromReadJsonReader::new(read); + let mut inner = JsonInnerReader::new(); + loop { + if let Some(result) = inner.read_event(reader.read_next_event()?)? { + return match result { + JsonInnerQueryResults::Solutions { + variables, + solutions, + } => Ok(Self::Solutions { + variables, + solutions: FromReadJsonSolutionsReader { + inner: solutions, + reader, + }, + }), + JsonInnerQueryResults::Boolean(value) => Ok(Self::Boolean(value)), + }; + } + } + } +} + +pub struct FromReadJsonSolutionsReader { + inner: JsonInnerSolutions, + reader: FromReadJsonReader, +} + +impl FromReadJsonSolutionsReader { + pub fn read_next(&mut self) -> Result>>, QueryResultsParseError> { + match &mut self.inner { + JsonInnerSolutions::Reader(reader) => loop { + let event = self.reader.read_next_event()?; + if event == JsonEvent::Eof { + return Ok(None); + } + if let Some(result) = reader.read_event(event)? { + return Ok(Some(result)); + } + }, + JsonInnerSolutions::Iterator(iter) => iter.next(), + } + } +} + +#[cfg(feature = "async-tokio")] +pub enum FromTokioAsyncReadJsonQueryResultsReader { + Solutions { + variables: Vec, + solutions: FromTokioAsyncReadJsonSolutionsReader, + }, + Boolean(bool), +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadJsonQueryResultsReader { + pub async fn read(read: R) -> Result { + let mut reader = FromTokioAsyncReadJsonReader::new(read); + let mut inner = JsonInnerReader::new(); + loop { + if let Some(result) = inner.read_event(reader.read_next_event().await?)? { + return match result { + JsonInnerQueryResults::Solutions { + variables, + solutions, + } => Ok(Self::Solutions { + variables, + solutions: FromTokioAsyncReadJsonSolutionsReader { + inner: solutions, + reader, + }, + }), + JsonInnerQueryResults::Boolean(value) => Ok(Self::Boolean(value)), + }; + } + } + } +} + +#[cfg(feature = "async-tokio")] +pub struct FromTokioAsyncReadJsonSolutionsReader { + inner: JsonInnerSolutions, + reader: FromTokioAsyncReadJsonReader, +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadJsonSolutionsReader { + pub async fn read_next(&mut self) -> Result>>, QueryResultsParseError> { + match &mut self.inner { + JsonInnerSolutions::Reader(reader) => loop { + let event = self.reader.read_next_event().await?; + if event == JsonEvent::Eof { + return Ok(None); + } + if let Some(result) = reader.read_event(event)? 
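+                // The inner state machine has assembled a complete row of
+                // bindings: surface it instead of pulling more events.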
{ + return Ok(Some(result)); + } + }, + JsonInnerSolutions::Iterator(iter) => iter.next(), + } + } +} + +enum JsonInnerQueryResults { + Solutions { + variables: Vec, + solutions: JsonInnerSolutions, + }, + Boolean(bool), +} + +enum JsonInnerSolutions { + Reader(JsonInnerSolutionsReader), + Iterator(JsonBufferedSolutionsIterator), +} + +struct JsonInnerReader { + state: JsonInnerReaderState, + variables: Vec, + current_solution_variables: Vec, + current_solution_values: Vec, + solutions: Vec<(Vec, Vec)>, + vars_read: bool, + solutions_read: bool, +} + +enum JsonInnerReaderState { + Start, + InRootObject, + BeforeHead, + InHead, + BeforeVars, + InVars, + BeforeLinks, + InLinks, + BeforeResults, + InResults, + BeforeBindings, + BeforeSolution, + BetweenSolutionTerms, + Term { + reader: JsonInnerTermReader, + variable: String, + }, + AfterBindings, + BeforeBoolean, + Ignore { + level: usize, + after: JsonInnerReaderStateAfterIgnore, + }, +} + +#[allow(clippy::enum_variant_names)] +#[derive(Clone, Copy)] +enum JsonInnerReaderStateAfterIgnore { + InRootObject, + InHead, + InResults, + AfterBindings, +} + +impl JsonInnerReader { + fn new() -> Self { + Self { + state: JsonInnerReaderState::Start, + variables: Vec::new(), + current_solution_variables: Vec::new(), + current_solution_values: Vec::new(), + solutions: Vec::new(), + vars_read: false, + solutions_read: false, + } + } + + fn read_event( + &mut self, + event: JsonEvent<'_>, + ) -> Result, QueryResultsSyntaxError> { + match &mut self.state { + JsonInnerReaderState::Start => { + if event == JsonEvent::StartObject { + self.state = JsonInnerReaderState::InRootObject; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "SPARQL JSON results must be an object", + )) + } + } + JsonInnerReaderState::InRootObject => match event { + JsonEvent::ObjectKey(key) => match key.as_ref() { + "head" => { + self.state = JsonInnerReaderState::BeforeHead; + Ok(None) + } + "results" => { + self.state = JsonInnerReaderState::BeforeResults; + Ok(None) + } + "boolean" => { + self.state = JsonInnerReaderState::BeforeBoolean; + Ok(None) + } + _ => { + self.state = JsonInnerReaderState::Ignore { + level: 0, + after: JsonInnerReaderStateAfterIgnore::InRootObject, + }; + Ok(None) + } + }, + JsonEvent::EndObject => Err(QueryResultsSyntaxError::msg( + "SPARQL JSON results must contain a 'boolean' or a 'results' key", + )), + _ => unreachable!(), + }, + JsonInnerReaderState::BeforeHead => { + if event == JsonEvent::StartObject { + self.state = JsonInnerReaderState::InHead; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "SPARQL JSON results head must be an object", + )) + } + } + JsonInnerReaderState::InHead => match event { + JsonEvent::ObjectKey(key) => match key.as_ref() { + "vars" => { + self.state = JsonInnerReaderState::BeforeVars; + self.vars_read = true; + Ok(None) + } + "links" => { + self.state = JsonInnerReaderState::BeforeLinks; + Ok(None) + } + _ => { + self.state = JsonInnerReaderState::Ignore { + level: 0, + after: JsonInnerReaderStateAfterIgnore::InHead, + }; + Ok(None) + } + }, + JsonEvent::EndObject => { + self.state = JsonInnerReaderState::InRootObject; + Ok(None) + } + _ => unreachable!(), + }, + JsonInnerReaderState::BeforeVars => { + if event == JsonEvent::StartArray { + self.state = JsonInnerReaderState::InVars; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "SPARQL JSON results vars must be an array", + )) + } + } + JsonInnerReaderState::InVars => match event { + JsonEvent::String(variable) => match 
Variable::new(variable.clone()) { + Ok(var) => { + if self.variables.contains(&var) { + return Err(QueryResultsSyntaxError::msg(format!( + "The variable {var} is declared twice" + ))); + } + self.variables.push(var); + Ok(None) + } + Err(e) => Err(QueryResultsSyntaxError::msg(format!( + "Invalid variable name '{variable}': {e}" + ))), + }, + JsonEvent::EndArray => { + if self.solutions_read { + let mut mapping = BTreeMap::default(); + for (i, var) in self.variables.iter().enumerate() { + mapping.insert(var.as_str().to_owned(), i); + } + Ok(Some(JsonInnerQueryResults::Solutions { + variables: take(&mut self.variables), + solutions: JsonInnerSolutions::Iterator( + JsonBufferedSolutionsIterator { + mapping, + bindings: take(&mut self.solutions).into_iter(), + }, + ), + })) + } else { + self.state = JsonInnerReaderState::InHead; + Ok(None) + } + } + _ => Err(QueryResultsSyntaxError::msg( + "Variables name in the vars array must be strings", + )), + }, + JsonInnerReaderState::BeforeLinks => { + if event == JsonEvent::StartArray { + self.state = JsonInnerReaderState::InLinks; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "SPARQL JSON results links must be an array", + )) + } + } + JsonInnerReaderState::InLinks => match event { + JsonEvent::String(_) => Ok(None), + JsonEvent::EndArray => { + self.state = JsonInnerReaderState::InHead; + Ok(None) + } + _ => Err(QueryResultsSyntaxError::msg( + "Links in the links array must be strings", + )), + }, + JsonInnerReaderState::BeforeResults => { + if event == JsonEvent::StartObject { + self.state = JsonInnerReaderState::InResults; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "SPARQL JSON results result must be an object", + )) + } + } + JsonInnerReaderState::InResults => match event { + JsonEvent::ObjectKey(key) => { + if key == "bindings" { + self.state = JsonInnerReaderState::BeforeBindings; + Ok(None) + } else { + self.state = JsonInnerReaderState::Ignore { + level: 0, + after: JsonInnerReaderStateAfterIgnore::InResults, + }; + Ok(None) + } + } + JsonEvent::EndObject => Err(QueryResultsSyntaxError::msg( + "The results object must contains a 'bindings' key", + )), + _ => unreachable!(), + }, + JsonInnerReaderState::BeforeBindings => { + if event == JsonEvent::StartArray { + self.solutions_read = true; + if self.vars_read { + let mut mapping = BTreeMap::default(); + for (i, var) in self.variables.iter().enumerate() { + mapping.insert(var.as_str().to_owned(), i); + } + Ok(Some(JsonInnerQueryResults::Solutions { + variables: take(&mut self.variables), + solutions: JsonInnerSolutions::Reader(JsonInnerSolutionsReader { + state: JsonInnerSolutionsReaderState::BeforeSolution, + mapping, + new_bindings: Vec::new(), + }), + })) + } else { + self.state = JsonInnerReaderState::BeforeSolution; + Ok(None) + } + } else { + Err(QueryResultsSyntaxError::msg( + "SPARQL JSON results bindings must be an array", + )) + } + } + JsonInnerReaderState::BeforeSolution => match event { + JsonEvent::StartObject => { + self.state = JsonInnerReaderState::BetweenSolutionTerms; + Ok(None) + } + JsonEvent::EndArray => { + self.state = JsonInnerReaderState::AfterBindings; + Ok(None) + } + _ => Err(QueryResultsSyntaxError::msg( + "Expecting a new solution object", + )), + }, + JsonInnerReaderState::BetweenSolutionTerms => match event { + JsonEvent::ObjectKey(key) => { + self.state = JsonInnerReaderState::Term { + reader: JsonInnerTermReader::default(), + variable: key.into(), + }; + Ok(None) + } + JsonEvent::EndObject => { + self.state = 
JsonInnerReaderState::BeforeSolution; + self.solutions.push(( + take(&mut self.current_solution_variables), + take(&mut self.current_solution_values), + )); + Ok(None) + } + _ => unreachable!(), + }, + JsonInnerReaderState::Term { + ref mut reader, + variable, + } => { + let result = reader.read_event(event); + if let Some(term) = result? { + self.current_solution_variables.push(take(variable)); + self.current_solution_values.push(term); + self.state = JsonInnerReaderState::BetweenSolutionTerms; + } + Ok(None) + } + JsonInnerReaderState::AfterBindings => { + if event == JsonEvent::EndObject { + self.state = JsonInnerReaderState::InRootObject; + } else { + self.state = JsonInnerReaderState::Ignore { + level: 0, + after: JsonInnerReaderStateAfterIgnore::AfterBindings, + } + } + Ok(None) + } + JsonInnerReaderState::BeforeBoolean => { + if let JsonEvent::Boolean(v) = event { + Ok(Some(JsonInnerQueryResults::Boolean(v))) + } else { + Err(QueryResultsSyntaxError::msg("Unexpected boolean value")) + } + } + #[allow(clippy::ref_patterns)] + JsonInnerReaderState::Ignore { level, ref after } => { + let level = match event { + JsonEvent::StartArray | JsonEvent::StartObject => *level + 1, + JsonEvent::EndArray | JsonEvent::EndObject => *level - 1, + JsonEvent::String(_) + | JsonEvent::Number(_) + | JsonEvent::Boolean(_) + | JsonEvent::Null + | JsonEvent::ObjectKey(_) + | JsonEvent::Eof => *level, + }; + self.state = if level == 0 { + match after { + JsonInnerReaderStateAfterIgnore::InRootObject => { + JsonInnerReaderState::InRootObject + } + JsonInnerReaderStateAfterIgnore::InHead => JsonInnerReaderState::InHead, + JsonInnerReaderStateAfterIgnore::InResults => { + JsonInnerReaderState::InResults + } + JsonInnerReaderStateAfterIgnore::AfterBindings => { + JsonInnerReaderState::AfterBindings + } + } + } else { + JsonInnerReaderState::Ignore { + level, + after: *after, + } + }; + Ok(None) + } + } + } +} + +struct JsonInnerSolutionsReader { + state: JsonInnerSolutionsReaderState, + mapping: BTreeMap, + new_bindings: Vec>, +} + +enum JsonInnerSolutionsReaderState { + BeforeSolution, + BetweenSolutionTerms, + Term { + reader: JsonInnerTermReader, + key: usize, + }, + AfterEnd, +} + +impl JsonInnerSolutionsReader { + fn read_event( + &mut self, + event: JsonEvent<'_>, + ) -> Result>>, QueryResultsSyntaxError> { + match &mut self.state { + JsonInnerSolutionsReaderState::BeforeSolution => match event { + JsonEvent::StartObject => { + self.state = JsonInnerSolutionsReaderState::BetweenSolutionTerms; + self.new_bindings = vec![None; self.mapping.len()]; + Ok(None) + } + JsonEvent::EndArray => { + self.state = JsonInnerSolutionsReaderState::AfterEnd; + Ok(None) + } + _ => Err(QueryResultsSyntaxError::msg( + "Expecting a new solution object", + )), + }, + JsonInnerSolutionsReaderState::BetweenSolutionTerms => match event { + JsonEvent::ObjectKey(key) => { + let key = *self.mapping.get(key.as_ref()).ok_or_else(|| { + QueryResultsSyntaxError::msg(format!( + "The variable {key} has not been defined in the header" + )) + })?; + self.state = JsonInnerSolutionsReaderState::Term { + reader: JsonInnerTermReader::default(), + key, + }; + Ok(None) + } + JsonEvent::EndObject => { + self.state = JsonInnerSolutionsReaderState::BeforeSolution; + Ok(Some(take(&mut self.new_bindings))) + } + _ => unreachable!(), + }, + JsonInnerSolutionsReaderState::Term { + ref mut reader, + key, + } => { + let result = reader.read_event(event); + if let Some(term) = result? 
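+                // The nested term reader has yielded a complete RDF term:
+                // store it in the column mapped to this variable.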
{ + self.new_bindings[*key] = Some(term); + self.state = JsonInnerSolutionsReaderState::BetweenSolutionTerms; + } + Ok(None) + } + JsonInnerSolutionsReaderState::AfterEnd => { + if event == JsonEvent::EndObject { + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "Unexpected JSON after the end of the bindings array", + )) + } + } + } + } +} + +#[derive(Default)] +struct JsonInnerTermReader { + state: JsonInnerTermReaderState, + term_type: Option, + value: Option, + lang: Option, + datatype: Option, + #[cfg(feature = "rdf-star")] + subject: Option, + #[cfg(feature = "rdf-star")] + predicate: Option, + #[cfg(feature = "rdf-star")] + object: Option, +} + +#[derive(Default)] +enum JsonInnerTermReaderState { + #[default] + Start, + Middle, + TermType, + Value, + Lang, + Datatype, + #[cfg(feature = "rdf-star")] + InValue, + #[cfg(feature = "rdf-star")] + Subject(Box), + #[cfg(feature = "rdf-star")] + Predicate(Box), + #[cfg(feature = "rdf-star")] + Object(Box), +} + +enum TermType { + Uri, + BNode, + Literal, + #[cfg(feature = "rdf-star")] + Triple, +} + +impl JsonInnerTermReader { + fn read_event( + &mut self, + event: JsonEvent<'_>, + ) -> Result, QueryResultsSyntaxError> { + match &mut self.state { + JsonInnerTermReaderState::Start => { + if event == JsonEvent::StartObject { + self.state = JsonInnerTermReaderState::Middle; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "RDF terms must be encoded using objects", + )) + } + } + JsonInnerTermReaderState::Middle => match event { + JsonEvent::ObjectKey(object_key) => { + self.state = match object_key.as_ref() { + "type" => JsonInnerTermReaderState::TermType, + "value" => JsonInnerTermReaderState::Value, + "datatype" => JsonInnerTermReaderState::Datatype, + "xml:lang" => JsonInnerTermReaderState::Lang, + _ => { + return Err(QueryResultsSyntaxError::msg(format!( + "Unsupported term key: {object_key}" + ))); + } + }; + Ok(None) + } + JsonEvent::EndObject => { + self.state = JsonInnerTermReaderState::Start; + match self.term_type.take() { + None => Err(QueryResultsSyntaxError::msg( + "Term serialization should have a 'type' key", + )), + Some(TermType::Uri) => Ok(Some( + NamedNode::new(self.value.take().ok_or_else(|| { + QueryResultsSyntaxError::msg( + "uri serialization should have a 'value' key", + ) + })?) + .map_err(|e| { + QueryResultsSyntaxError::msg(format!("Invalid uri value: {e}")) + })? + .into(), + )), + Some(TermType::BNode) => Ok(Some( + BlankNode::new(self.value.take().ok_or_else(|| { + QueryResultsSyntaxError::msg( + "bnode serialization should have a 'value' key", + ) + })?) + .map_err(|e| { + QueryResultsSyntaxError::msg(format!("Invalid bnode value: {e}")) + })? + .into(), + )), + Some(TermType::Literal) => { + let value = self.value.take().ok_or_else(|| { + QueryResultsSyntaxError::msg( + "literal serialization should have a 'value' key", + ) + })?; + Ok(Some(match self.lang.take() { + Some(lang) => { + if let Some(datatype) = &self.datatype { + if datatype.as_ref() != rdf::LANG_STRING { + return Err(QueryResultsSyntaxError::msg(format!( + "xml:lang value '{lang}' provided with the datatype {datatype}" + ))); + } + } + Literal::new_language_tagged_literal(value, &*lang) + .map_err(|e| { + QueryResultsSyntaxError::msg(format!( + "Invalid xml:lang value '{lang}': {e}" + )) + })? 
+ } + None => { + if let Some(datatype) = self.datatype.take() { + Literal::new_typed_literal(value, datatype) + } else { + Literal::new_simple_literal(value) + } + } + }.into())) + } + #[cfg(feature = "rdf-star")] + Some(TermType::Triple) => Ok(Some( + Triple::new( + match self.subject.take().ok_or_else(|| { + QueryResultsSyntaxError::msg( + "triple serialization should have a 'subject' key", + ) + })? { + Term::NamedNode(subject) => subject.into(), + Term::BlankNode(subject) => subject.into(), + Term::Triple(subject) => Subject::Triple(subject), + Term::Literal(_) => { + return Err(QueryResultsSyntaxError::msg( + "The 'subject' value should not be a literal", + )); + } + }, + match self.predicate.take().ok_or_else(|| { + QueryResultsSyntaxError::msg( + "triple serialization should have a 'predicate' key", + ) + })? { + Term::NamedNode(predicate) => predicate, + _ => { + return Err(QueryResultsSyntaxError::msg( + "The 'predicate' value should be a uri", + )); + } + }, + self.object.take().ok_or_else(|| { + QueryResultsSyntaxError::msg( + "triple serialization should have a 'object' key", + ) + })?, + ) + .into(), + )), + } + } + _ => unreachable!(), + }, + JsonInnerTermReaderState::TermType => { + self.state = JsonInnerTermReaderState::Middle; + if let JsonEvent::String(value) = event { + match value.as_ref() { + "uri" => { + self.term_type = Some(TermType::Uri); + Ok(None) + } + "bnode" => { + self.term_type = Some(TermType::BNode); + Ok(None) + } + "literal" | "typed-literal" => { + self.term_type = Some(TermType::Literal); + Ok(None) + } + #[cfg(feature = "rdf-star")] + "triple" => { + self.term_type = Some(TermType::Triple); + Ok(None) + } + _ => Err(QueryResultsSyntaxError::msg(format!( + "Unexpected term type: '{value}'" + ))), + } + } else { + Err(QueryResultsSyntaxError::msg("Term type must be a string")) + } + } + JsonInnerTermReaderState::Value => match event { + JsonEvent::String(value) => { + self.value = Some(value.into_owned()); + self.state = JsonInnerTermReaderState::Middle; + Ok(None) + } + #[cfg(feature = "rdf-star")] + JsonEvent::StartObject => { + self.state = JsonInnerTermReaderState::InValue; + Ok(None) + } + _ => { + self.state = JsonInnerTermReaderState::Middle; + + Err(QueryResultsSyntaxError::msg("Term value must be a string")) + } + }, + JsonInnerTermReaderState::Lang => { + let result = if let JsonEvent::String(value) = event { + self.lang = Some(value.into_owned()); + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg("Term lang must be strings")) + }; + self.state = JsonInnerTermReaderState::Middle; + + result + } + JsonInnerTermReaderState::Datatype => { + let result = if let JsonEvent::String(value) = event { + match NamedNode::new(value) { + Ok(datatype) => { + self.datatype = Some(datatype); + Ok(None) + } + Err(e) => Err(QueryResultsSyntaxError::msg(format!( + "Invalid datatype: {e}" + ))), + } + } else { + Err(QueryResultsSyntaxError::msg("Term lang must be strings")) + }; + self.state = JsonInnerTermReaderState::Middle; + + result + } + #[cfg(feature = "rdf-star")] + JsonInnerTermReaderState::InValue => match event { + JsonEvent::ObjectKey(object_key) => { + self.state = match object_key.as_ref() { + "subject" => JsonInnerTermReaderState::Subject(Box::default()), + "predicate" => JsonInnerTermReaderState::Predicate(Box::default()), + "object" => JsonInnerTermReaderState::Object(Box::default()), + _ => { + return Err(QueryResultsSyntaxError::msg(format!( + "Unsupported value key: {object_key}" + ))); + } + }; + Ok(None) + } + JsonEvent::EndObject => 
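+                // The embedded triple's "value" object is closed: resume
+                // reading the remaining keys of the enclosing term.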
{ + self.state = JsonInnerTermReaderState::Middle; + Ok(None) + } + _ => unreachable!(), + }, + #[cfg(feature = "rdf-star")] + JsonInnerTermReaderState::Subject(ref mut inner_state) => { + if let Some(term) = inner_state.read_event(event)? { + self.state = JsonInnerTermReaderState::InValue; + self.subject = Some(term); + } + Ok(None) + } + #[cfg(feature = "rdf-star")] + JsonInnerTermReaderState::Predicate(ref mut inner_state) => { + if let Some(term) = inner_state.read_event(event)? { + self.state = JsonInnerTermReaderState::InValue; + self.predicate = Some(term); + } + Ok(None) + } + #[cfg(feature = "rdf-star")] + JsonInnerTermReaderState::Object(ref mut inner_state) => { + if let Some(term) = inner_state.read_event(event)? { + self.state = JsonInnerTermReaderState::InValue; + self.object = Some(term); + } + Ok(None) + } + } + } +} + +pub struct JsonBufferedSolutionsIterator { + mapping: BTreeMap, + bindings: std::vec::IntoIter<(Vec, Vec)>, +} + +impl JsonBufferedSolutionsIterator { + fn next(&mut self) -> Result>>, QueryResultsParseError> { + let Some((variables, values)) = self.bindings.next() else { + return Ok(None); + }; + let mut new_bindings = vec![None; self.mapping.len()]; + for (variable, value) in variables.into_iter().zip(values) { + let k = *self.mapping.get(&variable).ok_or_else(|| { + QueryResultsSyntaxError::msg(format!( + "The variable {variable} has not been defined in the header" + )) + })?; + new_bindings[k] = Some(value); + } + Ok(Some(new_bindings)) + } +} diff --git a/ng-oxigraph/src/sparesults/mod.rs b/ng-oxigraph/src/sparesults/mod.rs new file mode 100644 index 0000000..4f88baa --- /dev/null +++ b/ng-oxigraph/src/sparesults/mod.rs @@ -0,0 +1,16 @@ +mod csv; +mod error; +mod format; +mod json; +mod parser; +mod serializer; +pub mod solution; +mod xml; + +pub use crate::sparesults::error::{QueryResultsParseError, QueryResultsSyntaxError, TextPosition}; +pub use crate::sparesults::format::QueryResultsFormat; +pub use crate::sparesults::parser::{ + FromReadQueryResultsReader, FromReadSolutionsReader, QueryResultsParser, +}; +pub use crate::sparesults::serializer::{QueryResultsSerializer, ToWriteSolutionsWriter}; +pub use crate::sparesults::solution::QuerySolution; diff --git a/ng-oxigraph/src/sparesults/parser.rs b/ng-oxigraph/src/sparesults/parser.rs new file mode 100644 index 0000000..9bac0ad --- /dev/null +++ b/ng-oxigraph/src/sparesults/parser.rs @@ -0,0 +1,460 @@ +use crate::oxrdf::Variable; +use crate::sparesults::csv::{FromReadTsvQueryResultsReader, FromReadTsvSolutionsReader}; +#[cfg(feature = "async-tokio")] +use crate::sparesults::csv::{ + FromTokioAsyncReadTsvQueryResultsReader, FromTokioAsyncReadTsvSolutionsReader, +}; +use crate::sparesults::error::{QueryResultsParseError, QueryResultsSyntaxError}; +use crate::sparesults::format::QueryResultsFormat; +use crate::sparesults::json::{FromReadJsonQueryResultsReader, FromReadJsonSolutionsReader}; +#[cfg(feature = "async-tokio")] +use crate::sparesults::json::{ + FromTokioAsyncReadJsonQueryResultsReader, FromTokioAsyncReadJsonSolutionsReader, +}; +use crate::sparesults::solution::QuerySolution; +use crate::sparesults::xml::{FromReadXmlQueryResultsReader, FromReadXmlSolutionsReader}; +#[cfg(feature = "async-tokio")] +use crate::sparesults::xml::{ + FromTokioAsyncReadXmlQueryResultsReader, FromTokioAsyncReadXmlSolutionsReader, +}; +use std::io::Read; +use std::sync::Arc; +#[cfg(feature = "async-tokio")] +use tokio::io::AsyncRead; + +/// Parsers for [SPARQL query](https://www.w3.org/TR/sparql11-query/) results 
serialization formats. +/// +/// It currently supports the following formats: +/// * [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/) ([`QueryResultsFormat::Xml`](QueryResultsFormat::Xml)). +/// * [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) ([`QueryResultsFormat::Json`](QueryResultsFormat::Json)). +/// * [SPARQL Query Results TSV Format](https://www.w3.org/TR/sparql11-results-csv-tsv/) ([`QueryResultsFormat::Tsv`](QueryResultsFormat::Tsv)). +/// +/// Example in JSON (the API is the same for XML and TSV): +/// ``` +/// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader}; +/// use oxrdf::{Literal, Variable}; +/// +/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json); +/// // boolean +/// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(br#"{"boolean":true}"#.as_slice())? { +/// assert_eq!(v, true); +/// } +/// // solutions +/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice())? { +/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); +/// for solution in solutions { +/// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); +/// } +/// } +/// # Result::<(),sparesults::QueryResultsParseError>::Ok(()) +/// ``` +pub struct QueryResultsParser { + format: QueryResultsFormat, +} + +impl QueryResultsParser { + /// Builds a parser for the given format. + #[inline] + pub fn from_format(format: QueryResultsFormat) -> Self { + Self { format } + } + + /// Reads a result file from a [`Read`] implementation. + /// + /// Reads are automatically buffered. + /// + /// Example in XML (the API is the same for JSON and TSV): + /// ``` + /// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader}; + /// use oxrdf::{Literal, Variable}; + /// + /// let xml_parser = QueryResultsParser::from_format(QueryResultsFormat::Xml); + /// + /// // boolean + /// if let FromReadQueryResultsReader::Boolean(v) = xml_parser.parse_read(br#"true"#.as_slice())? { + /// assert_eq!(v, true); + /// } + /// + /// // solutions + /// if let FromReadQueryResultsReader::Solutions(solutions) = xml_parser.parse_read(br#"test"#.as_slice())? { + /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); + /// for solution in solutions { + /// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); + /// } + /// } + /// # Result::<(),sparesults::QueryResultsParseError>::Ok(()) + /// ``` + pub fn parse_read( + &self, + reader: R, + ) -> Result, QueryResultsParseError> { + Ok(match self.format { + QueryResultsFormat::Xml => match FromReadXmlQueryResultsReader::read(reader)? { + FromReadXmlQueryResultsReader::Boolean(r) => FromReadQueryResultsReader::Boolean(r), + FromReadXmlQueryResultsReader::Solutions { + solutions, + variables, + } => FromReadQueryResultsReader::Solutions(FromReadSolutionsReader { + variables: variables.into(), + solutions: FromReadSolutionsReaderKind::Xml(solutions), + }), + }, + QueryResultsFormat::Json => match FromReadJsonQueryResultsReader::read(reader)? 
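+            // Adapt the JSON-specific reader into the format-agnostic enum,
+            // exactly as for XML above and TSV below.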
{ + FromReadJsonQueryResultsReader::Boolean(r) => FromReadQueryResultsReader::Boolean(r), + FromReadJsonQueryResultsReader::Solutions { + solutions, + variables, + } => FromReadQueryResultsReader::Solutions(FromReadSolutionsReader { + variables: variables.into(), + solutions: FromReadSolutionsReaderKind::Json(solutions), + }), + }, + QueryResultsFormat::Csv => return Err(QueryResultsSyntaxError::msg("CSV SPARQL results syntax is lossy and can't be parsed to a proper RDF representation").into()), + QueryResultsFormat::Tsv => match FromReadTsvQueryResultsReader::read(reader)? { + FromReadTsvQueryResultsReader::Boolean(r) => FromReadQueryResultsReader::Boolean(r), + FromReadTsvQueryResultsReader::Solutions { + solutions, + variables, + } => FromReadQueryResultsReader::Solutions(FromReadSolutionsReader { + variables: variables.into(), + solutions: FromReadSolutionsReaderKind::Tsv(solutions), + }), + }, + }) + } + + #[deprecated(note = "use parse_read", since = "0.4.0")] + pub fn read_results( + &self, + reader: R, + ) -> Result, QueryResultsParseError> { + self.parse_read(reader) + } + + /// Reads a result file from a Tokio [`AsyncRead`] implementation. + /// + /// Reads are automatically buffered. + /// + /// Example in XML (the API is the same for JSON and TSV): + /// ``` + /// use sparesults::{QueryResultsFormat, QueryResultsParser, FromTokioAsyncReadQueryResultsReader}; + /// use oxrdf::{Literal, Variable}; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> Result<(), sparesults::QueryResultsParseError> { + /// let xml_parser = QueryResultsParser::from_format(QueryResultsFormat::Xml); + /// + /// // boolean + /// if let FromTokioAsyncReadQueryResultsReader::Boolean(v) = xml_parser.parse_tokio_async_read(br#"true"#.as_slice()).await? { + /// assert_eq!(v, true); + /// } + /// + /// // solutions + /// if let FromTokioAsyncReadQueryResultsReader::Solutions(mut solutions) = xml_parser.parse_tokio_async_read(br#"test"#.as_slice()).await? { + /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); + /// while let Some(solution) = solutions.next().await { + /// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); + /// } + /// } + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub async fn parse_tokio_async_read( + &self, + reader: R, + ) -> Result, QueryResultsParseError> { + Ok(match self.format { + QueryResultsFormat::Xml => match FromTokioAsyncReadXmlQueryResultsReader::read(reader).await? { + FromTokioAsyncReadXmlQueryResultsReader::Boolean(r) => FromTokioAsyncReadQueryResultsReader::Boolean(r), + FromTokioAsyncReadXmlQueryResultsReader::Solutions { + solutions, + variables, + } => FromTokioAsyncReadQueryResultsReader::Solutions(FromTokioAsyncReadSolutionsReader { + variables: variables.into(), + solutions: FromTokioAsyncReadSolutionsReaderKind::Xml(solutions), + }), + }, + QueryResultsFormat::Json => match FromTokioAsyncReadJsonQueryResultsReader::read(reader).await? 
{ + FromTokioAsyncReadJsonQueryResultsReader::Boolean(r) => FromTokioAsyncReadQueryResultsReader::Boolean(r), + FromTokioAsyncReadJsonQueryResultsReader::Solutions { + solutions, + variables, + } => FromTokioAsyncReadQueryResultsReader::Solutions(FromTokioAsyncReadSolutionsReader { + variables: variables.into(), + solutions: FromTokioAsyncReadSolutionsReaderKind::Json(solutions), + }), + }, + QueryResultsFormat::Csv => return Err(QueryResultsSyntaxError::msg("CSV SPARQL results syntax is lossy and can't be parsed to a proper RDF representation").into()), + QueryResultsFormat::Tsv => match FromTokioAsyncReadTsvQueryResultsReader::read(reader).await? { + FromTokioAsyncReadTsvQueryResultsReader::Boolean(r) => FromTokioAsyncReadQueryResultsReader::Boolean(r), + FromTokioAsyncReadTsvQueryResultsReader::Solutions { + solutions, + variables, + } => FromTokioAsyncReadQueryResultsReader::Solutions(FromTokioAsyncReadSolutionsReader { + variables: variables.into(), + solutions: FromTokioAsyncReadSolutionsReaderKind::Tsv(solutions), + }), + }, + }) + } +} + +impl From for QueryResultsParser { + fn from(format: QueryResultsFormat) -> Self { + Self::from_format(format) + } +} + +/// The reader for a given read of a results file. +/// +/// It is either a read boolean ([`bool`]) or a streaming reader of a set of solutions ([`FromReadSolutionsReader`]). +/// +/// Example in TSV (the API is the same for JSON and XML): +/// ``` +/// use oxrdf::{Literal, Variable}; +/// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser}; +/// +/// let tsv_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv); +/// +/// // boolean +/// if let FromReadQueryResultsReader::Boolean(v) = tsv_parser.parse_read(b"true".as_slice())? { +/// assert_eq!(v, true); +/// } +/// +/// // solutions +/// if let FromReadQueryResultsReader::Solutions(solutions) = +/// tsv_parser.parse_read(b"?foo\t?bar\n\"test\"\t".as_slice())? +/// { +/// assert_eq!( +/// solutions.variables(), +/// &[ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar") +/// ] +/// ); +/// for solution in solutions { +/// assert_eq!( +/// solution?.iter().collect::>(), +/// vec![( +/// &Variable::new_unchecked("foo"), +/// &Literal::from("test").into() +/// )] +/// ); +/// } +/// } +/// # Result::<(),sparesults::QueryResultsParseError>::Ok(()) +/// ``` +pub enum FromReadQueryResultsReader { + Solutions(FromReadSolutionsReader), + Boolean(bool), +} + +/// A streaming reader of a set of [`QuerySolution`] solutions. +/// +/// It implements the [`Iterator`] API to iterate over the solutions. +/// +/// Example in JSON (the API is the same for XML and TSV): +/// ``` +/// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader}; +/// use oxrdf::{Literal, Variable}; +/// +/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json); +/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice())? 
{ +/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); +/// for solution in solutions { +/// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); +/// } +/// } +/// # Result::<(),sparesults::QueryResultsParseError>::Ok(()) +/// ``` +pub struct FromReadSolutionsReader { + variables: Arc<[Variable]>, + solutions: FromReadSolutionsReaderKind, +} + +enum FromReadSolutionsReaderKind { + Xml(FromReadXmlSolutionsReader), + Json(FromReadJsonSolutionsReader), + Tsv(FromReadTsvSolutionsReader), +} + +impl FromReadSolutionsReader { + /// Ordered list of the declared variables at the beginning of the results. + /// + /// Example in TSV (the API is the same for JSON and XML): + /// ``` + /// use oxrdf::Variable; + /// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser}; + /// + /// let tsv_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv); + /// if let FromReadQueryResultsReader::Solutions(solutions) = + /// tsv_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())? + /// { + /// assert_eq!( + /// solutions.variables(), + /// &[ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar") + /// ] + /// ); + /// } + /// # Result::<(),sparesults::QueryResultsParseError>::Ok(()) + /// ``` + #[inline] + pub fn variables(&self) -> &[Variable] { + &self.variables + } +} + +impl Iterator for FromReadSolutionsReader { + type Item = Result; + + fn next(&mut self) -> Option { + Some( + match &mut self.solutions { + FromReadSolutionsReaderKind::Xml(reader) => reader.read_next(), + FromReadSolutionsReaderKind::Json(reader) => reader.read_next(), + FromReadSolutionsReaderKind::Tsv(reader) => reader.read_next(), + } + .transpose()? + .map(|values| (Arc::clone(&self.variables), values).into()), + ) + } +} + +/// The reader for a given read of a results file. +/// +/// It is either a read boolean ([`bool`]) or a streaming reader of a set of solutions ([`FromReadSolutionsReader`]). +/// +/// Example in TSV (the API is the same for JSON and XML): +/// ``` +/// use oxrdf::{Literal, Variable}; +/// use sparesults::{ +/// FromTokioAsyncReadQueryResultsReader, QueryResultsFormat, QueryResultsParser, +/// }; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> Result<(), sparesults::QueryResultsParseError> { +/// let tsv_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv); +/// +/// // boolean +/// if let FromTokioAsyncReadQueryResultsReader::Boolean(v) = tsv_parser +/// .parse_tokio_async_read(b"true".as_slice()) +/// .await? +/// { +/// assert_eq!(v, true); +/// } +/// +/// // solutions +/// if let FromTokioAsyncReadQueryResultsReader::Solutions(mut solutions) = tsv_parser +/// .parse_tokio_async_read(b"?foo\t?bar\n\"test\"\t".as_slice()) +/// .await? +/// { +/// assert_eq!( +/// solutions.variables(), +/// &[ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar") +/// ] +/// ); +/// while let Some(solution) = solutions.next().await { +/// assert_eq!( +/// solution?.iter().collect::>(), +/// vec![( +/// &Variable::new_unchecked("foo"), +/// &Literal::from("test").into() +/// )] +/// ); +/// } +/// } +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +pub enum FromTokioAsyncReadQueryResultsReader { + Solutions(FromTokioAsyncReadSolutionsReader), + Boolean(bool), +} + +/// A streaming reader of a set of [`QuerySolution`] solutions. 
+///
+/// It allows iterating over the solutions by awaiting [`next`](FromTokioAsyncReadSolutionsReader::next).
+///
+/// Example in JSON (the API is the same for XML and TSV):
+/// ```
+/// use sparesults::{QueryResultsFormat, QueryResultsParser, FromTokioAsyncReadQueryResultsReader};
+/// use oxrdf::{Literal, Variable};
+///
+/// # #[tokio::main(flavor = "current_thread")]
+/// # async fn main() -> Result<(), sparesults::QueryResultsParseError> {
+/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json);
+/// if let FromTokioAsyncReadQueryResultsReader::Solutions(mut solutions) = json_parser.parse_tokio_async_read(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice()).await? {
+///     assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
+///     while let Some(solution) = solutions.next().await {
+///         assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
+///     }
+/// }
+/// # Ok(())
+/// # }
+/// ```
+#[cfg(feature = "async-tokio")]
+pub struct FromTokioAsyncReadSolutionsReader<R: AsyncRead + Unpin> {
+    variables: Arc<[Variable]>,
+    solutions: FromTokioAsyncReadSolutionsReaderKind<R>,
+}
+
+#[cfg(feature = "async-tokio")]
+enum FromTokioAsyncReadSolutionsReaderKind<R: AsyncRead + Unpin> {
+    Json(FromTokioAsyncReadJsonSolutionsReader<R>),
+    Xml(FromTokioAsyncReadXmlSolutionsReader<R>),
+    Tsv(FromTokioAsyncReadTsvSolutionsReader<R>),
+}
+
+#[cfg(feature = "async-tokio")]
+impl<R: AsyncRead + Unpin> FromTokioAsyncReadSolutionsReader<R> {
+    /// Ordered list of the declared variables at the beginning of the results.
+    ///
+    /// Example in TSV (the API is the same for JSON and XML):
+    /// ```
+    /// use oxrdf::Variable;
+    /// use sparesults::{
+    ///     FromTokioAsyncReadQueryResultsReader, QueryResultsFormat, QueryResultsParser,
+    /// };
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), sparesults::QueryResultsParseError> {
+    /// let tsv_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv);
+    /// if let FromTokioAsyncReadQueryResultsReader::Solutions(solutions) = tsv_parser
+    ///     .parse_tokio_async_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())
+    ///     .await?
+    /// {
+    ///     assert_eq!(
+    ///         solutions.variables(),
+    ///         &[
+    ///             Variable::new_unchecked("foo"),
+    ///             Variable::new_unchecked("bar")
+    ///         ]
+    ///     );
+    /// }
+    /// # Ok(())
+    /// # }
+    /// ```
+    #[inline]
+    pub fn variables(&self) -> &[Variable] {
+        &self.variables
+    }
+
+    /// Reads the next solution or returns `None` if the file is finished.
+    pub async fn next(&mut self) -> Option<Result<QuerySolution, QueryResultsParseError>> {
+        Some(
+            match &mut self.solutions {
+                FromTokioAsyncReadSolutionsReaderKind::Json(reader) => reader.read_next().await,
+                FromTokioAsyncReadSolutionsReaderKind::Xml(reader) => reader.read_next().await,
+                FromTokioAsyncReadSolutionsReaderKind::Tsv(reader) => reader.read_next().await,
+            }
+            .transpose()?
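+            // Attach the shared variable list to the raw values so the row
+            // can be exposed as a QuerySolution.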
+ .map(|values| (Arc::clone(&self.variables), values).into()), + ) + } +} diff --git a/ng-oxigraph/src/sparesults/serializer.rs b/ng-oxigraph/src/sparesults/serializer.rs new file mode 100644 index 0000000..dee026f --- /dev/null +++ b/ng-oxigraph/src/sparesults/serializer.rs @@ -0,0 +1,427 @@ +use crate::oxrdf::{TermRef, Variable, VariableRef}; +#[cfg(feature = "async-tokio")] +use crate::sparesults::csv::{ + tokio_async_write_boolean_csv_result, ToTokioAsyncWriteCsvSolutionsWriter, + ToTokioAsyncWriteTsvSolutionsWriter, +}; +use crate::sparesults::csv::{ + write_boolean_csv_result, ToWriteCsvSolutionsWriter, ToWriteTsvSolutionsWriter, +}; +use crate::sparesults::format::QueryResultsFormat; +#[cfg(feature = "async-tokio")] +use crate::sparesults::json::{ + tokio_async_write_boolean_json_result, ToTokioAsyncWriteJsonSolutionsWriter, +}; +use crate::sparesults::json::{write_boolean_json_result, ToWriteJsonSolutionsWriter}; +#[cfg(feature = "async-tokio")] +use crate::sparesults::xml::{ + tokio_async_write_boolean_xml_result, ToTokioAsyncWriteXmlSolutionsWriter, +}; +use crate::sparesults::xml::{write_boolean_xml_result, ToWriteXmlSolutionsWriter}; +use std::io::{self, Write}; +#[cfg(feature = "async-tokio")] +use tokio::io::AsyncWrite; + +/// A serializer for [SPARQL query](https://www.w3.org/TR/sparql11-query/) results serialization formats. +/// +/// It currently supports the following formats: +/// * [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/) ([`QueryResultsFormat::Xml`](QueryResultsFormat::Xml)) +/// * [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) ([`QueryResultsFormat::Json`](QueryResultsFormat::Json)) +/// * [SPARQL Query Results CSV Format](https://www.w3.org/TR/sparql11-results-csv-tsv/) ([`QueryResultsFormat::Csv`](QueryResultsFormat::Csv)) +/// * [SPARQL Query Results TSV Format](https://www.w3.org/TR/sparql11-results-csv-tsv/) ([`QueryResultsFormat::Tsv`](QueryResultsFormat::Tsv)) +/// +/// Example in JSON (the API is the same for XML, CSV and TSV): +/// ``` +/// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; +/// use oxrdf::{LiteralRef, Variable, VariableRef}; +/// use std::iter::once; +/// +/// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json); +/// +/// // boolean +/// let mut buffer = Vec::new(); +/// json_serializer.serialize_boolean_to_write(&mut buffer, true)?; +/// assert_eq!(buffer, br#"{"head":{},"boolean":true}"#); +/// +/// // solutions +/// let mut buffer = Vec::new(); +/// let mut writer = json_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?; +/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; +/// writer.finish()?; +/// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#); +/// # std::io::Result::Ok(()) +/// ``` +pub struct QueryResultsSerializer { + format: QueryResultsFormat, +} + +impl QueryResultsSerializer { + /// Builds a serializer for the given format. + #[inline] + pub fn from_format(format: QueryResultsFormat) -> Self { + Self { format } + } + + /// Write a boolean query result (from an `ASK` query) into the given [`Write`] implementation. 
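+    ///
+    /// On success the underlying writer is handed back, so it can be reused
+    /// for further output.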
+ /// + /// Example in XML (the API is the same for JSON, CSV and TSV): + /// ``` + /// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; + /// + /// let xml_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Xml); + /// let mut buffer = Vec::new(); + /// xml_serializer.serialize_boolean_to_write(&mut buffer, true)?; + /// assert_eq!(buffer, br#"true"#); + /// # std::io::Result::Ok(()) + /// ``` + pub fn serialize_boolean_to_write(&self, write: W, value: bool) -> io::Result { + match self.format { + QueryResultsFormat::Xml => write_boolean_xml_result(write, value), + QueryResultsFormat::Json => write_boolean_json_result(write, value), + QueryResultsFormat::Csv | QueryResultsFormat::Tsv => { + write_boolean_csv_result(write, value) + } + } + } + + /// Write a boolean query result (from an `ASK` query) into the given [`AsyncWrite`] implementation. + /// + /// Example in JSON (the API is the same for XML, CSV and TSV): + /// ``` + /// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> std::io::Result<()> { + /// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json); + /// let mut buffer = Vec::new(); + /// json_serializer + /// .serialize_boolean_to_tokio_async_write(&mut buffer, false) + /// .await?; + /// assert_eq!(buffer, br#"{"head":{},"boolean":false}"#); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub async fn serialize_boolean_to_tokio_async_write( + &self, + write: W, + value: bool, + ) -> io::Result { + match self.format { + QueryResultsFormat::Xml => tokio_async_write_boolean_xml_result(write, value).await, + QueryResultsFormat::Json => tokio_async_write_boolean_json_result(write, value).await, + QueryResultsFormat::Csv | QueryResultsFormat::Tsv => { + tokio_async_write_boolean_csv_result(write, value).await + } + } + } + + #[deprecated(note = "use serialize_boolean_to_write", since = "0.4.0")] + pub fn write_boolean_result(&self, writer: W, value: bool) -> io::Result { + self.serialize_boolean_to_write(writer, value) + } + + /// Returns a `SolutionsWriter` allowing writing query solutions into the given [`Write`] implementation. + /// + ///
+ /// + /// Do not forget to run the [`finish`](ToWriteSolutionsWriter::finish()) method to properly write the last bytes of the file.
+ /// + ///
+ /// + /// This writer does unbuffered writes. You might want to use [`BufWriter`](io::BufWriter) to avoid that.
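+    ///
+    /// A minimal sketch of such buffering (the output path is illustrative):
+    /// ```no_run
+    /// use oxrdf::{LiteralRef, Variable, VariableRef};
+    /// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
+    /// use std::fs::File;
+    /// use std::io::BufWriter;
+    /// use std::iter::once;
+    ///
+    /// let csv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Csv);
+    /// // Buffer the file so that each solution does not cost a separate write syscall.
+    /// let file = BufWriter::new(File::create("results.csv")?);
+    /// let mut writer = csv_serializer.serialize_solutions_to_write(file, vec![Variable::new_unchecked("foo")])?;
+    /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
+    /// writer.finish()?;
+    /// # std::io::Result::Ok(())
+    /// ```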
+    ///
+    /// Example in XML (the API is the same for JSON, CSV and TSV):
+    /// ```
+    /// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
+    /// use oxrdf::{LiteralRef, Variable, VariableRef};
+    /// use std::iter::once;
+    ///
+    /// let xml_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Xml);
+    /// let mut buffer = Vec::new();
+    /// let mut writer = xml_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?;
+    /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
+    /// writer.finish()?;
+    /// assert_eq!(buffer, br#"test"#);
+    /// # std::io::Result::Ok(())
+    /// ```
+    pub fn serialize_solutions_to_write<W: Write>(
+        &self,
+        write: W,
+        variables: Vec<Variable>,
+    ) -> io::Result<ToWriteSolutionsWriter<W>> {
+        Ok(ToWriteSolutionsWriter {
+            formatter: match self.format {
+                QueryResultsFormat::Xml => ToWriteSolutionsWriterKind::Xml(
+                    ToWriteXmlSolutionsWriter::start(write, &variables)?,
+                ),
+                QueryResultsFormat::Json => ToWriteSolutionsWriterKind::Json(
+                    ToWriteJsonSolutionsWriter::start(write, &variables)?,
+                ),
+                QueryResultsFormat::Csv => ToWriteSolutionsWriterKind::Csv(
+                    ToWriteCsvSolutionsWriter::start(write, variables)?,
+                ),
+                QueryResultsFormat::Tsv => ToWriteSolutionsWriterKind::Tsv(
+                    ToWriteTsvSolutionsWriter::start(write, variables)?,
+                ),
+            },
+        })
+    }
+
+    /// Returns a `SolutionsWriter` allowing writing query solutions into the given [`AsyncWrite`] implementation.
+    ///
+    /// Do not forget to run the [`finish`](ToTokioAsyncWriteSolutionsWriter::finish()) method to properly write the last bytes of the file.
+ /// + ///
+    /// This writer does unbuffered writes. You might want to use [`BufWriter`](tokio::io::BufWriter) to avoid that.
+ /// + /// Example in XML (the API is the same for JSON, CSV and TSV): + /// ``` + /// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; + /// use oxrdf::{LiteralRef, Variable, VariableRef}; + /// use std::iter::once; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> std::io::Result<()> { + /// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json); + /// let mut buffer = Vec::new(); + /// let mut writer = json_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?; + /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?; + /// writer.finish().await?; + /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#); + /// # Ok(()) + /// # } + /// ``` + #[cfg(feature = "async-tokio")] + pub async fn serialize_solutions_to_tokio_async_write( + &self, + write: W, + variables: Vec, + ) -> io::Result> { + Ok(ToTokioAsyncWriteSolutionsWriter { + formatter: match self.format { + QueryResultsFormat::Xml => ToTokioAsyncWriteSolutionsWriterKind::Xml( + ToTokioAsyncWriteXmlSolutionsWriter::start(write, &variables).await?, + ), + QueryResultsFormat::Json => ToTokioAsyncWriteSolutionsWriterKind::Json( + ToTokioAsyncWriteJsonSolutionsWriter::start(write, &variables).await?, + ), + QueryResultsFormat::Csv => ToTokioAsyncWriteSolutionsWriterKind::Csv( + ToTokioAsyncWriteCsvSolutionsWriter::start(write, variables).await?, + ), + QueryResultsFormat::Tsv => ToTokioAsyncWriteSolutionsWriterKind::Tsv( + ToTokioAsyncWriteTsvSolutionsWriter::start(write, variables).await?, + ), + }, + }) + } + + #[deprecated(note = "use serialize_solutions_to_write", since = "0.4.0")] + pub fn solutions_writer( + &self, + writer: W, + variables: Vec, + ) -> io::Result> { + self.serialize_solutions_to_write(writer, variables) + } +} + +impl From for QueryResultsSerializer { + fn from(format: QueryResultsFormat) -> Self { + Self::from_format(format) + } +} + +/// Allows writing query results into a [`Write`] implementation. +/// +/// Could be built using a [`QueryResultsSerializer`]. +/// +///
+/// +/// Do not forget to run the [`finish`](ToWriteSolutionsWriter::finish()) method to properly write the last bytes of the file.
+/// +///
+/// +/// This writer does unbuffered writes. You might want to use [`BufWriter`](io::BufWriter) to avoid that.
+/// +/// Example in TSV (the API is the same for JSON, XML and CSV): +/// ``` +/// use oxrdf::{LiteralRef, Variable, VariableRef}; +/// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; +/// use std::iter::once; +/// +/// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv); +/// let mut buffer = Vec::new(); +/// let mut writer = tsv_serializer.serialize_solutions_to_write( +/// &mut buffer, +/// vec![ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar"), +/// ], +/// )?; +/// writer.write(once(( +/// VariableRef::new_unchecked("foo"), +/// LiteralRef::from("test"), +/// )))?; +/// writer.finish()?; +/// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n"); +/// # std::io::Result::Ok(()) +/// ``` +#[must_use] +pub struct ToWriteSolutionsWriter { + formatter: ToWriteSolutionsWriterKind, +} + +enum ToWriteSolutionsWriterKind { + Xml(ToWriteXmlSolutionsWriter), + Json(ToWriteJsonSolutionsWriter), + Csv(ToWriteCsvSolutionsWriter), + Tsv(ToWriteTsvSolutionsWriter), +} + +impl ToWriteSolutionsWriter { + /// Writes a solution. + /// + /// Example in JSON (the API is the same for XML, CSV and TSV): + /// ``` + /// use sparesults::{QueryResultsFormat, QueryResultsSerializer, QuerySolution}; + /// use oxrdf::{Literal, LiteralRef, Variable, VariableRef}; + /// use std::iter::once; + /// + /// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json); + /// let mut buffer = Vec::new(); + /// let mut writer = json_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?; + /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; + /// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())])))?; + /// writer.finish()?; + /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}},{"bar":{"type":"literal","value":"test"}}]}}"#); + /// # std::io::Result::Ok(()) + /// ``` + pub fn write<'a>( + &mut self, + solution: impl IntoIterator>, impl Into>)>, + ) -> io::Result<()> { + let solution = solution.into_iter().map(|(v, s)| (v.into(), s.into())); + match &mut self.formatter { + ToWriteSolutionsWriterKind::Xml(writer) => writer.write(solution), + ToWriteSolutionsWriterKind::Json(writer) => writer.write(solution), + ToWriteSolutionsWriterKind::Csv(writer) => writer.write(solution), + ToWriteSolutionsWriterKind::Tsv(writer) => writer.write(solution), + } + } + + /// Writes the last bytes of the file. + pub fn finish(self) -> io::Result { + match self.formatter { + ToWriteSolutionsWriterKind::Xml(write) => write.finish(), + ToWriteSolutionsWriterKind::Json(write) => write.finish(), + ToWriteSolutionsWriterKind::Csv(write) => Ok(write.finish()), + ToWriteSolutionsWriterKind::Tsv(write) => Ok(write.finish()), + } + } +} + +/// Allows writing query results into an [`AsyncWrite`] implementation. + +/// Could be built using a [`QueryResultsSerializer`]. +/// +///
+/// +/// Do not forget to run the [`finish`](ToTokioAsyncWriteSolutionsWriter::finish()) method to properly write the last bytes of the file.
+/// +///
+/// +/// This writer does unbuffered writes. You might want to use [`BufWriter`](tokio::io::BufWriter) to avoid that.
+/// +/// Example in TSV (the API is the same for JSON, CSV and XML): +/// ``` +/// use oxrdf::{LiteralRef, Variable, VariableRef}; +/// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; +/// use std::iter::once; +/// +/// # #[tokio::main(flavor = "current_thread")] +/// # async fn main() -> std::io::Result<()> { +/// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv); +/// let mut buffer = Vec::new(); +/// let mut writer = tsv_serializer +/// .serialize_solutions_to_tokio_async_write( +/// &mut buffer, +/// vec![ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar"), +/// ], +/// ) +/// .await?; +/// writer +/// .write(once(( +/// VariableRef::new_unchecked("foo"), +/// LiteralRef::from("test"), +/// ))) +/// .await?; +/// writer.finish().await?; +/// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n"); +/// # Ok(()) +/// # } +/// ``` +#[cfg(feature = "async-tokio")] +#[must_use] +pub struct ToTokioAsyncWriteSolutionsWriter { + formatter: ToTokioAsyncWriteSolutionsWriterKind, +} + +#[cfg(feature = "async-tokio")] +enum ToTokioAsyncWriteSolutionsWriterKind { + Xml(ToTokioAsyncWriteXmlSolutionsWriter), + Json(ToTokioAsyncWriteJsonSolutionsWriter), + Csv(ToTokioAsyncWriteCsvSolutionsWriter), + Tsv(ToTokioAsyncWriteTsvSolutionsWriter), +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteSolutionsWriter { + /// Writes a solution. + /// + /// Example in JSON (the API is the same for XML, CSV and TSV): + /// ``` + /// use sparesults::{QueryResultsFormat, QueryResultsSerializer, QuerySolution}; + /// use oxrdf::{Literal, LiteralRef, Variable, VariableRef}; + /// use std::iter::once; + /// + /// # #[tokio::main(flavor = "current_thread")] + /// # async fn main() -> std::io::Result<()> { + /// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json); + /// let mut buffer = Vec::new(); + /// let mut writer = json_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?; + /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?; + /// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())]))).await?; + /// writer.finish().await?; + /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}},{"bar":{"type":"literal","value":"test"}}]}}"#); + /// # Ok(()) + /// # } + /// ``` + pub async fn write<'a>( + &mut self, + solution: impl IntoIterator>, impl Into>)>, + ) -> io::Result<()> { + let solution = solution.into_iter().map(|(v, s)| (v.into(), s.into())); + match &mut self.formatter { + ToTokioAsyncWriteSolutionsWriterKind::Xml(writer) => writer.write(solution).await, + ToTokioAsyncWriteSolutionsWriterKind::Json(writer) => writer.write(solution).await, + ToTokioAsyncWriteSolutionsWriterKind::Csv(writer) => writer.write(solution).await, + ToTokioAsyncWriteSolutionsWriterKind::Tsv(writer) => writer.write(solution).await, + } + } + + /// Writes the last bytes of the file. 
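+    ///
+    /// XML and JSON documents are not well-formed until this method has
+    /// completed; the CSV and TSV arms below finish synchronously since
+    /// those formats need no trailing bytes.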
+ pub async fn finish(self) -> io::Result { + match self.formatter { + ToTokioAsyncWriteSolutionsWriterKind::Xml(write) => write.finish().await, + ToTokioAsyncWriteSolutionsWriterKind::Json(write) => write.finish().await, + ToTokioAsyncWriteSolutionsWriterKind::Csv(write) => Ok(write.finish()), + ToTokioAsyncWriteSolutionsWriterKind::Tsv(write) => Ok(write.finish()), + } + } +} diff --git a/ng-oxigraph/src/sparesults/solution.rs b/ng-oxigraph/src/sparesults/solution.rs new file mode 100644 index 0000000..1f18589 --- /dev/null +++ b/ng-oxigraph/src/sparesults/solution.rs @@ -0,0 +1,340 @@ +//! Definition of [`QuerySolution`] structure and associated utility constructions. + +use crate::oxrdf::{Term, Variable, VariableRef}; +use std::fmt; +use std::iter::Zip; +use std::ops::Index; +use std::sync::Arc; + +/// Tuple associating variables and terms that are the result of a SPARQL query. +/// +/// It is the equivalent of a row in SQL. +/// +/// ``` +/// use sparesults::QuerySolution; +/// use oxrdf::{Variable, Literal}; +/// +/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); +/// assert_eq!(solution.get("foo"), Some(&Literal::from(1).into())); // Get the value of the variable ?foo if it exists (here yes). +/// assert_eq!(solution.get(1), None); // Get the value of the second column if it exists (here no). +/// ``` +pub struct QuerySolution { + variables: Arc<[Variable]>, + values: Vec>, +} + +impl QuerySolution { + /// Returns a value for a given position in the tuple ([`usize`](std::usize)) or a given variable name ([`&str`](std::str), [`Variable`] or [`VariableRef`]). + /// + /// ``` + /// use sparesults::QuerySolution; + /// use oxrdf::{Variable, Literal}; + /// + /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); + /// assert_eq!(solution.get("foo"), Some(&Literal::from(1).into())); // Get the value of the variable ?foo if it exists (here yes). + /// assert_eq!(solution.get(1), None); // Get the value of the second column if it exists (here no). + /// ``` + #[inline] + pub fn get(&self, index: impl VariableSolutionIndex) -> Option<&Term> { + self.values.get(index.index(self)?).and_then(Option::as_ref) + } + + /// The number of variables which could be bound. + /// + /// It is also the number of columns in the solutions table. + /// + /// ``` + /// use oxrdf::{Literal, Variable}; + /// use sparesults::QuerySolution; + /// + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); + /// assert_eq!(solution.len(), 2); + /// ``` + #[inline] + pub fn len(&self) -> usize { + self.values.len() + } + + /// Is there any variable bound in the table? 
+ /// + /// ``` + /// use oxrdf::{Literal, Variable}; + /// use sparesults::QuerySolution; + /// + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); + /// assert!(!solution.is_empty()); + /// + /// let empty_solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![None, None], + /// )); + /// assert!(empty_solution.is_empty()); + /// ``` + #[inline] + pub fn is_empty(&self) -> bool { + self.values.iter().all(Option::is_none) + } + + /// Returns an iterator over bound variables. + /// + /// ``` + /// use oxrdf::{Literal, Variable}; + /// use sparesults::QuerySolution; + /// + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); + /// assert_eq!( + /// solution.iter().collect::>(), + /// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())] + /// ); + /// ``` + #[inline] + pub fn iter(&self) -> impl Iterator { + self.into_iter() + } + + /// Returns the ordered slice of variable values. + /// + /// ``` + /// use oxrdf::{Literal, Variable}; + /// use sparesults::QuerySolution; + /// + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); + /// assert_eq!(solution.values(), &[Some(Literal::from(1).into()), None]); + /// ``` + #[inline] + pub fn values(&self) -> &[Option] { + &self.values + } + + /// Returns the ordered slice of the solution variables, bound or not. 
+ /// + /// ``` + /// use oxrdf::{Literal, Variable}; + /// use sparesults::QuerySolution; + /// + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); + /// assert_eq!( + /// solution.variables(), + /// &[ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar") + /// ] + /// ); + /// ``` + #[inline] + pub fn variables(&self) -> &[Variable] { + &self.variables + } +} + +impl>, S: Into>>> From<(V, S)> for QuerySolution { + #[inline] + fn from((v, s): (V, S)) -> Self { + Self { + variables: v.into(), + values: s.into(), + } + } +} + +impl<'a> IntoIterator for &'a QuerySolution { + type Item = (&'a Variable, &'a Term); + type IntoIter = Iter<'a>; + + #[inline] + fn into_iter(self) -> Self::IntoIter { + Iter { + inner: self.variables.iter().zip(&self.values), + } + } +} + +impl Index for QuerySolution { + type Output = Term; + + #[allow(clippy::panic)] + #[inline] + fn index(&self, index: usize) -> &Self::Output { + self.get(index) + .unwrap_or_else(|| panic!("The column {index} is not set in this solution")) + } +} + +impl Index<&str> for QuerySolution { + type Output = Term; + + #[allow(clippy::panic)] + #[inline] + fn index(&self, index: &str) -> &Self::Output { + self.get(index) + .unwrap_or_else(|| panic!("The variable ?{index} is not set in this solution")) + } +} + +impl Index> for QuerySolution { + type Output = Term; + + #[allow(clippy::panic)] + #[inline] + fn index(&self, index: VariableRef<'_>) -> &Self::Output { + self.get(index) + .unwrap_or_else(|| panic!("The variable {index} is not set in this solution")) + } +} +impl Index for QuerySolution { + type Output = Term; + + #[inline] + fn index(&self, index: Variable) -> &Self::Output { + self.index(index.as_ref()) + } +} + +impl Index<&Variable> for QuerySolution { + type Output = Term; + + #[inline] + fn index(&self, index: &Variable) -> &Self::Output { + self.index(index.as_ref()) + } +} + +impl PartialEq for QuerySolution { + fn eq(&self, other: &Self) -> bool { + for (k, v) in self.iter() { + if other.get(k) != Some(v) { + return false; + } + } + for (k, v) in other.iter() { + if self.get(k) != Some(v) { + return false; + } + } + true + } +} + +impl Eq for QuerySolution {} + +impl fmt::Debug for QuerySolution { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_map().entries(self.iter()).finish() + } +} + +/// An iterator over [`QuerySolution`] bound variables. +/// +/// ``` +/// use oxrdf::{Literal, Variable}; +/// use sparesults::QuerySolution; +/// +/// let solution = QuerySolution::from(( +/// vec![ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar"), +/// ], +/// vec![Some(Literal::from(1).into()), None], +/// )); +/// assert_eq!( +/// solution.iter().collect::>(), +/// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())] +/// ); +/// ``` +pub struct Iter<'a> { + inner: Zip, std::slice::Iter<'a, Option>>, +} + +impl<'a> Iterator for Iter<'a> { + type Item = (&'a Variable, &'a Term); + + #[inline] + fn next(&mut self) -> Option { + for (variable, value) in &mut self.inner { + if let Some(value) = value { + return Some((variable, value)); + } + } + None + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + (0, self.inner.size_hint().1) + } +} + +/// A utility trait to get values for a given variable or tuple position. +/// +/// See [`QuerySolution::get`]. 
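This trait is what lets [`QuerySolution::get`] accept a position, a name, or a variable interchangeably. A short sketch mirroring the doc-tests above (same `sparesults`/`oxrdf` paths as those doc-tests use):

```rust
use oxrdf::{Literal, Variable};
use sparesults::QuerySolution;

let solution = QuerySolution::from((
    vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")],
    vec![Some(Literal::from(1).into()), None],
));
// Each index form resolves to the same column through VariableSolutionIndex.
assert_eq!(solution.get(0), solution.get("foo"));
assert_eq!(solution.get(&Variable::new_unchecked("foo")), solution.get("foo"));
// ?bar is declared but unbound, so every index form returns None.
assert_eq!(solution.get("bar"), None);
```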
+pub trait VariableSolutionIndex { + fn index(self, solution: &QuerySolution) -> Option; +} + +impl VariableSolutionIndex for usize { + #[inline] + fn index(self, _: &QuerySolution) -> Option { + Some(self) + } +} + +impl VariableSolutionIndex for &str { + #[inline] + fn index(self, solution: &QuerySolution) -> Option { + solution.variables.iter().position(|v| v.as_str() == self) + } +} + +impl VariableSolutionIndex for VariableRef<'_> { + #[inline] + fn index(self, solution: &QuerySolution) -> Option { + solution.variables.iter().position(|v| *v == self) + } +} + +impl VariableSolutionIndex for &Variable { + #[inline] + fn index(self, solution: &QuerySolution) -> Option { + self.as_ref().index(solution) + } +} + +impl VariableSolutionIndex for Variable { + #[inline] + fn index(self, solution: &QuerySolution) -> Option { + self.as_ref().index(solution) + } +} diff --git a/ng-oxigraph/src/sparesults/xml.rs b/ng-oxigraph/src/sparesults/xml.rs new file mode 100644 index 0000000..0c90f4f --- /dev/null +++ b/ng-oxigraph/src/sparesults/xml.rs @@ -0,0 +1,833 @@ +//! Implementation of [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/) + +use crate::oxrdf::vocab::rdf; +use crate::oxrdf::*; +use crate::sparesults::error::{QueryResultsParseError, QueryResultsSyntaxError}; +use quick_xml::escape::unescape; +use quick_xml::events::{BytesDecl, BytesEnd, BytesStart, BytesText, Event}; +use quick_xml::{Decoder, Error, Reader, Writer}; +use std::collections::BTreeMap; +use std::io::{self, BufReader, Read, Write}; +use std::mem::take; +use std::sync::Arc; +#[cfg(feature = "async-tokio")] +use tokio::io::{AsyncRead, AsyncWrite, BufReader as AsyncBufReader}; + +pub fn write_boolean_xml_result(write: W, value: bool) -> io::Result { + let mut writer = Writer::new(write); + for event in inner_write_boolean_xml_result(value) { + writer.write_event(event).map_err(map_xml_error)?; + } + Ok(writer.into_inner()) +} + +#[cfg(feature = "async-tokio")] +pub async fn tokio_async_write_boolean_xml_result( + write: W, + value: bool, +) -> io::Result { + let mut writer = Writer::new(write); + for event in inner_write_boolean_xml_result(value) { + writer + .write_event_async(event) + .await + .map_err(map_xml_error)?; + } + Ok(writer.into_inner()) +} + +fn inner_write_boolean_xml_result(value: bool) -> [Event<'static>; 8] { + [ + Event::Decl(BytesDecl::new("1.0", None, None)), + Event::Start( + BytesStart::new("sparql") + .with_attributes([("xmlns", "http://www.w3.org/2005/sparql-results#")]), + ), + Event::Start(BytesStart::new("head")), + Event::End(BytesEnd::new("head")), + Event::Start(BytesStart::new("boolean")), + Event::Text(BytesText::new(if value { "true" } else { "false" })), + Event::End(BytesEnd::new("boolean")), + Event::End(BytesEnd::new("sparql")), + ] +} + +pub struct ToWriteXmlSolutionsWriter { + inner: InnerXmlSolutionsWriter, + writer: Writer, +} + +impl ToWriteXmlSolutionsWriter { + pub fn start(write: W, variables: &[Variable]) -> io::Result { + let mut writer = Writer::new(write); + let mut buffer = Vec::with_capacity(48); + let inner = InnerXmlSolutionsWriter::start(&mut buffer, variables); + Self::do_write(&mut writer, buffer)?; + Ok(Self { inner, writer }) + } + + pub fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + let mut buffer = Vec::with_capacity(48); + self.inner.write(&mut buffer, solution); + Self::do_write(&mut self.writer, buffer) + } + + pub fn finish(mut self) -> io::Result { + let mut buffer = 
Vec::with_capacity(4); + self.inner.finish(&mut buffer); + Self::do_write(&mut self.writer, buffer)?; + Ok(self.writer.into_inner()) + } + + fn do_write(writer: &mut Writer, output: Vec>) -> io::Result<()> { + for event in output { + writer.write_event(event).map_err(map_xml_error)?; + } + Ok(()) + } +} + +#[cfg(feature = "async-tokio")] +pub struct ToTokioAsyncWriteXmlSolutionsWriter { + inner: InnerXmlSolutionsWriter, + writer: Writer, +} + +#[cfg(feature = "async-tokio")] +impl ToTokioAsyncWriteXmlSolutionsWriter { + pub async fn start(write: W, variables: &[Variable]) -> io::Result { + let mut writer = Writer::new(write); + let mut buffer = Vec::with_capacity(48); + let inner = InnerXmlSolutionsWriter::start(&mut buffer, variables); + Self::do_write(&mut writer, buffer).await?; + Ok(Self { inner, writer }) + } + + pub async fn write<'a>( + &mut self, + solution: impl IntoIterator, TermRef<'a>)>, + ) -> io::Result<()> { + let mut buffer = Vec::with_capacity(48); + self.inner.write(&mut buffer, solution); + Self::do_write(&mut self.writer, buffer).await + } + + pub async fn finish(mut self) -> io::Result { + let mut buffer = Vec::with_capacity(4); + self.inner.finish(&mut buffer); + Self::do_write(&mut self.writer, buffer).await?; + Ok(self.writer.into_inner()) + } + + async fn do_write(writer: &mut Writer, output: Vec>) -> io::Result<()> { + for event in output { + writer + .write_event_async(event) + .await + .map_err(map_xml_error)?; + } + Ok(()) + } +} + +struct InnerXmlSolutionsWriter; + +impl InnerXmlSolutionsWriter { + fn start<'a>(output: &mut Vec>, variables: &'a [Variable]) -> Self { + output.push(Event::Decl(BytesDecl::new("1.0", None, None))); + output.push(Event::Start(BytesStart::new("sparql").with_attributes([( + "xmlns", + "http://www.w3.org/2005/sparql-results#", + )]))); + output.push(Event::Start(BytesStart::new("head"))); + for variable in variables { + output.push(Event::Empty( + BytesStart::new("variable").with_attributes([("name", variable.as_str())]), + )); + } + output.push(Event::End(BytesEnd::new("head"))); + output.push(Event::Start(BytesStart::new("results"))); + Self {} + } + + #[allow(clippy::unused_self)] + fn write<'a>( + &self, + output: &mut Vec>, + solution: impl IntoIterator, TermRef<'a>)>, + ) { + output.push(Event::Start(BytesStart::new("result"))); + for (variable, value) in solution { + output.push(Event::Start( + BytesStart::new("binding").with_attributes([("name", variable.as_str())]), + )); + write_xml_term(output, value); + output.push(Event::End(BytesEnd::new("binding"))); + } + output.push(Event::End(BytesEnd::new("result"))); + } + + #[allow(clippy::unused_self)] + fn finish(self, output: &mut Vec>) { + output.push(Event::End(BytesEnd::new("results"))); + output.push(Event::End(BytesEnd::new("sparql"))); + } +} + +fn write_xml_term<'a>(output: &mut Vec>, term: TermRef<'a>) { + match term { + TermRef::NamedNode(uri) => { + output.push(Event::Start(BytesStart::new("uri"))); + output.push(Event::Text(BytesText::new(uri.as_str()))); + output.push(Event::End(BytesEnd::new("uri"))); + } + TermRef::BlankNode(bnode) => { + output.push(Event::Start(BytesStart::new("bnode"))); + output.push(Event::Text(BytesText::new(bnode.as_str()))); + output.push(Event::End(BytesEnd::new("bnode"))); + } + TermRef::Literal(literal) => { + let mut start = BytesStart::new("literal"); + if let Some(language) = literal.language() { + start.push_attribute(("xml:lang", language)); + } else if !literal.is_plain() { + start.push_attribute(("datatype", 
literal.datatype().as_str())) + } + output.push(Event::Start(start)); + output.push(Event::Text(BytesText::new(literal.value()))); + output.push(Event::End(BytesEnd::new("literal"))); + } + #[cfg(feature = "rdf-star")] + TermRef::Triple(triple) => { + output.push(Event::Start(BytesStart::new("triple"))); + output.push(Event::Start(BytesStart::new("subject"))); + write_xml_term(output, triple.subject.as_ref().into()); + output.push(Event::End(BytesEnd::new("subject"))); + output.push(Event::Start(BytesStart::new("predicate"))); + write_xml_term(output, triple.predicate.as_ref().into()); + output.push(Event::End(BytesEnd::new("predicate"))); + output.push(Event::Start(BytesStart::new("object"))); + write_xml_term(output, triple.object.as_ref()); + output.push(Event::End(BytesEnd::new("object"))); + output.push(Event::End(BytesEnd::new("triple"))); + } + } +} + +pub enum FromReadXmlQueryResultsReader { + Solutions { + variables: Vec, + solutions: FromReadXmlSolutionsReader, + }, + Boolean(bool), +} + +impl FromReadXmlQueryResultsReader { + pub fn read(read: R) -> Result { + let mut reader = Reader::from_reader(BufReader::new(read)); + reader.trim_text(true); + reader.expand_empty_elements(true); + let mut reader_buffer = Vec::new(); + let mut inner = XmlInnerQueryResultsReader { + state: ResultsState::Start, + variables: Vec::new(), + decoder: reader.decoder(), + }; + loop { + reader_buffer.clear(); + let event = reader.read_event_into(&mut reader_buffer)?; + if let Some(result) = inner.read_event(event)? { + return Ok(match result { + XmlInnerQueryResults::Solutions { + variables, + solutions, + } => Self::Solutions { + variables, + solutions: FromReadXmlSolutionsReader { + reader, + inner: solutions, + reader_buffer, + }, + }, + XmlInnerQueryResults::Boolean(value) => Self::Boolean(value), + }); + } + } + } +} + +pub struct FromReadXmlSolutionsReader { + reader: Reader>, + inner: XmlInnerSolutionsReader, + reader_buffer: Vec, +} + +impl FromReadXmlSolutionsReader { + pub fn read_next(&mut self) -> Result>>, QueryResultsParseError> { + loop { + self.reader_buffer.clear(); + let event = self.reader.read_event_into(&mut self.reader_buffer)?; + if event == Event::Eof { + return Ok(None); + } + if let Some(solution) = self.inner.read_event(event)? { + return Ok(Some(solution)); + } + } + } +} + +#[cfg(feature = "async-tokio")] +pub enum FromTokioAsyncReadXmlQueryResultsReader { + Solutions { + variables: Vec, + solutions: FromTokioAsyncReadXmlSolutionsReader, + }, + Boolean(bool), +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadXmlQueryResultsReader { + pub async fn read(read: R) -> Result { + let mut reader = Reader::from_reader(AsyncBufReader::new(read)); + reader.trim_text(true); + reader.expand_empty_elements(true); + let mut reader_buffer = Vec::new(); + let mut inner = XmlInnerQueryResultsReader { + state: ResultsState::Start, + variables: Vec::new(), + decoder: reader.decoder(), + }; + loop { + reader_buffer.clear(); + let event = reader.read_event_into_async(&mut reader_buffer).await?; + if let Some(result) = inner.read_event(event)? 
{ + return Ok(match result { + XmlInnerQueryResults::Solutions { + variables, + solutions, + } => Self::Solutions { + variables, + solutions: FromTokioAsyncReadXmlSolutionsReader { + reader, + inner: solutions, + reader_buffer, + }, + }, + XmlInnerQueryResults::Boolean(value) => Self::Boolean(value), + }); + } + } + } +} + +#[cfg(feature = "async-tokio")] +pub struct FromTokioAsyncReadXmlSolutionsReader { + reader: Reader>, + inner: XmlInnerSolutionsReader, + reader_buffer: Vec, +} + +#[cfg(feature = "async-tokio")] +impl FromTokioAsyncReadXmlSolutionsReader { + pub async fn read_next(&mut self) -> Result>>, QueryResultsParseError> { + loop { + self.reader_buffer.clear(); + let event = self + .reader + .read_event_into_async(&mut self.reader_buffer) + .await?; + if event == Event::Eof { + return Ok(None); + } + if let Some(solution) = self.inner.read_event(event)? { + return Ok(Some(solution)); + } + } + } +} + +enum XmlInnerQueryResults { + Solutions { + variables: Vec, + solutions: XmlInnerSolutionsReader, + }, + Boolean(bool), +} + +#[derive(Clone, Copy)] +enum ResultsState { + Start, + Sparql, + Head, + AfterHead, + Boolean, +} + +struct XmlInnerQueryResultsReader { + state: ResultsState, + variables: Vec, + decoder: Decoder, +} + +impl XmlInnerQueryResultsReader { + pub fn read_event( + &mut self, + event: Event<'_>, + ) -> Result, QueryResultsParseError> { + match event { + Event::Start(event) => match self.state { + ResultsState::Start => { + if event.local_name().as_ref() == b"sparql" { + self.state = ResultsState::Sparql; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg(format!("Expecting tag, found <{}>", self.decoder.decode(event.name().as_ref())?)).into()) + } + } + ResultsState::Sparql => { + if event.local_name().as_ref() == b"head" { + self.state = ResultsState::Head; + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg(format!("Expecting tag, found <{}>", self.decoder.decode(event.name().as_ref())?)).into()) + } + } + ResultsState::Head => { + if event.local_name().as_ref() == b"variable" { + let name = event.attributes() + .filter_map(Result::ok) + .find(|attr| attr.key.local_name().as_ref() == b"name") + .ok_or_else(|| QueryResultsSyntaxError::msg("No name attribute found for the tag"))?; + let name = unescape(&self.decoder.decode(&name.value)?)?.into_owned(); + let variable = Variable::new(name).map_err(|e| QueryResultsSyntaxError::msg(format!("Invalid variable name: {e}")))?; + if self.variables.contains(&variable) { + return Err(QueryResultsSyntaxError::msg(format!( + "The variable {variable} is declared twice" + )) + .into()); + } + self.variables.push(variable); + Ok(None) + } else if event.local_name().as_ref() == b"link" { + // no op + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg(format!("Expecting or tag, found <{}>", self.decoder.decode(event.name().as_ref())?)).into()) + } + } + ResultsState::AfterHead => { + if event.local_name().as_ref() == b"boolean" { + self.state = ResultsState::Boolean; + Ok(None) + } else if event.local_name().as_ref() == b"results" { + let mut mapping = BTreeMap::default(); + for (i, var) in self.variables.iter().enumerate() { + mapping.insert(var.clone().into_string(), i); + } + Ok(Some(XmlInnerQueryResults::Solutions { + variables: take(&mut self.variables), + solutions: XmlInnerSolutionsReader { + decoder: self.decoder, + mapping, + state_stack: vec![State::Start, State::Start], + new_bindings: Vec::new(), + current_var: None, + term: None, + lang: None, + datatype: None, + subject_stack: Vec::new(), + 
predicate_stack: Vec::new(), + object_stack: Vec::new(), + }, + })) + } else if event.local_name().as_ref() != b"link" && event.local_name().as_ref() != b"results" && event.local_name().as_ref() != b"boolean" { + Err(QueryResultsSyntaxError::msg(format!("Expecting sparql tag, found <{}>", self.decoder.decode(event.name().as_ref())?)).into()) + } else { + Ok(None) + } + } + ResultsState::Boolean => Err(QueryResultsSyntaxError::msg(format!("Unexpected tag inside of tag: <{}>", self.decoder.decode(event.name().as_ref())?)).into()) + }, + Event::Text(event) => { + let value = event.unescape()?; + match self.state { + ResultsState::Boolean => { + if value == "true" { + Ok(Some(XmlInnerQueryResults::Boolean(true))) + } else if value == "false" { + Ok(Some(XmlInnerQueryResults::Boolean(false))) + } else { + Err(QueryResultsSyntaxError::msg(format!("Unexpected boolean value. Found '{value}'")).into()) + } + } + _ => Err(QueryResultsSyntaxError::msg(format!("Unexpected textual value found: '{value}'")).into()) + } + } + Event::End(event) => { + if let ResultsState::Head = self.state { + if event.local_name().as_ref() == b"head" { + self.state = ResultsState::AfterHead + } + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg("Unexpected early file end. All results file should have a and a or tag").into()) + } + } + Event::Eof => Err(QueryResultsSyntaxError::msg("Unexpected early file end. All results file should have a and a or tag").into()), + Event::Comment(_) | Event::Decl(_) | Event::PI(_) | Event::DocType(_) => { + Ok(None) + } + Event::Empty(_) => unreachable!("Empty events are expended"), + Event::CData(_) => { + Err(QueryResultsSyntaxError::msg( + " are not supported in SPARQL XML results", + ) + .into()) + } + } + } +} + +enum State { + Start, + Result, + Binding, + Uri, + BNode, + Literal, + Triple, + Subject, + Predicate, + Object, +} + +struct XmlInnerSolutionsReader { + decoder: Decoder, + mapping: BTreeMap, + state_stack: Vec, + new_bindings: Vec>, + current_var: Option, + term: Option, + lang: Option, + datatype: Option, + subject_stack: Vec, + predicate_stack: Vec, + object_stack: Vec, +} + +impl XmlInnerSolutionsReader { + #[allow(clippy::unwrap_in_result)] + pub fn read_event( + &mut self, + event: Event<'_>, + ) -> Result>>, QueryResultsParseError> { + match event { + Event::Start(event) => match self.state_stack.last().unwrap() { + State::Start => { + if event.local_name().as_ref() == b"result" { + self.new_bindings = vec![None; self.mapping.len()]; + self.state_stack.push(State::Result); + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg(format!( + "Expecting , found <{}>", + self.decoder.decode(event.name().as_ref())? + )) + .into()) + } + } + State::Result => { + if event.local_name().as_ref() == b"binding" { + let Some(attr) = event + .attributes() + .filter_map(Result::ok) + .find(|attr| attr.key.local_name().as_ref() == b"name") + else { + return Err(QueryResultsSyntaxError::msg( + "No name attribute found for the tag", + ) + .into()); + }; + self.current_var = + Some(unescape(&self.decoder.decode(&attr.value)?)?.into_owned()); + self.state_stack.push(State::Binding); + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg(format!( + "Expecting , found <{}>", + self.decoder.decode(event.name().as_ref())? 
+ )) + .into()) + } + } + State::Binding | State::Subject | State::Predicate | State::Object => { + if self.term.is_some() { + return Err(QueryResultsSyntaxError::msg( + "There is already a value for the current binding", + ) + .into()); + } + if event.local_name().as_ref() == b"uri" { + self.state_stack.push(State::Uri); + Ok(None) + } else if event.local_name().as_ref() == b"bnode" { + self.state_stack.push(State::BNode); + Ok(None) + } else if event.local_name().as_ref() == b"literal" { + for attr in event.attributes() { + let attr = attr.map_err(Error::from)?; + if attr.key.as_ref() == b"xml:lang" { + self.lang = Some( + unescape(&self.decoder.decode(&attr.value)?)?.into_owned(), + ); + } else if attr.key.local_name().as_ref() == b"datatype" { + let iri = self.decoder.decode(&attr.value)?; + let iri = unescape(&iri)?; + self.datatype = + Some(NamedNode::new(iri.as_ref()).map_err(|e| { + QueryResultsSyntaxError::msg(format!( + "Invalid datatype IRI '{iri}': {e}" + )) + })?); + } + } + self.state_stack.push(State::Literal); + Ok(None) + } else if event.local_name().as_ref() == b"triple" { + self.state_stack.push(State::Triple); + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg(format!( + "Expecting , or found <{}>", + self.decoder.decode(event.name().as_ref())? + )) + .into()) + } + } + State::Triple => { + if event.local_name().as_ref() == b"subject" { + self.state_stack.push(State::Subject); + Ok(None) + } else if event.local_name().as_ref() == b"predicate" { + self.state_stack.push(State::Predicate); + Ok(None) + } else if event.local_name().as_ref() == b"object" { + self.state_stack.push(State::Object); + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg(format!( + "Expecting , or found <{}>", + self.decoder.decode(event.name().as_ref())? + )) + .into()) + } + } + State::Uri => Err(QueryResultsSyntaxError::msg(format!( + " must only contain a string, found <{}>", + self.decoder.decode(event.name().as_ref())? + )) + .into()), + State::BNode => Err(QueryResultsSyntaxError::msg(format!( + " must only contain a string, found <{}>", + self.decoder.decode(event.name().as_ref())? + )) + .into()), + State::Literal => Err(QueryResultsSyntaxError::msg(format!( + " must only contain a string, found <{}>", + self.decoder.decode(event.name().as_ref())? + )) + .into()), + }, + Event::Text(event) => { + let data = event.unescape()?; + match self.state_stack.last().unwrap() { + State::Uri => { + self.term = Some( + NamedNode::new(data.to_string()) + .map_err(|e| { + QueryResultsSyntaxError::msg(format!( + "Invalid IRI value '{data}': {e}" + )) + })? + .into(), + ); + Ok(None) + } + State::BNode => { + self.term = Some( + BlankNode::new(data.to_string()) + .map_err(|e| { + QueryResultsSyntaxError::msg(format!( + "Invalid blank node value '{data}': {e}" + )) + })? 
+ .into(), + ); + Ok(None) + } + State::Literal => { + self.term = Some( + build_literal(data, self.lang.take(), self.datatype.take())?.into(), + ); + Ok(None) + } + _ => Err(QueryResultsSyntaxError::msg(format!( + "Unexpected textual value found: {data}" + )) + .into()), + } + } + Event::End(_) => match self.state_stack.pop().unwrap() { + State::Start | State::Uri => Ok(None), + State::Result => Ok(Some(take(&mut self.new_bindings))), + State::Binding => { + if let Some(var) = &self.current_var { + if let Some(var) = self.mapping.get(var) { + self.new_bindings[*var] = self.term.take() + } else { + return Err( + QueryResultsSyntaxError::msg(format!("The variable '{var}' is used in a binding but not declared in the variables list")).into() + ); + } + } else { + return Err(QueryResultsSyntaxError::msg( + "No name found for tag", + ) + .into()); + } + Ok(None) + } + State::Subject => { + if let Some(subject) = self.term.take() { + self.subject_stack.push(subject) + } + Ok(None) + } + State::Predicate => { + if let Some(predicate) = self.term.take() { + self.predicate_stack.push(predicate) + } + Ok(None) + } + State::Object => { + if let Some(object) = self.term.take() { + self.object_stack.push(object) + } + Ok(None) + } + State::BNode => { + if self.term.is_none() { + // We default to a random bnode + self.term = Some(BlankNode::default().into()) + } + Ok(None) + } + State::Literal => { + if self.term.is_none() { + // We default to the empty literal + self.term = + Some(build_literal("", self.lang.take(), self.datatype.take())?.into()) + } + Ok(None) + } + State::Triple => { + #[cfg(feature = "rdf-star")] + if let (Some(subject), Some(predicate), Some(object)) = ( + self.subject_stack.pop(), + self.predicate_stack.pop(), + self.object_stack.pop(), + ) { + self.term = Some( + Triple::new( + match subject { + Term::NamedNode(subject) => subject.into(), + Term::BlankNode(subject) => subject.into(), + Term::Triple(subject) => Subject::Triple(subject), + Term::Literal(_) => { + return Err(QueryResultsSyntaxError::msg( + "The value should not be a ", + ) + .into()); + } + }, + match predicate { + Term::NamedNode(predicate) => predicate, + _ => { + return Err(QueryResultsSyntaxError::msg( + "The value should be an ", + ) + .into()); + } + }, + object, + ) + .into(), + ); + Ok(None) + } else { + Err(QueryResultsSyntaxError::msg( + "A should contain a , a and an ", + ) + .into()) + } + #[cfg(not(feature = "rdf-star"))] + { + Err(QueryResultsSyntaxError::msg( + "The tag is only supported with RDF-star", + ) + .into()) + } + } + }, + Event::Eof | Event::Comment(_) | Event::Decl(_) | Event::PI(_) | Event::DocType(_) => { + Ok(None) + } + Event::Empty(_) => unreachable!("Empty events are expended"), + Event::CData(_) => Err(QueryResultsSyntaxError::msg( + " are not supported in SPARQL XML results", + ) + .into()), + } + } +} + +fn build_literal( + value: impl Into, + lang: Option, + datatype: Option, +) -> Result { + match lang { + Some(lang) => { + if let Some(datatype) = datatype { + if datatype.as_ref() != rdf::LANG_STRING { + return Err(QueryResultsSyntaxError::msg(format!( + "xml:lang value '{lang}' provided with the datatype {datatype}" + )) + .into()); + } + } + Literal::new_language_tagged_literal(value, &lang).map_err(|e| { + QueryResultsSyntaxError::msg(format!("Invalid xml:lang value '{lang}': {e}")).into() + }) + } + None => Ok(if let Some(datatype) = datatype { + Literal::new_typed_literal(value, datatype) + } else { + Literal::new_simple_literal(value) + }), + } +} + +fn 
map_xml_error(error: Error) -> io::Error { + match error { + Error::Io(error) => { + Arc::try_unwrap(error).unwrap_or_else(|error| io::Error::new(error.kind(), error)) + } + Error::UnexpectedEof(_) => io::Error::new(io::ErrorKind::UnexpectedEof, error), + _ => io::Error::new(io::ErrorKind::InvalidData, error), + } +} diff --git a/ng-oxigraph/src/spargebra/README.md b/ng-oxigraph/src/spargebra/README.md new file mode 100644 index 0000000..313d875 --- /dev/null +++ b/ng-oxigraph/src/spargebra/README.md @@ -0,0 +1,46 @@ +Spargebra +========= + +[![Latest Version](https://img.shields.io/crates/v/spargebra.svg)](https://crates.io/crates/spargebra) +[![Released API docs](https://docs.rs/spargebra/badge.svg)](https://docs.rs/spargebra) +[![Crates.io downloads](https://img.shields.io/crates/d/spargebra)](https://crates.io/crates/spargebra) +[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) +[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community) + +Spargebra is a [SPARQL](https://www.w3.org/TR/sparql11-overview/) parser. + +It supports both [SPARQL 1.1 Query](https://www.w3.org/TR/sparql11-query/) and [SPARQL 1.1 Update](https://www.w3.org/TR/sparql11-update/). + +The emitted tree is based on [SPARQL 1.1 Query Algebra](https://www.w3.org/TR/sparql11-query/#sparqlQuery) objects. + +The API entry point for SPARQL queries is the [`Query`] struct and the API entry point for SPARQL updates is the [`Update`] struct. + +Support for [SPARQL-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#sparql-star) is also available behind the `rdf-star` feature. + +This crate is intended to be a building piece for SPARQL implementations in Rust like [Oxigraph](https://oxigraph.org). + +Usage example: + +```rust +use spargebra::Query; + +let query_str = "SELECT ?s ?p ?o WHERE { ?s ?p ?o . }"; +let query = Query::parse(query_str, None).unwrap(); +assert_eq!(query.to_string(), query_str); +``` + +## License + +This project is licensed under either of + +* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or + ``) +* MIT license ([LICENSE-MIT](../LICENSE-MIT) or + ``) + +at your option. + + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/ng-oxigraph/src/spargebra/algebra.rs b/ng-oxigraph/src/spargebra/algebra.rs new file mode 100644 index 0000000..7255a91 --- /dev/null +++ b/ng-oxigraph/src/spargebra/algebra.rs @@ -0,0 +1,1419 @@ +//! [SPARQL 1.1 Query Algebra](https://www.w3.org/TR/sparql11-query/#sparqlQuery) representation. + +use crate::oxrdf::LiteralRef; +use crate::spargebra::term::*; +use std::fmt; + +/// A [property path expression](https://www.w3.org/TR/sparql11-query/#defn_PropertyPathExpr). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum PropertyPathExpression { + NamedNode(NamedNode), + Reverse(Box), + Sequence(Box, Box), + Alternative(Box, Box), + ZeroOrMore(Box), + OneOrMore(Box), + ZeroOrOne(Box), + NegatedPropertySet(Vec), +} + +impl PropertyPathExpression { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
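Before the formatting code, a quick construction sketch: property paths nest through `Box`, and the `Display` impl just below prints them back in SPARQL surface syntax. The example IRI is illustrative, and the module paths assume the upstream `spargebra::algebra` layout:

```rust
use oxrdf::NamedNode;
use spargebra::algebra::PropertyPathExpression;

// ex:knows+ : one or more hops over the `knows` predicate.
let knows = NamedNode::new("http://example.com/knows").unwrap();
let path = PropertyPathExpression::OneOrMore(Box::new(knows.into()));
assert_eq!(path.to_string(), "(<http://example.com/knows>)+");
```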
+ pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + match self { + Self::NamedNode(p) => write!(f, "{p}"), + Self::Reverse(p) => { + f.write_str("(reverse ")?; + p.fmt_sse(f)?; + f.write_str(")") + } + Self::Alternative(a, b) => { + f.write_str("(alt ")?; + a.fmt_sse(f)?; + f.write_str(" ")?; + b.fmt_sse(f)?; + f.write_str(")") + } + Self::Sequence(a, b) => { + f.write_str("(seq ")?; + a.fmt_sse(f)?; + f.write_str(" ")?; + b.fmt_sse(f)?; + f.write_str(")") + } + Self::ZeroOrMore(p) => { + f.write_str("(path* ")?; + p.fmt_sse(f)?; + f.write_str(")") + } + Self::OneOrMore(p) => { + f.write_str("(path+ ")?; + p.fmt_sse(f)?; + f.write_str(")") + } + Self::ZeroOrOne(p) => { + f.write_str("(path? ")?; + p.fmt_sse(f)?; + f.write_str(")") + } + Self::NegatedPropertySet(p) => { + f.write_str("(notoneof")?; + for p in p { + write!(f, " {p}")?; + } + f.write_str(")") + } + } + } +} + +impl fmt::Display for PropertyPathExpression { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NamedNode(p) => p.fmt(f), + Self::Reverse(p) => write!(f, "^({p})"), + Self::Sequence(a, b) => write!(f, "({a} / {b})"), + Self::Alternative(a, b) => write!(f, "({a} | {b})"), + Self::ZeroOrMore(p) => write!(f, "({p})*"), + Self::OneOrMore(p) => write!(f, "({p})+"), + Self::ZeroOrOne(p) => write!(f, "({p})?"), + Self::NegatedPropertySet(p) => { + f.write_str("!(")?; + for (i, c) in p.iter().enumerate() { + if i > 0 { + f.write_str(" | ")?; + } + write!(f, "{c}")?; + } + f.write_str(")") + } + } + } +} + +impl From for PropertyPathExpression { + fn from(p: NamedNode) -> Self { + Self::NamedNode(p) + } +} + +/// An [expression](https://www.w3.org/TR/sparql11-query/#expressions). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum Expression { + NamedNode(NamedNode), + Literal(Literal), + Variable(Variable), + /// [Logical-or](https://www.w3.org/TR/sparql11-query/#func-logical-or). + Or(Box, Box), + /// [Logical-and](https://www.w3.org/TR/sparql11-query/#func-logical-and). + And(Box, Box), + /// [RDFterm-equal](https://www.w3.org/TR/sparql11-query/#func-RDFterm-equal) and all the XSD equalities. + Equal(Box, Box), + /// [sameTerm](https://www.w3.org/TR/sparql11-query/#func-sameTerm). + SameTerm(Box, Box), + /// [op:numeric-greater-than](https://www.w3.org/TR/xpath-functions-31/#func-numeric-greater-than) and other XSD greater than operators. + Greater(Box, Box), + GreaterOrEqual(Box, Box), + /// [op:numeric-less-than](https://www.w3.org/TR/xpath-functions-31/#func-numeric-less-than) and other XSD greater than operators. + Less(Box, Box), + LessOrEqual(Box, Box), + /// [IN](https://www.w3.org/TR/sparql11-query/#func-in) + In(Box, Vec), + /// [op:numeric-add](https://www.w3.org/TR/xpath-functions-31/#func-numeric-add) and other XSD additions. + Add(Box, Box), + /// [op:numeric-subtract](https://www.w3.org/TR/xpath-functions-31/#func-numeric-subtract) and other XSD subtractions. + Subtract(Box, Box), + /// [op:numeric-multiply](https://www.w3.org/TR/xpath-functions-31/#func-numeric-multiply) and other XSD multiplications. + Multiply(Box, Box), + /// [op:numeric-divide](https://www.w3.org/TR/xpath-functions-31/#func-numeric-divide) and other XSD divides. + Divide(Box, Box), + /// [op:numeric-unary-plus](https://www.w3.org/TR/xpath-functions-31/#func-numeric-unary-plus) and other XSD unary plus. + UnaryPlus(Box), + /// [op:numeric-unary-minus](https://www.w3.org/TR/xpath-functions-31/#func-numeric-unary-minus) and other XSD unary minus. 
+ UnaryMinus(Box), + /// [fn:not](https://www.w3.org/TR/xpath-functions-31/#func-not). + Not(Box), + /// [EXISTS](https://www.w3.org/TR/sparql11-query/#func-filter-exists). + Exists(Box), + /// [BOUND](https://www.w3.org/TR/sparql11-query/#func-bound). + Bound(Variable), + /// [IF](https://www.w3.org/TR/sparql11-query/#func-if). + If(Box, Box, Box), + /// [COALESCE](https://www.w3.org/TR/sparql11-query/#func-coalesce). + Coalesce(Vec), + /// A regular function call. + FunctionCall(Function, Vec), +} + +impl Expression { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + match self { + Self::NamedNode(node) => write!(f, "{node}"), + Self::Literal(l) => write!(f, "{l}"), + Self::Variable(var) => write!(f, "{var}"), + Self::Or(a, b) => fmt_sse_binary_expression(f, "||", a, b), + Self::And(a, b) => fmt_sse_binary_expression(f, "&&", a, b), + Self::Equal(a, b) => fmt_sse_binary_expression(f, "=", a, b), + Self::SameTerm(a, b) => fmt_sse_binary_expression(f, "sameTerm", a, b), + Self::Greater(a, b) => fmt_sse_binary_expression(f, ">", a, b), + Self::GreaterOrEqual(a, b) => fmt_sse_binary_expression(f, ">=", a, b), + Self::Less(a, b) => fmt_sse_binary_expression(f, "<", a, b), + Self::LessOrEqual(a, b) => fmt_sse_binary_expression(f, "<=", a, b), + Self::In(a, b) => { + f.write_str("(in ")?; + a.fmt_sse(f)?; + for p in b { + f.write_str(" ")?; + p.fmt_sse(f)?; + } + f.write_str(")") + } + Self::Add(a, b) => fmt_sse_binary_expression(f, "+", a, b), + Self::Subtract(a, b) => fmt_sse_binary_expression(f, "-", a, b), + Self::Multiply(a, b) => fmt_sse_binary_expression(f, "*", a, b), + Self::Divide(a, b) => fmt_sse_binary_expression(f, "/", a, b), + Self::UnaryPlus(e) => fmt_sse_unary_expression(f, "+", e), + Self::UnaryMinus(e) => fmt_sse_unary_expression(f, "-", e), + Self::Not(e) => fmt_sse_unary_expression(f, "!", e), + Self::FunctionCall(function, parameters) => { + f.write_str("( ")?; + function.fmt_sse(f)?; + for p in parameters { + f.write_str(" ")?; + p.fmt_sse(f)?; + } + f.write_str(")") + } + Self::Exists(p) => { + f.write_str("(exists ")?; + p.fmt_sse(f)?; + f.write_str(")") + } + Self::Bound(v) => { + write!(f, "(bound {v})") + } + Self::If(a, b, c) => { + f.write_str("(if ")?; + a.fmt_sse(f)?; + f.write_str(" ")?; + b.fmt_sse(f)?; + f.write_str(" ")?; + c.fmt_sse(f)?; + f.write_str(")") + } + Self::Coalesce(parameters) => { + f.write_str("(coalesce")?; + for p in parameters { + f.write_str(" ")?; + p.fmt_sse(f)?; + } + f.write_str(")") + } + } + } +} + +impl fmt::Display for Expression { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NamedNode(node) => node.fmt(f), + Self::Literal(l) => l.fmt(f), + Self::Variable(var) => var.fmt(f), + Self::Or(a, b) => write!(f, "({a} || {b})"), + Self::And(a, b) => write!(f, "({a} && {b})"), + Self::Equal(a, b) => { + write!(f, "({a} = {b})") + } + Self::SameTerm(a, b) => { + write!(f, "sameTerm({a}, {b})") + } + Self::Greater(a, b) => { + write!(f, "({a} > {b})") + } + Self::GreaterOrEqual(a, b) => write!(f, "({a} >= {b})"), + Self::Less(a, b) => { + write!(f, "({a} < {b})") + } + Self::LessOrEqual(a, b) => write!(f, "({a} <= {b})"), + Self::In(a, b) => { + write!(f, "({a} IN ")?; + write_arg_list(b, f)?; + f.write_str(")") + } + Self::Add(a, b) => { + write!(f, "{a} + {b}") + } + Self::Subtract(a, b) => { + write!(f, "{a} - {b}") + } + Self::Multiply(a, b) => { + write!(f, "{a} * {b}") 
+ } + Self::Divide(a, b) => { + write!(f, "{a} / {b}") + } + Self::UnaryPlus(e) => write!(f, "+{e}"), + Self::UnaryMinus(e) => write!(f, "-{e}"), + Self::Not(e) => match e.as_ref() { + Self::Exists(p) => write!(f, "NOT EXISTS {{ {p} }}"), + e => write!(f, "!{e}"), + }, + Self::FunctionCall(function, parameters) => { + write!(f, "{function}")?; + write_arg_list(parameters, f) + } + Self::Bound(v) => write!(f, "BOUND({v})"), + Self::Exists(p) => write!(f, "EXISTS {{ {p} }}"), + Self::If(a, b, c) => write!(f, "IF({a}, {b}, {c})"), + Self::Coalesce(parameters) => { + f.write_str("COALESCE")?; + write_arg_list(parameters, f) + } + } + } +} + +impl From for Expression { + fn from(p: NamedNode) -> Self { + Self::NamedNode(p) + } +} + +impl From for Expression { + fn from(p: Literal) -> Self { + Self::Literal(p) + } +} + +impl From for Expression { + fn from(v: Variable) -> Self { + Self::Variable(v) + } +} + +impl From for Expression { + fn from(p: NamedNodePattern) -> Self { + match p { + NamedNodePattern::NamedNode(p) => p.into(), + NamedNodePattern::Variable(p) => p.into(), + } + } +} + +fn write_arg_list( + params: impl IntoIterator, + f: &mut fmt::Formatter<'_>, +) -> fmt::Result { + f.write_str("(")?; + let mut cont = false; + for p in params { + if cont { + f.write_str(", ")?; + } + p.fmt(f)?; + cont = true; + } + f.write_str(")") +} + +/// A function name. +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum Function { + Str, + Lang, + LangMatches, + Datatype, + Iri, + BNode, + Rand, + Abs, + Ceil, + Floor, + Round, + Concat, + SubStr, + StrLen, + Replace, + UCase, + LCase, + EncodeForUri, + Contains, + StrStarts, + StrEnds, + StrBefore, + StrAfter, + Year, + Month, + Day, + Hours, + Minutes, + Seconds, + Timezone, + Tz, + Now, + Uuid, + StrUuid, + Md5, + Sha1, + Sha256, + Sha384, + Sha512, + StrLang, + StrDt, + IsIri, + IsBlank, + IsLiteral, + IsNumeric, + Regex, + #[cfg(feature = "rdf-star")] + Triple, + #[cfg(feature = "rdf-star")] + Subject, + #[cfg(feature = "rdf-star")] + Predicate, + #[cfg(feature = "rdf-star")] + Object, + #[cfg(feature = "rdf-star")] + IsTriple, + #[cfg(feature = "sep-0002")] + Adjust, + Custom(NamedNode), +} + +impl Function { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
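The `Display` impls above (for `Expression`) and below (for `Function`) round-trip these expression trees to SPARQL syntax, which the following sketch exercises (module paths again assume the upstream layout):

```rust
use oxrdf::Variable;
use spargebra::algebra::{Expression, Function};

// UCASE(?name): a regular function call rendered through write_arg_list.
let call = Expression::FunctionCall(
    Function::UCase,
    vec![Variable::new_unchecked("name").into()],
);
assert_eq!(call.to_string(), "UCASE(?name)");

// !BOUND(?name): unary NOT wraps its argument unless it is an EXISTS.
let negated = Expression::Not(Box::new(Expression::Bound(
    Variable::new_unchecked("name"),
)));
assert_eq!(negated.to_string(), "!BOUND(?name)");
```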
+ pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + match self { + Self::Str => f.write_str("str"), + Self::Lang => f.write_str("lang"), + Self::LangMatches => f.write_str("langmatches"), + Self::Datatype => f.write_str("datatype"), + Self::Iri => f.write_str("iri"), + Self::BNode => f.write_str("bnode"), + Self::Rand => f.write_str("rand"), + Self::Abs => f.write_str("abs"), + Self::Ceil => f.write_str("ceil"), + Self::Floor => f.write_str("floor"), + Self::Round => f.write_str("round"), + Self::Concat => f.write_str("concat"), + Self::SubStr => f.write_str("substr"), + Self::StrLen => f.write_str("strlen"), + Self::Replace => f.write_str("replace"), + Self::UCase => f.write_str("ucase"), + Self::LCase => f.write_str("lcase"), + Self::EncodeForUri => f.write_str("encode_for_uri"), + Self::Contains => f.write_str("contains"), + Self::StrStarts => f.write_str("strstarts"), + Self::StrEnds => f.write_str("strends"), + Self::StrBefore => f.write_str("strbefore"), + Self::StrAfter => f.write_str("strafter"), + Self::Year => f.write_str("year"), + Self::Month => f.write_str("month"), + Self::Day => f.write_str("day"), + Self::Hours => f.write_str("hours"), + Self::Minutes => f.write_str("minutes"), + Self::Seconds => f.write_str("seconds"), + Self::Timezone => f.write_str("timezone"), + Self::Tz => f.write_str("tz"), + Self::Now => f.write_str("now"), + Self::Uuid => f.write_str("uuid"), + Self::StrUuid => f.write_str("struuid"), + Self::Md5 => f.write_str("md5"), + Self::Sha1 => f.write_str("sha1"), + Self::Sha256 => f.write_str("sha256"), + Self::Sha384 => f.write_str("sha384"), + Self::Sha512 => f.write_str("sha512"), + Self::StrLang => f.write_str("strlang"), + Self::StrDt => f.write_str("strdt"), + Self::IsIri => f.write_str("isiri"), + Self::IsBlank => f.write_str("isblank"), + Self::IsLiteral => f.write_str("isliteral"), + Self::IsNumeric => f.write_str("isnumeric"), + Self::Regex => f.write_str("regex"), + #[cfg(feature = "rdf-star")] + Self::Triple => f.write_str("triple"), + #[cfg(feature = "rdf-star")] + Self::Subject => f.write_str("subject"), + #[cfg(feature = "rdf-star")] + Self::Predicate => f.write_str("predicate"), + #[cfg(feature = "rdf-star")] + Self::Object => f.write_str("object"), + #[cfg(feature = "rdf-star")] + Self::IsTriple => f.write_str("istriple"), + #[cfg(feature = "sep-0002")] + Self::Adjust => f.write_str("adjust"), + Self::Custom(iri) => write!(f, "{iri}"), + } + } +} + +impl fmt::Display for Function { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Str => f.write_str("STR"), + Self::Lang => f.write_str("LANG"), + Self::LangMatches => f.write_str("LANGMATCHES"), + Self::Datatype => f.write_str("DATATYPE"), + Self::Iri => f.write_str("IRI"), + Self::BNode => f.write_str("BNODE"), + Self::Rand => f.write_str("RAND"), + Self::Abs => f.write_str("ABS"), + Self::Ceil => f.write_str("CEIL"), + Self::Floor => f.write_str("FLOOR"), + Self::Round => f.write_str("ROUND"), + Self::Concat => f.write_str("CONCAT"), + Self::SubStr => f.write_str("SUBSTR"), + Self::StrLen => f.write_str("STRLEN"), + Self::Replace => f.write_str("REPLACE"), + Self::UCase => f.write_str("UCASE"), + Self::LCase => f.write_str("LCASE"), + Self::EncodeForUri => f.write_str("ENCODE_FOR_URI"), + Self::Contains => f.write_str("CONTAINS"), + Self::StrStarts => f.write_str("STRSTARTS"), + Self::StrEnds => f.write_str("STRENDS"), + Self::StrBefore => f.write_str("STRBEFORE"), + Self::StrAfter => f.write_str("STRAFTER"), + Self::Year => 
f.write_str("YEAR"), + Self::Month => f.write_str("MONTH"), + Self::Day => f.write_str("DAY"), + Self::Hours => f.write_str("HOURS"), + Self::Minutes => f.write_str("MINUTES"), + Self::Seconds => f.write_str("SECONDS"), + Self::Timezone => f.write_str("TIMEZONE"), + Self::Tz => f.write_str("TZ"), + Self::Now => f.write_str("NOW"), + Self::Uuid => f.write_str("UUID"), + Self::StrUuid => f.write_str("STRUUID"), + Self::Md5 => f.write_str("MD5"), + Self::Sha1 => f.write_str("SHA1"), + Self::Sha256 => f.write_str("SHA256"), + Self::Sha384 => f.write_str("SHA384"), + Self::Sha512 => f.write_str("SHA512"), + Self::StrLang => f.write_str("STRLANG"), + Self::StrDt => f.write_str("STRDT"), + Self::IsIri => f.write_str("isIRI"), + Self::IsBlank => f.write_str("isBLANK"), + Self::IsLiteral => f.write_str("isLITERAL"), + Self::IsNumeric => f.write_str("isNUMERIC"), + Self::Regex => f.write_str("REGEX"), + #[cfg(feature = "rdf-star")] + Self::Triple => f.write_str("TRIPLE"), + #[cfg(feature = "rdf-star")] + Self::Subject => f.write_str("SUBJECT"), + #[cfg(feature = "rdf-star")] + Self::Predicate => f.write_str("PREDICATE"), + #[cfg(feature = "rdf-star")] + Self::Object => f.write_str("OBJECT"), + #[cfg(feature = "rdf-star")] + Self::IsTriple => f.write_str("isTRIPLE"), + #[cfg(feature = "sep-0002")] + Self::Adjust => f.write_str("ADJUST"), + Self::Custom(iri) => iri.fmt(f), + } + } +} + +/// A SPARQL query [graph pattern](https://www.w3.org/TR/sparql11-query/#sparqlQuery). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum GraphPattern { + /// A [basic graph pattern](https://www.w3.org/TR/sparql11-query/#defn_BasicGraphPattern). + Bgp { patterns: Vec }, + /// A [property path pattern](https://www.w3.org/TR/sparql11-query/#defn_evalPP_predicate). + Path { + subject: TermPattern, + path: PropertyPathExpression, + object: TermPattern, + }, + /// [Join](https://www.w3.org/TR/sparql11-query/#defn_algJoin). + Join { left: Box, right: Box }, + /// [LeftJoin](https://www.w3.org/TR/sparql11-query/#defn_algLeftJoin). + LeftJoin { + left: Box, + right: Box, + expression: Option, + }, + /// Lateral join i.e. evaluate right for all result row of left + #[cfg(feature = "sep-0006")] + Lateral { left: Box, right: Box }, + /// [Filter](https://www.w3.org/TR/sparql11-query/#defn_algFilter). + Filter { expr: Expression, inner: Box }, + /// [Union](https://www.w3.org/TR/sparql11-query/#defn_algUnion). + Union { left: Box, right: Box }, + Graph { + name: NamedNodePattern, + inner: Box, + }, + /// [Extend](https://www.w3.org/TR/sparql11-query/#defn_extend). + Extend { + inner: Box, + variable: Variable, + expression: Expression, + }, + /// [Minus](https://www.w3.org/TR/sparql11-query/#defn_algMinus). + Minus { left: Box, right: Box }, + /// A table used to provide inline values + Values { + variables: Vec, + bindings: Vec>>, + }, + /// [OrderBy](https://www.w3.org/TR/sparql11-query/#defn_algOrdered). + OrderBy { + inner: Box, + expression: Vec, + }, + /// [Project](https://www.w3.org/TR/sparql11-query/#defn_algProjection). + Project { + inner: Box, + variables: Vec, + }, + /// [Distinct](https://www.w3.org/TR/sparql11-query/#defn_algDistinct). + Distinct { inner: Box }, + /// [Reduced](https://www.w3.org/TR/sparql11-query/#defn_algReduced). + Reduced { inner: Box }, + /// [Slice](https://www.w3.org/TR/sparql11-query/#defn_algSlice). + Slice { + inner: Box, + start: usize, + length: Option, + }, + /// [Group](https://www.w3.org/TR/sparql11-query/#aggregateAlgebra). 
+ Group { + inner: Box, + variables: Vec, + aggregates: Vec<(Variable, AggregateExpression)>, + }, + /// [Service](https://www.w3.org/TR/sparql11-federated-query/#defn_evalService). + Service { + name: NamedNodePattern, + inner: Box, + silent: bool, + }, +} + +impl fmt::Display for GraphPattern { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Bgp { patterns } => { + for pattern in patterns { + write!(f, "{pattern} .")? + } + Ok(()) + } + Self::Path { + subject, + path, + object, + } => write!(f, "{subject} {path} {object} ."), + Self::Join { left, right } => { + #[allow(clippy::match_same_arms)] + match right.as_ref() { + Self::LeftJoin { .. } + | Self::Minus { .. } + | Self::Extend { .. } + | Self::Filter { .. } => { + // The second block might be considered as a modification of the first one. + write!(f, "{left} {{ {right} }}") + } + #[cfg(feature = "sep-0006")] + Self::Lateral { .. } => { + write!(f, "{left} {{ {right} }}") + } + _ => write!(f, "{left} {right}"), + } + } + Self::LeftJoin { + left, + right, + expression, + } => { + if let Some(expr) = expression { + write!(f, "{left} OPTIONAL {{ {right} FILTER({expr}) }}") + } else { + write!(f, "{left} OPTIONAL {{ {right} }}") + } + } + #[cfg(feature = "sep-0006")] + Self::Lateral { left, right } => { + write!(f, "{left} LATERAL {{ {right} }}") + } + Self::Filter { expr, inner } => { + write!(f, "{inner} FILTER({expr})") + } + Self::Union { left, right } => write!(f, "{{ {left} }} UNION {{ {right} }}"), + Self::Graph { name, inner } => { + write!(f, "GRAPH {name} {{ {inner} }}") + } + Self::Extend { + inner, + variable, + expression, + } => write!(f, "{inner} BIND({expression} AS {variable})"), + Self::Minus { left, right } => write!(f, "{left} MINUS {{ {right} }}"), + Self::Service { + name, + inner, + silent, + } => { + if *silent { + write!(f, "SERVICE SILENT {name} {{ {inner} }}") + } else { + write!(f, "SERVICE {name} {{ {inner} }}") + } + } + Self::Values { + variables, + bindings, + } => { + f.write_str("VALUES ( ")?; + for var in variables { + write!(f, "{var} ")?; + } + f.write_str(") { ")?; + for row in bindings { + f.write_str("( ")?; + for val in row { + match val { + Some(val) => write!(f, "{val} "), + None => f.write_str("UNDEF "), + }?; + } + f.write_str(") ")?; + } + f.write_str(" }") + } + Self::Group { + inner, + variables, + aggregates, + } => { + f.write_str("{SELECT")?; + for (a, v) in aggregates { + write!(f, " ({v} AS {a})")?; + } + for b in variables { + write!(f, " {b}")?; + } + write!(f, " WHERE {{ {inner} }}")?; + if !variables.is_empty() { + f.write_str(" GROUP BY")?; + for v in variables { + write!(f, " {v}")?; + } + } + f.write_str("}") + } + p => write!( + f, + "{{ {} }}", + SparqlGraphRootPattern { + pattern: p, + dataset: None + } + ), + } + } +} + +impl Default for GraphPattern { + fn default() -> Self { + Self::Bgp { + patterns: Vec::default(), + } + } +} + +impl GraphPattern { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
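A construction sketch for the simplest variant, a one-triple basic graph pattern, checked against the `Display` impl above (field names per upstream `spargebra::term`):

```rust
use oxrdf::Variable;
use spargebra::algebra::GraphPattern;
use spargebra::term::TriplePattern;

let bgp = GraphPattern::Bgp {
    patterns: vec![TriplePattern {
        subject: Variable::new_unchecked("s").into(),
        predicate: Variable::new_unchecked("p").into(),
        object: Variable::new_unchecked("o").into(),
    }],
};
// Bgp prints each triple pattern followed by " .".
assert_eq!(bgp.to_string(), "?s ?p ?o .");
```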
+ pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + match self { + Self::Bgp { patterns } => { + f.write_str("(bgp")?; + for pattern in patterns { + f.write_str(" ")?; + pattern.fmt_sse(f)?; + } + f.write_str(")") + } + Self::Path { + subject, + path, + object, + } => { + f.write_str("(path ")?; + subject.fmt_sse(f)?; + f.write_str(" ")?; + path.fmt_sse(f)?; + f.write_str(" ")?; + object.fmt_sse(f)?; + f.write_str(")") + } + Self::Join { left, right } => { + f.write_str("(join ")?; + left.fmt_sse(f)?; + f.write_str(" ")?; + right.fmt_sse(f)?; + f.write_str(")") + } + Self::LeftJoin { + left, + right, + expression, + } => { + f.write_str("(leftjoin ")?; + left.fmt_sse(f)?; + f.write_str(" ")?; + right.fmt_sse(f)?; + if let Some(expr) = expression { + f.write_str(" ")?; + expr.fmt_sse(f)?; + } + f.write_str(")") + } + #[cfg(feature = "sep-0006")] + Self::Lateral { left, right } => { + f.write_str("(lateral ")?; + left.fmt_sse(f)?; + f.write_str(" ")?; + right.fmt_sse(f)?; + f.write_str(")") + } + Self::Filter { expr, inner } => { + f.write_str("(filter ")?; + expr.fmt_sse(f)?; + f.write_str(" ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Union { left, right } => { + f.write_str("(union ")?; + left.fmt_sse(f)?; + f.write_str(" ")?; + right.fmt_sse(f)?; + f.write_str(")") + } + Self::Graph { name, inner } => { + f.write_str("(graph ")?; + name.fmt_sse(f)?; + f.write_str(" ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Extend { + inner, + variable, + expression, + } => { + write!(f, "(extend (({variable} ")?; + expression.fmt_sse(f)?; + f.write_str(")) ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Minus { left, right } => { + f.write_str("(minus ")?; + left.fmt_sse(f)?; + f.write_str(" ")?; + right.fmt_sse(f)?; + f.write_str(")") + } + Self::Service { + name, + inner, + silent, + } => { + f.write_str("(service ")?; + if *silent { + f.write_str("silent ")?; + } + name.fmt_sse(f)?; + f.write_str(" ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Group { + inner, + variables, + aggregates, + } => { + f.write_str("(group (")?; + for (i, v) in variables.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + write!(f, "{v}")?; + } + f.write_str(") (")?; + for (i, (v, a)) in aggregates.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + f.write_str("(")?; + a.fmt_sse(f)?; + write!(f, " {v})")?; + } + f.write_str(") ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Values { + variables, + bindings, + } => { + f.write_str("(table (vars")?; + for var in variables { + write!(f, " {var}")?; + } + f.write_str(")")?; + for row in bindings { + f.write_str(" (row")?; + for (value, var) in row.iter().zip(variables) { + if let Some(value) = value { + write!(f, " ({var} {value})")?; + } + } + f.write_str(")")?; + } + f.write_str(")") + } + Self::OrderBy { inner, expression } => { + f.write_str("(order (")?; + for (i, c) in expression.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + c.fmt_sse(f)?; + } + f.write_str(") ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Project { inner, variables } => { + f.write_str("(project (")?; + for (i, v) in variables.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + write!(f, "{v}")?; + } + f.write_str(") ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Distinct { inner } => { + f.write_str("(distinct ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + Self::Reduced { inner } => { + f.write_str("(reduced ")?; + inner.fmt_sse(f)?; + f.write_str(")") + } + 
Self::Slice { + inner, + start, + length, + } => { + if let Some(length) = length { + write!(f, "(slice {start} {length} ")?; + } else { + write!(f, "(slice {start} _ ")?; + } + inner.fmt_sse(f)?; + f.write_str(")") + } + } + } + + /// Calls `callback` on each [in-scope variable](https://www.w3.org/TR/sparql11-query/#variableScope) occurrence. + pub fn on_in_scope_variable<'a>(&'a self, mut callback: impl FnMut(&'a Variable)) { + self.lookup_in_scope_variables(&mut callback) + } + + fn lookup_in_scope_variables<'a>(&'a self, callback: &mut impl FnMut(&'a Variable)) { + #[allow(clippy::match_same_arms)] + match self { + Self::Bgp { patterns } => { + for pattern in patterns { + lookup_triple_pattern_variables(pattern, callback) + } + } + Self::Path { + subject, object, .. + } => { + if let TermPattern::Variable(s) = subject { + callback(s); + } + #[cfg(feature = "rdf-star")] + if let TermPattern::Triple(s) = subject { + lookup_triple_pattern_variables(s, callback) + } + if let TermPattern::Variable(o) = object { + callback(o); + } + #[cfg(feature = "rdf-star")] + if let TermPattern::Triple(o) = object { + lookup_triple_pattern_variables(o, callback) + } + } + Self::Join { left, right } + | Self::LeftJoin { left, right, .. } + | Self::Union { left, right } => { + left.lookup_in_scope_variables(callback); + right.lookup_in_scope_variables(callback); + } + #[cfg(feature = "sep-0006")] + Self::Lateral { left, right } => { + left.lookup_in_scope_variables(callback); + right.lookup_in_scope_variables(callback); + } + Self::Graph { name, inner } => { + if let NamedNodePattern::Variable(g) = &name { + callback(g); + } + inner.lookup_in_scope_variables(callback); + } + Self::Extend { + inner, variable, .. + } => { + callback(variable); + inner.lookup_in_scope_variables(callback); + } + Self::Minus { left, .. } => left.lookup_in_scope_variables(callback), + Self::Group { + variables, + aggregates, + .. + } => { + for v in variables { + callback(v); + } + for (v, _) in aggregates { + callback(v); + } + } + Self::Values { variables, .. } | Self::Project { variables, .. } => { + for v in variables { + callback(v); + } + } + Self::Service { inner, .. } + | Self::Filter { inner, .. } + | Self::OrderBy { inner, .. } + | Self::Distinct { inner } + | Self::Reduced { inner } + | Self::Slice { inner, .. 
} => inner.lookup_in_scope_variables(callback), + } + } +} + +fn lookup_triple_pattern_variables<'a>( + pattern: &'a TriplePattern, + callback: &mut impl FnMut(&'a Variable), +) { + if let TermPattern::Variable(s) = &pattern.subject { + callback(s); + } + #[cfg(feature = "rdf-star")] + if let TermPattern::Triple(s) = &pattern.subject { + lookup_triple_pattern_variables(s, callback) + } + if let NamedNodePattern::Variable(p) = &pattern.predicate { + callback(p); + } + if let TermPattern::Variable(o) = &pattern.object { + callback(o); + } + #[cfg(feature = "rdf-star")] + if let TermPattern::Triple(o) = &pattern.object { + lookup_triple_pattern_variables(o, callback) + } +} + +pub(crate) struct SparqlGraphRootPattern<'a> { + pub(crate) pattern: &'a GraphPattern, + pub(crate) dataset: Option<&'a QueryDataset>, +} + +impl<'a> fmt::Display for SparqlGraphRootPattern<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut distinct = false; + let mut reduced = false; + let mut order = None; + let mut start = 0; + let mut length = None; + let mut project: &[Variable] = &[]; + + let mut child = self.pattern; + loop { + match child { + GraphPattern::OrderBy { inner, expression } => { + order = Some(expression); + child = inner; + } + GraphPattern::Project { inner, variables } if project.is_empty() => { + project = variables; + child = inner; + } + GraphPattern::Distinct { inner } => { + distinct = true; + child = inner; + } + GraphPattern::Reduced { inner } => { + reduced = true; + child = inner; + } + GraphPattern::Slice { + inner, + start: s, + length: l, + } => { + start = *s; + length = *l; + child = inner; + } + p => { + f.write_str("SELECT")?; + if distinct { + f.write_str(" DISTINCT")?; + } + if reduced { + f.write_str(" REDUCED")?; + } + if project.is_empty() { + f.write_str(" *")?; + } else { + for v in project { + write!(f, " {v}")?; + } + } + if let Some(dataset) = self.dataset { + write!(f, " {dataset}")?; + } + write!(f, " WHERE {{ {p} }}")?; + if let Some(order) = order { + f.write_str(" ORDER BY")?; + for c in order { + write!(f, " {c}")?; + } + } + if start > 0 { + write!(f, " OFFSET {start}")?; + } + if let Some(length) = length { + write!(f, " LIMIT {length}")?; + } + return Ok(()); + } + } + } + } +} + +/// A set function used in aggregates (c.f. [`GraphPattern::Group`]). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum AggregateExpression { + /// [Count](https://www.w3.org/TR/sparql11-query/#defn_aggCount) with *. + CountSolutions { distinct: bool }, + FunctionCall { + name: AggregateFunction, + expr: Expression, + distinct: bool, + }, +} + +impl AggregateExpression { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
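`SparqlGraphRootPattern` is what makes the catch-all `Display` arm above work: it peels `OrderBy`/`Project`/`Distinct`/`Reduced`/`Slice` wrappers off the root and re-emits them as SELECT solution modifiers. A sketch of the observable effect (values hypothetical; the printed form is approximate):

fn subselect_demo(s: Variable) -> String {
    let p = GraphPattern::Slice {
        inner: Box::new(GraphPattern::Project {
            inner: Box::new(GraphPattern::default()),
            variables: vec![s],
        }),
        start: 0,
        length: Some(10),
    };
    // Falls through to the `p => ...` arm of Display, printing roughly
    // `{ SELECT ?s WHERE {  } LIMIT 10 }`.
    p.to_string()
}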
+ pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + match self { + Self::CountSolutions { distinct } => { + f.write_str("(count")?; + if *distinct { + f.write_str(" distinct")?; + } + f.write_str(")") + } + Self::FunctionCall { + name: + AggregateFunction::GroupConcat { + separator: Some(separator), + }, + expr, + distinct, + } => { + f.write_str("(group_concat ")?; + if *distinct { + f.write_str("distinct ")?; + } + expr.fmt_sse(f)?; + write!(f, " {})", LiteralRef::new_simple_literal(separator)) + } + Self::FunctionCall { + name, + expr, + distinct, + } => { + f.write_str("(")?; + name.fmt_sse(f)?; + f.write_str(" ")?; + if *distinct { + f.write_str("distinct ")?; + } + expr.fmt_sse(f)?; + f.write_str(")") + } + } + } +} + +impl fmt::Display for AggregateExpression { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::CountSolutions { distinct } => { + if *distinct { + f.write_str("COUNT(DISTINCT *)") + } else { + f.write_str("COUNT(*)") + } + } + Self::FunctionCall { + name: + AggregateFunction::GroupConcat { + separator: Some(separator), + }, + expr, + distinct, + } => { + if *distinct { + write!( + f, + "GROUP_CONCAT(DISTINCT {}; SEPARATOR = {})", + expr, + LiteralRef::new_simple_literal(separator) + ) + } else { + write!( + f, + "GROUP_CONCAT({}; SEPARATOR = {})", + expr, + LiteralRef::new_simple_literal(separator) + ) + } + } + Self::FunctionCall { + name, + expr, + distinct, + } => { + if *distinct { + write!(f, "{name}(DISTINCT {expr})") + } else { + write!(f, "{name}({expr})") + } + } + } + } +} + +/// An aggregate function name. +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum AggregateFunction { + /// [Count](https://www.w3.org/TR/sparql11-query/#defn_aggCount) with *. + Count, + /// [Sum](https://www.w3.org/TR/sparql11-query/#defn_aggSum). + Sum, + /// [Avg](https://www.w3.org/TR/sparql11-query/#defn_aggAvg). + Avg, + /// [Min](https://www.w3.org/TR/sparql11-query/#defn_aggMin). + Min, + /// [Max](https://www.w3.org/TR/sparql11-query/#defn_aggMax). + Max, + /// [GroupConcat](https://www.w3.org/TR/sparql11-query/#defn_aggGroupConcat). + GroupConcat { + separator: Option, + }, + /// [Sample](https://www.w3.org/TR/sparql11-query/#defn_aggSample). + Sample, + Custom(NamedNode), +} + +impl AggregateFunction { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + match self { + Self::Count => f.write_str("count"), + Self::Sum => f.write_str("sum"), + Self::Avg => f.write_str("avg"), + Self::Min => f.write_str("min"), + Self::Max => f.write_str("max"), + Self::GroupConcat { .. } => f.write_str("group_concat"), + Self::Sample => f.write_str("sample"), + Self::Custom(iri) => write!(f, "{iri}"), + } + } +} + +impl fmt::Display for AggregateFunction { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Count => f.write_str("COUNT"), + Self::Sum => f.write_str("SUM"), + Self::Avg => f.write_str("AVG"), + Self::Min => f.write_str("MIN"), + Self::Max => f.write_str("MAX"), + Self::GroupConcat { .. } => f.write_str("GROUP_CONCAT"), + Self::Sample => f.write_str("SAMPLE"), + Self::Custom(iri) => iri.fmt(f), + } + } +} + +/// An ordering comparator used by [`GraphPattern::OrderBy`]. 
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum OrderExpression {
+    /// Ascending order
+    Asc(Expression),
+    /// Descending order
+    Desc(Expression),
+}
+
+impl OrderExpression {
+    /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
+    pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
+        match self {
+            Self::Asc(e) => {
+                f.write_str("(asc ")?;
+                e.fmt_sse(f)?;
+                f.write_str(")")
+            }
+            Self::Desc(e) => {
+                f.write_str("(desc ")?;
+                e.fmt_sse(f)?;
+                f.write_str(")")
+            }
+        }
+    }
+}
+
+impl fmt::Display for OrderExpression {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::Asc(e) => write!(f, "ASC({e})"),
+            Self::Desc(e) => write!(f, "DESC({e})"),
+        }
+    }
+}
+
+/// A SPARQL query [dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset).
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct QueryDataset {
+    pub default: Vec<NamedNode>,
+    pub named: Option<Vec<NamedNode>>,
+}
+
+impl QueryDataset {
+    /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
+    pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
+        f.write_str("(")?;
+        for (i, graph_name) in self.default.iter().enumerate() {
+            if i > 0 {
+                f.write_str(" ")?;
+            }
+            write!(f, "{graph_name}")?;
+        }
+        if let Some(named) = &self.named {
+            for (i, graph_name) in named.iter().enumerate() {
+                if !self.default.is_empty() || i > 0 {
+                    f.write_str(" ")?;
+                }
+                write!(f, "(named {graph_name})")?;
+            }
+        }
+        f.write_str(")")
+    }
+}
+
+impl fmt::Display for QueryDataset {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        for g in &self.default {
+            write!(f, " FROM {g}")?;
+        }
+        if let Some(named) = &self.named {
+            for g in named {
+                write!(f, " FROM NAMED {g}")?;
+            }
+        }
+        Ok(())
+    }
+}
+
+/// A target RDF graph for update operations.
+///
+/// Could be a specific graph, all named graphs or the complete dataset.
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum GraphTarget {
+    NamedNode(NamedNode),
+    DefaultGraph,
+    NamedGraphs,
+    AllGraphs,
+}
+
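A small sketch of the `QueryDataset` `Display` impl above. The leading space is deliberate: the query serializer appends the dataset clause directly after the query form (IRIs hypothetical):

fn dataset_demo() -> Result<(), Box<dyn std::error::Error>> {
    let dataset = QueryDataset {
        default: vec![NamedNode::new("http://example.com/g1")?],
        named: Some(vec![NamedNode::new("http://example.com/g2")?]),
    };
    assert_eq!(
        dataset.to_string(),
        " FROM <http://example.com/g1> FROM NAMED <http://example.com/g2>"
    );
    Ok(())
}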
+impl GraphTarget {
+    /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
+    pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
+        match self {
+            Self::NamedNode(node) => write!(f, "{node}"),
+            Self::DefaultGraph => f.write_str("default"),
+            Self::NamedGraphs => f.write_str("named"),
+            Self::AllGraphs => f.write_str("all"),
+        }
+    }
+}
+
+impl fmt::Display for GraphTarget {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::NamedNode(node) => write!(f, "GRAPH {node}"),
+            Self::DefaultGraph => f.write_str("DEFAULT"),
+            Self::NamedGraphs => f.write_str("NAMED"),
+            Self::AllGraphs => f.write_str("ALL"),
+        }
+    }
+}
+
+impl From<NamedNode> for GraphTarget {
+    fn from(node: NamedNode) -> Self {
+        Self::NamedNode(node)
+    }
+}
+
+impl From<GraphName> for GraphTarget {
+    fn from(graph_name: GraphName) -> Self {
+        match graph_name {
+            GraphName::NamedNode(node) => Self::NamedNode(node),
+            GraphName::DefaultGraph => Self::DefaultGraph,
+        }
+    }
+}
+
+#[inline]
+fn fmt_sse_unary_expression(f: &mut impl fmt::Write, name: &str, e: &Expression) -> fmt::Result {
+    write!(f, "({name} ")?;
+    e.fmt_sse(f)?;
+    f.write_str(")")
+}
+
+#[inline]
+fn fmt_sse_binary_expression(
+    f: &mut impl fmt::Write,
+    name: &str,
+    a: &Expression,
+    b: &Expression,
+) -> fmt::Result {
+    write!(f, "({name} ")?;
+    a.fmt_sse(f)?;
+    f.write_str(" ")?;
+    b.fmt_sse(f)?;
+    f.write_str(")")
+}
diff --git a/ng-oxigraph/src/spargebra/mod.rs b/ng-oxigraph/src/spargebra/mod.rs
new file mode 100644
index 0000000..f23b35b
--- /dev/null
+++ b/ng-oxigraph/src/spargebra/mod.rs
@@ -0,0 +1,9 @@
+pub mod algebra;
+mod parser;
+mod query;
+pub mod term;
+mod update;
+
+pub use parser::SparqlSyntaxError;
+pub use query::*;
+pub use update::*;
diff --git a/ng-oxigraph/src/spargebra/parser.rs b/ng-oxigraph/src/spargebra/parser.rs
new file mode 100644
index 0000000..67718fe
--- /dev/null
+++ b/ng-oxigraph/src/spargebra/parser.rs
@@ -0,0 +1,2086 @@
+#![allow(clippy::ignored_unit_patterns)]
+use crate::oxrdf::vocab::{rdf, xsd};
+use crate::spargebra::algebra::*;
+use crate::spargebra::query::*;
+use crate::spargebra::term::*;
+use crate::spargebra::update::*;
+use oxilangtag::LanguageTag;
+use oxiri::{Iri, IriParseError};
+use peg::parser;
+use peg::str::LineCol;
+use rand::random;
+use std::char;
+use std::collections::{HashMap, HashSet};
+use std::mem::take;
+use std::str::FromStr;
+
+/// Parses a SPARQL query with an optional base IRI to resolve relative IRIs in the query.
+pub fn parse_query(query: &str, base_iri: Option<&str>) -> Result<Query, SparqlSyntaxError> {
+    let mut state = ParserState::from_base_iri(base_iri)?;
+    parser::QueryUnit(query, &mut state).map_err(|e| SparqlSyntaxError(ParseErrorKind::Syntax(e)))
+}
+
+/// Parses a SPARQL update with an optional base IRI to resolve relative IRIs in the query.
+pub fn parse_update(update: &str, base_iri: Option<&str>) -> Result<Update, SparqlSyntaxError> {
+    let mut state = ParserState::from_base_iri(base_iri)?;
+    let operations = parser::UpdateInit(update, &mut state)
+        .map_err(|e| SparqlSyntaxError(ParseErrorKind::Syntax(e)))?;
+    Ok(Update {
+        operations,
+        base_iri: state.base_iri,
+    })
+}
+
+/// Error returned during SPARQL parsing.
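`parse_query` is the internal entry point; callers normally go through the `Query::parse` wrapper that `pub use query::*;` re-exports above (assumed here to match upstream Oxigraph). A minimal round-trip:

use crate::spargebra::Query;

fn parse_demo() -> Result<(), Box<dyn std::error::Error>> {
    // The base IRI resolves the relative <p> inside the query.
    let query = Query::parse(
        "SELECT * WHERE { ?s <p> ?o }",
        Some("http://example.com/"),
    )?;
    // Display round-trips to SPARQL, with <p> resolved to <http://example.com/p>.
    println!("{query}");
    Ok(())
}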
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub struct SparqlSyntaxError(#[from] ParseErrorKind);
+
+#[derive(Debug, thiserror::Error)]
+enum ParseErrorKind {
+    #[error("Invalid SPARQL base IRI provided: {0}")]
+    InvalidBaseIri(#[from] IriParseError),
+    #[error(transparent)]
+    Syntax(#[from] peg::error::ParseError<LineCol>),
+}
+
+struct AnnotatedTerm {
+    term: TermPattern,
+    annotations: Vec<(NamedNodePattern, Vec<AnnotatedTerm>)>,
+}
+
+#[derive(Default)]
+struct FocusedTriplePattern<F> {
+    focus: F,
+    patterns: Vec<TriplePattern>,
+}
+
+impl<F> FocusedTriplePattern<F> {
+    fn new(focus: F) -> Self {
+        Self {
+            focus,
+            patterns: Vec::new(),
+        }
+    }
+}
+
+impl<F> From<FocusedTriplePattern<F>> for FocusedTriplePattern<Vec<F>> {
+    fn from(input: FocusedTriplePattern<F>) -> Self {
+        Self {
+            focus: vec![input.focus],
+            patterns: input.patterns,
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+enum VariableOrPropertyPath {
+    Variable(Variable),
+    PropertyPath(PropertyPathExpression),
+}
+
+impl From<Variable> for VariableOrPropertyPath {
+    fn from(var: Variable) -> Self {
+        Self::Variable(var)
+    }
+}
+
+impl From<NamedNodePattern> for VariableOrPropertyPath {
+    fn from(pattern: NamedNodePattern) -> Self {
+        match pattern {
+            NamedNodePattern::NamedNode(node) => PropertyPathExpression::from(node).into(),
+            NamedNodePattern::Variable(v) => v.into(),
+        }
+    }
+}
+
+impl From<PropertyPathExpression> for VariableOrPropertyPath {
+    fn from(path: PropertyPathExpression) -> Self {
+        Self::PropertyPath(path)
+    }
+}
+
+fn add_to_triple_patterns(
+    subject: TermPattern,
+    predicate: NamedNodePattern,
+    object: AnnotatedTerm,
+    patterns: &mut Vec<TriplePattern>,
+) -> Result<(), &'static str> {
+    let triple = TriplePattern::new(subject, predicate, object.term);
+    #[cfg(feature = "rdf-star")]
+    for (p, os) in object.annotations {
+        for o in os {
+            add_to_triple_patterns(triple.clone().into(), p.clone(), o, patterns)?
+ } + } + #[cfg(not(feature = "rdf-star"))] + if !object.annotations.is_empty() { + return Err("Embedded triples are only available in SPARQL-star"); + } + patterns.push(triple); + Ok(()) +} + +fn add_to_triple_or_path_patterns( + subject: TermPattern, + predicate: impl Into, + object: AnnotatedTermPath, + patterns: &mut Vec, +) -> Result<(), &'static str> { + match predicate.into() { + VariableOrPropertyPath::Variable(p) => { + add_triple_to_triple_or_path_patterns(subject, p, object, patterns)?; + } + VariableOrPropertyPath::PropertyPath(p) => match p { + PropertyPathExpression::NamedNode(p) => { + add_triple_to_triple_or_path_patterns(subject, p, object, patterns)?; + } + PropertyPathExpression::Reverse(p) => add_to_triple_or_path_patterns( + object.term, + *p, + AnnotatedTermPath { + term: subject, + annotations: object.annotations, + }, + patterns, + )?, + PropertyPathExpression::Sequence(a, b) => { + if !object.annotations.is_empty() { + return Err("Annotations are not allowed on property paths"); + } + let middle = BlankNode::default(); + add_to_triple_or_path_patterns( + subject, + *a, + AnnotatedTermPath { + term: middle.clone().into(), + annotations: Vec::new(), + }, + patterns, + )?; + add_to_triple_or_path_patterns( + middle.into(), + *b, + AnnotatedTermPath { + term: object.term, + annotations: Vec::new(), + }, + patterns, + )?; + } + path => { + if !object.annotations.is_empty() { + return Err("Annotations are not allowed on property paths"); + } + patterns.push(TripleOrPathPattern::Path { + subject, + path, + object: object.term, + }) + } + }, + } + Ok(()) +} + +fn add_triple_to_triple_or_path_patterns( + subject: TermPattern, + predicate: impl Into, + object: AnnotatedTermPath, + patterns: &mut Vec, +) -> Result<(), &'static str> { + let triple = TriplePattern::new(subject, predicate, object.term); + #[cfg(feature = "rdf-star")] + for (p, os) in object.annotations { + for o in os { + add_to_triple_or_path_patterns(triple.clone().into(), p.clone(), o, patterns)? 
+ } + } + #[cfg(not(feature = "rdf-star"))] + if !object.annotations.is_empty() { + return Err("Embedded triples are only available in SPARQL-star"); + } + patterns.push(triple.into()); + Ok(()) +} + +fn build_bgp(patterns: Vec) -> GraphPattern { + let mut bgp = Vec::new(); + let mut elements = Vec::with_capacity(patterns.len()); + for pattern in patterns { + match pattern { + TripleOrPathPattern::Triple(t) => bgp.push(t), + TripleOrPathPattern::Path { + subject, + path, + object, + } => { + if !bgp.is_empty() { + elements.push(GraphPattern::Bgp { + patterns: take(&mut bgp), + }); + } + elements.push(GraphPattern::Path { + subject, + path, + object, + }) + } + } + } + if !bgp.is_empty() { + elements.push(GraphPattern::Bgp { patterns: bgp }); + } + elements.into_iter().reduce(new_join).unwrap_or_default() +} + +#[derive(Debug)] +enum TripleOrPathPattern { + Triple(TriplePattern), + Path { + subject: TermPattern, + path: PropertyPathExpression, + object: TermPattern, + }, +} + +impl From for TripleOrPathPattern { + fn from(tp: TriplePattern) -> Self { + Self::Triple(tp) + } +} + +#[derive(Debug)] +struct AnnotatedTermPath { + term: TermPattern, + annotations: Vec<(VariableOrPropertyPath, Vec)>, +} + +impl From for AnnotatedTermPath { + fn from(term: AnnotatedTerm) -> Self { + Self { + term: term.term, + annotations: term + .annotations + .into_iter() + .map(|(p, o)| (p.into(), o.into_iter().map(Self::from).collect())) + .collect(), + } + } +} + +#[derive(Debug, Default)] +struct FocusedTripleOrPathPattern { + focus: F, + patterns: Vec, +} + +impl FocusedTripleOrPathPattern { + fn new(focus: F) -> Self { + Self { + focus, + patterns: Vec::new(), + } + } +} + +impl From> for FocusedTripleOrPathPattern> { + fn from(input: FocusedTripleOrPathPattern) -> Self { + Self { + focus: vec![input.focus], + patterns: input.patterns, + } + } +} + +impl> From> for FocusedTripleOrPathPattern { + fn from(input: FocusedTriplePattern) -> Self { + Self { + focus: input.focus.into(), + patterns: input.patterns.into_iter().map(Into::into).collect(), + } + } +} + +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +enum PartialGraphPattern { + Optional(GraphPattern, Option), + #[cfg(feature = "sep-0006")] + Lateral(GraphPattern), + Minus(GraphPattern), + Bind(Expression, Variable), + Filter(Expression), + Other(GraphPattern), +} + +fn new_join(l: GraphPattern, r: GraphPattern) -> GraphPattern { + // Avoid to output empty BGPs + if let GraphPattern::Bgp { patterns: pl } = &l { + if pl.is_empty() { + return r; + } + } + if let GraphPattern::Bgp { patterns: pr } = &r { + if pr.is_empty() { + return l; + } + } + + match (l, r) { + (GraphPattern::Bgp { patterns: mut pl }, GraphPattern::Bgp { patterns: pr }) => { + pl.extend(pr); + GraphPattern::Bgp { patterns: pl } + } + (GraphPattern::Bgp { patterns }, other) | (other, GraphPattern::Bgp { patterns }) + if patterns.is_empty() => + { + other + } + (l, r) => GraphPattern::Join { + left: Box::new(l), + right: Box::new(r), + }, + } +} + +fn not_empty_fold( + iter: impl Iterator, + combine: impl Fn(T, T) -> T, +) -> Result { + iter.fold(None, |a, b| match a { + Some(av) => Some(combine(av, b)), + None => Some(b), + }) + .ok_or("The iterator should not be empty") +} + +enum SelectionOption { + Distinct, + Reduced, + Default, +} + +enum SelectionMember { + Variable(Variable), + Expression(Expression, Variable), +} + +enum SelectionVariables { + Explicit(Vec), + Star, + Everything, +} + +struct Selection { + pub option: SelectionOption, + pub variables: SelectionVariables, +} + 
+impl Selection { + fn no_op() -> Self { + Self { + option: SelectionOption::Default, + variables: SelectionVariables::Everything, + } + } +} + +fn build_select( + select: Selection, + r#where: GraphPattern, + mut group: Option<(Vec, Vec<(Expression, Variable)>)>, + having: Option, + order_by: Option>, + offset_limit: Option<(usize, Option)>, + values: Option, + state: &mut ParserState, +) -> Result { + let mut p = r#where; + let mut with_aggregate = false; + + // GROUP BY + let aggregates = state.aggregates.pop().unwrap_or_default(); + if group.is_none() && !aggregates.is_empty() { + group = Some((vec![], vec![])); + } + + if let Some((clauses, binds)) = group { + for (expression, variable) in binds { + p = GraphPattern::Extend { + inner: Box::new(p), + variable, + expression, + }; + } + p = GraphPattern::Group { + inner: Box::new(p), + variables: clauses, + aggregates, + }; + with_aggregate = true; + } + + // HAVING + if let Some(expr) = having { + p = GraphPattern::Filter { + expr, + inner: Box::new(p), + }; + } + + // VALUES + if let Some(data) = values { + p = new_join(p, data); + } + + // SELECT + let mut pv = Vec::new(); + let with_project = match select.variables { + SelectionVariables::Explicit(sel_items) => { + let mut visible = HashSet::default(); + p.on_in_scope_variable(|v| { + visible.insert(v.clone()); + }); + for sel_item in sel_items { + let v = match sel_item { + SelectionMember::Variable(v) => { + if with_aggregate && !visible.contains(&v) { + // We validate projection variables if there is an aggregate + return Err("The SELECT contains a variable that is unbound"); + } + v + } + SelectionMember::Expression(expression, variable) => { + if visible.contains(&variable) { + // We disallow to override an existing variable with an expression + return Err( + "The SELECT overrides an existing variable using an expression", + ); + } + if with_aggregate && !are_variables_bound(&expression, &visible) { + // We validate projection variables if there is an aggregate + return Err( + "The SELECT contains an expression with a variable that is unbound", + ); + } + p = GraphPattern::Extend { + inner: Box::new(p), + variable: variable.clone(), + expression, + }; + variable + } + }; + if pv.contains(&v) { + return Err("Duplicated variable name in SELECT"); + } + pv.push(v) + } + true + } + SelectionVariables::Star => { + if with_aggregate { + return Err("SELECT * is not authorized with GROUP BY"); + } + // TODO: is it really useful to do a projection? 
+ p.on_in_scope_variable(|v| { + if !pv.contains(v) { + pv.push(v.clone()); + } + }); + pv.sort(); + true + } + SelectionVariables::Everything => false, + }; + + let mut m = p; + + // ORDER BY + if let Some(expression) = order_by { + m = GraphPattern::OrderBy { + inner: Box::new(m), + expression, + }; + } + + // PROJECT + if with_project { + m = GraphPattern::Project { + inner: Box::new(m), + variables: pv, + }; + } + match select.option { + SelectionOption::Distinct => m = GraphPattern::Distinct { inner: Box::new(m) }, + SelectionOption::Reduced => m = GraphPattern::Reduced { inner: Box::new(m) }, + SelectionOption::Default => (), + } + + // OFFSET LIMIT + if let Some((start, length)) = offset_limit { + m = GraphPattern::Slice { + inner: Box::new(m), + start, + length, + } + } + Ok(m) +} + +fn are_variables_bound(expression: &Expression, variables: &HashSet) -> bool { + match expression { + Expression::NamedNode(_) + | Expression::Literal(_) + | Expression::Bound(_) + | Expression::Coalesce(_) + | Expression::Exists(_) => true, + Expression::Variable(var) => variables.contains(var), + Expression::UnaryPlus(e) | Expression::UnaryMinus(e) | Expression::Not(e) => { + are_variables_bound(e, variables) + } + Expression::Or(a, b) + | Expression::And(a, b) + | Expression::Equal(a, b) + | Expression::SameTerm(a, b) + | Expression::Greater(a, b) + | Expression::GreaterOrEqual(a, b) + | Expression::Less(a, b) + | Expression::LessOrEqual(a, b) + | Expression::Add(a, b) + | Expression::Subtract(a, b) + | Expression::Multiply(a, b) + | Expression::Divide(a, b) => { + are_variables_bound(a, variables) && are_variables_bound(b, variables) + } + Expression::In(a, b) => { + are_variables_bound(a, variables) && b.iter().all(|b| are_variables_bound(b, variables)) + } + Expression::FunctionCall(_, parameters) => { + parameters.iter().all(|p| are_variables_bound(p, variables)) + } + Expression::If(a, b, c) => { + are_variables_bound(a, variables) + && are_variables_bound(b, variables) + && are_variables_bound(c, variables) + } + } +} + +/// Called on every variable defined using "AS" or "VALUES" +#[cfg(feature = "sep-0006")] +fn add_defined_variables<'a>(pattern: &'a GraphPattern, set: &mut HashSet<&'a Variable>) { + match pattern { + GraphPattern::Bgp { .. } | GraphPattern::Path { .. } => {} + GraphPattern::Join { left, right } + | GraphPattern::LeftJoin { left, right, .. } + | GraphPattern::Lateral { left, right } + | GraphPattern::Union { left, right } + | GraphPattern::Minus { left, right } => { + add_defined_variables(left, set); + add_defined_variables(right, set); + } + GraphPattern::Graph { inner, .. } => { + add_defined_variables(inner, set); + } + GraphPattern::Extend { + inner, variable, .. + } => { + set.insert(variable); + add_defined_variables(inner, set); + } + GraphPattern::Group { + variables, + aggregates, + inner, + } => { + for (v, _) in aggregates { + set.insert(v); + } + let mut inner_variables = HashSet::new(); + add_defined_variables(inner, &mut inner_variables); + for v in inner_variables { + if variables.contains(v) { + set.insert(v); + } + } + } + GraphPattern::Values { variables, .. } => { + for v in variables { + set.insert(v); + } + } + GraphPattern::Project { variables, inner } => { + let mut inner_variables = HashSet::new(); + add_defined_variables(inner, &mut inner_variables); + for v in inner_variables { + if variables.contains(v) { + set.insert(v); + } + } + } + GraphPattern::Service { inner, .. } + | GraphPattern::Filter { inner, .. } + | GraphPattern::OrderBy { inner, .. 
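`build_select` wraps its result in a fixed order that mirrors SPARQL's solution-modifier semantics: grouping and HAVING first, then VALUES and the SELECT expressions, then ORDER BY, the projection, DISTINCT/REDUCED, and OFFSET/LIMIT last. For `SELECT DISTINCT ?s ... ORDER BY ?s LIMIT 10` the algebra therefore nests as in this sketch (`where_clause` is a placeholder for the parsed WHERE group):

fn select_shape(where_clause: GraphPattern, s: Variable) -> GraphPattern {
    GraphPattern::Slice {
        inner: Box::new(GraphPattern::Distinct {
            inner: Box::new(GraphPattern::Project {
                inner: Box::new(GraphPattern::OrderBy {
                    inner: Box::new(where_clause),
                    expression: vec![OrderExpression::Asc(s.clone().into())],
                }),
                variables: vec![s],
            }),
        }),
        start: 0,
        length: Some(10),
    }
}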
} + | GraphPattern::Distinct { inner } + | GraphPattern::Reduced { inner } + | GraphPattern::Slice { inner, .. } => add_defined_variables(inner, set), + } +} + +fn copy_graph(from: impl Into, to: impl Into) -> GraphUpdateOperation { + let bgp = GraphPattern::Bgp { + patterns: vec![TriplePattern::new( + Variable::new_unchecked("s"), + Variable::new_unchecked("p"), + Variable::new_unchecked("o"), + )], + }; + GraphUpdateOperation::DeleteInsert { + delete: Vec::new(), + insert: vec![QuadPattern::new( + Variable::new_unchecked("s"), + Variable::new_unchecked("p"), + Variable::new_unchecked("o"), + to, + )], + using: None, + pattern: Box::new(match from.into() { + GraphName::NamedNode(from) => GraphPattern::Graph { + name: from.into(), + inner: Box::new(bgp), + }, + GraphName::DefaultGraph => bgp, + }), + } +} + +enum Either { + Left(L), + Right(R), +} + +pub struct ParserState { + base_iri: Option>, + namespaces: HashMap, + used_bnodes: HashSet, + currently_used_bnodes: HashSet, + aggregates: Vec>, +} + +impl ParserState { + pub(crate) fn from_base_iri(base_iri: Option<&str>) -> Result { + Ok(Self { + base_iri: if let Some(base_iri) = base_iri { + Some( + Iri::parse(base_iri.to_owned()) + .map_err(|e| SparqlSyntaxError(ParseErrorKind::InvalidBaseIri(e)))?, + ) + } else { + None + }, + namespaces: HashMap::default(), + used_bnodes: HashSet::default(), + currently_used_bnodes: HashSet::default(), + aggregates: Vec::new(), + }) + } + + fn parse_iri(&self, iri: String) -> Result, IriParseError> { + if let Some(base_iri) = &self.base_iri { + base_iri.resolve(&iri) + } else { + Iri::parse(iri) + } + } + + fn new_aggregation(&mut self, agg: AggregateExpression) -> Result { + let aggregates = self.aggregates.last_mut().ok_or("Unexpected aggregate")?; + Ok(aggregates + .iter() + .find_map(|(v, a)| (a == &agg).then_some(v)) + .cloned() + .unwrap_or_else(|| { + let new_var = variable(); + aggregates.push((new_var.clone(), agg)); + new_var + })) + } +} + +fn unescape_iriref(mut input: &str) -> Result { + let mut output = String::with_capacity(input.len()); + while let Some((before, after)) = input.split_once('\\') { + output.push_str(before); + let mut after = after.chars(); + let (escape, after) = match after.next() { + Some('u') => read_hex_char::<4>(after.as_str())?, + Some('U') => read_hex_char::<8>(after.as_str())?, + Some(_) => { + return Err( + "IRIs are only allowed to contain escape sequences \\uXXXX and \\UXXXXXXXX", + ) + } + None => return Err("IRIs are not allowed to end with a '\'"), + }; + output.push(escape); + input = after; + } + output.push_str(input); + Ok(output) +} + +fn unescape_string(mut input: &str) -> Result { + let mut output = String::with_capacity(input.len()); + while let Some((before, after)) = input.split_once('\\') { + output.push_str(before); + let mut after = after.chars(); + let (escape, after) = match after.next() { + Some('t') => ('\u{0009}', after.as_str()), + Some('b') => ('\u{0008}', after.as_str()), + Some('n') => ('\u{000A}', after.as_str()), + Some('r') => ('\u{000D}', after.as_str()), + Some('f') => ('\u{000C}', after.as_str()), + Some('"') => ('\u{0022}', after.as_str()), + Some('\'') => ('\u{0027}', after.as_str()), + Some('\\') => ('\u{005C}', after.as_str()), + Some('u') => read_hex_char::<4>(after.as_str())?, + Some('U') => read_hex_char::<8>(after.as_str())?, + Some(_) => return Err("The character that can be escaped in strings are tbnrf\"'\\"), + None => return Err("strings are not allowed to end with a '\'"), + }; + output.push(escape); + input = 
after; + } + output.push_str(input); + Ok(output) +} + +fn read_hex_char(input: &str) -> Result<(char, &str), &'static str> { + if let Some(escape) = input.get(..SIZE) { + if let Some(char) = u32::from_str_radix(escape, 16) + .ok() + .and_then(char::from_u32) + { + Ok((char, &input[SIZE..])) + } else { + Err("\\u escape sequence should be followed by hexadecimal digits") + } + } else { + Err("\\u escape sequence should be followed by hexadecimal digits") + } +} + +fn variable() -> Variable { + Variable::new_unchecked(format!("{:x}", random::())) +} + +parser! { + //See https://www.w3.org/TR/turtle/#sec-grammar + grammar parser(state: &mut ParserState) for str { + pub rule QueryUnit() -> Query = Query() + + rule Query() -> Query = _ Prologue() _ q:(SelectQuery() / ConstructQuery() / DescribeQuery() / AskQuery()) _ { + q + } + + pub rule UpdateInit() -> Vec = Update() + + rule Prologue() = (BaseDecl() _ / PrefixDecl() _)* {} + + rule BaseDecl() = i("BASE") _ i:IRIREF() { + state.base_iri = Some(i) + } + + rule PrefixDecl() = i("PREFIX") _ ns:PNAME_NS() _ i:IRIREF() { + state.namespaces.insert(ns.into(), i.into_inner()); + } + + rule SelectQuery() -> Query = s:SelectClause() _ d:DatasetClauses() _ w:WhereClause() _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {? + Ok(Query::Select { + dataset: d, + pattern: build_select(s, w, g, h, o, l, v, state)?, + base_iri: state.base_iri.clone() + }) + } + + rule SubSelect() -> GraphPattern = s:SelectClause() _ w:WhereClause() _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {? + build_select(s, w, g, h, o, l, v, state) + } + + rule SelectClause() -> Selection = i("SELECT") _ Selection_init() o:SelectClause_option() _ v:SelectClause_variables() { + Selection { + option: o, + variables: v + } + } + rule Selection_init() = { + state.aggregates.push(Vec::new()) + } + rule SelectClause_option() -> SelectionOption = + i("DISTINCT") { SelectionOption::Distinct } / + i("REDUCED") { SelectionOption::Reduced } / + { SelectionOption::Default } + rule SelectClause_variables() -> SelectionVariables = + "*" { SelectionVariables::Star } / + p:SelectClause_member()+ { SelectionVariables::Explicit(p) } + rule SelectClause_member() -> SelectionMember = + v:Var() _ { SelectionMember::Variable(v) } / + "(" _ e:Expression() _ i("AS") _ v:Var() _ ")" _ { SelectionMember::Expression(e, v) } + + rule ConstructQuery() -> Query = + i("CONSTRUCT") _ c:ConstructTemplate() _ d:DatasetClauses() _ w:WhereClause() _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {? + Ok(Query::Construct { + template: c, + dataset: d, + pattern: build_select(Selection::no_op(), w, g, h, o, l, v, state)?, + base_iri: state.base_iri.clone() + }) + } / + i("CONSTRUCT") _ d:DatasetClauses() _ i("WHERE") _ "{" _ c:ConstructQuery_optional_triple_template() _ "}" _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {? + Ok(Query::Construct { + template: c.clone(), + dataset: d, + pattern: build_select( + Selection::no_op(), + GraphPattern::Bgp { patterns: c }, + g, h, o, l, v, state + )?, + base_iri: state.base_iri.clone() + }) + } + + rule ConstructQuery_optional_triple_template() -> Vec = TriplesTemplate() / { Vec::new() } + + rule DescribeQuery() -> Query = + i("DESCRIBE") _ "*" _ d:DatasetClauses() w:WhereClause()? _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? 
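The escape helpers above are shared by the IRI and string rules; `read_hex_char::<N>` consumes exactly `N` hex digits after a `\u` or `\U`. A few illustrative assertions:

fn escape_demo() {
    assert_eq!(read_hex_char::<4>("00E9 tail"), Ok(('\u{00E9}', " tail")));
    assert_eq!(unescape_string(r"a\tb\u00E9"), Ok("a\tb\u{00E9}".to_string()));
    // IRIs accept only \uXXXX and \UXXXXXXXX escapes:
    assert!(unescape_iriref(r"ex\n").is_err());
}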
_ l:LimitOffsetClauses()? _ v:ValuesClause() {? + Ok(Query::Describe { + dataset: d, + pattern: build_select(Selection::no_op(), w.unwrap_or_default(), g, h, o, l, v, state)?, + base_iri: state.base_iri.clone() + }) + } / + i("DESCRIBE") _ p:DescribeQuery_item()+ _ d:DatasetClauses() w:WhereClause()? _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {? + Ok(Query::Describe { + dataset: d, + pattern: build_select(Selection { + option: SelectionOption::Default, + variables: SelectionVariables::Explicit(p.into_iter().map(|var_or_iri| match var_or_iri { + NamedNodePattern::NamedNode(n) => SelectionMember::Expression(n.into(), variable()), + NamedNodePattern::Variable(v) => SelectionMember::Variable(v) + }).collect()) + }, w.unwrap_or_default(), g, h, o, l, v, state)?, + base_iri: state.base_iri.clone() + }) + } + rule DescribeQuery_item() -> NamedNodePattern = i:VarOrIri() _ { i } + + rule AskQuery() -> Query = i("ASK") _ d:DatasetClauses() _ w:WhereClause() _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {? + Ok(Query::Ask { + dataset: d, + pattern: build_select(Selection::no_op(), w, g, h, o, l, v, state)?, + base_iri: state.base_iri.clone() + }) + } + + rule DatasetClause() -> (Option, Option) = i("FROM") _ d:(DefaultGraphClause() / NamedGraphClause()) { d } + rule DatasetClauses() -> Option = d:DatasetClause() ** (_) { + if d.is_empty() { + return None; + } + let mut default = Vec::new(); + let mut named = Vec::new(); + for (d, n) in d { + if let Some(d) = d { + default.push(d); + } + if let Some(n) = n { + named.push(n); + } + } + Some(QueryDataset { + default, named: Some(named) + }) + } + + rule DefaultGraphClause() -> (Option, Option) = s:SourceSelector() { + (Some(s), None) + } + + rule NamedGraphClause() -> (Option, Option) = i("NAMED") _ s:SourceSelector() { + (None, Some(s)) + } + + rule SourceSelector() -> NamedNode = iri() + + rule WhereClause() -> GraphPattern = i("WHERE")? _ p:GroupGraphPattern() { + p + } + + rule GroupClause() -> (Vec, Vec<(Expression,Variable)>) = i("GROUP") _ i("BY") _ c:GroupCondition_item()+ { + let mut projections: Vec<(Expression,Variable)> = Vec::new(); + let clauses = c.into_iter().map(|(e, vo)| { + if let Expression::Variable(v) = e { + v + } else { + let v = vo.unwrap_or_else(variable); + projections.push((e, v.clone())); + v + } + }).collect(); + (clauses, projections) + } + rule GroupCondition_item() -> (Expression, Option) = c:GroupCondition() _ { c } + + rule GroupCondition() -> (Expression, Option) = + e:BuiltInCall() { (e, None) } / + e:FunctionCall() { (e, None) } / + "(" _ e:Expression() _ v:GroupCondition_as()? ")" { (e, v) } / + e:Var() { (e.into(), None) } + rule GroupCondition_as() -> Variable = i("AS") _ v:Var() _ { v } + + rule HavingClause() -> Expression = i("HAVING") _ e:HavingCondition()+ {? 
+ not_empty_fold(e.into_iter(), |a, b| Expression::And(Box::new(a), Box::new(b))) + } + + rule HavingCondition() -> Expression = Constraint() + + rule OrderClause() -> Vec = i("ORDER") _ i("BY") _ c:OrderClause_item()+ { c } + rule OrderClause_item() -> OrderExpression = c:OrderCondition() _ { c } + + rule OrderCondition() -> OrderExpression = + i("ASC") _ e: BrackettedExpression() { OrderExpression::Asc(e) } / + i("DESC") _ e: BrackettedExpression() { OrderExpression::Desc(e) } / + e: Constraint() { OrderExpression::Asc(e) } / + v: Var() { OrderExpression::Asc(Expression::from(v)) } + + rule LimitOffsetClauses() -> (usize, Option) = + l:LimitClause() _ o:OffsetClause()? { (o.unwrap_or(0), Some(l)) } / + o:OffsetClause() _ l:LimitClause()? { (o, l) } + + rule LimitClause() -> usize = i("LIMIT") _ l:$(INTEGER()) {? + usize::from_str(l).map_err(|_| "The query limit should be a non negative integer") + } + + rule OffsetClause() -> usize = i("OFFSET") _ o:$(INTEGER()) {? + usize::from_str(o).map_err(|_| "The query offset should be a non negative integer") + } + + rule ValuesClause() -> Option = + i("VALUES") _ p:DataBlock() { Some(p) } / + { None } + + rule Update() -> Vec = _ Prologue() _ u:(Update1() ** (_ ";" _)) _ ( ";" _)? { u.into_iter().flatten().collect() } + + rule Update1() -> Vec = Load() / Clear() / Drop() / Add() / Move() / Copy() / Create() / InsertData() / DeleteData() / DeleteWhere() / Modify() + rule Update1_silent() -> bool = i("SILENT") { true } / { false } + + rule Load() -> Vec = i("LOAD") _ silent:Update1_silent() _ source:iri() _ destination:Load_to()? { + vec![GraphUpdateOperation::Load { silent, source, destination: destination.map_or(GraphName::DefaultGraph, GraphName::NamedNode) }] + } + rule Load_to() -> NamedNode = i("INTO") _ g: GraphRef() { g } + + rule Clear() -> Vec = i("CLEAR") _ silent:Update1_silent() _ graph:GraphRefAll() { + vec![GraphUpdateOperation::Clear { silent, graph }] + } + + rule Drop() -> Vec = i("DROP") _ silent:Update1_silent() _ graph:GraphRefAll() { + vec![GraphUpdateOperation::Drop { silent, graph }] + } + + rule Create() -> Vec = i("CREATE") _ silent:Update1_silent() _ graph:GraphRef() { + vec![GraphUpdateOperation::Create { silent, graph }] + } + + rule Add() -> Vec = i("ADD") _ silent:Update1_silent() _ from:GraphOrDefault() _ i("TO") _ to:GraphOrDefault() { + // Rewriting defined by https://www.w3.org/TR/sparql11-update/#add + if from == to { + Vec::new() // identity case + } else { + let bgp = GraphPattern::Bgp { patterns: vec![TriplePattern::new(Variable::new_unchecked("s"), Variable::new_unchecked("p"), Variable::new_unchecked("o"))] }; + vec![copy_graph(from, to)] + } + } + + rule Move() -> Vec = i("MOVE") _ silent:Update1_silent() _ from:GraphOrDefault() _ i("TO") _ to:GraphOrDefault() { + // Rewriting defined by https://www.w3.org/TR/sparql11-update/#move + if from == to { + Vec::new() // identity case + } else { + let bgp = GraphPattern::Bgp { patterns: vec![TriplePattern::new(Variable::new_unchecked("s"), Variable::new_unchecked("p"), Variable::new_unchecked("o"))] }; + vec![GraphUpdateOperation::Drop { silent: true, graph: to.clone().into() }, copy_graph(from.clone(), to), GraphUpdateOperation::Drop { silent, graph: from.into() }] + } + } + + rule Copy() -> Vec = i("COPY") _ silent:Update1_silent() _ from:GraphOrDefault() _ i("TO") _ to:GraphOrDefault() { + // Rewriting defined by https://www.w3.org/TR/sparql11-update/#copy + if from == to { + Vec::new() // identity case + } else { + let bgp = GraphPattern::Bgp { patterns: 
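Per the rewriting rules of SPARQL 1.1 Update, `ADD`/`MOVE`/`COPY` produce no dedicated operations: they expand into `Drop`s plus the `DeleteInsert` that `copy_graph` builds from an all-variables BGP. A sketch of what `COPY` parses to (IRIs hypothetical):

fn copy_demo() -> Result<(), Box<dyn std::error::Error>> {
    let update = parse_update(
        "COPY <http://example.com/g1> TO <http://example.com/g2>",
        None,
    )?;
    // update.operations is roughly:
    //   Drop { silent: true, graph: <g2> }
    //   DeleteInsert { insert: ?s ?p ?o into <g2>, pattern: GRAPH <g1> { ?s ?p ?o }, .. }
    assert_eq!(update.operations.len(), 2);
    Ok(())
}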
vec![TriplePattern::new(Variable::new_unchecked("s"), Variable::new_unchecked("p"), Variable::new_unchecked("o"))] }; + vec![GraphUpdateOperation::Drop { silent: true, graph: to.clone().into() }, copy_graph(from, to)] + } + } + + rule InsertData() -> Vec = i("INSERT") _ i("DATA") _ data:QuadData() { + vec![GraphUpdateOperation::InsertData { data }] + } + + rule DeleteData() -> Vec = i("DELETE") _ i("DATA") _ data:GroundQuadData() { + vec![GraphUpdateOperation::DeleteData { data }] + } + + rule DeleteWhere() -> Vec = i("DELETE") _ i("WHERE") _ d:QuadPattern() {? + let pattern = d.iter().map(|q| { + let bgp = GraphPattern::Bgp { patterns: vec![TriplePattern::new(q.subject.clone(), q.predicate.clone(), q.object.clone())] }; + match &q.graph_name { + GraphNamePattern::NamedNode(graph_name) => GraphPattern::Graph { name: graph_name.clone().into(), inner: Box::new(bgp) }, + GraphNamePattern::DefaultGraph => bgp, + GraphNamePattern::Variable(graph_name) => GraphPattern::Graph { name: graph_name.clone().into(), inner: Box::new(bgp) }, + } + }).reduce(new_join).unwrap_or_default(); + let delete = d.into_iter().map(GroundQuadPattern::try_from).collect::,_>>().map_err(|()| "Blank nodes are not allowed in DELETE WHERE")?; + Ok(vec![GraphUpdateOperation::DeleteInsert { + delete, + insert: Vec::new(), + using: None, + pattern: Box::new(pattern) + }]) + } + + rule Modify() -> Vec = with:Modify_with()? _ Modify_clear() c:Modify_clauses() _ u:(UsingClause() ** (_)) _ i("WHERE") _ pattern:GroupGraphPattern() { + let (delete, insert) = c; + let mut delete = delete.unwrap_or_default(); + let mut insert = insert.unwrap_or_default(); + #[allow(clippy::shadow_same)] + let mut pattern = pattern; + + let mut using = if u.is_empty() { + None + } else { + let mut default = Vec::new(); + let mut named = Vec::new(); + for (d, n) in u { + if let Some(d) = d { + default.push(d) + } + if let Some(n) = n { + named.push(n) + } + } + Some(QueryDataset { default, named: Some(named) }) + }; + + if let Some(with) = with { + // We inject WITH everywhere + delete = delete.into_iter().map(|q| if q.graph_name == GraphNamePattern::DefaultGraph { + GroundQuadPattern { + subject: q.subject, + predicate: q.predicate, + object: q.object, + graph_name: with.clone().into() + } + } else { + q + }).collect(); + insert = insert.into_iter().map(|q| if q.graph_name == GraphNamePattern::DefaultGraph { + QuadPattern { + subject: q.subject, + predicate: q.predicate, + object: q.object, + graph_name: with.clone().into() + } + } else { + q + }).collect(); + if using.is_none() { + using = Some(QueryDataset { default: vec![with], named: None }); + } + } + + vec![GraphUpdateOperation::DeleteInsert { + delete, + insert, + using, + pattern: Box::new(pattern) + }] + } + rule Modify_with() -> NamedNode = i("WITH") _ i:iri() _ { i } + rule Modify_clauses() -> (Option>, Option>) = d:DeleteClause() _ i:InsertClause()? { + (Some(d), i) + } / i:InsertClause() { + (None, Some(i)) + } + rule Modify_clear() = { + state.used_bnodes.clear(); + state.currently_used_bnodes.clear(); + } + + rule DeleteClause() -> Vec = i("DELETE") _ q:QuadPattern() {? 
+ q.into_iter().map(GroundQuadPattern::try_from).collect::,_>>().map_err(|()| "Blank nodes are not allowed in DELETE WHERE") + } + + rule InsertClause() -> Vec = i("INSERT") _ q:QuadPattern() { q } + + rule UsingClause() -> (Option, Option) = i("USING") _ d:(UsingClause_default() / UsingClause_named()) { d } + rule UsingClause_default() -> (Option, Option) = i:iri() { + (Some(i), None) + } + rule UsingClause_named() -> (Option, Option) = i("NAMED") _ i:iri() { + (None, Some(i)) + } + + rule GraphOrDefault() -> GraphName = i("DEFAULT") { + GraphName::DefaultGraph + } / (i("GRAPH") _)? g:iri() { + GraphName::NamedNode(g) + } + + rule GraphRef() -> NamedNode = i("GRAPH") _ g:iri() { g } + + rule GraphRefAll() -> GraphTarget = i: GraphRef() { i.into() } + / i("DEFAULT") { GraphTarget::DefaultGraph } + / i("NAMED") { GraphTarget::NamedGraphs } + / i("ALL") { GraphTarget::AllGraphs } + + rule QuadPattern() -> Vec = "{" _ q:Quads() _ "}" { q } + + rule QuadData() -> Vec = "{" _ q:Quads() _ "}" {? + q.into_iter().map(Quad::try_from).collect::, ()>>().map_err(|()| "Variables are not allowed in INSERT DATA") + } + rule GroundQuadData() -> Vec = "{" _ q:Quads() _ "}" {? + q.into_iter().map(|q| GroundQuad::try_from(Quad::try_from(q)?)).collect::, ()>>().map_err(|()| "Variables and blank nodes are not allowed in DELETE DATA") + } + + rule Quads() -> Vec = q:(Quads_TriplesTemplate() / Quads_QuadsNotTriples()) ** (_) { + q.into_iter().flatten().collect() + } + rule Quads_TriplesTemplate() -> Vec = t:TriplesTemplate() { + t.into_iter().map(|t| QuadPattern::new(t.subject, t.predicate, t.object, GraphNamePattern::DefaultGraph)).collect() + } //TODO: return iter? + rule Quads_QuadsNotTriples() -> Vec = q:QuadsNotTriples() _ "."? { q } + + rule QuadsNotTriples() -> Vec = i("GRAPH") _ g:VarOrIri() _ "{" _ t:TriplesTemplate()? _ "}" { + t.unwrap_or_default().into_iter().map(|t| QuadPattern::new(t.subject, t.predicate, t.object, g.clone())).collect() + } + + rule TriplesTemplate() -> Vec = ts:TriplesTemplate_inner() ++ (".") ("." _)? { + ts.into_iter().flatten().collect() + } + rule TriplesTemplate_inner() -> Vec = _ t:TriplesSameSubject() _ { t } + + rule GroupGraphPattern() -> GraphPattern = + "{" _ GroupGraphPattern_clear() p:GroupGraphPatternSub() GroupGraphPattern_clear() _ "}" { p } / + "{" _ GroupGraphPattern_clear() p:SubSelect() GroupGraphPattern_clear() _ "}" { p } + rule GroupGraphPattern_clear() = { + // We deal with blank nodes aliases rule + state.used_bnodes.extend(state.currently_used_bnodes.iter().cloned()); + state.currently_used_bnodes.clear(); + } + + rule GroupGraphPatternSub() -> GraphPattern = a:TriplesBlock()? _ b:GroupGraphPatternSub_item()* {? 
+ let mut filter: Option = None; + let mut g = a.map_or_else(GraphPattern::default, build_bgp); + for e in b.into_iter().flatten() { + match e { + PartialGraphPattern::Optional(p, f) => { + g = GraphPattern::LeftJoin { left: Box::new(g), right: Box::new(p), expression: f } + } + #[cfg(feature = "sep-0006")] + PartialGraphPattern::Lateral(p) => { + let mut defined_variables = HashSet::default(); + add_defined_variables(&p, &mut defined_variables); + let mut contains = false; + g.on_in_scope_variable(|v| { + if defined_variables.contains(v) { + contains = true; + } + }); + if contains { + return Err("An existing variable is overridden in the right side of LATERAL"); + } + g = GraphPattern::Lateral { left: Box::new(g), right: Box::new(p) } + } + PartialGraphPattern::Minus(p) => { + g = GraphPattern::Minus { left: Box::new(g), right: Box::new(p) } + } + PartialGraphPattern::Bind(expression, variable) => { + let mut contains = false; + g.on_in_scope_variable(|v| { + if *v == variable { + contains = true; + } + }); + if contains { + return Err("BIND is overriding an existing variable") + } + g = GraphPattern::Extend { inner: Box::new(g), variable, expression } + } + PartialGraphPattern::Filter(expr) => filter = Some(if let Some(f) = filter { + Expression::And(Box::new(f), Box::new(expr)) + } else { + expr + }), + PartialGraphPattern::Other(e) => g = new_join(g, e), + } + } + + Ok(if let Some(expr) = filter { + GraphPattern::Filter { expr, inner: Box::new(g) } + } else { + g + }) + } + rule GroupGraphPatternSub_item() -> Vec = a:GraphPatternNotTriples() _ ("." _)? b:TriplesBlock()? _ { + let mut result = vec![a]; + if let Some(v) = b { + result.push(PartialGraphPattern::Other(build_bgp(v))); + } + result + } + + rule TriplesBlock() -> Vec = hs:TriplesBlock_inner() ++ (".") ("." _)? { + hs.into_iter().flatten().collect() + } + rule TriplesBlock_inner() -> Vec = _ h:TriplesSameSubjectPath() _ { h } + + rule GraphPatternNotTriples() -> PartialGraphPattern = GroupOrUnionGraphPattern() / OptionalGraphPattern() / LateralGraphPattern() / MinusGraphPattern() / GraphGraphPattern() / ServiceGraphPattern() / Filter() / Bind() / InlineData() + + rule OptionalGraphPattern() -> PartialGraphPattern = i("OPTIONAL") _ p:GroupGraphPattern() { + if let GraphPattern::Filter { expr, inner } = p { + PartialGraphPattern::Optional(*inner, Some(expr)) + } else { + PartialGraphPattern::Optional(p, None) + } + } + + rule LateralGraphPattern() -> PartialGraphPattern = i("LATERAL") _ p:GroupGraphPattern() {? 
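Two consequences of the folding in `GroupGraphPatternSub` are worth spelling out: every `FILTER` in a group is AND-ed and applied over the whole group, and `BIND` (like `LATERAL`) is rejected when it would override an in-scope variable. A sketch:

fn group_demo() {
    // The filter floats over the join of both triple blocks, i.e.
    // Filter { expr: ?o = 1, inner: Bgp { ?s ?p ?o . ?s ?p ?o2 } }:
    assert!(parse_query("SELECT * WHERE { ?s ?p ?o . FILTER(?o = 1) ?s ?p ?o2 }", None).is_ok());
    // Rejected: ?o is already in scope when the BIND is folded in.
    assert!(parse_query("SELECT * WHERE { ?s ?p ?o BIND(1 AS ?o) }", None).is_err());
}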
+ #[cfg(feature = "sep-0006")]{Ok(PartialGraphPattern::Lateral(p))} + #[cfg(not(feature = "sep-0006"))]{Err("The LATERAL modifier is not supported")} + } + + rule GraphGraphPattern() -> PartialGraphPattern = i("GRAPH") _ name:VarOrIri() _ p:GroupGraphPattern() { + PartialGraphPattern::Other(GraphPattern::Graph { name, inner: Box::new(p) }) + } + + rule ServiceGraphPattern() -> PartialGraphPattern = + i("SERVICE") _ i("SILENT") _ name:VarOrIri() _ p:GroupGraphPattern() { PartialGraphPattern::Other(GraphPattern::Service { name, inner: Box::new(p), silent: true }) } / + i("SERVICE") _ name:VarOrIri() _ p:GroupGraphPattern() { PartialGraphPattern::Other(GraphPattern::Service{ name, inner: Box::new(p), silent: false }) } + + rule Bind() -> PartialGraphPattern = i("BIND") _ "(" _ e:Expression() _ i("AS") _ v:Var() _ ")" { + PartialGraphPattern::Bind(e, v) + } + + rule InlineData() -> PartialGraphPattern = i("VALUES") _ p:DataBlock() { PartialGraphPattern::Other(p) } + + rule DataBlock() -> GraphPattern = l:(InlineDataOneVar() / InlineDataFull()) { + GraphPattern::Values { variables: l.0, bindings: l.1 } + } + + rule InlineDataOneVar() -> (Vec, Vec>>) = var:Var() _ "{" _ d:InlineDataOneVar_value()* "}" { + (vec![var], d) + } + rule InlineDataOneVar_value() -> Vec> = t:DataBlockValue() _ { vec![t] } + + rule InlineDataFull() -> (Vec, Vec>>) = "(" _ vars:InlineDataFull_var()* _ ")" _ "{" _ vals:InlineDataFull_values()* "}" {? + if vals.iter().all(|vs| vs.len() == vars.len()) { + Ok((vars, vals)) + } else { + Err("The VALUES clause rows should have exactly the same number of values as there are variables. To set a value to undefined use UNDEF.") + } + } + rule InlineDataFull_var() -> Variable = v:Var() _ { v } + rule InlineDataFull_values() -> Vec> = "(" _ v:InlineDataFull_value()* _ ")" _ { v } + rule InlineDataFull_value() -> Option = v:DataBlockValue() _ { v } + + rule DataBlockValue() -> Option = + t:QuotedTripleData() {? + #[cfg(feature = "rdf-star")]{Ok(Some(t.into()))} + #[cfg(not(feature = "rdf-star"))]{Err("Embedded triples are only available in SPARQL-star")} + } / + i:iri() { Some(i.into()) } / + l:RDFLiteral() { Some(l.into()) } / + l:NumericLiteral() { Some(l.into()) } / + l:BooleanLiteral() { Some(l.into()) } / + i("UNDEF") { None } + + rule MinusGraphPattern() -> PartialGraphPattern = i("MINUS") _ p: GroupGraphPattern() { + PartialGraphPattern::Minus(p) + } + + rule GroupOrUnionGraphPattern() -> PartialGraphPattern = p:GroupOrUnionGraphPattern_item() **<1,> (i("UNION") _) {? 
+ not_empty_fold(p.into_iter(), |a, b| { + GraphPattern::Union { left: Box::new(a), right: Box::new(b) } + }).map(PartialGraphPattern::Other) + } + rule GroupOrUnionGraphPattern_item() -> GraphPattern = p:GroupGraphPattern() _ { p } + + rule Filter() -> PartialGraphPattern = i("FILTER") _ c:Constraint() { + PartialGraphPattern::Filter(c) + } + + rule Constraint() -> Expression = BrackettedExpression() / FunctionCall() / BuiltInCall() + + rule FunctionCall() -> Expression = f: iri() _ a: ArgList() { + Expression::FunctionCall(Function::Custom(f), a) + } + + rule ArgList() -> Vec = + "(" _ e:ArgList_item() **<1,> ("," _) _ ")" { e } / + NIL() { Vec::new() } + rule ArgList_item() -> Expression = e:Expression() _ { e } + + rule ExpressionList() -> Vec = + "(" _ e:ExpressionList_item() **<1,> ("," _) ")" { e } / + NIL() { Vec::new() } + rule ExpressionList_item() -> Expression = e:Expression() _ { e } + + rule ConstructTemplate() -> Vec = "{" _ t:ConstructTriples() _ "}" { t } + + rule ConstructTriples() -> Vec = p:ConstructTriples_item() ** ("." _) "."? { + p.into_iter().flatten().collect() + } + rule ConstructTriples_item() -> Vec = t:TriplesSameSubject() _ { t } + + rule TriplesSameSubject() -> Vec = + s:VarOrTerm() _ po:PropertyListNotEmpty() {? + let mut patterns = po.patterns; + for (p, os) in po.focus { + for o in os { + add_to_triple_patterns(s.clone(), p.clone(), o, &mut patterns)? + } + } + Ok(patterns) + } / + s:TriplesNode() _ po:PropertyList() {? + let mut patterns = s.patterns; + patterns.extend(po.patterns); + for (p, os) in po.focus { + for o in os { + add_to_triple_patterns(s.focus.clone(), p.clone(), o, &mut patterns)? + } + } + Ok(patterns) + } + + rule PropertyList() -> FocusedTriplePattern)>> = + PropertyListNotEmpty() / + { FocusedTriplePattern::default() } + + rule PropertyListNotEmpty() -> FocusedTriplePattern)>> = l:PropertyListNotEmpty_item() **<1,> (";" _) { + l.into_iter().fold(FocusedTriplePattern::)>>::default(), |mut a, b| { + a.focus.push(b.focus); + a.patterns.extend(b.patterns); + a + }) + } + rule PropertyListNotEmpty_item() -> FocusedTriplePattern<(NamedNodePattern,Vec)> = p:Verb() _ o:ObjectList() _ { + FocusedTriplePattern { + focus: (p, o.focus), + patterns: o.patterns + } + } + + rule Verb() -> NamedNodePattern = VarOrIri() / "a" { rdf::TYPE.into_owned().into() } + + rule ObjectList() -> FocusedTriplePattern> = o:ObjectList_item() **<1,> ("," _) { + o.into_iter().fold(FocusedTriplePattern::>::default(), |mut a, b| { + a.focus.push(b.focus); + a.patterns.extend_from_slice(&b.patterns); + a + }) + } + rule ObjectList_item() -> FocusedTriplePattern = o:Object() _ { o } + + rule Object() -> FocusedTriplePattern = g:GraphNode() _ a:Annotation()? { + if let Some(a) = a { + let mut patterns = g.patterns; + patterns.extend(a.patterns); + FocusedTriplePattern { + focus: AnnotatedTerm { + term: g.focus, + annotations: a.focus + }, + patterns + } + } else { + FocusedTriplePattern { + focus: AnnotatedTerm { + term: g.focus, + annotations: Vec::new() + }, + patterns: g.patterns + } + } + } + + rule TriplesSameSubjectPath() -> Vec = + s:VarOrTerm() _ po:PropertyListPathNotEmpty() {? + let mut patterns = po.patterns; + for (p, os) in po.focus { + for o in os { + add_to_triple_or_path_patterns(s.clone(), p.clone(), o, &mut patterns)?; + } + } + Ok(patterns) + } / + s:TriplesNodePath() _ po:PropertyListPath() {? 
+ let mut patterns = s.patterns; + patterns.extend(po.patterns); + for (p, os) in po.focus { + for o in os { + add_to_triple_or_path_patterns(s.focus.clone(), p.clone(), o, &mut patterns)?; + } + } + Ok(patterns) + } + + rule PropertyListPath() -> FocusedTripleOrPathPattern)>> = + PropertyListPathNotEmpty() / + { FocusedTripleOrPathPattern::default() } + + rule PropertyListPathNotEmpty() -> FocusedTripleOrPathPattern)>> = hp:(VerbPath() / VerbSimple()) _ ho:ObjectListPath() _ t:PropertyListPathNotEmpty_item()* { + t.into_iter().flatten().fold(FocusedTripleOrPathPattern { + focus: vec![(hp, ho.focus)], + patterns: ho.patterns + }, |mut a, b| { + a.focus.push(b.focus); + a.patterns.extend(b.patterns); + a + }) + } + rule PropertyListPathNotEmpty_item() -> Option)>> = ";" _ c:PropertyListPathNotEmpty_item_content()? { + c + } + rule PropertyListPathNotEmpty_item_content() -> FocusedTripleOrPathPattern<(VariableOrPropertyPath,Vec)> = p:(VerbPath() / VerbSimple()) _ o:ObjectListPath() _ { + FocusedTripleOrPathPattern { + focus: (p, o.focus.into_iter().map(AnnotatedTermPath::from).collect()), + patterns: o.patterns + } + } + + rule VerbPath() -> VariableOrPropertyPath = p:Path() { + p.into() + } + + rule VerbSimple() -> VariableOrPropertyPath = v:Var() { + v.into() + } + + rule ObjectListPath() -> FocusedTripleOrPathPattern> = o:ObjectListPath_item() **<1,> ("," _) { + o.into_iter().fold(FocusedTripleOrPathPattern::>::default(), |mut a, b| { + a.focus.push(b.focus); + a.patterns.extend(b.patterns); + a + }) + } + rule ObjectListPath_item() -> FocusedTripleOrPathPattern = o:ObjectPath() _ { o } + + rule ObjectPath() -> FocusedTripleOrPathPattern = g:GraphNodePath() _ a:AnnotationPath()? { + if let Some(a) = a { + let mut patterns = g.patterns; + patterns.extend(a.patterns); + FocusedTripleOrPathPattern { + focus: AnnotatedTermPath { + term: g.focus, + annotations: a.focus + }, + patterns + } + } else { + FocusedTripleOrPathPattern { + focus: AnnotatedTermPath { + term: g.focus, + annotations: Vec::new() + }, + patterns: g.patterns + } + } + } + + rule Path() -> PropertyPathExpression = PathAlternative() + + rule PathAlternative() -> PropertyPathExpression = p:PathAlternative_item() **<1,> ("|" _) {? + not_empty_fold(p.into_iter(), |a, b| { + PropertyPathExpression::Alternative(Box::new(a), Box::new(b)) + }) + } + rule PathAlternative_item() -> PropertyPathExpression = p:PathSequence() _ { p } + + rule PathSequence() -> PropertyPathExpression = p:PathSequence_item() **<1,> ("/" _) {? + not_empty_fold(p.into_iter(), |a, b| { + PropertyPathExpression::Sequence(Box::new(a), Box::new(b)) + }) + } + rule PathSequence_item() -> PropertyPathExpression = p:PathEltOrInverse() _ { p } + + rule PathElt() -> PropertyPathExpression = p:PathPrimary() _ o:PathElt_op()? { + match o { + Some('?') => PropertyPathExpression::ZeroOrOne(Box::new(p)), + Some('*') => PropertyPathExpression::ZeroOrMore(Box::new(p)), + Some('+') => PropertyPathExpression::OneOrMore(Box::new(p)), + Some(_) => unreachable!(), + None => p + } + } + rule PathElt_op() -> char = + "*" { '*' } / + "+" { '+' } / + "?" !(['0'..='9'] / PN_CHARS_U()) { '?' } // We mandate that this is not a variable + + rule PathEltOrInverse() -> PropertyPathExpression = + "^" _ p:PathElt() { PropertyPathExpression::Reverse(Box::new(p)) } / + PathElt() + + rule PathPrimary() -> PropertyPathExpression = + v:iri() { v.into() } / + "a" { rdf::TYPE.into_owned().into() } / + "!" 
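The rule chain `PathAlternative` → `PathSequence` → `PathEltOrInverse` → `PathElt` → `PathPrimary` encodes property-path precedence (`|` binds loosest, then `/`, with `^` and the `?`/`*`/`+` postfixes applying per element; the negative lookahead after `?` keeps it from swallowing a following variable). So `^a/(<p>|<q>)+` parses to the tree sketched below (IRIs hypothetical):

fn path_shape() -> PropertyPathExpression {
    PropertyPathExpression::Sequence(
        Box::new(PropertyPathExpression::Reverse(Box::new(
            rdf::TYPE.into_owned().into(),
        ))),
        Box::new(PropertyPathExpression::OneOrMore(Box::new(
            PropertyPathExpression::Alternative(
                Box::new(NamedNode::new("http://example.com/p").unwrap().into()),
                Box::new(NamedNode::new("http://example.com/q").unwrap().into()),
            ),
        ))),
    )
}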
_ p:PathNegatedPropertySet() { p } / + "(" _ p:Path() _ ")" { p } + + rule PathNegatedPropertySet() -> PropertyPathExpression = + "(" _ p:PathNegatedPropertySet_item() **<1,> ("|" _) ")" { + let mut direct = Vec::new(); + let mut inverse = Vec::new(); + for e in p { + match e { + Either::Left(a) => direct.push(a), + Either::Right(b) => inverse.push(b) + } + } + if inverse.is_empty() { + PropertyPathExpression::NegatedPropertySet(direct) + } else if direct.is_empty() { + PropertyPathExpression::Reverse(Box::new(PropertyPathExpression::NegatedPropertySet(inverse))) + } else { + PropertyPathExpression::Alternative( + Box::new(PropertyPathExpression::NegatedPropertySet(direct)), + Box::new(PropertyPathExpression::Reverse(Box::new(PropertyPathExpression::NegatedPropertySet(inverse)))) + ) + } + } / + p:PathOneInPropertySet() { + match p { + Either::Left(a) => PropertyPathExpression::NegatedPropertySet(vec![a]), + Either::Right(b) => PropertyPathExpression::Reverse(Box::new(PropertyPathExpression::NegatedPropertySet(vec![b]))), + } + } + rule PathNegatedPropertySet_item() -> Either = p:PathOneInPropertySet() _ { p } + + rule PathOneInPropertySet() -> Either = + "^" _ v:iri() { Either::Right(v) } / + "^" _ "a" { Either::Right(rdf::TYPE.into()) } / + v:iri() { Either::Left(v) } / + "a" { Either::Left(rdf::TYPE.into()) } + + rule TriplesNode() -> FocusedTriplePattern = Collection() / BlankNodePropertyList() + + rule BlankNodePropertyList() -> FocusedTriplePattern = "[" _ po:PropertyListNotEmpty() _ "]" {? + let mut patterns = po.patterns; + let mut bnode = TermPattern::from(BlankNode::default()); + for (p, os) in po.focus { + for o in os { + add_to_triple_patterns(bnode.clone(), p.clone(), o, &mut patterns)?; + } + } + Ok(FocusedTriplePattern { + focus: bnode, + patterns + }) + } + + rule TriplesNodePath() -> FocusedTripleOrPathPattern = CollectionPath() / BlankNodePropertyListPath() + + rule BlankNodePropertyListPath() -> FocusedTripleOrPathPattern = "[" _ po:PropertyListPathNotEmpty() _ "]" {? 
+ let mut patterns = po.patterns; + let mut bnode = TermPattern::from(BlankNode::default()); + for (p, os) in po.focus { + for o in os { + add_to_triple_or_path_patterns(bnode.clone(), p.clone(), o, &mut patterns)?; + } + } + Ok(FocusedTripleOrPathPattern { + focus: bnode, + patterns + }) + } + + rule Collection() -> FocusedTriplePattern = "(" _ o:Collection_item()+ ")" { + let mut patterns: Vec = Vec::new(); + let mut current_list_node = TermPattern::from(rdf::NIL.into_owned()); + for objWithPatterns in o.into_iter().rev() { + let new_blank_node = TermPattern::from(BlankNode::default()); + patterns.push(TriplePattern::new(new_blank_node.clone(), rdf::FIRST.into_owned(), objWithPatterns.focus.clone())); + patterns.push(TriplePattern::new(new_blank_node.clone(), rdf::REST.into_owned(), current_list_node)); + current_list_node = new_blank_node; + patterns.extend_from_slice(&objWithPatterns.patterns); + } + FocusedTriplePattern { + focus: current_list_node, + patterns + } + } + rule Collection_item() -> FocusedTriplePattern = o:GraphNode() _ { o } + + rule CollectionPath() -> FocusedTripleOrPathPattern = "(" _ o:CollectionPath_item()+ _ ")" { + let mut patterns: Vec = Vec::new(); + let mut current_list_node = TermPattern::from(rdf::NIL.into_owned()); + for objWithPatterns in o.into_iter().rev() { + let new_blank_node = TermPattern::from(BlankNode::default()); + patterns.push(TriplePattern::new(new_blank_node.clone(), rdf::FIRST.into_owned(), objWithPatterns.focus.clone()).into()); + patterns.push(TriplePattern::new(new_blank_node.clone(), rdf::REST.into_owned(), current_list_node).into()); + current_list_node = new_blank_node; + patterns.extend(objWithPatterns.patterns); + } + FocusedTripleOrPathPattern { + focus: current_list_node, + patterns + } + } + rule CollectionPath_item() -> FocusedTripleOrPathPattern = p:GraphNodePath() _ { p } + + + rule Annotation() -> FocusedTriplePattern)>> = "{|" _ a:PropertyListNotEmpty() _ "|}" { a } + + rule AnnotationPath() -> FocusedTripleOrPathPattern)>> = "{|" _ a: PropertyListPathNotEmpty() _ "|}" { a } + + rule GraphNode() -> FocusedTriplePattern = + t:VarOrTerm() { FocusedTriplePattern::new(t) } / + TriplesNode() + + rule GraphNodePath() -> FocusedTripleOrPathPattern = + t:VarOrTerm() { FocusedTripleOrPathPattern::new(t) } / + TriplesNodePath() + + rule VarOrTerm() -> TermPattern = + v:Var() { v.into() } / + t:QuotedTriple() {? + #[cfg(feature = "rdf-star")]{Ok(t.into())} + #[cfg(not(feature = "rdf-star"))]{Err("Embedded triples are only available in SPARQL-star")} + } / + t:GraphTerm() { t.into() } + + rule QuotedTriple() -> TriplePattern = "<<" _ s:VarOrTerm() _ p:Verb() _ o:VarOrTerm() _ ">>" {? + Ok(TriplePattern { + subject: s, + predicate: p, + object: o + }) + } + + rule QuotedTripleData() -> GroundTriple = "<<" _ s:DataValueTerm() _ p:QuotedTripleData_p() _ o:DataValueTerm() _ ">>" {? + Ok(GroundTriple { + subject: s.try_into().map_err(|()| "Literals are not allowed in subject position of nested patterns")?, + predicate: p, + object: o + }) + } + rule QuotedTripleData_p() -> NamedNode = i: iri() { i } / "a" { rdf::TYPE.into() } + + rule DataValueTerm() -> GroundTerm = i:iri() { i.into() } / + l:RDFLiteral() { l.into() } / + l:NumericLiteral() { l.into() } / + l:BooleanLiteral() { l.into() } / + t:QuotedTripleData() {? 
+ #[cfg(feature = "rdf-star")]{Ok(t.into())} + #[cfg(not(feature = "rdf-star"))]{Err("Embedded triples are only available in SPARQL-star")} + } + + rule VarOrIri() -> NamedNodePattern = + v:Var() { v.into() } / + i:iri() { i.into() } + + rule Var() -> Variable = name:(VAR1() / VAR2()) { Variable::new_unchecked(name) } + + rule GraphTerm() -> Term = + i:iri() { i.into() } / + l:RDFLiteral() { l.into() } / + l:NumericLiteral() { l.into() } / + l:BooleanLiteral() { l.into() } / + b:BlankNode() { b.into() } / + NIL() { rdf::NIL.into_owned().into() } + + rule Expression() -> Expression = e:ConditionalOrExpression() {e} + + rule ConditionalOrExpression() -> Expression = e:ConditionalOrExpression_item() **<1,> ("||" _) {? + not_empty_fold(e.into_iter(), |a, b| Expression::Or(Box::new(a), Box::new(b))) + } + rule ConditionalOrExpression_item() -> Expression = e:ConditionalAndExpression() _ { e } + + rule ConditionalAndExpression() -> Expression = e:ConditionalAndExpression_item() **<1,> ("&&" _) {? + not_empty_fold(e.into_iter(), |a, b| Expression::And(Box::new(a), Box::new(b))) + } + rule ConditionalAndExpression_item() -> Expression = e:ValueLogical() _ { e } + + rule ValueLogical() -> Expression = RelationalExpression() + + rule RelationalExpression() -> Expression = a:NumericExpression() _ o: RelationalExpression_inner()? { match o { + Some(("=", Some(b), None)) => Expression::Equal(Box::new(a), Box::new(b)), + Some(("!=", Some(b), None)) => Expression::Not(Box::new(Expression::Equal(Box::new(a), Box::new(b)))), + Some((">", Some(b), None)) => Expression::Greater(Box::new(a), Box::new(b)), + Some((">=", Some(b), None)) => Expression::GreaterOrEqual(Box::new(a), Box::new(b)), + Some(("<", Some(b), None)) => Expression::Less(Box::new(a), Box::new(b)), + Some(("<=", Some(b), None)) => Expression::LessOrEqual(Box::new(a), Box::new(b)), + Some(("IN", None, Some(l))) => Expression::In(Box::new(a), l), + Some(("NOT IN", None, Some(l))) => Expression::Not(Box::new(Expression::In(Box::new(a), l))), + Some(_) => unreachable!(), + None => a + } } + rule RelationalExpression_inner() -> (&'input str, Option, Option>) = + s: $("=" / "!=" / ">=" / ">" / "<=" / "<") _ e:NumericExpression() { (s, Some(e), None) } / + i("IN") _ l:ExpressionList() { ("IN", None, Some(l)) } / + i("NOT") _ i("IN") _ l:ExpressionList() { ("NOT IN", None, Some(l)) } + + rule NumericExpression() -> Expression = AdditiveExpression() + + rule AdditiveExpression() -> Expression = a:MultiplicativeExpression() _ o:AdditiveExpression_inner()? { match o { + Some(("+", b)) => Expression::Add(Box::new(a), Box::new(b)), + Some(("-", b)) => Expression::Subtract(Box::new(a), Box::new(b)), + Some(_) => unreachable!(), + None => a, + } } + rule AdditiveExpression_inner() -> (&'input str, Expression) = s: $("+" / "-") _ e:AdditiveExpression() { + (s, e) + } + + rule MultiplicativeExpression() -> Expression = a:UnaryExpression() _ o: MultiplicativeExpression_inner()? { match o { + Some(("*", b)) => Expression::Multiply(Box::new(a), Box::new(b)), + Some(("/", b)) => Expression::Divide(Box::new(a), Box::new(b)), + Some(_) => unreachable!(), + None => a + } } + rule MultiplicativeExpression_inner() -> (&'input str, Expression) = s: $("*" / "/") _ e:MultiplicativeExpression() { + (s, e) + } + + rule UnaryExpression() -> Expression = s: $("!" / "+" / "-")? 
_ e:PrimaryExpression() { match s { + Some("!") => Expression::Not(Box::new(e)), + Some("+") => Expression::UnaryPlus(Box::new(e)), + Some("-") => Expression::UnaryMinus(Box::new(e)), + Some(_) => unreachable!(), + None => e, + } } + + rule PrimaryExpression() -> Expression = + BrackettedExpression() / + ExprQuotedTriple() / + iriOrFunction() / + v:Var() { v.into() } / + l:RDFLiteral() { l.into() } / + l:NumericLiteral() { l.into() } / + l:BooleanLiteral() { l.into() } / + BuiltInCall() + + rule ExprVarOrTerm() -> Expression = + ExprQuotedTriple() / + i:iri() { i.into() } / + l:RDFLiteral() { l.into() } / + l:NumericLiteral() { l.into() } / + l:BooleanLiteral() { l.into() } / + v:Var() { v.into() } + + rule ExprQuotedTriple() -> Expression = "<<" _ s:ExprVarOrTerm() _ p:Verb() _ o:ExprVarOrTerm() _ ">>" {? + #[cfg(feature = "rdf-star")]{Ok(Expression::FunctionCall(Function::Triple, vec![s, p.into(), o]))} + #[cfg(not(feature = "rdf-star"))]{Err("Embedded triples are only available in SPARQL-star")} + } + + rule BrackettedExpression() -> Expression = "(" _ e:Expression() _ ")" { e } + + rule BuiltInCall() -> Expression = + a:Aggregate() {? state.new_aggregation(a).map(Into::into) } / + i("STR") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Str, vec![e]) } / + i("LANG") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Lang, vec![e]) } / + i("LANGMATCHES") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::LangMatches, vec![a, b]) } / + i("DATATYPE") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Datatype, vec![e]) } / + i("BOUND") _ "(" _ v:Var() _ ")" { Expression::Bound(v) } / + (i("IRI") / i("URI")) _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Iri, vec![e]) } / + i("BNODE") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::BNode, vec![e]) } / + i("BNODE") NIL() { Expression::FunctionCall(Function::BNode, vec![]) } / + i("RAND") _ NIL() { Expression::FunctionCall(Function::Rand, vec![]) } / + i("ABS") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Abs, vec![e]) } / + i("CEIL") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Ceil, vec![e]) } / + i("FLOOR") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Floor, vec![e]) } / + i("ROUND") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Round, vec![e]) } / + i("CONCAT") e:ExpressionList() { Expression::FunctionCall(Function::Concat, e) } / + SubstringExpression() / + i("STRLEN") _ "(" _ e: Expression() _ ")" { Expression::FunctionCall(Function::StrLen, vec![e]) } / + StrReplaceExpression() / + i("UCASE") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::UCase, vec![e]) } / + i("LCASE") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::LCase, vec![e]) } / + i("ENCODE_FOR_URI") "(" _ e: Expression() _ ")" { Expression::FunctionCall(Function::EncodeForUri, vec![e]) } / + i("CONTAINS") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::Contains, vec![a, b]) } / + i("STRSTARTS") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::StrStarts, vec![a, b]) } / + i("STRENDS") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::StrEnds, vec![a, b]) } / + i("STRBEFORE") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::StrBefore, vec![a, b]) } / + i("STRAFTER") _ "(" _ 
a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::StrAfter, vec![a, b]) } / + i("YEAR") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Year, vec![e]) } / + i("MONTH") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Month, vec![e]) } / + i("DAY") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Day, vec![e]) } / + i("HOURS") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Hours, vec![e]) } / + i("MINUTES") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Minutes, vec![e]) } / + i("SECONDS") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Seconds, vec![e]) } / + i("TIMEZONE") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Timezone, vec![e]) } / + i("TZ") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Tz, vec![e]) } / + i("NOW") _ NIL() { Expression::FunctionCall(Function::Now, vec![]) } / + i("UUID") _ NIL() { Expression::FunctionCall(Function::Uuid, vec![]) }/ + i("STRUUID") _ NIL() { Expression::FunctionCall(Function::StrUuid, vec![]) } / + i("MD5") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Md5, vec![e]) } / + i("SHA1") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Sha1, vec![e]) } / + i("SHA256") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Sha256, vec![e]) } / + i("SHA384") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Sha384, vec![e]) } / + i("SHA512") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Sha512, vec![e]) } / + i("COALESCE") e:ExpressionList() { Expression::Coalesce(e) } / + i("IF") _ "(" _ a:Expression() _ "," _ b:Expression() _ "," _ c:Expression() _ ")" { Expression::If(Box::new(a), Box::new(b), Box::new(c)) } / + i("STRLANG") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::StrLang, vec![a, b]) } / + i("STRDT") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::StrDt, vec![a, b]) } / + i("sameTerm") "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::SameTerm(Box::new(a), Box::new(b)) } / + (i("isIRI") / i("isURI")) _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::IsIri, vec![e]) } / + i("isBLANK") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::IsBlank, vec![e]) } / + i("isLITERAL") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::IsLiteral, vec![e]) } / + i("isNUMERIC") "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::IsNumeric, vec![e]) } / + RegexExpression() / + ExistsFunc() / + NotExistsFunc() / + i("TRIPLE") "(" _ s:Expression() _ "," _ p:Expression() "," _ o:Expression() ")" {? + #[cfg(feature = "rdf-star")]{Ok(Expression::FunctionCall(Function::Triple, vec![s, p, o]))} + #[cfg(not(feature = "rdf-star"))]{Err("The TRIPLE function is only available in SPARQL-star")} + } / + i("SUBJECT") "(" _ e:Expression() _ ")" {? + #[cfg(feature = "rdf-star")]{Ok(Expression::FunctionCall(Function::Subject, vec![e]))} + #[cfg(not(feature = "rdf-star"))]{Err("The SUBJECT function is only available in SPARQL-star")} + } / + i("PREDICATE") "(" _ e:Expression() _ ")" {? + #[cfg(feature = "rdf-star")]{Ok(Expression::FunctionCall(Function::Predicate, vec![e]))} + #[cfg(not(feature = "rdf-star"))]{Err("The PREDICATE function is only available in SPARQL-star")} + } / + i("OBJECT") "(" _ e:Expression() _ ")" {? 
+ #[cfg(feature = "rdf-star")]{Ok(Expression::FunctionCall(Function::Object, vec![e]))} + #[cfg(not(feature = "rdf-star"))]{Err("The OBJECT function is only available in SPARQL-star")} + } / + i("isTriple") "(" _ e:Expression() _ ")" {? + #[cfg(feature = "rdf-star")]{Ok(Expression::FunctionCall(Function::IsTriple, vec![e]))} + #[cfg(not(feature = "rdf-star"))]{Err("The isTriple function is only available in SPARQL-star")} + } / + i("ADJUST") "(" _ a:Expression() _ "," _ b:Expression() _ ")" {? + #[cfg(feature = "sep-0002")]{Ok(Expression::FunctionCall(Function::Adjust, vec![a, b]))} + #[cfg(not(feature = "sep-0002"))]{Err("The ADJUST function is only available in SPARQL 1.2 SEP 0002")} + } + + rule RegexExpression() -> Expression = + i("REGEX") _ "(" _ a:Expression() _ "," _ b:Expression() _ "," _ c:Expression() _ ")" { Expression::FunctionCall(Function::Regex, vec![a, b, c]) } / + i("REGEX") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::Regex, vec![a, b]) } + + + rule SubstringExpression() -> Expression = + i("SUBSTR") _ "(" _ a:Expression() _ "," _ b:Expression() _ "," _ c:Expression() _ ")" { Expression::FunctionCall(Function::SubStr, vec![a, b, c]) } / + i("SUBSTR") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::SubStr, vec![a, b]) } + + + rule StrReplaceExpression() -> Expression = + i("REPLACE") _ "(" _ a:Expression() _ "," _ b:Expression() _ "," _ c:Expression() _ "," _ d:Expression() _ ")" { Expression::FunctionCall(Function::Replace, vec![a, b, c, d]) } / + i("REPLACE") _ "(" _ a:Expression() _ "," _ b:Expression() _ "," _ c:Expression() _ ")" { Expression::FunctionCall(Function::Replace, vec![a, b, c]) } + + rule ExistsFunc() -> Expression = i("EXISTS") _ p:GroupGraphPattern() { Expression::Exists(Box::new(p)) } + + rule NotExistsFunc() -> Expression = i("NOT") _ i("EXISTS") _ p:GroupGraphPattern() { Expression::Not(Box::new(Expression::Exists(Box::new(p)))) } + + rule Aggregate() -> AggregateExpression = + i("COUNT") _ "(" _ i("DISTINCT") _ "*" _ ")" { AggregateExpression::CountSolutions { distinct: true } } / + i("COUNT") _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Count, expr, distinct: true } } / + i("COUNT") _ "(" _ "*" _ ")" { AggregateExpression::CountSolutions { distinct: false } } / + i("COUNT") _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Count, expr, distinct: false } } / + i("SUM") _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Sum, expr, distinct: true } } / + i("SUM") _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Sum, expr, distinct: false } } / + i("MIN") _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Min, expr, distinct: true } } / + i("MIN") _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Min, expr, distinct: false } } / + i("MAX") _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Max, expr, distinct: true } } / + i("MAX") _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Max, expr, distinct: false } } / + i("AVG") _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Avg, expr, 
distinct: true } } / + i("AVG") _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Avg, expr, distinct: false } } / + i("SAMPLE") _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Sample, expr, distinct: true } } / + i("SAMPLE") _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Sample, expr, distinct: false } } / + i("GROUP_CONCAT") _ "(" _ i("DISTINCT") _ expr:Expression() _ ";" _ i("SEPARATOR") _ "=" _ s:String() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::GroupConcat { separator: Some(s) }, expr, distinct: true } } / + i("GROUP_CONCAT") _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::GroupConcat { separator: None }, expr, distinct: true } } / + i("GROUP_CONCAT") _ "(" _ expr:Expression() _ ";" _ i("SEPARATOR") _ "=" _ s:String() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::GroupConcat { separator: Some(s) }, expr, distinct: true } } / + i("GROUP_CONCAT") _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::GroupConcat { separator: None }, expr, distinct: false } } / + name:iri() _ "(" _ i("DISTINCT") _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Custom(name), expr, distinct: true } } / + name:iri() _ "(" _ expr:Expression() _ ")" { AggregateExpression::FunctionCall { name: AggregateFunction::Custom(name), expr, distinct: false } } + + rule iriOrFunction() -> Expression = i: iri() _ a: ArgList()? { + match a { + Some(a) => Expression::FunctionCall(Function::Custom(i), a), + None => i.into() + } + } + + rule RDFLiteral() -> Literal = + value:String() _ "^^" _ datatype:iri() { Literal::new_typed_literal(value, datatype) } / + value:String() _ language:LANGTAG() { Literal::new_language_tagged_literal_unchecked(value, language.into_inner()) } / + value:String() { Literal::new_simple_literal(value) } + + rule NumericLiteral() -> Literal = NumericLiteralUnsigned() / NumericLiteralPositive() / NumericLiteralNegative() + + rule NumericLiteralUnsigned() -> Literal = + d:$(DOUBLE()) { Literal::new_typed_literal(d, xsd::DOUBLE) } / + d:$(DECIMAL()) { Literal::new_typed_literal(d, xsd::DECIMAL) } / + i:$(INTEGER()) { Literal::new_typed_literal(i, xsd::INTEGER) } + + rule NumericLiteralPositive() -> Literal = + d:$(DOUBLE_POSITIVE()) { Literal::new_typed_literal(d, xsd::DOUBLE) } / + d:$(DECIMAL_POSITIVE()) { Literal::new_typed_literal(d, xsd::DECIMAL) } / + i:$(INTEGER_POSITIVE()) { Literal::new_typed_literal(i, xsd::INTEGER) } + + + rule NumericLiteralNegative() -> Literal = + d:$(DOUBLE_NEGATIVE()) { Literal::new_typed_literal(d, xsd::DOUBLE) } / + d:$(DECIMAL_NEGATIVE()) { Literal::new_typed_literal(d, xsd::DECIMAL) } / + i:$(INTEGER_NEGATIVE()) { Literal::new_typed_literal(i, xsd::INTEGER) } + + rule BooleanLiteral() -> Literal = + "true" { Literal::new_typed_literal("true", xsd::BOOLEAN) } / + "false" { Literal::new_typed_literal("false", xsd::BOOLEAN) } + + rule String() -> String = STRING_LITERAL_LONG1() / STRING_LITERAL_LONG2() / STRING_LITERAL1() / STRING_LITERAL2() + + rule iri() -> NamedNode = i:(IRIREF() / PrefixedName()) { + NamedNode::from(i) + } + + rule PrefixedName() -> Iri = PNAME_LN() / + ns:PNAME_NS() {? 
if let Some(iri) = state.namespaces.get(ns).cloned() { + Iri::parse(iri).map_err(|_| "IRI parsing failed") + } else { + Err("Prefix not found") + } } + + rule BlankNode() -> BlankNode = id:BLANK_NODE_LABEL() {? + let node = BlankNode::new_unchecked(id); + if state.used_bnodes.contains(&node) { + Err("Already used blank node id") + } else { + state.currently_used_bnodes.insert(node.clone()); + Ok(node) + } + } / ANON() { BlankNode::default() } + + rule IRIREF() -> Iri = "<" i:$((!['>'] [_])*) ">" {? + state.parse_iri(unescape_iriref(i)?).map_err(|_| "IRI parsing failed") + } + + rule PNAME_NS() -> &'input str = ns:$(PN_PREFIX()?) ":" { + ns + } + + rule PNAME_LN() -> Iri = ns:PNAME_NS() local:$(PN_LOCAL()) {? + if let Some(base) = state.namespaces.get(ns) { + let mut iri = String::with_capacity(base.len() + local.len()); + iri.push_str(base); + for chunk in local.split('\\') { // We remove \ + iri.push_str(chunk); + } + Iri::parse(iri).map_err(|_| "IRI parsing failed") + } else { + Err("Prefix not found") + } + } + + rule BLANK_NODE_LABEL() -> &'input str = "_:" b:$((['0'..='9'] / PN_CHARS_U()) PN_CHARS()* ("."+ PN_CHARS()+)*) { + b + } + + rule VAR1() -> &'input str = "?" v:$(VARNAME()) { v } + + rule VAR2() -> &'input str = "$" v:$(VARNAME()) { v } + + rule LANGTAG() -> LanguageTag = "@" l:$(['a' ..= 'z' | 'A' ..= 'Z']+ ("-" ['a' ..= 'z' | 'A' ..= 'Z' | '0' ..= '9']+)*) {? + LanguageTag::parse(l.to_ascii_lowercase()).map_err(|_| "language tag parsing failed") + } + + rule INTEGER() = ['0'..='9']+ + + rule DECIMAL() = ['0'..='9']* "." ['0'..='9']+ + + rule DOUBLE() = (['0'..='9']+ "." ['0'..='9']* / "." ['0'..='9']+ / ['0'..='9']+) EXPONENT() + + rule INTEGER_POSITIVE() = "+" _ INTEGER() + + rule DECIMAL_POSITIVE() = "+" _ DECIMAL() + + rule DOUBLE_POSITIVE() = "+" _ DOUBLE() + + rule INTEGER_NEGATIVE() = "-" _ INTEGER() + + rule DECIMAL_NEGATIVE() = "-" _ DECIMAL() + + rule DOUBLE_NEGATIVE() = "-" _ DOUBLE() + + rule EXPONENT() = ['e' | 'E'] ['+' | '-']? ['0'..='9']+ + + rule STRING_LITERAL1() -> String = "'" l:$((STRING_LITERAL1_simple_char() / ECHAR() / UCHAR())*) "'" {? + unescape_string(l) + } + rule STRING_LITERAL1_simple_char() = !['\u{27}' | '\u{5C}' | '\u{0A}' | '\u{0D}'] [_] + + + rule STRING_LITERAL2() -> String = "\"" l:$((STRING_LITERAL2_simple_char() / ECHAR() / UCHAR())*) "\"" {? + unescape_string(l) + } + rule STRING_LITERAL2_simple_char() = !['\u{22}' | '\u{5C}' | '\u{0A}' | '\u{0D}'] [_] + + rule STRING_LITERAL_LONG1() -> String = "'''" l:$(STRING_LITERAL_LONG1_inner()*) "'''" {? + unescape_string(l) + } + rule STRING_LITERAL_LONG1_inner() = ("''" / "'")? (STRING_LITERAL_LONG1_simple_char() / ECHAR() / UCHAR()) + rule STRING_LITERAL_LONG1_simple_char() = !['\'' | '\\'] [_] + + rule STRING_LITERAL_LONG2() -> String = "\"\"\"" l:$(STRING_LITERAL_LONG2_inner()*) "\"\"\"" {? + unescape_string(l) + } + rule STRING_LITERAL_LONG2_inner() = ("\"\"" / "\"")? (STRING_LITERAL_LONG2_simple_char() / ECHAR() / UCHAR()) + rule STRING_LITERAL_LONG2_simple_char() = !['"' | '\\'] [_] + + rule UCHAR() = "\\u" HEX() HEX() HEX() HEX() / "\\U" HEX() HEX() HEX() HEX() HEX() HEX() HEX() HEX() + + rule ECHAR() = "\\" ['t' | 'b' | 'n' | 'r' | 'f' | '"' |'\'' | '\\'] + + rule NIL() = "(" WS()* ")" + + rule WS() = quiet! 
{ ['\u{20}' | '\u{09}' | '\u{0D}' | '\u{0A}'] }
+
+        rule ANON() = "[" WS()* "]"
+
+        rule PN_CHARS_BASE() = ['A' ..= 'Z' | 'a' ..= 'z' | '\u{00C0}'..='\u{00D6}' | '\u{00D8}'..='\u{00F6}' | '\u{00F8}'..='\u{02FF}' | '\u{0370}'..='\u{037D}' | '\u{037F}'..='\u{1FFF}' | '\u{200C}'..='\u{200D}' | '\u{2070}'..='\u{218F}' | '\u{2C00}'..='\u{2FEF}' | '\u{3001}'..='\u{D7FF}' | '\u{F900}'..='\u{FDCF}' | '\u{FDF0}'..='\u{FFFD}']
+
+        rule PN_CHARS_U() = ['_'] / PN_CHARS_BASE()
+
+        rule VARNAME() = (['0'..='9'] / PN_CHARS_U()) (['0' ..= '9' | '\u{00B7}' | '\u{0300}'..='\u{036F}' | '\u{203F}'..='\u{2040}'] / PN_CHARS_U())*
+
+        rule PN_CHARS() = ['-' | '0' ..= '9' | '\u{00B7}' | '\u{0300}'..='\u{036F}' | '\u{203F}'..='\u{2040}'] / PN_CHARS_U()
+
+        rule PN_PREFIX() = PN_CHARS_BASE() PN_CHARS()* ("."+ PN_CHARS()+)*
+
+        rule PN_LOCAL() = (PN_CHARS_U() / [':' | '0'..='9'] / PLX()) (PN_CHARS() / [':'] / PLX())* (['.']+ (PN_CHARS() / [':'] / PLX())+)?
+
+        rule PLX() = PERCENT() / PN_LOCAL_ESC()
+
+        rule PERCENT() = ['%'] HEX() HEX()
+
+        rule HEX() = ['0' ..= '9' | 'A' ..= 'F' | 'a' ..= 'f']
+
+        rule PN_LOCAL_ESC() = ['\\'] ['_' | '~' | '.' | '-' | '!' | '$' | '&' | '\'' | '(' | ')' | '*' | '+' | ',' | ';' | '=' | '/' | '?' | '#' | '@' | '%'] //TODO: added '/' to make tests pass but is it valid?
+
+        //space
+        rule _() = quiet! { ([' ' | '\t' | '\n' | '\r'] / comment())* }
+
+        //comment
+        rule comment() = quiet! { ['#'] (!['\r' | '\n'] [_])* }
+
+        rule i(literal: &'static str) = input: $([_]*<{literal.len()}>) {?
+            if input.eq_ignore_ascii_case(literal) {
+                Ok(())
+            } else {
+                Err(literal)
+            }
+        }
+    }
+}
diff --git a/ng-oxigraph/src/spargebra/query.rs b/ng-oxigraph/src/spargebra/query.rs
new file mode 100644
index 0000000..0e9d1c6
--- /dev/null
+++ b/ng-oxigraph/src/spargebra/query.rs
@@ -0,0 +1,300 @@
+use crate::spargebra::algebra::*;
+use crate::spargebra::parser::{parse_query, SparqlSyntaxError};
+use crate::spargebra::term::*;
+use oxiri::Iri;
+use std::fmt;
+use std::str::FromStr;
+
+/// A parsed [SPARQL query](https://www.w3.org/TR/sparql11-query/).
+///
+/// ```
+/// use spargebra::Query;
+///
+/// let query_str = "SELECT ?s ?p ?o WHERE { ?s ?p ?o . }";
+/// let query = Query::parse(query_str, None)?;
+/// assert_eq!(query.to_string(), query_str);
+/// assert_eq!(
+///     query.to_sse(),
+///     "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))"
+/// );
+/// # Ok::<_, spargebra::SparqlSyntaxError>(())
+/// ```
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum Query {
+    /// [SELECT](https://www.w3.org/TR/sparql11-query/#select).
+    Select {
+        /// The [query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset).
+        dataset: Option<QueryDataset>,
+        /// The query selection graph pattern.
+        pattern: GraphPattern,
+        /// The query base IRI.
+        base_iri: Option<Iri<String>>,
+    },
+    /// [CONSTRUCT](https://www.w3.org/TR/sparql11-query/#construct).
+    Construct {
+        /// The query construction template.
+        template: Vec<TriplePattern>,
+        /// The [query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset).
+        dataset: Option<QueryDataset>,
+        /// The query selection graph pattern.
+        pattern: GraphPattern,
+        /// The query base IRI.
+        base_iri: Option<Iri<String>>,
+    },
+    /// [DESCRIBE](https://www.w3.org/TR/sparql11-query/#describe).
+    Describe {
+        /// The [query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset).
+        dataset: Option<QueryDataset>,
+        /// The query selection graph pattern.
+        pattern: GraphPattern,
+        /// The query base IRI.
+        base_iri: Option<Iri<String>>,
+    },
+    /// [ASK](https://www.w3.org/TR/sparql11-query/#ask).
+    Ask {
+        /// The [query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset).
+        dataset: Option<QueryDataset>,
+        /// The query selection graph pattern.
+        pattern: GraphPattern,
+        /// The query base IRI.
+        base_iri: Option<Iri<String>>,
+    },
+}
+
+impl Query {
+    /// Parses a SPARQL query with an optional base IRI to resolve relative IRIs in the query.
+    pub fn parse(query: &str, base_iri: Option<&str>) -> Result<Self, SparqlSyntaxError> {
+        parse_query(query, base_iri)
+    }
+
+    /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
+    pub fn to_sse(&self) -> String {
+        let mut buffer = String::new();
+        self.fmt_sse(&mut buffer).unwrap();
+        buffer
+    }
+
+    /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
+    fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
+        match self {
+            Self::Select {
+                dataset,
+                pattern,
+                base_iri,
+            } => {
+                if let Some(base_iri) = base_iri {
+                    write!(f, "(base <{base_iri}> ")?;
+                }
+                if let Some(dataset) = dataset {
+                    f.write_str("(dataset ")?;
+                    dataset.fmt_sse(f)?;
+                    f.write_str(" ")?;
+                }
+                pattern.fmt_sse(f)?;
+                if dataset.is_some() {
+                    f.write_str(")")?;
+                }
+                if base_iri.is_some() {
+                    f.write_str(")")?;
+                }
+                Ok(())
+            }
+            Self::Construct {
+                template,
+                dataset,
+                pattern,
+                base_iri,
+            } => {
+                if let Some(base_iri) = base_iri {
+                    write!(f, "(base <{base_iri}> ")?;
+                }
+                f.write_str("(construct (")?;
+                for (i, t) in template.iter().enumerate() {
+                    if i > 0 {
+                        f.write_str(" ")?;
+                    }
+                    t.fmt_sse(f)?;
+                }
+                f.write_str(") ")?;
+                if let Some(dataset) = dataset {
+                    f.write_str("(dataset ")?;
+                    dataset.fmt_sse(f)?;
+                    f.write_str(" ")?;
+                }
+                pattern.fmt_sse(f)?;
+                if dataset.is_some() {
+                    f.write_str(")")?;
+                }
+                f.write_str(")")?;
+                if base_iri.is_some() {
+                    f.write_str(")")?;
+                }
+                Ok(())
+            }
+            Self::Describe {
+                dataset,
+                pattern,
+                base_iri,
+            } => {
+                if let Some(base_iri) = base_iri {
+                    write!(f, "(base <{base_iri}> ")?;
+                }
+                f.write_str("(describe ")?;
+                if let Some(dataset) = dataset {
+                    f.write_str("(dataset ")?;
+                    dataset.fmt_sse(f)?;
+                    f.write_str(" ")?;
+                }
+                pattern.fmt_sse(f)?;
+                if dataset.is_some() {
+                    f.write_str(")")?;
+                }
+                f.write_str(")")?;
+                if base_iri.is_some() {
+                    f.write_str(")")?;
+                }
+                Ok(())
+            }
+            Self::Ask {
+                dataset,
+                pattern,
+                base_iri,
+            } => {
+                if let Some(base_iri) = base_iri {
+                    write!(f, "(base <{base_iri}> ")?;
+                }
+                f.write_str("(ask ")?;
+                if let Some(dataset) = dataset {
+                    f.write_str("(dataset ")?;
+                    dataset.fmt_sse(f)?;
+                    f.write_str(" ")?;
+                }
+                pattern.fmt_sse(f)?;
+                if dataset.is_some() {
+                    f.write_str(")")?;
+                }
+                f.write_str(")")?;
+                if base_iri.is_some() {
+                    f.write_str(")")?;
+                }
+                Ok(())
+            }
+        }
+    }
+}
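Editor's note: a minimal sketch of driving this API (not part of the patch; it assumes `Query` is reachable via the crate-internal `spargebra::query` module added here, and uses the `SparqlSyntaxError` path imported at the top of this file):

```rust
use crate::spargebra::parser::SparqlSyntaxError;
use crate::spargebra::query::Query;

// Classify a query string by the algebra variant it parses into.
fn query_kind(query: &str) -> Result<&'static str, SparqlSyntaxError> {
    Ok(match Query::parse(query, None)? {
        Query::Select { .. } => "SELECT",
        Query::Construct { .. } => "CONSTRUCT",
        Query::Describe { .. } => "DESCRIBE",
        Query::Ask { .. } => "ASK",
    })
}
```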
")?; + } + f.write_str("}")?; + if let Some(dataset) = dataset { + dataset.fmt(f)?; + } + write!( + f, + " WHERE {{ {} }}", + SparqlGraphRootPattern { + pattern, + dataset: None + } + ) + } + Self::Describe { + dataset, + pattern, + base_iri, + } => { + if let Some(base_iri) = base_iri { + writeln!(f, "BASE <{}>", base_iri.as_str())?; + } + f.write_str("DESCRIBE *")?; + if let Some(dataset) = dataset { + dataset.fmt(f)?; + } + write!( + f, + " WHERE {{ {} }}", + SparqlGraphRootPattern { + pattern, + dataset: None + } + ) + } + Self::Ask { + dataset, + pattern, + base_iri, + } => { + if let Some(base_iri) = base_iri { + writeln!(f, "BASE <{base_iri}>")?; + } + f.write_str("ASK")?; + if let Some(dataset) = dataset { + dataset.fmt(f)?; + } + write!( + f, + " WHERE {{ {} }}", + SparqlGraphRootPattern { + pattern, + dataset: None + } + ) + } + } + } +} + +impl FromStr for Query { + type Err = SparqlSyntaxError; + + fn from_str(query: &str) -> Result { + Self::parse(query, None) + } +} + +impl<'a> TryFrom<&'a str> for Query { + type Error = SparqlSyntaxError; + + fn try_from(query: &str) -> Result { + Self::from_str(query) + } +} + +impl<'a> TryFrom<&'a String> for Query { + type Error = SparqlSyntaxError; + + fn try_from(query: &String) -> Result { + Self::from_str(query) + } +} diff --git a/ng-oxigraph/src/spargebra/term.rs b/ng-oxigraph/src/spargebra/term.rs new file mode 100644 index 0000000..b8214d1 --- /dev/null +++ b/ng-oxigraph/src/spargebra/term.rs @@ -0,0 +1,1028 @@ +// partial Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// partial Copyright (c) 2018 Oxigraph developers +// All work licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice or not, may not be copied, modified, or distributed except +// according to those terms. + +//! Data structures for [RDF 1.1 Concepts](https://www.w3.org/TR/rdf11-concepts/) like IRI, literal or triples. + +pub use crate::oxrdf::{BlankNode, Literal, NamedNode, Subject, Term, Triple, Variable}; +use std::fmt; +use std::fmt::Write; + +/// The union of [IRIs](https://www.w3.org/TR/rdf11-concepts/#dfn-iri) and [triples](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple). +/// +/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation. 
+/// The union of [IRIs](https://www.w3.org/TR/rdf11-concepts/#dfn-iri) and [triples](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple).
+///
+/// The default string formatter returns an N-Triples, Turtle, and SPARQL compatible representation.
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum GroundSubject {
+    NamedNode(NamedNode),
+    #[cfg(feature = "rdf-star")]
+    Triple(Box<GroundTriple>),
+}
+
+impl fmt::Display for GroundSubject {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::NamedNode(node) => node.fmt(f),
+            #[cfg(feature = "rdf-star")]
+            Self::Triple(triple) => write!(
+                f,
+                "<<{} {} {}>>",
+                triple.subject, triple.predicate, triple.object
+            ),
+        }
+    }
+}
+
+impl From<NamedNode> for GroundSubject {
+    #[inline]
+    fn from(node: NamedNode) -> Self {
+        Self::NamedNode(node)
+    }
+}
+
+#[cfg(feature = "rdf-star")]
+impl From<GroundTriple> for GroundSubject {
+    #[inline]
+    fn from(triple: GroundTriple) -> Self {
+        Self::Triple(Box::new(triple))
+    }
+}
+
+impl TryFrom<Subject> for GroundSubject {
+    type Error = ();
+
+    #[inline]
+    fn try_from(subject: Subject) -> Result<Self, Self::Error> {
+        match subject {
+            Subject::NamedNode(t) => Ok(t.into()),
+            Subject::BlankNode(_) => Err(()),
+            #[cfg(feature = "rdf-star")]
+            Subject::Triple(t) => Ok(GroundTriple::try_from(*t)?.into()),
+        }
+    }
+}
+
+impl TryFrom<GroundTerm> for GroundSubject {
+    type Error = ();
+
+    #[inline]
+    fn try_from(term: GroundTerm) -> Result<Self, Self::Error> {
+        match term {
+            GroundTerm::NamedNode(t) => Ok(t.into()),
+            GroundTerm::Literal(_) => Err(()),
+            #[cfg(feature = "rdf-star")]
+            GroundTerm::Triple(t) => Ok((*t).into()),
+        }
+    }
+}
+
+/// The union of [IRIs](https://www.w3.org/TR/rdf11-concepts/#dfn-iri), [literals](https://www.w3.org/TR/rdf11-concepts/#dfn-literal) and [triples](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple).
+///
+/// The default string formatter returns an N-Triples, Turtle, and SPARQL compatible representation.
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum GroundTerm {
+    NamedNode(NamedNode),
+    Literal(Literal),
+    #[cfg(feature = "rdf-star")]
+    Triple(Box<GroundTriple>),
+}
+
+impl fmt::Display for GroundTerm {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::NamedNode(node) => node.fmt(f),
+            Self::Literal(literal) => literal.fmt(f),
+            #[cfg(feature = "rdf-star")]
+            Self::Triple(triple) => write!(
+                f,
+                "<<{} {} {}>>",
+                triple.subject, triple.predicate, triple.object
+            ),
+        }
+    }
+}
+
+impl From<NamedNode> for GroundTerm {
+    #[inline]
+    fn from(node: NamedNode) -> Self {
+        Self::NamedNode(node)
+    }
+}
+
+impl From<Literal> for GroundTerm {
+    #[inline]
+    fn from(literal: Literal) -> Self {
+        Self::Literal(literal)
+    }
+}
+
+#[cfg(feature = "rdf-star")]
+impl From<GroundTriple> for GroundTerm {
+    #[inline]
+    fn from(triple: GroundTriple) -> Self {
+        Self::Triple(Box::new(triple))
+    }
+}
+
+impl TryFrom<Term> for GroundTerm {
+    type Error = ();
+
+    #[inline]
+    fn try_from(term: Term) -> Result<Self, Self::Error> {
+        match term {
+            Term::NamedNode(t) => Ok(t.into()),
+            Term::BlankNode(_) => Err(()),
+            Term::Literal(t) => Ok(t.into()),
+            #[cfg(feature = "rdf-star")]
+            Term::Triple(t) => Ok(GroundTriple::try_from(*t)?.into()),
+        }
+    }
+}
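The `TryFrom` conversions above return `Err(())` exactly when the value cannot be grounded; a quick editor's sketch (not part of the patch):

```rust
use crate::oxrdf::{BlankNode, Literal};
use crate::spargebra::term::{GroundTerm, Term};

// Literals convert; blank nodes have no ground counterpart.
let literal: Term = Literal::new_simple_literal("hello").into();
assert!(GroundTerm::try_from(literal).is_ok());

let bnode: Term = BlankNode::default().into();
assert!(GroundTerm::try_from(bnode).is_err());
```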
+/// An [RDF triple](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple) without blank nodes.
+///
+/// The default string formatter returns an N-Quads representation.
+///
+/// ```
+/// use spargebra::term::{GroundTriple, NamedNode};
+///
+/// assert_eq!(
+///     "<http://example.com/s> <http://example.com/p> <http://example.com/o>",
+///     GroundTriple {
+///         subject: NamedNode::new("http://example.com/s")?.into(),
+///         predicate: NamedNode::new("http://example.com/p")?,
+///         object: NamedNode::new("http://example.com/o")?.into(),
+///     }
+///     .to_string()
+/// );
+/// # Result::<_,oxrdf::IriParseError>::Ok(())
+/// ```
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct GroundTriple {
+    pub subject: GroundSubject,
+    pub predicate: NamedNode,
+    pub object: GroundTerm,
+}
+
+impl fmt::Display for GroundTriple {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{} {} {}", self.subject, self.predicate, self.object)
+    }
+}
+
+impl TryFrom<Triple> for GroundTriple {
+    type Error = ();
+
+    #[inline]
+    fn try_from(triple: Triple) -> Result<Self, Self::Error> {
+        Ok(Self {
+            subject: triple.subject.try_into()?,
+            predicate: triple.predicate,
+            object: triple.object.try_into()?,
+        })
+    }
+}
+
+/// A possible graph name.
+///
+/// It is the union of [IRIs](https://www.w3.org/TR/rdf11-concepts/#dfn-iri) and the [default graph name](https://www.w3.org/TR/rdf11-concepts/#dfn-default-graph).
+#[derive(Eq, PartialEq, Debug, Clone, Hash, Default)]
+pub enum GraphName {
+    NamedNode(NamedNode),
+    #[default]
+    DefaultGraph,
+}
+
+impl GraphName {
+    // pub(crate) fn is_default_graph(&self) -> bool {
+    //     match self {
+    //         GraphName::DefaultGraph => true,
+    //         _ => false,
+    //     }
+    // }
+    /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
+    pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result {
+        match self {
+            Self::NamedNode(node) => write!(f, "{node}"),
+            Self::DefaultGraph => f.write_str("default"),
+        }
+    }
+}
+
+impl fmt::Display for GraphName {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::NamedNode(node) => node.fmt(f),
+            Self::DefaultGraph => f.write_str("DEFAULT"),
+        }
+    }
+}
+
+impl From<NamedNode> for GraphName {
+    #[inline]
+    fn from(node: NamedNode) -> Self {
+        Self::NamedNode(node)
+    }
+}
+
+impl TryFrom<GraphNamePattern> for GraphName {
+    type Error = ();
+
+    #[inline]
+    fn try_from(pattern: GraphNamePattern) -> Result<Self, Self::Error> {
+        match pattern {
+            GraphNamePattern::NamedNode(t) => Ok(t.into()),
+            GraphNamePattern::DefaultGraph => Ok(Self::DefaultGraph),
+            GraphNamePattern::Variable(_) => Err(()),
+        }
+    }
+}
+
+/// An [RDF triple](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple) in an [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset).
+///
+/// The default string formatter returns an N-Quads representation.
+///
+/// ```
+/// use spargebra::term::{NamedNode, Quad};
+///
+/// assert_eq!(
+///     "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g>",
+///     Quad {
+///         subject: NamedNode::new("http://example.com/s")?.into(),
+///         predicate: NamedNode::new("http://example.com/p")?,
+///         object: NamedNode::new("http://example.com/o")?.into(),
+///         graph_name: NamedNode::new("http://example.com/g")?.into(),
+///     }.to_string()
+/// );
+/// # Result::<_,oxrdf::IriParseError>::Ok(())
+/// ```
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct Quad {
+    pub subject: Subject,
+    pub predicate: NamedNode,
+    pub object: Term,
+    pub graph_name: GraphName,
+}
+
+impl Quad {
+    /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
+ pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + if self.graph_name != GraphName::DefaultGraph { + f.write_str("(graph ")?; + self.graph_name.fmt_sse(f)?; + f.write_str(" (")?; + } + write!( + f, + "(triple {} {} {})", + self.subject, self.predicate, self.object + )?; + if self.graph_name != GraphName::DefaultGraph { + f.write_str("))")?; + } + Ok(()) + } +} + +impl fmt::Display for Quad { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.graph_name == GraphName::DefaultGraph { + write!(f, "{} {} {}", self.subject, self.predicate, self.object) + } else { + write!( + f, + "{} {} {} {}", + self.subject, self.predicate, self.object, self.graph_name + ) + } + } +} + +impl TryFrom for Quad { + type Error = (); + + #[inline] + fn try_from(quad: QuadPattern) -> Result { + Ok(Self { + subject: quad.subject.try_into()?, + predicate: quad.predicate.try_into()?, + object: quad.object.try_into()?, + graph_name: quad.graph_name.try_into()?, + }) + } +} + +/// A [RDF triple](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple) in an [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset) without blank nodes. +/// +/// The default string formatter is returning a N-Quads representation. +/// +/// ``` +/// use spargebra::term::{NamedNode, GroundQuad}; +/// +/// assert_eq!( +/// " ", +/// GroundQuad { +/// subject: NamedNode::new("http://example.com/s")?.into(), +/// predicate: NamedNode::new("http://example.com/p")?, +/// object: NamedNode::new("http://example.com/o")?.into(), +/// graph_name: NamedNode::new("http://example.com/g")?.into(), +/// }.to_string() +/// ); +/// # Result::<_,oxrdf::IriParseError>::Ok(()) +/// ``` +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct GroundQuad { + pub subject: GroundSubject, + pub predicate: NamedNode, + pub object: GroundTerm, + pub graph_name: GraphName, +} + +impl GroundQuad { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + if self.graph_name != GraphName::DefaultGraph { + f.write_str("(graph ")?; + self.graph_name.fmt_sse(f)?; + f.write_str(" (")?; + } + write!( + f, + "(triple {} {} {})", + self.subject, self.predicate, self.object + )?; + if self.graph_name != GraphName::DefaultGraph { + f.write_str("))")?; + } + Ok(()) + } +} + +impl fmt::Display for GroundQuad { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.graph_name == GraphName::DefaultGraph { + write!(f, "{} {} {}", self.subject, self.predicate, self.object) + } else { + write!( + f, + "{} {} {} {}", + self.subject, self.predicate, self.object, self.graph_name + ) + } + } +} + +impl TryFrom for GroundQuad { + type Error = (); + + #[inline] + fn try_from(quad: Quad) -> Result { + Ok(Self { + subject: quad.subject.try_into()?, + predicate: quad.predicate, + object: quad.object.try_into()?, + graph_name: quad.graph_name, + }) + } +} + +/// The union of [IRIs](https://www.w3.org/TR/rdf11-concepts/#dfn-iri) and [variables](https://www.w3.org/TR/sparql11-query/#sparqlQueryVariables). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum NamedNodePattern { + NamedNode(NamedNode), + Variable(Variable), +} + +impl NamedNodePattern { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
+ pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + match self { + Self::NamedNode(node) => write!(f, "{node}"), + Self::Variable(var) => write!(f, "{var}"), + } + } +} + +impl fmt::Display for NamedNodePattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NamedNode(node) => node.fmt(f), + Self::Variable(var) => var.fmt(f), + } + } +} + +impl From for NamedNodePattern { + #[inline] + fn from(node: NamedNode) -> Self { + Self::NamedNode(node) + } +} + +impl From for NamedNodePattern { + #[inline] + fn from(var: Variable) -> Self { + Self::Variable(var) + } +} + +impl TryFrom for NamedNode { + type Error = (); + + #[inline] + fn try_from(pattern: NamedNodePattern) -> Result { + match pattern { + NamedNodePattern::NamedNode(t) => Ok(t), + NamedNodePattern::Variable(_) => Err(()), + } + } +} + +/// The union of [terms](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-term) and [variables](https://www.w3.org/TR/sparql11-query/#sparqlQueryVariables). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum TermPattern { + NamedNode(NamedNode), + BlankNode(BlankNode), + Literal(Literal), + #[cfg(feature = "rdf-star")] + Triple(Box), + Variable(Variable), +} + +impl TermPattern { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + match self { + Self::NamedNode(term) => write!(f, "{term}"), + Self::BlankNode(term) => write!(f, "{term}"), + Self::Literal(term) => write!(f, "{term}"), + #[cfg(feature = "rdf-star")] + Self::Triple(triple) => triple.fmt_sse(f), + Self::Variable(var) => write!(f, "{var}"), + } + } +} + +impl fmt::Display for TermPattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NamedNode(term) => term.fmt(f), + Self::BlankNode(term) => term.fmt(f), + Self::Literal(term) => term.fmt(f), + #[cfg(feature = "rdf-star")] + Self::Triple(triple) => write!(f, "<<{triple}>>"), + Self::Variable(var) => var.fmt(f), + } + } +} + +impl From for TermPattern { + #[inline] + fn from(node: NamedNode) -> Self { + Self::NamedNode(node) + } +} + +impl From for TermPattern { + #[inline] + fn from(node: BlankNode) -> Self { + Self::BlankNode(node) + } +} + +impl From for TermPattern { + #[inline] + fn from(literal: Literal) -> Self { + Self::Literal(literal) + } +} + +#[cfg(feature = "rdf-star")] +impl From for TermPattern { + #[inline] + fn from(triple: TriplePattern) -> Self { + Self::Triple(Box::new(triple)) + } +} + +impl From for TermPattern { + fn from(var: Variable) -> Self { + Self::Variable(var) + } +} + +impl From for TermPattern { + #[inline] + fn from(subject: Subject) -> Self { + match subject { + Subject::NamedNode(node) => node.into(), + Subject::BlankNode(node) => node.into(), + #[cfg(feature = "rdf-star")] + Subject::Triple(t) => TriplePattern::from(*t).into(), + } + } +} + +impl From for TermPattern { + #[inline] + fn from(term: Term) -> Self { + match term { + Term::NamedNode(node) => node.into(), + Term::BlankNode(node) => node.into(), + Term::Literal(literal) => literal.into(), + #[cfg(feature = "rdf-star")] + Term::Triple(t) => TriplePattern::from(*t).into(), + } + } +} + +impl From for TermPattern { + #[inline] + fn from(element: NamedNodePattern) -> Self { + match element { + NamedNodePattern::NamedNode(node) => node.into(), + NamedNodePattern::Variable(var) => var.into(), + } + } +} + +impl From for TermPattern { + #[inline] + fn 
from(element: GroundTermPattern) -> Self { + match element { + GroundTermPattern::NamedNode(node) => node.into(), + GroundTermPattern::Literal(literal) => literal.into(), + #[cfg(feature = "rdf-star")] + GroundTermPattern::Triple(t) => TriplePattern::from(*t).into(), + GroundTermPattern::Variable(variable) => variable.into(), + } + } +} + +impl TryFrom for Subject { + type Error = (); + + #[inline] + fn try_from(term: TermPattern) -> Result { + match term { + TermPattern::NamedNode(t) => Ok(t.into()), + TermPattern::BlankNode(t) => Ok(t.into()), + #[cfg(feature = "rdf-star")] + TermPattern::Triple(t) => Ok(Triple::try_from(*t)?.into()), + TermPattern::Literal(_) | TermPattern::Variable(_) => Err(()), + } + } +} + +impl TryFrom for Term { + type Error = (); + + #[inline] + fn try_from(pattern: TermPattern) -> Result { + match pattern { + TermPattern::NamedNode(t) => Ok(t.into()), + TermPattern::BlankNode(t) => Ok(t.into()), + TermPattern::Literal(t) => Ok(t.into()), + #[cfg(feature = "rdf-star")] + TermPattern::Triple(t) => Ok(Triple::try_from(*t)?.into()), + TermPattern::Variable(_) => Err(()), + } + } +} +/// The union of [terms](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-term) and [variables](https://www.w3.org/TR/sparql11-query/#sparqlQueryVariables) without blank nodes. +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum GroundTermPattern { + NamedNode(NamedNode), + Literal(Literal), + Variable(Variable), + #[cfg(feature = "rdf-star")] + Triple(Box), +} + +impl GroundTermPattern { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + match self { + Self::NamedNode(term) => write!(f, "{term}"), + Self::Literal(term) => write!(f, "{term}"), + Self::Variable(var) => write!(f, "{var}"), + #[cfg(feature = "rdf-star")] + Self::Triple(triple) => triple.fmt_sse(f), + } + } +} + +impl fmt::Display for GroundTermPattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NamedNode(term) => term.fmt(f), + Self::Literal(term) => term.fmt(f), + Self::Variable(var) => var.fmt(f), + #[cfg(feature = "rdf-star")] + Self::Triple(triple) => write!(f, "<<{triple}>>"), + } + } +} + +impl From for GroundTermPattern { + #[inline] + fn from(node: NamedNode) -> Self { + Self::NamedNode(node) + } +} + +impl From for GroundTermPattern { + #[inline] + fn from(literal: Literal) -> Self { + Self::Literal(literal) + } +} + +#[cfg(feature = "rdf-star")] +impl From for GroundTermPattern { + #[inline] + fn from(triple: GroundTriplePattern) -> Self { + Self::Triple(Box::new(triple)) + } +} + +impl From for GroundTermPattern { + #[inline] + fn from(var: Variable) -> Self { + Self::Variable(var) + } +} + +impl From for GroundTermPattern { + #[inline] + fn from(term: GroundSubject) -> Self { + match term { + GroundSubject::NamedNode(node) => node.into(), + #[cfg(feature = "rdf-star")] + GroundSubject::Triple(triple) => GroundTriplePattern::from(*triple).into(), + } + } +} +impl From for GroundTermPattern { + #[inline] + fn from(term: GroundTerm) -> Self { + match term { + GroundTerm::NamedNode(node) => node.into(), + GroundTerm::Literal(literal) => literal.into(), + #[cfg(feature = "rdf-star")] + GroundTerm::Triple(triple) => GroundTriplePattern::from(*triple).into(), + } + } +} + +impl From for GroundTermPattern { + #[inline] + fn from(element: NamedNodePattern) -> Self { + match element { + NamedNodePattern::NamedNode(node) => node.into(), + 
NamedNodePattern::Variable(var) => var.into(), + } + } +} + +impl TryFrom for GroundTermPattern { + type Error = (); + + #[inline] + fn try_from(pattern: TermPattern) -> Result { + Ok(match pattern { + TermPattern::NamedNode(named_node) => named_node.into(), + TermPattern::BlankNode(_) => return Err(()), + TermPattern::Literal(literal) => literal.into(), + #[cfg(feature = "rdf-star")] + TermPattern::Triple(triple) => GroundTriplePattern::try_from(*triple)?.into(), + TermPattern::Variable(variable) => variable.into(), + }) + } +} + +/// The union of [IRIs](https://www.w3.org/TR/rdf11-concepts/#dfn-iri), [default graph name](https://www.w3.org/TR/rdf11-concepts/#dfn-default-graph) and [variables](https://www.w3.org/TR/sparql11-query/#sparqlQueryVariables). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum GraphNamePattern { + NamedNode(NamedNode), + DefaultGraph, + Variable(Variable), +} + +impl GraphNamePattern { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + match self { + Self::NamedNode(node) => write!(f, "{node}"), + Self::DefaultGraph => f.write_str("default"), + Self::Variable(var) => write!(f, "{var}"), + } + } +} + +impl fmt::Display for GraphNamePattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NamedNode(node) => node.fmt(f), + Self::DefaultGraph => f.write_str("DEFAULT"), + Self::Variable(var) => var.fmt(f), + } + } +} + +impl From for GraphNamePattern { + #[inline] + fn from(node: NamedNode) -> Self { + Self::NamedNode(node) + } +} + +impl From for GraphNamePattern { + #[inline] + fn from(var: Variable) -> Self { + Self::Variable(var) + } +} + +impl From for GraphNamePattern { + #[inline] + fn from(graph_name: GraphName) -> Self { + match graph_name { + GraphName::NamedNode(node) => node.into(), + GraphName::DefaultGraph => Self::DefaultGraph, + } + } +} + +impl From for GraphNamePattern { + #[inline] + fn from(graph_name: NamedNodePattern) -> Self { + match graph_name { + NamedNodePattern::NamedNode(node) => node.into(), + NamedNodePattern::Variable(var) => var.into(), + } + } +} + +/// A [triple pattern](https://www.w3.org/TR/sparql11-query/#defn_TriplePattern) +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct TriplePattern { + pub subject: TermPattern, + pub predicate: NamedNodePattern, + pub object: TermPattern, +} + +impl TriplePattern { + pub(crate) fn new( + subject: impl Into, + predicate: impl Into, + object: impl Into, + ) -> Self { + Self { + subject: subject.into(), + predicate: predicate.into(), + object: object.into(), + } + } + + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
+ pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + f.write_str("(triple ")?; + self.subject.fmt_sse(f)?; + f.write_str(" ")?; + self.predicate.fmt_sse(f)?; + f.write_str(" ")?; + self.object.fmt_sse(f)?; + f.write_str(")") + } +} + +impl fmt::Display for TriplePattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} {} {}", self.subject, self.predicate, self.object) + } +} + +impl From for TriplePattern { + #[inline] + fn from(triple: Triple) -> Self { + Self { + subject: triple.subject.into(), + predicate: triple.predicate.into(), + object: triple.object.into(), + } + } +} + +impl From for TriplePattern { + #[inline] + fn from(triple: GroundTriplePattern) -> Self { + Self { + subject: triple.subject.into(), + predicate: triple.predicate, + object: triple.object.into(), + } + } +} + +impl TryFrom for Triple { + type Error = (); + + #[inline] + fn try_from(triple: TriplePattern) -> Result { + Ok(Self { + subject: triple.subject.try_into()?, + predicate: triple.predicate.try_into()?, + object: triple.object.try_into()?, + }) + } +} + +/// A [triple pattern](https://www.w3.org/TR/sparql11-query/#defn_TriplePattern) without blank nodes. +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct GroundTriplePattern { + pub subject: GroundTermPattern, + pub predicate: NamedNodePattern, + pub object: GroundTermPattern, +} + +impl GroundTriplePattern { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + #[allow(dead_code)] + pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + f.write_str("(triple ")?; + self.subject.fmt_sse(f)?; + f.write_str(" ")?; + self.predicate.fmt_sse(f)?; + f.write_str(" ")?; + self.object.fmt_sse(f)?; + f.write_str(")") + } +} + +impl fmt::Display for GroundTriplePattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} {} {}", self.subject, self.predicate, self.object) + } +} + +impl From for GroundTriplePattern { + #[inline] + fn from(triple: GroundTriple) -> Self { + Self { + subject: triple.subject.into(), + predicate: triple.predicate.into(), + object: triple.object.into(), + } + } +} + +impl TryFrom for GroundTriplePattern { + type Error = (); + + #[inline] + fn try_from(triple: TriplePattern) -> Result { + Ok(Self { + subject: triple.subject.try_into()?, + predicate: triple.predicate, + object: triple.object.try_into()?, + }) + } +} + +/// A [triple pattern](https://www.w3.org/TR/sparql11-query/#defn_TriplePattern) in a specific graph +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct QuadPattern { + pub subject: TermPattern, + pub predicate: NamedNodePattern, + pub object: TermPattern, + pub graph_name: GraphNamePattern, +} + +impl QuadPattern { + pub(crate) fn new( + subject: impl Into, + predicate: impl Into, + object: impl Into, + graph_name: impl Into, + ) -> Self { + Self { + subject: subject.into(), + predicate: predicate.into(), + object: object.into(), + graph_name: graph_name.into(), + } + } + + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
+ pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + if self.graph_name != GraphNamePattern::DefaultGraph { + f.write_str("(graph ")?; + self.graph_name.fmt_sse(f)?; + f.write_str(" (")?; + } + f.write_str("(triple ")?; + self.subject.fmt_sse(f)?; + f.write_str(" ")?; + self.predicate.fmt_sse(f)?; + f.write_str(" ")?; + self.object.fmt_sse(f)?; + f.write_str(")")?; + if self.graph_name != GraphNamePattern::DefaultGraph { + f.write_str("))")?; + } + Ok(()) + } +} + +impl fmt::Display for QuadPattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.graph_name == GraphNamePattern::DefaultGraph { + write!(f, "{} {} {}", self.subject, self.predicate, self.object) + } else { + write!( + f, + "GRAPH {} {{ {} {} {} }}", + self.graph_name, self.subject, self.predicate, self.object + ) + } + } +} + +/// A [triple pattern](https://www.w3.org/TR/sparql11-query/#defn_TriplePattern) in a specific graph without blank nodes. +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct GroundQuadPattern { + pub subject: GroundTermPattern, + pub predicate: NamedNodePattern, + pub object: GroundTermPattern, + pub graph_name: GraphNamePattern, +} + +impl GroundQuadPattern { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { + if self.graph_name != GraphNamePattern::DefaultGraph { + f.write_str("(graph ")?; + self.graph_name.fmt_sse(f)?; + f.write_str(" (")?; + } + f.write_str("(triple ")?; + self.subject.fmt_sse(f)?; + f.write_str(" ")?; + self.predicate.fmt_sse(f)?; + f.write_str(" ")?; + self.object.fmt_sse(f)?; + f.write_str(")")?; + if self.graph_name != GraphNamePattern::DefaultGraph { + f.write_str("))")?; + } + Ok(()) + } +} + +impl fmt::Display for GroundQuadPattern { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.graph_name == GraphNamePattern::DefaultGraph { + write!(f, "{} {} {}", self.subject, self.predicate, self.object) + } else { + write!( + f, + "GRAPH {} {{ {} {} {} }}", + self.graph_name, self.subject, self.predicate, self.object + ) + } + } +} + +impl TryFrom for GroundQuadPattern { + type Error = (); + + #[inline] + fn try_from(pattern: QuadPattern) -> Result { + Ok(Self { + subject: pattern.subject.try_into()?, + predicate: pattern.predicate, + object: pattern.object.try_into()?, + graph_name: pattern.graph_name, + }) + } +} diff --git a/ng-oxigraph/src/spargebra/update.rs b/ng-oxigraph/src/spargebra/update.rs new file mode 100644 index 0000000..c13887d --- /dev/null +++ b/ng-oxigraph/src/spargebra/update.rs @@ -0,0 +1,344 @@ +use crate::spargebra::algebra::*; +use crate::spargebra::parser::{parse_update, SparqlSyntaxError}; +use crate::spargebra::term::*; +use oxiri::Iri; +use std::fmt; +use std::str::FromStr; + +/// A parsed [SPARQL update](https://www.w3.org/TR/sparql11-update/). +/// +/// ``` +/// use spargebra::Update; +/// +/// let update_str = "CLEAR ALL ;"; +/// let update = Update::parse(update_str, None)?; +/// assert_eq!(update.to_string().trim(), update_str); +/// assert_eq!(update.to_sse(), "(update (clear all))"); +/// # Ok::<_, spargebra::SparqlSyntaxError>(()) +/// ``` +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub struct Update { + /// The update base IRI. + pub base_iri: Option>, + /// The [update operations](https://www.w3.org/TR/sparql11-update/#formalModelGraphUpdate). 
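+    /// Operations are applied in order; e.g. parsing
+    /// `CLEAR SILENT DEFAULT ; DROP ALL` yields a `Clear` followed by a
+    /// `Drop` (illustrative), and the `Display` impl below writes each
+    /// operation back followed by ` ;`.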
+ pub operations: Vec, +} + +impl Update { + /// Parses a SPARQL update with an optional base IRI to resolve relative IRIs in the query. + pub fn parse(update: &str, base_iri: Option<&str>) -> Result { + parse_update(update, base_iri) + } + + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + pub fn to_sse(&self) -> String { + let mut buffer = String::new(); + self.fmt_sse(&mut buffer).unwrap(); + buffer + } + + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). + fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + if let Some(base_iri) = &self.base_iri { + write!(f, "(base <{base_iri}> ")?; + } + f.write_str("(update")?; + for op in &self.operations { + f.write_str(" ")?; + op.fmt_sse(f)?; + } + f.write_str(")")?; + if self.base_iri.is_some() { + f.write_str(")")?; + } + Ok(()) + } +} + +impl fmt::Display for Update { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(base_iri) = &self.base_iri { + writeln!(f, "BASE <{base_iri}>")?; + } + for update in &self.operations { + writeln!(f, "{update} ;")?; + } + Ok(()) + } +} + +impl FromStr for Update { + type Err = SparqlSyntaxError; + + fn from_str(update: &str) -> Result { + Self::parse(update, None) + } +} + +impl<'a> TryFrom<&'a str> for Update { + type Error = SparqlSyntaxError; + + fn try_from(update: &str) -> Result { + Self::from_str(update) + } +} + +impl<'a> TryFrom<&'a String> for Update { + type Error = SparqlSyntaxError; + + fn try_from(update: &String) -> Result { + Self::from_str(update) + } +} + +/// The [graph update operations](https://www.w3.org/TR/sparql11-update/#formalModelGraphUpdate). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum GraphUpdateOperation { + /// [insert data](https://www.w3.org/TR/sparql11-update/#defn_insertDataOperation). + InsertData { data: Vec }, + /// [delete data](https://www.w3.org/TR/sparql11-update/#defn_deleteDataOperation). + DeleteData { data: Vec }, + /// [delete insert](https://www.w3.org/TR/sparql11-update/#defn_deleteInsertOperation). + DeleteInsert { + delete: Vec, + insert: Vec, + using: Option, + pattern: Box, + }, + /// [load](https://www.w3.org/TR/sparql11-update/#defn_loadOperation). + Load { + silent: bool, + source: NamedNode, + destination: GraphName, + }, + /// [clear](https://www.w3.org/TR/sparql11-update/#defn_clearOperation). + Clear { silent: bool, graph: GraphTarget }, + /// [create](https://www.w3.org/TR/sparql11-update/#defn_createOperation). + Create { silent: bool, graph: NamedNode }, + /// [drop](https://www.w3.org/TR/sparql11-update/#defn_dropOperation). + Drop { silent: bool, graph: GraphTarget }, +} + +impl GraphUpdateOperation { + /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). 
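+    /// For example, `CLEAR SILENT GRAPH <http://example.com/g>` should render
+    /// as `(clear silent <http://example.com/g>)` (illustrative; assuming
+    /// `GraphTarget` prints named graphs as bare IRIs, cf. the
+    /// `(update (clear all))` example on [`Update`]).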
+ fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { + match self { + Self::InsertData { data } => { + f.write_str("(insertData (")?; + for (i, t) in data.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + t.fmt_sse(f)?; + } + f.write_str("))") + } + Self::DeleteData { data } => { + f.write_str("(deleteData (")?; + for (i, t) in data.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + t.fmt_sse(f)?; + } + f.write_str("))") + } + Self::DeleteInsert { + delete, + insert, + using, + pattern, + } => { + f.write_str("(modify ")?; + if let Some(using) = using { + f.write_str(" (using ")?; + using.fmt_sse(f)?; + f.write_str(" ")?; + pattern.fmt_sse(f)?; + f.write_str(")")?; + } else { + pattern.fmt_sse(f)?; + } + if !delete.is_empty() { + f.write_str(" (delete (")?; + for (i, t) in delete.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + t.fmt_sse(f)?; + } + f.write_str("))")?; + } + if !insert.is_empty() { + f.write_str(" (insert (")?; + for (i, t) in insert.iter().enumerate() { + if i > 0 { + f.write_str(" ")?; + } + t.fmt_sse(f)?; + } + f.write_str("))")?; + } + f.write_str(")") + } + Self::Load { + silent, + source, + destination, + } => { + f.write_str("(load ")?; + if *silent { + f.write_str("silent ")?; + } + write!(f, "{source} ")?; + destination.fmt_sse(f)?; + f.write_str(")") + } + Self::Clear { silent, graph } => { + f.write_str("(clear ")?; + if *silent { + f.write_str("silent ")?; + } + graph.fmt_sse(f)?; + f.write_str(")") + } + Self::Create { silent, graph } => { + f.write_str("(create ")?; + if *silent { + f.write_str("silent ")?; + } + write!(f, "{graph})") + } + Self::Drop { silent, graph } => { + f.write_str("(drop ")?; + if *silent { + f.write_str("silent ")?; + } + graph.fmt_sse(f)?; + f.write_str(")") + } + } + } +} + +impl fmt::Display for GraphUpdateOperation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::InsertData { data } => { + writeln!(f, "INSERT DATA {{")?; + write_quads(data, f)?; + f.write_str("}") + } + Self::DeleteData { data } => { + writeln!(f, "DELETE DATA {{")?; + write_ground_quads(data, f)?; + f.write_str("}") + } + Self::DeleteInsert { + delete, + insert, + using, + pattern, + } => { + if !delete.is_empty() { + writeln!(f, "DELETE {{")?; + for quad in delete { + writeln!(f, "\t{quad} .")?; + } + writeln!(f, "}}")?; + } + if !insert.is_empty() { + writeln!(f, "INSERT {{")?; + for quad in insert { + writeln!(f, "\t{quad} .")?; + } + writeln!(f, "}}")?; + } + if let Some(using) = using { + for g in &using.default { + writeln!(f, "USING {g}")?; + } + if let Some(named) = &using.named { + for g in named { + writeln!(f, "USING NAMED {g}")?; + } + } + } + write!( + f, + "WHERE {{ {} }}", + SparqlGraphRootPattern { + pattern, + dataset: None + } + ) + } + Self::Load { + silent, + source, + destination, + } => { + f.write_str("LOAD ")?; + if *silent { + f.write_str("SILENT ")?; + } + write!(f, "{source}")?; + if destination != &GraphName::DefaultGraph { + write!(f, " INTO GRAPH {destination}")?; + } + Ok(()) + } + Self::Clear { silent, graph } => { + f.write_str("CLEAR ")?; + if *silent { + f.write_str("SILENT ")?; + } + write!(f, "{graph}") + } + Self::Create { silent, graph } => { + f.write_str("CREATE ")?; + if *silent { + f.write_str("SILENT ")?; + } + write!(f, "GRAPH {graph}") + } + Self::Drop { silent, graph } => { + f.write_str("DROP ")?; + if *silent { + f.write_str("SILENT ")?; + } + write!(f, "{graph}") + } + } + } +} + +fn write_quads(quads: &[Quad], f: &mut 
fmt::Formatter<'_>) -> fmt::Result {
+    for quad in quads {
+        if quad.graph_name == GraphName::DefaultGraph {
+            writeln!(f, "\t{} {} {} .", quad.subject, quad.predicate, quad.object)?;
+        } else {
+            writeln!(
+                f,
+                "\tGRAPH {} {{ {} {} {} }}",
+                quad.graph_name, quad.subject, quad.predicate, quad.object
+            )?;
+        }
+    }
+    Ok(())
+}
+
+fn write_ground_quads(quads: &[GroundQuad], f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    for quad in quads {
+        if quad.graph_name == GraphName::DefaultGraph {
+            writeln!(f, "\t{} {} {} .", quad.subject, quad.predicate, quad.object)?;
+        } else {
+            writeln!(
+                f,
+                "\tGRAPH {} {{ {} {} {} }}",
+                quad.graph_name, quad.subject, quad.predicate, quad.object
+            )?;
+        }
+    }
+    Ok(())
+}
diff --git a/ng-oxigraph/src/sparopt/README.md b/ng-oxigraph/src/sparopt/README.md
new file mode 100644
index 0000000..1a6e1c6
--- /dev/null
+++ b/ng-oxigraph/src/sparopt/README.md
@@ -0,0 +1,33 @@
+sparopt
+=======
+
+[![Latest Version](https://img.shields.io/crates/v/sparopt.svg)](https://crates.io/crates/sparopt)
+[![Released API docs](https://docs.rs/sparopt/badge.svg)](https://docs.rs/sparopt)
+[![Crates.io downloads](https://img.shields.io/crates/d/sparopt)](https://crates.io/crates/sparopt)
+[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions)
+[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community)
+
+sparopt is a work in progress [SPARQL Query](https://www.w3.org/TR/sparql11-query/) optimizer.
+
+It relies on the output of [spargebra](https://crates.io/crates/spargebra).
+
+Support for [SPARQL-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#sparql-star) is also available behind the `rdf-star` feature.
+
+This crate is intended to be a building piece for SPARQL implementations in Rust like [Oxigraph](https://oxigraph.org).
+
+
+## License
+
+This project is licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or
+  `<http://www.apache.org/licenses/LICENSE-2.0>`)
+* MIT license ([LICENSE-MIT](../LICENSE-MIT) or
+  `<https://opensource.org/licenses/MIT>`)
+
+at your option.
+
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
diff --git a/ng-oxigraph/src/sparopt/algebra.rs b/ng-oxigraph/src/sparopt/algebra.rs
new file mode 100644
index 0000000..e35b892
--- /dev/null
+++ b/ng-oxigraph/src/sparopt/algebra.rs
@@ -0,0 +1,1662 @@
+//! [SPARQL 1.1 Query Algebra](https://www.w3.org/TR/sparql11-query/#sparqlQuery) representation.
+
+use crate::oxrdf::vocab::xsd;
+use crate::spargebra::algebra::{
+    AggregateExpression as AlAggregateExpression, AggregateFunction, Expression as AlExpression,
+    GraphPattern as AlGraphPattern, OrderExpression as AlOrderExpression,
+};
+pub use crate::spargebra::algebra::{Function, PropertyPathExpression};
+use crate::spargebra::term::{BlankNode, GroundSubject, TermPattern, TriplePattern};
+pub use crate::spargebra::term::{
+    GroundTerm, GroundTermPattern, Literal, NamedNode, NamedNodePattern, Variable,
+};
+#[cfg(feature = "rdf-star")]
+use crate::spargebra::term::{GroundTriple, GroundTriplePattern};
+use rand::random;
+use std::collections::hash_map::DefaultHasher;
+use std::collections::{HashMap, HashSet};
+use std::hash::{Hash, Hasher};
+use std::ops::{Add, BitAnd, BitOr, Div, Mul, Neg, Not, Sub};
+
+/// An [expression](https://www.w3.org/TR/sparql11-query/#expressions).
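+///
+/// Expression trees are normally assembled through the smart constructors and
+/// operator overloads defined below (several of which fold constants); a
+/// minimal sketch, marked `ignore` because this module is consumed
+/// crate-internally in this patch and the re-export path is an assumption:
+///
+/// ```ignore
+/// use ng_oxigraph::sparopt::algebra::{Expression, Literal, Variable};
+///
+/// // (?x > 10) && BOUND(?y)
+/// let e = Expression::greater(Variable::new("x")?.into(), Literal::from(10).into())
+///     & Expression::Bound(Variable::new("y")?);
+/// ```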
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum Expression {
+    NamedNode(NamedNode),
+    Literal(Literal),
+    Variable(Variable),
+    /// [Logical-or](https://www.w3.org/TR/sparql11-query/#func-logical-or).
+    Or(Vec<Self>),
+    /// [Logical-and](https://www.w3.org/TR/sparql11-query/#func-logical-and).
+    And(Vec<Self>),
+    /// [RDFterm-equal](https://www.w3.org/TR/sparql11-query/#func-RDFterm-equal) and all the XSD equalities.
+    Equal(Box<Self>, Box<Self>),
+    /// [sameTerm](https://www.w3.org/TR/sparql11-query/#func-sameTerm).
+    SameTerm(Box<Self>, Box<Self>),
+    /// [op:numeric-greater-than](https://www.w3.org/TR/xpath-functions-31/#func-numeric-greater-than) and other XSD greater than operators.
+    Greater(Box<Self>, Box<Self>),
+    GreaterOrEqual(Box<Self>, Box<Self>),
+    /// [op:numeric-less-than](https://www.w3.org/TR/xpath-functions-31/#func-numeric-less-than) and other XSD less than operators.
+    Less(Box<Self>, Box<Self>),
+    LessOrEqual(Box<Self>, Box<Self>),
+    /// [op:numeric-add](https://www.w3.org/TR/xpath-functions-31/#func-numeric-add) and other XSD additions.
+    Add(Box<Self>, Box<Self>),
+    /// [op:numeric-subtract](https://www.w3.org/TR/xpath-functions-31/#func-numeric-subtract) and other XSD subtractions.
+    Subtract(Box<Self>, Box<Self>),
+    /// [op:numeric-multiply](https://www.w3.org/TR/xpath-functions-31/#func-numeric-multiply) and other XSD multiplications.
+    Multiply(Box<Self>, Box<Self>),
+    /// [op:numeric-divide](https://www.w3.org/TR/xpath-functions-31/#func-numeric-divide) and other XSD divides.
+    Divide(Box<Self>, Box<Self>),
+    /// [op:numeric-unary-plus](https://www.w3.org/TR/xpath-functions-31/#func-numeric-unary-plus) and other XSD unary plus.
+    UnaryPlus(Box<Self>),
+    /// [op:numeric-unary-minus](https://www.w3.org/TR/xpath-functions-31/#func-numeric-unary-minus) and other XSD unary minus.
+    UnaryMinus(Box<Self>),
+    /// [fn:not](https://www.w3.org/TR/xpath-functions-31/#func-not).
+    Not(Box<Self>),
+    /// [EXISTS](https://www.w3.org/TR/sparql11-query/#func-filter-exists).
+    Exists(Box<GraphPattern>),
+    /// [BOUND](https://www.w3.org/TR/sparql11-query/#func-bound).
+    Bound(Variable),
+    /// [IF](https://www.w3.org/TR/sparql11-query/#func-if).
+    If(Box<Self>, Box<Self>, Box<Self>),
+    /// [COALESCE](https://www.w3.org/TR/sparql11-query/#func-coalesce).
+    Coalesce(Vec<Self>),
+    /// A regular function call.
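+    /// e.g. `STRLEN(?s)` becomes
+    /// `FunctionCall(Function::StrLen, vec![Expression::Variable(s)])`
+    /// (illustrative pseudo-code).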
+ FunctionCall(Function, Vec), +} + +impl Expression { + pub fn or_all(args: impl IntoIterator) -> Self { + let args = args.into_iter(); + let mut all = Vec::with_capacity(args.size_hint().0); + for arg in args { + if let Some(ebv) = arg.effective_boolean_value() { + if ebv { + return true.into(); + } + // We ignore false values + } else if let Self::Or(args) = arg { + all.extend(args); + } else { + all.push(arg); + } + } + match all.len() { + 0 => false.into(), + 1 => { + let result = all.pop().unwrap(); + if result.returns_boolean() { + result // It's already casted to boolean + } else { + Self::And(vec![result]) + } + } + _ => Self::Or(order_vec(all)), + } + } + + pub fn and_all(args: impl IntoIterator) -> Self { + let args = args.into_iter(); + let mut all = Vec::with_capacity(args.size_hint().0); + for arg in args { + if let Some(ebv) = arg.effective_boolean_value() { + if !ebv { + return false.into(); + } + // We ignore true values + } else if let Self::And(args) = arg { + all.extend(args); + } else { + all.push(arg); + } + } + match all.len() { + 0 => true.into(), + 1 => { + let result = all.pop().unwrap(); + if result.returns_boolean() { + result + } else { + Self::And(vec![result]) + } + } + _ => Self::And(order_vec(all)), + } + } + + pub fn equal(left: Self, right: Self) -> Self { + match (left, right) { + (Self::NamedNode(left), Self::NamedNode(right)) => (left == right).into(), + (Self::Literal(left), Self::Literal(right)) if left == right => true.into(), + (left, right) => { + let (left, right) = order_pair(left, right); + Self::Equal(Box::new(left), Box::new(right)) + } + } + } + + pub fn same_term(left: Self, right: Self) -> Self { + match (left, right) { + (Self::NamedNode(left), Self::NamedNode(right)) => (left == right).into(), + (Self::Literal(left), Self::Literal(right)) if left == right => true.into(), + (left, right) => { + let (left, right) = order_pair(left, right); + Self::SameTerm(Box::new(left), Box::new(right)) + } + } + } + + pub fn greater(left: Self, right: Self) -> Self { + Self::Greater(Box::new(left), Box::new(right)) + } + + pub fn greater_or_equal(left: Self, right: Self) -> Self { + Self::GreaterOrEqual(Box::new(left), Box::new(right)) + } + + pub fn less(left: Self, right: Self) -> Self { + Self::Less(Box::new(left), Box::new(right)) + } + + pub fn less_or_equal(left: Self, right: Self) -> Self { + Self::LessOrEqual(Box::new(left), Box::new(right)) + } + + pub fn unary_plus(inner: Self) -> Self { + Self::UnaryPlus(Box::new(inner)) + } + + pub fn exists(inner: GraphPattern) -> Self { + if inner.is_empty() { + return false.into(); + } + if inner.is_empty_singleton() { + return true.into(); + } + Self::Exists(Box::new(inner)) + } + + pub fn if_cond(cond: Self, then: Self, els: Self) -> Self { + match cond.effective_boolean_value() { + Some(true) => then, + Some(false) => els, + None => Self::If(Box::new(cond), Box::new(then), Box::new(els)), + } + } + + pub fn coalesce(args: Vec) -> Self { + Self::Coalesce(args) + } + + pub fn call(name: Function, args: Vec) -> Self { + Self::FunctionCall(name, args) + } + + pub fn effective_boolean_value(&self) -> Option { + if let Self::Literal(literal) = self { + match literal.datatype() { + xsd::BOOLEAN => match literal.value() { + "true" | "1" => Some(true), + "false" | "0" => Some(false), + _ => None, // TODO + }, + xsd::STRING => Some(!literal.value().is_empty()), + _ => None, // TODO + } + } else { + None + } + } + + pub fn used_variables(&self) -> HashSet<&Variable> { + let mut variables = HashSet::new(); + 
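+        // Single pass over the whole expression tree; variables occurring
+        // inside EXISTS sub-patterns are collected too (see
+        // lookup_used_variables below).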
self.lookup_used_variables(&mut |v| { + variables.insert(v); + }); + variables + } + + pub fn lookup_used_variables<'a>(&'a self, callback: &mut impl FnMut(&'a Variable)) { + match self { + Self::NamedNode(_) | Self::Literal(_) => {} + Self::Variable(v) | Self::Bound(v) => callback(v), + Self::Or(inner) + | Self::And(inner) + | Self::Coalesce(inner) + | Self::FunctionCall(_, inner) => { + for i in inner { + i.lookup_used_variables(callback); + } + } + Self::Equal(a, b) + | Self::SameTerm(a, b) + | Self::Greater(a, b) + | Self::GreaterOrEqual(a, b) + | Self::Less(a, b) + | Self::LessOrEqual(a, b) + | Self::Add(a, b) + | Self::Subtract(a, b) + | Self::Multiply(a, b) + | Self::Divide(a, b) => { + a.lookup_used_variables(callback); + b.lookup_used_variables(callback); + } + Self::UnaryPlus(i) | Self::UnaryMinus(i) | Self::Not(i) => { + i.lookup_used_variables(callback) + } + Self::Exists(e) => e.lookup_used_variables(callback), + Self::If(a, b, c) => { + a.lookup_used_variables(callback); + b.lookup_used_variables(callback); + c.lookup_used_variables(callback); + } + } + } + + fn from_sparql_algebra( + expression: &AlExpression, + graph_name: Option<&NamedNodePattern>, + ) -> Self { + match expression { + AlExpression::NamedNode(node) => Self::NamedNode(node.clone()), + AlExpression::Literal(literal) => Self::Literal(literal.clone()), + AlExpression::Variable(variable) => Self::Variable(variable.clone()), + AlExpression::Or(left, right) => Self::Or(vec![ + Self::from_sparql_algebra(left, graph_name), + Self::from_sparql_algebra(right, graph_name), + ]), + AlExpression::And(left, right) => Self::And(vec![ + Self::from_sparql_algebra(left, graph_name), + Self::from_sparql_algebra(right, graph_name), + ]), + AlExpression::Equal(left, right) => Self::Equal( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::SameTerm(left, right) => Self::SameTerm( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::Greater(left, right) => Self::Greater( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::GreaterOrEqual(left, right) => Self::GreaterOrEqual( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::Less(left, right) => Self::Less( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::LessOrEqual(left, right) => Self::LessOrEqual( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::In(left, right) => { + let left = Self::from_sparql_algebra(left, graph_name); + match right.len() { + 0 => Self::if_cond(left, false.into(), false.into()), + 1 => Self::Equal( + Box::new(left), + Box::new(Self::from_sparql_algebra(&right[0], graph_name)), + ), + _ => Self::Or( + right + .iter() + .map(|e| { + Self::Equal( + Box::new(left.clone()), + Box::new(Self::from_sparql_algebra(e, graph_name)), + ) + }) + .collect(), + ), + } + } + AlExpression::Add(left, right) => Self::Add( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::Subtract(left, right) => Self::Subtract( + Box::new(Self::from_sparql_algebra(left, graph_name)), + 
Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::Multiply(left, right) => Self::Multiply( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::Divide(left, right) => Self::Divide( + Box::new(Self::from_sparql_algebra(left, graph_name)), + Box::new(Self::from_sparql_algebra(right, graph_name)), + ), + AlExpression::UnaryPlus(inner) => { + Self::UnaryPlus(Box::new(Self::from_sparql_algebra(inner, graph_name))) + } + AlExpression::UnaryMinus(inner) => { + Self::UnaryMinus(Box::new(Self::from_sparql_algebra(inner, graph_name))) + } + AlExpression::Not(inner) => { + Self::Not(Box::new(Self::from_sparql_algebra(inner, graph_name))) + } + AlExpression::Exists(inner) => Self::Exists(Box::new( + GraphPattern::from_sparql_algebra(inner, graph_name, &mut HashMap::new()), + )), + AlExpression::Bound(variable) => Self::Bound(variable.clone()), + AlExpression::If(cond, yes, no) => Self::If( + Box::new(Self::from_sparql_algebra(cond, graph_name)), + Box::new(Self::from_sparql_algebra(yes, graph_name)), + Box::new(Self::from_sparql_algebra(no, graph_name)), + ), + AlExpression::Coalesce(inner) => Self::Coalesce( + inner + .iter() + .map(|e| Self::from_sparql_algebra(e, graph_name)) + .collect(), + ), + AlExpression::FunctionCall(name, args) => Self::FunctionCall( + name.clone(), + args.iter() + .map(|e| Self::from_sparql_algebra(e, graph_name)) + .collect(), + ), + } + } + + fn returns_boolean(&self) -> bool { + match self { + Self::Or(_) + | Self::And(_) + | Self::Equal(_, _) + | Self::SameTerm(_, _) + | Self::Greater(_, _) + | Self::GreaterOrEqual(_, _) + | Self::Less(_, _) + | Self::LessOrEqual(_, _) + | Self::Not(_) + | Self::Exists(_) + | Self::Bound(_) + | Self::FunctionCall( + Function::IsBlank | Function::IsIri | Function::IsLiteral | Function::IsNumeric, + _, + ) => true, + #[cfg(feature = "rdf-star")] + Self::FunctionCall(Function::IsTriple, _) => true, + Self::Literal(literal) => literal.datatype() == xsd::BOOLEAN, + Self::If(_, a, b) => a.returns_boolean() && b.returns_boolean(), + _ => false, + } + } +} + +impl From for Expression { + fn from(value: NamedNode) -> Self { + Self::NamedNode(value) + } +} + +impl From for Expression { + fn from(value: Literal) -> Self { + Self::Literal(value) + } +} + +impl From for Expression { + fn from(value: GroundSubject) -> Self { + match value { + GroundSubject::NamedNode(value) => value.into(), + #[cfg(feature = "rdf-star")] + GroundSubject::Triple(value) => (*value).into(), + } + } +} + +impl From for Expression { + fn from(value: GroundTerm) -> Self { + match value { + GroundTerm::NamedNode(value) => value.into(), + GroundTerm::Literal(value) => value.into(), + #[cfg(feature = "rdf-star")] + GroundTerm::Triple(value) => (*value).into(), + } + } +} + +impl From for Expression { + fn from(value: NamedNodePattern) -> Self { + match value { + NamedNodePattern::NamedNode(value) => value.into(), + NamedNodePattern::Variable(variable) => variable.into(), + } + } +} + +impl From for Expression { + fn from(value: GroundTermPattern) -> Self { + match value { + GroundTermPattern::NamedNode(value) => value.into(), + GroundTermPattern::Literal(value) => value.into(), + #[cfg(feature = "rdf-star")] + GroundTermPattern::Triple(value) => (*value).into(), + GroundTermPattern::Variable(variable) => variable.into(), + } + } +} + +#[cfg(feature = "rdf-star")] +impl From for Expression { + fn from(value: GroundTriple) -> Self { + Self::FunctionCall( + 
Function::Triple, + vec![ + value.subject.into(), + value.predicate.into(), + value.object.into(), + ], + ) + } +} + +#[cfg(feature = "rdf-star")] +impl From for Expression { + fn from(value: GroundTriplePattern) -> Self { + Self::FunctionCall( + Function::Triple, + vec![ + value.subject.into(), + value.predicate.into(), + value.object.into(), + ], + ) + } +} + +impl From for Expression { + fn from(value: Variable) -> Self { + Self::Variable(value) + } +} + +impl From for Expression { + fn from(value: bool) -> Self { + Literal::from(value).into() + } +} + +impl From<&Expression> for AlExpression { + fn from(expression: &Expression) -> Self { + match expression { + Expression::NamedNode(node) => Self::NamedNode(node.clone()), + Expression::Literal(literal) => Self::Literal(literal.clone()), + Expression::Variable(variable) => Self::Variable(variable.clone()), + Expression::Or(inner) => inner + .iter() + .map(Into::into) + .reduce(|a, b| Self::Or(Box::new(a), Box::new(b))) + .unwrap_or_else(|| Literal::from(false).into()), + Expression::And(inner) => inner + .iter() + .map(Into::into) + .reduce(|a, b| Self::And(Box::new(a), Box::new(b))) + .unwrap_or_else(|| Literal::from(true).into()), + Expression::Equal(left, right) => Self::Equal( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::SameTerm(left, right) => Self::SameTerm( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::Greater(left, right) => Self::Greater( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::GreaterOrEqual(left, right) => Self::GreaterOrEqual( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::Less(left, right) => Self::Less( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::LessOrEqual(left, right) => Self::LessOrEqual( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::Add(left, right) => Self::Add( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::Subtract(left, right) => Self::Subtract( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::Multiply(left, right) => Self::Multiply( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::Divide(left, right) => Self::Divide( + Box::new(left.as_ref().into()), + Box::new(right.as_ref().into()), + ), + Expression::UnaryPlus(inner) => Self::UnaryPlus(Box::new(inner.as_ref().into())), + Expression::UnaryMinus(inner) => Self::UnaryMinus(Box::new(inner.as_ref().into())), + Expression::Not(inner) => Self::Not(Box::new(inner.as_ref().into())), + Expression::Exists(inner) => Self::Exists(Box::new(inner.as_ref().into())), + Expression::Bound(variable) => Self::Bound(variable.clone()), + Expression::If(cond, yes, no) => Self::If( + Box::new(cond.as_ref().into()), + Box::new(yes.as_ref().into()), + Box::new(no.as_ref().into()), + ), + Expression::Coalesce(inner) => Self::Coalesce(inner.iter().map(Into::into).collect()), + Expression::FunctionCall(name, args) => { + Self::FunctionCall(name.clone(), args.iter().map(Into::into).collect()) + } + } + } +} + +impl BitAnd for Expression { + type Output = Self; + + fn bitand(self, rhs: Self) -> Self::Output { + Self::and_all([self, rhs]) + } +} + +impl BitOr for Expression { + type Output = Self; + + fn bitor(self, rhs: Self) -> Self { + Self::or_all([self, rhs]) + } +} + +impl Not for Expression { + type Output = Self; 
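+    // `!e` first tries constant folding via the effective boolean value,
+    // then cancels a double negation (`!!e` => `e`) when the inner expression
+    // is boolean-valued; only otherwise is a `Not` node allocated.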
+ + fn not(self) -> Self { + if let Some(v) = self.effective_boolean_value() { + (!v).into() + } else if let Self::Not(v) = self { + if v.returns_boolean() { + *v + } else { + Self::And(vec![*v]) + } + } else { + Self::Not(Box::new(self)) + } + } +} + +impl Add for Expression { + type Output = Self; + + fn add(self, rhs: Self) -> Self { + let (left, right) = order_pair(self, rhs); + Self::Add(Box::new(left), Box::new(right)) + } +} + +impl Sub for Expression { + type Output = Self; + + fn sub(self, rhs: Self) -> Self { + Self::Subtract(Box::new(self), Box::new(rhs)) + } +} + +impl Mul for Expression { + type Output = Self; + + fn mul(self, rhs: Self) -> Self { + let (left, right) = order_pair(self, rhs); + Self::Multiply(Box::new(left), Box::new(right)) + } +} + +impl Div for Expression { + type Output = Self; + + fn div(self, rhs: Self) -> Self { + Self::Divide(Box::new(self), Box::new(rhs)) + } +} + +impl Neg for Expression { + type Output = Self; + + fn neg(self) -> Self { + Self::UnaryMinus(Box::new(self)) + } +} + +/// A SPARQL query [graph pattern](https://www.w3.org/TR/sparql11-query/#sparqlQuery). +#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum GraphPattern { + /// A [basic graph pattern](https://www.w3.org/TR/sparql11-query/#defn_BasicGraphPattern). + QuadPattern { + subject: GroundTermPattern, + predicate: NamedNodePattern, + object: GroundTermPattern, + graph_name: Option, + }, + /// A [property path pattern](https://www.w3.org/TR/sparql11-query/#defn_evalPP_predicate). + Path { + subject: GroundTermPattern, + path: PropertyPathExpression, + object: GroundTermPattern, + graph_name: Option, + }, + /// [Join](https://www.w3.org/TR/sparql11-query/#defn_algJoin). + Join { + left: Box, + right: Box, + algorithm: JoinAlgorithm, + }, + /// [LeftJoin](https://www.w3.org/TR/sparql11-query/#defn_algLeftJoin). + LeftJoin { + left: Box, + right: Box, + expression: Expression, + algorithm: LeftJoinAlgorithm, + }, + /// Lateral join i.e. evaluate right for all result row of left + #[cfg(feature = "sep-0006")] + Lateral { left: Box, right: Box }, + /// [Filter](https://www.w3.org/TR/sparql11-query/#defn_algFilter). + Filter { + expression: Expression, + inner: Box, + }, + /// [Union](https://www.w3.org/TR/sparql11-query/#defn_algUnion). + Union { inner: Vec }, + /// [Extend](https://www.w3.org/TR/sparql11-query/#defn_extend). + Extend { + inner: Box, + variable: Variable, + expression: Expression, + }, + /// [Minus](https://www.w3.org/TR/sparql11-query/#defn_algMinus). + Minus { + left: Box, + right: Box, + algorithm: MinusAlgorithm, + }, + /// A table used to provide inline values + Values { + variables: Vec, + bindings: Vec>>, + }, + /// [OrderBy](https://www.w3.org/TR/sparql11-query/#defn_algOrdered). + OrderBy { + inner: Box, + expression: Vec, + }, + /// [Project](https://www.w3.org/TR/sparql11-query/#defn_algProjection). + Project { + inner: Box, + variables: Vec, + }, + /// [Distinct](https://www.w3.org/TR/sparql11-query/#defn_algDistinct). + Distinct { inner: Box }, + /// [Reduced](https://www.w3.org/TR/sparql11-query/#defn_algReduced). + Reduced { inner: Box }, + /// [Slice](https://www.w3.org/TR/sparql11-query/#defn_algSlice). + Slice { + inner: Box, + start: usize, + length: Option, + }, + /// [Group](https://www.w3.org/TR/sparql11-query/#aggregateAlgebra). + Group { + inner: Box, + variables: Vec, + aggregates: Vec<(Variable, AggregateExpression)>, + }, + /// [Service](https://www.w3.org/TR/sparql11-federated-query/#defn_evalService). 
+ Service { + name: NamedNodePattern, + inner: Box, + silent: bool, + }, +} + +impl GraphPattern { + pub fn empty() -> Self { + Self::Values { + variables: Vec::new(), + bindings: Vec::new(), + } + } + + /// Check if the pattern is the empty table + fn is_empty(&self) -> bool { + if let Self::Values { bindings, .. } = self { + bindings.is_empty() + } else { + false + } + } + + pub fn empty_singleton() -> Self { + Self::Values { + variables: Vec::new(), + bindings: vec![Vec::new()], + } + } + + pub fn is_empty_singleton(&self) -> bool { + if let Self::Values { bindings, .. } = self { + bindings.len() == 1 && bindings.iter().all(|b| b.iter().all(Option::is_none)) + } else { + false + } + } + + pub fn join(left: Self, right: Self, algorithm: JoinAlgorithm) -> Self { + if left.is_empty() || right.is_empty() { + return Self::empty(); + } + if left.is_empty_singleton() { + return right; + } + if right.is_empty_singleton() { + return left; + } + Self::Join { + left: Box::new(left), + right: Box::new(right), + algorithm, + } + } + + #[cfg(feature = "sep-0006")] + pub fn lateral(left: Self, right: Self) -> Self { + if left.is_empty() || right.is_empty() { + return Self::empty(); + } + if left.is_empty_singleton() { + return right; + } + if right.is_empty_singleton() { + return left; + } + Self::Lateral { + left: Box::new(left), + right: Box::new(right), + } + } + + pub fn left_join( + left: Self, + right: Self, + expression: Expression, + algorithm: LeftJoinAlgorithm, + ) -> Self { + let expression_ebv = expression.effective_boolean_value(); + if left.is_empty() + || right.is_empty() + || right.is_empty_singleton() + || expression_ebv == Some(false) + { + return left; + } + Self::LeftJoin { + left: Box::new(left), + right: Box::new(right), + expression: if expression_ebv == Some(true) { + true.into() + } else { + expression + }, + algorithm, + } + } + + pub fn minus(left: Self, right: Self, algorithm: MinusAlgorithm) -> Self { + if left.is_empty() { + return Self::empty(); + } + if right.is_empty() { + return left; + } + Self::Minus { + left: Box::new(left), + right: Box::new(right), + algorithm, + } + } + + pub fn union(left: Self, right: Self) -> Self { + Self::union_all([left, right]) + } + + pub fn union_all(args: impl IntoIterator) -> Self { + let args = args.into_iter(); + let mut all = Vec::with_capacity(args.size_hint().0); + for arg in args { + if arg.is_empty() { + continue; + } + if let Self::Union { inner } = arg { + all.extend(inner); + } else { + all.push(arg); + } + } + if all.is_empty() { + Self::empty() + } else { + Self::Union { + inner: order_vec(all), + } + } + } + + pub fn filter(inner: Self, expression: Expression) -> Self { + if inner.is_empty() { + return Self::empty(); + } + // We unwrap singleton And + let expression = match expression { + Expression::And(mut l) if l.len() == 1 => l.pop().unwrap(), + e => e, + }; + match expression.effective_boolean_value() { + Some(true) => inner, + Some(false) => Self::empty(), + None => match inner { + Self::Filter { + inner, + expression: e2, + } => Self::Filter { + inner, + expression: expression & e2, + }, + _ => Self::Filter { + inner: Box::new(inner), + expression, + }, + }, + } + } + + pub fn extend(inner: Self, variable: Variable, expression: Expression) -> Self { + if inner.is_empty() { + return Self::empty(); + } + Self::Extend { + inner: Box::new(inner), + variable, + expression, + } + } + + pub fn values( + mut variables: Vec, + mut bindings: Vec>>, + ) -> Self { + let empty_rows = (0..variables.len()) + .filter(|row| 
!bindings.iter().any(|binding| binding.get(*row).is_some())) + .collect::>(); + if !empty_rows.is_empty() { + // We remove empty rows + variables = variables + .into_iter() + .enumerate() + .filter_map(|(i, v)| { + if empty_rows.contains(&i) { + None + } else { + Some(v) + } + }) + .collect(); + bindings = bindings + .into_iter() + .map(|binding| { + binding + .into_iter() + .enumerate() + .filter_map(|(i, v)| { + if empty_rows.contains(&i) { + None + } else { + Some(v) + } + }) + .collect() + }) + .collect(); + } + Self::Values { + variables, + bindings, + } + } + + pub fn order_by(inner: Self, expression: Vec) -> Self { + if inner.is_empty() { + return Self::empty(); + } + if expression.is_empty() { + return inner; + } + Self::OrderBy { + inner: Box::new(inner), + expression, + } + } + + pub fn project(inner: Self, variables: Vec) -> Self { + Self::Project { + inner: Box::new(inner), + variables, + } + } + + pub fn distinct(inner: Self) -> Self { + if inner.is_empty() { + return Self::empty(); + } + Self::Distinct { + inner: Box::new(inner), + } + } + + pub fn reduced(inner: Self) -> Self { + if inner.is_empty() { + return Self::empty(); + } + Self::Reduced { + inner: Box::new(inner), + } + } + + pub fn slice(inner: Self, start: usize, length: Option) -> Self { + if inner.is_empty() { + return Self::empty(); + } + if start == 0 && length.is_none() { + return inner; + } + Self::Slice { + inner: Box::new(inner), + start, + length, + } + } + + pub fn group( + inner: Self, + variables: Vec, + aggregates: Vec<(Variable, AggregateExpression)>, + ) -> Self { + if inner.is_empty() { + return Self::empty(); + } + Self::Group { + inner: Box::new(inner), + variables, + aggregates, + } + } + + pub fn service(inner: Self, name: NamedNodePattern, silent: bool) -> Self { + if inner.is_empty() { + return Self::empty(); + } + Self::Service { + inner: Box::new(inner), + name, + silent, + } + } + + pub fn lookup_used_variables<'a>(&'a self, callback: &mut impl FnMut(&'a Variable)) { + match self { + Self::Values { variables, .. } | Self::Project { variables, .. } => { + for v in variables { + callback(v); + } + } + Self::QuadPattern { + subject, + predicate, + object, + graph_name, + } => { + lookup_term_pattern_variables(subject, callback); + if let NamedNodePattern::Variable(v) = predicate { + callback(v); + } + lookup_term_pattern_variables(object, callback); + if let Some(NamedNodePattern::Variable(v)) = graph_name { + callback(v); + } + } + Self::Path { + subject, + object, + graph_name, + .. + } => { + lookup_term_pattern_variables(subject, callback); + lookup_term_pattern_variables(object, callback); + if let Some(NamedNodePattern::Variable(v)) = graph_name { + callback(v); + } + } + Self::Filter { inner, expression } => { + expression.lookup_used_variables(callback); + inner.lookup_used_variables(callback); + } + Self::Union { inner } => { + for child in inner { + child.lookup_used_variables(callback); + } + } + Self::Join { left, right, .. } | Self::Minus { left, right, .. } => { + left.lookup_used_variables(callback); + right.lookup_used_variables(callback); + } + #[cfg(feature = "sep-0006")] + Self::Lateral { left, right } => { + left.lookup_used_variables(callback); + right.lookup_used_variables(callback); + } + Self::LeftJoin { + left, + right, + expression, + .. 
+ } => { + expression.lookup_used_variables(callback); + left.lookup_used_variables(callback); + right.lookup_used_variables(callback); + } + Self::Extend { + inner, + variable, + expression, + } => { + callback(variable); + expression.lookup_used_variables(callback); + inner.lookup_used_variables(callback); + } + Self::OrderBy { inner, .. } + | Self::Distinct { inner } + | Self::Reduced { inner } + | Self::Slice { inner, .. } => inner.lookup_used_variables(callback), + Self::Service { inner, name, .. } => { + if let NamedNodePattern::Variable(v) = name { + callback(v); + } + inner.lookup_used_variables(callback); + } + Self::Group { + variables, + aggregates, + .. + } => { + for v in variables { + callback(v); + } + for (v, _) in aggregates { + callback(v); + } + } + } + } + + fn from_sparql_algebra( + pattern: &AlGraphPattern, + graph_name: Option<&NamedNodePattern>, + blank_nodes: &mut HashMap, + ) -> Self { + match pattern { + AlGraphPattern::Bgp { patterns } => patterns + .iter() + .map(|p| { + let (subject, predicate, object) = + Self::triple_pattern_from_algebra(p, blank_nodes); + Self::QuadPattern { + subject, + predicate, + object, + graph_name: graph_name.cloned(), + } + }) + .reduce(|a, b| Self::Join { + left: Box::new(a), + right: Box::new(b), + algorithm: JoinAlgorithm::default(), + }) + .unwrap_or_else(Self::empty_singleton), + AlGraphPattern::Path { + subject, + path, + object, + } => Self::Path { + subject: Self::term_pattern_from_algebra(subject, blank_nodes), + path: path.clone(), + object: Self::term_pattern_from_algebra(object, blank_nodes), + graph_name: graph_name.cloned(), + }, + AlGraphPattern::Join { left, right } => Self::Join { + left: Box::new(Self::from_sparql_algebra(left, graph_name, blank_nodes)), + right: Box::new(Self::from_sparql_algebra(right, graph_name, blank_nodes)), + algorithm: JoinAlgorithm::default(), + }, + AlGraphPattern::LeftJoin { + left, + right, + expression, + } => Self::LeftJoin { + left: Box::new(Self::from_sparql_algebra(left, graph_name, blank_nodes)), + right: Box::new(Self::from_sparql_algebra(right, graph_name, blank_nodes)), + expression: expression.as_ref().map_or_else( + || true.into(), + |e| Expression::from_sparql_algebra(e, graph_name), + ), + algorithm: LeftJoinAlgorithm::default(), + }, + #[cfg(feature = "sep-0006")] + AlGraphPattern::Lateral { left, right } => Self::Lateral { + left: Box::new(Self::from_sparql_algebra(left, graph_name, blank_nodes)), + right: Box::new(Self::from_sparql_algebra(right, graph_name, blank_nodes)), + }, + AlGraphPattern::Filter { inner, expr } => Self::Filter { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + expression: Expression::from_sparql_algebra(expr, graph_name), + }, + AlGraphPattern::Union { left, right } => Self::Union { + inner: vec![ + Self::from_sparql_algebra(left, graph_name, blank_nodes), + Self::from_sparql_algebra(right, graph_name, blank_nodes), + ], + }, + AlGraphPattern::Graph { inner, name } => { + Self::from_sparql_algebra(inner, Some(name), blank_nodes) + } + AlGraphPattern::Extend { + inner, + expression, + variable, + } => Self::Extend { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + expression: Expression::from_sparql_algebra(expression, graph_name), + variable: variable.clone(), + }, + AlGraphPattern::Minus { left, right } => Self::Minus { + left: Box::new(Self::from_sparql_algebra(left, graph_name, blank_nodes)), + right: Box::new(Self::from_sparql_algebra(right, graph_name, blank_nodes)), + algorithm: 
MinusAlgorithm::default(), + }, + AlGraphPattern::Values { + variables, + bindings, + } => Self::Values { + variables: variables.clone(), + bindings: bindings.clone(), + }, + AlGraphPattern::OrderBy { inner, expression } => Self::OrderBy { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + expression: expression + .iter() + .map(|e| OrderExpression::from_sparql_algebra(e, graph_name)) + .collect(), + }, + AlGraphPattern::Project { inner, variables } => { + let graph_name = if let Some(NamedNodePattern::Variable(graph_name)) = graph_name { + Some(NamedNodePattern::Variable( + if variables.contains(graph_name) { + graph_name.clone() + } else { + new_var() + }, + )) + } else { + graph_name.cloned() + }; + Self::Project { + inner: Box::new(Self::from_sparql_algebra( + inner, + graph_name.as_ref(), + &mut HashMap::new(), + )), + variables: variables.clone(), + } + } + AlGraphPattern::Distinct { inner } => Self::Distinct { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + }, + AlGraphPattern::Reduced { inner } => Self::Distinct { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + }, + AlGraphPattern::Slice { + inner, + start, + length, + } => Self::Slice { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + start: *start, + length: *length, + }, + AlGraphPattern::Group { + inner, + variables, + aggregates, + } => Self::Group { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + variables: variables.clone(), + aggregates: aggregates + .iter() + .map(|(var, expr)| { + ( + var.clone(), + AggregateExpression::from_sparql_algebra(expr, graph_name), + ) + }) + .collect(), + }, + AlGraphPattern::Service { + inner, + name, + silent, + } => Self::Service { + inner: Box::new(Self::from_sparql_algebra(inner, graph_name, blank_nodes)), + name: name.clone(), + silent: *silent, + }, + } + } + + fn triple_pattern_from_algebra( + pattern: &TriplePattern, + blank_nodes: &mut HashMap, + ) -> (GroundTermPattern, NamedNodePattern, GroundTermPattern) { + ( + Self::term_pattern_from_algebra(&pattern.subject, blank_nodes), + pattern.predicate.clone(), + Self::term_pattern_from_algebra(&pattern.object, blank_nodes), + ) + } + + fn term_pattern_from_algebra( + pattern: &TermPattern, + blank_nodes: &mut HashMap, + ) -> GroundTermPattern { + match pattern { + TermPattern::NamedNode(node) => node.clone().into(), + TermPattern::BlankNode(node) => blank_nodes + .entry(node.clone()) + .or_insert_with(new_var) + .clone() + .into(), + TermPattern::Literal(literal) => literal.clone().into(), + #[cfg(feature = "rdf-star")] + TermPattern::Triple(pattern) => { + let (subject, predicate, object) = + Self::triple_pattern_from_algebra(pattern, blank_nodes); + GroundTriplePattern { + subject, + predicate, + object, + } + .into() + } + TermPattern::Variable(variable) => variable.clone().into(), + } + } +} + +impl From<&AlGraphPattern> for GraphPattern { + fn from(pattern: &AlGraphPattern) -> Self { + Self::from_sparql_algebra(pattern, None, &mut HashMap::new()) + } +} + +impl From<&GraphPattern> for AlGraphPattern { + fn from(pattern: &GraphPattern) -> Self { + match pattern { + GraphPattern::QuadPattern { + subject, + predicate, + object, + graph_name, + } => { + let pattern = Self::Bgp { + patterns: vec![TriplePattern { + subject: subject.clone().into(), + predicate: predicate.clone(), + object: object.clone().into(), + }], + }; + if let Some(graph_name) = graph_name { + Self::Graph { + inner: 
Box::new(pattern), + name: graph_name.clone(), + } + } else { + pattern + } + } + GraphPattern::Path { + subject, + path, + object, + graph_name, + } => { + let pattern = Self::Path { + subject: subject.clone().into(), + path: path.clone(), + object: object.clone().into(), + }; + if let Some(graph_name) = graph_name { + Self::Graph { + inner: Box::new(pattern), + name: graph_name.clone(), + } + } else { + pattern + } + } + GraphPattern::Join { left, right, .. } => { + match (left.as_ref().into(), right.as_ref().into()) { + (Self::Bgp { patterns: mut left }, Self::Bgp { patterns: right }) => { + left.extend(right); + Self::Bgp { patterns: left } + } + (left, right) => Self::Join { + left: Box::new(left), + right: Box::new(right), + }, + } + } + GraphPattern::LeftJoin { + left, + right, + expression, + .. + } => { + let empty_expr = if let Expression::Literal(l) = expression { + l.datatype() == xsd::BOOLEAN && l.value() == "true" + } else { + false + }; + Self::LeftJoin { + left: Box::new(left.as_ref().into()), + right: Box::new(right.as_ref().into()), + expression: if empty_expr { + None + } else { + Some(expression.into()) + }, + } + } + #[cfg(feature = "sep-0006")] + GraphPattern::Lateral { left, right } => { + match (left.as_ref().into(), right.as_ref().into()) { + (Self::Bgp { patterns: mut left }, Self::Bgp { patterns: right }) => { + left.extend(right); + Self::Bgp { patterns: left } + } + (left, right) => Self::Lateral { + left: Box::new(left), + right: Box::new(right), + }, + } + } + GraphPattern::Filter { inner, expression } => Self::Filter { + inner: Box::new(inner.as_ref().into()), + expr: expression.into(), + }, + GraphPattern::Union { inner } => inner + .iter() + .map(Into::into) + .reduce(|a, b| Self::Union { + left: Box::new(a), + right: Box::new(b), + }) + .unwrap_or_else(|| Self::Values { + variables: Vec::new(), + bindings: Vec::new(), + }), + GraphPattern::Extend { + inner, + expression, + variable, + } => Self::Extend { + inner: Box::new(inner.as_ref().into()), + expression: expression.into(), + variable: variable.clone(), + }, + GraphPattern::Minus { left, right, .. } => Self::Minus { + left: Box::new(left.as_ref().into()), + right: Box::new(right.as_ref().into()), + }, + GraphPattern::Values { + variables, + bindings, + } => Self::Values { + variables: variables.clone(), + bindings: bindings.clone(), + }, + GraphPattern::OrderBy { inner, expression } => Self::OrderBy { + inner: Box::new(inner.as_ref().into()), + expression: expression.iter().map(Into::into).collect(), + }, + GraphPattern::Project { inner, variables } => Self::Project { + inner: Box::new(inner.as_ref().into()), + variables: variables.clone(), + }, + GraphPattern::Distinct { inner } => Self::Distinct { + inner: Box::new(inner.as_ref().into()), + }, + GraphPattern::Reduced { inner } => Self::Distinct { + inner: Box::new(inner.as_ref().into()), + }, + GraphPattern::Slice { + inner, + start, + length, + } => Self::Slice { + inner: Box::new(inner.as_ref().into()), + start: *start, + length: *length, + }, + GraphPattern::Group { + inner, + variables, + aggregates, + } => Self::Group { + inner: Box::new(inner.as_ref().into()), + variables: variables.clone(), + aggregates: aggregates + .iter() + .map(|(var, expr)| (var.clone(), expr.into())) + .collect(), + }, + GraphPattern::Service { + inner, + name, + silent, + } => Self::Service { + inner: Box::new(inner.as_ref().into()), + name: name.clone(), + silent: *silent, + }, + } + } +} + +/// The join algorithm used (c.f. [`GraphPattern::Join`]). 
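+/// The only strategy currently modelled: build a hash table over the left
+/// input keyed by the shared variables in `keys`, then probe it with the
+/// right input, as the variant name indicates (`keys` is presumably filled
+/// in by the optimizer; an empty list is the default).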
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum JoinAlgorithm {
+    HashBuildLeftProbeRight { keys: Vec<Variable> },
+}
+
+impl Default for JoinAlgorithm {
+    fn default() -> Self {
+        Self::HashBuildLeftProbeRight {
+            keys: Vec::default(),
+        }
+    }
+}
+
+/// The left join algorithm used (c.f. [`GraphPattern::LeftJoin`]).
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum LeftJoinAlgorithm {
+    HashBuildRightProbeLeft { keys: Vec<Variable> },
+}
+
+impl Default for LeftJoinAlgorithm {
+    fn default() -> Self {
+        Self::HashBuildRightProbeLeft {
+            keys: Vec::default(),
+        }
+    }
+}
+
+/// The minus algorithm used (c.f. [`GraphPattern::Minus`]).
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum MinusAlgorithm {
+    HashBuildRightProbeLeft { keys: Vec<Variable> },
+}
+
+impl Default for MinusAlgorithm {
+    fn default() -> Self {
+        Self::HashBuildRightProbeLeft {
+            keys: Vec::default(),
+        }
+    }
+}
+
+/// A set function used in aggregates (c.f. [`GraphPattern::Group`]).
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum AggregateExpression {
+    CountSolutions {
+        distinct: bool,
+    },
+    FunctionCall {
+        name: AggregateFunction,
+        expr: Expression,
+        distinct: bool,
+    },
+}
+
+impl AggregateExpression {
+    fn from_sparql_algebra(
+        expression: &AlAggregateExpression,
+        graph_name: Option<&NamedNodePattern>,
+    ) -> Self {
+        match expression {
+            AlAggregateExpression::CountSolutions { distinct } => Self::CountSolutions {
+                distinct: *distinct,
+            },
+            AlAggregateExpression::FunctionCall {
+                name,
+                expr,
+                distinct,
+            } => Self::FunctionCall {
+                name: name.clone(),
+                expr: Expression::from_sparql_algebra(expr, graph_name),
+                distinct: *distinct,
+            },
+        }
+    }
+}
+
+impl From<&AggregateExpression> for AlAggregateExpression {
+    fn from(expression: &AggregateExpression) -> Self {
+        match expression {
+            AggregateExpression::CountSolutions { distinct } => Self::CountSolutions {
+                distinct: *distinct,
+            },
+            AggregateExpression::FunctionCall {
+                name,
+                expr,
+                distinct,
+            } => Self::FunctionCall {
+                name: name.clone(),
+                expr: expr.into(),
+                distinct: *distinct,
+            },
+        }
+    }
+}
+
+/// An ordering comparator used by [`GraphPattern::OrderBy`].
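+/// `ORDER BY DESC(?age) ?name` maps to `[Desc(?age), Asc(?name)]`
+/// (illustrative; plain expressions default to ascending order in SPARQL).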
+#[derive(Eq, PartialEq, Debug, Clone, Hash)] +pub enum OrderExpression { + /// Ascending order + Asc(Expression), + /// Descending order + Desc(Expression), +} + +impl OrderExpression { + fn from_sparql_algebra( + expression: &AlOrderExpression, + graph_name: Option<&NamedNodePattern>, + ) -> Self { + match expression { + AlOrderExpression::Asc(e) => Self::Asc(Expression::from_sparql_algebra(e, graph_name)), + AlOrderExpression::Desc(e) => { + Self::Desc(Expression::from_sparql_algebra(e, graph_name)) + } + } + } +} + +impl From<&OrderExpression> for AlOrderExpression { + fn from(expression: &OrderExpression) -> Self { + match expression { + OrderExpression::Asc(e) => Self::Asc(e.into()), + OrderExpression::Desc(e) => Self::Desc(e.into()), + } + } +} + +fn new_var() -> Variable { + Variable::new_unchecked(format!("{:x}", random::())) +} + +fn order_pair(a: T, b: T) -> (T, T) { + if hash(&a) <= hash(&b) { + (a, b) + } else { + (b, a) + } +} + +fn order_vec(mut vec: Vec) -> Vec { + vec.sort_unstable_by_key(|a| hash(a)); + vec +} + +fn hash(v: impl Hash) -> u64 { + let mut hasher = DefaultHasher::new(); + v.hash(&mut hasher); + hasher.finish() +} + +fn lookup_term_pattern_variables<'a>( + pattern: &'a GroundTermPattern, + callback: &mut impl FnMut(&'a Variable), +) { + if let GroundTermPattern::Variable(v) = pattern { + callback(v); + } + #[cfg(feature = "rdf-star")] + if let GroundTermPattern::Triple(t) = pattern { + lookup_term_pattern_variables(&t.subject, callback); + if let NamedNodePattern::Variable(v) = &t.predicate { + callback(v); + } + lookup_term_pattern_variables(&t.object, callback); + } +} diff --git a/ng-oxigraph/src/sparopt/mod.rs b/ng-oxigraph/src/sparopt/mod.rs new file mode 100644 index 0000000..3628eca --- /dev/null +++ b/ng-oxigraph/src/sparopt/mod.rs @@ -0,0 +1,5 @@ +pub use crate::sparopt::optimizer::Optimizer; + +pub mod algebra; +mod optimizer; +mod type_inference; diff --git a/ng-oxigraph/src/sparopt/optimizer.rs b/ng-oxigraph/src/sparopt/optimizer.rs new file mode 100644 index 0000000..7e1a254 --- /dev/null +++ b/ng-oxigraph/src/sparopt/optimizer.rs @@ -0,0 +1,1082 @@ +use crate::oxrdf::Variable; +use crate::spargebra::algebra::PropertyPathExpression; +use crate::spargebra::term::{GroundTermPattern, NamedNodePattern}; +use crate::sparopt::algebra::{ + Expression, GraphPattern, JoinAlgorithm, LeftJoinAlgorithm, MinusAlgorithm, OrderExpression, +}; +use crate::sparopt::type_inference::{ + infer_expression_type, infer_graph_pattern_types, VariableType, VariableTypes, +}; +use std::cmp::{max, min}; + +pub struct Optimizer; + +impl Optimizer { + pub fn optimize_graph_pattern(pattern: GraphPattern) -> GraphPattern { + let pattern = Self::normalize_pattern(pattern, &VariableTypes::default()); + let pattern = Self::reorder_joins(pattern, &VariableTypes::default()); + Self::push_filters(pattern, Vec::new(), &VariableTypes::default()) + } + + /// Normalize the pattern, discarding any join ordering information + fn normalize_pattern(pattern: GraphPattern, input_types: &VariableTypes) -> GraphPattern { + match pattern { + GraphPattern::QuadPattern { + subject, + predicate, + object, + graph_name, + } => GraphPattern::QuadPattern { + subject, + predicate, + object, + graph_name, + }, + GraphPattern::Path { + subject, + path, + object, + graph_name, + } => GraphPattern::Path { + subject, + path, + object, + graph_name, + }, + GraphPattern::Join { + left, + right, + algorithm, + } => GraphPattern::join( + Self::normalize_pattern(*left, input_types), + 
Self::normalize_pattern(*right, input_types), + algorithm, + ), + GraphPattern::LeftJoin { + left, + right, + expression, + algorithm, + } => { + let left = Self::normalize_pattern(*left, input_types); + let right = Self::normalize_pattern(*right, input_types); + let mut inner_types = infer_graph_pattern_types(&left, input_types.clone()); + inner_types.intersect_with(infer_graph_pattern_types(&right, input_types.clone())); + GraphPattern::left_join( + left, + right, + Self::normalize_expression(expression, &inner_types), + algorithm, + ) + } + #[cfg(feature = "sep-0006")] + GraphPattern::Lateral { left, right } => { + let left = Self::normalize_pattern(*left, input_types); + let left_types = infer_graph_pattern_types(&left, input_types.clone()); + let right = Self::normalize_pattern(*right, &left_types); + GraphPattern::lateral(left, right) + } + GraphPattern::Filter { inner, expression } => { + let inner = Self::normalize_pattern(*inner, input_types); + let inner_types = infer_graph_pattern_types(&inner, input_types.clone()); + let expression = Self::normalize_expression(expression, &inner_types); + let expression_type = infer_expression_type(&expression, &inner_types); + if expression_type == VariableType::UNDEF { + GraphPattern::empty() + } else { + GraphPattern::filter(inner, expression) + } + } + GraphPattern::Union { inner } => GraphPattern::union_all( + inner + .into_iter() + .map(|e| Self::normalize_pattern(e, input_types)), + ), + GraphPattern::Extend { + inner, + variable, + expression, + } => { + let inner = Self::normalize_pattern(*inner, input_types); + let inner_types = infer_graph_pattern_types(&inner, input_types.clone()); + let expression = Self::normalize_expression(expression, &inner_types); + let expression_type = infer_expression_type(&expression, &inner_types); + if expression_type == VariableType::UNDEF { + // TODO: valid? 
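+                    // Reasoning behind the TODO: an UNDEF-typed expression
+                    // always evaluates to an error, so the BIND would leave
+                    // `variable` unbound in every solution; returning `inner`
+                    // unchanged preserves the same visible bindings.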
+ inner + } else { + GraphPattern::extend(inner, variable, expression) + } + } + GraphPattern::Minus { + left, + right, + algorithm, + } => GraphPattern::minus( + Self::normalize_pattern(*left, input_types), + Self::normalize_pattern(*right, input_types), + algorithm, + ), + GraphPattern::Values { + variables, + bindings, + } => GraphPattern::values(variables, bindings), + GraphPattern::OrderBy { inner, expression } => { + let inner = Self::normalize_pattern(*inner, input_types); + let inner_types = infer_graph_pattern_types(&inner, input_types.clone()); + GraphPattern::order_by( + inner, + expression + .into_iter() + .map(|e| match e { + OrderExpression::Asc(e) => { + OrderExpression::Asc(Self::normalize_expression(e, &inner_types)) + } + OrderExpression::Desc(e) => { + OrderExpression::Desc(Self::normalize_expression(e, &inner_types)) + } + }) + .collect(), + ) + } + GraphPattern::Project { inner, variables } => { + GraphPattern::project(Self::normalize_pattern(*inner, input_types), variables) + } + GraphPattern::Distinct { inner } => { + GraphPattern::distinct(Self::normalize_pattern(*inner, input_types)) + } + GraphPattern::Reduced { inner } => { + GraphPattern::reduced(Self::normalize_pattern(*inner, input_types)) + } + GraphPattern::Slice { + inner, + start, + length, + } => GraphPattern::slice(Self::normalize_pattern(*inner, input_types), start, length), + GraphPattern::Group { + inner, + variables, + aggregates, + } => { + // TODO: min, max and sample don't care about DISTINCT + GraphPattern::group( + Self::normalize_pattern(*inner, input_types), + variables, + aggregates, + ) + } + GraphPattern::Service { + name, + inner, + silent, + } => GraphPattern::service(Self::normalize_pattern(*inner, input_types), name, silent), + } + } + + fn normalize_expression(expression: Expression, types: &VariableTypes) -> Expression { + match expression { + Expression::NamedNode(node) => node.into(), + Expression::Literal(literal) => literal.into(), + Expression::Variable(variable) => variable.into(), + Expression::Or(inner) => Expression::or_all( + inner + .into_iter() + .map(|e| Self::normalize_expression(e, types)), + ), + Expression::And(inner) => Expression::and_all( + inner + .into_iter() + .map(|e| Self::normalize_expression(e, types)), + ), + Expression::Equal(left, right) => { + let left = Self::normalize_expression(*left, types); + let left_types = infer_expression_type(&left, types); + let right = Self::normalize_expression(*right, types); + let right_types = infer_expression_type(&right, types); + #[allow(unused_mut)] + let mut must_use_equal = left_types.literal && right_types.literal; + #[cfg(feature = "rdf-star")] + { + must_use_equal = must_use_equal || left_types.triple && right_types.triple; + } + if must_use_equal { + Expression::equal(left, right) + } else { + Expression::same_term(left, right) + } + } + Expression::SameTerm(left, right) => Expression::same_term( + Self::normalize_expression(*left, types), + Self::normalize_expression(*right, types), + ), + Expression::Greater(left, right) => Expression::greater( + Self::normalize_expression(*left, types), + Self::normalize_expression(*right, types), + ), + Expression::GreaterOrEqual(left, right) => Expression::greater_or_equal( + Self::normalize_expression(*left, types), + Self::normalize_expression(*right, types), + ), + Expression::Less(left, right) => Expression::less( + Self::normalize_expression(*left, types), + Self::normalize_expression(*right, types), + ), + Expression::LessOrEqual(left, right) => 
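+            // Unlike Equal above, ordering comparisons are left as-is and are
+            // never rewritten to SameTerm.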
                Expression::less_or_equal(
+                Self::normalize_expression(*left, types),
+                Self::normalize_expression(*right, types),
+            ),
+            Expression::Add(left, right) => {
+                Self::normalize_expression(*left, types) + Self::normalize_expression(*right, types)
+            }
+            Expression::Subtract(left, right) => {
+                Self::normalize_expression(*left, types) - Self::normalize_expression(*right, types)
+            }
+            Expression::Multiply(left, right) => {
+                Self::normalize_expression(*left, types) * Self::normalize_expression(*right, types)
+            }
+            Expression::Divide(left, right) => {
+                Self::normalize_expression(*left, types) / Self::normalize_expression(*right, types)
+            }
+            Expression::UnaryPlus(inner) => {
+                Expression::unary_plus(Self::normalize_expression(*inner, types))
+            }
+            Expression::UnaryMinus(inner) => -Self::normalize_expression(*inner, types),
+            Expression::Not(inner) => !Self::normalize_expression(*inner, types),
+            Expression::Exists(inner) => Expression::exists(Self::normalize_pattern(*inner, types)),
+            Expression::Bound(variable) => {
+                let t = types.get(&variable);
+                if !t.undef {
+                    true.into()
+                } else if t == VariableType::UNDEF {
+                    false.into()
+                } else {
+                    Expression::Bound(variable)
+                }
+            }
+            Expression::If(cond, then, els) => Expression::if_cond(
+                Self::normalize_expression(*cond, types),
+                Self::normalize_expression(*then, types),
+                Self::normalize_expression(*els, types),
+            ),
+            Expression::Coalesce(inners) => Expression::coalesce(
+                inners
+                    .into_iter()
+                    .map(|e| Self::normalize_expression(e, types))
+                    .collect(),
+            ),
+            Expression::FunctionCall(name, args) => Expression::call(
+                name,
+                args.into_iter()
+                    .map(|e| Self::normalize_expression(e, types))
+                    .collect(),
+            ),
+        }
+    }
+
+    fn push_filters(
+        pattern: GraphPattern,
+        mut filters: Vec<Expression>,
+        input_types: &VariableTypes,
+    ) -> GraphPattern {
+        match pattern {
+            GraphPattern::QuadPattern { .. }
+            | GraphPattern::Path { .. }
+            | GraphPattern::Values { ..
} => { + GraphPattern::filter(pattern, Expression::and_all(filters)) + } + GraphPattern::Join { + left, + right, + algorithm, + } => { + let left_types = infer_graph_pattern_types(&left, input_types.clone()); + let right_types = infer_graph_pattern_types(&right, input_types.clone()); + let mut left_filters = Vec::new(); + let mut right_filters = Vec::new(); + let mut final_filters = Vec::new(); + for filter in filters { + let push_left = are_all_expression_variables_bound(&filter, &left_types); + let push_right = are_all_expression_variables_bound(&filter, &right_types); + if push_left { + if push_right { + left_filters.push(filter.clone()); + right_filters.push(filter); + } else { + left_filters.push(filter); + } + } else if push_right { + right_filters.push(filter); + } else { + final_filters.push(filter); + } + } + GraphPattern::filter( + GraphPattern::join( + Self::push_filters(*left, left_filters, input_types), + Self::push_filters(*right, right_filters, input_types), + algorithm, + ), + Expression::and_all(final_filters), + ) + } + #[cfg(feature = "sep-0006")] + GraphPattern::Lateral { left, right } => { + let left_types = infer_graph_pattern_types(&left, input_types.clone()); + let mut left_filters = Vec::new(); + let mut right_filters = Vec::new(); + for filter in filters { + let push_left = are_all_expression_variables_bound(&filter, &left_types); + if push_left { + left_filters.push(filter); + } else { + right_filters.push(filter); + } + } + let left = Self::push_filters(*left, left_filters, input_types); + let right = Self::push_filters(*right, right_filters, &left_types); + if let GraphPattern::Filter { + inner: right, + expression, + } = right + { + // We prefer to have filter out of the lateral rather than inside the right part + GraphPattern::filter(GraphPattern::lateral(left, *right), expression) + } else { + GraphPattern::lateral(left, right) + } + } + GraphPattern::LeftJoin { + left, + right, + expression, + algorithm, + } => { + let left_types = infer_graph_pattern_types(&left, input_types.clone()); + let right_types = infer_graph_pattern_types(&right, input_types.clone()); + let mut left_filters = Vec::new(); + let mut right_filters = Vec::new(); + let mut final_filters = Vec::new(); + for filter in filters { + let push_left = are_all_expression_variables_bound(&filter, &left_types); + if push_left { + left_filters.push(filter); + } else { + final_filters.push(filter); + } + } + let expression = if expression.effective_boolean_value().is_none() + && (are_all_expression_variables_bound(&expression, &right_types) + || are_no_expression_variables_bound(&expression, &left_types)) + { + right_filters.push(expression); + true.into() + } else { + expression + }; + GraphPattern::filter( + GraphPattern::left_join( + Self::push_filters(*left, left_filters, input_types), + Self::push_filters(*right, right_filters, input_types), + expression, + algorithm, + ), + Expression::and_all(final_filters), + ) + } + GraphPattern::Minus { + left, + right, + algorithm, + } => GraphPattern::minus( + Self::push_filters(*left, filters, input_types), + Self::push_filters(*right, Vec::new(), input_types), + algorithm, + ), + GraphPattern::Extend { + inner, + expression, + variable, + } => { + // TODO: handle the case where the filter overrides an expression variable (should not happen in SPARQL but allowed in the algebra) + let mut inner_filters = Vec::new(); + let mut final_filters = Vec::new(); + for filter in filters { + let extend_variable_used = + filter.used_variables().into_iter().any(|v| 
*v == variable); + if extend_variable_used { + final_filters.push(filter); + } else { + inner_filters.push(filter); + } + } + GraphPattern::filter( + GraphPattern::extend( + Self::push_filters(*inner, inner_filters, input_types), + variable, + expression, + ), + Expression::and_all(final_filters), + ) + } + GraphPattern::Filter { inner, expression } => { + if let Expression::And(expressions) = expression { + filters.extend(expressions) + } else { + filters.push(expression) + }; + Self::push_filters(*inner, filters, input_types) + } + GraphPattern::Union { inner } => GraphPattern::union_all( + inner + .into_iter() + .map(|c| Self::push_filters(c, filters.clone(), input_types)), + ), + GraphPattern::Slice { + inner, + start, + length, + } => GraphPattern::filter( + GraphPattern::slice( + Self::push_filters(*inner, Vec::new(), input_types), + start, + length, + ), + Expression::and_all(filters), + ), + GraphPattern::Distinct { inner } => { + GraphPattern::distinct(Self::push_filters(*inner, filters, input_types)) + } + GraphPattern::Reduced { inner } => { + GraphPattern::reduced(Self::push_filters(*inner, filters, input_types)) + } + GraphPattern::Project { inner, variables } => { + GraphPattern::project(Self::push_filters(*inner, filters, input_types), variables) + } + GraphPattern::OrderBy { inner, expression } => { + GraphPattern::order_by(Self::push_filters(*inner, filters, input_types), expression) + } + GraphPattern::Service { + inner, + name, + silent, + } => GraphPattern::service( + Self::push_filters(*inner, filters, input_types), + name, + silent, + ), + GraphPattern::Group { + inner, + variables, + aggregates, + } => GraphPattern::filter( + GraphPattern::group( + Self::push_filters(*inner, Vec::new(), input_types), + variables, + aggregates, + ), + Expression::and_all(filters), + ), + } + } + + fn reorder_joins(pattern: GraphPattern, input_types: &VariableTypes) -> GraphPattern { + match pattern { + GraphPattern::QuadPattern { .. } + | GraphPattern::Path { .. } + | GraphPattern::Values { .. } => pattern, + GraphPattern::Join { left, right, .. } => { + // We flatten the join operation + let mut to_reorder = Vec::new(); + let mut todo = vec![*right, *left]; + while let Some(e) = todo.pop() { + if let GraphPattern::Join { left, right, .. 
                    } = e {
+                        todo.push(*right);
+                        todo.push(*left);
+                    } else {
+                        to_reorder.push(e);
+                    }
+                }
+
+                // We first do type inference
+                let to_reorder_types = to_reorder
+                    .iter()
+                    .map(|p| infer_graph_pattern_types(p, input_types.clone()))
+                    .collect::<Vec<_>>();
+
+                // We do greedy join reordering
+                let mut output_cartesian_product_joins = Vec::new();
+                let mut not_yet_reordered_ids = vec![true; to_reorder.len()];
+                // We look for the next connected component to reorder and pick the smallest element
+                while let Some(next_entry_id) = not_yet_reordered_ids
+                    .iter()
+                    .enumerate()
+                    .filter(|(_, v)| **v)
+                    .map(|(i, _)| i)
+                    .min_by_key(|i| estimate_graph_pattern_size(&to_reorder[*i], input_types))
+                {
+                    not_yet_reordered_ids[next_entry_id] = false; // It's now done
+                    let mut output = to_reorder[next_entry_id].clone();
+                    let mut output_types = to_reorder_types[next_entry_id].clone();
+                    // We look for another child to join with that does not blow up the join cost
+                    while let Some(next_id) = not_yet_reordered_ids
+                        .iter()
+                        .enumerate()
+                        .filter(|(_, v)| **v)
+                        .map(|(i, _)| i)
+                        .filter(|i| {
+                            has_common_variables(&output_types, &to_reorder_types[*i], input_types)
+                        })
+                        .min_by_key(|i| {
+                            // Estimation of the join cost
+                            if cfg!(feature = "sep-0006")
+                                && is_fit_for_for_loop_join(
+                                    &to_reorder[*i],
+                                    input_types,
+                                    &output_types,
+                                )
+                            {
+                                estimate_lateral_cost(
+                                    &output,
+                                    &output_types,
+                                    &to_reorder[*i],
+                                    input_types,
+                                )
+                            } else {
+                                estimate_join_cost(
+                                    &output,
+                                    &to_reorder[*i],
+                                    &JoinAlgorithm::HashBuildLeftProbeRight {
+                                        keys: join_key_variables(
+                                            &output_types,
+                                            &to_reorder_types[*i],
+                                            input_types,
+                                        ),
+                                    },
+                                    input_types,
+                                )
+                            }
+                        })
+                    {
+                        not_yet_reordered_ids[next_id] = false; // It's now done
+                        let next = to_reorder[next_id].clone();
+                        #[cfg(feature = "sep-0006")]
+                        {
+                            output = if is_fit_for_for_loop_join(&next, input_types, &output_types)
+                            {
+                                GraphPattern::lateral(output, next)
+                            } else {
+                                GraphPattern::join(
+                                    output,
+                                    next,
+                                    JoinAlgorithm::HashBuildLeftProbeRight {
+                                        keys: join_key_variables(
+                                            &output_types,
+                                            &to_reorder_types[next_id],
+                                            input_types,
+                                        ),
+                                    },
+                                )
+                            };
+                        }
+                        #[cfg(not(feature = "sep-0006"))]
+                        {
+                            output = GraphPattern::join(
+                                output,
+                                next,
+                                JoinAlgorithm::HashBuildLeftProbeRight {
+                                    keys: join_key_variables(
+                                        &output_types,
+                                        &to_reorder_types[next_id],
+                                        input_types,
+                                    ),
+                                },
+                            );
+                        }
+                        output_types.intersect_with(to_reorder_types[next_id].clone());
+                    }
+                    output_cartesian_product_joins.push(output);
+                }
+                output_cartesian_product_joins
+                    .into_iter()
+                    .reduce(|left, right| {
+                        let keys = join_key_variables(
+                            &infer_graph_pattern_types(&left, input_types.clone()),
+                            &infer_graph_pattern_types(&right, input_types.clone()),
+                            input_types,
+                        );
+                        if estimate_graph_pattern_size(&left, input_types)
+                            <= estimate_graph_pattern_size(&right, input_types)
+                        {
+                            GraphPattern::join(
+                                left,
+                                right,
+                                JoinAlgorithm::HashBuildLeftProbeRight { keys },
+                            )
+                        } else {
+                            GraphPattern::join(
+                                right,
+                                left,
+                                JoinAlgorithm::HashBuildLeftProbeRight { keys },
+                            )
+                        }
+                    })
+                    .unwrap()
+            }
+            #[cfg(feature = "sep-0006")]
+            GraphPattern::Lateral { left, right } => {
+                let left_types = infer_graph_pattern_types(&left, input_types.clone());
+                GraphPattern::lateral(
+                    Self::reorder_joins(*left, input_types),
+                    Self::reorder_joins(*right, &left_types),
+                )
+            }
+            GraphPattern::LeftJoin {
+                left,
+                right,
+                expression,
+                ..
+ } => { + let left = Self::reorder_joins(*left, input_types); + let left_types = infer_graph_pattern_types(&left, input_types.clone()); + let right = Self::reorder_joins(*right, input_types); + let right_types = infer_graph_pattern_types(&right, input_types.clone()); + #[cfg(feature = "sep-0006")] + { + if is_fit_for_for_loop_join(&right, input_types, &left_types) + && has_common_variables(&left_types, &right_types, input_types) + { + return GraphPattern::lateral( + left, + GraphPattern::left_join( + GraphPattern::empty_singleton(), + right, + expression, + LeftJoinAlgorithm::HashBuildRightProbeLeft { keys: Vec::new() }, + ), + ); + } + } + GraphPattern::left_join( + left, + right, + expression, + LeftJoinAlgorithm::HashBuildRightProbeLeft { + keys: join_key_variables(&left_types, &right_types, input_types), + }, + ) + } + GraphPattern::Minus { left, right, .. } => { + let left = Self::reorder_joins(*left, input_types); + let left_types = infer_graph_pattern_types(&left, input_types.clone()); + let right = Self::reorder_joins(*right, input_types); + let right_types = infer_graph_pattern_types(&right, input_types.clone()); + GraphPattern::minus( + left, + right, + MinusAlgorithm::HashBuildRightProbeLeft { + keys: join_key_variables(&left_types, &right_types, input_types), + }, + ) + } + GraphPattern::Extend { + inner, + expression, + variable, + } => GraphPattern::extend( + Self::reorder_joins(*inner, input_types), + variable, + expression, + ), + GraphPattern::Filter { inner, expression } => { + GraphPattern::filter(Self::reorder_joins(*inner, input_types), expression) + } + GraphPattern::Union { inner } => GraphPattern::union_all( + inner + .into_iter() + .map(|c| Self::reorder_joins(c, input_types)), + ), + GraphPattern::Slice { + inner, + start, + length, + } => GraphPattern::slice(Self::reorder_joins(*inner, input_types), start, length), + GraphPattern::Distinct { inner } => { + GraphPattern::distinct(Self::reorder_joins(*inner, input_types)) + } + GraphPattern::Reduced { inner } => { + GraphPattern::reduced(Self::reorder_joins(*inner, input_types)) + } + GraphPattern::Project { inner, variables } => { + GraphPattern::project(Self::reorder_joins(*inner, input_types), variables) + } + GraphPattern::OrderBy { inner, expression } => { + GraphPattern::order_by(Self::reorder_joins(*inner, input_types), expression) + } + service @ GraphPattern::Service { .. } => { + // We don't do join reordering inside of SERVICE calls, we don't know about cardinalities + service + } + GraphPattern::Group { + inner, + variables, + aggregates, + } => GraphPattern::group( + Self::reorder_joins(*inner, input_types), + variables, + aggregates, + ), + } + } +} + +fn is_fit_for_for_loop_join( + pattern: &GraphPattern, + global_input_types: &VariableTypes, + entry_types: &VariableTypes, +) -> bool { + // TODO: think more about it + match pattern { + GraphPattern::Values { .. } + | GraphPattern::QuadPattern { .. } + | GraphPattern::Path { .. } => true, + #[cfg(feature = "sep-0006")] + GraphPattern::Lateral { left, right } => { + is_fit_for_for_loop_join(left, global_input_types, entry_types) + && is_fit_for_for_loop_join(right, global_input_types, entry_types) + } + GraphPattern::LeftJoin { + left, + right, + expression, + .. 
+ } => { + if !is_fit_for_for_loop_join(left, global_input_types, entry_types) { + return false; + } + + // It is not ok to transform into for loop join if right binds a variable also bound by the entry part of the for loop join + let mut left_types = infer_graph_pattern_types(left, global_input_types.clone()); + let right_types = infer_graph_pattern_types(right, global_input_types.clone()); + if right_types.iter().any(|(variable, t)| { + *t != VariableType::UNDEF + && left_types.get(variable).undef + && entry_types.get(variable) != VariableType::UNDEF + }) { + return false; + } + + // We don't forget the final expression + left_types.intersect_with(right_types); + is_expression_fit_for_for_loop_join(expression, &left_types, entry_types) + } + GraphPattern::Union { inner } => inner + .iter() + .all(|i| is_fit_for_for_loop_join(i, global_input_types, entry_types)), + GraphPattern::Filter { inner, expression } => { + is_fit_for_for_loop_join(inner, global_input_types, entry_types) + && is_expression_fit_for_for_loop_join( + expression, + &infer_graph_pattern_types(inner, global_input_types.clone()), + entry_types, + ) + } + GraphPattern::Extend { + inner, + expression, + variable, + } => { + is_fit_for_for_loop_join(inner, global_input_types, entry_types) + && entry_types.get(variable) == VariableType::UNDEF + && is_expression_fit_for_for_loop_join( + expression, + &infer_graph_pattern_types(inner, global_input_types.clone()), + entry_types, + ) + } + GraphPattern::Join { .. } + | GraphPattern::Minus { .. } + | GraphPattern::Service { .. } + | GraphPattern::OrderBy { .. } + | GraphPattern::Distinct { .. } + | GraphPattern::Reduced { .. } + | GraphPattern::Slice { .. } + | GraphPattern::Project { .. } + | GraphPattern::Group { .. } => false, + } +} + +fn are_all_expression_variables_bound( + expression: &Expression, + variable_types: &VariableTypes, +) -> bool { + expression + .used_variables() + .into_iter() + .all(|v| !variable_types.get(v).undef) +} + +fn are_no_expression_variables_bound( + expression: &Expression, + variable_types: &VariableTypes, +) -> bool { + expression + .used_variables() + .into_iter() + .all(|v| variable_types.get(v) == VariableType::UNDEF) +} + +fn is_expression_fit_for_for_loop_join( + expression: &Expression, + input_types: &VariableTypes, + entry_types: &VariableTypes, +) -> bool { + match expression { + Expression::NamedNode(_) | Expression::Literal(_) => true, + Expression::Variable(v) | Expression::Bound(v) => { + !input_types.get(v).undef || entry_types.get(v) == VariableType::UNDEF + } + Expression::Or(inner) + | Expression::And(inner) + | Expression::Coalesce(inner) + | Expression::FunctionCall(_, inner) => inner + .iter() + .all(|e| is_expression_fit_for_for_loop_join(e, input_types, entry_types)), + Expression::Equal(a, b) + | Expression::SameTerm(a, b) + | Expression::Greater(a, b) + | Expression::GreaterOrEqual(a, b) + | Expression::Less(a, b) + | Expression::LessOrEqual(a, b) + | Expression::Add(a, b) + | Expression::Subtract(a, b) + | Expression::Multiply(a, b) + | Expression::Divide(a, b) => { + is_expression_fit_for_for_loop_join(a, input_types, entry_types) + && is_expression_fit_for_for_loop_join(b, input_types, entry_types) + } + Expression::UnaryPlus(e) | Expression::UnaryMinus(e) | Expression::Not(e) => { + is_expression_fit_for_for_loop_join(e, input_types, entry_types) + } + Expression::If(a, b, c) => { + is_expression_fit_for_for_loop_join(a, input_types, entry_types) + && is_expression_fit_for_for_loop_join(b, input_types, entry_types) + 
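+                // the condition and both result branches must all be evaluable
+                // against the bindings provided by the loop entry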
                && is_expression_fit_for_for_loop_join(c, input_types, entry_types)
+        }
+        Expression::Exists(inner) => is_fit_for_for_loop_join(inner, input_types, entry_types),
+    }
+}
+
+fn has_common_variables(
+    left: &VariableTypes,
+    right: &VariableTypes,
+    input_types: &VariableTypes,
+) -> bool {
+    // TODO: we should be smart and count as shared variables FILTER(?a = ?b)
+    left.iter().any(|(variable, left_type)| {
+        !left_type.undef && !right.get(variable).undef && input_types.get(variable).undef
+    })
+}
+
+fn join_key_variables(
+    left: &VariableTypes,
+    right: &VariableTypes,
+    input_types: &VariableTypes,
+) -> Vec<Variable> {
+    left.iter()
+        .filter(|(variable, left_type)| {
+            !left_type.undef && !right.get(variable).undef && input_types.get(variable).undef
+        })
+        .map(|(variable, _)| variable.clone())
+        .collect()
+}
+
+fn estimate_graph_pattern_size(pattern: &GraphPattern, input_types: &VariableTypes) -> usize {
+    match pattern {
+        GraphPattern::Values { bindings, .. } => bindings.len(),
+        GraphPattern::QuadPattern {
+            subject,
+            predicate,
+            object,
+            ..
+        } => estimate_triple_pattern_size(
+            is_term_pattern_bound(subject, input_types),
+            is_named_node_pattern_bound(predicate, input_types),
+            is_term_pattern_bound(object, input_types),
+        ),
+        GraphPattern::Path {
+            subject,
+            path,
+            object,
+            ..
+        } => estimate_path_size(
+            is_term_pattern_bound(subject, input_types),
+            path,
+            is_term_pattern_bound(object, input_types),
+        ),
+        GraphPattern::Join {
+            left,
+            right,
+            algorithm,
+        } => estimate_join_cost(left, right, algorithm, input_types),
+        GraphPattern::LeftJoin {
+            left,
+            right,
+            algorithm,
+            ..
+        } => match algorithm {
+            LeftJoinAlgorithm::HashBuildRightProbeLeft { keys } => {
+                let left_size = estimate_graph_pattern_size(left, input_types);
+                max(
+                    left_size,
+                    left_size
+                        .saturating_mul(estimate_graph_pattern_size(
+                            right,
+                            &infer_graph_pattern_types(right, input_types.clone()),
+                        ))
+                        .saturating_div(1_000_usize.saturating_pow(keys.len().try_into().unwrap())),
+                )
+            }
+        },
+        #[cfg(feature = "sep-0006")]
+        GraphPattern::Lateral { left, right } => estimate_lateral_cost(
+            left,
+            &infer_graph_pattern_types(left, input_types.clone()),
+            right,
+            input_types,
+        ),
+        GraphPattern::Union { inner } => inner
+            .iter()
+            .map(|inner| estimate_graph_pattern_size(inner, input_types))
+            .fold(0, usize::saturating_add),
+        GraphPattern::Minus { left, .. } => estimate_graph_pattern_size(left, input_types),
+        GraphPattern::Filter { inner, .. }
+        | GraphPattern::Extend { inner, .. }
+        | GraphPattern::OrderBy { inner, .. }
+        | GraphPattern::Project { inner, .. }
+        | GraphPattern::Distinct { inner, .. }
+        | GraphPattern::Reduced { inner, .. }
+        | GraphPattern::Group { inner, .. }
+        | GraphPattern::Service { inner, ..
} => estimate_graph_pattern_size(inner, input_types), + GraphPattern::Slice { + inner, + start, + length, + } => { + let inner = estimate_graph_pattern_size(inner, input_types); + if let Some(length) = length { + min(inner, *length - *start) + } else { + inner + } + } + } +} + +fn estimate_join_cost( + left: &GraphPattern, + right: &GraphPattern, + algorithm: &JoinAlgorithm, + input_types: &VariableTypes, +) -> usize { + match algorithm { + JoinAlgorithm::HashBuildLeftProbeRight { keys } => { + estimate_graph_pattern_size(left, input_types) + .saturating_mul(estimate_graph_pattern_size(right, input_types)) + .saturating_div(1_000_usize.saturating_pow(keys.len().try_into().unwrap())) + } + } +} +fn estimate_lateral_cost( + left: &GraphPattern, + left_types: &VariableTypes, + right: &GraphPattern, + input_types: &VariableTypes, +) -> usize { + estimate_graph_pattern_size(left, input_types) + .saturating_mul(estimate_graph_pattern_size(right, left_types)) +} + +fn estimate_triple_pattern_size( + subject_bound: bool, + predicate_bound: bool, + object_bound: bool, +) -> usize { + match (subject_bound, predicate_bound, object_bound) { + (true, true, true) => 1, + (true, true, false) => 10, + (true, false, true) => 2, + (false, true, true) => 10_000, + (true, false, false) => 100, + (false, false, false) => 1_000_000_000, + (false, true, false) => 1_000_000, + (false, false, true) => 100_000, + } +} + +fn estimate_path_size(start_bound: bool, path: &PropertyPathExpression, end_bound: bool) -> usize { + match path { + PropertyPathExpression::NamedNode(_) => { + estimate_triple_pattern_size(start_bound, true, end_bound) + } + PropertyPathExpression::Reverse(p) => estimate_path_size(end_bound, p, start_bound), + PropertyPathExpression::Sequence(a, b) => { + // We do a for loop join in the best direction + min( + estimate_path_size(start_bound, a, false) + .saturating_mul(estimate_path_size(true, b, end_bound)), + estimate_path_size(start_bound, a, true) + .saturating_mul(estimate_path_size(false, b, end_bound)), + ) + } + PropertyPathExpression::Alternative(a, b) => estimate_path_size(start_bound, a, end_bound) + .saturating_add(estimate_path_size(start_bound, b, end_bound)), + PropertyPathExpression::ZeroOrMore(p) => { + if start_bound && end_bound { + 1 + } else if start_bound || end_bound { + estimate_path_size(start_bound, p, end_bound).saturating_mul(1000) + } else { + 1_000_000_000 + } + } + PropertyPathExpression::OneOrMore(p) => { + if start_bound && end_bound { + 1 + } else { + estimate_path_size(start_bound, p, end_bound).saturating_mul(1000) + } + } + PropertyPathExpression::ZeroOrOne(p) => { + if start_bound && end_bound { + 1 + } else if start_bound || end_bound { + estimate_path_size(start_bound, p, end_bound) + } else { + 1_000_000_000 + } + } + PropertyPathExpression::NegatedPropertySet(_) => { + estimate_triple_pattern_size(start_bound, false, end_bound) + } + } +} + +fn is_term_pattern_bound(pattern: &GroundTermPattern, input_types: &VariableTypes) -> bool { + match pattern { + GroundTermPattern::NamedNode(_) | GroundTermPattern::Literal(_) => true, + GroundTermPattern::Variable(v) => !input_types.get(v).undef, + #[cfg(feature = "rdf-star")] + GroundTermPattern::Triple(t) => { + is_term_pattern_bound(&t.subject, input_types) + && is_named_node_pattern_bound(&t.predicate, input_types) + && is_term_pattern_bound(&t.object, input_types) + } + } +} + +fn is_named_node_pattern_bound(pattern: &NamedNodePattern, input_types: &VariableTypes) -> bool { + match pattern { + 
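+        // A constant is always bound; a variable counts as bound only when type
+        // inference guarantees it cannot be UNDEF.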
NamedNodePattern::NamedNode(_) => true, + NamedNodePattern::Variable(v) => !input_types.get(v).undef, + } +} diff --git a/ng-oxigraph/src/sparopt/type_inference.rs b/ng-oxigraph/src/sparopt/type_inference.rs new file mode 100644 index 0000000..7af0c68 --- /dev/null +++ b/ng-oxigraph/src/sparopt/type_inference.rs @@ -0,0 +1,462 @@ +use crate::oxrdf::Variable; +use crate::spargebra::algebra::Function; +use crate::spargebra::term::{GroundTerm, GroundTermPattern, NamedNodePattern}; +use crate::sparopt::algebra::{Expression, GraphPattern}; +use std::collections::HashMap; +use std::ops::{BitAnd, BitOr}; + +pub fn infer_graph_pattern_types( + pattern: &GraphPattern, + mut types: VariableTypes, +) -> VariableTypes { + match pattern { + GraphPattern::QuadPattern { + subject, + predicate, + object, + graph_name, + } => { + add_ground_term_pattern_types(subject, &mut types, false); + if let NamedNodePattern::Variable(v) = predicate { + types.intersect_variable_with(v.clone(), VariableType::NAMED_NODE) + } + add_ground_term_pattern_types(object, &mut types, true); + if let Some(NamedNodePattern::Variable(v)) = graph_name { + types.intersect_variable_with(v.clone(), VariableType::NAMED_NODE) + } + types + } + GraphPattern::Path { + subject, + object, + graph_name, + .. + } => { + add_ground_term_pattern_types(subject, &mut types, false); + add_ground_term_pattern_types(object, &mut types, true); + if let Some(NamedNodePattern::Variable(v)) = graph_name { + types.intersect_variable_with(v.clone(), VariableType::NAMED_NODE) + } + types + } + GraphPattern::Join { left, right, .. } => { + let mut output_types = infer_graph_pattern_types(left, types.clone()); + output_types.intersect_with(infer_graph_pattern_types(right, types)); + output_types + } + #[cfg(feature = "sep-0006")] + GraphPattern::Lateral { left, right } => { + infer_graph_pattern_types(right, infer_graph_pattern_types(left, types)) + } + GraphPattern::LeftJoin { left, right, .. } => { + let mut right_types = infer_graph_pattern_types(right, types.clone()); // TODO: expression + for t in right_types.inner.values_mut() { + t.undef = true; // Right might be unset + } + let mut output_types = infer_graph_pattern_types(left, types); + output_types.intersect_with(right_types); + output_types + } + GraphPattern::Minus { left, .. } => infer_graph_pattern_types(left, types), + GraphPattern::Union { inner } => inner + .iter() + .map(|inner| infer_graph_pattern_types(inner, types.clone())) + .reduce(|mut a, b| { + a.union_with(b); + a + }) + .unwrap_or_default(), + GraphPattern::Extend { + inner, + variable, + expression, + } => { + let mut types = infer_graph_pattern_types(inner, types); + types.intersect_variable_with( + variable.clone(), + infer_expression_type(expression, &types), + ); + types + } + GraphPattern::Filter { inner, .. } => infer_graph_pattern_types(inner, types), + GraphPattern::Project { inner, variables } => VariableTypes { + inner: infer_graph_pattern_types(inner, types) + .inner + .into_iter() + .filter(|(v, _)| variables.contains(v)) + .collect(), + }, + GraphPattern::Distinct { inner } + | GraphPattern::Reduced { inner } + | GraphPattern::OrderBy { inner, .. } + | GraphPattern::Slice { inner, .. 
        } => infer_graph_pattern_types(inner, types),
+        GraphPattern::Group {
+            inner,
+            variables,
+            aggregates,
+        } => {
+            let types = infer_graph_pattern_types(inner, types);
+            VariableTypes {
+                inner: types
+                    .inner
+                    .into_iter()
+                    .filter(|(v, _)| variables.contains(v))
+                    .chain(
+                        aggregates
+                            .iter()
+                            .map(|(v, _)| (v.clone(), VariableType::ANY)),
+                    ) //TODO: guess from aggregate
+                    .collect(),
+            }
+        }
+        GraphPattern::Values {
+            variables,
+            bindings,
+        } => {
+            for (i, v) in variables.iter().enumerate() {
+                let mut t = VariableType::default();
+                for binding in bindings {
+                    match binding[i] {
+                        Some(GroundTerm::NamedNode(_)) => t.named_node = true,
+                        Some(GroundTerm::Literal(_)) => t.literal = true,
+                        #[cfg(feature = "rdf-star")]
+                        Some(GroundTerm::Triple(_)) => t.triple = true,
+                        None => t.undef = true,
+                    }
+                }
+                types.intersect_variable_with(v.clone(), t)
+            }
+            types
+        }
+        GraphPattern::Service {
+            name,
+            inner,
+            silent,
+        } => {
+            let parent_types = types.clone();
+            let mut types = infer_graph_pattern_types(inner, types);
+            if let NamedNodePattern::Variable(v) = name {
+                types.intersect_variable_with(v.clone(), VariableType::NAMED_NODE)
+            }
+            if *silent {
+                // On failure, single empty solution
+                types.union_with(parent_types);
+            }
+            types
+        }
+    }
+}
+
+fn add_ground_term_pattern_types(
+    pattern: &GroundTermPattern,
+    types: &mut VariableTypes,
+    is_object: bool,
+) {
+    if let GroundTermPattern::Variable(v) = pattern {
+        types.intersect_variable_with(
+            v.clone(),
+            if is_object {
+                VariableType::TERM
+            } else {
+                VariableType::SUBJECT
+            },
+        )
+    }
+    #[cfg(feature = "rdf-star")]
+    if let GroundTermPattern::Triple(t) = pattern {
+        add_ground_term_pattern_types(&t.subject, types, false);
+        if let NamedNodePattern::Variable(v) = &t.predicate {
+            types.intersect_variable_with(v.clone(), VariableType::NAMED_NODE)
+        }
+        add_ground_term_pattern_types(&t.object, types, true);
+    }
+}
+
+pub fn infer_expression_type(expression: &Expression, types: &VariableTypes) -> VariableType {
+    match expression {
+        Expression::NamedNode(_) => VariableType::NAMED_NODE,
+        Expression::Literal(_) | Expression::Exists(_) | Expression::Bound(_) => {
+            VariableType::LITERAL
+        }
+        Expression::Variable(v) => types.get(v),
+        Expression::FunctionCall(Function::Datatype | Function::Iri, _) => {
+            VariableType::NAMED_NODE | VariableType::UNDEF
+        }
+        #[cfg(feature = "rdf-star")]
+        Expression::FunctionCall(Function::Predicate, _) => {
+            VariableType::NAMED_NODE | VariableType::UNDEF
+        }
+        Expression::FunctionCall(Function::BNode, args) => {
+            if args.is_empty() {
+                VariableType::BLANK_NODE
+            } else {
+                VariableType::BLANK_NODE | VariableType::UNDEF
+            }
+        }
+        Expression::FunctionCall(
+            Function::Rand | Function::Now | Function::Uuid | Function::StrUuid,
+            _,
+        ) => VariableType::LITERAL,
+        Expression::Or(_)
+        | Expression::And(_)
+        | Expression::Equal(_, _)
+        | Expression::Greater(_, _)
+        | Expression::GreaterOrEqual(_, _)
+        | Expression::Less(_, _)
+        | Expression::LessOrEqual(_, _)
+        | Expression::Add(_, _)
+        | Expression::Subtract(_, _)
+        | Expression::Multiply(_, _)
+        | Expression::Divide(_, _)
+        | Expression::UnaryPlus(_)
+        | Expression::UnaryMinus(_)
+        | Expression::Not(_)
+        | Expression::FunctionCall(
+            Function::Str
+            | Function::Lang
+            | Function::LangMatches
+            | Function::Abs
+            | Function::Ceil
+            | Function::Floor
+            | Function::Round
+            | Function::Concat
+            | Function::SubStr
+            | Function::StrLen
+            | Function::Replace
+            | Function::UCase
+            | Function::LCase
+            | Function::EncodeForUri
+            | Function::Contains
+            | Function::StrStarts
+            | Function::StrEnds
+            | Function::StrBefore
+            | Function::StrAfter
+            | Function::Year
+            | Function::Month
+            | Function::Day
+            | Function::Hours
+            | Function::Minutes
+            | Function::Seconds
+            | Function::Timezone
+            | Function::Tz
+            | Function::Md5
+            | Function::Sha1
+            | Function::Sha256
+            | Function::Sha384
+            | Function::Sha512
+            | Function::StrLang
+            | Function::StrDt
+            | Function::IsIri
+            | Function::IsBlank
+            | Function::IsLiteral
+            | Function::IsNumeric
+            | Function::Regex,
+            _,
+        ) => VariableType::LITERAL | VariableType::UNDEF,
+        #[cfg(feature = "sep-0002")]
+        Expression::FunctionCall(Function::Adjust, _) => {
+            VariableType::LITERAL | VariableType::UNDEF
+        }
+        #[cfg(feature = "rdf-star")]
+        Expression::FunctionCall(Function::IsTriple, _) => {
+            VariableType::LITERAL | VariableType::UNDEF
+        }
+        Expression::SameTerm(left, right) => {
+            if infer_expression_type(left, types).undef || infer_expression_type(right, types).undef
+            {
+                VariableType::LITERAL | VariableType::UNDEF
+            } else {
+                VariableType::LITERAL
+            }
+        }
+        Expression::If(_, then, els) => {
+            infer_expression_type(then, types) | infer_expression_type(els, types)
+        }
+        Expression::Coalesce(inner) => {
+            let mut t = VariableType::UNDEF;
+            for e in inner {
+                let new = infer_expression_type(e, types);
+                t = t | new;
+                if !new.undef {
+                    t.undef = false;
+                    return t;
+                }
+            }
+            t
+        }
+        #[cfg(feature = "rdf-star")]
+        Expression::FunctionCall(Function::Triple, _) => VariableType::TRIPLE | VariableType::UNDEF,
+        #[cfg(feature = "rdf-star")]
+        Expression::FunctionCall(Function::Subject, _) => {
+            VariableType::SUBJECT | VariableType::UNDEF
+        }
+        #[cfg(feature = "rdf-star")]
+        Expression::FunctionCall(Function::Object, _) => VariableType::TERM | VariableType::UNDEF,
+        Expression::FunctionCall(Function::Custom(_), _) => VariableType::ANY,
+    }
+}
+
+#[derive(Default, Clone, Debug)]
+pub struct VariableTypes {
+    inner: HashMap<Variable, VariableType>,
+}
+
+impl VariableTypes {
+    pub fn get(&self, variable: &Variable) -> VariableType {
+        self.inner
+            .get(variable)
+            .copied()
+            .unwrap_or(VariableType::UNDEF)
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = (&Variable, &VariableType)> {
+        self.inner.iter()
+    }
+
+    pub fn intersect_with(&mut self, other: Self) {
+        for (v, t) in other.inner {
+            self.intersect_variable_with(v, t);
+        }
+    }
+
+    pub fn union_with(&mut self, other: Self) {
+        for (v, t) in &mut self.inner {
+            if other.get(v).undef {
+                t.undef = true; // Might be undefined
+            }
+        }
+        for (v, mut t) in other.inner {
+            self.inner
+                .entry(v)
+                .and_modify(|ex| *ex = *ex | t)
+                .or_insert({
+                    t.undef = true;
+                    t
+                });
+        }
+    }
+
+    fn intersect_variable_with(&mut self, variable: Variable, t: VariableType) {
+        let t = self.get(&variable) & t;
+        if t != VariableType::UNDEF {
+            self.inner.insert(variable, t);
+        }
+    }
+}
+
+#[allow(clippy::struct_excessive_bools)]
+#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
+pub struct VariableType {
+    pub undef: bool,
+    pub named_node: bool,
+    pub blank_node: bool,
+    pub literal: bool,
+    #[cfg(feature = "rdf-star")]
+    pub triple: bool,
+}
+
+impl VariableType {
+    const ANY: Self = Self {
+        undef: true,
+        named_node: true,
+        blank_node: true,
+        literal: true,
+        #[cfg(feature = "rdf-star")]
+        triple: true,
+    };
+    const BLANK_NODE: Self = Self {
+        undef: false,
+        named_node: false,
+        blank_node: true,
+        literal: false,
+        #[cfg(feature = "rdf-star")]
+        triple: false,
+    };
+    const LITERAL: Self = Self {
+        undef: false,
+        named_node: false,
+        blank_node: false,
+        literal: true,
#[cfg(feature = "rdf-star")] + triple: false, + }; + const NAMED_NODE: Self = Self { + undef: false, + named_node: true, + blank_node: false, + literal: false, + #[cfg(feature = "rdf-star")] + triple: false, + }; + const SUBJECT: Self = Self { + undef: false, + named_node: true, + blank_node: true, + literal: false, + #[cfg(feature = "rdf-star")] + triple: true, + }; + const TERM: Self = Self { + undef: false, + named_node: true, + blank_node: true, + literal: true, + #[cfg(feature = "rdf-star")] + triple: true, + }; + #[cfg(feature = "rdf-star")] + const TRIPLE: Self = Self { + undef: false, + named_node: false, + blank_node: false, + literal: false, + triple: true, + }; + pub const UNDEF: Self = Self { + undef: true, + named_node: false, + blank_node: false, + literal: false, + #[cfg(feature = "rdf-star")] + triple: false, + }; +} + +impl BitOr for VariableType { + type Output = Self; + + fn bitor(self, other: Self) -> Self { + Self { + undef: self.undef || other.undef, + named_node: self.named_node || other.named_node, + blank_node: self.blank_node || other.blank_node, + literal: self.literal || other.literal, + #[cfg(feature = "rdf-star")] + triple: self.triple || other.triple, + } + } +} + +impl BitAnd for VariableType { + type Output = Self; + + #[allow(clippy::nonminimal_bool)] + fn bitand(self, other: Self) -> Self { + Self { + undef: self.undef && other.undef, + named_node: self.named_node && other.named_node + || (self.undef && other.named_node) + || (self.named_node && other.undef), + blank_node: self.blank_node && other.blank_node + || (self.undef && other.blank_node) + || (self.blank_node && other.undef), + literal: self.literal && other.literal + || (self.undef && other.literal) + || (self.literal && other.undef), + #[cfg(feature = "rdf-star")] + triple: self.triple && other.triple + || (self.undef && other.triple) + || (self.triple && other.undef), + } + } +} diff --git a/ng-oxigraph/tests/rocksdb_bc_data/000003.log b/ng-oxigraph/tests/rocksdb_bc_data/000003.log new file mode 100644 index 0000000000000000000000000000000000000000..fac33cee6c6d85fed4876cab01c04c5d486d3f1e GIT binary patch literal 8399 zcmds5e@xV67=Pd6zQUnyG)iMOyaA&c!W|rfM^L#t7n(4QGq9}u3J09rFYgCGMul4@ znfix?uB}{dA!M-Crj}W@HY{puxfSK6E~Q1LTMbMJs8;WN`GV|uUV87iH`69k-^Z^)RfcYXmHfX0XfIxt25Z$0Y{+4u+eN}RweDh zpSE`IyK?Pi=kupuPaQMNv@$8{bIoS6!I)>r%ggrF)Mooz+yS{c+wD&ew0P|dqUKop zk@ZETy>BI5>i_8EK=Kk5!e3o)cTKvqFo#ltiNY8ubWb4|k#r{UDg;cVL=(b7s}L}8 zChtjJP3aWXQZ&?_`u&@SkL!+(C2Z&|{eQD&O zp|ck^zxF{x{NDSzOE#t8WUlrE-2q<<%ly)=i~`rs@8yoa@>|CKEz6%tpIJP3B_}sO zk6Q+yR|=7pG9X;VW7veS&?*Xn$%7N*)IWr#sEkHc2EO*XKOGpzEni$PdbRKLj`Djj zqh60U=#&Gj#!CrVI6R1;SHeo&wUe=uMQ8mL0WrO0P1~Yh5fCx&E*#uRX{zFmkf@UQ zmag>gM?OE+QDCa*@3@#hxD2!456HfNRStyiltWz~|8eNt$pyO}Sa;%P`^1}>GnF-Y zE#(=FxrVB;^%crBlU}#kF8jE(6uQ0}f@hnIMs<&&hgM1Nh^-Kzq4XB|LRJ(yCcg8_ z+BzxCP(zGoPsR|uO2fPEGo2asbz>KLhkMEoAH4a=p1bfd1bt2hUv*>es{PNdYPztn zqdKMYsi&2Nezl$OhAy9+TAzU}+>-tNj`5-whLSDysXxSzSKcc;DcRk!>9Mukqk`e3 zKy(@?D0EDE?mx0`DWzHJQAJfZI;7IUCk8ied-3}-FCAUfdX`%^ zSk5>to*H(qahfZLkW{#m<%}9zNJUCj6|O@2W^9=d7FtCiFsT}!XztR`sa#8pM#!*n zoX5VECXFl|zT87xy01ph#d*3{Y%|T&_{@7Q)Mpziy!L8`>?~TN^x3SS$x5%Sv0{qg zE^x~VT@xwJirQ9e2CDG?ve^j3^_2?Nn9yJKStPOXY7M&pLt%aKs9F0{q}u^a}%@h=slml14;)kKax zbUS;5toC91*_R`#$nM7B4>fUgE1CR@>Pm<6hIyLI#7cOa%O37PJaoZ=V!dtTFyRQ* zCXNQdz(|l*!@TSvjxZGdWgjsNN+s@PFN_Akpd%vhUeBt`jHBDtL|Dk4|A*PwJSGdA zS&7G9++>Ly2B{q5YHi|Z5DbiDuE_#%grV@0C1Mz)a&EE+qd_ouI=RBUrp}?EK{Z*T z8fn1c9=Ge5o=(O}&zvq727(8ZSZA!u6`LWUVH~X$dqZZ>6HlLlorU29BhyJnmg0i^be*&1`v_~{ 
z1ZiOgYAMJsP7h)ajU%ju8>EE=s3pI+AbqFox9xZ~(QW03G)0=H7dRwTObWaDub|9d?R+ RYCT~s;vg+tAT2>v)BLt literal 0 HcmV?d00001 diff --git a/ng-oxigraph/tests/rocksdb_bc_data/OPTIONS-000026 b/ng-oxigraph/tests/rocksdb_bc_data/OPTIONS-000026 new file mode 100644 index 0000000..518dd62 --- /dev/null +++ b/ng-oxigraph/tests/rocksdb_bc_data/OPTIONS-000026 @@ -0,0 +1,964 @@ +# This is a RocksDB option file. +# +# For detailed file format spec, please refer to the example file +# in examples/rocksdb_option_file_example.ini +# + +[Version] + rocksdb_version=6.7.3 + options_file_version=1.1 + +[DBOptions] + write_dbid_to_manifest=false + avoid_unnecessary_blocking_io=false + two_write_queues=false + allow_ingest_behind=false + writable_file_max_buffer_size=1048576 + avoid_flush_during_shutdown=false + avoid_flush_during_recovery=false + info_log_level=INFO_LEVEL + access_hint_on_compaction_start=NORMAL + allow_concurrent_memtable_write=true + enable_pipelined_write=false + stats_dump_period_sec=600 + stats_persist_period_sec=600 + strict_bytes_per_sync=false + WAL_ttl_seconds=0 + WAL_size_limit_MB=0 + max_subcompactions=1 + dump_malloc_stats=false + db_log_dir= + wal_recovery_mode=kPointInTimeRecovery + log_file_time_to_roll=0 + enable_write_thread_adaptive_yield=true + recycle_log_file_num=0 + table_cache_numshardbits=6 + atomic_flush=false + preserve_deletes=false + stats_history_buffer_size=1048576 + max_open_files=-1 + max_file_opening_threads=16 + delete_obsolete_files_period_micros=21600000000 + max_background_flushes=-1 + write_thread_slow_yield_usec=3 + base_background_compactions=-1 + manual_wal_flush=false + wal_dir=tests/rockdb_bc_data + max_background_compactions=-1 + bytes_per_sync=0 + max_background_jobs=2 + use_fsync=false + unordered_write=false + fail_if_options_file_error=false + random_access_max_buffer_size=1048576 + compaction_readahead_size=0 + wal_bytes_per_sync=0 + new_table_reader_for_compaction_inputs=false + skip_stats_update_on_db_open=false + persist_stats_to_disk=false + skip_log_error_on_recovery=false + log_readahead_size=0 + is_fd_close_on_exec=true + use_adaptive_mutex=false + error_if_exists=false + write_thread_max_yield_usec=100 + enable_thread_tracking=false + db_write_buffer_size=0 + create_missing_column_families=true + paranoid_checks=true + create_if_missing=true + max_manifest_file_size=1073741824 + allow_2pc=false + max_total_wal_size=0 + use_direct_io_for_flush_and_compaction=false + manifest_preallocation_size=4194304 + use_direct_reads=false + delayed_write_rate=16777216 + allow_fallocate=true + max_write_batch_group_size_bytes=1048576 + keep_log_file_num=1000 + allow_mmap_reads=false + max_log_file_size=0 + allow_mmap_writes=false + advise_random_on_open=true + + +[CFOptions "default"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + 
max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "default"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "id2str"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + 
bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "id2str"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "spog"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "spog"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + 
partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "posg"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "posg"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "ospg"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + 
merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "ospg"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "gspo"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + 
max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "gspo"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "gpos"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + 
bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "gpos"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "gosp"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "gosp"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + 
partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "dspo"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "dspo"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "dpos"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + 
merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "dpos"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + + +[CFOptions "dosp"] + sample_for_compression=0 + compaction_pri=kMinOverlappingRatio + merge_operator=nullptr + compaction_filter_factory=nullptr + memtable_factory=SkipListFactory + memtable_insert_with_hint_prefix_extractor=nullptr + comparator=leveldb.BytewiseComparator + target_file_size_base=67108864 + max_sequential_skip_in_iterations=8 + compaction_style=kCompactionStyleLevel + max_bytes_for_level_base=268435456 + bloom_locality=0 + write_buffer_size=67108864 + compression_per_level= + memtable_huge_page_size=0 + max_successive_merges=0 + arena_block_size=8388608 + memtable_whole_key_filtering=false + target_file_size_multiplier=1 + max_bytes_for_level_multiplier_additional=1:1:1:1:1:1:1 + num_levels=7 + min_write_buffer_number_to_merge=1 + max_write_buffer_number_to_maintain=0 + 
max_write_buffer_number=2 + compression=kSnappyCompression + level0_stop_writes_trigger=36 + level0_slowdown_writes_trigger=20 + compaction_filter=nullptr + level0_file_num_compaction_trigger=4 + max_compaction_bytes=1677721600 + compaction_options_universal={stop_style=kCompactionStopStyleTotalSize;compression_size_percent=-1;allow_trivial_move=false;max_merge_width=4294967295;max_size_amplification_percent=200;min_merge_width=2;size_ratio=1;} + memtable_prefix_bloom_size_ratio=0.000000 + max_write_buffer_size_to_maintain=0 + hard_pending_compaction_bytes_limit=274877906944 + ttl=2592000 + table_factory=BlockBasedTable + soft_pending_compaction_bytes_limit=68719476736 + prefix_extractor=nullptr + bottommost_compression=kDisableCompressionOption + force_consistency_checks=false + paranoid_file_checks=false + compaction_options_fifo={allow_compaction=false;max_table_files_size=1073741824;} + max_bytes_for_level_multiplier=10.000000 + optimize_filters_for_hits=false + level_compaction_dynamic_level_bytes=false + inplace_update_num_locks=10000 + inplace_update_support=false + periodic_compaction_seconds=0 + disable_auto_compactions=false + report_bg_io_stats=false + +[TableOptions/BlockBasedTable "dosp"] + pin_top_level_index_and_filter=true + enable_index_compression=true + read_amp_bytes_per_bit=8589934592 + format_version=2 + block_align=false + metadata_block_size=4096 + block_size_deviation=10 + partition_filters=false + block_size=4096 + index_block_restart_interval=1 + no_block_cache=false + checksum=kCRC32c + whole_key_filtering=true + index_shortening=kShortenSeparators + data_block_index_type=kDataBlockBinarySearch + index_type=kBinarySearch + verify_compression=false + filter_policy=nullptr + data_block_hash_table_util_ratio=0.750000 + pin_l0_filter_and_index_blocks_in_cache=false + block_restart_interval=16 + cache_index_and_filter_blocks_with_high_priority=true + cache_index_and_filter_blocks=false + hash_index_allow_collision=true + flush_block_policy_factory=FlushBlockBySizePolicyFactory + diff --git a/ng-oxigraph/tests/store.rs b/ng-oxigraph/tests/store.rs new file mode 100644 index 0000000..6e8b1cb --- /dev/null +++ b/ng-oxigraph/tests/store.rs @@ -0,0 +1,542 @@ +#![cfg(test)] +#![allow(clippy::panic_in_result_fn)] + +use ng_oxigraph::oxigraph::io::RdfFormat; +use ng_oxigraph::oxigraph::model::vocab::{rdf, xsd}; +use ng_oxigraph::oxigraph::model::*; +use ng_oxigraph::oxigraph::store::Store; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use rand::random; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::env::temp_dir; +use std::error::Error; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::fs::{create_dir_all, remove_dir_all, File}; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::io::Write; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::iter::empty; +#[cfg(all(target_os = "linux"))] +use std::iter::once; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::path::{Path, PathBuf}; +#[cfg(all(target_os = "linux"))] +use std::process::Command; + +#[allow(clippy::non_ascii_literal)] +const DATA: &str = r#" +@prefix schema: <http://schema.org/> . +@prefix wd: <http://www.wikidata.org/entity/> . +@prefix xsd: <http://www.w3.org/2001/XMLSchema#> . + +wd:Q90 a schema:City ; + schema:name "Paris"@fr , "la ville lumière"@fr ; + schema:country wd:Q142 ; + schema:population 2000000 ; + schema:startDate "-300"^^xsd:gYear ; + schema:url "https://www.paris.fr/"^^xsd:anyURI ; + schema:postalCode "75001" . +"#; + +#[allow(clippy::non_ascii_literal)] +const GRAPH_DATA: &str = r#" +@prefix schema: <http://schema.org/> .
+@prefix wd: <http://www.wikidata.org/entity/> . +@prefix xsd: <http://www.w3.org/2001/XMLSchema#> . + +GRAPH <http://www.wikidata.org/wiki/Special:EntityData/Q90> { + wd:Q90 a schema:City ; + schema:name "Paris"@fr , "la ville lumière"@fr ; + schema:country wd:Q142 ; + schema:population 2000000 ; + schema:startDate "-300"^^xsd:gYear ; + schema:url "https://www.paris.fr/"^^xsd:anyURI ; + schema:postalCode "75001" . +} +"#; +const NUMBER_OF_TRIPLES: usize = 8; + +fn quads(graph_name: impl Into<GraphNameRef<'static>>) -> Vec<QuadRef<'static>> { + let graph_name = graph_name.into(); + let paris = NamedNodeRef::new_unchecked("http://www.wikidata.org/entity/Q90"); + let france = NamedNodeRef::new_unchecked("http://www.wikidata.org/entity/Q142"); + let city = NamedNodeRef::new_unchecked("http://schema.org/City"); + let name = NamedNodeRef::new_unchecked("http://schema.org/name"); + let country = NamedNodeRef::new_unchecked("http://schema.org/country"); + let population = NamedNodeRef::new_unchecked("http://schema.org/population"); + let start_date = NamedNodeRef::new_unchecked("http://schema.org/startDate"); + let url = NamedNodeRef::new_unchecked("http://schema.org/url"); + let postal_code = NamedNodeRef::new_unchecked("http://schema.org/postalCode"); + vec![ + QuadRef::new(paris, rdf::TYPE, city, graph_name), + QuadRef::new( + paris, + name, + LiteralRef::new_language_tagged_literal_unchecked("Paris", "fr"), + graph_name, + ), + QuadRef::new( + paris, + name, + LiteralRef::new_language_tagged_literal_unchecked("la ville lumi\u{E8}re", "fr"), + graph_name, + ), + QuadRef::new(paris, country, france, graph_name), + QuadRef::new( + paris, + population, + LiteralRef::new_typed_literal("2000000", xsd::INTEGER), + graph_name, + ), + QuadRef::new( + paris, + start_date, + LiteralRef::new_typed_literal("-300", xsd::G_YEAR), + graph_name, + ), + QuadRef::new( + paris, + url, + LiteralRef::new_typed_literal("https://www.paris.fr/", xsd::ANY_URI), + graph_name, + ), + QuadRef::new( + paris, + postal_code, + LiteralRef::new_simple_literal("75001"), + graph_name, + ), + ] +} + +#[test] +fn test_load_graph() -> Result<(), Box<dyn Error>> { + let store = Store::new()?; + store.load_from_read(RdfFormat::Turtle, DATA.as_bytes())?; + for q in quads(GraphNameRef::DefaultGraph) { + assert!(store.contains(q)?); + } + store.validate()?; + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_bulk_load_graph() -> Result<(), Box<dyn Error>> { + let store = Store::new()?; + store + .bulk_loader() + .load_from_read(RdfFormat::Turtle, DATA.as_bytes())?; + for q in quads(GraphNameRef::DefaultGraph) { + assert!(store.contains(q)?); + } + store.validate()?; + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_bulk_load_graph_lenient() -> Result<(), Box<dyn Error>> { + let store = Store::new()?; + store.bulk_loader().on_parse_error(|_| Ok(())).load_from_read( + RdfFormat::NTriples, + b"<http://example.com> <http://example.com> <http://example.com> .\n<http://example.com> .".as_slice(), + )?; + assert_eq!(store.len()?, 1); + assert!(store.contains(QuadRef::new( + NamedNodeRef::new_unchecked("http://example.com"), + NamedNodeRef::new_unchecked("http://example.com"), + NamedNodeRef::new_unchecked("http://example.com"), + GraphNameRef::DefaultGraph + ))?); + store.validate()?; + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_bulk_load_empty() -> Result<(), Box<dyn Error>> { + let store = Store::new()?; + store.bulk_loader().load_quads(empty::<Quad>())?; + assert!(store.is_empty()?); + store.validate()?; + Ok(()) +} + +#[test] +fn test_load_dataset() -> Result<(), Box<dyn Error>> { + let store = Store::new()?; + store.load_from_read(RdfFormat::TriG, GRAPH_DATA.as_bytes())?; + for q in quads(NamedNodeRef::new_unchecked(
"http://www.wikidata.org/wiki/Special:EntityData/Q90", + )) { + assert!(store.contains(q)?); + } + store.validate()?; + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_bulk_load_dataset() -> Result<(), Box> { + let store = Store::new()?; + store + .bulk_loader() + .load_from_read(RdfFormat::TriG, GRAPH_DATA.as_bytes())?; + let graph_name = + NamedNodeRef::new_unchecked("http://www.wikidata.org/wiki/Special:EntityData/Q90"); + for q in quads(graph_name) { + assert!(store.contains(q)?); + } + assert!(store.contains_named_graph(graph_name)?); + store.validate()?; + Ok(()) +} + +#[test] +fn test_load_graph_generates_new_blank_nodes() -> Result<(), Box> { + let store = Store::new()?; + for _ in 0..2 { + store.load_from_read( + RdfFormat::NTriples, + "_:a .".as_bytes(), + )?; + } + assert_eq!(store.len()?, 2); + Ok(()) +} + +#[test] +fn test_dump_graph() -> Result<(), Box> { + let store = Store::new()?; + for q in quads(GraphNameRef::DefaultGraph) { + store.insert(q)?; + } + + let mut buffer = Vec::new(); + store.dump_graph_to_write(GraphNameRef::DefaultGraph, RdfFormat::NTriples, &mut buffer)?; + assert_eq!( + buffer.into_iter().filter(|c| *c == b'\n').count(), + NUMBER_OF_TRIPLES + ); + Ok(()) +} + +#[test] +fn test_dump_dataset() -> Result<(), Box> { + let store = Store::new()?; + for q in quads(GraphNameRef::DefaultGraph) { + store.insert(q)?; + } + + let buffer = store.dump_to_write(RdfFormat::NQuads, Vec::new())?; + assert_eq!( + buffer.into_iter().filter(|c| *c == b'\n').count(), + NUMBER_OF_TRIPLES + ); + Ok(()) +} + +#[test] +fn test_snapshot_isolation_iterator() -> Result<(), Box> { + let quad = QuadRef::new( + NamedNodeRef::new("http://example.com/s")?, + NamedNodeRef::new("http://example.com/p")?, + NamedNodeRef::new("http://example.com/o")?, + NamedNodeRef::new("http://www.wikidata.org/wiki/Special:EntityData/Q90")?, + ); + let store = Store::new()?; + store.insert(quad)?; + let iter = store.iter(); + store.remove(quad)?; + assert_eq!( + iter.collect::, _>>()?, + vec![quad.into_owned()] + ); + store.validate()?; + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_bulk_load_on_existing_delete_overrides_the_delete() -> Result<(), Box> { + let quad = QuadRef::new( + NamedNodeRef::new_unchecked("http://example.com/s"), + NamedNodeRef::new_unchecked("http://example.com/p"), + NamedNodeRef::new_unchecked("http://example.com/o"), + NamedNodeRef::new_unchecked("http://www.wikidata.org/wiki/Special:EntityData/Q90"), + ); + let store = Store::new()?; + store.remove(quad)?; + store.bulk_loader().load_quads([quad.into_owned()])?; + assert_eq!(store.len()?, 1); + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_open_bad_dir() -> Result<(), Box> { + let dir = TempDir::default(); + create_dir_all(&dir.0)?; + { + File::create(dir.0.join("CURRENT"))?.write_all(b"foo")?; + } + assert!(Store::open(&dir.0).is_err()); + Ok(()) +} + +#[test] +#[cfg(all(target_os = "linux"))] +fn test_bad_stt_open() -> Result<(), Box> { + let dir = TempDir::default(); + let store = Store::open(&dir.0)?; + remove_dir_all(&dir.0)?; + store + .bulk_loader() + .load_quads(once(Quad::new( + NamedNode::new_unchecked("http://example.com/s"), + NamedNode::new_unchecked("http://example.com/p"), + NamedNode::new_unchecked("http://example.com/o"), + GraphName::DefaultGraph, + ))) + .unwrap_err(); + Ok(()) +} + +// #[test] +// #[cfg(all(not(target_family = "wasm"),not(docsrs)))] +// fn test_backup() -> Result<(), Box> { +// let 
quad = QuadRef::new( +// NamedNodeRef::new_unchecked("http://example.com/s"), +// NamedNodeRef::new_unchecked("http://example.com/p"), +// NamedNodeRef::new_unchecked("http://example.com/o"), +// GraphNameRef::DefaultGraph, +// ); +// let store_dir = TempDir::default(); +// let backup_from_rw_dir = TempDir::default(); +// let backup_from_ro_dir = TempDir::default(); +// let backup_from_secondary_dir = TempDir::default(); + +// let store = Store::open(&store_dir)?; +// store.insert(quad)?; +// let secondary_store = Store::open_secondary(&store_dir)?; +// store.flush()?; + +// store.backup(&backup_from_rw_dir)?; +// secondary_store.backup(&backup_from_secondary_dir)?; +// store.remove(quad)?; +// assert!(!store.contains(quad)?); + +// let backup_from_rw = Store::open_read_only(&backup_from_rw_dir.0)?; +// backup_from_rw.validate()?; +// assert!(backup_from_rw.contains(quad)?); +// backup_from_rw.backup(&backup_from_ro_dir)?; + +// let backup_from_ro = Store::open_read_only(&backup_from_ro_dir.0)?; +// backup_from_ro.validate()?; +// assert!(backup_from_ro.contains(quad)?); + +// let backup_from_secondary = Store::open_read_only(&backup_from_secondary_dir.0)?; +// backup_from_secondary.validate()?; +// assert!(backup_from_secondary.contains(quad)?); + +// Ok(()) +// } + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_bad_backup() -> Result<(), Box<dyn Error>> { + let store_dir = TempDir::default(); + let backup_dir = TempDir::default(); + + create_dir_all(&backup_dir.0)?; + Store::open(&store_dir)?.backup(&backup_dir.0).unwrap_err(); + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_backup_on_in_memory() -> Result<(), Box<dyn Error>> { + let backup_dir = TempDir::default(); + Store::new()?.backup(&backup_dir).unwrap_err(); + Ok(()) +} + +#[test] +#[cfg(all(target_os = "linux"))] +fn test_backward_compatibility() -> Result<(), Box<dyn Error>> { + // We run twice to check if data is properly saved and closed + for _ in 0..2 { + let store = Store::open("tests/rocksdb_bc_data")?; + for q in quads(GraphNameRef::DefaultGraph) { + assert!(store.contains(q)?); + } + let graph_name = + NamedNodeRef::new_unchecked("http://www.wikidata.org/wiki/Special:EntityData/Q90"); + for q in quads(graph_name) { + assert!(store.contains(q)?); + } + assert!(store.contains_named_graph(graph_name)?); + assert_eq!( + vec![NamedOrBlankNode::from(graph_name)], + store.named_graphs().collect::<Result<Vec<_>, _>>()? 
+ ); + } + reset_dir("tests/rocksdb_bc_data")?; + Ok(()) +} + +// #[test] +// #[cfg(all(not(target_family = "wasm"),not(docsrs)))] +// fn test_secondary() -> Result<(), Box<dyn Error>> { +// let quad = QuadRef::new( +// NamedNodeRef::new_unchecked("http://example.com/s"), +// NamedNodeRef::new_unchecked("http://example.com/p"), +// NamedNodeRef::new_unchecked("http://example.com/o"), +// GraphNameRef::DefaultGraph, +// ); +// let primary_dir = TempDir::default(); + +// // We open the store +// let primary = Store::open(&primary_dir)?; +// let secondary = Store::open_secondary(&primary_dir)?; + +// // We insert a quad +// primary.insert(quad)?; +// primary.flush()?; + +// // It is readable from both stores +// for store in &[&primary, &secondary] { +// assert!(store.contains(quad)?); +// assert_eq!( +// store.iter().collect::<Result<Vec<_>, _>>()?, +// vec![quad.into_owned()] +// ); +// } + +// // We validate the states +// primary.validate()?; +// secondary.validate()?; + +// // We close the primary store and remove its content +// drop(primary); +// remove_dir_all(&primary_dir)?; + +// // The secondary store is still readable +// assert!(secondary.contains(quad)?); +// secondary.validate()?; + +// Ok(()) +// } + +// #[test] +// #[cfg(all(not(target_family = "wasm"),not(docsrs)))] +// fn test_open_secondary_bad_dir() -> Result<(), Box<dyn Error>> { +// let primary_dir = TempDir::default(); +// create_dir_all(&primary_dir.0)?; +// { +// File::create(primary_dir.0.join("CURRENT"))?.write_all(b"foo")?; +// } +// assert!(Store::open_secondary(&primary_dir).is_err()); +// Ok(()) +// } + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_read_only() -> Result<(), Box<dyn Error>> { + let s = NamedNodeRef::new_unchecked("http://example.com/s"); + let p = NamedNodeRef::new_unchecked("http://example.com/p"); + let first_quad = QuadRef::new( + s, + p, + NamedNodeRef::new_unchecked("http://example.com/o"), + GraphNameRef::DefaultGraph, + ); + let second_quad = QuadRef::new( + s, + p, + NamedNodeRef::new_unchecked("http://example.com/o2"), + GraphNameRef::DefaultGraph, + ); + let store_dir = TempDir::default(); + + // We write to the store and close it + { + let read_write = Store::open(&store_dir)?; + read_write.insert(first_quad)?; + read_write.flush()?; + } + + // We open as read-only + let read_only = Store::open_read_only(&store_dir, None)?; + assert!(read_only.contains(first_quad)?); + assert_eq!( + read_only.iter().collect::<Result<Vec<_>, _>>()?, + vec![first_quad.into_owned()] + ); + read_only.validate()?; + + // We open as read-write again + let read_write = Store::open(&store_dir)?; + read_write.insert(second_quad)?; + read_write.flush()?; + read_write.optimize()?; // Makes sure it's well flushed + + // The new quad is in the read-write instance but not the read-only instance + assert!(read_write.contains(second_quad)?); + assert!(!read_only.contains(second_quad)?); + read_only.validate()?; + + Ok(()) +} + +#[test] +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +fn test_open_read_only_bad_dir() -> Result<(), Box<dyn Error>> { + let dir = TempDir::default(); + create_dir_all(&dir.0)?; + { + File::create(dir.0.join("CURRENT"))?.write_all(b"foo")?; + } + assert!(Store::open_read_only(&dir, None).is_err()); + Ok(()) +} + +#[cfg(all(target_os = "linux"))] +fn reset_dir(dir: &str) -> Result<(), Box<dyn Error>> { + assert!(Command::new("git") + .args(["clean", "-fX", dir]) + .status()? + .success()); + assert!(Command::new("git") + .args(["checkout", "HEAD", "--", dir]) + .status()? 
+ .success()); + Ok(()) +} + +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +struct TempDir(PathBuf); + +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +impl Default for TempDir { + fn default() -> Self { + Self(temp_dir().join(format!("oxigraph-test-{}", random::<u128>()))) + } +} + +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +impl AsRef<Path> for TempDir { + fn as_ref(&self) -> &Path { + &self.0 + } +} + +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +impl Drop for TempDir { + fn drop(&mut self) { + if self.0.is_dir() { + remove_dir_all(&self.0).unwrap(); + } + } +} diff --git a/ng-repo/Cargo.toml b/ng-repo/Cargo.toml new file mode 100644 index 0000000..bbe5909 --- /dev/null +++ b/ng-repo/Cargo.toml @@ -0,0 +1,61 @@ +[package] +name = "ng-repo" +# version = "0.1.0-preview.1" +description = "Repository library of NextGraph, a decentralized, secure and local-first web 3.0 ecosystem based on Semantic Web and CRDTs" +categories = ["asynchronous","database-implementations"] +version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true +homepage.workspace = true +keywords = [ +"crdt","e2ee","local-first","p2p","collaboration"] +documentation.workspace = true +rust-version.workspace = true + +[badges] +maintenance = { status = "actively-developed" } + +[features] +server_log_output = [] +testing = [] + +[dependencies] +serde = { version = "1.0.142", features = ["derive"] } +serde_bare = "0.5.0" +serde_bytes = "0.11.7" +serde_json = "1.0" +once_cell = "1.17.1" +futures = "0.3.24" +num_enum = "0.5.7" +slice_as_array = "1.1.0" +rand = { version = "0.7", features = ["getrandom"] } +blake3 = "1.3.1" +chacha20 = "0.9.0" +ed25519-dalek = "1.0.1" +sbbf-rs-safe = "0.3.2" +lazy_static = "1.4.0" +curve25519-dalek = "3.2.0" +ng_threshold_crypto = { version = "0.4.1", git = "https://git.nextgraph.org/NextGraph/threshold_crypto.git", branch = "master" } +crypto_box = { version = "0.8.2", features = ["seal"] } +zeroize = { version = "1.7.0", features = ["zeroize_derive"] } +base64-url = "2.0.0" +web-time = "0.2.0" +time = { version= "=0.3.41", features = ["formatting","local-offset"] } +wasm-bindgen = "0.2.88" +os_info = "3" +current_platform = "0.2.0" + +[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom] +version = "0.3.3" +features = ["wasm_js"] + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +debug_print = "1.0.0" +log = "0.4" +getrandom = "0.3.3" + +[target.'cfg(target_arch = "wasm32")'.dependencies] +gloo-timers = "0.3.0" +time = { version= "=0.3.41", features = ["formatting","local-offset","wasm-bindgen"] } \ No newline at end of file diff --git a/ng-repo/README.md b/ng-repo/README.md new file mode 100644 index 0000000..79754f5 --- /dev/null +++ b/ng-repo/README.md @@ -0,0 +1,56 @@ +# ng-repo + +![MSRV][rustc-image] +[![Apache 2.0 Licensed][license-image]][license-link] +[![MIT Licensed][license-image2]][license-link2] + +Repository library of NextGraph + +This repository is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and in order to join the dev team, please visit the link above and create an account (you can do so with a github account). The [github repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues. 
+ +## NextGraph + +> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs. +> +> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers. +> +> More info here [https://nextgraph.org](https://nextgraph.org) + +## Support + +Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org) + +And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org) + +## How to use the library + +NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/). + +This library is used internally by [ngd](../ngd/README.md), [ngcli](../ngcli/README.md), [ng-app](../ng-app/README.md) and by [nextgraph, the Rust client library](../nextgraph/README.md) which you should be using instead. It is not meant to be used by other programs as-is. + +## License + +Licensed under either of + +- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + at your option. + +`SPDX-License-Identifier: Apache-2.0 OR MIT` + +### Contributions license + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you shall be dual licensed as above, without any +additional terms or conditions. + +--- + +NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively. + + +[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg +[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg +[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2 +[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg +[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT diff --git a/ng-repo/src/block.rs b/ng-repo/src/block.rs new file mode 100644 index 0000000..6c88297 --- /dev/null +++ b/ng-repo/src/block.rs @@ -0,0 +1,285 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0> +// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, +// at your option.
All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Immutable Block, used to store and exchange File and Commit + +use chacha20::cipher::{KeyIvInit, StreamCipher}; +use chacha20::ChaCha20; + +use crate::errors::*; +use crate::log::*; +use crate::types::*; + +impl BlockV0 { + pub fn new( + children: Vec<BlockId>, + mut header_ref: Option<CommitHeaderRef>, + content: Vec<u8>, + key: Option<SymKey>, + ) -> BlockV0 { + let (commit_header, commit_header_key) = header_ref + .take() + .map_or((CommitHeaderObject::None, None), |obj_ref| { + (obj_ref.obj, Some(obj_ref.key)) + }); + let bc = BlockContentV0 { + children, + commit_header: commit_header, + encrypted_content: content, + }; + let mut b = BlockV0 { + id: None, + key, + content: BlockContent::V0(bc), + commit_header_key, + }; + b.id = Some(b.compute_id()); + b + } + + pub fn dummy() -> BlockV0 { + BlockV0::new(vec![], None, vec![], None) + } + + pub fn new_random_access( + children: Vec<BlockId>, + content: Vec<u8>, + key: Option<SymKey>, + ) -> BlockV0 { + let bc = BlockContentV0 { + children, + commit_header: CommitHeaderObject::RandomAccess, + encrypted_content: content, + }; + let mut b = BlockV0 { + id: None, + key, + content: BlockContent::V0(bc), + commit_header_key: None, + }; + b.id = Some(b.compute_id()); + b + } + + /// Compute the ID + pub fn compute_id(&self) -> BlockId { + let ser = serde_bare::to_vec(&self.content).unwrap(); + let hash = blake3::hash(ser.as_slice()); + Digest::Blake3Digest32(hash.as_bytes().clone()) + } + + pub fn children(&self) -> &Vec<BlockId> { + self.content.children() + } +} + +impl From<&Digest> for String { + fn from(id: &Digest) -> Self { + let mut ser = serde_bare::to_vec(id).unwrap(); + ser.reverse(); + base64_url::encode(&ser) + } +} + +impl BlockContent { + /// Get the encrypted content + pub fn encrypted_content(&self) -> &Vec<u8> { + match self { + BlockContent::V0(bc) => &bc.encrypted_content, + } + } + + // /// Get the header id + // pub fn header_id(&self) -> &Option<ObjectId> { + // match self { + // BlockContent::V0(bc) => &bc.commit_header_id, + // } + // } + + /// Get the children + pub fn children(&self) -> &Vec<BlockId> { + match self { + BlockContent::V0(b) => &b.children, + } + } +} + +impl Block { + pub fn new( + children: Vec<BlockId>, + header_ref: Option<CommitHeaderRef>, + content: Vec<u8>, + key: Option<SymKey>, + ) -> Block { + Block::V0(BlockV0::new(children, header_ref, content, key)) + } + + pub fn dummy() -> Block { + Block::V0(BlockV0::dummy()) + } + + pub fn new_random_access( + children: Vec<BlockId>, + content: Vec<u8>, + key: Option<SymKey>, + ) -> Block { + Block::V0(BlockV0::new_random_access(children, content, key)) + } + + pub fn new_with_encrypted_content(content: Vec<u8>, key: Option<SymKey>) -> Block { + Block::V0(BlockV0::new(vec![], None, content, key)) + } + + pub fn size(&self) -> usize { + serde_bare::to_vec(&self).unwrap().len() + } + + /// Compute the ID + pub fn compute_id(&self) -> BlockId { + match self { + Block::V0(v0) => v0.compute_id(), + } + } + + /// Get the already computed ID, or compute it, save it, and return it + pub fn get_and_save_id(&mut self) -> BlockId { + match &self { + Block::V0(b) => match b.id { + Some(id) => id, + None => { + let id = self.compute_id(); + let Block::V0(c) = self; + c.id = Some(id); + id + } + }, + } + } + + /// Get the already computed ID, or compute it + pub fn id(&self) -> BlockId { + match self { + Block::V0(b) => match b.id { + Some(id) => id, + None => self.compute_id(), + }, + } + } + + /// Get the content + pub fn content(&self) -> &BlockContent { + match self { + Block::V0(b) 
=> &b.content, + } + } + + /// Get the encrypted content + pub fn encrypted_content(&self) -> &Vec<u8> { + match self { + Block::V0(b) => &b.content.encrypted_content(), + } + } + + /// Get the children + pub fn children(&self) -> &Vec<BlockId> { + match self { + Block::V0(b) => &b.content.children(), + } + } + + pub fn destroy_header(&mut self) { + match self { + Block::V0(b) => b.commit_header_key = None, + } + } + + /// Get the header reference + pub fn header_ref(&self) -> Option<CommitHeaderRef> { + match self { + Block::V0(b) => match b.commit_header_key.as_ref() { + Some(key) => match b.content.commit_header_obj() { + CommitHeaderObject::None => None, + CommitHeaderObject::RandomAccess => None, + _ => Some(CommitHeaderRef { + obj: b.content.commit_header_obj().clone(), + key: key.clone(), + }), + }, + + None => None, + }, + } + } + + /// Get the key + pub fn key(&self) -> Option<SymKey> { + match self { + Block::V0(b) => b.key.clone(), + } + } + + /// Set the key + pub fn set_key(&mut self, key: Option<SymKey>) { + match self { + Block::V0(b) => b.key = key, + } + } + + pub fn read( + &self, + key: &SymKey, + ) -> Result<(Vec<(BlockId, BlockKey)>, Vec<u8>), ObjectParseError> { + match self { + Block::V0(b) => { + // decrypt content in place (this is why we have to clone first) + let mut content_dec = b.content.encrypted_content().clone(); + match key { + SymKey::ChaCha20Key(key) => { + let nonce = [0u8; 12]; + let mut cipher = ChaCha20::new(key.into(), &nonce.into()); + let mut content_dec_slice = &mut content_dec.as_mut_slice(); + cipher.apply_keystream(&mut content_dec_slice); + } + } + + // deserialize content + let content: ChunkContentV0; + match serde_bare::from_slice(content_dec.as_slice()) { + Ok(c) => content = c, + Err(_e) => { + //log_debug!("Block deserialize error: {}", _e); + return Err(ObjectParseError::BlockDeserializeError); + } + } + // parse content + match content { + ChunkContentV0::InternalNode(keys) => { + let b_children = b.children(); + if keys.len() != b_children.len() { + log_debug!( + "Invalid keys length: got {}, expected {}", + keys.len(), + b_children.len() + ); + log_debug!("!!! children: {:?}", b_children); + log_debug!("!!! keys: {:?}", keys); + return Err(ObjectParseError::InvalidKeys); + } + let mut children = Vec::with_capacity(b_children.len()); + for (id, key) in b_children.iter().zip(keys.iter()) { + children.push((id.clone(), key.clone())); + } + Ok((children, vec![])) + } + ChunkContentV0::DataChunk(chunk) => Ok((vec![], chunk)), + } + } + } + } +} diff --git a/ng-repo/src/block_storage.rs b/ng-repo/src/block_storage.rs new file mode 100644 index 0000000..e55258e --- /dev/null +++ b/ng-repo/src/block_storage.rs @@ -0,0 +1,179 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0> +// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Storage of Blocks + +use std::sync::RwLock; +use std::{ + cmp::{max, min}, + collections::HashMap, + mem::size_of_val, +}; + +use futures::StreamExt; + +use crate::errors::*; +use crate::types::*; +use crate::utils::Receiver; + +pub trait BlockStorage: Send + Sync { + /// Load a block from the storage. + fn get(&self, overlay: &OverlayId, id: &BlockId) -> Result<Block, StorageError>; + + // fetch a block from broker or core overlay + // pub async fn fetch(&self, id: &BlockId) -> Result<Block, StorageError> { + // todo!(); + // } + + /// Save a block to the storage. 
+ fn put(&self, overlay: &OverlayId, block: &Block, lazy: bool) -> Result<BlockId, StorageError>; + + /// Delete a block from the storage. + fn del(&self, overlay: &OverlayId, id: &BlockId) -> Result<usize, StorageError>; + + /// number of Blocks in the storage + fn len(&self) -> Result<usize, StorageError>; + + fn has(&self, overlay: &OverlayId, id: &BlockId) -> Result<(), StorageError>; +} + +/* LMDB values: + +const MIN_SIZE: usize = 4072; +const PAGE_SIZE: usize = 4096; +const HEADER: usize = PAGE_SIZE - MIN_SIZE; +const MAX_FACTOR: usize = 512; + +/// Returns a valid/optimal value size for the entries of the storage backend. +pub fn store_valid_value_size(size: usize) -> usize { + min( + ((size + HEADER) as f32 / PAGE_SIZE as f32).ceil() as usize, + MAX_FACTOR, + ) * PAGE_SIZE + - HEADER +} + +/// Returns the maximum value size for the entries of the storage backend. +pub const fn store_max_value_size() -> usize { + MAX_FACTOR * PAGE_SIZE - HEADER +} +*/ + +// ROCKSDB values: + +const ONE_MEGA_BYTE: usize = 1024 * 1024; +const DISK_BLOCK_SIZE: usize = 4096; +// HDD block size at 4096, SSD page size at 4096, on openbsd FFS default is 16384 +// see Rocksdb integrated BlobDB https://rocksdb.org/blog/2021/05/26/integrated-blob-db.html +// blob values should be multiples of 4096 because of the BlobCache of RocksDB that is in heap memory (so must align on mem page). +const MAX_FACTOR: usize = 256; + +/// Returns a valid/optimal value size for the entries of the storage backend. +pub fn store_valid_value_size(size: usize) -> usize { + min( + max(1, (size + DISK_BLOCK_SIZE - 1) / DISK_BLOCK_SIZE), + MAX_FACTOR, + ) * DISK_BLOCK_SIZE +} + +/// Returns the maximum value size for the entries of the storage backend. +pub const fn store_max_value_size() -> usize { + ONE_MEGA_BYTE +} + +/// Store with a HashMap backend +pub struct HashMapBlockStorage { + blocks: RwLock<HashMap<BlockId, Block>>, +} + +impl HashMapBlockStorage { + pub fn new() -> HashMapBlockStorage { + HashMapBlockStorage { + blocks: RwLock::new(HashMap::new()), + } + } + + pub async fn from_block_stream(overlay: &OverlayId, mut blockstream: Receiver<Block>) -> Self { + let this = Self::new(); + while let Some(block) = blockstream.next().await { + this.put(overlay, &block, false).unwrap(); + } + this + } + + pub fn get_len(&self) -> usize { + self.blocks.read().unwrap().len() + } + + pub fn get_all(&self) -> Vec<Block> { + self.blocks + .read() + .unwrap() + .values() + .map(|x| x.clone()) + .collect() + } + pub fn put_local(&self, block: &Block) -> Result<BlockId, StorageError> { + let overlay = OverlayId::nil(); + self.put(&overlay, block, false) + } +} + +impl BlockStorage for HashMapBlockStorage { + fn get(&self, _overlay: &OverlayId, id: &BlockId) -> Result<Block, StorageError> { + match self.blocks.read().unwrap().get(id) { + Some(block) => { + let mut b = block.clone(); + let i = b.get_and_save_id(); + if *id == i { + Ok(b) + } else { + Err(StorageError::DataCorruption) + } + } + None => Err(StorageError::NotFound), + } + } + + fn has(&self, _overlay: &OverlayId, id: &BlockId) -> Result<(), StorageError> { + if !self.blocks.read().unwrap().contains_key(id) { + return Err(StorageError::NotFound); + } + Ok(()) + } + + fn len(&self) -> Result<usize, StorageError> { + Ok(self.get_len()) + } + + fn put( + &self, + _overlay: &OverlayId, + block: &Block, + _lazy: bool, + ) -> Result<BlockId, StorageError> { + let id = block.id(); + //log_debug!("PUTTING {}", id); + let mut b = block.clone(); + b.set_key(None); + self.blocks.write().unwrap().insert(id, b); + Ok(id) + } + + fn del(&self, _overlay: &OverlayId, id: &BlockId) -> Result<usize, StorageError> { + let block = self + .blocks + .write() + .unwrap() + .remove(id) + .ok_or(StorageError::NotFound)?; + let size = size_of_val(&block); + Ok(size) + } +}
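+
+// A quick sanity check of the sizing helpers above; an illustrative sketch,
+// assuming only the constants defined in this file (DISK_BLOCK_SIZE = 4096,
+// MAX_FACTOR = 256, ONE_MEGA_BYTE = 1024 * 1024):
+#[cfg(test)]
+mod value_size_tests {
+    use super::{store_max_value_size, store_valid_value_size};
+
+    #[test]
+    fn rounds_to_multiples_of_disk_block_size() {
+        assert_eq!(store_valid_value_size(0), 4096); // at least one disk block
+        assert_eq!(store_valid_value_size(4096), 4096); // exact fit is kept
+        assert_eq!(store_valid_value_size(4097), 8192); // otherwise rounded up
+        // capped at MAX_FACTOR blocks, which equals store_max_value_size()
+        assert_eq!(store_valid_value_size(2 * 1024 * 1024), 1024 * 1024);
+        assert_eq!(store_max_value_size(), 1024 * 1024);
+    }
+}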
diff --git a/ng-repo/src/branch.rs b/ng-repo/src/branch.rs new file mode 100644 index 0000000..c52e11e --- /dev/null +++ b/ng-repo/src/branch.rs @@ -0,0 +1,612 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0> +// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Branch of a Repository + +use std::collections::HashMap; +use std::collections::HashSet; +use std::fmt; + +use sbbf_rs_safe::Filter; +use zeroize::Zeroize; + +use crate::errors::*; +#[allow(unused_imports)] +use crate::log::*; +use crate::object::*; +use crate::store::Store; +use crate::types::*; +use crate::utils::encrypt_in_place; + +impl BranchV0 { + pub fn new( + id: PubKey, + repo: ObjectRef, + root_branch_readcap_id: ObjectId, + topic_priv: PrivKey, + metadata: Vec<u8>, + ) -> BranchV0 { + let topic_privkey: Vec<u8> = vec![]; + //TODO: use encrypt_topic_priv_key + let topic = topic_priv.to_pub(); + BranchV0 { + id, + crdt: BranchCrdt::None, + repo, + root_branch_readcap_id, + topic, + topic_privkey, + pulled_from: vec![], + metadata, + } + } +} + +#[allow(dead_code)] +#[derive(Debug)] +pub struct DagNode { + pub future: HashSet<ObjectId>, + pub past: HashSet<ObjectId>, +} + +#[allow(dead_code)] +struct Dag<'a>(&'a HashMap<ObjectId, DagNode>); + +impl fmt::Display for DagNode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for fu in self.future.iter() { + write!(f, "{} ", fu)?; + } + Ok(()) + } +} + +impl<'a> fmt::Display for Dag<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for node in self.0.iter() { + writeln!(f, "ID: {} FUTURES: {}", node.0, node.1)?; + } + Ok(()) + } +} + +impl DagNode { + fn new() -> Self { + Self { + future: HashSet::new(), + past: HashSet::new(), + } + } + fn collapse( + id: &ObjectId, + dag: &HashMap<ObjectId, DagNode>, + dag_ids: &HashSet<ObjectId>, + already_in: &mut HashSet<ObjectId>, + ) -> Vec<ObjectId> { + let this = dag.get(id).unwrap(); + let intersec = this + .past + .intersection(dag_ids) + .cloned() + .collect::<HashSet<ObjectId>>(); + if intersec.len() > 1 && !intersec.is_subset(already_in) { + // we postpone it + // log_debug!("postponed {}", id); + vec![] + } else { + let mut res = vec![*id]; + already_in.insert(*id); + for child in this.future.iter() { + // log_debug!("child of {} : {}", id, child); + res.append(&mut Self::collapse(child, dag, dag_ids, already_in)); + } + res + } + } +} + +impl Branch { + /// topic private key (a BranchWriteCapSecret), encrypted with a key derived as follows: + /// BLAKE3 derive_key ("NextGraph Branch WriteCap Secret BLAKE3 key", + /// RepoWriteCapSecret, TopicId, BranchId ) + /// so that only editors of the repo can decrypt the privkey + /// nonce = 0 + fn encrypt_topic_priv_key( + mut plaintext: Vec<u8>, + topic_id: TopicId, + branch_id: BranchId, + repo_write_cap_secret: &RepoWriteCapSecret, + ) -> Vec<u8> { + let repo_write_cap_secret = serde_bare::to_vec(repo_write_cap_secret).unwrap(); + let topic_id = serde_bare::to_vec(&topic_id).unwrap(); + let branch_id = serde_bare::to_vec(&branch_id).unwrap(); + let mut key_material = [repo_write_cap_secret, topic_id, branch_id].concat(); + let mut key: [u8; 32] = blake3::derive_key( + "NextGraph Branch WriteCap Secret BLAKE3 key", + key_material.as_slice(), + ); + encrypt_in_place(&mut plaintext, key, [0; 12]); + key.zeroize(); + key_material.zeroize(); + 
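+        // ChaCha20 being an XOR stream cipher with the nonce fixed at zero (see
+        // the doc-comment above), running this same function on the ciphertext
+        // with identical inputs decrypts it; decrypt_branch_write_cap_secret()
+        // below reuses it as-is for that reason.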
plaintext + } + + pub fn encrypt_branch_write_cap_secret( + privkey: &BranchWriteCapSecret, + topic_id: TopicId, + branch_id: BranchId, + repo_write_cap_secret: &RepoWriteCapSecret, + ) -> Vec<u8> { + let plaintext = serde_bare::to_vec(privkey).unwrap(); + Branch::encrypt_topic_priv_key(plaintext, topic_id, branch_id, repo_write_cap_secret) + } + + pub fn decrypt_branch_write_cap_secret( + ciphertext: Vec<u8>, + topic_id: TopicId, + branch_id: BranchId, + repo_write_cap_secret: &RepoWriteCapSecret, + ) -> Result<BranchWriteCapSecret, NgError> { + let plaintext = + Branch::encrypt_topic_priv_key(ciphertext, topic_id, branch_id, repo_write_cap_secret); + Ok(serde_bare::from_slice(&plaintext)?) + } + + pub fn new( + id: PubKey, + repo: ObjectRef, + root_branch_readcap_id: ObjectId, + topic_priv: PrivKey, + metadata: Vec<u8>, + ) -> Branch { + Branch::V0(BranchV0::new( + id, + repo, + root_branch_readcap_id, + topic_priv, + metadata, + )) + } + + /// Load the causal past of the Commits whose ids are pushed on `recursor`, in a `Branch` from the `Store`, + /// + /// and collect in `visited` the ObjectIds encountered on the way, stopping at any commit already belonging to `theirs` or at the root of the DAG. + /// Optionally collects the missing objects/blocks that couldn't be found locally on the way, + /// and also, optionally, the commits of `theirs` found on the way. + pub fn load_causal_past( + recursor: &mut Vec<(ObjectId, Option<ObjectId>)>, + store: &Store, + theirs: &HashSet<ObjectId>, + visited: &mut HashMap<ObjectId, DagNode>, + missing: &mut Option<&mut HashSet<BlockId>>, + theirs_found: &mut Option<&mut HashSet<ObjectId>>, + theirs_filter: &Option<Filter>, + ) -> Result<(), ObjectParseError> { + while let Some((id, future)) = recursor.pop() { + match Object::load(id, None, store) { + Ok(cobj) => { + let id = cobj.id(); + + // check if this commit object is present in theirs or has already been visited in the current walk + // load deps, stop at the root(including it in visited) or if this is a commit object from known_heads + + let mut found_in_theirs = theirs.contains(&id); + if !found_in_theirs { + found_in_theirs = if let Some(filter) = theirs_filter { + let hash = id.get_hash(); + filter.contains_hash(hash) + } else { + false + }; + } + + if found_in_theirs { + if theirs_found.is_some() { + theirs_found.as_mut().unwrap().insert(id); + } + } else { + if let Some(past) = visited.get_mut(&id) { + // we update the future + if let Some(f) = future { + past.future.insert(f); + } + } else { + let mut new_node_to_insert = DagNode::new(); + if let Some(f) = future { + new_node_to_insert.future.insert(f); + } + let pasts = cobj.acks_and_nacks(); + new_node_to_insert.past.extend(pasts.iter().cloned()); + visited.insert(id, new_node_to_insert); + recursor.extend(pasts.into_iter().map(|past_id| (past_id, Some(id)))); + // for past_id in pasts { + // match Object::load(past_id, None, store) { + // Ok(o) => { + // Self::load_causal_past( + // recursor, + // store, + // theirs, + // visited, + // missing, + // theirs_found, + // theirs_filter, + // )?; + // } + // Err(ObjectParseError::MissingBlocks(blocks)) => { + // missing.as_mut().map(|m| m.extend(blocks)); + // } + // Err(e) => return Err(e), + // } + // } + } + } + } + Err(ObjectParseError::MissingBlocks(blocks)) => { + if future.is_some() { + missing.as_mut().map(|m| m.extend(blocks)); + } + } + Err(e) => { + if future.is_some() { + return Err(e); + } + } + } + } + Ok(()) + } +
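+
+    // Note on the shape of load_causal_past(): the walk drives an explicit
+    // `recursor` stack instead of the recursive variant kept commented out in
+    // its body, presumably so that an arbitrarily deep commit history cannot
+    // overflow the call stack.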
+ /// Branch sync request from another peer + /// + /// `target_heads` represents the list of heads the requester would like to reach. This list cannot be empty. + /// If the requester doesn't know what to reach, the responder should fill this list with their own current local head. + /// This is not done here; it should be done before, in the handling of incoming requests. + /// `known_heads` represents the list of current heads at the requester replica at the moment of request. + /// An empty list means the requester has an empty branch locally. + /// + /// Returns the ObjectIds to send, ordered to respect the causal partial order. + pub fn sync_req( + target_heads: impl Iterator<Item = ObjectId>, + known_heads: &[ObjectId], + known_commits: &Option<BloomFilter>, + store: &Store, + ) -> Result<Vec<ObjectId>, ObjectParseError> { + // their commits + let mut theirs: HashMap<ObjectId, DagNode> = HashMap::new(); + + // + let mut recursor: Vec<(ObjectId, Option<ObjectId>)> = + known_heads.iter().map(|h| (h.clone(), None)).collect(); + // collect causal past of known_heads + // we silently discard any load error on the known_heads as the responder might not know them (yet). + Self::load_causal_past( + &mut recursor, + store, + &HashSet::new(), + &mut theirs, + &mut None, + &mut None, + &None, + )?; + + // log_debug!("their causal past \n{}", Dag(&theirs)); + + let mut visited = HashMap::new(); + + let theirs: HashSet<ObjectId> = theirs.keys().into_iter().cloned().collect(); + + let filter = if let Some(filter) = known_commits.as_ref() { + Some( + filter.filter(), //.map_err(|_| ObjectParseError::FilterDeserializationError)?, + ) + } else { + None + }; + + let mut recursor: Vec<(ObjectId, Option<ObjectId>)> = + target_heads.map(|h| (h.clone(), None)).collect(); + // collect all commits reachable from target_heads + // up to the root or until encountering a commit from theirs + // we silently discard any load error on the target_heads as they can be wrong if the requester is confused about what the responder has locally. + Self::load_causal_past( + &mut recursor, + store, + &theirs, + &mut visited, + &mut None, + &mut None, + &filter, + )?; + // for id in target_heads { + // if let Ok(cobj) = Object::load(id, None, store) { + // Self::load_causal_past( + // &cobj, + // store, + // &theirs, + // &mut visited, + // &mut None, + // None, + // &mut None, + // &filter, + // )?; + // } + + // } + + // log_debug!("what we have here \n{}", Dag(&visited)); + + // now ordering to respect causal partial order. 
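+        // Sketch of the approach implemented below: take the "first generation"
+        // (visited commits that appear in no other visited commit's `future`),
+        // then walk each one's future edges with DagNode::collapse(), which
+        // postpones a merge commit until all of its visited ancestors have
+        // already been emitted.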
+
+#[allow(unused_imports)]
+#[cfg(test)]
+mod test {
+
+    //use bloomfilter::Bloom;
+
+    use crate::branch::*;
+
+    use crate::repo::Repo;
+
+    use crate::log::*;
+    use crate::store::Store;
+    use crate::utils::*;
+
+    #[test]
+    pub fn test_branch() {
+        fn add_obj(
+            content: ObjectContentV0,
+            header: Option<CommitHeader>,
+            store: &Store,
+        ) -> ObjectRef {
+            let max_object_size = 4000;
+            let mut obj = Object::new(ObjectContent::V0(content), header, max_object_size, store);
+            obj.save_in_test(store).unwrap();
+            obj.reference().unwrap()
+        }
+
+        fn add_commit(
+            branch: BranchId,
+            author_privkey: PrivKey,
+            author_pubkey: PubKey,
+            deps: Vec<ObjectRef>,
+            acks: Vec<ObjectRef>,
+            body_ref: ObjectRef,
+            store: &Store,
+        ) -> ObjectRef {
+            let header = CommitHeader::new_with_deps_and_acks(
+                deps.iter().map(|r| r.id).collect(),
+                acks.iter().map(|r| r.id).collect(),
+            );
+
+            let overlay = store.get_store_repo().overlay_id_for_read_purpose();
+
+            let obj_ref = ObjectRef {
+                id: ObjectId::Blake3Digest32([1; 32]),
+                key: SymKey::ChaCha20Key([2; 32]),
+            };
+            let refs = vec![obj_ref];
+            let metadata = vec![5u8; 55];
+
+            let commit = CommitV0::new(
+                &author_privkey,
+                &author_pubkey,
+                overlay,
+                branch,
+                QuorumType::NoSigning,
+                deps,
+                vec![],
+                acks,
+                vec![],
+                refs,
+                vec![],
+                metadata,
+                body_ref,
+            )
+            .unwrap();
+            //log_debug!("commit: {:?}", commit);
+            add_obj(ObjectContentV0::Commit(Commit::V0(commit)), header, store)
+        }
+
+        fn add_body_branch(branch: BranchV0, store: &Store) -> ObjectRef {
+            let body: CommitBodyV0 = CommitBodyV0::Branch(Branch::V0(branch));
+            //log_debug!("body: {:?}", body);
+            add_obj(
+                ObjectContentV0::CommitBody(CommitBody::V0(body)),
+                None,
+                store,
+            )
+        }
+
+        fn add_body_trans(header: Option<CommitHeader>, content: u8, store: &Store) -> ObjectRef {
+            let content = [content; 777].to_vec();
+            let body = CommitBodyV0::AsyncTransaction(Transaction::V0(content));
+            //log_debug!("body: {:?}", body);
+            add_obj(
+                ObjectContentV0::CommitBody(CommitBody::V0(body)),
+                header,
+                store,
+            )
+        }
+
+        // repo
+
+        let (repo_privkey, repo_pubkey) = generate_keypair();
+        let store = Store::dummy_with_key(repo_pubkey);
+
+        // branch
+
+        let (_, branch_pubkey) = generate_keypair();
+
+        let (member_privkey, member_pubkey) = generate_keypair();
+
+        let metadata = [66u8; 64].to_vec();
+
+        let repo = Repo::new_with_member(
+            &repo_pubkey,
+            &member_pubkey,
+            &[PermissionV0::WriteAsync],
+            store,
+        );
+
+        let repo_ref = ObjectRef {
+            id: ObjectId::Blake3Digest32([1; 32]),
+            key: SymKey::ChaCha20Key([2; 32]),
+        };
+
+        let root_branch_def_id =
ObjectId::Blake3Digest32([1; 32]); + + let branch = BranchV0::new( + branch_pubkey, + repo_ref, + root_branch_def_id, + repo_privkey, + metadata, + ); + //log_debug!("branch: {:?}", branch); + + fn print_branch() { + log_debug!("branch deps/acks:"); + log_debug!(""); + log_debug!(" br"); + log_debug!(" / \\"); + log_debug!(" t1 t2"); + log_debug!(" \\ /"); + log_debug!(" t4"); + log_debug!(" |"); + log_debug!(" t5"); + log_debug!(""); + } + + print_branch(); + + // commit bodies + + let branch_body = add_body_branch(branch.clone(), &repo.store); + + let trans_body = add_body_trans(None, 8, &repo.store); + let trans_body2 = add_body_trans(None, 9, &repo.store); + + // create & add commits to store + + let br = add_commit( + branch_pubkey, + member_privkey.clone(), + member_pubkey, + vec![], + vec![], + branch_body.clone(), + &repo.store, + ); + log_debug!(">> br {}", br.id); + + let t1 = add_commit( + branch_pubkey, + member_privkey.clone(), + member_pubkey, + vec![], + vec![br.clone()], + trans_body.clone(), + &repo.store, + ); + log_debug!(">> t1 {}", t1.id); + + let t2 = add_commit( + branch_pubkey, + member_privkey.clone(), + member_pubkey, + vec![], + vec![br.clone()], + trans_body2.clone(), + &repo.store, + ); + log_debug!(">> t2 {}", t2.id); + + let t4 = add_commit( + branch_pubkey, + member_privkey.clone(), + member_pubkey, + vec![], + vec![t1.clone(), t2.clone()], + trans_body.clone(), + &repo.store, + ); + log_debug!(">> t4 {}", t4.id); + + let t5 = add_commit( + branch_pubkey, + member_privkey.clone(), + member_pubkey, + vec![], + vec![t4.clone()], + trans_body.clone(), + &repo.store, + ); + log_debug!(">> t5 {}", t5.id); + + let c5 = Commit::load(t5.clone(), &repo.store, true).unwrap(); + c5.verify(&repo).unwrap(); + + // let mut filter = Filter::new(FilterBuilder::new(10, 0.01)); + // for commit_ref in [br, t1, t2, t5.clone(), a6.clone()] { + // match commit_ref.id { + // ObjectId::Blake3Digest32(d) => filter.add(&d), + // } + // } + // let cfg = filter.config(); + // let their_commits = BloomFilter { + // k: cfg.hashes, + // f: filter.get_u8_array().to_vec(), + // }; + + let ids = Branch::sync_req([t5.id].into_iter(), &[t1.id], &None, &repo.store).unwrap(); + + assert_eq!(ids.len(), 3); + assert_eq!(ids, [t2.id, t4.id, t5.id]); + } +} diff --git a/ng-repo/src/commit.rs b/ng-repo/src/commit.rs new file mode 100644 index 0000000..2e533fa --- /dev/null +++ b/ng-repo/src/commit.rs @@ -0,0 +1,1960 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Commit that composes the DAG of a Branch + +use core::fmt; +use std::any::Any; +use std::collections::HashSet; +use std::iter::FromIterator; + +use ed25519_dalek::{PublicKey, Signature}; +use once_cell::sync::OnceCell; + +use crate::errors::*; +#[allow(unused_imports)] +use crate::log::*; +use crate::object::*; +use crate::repo::CommitInfo; +use crate::repo::Repo; +use crate::store::Store; +use crate::types::*; +use crate::utils::*; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum CommitLoadError { + MissingBlocks(Vec), + ObjectParseError, + NotACommit, + NotACommitBody, + CannotBeAtRootOfBranch, + MustBeAtRootOfBranch, + SingletonCannotHaveHeader, + MalformedHeader, + BodyTypeMismatch, + ContentParseError(ObjectParseError), +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum CommitVerifyError { + InvalidSignature, + InvalidHeader, + PermissionDenied, +} + +impl CommitV0 { + /// New commit + pub fn new( + author_privkey: &PrivKey, + author_pubkey: &PubKey, + overlay: OverlayId, + branch: BranchId, + quorum: QuorumType, + deps: Vec, + ndeps: Vec, + acks: Vec, + nacks: Vec, + files: Vec, + nfiles: Vec, + metadata: Vec, + body: ObjectRef, + ) -> Result { + let headers = CommitHeader::new_with(deps, ndeps, acks, nacks, files, nfiles); + let content = CommitContent::V0(CommitContentV0 { + perms: vec![], + author: CommitContent::author_digest(author_pubkey, overlay), + branch, + header_keys: headers.1, + quorum, + timestamp: now_timestamp(), + metadata, + body, + }); + let content_ser = serde_bare::to_vec(&content).unwrap(); + + // sign commit + let sig = sign(author_privkey, author_pubkey, &content_ser)?; + Ok(CommitV0 { + content: content, + sig, + id: None, + key: None, + header: headers.0, + body: OnceCell::new(), + blocks: vec![], + }) + } + + #[cfg(test)] + /// New commit with invalid header, only for test purposes + pub fn new_with_invalid_header( + author_privkey: &PrivKey, + author_pubkey: &PubKey, + branch: BranchId, + quorum: QuorumType, + metadata: Vec, + body: ObjectRef, + ) -> Result { + let headers = CommitHeader::new_invalid(); + let content = CommitContent::V0(CommitContentV0 { + perms: vec![], + author: CommitContent::author_digest(&author_pubkey, OverlayId::dummy()), + branch, + header_keys: headers.1, + quorum, + timestamp: now_timestamp(), + metadata, + body, + }); + let content_ser = serde_bare::to_vec(&content).unwrap(); + + // sign commit + let sig = sign(&author_privkey, &author_pubkey, &content_ser)?; + Ok(CommitV0 { + content: content, + sig, + id: None, + key: None, + header: headers.0, + body: OnceCell::new(), + blocks: vec![], + }) + } + + pub fn save(&mut self, block_size: usize, store: &Store) -> Result { + if self.id.is_some() && self.key.is_some() { + return Ok(ObjectRef::from_id_key( + self.id.unwrap(), + self.key.to_owned().unwrap(), + )); + } + // log_debug!("{:?}", self.header); + let mut obj = Object::new( + ObjectContent::V0(ObjectContentV0::Commit(Commit::V0(self.clone()))), + self.header.clone(), + block_size, + store, + ); + self.blocks = obj.save(store)?; + if let Some(h) = &mut self.header { + if let Some(id) = obj.header().as_ref().unwrap().id() { + h.set_id(*id); + } + } + self.id = Some(obj.get_and_save_id()); + self.key = Some(obj.key().unwrap()); + Ok(obj.reference().unwrap()) + } +} + +impl IObject for Commit { + fn block_ids(&self) -> Vec { + self.blocks().clone() + } + /// Get ID of including `Object`, + /// only available if the Commit was loaded from store or saved + fn id(&self) -> Option { + match self { + Commit::V0(c) => c.id, + } 
+ } + + /// Get key of including `Object` + /// only available if the Commit was loaded from store or saved + fn key(&self) -> Option { + match self { + Commit::V0(c) => c.key.clone(), + } + } +} + +impl Commit { + /// New commit + pub fn new( + author_privkey: &PrivKey, + author_pubkey: &PubKey, + overlay: OverlayId, + branch: BranchId, + quorum: QuorumType, + deps: Vec, + ndeps: Vec, + acks: Vec, + nacks: Vec, + files: Vec, + nfiles: Vec, + metadata: Vec, + body: ObjectRef, + ) -> Result { + CommitV0::new( + author_privkey, + author_pubkey, + overlay, + branch, + quorum, + deps, + ndeps, + acks, + nacks, + files, + nfiles, + metadata, + body, + ) + .map(|c| Commit::V0(c)) + } + + /// New commit with a body. everything is saved + pub fn new_with_body_acks_deps_and_save( + author_privkey: &PrivKey, + author_pubkey: &PubKey, + branch: BranchId, + quorum: QuorumType, + deps: Vec, + acks: Vec, + body: CommitBody, + store: &Store, + ) -> Result { + Self::new_with_body_and_save( + author_privkey, + author_pubkey, + branch, + quorum, + deps, + vec![], + acks, + vec![], + vec![], + vec![], + vec![], + body, + 0, + store, + ) + } + + /// New commit with a body. everything is saved + pub fn new_with_body_and_save( + author_privkey: &PrivKey, + author_pubkey: &PubKey, + branch: BranchId, + quorum: QuorumType, + deps: Vec, + ndeps: Vec, + acks: Vec, + nacks: Vec, + files: Vec, + nfiles: Vec, + metadata: Vec, + body: CommitBody, + block_size: usize, + store: &Store, + ) -> Result { + let (body_ref, mut saved_body) = body.clone().save(block_size, store)?; + let overlay = store.get_store_repo().overlay_id_for_read_purpose(); + let mut commit_v0 = CommitV0::new( + author_privkey, + author_pubkey, + overlay, + branch, + quorum, + deps, + ndeps, + acks, + nacks, + files, + nfiles, + metadata, + body_ref, + )?; + commit_v0.body.set(body).unwrap(); + let _commit_ref = commit_v0.save(block_size, store)?; + commit_v0.blocks.append(&mut saved_body); + + Ok(Commit::V0(commit_v0)) + } + + pub fn reference(&self) -> Option { + if self.key().is_some() && self.id().is_some() { + Some(ObjectRef { + id: self.id().unwrap(), + key: self.key().unwrap(), + }) + } else { + None + } + } + + pub fn save(&mut self, block_size: usize, store: &Store) -> Result { + match self { + Commit::V0(v0) => v0.save(block_size, store), + } + } + + pub fn blocks(&self) -> &Vec { + match self { + Commit::V0(v0) => &v0.blocks, + } + } + + #[cfg(test)] + fn empty_blocks(&mut self) { + match self { + Commit::V0(v0) => v0.blocks = vec![], + } + } + + pub fn collect_block_ids( + commit_ref: ObjectRef, + store: &Store, + with_body: bool + ) -> Result, CommitLoadError> { + let mut block_ids : Vec; + let (id, key) = (commit_ref.id, commit_ref.key); + match Object::load(id, Some(key.clone()), store) { + Err(ObjectParseError::MissingHeaderBlocks((_, missing))) => { + return Err(CommitLoadError::MissingBlocks(missing)); + }, + Ok(obj) => { + let content = obj + .content() + .map_err(|e| CommitLoadError::ContentParseError(e))?; + let commit = match content { + ObjectContent::V0(ObjectContentV0::Commit(c)) => c, + _ => return Err(CommitLoadError::NotACommit), + }; + block_ids = obj.block_ids(); + + if with_body { + let content = commit.content_v0(); + let (id, key) = (content.body.id, content.body.key.clone()); + let obj = Object::load(id.clone(), Some(key.clone()), store).map_err(|e| match e { + ObjectParseError::MissingBlocks(missing) => CommitLoadError::MissingBlocks(missing), + _ => CommitLoadError::ObjectParseError, + })?; + let content = obj + 
.content() + .map_err(|_e| CommitLoadError::ObjectParseError)?; + match content { + ObjectContent::V0(ObjectContentV0::CommitBody(_)) => { + block_ids.append(&mut obj.block_ids()); + } + _ => return Err(CommitLoadError::NotACommitBody), + } + } + Ok(block_ids) + } + Err(ObjectParseError::MissingBlocks(missing)) => { + Err(CommitLoadError::MissingBlocks(missing)) + } + Err(_e) => { + log_err!("{:?}", _e); + Err(CommitLoadError::ObjectParseError) + } + } + } + + /// Load commit from store + pub fn load( + commit_ref: ObjectRef, + store: &Store, + with_body: bool, + ) -> Result { + let (id, key) = (commit_ref.id, commit_ref.key); + match Object::load(id, Some(key.clone()), store) { + Err(ObjectParseError::MissingHeaderBlocks((obj, mut missing))) => { + //log_debug!("MISSING {:?}", missing); + if with_body { + let content = obj + .content() + .map_err(|e| CommitLoadError::ContentParseError(e))?; + let mut commit = match content { + ObjectContent::V0(ObjectContentV0::Commit(c)) => c, + _ => return Err(CommitLoadError::NotACommit), + }; + commit.set_id(id); + commit.set_key(key); + match commit.load_body(store) { + Ok(_) => return Err(CommitLoadError::MissingBlocks(missing)), + Err(CommitLoadError::MissingBlocks(mut missing_body)) => { + missing.append(&mut missing_body); + return Err(CommitLoadError::MissingBlocks(missing)); + } + Err(e) => return Err(e), + } + } else { + return Err(CommitLoadError::MissingBlocks(missing)); + } + } + Ok(obj) => { + let content = obj + .content() + .map_err(|e| CommitLoadError::ContentParseError(e))?; + let mut commit = match content { + ObjectContent::V0(ObjectContentV0::Commit(c)) => c, + _ => return Err(CommitLoadError::NotACommit), + }; + commit.set_id(id); + commit.set_key(key); + commit.set_header(obj.header().clone()); + + if with_body { + commit.load_body(store)?; + } + + Ok(commit) + } + Err(ObjectParseError::MissingBlocks(missing)) => { + Err(CommitLoadError::MissingBlocks(missing)) + } + Err(_e) => { + log_err!("{:?}", _e); + Err(CommitLoadError::ObjectParseError) + } + } + } + + /// Load commit body from store + pub fn load_body(&self, store: &Store) -> Result<&CommitBody, CommitLoadError> { + if self.body().is_some() { + return Ok(self.body().unwrap()); + } + let content = self.content_v0(); + let (id, key) = (content.body.id, content.body.key.clone()); + let obj = Object::load(id.clone(), Some(key.clone()), store).map_err(|e| match e { + ObjectParseError::MissingBlocks(missing) => CommitLoadError::MissingBlocks(missing), + _ => CommitLoadError::ObjectParseError, + })?; + let content = obj + .content() + .map_err(|_e| CommitLoadError::ObjectParseError)?; + match content { + ObjectContent::V0(ObjectContentV0::CommitBody(body)) => { + self.set_body(body); + Ok(self.body().unwrap()) + } + _ => Err(CommitLoadError::NotACommitBody), + } + } + + fn set_body(&self, body: CommitBody) { + match self { + Commit::V0(c) => { + c.body.set(body).unwrap(); + } + } + } + + /// Get ID of header `Object` + pub fn header_id(&self) -> &Option { + match self { + Commit::V0(CommitV0 { + header: Some(ch), .. 
+ }) => ch.id(), + _ => &None, + } + } + + /// Set ID of including `Object` + fn set_id(&mut self, id: ObjectId) { + match self { + Commit::V0(c) => c.id = Some(id), + } + } + + /// Set key of including `Object` + fn set_key(&mut self, key: SymKey) { + match self { + Commit::V0(c) => c.key = Some(key), + } + } + + /// Set header of including `Object` + fn set_header(&mut self, header: Option) { + match self { + Commit::V0(c) => c.header = header, + } + } + + /// Get commit signature + pub fn sig(&self) -> &Sig { + match self { + Commit::V0(c) => &c.sig, + } + } + + /// Get author (a UserId) + pub fn author(&self) -> &Digest { + self.content().author() + } + + pub fn timestamp(&self) -> Timestamp { + self.content().timestamp() + } + + pub fn final_consistency(&self) -> bool { + self.content().final_consistency() + || self.body().is_some_and(|body| body.total_order_required()) + } + + pub fn get_type(&self) -> Option { + self.body().map(|b| b.get_type()) + } + + pub fn get_signature_reference(&self) -> Option { + self.body().map_or(None, |b| b.get_signature_reference()) + } + + pub fn as_info(&self, repo: &Repo) -> CommitInfo { + let past = self.acks_ids(); + // past.sort(); + // let branch = past + // .is_empty() + // .then_some(self.id().unwrap()) + // .or(Some(past[0])); + CommitInfo { + past, + key: self.key().unwrap(), + signature: None, + author: repo.get_user_string(self.author()), + timestamp: self.timestamp(), + final_consistency: self.final_consistency(), + commit_type: self.get_type().unwrap(), + branch: None, + x: 0, + y: 0, + } + } + + /// Get branch ID this commit is about + pub fn branch(&self) -> &BranchId { + self.content().branch() + } + + /// Get commit header + pub fn header(&self) -> &Option { + match self { + Commit::V0(c) => &c.header, + } + } + + pub fn body_ref(&self) -> &ObjectRef { + &self.content_v0().body + } + + /// Get commit content V0 + pub fn content_v0(&self) -> &CommitContentV0 { + match self { + Commit::V0(CommitV0 { + content: CommitContent::V0(c), + .. + }) => c, + } + } + + /// Get quorum_type + pub fn quorum_type(&self) -> &QuorumType { + &self.content_v0().quorum + } + + /// Get commit content + pub fn content(&self) -> &CommitContent { + match self { + Commit::V0(CommitV0 { content: c, .. }) => c, + } + } + + pub fn body(&self) -> Option<&CommitBody> { + match self { + Commit::V0(c) => c.body.get(), + } + } + + pub fn owners_signature_required(&self, store: &Store) -> Result { + match self.load_body(store)? { + CommitBody::V0(CommitBodyV0::UpdateRootBranch(new_root)) => { + // load deps (the previous RootBranch commit) + let deps = self.deps(); + if deps.len() != 1 { + Err(CommitLoadError::MalformedHeader) + } else { + let previous_rootbranch_commit = Commit::load(deps[0].clone(), store, true)?; + let previous_rootbranch = previous_rootbranch_commit + .body() + .unwrap() + .root_branch_commit()?; + if previous_rootbranch.owners() != new_root.owners() { + Ok(true) + } else { + Ok(false) + } + } + } + CommitBody::V0(CommitBodyV0::RootBranch(_)) => { + let deps = self.deps(); + let acks = self.acks(); + if deps.is_empty() && acks.len() == 1 { + // we check that the ACK is the repository singleton commit. in this case, it means we are dealing with the first RootBranch commit, which is fine to have no deps. 
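+ // (the very first RootBranch commit cannot have changed the owner set, so no owners' signature is required for it)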
+ let causal_past = Commit::load(acks[0].clone(), store, true)?; + if causal_past.body().unwrap().is_repository_singleton_commit() { + return Ok(false); + } + } + Err(CommitLoadError::MalformedHeader) + } + CommitBody::V0(CommitBodyV0::Delete(_)) => Ok(true), + _ => Ok(false), + } + } + + /// This commit is the first one in the branch (doesn't have any ACKs nor Nacks) + pub fn is_root_commit_of_branch(&self) -> bool { + match self { + Commit::V0(CommitV0 { + content: CommitContent::V0(c), + .. + }) => match &c.header_keys { + Some(CommitHeaderKeys::V0(hk)) => hk.acks.is_empty() && hk.nacks.is_empty(), + None => true, + }, + } + } + + /// Get acks (that have both an ID in the header and a key in the header_keys) + pub fn acks(&self) -> Vec { + let mut res: Vec = vec![]; + match self { + Commit::V0(c) => match &c.header { + Some(CommitHeader::V0(header_v0)) => match &c.content.header_keys() { + Some(CommitHeaderKeys::V0(hk_v0)) => { + for ack in header_v0.acks.iter().zip(hk_v0.acks.iter()) { + res.push(ack.into()); + } + } + None => {} + }, + None => {} + }, + }; + res + } + + /// Get acks (that have an ID in the header, without checking if there is a key for them in the header_keys) + /// if there is no header, returns an empty vec + pub fn acks_ids(&self) -> Vec { + match self { + Commit::V0(c) => match &c.header { + Some(h) => h.acks(), + None => vec![], + }, + } + } + + /// Get deps (that have an ID in the header, without checking if there is a key for them in the header_keys) + /// if there is no header, returns an empty vec + pub fn deps_ids(&self) -> Vec { + match self { + Commit::V0(c) => match &c.header { + Some(h) => h.deps(), + None => vec![], + }, + } + } + + /// Get files + pub fn files(&self) -> Vec { + let mut res: Vec = vec![]; + match self { + Commit::V0(c) => match &c.content.header_keys() { + Some(CommitHeaderKeys::V0(hk_v0)) => { + for file in hk_v0.files.iter() { + res.push(file.clone()); + } + } + None => {} + }, + }; + res + } + + /// Get deps (that have both an ID in the header and a key in the header_keys) + pub fn deps(&self) -> Vec { + let mut res: Vec = vec![]; + match self { + Commit::V0(c) => match &c.header { + Some(CommitHeader::V0(header_v0)) => match &c.content.header_keys() { + Some(CommitHeaderKeys::V0(hk_v0)) => { + for dep in header_v0.deps.iter().zip(hk_v0.deps.iter()) { + res.push(dep.into()); + } + } + None => {} + }, + None => {} + }, + }; + res + } + + /// Get all commits that are in the direct causal past of the commit (`acks` and `nacks`) + /// only returns objectRefs that have both an ID from header and a KEY from header_keys (they all have a key) + pub fn direct_causal_past(&self) -> Vec { + let mut res: Vec = vec![]; + match self { + Commit::V0(c) => match (&c.header, &c.content.header_keys()) { + (Some(CommitHeader::V0(header_v0)), Some(CommitHeaderKeys::V0(hk_v0))) => { + for ack in header_v0.acks.iter().zip(hk_v0.acks.iter()) { + res.push(ack.into()); + } + for nack in header_v0.nacks.iter().zip(hk_v0.nacks.iter()) { + res.push(nack.into()); + } + } + _ => {} + }, + }; + res + } + + pub fn direct_causal_past_ids(&self) -> HashSet { + let mut res: HashSet = HashSet::with_capacity(1); + match self { + Commit::V0(c) => match &c.header { + Some(CommitHeader::V0(header_v0)) => { + res.extend(header_v0.acks.iter()); + res.extend(header_v0.nacks.iter()); + } + _ => {} + }, + }; + res + } + + // /// Get seq + // pub fn seq(&self) -> u64 { + // match self { + // Commit::V0(CommitV0 { + // content: CommitContent::V0(c), + // .. 
+ // }) => c.seq, + // } + // } + + /// Verify commit signature + pub fn verify_sig(&self, repo: &Repo) -> Result<(), CommitVerifyError> { + let c = match self { + Commit::V0(c) => c, + }; + let content_ser = serde_bare::to_vec(&c.content).unwrap(); + + let pubkey = repo + .member_pubkey(c.content.author()) + .map_err(|_| CommitVerifyError::PermissionDenied)?; + + let pubkey_slice = match pubkey { + PubKey::Ed25519PubKey(pk) => pk, + _ => panic!("author cannot have a Montgomery key"), + }; + let pk = PublicKey::from_bytes(&pubkey_slice) + .map_err(|_| CommitVerifyError::InvalidSignature)?; + let sig_bytes = match c.sig { + Sig::Ed25519Sig(ss) => [ss[0], ss[1]].concat(), + }; + let sig = + Signature::from_bytes(&sig_bytes).map_err(|_| CommitVerifyError::InvalidSignature)?; + pk.verify_strict(&content_ser, &sig) + .map_err(|_| CommitVerifyError::InvalidSignature) + } + + /// Verify commit permissions + pub fn verify_perm(&self, repo: &Repo) -> Result<(), NgError> { + repo.verify_permission(self) + } + + pub fn verify_perm_creation(&self, user: Option<&Digest>) -> Result<&Digest, NgError> { + let digest = self.content().author(); + if user.is_some() && *digest != *user.unwrap() { + return Err(NgError::PermissionDenied); + } + let body = self.body().ok_or(NgError::InvalidArgument)?; + if !(body.is_repository_singleton_commit() && user.is_none()) { + // a user must be provided to verify all subsequent commits of a Repository commit, that have the same author and that are signed with the repository key + return Err(NgError::InvalidArgument); + } + if body.required_permission().contains(&PermissionV0::Create) { + Ok(digest) + } else { + Err(NgError::PermissionDenied) + } + } + + /// Verify if the commit's `body` and its direct_causal_past, and recursively all their refs are available in the `store` + /// returns a list of all the ObjectIds that have been visited (only commits in the DAG) + /// or a list of missing blocks + pub fn verify_full_object_refs_of_branch_at_commit( + &self, + store: &Store, + ) -> Result, CommitLoadError> { + //log_debug!(">> verify_full_object_refs_of_branch_at_commit: #{}", self.seq()); + + /// Load `Commit`s of a `Branch` from the `Store` starting from the given `Commit`, + /// and collect missing `ObjectId`s + fn load_direct_object_refs( + commit: &Commit, + store: &Store, + visited: &mut HashSet, + missing: &mut HashSet, + ) -> Result<(), CommitLoadError> { + //log_debug!(">>> load_branch: #{}", commit.seq()); + + // if the self of verify_full_object_refs_of_branch_at_commit() has not been saved yet, then it doesn't have an ID + match commit.id() { + Some(id) => { + if visited.contains(&id) { + return Ok(()); + } + visited.insert(id); + // not adding the ObjectId of the header of this commit as it is not part of the DAG (neither is the CommitBody added to visited) + // // commit.header_id().map(|hid| visited.insert(hid)); + } + None => { + if !visited.is_empty() { + // we are not at the beginning (meaning, the self/the commit object) so this is a panic error as all causal + // past commits have been loaded from store and should have an id + panic!("A Commit in the causal past doesn't have an ID"); + } + } + } + + // load body & check if it's the Branch root commit + match commit.load_body(store) { + Ok(_) => Ok(()), + Err(CommitLoadError::MissingBlocks(m)) => { + // The commit body is missing. 
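+ // record the missing blocks so the caller can fetch them, then propagate the MissingBlocks error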
+ missing.extend(m.clone()); + Err(CommitLoadError::MissingBlocks(m)) + } + Err(e) => Err(e), + }?; + + let body = commit.body().unwrap(); + visited.insert(commit.content_v0().body.id); + if commit.is_root_commit_of_branch() { + if !body.must_be_root_commit_in_branch() { + return Err(CommitLoadError::CannotBeAtRootOfBranch); + } + if body.is_repository_singleton_commit() && commit.header().is_some() { + return Err(CommitLoadError::SingletonCannotHaveHeader); + } + } else { + if body.must_be_root_commit_in_branch() { + return Err(CommitLoadError::MustBeAtRootOfBranch); + } + } + + // load direct causal past + for blockref in commit.direct_causal_past() { + match Commit::load(blockref, store, true) { + Ok(mut c) => { + load_direct_object_refs(&mut c, store, visited, missing)?; + } + Err(CommitLoadError::MissingBlocks(m)) => { + missing.extend(m); + } + Err(e) => return Err(e), + } + } + + Ok(()) + } + + let mut visited = HashSet::new(); + let mut missing = HashSet::new(); + load_direct_object_refs(self, store, &mut visited, &mut missing)?; + + if !missing.is_empty() { + return Err(CommitLoadError::MissingBlocks(Vec::from_iter(missing))); + } + Ok(Vec::from_iter(visited)) + } + + /// Verify signature, permissions, and full causal past + pub fn verify(&self, repo: &Repo) -> Result<(), NgError> { + if !self.header().as_ref().map_or(true, |h| h.verify()) { + return Err(NgError::CommitVerifyError(CommitVerifyError::InvalidHeader)); + } + self.verify_sig(repo)?; + self.verify_perm(repo)?; + self.verify_full_object_refs_of_branch_at_commit(&repo.store)?; + Ok(()) + } +} + +impl PermissionV0 { + /// the kind of permissions that can be added and removed with AddWritePermission and RemoveWritePermission permissions respectively + pub fn is_write_permission(&self) -> bool { + match self { + Self::WriteAsync | Self::WriteSync | Self::RefreshWriteCap => true, + _ => false, + } + } + + pub fn is_delegated_by_admin(&self) -> bool { + self.is_write_permission() + || match self { + Self::AddReadMember + | Self::RemoveMember + | Self::AddWritePermission + | Self::RemoveWritePermission + | Self::Compact + | Self::AddBranch + | Self::RemoveBranch + | Self::ChangeName + | Self::RefreshReadCap => true, + _ => false, + } + } + + pub fn is_delegated_by_owner(&self) -> bool { + self.is_delegated_by_admin() + || match self { + Self::ChangeQuorum | Self::Admin | Self::ChangeMainBranch => true, + _ => false, + } + } +} + +impl CommitBody { + pub fn save( + self, + block_size: usize, + store: &Store, + ) -> Result<(ObjectRef, Vec), StorageError> { + let obj = Object::new( + ObjectContent::V0(ObjectContentV0::CommitBody(self)), + None, + block_size, + store, + ); + let blocks = obj.save(store)?; + Ok((obj.reference().unwrap(), blocks)) + } + + pub fn is_add_signer_cap(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::AddSignerCap(_) => true, + _ => false, + }, + } + } + + pub fn root_branch_commit(&self) -> Result<&RootBranch, CommitLoadError> { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::UpdateRootBranch(rb) | CommitBodyV0::RootBranch(rb) => Ok(rb), + _ => Err(CommitLoadError::BodyTypeMismatch), + }, + } + } + + pub fn is_repository_singleton_commit(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::Repository(_) => true, + _ => false, + }, + } + } + pub fn must_be_root_commit_in_branch(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::Repository(_) => true, + CommitBodyV0::Branch(_) => true, + _ => false, + }, + } + } + + 
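+ /// Whether this commit body type may appear on the root branch of a repo.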
pub fn on_root_branch(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::Repository(_) => true, + CommitBodyV0::RootBranch(_) => true, + CommitBodyV0::UpdateRootBranch(_) => true, + CommitBodyV0::AddBranch(_) => true, + CommitBodyV0::RemoveBranch(_) => true, + CommitBodyV0::AddMember(_) => true, + CommitBodyV0::RemoveMember(_) => true, + CommitBodyV0::AddPermission(_) => true, + CommitBodyV0::RemovePermission(_) => true, + CommitBodyV0::AddName(_) => true, + CommitBodyV0::RemoveName(_) => true, + //CommitBodyV0::Quorum(_) => true, + CommitBodyV0::RootCapRefresh(_) => true, + CommitBodyV0::CapRefreshed(_) => true, + CommitBodyV0::SyncSignature(_) => true, + CommitBodyV0::Delete(_) => true, + _ => false, + }, + } + } + + pub fn on_transactional_branch(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::Branch(_) => true, + CommitBodyV0::UpdateBranch(_) => true, + CommitBodyV0::Snapshot(_) => true, + CommitBodyV0::AsyncTransaction(_) => true, + CommitBodyV0::SyncTransaction(_) => true, + CommitBodyV0::AddFile(_) => true, + CommitBodyV0::RemoveFile(_) => true, + CommitBodyV0::Compact(_) => true, + CommitBodyV0::AsyncSignature(_) => true, + CommitBodyV0::BranchCapRefresh(_) => true, + CommitBodyV0::CapRefreshed(_) => true, + CommitBodyV0::SyncSignature(_) => true, + _ => false, + }, + } + } + + pub fn on_store_branch(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::AddRepo(_) => true, + CommitBodyV0::RemoveRepo(_) => true, + _ => false, + }, + } + } + + pub fn on_user_branch(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::AddLink(_) => true, + CommitBodyV0::RemoveLink(_) => true, + CommitBodyV0::AddSignerCap(_) => true, + CommitBodyV0::RemoveSignerCap(_) => true, + CommitBodyV0::AddInboxCap(_) => true, + CommitBodyV0::WalletUpdate(_) => true, + CommitBodyV0::StoreUpdate(_) => true, + _ => false, + }, + } + } + + pub fn not_allowed_on_individual_private_site(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::SyncTransaction(_) => true, + CommitBodyV0::AddMember(_) => true, + CommitBodyV0::RemoveMember(_) => true, + CommitBodyV0::AddPermission(_) => true, + CommitBodyV0::RemovePermission(_) => true, + _ => false, + }, + } + } + + pub fn total_order_required(&self) -> bool { + match self { + Self::V0(v0) => match v0 { + CommitBodyV0::RootBranch(_) => true, + CommitBodyV0::Branch(_) => true, + CommitBodyV0::UpdateRootBranch(_) => true, + CommitBodyV0::UpdateBranch(_) => true, + CommitBodyV0::AddBranch(AddBranch::V0(AddBranchV0 { + branch_type: BranchType::Transactional, + .. + })) => false, + CommitBodyV0::AddBranch(AddBranch::V0(AddBranchV0 { branch_type: _, .. 
})) => true, + CommitBodyV0::RemoveBranch(_) => true, + //CommitBodyV0::AddMember(_) => true, + CommitBodyV0::RemoveMember(_) => true, + CommitBodyV0::RemovePermission(_) => true, + //CommitBodyV0::Quorum(_) => true, + CommitBodyV0::Compact(_) => true, + CommitBodyV0::SyncTransaction(_) => true, // check QuorumType::TotalOrder in CommitContent + CommitBodyV0::RootCapRefresh(_) => true, + CommitBodyV0::BranchCapRefresh(_) => true, + _ => false, + }, + } + } + pub fn required_permission(&self) -> HashSet { + let res: Vec; + res = match self { + Self::V0(v0) => match v0 { + CommitBodyV0::Repository(_) => vec![PermissionV0::Create], + CommitBodyV0::RootBranch(_) => vec![PermissionV0::Create], + CommitBodyV0::UpdateRootBranch(_) => vec![ + PermissionV0::ChangeQuorum, + PermissionV0::RefreshWriteCap, + PermissionV0::RefreshReadCap, + PermissionV0::RefreshOverlay, + ], + CommitBodyV0::AddMember(_) => { + vec![PermissionV0::Create, PermissionV0::AddReadMember] + } + CommitBodyV0::RemoveMember(_) => vec![PermissionV0::RemoveMember], + CommitBodyV0::AddPermission(addp) => { + let mut perms = vec![PermissionV0::Create]; + if addp.permission_v0().is_delegated_by_admin() { + perms.push(PermissionV0::Admin); + } + if addp.permission_v0().is_write_permission() { + perms.push(PermissionV0::AddWritePermission); + } + perms + } + CommitBodyV0::RemovePermission(remp) => { + let mut perms = vec![]; + if remp.permission_v0().is_delegated_by_admin() { + perms.push(PermissionV0::Admin); + } + if remp.permission_v0().is_write_permission() { + perms.push(PermissionV0::RemoveWritePermission); + } + perms + } + CommitBodyV0::AddBranch(_) => vec![ + PermissionV0::Create, + PermissionV0::AddBranch, + PermissionV0::RefreshReadCap, + PermissionV0::RefreshWriteCap, + PermissionV0::RefreshOverlay, + PermissionV0::ChangeMainBranch, + ], + CommitBodyV0::RemoveBranch(_) => vec![PermissionV0::RemoveBranch], + CommitBodyV0::UpdateBranch(_) => { + vec![PermissionV0::RefreshReadCap, PermissionV0::RefreshWriteCap] + } + CommitBodyV0::AddName(_) => vec![PermissionV0::AddBranch, PermissionV0::ChangeName], + CommitBodyV0::RemoveName(_) => { + vec![PermissionV0::ChangeName, PermissionV0::RemoveBranch] + } + CommitBodyV0::Branch(_) => vec![PermissionV0::Create, PermissionV0::AddBranch], + CommitBodyV0::Snapshot(_) => vec![PermissionV0::WriteAsync], + CommitBodyV0::Compact(_) => vec![PermissionV0::Compact], + CommitBodyV0::AsyncTransaction(_) => vec![PermissionV0::WriteAsync], + CommitBodyV0::AddFile(_) => vec![PermissionV0::WriteAsync, PermissionV0::WriteSync], + CommitBodyV0::RemoveFile(_) => { + vec![PermissionV0::WriteAsync, PermissionV0::WriteSync] + } + CommitBodyV0::SyncTransaction(_) => vec![PermissionV0::WriteSync], + CommitBodyV0::AsyncSignature(_) => vec![PermissionV0::WriteAsync], + CommitBodyV0::SyncSignature(_) => vec![ + PermissionV0::WriteSync, + PermissionV0::ChangeQuorum, + PermissionV0::RefreshWriteCap, + PermissionV0::RefreshReadCap, + PermissionV0::RefreshOverlay, + PermissionV0::ChangeMainBranch, + PermissionV0::AddBranch, + PermissionV0::RemoveBranch, + PermissionV0::AddReadMember, + PermissionV0::RemoveMember, + PermissionV0::RemoveWritePermission, + PermissionV0::Compact, + ], + CommitBodyV0::RootCapRefresh(_) => { + vec![PermissionV0::RefreshReadCap, PermissionV0::RefreshWriteCap] + } + CommitBodyV0::BranchCapRefresh(_) => { + vec![PermissionV0::RefreshReadCap, PermissionV0::RefreshWriteCap] + } + CommitBodyV0::CapRefreshed(_) => { + vec![PermissionV0::RefreshReadCap, PermissionV0::RefreshWriteCap] + } + 
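+ // empty sets below: Delete is gated by the owners' signature instead (see owners_signature_required),
+ // and the user/store-branch bodies are not covered by repo permissions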
CommitBodyV0::Delete(_) => vec![], + CommitBodyV0::AddRepo(_) + | CommitBodyV0::RemoveRepo(_) + | CommitBodyV0::AddLink(_) + | CommitBodyV0::RemoveLink(_) + | CommitBodyV0::AddSignerCap(_) + | CommitBodyV0::RemoveSignerCap(_) + | CommitBodyV0::AddInboxCap(_) + | CommitBodyV0::WalletUpdate(_) + | CommitBodyV0::StoreUpdate(_) => vec![], + }, + }; + HashSet::from_iter(res.iter().cloned()) + } +} + +impl fmt::Display for CommitHeader { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + CommitHeader::V0(v0) => { + writeln!( + f, + "v0 - compact:{} id:{}", + v0.compact, + v0.id.map_or("None".to_string(), |i| format!("{}", i)) + )?; + writeln!(f, "==== acks : {}", v0.acks.len())?; + for ack in &v0.acks { + writeln!(f, "============== {}", ack)?; + } + writeln!(f, "==== nacks : {}", v0.nacks.len())?; + for nack in &v0.nacks { + writeln!(f, "============== {}", nack)?; + } + writeln!(f, "==== deps : {}", v0.deps.len())?; + for dep in &v0.deps { + writeln!(f, "============== {}", dep)?; + } + writeln!(f, "==== ndeps : {}", v0.ndeps.len())?; + for ndep in &v0.ndeps { + writeln!(f, "============== {}", ndep)?; + } + writeln!(f, "==== files : {}", v0.files.len())?; + for file in &v0.files { + writeln!(f, "============== {}", file)?; + } + writeln!(f, "==== nfiles : {}", v0.nfiles.len())?; + for nfile in &v0.nfiles { + writeln!(f, "============== {}", nfile)?; + } + Ok(()) + } + } + } +} + +impl CommitHeader { + pub fn is_root(&self) -> bool { + match self { + CommitHeader::V0(v0) => v0.is_root(), + } + } + pub fn deps(&self) -> Vec { + match self { + CommitHeader::V0(v0) => v0.deps.clone(), + } + } + pub fn acks(&self) -> Vec { + match self { + CommitHeader::V0(v0) => v0.acks.clone(), + } + } + pub fn files(&self) -> &Vec { + match self { + CommitHeader::V0(v0) => &v0.files, + } + } + pub fn acks_and_nacks(&self) -> Vec { + match self { + CommitHeader::V0(v0) => { + let mut res = v0.acks.clone(); + res.extend_from_slice(&v0.nacks); + res + } + } + } + pub fn id(&self) -> &Option { + match self { + CommitHeader::V0(v0) => &v0.id, + } + } + + pub fn set_id(&mut self, id: Digest) { + match self { + CommitHeader::V0(v0) => v0.id = Some(id), + } + } + + pub fn set_compact(&mut self) { + match self { + CommitHeader::V0(v0) => v0.set_compact(), + } + } + + pub fn verify(&self) -> bool { + match self { + CommitHeader::V0(v0) => v0.verify(), + } + } + + pub fn new_with( + deps: Vec, + ndeps: Vec, + acks: Vec, + nacks: Vec, + files: Vec, + nfiles: Vec, + ) -> (Option, Option) { + let res = CommitHeaderV0::new_with(deps, ndeps, acks, nacks, files, nfiles); + ( + res.0.map(|h| CommitHeader::V0(h)), + res.1.map(|h| CommitHeaderKeys::V0(h)), + ) + } + + #[cfg(test)] + pub fn new_invalid() -> (Option, Option) { + let res = CommitHeaderV0::new_invalid(); + ( + res.0.map(|h| CommitHeader::V0(h)), + res.1.map(|h| CommitHeaderKeys::V0(h)), + ) + } + + #[cfg(test)] + pub fn new_with_deps(deps: Vec) -> Option { + CommitHeaderV0::new_with_deps(deps).map(|ch| CommitHeader::V0(ch)) + } + + #[cfg(test)] + pub fn new_with_deps_and_acks(deps: Vec, acks: Vec) -> Option { + CommitHeaderV0::new_with_deps_and_acks(deps, acks).map(|ch| CommitHeader::V0(ch)) + } + + #[cfg(test)] + pub fn new_with_acks(acks: Vec) -> Option { + CommitHeaderV0::new_with_acks(acks).map(|ch| CommitHeader::V0(ch)) + } +} + +impl CommitHeaderV0 { + #[allow(dead_code)] + fn new_empty() -> Self { + Self { + id: None, + compact: false, + deps: vec![], + ndeps: vec![], + acks: vec![], + nacks: vec![], + files: vec![], + nfiles: vec![], 
+ } + } + + #[cfg(test)] + fn new_invalid() -> (Option, Option) { + let ideps: Vec = vec![ObjectId::dummy()]; + let kdeps: Vec = vec![ObjectKey::dummy()]; + + let res = Self { + id: None, + compact: false, + deps: ideps.clone(), + ndeps: ideps, + acks: vec![], + nacks: vec![], + files: vec![], + nfiles: vec![], + }; + ( + Some(res), + Some(CommitHeaderKeysV0 { + deps: kdeps, + acks: vec![], + nacks: vec![], + files: vec![], + }), + ) + } + + pub fn verify(&self) -> bool { + if !self.deps.is_empty() && !self.ndeps.is_empty() { + for ndep in self.ndeps.iter() { + if self.deps.contains(ndep) { + return false; + } + } + } + if !self.acks.is_empty() && !self.nacks.is_empty() { + for nack in self.nacks.iter() { + if self.acks.contains(nack) { + return false; + } + } + } + if !self.files.is_empty() && !self.nfiles.is_empty() { + for nref in self.nfiles.iter() { + if self.files.contains(nref) { + return false; + } + } + } + true + } + + pub fn set_compact(&mut self) { + self.compact = true; + } + + pub fn new_with( + deps: Vec, + ndeps: Vec, + acks: Vec, + nacks: Vec, + files: Vec, + nfiles: Vec, + ) -> (Option, Option) { + if deps.is_empty() + && ndeps.is_empty() + && acks.is_empty() + && nacks.is_empty() + && files.is_empty() + && nfiles.is_empty() + { + (None, None) + } else { + let mut ideps: Vec = vec![]; + let mut indeps: Vec = vec![]; + let mut iacks: Vec = vec![]; + let mut inacks: Vec = vec![]; + let mut ifiles: Vec = vec![]; + let mut infiles: Vec = vec![]; + + let mut kdeps: Vec = vec![]; + let mut kacks: Vec = vec![]; + let mut knacks: Vec = vec![]; + for d in deps { + ideps.push(d.id); + kdeps.push(d.key); + } + for d in ndeps { + indeps.push(d.id); + } + for d in acks { + iacks.push(d.id); + kacks.push(d.key); + } + for d in nacks { + inacks.push(d.id); + knacks.push(d.key); + } + for d in files.clone() { + ifiles.push(d.id); + } + for d in nfiles { + infiles.push(d.id); + } + let res = Self { + id: None, + compact: false, + deps: ideps, + ndeps: indeps, + acks: iacks, + nacks: inacks, + files: ifiles, + nfiles: infiles, + }; + if !res.verify() { + panic!("cannot create a header with conflicting references"); + } + ( + Some(res), + Some(CommitHeaderKeysV0 { + deps: kdeps, + acks: kacks, + nacks: knacks, + files, + }), + ) + } + } + + #[cfg(test)] + pub fn new_with_deps(deps: Vec) -> Option { + assert!(!deps.is_empty()); + let mut n = Self::new_empty(); + n.deps = deps; + Some(n) + } + + #[cfg(test)] + pub fn new_with_deps_and_acks(deps: Vec, acks: Vec) -> Option { + if deps.is_empty() && acks.is_empty() { + return None; + } + //assert!(!deps.is_empty() || !acks.is_empty()); + let mut n = Self::new_empty(); + n.deps = deps; + n.acks = acks; + Some(n) + } + + #[cfg(test)] + pub fn new_with_acks(acks: Vec) -> Option { + assert!(!acks.is_empty()); + let mut n = Self::new_empty(); + n.acks = acks; + Some(n) + } + + /// we do not check the deps because in a forked branch, they point to previous branch heads. 
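+ /// A commit is therefore a root if it has neither acks nor nacks.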
+ pub fn is_root(&self) -> bool { + //self.deps.is_empty() + // && self.ndeps.is_empty() + self.acks.is_empty() && self.nacks.is_empty() + } +} + +impl fmt::Display for Commit { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "====== Commit V0 ======")?; + if v0.id.is_some() { + writeln!(f, "== ID: {}", v0.id.as_ref().unwrap())?; + } + if v0.key.is_some() { + writeln!(f, "== Key: {}", v0.key.as_ref().unwrap())?; + } + if v0.header.is_some() { + write!(f, "== Header: {}", v0.header.as_ref().unwrap())?; + } + writeln!(f, "== Sig: {}", v0.sig)?; + write!(f, "{}", v0.content)?; + if v0.body.get().is_some() { + write!(f, "== Body: {}", v0.body.get().unwrap())?; + } + } + } + Ok(()) + } +} + +impl fmt::Display for CommitBody { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + write!(f, "V0 ")?; + match v0 { + // + // for root branch: + // + CommitBodyV0::Repository(b) => write!(f, "Repository {}", b), + CommitBodyV0::RootBranch(b) => write!(f, "RootBranch {}", b), + + CommitBodyV0::UpdateRootBranch(b) => write!(f, "UpdateRootBranch {}", b), // total order enforced with total_order_quorum + // CommitBodyV0::AddMember(b) => write!(f, "AddMember {}", b), // total order enforced with total_order_quorum + // CommitBodyV0::RemoveMember(b) => write!(f, "RemoveMember {}", b), // total order enforced with total_order_quorum + // CommitBodyV0::AddPermission(b) => write!(f, "AddPermission {}", b), + // CommitBodyV0::RemovePermission(b) => { + // write!(f, "RemovePermission {}", b) + // } + CommitBodyV0::AddBranch(b) => write!(f, "AddBranch {}", b), + // CommitBodyV0::RemoveBranch(b) => write!(f, "RemoveBranch {}", b), + // CommitBodyV0::AddName(b) => write!(f, "AddName {}", b), + // CommitBodyV0::RemoveName(b) => write!(f, "RemoveName {}", b), + // TODO? Quorum(Quorum) => write!(f, "RootBranch {}", b), // changes the quorum without changing the RootBranch + + // + // For transactional branches: + // + CommitBodyV0::Branch(b) => write!(f, "Branch {}", b), // singleton and should be first in branch + // CommitBodyV0::UpdateBranch(b) => write!(f, "UpdateBranch {}", b), // total order enforced with total_order_quorum + CommitBodyV0::Snapshot(b) => write!(f, "Snapshot {}", b), // a soft snapshot + // CommitBodyV0::AsyncTransaction(b) => write!(f, "AsyncTransaction {}", b), // partial_order + // CommitBodyV0::SyncTransaction(b) => write!(f, "SyncTransaction {}", b), // total_order + CommitBodyV0::AddFile(b) => write!(f, "AddFile {}", b), + // CommitBodyV0::RemoveFile(b) => write!(f, "RemoveFile {}", b), + // CommitBodyV0::Compact(b) => write!(f, "Compact {}", b), // a hard snapshot. 
total order enforced with total_order_quorum + //Merge(Merge) => write!(f, "RootBranch {}", b), + //Revert(Revert) => write!(f, "RootBranch {}", b), // only possible on partial order commit + CommitBodyV0::AsyncSignature(b) => write!(f, "AsyncSignature {}", b), + + // + // For both + // + // CommitBodyV0::RootCapRefresh(b) => write!(f, "RootCapRefresh {}", b), + // CommitBodyV0::BranchCapRefresh(b) => { + // write!(f, "BranchCapRefresh {}", b) + // } + CommitBodyV0::SyncSignature(b) => write!(f, "SyncSignature {}", b), + //CommitBodyV0::AddRepo(b) => write!(f, "AddRepo {}", b), + //CommitBodyV0::RemoveRepo(b) => write!(f, "RemoveRepo {}", b), + CommitBodyV0::AddSignerCap(b) => write!(f, "AddSignerCap {}", b), + CommitBodyV0::StoreUpdate(b) => write!(f, "StoreUpdate {}", b), + CommitBodyV0::AddInboxCap(b) => write!(f, "AddInboxCap {}", b), + + /* AddLink(AddLink), + RemoveLink(RemoveLink), + RemoveSignerCap(RemoveSignerCap), + WalletUpdate(WalletUpdate), + StoreUpdate(StoreUpdate), */ + _ => write!(f, "!!!! CommitBody Display not implemented for {:?}", v0.type_id()), + } + } + } + } +} + +impl fmt::Display for CommitContent { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "=== CommitContent V0 ===")?; + writeln!(f, "====== author: {}", v0.author)?; + //writeln!(f, "====== seq: {}", v0.seq)?; + writeln!(f, "====== BranchID: {}", v0.branch)?; + writeln!(f, "====== quorum: {:?}", v0.quorum)?; + writeln!(f, "====== Ref body: {}", v0.body)?; + if v0.header_keys.is_none() { + writeln!(f, "====== header keys: None")?; + } else { + write!(f, "{}", v0.header_keys.as_ref().unwrap())?; + } + writeln!(f, "====== Perms commits: {}", v0.perms.len())?; + let mut i = 0; + for block in &v0.perms { + writeln!(f, "========== {:03}: {}", i, block)?; + i += 1; + } + } + } + Ok(()) + } +} + +impl fmt::Display for CommitHeaderKeys { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "=== CommitHeaderKeys V0 ===")?; + writeln!(f, "==== acks : {}", v0.acks.len())?; + for ack in &v0.acks { + writeln!(f, "============== {}", ack)?; + } + writeln!(f, "==== nacks : {}", v0.nacks.len())?; + for nack in &v0.nacks { + writeln!(f, "============== {}", nack)?; + } + writeln!(f, "==== deps : {}", v0.deps.len())?; + for dep in &v0.deps { + writeln!(f, "============== {}", dep)?; + } + writeln!(f, "==== files : {}", v0.files.len())?; + for file in &v0.files { + writeln!(f, "============== {}", file)?; + } + } + } + Ok(()) + } +} + +#[cfg(test)] +mod test { + use crate::commit::*; + #[allow(unused_imports)] + use crate::log::*; + + fn test_commit_header_ref_content_fits( + obj_refs: Vec, + metadata_size: usize, + expect_blocks_len: usize, + ) { + let (priv_key, pub_key) = generate_keypair(); + let obj_ref = ObjectRef::dummy(); + + let branch = pub_key; + let deps = obj_refs.clone(); + let acks = obj_refs.clone(); + let files = obj_refs.clone(); + let body_ref = obj_ref.clone(); + let overlay = OverlayId::dummy(); + + let metadata = vec![66; metadata_size]; + + let mut commit = Commit::new( + &priv_key, + &pub_key, + overlay, + branch, + QuorumType::NoSigning, + deps, + vec![], + acks.clone(), + vec![], + files, + vec![], + metadata, + body_ref, + ) + .unwrap(); + + log_debug!("{}", commit); + + let max_object_size = 0; + + let store = Store::dummy_public_v0(); + + let commit_ref = commit.save(max_object_size, &store).expect("save commit"); + + let commit_object = + Object::load(commit_ref.id.clone(), 
Some(commit_ref.key.clone()), &store) + .expect("load object from storage"); + + assert_eq!( + commit_object.acks(), + acks.iter().map(|a| a.id).collect::>() + ); + + log_debug!("{}", commit_object); + + // log_debug!("blocks: {}", commit_object.blocks_len()); + // log_debug!("header blocks: {}", commit_object.header_blocks_len()); + // log_debug!("object size: {}", commit_object.size()); + + assert_eq!(commit_object.all_blocks_len(), expect_blocks_len); + + let commit = Commit::load(commit_ref, &store, false).expect("load commit from storage"); + + log_debug!("{}", commit); + } + + #[test] + pub fn test_commit_header_ref_content_fits_or_not() { + let obj_ref = ObjectRef::dummy(); + let obj_refs2 = vec![obj_ref.clone(), obj_ref.clone()]; + let obj_refs = vec![obj_ref.clone()]; + // with 1 refs in header + test_commit_header_ref_content_fits(obj_refs.clone(), 3592, 1); // block 4090 + test_commit_header_ref_content_fits(obj_refs.clone(), 3593, 2); //block 4012 header 117 total: 4129 + test_commit_header_ref_content_fits(obj_refs.clone(), 3741, 2); //block 4094 block 219 total: 4313 + test_commit_header_ref_content_fits(obj_refs.clone(), 3742, 3); // block 4094 block 9 block 285 + + // with 2 refs in header + test_commit_header_ref_content_fits(obj_refs2.clone(), 3360, 1); + test_commit_header_ref_content_fits(obj_refs2.clone(), 3361, 2); + test_commit_header_ref_content_fits(obj_refs2.clone(), 3609, 2); + test_commit_header_ref_content_fits(obj_refs2.clone(), 3610, 3); + } + + #[test] + pub fn test_load_commit_fails_on_non_commit_object() { + let file = SmallFile::V0(SmallFileV0 { + content_type: "file/test".into(), + metadata: Vec::from("some meta data here"), + content: [(0..255).collect::>().as_slice(); 320].concat(), + }); + let content = ObjectContent::V0(ObjectContentV0::SmallFile(file)); + + let max_object_size = 0; + + let store = Store::dummy_public_v0(); + + let obj = Object::new(content.clone(), None, max_object_size, &store); + + _ = obj.save(&store).expect("save object"); + + let commit = Commit::load(obj.reference().unwrap(), &store, false); + + assert_eq!(commit, Err(CommitLoadError::NotACommit)); + } + + #[test] + pub fn test_load_commit_with_body() { + let (priv_key, pub_key) = generate_keypair(); + let obj_ref = ObjectRef::dummy(); + + let branch = pub_key; + let obj_refs = vec![obj_ref.clone()]; + let deps = obj_refs.clone(); + let acks = obj_refs.clone(); + let files = obj_refs.clone(); + + let metadata = Vec::from("some metadata"); + + let body = CommitBody::V0(CommitBodyV0::Repository(Repository::new(&branch))); + + let max_object_size = 0; + + let store = Store::dummy_public_v0(); + + let mut commit = Commit::new_with_body_and_save( + &priv_key, + &pub_key, + branch, + QuorumType::NoSigning, + deps, + vec![], + acks.clone(), + vec![], + files, + vec![], + metadata, + body, + max_object_size, + &store, + ) + .expect("commit::new_with_body_and_save"); + + log_debug!("{}", commit); + + commit.empty_blocks(); + + let commit2 = Commit::load(commit.reference().unwrap(), &store, true) + .expect("load commit with body after save"); + + log_debug!("{}", commit2); + + assert_eq!(commit, commit2); + } + + #[test] + pub fn test_commit_load_body_fails() { + let (priv_key, pub_key) = generate_keypair(); + let obj_ref = ObjectRef::dummy(); + let obj_refs = vec![obj_ref.clone()]; + let branch = pub_key; + let deps = obj_refs.clone(); + let acks = obj_refs.clone(); + let files = obj_refs.clone(); + let metadata = vec![1, 2, 3]; + let body_ref = obj_ref.clone(); + let store = 
Store::dummy_public_v0(); + + let commit = Commit::new( + &priv_key, + &pub_key, + store.overlay_id, + branch, + QuorumType::NoSigning, + deps, + vec![], + acks, + vec![], + files, + vec![], + metadata, + body_ref, + ) + .unwrap(); + log_debug!("{}", commit); + + let repo = Repo::new_with_member(&pub_key, &pub_key, &[PermissionV0::Create], store); + + // match commit.load_body(repo.store.unwrap()) { + // Ok(_b) => panic!("Body should not exist"), + // Err(CommitLoadError::MissingBlocks(missing)) => { + // assert_eq!(missing.len(), 1); + // } + // Err(e) => panic!("Commit load error: {:?}", e), + // } + + commit.verify_sig(&repo).expect("verify signature"); + match commit.verify_perm(&repo) { + Ok(_) => panic!("Commit should not be Ok"), + Err(NgError::CommitLoadError(CommitLoadError::MissingBlocks(missing))) => { + assert_eq!(missing.len(), 1); + } + Err(e) => panic!("Commit verify perm error: {:?}", e), + } + + // match commit.verify_full_object_refs_of_branch_at_commit(repo.store.unwrap()) { + // Ok(_) => panic!("Commit should not be Ok"), + // Err(CommitLoadError::MissingBlocks(missing)) => { + // assert_eq!(missing.len(), 1); + // } + // Err(e) => panic!("Commit verify error: {:?}", e), + // } + + match commit.verify(&repo) { + Ok(_) => panic!("Commit should not be Ok"), + Err(NgError::CommitLoadError(CommitLoadError::MissingBlocks(missing))) => { + assert_eq!(missing.len(), 1); + } + Err(e) => panic!("Commit verify error: {:?}", e), + } + } + + #[test] + pub fn test_load_commit_with_body_verify_perms() { + let (priv_key, pub_key) = generate_keypair(); + + let branch = pub_key; + + let metadata = Vec::from("some metadata"); + + let body = CommitBody::V0(CommitBodyV0::Repository(Repository::new(&branch))); + + let max_object_size = 0; + + let store = Store::dummy_public_v0(); + + let commit = Commit::new_with_body_and_save( + &priv_key, + &pub_key, + branch, + QuorumType::NoSigning, + vec![], + vec![], + vec![], //acks.clone(), + vec![], + vec![], + vec![], + metadata, + body, + max_object_size, + &store, + ) + .expect("commit::new_with_body_and_save"); + + log_debug!("{}", commit); + + let repo = Repo::new_with_member(&pub_key, &pub_key, &[PermissionV0::Create], store); + + commit.load_body(&repo.store).expect("load body"); + + commit.verify_sig(&repo).expect("verify signature"); + commit.verify_perm(&repo).expect("verify perms"); + commit + .verify_perm_creation(None) + .expect("verify_perm_creation"); + + commit + .verify_full_object_refs_of_branch_at_commit(&repo.store) + .expect("verify is at root of branch and singleton"); + + commit.verify(&repo).expect("verify"); + } + + #[test] + pub fn test_load_commit_with_invalid_header() { + let (priv_key, pub_key) = generate_keypair(); + let obj_ref = ObjectRef::dummy(); + + let branch = pub_key; + let metadata = Vec::from("some metadata"); + + //let max_object_size = 0; + //let store = Store::dummy_public_v0(); + + let commit = Commit::V0( + CommitV0::new_with_invalid_header( + &priv_key, + &pub_key, + branch, + QuorumType::NoSigning, + metadata, + obj_ref, + ) + .expect("commit::new_with_invalid_header"), + ); + + log_debug!("{}", commit); + + let store = Store::dummy_public_v0(); + let repo = Repo::new_with_perms(&[PermissionV0::Create], store); + + assert_eq!( + commit.verify(&repo), + Err(NgError::CommitVerifyError(CommitVerifyError::InvalidHeader)) + ); + } +} diff --git a/ng-repo/src/errors.rs b/ng-repo/src/errors.rs new file mode 100644 index 0000000..a1b286f --- /dev/null +++ b/ng-repo/src/errors.rs @@ -0,0 +1,602 @@ +// Copyright 
(c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Errors + +use core::fmt; +use std::error::Error; + +use num_enum::IntoPrimitive; +use num_enum::TryFromPrimitive; + +pub use crate::commit::{CommitLoadError, CommitVerifyError}; +use crate::file::FileError; +use crate::log::*; +use crate::object::Object; +use crate::types::BlockId; + +#[derive(Debug, Eq, PartialEq, Clone)] +#[repr(u16)] +pub enum NgError { + InvalidSignature, + IncompleteSignature, + SerializationError, + EncryptionError, + DecryptionError, + InvalidValue, + ConnectionNotFound, + InvalidKey, + InvalidInvitation, + InvalidCreateAccount, + InvalidFileFormat, + InvalidArgument, + PermissionDenied, + InvalidPazzle, + InvalidMnemonic, + CommitLoadError(CommitLoadError), + ObjectParseError(ObjectParseError), + StorageError(StorageError), + NotFound, + JsStorageKeyNotFound, + IoError, + CommitVerifyError(CommitVerifyError), + LocalBrokerNotInitialized, + JsStorageReadError, + JsStorageWriteError(String), + CannotSaveWhenInMemoryConfig, + WalletNotFound, + WalletAlreadyAdded, + WalletAlreadyOpened, + WalletError(String), + BrokerError, + SessionNotFound, + SessionAlreadyStarted, + RepoNotFound, + BranchNotFound, + StoreNotFound, + UserNotFound, + TopicNotFound, + InboxNotFound, + CommitNotFound, + NotConnected, + ActorError, + ProtocolError(ProtocolError), + ServerError(ServerError), + InvalidResponse, + BootstrapError(String), + NotAServerError, + VerifierError(VerifierError), + SiteNotFoundOnBroker, + BrokerConfigErrorStr(&'static str), + BrokerConfigError(String), + MalformedEvent, + InvalidPayload, + WrongUploadId, + FileError(FileError), + InternalError, + OxiGraphError(String), + ConfigError(String), + LocalBrokerIsHeadless, + LocalBrokerIsNotHeadless, + InvalidNuri, + InvalidTarget, + InvalidQrCode, + NotImplemented, + NotARendezVous, + IncompatibleQrCode, + InvalidClass, + KeyShareNotFound, + BrokerNotFound, + SparqlError(String), + ContactNotFound, + SocialQueryAlreadyStarted, +} + +impl Error for NgError {} + +impl fmt::Display for NgError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::WalletError(string) => write!(f, "WalletError:{}", string), + Self::JsStorageWriteError(string) => write!(f, "JsStorageWriteError:{}", string), + Self::CommitVerifyError(commit_verify_error) => { + write!(f, "CommitVerifyError:{:?}", commit_verify_error) + } + Self::ProtocolError(error) => write!(f, "ProtocolError:{:?}", error), + Self::ServerError(error) => write!(f, "ServerError:{:?}", error), + Self::VerifierError(error) => write!(f, "VerifierError:{:?}", error), + Self::CommitLoadError(commit_load_error) => { + write!(f, "CommitLoadError:{:?}", commit_load_error) + } + Self::BootstrapError(error) => { + write!(f, "BootstrapError:{:?}", error) + } + Self::ObjectParseError(error) => write!(f, "ObjectParseError:{:?}", error), + Self::StorageError(storage_error) => write!(f, "StorageError:{:?}", storage_error), + Self::BrokerConfigErrorStr(s) => write!(f, "BrokerConfigError:{s}"), + Self::BrokerConfigError(s) => write!(f, "BrokerConfigError:{s}"), + _ => write!(f, "{:?}", self), + } + } +} + +impl From for std::io::Error { + fn from(err: NgError) -> std::io::Error { + match err { + NgError::InvalidArgument => 
+            NgError::InvalidArgument => std::io::Error::from(std::io::ErrorKind::InvalidInput),
+            NgError::PermissionDenied => std::io::Error::from(std::io::ErrorKind::PermissionDenied),
+            NgError::NotFound => std::io::Error::from(std::io::ErrorKind::NotFound),
+            _ => std::io::Error::new(std::io::ErrorKind::Other, err.to_string().as_str()),
+        }
+    }
+}
+
+impl From<serde_bare::error::Error> for NgError {
+    fn from(_e: serde_bare::error::Error) -> Self {
+        NgError::SerializationError
+    }
+}
+
+impl From<ed25519_dalek::ed25519::Error> for NgError {
+    fn from(_e: ed25519_dalek::ed25519::Error) -> Self {
+        NgError::InvalidSignature
+    }
+}
+
+impl From<CommitLoadError> for NgError {
+    fn from(e: CommitLoadError) -> Self {
+        NgError::CommitLoadError(e)
+    }
+}
+
+impl From<ObjectParseError> for NgError {
+    fn from(e: ObjectParseError) -> Self {
+        NgError::ObjectParseError(e)
+    }
+}
+
+impl From<FileError> for NgError {
+    fn from(e: FileError) -> Self {
+        NgError::FileError(e)
+    }
+}
+
+impl From<CommitVerifyError> for NgError {
+    fn from(e: CommitVerifyError) -> Self {
+        NgError::CommitVerifyError(e)
+    }
+}
+
+impl From<StorageError> for NgError {
+    fn from(e: StorageError) -> Self {
+        NgError::StorageError(e)
+    }
+}
+
+impl From<VerifierError> for NgError {
+    fn from(e: VerifierError) -> Self {
+        match e {
+            VerifierError::InvalidKey => NgError::InvalidKey,
+            VerifierError::SerializationError => NgError::SerializationError,
+            VerifierError::CommitLoadError(e) => NgError::CommitLoadError(e),
+            VerifierError::StorageError(e) => NgError::StorageError(e),
+            VerifierError::ObjectParseError(e) => NgError::ObjectParseError(e),
+            VerifierError::TopicNotFound => NgError::TopicNotFound,
+            VerifierError::RepoNotFound => NgError::RepoNotFound,
+            VerifierError::StoreNotFound => NgError::StoreNotFound,
+            VerifierError::BranchNotFound => NgError::BranchNotFound,
+            VerifierError::SparqlError(s) => NgError::SparqlError(s),
+            VerifierError::InternalError => NgError::InternalError,
+            _ => NgError::VerifierError(e),
+        }
+    }
+}
+
+/// Object parsing errors
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ObjectParseError {
+    /// Missing blocks
+    MissingBlocks(Vec<BlockId>),
+    /// Missing root key
+    MissingRootKey,
+    /// Invalid BlockId encountered in the tree
+    InvalidBlockId,
+    /// Too many or too few children of a block
+    InvalidChildren,
+    /// Number of keys does not match number of children of a block
+    InvalidKeys,
+    /// Invalid CommitHeader object content
+    InvalidHeader,
+    /// Error deserializing content of a block
+    BlockDeserializeError,
+    /// Error deserializing content of the object
+    ObjectDeserializeError,
+
+    MissingHeaderBlocks((Object, Vec<BlockId>)),
+
+    MalformedDag,
+    FilterDeserializationError,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum StorageError {
+    NotFound,
+    InvalidValue,
+    DifferentValue,
+    BackendError,
+    SerializationError,
+    AlreadyExists,
+    DataCorruption,
+    UnknownColumnFamily,
+    PropertyNotFound,
+    NotAStoreRepo,
+    OverlayBranchNotFound,
+    Abort,
+    NotEmpty,
+    ServerAlreadyRunningInOtherProcess,
+    NgError(String),
+    NoDiscreteState,
+}
+
+impl core::fmt::Display for StorageError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "{:?}", self)
+    }
+}
+
+impl From<serde_bare::error::Error> for StorageError {
+    fn from(_e: serde_bare::error::Error) -> Self {
+        StorageError::SerializationError
+    }
+}
+
+impl From<NgError> for StorageError {
+    fn from(e: NgError) -> Self {
+        StorageError::NgError(e.to_string())
+    }
+}
+
+#[derive(Debug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive, Clone)]
+#[repr(u16)]
+pub enum ServerError {
+    Ok = 0,
+    PartialContent,
+    EndOfStream,
+    False,
+    SequenceMismatch,
+    FileError,
+    RepoAlreadyOpened,
+    NotFound,
+    EmptyStream,
+    StorageError,
+    InvalidRequest,
+    InvalidSignature,
+    OtherError,
+    OverlayMismatch,
+    OverlayNotFound,
+    TopicNotFound,
+    AccessDenied,
+    InvalidHeader,
+    MalformedBranch,
+    BrokerError,
+    ProtocolError,
+    PeerAlreadySubscribed,
+    SubscriptionNotFound,
+    SessionNotFound,
+    SessionDetached,
+    OxiGraphError,
+    InvalidNuri,
+    InvalidTarget,
+    ExportWalletTimeOut,
+    NetError,
+}
+
+impl From<StorageError> for ServerError {
+    fn from(e: StorageError) -> Self {
+        match e {
+            StorageError::NotFound => ServerError::NotFound,
+            _ => ServerError::StorageError,
+        }
+    }
+}
+
+impl From<NetError> for ServerError {
+    fn from(e: NetError) -> Self {
+        match e {
+            _ => ServerError::NetError,
+        }
+    }
+}
+
+impl From<ProtocolError> for ServerError {
+    fn from(e: ProtocolError) -> Self {
+        match e {
+            ProtocolError::NotFound => ServerError::NotFound,
+            ProtocolError::BrokerError => ServerError::BrokerError,
+            _ => {
+                log_err!("{:?}", e);
+                ServerError::ProtocolError
+            }
+        }
+    }
+}
+
+impl From<NgError> for ServerError {
+    fn from(e: NgError) -> Self {
+        match e {
+            NgError::InvalidSignature => ServerError::InvalidSignature,
+            NgError::OxiGraphError(_) => ServerError::OxiGraphError,
+            NgError::InvalidNuri => ServerError::InvalidNuri,
+            NgError::InvalidTarget => ServerError::InvalidTarget,
+
+            _ => ServerError::OtherError,
+        }
+    }
+}
+
+impl ServerError {
+    pub fn is_stream(&self) -> bool {
+        *self == ServerError::PartialContent || *self == ServerError::EndOfStream
+    }
+    pub fn is_err(&self) -> bool {
+        *self != ServerError::Ok && !self.is_stream()
+    }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub enum VerifierError {
+    MalformedDag,
+    MissingCommitInDag,
+    CommitBodyNotFound,
+    InvalidKey,
+    SerializationError,
+    OtherError(String),
+    CommitLoadError(CommitLoadError),
+    InvalidRepositoryCommit,
+    MissingRepoWriteCapSecret,
+    StorageError(StorageError),
+    ObjectParseError(ObjectParseError),
+    NotImplemented,
+    InvalidSignatureObject,
+    MalformedSyncSignatureAcks,
+    MalformedSyncSignatureDeps,
+    TopicNotFound,
+    RepoNotFound,
+    StoreNotFound,
+    OverlayNotFound,
+    BranchNotFound,
+    InvalidBranch,
+    NoBlockStorageAvailable,
+    RootBranchNotFound,
+    BranchNotOpened,
+    DoubleBranchSubscription,
+    InvalidCommit,
+    LocallyConnected,
+    InvalidTriple,
+    InvalidNamedGraph,
+    OxigraphError(String),
+    CannotRemoveTriplesWhenNewBranch,
+    PermissionDenied,
+    YrsError(String),
+    AutomergeError(String),
+    InvalidNuri,
+    InvalidJson,
+    NothingToSign,
+    InvalidSocialQuery,
+    InvalidResponse,
+    SparqlError(String),
+    InboxError(String),
+    QrCode(String),
+    InvalidProfile,
+    ContactAlreadyExists,
+    InternalError,
+    InvalidInboxPost,
+}
+
+impl Error for VerifierError {}
+
+impl core::fmt::Display for VerifierError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "{:?}", self)
+    }
+}
+
+impl From<serde_bare::error::Error> for VerifierError {
+    fn from(_e: serde_bare::error::Error) -> Self {
+        VerifierError::SerializationError
+    }
+}
+
+impl From<NgError> for VerifierError {
+    fn from(e: NgError) -> Self {
+        match e {
+            NgError::InvalidKey => VerifierError::InvalidKey,
+            NgError::RepoNotFound => VerifierError::RepoNotFound,
+            NgError::BranchNotFound => VerifierError::BranchNotFound,
+            NgError::SerializationError => VerifierError::SerializationError,
+            NgError::PermissionDenied => VerifierError::PermissionDenied,
+            NgError::VerifierError(e) => e,
+            // NgError::JsStorageReadError
+            // NgError::JsStorageWriteError(String)
+            // NgError::JsStorageKeyNotFound
+            // NgError::InvalidFileFormat
+            _ => VerifierError::OtherError(e.to_string()),
+        }
+    }
+}
+
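+// Editor's illustration (not from the original source): the `From` impls in
+// this module exist so that the `?` operator can convert errors across layers
+// implicitly. A hypothetical helper that touches storage but returns `NgError`:
+#[allow(dead_code)]
+fn _example_error_chain(res: Result<(), StorageError>) -> Result<(), NgError> {
+    // `?` converts the StorageError into NgError::StorageError via the
+    // `impl From<StorageError> for NgError` defined earlier in this file.
+    res?;
+    Ok(())
+}
+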
+impl From<CommitLoadError> for VerifierError {
+    fn from(e: CommitLoadError) -> Self {
+        VerifierError::CommitLoadError(e)
+    }
+}
+
+impl From<ObjectParseError> for VerifierError {
+    fn from(e: ObjectParseError) -> Self {
+        VerifierError::ObjectParseError(e)
+    }
+}
+
+impl From<StorageError> for VerifierError {
+    fn from(e: StorageError) -> Self {
+        VerifierError::StorageError(e)
+    }
+}
+
+#[derive(Debug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive, Clone)]
+#[repr(u16)]
+pub enum NetError {
+    DirectionAlreadySet = 1,
+    WsError,
+    IoError,
+    ConnectionError,
+    SerializationError,
+    ProtocolError,
+    AccessDenied,
+    InternalError,
+    PeerAlreadyConnected,
+    Closing,
+} //MAX 50 NetErrors
+
+impl Error for NetError {}
+
+impl fmt::Display for NetError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{:?}", self)
+    }
+}
+
+#[derive(Debug, Eq, PartialEq, TryFromPrimitive, IntoPrimitive, Clone)]
+#[repr(u16)]
+pub enum ProtocolError {
+    NoError = 0,
+
+    IoError,
+    WsError,
+    ActorError,
+    InvalidState,
+    InvalidSignature,
+    SerializationError,
+    AccessDenied,
+    InvitationRequired,
+    BrokerError,
+    NoLocalBrokerFound,
+    NotFound,
+    MissingBlocks,
+    ObjectParseError,
+    InvalidValue,
+    AlreadyExists,
+    RepoIdRequired,
+    InvalidPublisherAdvert,
+
+    ConnectionError,
+    Timeout,
+    Expired,
+
+    PeerAlreadyConnected,
+    UserNotConnected,
+    PeerNotConnected,
+    OtherError,
+    NetError,
+    StorageError,
+    ServerError,
+    Closing,
+    FsmNotReady,
+    MustBeEncrypted,
+    NoiseHandshakeFailed,
+    DecryptionError,
+    EncryptionError,
+    WhereIsTheMagic,
+
+    InvalidNonce,
+    InvalidMessage,
+} //MAX 949 ProtocolErrors
+
+impl From<NetError> for ProtocolError {
+    fn from(e: NetError) -> Self {
+        match e {
+            NetError::IoError => ProtocolError::IoError,
+            NetError::WsError => ProtocolError::WsError,
+            NetError::ConnectionError => ProtocolError::ConnectionError,
+            NetError::SerializationError => ProtocolError::SerializationError,
+            NetError::ProtocolError => ProtocolError::OtherError,
+            NetError::AccessDenied => ProtocolError::AccessDenied,
+            NetError::PeerAlreadyConnected => ProtocolError::PeerAlreadyConnected,
+            NetError::Closing => ProtocolError::Closing,
+            _ => ProtocolError::NetError,
+        }
+    }
+}
+
+impl From<StorageError> for ProtocolError {
+    fn from(e: StorageError) -> Self {
+        match e {
+            StorageError::NotFound => ProtocolError::NotFound,
+            StorageError::InvalidValue => ProtocolError::InvalidValue,
+            StorageError::BackendError => ProtocolError::StorageError,
+            StorageError::SerializationError => ProtocolError::SerializationError,
+            StorageError::AlreadyExists => ProtocolError::AlreadyExists,
+            _ => ProtocolError::StorageError,
+        }
+    }
+}
+
+impl From<ProtocolError> for NgError {
+    fn from(e: ProtocolError) -> Self {
+        NgError::ProtocolError(e)
+    }
+}
+
+impl From<ServerError> for NgError {
+    fn from(e: ServerError) -> Self {
+        NgError::ServerError(e)
+    }
+}
+
+impl ProtocolError {
+    pub fn is_err(&self) -> bool {
+        *self != ProtocolError::NoError
+    }
+}
+
+impl Error for ProtocolError {}
+
+impl fmt::Display for ProtocolError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{:?}", self)
+    }
+}
+
+impl From<NgError> for ProtocolError {
+    fn from(e: NgError) -> Self {
+        match e {
+            NgError::InvalidSignature => ProtocolError::InvalidSignature,
+            NgError::SerializationError => ProtocolError::SerializationError,
+            _ => ProtocolError::OtherError,
+        }
+    }
+}
+
+impl From<ObjectParseError> for ProtocolError {
+    fn from(_e: ObjectParseError) -> Self {
+        ProtocolError::ObjectParseError
+    }
+}
+
+impl From<serde_bare::error::Error> for ProtocolError {
+    fn from(_e: serde_bare::error::Error) -> Self {
+        ProtocolError::SerializationError
+    }
+}
+
+impl From<serde_bare::error::Error> for
NetError { + fn from(_e: serde_bare::error::Error) -> Self { + NetError::SerializationError + } +} diff --git a/ng-repo/src/event.rs b/ng-repo/src/event.rs new file mode 100644 index 0000000..32b6b79 --- /dev/null +++ b/ng-repo/src/event.rs @@ -0,0 +1,319 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Event, a message sent in the PUB/SUB + +use core::fmt; +use std::sync::Arc; + +use chacha20::cipher::{KeyIvInit, StreamCipher}; +use chacha20::ChaCha20; +use zeroize::Zeroize; + +use crate::errors::*; +use crate::repo::{BranchInfo, Repo}; +use crate::store::Store; +use crate::types::*; +use crate::utils::*; + +impl fmt::Display for Event { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "topic_sig: {}", v0.topic_sig)?; + writeln!(f, "peer_sig: {}", v0.peer_sig)?; + write!(f, "content: {}", v0.content)?; + Ok(()) + } + } + } +} + +impl fmt::Display for EventContentV0 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "V0")?; + writeln!(f, "topic: {}", self.topic)?; + writeln!(f, "publisher: {}", self.publisher)?; + writeln!(f, "seq: {}", self.seq)?; + writeln!(f, "blocks: {}", self.blocks.len())?; + let mut i = 0; + for block in &self.blocks { + writeln!(f, "========== {:03}: {}", i, block.id())?; + i += 1; + } + writeln!(f, "file ids: {}", self.file_ids.len())?; + let mut i = 0; + for file in &self.file_ids { + writeln!(f, "========== {:03}: {}", i, file)?; + i += 1; + } + writeln!(f, "key: {:?}", self.key)?; + Ok(()) + } +} + +impl Event { + pub fn new( + publisher: &PrivKey, + seq: u64, + commit: &Commit, + additional_blocks: &Vec, + repo: &Repo, + ) -> Result { + Ok(Event::V0(EventV0::new( + publisher, + seq, + commit, + additional_blocks, + repo, + )?)) + } + + pub fn seq_num(&self) -> u64 { + match self { + Event::V0(v0) => v0.content.seq, + } + } + + pub fn topic_id(&self) -> &TopicId { + match self { + Event::V0(v0) => &v0.content.topic, + } + } + + pub fn file_ids(&self) -> &Vec { + match self { + Event::V0(v0) => &v0.content.file_ids, + } + } + + pub fn publisher(&self) -> &PeerId { + match self { + Event::V0(v0) => &v0.content.publisher, + } + } + + pub fn verify(&self) -> Result<(), NgError> { + match self { + Event::V0(v0) => v0.verify(), + } + } + + /// opens an event with the key derived from information kept in Repo. + /// + /// returns the Commit object and optional list of additional block IDs. + /// Those blocks have been added to the storage of store of repo so they can be retrieved. 
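+    ///
+    /// A hypothetical usage sketch (editor's addition; `repo` and
+    /// `branch_info` are assumed to come from an already opened repo):
+    /// ```ignore
+    /// let commit = event.open_with_info(&repo, &branch_info)?;
+    /// ```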
+ pub fn open_with_info(&self, repo: &Repo, branch: &BranchInfo) -> Result { + match self { + Self::V0(v0) => v0.open_with_info(repo, branch), + } + } + + pub fn commit_id(&self) -> ObjectId { + match self { + Self::V0(v0) => v0.content.blocks[0].id(), + } + } + + pub fn open( + &self, + store: &Store, + repo_id: &RepoId, + branch_id: &BranchId, + branch_secret: &ReadCapSecret, + ) -> Result { + match self { + Self::V0(v0) => v0.open(store, repo_id, branch_id, branch_secret, true), + } + } + + pub fn open_with_body( + &self, + store: &Store, + repo_id: &RepoId, + branch_id: &BranchId, + branch_secret: &ReadCapSecret, + with_body: bool, + ) -> Result { + match self { + Self::V0(v0) => v0.open(store, repo_id, branch_id, branch_secret, with_body), + } + } + + // pub fn put_blocks<'a>( + // &self, + // overlay: &OverlayId, + // storage: &RwLockWriteGuard<'a, dyn BlockStorage + Send + Sync + 'static>, + // ) -> Result { + // match self { + // Self::V0(v0) => v0.put_blocks(overlay, storage), + // } + // } +} + +impl EventV0 { + pub fn verify(&self) -> Result<(), NgError> { + let content_ser = serde_bare::to_vec(&self.content)?; + verify(&content_ser, self.topic_sig, self.content.topic)?; + match self.content.publisher { + PeerId::Forwarded(peer_id) => verify(&content_ser, self.peer_sig, peer_id)?, + PeerId::ForwardedObfuscated(_) => { + panic!("cannot verify an Event with obfuscated publisher") + } + PeerId::Direct(_) => panic!("direct events are not supported"), + } + if self.content.blocks.len() < 2 { + // an event is always containing a commit, which always has at least 2 blocks (one for the commit content, and one for the commit body) + return Err(NgError::MalformedEvent); + } + Ok(()) + } + + pub fn derive_key( + repo_id: &RepoId, + branch_id: &BranchId, + branch_secret: &ReadCapSecret, + publisher: &PubKey, + ) -> [u8; blake3::OUT_LEN] { + let mut key_material = match (*repo_id, *branch_id, branch_secret.clone(), *publisher) { + ( + PubKey::Ed25519PubKey(repo), + PubKey::Ed25519PubKey(branch), + SymKey::ChaCha20Key(branch_sec), + PubKey::Ed25519PubKey(publ), + ) => [repo, branch, branch_sec, publ].concat(), + (_, _, _, _) => panic!("cannot derive key with Montgomery key"), + }; + let res = blake3::derive_key( + "NextGraph Event Commit ObjectKey ChaCha20 key", + key_material.as_slice(), + ); + key_material.zeroize(); + res + } + + pub fn new( + publisher: &PrivKey, + seq: u64, + commit: &Commit, + additional_blocks: &Vec, + repo: &Repo, + ) -> Result { + let branch_id = commit.branch(); + let repo_id = repo.id; + let store = Arc::clone(&repo.store); + let branch = repo.branch(branch_id)?; + let topic_id = &branch.topic.unwrap(); + let topic_priv_key = branch + .topic_priv_key + .as_ref() + .ok_or(NgError::PermissionDenied)?; + let publisher_pubkey = publisher.to_pub(); + let key = Self::derive_key( + &repo_id, + branch_id, + &branch.read_cap.as_ref().unwrap().key, + &publisher_pubkey, + ); + let commit_key = commit.key().unwrap(); + let mut encrypted_commit_key = Vec::from(commit_key.slice()); + let mut nonce = seq.to_le_bytes().to_vec(); + nonce.append(&mut vec![0; 4]); + let mut cipher = ChaCha20::new((&key).into(), (nonce.as_slice()).into()); + cipher.apply_keystream(encrypted_commit_key.as_mut_slice()); + + let mut blocks = vec![]; + for bid in commit.blocks().iter() { + blocks.push(store.get(bid)?); + } + for bid in additional_blocks.iter() { + blocks.push(store.get(bid)?); + } + let event_content = EventContentV0 { + topic: *topic_id, + publisher: PeerId::Forwarded(publisher_pubkey), 
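+            // (editor's note) `key` below carries the commit's object key
+            // encrypted with ChaCha20: the cipher key is derived above with
+            // blake3::derive_key over repo_id | branch_id | branch_secret |
+            // publisher, and the 12-byte nonce is the little-endian `seq`
+            // padded with 4 zero bytes. `open()` decrypts by applying the
+            // same keystream again.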
+ seq, + blocks, + file_ids: commit + .header() + .as_ref() + .map_or_else(|| vec![], |h| h.files().to_vec()), + key: encrypted_commit_key, + }; + let event_content_ser = serde_bare::to_vec(&event_content).unwrap(); + let topic_sig = sign(topic_priv_key, topic_id, &event_content_ser)?; + let peer_sig = sign(publisher, &publisher_pubkey, &event_content_ser)?; + Ok(EventV0 { + content: event_content, + topic_sig, + peer_sig, + }) + } + + // pub fn put_blocks<'a>( + // &self, + // overlay: &OverlayId, + // storage: &RwLockWriteGuard<'a, dyn BlockStorage + Send + Sync + 'static>, + // ) -> Result { + // let mut first_id = None; + // for block in &self.content.blocks { + // let id = storage.put(overlay, block)?; + // if first_id.is_none() { + // first_id = Some(id) + // } + // } + // first_id.ok_or(NgError::CommitLoadError(CommitLoadError::NotACommit)) + // } + + /// opens an event with the key derived from information kept in Repo. + /// + /// returns the Commit object and optional list of additional block IDs. + /// Those blocks have been added to the storage of store of repo so they can be retrieved. + pub fn open_with_info(&self, repo: &Repo, branch: &BranchInfo) -> Result { + self.open( + &repo.store, + &repo.id, + &branch.id, + &branch.read_cap.as_ref().unwrap().key, + true, + ) + } + + pub fn open( + &self, + store: &Store, + repo_id: &RepoId, + branch_id: &BranchId, + branch_secret: &ReadCapSecret, + with_body: bool, + ) -> Result { + // verifying event signatures + self.verify()?; + + let publisher_pubkey = self.content.publisher.get_pub_key(); + let key = Self::derive_key(repo_id, branch_id, branch_secret, &publisher_pubkey); + let mut encrypted_commit_key = self.content.key.clone(); + let mut nonce = self.content.seq.to_le_bytes().to_vec(); + nonce.append(&mut vec![0; 4]); + let mut cipher = ChaCha20::new((&key).into(), (nonce.as_slice()).into()); + cipher.apply_keystream(encrypted_commit_key.as_mut_slice()); + + let commit_key: SymKey = encrypted_commit_key.as_slice().try_into()?; + + let mut first_id = None; + for block in &self.content.blocks { + let id = store.put(block)?; + if first_id.is_none() { + first_id = Some(id) + } + } + let commit_ref = ObjectRef::from_id_key(first_id.unwrap(), commit_key); + Ok(Commit::load(commit_ref, &store, with_body)?) + } +} diff --git a/ng-repo/src/file.rs b/ng-repo/src/file.rs new file mode 100644 index 0000000..f7d3ff4 --- /dev/null +++ b/ng-repo/src/file.rs @@ -0,0 +1,1607 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
SmallFile and RandomAccessFile objects + +use core::fmt; +use std::cmp::min; +use std::collections::HashMap; +use std::sync::Arc; + +use chacha20::cipher::{KeyIvInit, StreamCipher}; +use chacha20::ChaCha20; +use zeroize::Zeroize; + +use crate::block_storage::*; +use crate::errors::*; +#[allow(unused_imports)] +use crate::log::*; +use crate::object::*; +use crate::store::Store; +use crate::types::*; + +/// File errors +#[derive(Debug, Eq, PartialEq, Clone)] +pub enum FileError { + /// Missing blocks + MissingBlocks(Vec), + /// Missing root key + MissingRootKey, + /// Invalid BlockId encountered in the tree + InvalidBlockId, + /// Too many or too few children of a block + InvalidChildren, + /// Number of keys does not match number of children of a block + InvalidKeys, + /// Invalid CommitHeader object content + InvalidHeader, + /// Error deserializing content of a block + BlockDeserializeError, + /// Error deserializing content of the RandomAccessFileMeta + MetaDeserializeError, + /// Files are immutable, you cannot modify them and this one was already saved once. Create a new File for your new data (and delete the old one if needed) + AlreadySaved, + /// File is too big + TooBig, + NotFound, + StorageError, + EndOfFile, + InvalidArgument, + NotAFile, +} + +impl From for FileError { + fn from(e: StorageError) -> Self { + match e { + StorageError::NotFound => FileError::NotFound, + _ => FileError::StorageError, + } + } +} + +impl From for FileError { + fn from(e: ObjectParseError) -> Self { + match e { + _ => FileError::BlockDeserializeError, + } + } +} + +pub trait ReadFile { + fn read(&self, pos: usize, size: usize) -> Result, FileError>; + + fn get_all_blocks_ids(&self) -> Result, FileError>; +} + +/// A File in memory (read access only) +pub struct File<'a> { + internal: Box, + blocks_ids: Vec, +} + +impl<'a> File<'a> { + pub fn open(id: ObjectId, key: SymKey, store: Arc) -> Result, FileError> { + let root_block = store.get(&id)?; + + if root_block.children().len() == 2 + && *root_block.content().commit_header_obj() == CommitHeaderObject::RandomAccess + { + Ok(File { + internal: Box::new(RandomAccessFile::open(id, key, store)?), + blocks_ids: vec![], + }) + } else { + let obj = Object::load(id, Some(key), &store)?; + match obj.content_v0()? { + ObjectContentV0::SmallFile(small_file) => Ok(File { + internal: Box::new(small_file), + blocks_ids: obj.block_ids(), + }), + _ => Err(FileError::NotAFile), + } + } + } +} + +impl<'a> ReadFile for File<'a> { + fn read(&self, pos: usize, size: usize) -> Result, FileError> { + self.internal.read(pos, size) + } + fn get_all_blocks_ids(&self) -> Result, FileError> { + if self.blocks_ids.len() > 0 { + Ok(self.blocks_ids.to_vec()) + } else { + self.internal.get_all_blocks_ids() + } + } +} + +impl ReadFile for SmallFile { + fn read(&self, pos: usize, size: usize) -> Result, FileError> { + match self { + Self::V0(v0) => v0.read(pos, size), + } + } + fn get_all_blocks_ids(&self) -> Result, FileError> { + unimplemented!(); + } +} + +impl ReadFile for SmallFileV0 { + fn read(&self, pos: usize, size: usize) -> Result, FileError> { + if size == 0 { + return Err(FileError::InvalidArgument); + } + if pos + size > self.content.len() { + return Err(FileError::EndOfFile); + } + Ok(self.content[pos..pos + size].to_vec()) + } + fn get_all_blocks_ids(&self) -> Result, FileError> { + unimplemented!(); + } +} + +/// A RandomAccessFile in memory. 
This is not used to serialize data +pub struct RandomAccessFile { + //storage: Arc<&'a dyn BlockStorage>, + store: Arc, + /// accurate once saved or opened + meta: RandomAccessFileMeta, + + //meta_object_id: Option, + //content_block_id: Option, + /// keeps the deduplicated blocks' IDs, used for async writes + block_contents: HashMap, + + /// Blocks of the Object (nodes of the tree). Only used when writing asynchronously, before saving. + blocks: Vec<(BlockId, BlockKey)>, + + /// When an id is present, the File is opened in Read mode, and cannot be saved. + id: Option, + key: Option, + + content_block: Option<(BlockId, BlockKey)>, + + // used for writes + conv_key: Option<[u8; 32]>, + remainder: Vec, + size: usize, +} + +impl ReadFile for RandomAccessFile { + fn get_all_blocks_ids(&self) -> Result, FileError> { + if self.id.is_none() { + unimplemented!(); + } + let mut res = Vec::with_capacity(4); + let _: Vec<()> = self + .blocks + .iter() + .map(|(id, _)| res.push(id.clone())) + .collect(); + + recurse_tree( + &self.store, + self.content_block.to_owned().unwrap(), + &mut res, + self.meta.depth(), + )?; + + fn recurse_tree( + store: &Store, + current_block_id_key: (Digest, SymKey), + res: &mut Vec, + level: u8, + ) -> Result<(), FileError> { + res.push(current_block_id_key.0); + if level > 0 { + let tree_block = store.get(¤t_block_id_key.0)?; + let (children, content) = tree_block.read(¤t_block_id_key.1)?; + if children.is_empty() || content.len() > 0 { + return Err(FileError::BlockDeserializeError); + } + + for child in children { + recurse_tree(store, child, res, level - 1)?; + } + } + Ok(()) + } + Ok(res) + } + + /// reads at most one block from the file. the returned vector should be tested for size. it might be smaller than what you asked for. + /// `pos`ition can be anywhere in the file. 
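+    /// A hypothetical read loop (editor's sketch; `file` is an opened
+    /// `RandomAccessFile` and 4096 an arbitrary request size):
+    /// ```ignore
+    /// let mut pos = 0;
+    /// let mut content: Vec<u8> = Vec::new();
+    /// loop {
+    ///     match file.read(pos, 4096) {
+    ///         Ok(chunk) if chunk.is_empty() => break,
+    ///         Ok(chunk) => {
+    ///             pos += chunk.len();
+    ///             content.extend_from_slice(&chunk);
+    ///         }
+    ///         Err(FileError::EndOfFile) => break,
+    ///         Err(e) => panic!("read error: {:?}", e),
+    ///     }
+    /// }
+    /// ```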
+ //TODO: parallelize decryption on multi threads (cores) + fn read(&self, pos: usize, mut size: usize) -> Result, FileError> { + if size == 0 { + return Err(FileError::InvalidArgument); + } + if self.id.is_some() { + let total = self.meta.total_size() as usize; + if pos > total { + return Err(FileError::EndOfFile); + } + size = min(total - pos, size); + let mut current_block_id_key = self.content_block.to_owned().unwrap(); + + let depth = self.meta.depth(); + let arity = self.meta.arity(); + + let mut level_pos = pos; + for level in 0..depth { + let tree_block = self.store.get(¤t_block_id_key.0)?; + let (children, content) = tree_block.read(¤t_block_id_key.1)?; + if children.is_empty() || content.len() > 0 { + return Err(FileError::BlockDeserializeError); + } + let factor = (arity as usize).pow(depth as u32 - level as u32 - 1) + * self.meta.chunk_size() as usize; + let level_index = pos / factor; + if level_index >= children.len() { + return Err(FileError::EndOfFile); + } + current_block_id_key = (children[level_index]).clone(); + level_pos = pos as usize % factor; + } + + let content_block = self.store.get(¤t_block_id_key.0)?; + //log_debug!("CONTENT BLOCK SIZE {}", content_block.size()); + + let (children, content) = content_block.read(¤t_block_id_key.1)?; + + if children.is_empty() && content.len() > 0 { + //log_debug!("CONTENT SIZE {}", content.len()); + + if level_pos >= content.len() { + return Err(FileError::EndOfFile); + } + let end = min(content.len(), level_pos + size); + return Ok(content[level_pos..end].to_vec()); + } else { + return Err(FileError::BlockDeserializeError); + } + } else { + // hasn't been saved yet, we can use the self.blocks as a flat array and the remainder too + let factor = self.meta.chunk_size() as usize; + let index = pos / factor as usize; + let level_pos = pos % factor as usize; + let remainder_pos = self.blocks.len() * factor; + if pos >= remainder_pos { + let pos_in_remainder = pos - remainder_pos; + if self.remainder.len() > 0 && pos_in_remainder < self.remainder.len() { + let end = min(self.remainder.len(), pos_in_remainder + size); + return Ok(self.remainder[pos_in_remainder..end].to_vec()); + } else { + return Err(FileError::EndOfFile); + } + } + //log_debug!("{} {} {} {}", index, self.blocks.len(), factor, level_pos); + if index >= self.blocks.len() { + return Err(FileError::EndOfFile); + } + let block = &self.blocks[index]; + let content_block = self.store.get(&block.0)?; + let (children, content) = content_block.read(&block.1)?; + if children.is_empty() && content.len() > 0 { + //log_debug!("CONTENT SIZE {}", content.len()); + + if level_pos >= content.len() { + return Err(FileError::EndOfFile); + } + let end = min(content.len(), level_pos + size); + return Ok(content[level_pos..end].to_vec()); + } else { + return Err(FileError::BlockDeserializeError); + } + } + } +} + +impl RandomAccessFile { + pub fn meta(&self) -> &RandomAccessFileMeta { + &self.meta + } + + pub fn id(&self) -> &Option { + &self.id + } + + pub fn key(&self) -> &Option { + &self.key + } + + fn make_block( + mut content: Vec, + conv_key: &[u8; blake3::OUT_LEN], + children: Vec, + already_existing: &mut HashMap, + store: &Store, + ) -> Result<(BlockId, BlockKey), StorageError> { + let key_hash = blake3::keyed_hash(conv_key, &content); + + let key_slice = key_hash.as_bytes(); + let key = SymKey::ChaCha20Key(key_slice.clone()); + let it = already_existing.get(&key); + if it.is_some() { + return Ok((*it.unwrap(), key)); + } + let nonce = [0u8; 12]; + let mut cipher = 
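+        // (editor's note) This is convergent encryption: the ChaCha20 key
+        // computed just above is the keyed BLAKE3 hash of the plaintext under
+        // the store's convergence key, so identical chunks yield identical
+        // blocks and deduplicate through `already_existing`. The all-zero
+        // nonce is safe because each derived key encrypts exactly one message.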
ChaCha20::new(key_slice.into(), &nonce.into()); + //let mut content_enc = Vec::from(content); + let mut content_enc_slice = &mut content.as_mut_slice(); + cipher.apply_keystream(&mut content_enc_slice); + + let mut block = Block::new_random_access(children, content, None); + //log_debug!(">>> make_block random access: {}", block.id()); + //log_debug!("!! children: ({}) {:?}", children.len(), children); + + let id = block.get_and_save_id(); + already_existing.insert(key.clone(), id); + //log_debug!("putting *** {}", id); + store.put(&block)?; + Ok((id, key)) + } + + fn make_parent_block( + conv_key: &[u8; blake3::OUT_LEN], + children: Vec<(BlockId, BlockKey)>, + already_existing: &mut HashMap, + store: &Store, + ) -> Result<(BlockId, BlockKey), StorageError> { + let mut ids: Vec = Vec::with_capacity(children.len()); + let mut keys: Vec = Vec::with_capacity(children.len()); + children.iter().for_each(|child| { + ids.push(child.0); + keys.push(child.1.clone()); + }); + let content = ChunkContentV0::InternalNode(keys); + let content_ser = serde_bare::to_vec(&content).unwrap(); + + Self::make_block(content_ser, conv_key, ids, already_existing, store) + } + + /// Build tree from leaves, returns parent nodes + fn make_tree( + already_existing: &mut HashMap, + leaves: &[(BlockId, BlockKey)], + conv_key: &ChaCha20Key, + arity: u16, + store: &Store, + ) -> Result<(BlockId, BlockKey), StorageError> { + let mut parents: Vec<(BlockId, BlockKey)> = vec![]; + let mut chunks = leaves.chunks(arity as usize); + while let Some(nodes) = chunks.next() { + //log_debug!("making parent"); + parents.push(Self::make_parent_block( + conv_key, + nodes.to_vec(), + already_existing, + store, + )?); + } + //log_debug!("level with {} parents", parents.len()); + + if 1 < parents.len() { + return Self::make_tree(already_existing, parents.as_slice(), conv_key, arity, store); + } + Ok(parents[0].clone()) + } + + /// returns content_block id/key pair, and root_block id/key pair + fn save_( + already_existing: &mut HashMap, + blocks: &[(BlockId, BlockKey)], + meta: &mut RandomAccessFileMeta, + conv_key: &ChaCha20Key, + store: &Store, + ) -> Result<((BlockId, BlockKey), (BlockId, BlockKey)), FileError> { + let leaf_blocks_nbr = blocks.len(); + let arity = meta.arity(); + + let mut depth: u8 = u8::MAX; + for i in 0..u8::MAX { + if leaf_blocks_nbr <= (arity as usize).pow(i.into()) { + depth = i; + break; + } + } + if depth == u8::MAX { + return Err(FileError::TooBig); + } + meta.set_depth(depth); + //log_debug!("depth={} leaves={}", depth, leaf_blocks_nbr); + + let content_block = if depth == 0 { + assert!(blocks.len() == 1); + blocks[0].clone() + } else { + // we create the tree + Self::make_tree(already_existing, &blocks, &conv_key, arity, store)? + }; + + let meta_object = Object::new_with_convergence_key( + ObjectContent::V0(ObjectContentV0::RandomAccessFileMeta(meta.clone())), + None, + store_valid_value_size(meta.chunk_size() as usize), + conv_key, + ); + //log_debug!("saving meta object"); + _ = meta_object.save(store)?; + + // creating the root block that contains as first child the meta_object, and as second child the content_block + // it is added to storage in make_parent_block + //log_debug!("saving root block"); + let root_block = Self::make_parent_block( + conv_key, + vec![ + (meta_object.id(), meta_object.key().unwrap()), + content_block.clone(), + ], + already_existing, + store, + )?; + Ok((content_block, root_block)) + } + + /// Creates a new file based on a content that is fully known at the time of creation. 
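+    ///
+    /// A hypothetical call (editor's sketch, assuming `store: Arc<Store>`
+    /// is at hand):
+    /// ```ignore
+    /// let file = RandomAccessFile::new_from_slice(
+    ///     b"hello world",
+    ///     store_max_value_size(),
+    ///     "text/plain".to_string(),
+    ///     vec![],
+    ///     Arc::clone(&store),
+    /// )?;
+    /// ```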
+ /// + /// If you want to stream progressively the content into the new file, you should use new_empty(), write() and save() instead + pub fn new_from_slice( + content: &[u8], + block_size: usize, + content_type: String, + metadata: Vec, + store: Arc, + ) -> Result { + //let max_block_size = store_max_value_size(); + let valid_block_size = store_valid_value_size(block_size) - BLOCK_EXTRA; + + let arity = ((valid_block_size) / CHILD_SIZE) as u16; + + let total_size = content.len() as u64; + + let mut conv_key = Object::convergence_key(&store); + + let mut blocks: Vec<(BlockId, BlockKey)> = vec![]; + + let mut already_existing: HashMap = HashMap::new(); + + //log_debug!("making the leaves"); + for chunk in content.chunks(valid_block_size) { + let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec()); + let content_ser = serde_bare::to_vec(&data_chunk).unwrap(); + blocks.push(Self::make_block( + content_ser, + &conv_key, + vec![], + &mut already_existing, + &store, + )?); + } + assert_eq!( + (total_size as usize + valid_block_size - 1) / valid_block_size, + blocks.len() + ); + + let mut meta = RandomAccessFileMeta::V0(RandomAccessFileMetaV0 { + content_type, + metadata, + chunk_size: valid_block_size as u32, + total_size, + arity, + depth: 0, + }); + + let (content_block, root_block) = + Self::save_(&mut already_existing, &blocks, &mut meta, &conv_key, &store)?; + + conv_key.zeroize(); + + Ok(Self { + store, + meta, + block_contents: HashMap::new(), // not used in this case + blocks: vec![], // not used in this case + id: Some(root_block.0.clone()), + key: Some(root_block.1.clone()), + content_block: Some(content_block), + conv_key: None, // not used in this case + remainder: vec![], // not used in this case + size: 0, // not used in this case + }) + } + + pub fn new_empty( + block_size: usize, + content_type: String, + metadata: Vec, + store: Arc, + ) -> Self { + let valid_block_size = store_valid_value_size(block_size) - BLOCK_EXTRA; + + let arity = ((valid_block_size) / CHILD_SIZE) as u16; + + let meta = RandomAccessFileMeta::V0(RandomAccessFileMetaV0 { + content_type, + metadata, + chunk_size: valid_block_size as u32, + arity, + total_size: 0, // will be filled in later, during save + depth: 0, // will be filled in later, during save + }); + + Self { + store: Arc::clone(&store), + meta, + block_contents: HashMap::new(), + blocks: vec![], + id: None, + key: None, + content_block: None, + conv_key: Some(Object::convergence_key(&store)), + remainder: vec![], + size: 0, + } + } + + /// Appends some data at the end of the file currently created with new_empty() and not saved yet. + /// you can call it many times. 
Don't forget to eventually call save() + pub fn write(&mut self, data: &[u8]) -> Result<(), FileError> { + if self.id.is_some() { + return Err(FileError::AlreadySaved); + } + let remainder = self.remainder.len(); + let chunk_size = self.meta.chunk_size() as usize; + let mut pos: usize = 0; + let conv_key = self.conv_key.unwrap(); + // TODO: provide an option to search in storage for already existing, when doing a resume of previously aborted write + let mut already_existing: HashMap = HashMap::new(); + + if remainder > 0 { + if data.len() >= chunk_size - remainder { + let mut new_block = Vec::with_capacity(chunk_size); + new_block.append(&mut self.remainder); + pos = chunk_size - remainder; + self.size += chunk_size; + //log_debug!("size += chunk_size {} {}", self.size, chunk_size); + new_block.extend(data[0..pos].iter()); + assert_eq!(new_block.len(), chunk_size); + let data_chunk = ChunkContentV0::DataChunk(new_block); + let content_ser = serde_bare::to_vec(&data_chunk).unwrap(); + self.blocks.push(Self::make_block( + content_ser, + &conv_key, + vec![], + &mut already_existing, + &self.store, + )?); + } else { + // not enough data to create a new block + self.remainder.extend(data.iter()); + return Ok(()); + } + } else if data.len() < chunk_size { + self.remainder = Vec::from(data); + return Ok(()); + } + + for chunk in data[pos..].chunks(chunk_size) { + if chunk.len() == chunk_size { + self.size += chunk_size; + //log_debug!("size += chunk_size {} {}", self.size, chunk_size); + let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec()); + let content_ser = serde_bare::to_vec(&data_chunk).unwrap(); + self.blocks.push(Self::make_block( + content_ser, + &conv_key, + vec![], + &mut already_existing, + &self.store, + )?); + } else { + self.remainder = Vec::from(chunk); + return Ok(()); + } + } + Ok(()) + } + + pub fn save(&mut self) -> Result { + if self.id.is_some() { + return Err(FileError::AlreadySaved); + } + // save the remainder, if any. + if self.remainder.len() > 0 { + self.size += self.remainder.len(); + //log_debug!("size += remainder {} {}", self.size, self.remainder.len()); + let mut remainder = Vec::with_capacity(self.remainder.len()); + remainder.append(&mut self.remainder); + let data_chunk = ChunkContentV0::DataChunk(remainder); + let content_ser = serde_bare::to_vec(&data_chunk).unwrap(); + self.blocks.push(Self::make_block( + content_ser, + &self.conv_key.unwrap(), + vec![], + &mut HashMap::new(), + &self.store, + )?); + } + + self.meta.set_total_size(self.size as u64); + + let mut already_existing: HashMap = HashMap::new(); + let (content_block, root_block) = Self::save_( + &mut already_existing, + &self.blocks, + &mut self.meta, + self.conv_key.as_ref().unwrap(), + &self.store, + )?; + + self.conv_key.as_mut().unwrap().zeroize(); + self.conv_key = None; + + self.id = Some(root_block.0); + self.key = Some(root_block.1.clone()); + self.content_block = Some(content_block); + + self.blocks = vec![]; + self.blocks.shrink_to_fit(); + + Ok(root_block.0) + } + + pub fn reference(&self) -> Option { + if self.key.is_some() && self.id.is_some() { + Some(ObjectRef::from_id_key( + self.id.unwrap(), + self.key.to_owned().unwrap(), + )) + } else { + None + } + } + + /// Opens a file for read purpose. 
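+    ///
+    /// A hypothetical reopen (editor's sketch; `id` and `key` come from a
+    /// previously saved file):
+    /// ```ignore
+    /// let file = RandomAccessFile::open(id, key, Arc::clone(&store))?;
+    /// let first_bytes = file.read(0, 1024)?;
+    /// ```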
+ pub fn open( + id: ObjectId, + key: SymKey, + store: Arc, + ) -> Result { + // load root block + let root_block = store.get(&id)?; + + if root_block.children().len() != 2 + || *root_block.content().commit_header_obj() != CommitHeaderObject::RandomAccess + { + return Err(FileError::BlockDeserializeError); + } + + let (root_sub_blocks, _) = root_block.read(&key)?; + + // load meta object (first one in root block) + let meta_object = Object::load( + root_sub_blocks[0].0, + Some(root_sub_blocks[0].1.clone()), + &store, + )?; + + let meta = match meta_object.content_v0()? { + ObjectContentV0::RandomAccessFileMeta(meta) => meta, + _ => return Err(FileError::InvalidChildren), + }; + + Ok(RandomAccessFile { + store, + meta, + block_contents: HashMap::new(), // not used in this case + blocks: vec![(id, SymKey::nil()), (root_sub_blocks[0].0, SymKey::nil())], // not used in this case + id: Some(id), + key: Some(key), + content_block: Some(root_sub_blocks[1].clone()), + conv_key: None, + remainder: vec![], + size: 0, + }) + } + + pub fn blocks(&self) -> impl Iterator + '_ { + self.blocks + .iter() + .map(|key| self.store.get(&key.0).unwrap()) + } + + /// Size once encoded, before deduplication. Only available before save() + pub fn size(&self) -> usize { + let mut total = 0; + self.blocks().for_each(|b| total += b.size()); + total + } + + /// Real size on disk + pub fn dedup_size(&self) -> usize { + let mut total = 0; + self.block_contents + .values() + .for_each(|b| total += self.store.get(b).unwrap().size()); + total + } + + pub fn depth(&self) -> Result { + Ok(self.meta.depth()) + + // unimplemented!(); + // if self.key().is_none() { + // return Err(ObjectParseError::MissingRootKey); + // } + // let parents = vec![(self.id(), self.key().unwrap())]; + // Self::collect_leaves( + // &self.blocks, + // &parents, + // self.blocks.len() - 1, + // &mut None, + // &mut None, + // &self.block_contents, + // ) + } +} + +impl fmt::Display for RandomAccessFile { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!( + f, + "====== File ID {}", + self.id + .map_or("NOT SAVED".to_string(), |i| format!("{}", i)) + )?; + writeln!( + f, + "== Key: {}", + self.key + .as_ref() + .map_or("None".to_string(), |k| format!("{}", k)) + )?; + writeln!(f, "== depth: {}", self.meta.depth())?; + writeln!(f, "== arity: {}", self.meta.arity())?; + writeln!(f, "== chunk_size: {}", self.meta.chunk_size())?; + writeln!(f, "== total_size: {}", self.meta.total_size())?; + writeln!(f, "== content_type: {}", self.meta.content_type())?; + writeln!(f, "== metadata len: {}", self.meta.metadata().len())?; + if self.id.is_none() { + writeln!(f, "== blocks to save: {}", self.blocks.len())?; + } + Ok(()) + } +} + +#[cfg(test)] +mod test { + + use crate::file::*; + use std::io::BufReader; + use std::io::Read; + + /// Checks that a content that does fit in one block, creates an arity of 0 + #[test] + pub fn test_depth_0() { + let block_size = store_max_value_size(); + //store_valid_value_size(0) + + ////// 1 MB of data! 
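+        // (editor's note) block_size - BLOCK_EXTRA fills exactly one chunk,
+        // so the tree has depth 0 and the store ends with exactly 3 values
+        // (the single content block, the meta object and the root block), as
+        // asserted below.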
+ let data_size = block_size - BLOCK_EXTRA; + + let store = Store::dummy_public_v0(); + log_debug!("creating 1MB of data"); + let content: Vec = vec![99; data_size]; + + log_debug!("creating random access file with that data"); + let file: RandomAccessFile = RandomAccessFile::new_from_slice( + &content, + block_size, + "text/plain".to_string(), + vec![], + Arc::clone(&store), + ) + .expect("new_from_slice"); + log_debug!("{}", file); + + let id = file.id.to_owned().unwrap(); + + let file_size = file.size(); + log_debug!("file size to save : {}", file_size); + + log_debug!("data size: {}", data_size); + + let read_content = file.read(0, data_size).expect("reading all"); + assert_eq!(read_content, content); + + let read_content2 = file.read(0, data_size + 1); + assert_eq!(read_content2.unwrap().len(), 1048564); + + let read_content = file.read(data_size - 9, 9).expect("reading end"); + assert_eq!(read_content, vec![99, 99, 99, 99, 99, 99, 99, 99, 99]); + + let read_content = file.read(data_size - 9, 10); + assert_eq!(read_content, Ok(vec![99, 99, 99, 99, 99, 99, 99, 99, 99])); + + // log_debug!( + // "overhead: {} - {}%", + // file_size - data_size, + // ((file_size - data_size) * 100) as f32 / data_size as f32 + // ); + + // let dedup_size = file.dedup_size(); + // log_debug!( + // "dedup compression: {} - {}%", + // data_size - dedup_size, + // ((data_size - dedup_size) * 100) as f32 / data_size as f32 + // ); + + // log_debug!("number of blocks : {}", file.blocks.len()); + // assert_eq!( + // file.blocks.len(), + // MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES + MAX_ARITY_LEAVES + 1 + // ); + assert_eq!(file.depth(), Ok(0)); + assert_eq!(store.len(), Ok(3)); + + let file = RandomAccessFile::open(id, file.key.unwrap(), store).expect("re open"); + + log_debug!("{}", file); + + let read_content = file.read(0, data_size).expect("reading all after re open"); + assert_eq!(read_content, content); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[ignore] + #[test] + pub fn test_depth_1() { + const MAX_ARITY_LEAVES: usize = 15887; + const MAX_DATA_PAYLOAD_SIZE: usize = 1048564; + + ////// 16 GB of data! + let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE; + + let store = Store::dummy_public_v0(); + log_debug!("creating 16GB of data"); + + let content: Vec = vec![99; data_size]; + + log_debug!("creating random access file with that data"); + let file: RandomAccessFile = RandomAccessFile::new_from_slice( + &content, + store_max_value_size(), + "text/plain".to_string(), + vec![], + Arc::clone(&store), + ) + .expect("new_from_slice"); + log_debug!("{}", file); + + let _id = file.id.to_owned().unwrap(); + + log_debug!("data size: {}", data_size); + + assert_eq!(file.depth(), Ok(1)); + + assert_eq!(store.len(), Ok(4)); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[ignore] + #[test] + pub fn test_depth_2() { + const MAX_ARITY_LEAVES: usize = 15887; + const MAX_DATA_PAYLOAD_SIZE: usize = 1048564; + + ////// 16 GB of data! 
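+        // (editor's note) One byte more than 15887 * 1048564: the 15888th
+        // leaf no longer fits under a single arity-15887 node, so the tree
+        // grows to depth 2. Dedup keeps the store at 7 values: 2 distinct
+        // leaves (the repeated full chunk and the 1-byte remainder), 2
+        // level-1 nodes, the level-2 node, the meta object and the root.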
+ let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE + 1; + + let store = Store::dummy_public_v0(); + log_debug!("creating 16GB of data"); + let content: Vec = vec![99; data_size]; + + log_debug!("creating file with that data"); + let file: RandomAccessFile = RandomAccessFile::new_from_slice( + &content, + store_max_value_size(), + "text/plain".to_string(), + vec![], + Arc::clone(&store), + ) + .expect("new_from_slice"); + log_debug!("{}", file); + + let file_size = file.size(); + log_debug!("file size: {}", file_size); + + log_debug!("data size: {}", data_size); + + assert_eq!(file.depth().unwrap(), 2); + + assert_eq!(store.len(), Ok(7)); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[test] + pub fn test_depth_3() { + const MAX_ARITY_LEAVES: usize = 61; + const MAX_DATA_PAYLOAD_SIZE: usize = 4084; + + ////// 900 MB of data! + let data_size = + MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE; + + let store = Store::dummy_public_v0(); + log_debug!("creating 900MB of data"); + let content: Vec = vec![99; data_size]; + + log_debug!("creating file with that data"); + let file: RandomAccessFile = RandomAccessFile::new_from_slice( + &content, + store_valid_value_size(0), + "text/plain".to_string(), + vec![], + Arc::clone(&store), + ) + .expect("new_from_slice"); + log_debug!("{}", file); + + let file_size = file.size(); + log_debug!("file size: {}", file_size); + + let read_content = file.read(0, data_size).expect("reading all"); + assert_eq!(read_content.len(), MAX_DATA_PAYLOAD_SIZE); + + let read_content = file.read(9000, 10000).expect("reading 10k"); + assert_eq!(read_content, vec![99; 3252]); + + // log_debug!("data size: {}", data_size); + // log_debug!( + // "overhead: {} - {}%", + // file_size - data_size, + // ((file_size - data_size) * 100) as f32 / data_size as f32 + // ); + + // let dedup_size = file.dedup_size(); + // log_debug!( + // "dedup compression: {} - {}%", + // data_size - dedup_size, + // ((data_size - dedup_size) * 100) as f32 / data_size as f32 + // ); + + // log_debug!("number of blocks : {}", file.blocks.len()); + // assert_eq!( + // file.blocks.len(), + // MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES + MAX_ARITY_LEAVES + 1 + // ); + assert_eq!(file.depth().unwrap(), 3); + + assert_eq!(store.len(), Ok(6)); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[ignore] + #[test] + pub fn test_depth_4() { + const MAX_ARITY_LEAVES: usize = 61; + const MAX_DATA_PAYLOAD_SIZE: usize = 4084; + + ////// 52GB of data! 
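+        // (editor's note) 61^4 = 13_845_841 leaves of 4084 bytes each, about
+        // 56.5 billion bytes (~52 GiB), which needs a tree of depth 4 at this
+        // small block size. Because every chunk is identical, deduplication
+        // leaves only 7 values in the store, as asserted at the end.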
+ let data_size = MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_DATA_PAYLOAD_SIZE; + + let store = Store::dummy_public_v0(); + log_debug!("creating 55GB of data"); + let content: Vec = vec![99; data_size]; + + log_debug!("creating file with that data"); + let file: RandomAccessFile = RandomAccessFile::new_from_slice( + &content, + store_valid_value_size(0), + "text/plain".to_string(), + vec![], + Arc::clone(&store), + ) + .expect("new_from_slice"); + + log_debug!("{}", file); + + let file_size = file.size(); + log_debug!("file size: {}", file_size); + + log_debug!("data size: {}", data_size); + + assert_eq!(file.depth().unwrap(), 4); + + assert_eq!(store.len(), Ok(7)); + } + + /// Test async write to a file all at once + #[test] + pub fn test_write_all_at_once() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + + let store = Store::dummy_public_v0(); + + log_debug!("creating file with the JPG content"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_max_value_size(), //store_valid_value_size(0),// + "image/jpeg".to_string(), + vec![], + store, + ); + + log_debug!("{}", file); + + file.write(&img_buffer).expect("write all at once"); + + // !!! all those tests work only because store_max_value_size() is bigger than the actual size of the JPEG file. so it fits in one block. + + assert_eq!( + file.read(0, img_buffer.len()).expect("read before save"), + img_buffer + ); + + // asking too much, receiving just enough + assert_eq!( + file.read(0, img_buffer.len() + 1) + .expect("read before save"), + img_buffer + ); + + // // reading too far, well behind the size of the JPG + // assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]); + + // // reading one byte after the end of the file size. + // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + + file.save().expect("save"); + + let res = file.read(0, img_buffer.len()).expect("read all"); + assert_eq!(res, img_buffer); + + // // asking too much, receiving an error, as now we know the total size of file, and we check it + // assert_eq!( + // file.read(0, img_buffer.len() + 1), + // Err(FileError::EndOfFile) + // ); + + // reading too far, well behind the size of the JPG + assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); + + // // reading one byte after the end of the file size. 
+ // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + } + + /// Test async write to a file by increments + #[test] + pub fn test_write_by_increments() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + + let store = Store::dummy_public_v0(); + + log_debug!("creating file with the JPG content"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_max_value_size(), //store_valid_value_size(0),// + "image/jpeg".to_string(), + vec![], + store, + ); + + log_debug!("{}", file); + + for chunk in img_buffer.chunks(1000) { + file.write(chunk).expect("write a chunk"); + } + + assert_eq!( + file.read(0, img_buffer.len()).expect("read before save"), + img_buffer + ); + + // asking too much, receiving just enough + assert_eq!( + file.read(0, img_buffer.len() + 1) + .expect("read before save"), + img_buffer + ); + + // reading too far, well behind the size of the JPG + assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]); + + // reading one byte after the end of the file size. + assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + + file.save().expect("save"); + + // this works only because store_max_value_size() is bigger than the actual size of the JPEG file. so it fits in one block. + let res = file.read(0, img_buffer.len()).expect("read all"); + + assert_eq!(res, img_buffer); + + // // asking too much, receiving an error, as now we know the total size of file, and we check it + // assert_eq!( + // file.read(0, img_buffer.len() + 1), + // Err(FileError::EndOfFile) + // ); + + // reading too far, well behind the size of the JPG + assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); + + // // reading one byte after the end of the file size. 
+ // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + } + + /// Test async write to a file by increments small blocks + #[test] + pub fn test_write_by_increments_small_blocks() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + + let store = Store::dummy_public_v0(); + + log_debug!("creating file with the JPG content"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_valid_value_size(0), + "image/jpeg".to_string(), + vec![], + store, + ); + + log_debug!("{}", file); + + let first_block_content = img_buffer[0..4084].to_vec(); + + for chunk in img_buffer.chunks(1000) { + file.write(chunk).expect("write a chunk"); + } + + log_debug!("{}", file); + + assert_eq!( + file.read(0, img_buffer.len()).expect("read before save"), + first_block_content + ); + + // asking too much, receiving just enough + assert_eq!( + file.read(0, img_buffer.len() + 1) + .expect("read before save"), + first_block_content + ); + + // reading too far, well behind the size of the JPG + assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]); + + // // reading one byte after the end of the file size. + // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + + file.save().expect("save"); + + log_debug!("{}", file); + + assert_eq!(img_buffer.len(), file.meta.total_size() as usize); + + let res = file.read(0, img_buffer.len()).expect("read all"); + assert_eq!(res, first_block_content); + + // // asking too much, not receiving an error, as we know the total size of file, and return what we can + // assert_eq!( + // file.read(0, img_buffer.len() + 1), + // Err(FileError::EndOfFile) + // ); + + // reading too far, well behind the size of the JPG + assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); + + // // reading one byte after the end of the file size. 
+ // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + } + + /// Test async write to a file all at once + #[test] + pub fn test_write_all_at_once_small_blocks() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + + let first_block_content = img_buffer[0..4084].to_vec(); + + let store = Store::dummy_public_v0(); + + log_debug!("creating file with the JPG content"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_valid_value_size(0), + "image/jpeg".to_string(), + vec![], + store, + ); + + log_debug!("{}", file); + + file.write(&img_buffer).expect("write all at once"); + + assert_eq!( + file.read(0, img_buffer.len()).expect("read before save"), + first_block_content + ); + + // asking too much, receiving just enough + assert_eq!( + file.read(0, img_buffer.len() + 1) + .expect("read before save"), + first_block_content + ); + + // reading too far, well behind the size of the JPG + assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read before save"), vec![41]); + + // // reading one byte after the end of the file size. + // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + + file.save().expect("save"); + + let res = file.read(0, img_buffer.len()).expect("read all"); + assert_eq!(res, first_block_content); + + let res = file.read(10, img_buffer.len() - 10).expect("read all"); + assert_eq!(res, first_block_content[10..].to_vec()); + + // // asking too much, receiving an error, as now we know the total size of file, and we check it + // assert_eq!( + // file.read(0, img_buffer.len() + 1), + // Err(FileError::EndOfFile) + // ); + + // reading too far, well behind the size of the JPG + assert_eq!(file.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(10000, 1).expect("read after save"), vec![41]); + + // // reading one byte after the end of the file size. + // assert_eq!(file.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file.read(29454, 0), Err(FileError::InvalidArgument)); + } + + /// Test depth 4 with 52GB of data, but using write in small increments, so the memory burden on the system will be minimal + #[ignore] + #[test] + pub fn test_depth_4_write_small() { + const MAX_ARITY_LEAVES: usize = 61; + const MAX_DATA_PAYLOAD_SIZE: usize = 4084; + + ////// 52GB of data! 
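+        // (editor's note) Same 61^4 * 4084-byte payload as test_depth_4, but
+        // streamed through write() in 5 MB chunks so only a small buffer is
+        // held in memory at any time; the saved tree is identical (depth 4,
+        // 7 deduplicated store values).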
+ let data_size = MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_DATA_PAYLOAD_SIZE; + + // chunks of 5 MB + let chunk_nbr = data_size / 5000000; + let last_chunk = data_size % 5000000; + + let store = Store::dummy_public_v0(); + + log_debug!("creating empty file"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_valid_value_size(0), + "image/jpeg".to_string(), + vec![], + Arc::clone(&store), + ); + + log_debug!("{}", file); + + let chunk = vec![99; 5000000]; + let last_chunk = vec![99; last_chunk]; + + for _i in 0..chunk_nbr { + file.write(&chunk).expect("write a chunk"); + } + + file.write(&last_chunk).expect("write last chunk"); + + log_debug!("{}", file); + + file.save().expect("save"); + + log_debug!("{}", file); + + let file_size = file.size(); + log_debug!("file size: {}", file_size); + + log_debug!("data size: {}", data_size); + + assert_eq!(data_size, file.meta.total_size() as usize); + + assert_eq!(file.depth().unwrap(), 4); + + assert_eq!(store.len(), Ok(7)); + } + + /// Test open + #[test] + pub fn test_open() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + + let store = Store::dummy_public_v0(); + + log_debug!("creating file with the JPG content"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_max_value_size(), //store_valid_value_size(0),// + "image/jpeg".to_string(), + vec![], + Arc::clone(&store), + ); + + log_debug!("{}", file); + + for chunk in img_buffer.chunks(1000) { + file.write(chunk).expect("write a chunk"); + } + + file.save().expect("save"); + + let file2 = RandomAccessFile::open(file.id().unwrap(), file.key.unwrap(), store) + .expect("reopen file"); + + // this works only because store_max_value_size() is bigger than the actual size of the JPEG file. so it fits in one block. + let res = file2.read(0, img_buffer.len()).expect("read all"); + + log_debug!("{}", file2); + + assert_eq!(res, img_buffer); + + // // asking too much, receiving an error, as now we know the total size of file, and we check it + // assert_eq!( + // file2.read(0, img_buffer.len() + 1), + // Err(FileError::EndOfFile) + // ); + + // reading too far, well behind the size of the JPG + assert_eq!(file2.read(100000, 1), Err(FileError::EndOfFile)); + + assert_eq!(file2.read(10000, 1).expect("read after save"), vec![41]); + + // // reading one byte after the end of the file size. 
+ // assert_eq!(file2.read(29454, 1), Err(FileError::EndOfFile)); + + assert_eq!(file2.read(29454, 0), Err(FileError::InvalidArgument)); + } + + /// Test read JPEG file small + #[test] + pub fn test_read_small_file() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + let len = img_buffer.len(); + let content = ObjectContent::new_file_v0_with_content(img_buffer.clone(), "image/jpeg"); + + let max_object_size = store_max_value_size(); + let store = Store::dummy_public_v0(); + let mut obj = Object::new(content, None, max_object_size, &store); + + log_debug!("{}", obj); + + let _ = obj.save_in_test(&store).expect("save"); + + let file = File::open(obj.id(), obj.key().unwrap(), store).expect("open"); + + let res = file.read(0, len).expect("read all"); + + assert_eq!(res, img_buffer); + } + + /// Test read JPEG file random access + #[test] + pub fn test_read_random_access_file() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + let len = img_buffer.len(); + + let max_object_size = store_max_value_size(); + let store = Store::dummy_public_v0(); + + log_debug!("creating empty file"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + max_object_size, + "image/jpeg".to_string(), + vec![], + Arc::clone(&store), + ); + + file.write(&img_buffer).expect("write all"); + + log_debug!("{}", file); + + file.save().expect("save"); + + log_debug!("{}", file); + + let file = File::open( + file.id().unwrap(), + file.key().to_owned().unwrap(), + store, + ) + .expect("open"); + + // this only works because we chose a big block size (1MB) so the small JPG file fits in one block. 
+ // if not, we would have to call read repeatedly and append the results into a buffer, in order to get the full file + let res = file.read(0, len).expect("read all"); + + assert_eq!(res, img_buffer); + } + + /// Test depth 4, but using write in increments, so the memory burden on the system will be minimal + #[ignore] + #[test] + pub fn test_depth_4_big_write_small() { + let encoding_big_file = std::time::Instant::now(); + + let f = std::fs::File::open("[enter path of a big file here]").expect("open of a big file"); + let mut reader = BufReader::new(f); + + let store = Store::dummy_public_v0(); + + log_debug!("creating empty file"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_valid_value_size(0), + "image/jpeg".to_string(), + vec![], + store, + ); + + log_debug!("{}", file); + + let mut chunk = [0u8; 1000000]; + + loop { + let size = reader.read(&mut chunk).expect("read a chunk"); + //log_debug!("{}", size); + file.write(&chunk[0..size]).expect("write a chunk"); + if size != 1000000 { + break; + } + } + + log_debug!("{}", file); + + file.save().expect("save"); + + log_debug!("{}", file); + + log_debug!("data size: {}", file.meta.total_size()); + + //assert_eq!(data_size, file.meta.total_size() as usize); + + assert_eq!(file.depth().unwrap(), 4); + + log_debug!( + "encoding_big_file took: {} s", + encoding_big_file.elapsed().as_secs_f32() + ); + } + + /// Test depth 4 with 2.7GB of data, but using write in increments, so the memory burden on the system will be minimal + #[ignore] + #[test] + pub fn test_depth_4_big_write_big() { + let encoding_big_file = std::time::Instant::now(); + + let f = std::fs::File::open("[enter path of a big file here]").expect("open of a big file"); + let mut reader = BufReader::new(f); + + let store = Store::dummy_public_v0(); + + log_debug!("creating empty file"); + let mut file: RandomAccessFile = RandomAccessFile::new_empty( + store_max_value_size(), + "image/jpeg".to_string(), + vec![], + store, + ); + + log_debug!("{}", file); + + let mut chunk = [0u8; 2000000]; + + loop { + let size = reader.read(&mut chunk).expect("read a chunk"); + //log_debug!("{}", size); + file.write(&chunk[0..size]).expect("write a chunk"); + if size != 2000000 { + break; + } + } + + log_debug!("{}", file); + + file.save().expect("save"); + + log_debug!("{}", file); + + log_debug!("data size: {}", file.meta.total_size()); + + //assert_eq!(data_size, file.meta.total_size() as usize); + + assert_eq!(file.depth().unwrap(), 1); + + log_debug!( + "encoding_big_file took: {} s", + encoding_big_file.elapsed().as_secs_f32() + ); + } +} diff --git a/ng-repo/src/kcv_storage.rs b/ng-repo/src/kcv_storage.rs new file mode 100644 index 0000000..cf376e4 --- /dev/null +++ b/ng-repo/src/kcv_storage.rs @@ -0,0 +1,1128 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
KeyColumnValue Storage abstraction + +use std::{ + collections::{HashMap, HashSet}, + marker::PhantomData, +}; + +use serde::{Deserialize, Serialize}; +use serde_bare::{from_slice, to_vec}; + +use crate::errors::StorageError; +#[allow(unused_imports)] +use crate::log::*; + +pub fn prop<A>(prop: u8, props: &HashMap<u8, Vec<u8>>) -> Result<A, StorageError> +where + A: for<'a> Deserialize<'a>, +{ + Ok(from_slice( + &props.get(&prop).ok_or(StorageError::PropertyNotFound)?, + )?) +} + +pub fn col<A>( + column: &dyn ISingleValueColumn, + props: &HashMap<u8, Vec<u8>>, +) -> Result<A, StorageError> +where + A: for<'a> Deserialize<'a>, +{ + Ok(from_slice( + &props + .get(&column.suffix()) + .ok_or(StorageError::PropertyNotFound)?, + )?) +} + +pub struct Class<'a> { + prefix: Option<u8>, + pub name: &'static str, + existential_column: Option<&'a dyn ISingleValueColumn>, + columns: &'a [&'a dyn ISingleValueColumn], + multi_value_columns: &'a [&'a dyn IMultiValueColumn], +} + +impl<'a> Class<'a> { + pub const fn new( + name: &'static str, + prefix: Option<u8>, + existential_column: Option<&'a dyn ISingleValueColumn>, + columns: &'a [&'a dyn ISingleValueColumn], + multi_value_columns: &'a [&'a dyn IMultiValueColumn], + ) -> Self { + if prefix.is_none() { + if existential_column.is_some() { + panic!("cannot have an existential_column without a prefix"); + } + if columns.len() > 0 { + panic!("cannot have some property columns without a prefix"); + } + } + Self { + columns, + name, + multi_value_columns, + prefix, + existential_column, + } + } + + /// check unicity of prefixes and suffixes + #[cfg(debug_assertions)] + pub fn check(&self) { + let mut prefixes = if self.prefix.is_some() { + HashSet::from([self.prefix.unwrap()]) + } else { + HashSet::new() + }; + + let mut suffixes = if self.existential_column.is_some() { + HashSet::from([self.existential_column.unwrap().suffix()]) + } else { + HashSet::new() + }; + let name = self.name; + //log_debug!("CHECKING CLASS {name}"); + for column in self.columns.iter() { + //log_debug!("INSERTING SUFFIX {}", column.suffix()); + if !suffixes.insert(column.suffix()) { + panic!( + "duplicate suffix {} in {name}!!! check the code", + column.suffix() as char + ); + } + } + //log_debug!("SUFFIXES {:?}", suffixes); + for mvc in self.multi_value_columns.iter() { + //log_debug!("INSERTING PREFIX {}", mvc.prefix()); + if !prefixes.insert(mvc.prefix()) { + panic!( + "duplicate prefix {} in {name}!!! check the code", + mvc.prefix() as char + ); + } + } + //log_debug!("PREFIXES {:?}", prefixes); + }
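+ + // Hedged, illustrative sketch of how a storage schema is meant to be declared + // with these building blocks (the model, prefix and suffix bytes below are + // hypothetical examples, not definitions from this crate): + // + // const NAME: SingleValueColumn<MyModel, String> = SingleValueColumn::new(b'n'); + // const FOLLOWER: MultiValueColumn<MyModel, [u8; 32]> = MultiValueColumn::new(b'f'); + // const CLASS: Class<'static> = + // Class::new("MyModel", Some(b'm'), None, &[&NAME], &[&FOLLOWER]);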
check the code", + mvc.prefix() as char + ); + } + } + //log_debug!("PREFIXES {:?}", prefixes); + } + + pub fn prefixes(&self) -> Vec { + let mut res: Vec = self + .multi_value_columns + .iter() + .map(|c| c.prefix()) + .collect(); + if self.prefix.is_some() { + res.push(self.prefix.unwrap()); + } + res + } + fn suffices(&self) -> Vec { + let mut res: Vec = self.columns.iter().map(|c| c.suffix()).collect(); + if self.existential_column.is_some() { + res.push(self.existential_column.unwrap().suffix()); + } + res + } +} + +pub fn format_type_of(_: &T) -> String { + format!("{}", std::any::type_name::()) +} + +pub trait IModel { + fn key(&self) -> &Vec; + fn prefix(&self) -> u8 { + self.class().prefix.unwrap() + } + fn check_exists(&mut self) -> Result<(), StorageError> { + if !self.exists() { + return Err(StorageError::NotFound); + } + Ok(()) + } + fn existential(&mut self) -> Option<&mut dyn IExistentialValue>; + fn exists(&mut self) -> bool { + if self.existential().is_none() || self.class().existential_column.is_none() { + return true; + } + if self.existential().as_mut().unwrap().exists() { + return true; + } + let prefix = self.prefix(); + let key = self.key(); + let suffix = self.class().existential_column.unwrap().suffix(); + // log_info!( + // "EXISTENTIAL CHECK {} {} {:?}", + // prefix as char, + // suffix as char, + // key + // ); + match self.storage().get(prefix, key, Some(suffix), &None) { + Ok(res) => { + //log_info!("EXISTENTIAL CHECK GOT {:?}", res); + self.existential().as_mut().unwrap().process_exists(res); + true + } + Err(_e) => false, + } + } + fn storage(&self) -> &dyn KCVStorage; + fn load_props(&self) -> Result>, StorageError> { + if self.class().prefix.is_none() { + panic!("cannot call load_props on a Class without prefix"); + } + self.storage().get_all_properties_of_key( + self.prefix(), + self.key().to_vec(), + self.class().suffices(), + &None, + ) + } + fn class(&self) -> &Class; + fn del(&self) -> Result<(), StorageError> { + self.storage().write_transaction(&mut |tx| { + if self.class().prefix.is_some() { + tx.del_all(self.prefix(), self.key(), &self.class().suffices(), &None)?; + } + for mvc in self.class().multi_value_columns.iter() { + let size = mvc.value_size()?; + tx.del_all_values(mvc.prefix(), self.key(), size, None, &None)?; + } + Ok(()) + })?; + Ok(()) + } +} +use std::hash::Hash; +pub struct MultiValueColumn< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'a> Deserialize<'a>, +> { + prefix: u8, + phantom: PhantomData, + model: PhantomData, + //value_size: usize, +} + +impl< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'d> Deserialize<'d>, + > MultiValueColumn +{ + pub const fn new(prefix: u8) -> Self { + MultiValueColumn { + prefix, + phantom: PhantomData, + model: PhantomData, + } + } + + fn compute_key(model: &Model, column: &Column) -> Result, StorageError> { + let model_key = model.key(); + let mut column_ser = to_vec(column)?; + let mut key = Vec::with_capacity(model_key.len() + column_ser.len()); + key.append(&mut model_key.to_vec()); + key.append(&mut column_ser); + Ok(key) + } + + pub fn add(&self, model: &mut Model, column: &Column) -> Result<(), StorageError> { + model.check_exists()?; + let key = Self::compute_key(model, column)?; + model.storage().put(self.prefix, &key, None, &vec![], &None) + } + + pub fn add_lazy(&self, model: &mut Model, column: &Column) -> Result<(), StorageError> { + model.check_exists()?; + let key = 
Self::compute_key(model, column)?; + model.storage().write_transaction(&mut |tx| { + match tx.has_property_value(self.prefix, &key, None, &vec![], &None) { + Ok(_) => {} + Err(StorageError::NotFound) => { + tx.put(self.prefix, &key, None, &vec![], &None)?; + } + Err(e) => return Err(e), + }; + Ok(()) + }) + } + + pub fn remove(&self, model: &mut Model, column: &Column) -> Result<(), StorageError> { + model.check_exists()?; + let key = Self::compute_key(model, column)?; + model.storage().del(self.prefix, &key, None, &None) + } + + pub fn has(&self, model: &mut Model, column: &Column) -> Result<(), StorageError> { + model.check_exists()?; + let key = Self::compute_key(model, column)?; + model + .storage() + .has_property_value(self.prefix, &key, None, &vec![], &None) + } + + pub fn remove_from_set_and_add( + &self, + model: &mut Model, + mut remove_set: HashSet, + add_set: HashSet, + ) -> Result<(), StorageError> { + // if existing_set.len() == 0 { + // return Err(StorageError::InvalidValue); + // } + model.check_exists()?; + + let key_prefix = model.key(); + let key_prefix_len = key_prefix.len(); + let total_size = key_prefix_len + self.value_size()?; + + //log_debug!("REPLACE HEAD {:?} with {:?}", existing_set, replace_with); + + model.storage().write_transaction(&mut |tx| { + for found in tx.get_all_keys_and_values( + self.prefix, + total_size, + key_prefix.to_vec(), + None, + &None, + )? { + if found.0.len() == total_size + 1 { + let val: Column = from_slice(&found.0[1 + key_prefix_len..total_size + 1])?; + if remove_set.remove(&val) { + tx.del(self.prefix, &found.0[1..].to_vec(), None, &None)?; + } + } + } + + for add in add_set.iter() { + let mut new = Vec::with_capacity(total_size); + new.extend(key_prefix); + let mut val = to_vec(add)?; + new.append(&mut val); + //log_debug!("PUTTING HEAD {} {:?}", self.prefix as char, new); + tx.put(self.prefix, &new, None, &vec![], &None)?; + } + return Ok(()); + }) + } + + pub fn replace_with_new_set_if_old_set_exists( + &self, + model: &mut Model, + mut existing_set: HashSet, + replace_with: HashSet, + ) -> Result<(), StorageError> { + // if existing_set.len() == 0 { + // return Err(StorageError::InvalidValue); + // } + model.check_exists()?; + + let key_prefix = model.key(); + let key_prefix_len = key_prefix.len(); + let total_size = key_prefix_len + self.value_size()?; + + let empty_existing = existing_set.is_empty(); + + //log_debug!("REPLACE HEAD {:?} with {:?}", existing_set, replace_with); + + model.storage().write_transaction(&mut |tx| { + for found in tx.get_all_keys_and_values( + self.prefix, + total_size, + key_prefix.to_vec(), + None, + &None, + )? 
{ + if found.0.len() == total_size + 1 { + let val: Column = from_slice(&found.0[1 + key_prefix_len..total_size + 1])?; + if empty_existing { + return Err(StorageError::NotEmpty); + } + if existing_set.remove(&val) { + tx.del(self.prefix, &found.0[1..].to_vec(), None, &None)?; + } + } + } + if existing_set.is_empty() { + for add in replace_with.iter() { + let mut new = Vec::with_capacity(total_size); + new.extend(key_prefix); + let mut val = to_vec(add)?; + new.append(&mut val); + //log_debug!("PUTTING HEAD {} {:?}", self.prefix as char, new); + tx.put(self.prefix, &new, None, &vec![], &None)?; + } + return Ok(()); + } + Err(StorageError::Abort) + }) + } + + pub fn get_all(&self, model: &mut Model) -> Result, StorageError> { + model.check_exists()?; + let key_prefix = model.key(); + let key_prefix_len = key_prefix.len(); + let mut res: HashSet = HashSet::new(); + let total_size = key_prefix_len + self.value_size()?; + for val in model.storage().get_all_keys_and_values( + self.prefix, + total_size, + key_prefix.to_vec(), + None, + &None, + )? { + if val.0.len() == total_size + 1 { + let val: Column = from_slice(&val.0[1 + key_prefix_len..total_size + 1])?; + res.insert(val); + } + } + Ok(res) + } +} + +impl< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'d> Deserialize<'d>, + > IMultiValueColumn for MultiValueColumn +{ + fn value_size(&self) -> Result { + Ok(to_vec(&Column::default())?.len()) + } + fn prefix(&self) -> u8 { + self.prefix + } +} + +pub struct MultiMapColumn< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'a> Deserialize<'a>, + Value: Serialize + for<'a> Deserialize<'a> + Clone + PartialEq, +> { + prefix: u8, + phantom_column: PhantomData, + phantom_model: PhantomData, + phantom_value: PhantomData, + //value_size: usize, +} + +impl< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'d> Deserialize<'d>, + Value: Serialize + for<'a> Deserialize<'a> + Clone + PartialEq, + > MultiMapColumn +{ + pub const fn new(prefix: u8) -> Self { + MultiMapColumn { + prefix, + phantom_column: PhantomData, + phantom_model: PhantomData, + phantom_value: PhantomData, + } + } + pub fn add( + &self, + model: &mut Model, + column: &Column, + value: &Value, + ) -> Result<(), StorageError> { + model.check_exists()?; + let key = MultiValueColumn::compute_key(model, column)?; + model + .storage() + .put(self.prefix, &key, None, &to_vec(value)?, &None) + } + pub fn remove( + &self, + model: &mut Model, + column: &Column, + value: &Value, + ) -> Result<(), StorageError> { + model.check_exists()?; + let key = MultiValueColumn::compute_key(model, column)?; + model + .storage() + .del_property_value(self.prefix, &key, None, &to_vec(value)?, &None) + } + pub fn remove_regardless_value( + &self, + model: &mut Model, + column: &Column, + ) -> Result<(), StorageError> { + model.check_exists()?; + let key = MultiValueColumn::compute_key(model, column)?; + model.storage().del(self.prefix, &key, None, &None) + } + + pub fn has( + &self, + model: &mut Model, + column: &Column, + value: &Value, + ) -> Result<(), StorageError> { + model.check_exists()?; + let key = MultiValueColumn::compute_key(model, column)?; + model + .storage() + .has_property_value(self.prefix, &key, None, &to_vec(value)?, &None) + } + + pub fn get(&self, model: &mut Model, column: &Column) -> Result { + model.check_exists()?; + let key = MultiValueColumn::compute_key(model, column)?; + let val_ser = 
model.storage().get(self.prefix, &key, None, &None)?; + Ok(from_slice(&val_ser)?) + } + + pub fn get_or_add( + &self, + model: &mut Model, + column: &Column, + value: &Value, + ) -> Result { + model.check_exists()?; + let key = MultiValueColumn::compute_key(model, column)?; + let mut found: Option = None; + model.storage().write_transaction(&mut |tx| { + found = match tx.get(self.prefix, &key, None, &None) { + Ok(val_ser) => Some(from_slice(&val_ser)?), + Err(StorageError::NotFound) => { + tx.put(self.prefix, &key, None, &to_vec(value)?, &None)?; + None + } + Err(e) => return Err(e), + }; + Ok(()) + })?; + Ok(found.unwrap_or(value.clone())) + } + + pub fn add_or_change( + &self, + model: &mut Model, + column: &Column, + value: &Value, + ) -> Result<(), StorageError> { + model.check_exists()?; + let key = MultiValueColumn::compute_key(model, column)?; + let mut found: Option = None; + model.storage().write_transaction(&mut |tx| { + found = match tx.get(self.prefix, &key, None, &None) { + Ok(val_ser) => Some(from_slice(&val_ser)?), + Err(StorageError::NotFound) => { + tx.put(self.prefix, &key, None, &to_vec(value)?, &None)?; + None + } + Err(e) => return Err(e), + }; + if found.is_some() && found.as_ref().unwrap() != value { + // we change it + tx.put(self.prefix, &key, None, &to_vec(value)?, &None)?; + } + Ok(()) + })?; + Ok(()) + } + + pub fn get_all(&self, model: &mut Model) -> Result, StorageError> { + model.check_exists()?; + let key_prefix = model.key(); + let key_prefix_len = key_prefix.len(); + let mut res: HashMap = HashMap::new(); + let total_size = key_prefix_len + self.value_size()?; + for val in model.storage().get_all_keys_and_values( + self.prefix, + total_size, + key_prefix.to_vec(), + None, + &None, + )? { + if val.0.len() == total_size + 1 { + let col: Column = from_slice(&val.0[1 + key_prefix_len..total_size + 1])?; + let val = from_slice(&val.1)?; + res.insert(col, val); + } + } + Ok(res) + } + + pub fn take_first_value(&self, model: &mut Model) -> Result { + model.check_exists()?; + let key_prefix = model.key(); + let key_prefix_len = key_prefix.len(); + let total_size = key_prefix_len + self.value_size()?; + let val = model.storage().take_first_value( + self.prefix, + total_size, + key_prefix.to_vec(), + None, + &None, + )?; + Ok(from_slice(&val)?) + // ? 
{ + // if val.0.len() == total_size + 1 { + // let col: Column = from_slice(&val.0[1 + key_prefix_len..total_size + 1])?; + // let val = from_slice(&val.1)?; + // res.insert(col, val); + // } + // } + // Ok(res) + } +} +impl< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'d> Deserialize<'d>, + Value: Serialize + for<'a> Deserialize<'a> + Clone + PartialEq, + > IMultiValueColumn for MultiMapColumn +{ + fn value_size(&self) -> Result { + Ok(to_vec(&Column::default())?.len()) + } + fn prefix(&self) -> u8 { + self.prefix + } +} + +pub struct MultiCounterColumn< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'a> Deserialize<'a>, +> { + prefix: u8, + phantom_column: PhantomData, + phantom_model: PhantomData, +} + +impl< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'d> Deserialize<'d>, + > MultiCounterColumn +{ + pub const fn new(prefix: u8) -> Self { + MultiCounterColumn { + prefix, + phantom_column: PhantomData, + phantom_model: PhantomData, + } + } + pub fn increment(&self, model: &mut Model, column: &Column) -> Result<(), StorageError> { + let key = MultiValueColumn::compute_key(model, column)?; + model.storage().write_transaction(&mut |tx| { + let mut val: u64 = match tx.get(self.prefix, &key, None, &None) { + Ok(val_ser) => from_slice(&val_ser)?, + Err(StorageError::NotFound) => 0, + Err(e) => return Err(e), + }; + val += 1; + let val_ser = to_vec(&val)?; + tx.put(self.prefix, &key, None, &val_ser, &None)?; + Ok(()) + }) + } + /// returns true if the counter reached zero (and the key was removed from KVC store) + pub fn decrement(&self, model: &mut Model, column: &Column) -> Result { + let key = MultiValueColumn::compute_key(model, column)?; + let mut ret: bool = false; + model.storage().write_transaction(&mut |tx| { + let val_ser = tx.get(self.prefix, &key, None, &None)?; + let mut val: u64 = from_slice(&val_ser)?; + val -= 1; + ret = val == 0; + if ret { + tx.del(self.prefix, &key, None, &None)?; + } else { + let val_ser = to_vec(&val)?; + tx.put(self.prefix, &key, None, &val_ser, &None)?; + } + Ok(()) + })?; + Ok(ret) + } + + pub fn get(&self, model: &mut Model, column: &Column) -> Result { + let key = MultiValueColumn::compute_key(model, column)?; + let val_ser = model.storage().get(self.prefix, &key, None, &None)?; + let val: u64 = from_slice(&val_ser)?; + Ok(val) + } + + pub fn get_all(&self, model: &mut Model) -> Result, StorageError> { + model.check_exists()?; + let key_prefix = model.key(); + let key_prefix_len = key_prefix.len(); + let mut res: HashMap = HashMap::new(); + let total_size = key_prefix_len + self.value_size()?; + for val in model.storage().get_all_keys_and_values( + self.prefix, + total_size, + key_prefix.to_vec(), + None, + &None, + )? 
{ + if val.0.len() == total_size + 1 { + let col: Column = from_slice(&val.0[1 + key_prefix_len..total_size + 1])?; + let val = from_slice(&val.1)?; + res.insert(col, val); + } + } + Ok(res) + } +} +impl< + Model: IModel, + Column: std::fmt::Debug + Eq + PartialEq + Hash + Serialize + Default + for<'d> Deserialize<'d>, + > IMultiValueColumn for MultiCounterColumn<Model, Column> +{ + fn value_size(&self) -> Result<usize, StorageError> { + Ok(to_vec(&(0 as u64))?.len()) + } + fn prefix(&self) -> u8 { + self.prefix + } +} + +pub trait ISingleValueColumn { + fn suffix(&self) -> u8; +} + +pub trait IMultiValueColumn { + fn prefix(&self) -> u8; + fn value_size(&self) -> Result<usize, StorageError>; +} + +pub struct SingleValueColumn<Model: IModel, Value: Serialize + for<'a> Deserialize<'a>> { + suffix: u8, + phantom_value: PhantomData<Value>, + phantom_model: PhantomData<Model>, +} + +impl<Model: IModel, Value: Serialize + for<'d> Deserialize<'d>> ISingleValueColumn + for SingleValueColumn<Model, Value> +{ + fn suffix(&self) -> u8 { + self.suffix + } +} + +impl<Model: IModel, Value: Clone + Serialize + for<'d> Deserialize<'d>> + SingleValueColumn<Model, Value> +{ + pub const fn new(suffix: u8) -> Self { + SingleValueColumn { + suffix, + phantom_value: PhantomData, + phantom_model: PhantomData, + } + } + + pub fn set(&self, model: &mut Model, value: &Value) -> Result<(), StorageError> { + model.check_exists()?; + model.storage().replace( + model.prefix(), + model.key(), + Some(self.suffix), + &to_vec(value)?, + &None, + ) + } + + pub fn get(&self, model: &mut Model) -> Result<Value, StorageError> { + model.check_exists()?; + match model + .storage() + .get(model.prefix(), model.key(), Some(self.suffix), &None) + { + Ok(res) => Ok(from_slice::<Value>(&res)?), + Err(e) => Err(e), + } + } + + pub fn get_or_set(&self, model: &mut Model, value: &Value) -> Result<Value, StorageError> { + model.check_exists()?; + let mut found: Option<Value> = None; + model.storage().write_transaction(&mut |tx| { + found = match tx.get(model.prefix(), model.key(), Some(self.suffix), &None) { + Ok(val_ser) => Some(from_slice(&val_ser)?), + Err(StorageError::NotFound) => { + tx.put( + model.prefix(), + model.key(), + Some(self.suffix), + &to_vec(value)?, + &None, + )?; + None + } + Err(e) => return Err(e), + }; + Ok(()) + })?; + Ok(found.unwrap_or(value.clone())) + } + + pub fn has(&self, model: &mut Model, value: &Value) -> Result<(), StorageError> { + model.check_exists()?; + model.storage().has_property_value( + model.prefix(), + model.key(), + Some(self.suffix), + &to_vec(value)?, + &None, + ) + } + + pub fn del(&self, model: &mut Model) -> Result<(), StorageError> { + model.check_exists()?; + model + .storage() + .del(model.prefix(), model.key(), Some(self.suffix), &None) + } +} + +///////////// Counter Value + +pub struct CounterValue<Model: IModel> { + suffix: u8, + phantom_model: PhantomData<Model>, +} + +impl<Model: IModel> ISingleValueColumn for CounterValue<Model> { + fn suffix(&self) -> u8 { + self.suffix + } +} + +impl<Model: IModel> CounterValue<Model> { + pub const fn new(suffix: u8) -> Self { + CounterValue { + suffix, + phantom_model: PhantomData, + } + } + + pub fn increment(&self, model: &mut Model) -> Result<(), StorageError> { + model.storage().write_transaction(&mut |tx| { + let mut val: u64 = match tx.get(model.prefix(), model.key(), Some(self.suffix), &None) { + Ok(val_ser) => from_slice(&val_ser)?, + Err(StorageError::NotFound) => 0, + Err(e) => return Err(e), + }; + val += 1; + let val_ser = to_vec(&val)?; + tx.put( + model.prefix(), + model.key(), + Some(self.suffix), + &val_ser, + &None, + )?; + Ok(()) + }) + } + /// returns true if the counter reached zero, and the property was removed + pub fn decrement(&self, model: &mut Model) -> Result<bool, StorageError> { + let mut ret: bool = false; + model.storage().write_transaction(&mut |tx| { + let val_ser =
tx.get(model.prefix(), model.key(), Some(self.suffix), &None)?; + let mut val: u64 = from_slice(&val_ser)?; + val -= 1; + ret = val == 0; + if ret { + tx.del(model.prefix(), model.key(), Some(self.suffix), &None)?; + } else { + let val_ser = to_vec(&val)?; + tx.put( + model.prefix(), + model.key(), + Some(self.suffix), + &val_ser, + &None, + )?; + } + Ok(()) + })?; + Ok(ret) + } + + pub fn get(&self, model: &mut Model) -> Result<u64, StorageError> { + let val_res = model + .storage() + .get(model.prefix(), model.key(), Some(self.suffix), &None); + match val_res { + Ok(val_ser) => Ok(from_slice(&val_ser)?), + Err(StorageError::NotFound) => Ok(0), + Err(e) => Err(e), + } + } + + pub fn del(&self, model: &mut Model) -> Result<(), StorageError> { + model.check_exists()?; + model + .storage() + .del(model.prefix(), model.key(), Some(self.suffix), &None) + } +} + +//////////////// + +pub struct ExistentialValueColumn { + suffix: u8, +} + +impl ISingleValueColumn for ExistentialValueColumn { + fn suffix(&self) -> u8 { + self.suffix + } +} + +impl ExistentialValueColumn { + pub const fn new(suffix: u8) -> Self { + ExistentialValueColumn { suffix } + } +} + +pub struct ExistentialValue<Column: Clone + Serialize + for<'d> Deserialize<'d>> { + value: Option<Column>, + value_ser: Vec<u8>, +} +pub trait IExistentialValue { + fn process_exists(&mut self, value_ser: Vec<u8>); + + fn exists(&self) -> bool; +} + +impl<Column: Clone + Serialize + for<'d> Deserialize<'d>> IExistentialValue + for ExistentialValue<Column> +{ + fn exists(&self) -> bool { + self.value.is_some() || self.value_ser.len() > 0 + } + fn process_exists(&mut self, value_ser: Vec<u8>) { + self.value_ser = value_ser; + } +} + +impl<Column: Clone + Serialize + for<'d> Deserialize<'d>> ExistentialValue<Column> { + pub fn new() -> Self { + ExistentialValue { + value: None, + value_ser: vec![], + } + } + + pub fn set(&mut self, value: &Column) -> Result<(), StorageError> { + if self.value.is_some() { + return Err(StorageError::AlreadyExists); + } + self.value = Some(value.clone()); + + Ok(()) + } + + pub fn save<Model: IModel>(model: &Model, value: &Column) -> Result<(), StorageError> { + model.storage().replace( + model.prefix(), + model.key(), + Some(model.class().existential_column.unwrap().suffix()), + &to_vec(value)?, + &None, + )?; + Ok(()) + } + + pub fn get(&mut self) -> Result<&Column, StorageError> { + if self.value.is_some() { + return Ok(self.value.as_ref().unwrap()); + } + if self.value_ser.is_empty() { + return Err(StorageError::BackendError); + } + let value = from_slice::<Column>(&self.value_ser); + match value { + Err(_) => return Err(StorageError::InvalidValue), + Ok(val) => { + self.value = Some(val); + return Ok(self.value.as_ref().unwrap()); + } + } + } + + pub fn take(mut self) -> Result<Column, StorageError> { + self.get()?; + Ok(self.value.take().unwrap()) + } +} + +pub trait WriteTransaction: ReadTransaction { + /// Save a property value to the store. + fn put( + &self, + prefix: u8, + key: &Vec<u8>, + suffix: Option<u8>, + value: &Vec<u8>, + family: &Option<String>, + ) -> Result<(), StorageError>; + + /// Replace the property of a key (single value) to the store. + fn replace( + &self, + prefix: u8, + key: &Vec<u8>, + suffix: Option<u8>, + value: &Vec<u8>, + family: &Option<String>, + ) -> Result<(), StorageError>; + + fn take_first_value( + &self, + prefix: u8, + key_size: usize, + key_prefix: Vec<u8>, + suffix: Option<u8>, + family: &Option<String>, + ) -> Result<Vec<u8>, StorageError>; + + /// Delete a property from the store. + fn del( + &self, + prefix: u8, + key: &Vec<u8>, + suffix: Option<u8>, + family: &Option<String>, + ) -> Result<(), StorageError>;
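+ + // Hedged usage sketch (illustrative only; `storage`, `key`, `old_key` and `value` + // are hypothetical): several writes can be grouped into one transaction through + // KCVStorage::write_transaction, declared further below: + // + // storage.write_transaction(&mut |tx| { + // tx.put(b'p', &key, Some(b'v'), &to_vec(&value)?, &None)?; + // tx.del(b'p', &old_key, Some(b'v'), &None)?; + // Ok(()) + // })?;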
+ + /// Delete all properties of a key from the store. + fn del_all( + &self, + prefix: u8, + key: &Vec<u8>, + all_suffixes: &[u8], + family: &Option<String>, + ) -> Result<(), StorageError>; + + /// Delete a specific value for a property from the store. + fn del_property_value( + &self, + prefix: u8, + key: &Vec<u8>, + suffix: Option<u8>, + value: &Vec<u8>, + family: &Option<String>, + ) -> Result<(), StorageError>; + + /// Delete all values of a property of a key from the store, in case the property is a multi-value one. + fn del_all_values( + &self, + prefix: u8, + key: &Vec<u8>, + property_size: usize, + suffix: Option<u8>, + family: &Option<String>, + ) -> Result<(), StorageError>; +} + +pub trait ReadTransaction { + /// Load a property from the store. + fn get( + &self, + prefix: u8, + key: &Vec<u8>, + suffix: Option<u8>, + family: &Option<String>, + ) -> Result<Vec<u8>, StorageError>; + + /// Load all the values of a property from the store. + #[deprecated( + note = "KVStore has unique values (since the switch from lmdb to rocksdb); use get() instead" + )] + fn get_all( + &self, + prefix: u8, + key: &Vec<u8>, + suffix: Option<u8>, + family: &Option<String>, + ) -> Result<Vec<Vec<u8>>, StorageError>; + + fn get_all_properties_of_key( + &self, + prefix: u8, + key: Vec<u8>, + properties: Vec<u8>, + family: &Option<String>, + ) -> Result<HashMap<u8, Vec<u8>>, StorageError>; + + fn get_first_key_value( + &self, + prefix: u8, + key_size: usize, + key_prefix: Vec<u8>, + suffix: Option<u8>, + family: &Option<String>, + ) -> Result<(Vec<u8>, Vec<u8>), StorageError>; + + /// Check if a specific value exists for a property from the store. + fn has_property_value( + &self, + prefix: u8, + key: &Vec<u8>, + suffix: Option<u8>, + value: &Vec<u8>, + family: &Option<String>, + ) -> Result<(), StorageError>; + + /// Retrieves all the keys and values with the given prefix and key_size. If no suffix is specified, then all suffixes (including none) are returned. + fn get_all_keys_and_values( + &self, + prefix: u8, + key_size: usize, + key_prefix: Vec<u8>, + suffix: Option<u8>, + family: &Option<String>, + ) -> Result<Vec<(Vec<u8>, Vec<u8>)>, StorageError>; +} + +pub trait KCVStorage: WriteTransaction { + fn write_transaction( + &self, + method: &mut dyn FnMut(&mut dyn WriteTransaction) -> Result<(), StorageError>, + ) -> Result<(), StorageError>; + + // /// Save a property value to the store. + // fn put( + // &self, + // prefix: u8, + // key: &Vec<u8>, + // suffix: Option<u8>, + // value: Vec<u8>, + // ) -> Result<(), StorageError>; + + // /// Replace the property of a key (single value) to the store. + // fn replace( + // &self, + // prefix: u8, + // key: &Vec<u8>, + // suffix: Option<u8>, + // value: Vec<u8>, + // ) -> Result<(), StorageError>; + + // /// Delete a property from the store. + // fn del(&self, prefix: u8, key: &Vec<u8>, suffix: Option<u8>) -> Result<(), StorageError>; + + // /// Delete all properties of a key from the store. + // fn del_all(&self, prefix: u8, key: &Vec<u8>, all_suffixes: &[u8]) -> Result<(), StorageError>; + + // /// Delete a specific value for a property from the store. + // fn del_property_value( + // &self, + // prefix: u8, + // key: &Vec<u8>, + // suffix: Option<u8>, + // value: Vec<u8>, + // ) -> Result<(), StorageError>; +} diff --git a/ng-repo/src/lib.rs b/ng-repo/src/lib.rs new file mode 100644 index 0000000..ac7a4b4 --- /dev/null +++ b/ng-repo/src/lib.rs @@ -0,0 +1,199 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// Licensed under the Apache License, Version 2.0 +// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0> +// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms.
+ +pub mod types; + +pub mod block_storage; + +pub mod block; + +pub mod object; + +pub mod file; + +pub mod commit; + +pub mod branch; + +pub mod repo; + +pub mod store; + +pub mod event; + +pub mod utils; + +pub mod errors; + +pub mod kcv_storage; + +pub mod os_info; + +pub use ng_threshold_crypto::PublicKeySet; + +#[macro_use] +extern crate slice_as_array; + +pub mod log { + + #[cfg(not(target_arch = "wasm32"))] + pub use debug_print::debug_println; + #[cfg(target_arch = "wasm32")] + pub use gloo_timers; + #[cfg(not(target_arch = "wasm32"))] + pub use log; + + #[cfg(target_arch = "wasm32")] + use wasm_bindgen::prelude::*; + + #[cfg(target_arch = "wasm32")] + #[wasm_bindgen] + extern "C" { + // Use `js_namespace` here to bind `console.log(..)` instead of just + // `log(..)` + #[wasm_bindgen(js_namespace = console)] + pub fn log(s: &str); + + #[wasm_bindgen(js_namespace = console)] + pub fn warn(s: &str); + + #[wasm_bindgen(js_namespace = console)] + pub fn error(s: &str); + + // The `console.log` is quite polymorphic, so we can bind it with multiple + // signatures. Note that we need to use `js_name` to ensure we always call + // `log` in JS. + #[wasm_bindgen(js_namespace = console, js_name = log)] + fn log_u32(a: u32); + + // Multiple arguments too! + #[wasm_bindgen(js_namespace = console, js_name = log)] + fn log_many(a: &str, b: &str); + } + + #[cfg(all(not(feature = "server_log_output"), not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_info { + ($($t:tt)*) => (println!("INFO:{}",format!($($t)*))) +} + + #[cfg(all(not(feature = "server_log_output"), not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_err { + ($($t:tt)*) => (println!("ERR:{}",format!($($t)*))) +} + + #[cfg(all(not(feature = "server_log_output"), not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_warn { + ($($t:tt)*) => (println!("WARN:{}",format!($($t)*))) +} + + #[cfg(all(not(feature = "server_log_output"), not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_debug { + ($($t:tt)*) => (debug_println!("DEBUG:{}",format!($($t)*))) +} + + #[cfg(all(not(feature = "server_log_output"), not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_trace { + ($($t:tt)*) => (debug_println!("TRACE:{}",format!($($t)*))) +} + + #[cfg(all(feature = "server_log_output", not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_info { + ($($t:tt)*) => (log::info!($($t)*)) +} + + #[cfg(all(feature = "server_log_output", not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_err { + ($($t:tt)*) => (log::error!($($t)*)) +} + + #[cfg(all(feature = "server_log_output", not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_warn { + ($($t:tt)*) => (log::warn!($($t)*)) +} + + #[cfg(all(feature = "server_log_output", not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_debug { + ($($t:tt)*) => (log::debug!($($t)*)) +} + + #[cfg(all(feature = "server_log_output", not(target_arch = "wasm32")))] + #[macro_export] + macro_rules! log_trace { + ($($t:tt)*) => (log::trace!($($t)*)) +} + + #[cfg(target_arch = "wasm32")] + #[macro_export] + macro_rules! log_info { + ($($t:tt)*) => (log(&format_args!($($t)*).to_string())) +} + + #[cfg(target_arch = "wasm32")] + #[macro_export] + macro_rules! log_err { + ($($t:tt)*) => (error(&format_args!($($t)*).to_string())) +} + + #[cfg(target_arch = "wasm32")] + #[macro_export] + macro_rules! 
log_warn { + ($($t:tt)*) => (warn(&format_args!($($t)*).to_string())) +} + + #[cfg(all(debug_assertions, target_arch = "wasm32"))] + #[macro_export] + macro_rules! log_debug { + ($($t:tt)*) => (log(&format!("DEBUG:{}",&format_args!($($t)*).to_string()).to_string())) +} + + #[cfg(all(debug_assertions, target_arch = "wasm32"))] + #[macro_export] + macro_rules! log_trace { + ($($t:tt)*) => (log(&format!("TRACE:{}",&format_args!($($t)*).to_string()).to_string())) +} + + #[cfg(all(not(debug_assertions), target_arch = "wasm32"))] + #[macro_export] + macro_rules! log_debug { + ($($t:tt)*) => {}; + } + + #[cfg(all(not(debug_assertions), target_arch = "wasm32"))] + #[macro_export] + macro_rules! log_trace { + ($($t:tt)*) => {}; + } + + #[cfg(target_arch = "wasm32")] + #[macro_export] + macro_rules! sleep { + ($($t:tt)*) => (gloo_timers::future::sleep($($t)*).await) +} + + #[cfg(not(target_arch = "wasm32"))] + #[macro_export] + macro_rules! sleep { + ($($t:tt)*) => (std::thread::sleep($($t)*)) +} + + pub use log_debug; + pub use log_err; + pub use log_info; + pub use log_trace; + pub use log_warn; + pub use sleep; +} diff --git a/ng-repo/src/object.rs b/ng-repo/src/object.rs new file mode 100644 index 0000000..0b82426 --- /dev/null +++ b/ng-repo/src/object.rs @@ -0,0 +1,1782 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Object: Merkle hash tree of Blocks + +use core::fmt; +use std::collections::{HashMap, HashSet}; + +use chacha20::cipher::{KeyIvInit, StreamCipher}; +use chacha20::ChaCha20; +use zeroize::Zeroize; + +use crate::block_storage::*; +use crate::errors::*; +use crate::log::*; +use crate::store::Store; +use crate::types::*; + +pub const BLOCK_EXTRA: usize = 12; // 8 is the smallest extra + BLOCK_MAX_DATA_EXTRA +pub const HEADER_REF_EXTRA: usize = 66; +pub const HEADER_EMBED_EXTRA: usize = 34; +pub const CHILD_SIZE: usize = 66; + +pub const BLOCK_ID_SIZE: usize = 33; +/// Size of serialized SymKey +pub const BLOCK_KEY_SIZE: usize = 33; +/// Size of serialized Object with deps reference. +/// Varint extra bytes when reaching the maximum value we will ever use in one block +pub const BIG_VARINT_EXTRA: usize = 2; +/// Varint extra bytes when reaching the maximum size of data byte arrays. +pub const DATA_VARINT_EXTRA: usize = 4; +pub const BLOCK_MAX_DATA_EXTRA: usize = 4; + +#[derive(Debug, PartialEq, Eq, Clone)] +/// An Object in memory. 
This is not used to serialize data +pub struct Object { + /// keeps the deduplicated blocks of the Object + block_contents: HashMap<BlockId, Block>, + + /// Blocks of the Object (nodes of the tree) + blocks: Vec<BlockId>, + + /// Header + header: Option<CommitHeader>, + + /// Blocks of the Header (nodes of the tree) + header_blocks: Vec<Block>, + + #[cfg(test)] + already_saved: bool, +} + +impl Object { + // if it is a Store root repo, the key_material is derived from RepoId + RepoWriteCapSecret + // for a private store root repo, the repowritecapsecret is omitted (zeros) + pub(crate) fn convergence_key(store: &Store) -> [u8; blake3::OUT_LEN] { + let mut key_material = match ( + *store.get_store_repo().repo_id(), + store.get_store_overlay_branch_readcap_secret().clone(), + ) { + (PubKey::Ed25519PubKey(pubkey), SymKey::ChaCha20Key(secret)) => { + [pubkey, secret].concat() + } + (_, _) => panic!("cannot derive key with Montgomery key"), + }; + let res = blake3::derive_key("NextGraph Data BLAKE3 key", key_material.as_slice()); + key_material.zeroize(); + res + } + + fn make_block( + mut content: Vec<u8>, + conv_key: &[u8; blake3::OUT_LEN], + children: Vec<BlockId>, + header_ref: Option<CommitHeaderRef>, + already_existing: &mut HashMap<BlockKey, BlockId>, + ) -> Result<Block, BlockId> { + let key_hash = blake3::keyed_hash(conv_key, &content); + + let key_slice = key_hash.as_bytes(); + let key = SymKey::ChaCha20Key(key_slice.clone()); + let it = already_existing.get(&key); + if it.is_some() { + return Err(*it.unwrap()); + } + let nonce = [0u8; 12]; + let mut cipher = ChaCha20::new(key_slice.into(), &nonce.into()); + //let mut content_enc = Vec::from(content); + let mut content_enc_slice = &mut content.as_mut_slice(); + cipher.apply_keystream(&mut content_enc_slice); + + let block = Block::new(children, header_ref, content, Some(key)); + //log_debug!(">>> make_block: {}", block.id()); + //log_debug!("!!
children: ({}) {:?}", children.len(), children); + Ok(block) + } + + fn make_header_v0( + header: CommitHeaderV0, + object_size: usize, + conv_key: &ChaCha20Key, + ) -> (ObjectRef, Vec) { + let header_obj = Object::new_with_convergence_key( + ObjectContent::V0(ObjectContentV0::CommitHeader(CommitHeader::V0(header))), + None, + object_size, + conv_key, + ); + let header_ref = ObjectRef { + id: header_obj.id(), + key: header_obj.key().unwrap(), + }; + (header_ref, header_obj.blocks().cloned().collect()) + } + + fn make_header( + header: CommitHeader, + object_size: usize, + conv_key: &ChaCha20Key, + ) -> (ObjectRef, Vec) { + match header { + CommitHeader::V0(v0) => Self::make_header_v0(v0, object_size, conv_key), + } + } + + /// Build tree from leaves, returns parent nodes and optional header blocks + fn make_tree( + block_contents: &mut HashMap, + already_existing: &mut HashMap, + leaves: &[BlockId], + conv_key: &ChaCha20Key, + header_prepare_size: usize, + mut header_prepare_block_ref: Option, + mut header_prepare_blocks: Vec, + valid_block_size: usize, + arity: usize, + ) -> (Vec, Vec) { + let mut parents: Vec = vec![]; + let mut header_blocks = vec![]; + let chunks = leaves.chunks(arity); + let mut it = chunks.peekable(); + while let Some(nodes) = it.next() { + let children = nodes.to_vec(); + let keys: Vec = nodes + .iter() + .map(|block_id| block_contents.get(block_id).unwrap().key().unwrap()) + .collect(); + let content = ChunkContentV0::InternalNode(keys); + let content_ser = serde_bare::to_vec(&content).unwrap(); + //let child_header = None; + let header = if parents.is_empty() && it.peek().is_none() { + let mut header_prepare_blocks_taken = vec![]; + header_prepare_blocks_taken.append(&mut header_prepare_blocks); + match ( + header_prepare_size, + header_prepare_block_ref.take(), + header_prepare_blocks_taken, + ) { + (0, None, _) => None, + (header_size, Some(block_ref), blocks) => { + let is_embeddable = header_size > 0 + && ((valid_block_size + - BLOCK_EXTRA + - HEADER_EMBED_EXTRA + - header_size) + / CHILD_SIZE) + >= children.len(); + let (header_r, mut h_blocks) = + Self::make_header_ref(is_embeddable, block_ref, blocks); + header_blocks.append(&mut h_blocks); + header_r + } + (_, None, _) => unimplemented!(), + } + //header_ref.take() + } else { + None + }; + Self::add_block( + Self::make_block(content_ser, conv_key, children, header, already_existing), + &mut parents, + block_contents, + already_existing, + ); + } + //log_debug!("parents += {}", parents.len()); + + if 1 < parents.len() { + let mut great_parents = Self::make_tree( + block_contents, + already_existing, + parents.as_slice(), + conv_key, + header_prepare_size, + header_prepare_block_ref, + header_prepare_blocks, + valid_block_size, + arity, + ); + parents.append(&mut great_parents.0); + header_blocks.append(&mut great_parents.1); + } + (parents, header_blocks) + } + + fn make_header_ref( + embedded: bool, + header_ref: BlockRef, + blocks: Vec, + ) -> (Option, Vec) { + if embedded { + ( + Some(CommitHeaderRef { + obj: CommitHeaderObject::EncryptedContent( + blocks[0].encrypted_content().to_vec(), + ), + key: header_ref.key, + }), + vec![], + ) + } else { + ( + Some(CommitHeaderRef { + obj: CommitHeaderObject::Id(header_ref.id), + key: header_ref.key, + }), + blocks, + ) + } + } + + fn add_block( + block_result: Result, + blocks: &mut Vec, + block_contents: &mut HashMap, + already_existing: &mut HashMap, + ) { + match block_result { + Ok(mut block) => { + let id = block.get_and_save_id(); + blocks.push(id); + if 
!block_contents.contains_key(&id) { + already_existing.insert(block.key().unwrap(), id); + block_contents.insert(id, block); + } + } + Err(id) => { + blocks.push(id); + } + } + } + + /// Create new Object from given content + /// + /// The Object is chunked and stored in a Merkle tree + /// The arity of the Merkle tree is the maximum that fits in the given `max_object_size` + /// + /// Arguments: + /// * `content`: Object content + /// * `header`: CommitHeaderV0 : All references of the object + /// * `block_size`: Desired block size for chunking content, will be rounded up to nearest valid block size + /// * `store`: store public key, needed to generate the convergence key + /// * `store_secret`: store's read capability secret, needed to generate the convergence key + pub fn new( + content: ObjectContent, + header: Option, + block_size: usize, + store: &Store, + ) -> Object { + let mut conv_key = Self::convergence_key(store); + let res = Self::new_with_convergence_key(content, header, block_size, &conv_key); + conv_key.zeroize(); + res + } + + pub fn new_with_convergence_key( + content: ObjectContent, + mut header: Option, + block_size: usize, + conv_key: &ChaCha20Key, + ) -> Object { + if header.is_some() && !content.can_have_header() { + panic!( + "cannot make a new Object with header if ObjectContent type different from Commit" + ); + } + // log_debug!("header {:?}", header); + // create blocks by chunking + encrypting content + let valid_block_size = store_valid_value_size(block_size); + // log_debug!("valid_block_size {}", valid_block_size); + + // let max_arity_leaves: usize = (valid_block_size - BLOCK_EXTRA) / CHILD_SIZE; + // let max_arity_root: usize = + // (valid_block_size - BLOCK_EXTRA - HEADER_REF_EXTRA) / CHILD_SIZE; + let max_data_payload_size = + valid_block_size - BLOCK_EXTRA - HEADER_REF_EXTRA * header.as_ref().map_or(0, |_| 1); + let max_arity: usize = max_data_payload_size / CHILD_SIZE; + + let mut blocks: Vec = vec![]; + let mut block_contents: HashMap = HashMap::new(); + let mut already_existing: HashMap = HashMap::new(); + + let header_prepare = match &header { + None => (0 as usize, None, vec![]), + Some(h) => { + let block_info = Self::make_header(h.clone(), valid_block_size, conv_key); + if block_info.1.len() == 1 { + ( + block_info.1[0].encrypted_content().len(), + Some(block_info.0), + block_info.1, + ) + } else { + (0 as usize, Some(block_info.0), block_info.1) + } + } + }; + // log_debug!("{:?} {:?}", header, header_prepare); + + let content_ser = serde_bare::to_vec(&content).unwrap(); + let content_len = content_ser.len(); + + // log_debug!( + // "only one block? 
{} {} {}", + // content_len <= max_data_payload_size, + // content_len, + // max_data_payload_size + // ); + let header_blocks = if content_len <= max_data_payload_size { + // content fits in root node + let data_chunk = ChunkContentV0::DataChunk(content_ser.clone()); + let content_ser = serde_bare::to_vec(&data_chunk).unwrap(); + + let (header_ref, h_blocks) = match header_prepare { + (0, None, _) => (None, vec![]), + (header_size, Some(block_ref), blocks) => { + let is_embeddable = header_size > 0 + && valid_block_size - BLOCK_EXTRA - HEADER_EMBED_EXTRA - content_ser.len() + > header_size; + Self::make_header_ref(is_embeddable, block_ref, blocks) + } + (_, None, _) => unimplemented!(), + }; + Self::add_block( + Self::make_block( + content_ser, + conv_key, + vec![], + header_ref, + &mut already_existing, + ), + &mut blocks, + &mut block_contents, + &mut already_existing, + ); + + h_blocks + } else { + // chunk content and create leaf nodes + let mut i = 0; + #[cfg(not(target_arch = "wasm32"))] + let _total = std::cmp::max(1, content_len / (valid_block_size - BLOCK_EXTRA)); + for chunk in content_ser.chunks(valid_block_size - BLOCK_EXTRA) { + let data_chunk = ChunkContentV0::DataChunk(chunk.to_vec()); + let chunk_ser = serde_bare::to_vec(&data_chunk).unwrap(); + Self::add_block( + Self::make_block(chunk_ser, conv_key, vec![], None, &mut already_existing), + &mut blocks, + &mut block_contents, + &mut already_existing, + ); + #[cfg(not(target_arch = "wasm32"))] + log_debug!( + "make_block {} of {} - {}%", + i + 1, + _total + 1, + i * 100 / _total + ); + i = i + 1; + } + + // internal nodes + // max_arity: max number of ObjectRefs that fit inside an InternalNode Object within the max_data_payload_size limit + let mut parents = Self::make_tree( + &mut block_contents, + &mut already_existing, + blocks.as_slice(), + conv_key, + header_prepare.0, + header_prepare.1, + header_prepare.2, + valid_block_size, + max_arity, + ); + + blocks.append(&mut parents.0); + parents.1 + }; + + if header_blocks.len() > 0 { + // log_debug!( + // "header_blocks.len() {} {}", + // header_blocks.len(), + // header_blocks.last().unwrap().id() + // ); + header + .as_mut() + .unwrap() + .set_id(header_blocks.last().unwrap().id()); + } + Object { + blocks, + block_contents, + header, + header_blocks, + #[cfg(test)] + already_saved: false, + } + } + + /// Load an Object from BlockStorage (taking a reference) + /// + /// Returns Ok(Object) or an Err(ObjectParseError::MissingBlocks(Vec of ObjectId)) of missing BlockIds + pub fn load_ref(reference: &ObjectRef, store: &Store) -> Result { + Self::load(reference.id.clone(), Some(reference.key.clone()), store) + } + + pub fn load_header( + root_block: &Block, + store: &Store, + ) -> Result, ObjectParseError> { + Ok(Self::load_header_(root_block, store)?.0) + } + + fn load_header_( + root: &Block, + store: &Store, + ) -> Result<(Option, Vec), ObjectParseError> { + match root.header_ref() { + Some(header_ref) => match header_ref.obj { + CommitHeaderObject::None | CommitHeaderObject::RandomAccess => { + panic!("shouldn't happen") + } + CommitHeaderObject::Id(id) => { + let obj_res = Object::load(id, Some(header_ref.key.clone()), store); + match obj_res { + Err(e) => return Err(e), + Ok(obj) => match obj.content()? 
{ + ObjectContent::V0(ObjectContentV0::CommitHeader(mut commit_header)) => { + commit_header.set_id(id); + Ok((Some(commit_header), obj.blocks().cloned().collect())) + } + _ => { + return Err(ObjectParseError::InvalidHeader); + } + }, + } + } + CommitHeaderObject::EncryptedContent(content) => { + let (_, decrypted_content) = + Block::new_with_encrypted_content(content, None).read(&header_ref.key)?; + match serde_bare::from_slice(&decrypted_content) { + Ok(ObjectContent::V0(ObjectContentV0::CommitHeader(commit_header))) => { + Ok((Some(commit_header), vec![])) + } + Err(_e) => { + return Err(ObjectParseError::InvalidHeader); + } + _ => { + return Err(ObjectParseError::InvalidHeader); + } + } + } + }, + None => Ok((None, vec![])), + } + } + + /// Load an Object from BlockStorage + /// + /// Returns Ok(Object) or an Err(ObjectParseError::MissingBlocks(Vec of ObjectId )) of missing BlockIds + pub fn load( + id: ObjectId, + key: Option, + store: &Store, + ) -> Result { + Self::load_(id, key, store, true) + } + + pub fn load_without_header( + id: ObjectId, + key: Option, + store: &Store, + ) -> Result { + Self::load_(id, key, store, false) + } + + fn load_( + id: ObjectId, + key: Option, + store: &Store, + with_header: bool, + ) -> Result { + fn load_tree( + parents: Vec, + store: &Store, + blocks: &mut Vec, + missing: &mut Vec, + block_contents: &mut HashMap, + ) { + let mut children: Vec = vec![]; + for id in parents { + match store.get(&id) { + Ok(block) => { + match &block { + Block::V0(o) => { + children.extend(o.children().iter().rev()); + } + } + blocks.insert(0, id); + if !block_contents.contains_key(&id) { + block_contents.insert(id, block); + } + } + Err(_) => missing.push(id.clone()), + } + } + if !children.is_empty() { + load_tree(children, store, blocks, missing, block_contents); + } + } + + let mut blocks: Vec = vec![]; + let mut block_contents: HashMap = HashMap::new(); + let mut missing: Vec = vec![]; + + load_tree( + vec![id], + store, + &mut blocks, + &mut missing, + &mut block_contents, + ); + + if !missing.is_empty() { + return Err(ObjectParseError::MissingBlocks(missing)); + } + + let root = block_contents.get_mut(blocks.last().unwrap()).unwrap(); + if key.is_some() { + root.set_key(key); + } + + let header = if with_header { + match Self::load_header_(root, store) { + Err(ObjectParseError::MissingBlocks(m)) => { + return Err(ObjectParseError::MissingHeaderBlocks(( + Object { + blocks, + block_contents, + header: None, + header_blocks: vec![], + #[cfg(test)] + already_saved: false, + }, + m, + ))); + } + Err(e) => return Err(e), + Ok(h) => h, + } + } else { + root.destroy_header(); + (None, vec![]) + }; + + Ok(Object { + blocks, + block_contents, + header: header.0, + header_blocks: header.1, + #[cfg(test)] + already_saved: true, + }) + } + + /// Save blocks of the object and the blocks of the header object in the store + pub fn save(&self, store: &Store) -> Result, StorageError> { + let mut deduplicated: HashSet = HashSet::new(); + //.chain(self.header_blocks.iter()) + for block_id in self.blocks.iter() { + deduplicated.insert(*block_id); + store.put(self.block_contents.get(block_id).unwrap())?; + } + for block in &self.header_blocks { + let id = block.id(); + if deduplicated.get(&id).is_none() { + deduplicated.insert(id); + store.put(block)?; + } + } + let root_id = self.id(); + let mut blocks = vec![root_id]; + deduplicated.remove(&root_id); + let list = deduplicated.drain(); + blocks.append(&mut list.collect()); + deduplicated.shrink_to(0); + Ok(blocks) + } + + 
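// Hedged usage sketch (illustrative; mirrors the unit tests in this crate): + // + // let obj = Object::new(content, None, store_max_value_size(), &store); + // let block_ids = obj.save(&store)?; // root BlockId first, then the deduplicated rest + + 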
#[cfg(test)] + pub fn save_in_test(&mut self, store: &Store) -> Result, StorageError> { + assert!(self.already_saved == false); + self.already_saved = true; + + self.save(store) + } + + /// Get the ID of the Object + pub fn id(&self) -> ObjectId { + self.root_block().id() + } + + /// Get the ID of the Object and saves it + pub fn get_and_save_id(&mut self) -> ObjectId { + self.block_contents + .get_mut(self.blocks.last().unwrap()) + .unwrap() + .get_and_save_id() + } + + /// Get the key for the Object + pub fn key(&self) -> Option { + self.root_block().key() + } + + /// Get an `ObjectRef` for the root object + pub fn reference(&self) -> Option { + if self.key().is_some() { + Some(ObjectRef { + id: self.id(), + key: self.key().unwrap(), + }) + } else { + None + } + } + + pub fn is_root(&self) -> bool { + self.header.as_ref().map_or(true, |h| h.is_root()) + } + + /// Get deps (that have an ID in the header, without checking if there is a key for them in the header_keys) + /// if there is no header, returns an empty vec + pub fn deps(&self) -> Vec { + match &self.header { + Some(h) => h.deps(), + None => vec![], + } + } + + /// Get acks and nacks (that have an ID in the header, without checking if there is a key for them in the header_keys) + /// if there is no header, returns an empty vec + pub fn acks_and_nacks(&self) -> Vec { + match &self.header { + Some(h) => h.acks_and_nacks(), + None => vec![], + } + } + + /// Get acks (that have an ID in the header, without checking if there is a key for them in the header_keys) + /// if there is no header, returns an empty vec + pub fn acks(&self) -> Vec { + match &self.header { + Some(h) => h.acks(), + None => vec![], + } + } + + pub fn root_block(&self) -> &Block { + self.block_contents + .get(self.blocks.last().unwrap()) + .unwrap() + } + + pub fn header(&self) -> &Option { + &self.header + } + + pub fn blocks(&self) -> impl Iterator + '_ { + self.blocks + .iter() + .map(|key| self.block_contents.get(key).unwrap()) + } + + pub fn all_blocks_len(&self) -> usize { + self.blocks.len() + self.header_blocks.len() + } + + pub fn blocks_len(&self) -> usize { + self.blocks.len() + } + + pub fn header_blocks_len(&self) -> usize { + self.header_blocks.len() + } + + pub fn size(&self) -> usize { + let mut total = 0; + self.blocks().for_each(|b| { + let s = b.size(); + //log_debug!("@@@@ block {}", s); + total += s; + }); + self.header_blocks.iter().for_each(|b| { + let s = b.size(); + //log_debug!("@@@@ header {}", s); + total += s; + }); + total + } + + pub fn dedup_size(&self) -> usize { + let mut total = 0; + self.block_contents.values().for_each(|b| total += b.size()); + self.header_blocks.iter().for_each(|b| total += b.size()); + total + } + + pub fn hashmap(&self) -> &HashMap { + &self.block_contents + } + + pub fn into_blocks(self) -> Vec { + self.block_contents.into_values().collect() + } + + /// Collect leaves from the tree + fn collect_leaves( + blocks: &Vec, + parents: &Vec<(ObjectId, SymKey)>, + parent_index: usize, + leaves: &mut Option<&mut Vec>, + obj_content: &mut Option<&mut Vec>, + block_contents: &HashMap, + ) -> Result { + // log_debug!( + // ">>> collect_leaves: #{}..{}", + // parent_index, + // parent_index + parents.len() - 1 + // ); + let mut children: Vec<(ObjectId, SymKey)> = vec![]; + let mut i = parent_index; + + for (id, key) in parents { + //log_debug!("!!! 
parent: #{}", i); + let block = block_contents.get(&blocks[i]).unwrap(); + i += 1; + + // verify object ID + let block_id = block.id(); + if *id != block_id { + log_debug!("Invalid ObjectId.\nExp: {:?}\nGot: {:?}", *id, block_id); + return Err(ObjectParseError::InvalidBlockId); + } + + match block { + Block::V0(b) => { + let b_children = b.children(); + if leaves.is_none() && obj_content.is_none() { + // we just want to calculate the depth. no need to decrypt + for id in b_children { + #[allow(deprecated)] + children.push((id.clone(), ObjectKey::nil())); + } + continue; + } + // decrypt content in place (this is why we have to clone first) + let mut content_dec = b.content.encrypted_content().clone(); + match key { + SymKey::ChaCha20Key(key) => { + let nonce = [0u8; 12]; + let mut cipher = ChaCha20::new(key.into(), &nonce.into()); + let mut content_dec_slice = &mut content_dec.as_mut_slice(); + cipher.apply_keystream(&mut content_dec_slice); + } + } + + // deserialize content + let content: ChunkContentV0; + match serde_bare::from_slice(content_dec.as_slice()) { + Ok(c) => content = c, + Err(_e) => { + //log_debug!("Block deserialize error: {}", e); + return Err(ObjectParseError::BlockDeserializeError); + } + } + // parse content + match content { + ChunkContentV0::InternalNode(keys) => { + if keys.len() != b_children.len() { + log_debug!( + "Invalid keys length: got {}, expected {}", + keys.len(), + b_children.len() + ); + log_debug!("!!! children: {:?}", b_children); + log_debug!("!!! keys: {:?}", keys); + return Err(ObjectParseError::InvalidKeys); + } + + for (id, key) in b_children.iter().zip(keys.iter()) { + children.push((id.clone(), key.clone())); + } + } + ChunkContentV0::DataChunk(chunk) => { + if leaves.is_some() { + //FIXME this part is never used (when leaves.is_some ?) + //FIXME if it was used, we should probably try to remove the block.clone() + let mut leaf = block.clone(); + leaf.set_key(Some(key.clone())); + let l = &mut **leaves.as_mut().unwrap(); + l.push(leaf); + } + if obj_content.is_some() { + let c = &mut **obj_content.as_mut().unwrap(); + c.extend_from_slice(chunk.as_slice()); + } + } + } + } + } + } + Ok(if !children.is_empty() { + if parent_index < children.len() { + return Err(ObjectParseError::InvalidChildren); + } + Self::collect_leaves( + blocks, + &children, + parent_index - children.len(), + leaves, + obj_content, + block_contents, + )? 
+ 1
+        } else {
+            0
+        })
+    }
+
+    // /// Parse the Object and return the leaf Blocks with decryption key set
+    // pub fn leaves(&self) -> Result<Vec<Block>, ObjectParseError> {
+    //     let mut leaves: Vec<Block> = vec![];
+    //     let parents = vec![(self.id(), self.key().unwrap())];
+    //     match Self::collect_leaves(
+    //         &self.blocks,
+    //         &parents,
+    //         self.blocks.len() - 1,
+    //         &mut Some(&mut leaves),
+    //         &mut None,
+    //     ) {
+    //         Ok(_) => Ok(leaves),
+    //         Err(e) => Err(e),
+    //     }
+    // }
+
+    /// Parse the Object and return the decrypted content assembled from Blocks
+    pub fn content(&self) -> Result<ObjectContent, ObjectParseError> {
+        // TODO: keep a local cache of content (with oncecell)
+        if self.key().is_none() {
+            return Err(ObjectParseError::MissingRootKey);
+        }
+        let mut obj_content: Vec<u8> = vec![];
+        let parents = vec![(self.id(), self.key().unwrap())];
+        match Self::collect_leaves(
+            &self.blocks,
+            &parents,
+            self.blocks.len() - 1,
+            &mut None,
+            &mut Some(&mut obj_content),
+            &self.block_contents,
+        ) {
+            Ok(_) => match serde_bare::from_slice(obj_content.as_slice()) {
+                Ok(c) => Ok(c),
+                Err(_e) => {
+                    //log_debug!("Object deserialize error: {}", e);
+                    Err(ObjectParseError::ObjectDeserializeError)
+                }
+            },
+            Err(e) => Err(e),
+        }
+    }
+
+    /// Parse the Object and return the depth of its tree
+    pub fn depth(&self) -> Result<u8, ObjectParseError> {
+        if self.key().is_none() {
+            return Err(ObjectParseError::MissingRootKey);
+        }
+        let parents = vec![(self.id(), self.key().unwrap())];
+        Self::collect_leaves(
+            &self.blocks,
+            &parents,
+            self.blocks.len() - 1,
+            &mut None,
+            &mut None,
+            &self.block_contents,
+        )
+    }
+
+    pub fn content_v0(&self) -> Result<ObjectContentV0, ObjectParseError> {
+        match self.content() {
+            Ok(ObjectContent::V0(v0)) => Ok(v0),
+            Err(e) => Err(e),
+        }
+    }
+}
+
+impl IObject for Object {
+    fn block_ids(&self) -> Vec<BlockId> {
+        let mut deduplicated: HashSet<BlockId> = HashSet::new();
+        //.chain(self.header_blocks.iter())
+        for block_id in self.blocks.iter() {
+            deduplicated.insert(*block_id);
+        }
+        for block in &self.header_blocks {
+            let id = block.id();
+            if deduplicated.get(&id).is_none() {
+                deduplicated.insert(id);
+            }
+        }
+        let root_id = self.id();
+        let mut blocks = vec![root_id];
+        deduplicated.remove(&root_id);
+        let list = deduplicated.drain();
+        blocks.append(&mut list.collect());
+        deduplicated.shrink_to(0);
+        blocks
+    }
+
+    fn id(&self) -> Option<ObjectId> {
+        Some(self.id())
+    }
+
+    fn key(&self) -> Option<SymKey> {
+        self.key()
+    }
+}
+
+impl fmt::Display for Object {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(f, "====== Object ID {}", self.id())?;
+        writeln!(
+            f,
+            "== Key: {}",
+            self.key().map_or("None".to_string(), |k| format!("{}", k))
+        )?;
+        #[cfg(test)]
+        writeln!(f, "== saved: {}", self.already_saved)?;
+        writeln!(
+            f,
+            "== Header: {}",
+            self.header
+                .as_ref()
+                .map_or("None".to_string(), |k| format!("{}", k))
+        )?;
+        writeln!(f, "== Blocks: {}", self.blocks.len())?;
+        let mut i = 0;
+        for block_id in &self.blocks {
+            writeln!(f, "========== {:03}: {}", i, block_id)?;
+            i += 1;
+        }
+        writeln!(f, "== Depth: {:?}", self.depth().unwrap_or(0))?;
+
+        writeln!(f, "== Header Blocks: {}", self.header_blocks.len())?;
+        i = 0;
+        for block in &self.header_blocks {
+            writeln!(f, "========== {:03}: {}", i, block.id())?;
+            i += 1;
+        }
+        write!(
+            f,
+            "{}",
+            self.content().map_or_else(
+                |e| format!("Error on content: {:?}", e),
+                |c| format!("{}", c)
+            )
+        )?;
+        Ok(())
+    }
+}
+
+impl ObjectContent {
+    pub fn can_have_header(&self) -> bool {
+        match self {
+            Self::V0(v0) => match v0 {
+                ObjectContentV0::Commit(_) => true,
+                _ => false,
+            },
+        }
+    }
+
+    pub fn
new_file_v0_with_content(content: Vec, content_type: &str) -> Self { + ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + content_type: content_type.into(), + metadata: vec![], + content, + }))) + } +} + +impl fmt::Display for ObjectContent { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let (version, content) = match self { + Self::V0(v0) => ( + "v0", + match v0 { + ObjectContentV0::Commit(c) => ("Commit", format!("{}", c)), + ObjectContentV0::CommitBody(c) => ("CommitBody", format!("{}", c)), + ObjectContentV0::CommitHeader(c) => ("CommitHeader", format!("{}", c)), + ObjectContentV0::Quorum(_c) => ("Quorum", format!("{}", "")), + ObjectContentV0::Signature(_c) => ("Signature", format!("{}", "")), + ObjectContentV0::Certificate(_c) => ("Certificate", format!("{}", "")), + ObjectContentV0::SmallFile(_c) => ("SmallFile", format!("{}", "")), + ObjectContentV0::RandomAccessFileMeta(_c) => { + ("RandomAccessFileMeta", format!("{}", "")) + } + ObjectContentV0::RefreshCap(_c) => ("RefreshCap", format!("{}", "")), + ObjectContentV0::Snapshot(_c) => ("Snapshot", format!("size={}", _c.len())), + }, + ), + }; + writeln!(f, "====== ObjectContent {} {} ======", version, content.0)?; + write!(f, "{}", content.1)?; + Ok(()) + } +} + +#[cfg(test)] +mod test { + + use crate::object::*; + use std::io::BufReader; + use std::io::Read; + use std::io::Write; + + // Those constants are calculated with BlockStorage::get_max_value_size + /// Maximum arity of branch containing max number of leaves + // const MAX_ARITY_LEAVES: usize = 15887; + // /// Maximum arity of root branch + // const MAX_ARITY_ROOT: usize = 15886; + // /// Maximum data that can fit in object.content + // const MAX_DATA_PAYLOAD_SIZE: usize = 1048564; + + #[test] + pub fn test_pubkey_from_str() { + let pubkey = PubKey::Ed25519PubKey([1u8; 32]); + let str = pubkey.to_string(); + let server_key: PubKey = str.as_str().try_into().unwrap(); + assert_eq!(server_key, pubkey); + } + + /// Test no header needed if not a commit + #[test] + #[should_panic] + pub fn test_no_header() { + let file = SmallFile::V0(SmallFileV0 { + content_type: "image/jpeg".into(), + metadata: vec![], + content: vec![], + }); + let content = ObjectContent::V0(ObjectContentV0::SmallFile(file)); + let store = Store::dummy_public_v0(); + let header = CommitHeader::new_with_acks([ObjectId::dummy()].to_vec()); + let _obj = Object::new(content, header, store_max_value_size(), &store); + } + + /// Test JPEG file + #[test] + pub fn test_jpg() { + let f = std::fs::File::open("tests/test.jpg").expect("open of tests/test.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer: Vec = Vec::new(); + reader + .read_to_end(&mut img_buffer) + .expect("read of test.jpg"); + let content = ObjectContent::new_file_v0_with_content(img_buffer, "image/jpeg"); + + let max_object_size = store_max_value_size(); + let store = Store::dummy_public_v0(); + let obj = Object::new(content, None, max_object_size, &store); + + log_debug!("{}", obj); + + let mut i = 0; + for node in obj.blocks() { + log_debug!("#{}: {}", i, node.id()); + let mut file = std::fs::File::create(format!("tests/{}.ng", node.id())) + .expect("open block write file"); + let ser_file = serde_bare::to_vec(node).unwrap(); + file.write_all(&ser_file) + .expect(&format!("write of block #{}", i)); + i += 1; + } + } + + /// Test tree API + #[test] + pub fn test_object() { + let file = SmallFile::V0(SmallFileV0 { + content_type: "file/test".into(), + metadata: Vec::from("some meta data here"), + 
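+            // the content below is 320 repetitions of the 0..=254 byte pattern
+            // (~80 KB), enough that the object is chunked into several blocks
+            // at the minimum valid block size requested by max_object_size = 0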
content: [(0..255).collect::>().as_slice(); 320].concat(), + }); + let content = ObjectContent::V0(ObjectContentV0::SmallFile(file)); + + let acks = vec![]; + //let header = CommitHeader::new_with_acks(acks.clone()); + let max_object_size = 0; + + let store = Store::dummy_public_v0(); + + let mut obj = Object::new(content.clone(), None, max_object_size, &store); + + log_debug!("{}", obj); + + assert_eq!(*obj.acks(), acks); + + match obj.content() { + Ok(cnt) => { + log_debug!("{}", cnt); + assert_eq!(content, cnt); + } + Err(e) => panic!("Object parse error: {:?}", e), + } + + obj.save_in_test(&store).expect("Object save error"); + + let obj2 = Object::load(obj.id(), obj.key(), &store).unwrap(); + + log_debug!("{}", obj2); + + assert_eq!(*obj2.acks(), acks); + + match obj2.content() { + Ok(cnt) => { + log_debug!("{}", cnt); + assert_eq!(content, cnt); + } + Err(e) => panic!("Object2 parse error: {:?}", e), + } + + let obj3 = Object::load(obj.id(), None, &store).unwrap(); + + log_debug!("{}", obj3); + + assert_eq!(*obj3.acks(), acks); + + match obj3.content() { + Err(ObjectParseError::MissingRootKey) => (), + Err(e) => panic!("Object3 parse error: {:?}", e), + Ok(_) => panic!("Object3 should not return content"), + } + } + + /// Checks that a content that fits the root node, will not be chunked into children nodes + #[test] + pub fn test_depth_0() { + let store = Store::dummy_public_v0(); + + let empty_file = + ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + content_type: "".into(), + metadata: vec![], + content: vec![], + }))); + let content_ser = serde_bare::to_vec(&empty_file).unwrap(); + log_debug!("content len for empty : {}", content_ser.len()); + + // let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + // content_type: "".into(), + // metadata: vec![], + // content: vec![99; 1000], + // }))); + // let content_ser = serde_bare::to_vec(&content).unwrap(); + // log_debug!("content len for 1000 : {}", content_ser.len()); + + // let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + // content_type: "".into(), + // metadata: vec![], + // content: vec![99; 1048554], + // }))); + // let content_ser = serde_bare::to_vec(&content).unwrap(); + // log_debug!("content len for 1048554 : {}", content_ser.len()); + + // let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + // content_type: "".into(), + // metadata: vec![], + // content: vec![99; 1550000], + // }))); + // let content_ser = serde_bare::to_vec(&content).unwrap(); + // log_debug!("content len for 1550000 : {}", content_ser.len()); + + // let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + // content_type: "".into(), + // metadata: vec![], + // content: vec![99; 1550000000], + // }))); + // let content_ser = serde_bare::to_vec(&content).unwrap(); + // log_debug!("content len for 1550000000 : {}", content_ser.len()); + + // let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + // content_type: "".into(), + // metadata: vec![99; 1000], + // content: vec![99; 1000], + // }))); + // let content_ser = serde_bare::to_vec(&content).unwrap(); + // log_debug!("content len for 1000+1000: {}", content_ser.len()); + + // let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + // content_type: "".into(), + // metadata: vec![99; 1000], + // content: vec![99; 524277], + // }))); + // let content_ser = 
serde_bare::to_vec(&content).unwrap(); + // log_debug!("content len for 1000+524277: {}", content_ser.len()); + + // let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + // content_type: "".into(), + // metadata: vec![99; 524277], + // content: vec![99; 524277], + // }))); + // let content_ser = serde_bare::to_vec(&content).unwrap(); + // log_debug!("content len for 2*524277: {}", content_ser.len()); + + let empty_obj = Object::new(empty_file, None, store_max_value_size(), &store); + + let empty_file_size = empty_obj.size(); + log_debug!("empty file size: {}", empty_file_size); + + let size = + store_max_value_size() - empty_file_size - BLOCK_MAX_DATA_EXTRA - BIG_VARINT_EXTRA; + log_debug!("full file content size: {}", size); + + let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + content_type: "".into(), + metadata: vec![], + content: vec![99; size], + }))); + let content_ser = serde_bare::to_vec(&content).unwrap(); + log_debug!("content len: {}", content_ser.len()); + + let object = Object::new(content, None, store_max_value_size(), &store); + log_debug!("{}", object); + + log_debug!("object size: {}", object.size()); + + assert_eq!(object.blocks.len(), 1); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[ignore] + #[test] + pub fn test_depth_1() { + const MAX_ARITY_LEAVES: usize = 15887; + // /// Maximum arity of root branch + // const MAX_ARITY_ROOT: usize = 15886; + // /// Maximum data that can fit in object.content + const MAX_DATA_PAYLOAD_SIZE: usize = 1048564; + + ////// 16 GB of data! + let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE - 10; + + let store = Store::dummy_public_v0(); + log_debug!("creating 16GB of data"); + let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + content_type: "".into(), + metadata: vec![], + content: vec![99; data_size], + }))); + //let content_ser = serde_bare::to_vec(&content).unwrap(); + //log_debug!("content len: {}", content_ser.len()); + log_debug!("creating object with that data"); + let object = Object::new(content, None, store_max_value_size(), &store); + log_debug!("{}", object); + + let obj_size = object.size(); + log_debug!("object size: {}", obj_size); + + log_debug!("data size: {}", data_size); + log_debug!( + "overhead: {} - {}%", + obj_size - data_size, + ((obj_size - data_size) * 100) as f32 / data_size as f32 + ); + + log_debug!("number of blocks : {}", object.blocks.len()); + assert_eq!(object.blocks.len(), MAX_ARITY_LEAVES + 1); + assert_eq!(object.depth().unwrap(), 1); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[ignore] + #[test] + pub fn test_depth_2() { + const MAX_ARITY_LEAVES: usize = 15887; + const MAX_DATA_PAYLOAD_SIZE: usize = 1048564; + + ////// 16 GB of data! 
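+        // Here the 10-byte slack used in test_depth_1 is gone, so the serialized
+        // content overflows MAX_ARITY_LEAVES full data chunks and the root can no
+        // longer reference all leaves directly; an intermediate level appears.
+        // Presumably 15888 leaves + 2 internal nodes + 1 root, which matches the
+        // MAX_ARITY_LEAVES + 4 blocks and depth 2 asserted below.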
+ let data_size = MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE; + + let store = Store::dummy_public_v0(); + log_debug!("creating 16GB of data"); + let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + content_type: "".into(), + metadata: vec![], + content: vec![99; data_size], + }))); + //let content_ser = serde_bare::to_vec(&content).unwrap(); + //log_debug!("content len: {}", content_ser.len()); + log_debug!("creating object with that data"); + let object = Object::new(content, None, store_max_value_size(), &store); + log_debug!("{}", object); + + let obj_size = object.size(); + log_debug!("object size: {}", obj_size); + + log_debug!("data size: {}", data_size); + log_debug!( + "overhead: {} - {}%", + obj_size - data_size, + ((obj_size - data_size) * 100) as f32 / data_size as f32 + ); + + log_debug!("number of blocks : {}", object.blocks.len()); + assert_eq!(object.blocks.len(), MAX_ARITY_LEAVES + 4); + assert_eq!(object.depth().unwrap(), 2); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[ignore] + #[test] + pub fn test_depth_3() { + const MAX_ARITY_LEAVES: usize = 61; + const MAX_DATA_PAYLOAD_SIZE: usize = 4084; + + ////// 900 MB of data! + let data_size = + MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_ARITY_LEAVES * MAX_DATA_PAYLOAD_SIZE - 10; + + let store = Store::dummy_public_v0(); + log_debug!("creating 900MB of data"); + let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + content_type: "".into(), + metadata: vec![], + content: vec![99; data_size], + }))); + //let content_ser = serde_bare::to_vec(&content).unwrap(); + //log_debug!("content len: {}", content_ser.len()); + log_debug!("creating object with that data"); + let object = Object::new(content, None, store_valid_value_size(0), &store); + log_debug!("{}", object); + + let obj_size = object.size(); + log_debug!("object size: {}", obj_size); + + log_debug!("data size: {}", data_size); + log_debug!( + "overhead: {} - {}%", + obj_size - data_size, + ((obj_size - data_size) * 100) as f32 / data_size as f32 + ); + + let dedup_size = object.dedup_size(); + log_debug!( + "dedup compression: {} - {}%", + data_size - dedup_size, + ((data_size - dedup_size) * 100) as f32 / data_size as f32 + ); + + log_debug!("number of blocks : {}", object.blocks.len()); + assert_eq!( + object.blocks.len(), + MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES + MAX_ARITY_LEAVES + 1 + ); + assert_eq!(object.depth().unwrap(), 3); + } + + /// Checks that a content that doesn't fit in all the children of first level in tree + #[ignore] + #[test] + pub fn test_depth_4() { + const MAX_ARITY_LEAVES: usize = 61; + const MAX_DATA_PAYLOAD_SIZE: usize = 4084; + + ////// 52GB of data! 
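+        // The block count asserted below expands to the geometric sum
+        // 61^4 + 61^3 + 61^2 + 61 + 1: a full 61-ary tree of depth 4,
+        // i.e. 61^4 leaves under three levels of internal nodes plus the root.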
+ let data_size = MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_ARITY_LEAVES + * MAX_DATA_PAYLOAD_SIZE + - 12; + + let store = Store::dummy_public_v0(); + log_debug!("creating 52GB of data"); + let content = ObjectContent::V0(ObjectContentV0::SmallFile(SmallFile::V0(SmallFileV0 { + content_type: "".into(), + metadata: vec![], + content: vec![99; data_size], + }))); + //let content_ser = serde_bare::to_vec(&content).unwrap(); + //log_debug!("content len: {}", content_ser.len()); + log_debug!("creating object with that data"); + let object = Object::new(content, None, store_valid_value_size(0), &store); + log_debug!("{}", object); + + let obj_size = object.size(); + log_debug!("object size: {}", obj_size); + + log_debug!("data size: {}", data_size); + log_debug!( + "overhead: {} - {}%", + obj_size - data_size, + ((obj_size - data_size) * 100) as f32 / data_size as f32 + ); + + log_debug!("number of blocks : {}", object.blocks.len()); + assert_eq!( + object.blocks.len(), + MAX_ARITY_LEAVES + * (MAX_ARITY_LEAVES * (MAX_ARITY_LEAVES + 1) * MAX_ARITY_LEAVES + + MAX_ARITY_LEAVES + + 1) + + 1 + ); + assert_eq!(object.depth().unwrap(), 4); + } + + #[test] + pub fn test_block_size() { + //let max_block_size = store_max_value_size(); + + fn test_block(max_block_size: usize) { + let max_arity_leaves: usize = (max_block_size - BLOCK_EXTRA) / CHILD_SIZE; + let max_arity_root: usize = + (max_block_size - BLOCK_EXTRA - HEADER_REF_EXTRA) / CHILD_SIZE; + + let max_data_payload_size = max_block_size - BLOCK_EXTRA; + + log_debug!("max_block_size: {}", max_block_size); + log_debug!("max_arity_leaves: {}", max_arity_leaves); + log_debug!("max_arity_root: {}", max_arity_root); + log_debug!("max_data_payload_size: {}", max_data_payload_size); + + let (id, key) = ObjectRef::dummy().into(); + + // this should never happen + let zero_key = ChunkContentV0::InternalNode(vec![]); + let zero_key_ser = serde_bare::to_vec(&zero_key).unwrap(); + + let one_key = ChunkContentV0::InternalNode(vec![key.clone()]); + let one_key_ser = serde_bare::to_vec(&one_key).unwrap(); + + let two_keys = ChunkContentV0::InternalNode(vec![key.clone(), key.clone()]); + let two_keys_ser = serde_bare::to_vec(&two_keys).unwrap(); + + let max_keys = ChunkContentV0::InternalNode(vec![key.clone(); max_arity_leaves]); + let max_keys_ser = serde_bare::to_vec(&max_keys).unwrap(); + + let max_keys_root = ChunkContentV0::InternalNode(vec![key.clone(); max_arity_root]); + let max_keys_root_ser = serde_bare::to_vec(&max_keys_root).unwrap(); + + // this should never happen + let data_empty = ChunkContentV0::DataChunk(vec![]); + let data_empty_ser = serde_bare::to_vec(&data_empty).unwrap(); + + let data_full = ChunkContentV0::DataChunk(vec![0; max_data_payload_size]); + let data_full_ser = serde_bare::to_vec(&data_full).unwrap(); + + // this should never happen: an empty block with no children and no data and no header + let leaf_empty = Block::new(vec![], None, data_empty_ser.clone(), None); + let leaf_empty_ser = serde_bare::to_vec(&leaf_empty).unwrap(); + + log_debug!( + "block size of empty leaf without header: {}", + leaf_empty_ser.len() + ); + + let leaf_full_data = Block::new(vec![], None, data_full_ser.clone(), None); + let leaf_full_data_ser = serde_bare::to_vec(&leaf_full_data).unwrap(); + + log_debug!( + "block size of full leaf block without header: {}", + leaf_full_data_ser.len() + ); + + // this should never happen: an empty block with no children and no keys + let internal_zero = Block::new(vec![], None, 
zero_key_ser.clone(), None); + let internal_zero_ser = serde_bare::to_vec(&internal_zero).unwrap(); + + log_debug!( + "block size of empty internal block without header: {}", + internal_zero_ser.len() + ); + + assert!(leaf_full_data_ser.len() <= max_block_size); + + // let root_zero = Block::new( + // vec![], + // None, + // zero_key_ser.clone(), + // None, + // ); + // let root_zero_ser = serde_bare::to_vec(&root_zero).unwrap(); + + let header_ref = CommitHeaderRef::from_id_key(id, key.clone()); + + // this should never happen. an embedded header never has an empty content + let header_embed = CommitHeaderRef::from_content_key(vec![], key.clone()); + + // this should never happen: an empty block with no children and no data and header ref + let root_zero_header_ref = Block::new( + vec![], + Some(header_ref.clone()), + data_empty_ser.clone(), + None, + ); + let root_zero_header_ref_ser = serde_bare::to_vec(&root_zero_header_ref).unwrap(); + + // this should never happen: an empty block with no children and no data and header embed + let root_zero_header_embed = Block::new( + vec![], + Some(header_embed.clone()), + data_empty_ser.clone(), + None, + ); + let root_zero_header_embed_ser = serde_bare::to_vec(&root_zero_header_embed).unwrap(); + + // log_debug!( + // "block size of empty root block without header: {}", + // root_zero_ser.len() + // ); + + log_debug!( + "block size of empty root block with header ref: {}", + root_zero_header_ref_ser.len() + ); + + log_debug!( + "block size of empty root block with header embedded: {}", + root_zero_header_embed_ser.len() + ); + + let internal_max = + Block::new(vec![id; max_arity_leaves], None, max_keys_ser.clone(), None); + let internal_max_ser = serde_bare::to_vec(&internal_max).unwrap(); + + let internal_one = Block::new(vec![id; 1], None, one_key_ser.clone(), None); + let internal_one_ser = serde_bare::to_vec(&internal_one).unwrap(); + + let internal_two = Block::new(vec![id; 2], None, two_keys_ser.clone(), None); + let internal_two_ser = serde_bare::to_vec(&internal_two).unwrap(); + + log_debug!( + "block size of internal block with 1 child, without header: {}", + internal_one_ser.len() + ); + + log_debug!( + "block size of internal block with 2 children, without header: {}", + internal_two_ser.len() + ); + + log_debug!( + "block size of internal block with max arity children, without header: {}", + internal_max_ser.len() + ); + + assert!(internal_max_ser.len() <= max_block_size); + + let root_one = Block::new( + vec![id; 1], + Some(header_ref.clone()), + one_key_ser.clone(), + None, + ); + let root_one_ser = serde_bare::to_vec(&root_one).unwrap(); + + let root_two = Block::new( + vec![id; 2], + Some(header_ref.clone()), + two_keys_ser.clone(), + None, + ); + let root_two_ser = serde_bare::to_vec(&root_two).unwrap(); + + let root_max = Block::new( + vec![id; max_arity_root], + Some(header_ref.clone()), + max_keys_root_ser.clone(), + None, + ); + let root_max_ser = serde_bare::to_vec(&root_max).unwrap(); + + let data_full_when_header_ref = + ChunkContentV0::DataChunk(vec![0; max_data_payload_size - HEADER_REF_EXTRA]); + let data_full_when_header_ref_ser = + serde_bare::to_vec(&data_full_when_header_ref).unwrap(); + + let root_full = Block::new( + vec![], + Some(header_ref.clone()), + data_full_when_header_ref_ser.clone(), + None, + ); + let root_full_ser = serde_bare::to_vec(&root_full).unwrap(); + + log_debug!( + "block size of root block with header ref with 1 child: {}", + root_one_ser.len() + ); + + log_debug!( + "block size of root block 
with header ref with 2 children: {}", + root_two_ser.len() + ); + + log_debug!( + "block size of root block with header ref with max arity children: {}", + root_max_ser.len() + ); + + log_debug!( + "block size of root block with header ref with full DataChunk (fitting ObjectContent): {}", + root_full_ser.len() + ); + + assert!(root_full_ser.len() <= max_block_size); + + let root_embed_one = Block::new( + vec![id; 1], + Some(header_embed.clone()), + one_key_ser.clone(), + None, + ); + let root_embed_one_ser = serde_bare::to_vec(&root_embed_one).unwrap(); + + let root_embed_two = Block::new( + vec![id; 2], + Some(header_embed.clone()), + two_keys_ser.clone(), + None, + ); + let root_embed_two_ser = serde_bare::to_vec(&root_embed_two).unwrap(); + + let root_embed_max = Block::new( + vec![id; max_arity_root], + Some(header_embed.clone()), + max_keys_root_ser.clone(), + None, + ); + let root_embed_max_ser = serde_bare::to_vec(&root_embed_max).unwrap(); + + let data_full_when_header_embed = + ChunkContentV0::DataChunk(vec![0; max_data_payload_size - HEADER_EMBED_EXTRA]); + let data_full_when_header_embed_ser = + serde_bare::to_vec(&data_full_when_header_embed).unwrap(); + + let root_embed_full = Block::new( + vec![], + Some(header_embed.clone()), + data_full_when_header_embed_ser.clone(), + None, + ); + let root_embed_full_ser = serde_bare::to_vec(&root_embed_full).unwrap(); + + log_debug!( + "block size of root block with header embed with 1 child: {}", + root_embed_one_ser.len() + ); + + log_debug!( + "block size of root block with header embed with 2 children: {}", + root_embed_two_ser.len() + ); + + log_debug!( + "block size of root block with header embed with max arity children: {}", + root_embed_max_ser.len() + ); + + log_debug!( + "block size of root block with header embed with full DataChunk (fitting ObjectContent): {}", + root_embed_full_ser.len() + ); + + assert!(root_embed_full_ser.len() <= max_block_size); + + let header_acks_1 = CommitHeader::new_with_acks(vec![id]); + let header_acks_2 = CommitHeader::new_with_acks(vec![id, id]); + let header_acks_60 = CommitHeader::new_with_acks(vec![id; 60]); + let header_acks_60_deps_60 = + CommitHeader::new_with_deps_and_acks(vec![id; 60], vec![id; 60]); + + fn make_header_block(header: Option) -> CommitHeaderRef { + let content_ser = serde_bare::to_vec(&ObjectContent::V0( + ObjectContentV0::CommitHeader(header.unwrap()), + )) + .unwrap(); + let data_chunk = ChunkContentV0::DataChunk(content_ser.clone()); + let encrypted_content = serde_bare::to_vec(&data_chunk).unwrap(); + CommitHeaderRef::from_content_key(encrypted_content, SymKey::dummy()) + } + + let header_embed_acks_1 = make_header_block(header_acks_1); + let header_embed_acks_2 = make_header_block(header_acks_2); + let header_embed_acks_60 = make_header_block(header_acks_60); + let header_embed_acks_60_deps_60 = make_header_block(header_acks_60_deps_60); + + fn test_header_embed(name: &str, header: CommitHeaderRef, max_block_size: usize) { + let (id, key) = BlockRef::dummy().into(); + + log_debug!("header content size : {}", header.encrypted_content_len()); + + let max_arity = (max_block_size + - header.encrypted_content_len() + - BLOCK_EXTRA + - HEADER_EMBED_EXTRA) + / CHILD_SIZE; + + log_debug!("max arity for header {} : {}", name, max_arity); + + let max_keys_when_real_header = + ChunkContentV0::InternalNode(vec![key.clone(); max_arity]); + let max_keys_when_real_header_ser = + serde_bare::to_vec(&max_keys_when_real_header).unwrap(); + + let root_embed_max = Block::new( + vec![id; 
max_arity], + Some(header), + max_keys_when_real_header_ser.clone(), + None, + ); + let root_embed_max_ser = serde_bare::to_vec(&root_embed_max).unwrap(); + + log_debug!( + "block size of root block with header {} with max possible arity children : {}", + name, + root_embed_max_ser.len() + ); + + assert!(root_embed_max_ser.len() <= max_block_size); + } + + test_header_embed( + "embed acks 60 deps 60", + header_embed_acks_60_deps_60, + max_block_size, + ); + + test_header_embed("embed acks 60", header_embed_acks_60, max_block_size); + + test_header_embed("embed acks 2", header_embed_acks_2, max_block_size); + + test_header_embed("embed acks 1", header_embed_acks_1, max_block_size); + } + + let max_block_size = store_max_value_size(); + let min_block_size = store_valid_value_size(0); + + test_block(max_block_size); + test_block(min_block_size); + test_block(store_valid_value_size(10000)); + test_block(store_valid_value_size(100000)); + test_block(store_valid_value_size(1000000)); + test_block(store_valid_value_size(5000)); + } +} diff --git a/ng-repo/src/os_info.rs b/ng-repo/src/os_info.rs new file mode 100644 index 0000000..0ead837 --- /dev/null +++ b/ng-repo/src/os_info.rs @@ -0,0 +1,65 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +use os_info; +use serde_json::{json, Value}; + +pub fn get_os_info() -> Value { + let arch = std::env::consts::ARCH; + let machine = match arch { + "ia32" => "x86", + "x64" => "x86_64", + "i386" => "x86", + "i686" => "x86", + "amd64" => "x86_64", + "arm64" => "aarch64", + "powerpc" => "ppc", + "powerpc64" => "ppc64", + _ => arch, + }; + + let info = os_info::get(); + let os_type = info.os_type(); + let os_name = match os_type { + os_info::Type::Macos => "macOS".to_string(), + _ => format!("{:?}", os_type), + }; + + let val = json!({ + "uname": { + "os_name": os_name, + "version": info.version().to_string(), + "arch": info.architecture().map(|s| s.into()).unwrap_or(Value::Null), + "bitness": format!("{:?}",info.bitness()), + "codename": info.codename().map(|s| s.into()).unwrap_or(Value::Null), + "edition": info.edition().map(|s| s.into()).unwrap_or(Value::Null), + }, + "rust": { + "family": std::env::consts::FAMILY, + "os_name": match std::env::consts::OS { + "linux" => "Linux", + "macos" => "macOS", + "ios" => "iOS", + "freebsd" => "FreeBSD", + "dragonfly" => "DragonFly", + "netbsd" => "NetBSD", + "openbsd" => "OpenBSD", + "solaris" => "Solaris", + "android" => "Android", + "windows" => "Windows", + _ => std::env::consts::OS, + }, + "arch": machine, + "debug": cfg!(debug_assertions), + "target": current_platform::CURRENT_PLATFORM, + } + }); + //println!("{}", to_string_pretty(&val).unwrap()); + val +} diff --git a/ng-repo/src/repo.rs b/ng-repo/src/repo.rs new file mode 100644 index 0000000..8de9222 --- /dev/null +++ b/ng-repo/src/repo.rs @@ -0,0 +1,686 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Repository
+
+use core::fmt;
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::sync::Arc;
+
+use serde::{Deserialize, Serialize};
+
+use crate::errors::*;
+#[allow(unused_imports)]
+use crate::log::*;
+use crate::store::Store;
+use crate::types::*;
+
+impl RepositoryV0 {
+    pub fn new_with_meta(id: &PubKey, metadata: &Vec<u8>) -> RepositoryV0 {
+        RepositoryV0 {
+            id: id.clone(),
+            metadata: metadata.clone(),
+            verification_program: vec![],
+            fork_of: vec![],
+            creator: None,
+        }
+    }
+}
+
+impl Repository {
+    pub fn new(id: &RepoId) -> Self {
+        Repository::V0(RepositoryV0 {
+            id: id.clone(),
+            verification_program: vec![],
+            creator: None,
+            fork_of: vec![],
+            metadata: vec![],
+        })
+    }
+    pub fn new_with_meta(id: &PubKey, metadata: &Vec<u8>) -> Repository {
+        Repository::V0(RepositoryV0::new_with_meta(id, metadata))
+    }
+    pub fn id(&self) -> &PubKey {
+        match self {
+            Self::V0(v0) => &v0.id,
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct UserInfo {
+    /// list of permissions granted to the user, each with optional metadata
+    pub permissions: HashMap<PermissionV0, Vec<u8>>,
+    pub id: UserId,
+}
+
+impl UserInfo {
+    pub fn has_any_perm(&self, perms: &HashSet<PermissionV0>) -> Result<(), NgError> {
+        //log_debug!("perms {:?}", perms);
+        if self.has_perm(&PermissionV0::Owner).is_ok() {
+            return Ok(());
+        }
+        let is_admin = self.has_perm(&PermissionV0::Admin).is_ok();
+        //log_debug!("is_admin {}", is_admin);
+        //is_delegated_by_admin
+        let has_perms: HashSet<&PermissionV0> = self.permissions.keys().collect();
+        //log_debug!("has_perms {:?}", has_perms);
+        for perm in perms {
+            if is_admin && perm.is_delegated_by_admin() || has_perms.contains(perm) {
+                return Ok(());
+            }
+        }
+        // if has_perms.intersection(perms).count() > 0 {
+        //     Ok(())
+        // } else {
+        Err(NgError::PermissionDenied)
+    }
+    pub fn has_perm(&self, perm: &PermissionV0) -> Result<&Vec<u8>, NgError> {
+        self.permissions.get(perm).ok_or(NgError::PermissionDenied)
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct BranchInfo {
+    pub id: BranchId,
+
+    pub branch_type: BranchType,
+
+    pub crdt: BranchCrdt,
+
+    pub topic: Option<TopicId>,
+
+    pub topic_priv_key: Option<PrivKey>,
+
+    pub read_cap: Option<ReadCap>,
+
+    pub fork_of: Option<BranchId>,
+
+    pub merged_in: Option<BranchId>,
+
+    pub current_heads: Vec<ObjectRef>,
+
+    pub commits_nbr: u64,
+}
+
+/// In-memory Repository representation, with helper functions that access the underlying UserStorage and keep a proxy of the values
+#[derive(Debug)]
+pub struct Repo {
+    pub id: RepoId,
+    /// Repo definition
+    pub repo_def: Repository,
+
+    pub read_cap: Option<ReadCap>,
+
+    pub write_cap: Option<RepoWriteCapSecret>,
+
+    pub signer: Option<SignerCap>,
+
+    pub inbox: Option<PrivKey>,
+
+    pub certificate_ref: Option<ObjectRef>,
+
+    pub members: HashMap<Digest, UserInfo>,
+
+    pub branches: HashMap<BranchId, BranchInfo>,
+
+    /// if opened_branches is empty, it means the repo has not been opened yet.
+    /// if a branchId is present in the hashmap, it means it is opened.
+ /// the boolean indicates if the branch is opened as publisher or not + pub opened_branches: HashMap, + + /*pub main_branch_rc: Option, + + pub chat_branch_rc: Option, + + // only used if it is a StoreRepo + pub store_branch_rc: Option, + pub overlay_branch_rc: Option, + + // only used if it is a private StoreRepo + pub user_branch_rc: Option,*/ + pub store: Arc, +} + +impl fmt::Display for Repo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "====== Repo ====== {}", self.id)?; + + write!(f, "== repo_def: {}", self.repo_def)?; + + if self.signer.is_some() { + writeln!(f, "== signer: {:?}", self.signer)?; + } + + writeln!(f, "== members: {:?}", self.members)?; + + Ok(()) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommitInfo { + pub past: Vec, + pub key: ObjectKey, + pub signature: Option, + pub author: String, + pub timestamp: Timestamp, + pub final_consistency: bool, + pub commit_type: CommitType, + pub branch: Option, + pub x: u32, + pub y: u32, +} + +impl Repo { + #[cfg(any(test, feature = "testing"))] + #[allow(deprecated)] + pub fn new_with_perms(perms: &[PermissionV0], store: Arc) -> Self { + let pub_key = PubKey::nil(); + Self::new_with_member(&pub_key, &pub_key, perms, store) + } + + pub(crate) fn get_user_string(&self, user_hash: &Digest) -> String { + self.members + .get(user_hash) + .map_or_else(|| format!("t:{user_hash}"), |info| format!("i:{}", info.id)) + } + + fn load_causal_past( + &self, + recursor: &mut Vec<(BlockRef, Option)>, + visited: &mut HashMap, CommitInfo)>, + signatures: &mut HashMap, + ) -> Result, VerifierError> { + let mut root = None; + while let Some((next_ref, future)) = recursor.pop() { + if let Ok(cobj) = Commit::load(next_ref, &self.store, true) { + let id = cobj.id().unwrap(); + if let Some((future_set, info)) = visited.get_mut(&id) { + // we update the future + if let Some(f) = future { + future_set.insert(f); + } + if let Some(sign) = signatures.remove(&id) { + info.signature = Some(sign); + } + } else { + let commit_type = cobj.get_type().unwrap(); + let acks = cobj.acks(); + // for a in acks.iter() { + // log_debug!("ACKS of {} {}", id.to_string(), a.id.to_string()); + // } + let (past, real_acks, next_future) = match commit_type { + CommitType::SyncSignature => { + assert_eq!(acks.len(), 1); + let dep = cobj.deps(); + assert_eq!(dep.len(), 1); + let mut current_commit = dep[0].clone(); + let sign_ref = cobj.get_signature_reference().unwrap(); + let real_acks; + let mut future = id; + loop { + let o = Commit::load(current_commit.clone(), &self.store, true)?; + let deps = o.deps(); + let commit_info = CommitInfo { + past: deps.iter().map(|r| r.id.clone()).collect(), + key: o.key().unwrap(), + signature: Some(sign_ref.clone()), + author: self.get_user_string(o.author()), + timestamp: o.timestamp(), + final_consistency: o.final_consistency(), + commit_type: o.get_type().unwrap(), + branch: None, + x: 0, + y: 0, + }; + let id = o.id().unwrap(); + + visited.insert(id, ([future].into(), commit_info)); + future = id; + if id == acks[0].id { + real_acks = o.acks(); + break; + } + assert_eq!(deps.len(), 1); + current_commit = deps[0].clone(); + } + (vec![dep[0].id], real_acks, future) + } + CommitType::AsyncSignature => { + let past: Vec = acks.iter().map(|r| r.id.clone()).collect(); + let sign = cobj.get_signature_reference().unwrap(); + for p in cobj.deps().iter() { + signatures.insert(p.id, sign.clone()); + //visited.get_mut(&p.id).unwrap().1.signature = Some(sign.clone()); + } + 
(past, acks, id) + } + _ => (acks.iter().map(|r| r.id.clone()).collect(), acks, id), + }; + + let commit_info = CommitInfo { + past, + key: cobj.key().unwrap(), + signature: signatures.remove(&id), + author: self.get_user_string(cobj.author()), + timestamp: cobj.timestamp(), + final_consistency: cobj.final_consistency(), + commit_type, + branch: None, + x: 0, + y: 0, + }; + visited.insert(id, (future.map_or([].into(), |f| [f].into()), commit_info)); + if real_acks.is_empty() && root.is_none() { + root = Some(next_future); + } + recursor.extend(real_acks.into_iter().map(|br| (br, Some(next_future)))); + // for past_ref in real_acks { + // let o = Commit::load(past_ref, &self.store, true)?; + // if let Some(r) = self.load_causal_past(&o, visited, Some(next_future))? { + // root = Some(r); + // } + // } + } + } + } + Ok(root) + } + + fn past_is_all_in( + past: &Vec, + already_in: &HashMap, + coming_from: &ObjectId, + ) -> bool { + for p in past { + if !already_in.contains_key(p) && p != coming_from { + return false; + } + } + true + } + + fn collapse( + id: &ObjectId, + dag: &mut HashMap, CommitInfo)>, + already_in: &mut HashMap, + branches_order: &mut Vec>, + branches: &mut HashMap, + //swimlanes: &mut Vec>, + ) -> Vec<(ObjectId, CommitInfo)> { + let (_, c) = dag.get(id).unwrap(); + //log_debug!("processing {id}"); + if c.past.len() > 1 && !Self::past_is_all_in(&c.past, already_in, id) { + // we postpone the merge until all the past commits have been added + //log_debug!("postponed {}", id); + vec![] + } else { + let (future, mut info) = dag.remove(id).unwrap(); + let mut branch = match info.past.len() { + 0 => *id, + _ => info.branch.unwrap(), + // _ => { + // we merge on the smallest branch ordinal. + // let smallest_branch = info + // .past + // .iter() + // .map(|past_commit| { + // branches.get(already_in.get(past_commit).unwrap()).unwrap() + // }) + // .min() + // .unwrap(); + // branches_order + // .get(*smallest_branch) + // .unwrap() + // .unwrap() + // .clone() + // } + }; + info.branch = Some(branch.clone()); + // let swimlane_idx = branches.get(&branch).unwrap(); + // let swimlane = swimlanes.get_mut(*swimlane_idx).unwrap(); + // if swimlane.last().map_or(true, |last| last != &branch) { + // swimlane.push(branch.clone()); + // } + let mut res = vec![(*id, info)]; + let mut first_child_branch = branch.clone(); + already_in.insert(*id, branch); + let mut future = Vec::from_iter(future); + future.sort(); + // the first branch is the continuation as parent. + let mut iterator = future.iter().peekable(); + while let Some(child) = iterator.next() { + //log_debug!("child of {} : {}", id, child); + { + // we merge on the smallest branch ordinal. 
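+                    // rule implemented below: when two lanes converge on the same
+                    // child, the child keeps the lane with the smaller ordinal and
+                    // the other lane's slot in branches_order is emptied (set to
+                    // None), so that a later fork can reuse it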
+ let (_, info) = dag.get_mut(child).unwrap(); + if let Some(b) = info.branch.to_owned() { + let previous_ordinal = branches.get(&b).unwrap(); + let new_ordinal = branches.get(&branch).unwrap(); + let close = if previous_ordinal > new_ordinal { + let _ = info.branch.insert(branch); + // we close the previous branch + // log_debug!( + // "closing previous {} {} in favor of new {} {}", + // previous_ordinal, + // b, + // new_ordinal, + // branch + // ); + &b + } else { + // otherwise we close the new branch + if first_child_branch == branch { + first_child_branch = b; + } + // log_debug!( + // "closing new branch {} {} in favor of previous {} {}", + // new_ordinal, + // branch, + // previous_ordinal, + // b + // ); + &branch + }; + let i = branches.get(close).unwrap(); + branches_order.get_mut(*i).unwrap().take(); + } else { + let _ = info.branch.insert(branch); + } + } + // log_debug!( + // "branches_order before children of {child} {:?}", + // branches_order + // .iter() + // .enumerate() + // .map(|(i, b)| b.map_or(format!("{i}:closed"), |bb| format!("{i}:{bb}"))) + // .collect::>() + // .join(" -- ") + // ); + res.append(&mut Self::collapse( + child, + dag, + already_in, + branches_order, + branches, + //swimlanes, + )); + // log_debug!( + // "branches_order after children of {child} {:?}", + // branches_order + // .iter() + // .enumerate() + // .map(|(i, b)| b.map_or(format!("{i}:closed"), |bb| format!("{i}:{bb}"))) + // .collect::>() + // .join(" -- ") + // ); + // each other child gets a new branch + if let Some(next) = iterator.peek() { + branch = **next; + if branches.contains_key(*next) { + continue; + } + let mut branch_inserted = false; + let mut first_child_branch_passed = false; + for (i, next_branch) in branches_order.iter_mut().enumerate() { + if let Some(b) = next_branch { + if b == &first_child_branch { + first_child_branch_passed = true; + //log_debug!("first_child_branch_passed"); + } + } + if next_branch.is_none() && first_child_branch_passed { + //log_debug!("found empty lane {}, putting branch in it {}", i, branch); + let _ = next_branch.insert(branch.clone()); + branches.insert(branch, i); + branch_inserted = true; + break; + } + } + if !branch_inserted { + //swimlanes.push(Vec::new()); + // log_debug!( + // "adding new lane {}, for branch {}", + // branches_order.len(), + // branch + // ); + branches_order.push(Some(branch.clone())); + branches.insert(branch, branches_order.len() - 1); + } + } + } + res + } + } + + pub fn history_at_heads( + &self, + heads: &[ObjectRef], + ) -> Result<(Vec<(ObjectId, CommitInfo)>, Vec>), VerifierError> { + assert!(!heads.is_empty()); + // for h in heads { + // log_debug!("HEAD {}", h.id); + // } + let mut visited = HashMap::new(); + let mut root = None; + let mut recursor: Vec<(BlockRef, Option)> = + heads.iter().map(|h| (h.clone(), None)).collect(); + let mut signatures: HashMap = HashMap::new(); + let r = self.load_causal_past(&mut recursor, &mut visited, &mut signatures)?; + if r.is_some() { + root = r; + } + // for id in heads { + // if let Ok(cobj) = Commit::load(id.clone(), &self.store, true) { + // let r = self.load_causal_past(&cobj, &mut visited, None)?; + // //log_debug!("ROOT? 
{:?}", r.map(|rr| rr.to_string())); + // if r.is_some() { + // root = r; + // } + // } + // } + + // for h in visited.keys() { + // log_debug!("VISITED {}", h); + // } + if root.is_none() { + return Err(VerifierError::MalformedDag); + } + let root = root.unwrap(); + + let mut already_in: HashMap = HashMap::new(); + let mut branches_order: Vec> = vec![Some(root.clone())]; + let mut branches: HashMap = HashMap::from([(root.clone(), 0)]); + //let mut swimlanes: Vec> = vec![vec![root.clone()]]; + let mut commits = Self::collapse( + &root, + &mut visited, + &mut already_in, + &mut branches_order, + &mut branches, + //&mut swimlanes, + ); + for (i, (_, commit)) in commits.iter_mut().enumerate() { + commit.y = i as u32; + commit.x = *branches.get(commit.branch.as_ref().unwrap()).unwrap() as u32; + } + Ok((commits, branches_order)) + } + + pub fn update_branch_current_heads( + &mut self, + branch: &BranchId, + commit_ref: ObjectRef, + past: Vec, + ) -> Result, VerifierError> { + //log_info!("from branch {} HEAD UPDATED TO {}", branch, commit_ref.id); + if let Some(branch) = self.branches.get_mut(branch) { + let mut set: HashSet<&ObjectRef> = HashSet::from_iter(branch.current_heads.iter()); + for p in past { + set.remove(&p); + } + let already_in_heads = set.contains(&commit_ref); + branch.current_heads = set.into_iter().cloned().collect(); + if !already_in_heads { + branch.current_heads.push(commit_ref); + branch.commits_nbr += 1; + } + // we return the new current heads + Ok(branch.current_heads.to_vec()) + } else { + Err(VerifierError::BranchNotFound) + } + } + + pub fn new_with_member( + repo_id: &PubKey, + member: &UserId, + perms: &[PermissionV0], + store: Arc, + ) -> Self { + let mut members = HashMap::new(); + let permissions = HashMap::from_iter( + perms + .iter() + .map(|p| (*p, vec![])) + .collect::)>>() + .iter() + .cloned(), + ); + let overlay = store.get_store_repo().overlay_id_for_read_purpose(); + let member_hash = CommitContent::author_digest(member, overlay); + //log_debug!("added member {:?} {:?}", member, member_hash); + members.insert( + member_hash, + UserInfo { + id: *member, + permissions, + }, + ); + Self { + id: repo_id.clone(), + repo_def: Repository::new(&repo_id), + members, + store, + signer: None, + inbox: None, + certificate_ref: None, + read_cap: None, + write_cap: None, + branches: HashMap::new(), + opened_branches: HashMap::new(), + //main_branch_rc: None, + } + } + + pub fn verify_permission(&self, commit: &Commit) -> Result<(), NgError> { + let content_author = commit.content_v0().author; + let body = commit.load_body(&self.store)?; + match self.members.get(&content_author) { + Some(info) => return info.has_any_perm(&body.required_permission()), + None => {} + } + Err(NgError::PermissionDenied) + } + + pub fn member_pubkey(&self, hash: &Digest) -> Result { + match self.members.get(hash) { + Some(user_info) => Ok(user_info.id), + None => Err(NgError::NotFound), + } + } + + pub fn branch(&self, id: &BranchId) -> Result<&BranchInfo, NgError> { + //TODO: load the BranchInfo from storage + self.branches.get(id).ok_or(NgError::BranchNotFound) + } + + pub fn branch_mut(&mut self, id: &BranchId) -> Result<&mut BranchInfo, NgError> { + //TODO: load the BranchInfo from storage + self.branches.get_mut(id).ok_or(NgError::BranchNotFound) + } + + pub fn overlay_branch(&self) -> Option<&BranchInfo> { + for (_, branch) in self.branches.iter() { + if branch.branch_type == BranchType::Overlay { + return Some(branch); + } + } + None + } + + pub fn user_branch(&self) -> 
Option<&BranchInfo> { + for (_, branch) in self.branches.iter() { + if branch.branch_type == BranchType::User { + return Some(branch); + } + } + None + } + + pub fn main_branch(&self) -> Option<&BranchInfo> { + for (_, branch) in self.branches.iter() { + if branch.branch_type == BranchType::Main { + return Some(branch); + } + } + None + } + + pub fn store_branch(&self) -> Option<&BranchInfo> { + for (_, branch) in self.branches.iter() { + if branch.branch_type == BranchType::Store { + return Some(branch); + } + } + None + } + + pub fn header_branch(&self) -> Option<&BranchInfo> { + for (_, branch) in self.branches.iter() { + if branch.branch_type == BranchType::Header { + return Some(branch); + } + } + None + } + + pub fn root_branch(&self) -> Option<&BranchInfo> { + for (_, branch) in self.branches.iter() { + if branch.branch_type == BranchType::Root { + return Some(branch); + } + } + None + } + + pub fn overlay_branch_read_cap(&self) -> Option<&ReadCap> { + match self.overlay_branch() { + Some(bi) => Some(bi.read_cap.as_ref().unwrap()), + None => self.read_cap.as_ref(), // this is for private stores that don't have an overlay branch + } + } + + pub fn branch_is_opened(&self, branch: &BranchId) -> bool { + self.opened_branches.contains_key(branch) + } + + pub fn branch_is_opened_as_publisher(&self, branch: &BranchId) -> bool { + match self.opened_branches.get(branch) { + Some(val) => *val, + None => false, + } + } + + // pub(crate) fn get_store(&self) -> &Store { + // self.store.unwrap() + // } +} diff --git a/ng-repo/src/store.rs b/ng-repo/src/store.rs new file mode 100644 index 0000000..1bf56bf --- /dev/null +++ b/ng-repo/src/store.rs @@ -0,0 +1,763 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +//! Store of a Site, or of a Group or Dialog + +use core::fmt; +use std::collections::HashMap; +use std::sync::{Arc, RwLock}; + +use ng_threshold_crypto::SecretKeySet; + +use crate::block_storage::{BlockStorage, HashMapBlockStorage}; +use crate::errors::{NgError, StorageError}; +#[allow(unused_imports)] +use crate::log::*; +use crate::object::Object; +use crate::repo::{BranchInfo, Repo}; +use crate::types::*; +use crate::utils::{generate_keypair, sign}; + +pub struct Store { + //TODO: store_repo, store_readcap and store_overlay_branch_readcap could be empty, if we have only an outer access to the store. 
should be Options + store_repo: StoreRepo, + store_readcap: ReadCap, + store_overlay_branch_readcap: ReadCap, + pub overlay_id: OverlayId, + storage: Arc>, +} + +impl From<&Store> for StoreUpdate { + fn from(s: &Store) -> StoreUpdate { + StoreUpdate::V0(StoreUpdateV0 { + store: s.store_repo, + store_read_cap: s.store_readcap.clone(), + overlay_branch_read_cap: s.store_overlay_branch_readcap.clone(), + metadata: vec![], + }) + } +} + +impl fmt::Debug for Store { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Store:\nstore_repo {}", self.store_repo)?; + writeln!(f, "store_readcap {}", self.store_readcap)?; + writeln!( + f, + "store_overlay_branch_readcap {}", + self.store_overlay_branch_readcap + )?; + writeln!(f, "overlay_id {}", self.overlay_id) + } +} + +impl Store { + pub fn new_temp_in_mem() -> Self { + Store { + store_repo: StoreRepo::nil(), + store_readcap: ReadCap::nil(), + store_overlay_branch_readcap: ReadCap::nil(), + overlay_id: OverlayId::nil(), + storage: Arc::new(RwLock::new(HashMapBlockStorage::new())), + } + } + + pub fn new_from_overlay_id( + overlay: &OverlayId, + storage: Arc>, + ) -> Store { + Store { + store_repo: StoreRepo::nil(), + store_readcap: ReadCap::nil(), + store_overlay_branch_readcap: ReadCap::nil(), + overlay_id: overlay.clone(), + storage, + } + } + + pub fn new_from( + update: &StoreUpdate, + storage: Arc>, + ) -> Store { + match update { + StoreUpdate::V0(v0) => Store::new( + v0.store, + v0.store_read_cap.clone(), + v0.overlay_branch_read_cap.clone(), + storage, + ), + } + } + pub fn id(&self) -> &PubKey { + self.store_repo.repo_id() + } + pub fn set_read_caps(&mut self, read_cap: ReadCap, overlay_read_cap: Option) { + self.store_readcap = read_cap; + if let Some(overlay_read_cap) = overlay_read_cap { + self.store_overlay_branch_readcap = overlay_read_cap; + } + } + + pub fn is_private(&self) -> bool { + self.store_repo.is_private() + } + + pub fn get_store_repo(&self) -> &StoreRepo { + &self.store_repo + } + + pub fn get_store_readcap(&self) -> &ReadCap { + &self.store_readcap + } + + pub fn get_store_overlay_branch_readcap_secret(&self) -> &ReadCapSecret { + &self.store_overlay_branch_readcap.key + } + + pub fn get_store_overlay_branch_readcap(&self) -> &ReadCap { + &self.store_overlay_branch_readcap + } + + pub fn get_store_readcap_secret(&self) -> &ReadCapSecret { + &self.store_readcap.key + } + + /// Load a block from the storage. + pub fn get(&self, id: &BlockId) -> Result { + self.storage + .read() + .map_err(|_| StorageError::BackendError)? + .get(&self.overlay_id, id) + } + + /// fetch a block from broker or core overlay + pub async fn fetch(&self, _id: &BlockId) -> Result { + todo!(); + } + + /// Save a block to the storage. + pub fn put(&self, block: &Block) -> Result { + self.storage + .write() + .map_err(|_| StorageError::BackendError)? + .put(&self.overlay_id, block, true) + } + + /// Delete a block from the storage. + pub fn del(&self, id: &BlockId) -> Result { + self.storage + .write() + .map_err(|_| StorageError::BackendError)? + .del(&self.overlay_id, id) + } + + /// number of Blocks in the storage + pub fn len(&self) -> Result { + self.storage + .read() + .map_err(|_| StorageError::BackendError)? + .len() + } + + pub fn has(&self, id: &BlockId) -> Result<(), StorageError> { + self.storage + .read() + .map_err(|_| StorageError::BackendError)? 
+ .has(&self.overlay_id, id) + } + + /// returns the (branch_commit, add_branch_commit, branch_info) + fn create_branch( + &self, + branch_type: BranchType, + crdt: BranchCrdt, + creator: &UserId, + creator_priv_key: &PrivKey, + repo_pub_key: BranchId, + repository_commit_ref: ObjectRef, + root_branch_readcap_id: ObjectId, + repo_write_cap_secret: &RepoWriteCapSecret, + add_branch_deps: Vec, + add_branch_acks: Vec, + ) -> Result<(Commit, Commit, BranchInfo), NgError> { + let (branch_priv_key, branch_pub_key) = generate_keypair(); + + let (branch_topic_priv_key, branch_topic_pub_key) = generate_keypair(); + + let branch_commit_body = CommitBody::V0(CommitBodyV0::Branch(Branch::V0(BranchV0 { + id: branch_pub_key, + crdt: crdt.clone(), + repo: repository_commit_ref, + root_branch_readcap_id, + topic: branch_topic_pub_key, + topic_privkey: Branch::encrypt_branch_write_cap_secret( + &branch_topic_priv_key, + branch_topic_pub_key, + branch_pub_key, + repo_write_cap_secret, + ), + pulled_from: vec![], + metadata: vec![], + }))); + + let branch_commit = Commit::new_with_body_acks_deps_and_save( + &branch_priv_key, + &branch_pub_key, + branch_pub_key, + QuorumType::Owners, + vec![], + vec![], + branch_commit_body, + self, + )?; + let branch_read_cap = branch_commit.reference().unwrap(); + + // creating the AddBranch commit (on root_branch), deps to the RootBranch commit + // author is the owner + + let add_branch_commit_body = + CommitBody::V0(CommitBodyV0::AddBranch(AddBranch::V0(AddBranchV0 { + branch_type: branch_type.clone(), + branch_id: branch_pub_key, + topic_id: Some(branch_topic_pub_key), + branch_read_cap: Some(branch_read_cap.clone()), + fork_of: None, + merged_in: None, + crdt: crdt.clone(), + }))); + + let add_branch_commit = Commit::new_with_body_acks_deps_and_save( + creator_priv_key, + creator, + repo_pub_key, + QuorumType::Owners, + add_branch_deps, + add_branch_acks, + add_branch_commit_body, + self, + )?; + + let branch_info = BranchInfo { + id: branch_pub_key, + branch_type, + topic: Some(branch_topic_pub_key), + topic_priv_key: Some(branch_topic_priv_key), + read_cap: Some(branch_read_cap), + fork_of: None, + merged_in: None, + crdt, + current_heads: vec![], + commits_nbr: 0, + }; + + Ok((branch_commit, add_branch_commit, branch_info)) + } + + pub fn create_repo_default( + self: Arc, + creator: &UserId, + creator_priv_key: &PrivKey, + repo_write_cap_secret: SymKey, + branch_crdt: BranchCrdt, + ) -> Result<(Repo, Vec<(Commit, Vec)>), NgError> { + let (repo_priv_key, repo_pub_key) = generate_keypair(); + + self.create_repo_with_keys( + creator, + creator_priv_key, + repo_priv_key, + repo_pub_key, + repo_write_cap_secret, + Some(branch_crdt), + false, + ) + } + + pub fn create_repo_with_keys( + self: Arc, + creator: &UserId, + creator_priv_key: &PrivKey, + repo_priv_key: PrivKey, + repo_pub_key: PubKey, + repo_write_cap_secret: SymKey, + mut branch_crdt: Option, + is_private_store: bool, + ) -> Result<(Repo, Vec<(Commit, Vec)>), NgError> { + let is_store = branch_crdt.is_none(); + if is_store { + branch_crdt = Some(BranchCrdt::Graph( + if is_private_store { + "data:container" + } else { + "social:profile" + }.to_string() + )); + } + + let mut events = Vec::with_capacity(9); + let mut events_postponed = Vec::with_capacity(6); + + // creating the Repository commit + + let repository = Repository::new(&repo_pub_key); + + let repository_commit_body = CommitBody::V0(CommitBodyV0::Repository(repository.clone())); + + let repository_commit = Commit::new_with_body_acks_deps_and_save( + 
&repo_priv_key, + &repo_pub_key, + repo_pub_key, + QuorumType::NoSigning, + vec![], + vec![], + repository_commit_body, + &self, + )?; + + let repository_commit_body_ref = repository_commit.body_ref().clone(); + + //log_debug!("REPOSITORY COMMIT {}", repository_commit); + + let repository_commit_ref = repository_commit.reference().unwrap(); + + let (topic_priv_key, topic_pub_key) = generate_keypair(); + + // creating the RootBranch commit, acks to Repository commit + + let root_branch_commit_body = + CommitBody::V0(CommitBodyV0::RootBranch(RootBranch::V0(RootBranchV0 { + id: repo_pub_key, + repo: repository_commit_ref.clone(), + store: (&self.store_repo).into(), + store_sig: None, //TODO: the store signature + topic: topic_pub_key, + topic_privkey: Branch::encrypt_branch_write_cap_secret( + &topic_priv_key, + topic_pub_key, + repo_pub_key, + &repo_write_cap_secret, + ), + inherit_perms_users_and_quorum_from_store: None, + quorum: None, + reconciliation_interval: RelTime::None, + owners: vec![creator.clone()], + owners_write_cap: vec![serde_bytes::ByteBuf::from(RootBranch::encrypt_write_cap( + creator, + &repo_write_cap_secret, + )?)], + metadata: vec![], + }))); + + let root_branch_commit = Commit::new_with_body_acks_deps_and_save( + &repo_priv_key, + &repo_pub_key, + repo_pub_key, + QuorumType::Owners, + vec![], + vec![repository_commit_ref.clone()], + root_branch_commit_body, + &self, + )?; + + let root_branch_readcap = root_branch_commit.reference().unwrap(); + let root_branch_readcap_id = root_branch_readcap.id; + // adding the 2 events for the Repository and Rootbranch commits + + events.push((repository_commit, vec![])); + + events.push((root_branch_commit, vec![])); + + // creating the header branch + let (header_add_branch_commit, header_branch_info, next_dep) = if !is_private_store { + let (header_branch_commit, header_add_branch_commit, header_branch_info) = + self.as_ref().create_branch( + BranchType::Header, + BranchCrdt::Graph(branch_crdt.as_ref().unwrap().class().clone()), + creator, + creator_priv_key, + repo_pub_key, + repository_commit_ref.clone(), + root_branch_readcap_id, + &repo_write_cap_secret, + vec![root_branch_readcap.clone()], + vec![], + )?; + let header_add_branch_readcap = header_add_branch_commit.reference().unwrap(); + events_postponed.push((header_branch_commit, vec![])); + ( + Some(header_add_branch_commit), + Some(header_branch_info), + header_add_branch_readcap, + ) + } else { + (None, None, root_branch_readcap.clone()) + }; + + // creating the main branch + + let (main_branch_commit, main_add_branch_commit, main_branch_info) = + self.as_ref().create_branch( + BranchType::Main, + branch_crdt.unwrap(), + creator, + creator_priv_key, + repo_pub_key, + repository_commit_ref.clone(), + root_branch_readcap_id, + &repo_write_cap_secret, + vec![next_dep], + vec![], + )?; + events_postponed.push((main_branch_commit, vec![])); + + // TODO: optional AddMember and AddPermission, that should be added as deps to the SynSignature below (and to the commits of the SignatureContent) + // using the creator as author (and incrementing their peer's seq_num) + + let extra_branches = if is_store { + // creating the store branch + let (store_branch_commit, store_add_branch_commit, store_branch_info) = + self.as_ref().create_branch( + BranchType::Store, + BranchCrdt::None, + creator, + creator_priv_key, + repo_pub_key, + repository_commit_ref.clone(), + root_branch_readcap_id, + &repo_write_cap_secret, + vec![main_add_branch_commit.reference().unwrap()], + vec![], + )?; + 
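+
+            // Illustrative note (not part of this patch): each AddBranch commit above takes
+            // the previous AddBranch's reference as its single DEP, so repo creation chains
+            // the commits of the root branch linearly, roughly:
+            //
+            //     Repository <- RootBranch <- [AddBranch(Header)] <- AddBranch(Main)
+            //                <- AddBranch(Store) <- AddBranch(Overlay|User) <- SyncSignature
+            //
+            // Header is skipped for private stores; Store and Overlay|User only exist when
+            // the repo being created is itself a store (is_store == true, as here).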
events_postponed.push((store_branch_commit, vec![])); + + // creating the overlay or user branch + let ( + overlay_or_user_branch_commit, + overlay_or_user_add_branch_commit, + overlay_or_user_branch_info, + ) = self.as_ref().create_branch( + if is_private_store { + BranchType::User + } else { + BranchType::Overlay + }, + BranchCrdt::None, + creator, + creator_priv_key, + repo_pub_key, + repository_commit_ref.clone(), + root_branch_readcap_id, + &repo_write_cap_secret, + vec![store_add_branch_commit.reference().unwrap()], + vec![], + )?; + + events_postponed.push((overlay_or_user_branch_commit, vec![])); + + Some(( + store_add_branch_commit, + store_branch_info, + overlay_or_user_add_branch_commit, + overlay_or_user_branch_info, + )) + } else { + None + }; + + let sync_sign_deps = if is_store { + extra_branches.as_ref().unwrap().2.reference().unwrap() + } else { + main_add_branch_commit.reference().unwrap() + }; + + // preparing the threshold keys for the unique owner + let mut rng = rand::thread_rng(); + let sk_set = SecretKeySet::random(0, &mut rng); + let pk_set = sk_set.public_keys(); + + let sk_share = sk_set.secret_key_share(0); + + // creating signature for RootBranch, AddBranch and Branch commits + // signed with owner threshold signature (threshold = 0) + + let mut signed_commits = if header_branch_info.is_some() { + vec![ + header_branch_info + .as_ref() + .unwrap() + .read_cap + .as_ref() + .unwrap() + .id, + main_branch_info.read_cap.as_ref().unwrap().id, + ] + } else { + vec![main_branch_info.read_cap.as_ref().unwrap().id] + }; + + if let Some((_, store_branch, oou_add_branch, oou_branch)) = &extra_branches { + signed_commits.append(&mut vec![ + oou_add_branch.id().unwrap(), + store_branch.read_cap.as_ref().unwrap().id, + oou_branch.read_cap.as_ref().unwrap().id, + ]); + } else { + signed_commits.push(main_add_branch_commit.id().unwrap()); + } + + let signature_content = SignatureContent::V0(SignatureContentV0 { + commits: signed_commits, + }); + + let signature_content_ser = serde_bare::to_vec(&signature_content).unwrap(); + let sig_share = sk_share.sign(signature_content_ser); + let sig = pk_set + .combine_signatures([(0, &sig_share)]) + .map_err(|_| NgError::IncompleteSignature)?; + + let threshold_sig = ThresholdSignatureV0::Owners(sig); + + // creating root certificate of the repo + + let cert_content = CertificateContentV0 { + previous: repository_commit_body_ref, + readcap_id: root_branch_readcap_id, + owners_pk_set: pk_set.public_key(), + orders_pk_sets: OrdersPublicKeySetsV0::None, + }; + + // signing the root certificate + let cert_content_ser = serde_bare::to_vec(&cert_content).unwrap(); + let sig = sign(&repo_priv_key, &repo_pub_key, &cert_content_ser)?; + let cert_sig = CertificateSignatureV0::Repo(sig); + + let cert = Certificate::V0(CertificateV0 { + content: cert_content, + sig: cert_sig, + }); + // saving the certificate + let cert_object = Object::new( + ObjectContent::V0(ObjectContentV0::Certificate(cert)), + None, + 0, + &self, + ); + let cert_obj_blocks = cert_object.save(&self)?; + + // finally getting the signature: + + let certificate_ref = cert_object.reference().unwrap(); + let signature = Signature::V0(SignatureV0 { + content: signature_content, + threshold_sig, + certificate_ref: certificate_ref.clone(), + }); + + // saving the signature + let sig_object = Object::new( + ObjectContent::V0(ObjectContentV0::Signature(signature)), + None, + 0, + &self, + ); + let sig_obj_blocks = sig_object.save(&self)?; + + // keeping the Secret Key Share of the owner 
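+
+        // Illustrative sketch (not part of this patch) of the owner threshold scheme used
+        // above: with threshold = 0, a single share is enough to combine a valid signature.
+        //
+        //     let sk_set = SecretKeySet::random(0, &mut rand::thread_rng());
+        //     let pk_set = sk_set.public_keys();
+        //     let share = sk_set.secret_key_share(0);
+        //     let sig_share = share.sign(b"payload");
+        //     let sig = pk_set.combine_signatures([(0, &sig_share)]).unwrap();
+        //     assert!(pk_set.public_key().verify(&sig, b"payload"));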
+ let signer_cap = SignerCap { + repo: repo_pub_key, + epoch: root_branch_readcap_id, + owner: Some(ng_threshold_crypto::serde_impl::SerdeSecret(sk_share)), + total_order: None, + partial_order: None, + }; + + let sync_signature = SyncSignature::V0(sig_object.reference().unwrap()); + + // creating the SyncSignature commit body (cloned for each branch) + let sync_sig_commit_body = CommitBody::V0(CommitBodyV0::SyncSignature(sync_signature)); + + // creating the SyncSignature commit for the root_branch with deps to the AddBranch and acks to the RootBranch commit as it is its direct causal future. + + let sync_sig_on_root_branch_commit = Commit::new_with_body_acks_deps_and_save( + creator_priv_key, + creator, + repo_pub_key, + QuorumType::IamTheSignature, + vec![sync_sign_deps], + vec![root_branch_readcap.clone()], + sync_sig_commit_body.clone(), + &self, + )?; + + let mut branches = if header_branch_info.is_some() { + vec![ + ( + header_branch_info.as_ref().unwrap().id, + header_branch_info.unwrap(), + ), + (main_branch_info.id, main_branch_info), + ] + } else { + vec![(main_branch_info.id, main_branch_info)] + }; + + // adding the event for the sync_sig_on_root_branch_commit + + let mut capacity = + cert_obj_blocks.len() + sig_obj_blocks.len() + main_add_branch_commit.blocks().len(); + if header_add_branch_commit.is_some() { + capacity += header_add_branch_commit.as_ref().unwrap().blocks().len() + } + let mut additional_blocks = Vec::with_capacity(capacity); + additional_blocks.extend(cert_obj_blocks.iter()); + additional_blocks.extend(sig_obj_blocks.iter()); + additional_blocks.extend(main_add_branch_commit.blocks().iter()); + if header_add_branch_commit.is_some() { + additional_blocks.extend(header_add_branch_commit.unwrap().blocks().iter()); + } + if let Some((store_add_branch, store_branch_info, oou_add_branch, oou_branch_info)) = + extra_branches + { + additional_blocks.extend(store_add_branch.blocks().iter()); + additional_blocks.extend(oou_add_branch.blocks().iter()); + branches.push((store_branch_info.id, store_branch_info)); + branches.push((oou_branch_info.id, oou_branch_info)); + } + + // creating the SyncSignature for all (2+ optional 2) branches with deps to the Branch commit and acks also to this commit as it is its direct causal future. 
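+
+        // Illustrative note (not part of this patch): as used throughout this file, the
+        // argument order of Commit::new_with_body_acks_deps_and_save is
+        //
+        //     (author_priv_key, author_pub_key, branch_id, quorum_type, deps, acks, body, store)
+        //
+        // In the loop below, each branch's read_cap is passed both as DEP and as ACK,
+        // since the SyncSignature is the direct causal future of the Branch commit.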
+
+        for (branch_id, branch_info) in &mut branches {
+            let sync_sig_on_branch_commit = Commit::new_with_body_acks_deps_and_save(
+                creator_priv_key,
+                creator,
+                *branch_id,
+                QuorumType::IamTheSignature,
+                vec![branch_info.read_cap.to_owned().unwrap()],
+                vec![branch_info.read_cap.to_owned().unwrap()],
+                sync_sig_commit_body.clone(),
+                &self,
+            )?;
+
+            let sync_sig_on_branch_commit_ref = sync_sig_on_branch_commit.reference().unwrap();
+
+            // adding the event for the sync_sig_on_branch_commit
+
+            let mut additional = Vec::with_capacity(cert_obj_blocks.len() + sig_obj_blocks.len());
+            additional.extend(cert_obj_blocks.iter());
+            additional.extend(sig_obj_blocks.iter());
+            events_postponed.push((sync_sig_on_branch_commit, additional));
+
+            branch_info.current_heads = vec![sync_sig_on_branch_commit_ref];
+
+            // TODO: add the CertificateRefresh event on main branch
+        }
+
+        let sync_sig_on_root_branch_commit_ref =
+            sync_sig_on_root_branch_commit.reference().unwrap();
+
+        events.push((sync_sig_on_root_branch_commit, additional_blocks));
+        events.extend(events_postponed);
+
+        // preparing the Repo
+
+        let root_branch = BranchInfo {
+            id: repo_pub_key.clone(),
+            branch_type: BranchType::Root,
+            topic: Some(topic_pub_key),
+            topic_priv_key: Some(topic_priv_key),
+            read_cap: Some(root_branch_readcap.clone()),
+            fork_of: None,
+            merged_in: None,
+            crdt: BranchCrdt::None,
+            current_heads: vec![sync_sig_on_root_branch_commit_ref],
+            commits_nbr: 0,
+        };
+
+        branches.push((root_branch.id, root_branch));
+
+        let repo = Repo {
+            id: repo_pub_key,
+            repo_def: repository,
+            signer: Some(signer_cap),
+            inbox: None,
+            members: HashMap::new(),
+            store: Arc::clone(&self),
+            read_cap: Some(root_branch_readcap),
+            write_cap: Some(repo_write_cap_secret),
+            branches: branches.into_iter().collect(),
+            opened_branches: HashMap::new(),
+            certificate_ref: Some(certificate_ref),
+        };
+
+        Ok((repo, events))
+    }
+
+    pub fn new(
+        store_repo: StoreRepo,
+        store_readcap: ReadCap,
+        store_overlay_branch_readcap: ReadCap,
+        storage: Arc<RwLock<dyn BlockStorage + Send + Sync>>,
+    ) -> Self {
+        Self {
+            store_repo,
+            store_readcap,
+            overlay_id: store_repo.overlay_id_for_storage_purpose(),
+            storage,
+            store_overlay_branch_readcap,
+        }
+    }
+
+    pub fn inner_overlay(&self) -> OverlayId {
+        self.store_repo
+            .overlay_id_for_write_purpose(&self.store_overlay_branch_readcap.key)
+    }
+
+    pub fn overlay_for_read_on_client_protocol(&self) -> OverlayId {
+        match self.store_repo {
+            _ => self.inner_overlay(),
+            //StoreRepo::V0(StoreRepoV0::PrivateStore(_)) => self.inner_overlay(),
+            //_ => self.overlay_id,
+        }
+    }
+
+    pub fn outer_overlay(&self) -> OverlayId {
+        self.store_repo.outer_overlay()
+    }
+
+    #[allow(deprecated)]
+    #[cfg(any(test, feature = "testing"))]
+    pub fn dummy_public_v0() -> Arc<Self> {
+        let store_repo = StoreRepo::dummy_public_v0();
+        let store_readcap = ReadCap::dummy();
+        let store_overlay_branch_readcap = ReadCap::dummy();
+        Arc::new(Self::new(
+            store_repo,
+            store_readcap,
+            store_overlay_branch_readcap,
+            Arc::new(RwLock::new(HashMapBlockStorage::new()))
+                as Arc<RwLock<dyn BlockStorage + Send + Sync>>,
+        ))
+    }
+
+    #[cfg(any(test, feature = "testing"))]
+    pub fn dummy_with_key(repo_pubkey: PubKey) -> Arc<Self> {
+        let store_repo = StoreRepo::dummy_with_key(repo_pubkey);
+        let store_readcap = ReadCap::dummy();
+        let store_overlay_branch_readcap = ReadCap::dummy();
+        Arc::new(Self::new(
+            store_repo,
+            store_readcap,
+            store_overlay_branch_readcap,
+            Arc::new(RwLock::new(HashMapBlockStorage::new()))
+                as Arc<RwLock<dyn BlockStorage + Send + Sync>>,
+        ))
+    }
+}
diff --git a/ng-repo/src/types.rs b/ng-repo/src/types.rs
new file mode 100644
index 0000000..1a5c12d
--- /dev/null
+++ b/ng-repo/src/types.rs
@@ -0,0 +1,3148 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+//! NextGraph Repo types
+//!
+//! Corresponds to the BARE schema
+
+use core::fmt;
+use std::cmp::Ordering;
+use std::collections::hash_map::DefaultHasher;
+use std::hash::{Hash, Hasher};
+
+use once_cell::sync::OnceCell;
+use sbbf_rs_safe::Filter;
+use serde::{Deserialize, Serialize};
+use ng_threshold_crypto::serde_impl::SerdeSecret;
+use ng_threshold_crypto::SignatureShare;
+use zeroize::{Zeroize, ZeroizeOnDrop};
+
+use crate::errors::NgError;
+use crate::utils::{
+    decode_key, decode_priv_key, dh_pubkey_array_from_ed_pubkey_slice,
+    dh_pubkey_from_ed_pubkey_slice, ed_privkey_to_ed_pubkey, from_ed_privkey_to_dh_privkey,
+    random_key, verify,
+};
+
+//
+// COMMON TYPES
+//
+
+/// 32-byte Blake3 hash digest
+pub type Blake3Digest32 = [u8; 32];
+
+/// Hash digest
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)]
+pub enum Digest {
+    Blake3Digest32(Blake3Digest32),
+}
+
+impl Ord for Digest {
+    fn cmp(&self, other: &Self) -> Ordering {
+        match self {
+            Self::Blake3Digest32(left) => match other {
+                Self::Blake3Digest32(right) => left.cmp(right),
+            },
+        }
+    }
+}
+
+impl PartialOrd for Digest {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Digest {
+    pub fn from_slice(slice: [u8; 32]) -> Digest {
+        Digest::Blake3Digest32(slice)
+    }
+    pub fn slice(&self) -> &[u8; 32] {
+        match self {
+            Self::Blake3Digest32(o) => o,
+        }
+    }
+    pub fn to_slice(self) -> [u8; 32] {
+        match self {
+            Self::Blake3Digest32(o) => o,
+        }
+    }
+    /// returns a hash that is consistent across platforms (32/64 bits.
important for WASM32 compatibility with the rest) + /// see https://www.reddit.com/r/rust/comments/fwpki6/a_debugging_mystery_hashing_slices_in_wasm_works/ + pub fn get_hash(&self) -> u64 { + let mut hasher = DefaultHasher::new(); + let ser = serde_bare::to_vec(&self).unwrap(); + for e in ser { + e.hash(&mut hasher); + } + hasher.finish() + } + + pub fn print_all(all: &[Digest]) -> String { + all.iter() + .map(|d| d.to_string()) + .collect::>() + .join(" ") + } + + pub fn print_iter(all: impl Iterator) -> String { + all.map(|d| d.to_string()) + .collect::>() + .join(" ") + } + + pub fn print_iter_ref<'a>(all: impl Iterator) -> String { + all.map(|d| d.to_string()) + .collect::>() + .join(" ") + } + + pub fn print_all_ref(all: &[&Digest]) -> String { + all.into_iter() + .map(|d| d.to_string()) + .collect::>() + .join(" ") + } +} + +impl fmt::Display for Digest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", std::string::String::from(self)) + } +} + +impl From<&Vec> for Digest { + fn from(ser: &Vec) -> Self { + let hash = blake3::hash(ser.as_slice()); + Digest::Blake3Digest32(hash.as_bytes().clone()) + } +} + +impl From<&[u8; 32]> for Digest { + fn from(ser: &[u8; 32]) -> Self { + let hash = blake3::hash(ser); + Digest::Blake3Digest32(hash.as_bytes().clone()) + } +} + +impl From<&PubKey> for Digest { + fn from(key: &PubKey) -> Self { + key.slice().into() + } +} + +/// ChaCha20 symmetric key +pub type ChaCha20Key = [u8; 32]; + +/// Symmetric cryptographic key +#[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum SymKey { + ChaCha20Key(ChaCha20Key), +} + +impl SymKey { + pub fn slice(&self) -> &[u8; 32] { + match self { + SymKey::ChaCha20Key(o) => o, + } + } + pub fn random() -> Self { + SymKey::ChaCha20Key(random_key()) + } + pub fn from_array(array: [u8; 32]) -> Self { + SymKey::ChaCha20Key(array) + } + pub fn nil() -> Self { + SymKey::ChaCha20Key([0; 32]) + } + #[cfg(any(test, feature = "testing"))] + pub fn dummy() -> Self { + SymKey::ChaCha20Key([0; 32]) + } +} + +impl fmt::Display for SymKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut ser = serde_bare::to_vec(&self).unwrap(); + ser.reverse(); + write!(f, "{}", base64_url::encode(&ser)) + } +} + +impl TryFrom<&[u8]> for SymKey { + type Error = NgError; + fn try_from(buf: &[u8]) -> Result { + let sym_key_array = *slice_as_array!(buf, [u8; 32]).ok_or(NgError::InvalidKey)?; + let sym_key = SymKey::ChaCha20Key(sym_key_array); + Ok(sym_key) + } +} + +/// Curve25519 public key Edwards form +pub type Ed25519PubKey = [u8; 32]; + +/// Curve25519 public key Montgomery form +pub type X25519PubKey = [u8; 32]; + +/// Curve25519 private key Edwards form +pub type Ed25519PrivKey = [u8; 32]; + +/// Curve25519 private key Montgomery form +pub type X25519PrivKey = [u8; 32]; + +/// Public key +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum PubKey { + Ed25519PubKey(Ed25519PubKey), + X25519PubKey(X25519PubKey), +} + +impl Default for PubKey { + fn default() -> Self { + Self::nil() + } +} + +impl PubKey { + pub fn to_dh(self) -> X25519PubKey { + match self { + Self::X25519PubKey(x) => x, + _ => panic!("cannot call to_dh on an Edward key"), + } + } + pub fn slice(&self) -> &[u8; 32] { + match self { + PubKey::Ed25519PubKey(o) | PubKey::X25519PubKey(o) => o, + } + } + pub fn to_dh_from_ed(&self) -> PubKey { + match self { + PubKey::Ed25519PubKey(ed) => dh_pubkey_from_ed_pubkey_slice(ed), + _ => panic!( + "there is 
no need to convert a Montgomery key to Montgomery. it is already one. check your code" + ), + } + } + // pub fn dh_from_ed_slice(slice: &[u8]) -> PubKey { + // dh_pubkey_from_ed_pubkey_slice(slice) + // } + pub fn to_dh_slice(&self) -> [u8; 32] { + match self { + PubKey::Ed25519PubKey(o) => dh_pubkey_array_from_ed_pubkey_slice(o), + _ => panic!("can only convert an edward key to montgomery"), + } + } + + pub fn nil() -> Self { + PubKey::Ed25519PubKey([0u8; 32]) + } + + pub fn to_hash_string(&self) -> String { + let ser = serde_bare::to_vec(&self).unwrap(); + let hash = blake3::hash(&ser); + base64_url::encode(&hash.as_bytes()) + } +} + +impl fmt::Display for PubKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut ser = serde_bare::to_vec(&self).unwrap(); + ser.reverse(); + write!(f, "{}", base64_url::encode(&ser)) + } +} + +impl TryFrom<&str> for PubKey { + type Error = NgError; + fn try_from(str: &str) -> Result { + decode_key(str) + } +} + +/// Private key +#[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum PrivKey { + Ed25519PrivKey(Ed25519PrivKey), + X25519PrivKey(X25519PrivKey), +} + +#[allow(deprecated)] +impl Default for PrivKey { + fn default() -> Self { + Self::nil() + } +} + +impl PrivKey { + pub fn slice(&self) -> &[u8; 32] { + match self { + PrivKey::Ed25519PrivKey(o) | PrivKey::X25519PrivKey(o) => o, + } + } + pub fn to_pub(&self) -> PubKey { + match self { + PrivKey::Ed25519PrivKey(_) => ed_privkey_to_ed_pubkey(self), + _ => panic!("X25519PrivKey to pub not implemented"), + } + } + + pub fn nil() -> PrivKey { + PrivKey::Ed25519PrivKey([0u8; 32]) + } + + #[cfg(any(test, feature = "testing"))] + pub fn dummy() -> PrivKey { + PrivKey::Ed25519PrivKey([0u8; 32]) + } + + pub fn to_dh(&self) -> PrivKey { + from_ed_privkey_to_dh_privkey(self) + } + + pub fn random_ed() -> Self { + PrivKey::Ed25519PrivKey(random_key()) + } +} + +impl From<[u8; 32]> for PrivKey { + fn from(buf: [u8; 32]) -> Self { + let priv_key = PrivKey::Ed25519PrivKey(buf); + priv_key + } +} + +impl TryFrom<&[u8]> for PrivKey { + type Error = NgError; + fn try_from(buf: &[u8]) -> Result { + let priv_key_array = *slice_as_array!(buf, [u8; 32]).ok_or(NgError::InvalidKey)?; + let priv_key = PrivKey::Ed25519PrivKey(priv_key_array); + Ok(priv_key) + } +} + +impl TryFrom<&str> for PrivKey { + type Error = NgError; + fn try_from(str: &str) -> Result { + decode_priv_key(str) + } +} + +impl fmt::Display for PrivKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut ser = serde_bare::to_vec(&self).unwrap(); + ser.reverse(); + write!(f, "{}", base64_url::encode(&ser)) + } +} + +/// Ed25519 signature +pub type Ed25519Sig = [[u8; 32]; 2]; + +/// Cryptographic signature +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum Sig { + Ed25519Sig(Ed25519Sig), +} + +impl fmt::Display for Sig { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Ed25519Sig(ed) => { + write!( + f, + "{} {}", + base64_url::encode(&ed[0]), + base64_url::encode(&ed[1]) + ) + } + } + } +} + +impl Sig { + pub fn nil() -> Self { + Sig::Ed25519Sig([[0; 32]; 2]) + } +} + +/// Timestamp: absolute time in minutes since 2022-02-22 22:22 UTC +pub type Timestamp = u32; + +pub const EPOCH_AS_UNIX_TIMESTAMP: u64 = 1645568520; + +/// Relative time (e.g. 
delay from current time) +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RelTime { + Seconds(u8), + Minutes(u8), + Hours(u8), + Days(u8), + None, +} + +impl fmt::Display for RelTime { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Seconds(s) => writeln!(f, "{} sec.", s), + Self::Minutes(s) => writeln!(f, "{} min.", s), + Self::Hours(s) => writeln!(f, "{} h.", s), + Self::Days(s) => writeln!(f, "{} d.", s), + Self::None => writeln!(f, "None"), + } + } +} + +/// Bloom filter (variable size) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct BloomFilterV0 { + /// Filter + #[serde(with = "serde_bytes")] + pub f: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum BloomFilter { + V0(BloomFilterV0), +} + +impl BloomFilter { + pub fn filter(&self) -> Filter { + match self { + Self::V0(v0) => Filter::from_bytes(&v0.f).unwrap(), + } + } + pub fn from_filter(filter: &Filter) -> Self { + BloomFilter::V0(BloomFilterV0 { + f: filter.as_bytes().to_vec(), + }) + } +} + +// +// REPOSITORY TYPES +// + +/// RepoId is a PubKey +pub type RepoId = PubKey; + +/// RepoHash is the BLAKE3 Digest over the RepoId +pub type RepoHash = Digest; + +impl From for RepoHash { + fn from(id: RepoId) -> Self { + Digest::Blake3Digest32(*blake3::hash(id.slice()).as_bytes()) + } +} + +// impl From for String { +// fn from(id: RepoHash) -> Self { +// hex::encode(to_vec(&id).unwrap()) +// } +// } + +/// Topic ID: public key of the topic +pub type TopicId = PubKey; + +/// User ID: user account for broker and member of a repo +pub type UserId = PubKey; + +/// BranchId is a PubKey +pub type BranchId = PubKey; + +/// Block ID: BLAKE3 hash over the serialized BlockContent (contains encrypted content) +pub type BlockId = Digest; + +pub type BlockKey = SymKey; + +/// Block reference +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub struct BlockRef { + /// Block ID + pub id: BlockId, + + /// Key for decrypting the Block + pub key: BlockKey, +} + +impl Default for BlockId { + fn default() -> Self { + Self::nil() + } +} + +impl BlockId { + #[cfg(any(test, feature = "testing"))] + pub fn dummy() -> Self { + Digest::Blake3Digest32([0u8; 32]) + } + + pub fn nil() -> Self { + Digest::Blake3Digest32([0u8; 32]) + } +} + +impl BlockRef { + #[cfg(any(test, feature = "testing"))] + pub fn dummy() -> Self { + BlockRef { + id: Digest::Blake3Digest32([0u8; 32]), + key: SymKey::ChaCha20Key([0u8; 32]), + } + } + + pub fn nil() -> Self { + BlockRef { + id: Digest::Blake3Digest32([0u8; 32]), + key: SymKey::ChaCha20Key([0u8; 32]), + } + } + + pub fn from_id_key(id: BlockId, key: BlockKey) -> Self { + BlockRef { id, key } + } + + pub fn object_nuri(&self) -> String { + format!("j:{}:k:{}", self.id, self.key) + } + + pub fn commit_nuri(&self) -> String { + format!("c:{}:k:{}", self.id, self.key) + } + + pub fn readcap_nuri(&self) -> String { + let ser = serde_bare::to_vec(self).unwrap(); + format!("r:{}", base64_url::encode(&ser)) + } + + pub fn tokenize(&self) -> Digest { + let ser = serde_bare::to_vec(self).unwrap(); + Digest::Blake3Digest32(*blake3::hash(&ser).as_bytes()) + } +} + +impl From for (BlockId, BlockKey) { + fn from(blockref: BlockRef) -> (BlockId, BlockKey) { + (blockref.id.clone(), blockref.key.clone()) + } +} + +impl From<(&BlockId, &BlockKey)> for BlockRef { + fn from(id_key: (&BlockId, &BlockKey)) -> Self { + BlockRef { + id: id_key.0.clone(), + key: id_key.1.clone(), + } + } +} + +impl 
fmt::Display for BlockRef { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} {}", self.id, self.key) + } +} + +/// Object ID +pub type ObjectId = BlockId; + +/// Object Key +pub type ObjectKey = BlockKey; + +/// Object reference +pub type ObjectRef = BlockRef; + +/// Read capability (for a commit, branch, whole repo, or store) +/// +/// For a store: A ReadCap to the root repo of the store +/// For a repo: A reference to the latest RootBranch definition commit +/// For a branch: A reference to the latest Branch definition commit +/// For a commit or object, the ObjectRef is itself the read capability +pub type ReadCap = ObjectRef; + +/// Read capability secret (for a commit, branch, whole repo, or store) +/// +/// it is already included in the ReadCap (it is the key part of the reference) +pub type ReadCapSecret = ObjectKey; + +/// Write capability secret (for a whole repo) +pub type RepoWriteCapSecret = SymKey; + +/// Write capability secret (for a branch's topic) +pub type BranchWriteCapSecret = PrivKey; + +//TODO: PermaCap (involves sending an InboxPost to some verifiers) + +// +// IDENTITY, SITE, STORE, OVERLAY common types +// + +// /// List of Identity types +// #[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +// pub enum Identity { +// OrgSite(PubKey), +// IndividualSite(PubKey), +// OrgPublicStore(PubKey), +// OrgProtectedStore(PubKey), +// OrgPrivateStore(PubKey), +// IndividualPublicStore(PubKey), +// IndividualProtectedStore(PubKey), +// IndividualPrivateStore(PubKey), +// } + +pub type OuterOverlayId = Digest; + +pub type InnerOverlayId = Digest; + +/// Overlay ID +/// +/// - for outer overlays that need to be discovered by public key: +/// BLAKE3 hash over the public key of the store repo +/// - for inner overlays: +/// BLAKE3 keyed hash over the public key of the store repo +/// - key: BLAKE3 derive_key ("NextGraph Overlay ReadCapSecret BLAKE3 key", store repo's overlay's branch ReadCapSecret) +/// except for Dialog Overlays where the Hash is computed from 2 secrets. 
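+/// A standalone sketch of the inner-overlay derivation described above (illustrative
+/// only; the canonical code is `OverlayId::inner()` below):
+///
+///     let secret_ser = serde_bare::to_vec(&overlay_branch_read_cap_secret).unwrap();
+///     let key = blake3::derive_key("NextGraph Overlay ReadCapSecret BLAKE3 key", &secret_ser);
+///     let store_id_ser = serde_bare::to_vec(&store_pubkey).unwrap();
+///     let id = OverlayId::Inner(*blake3::keyed_hash(&key, &store_id_ser).as_bytes());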
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum OverlayId { + Outer(Blake3Digest32), + Inner(Blake3Digest32), + Global, +} + +impl Default for OverlayId { + fn default() -> Self { + OverlayId::Outer([0;32]) + } +} + +impl fmt::Display for OverlayId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut ser = serde_bare::to_vec(&self).unwrap(); + ser.reverse(); + write!(f, "{}", base64_url::encode(&ser)) + } +} + +impl OverlayId { + // pub fn inner_from_store(store: &Store) -> OverlayId { + // Self::inner(store.id(), store.get_store_overlay_branch_readcap_secret()) + // } + pub fn inner( + store_id: &PubKey, + store_overlay_branch_readcap_secret: &ReadCapSecret, + ) -> OverlayId { + let store_id = serde_bare::to_vec(store_id).unwrap(); + let mut store_overlay_branch_readcap_secret_ser = + serde_bare::to_vec(store_overlay_branch_readcap_secret).unwrap(); + let mut key: [u8; 32] = blake3::derive_key( + "NextGraph Overlay ReadCapSecret BLAKE3 key", + store_overlay_branch_readcap_secret_ser.as_slice(), + ); + let key_hash = blake3::keyed_hash(&key, &store_id); + store_overlay_branch_readcap_secret_ser.zeroize(); + key.zeroize(); + OverlayId::Inner(*key_hash.as_bytes()) + } + + pub fn outer(store_id: &PubKey) -> OverlayId { + let store_id = serde_bare::to_vec(store_id).unwrap(); + let d: Digest = (&store_id).into(); + OverlayId::Outer(d.to_slice()) + } + #[cfg(any(test, feature = "testing"))] + pub fn dummy() -> OverlayId { + OverlayId::Outer(Digest::dummy().to_slice()) + } + pub fn nil() -> OverlayId { + OverlayId::Outer(Digest::nil().to_slice()) + } + + pub fn is_inner(&self) -> bool { + match self { + Self::Inner(_) => true, + _ => false, + } + } + + pub fn is_outer(&self) -> bool { + match self { + Self::Outer(_) => true, + _ => false, + } + } +} + +/// List of Store Overlay types +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum StoreOverlayV0 { + PublicStore(PubKey), + ProtectedStore(PubKey), + PrivateStore(PubKey), + Group(PubKey), + Dialog(Digest), +} + +impl fmt::Display for StoreOverlayV0 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "StoreOverlay V0 ")?; + match self { + StoreOverlayV0::PublicStore(k) => writeln!(f, "PublicStore: {}", k), + StoreOverlayV0::ProtectedStore(k) => writeln!(f, "ProtectedStore: {}", k), + StoreOverlayV0::PrivateStore(k) => writeln!(f, "PrivateStore: {}", k), + StoreOverlayV0::Group(k) => writeln!(f, "Group: {}", k), + StoreOverlayV0::Dialog(k) => writeln!(f, "Dialog: {}", k), + } + } +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum StoreOverlay { + V0(StoreOverlayV0), + OwnV0(StoreOverlayV0), // The repo is a store, so the overlay can be derived from its own ID. In this case, the branchId of the `overlay` branch is entered here as PubKey of the StoreOverlayV0 variants. 
+} + +impl fmt::Display for StoreOverlay { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => writeln!(f, "{}", v0), + Self::OwnV0(v0) => writeln!(f, "Own: {}", v0), + } + } +} + +impl StoreOverlay { + pub fn from_store_repo(store_repo: &StoreRepo, overlay_branch: BranchId) -> StoreOverlay { + match store_repo { + StoreRepo::V0(v0) => match v0 { + StoreRepoV0::PublicStore(_id) => { + StoreOverlay::V0(StoreOverlayV0::PublicStore(overlay_branch)) + } + StoreRepoV0::ProtectedStore(_id) => { + StoreOverlay::V0(StoreOverlayV0::ProtectedStore(overlay_branch)) + } + StoreRepoV0::PrivateStore(_id) => { + StoreOverlay::V0(StoreOverlayV0::PrivateStore(overlay_branch)) + } + StoreRepoV0::Group(_id) => StoreOverlay::V0(StoreOverlayV0::Group(overlay_branch)), + StoreRepoV0::Dialog((_, d)) => StoreOverlay::V0(StoreOverlayV0::Dialog(d.clone())), + }, + } + } + + pub fn overlay_id_for_read_purpose(&self) -> OverlayId { + match self { + StoreOverlay::V0(StoreOverlayV0::PublicStore(id)) + | StoreOverlay::V0(StoreOverlayV0::ProtectedStore(id)) + | StoreOverlay::V0(StoreOverlayV0::PrivateStore(id)) + | StoreOverlay::V0(StoreOverlayV0::Group(id)) => OverlayId::outer(id), + StoreOverlay::V0(StoreOverlayV0::Dialog(d)) => OverlayId::Inner(d.clone().to_slice()), + StoreOverlay::OwnV0(_) => unimplemented!(), + } + } + + pub fn overlay_id_for_write_purpose( + &self, + store_overlay_branch_readcap_secret: ReadCapSecret, + ) -> OverlayId { + match self { + StoreOverlay::V0(StoreOverlayV0::PublicStore(id)) + | StoreOverlay::V0(StoreOverlayV0::ProtectedStore(id)) + | StoreOverlay::V0(StoreOverlayV0::PrivateStore(id)) + | StoreOverlay::V0(StoreOverlayV0::Group(id)) => { + OverlayId::inner(id, &store_overlay_branch_readcap_secret) + } + StoreOverlay::V0(StoreOverlayV0::Dialog(d)) => OverlayId::Inner(d.clone().to_slice()), + StoreOverlay::OwnV0(_) => unimplemented!(), + } + } +} + +impl From<&StoreRepo> for StoreOverlay { + fn from(store_repo: &StoreRepo) -> Self { + match store_repo { + StoreRepo::V0(v0) => match v0 { + StoreRepoV0::PublicStore(id) => { + StoreOverlay::V0(StoreOverlayV0::PublicStore(id.clone())) + } + StoreRepoV0::ProtectedStore(id) => { + StoreOverlay::V0(StoreOverlayV0::ProtectedStore(id.clone())) + } + StoreRepoV0::PrivateStore(id) => { + StoreOverlay::V0(StoreOverlayV0::PrivateStore(id.clone())) + } + StoreRepoV0::Group(id) => StoreOverlay::V0(StoreOverlayV0::Group(id.clone())), + StoreRepoV0::Dialog((_, d)) => StoreOverlay::V0(StoreOverlayV0::Dialog(d.clone())), + }, + } + } +} + +/// List of Store Root Repo types +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum StoreRepoV0 { + PublicStore(RepoId), + ProtectedStore(RepoId), + PrivateStore(RepoId), + Group(RepoId), + Dialog((RepoId, Digest)), +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum StoreRepo { + V0(StoreRepoV0), +} + +impl fmt::Display for StoreRepo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "StoreRepo V0 {} {}", + match self { + StoreRepo::V0(v0) => match v0 { + StoreRepoV0::PublicStore(_) => "PublicStore", + StoreRepoV0::ProtectedStore(_) => "ProtectedStore", + StoreRepoV0::PrivateStore(_) => "PrivateStore", + StoreRepoV0::Group(_) => "Group", + StoreRepoV0::Dialog(_) => "Dialog", + }, + }, + self.repo_id() + ) + } +} + +impl StoreRepo { + pub fn from_type_and_repo(store_type: &String, repo_id_str: &String) -> Result { + let repo_id: RepoId = repo_id_str.as_str().try_into()?; + 
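+
+        // Hypothetical usage sketch (not part of this patch; repo_id_string is a
+        // placeholder for a base64url-encoded RepoId):
+        //
+        //     let store = StoreRepo::from_type_and_repo(
+        //         &"public".to_string(),
+        //         &repo_id_string,
+        //     )?;
+        //     assert!(store.is_public());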
Ok(StoreRepo::V0(match store_type.as_str() { + "public" => StoreRepoV0::PublicStore(repo_id), + "protected" => StoreRepoV0::ProtectedStore(repo_id), + "private" => StoreRepoV0::PrivateStore(repo_id), + "group" => StoreRepoV0::Group(repo_id), + "dialog" | _ => unimplemented!(), + })) + } + + pub fn store_type_for_app(&self) -> String { + match self { + Self::V0(v0) => match v0 { + StoreRepoV0::PublicStore(_) => "public", + StoreRepoV0::ProtectedStore(_) => "protected", + StoreRepoV0::PrivateStore(_) => "private", + StoreRepoV0::Group(_) => "group", + StoreRepoV0::Dialog(_) => "dialog", + }, + } + .to_string() + } + + pub fn repo_id(&self) -> &RepoId { + match self { + Self::V0(v0) => match v0 { + StoreRepoV0::PublicStore(id) + | StoreRepoV0::ProtectedStore(id) + | StoreRepoV0::PrivateStore(id) + | StoreRepoV0::Group(id) + | StoreRepoV0::Dialog((id, _)) => id, + }, + } + } + #[cfg(any(test, feature = "testing"))] + #[allow(deprecated)] + pub fn dummy_public_v0() -> Self { + let store_pubkey = PubKey::nil(); + StoreRepo::V0(StoreRepoV0::PublicStore(store_pubkey)) + } + #[cfg(any(test, feature = "testing"))] + pub fn dummy_with_key(repo_pubkey: PubKey) -> Self { + StoreRepo::V0(StoreRepoV0::PublicStore(repo_pubkey)) + } + + pub fn nil() -> Self { + let store_pubkey = PubKey::nil(); + StoreRepo::V0(StoreRepoV0::PublicStore(store_pubkey)) + } + + pub fn new_private(repo_pubkey: PubKey) -> Self { + StoreRepo::V0(StoreRepoV0::PrivateStore(repo_pubkey)) + } + + pub fn outer_overlay(&self) -> OverlayId { + self.overlay_id_for_read_purpose() + } + + pub fn overlay_id_for_read_purpose(&self) -> OverlayId { + let store_overlay: StoreOverlay = self.into(); + store_overlay.overlay_id_for_read_purpose() + //OverlayId::outer(self.repo_id()) + } + + pub fn is_private(&self) -> bool { + match self { + Self::V0(StoreRepoV0::PrivateStore(_)) => true, + _ => false, + } + } + + pub fn is_public(&self) -> bool { + match self { + Self::V0(StoreRepoV0::PublicStore(_)) => true, + _ => false, + } + } + + // pub fn overlay_id_for_storage_purpose( + // &self, + // store_overlay_branch_readcap_secret: Option, + // ) -> OverlayId { + // match self { + // Self::V0(StoreRepoV0::PublicStore(id)) + // | Self::V0(StoreRepoV0::ProtectedStore(id)) + // | Self::V0(StoreRepoV0::Group(id)) + // | Self::V0(StoreRepoV0::PrivateStore(id)) => self.overlay_id_for_read_purpose(), + // Self::V0(StoreRepoV0::Dialog(d)) => OverlayId::inner( + // &d.0, + // store_overlay_branch_readcap_secret + // .expect("Dialog needs store_overlay_branch_readcap_secret"), + // ), + // } + // } + + pub fn overlay_id_for_storage_purpose(&self) -> OverlayId { + match self { + Self::V0(StoreRepoV0::PublicStore(_id)) + | Self::V0(StoreRepoV0::ProtectedStore(_id)) + | Self::V0(StoreRepoV0::Group(_id)) + | Self::V0(StoreRepoV0::PrivateStore(_id)) => self.overlay_id_for_read_purpose(), + Self::V0(StoreRepoV0::Dialog(d)) => OverlayId::Inner(d.1.clone().to_slice()), + } + } + + pub fn overlay_id_for_write_purpose( + &self, + store_overlay_branch_readcap_secret: &ReadCapSecret, + ) -> OverlayId { + match self { + Self::V0(StoreRepoV0::PublicStore(id)) + | Self::V0(StoreRepoV0::ProtectedStore(id)) + | Self::V0(StoreRepoV0::Group(id)) + | Self::V0(StoreRepoV0::PrivateStore(id)) => { + OverlayId::inner(id, store_overlay_branch_readcap_secret) + } + Self::V0(StoreRepoV0::Dialog(d)) => OverlayId::Inner(d.1.clone().to_slice()), + } + } +} + +/// Site type +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum SiteType { + Org, + 
Individual((PrivKey, ReadCap)), // the priv_key of the user, and the read_cap of the private store +} + +/// Site Store +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct SiteStore { + pub id: PubKey, + + pub store_type: SiteStoreType, +} + +/// Site Store type +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum SiteStoreType { + Public, + Protected, + Private, +} + +/// Site Name +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum SiteName { + Personal, + Name(String), +} + +/// Reduced Site (for QRcode) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct ReducedSiteV0 { + pub user_key: PrivKey, + + pub private_store_read_cap: ReadCap, + + pub core: PubKey, + pub bootstraps: Vec, +} + +// +// BLOCKS common types +// + +/// Internal node of a Merkle tree +pub type InternalNode = Vec; + +/// encrypted_content of BlockContentV0: a Merkle tree node +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum ChunkContentV0 { + /// Internal node with references to children + InternalNode(InternalNode), + + #[serde(with = "serde_bytes")] + DataChunk(Vec), +} + +/// Header of a Commit, can be embedded or as a ref +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommitHeaderV0 { + /// optional Commit Header ID + #[serde(skip)] + pub id: Option, + + /// Other objects this commit strongly depends on (ex: ADD for a REMOVE, files for an nfiles) + pub deps: Vec, + + /// dependency that is removed after this commit. used for reverts + pub ndeps: Vec, + + /// tells brokers that this is a hard snapshot and that all the ACKs and full causal past should be treated as ndeps (their body removed) + /// brokers will only perform the deletion of bodies after this commit has been ACKed by at least one subsequent commit + /// but if the next commit is a nack, the deletion is prevented. + pub compact: bool, + + /// current valid commits in head + pub acks: Vec, + + /// head commits that are invalid + pub nacks: Vec, + + /// list of Files that are referenced in this commit + pub files: Vec, + + /// list of Files that are not referenced anymore after this commit + /// the commit(s) that created the files should be in deps + pub nfiles: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum CommitHeader { + V0(CommitHeaderV0), +} + +/// Keys for the corresponding IDs contained in the Header +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommitHeaderKeysV0 { + /// Other objects this commit strongly depends on (ex: ADD for a REMOVE, files for an nfiles) + pub deps: Vec, + + // ndeps keys are not included because we don't need the keys to access the commits we will not need anymore + // the keys are in the deps of their respective subsequent commits in the DAG anyway + /// current valid commits in head + pub acks: Vec, + + /// head commits that are invalid + pub nacks: Vec, + + /// list of Files that are referenced in this commit. Exceptionally this is an ObjectRef, because + /// even if the CommitHeader is omitted, we want the Files to be openable. 
+ pub files: Vec, + // nfiles keys are not included because we don't need the keys to access the files we will not need anymore + // the keys are in the deps of the respective commits that added them anyway +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum CommitHeaderKeys { + V0(CommitHeaderKeysV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum CommitHeaderObject { + Id(ObjectId), + EncryptedContent(Vec), + None, + RandomAccess, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommitHeaderRef { + pub obj: CommitHeaderObject, + pub key: ObjectKey, +} + +impl CommitHeaderRef { + pub fn from_id_key(id: BlockId, key: ObjectKey) -> Self { + CommitHeaderRef { + obj: CommitHeaderObject::Id(id), + key, + } + } + pub fn from_content_key(content: Vec, key: ObjectKey) -> Self { + CommitHeaderRef { + obj: CommitHeaderObject::EncryptedContent(content), + key, + } + } + pub fn encrypted_content_len(&self) -> usize { + match &self.obj { + CommitHeaderObject::EncryptedContent(ec) => ec.len(), + _ => 0, + } + } +} + +/// unencrypted part of the Block +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct BlockContentV0 { + /// Reference (actually, only its ID or an embedded block if the size is small enough) + /// to a CommitHeader of the root Block of a commit that contains references to other objects (e.g. Commit deps & acks) + /// Only set if the block is a commit (and it is the root block of the Object). + /// It is an easy way to know if the Block is a commit (but be careful because some root commits can be without a header). + pub commit_header: CommitHeaderObject, + + /// Block IDs for child nodes in the Merkle tree, + /// is empty if ObjectContent fits in one block or this block is a leaf. in both cases, encrypted_content is then not empty + pub children: Vec, + + /// contains encrypted ChunkContentV0 (entirely, when fitting, or chunks of ObjectContentV0, in DataChunk) used for leaves of the Merkle tree, + /// or to store the keys of children (in InternalNode) + /// + /// Encrypted using convergent encryption with ChaCha20: + /// - convergence_key: BLAKE3 derive_key ("NextGraph Data BLAKE3 key", + /// StoreRepo + store's repo ReadCapSecret ) + /// // basically similar to the InnerOverlayId but not hashed, so that brokers cannot do "confirmation of a file" attack + /// - key: BLAKE3 keyed hash (convergence_key, plain_chunk_content) + /// - nonce: 0 + #[serde(with = "serde_bytes")] + pub encrypted_content: Vec, +} + +/// Immutable object with encrypted content +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum BlockContent { + V0(BlockContentV0), +} + +impl BlockContent { + pub fn commit_header_obj(&self) -> &CommitHeaderObject { + match self { + Self::V0(v0) => &v0.commit_header, + } + } +} + +/// Immutable block with encrypted content +/// +/// `ObjectContent` is chunked and stored as `Block`s in a Merkle tree. +/// A Block is a Merkle tree node. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct BlockV0 { + /// Block ID + #[serde(skip)] + pub id: Option, + + /// Block Key + #[serde(skip)] + pub key: Option, + + /// Header + // #[serde(skip)] + // TODO + // pub header: Option, + + /// Key needed to open the CommitHeader. 
can be omitted if the Commit is shared without its ancestors, + /// or if the block is not a root block of commit, or that commit is a root commit (first in branch) + pub commit_header_key: Option, + + pub content: BlockContent, +} + +/// Immutable block with encrypted content +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Block { + V0(BlockV0), +} + +// +// REPO IMPLEMENTATION +// + +/// Repository definition +/// +/// First commit published in root branch, signed by repository key +/// For the Root repo of a store(overlay), the convergence_key should be derived from : +/// "NextGraph Data BLAKE3 key", RepoId + RepoWriteCapSecret) +/// for a private store root repo, the RepoWriteCapSecret can be omitted +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RepositoryV0 { + /// Repo public key ID + pub id: RepoId, + + /// Verification program (WASM) + #[serde(with = "serde_bytes")] + pub verification_program: Vec, + + /// Optional serialization of a ReadBranchLink (of a rootbranch or a transactional branch), if the repository is a fork of another one. + /// then transaction branches of this new repo, will be able to reference the forked repo/branches commits as DEPS in their singleton Branch commit. + #[serde(with = "serde_bytes")] + pub fork_of: Vec, + + /// User ID who created this repo + pub creator: Option, + + // TODO: for org store root repo, should have a sig by the org priv_key, over the repoid, and a sig by this repo_priv_key over the org_id (to establish the bidirectional linking between org and store) + + // TODO: discrete doc type + // TODO: order (store, partial order, partial sign all commits,(conflict resolution strategy), total order, fsm, smart contract ) + // TODO: immutable conditions (allow_change_owners, allow_change_quorum, min_quorum, allow_inherit_perms, signers_can_be_editors, all_editors_are_signers, etc...) + /// Immutable App-specific metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +/// Repository definition +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Repository { + V0(RepositoryV0), +} + +impl fmt::Display for Repository { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "repo_id: {}", v0.id)?; + writeln!( + f, + "creator: {}", + v0.creator.map_or("None".to_string(), |c| format!("{}", c)) + )?; + Ok(()) + } + } + } +} + +/// Root Branch definition V0 +/// +/// Second commit in the root branch, signed by repository key +/// is used also to update the root branch definition when users are removed, quorum(s) are changed, repo is moved to other store. +/// In this case, it is signed by its author, and requires an additional group signature by the total_order_quorum or by the owners_quorum. +/// DEPS: Reference to the previous root branch definition commit, if it is an update +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RootBranchV0 { + /// Branch public key ID, equal to the repo_id + pub id: PubKey, + + /// Reference to the repository commit, to get the verification_program and other immutable details + pub repo: ObjectRef, + + /// Store ID the repo belongs to + /// the identity is checked by verifiers (check overlay is matching) + pub store: StoreOverlay, + + /// signature of repoId with store's partial_order signature + /// in order to verify that the store recognizes this repo as part of itself. 
+    /// only if not a store root repo itself
+    pub store_sig: Option<Sig>,
+
+    /// Pub/sub topic ID for publishing events about the root branch
+    pub topic: TopicId,
+
+    /// topic private key (a BranchWriteCapSecret), encrypted with a key derived as follows:
+    /// BLAKE3 derive_key ("NextGraph Branch WriteCap Secret BLAKE3 key",
+    ///                     RepoWriteCapSecret, TopicId, BranchId )
+    /// so that only editors of the repo can decrypt the privkey
+    /// nonce = 0
+    /// not encrypted for individual store repos.
+    #[serde(with = "serde_bytes")]
+    pub topic_privkey: Vec<u8>,
+
+    /// if set, permissions are inherited from the Store Repo.
+    /// the Option's value is a store_read_cap
+    /// (only set if this repo is not the store repo itself)
+    /// check that it matches self.store
+    /// can only be committed by an owner
+    /// it generates a new certificate
+    /// owners are not inherited from the store
+    // TODO: ReadCap or PermaCap. If it is a ReadCap, a new RootBranch commit should be published (RootCapRefresh, only read_cap changes) every time the store read cap changes.
+    /// empty for private repos, even though they all implicitly inherit perms from the private store
+    pub inherit_perms_users_and_quorum_from_store: Option<ReadCap>,
+
+    /// Quorum definition ObjectRef
+    /// TODO: ObjectKey should be encrypted with SIGNER_KEY ?
+    pub quorum: Option<ObjectRef>,
+
+    /// BEC periodic reconciliation interval. zero deactivates it
+    pub reconciliation_interval: RelTime,
+
+    // list of owners. all of them are required to sign any RootBranch that modifies the list of owners or the inherit_perms_users_and_quorum_from_store field.
+    pub owners: Vec<UserId>,
+
+    /// when the list of owners is changed, a crypto_box containing the RepoWriteCapSecret should be included here for each owner.
+    /// this should also be done at creation time, with the UserId of the first owner, except for the individual private store repo, because it doesn't have a RepoWriteCapSecret
+    /// the vector has the same order and size as the owners one. each owner finds their write_cap here.
+ pub owners_write_cap: Vec, + + /// Mutable App-specific metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +/// RootBranch definition +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RootBranch { + V0(RootBranchV0), +} + +impl RootBranch { + pub fn topic(&self) -> &TopicId { + match self { + Self::V0(v0) => &v0.topic, + } + } + pub fn repo_id(&self) -> &RepoId { + match self { + Self::V0(v0) => &v0.id, + } + } + pub fn owners(&self) -> &Vec { + match self { + Self::V0(v0) => &v0.owners, + } + } + pub fn encrypt_write_cap( + for_user: &UserId, + write_cap: &RepoWriteCapSecret, + ) -> Result, NgError> { + let ser = serde_bare::to_vec(write_cap).unwrap(); + let mut rng = crypto_box::aead::OsRng {}; + let cipher = crypto_box::seal(&mut rng, &for_user.to_dh_slice().into(), &ser) + .map_err(|_| NgError::EncryptionError)?; + Ok(cipher) + } + pub fn decrypt_write_cap( + by_user: &PrivKey, + cipher: &Vec, + ) -> Result { + let ser = crypto_box::seal_open(&(*by_user.to_dh().slice()).into(), cipher) + .map_err(|_| NgError::DecryptionError)?; + let write_cap: RepoWriteCapSecret = + serde_bare::from_slice(&ser).map_err(|_| NgError::SerializationError)?; + Ok(write_cap) + } +} + +impl fmt::Display for RootBranch { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "repo_id: {}", v0.id)?; + writeln!(f, "repo_ref: {}", v0.repo)?; + write!(f, "store: {}", v0.store)?; + writeln!( + f, + "store_sig: {}", + v0.store_sig + .as_ref() + .map_or("None".to_string(), |c| format!("{}", c)) + )?; + writeln!(f, "topic: {}", v0.topic)?; + writeln!( + f, + "inherit_perms: {}", + v0.inherit_perms_users_and_quorum_from_store + .as_ref() + .map_or("None".to_string(), |c| format!("{}", c)) + )?; + writeln!( + f, + "quorum: {}", + v0.quorum + .as_ref() + .map_or("None".to_string(), |c| format!("{}", c)) + )?; + writeln!(f, "reconciliation_interval: {}", v0.reconciliation_interval)?; + Ok(()) + } + } + } +} + +/// Quorum definition V0 +/// +/// Changed when the signers need to be updated. Signers are not necessarily editors of the repo, and they do not need to be members either, as they will be notified of RootCapRefresh anyway. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct QuorumV0 { + /// Number of signatures required for a partial order commit to be valid (threshold+1) + pub partial_order_quorum: u32, + + /// List of the users who can sign for partial order + pub partial_order_users: Vec, + + /// Number of signatures required for a total order commit to be valid (threshold+1) + pub total_order_quorum: u32, + + /// List of the users who can sign for total order + pub total_order_users: Vec, + + // TODO: + // epoch: ObjectId pointing to rootbranch commit (read_cap_id) + /// cryptographic material for Threshold signature + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +/// Quorum definition, is part of the RootBranch commit +// TODO: can it be sent in the root branch without being part of a RootBranch ? 
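+
+// Illustrative round-trip sketch (not part of this patch) for the owner write-cap
+// sealing used by RootBranch above: encrypt_write_cap seals the RepoWriteCapSecret
+// to the owner's X25519 key with crypto_box, and decrypt_write_cap opens it with
+// the matching private key (owner_pub and owner_priv are assumed here):
+//
+//     let write_cap: RepoWriteCapSecret = SymKey::random();
+//     let cipher = RootBranch::encrypt_write_cap(&owner_pub, &write_cap)?;
+//     let opened = RootBranch::decrypt_write_cap(&owner_priv, &cipher)?;
+//     assert_eq!(opened, write_cap);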
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Quorum { + V0(QuorumV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum BranchCrdt { + Graph(String), + YMap(String), + YArray(String), + YXml(String), + YText(String), + Automerge(String), + Elmer(String), + //Rdfs, + //Owl, + //Shacl, + //Shex, + None, // this is used by Overlay, Store and User BranchTypes +} + +impl BranchCrdt { + pub fn is_graph(&self) -> bool { + match self { + BranchCrdt::Graph(_) => true, + _ => false, + } + } + pub fn name(&self) -> String { + match self { + BranchCrdt::Graph(_) => "Graph", + BranchCrdt::YMap(_) => "YMap", + BranchCrdt::YArray(_) => "YArray", + BranchCrdt::YXml(_) => "YXml", + BranchCrdt::YText(_) => "YText", + BranchCrdt::Automerge(_) => "Automerge", + BranchCrdt::Elmer(_) => "Elmer", + BranchCrdt::None => panic!("BranchCrdt::None does not have a name"), + } + .to_string() + } + pub fn class(&self) -> &String { + match self { + BranchCrdt::Graph(c) + | BranchCrdt::YMap(c) + | BranchCrdt::YArray(c) + | BranchCrdt::YXml(c) + | BranchCrdt::YText(c) + | BranchCrdt::Automerge(c) + | BranchCrdt::Elmer(c) => c, + BranchCrdt::None => panic!("BranchCrdt::None does not have a class"), + } + } + pub fn from(name: String, class: String) -> Result { + Ok(match name.as_str() { + "Graph" => BranchCrdt::Graph(class), + "YMap" => BranchCrdt::YMap(class), + "YArray" => BranchCrdt::YArray(class), + "YXml" => BranchCrdt::YXml(class), + "YText" => BranchCrdt::YText(class), + "Automerge" => BranchCrdt::Automerge(class), + "Elmer" => BranchCrdt::Elmer(class), + _ => return Err(NgError::InvalidClass), + }) + } +} + +/// Branch definition +/// +/// First commit in a branch, signed by branch key +/// In case of a fork, the commit DEPS indicate +/// the previous branch heads, and the ACKS are empty. +/// +/// Can be used also to update the branch definition when users are removed +/// In this case, the total_order quorum is needed, and DEPS indicates the BranchCapRefresh commit +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct BranchV0 { + /// Branch public key ID + pub id: PubKey, + + pub crdt: BranchCrdt, + + /// Reference to the repository commit + pub repo: ObjectRef, + + /// object ID of the current root_branch commit (ReadCap), in order to keep in sync this branch with root_branch + /// The key is not provided as external readers should not be able to access the root branch definition. + /// it is only used by verifiers (who have the key already) + pub root_branch_readcap_id: ObjectId, + + /// Pub/sub topic for publishing events + pub topic: PubKey, + + /// topic private key (a BranchWriteCapSecret), encrypted with a key derived as follow + /// BLAKE3 derive_key ("NextGraph Branch WriteCap Secret BLAKE3 key", + /// RepoWriteCapSecret, TopicId, BranchId ) + /// so that only editors of the repo can decrypt the privkey + /// For individual store repo, the RepoWriteCapSecret is zero + #[serde(with = "serde_bytes")] + pub topic_privkey: Vec, + + /// optional: this branch is the result of a pull request coming from another repo. 
+ /// contains a serialization of a ReadBranchLink of a transactional branch from another repo + #[serde(with = "serde_bytes")] + pub pulled_from: Vec, + + /// App-specific metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +impl fmt::Display for Branch { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "id: {}", v0.id)?; + writeln!(f, "repo: {}", v0.repo)?; + writeln!(f, "root_branch_readcap_id: {}", v0.root_branch_readcap_id)?; + writeln!(f, "topic: {}", v0.topic)?; + writeln!(f, "topic_privkey: {:?}", v0.topic_privkey)?; + Ok(()) + } + } + } +} + +/// Branch definition +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Branch { + V0(BranchV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum BranchType { + Main, // Main is also transactional + Store, + Overlay, + User, + // special transactional branches + Chat, + Stream, + Comments, + BackLinks, + Context, + Transactional, // this could have been called OtherTransactional, but for the sake of simplicity, we use Transactional for any branch that is not the Main one. + Root, // only used for BranchInfo + //Unknown, // only used temporarily when loading a branch info from commits (Branch commit, then AddBranch commit) + Header, +} + +impl BranchType { + pub fn is_main(&self) -> bool { + match self { + Self::Main => true, + _ => false, + } + } + pub fn is_header(&self) -> bool { + match self { + Self::Header => true, + _ => false, + } + } +} + +impl fmt::Display for BranchType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "{}", + match self { + Self::Main => "Main", + Self::Header => "Header", + Self::Store => "Store", + Self::Overlay => "Overlay", + Self::User => "User", + Self::Transactional => "Transactional", + Self::Root => "Root", + Self::Chat => "Chat", + Self::Stream => "Stream", + Self::Comments => "Comments", + Self::BackLinks => "BackLinks", + Self::Context => "Context", + //Self::Ontology => "Ontology", + //Self::Unknown => "==unknown==", + } + ) + } +} + +/// Add a branch to the repository +/// +/// DEPS: if update branch: previous AddBranch commit of the same branchId +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddBranchV0 { + // the new topic_id (will be needed immediately by future readers + // in order to subscribe to the pub/sub). should be identical to the one in the Branch definition. 
+ // None if merged_in + pub topic_id: Option, + // the new branch definition commit + // (we need the ObjectKey in order to open the pub/sub Event) + // None if merged_in + pub branch_read_cap: Option, + + pub crdt: BranchCrdt, + + pub branch_id: BranchId, + + pub branch_type: BranchType, + + pub fork_of: Option, + + pub merged_in: Option, +} + +impl fmt::Display for AddBranch { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0 {}", v0.branch_type)?; + writeln!(f, "branch_id: {}", v0.branch_id)?; + if v0.topic_id.is_some() { + writeln!(f, "topic_id: {}", v0.topic_id.as_ref().unwrap())?; + } + if v0.branch_read_cap.is_some() { + writeln!( + f, + "branch_read_cap: {}", + v0.branch_read_cap.as_ref().unwrap() + )?; + } + if v0.fork_of.is_some() { + writeln!(f, "fork_of: {}", v0.fork_of.as_ref().unwrap())?; + } + if v0.merged_in.is_some() { + writeln!(f, "merged_in: {}", v0.merged_in.as_ref().unwrap())?; + } + Ok(()) + } + } + } +} + +/// Add a branch to the repository +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddBranch { + V0(AddBranchV0), +} + +pub type RemoveBranchV0 = (); + +/// Remove a branch from the repository +/// +/// DEPS: should point to the previous AddBranch. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemoveBranch { + V0(RemoveBranchV0), +} + +/// Add member to a repo +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddMemberV0 { + /// Member to add + pub member: UserId, + + /// App-specific metadata + /// (role, app level permissions, cryptographic material, etc) + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddMember { + V0(AddMemberV0), +} + +/// Remove member from a repo +/// +/// An owner cannot be removed (it cannot be added even) +/// The overlay should be refreshed if user was malicious, after the user is removed from last repo. See REFRESH_READ_CAP on store repo. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RemoveMemberV0 { + /// Member to remove + pub member: UserId, + + /// should this user be banned and prevented from being invited again by anybody else + pub banned: bool, + + /// Metadata + /// (reason, etc...) + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemoveMember { + V0(RemoveMemberV0), +} + +/// when a signing capability is removed, a new SignerCap should be committed to User branch, with the removed key set to None +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct SignerCap { + pub repo: RepoId, + + /// latest RootBranch commit or Quorum commit that defines the signing epoch + pub epoch: ObjectId, + + pub owner: Option>, + + pub total_order: Option>, + + pub partial_order: Option>, +} + +impl SignerCap { + pub fn sign_with_owner(&self, content: &[u8]) -> Result { + if let Some(key_share) = &self.owner { + Ok(key_share.sign(content)) + } else { + Err(NgError::KeyShareNotFound) + } + } +} + +/// Permissions +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub enum PermissionV0 { + Create, // Used internally by the creator at creation time. 
Not part of the permission set that can be added and removed + Owner, // used internally for owners + + // + // permissions delegated by owners and admins (all admins inherit them) + // + AddReadMember, // adds a member to the repo (AddMember). without additional perm, the user is a reader + RemoveMember, // if user has any specific perm, RemoveWritePermission, RefreshWriteCap and/or Admin permission is needed. always behind SyncSignature + AddWritePermission, // can send AddPermission that add 3 perms to other user: WriteAsync, WriteSync, and RefreshWriteCap + WriteAsync, // can send AsyncTransaction, AddFile, RemoveFile, Snapshot, optionally with AsyncSignature + WriteSync, // can send SyncTransaction, AddFile, RemoveFile, always behind SyncSignature + Compact, // can send Compact, always behind SyncSignature + RemoveWritePermission, // can send RemovePermission that remove the WriteAsync, WriteSync or RefreshWriteCap permissions from user. RefreshWriteCap will probably be needed by the user who does the RemovePermission + + AddBranch, // can send AddBranch and Branch commits + RemoveBranch, // can send removeBranch, always behind SyncSignature + ChangeName, // can send AddName and RemoveName + + RefreshReadCap, // can send RootCapRefresh or BranchCapRefresh that do not contain a write_cap, followed by UpdateRootBranch and/or UpdateBranch commits, with or without renewed topicIds. Always behind SyncSignature + RefreshWriteCap, // can send RootCapRefresh that contains a write_cap and associated BranchCapRefreshes, followed by UpdateRootBranch and associated UpdateBranch commits on all branches, with renewed topicIds and RepoWriteCapSecret. Always behind SyncSignature + + // + // permissions delegated by owners: + // + ChangeQuorum, // can add and remove Signers, change the quorum thresholds for total order and partial order. implies the RefreshReadCap perm (without changing topicids). Always behind SyncSignature + Admin, // can administer the repo: assigns perms to other user with AddPermission and RemovePermission. RemovePermission always behind SyncSignature + ChangeMainBranch, + + // other permissions. TODO: specify them more in details + Chat, // can chat + Inbox, // can read inbox + PermaShare, // can create and answer to PermaCap (PermaLink) + UpdateStore, // only for store root repo (add repo, remove repo) to the store special branch + RefreshOverlay, // Equivalent to BranchCapRefresh for the overlay special branch. +} + +/// Add permission to a member in a repo +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddPermissionV0 { + /// Member receiving the permission + pub member: UserId, + + /// Permission given to user + pub permission: PermissionV0, + + /// Metadata + /// (role, app level permissions, cryptographic material, etc) + /// if the added permission is a write one, a crypto_box containing the RepoWriteCapSecret should be included here for the member that receives the perm. 
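Several PermissionV0 comments above require the corresponding commits to always travel behind a SyncSignature. An illustrative helper (not part of the patch) that collects those variants in one place:

fn requires_sync_signature(perm: &PermissionV0) -> bool {
    // per the comments on the enum above; Admin's RemovePermission case
    // is situational and not covered by this coarse check
    matches!(
        perm,
        PermissionV0::RemoveMember
            | PermissionV0::WriteSync
            | PermissionV0::Compact
            | PermissionV0::RemoveBranch
            | PermissionV0::RefreshReadCap
            | PermissionV0::RefreshWriteCap
            | PermissionV0::ChangeQuorum
    )
}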
+ /// + /// Can be some COMMON KEY privkey encrypted with the user pubkey + /// If a PROOF for the common key is needed, should be sent here too + /// COMMON KEYS are: SHARE, INBOX, + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddPermission { + V0(AddPermissionV0), +} + +impl AddPermission { + pub fn permission_v0(&self) -> &PermissionV0 { + match self { + Self::V0(v0) => &v0.permission, + } + } +} + +/// Remove permission from a user in a repo +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RemovePermissionV0 { + /// Member to remove + pub member: UserId, + + /// Permission removed from user + pub permission: PermissionV0, + + /// Metadata + /// (reason, new cryptographic materials...) + /// If the permission was linked to a COMMON KEY, a new privkey should be generated + /// and sent to all users that still have this permission, encrypted with their respective pubkey + /// If a PROOF for the common key is needed, should be sent here too + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemovePermission { + V0(RemovePermissionV0), +} + +impl RemovePermission { + pub fn permission_v0(&self) -> &PermissionV0 { + match self { + Self::V0(v0) => &v0.permission, + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RepoNamedItemV0 { + Branch(BranchId), + Commit(ObjectRef), +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RepoNamedItem { + V0(RepoNamedItemV0), +} + +/// Add a new name in the repo that can point to a branch or a commit +/// +/// Or change the value of a name +/// DEPS: if it is a change of value: all the previous AddName commits seen for this name +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddNameV0 { + /// the name. in case of conflict, the smallest Id is taken. + pub name: String, + + /// A branch or commit + pub item: RepoNamedItem, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddName { + V0(AddNameV0), +} + +/// Remove a name from the repo, using ORset CRDT logic +/// +/// DEPS: all the AddName commits seen for this name +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RemoveNameV0 { + /// name to remove + pub name: String, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemoveName { + V0(RemoveNameV0), +} + +// +// Commits on Store branch +// + +/// Adds a repo into the store branch. +/// +/// The repo's `store` field should match the destination store +/// DEPS to the previous AddRepo commit(s) if it is an update. in this case, repo_id of the referenced rootbranch definition(s) should match +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddRepoV0 { + pub read_cap: ReadCap, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddRepo { + V0(AddRepoV0), +} + +impl AddRepo { + pub fn read_cap(&self) -> &ReadCap { + match self { + Self::V0(v0) => &v0.read_cap, + } + } +} + +/// Removes a repo from the store branch. 
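Back to AddName/RemoveName above: names follow OR-set logic, and AddNameV0 specifies that on conflict the smallest Id is taken. A sketch of that resolution rule, which applies equally to the other Add*/Remove* OR-set pairs in this file (assumes ObjectId compares bytewise through Ord; candidates holds the still-visible AddName commits for one name):

fn resolve_name(candidates: &[(ObjectId, RepoNamedItem)]) -> Option<&RepoNamedItem> {
    candidates
        .iter()
        .min_by(|(id_a, _), (id_b, _)| id_a.cmp(id_b)) // smallest commit id wins
        .map(|(_, item)| item)
}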
+/// +/// DEPS to the previous AddRepo commit(s) (ORset logic) with matching repo_id +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RemoveRepoV0 { + pub id: RepoId, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemoveRepo { + V0(RemoveRepoV0), +} + +// TODO: publish (for public site only) + +// +// Commits on User branch +// + +/// Adds a link into the user branch, so that a user can share with all its device a new Link they received. +/// +/// The repo's `store` field should not match with any store of the user. Only external repos are accepted here. +/// DEPS to the previous AddLink commit(s) if it is an update. in this case, repo_id of the referenced rootbranch definition(s) should match +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddLinkV0 { + pub read_cap: ReadCap, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddLink { + V0(AddLinkV0), +} + +/// Removes a link from the `user` branch. +/// +/// DEPS to the previous AddLink commit(s) (ORset logic) with matching repo_id +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RemoveLinkV0 { + pub id: RepoId, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemoveLink { + V0(RemoveLinkV0), +} + + +/// Adds an Inbox Capability (privkey) into the user branch, so that a user can share with all its device. +/// +/// DEPS to the previous AddInboxCap commit(s) if it is an update. in this case, repo_id should match +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddInboxCapV0 { + /// Repo the Inbox is opened for + pub repo_id: RepoId, + + /// Overlay of the repo + pub overlay: OverlayId, + + pub priv_key: PrivKey, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddInboxCap { + V0(AddInboxCapV0), +} + +impl AddInboxCap { + pub fn new_v0(repo_id: RepoId, overlay: OverlayId, priv_key: PrivKey) -> Self { + Self::V0(AddInboxCapV0{ + repo_id, + overlay, + priv_key, + metadata: vec![] + }) + } +} + +impl fmt::Display for AddInboxCap { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "repo: {:?}", v0.repo_id)?; + writeln!(f, "cap: {:?}", v0.priv_key)?; + Ok(()) + } + } + } +} + +/// Adds a SignerCap into the user branch, +/// +/// so that a user can share with all its device a new signing capability that was just created. +/// The cap's `epoch` field should be dereferenced and the user must be part of the quorum/owners. +/// DEPS to the previous AddSignerCap commit(s) if it is an update. 
in this case, repo_ids have to match, +/// and the referenced rootbranch definition(s) should have compatible causal past (the newer AddSignerCap must have a newer epoch compared to the one of the replaced cap ) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddSignerCapV0 { + pub cap: SignerCap, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddSignerCap { + V0(AddSignerCapV0), +} + +impl fmt::Display for AddSignerCap { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "cap: {:?}", v0.cap)?; + + Ok(()) + } + } + } +} + +/// Removes a SignerCap from the `user` branch. +/// +/// DEPS to the previous AddSignerCap commit(s) (ORset logic) with matching repo_id +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RemoveSignerCapV0 { + pub id: RepoId, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemoveSignerCap { + V0(RemoveSignerCapV0), +} + +/// Adds a wallet operation so all the devices can sync their locally saved wallet on disk (at the next wallet opening) +/// +/// DEPS are the last HEAD of wallet updates. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct WalletUpdateV0 { + #[serde(with = "serde_bytes")] + pub op: Vec, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum WalletUpdate { + V0(WalletUpdateV0), +} + +/// Updates the ReadCap of the public, protected sites, Group and Dialog stores of the User +/// +/// DEPS to the previous ones. +/// this is used to speedup joining the overlay of such stores, for new devices on new brokers +/// so they don't have to read the whole pub/sub of the StoreRepo in order to get the last ReadCap +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct StoreUpdateV0 { + // id of the store. + pub store: StoreRepo, + + pub store_read_cap: ReadCap, + + pub overlay_branch_read_cap: ReadCap, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum StoreUpdate { + V0(StoreUpdateV0), +} + +impl fmt::Display for StoreUpdate { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "store: {}", v0.store)?; + writeln!(f, "store_read_cap: {}", v0.store_read_cap)?; + write!( + f, + "overlay_branch_read_cap: {}", + v0.overlay_branch_read_cap + )?; + Ok(()) + } + } + } +} + +// +// Commits on transaction branches +// + +/// Transaction with CRDT operations +// TODO: edeps: List<(repo_id,ObjectRef)> +// TODO: rcpts: List +pub type TransactionV0 = Vec; + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Transaction { + #[serde(with = "serde_bytes")] + V0(TransactionV0), +} + +impl Transaction { + pub fn body_type(&self) -> u8 { + match self { + Self::V0(v0) => v0[0], + } + } +} + +/// Add a new binary file in a branch +/// +/// FILES: the file ObjectRef +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct AddFileV0 { + /// an optional name. 
does not conflict (not unique across the branch nor repo) + pub name: Option, + + /// Metadata + #[serde(with = "serde_bytes")] + pub metadata: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AddFile { + V0(AddFileV0), +} + +impl fmt::Display for AddFile { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "name: {:?}", v0.name) + } + } + } +} + +impl AddFile { + pub fn name(&self) -> &Option { + match self { + Self::V0(v0) => &v0.name, + } + } +} + +/// Remove a file from the branch, using ORset CRDT logic +/// +/// (removes the ref counting. not necessarily the file itself) +/// NFILES: the file ObjectRef +/// DEPS: all the visible AddFile commits in the branch (ORset) +pub type RemoveFileV0 = (); + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RemoveFile { + V0(RemoveFileV0), +} + +/// Snapshot of a Branch +/// +/// Contains a data structure +/// computed from the commits at the specified head. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct SnapshotV0 { + // Branch heads the snapshot was made from + pub heads: Vec, + + /// Reference to Object containing Snapshot data structure + pub content: ObjectRef, +} + +/// Snapshot of a Branch +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Snapshot { + V0(SnapshotV0), +} + +impl Snapshot { + pub fn snapshot_ref(&self) -> &ObjectRef { + match self { + Self::V0(v0) => &v0.content, + } + } +} + +impl fmt::Display for Snapshot { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0\r\nheads:")?; + for h in v0.heads.iter() { + writeln!(f, "{h}")?; + } + writeln!(f, "content: {}", v0.content)?; + Ok(()) + } + } + } +} + +/// Compact: Hard Snapshot of a Branch +/// +/// Contains a data structure +/// computed from the commits at the specified head. +/// ACKS contains the head the snapshot was made from +/// +/// hard snapshot will erase all the CommitBody of ancestors in the branch +/// the compact boolean should be set in the Header too. +/// after a hard snapshot, it is recommended to refresh the read capability (to empty the topics of the keys they still hold) +/// If a branch is based on a hard snapshot, it cannot be merged back into the branch where the hard snapshot was made. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CompactV0 { + // Branch heads the snapshot was made from, can be useful when shared outside and the commit_header_key is set to None. otherwise it is redundant to ACKS + pub heads: Vec, + + // optional serialization of a ReadBranchLink, if the snapshot is made from another repo. + #[serde(with = "serde_bytes")] + pub origin: Vec, + + /// Reference to Object containing Snapshot data structure + pub content: ObjectRef, +} + +/// Snapshot of a Branch +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Compact { + V0(CompactV0), +} + +// Async Threshold Signature of a commit (or commits) V0 based on the partial order quorum +// +// Can sign Transaction, AddFile, and Snapshot, after they have been committed to the DAG. 
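To make the Snapshot indirection above concrete, a sketch that dereferences a snapshot commit down to its JSON payload. fetch_object is a hypothetical loader that fetches and decrypts an ObjectContent from an ObjectRef, and the error variants chosen here are placeholders:

fn snapshot_json(
    snapshot: &Snapshot,
    fetch_object: impl Fn(&ObjectRef) -> Result<ObjectContent, NgError>,
) -> Result<String, NgError> {
    // ObjectContentV0::Snapshot (defined further below) carries the
    // JSON serialization (UTF-8) of the computed data structure
    match fetch_object(snapshot.snapshot_ref())? {
        ObjectContent::V0(ObjectContentV0::Snapshot(bytes)) => {
            String::from_utf8(bytes).map_err(|_| NgError::InvalidArgument)
        }
        _ => Err(NgError::InvalidArgument),
    }
}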
+// DEPS: the signed commits +// #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +// pub struct AsyncSignatureV0 { +// /// An Object containing the Threshold signature +// pub signature: ObjectRef, +// } + +/// Async Threshold Signature of a commit based on the partial order quorum +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum AsyncSignature { + V0(ObjectRef), +} + +impl AsyncSignature { + pub fn verify_(&self) -> bool { + // check that the signature object referenced here, is of type threshold_sig Partial + unimplemented!(); + } + pub fn reference(&self) -> &ObjectRef { + match self { + Self::V0(v0) => v0, + } + } +} + +impl fmt::Display for AsyncSignature { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0\r\nsignature object ref: {}", v0)?; + Ok(()) + } + } + } +} + +/// Sync Threshold Signature of one or a chain of commits . V0 +/// +/// points to the new Signature Object +/// based on the total order quorum (or owners quorum) +/// mandatory for UpdateRootBranch, UpdateBranch, some AddBranch, RemoveBranch, RemoveMember, RemovePermission, Quorum, Compact, sync Transaction, RootCapRefresh, BranchCapRefresh +/// DEPS: the last signed commit in chain +/// ACKS: previous head before the chain of signed commit(s). should be identical to the HEADS (marked as DEPS) of first commit in chain +// #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +// pub struct SyncSignatureV0 { +// /// An Object containing the Threshold signature +// pub signature: ObjectRef, +// } +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum SyncSignature { + V0(ObjectRef), +} + +impl SyncSignature { + pub fn verify_quorum(&self) -> bool { + // check that the signature object referenced here, is of type threshold_sig Total or Owner + unimplemented!(); + } + pub fn reference(&self) -> &ObjectRef { + match self { + Self::V0(v0) => v0, + } + } +} + +impl fmt::Display for SyncSignature { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::V0(v0) => { + writeln!(f, "V0")?; + writeln!(f, "{}", v0)?; + Ok(()) + } + } + } +} + +/// the second tuple member is only set when a write_cap refresh is performed, and for users that are Editor (any Member that also has at least one permission, plus all the Owners) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RefreshSecretV0(SymKey, Option); + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RefreshCapV0 { + /// an ordered list of user IDs, with their corresponding crypto_box of a RefreshSecretV0. + /// A hashed User ID for each Member (use author_digest()), Signer and Owner of the repo (except the one that is being excluded, if any) + /// the ordering is important as it allows receivers to perform a binary search on the array (searching for their own ID) + /// the refresh secret is used for encrypting the SyncSignature commit's key in the event sent in old topic (RefreshSecretV0.0) and for an optional write_cap refresh (RefreshSecretV0.1) + pub refresh_secret: Vec<(Digest, serde_bytes::ByteBuf)>, +} + +/// RefreshCap +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RefreshCap { + V0(RefreshCapV0), +} + +/// RootCapRefresh. renew the capabilities of the root branch, or all transactional branches and the root_branch. +/// +/// Each branch forms its separate chain for that purpose. 
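The ordering requirement on RefreshCapV0::refresh_secret above exists precisely so that a receiver can find its own entry without a linear scan. A sketch, assuming Digest implements a bytewise Ord:

fn find_my_refresh_secret<'a>(
    cap: &'a RefreshCapV0,
    my_hashed_id: &Digest, // author_digest()-style hash of the receiver's UserId
) -> Option<&'a serde_bytes::ByteBuf> {
    cap.refresh_secret
        .binary_search_by(|(digest, _)| digest.cmp(my_hashed_id))
        .ok()
        .map(|i| &cap.refresh_secret[i].1)
}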
+/// can refresh the topic ids, or not +/// ACKS: current HEADS in the branch at the moment of refresh. DEPS to the previous RootBranch commit that will be superseded. +/// the chain on the root_branch is : RootCapRefresh -> RemovePermission/RemoveMember -> UpdateRootBranch -> optional AddPermission(s) -> AddBranch x for each branch +/// and on each transactional branch: BranchCapRefresh -> UpdateBranch +/// always eventually followed at the end of each chain by a SyncSignature (each branch its own). +/// The key used in EventV0 to encrypt the key for that SyncSignature commit is the refresh_secret (RefreshSecretV0.0). +/// +/// On each new topic, the first commit (singleton) is a BranchCapRefreshed that contains internal references to the old branch (but no DEPS or ACKS). + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RootCapRefreshV0 { + // ObjectRef to the RefreshCap object + pub refresh_ref: ObjectRef, + + /// write cap encrypted with the refresh_secret RefreshSecretV0.1 + /// only allowed if the user has RefreshWriteCap permission + pub write_cap: Option, +} + +/// +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RootCapRefresh { + V0(RootCapRefreshV0), +} + +/// BranchCapRefresh renew the capabilities of one specific transactional branch +/// +/// ACKS: current HEADS in the branch at the moment of refresh. DEPS to the previous Branch commit that will be superseded. +/// the chain is, on the transactional branch: BranchCapRefresh -> UpdateBranch +/// if this is an isolated branch refresh (not part of a rootcaprefresh), then the root branch chain is : AddBranch (ACKS to HEADS, quorumtype:TotalOrder ) +/// always eventually followed at the end of each chain by a SyncSignature (each branch its own) +/// The key used in EventV0 to encrypt the key for that SyncSignature commit is the refresh_secret (RefreshSecretV0.0), but not on the root branch if it is an isolated branch refresh +/// +/// On the new topic, the first commit (singleton) is a BranchCapRefreshed that contains internal references to the old branch (but no DEPS or ACKS). + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct BranchCapRefreshV0 { + /// ObjectRef to the RefreshCap object (shared with a root branch and other transac branches, or specially crafted for this branch if it is an isolated branch refresh) + pub refresh_ref: ObjectRef, +} + +/// BranchCapRefresh +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum BranchCapRefresh { + V0(BranchCapRefreshV0), +} + +/// BranchCapRefreshed is a singleton in a new topic. it has no ACKS nor DEPS. +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct BranchCapRefreshedV0 { + /// reference to the previous read_cap of the branch + pub continuation_of: ReadCap, + + /// reference to the SyncSignature commit that did the refresh + pub refresh: ObjectRef, + + /// reference to the UpdateBranch/UpdateRootBranch commit within the event of the SyncSignature + pub new_read_cap: ReadCap, +} + +/// BranchCapRefreshed +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum BranchCapRefreshed { + V0(BranchCapRefreshedV0), +} + +/// A Threshold Signature content +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct SignatureContentV0 { + /// list of all the "end of chain" commit for each branch when doing a SyncSignature, or a list of arbitrary commits to sign, for AsyncSignature. 
+    pub commits: Vec<ObjectId>,
+}
+
+/// A Signature content
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum SignatureContent {
+    V0(SignatureContentV0),
+}
+
+impl SignatureContent {
+    pub fn commits(&self) -> &[ObjectId] {
+        match self {
+            Self::V0(v0) => &v0.commits,
+        }
+    }
+}
+
+impl fmt::Display for SignatureContent {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::V0(v0) => {
+                writeln!(f, "V0 == Commits: {}", v0.commits.len())?;
+                for (i, block_id) in v0.commits.iter().enumerate() {
+                    writeln!(f, "========== {:03}: {}", i, block_id)?;
+                }
+                Ok(())
+            }
+        }
+    }
+}
+
+/// A Threshold Signature and the set used to generate it
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum ThresholdSignatureV0 {
+    PartialOrder(ng_threshold_crypto::Signature),
+    TotalOrder(ng_threshold_crypto::Signature),
+    Owners(ng_threshold_crypto::Signature),
+}
+
+impl fmt::Display for ThresholdSignatureV0 {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::PartialOrder(_) => {
+                writeln!(f, "PartialOrder")
+            }
+            Self::TotalOrder(_) => {
+                writeln!(f, "TotalOrder")
+            }
+            Self::Owners(_) => {
+                writeln!(f, "Owners")
+            }
+        }
+    }
+}
+
+/// A Threshold Signature object (not a commit) containing all the information that the signers have prepared.
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub struct SignatureV0 {
+    /// the content that is signed
+    pub content: SignatureContent,
+
+    /// The threshold signature itself. It can come from 3 different sets.
+    pub threshold_sig: ThresholdSignatureV0,
+
+    /// A reference to the Certificate that should be used to verify this signature.
+    pub certificate_ref: ObjectRef,
+}
+
+impl SignatureV0 {
+    pub fn verify(&self, cert: &CertificateV0) -> Result<(), NgError> {
+        let ser = serde_bare::to_vec(&self.content).unwrap();
+        match &self.threshold_sig {
+            ThresholdSignatureV0::Owners(sig) => {
+                if !cert.get_owners_pub_key().verify(sig, &ser) {
+                    return Err(NgError::InvalidSignature);
+                }
+                return Ok(());
+            }
+            _ => unimplemented!(),
+        }
+    }
+}
+
+impl fmt::Display for Signature {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::V0(v0) => {
+                writeln!(f, "V0")?;
+                writeln!(f, "content: {}", v0.content)?;
+                writeln!(f, "threshold_sig: {}", v0.threshold_sig)?;
+                writeln!(f, "certificate_ref: {}", v0.certificate_ref)?;
+                Ok(())
+            }
+        }
+    }
+}
+
+impl Signature {
+    pub fn certificate_ref(&self) -> &ObjectRef {
+        match self {
+            Self::V0(v0) => &v0.certificate_ref,
+        }
+    }
+    pub fn signed_commits(&self) -> &[ObjectId] {
+        match self {
+            Self::V0(v0) => match &v0.content {
+                SignatureContent::V0(v0) => &v0.commits,
+            },
+        }
+    }
+}
+
+/// A Signature object (it is not a commit), referenced in AsyncSignature or SyncSignature
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum Signature {
+    V0(SignatureV0),
+}
+
+/// Enum for "orders" PKsets.
+///
+/// Can be inherited from the store, in which case it is an ObjectRef pointing to the latest Certificate of the store.
+/// Or can be 2 PublicKeys defined specially for this repo:
+/// .0 for the total_order,
+/// .1 for the partial_order (optional, as some repos are forcefully totally ordered and do not have it set).
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum OrdersPublicKeySetsV0 {
+    Store(ObjectRef),
+    Repo(
+        (
+            ng_threshold_crypto::PublicKey,
+            Option<ng_threshold_crypto::PublicKey>,
+        ),
+    ),
+    None, // the total_order quorum is not defined (yet, or anymore). there are no signers for the total_order, nor for the partial_order. The owners replace them.
+}
+
+/// A Certificate content, that will be signed by the previous certificate signers.
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub struct CertificateContentV0 {
+    /// the previous certificate in the chain of trust. Can be another Certificate, or the Repository commit's body when we are at the root of the chain of trust.
+    pub previous: ObjectRef,
+
+    /// The Commit Id of the latest RootBranch definition (= the ReadCap ID), in order to keep in sync with the options for signing.
+    /// not used for verifying (this is why the secret is not present).
+    pub readcap_id: ObjectId,
+
+    /// PublicKey used by the Owners. The verifier uses this PK if the signature was issued by the Owners.
+    pub owners_pk_set: ng_threshold_crypto::PublicKey,
+
+    /// two "orders" PublicKeys (total_order and partial_order).
+    pub orders_pk_sets: OrdersPublicKeySetsV0,
+}
+
+/// A Signature of a Certificate, with an indication of which threshold keyset or private key was used to generate it
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum CertificateSignatureV0 {
+    /// the root CertificateContentV0 is signed with the PrivKey of the Repo
+    Repo(Sig),
+    /// Any other certificate in the chain of trust is signed by the total_order quorum of the previous certificate, hence establishing the chain of trust.
+    TotalOrder(ng_threshold_crypto::Signature),
+    /// if the previous cert's total_order PKset has a threshold value of 0 or 1 (1 or 2 signers in the quorum),
+    /// then it is allowed that the next certificate (this one) will be signed by the owners PKset instead.
+    /// This is for a simple reason: if a user is removed from the list of signers in the total_order quorum,
+    /// then in those 2 cases, the excluded signer will probably not cooperate with their own exclusion, and will not sign the new certificate.
+    /// To avoid deadlocks, we allow the owners to step in and sign the new cert instead.
+    /// The Owners are also used when there is no quorum/signer defined (OrdersPublicKeySetsV0::None).
+    Owners(ng_threshold_crypto::Signature),
+    /// in case the new certificate being signed is an update on the store certificate (OrdersPublicKeySetsV0::Store(ObjectRef) has changed from the previous cert),
+    /// then the signature is in that new store certificate, and not here. Nothing else should have changed in the CertificateContent, and the validity of the new store cert has to be checked.
+    Store,
+}
+
+/// A Certificate object (not a commit) containing all the information needed to verify a signature.
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub struct CertificateV0 {
+    /// content of the certificate, which is signed here below by the previous certificate signers.
+    pub content: CertificateContentV0,
+
+    /// signature over the content.
+ pub sig: CertificateSignatureV0, +} + +impl CertificateV0 { + pub fn verify_with_repo_id(&self, repo_id: &RepoId) -> Result<(), NgError> { + let ser = serde_bare::to_vec(&self.content).unwrap(); + match self.sig { + CertificateSignatureV0::Repo(sig) => verify(&ser, sig, repo_id.clone()), + _ => Err(NgError::InvalidArgument), + } + } + pub fn get_owners_pub_key(&self) -> &ng_threshold_crypto::PublicKey { + &self.content.owners_pk_set + } +} + +/// A certificate object +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Certificate { + V0(CertificateV0), +} + +/// Commit body V0 +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum CommitBodyV0 { + // + // for root branch: + // + Repository(Repository), // singleton and should be first in root_branch + RootBranch(RootBranch), // singleton and should be second in root_branch + UpdateRootBranch(RootBranch), // total order enforced with total_order_quorum + RootCapRefresh(RootCapRefresh), // total order enforced with total_order_quorum + AddMember(AddMember), // total order enforced with total_order_quorum + RemoveMember(RemoveMember), // total order enforced with total_order_quorum + AddPermission(AddPermission), + RemovePermission(RemovePermission), + AddBranch(AddBranch), + RemoveBranch(RemoveBranch), + AddName(AddName), + RemoveName(RemoveName), + Delete(()), // signed with owners key. Deletes the repo + + // TODO? Quorum(Quorum), // changes the quorum without changing the RootBranch + + // + // For transactional branches: + // + Branch(Branch), // singleton and should be first in branch + BranchCapRefresh(BranchCapRefresh), // total order enforced with total_order_quorum + UpdateBranch(Branch), // total order enforced with total_order_quorum + Snapshot(Snapshot), // a soft snapshot + AsyncTransaction(Transaction), // partial_order + SyncTransaction(Transaction), // total_order + AddFile(AddFile), + RemoveFile(RemoveFile), + Compact(Compact), // a hard snapshot. 
total order enforced with total_order_quorum + //Merge(Merge), + //Revert(Revert), // only possible on partial order commit + AsyncSignature(AsyncSignature), + + // + // For both + // + CapRefreshed(BranchCapRefreshed), // singleton and should be first in renewed branch + SyncSignature(SyncSignature), + + // + // For store branch: + // + AddRepo(AddRepo), + RemoveRepo(RemoveRepo), + + // + // For user branch: + // + AddLink(AddLink), + RemoveLink(RemoveLink), + AddInboxCap(AddInboxCap), + AddSignerCap(AddSignerCap), + RemoveSignerCap(RemoveSignerCap), + WalletUpdate(WalletUpdate), + StoreUpdate(StoreUpdate), +} + +/// Commit body +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum CommitBody { + V0(CommitBodyV0), +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum QuorumType { + NoSigning, + PartialOrder, + TotalOrder, + Owners, + IamTheSignature, +} + +impl QuorumType { + pub fn final_consistency(&self) -> bool { + match self { + Self::TotalOrder => true, + _ => false, + } + } +} + +impl CommitBody { + pub fn get_type(&self) -> CommitType { + match self { + Self::V0(v0) => v0.get_type(), + } + } + pub fn get_signature_reference(&self) -> Option { + match self { + Self::V0(v0) => v0.get_signature_reference(), + } + } +} + +impl CommitBodyV0 { + pub fn get_type(&self) -> CommitType { + match self { + Self::Branch(_) => CommitType::Branch, + Self::BranchCapRefresh(_) => CommitType::BranchCapRefresh, + Self::UpdateBranch(_) => CommitType::UpdateBranch, + Self::Snapshot(_) => CommitType::Snapshot, + Self::AsyncTransaction(t) | Self::SyncTransaction(t) => match t.body_type() { + 0 => CommitType::TransactionGraph, + 1 => CommitType::TransactionDiscrete, + 2 => CommitType::TransactionBoth, + _ => panic!("invalid TransactionBody"), + }, + Self::AddFile(_) => CommitType::FileAdd, + Self::RemoveFile(_) => CommitType::FileRemove, + Self::Compact(_) => CommitType::Compact, + Self::AsyncSignature(_) => CommitType::AsyncSignature, + Self::CapRefreshed(_) => CommitType::CapRefreshed, + Self::SyncSignature(_) => CommitType::SyncSignature, + _ => CommitType::Other, + } + } + + pub fn get_signature_reference(&self) -> Option { + match self { + Self::AsyncSignature(s) => Some(s.reference().clone()), + Self::SyncSignature(s) => Some(s.reference().clone()), + _ => None, + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum CommitType { + TransactionGraph, + TransactionDiscrete, + TransactionBoth, + FileAdd, + FileRemove, + Snapshot, + Compact, + AsyncSignature, + SyncSignature, + Branch, + UpdateBranch, + BranchCapRefresh, + CapRefreshed, + Other, +} + +/// Content of a Commit +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommitContentV0 { + /// Commit author (a hash of UserId) + /// BLAKE3 keyed hash over UserId + /// - key: BLAKE3 derive_key ("NextGraph UserId Hash Overlay Id for Commit BLAKE3 key", overlayId) + /// hash will be different than for ForwardedPeerAdvertV0 so that core brokers dealing with public sites wont be able to correlate commits and editing peers (via common author's hash). + /// only the brokers of the authors that pin a repo for outeroverlay exposure, will be able to correlate. 
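Tying the signature machinery together: an AsyncSignature or SyncSignature commit body yields an ObjectRef to a Signature object, which in turn references the Certificate needed to verify it. A sketch of that chain (the two fetch_* closures are hypothetical loaders; SignatureV0::verify above currently only handles the Owners case):

fn verify_signature_commit(
    body: &CommitBody,
    fetch_signature: impl Fn(&ObjectRef) -> Result<Signature, NgError>,
    fetch_certificate: impl Fn(&ObjectRef) -> Result<Certificate, NgError>,
) -> Result<(), NgError> {
    let sig_ref = body
        .get_signature_reference()
        .ok_or(NgError::InvalidArgument)?; // not an Async/SyncSignature commit
    let Signature::V0(sig) = fetch_signature(&sig_ref)?;
    let Certificate::V0(cert) = fetch_certificate(&sig.certificate_ref)?;
    sig.verify(&cert)
}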
+ /// it also is a different hash than the InboxId, and the OuterOverlayId, which is good to prevent correlation when the RepoId is used as author (for Repository, RootBranch and Branch commits) + pub author: Digest, + + // Peer's sequence number + // pub seq: u64, + /// BranchId the commit belongs to (not a ref, as readers do not need to access the branch definition) + pub branch: BranchId, + + /// optional list of dependencies on some commits in the root branch that contain the write permission needed for this commit + pub perms: Vec, + + /// Keys to be able to open all the references (deps, acks, files, etc...) + pub header_keys: Option, + + /// This commit can only be accepted if signed by this quorum + pub quorum: QuorumType, + + pub timestamp: Timestamp, + + /// App-specific metadata (commit message?) + #[serde(with = "serde_bytes")] + pub metadata: Vec, + + /// reference to an Object with a CommitBody inside. + /// When the commit is reverted or erased (before compaction/snapshot), the CommitBody is deleted, creating a dangling reference + pub body: ObjectRef, +} + +/// Content of a Commit +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum CommitContent { + V0(CommitContentV0), +} + +impl CommitContent { + pub fn header_keys(&self) -> &Option { + match self { + CommitContent::V0(v0) => &v0.header_keys, + } + } + pub fn author(&self) -> &Digest { + match self { + CommitContent::V0(v0) => &v0.author, + } + } + pub fn timestamp(&self) -> Timestamp { + match self { + CommitContent::V0(v0) => v0.timestamp, + } + } + pub fn branch(&self) -> &BranchId { + match self { + CommitContent::V0(v0) => &v0.branch, + } + } + + pub fn final_consistency(&self) -> bool { + match self { + CommitContent::V0(v0) => v0.quorum.final_consistency(), + } + } + + pub fn author_digest(author: &UserId, overlay: OverlayId) -> Digest { + let author_id = serde_bare::to_vec(author).unwrap(); + let overlay_id = serde_bare::to_vec(&overlay).unwrap(); + let mut key: [u8; 32] = blake3::derive_key( + "NextGraph UserId Hash Overlay Id for Commit BLAKE3 key", + overlay_id.as_slice(), + ); + let key_hash = blake3::keyed_hash(&key, &author_id); + key.zeroize(); + Digest::from_slice(*key_hash.as_bytes()) + } +} + +/// Commit object +/// +/// Signed by member key authorized to publish this commit type +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct CommitV0 { + /// ID of containing Object + #[serde(skip)] + pub id: Option, + + /// Key of containing Object + #[serde(skip)] + pub key: Option, + + /// optional Commit Header + #[serde(skip)] + pub header: Option, + + /// optional Commit Body + #[serde(skip)] + pub body: OnceCell, + + /// optional List of blocks, including the header and body ones. First one is the ObjectId of commit. Vec is ready to be sent in Event + #[serde(skip)] + pub blocks: Vec, + + /// Commit content + pub content: CommitContent, + + /// Signature over the content (a CommitContent) by the author. 
an editor (UserId) + pub sig: Sig, +} + +/// Commit Object +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum Commit { + V0(CommitV0), +} + +/// File Object +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct SmallFileV0 { + pub content_type: String, + + #[serde(with = "serde_bytes")] + pub metadata: Vec, + + #[serde(with = "serde_bytes")] + pub content: Vec, +} + +/// A file stored in an Object +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum SmallFile { + V0(SmallFileV0), +} + +/// Random Access File Object +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct RandomAccessFileMetaV0 { + pub content_type: String, + + #[serde(with = "serde_bytes")] + pub metadata: Vec, + + pub total_size: u64, + + pub chunk_size: u32, + + pub arity: u16, + + pub depth: u8, +} + +/// A Random Access file stored in an Object +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum RandomAccessFileMeta { + V0(RandomAccessFileMetaV0), +} + +impl RandomAccessFileMeta { + pub fn arity(&self) -> u16 { + match self { + Self::V0(v0) => v0.arity, + } + } + + pub fn depth(&self) -> u8 { + match self { + Self::V0(v0) => v0.depth, + } + } + + pub fn set_depth(&mut self, depth: u8) { + match self { + Self::V0(v0) => { + v0.depth = depth; + } + } + } + + pub fn chunk_size(&self) -> u32 { + match self { + Self::V0(v0) => v0.chunk_size, + } + } + + pub fn total_size(&self) -> u64 { + match self { + Self::V0(v0) => v0.total_size, + } + } + + pub fn set_total_size(&mut self, size: u64) { + match self { + Self::V0(v0) => { + v0.total_size = size; + } + } + } + + pub fn metadata(&self) -> &Vec { + match self { + Self::V0(v0) => &v0.metadata, + } + } + + pub fn content_type(&self) -> &String { + match self { + Self::V0(v0) => &v0.content_type, + } + } +} + +/// Immutable data stored encrypted in a Merkle tree V0 +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum ObjectContentV0 { + Commit(Commit), + CommitBody(CommitBody), + CommitHeader(CommitHeader), + Quorum(Quorum), + Signature(Signature), + Certificate(Certificate), + SmallFile(SmallFile), + RandomAccessFileMeta(RandomAccessFileMeta), + RefreshCap(RefreshCap), + #[serde(with = "serde_bytes")] + Snapshot(Vec), // JSON serialization (UTF8) +} + +/// Immutable data stored encrypted in a Merkle tree +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +pub enum ObjectContent { + V0(ObjectContentV0), +} + +// +// COMMON TYPES FOR MESSAGES +// + +pub trait IObject { + fn block_ids(&self) -> Vec; + + fn id(&self) -> Option; + + fn key(&self) -> Option; +} + +pub type DirectPeerId = PubKey; + +pub type ForwardedPeerId = PubKey; + +/// Peer ID: public key of the node, or an encrypted version of it +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq)] +pub enum PeerId { + Direct(DirectPeerId), + Forwarded(ForwardedPeerId), + /// BLAKE3 keyed hash over ForwardedPeerId + /// - key: BLAKE3 derive_key ("NextGraph ForwardedPeerId Hash Overlay Id BLAKE3 key", overlayId) + ForwardedObfuscated(Digest), +} + +impl fmt::Display for PeerId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Direct(p) => { + write!(f, "Direct : {}", p) + } + Self::Forwarded(p) => { + write!(f, "Forwarded : {}", p) + } + Self::ForwardedObfuscated(p) => { + write!(f, "ForwardedObfuscated : {}", p) + } + } + } +} + +impl PeerId { + pub fn get_pub_key(&self) -> PubKey { + match self { + Self::Direct(pk) | Self::Forwarded(pk) => 
pk.clone(),
+            _ => panic!("cannot get a pubkey for ForwardedObfuscated"),
+        }
+    }
+}
+
+/// Content of EventV0
+///
+/// Contains the objects of a newly published Commit, its optional blocks, and optional FILES and their blocks.
+/// If a block is not present in the Event, its ID should be present in file_ids and the block should be put on the emitting broker beforehand with BlocksPut.
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct EventContentV0 {
+    /// Pub/sub topic
+    pub topic: TopicId,
+
+    // TODO: could be obfuscated (or not, if we want to be able to recall events)
+    // on public repos, should be obfuscated
+    pub publisher: PeerId,
+
+    /// Commit sequence number of publisher
+    pub seq: u64,
+
+    /// Blocks with encrypted content. First in the list is always the commit block, followed by its children, then its optional header and body blocks (and their eventual children).
+    /// Blocks of the FILES are optional (only sent here if the user specifically wants to push them to the pub/sub).
+    /// The first block in the list MUST contain a commit_header_key.
+    /// When the event is saved locally (the broker keeps the associated event until the topic is refreshed; the last heads retain their events),
+    /// this `blocks` list is emptied (as the blocks are saved in the overlay storage anyway) and their IDs are kept on the side.
+    /// Then, when the event needs to be sent in reply to a *TopicSyncReq, the blocks list is regenerated from the IDs,
+    /// so that a valid EventContent can be sent (and so that its signature can be verified successfully).
+    pub blocks: Vec<Block>,
+
+    /// Ids of additional Blocks (FILES or Objects) with encrypted content that are not to be pushed in the pub/sub.
+    /// They will be retrieved later by interested users.
+    pub file_ids: Vec<BlockId>,
+
+    /// can be:
+    /// * the encrypted key for the Commit object (the first Block in the blocks vec).
+    ///   The ObjectKey is encrypted using ChaCha20:
+    ///   - key: BLAKE3 derive_key ("NextGraph Event Commit ObjectKey ChaCha20 key",
+    ///     RepoId + BranchId + branch_secret(ReadCapSecret of the branch) + publisher)
+    ///   - nonce: commit_seq
+    /// * if it is a CertificateRefresh, both the blocks and file_ids vectors are empty.
+    ///   The key here contains an encrypted ObjectRef to the new Certificate.
+    ///   The whole ObjectRef is encrypted (including the ID) to avoid correlation of topics that would carry the same Certificate ID (belong to the same repo).
+    ///   Encrypted using ChaCha20, with:
+    ///   - key: BLAKE3 derive_key ("NextGraph Event Certificate ObjectRef ChaCha20 key",
+    ///     RepoId + BranchId + branch_secret(ReadCapSecret of the branch) + publisher).
+    ///     It is the same key as above, which is fine because the commit_seq will be different (incremented anyway).
+    ///   - nonce: commit_seq
+    #[serde(with = "serde_bytes")]
+    pub key: Vec<u8>,
+}
+
+/// Pub/sub event published in a topic
+///
+/// Forwarded along event routing table entries
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct EventV0 {
+    pub content: EventContentV0,
+
+    /// Signature over content by topic key
+    pub topic_sig: Sig,
+
+    /// Signature over content by publisher PeerID priv key
+    pub peer_sig: Sig,
+}
+
+/// Pub/sub event published in a topic
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum Event {
+    V0(EventV0),
+}
diff --git a/ng-repo/src/utils.rs b/ng-repo/src/utils.rs
new file mode 100644
index 0000000..5178c2f
--- /dev/null
+++ b/ng-repo/src/utils.rs
@@ -0,0 +1,298 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+use chacha20::cipher::{KeyIvInit, StreamCipher};
+use chacha20::ChaCha20;
+use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint};
+use ed25519_dalek::*;
+use futures::channel::mpsc;
+use rand::rngs::OsRng;
+use rand::RngCore;
+use time::{OffsetDateTime, UtcOffset};
+use web_time::{Duration, SystemTime, UNIX_EPOCH};
+use zeroize::Zeroize;
+
+use crate::errors::*;
+#[allow(unused_imports)]
+use crate::log::*;
+use crate::types::*;
+
+pub fn derive_key(context: &str, key_material: &[u8]) -> [u8; 32] {
+    blake3::derive_key(context, key_material)
+}
+
+pub fn ed_keypair_from_priv_bytes(secret_key: [u8; 32]) -> (PrivKey, PubKey) {
+    let sk = SecretKey::from_bytes(&secret_key).unwrap();
+    let pk: PublicKey = (&sk).into();
+    let pub_key = PubKey::Ed25519PubKey(pk.to_bytes());
+    let priv_key = PrivKey::Ed25519PrivKey(secret_key);
+    (priv_key, pub_key)
+}
+
+pub fn from_ed_privkey_to_dh_privkey(private: &PrivKey) -> PrivKey {
+    // SecretKey and ExpandedSecretKey are zeroized at drop
+    if let PrivKey::Ed25519PrivKey(slice) = private {
+        let ed25519_priv = SecretKey::from_bytes(slice).unwrap();
+        let exp: ExpandedSecretKey = (&ed25519_priv).into();
+        let mut exp_bytes = exp.to_bytes();
+        exp_bytes[32..].zeroize();
+        let mut bits = *slice_as_array!(&exp_bytes[0..32], [u8; 32]).unwrap();
+        bits[0] &= 248;
+        bits[31] &= 127;
+        bits[31] |= 64;
+        // PrivKey takes ownership and will zeroize on drop
+        PrivKey::X25519PrivKey(bits)
+    } else {
+        panic!("this is not an Edwards privkey")
+    }
+}
+
+/// don't forget to zeroize the string later on
+pub fn decode_key(key_string: &str) -> Result<PubKey, NgError> {
+    let mut vec = base64_url::decode(key_string).map_err(|_| NgError::InvalidKey)?;
+    vec.reverse();
+    Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
+}
+
+pub fn decode_priv_key(key_string: &str) -> Result<PrivKey, NgError> {
+    let mut vec = base64_url::decode(key_string).map_err(|_| NgError::InvalidKey)?;
+    vec.reverse();
+    Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
+}
+
+pub fn decode_sym_key(key_string: &str) -> Result<SymKey, NgError> {
+    let mut vec = base64_url::decode(key_string).map_err(|_| NgError::InvalidKey)?;
+    vec.reverse();
+    Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
+}
+
+pub fn decode_digest(key_string: &str) -> Result<Digest, NgError> {
+    let mut vec = base64_url::decode(key_string).map_err(|_| NgError::InvalidKey)?;
+    vec.reverse();
+    Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
+}
+
+pub fn decode_overlayid(id_string: &str) -> Result<OverlayId, NgError> {
+    let mut vec = base64_url::decode(id_string).map_err(|_| NgError::InvalidKey)?;
+    vec.reverse();
+    Ok(serde_bare::from_slice(&vec).map_err(|_| NgError::InvalidKey)?)
+}
+
+pub fn ed_privkey_to_ed_pubkey(privkey: &PrivKey) -> PubKey {
+    // SecretKey is zeroized on drop (3 lines below) so we are safe
+    let sk = SecretKey::from_bytes(privkey.slice()).unwrap();
+    let pk: PublicKey = (&sk).into();
+    PubKey::Ed25519PubKey(pk.to_bytes())
+}
+
+/// use with caution. it should be embedded in a zeroize struct in order to be safe
+pub fn random_key() -> [u8; 32] {
+    let mut sk = [0u8; 32];
+    let mut csprng = OsRng {};
+    csprng.fill_bytes(&mut sk);
+    sk
+}
+
+pub fn generate_null_ed_keypair() -> (PrivKey, PubKey) {
+    // we don't use zeroize because... well, it is already a zeroized privkey ;)
+    let master_key: [u8; 32] = [0; 32];
+    let sk = SecretKey::from_bytes(&master_key).unwrap();
+    let pk: PublicKey = (&sk).into();
+    let priv_key = PrivKey::Ed25519PrivKey(sk.to_bytes());
+    let pub_key = PubKey::Ed25519PubKey(pk.to_bytes());
+    (priv_key, pub_key)
+}
+
+pub fn dh_pubkey_from_ed_pubkey_slice(public: &[u8]) -> PubKey {
+    PubKey::X25519PubKey(dh_pubkey_array_from_ed_pubkey_slice(public))
+}
+
+pub fn dh_pubkey_array_from_ed_pubkey_slice(public: &[u8]) -> X25519PubKey {
+    let mut bits: [u8; 32] = [0u8; 32];
+    bits.copy_from_slice(public);
+    let compressed = CompressedEdwardsY(bits);
+    let ed_point: EdwardsPoint = compressed.decompress().unwrap();
+    //compressed.zeroize();
+    let mon_point = ed_point.to_montgomery();
+    //ed_point.zeroize();
+    let array = mon_point.to_bytes();
+    //mon_point.zeroize();
+    array
+}
+
+pub fn pubkey_privkey_to_keypair(pubkey: &PubKey, privkey: &PrivKey) -> Keypair {
+    match (privkey, pubkey) {
+        (PrivKey::Ed25519PrivKey(sk), PubKey::Ed25519PubKey(pk)) => {
+            let secret = SecretKey::from_bytes(sk).unwrap();
+            let public = PublicKey::from_bytes(pk).unwrap();
+            Keypair { secret, public }
+        }
+        (_, _) => panic!("cannot sign with Montgomery keys"),
+    }
+}
+
+pub fn keypair_from_ed(secret: SecretKey, public: PublicKey) -> (PrivKey, PubKey) {
+    let ed_priv_key = secret.to_bytes();
+    let ed_pub_key = public.to_bytes();
+    let pub_key = PubKey::Ed25519PubKey(ed_pub_key);
+    let priv_key = PrivKey::Ed25519PrivKey(ed_priv_key);
+    (priv_key, pub_key)
+}
+
+pub fn sign(
+    author_privkey: &PrivKey,
+    author_pubkey: &PubKey,
+    content: &[u8],
+) -> Result<Sig, NgError> {
+    let keypair = pubkey_privkey_to_keypair(author_pubkey, author_privkey);
+    let sig_bytes = keypair.sign(content).to_bytes();
+    // log_debug!(
+    //     "XXXX SIGN {:?} {:?} {:?}",
+    //     author_pubkey,
+    //     content.as_slice(),
+    //     sig_bytes
+    // );
+    let mut it = sig_bytes.chunks_exact(32);
+    let mut ss: Ed25519Sig = [[0; 32], [0; 32]];
+    ss[0].copy_from_slice(it.next().unwrap());
+    ss[1].copy_from_slice(it.next().unwrap());
+    Ok(Sig::Ed25519Sig(ss))
+}
+
+pub fn verify(content: &[u8], sig: Sig, pub_key: PubKey) -> Result<(), NgError> {
+    let pubkey = match pub_key {
+        PubKey::Ed25519PubKey(pk) => pk,
+        _ => panic!("cannot verify with Montgomery keys"),
+    };
+    let pk = PublicKey::from_bytes(&pubkey)?;
+    let sig_bytes = match sig {
+        Sig::Ed25519Sig(ss) => [ss[0], ss[1]].concat(),
+    };
+    let sig = ed25519_dalek::Signature::from_bytes(&sig_bytes)?;
+    Ok(pk.verify_strict(content, &sig)?)
+}
+
+pub fn generate_keypair() -> (PrivKey, PubKey) {
+    let mut csprng = OsRng {};
+    let keypair: Keypair = Keypair::generate(&mut csprng);
+    let ed_priv_key = keypair.secret.to_bytes();
+    let ed_pub_key = keypair.public.to_bytes();
+    let priv_key = PrivKey::Ed25519PrivKey(ed_priv_key);
+    let pub_key = PubKey::Ed25519PubKey(ed_pub_key);
+    (priv_key, pub_key)
+}
+
+pub fn encrypt_in_place(plaintext: &mut Vec<u8>, key: [u8; 32], nonce: [u8; 12]) {
+    let mut cipher = ChaCha20::new(&key.into(), &nonce.into());
+    cipher.apply_keystream(plaintext.as_mut_slice());
+}
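A minimal round-trip through the helpers above (an illustrative sketch): generate an Ed25519 keypair, sign some bytes, and verify the signature.

fn sign_verify_roundtrip() -> Result<(), NgError> {
    let (priv_key, pub_key) = generate_keypair();
    let content = b"hello NextGraph";
    let sig = sign(&priv_key, &pub_key, content)?;
    verify(content, sig, pub_key)
}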
+/// returns the NextGraph Timestamp of now.
+pub fn now_timestamp() -> Timestamp {
+    ((SystemTime::now()
+        .duration_since(UNIX_EPOCH)
+        .unwrap()
+        .as_secs()
+        - EPOCH_AS_UNIX_TIMESTAMP)
+        / 60)
+        .try_into()
+        .unwrap()
+}
+
+pub fn now_precise_timestamp() -> (u64, u32) {
+    let dur = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
+    (dur.as_secs(), dur.subsec_nanos())
+}
+
+/// returns a new NextGraph Timestamp equivalent to the duration after now.
+pub fn timestamp_after(duration: Duration) -> Timestamp {
+    (((SystemTime::now().duration_since(UNIX_EPOCH).unwrap() + duration).as_secs()
+        - EPOCH_AS_UNIX_TIMESTAMP)
+        / 60)
+        .try_into()
+        .unwrap()
+}
+
+/// displays the NextGraph Timestamp in UTC.
+#[cfg(not(target_arch = "wasm32"))]
+pub fn display_timestamp(ts: &Timestamp) -> String {
+    let dur =
+        Duration::from_secs(EPOCH_AS_UNIX_TIMESTAMP) + Duration::from_secs(*ts as u64 * 60u64);
+    let dt: OffsetDateTime = OffsetDateTime::UNIX_EPOCH + dur;
+    dt.format(&time::format_description::parse("[day]/[month]/[year] [hour]:[minute] UTC").unwrap())
+        .unwrap()
+}
+
+/// displays the NextGraph Timestamp in local time for the history (JS)
+pub fn display_timestamp_local(ts: Timestamp) -> String {
+    let dur = Duration::from_secs(EPOCH_AS_UNIX_TIMESTAMP) + Duration::from_secs(ts as u64 * 60u64);
+    let dt: OffsetDateTime = OffsetDateTime::UNIX_EPOCH + dur;
+    let dt = dt.to_offset(TIMEZONE_OFFSET.clone());
+    dt.format(
+        &time::format_description::parse("[day]/[month]/[year repr:last_two] [hour]:[minute]")
+            .unwrap(),
+    )
+    .unwrap()
+}
+
+use lazy_static::lazy_static;
+lazy_static! {
+    static ref TIMEZONE_OFFSET: UtcOffset = unsafe {
+        time::util::local_offset::set_soundness(time::util::local_offset::Soundness::Unsound);
+        UtcOffset::current_local_offset().unwrap()
+    };
+}
+
+pub(crate) type Receiver<T> = mpsc::UnboundedReceiver<T>;
+
+#[cfg(test)]
+mod test {
+    use crate::{
+        log::*,
+        utils::{display_timestamp_local, now_timestamp},
+    };
+
+    #[test]
+    pub fn test_time() {
+        let time = now_timestamp() + 120; // 2 hours later
+        log_info!("{}", display_timestamp_local(time));
+    }
+
+    #[test]
+    pub fn test_locales() {
+        let list = vec!["C", "c", "aa-bb-cc-dd", "aa-ff_bb.456d"];
+        let res: Vec<String> = list
+            .iter()
+            .filter_map(|lang| {
+                if *lang == "C" || *lang == "c" {
+                    None
+                } else {
+                    let mut split = lang.split('.');
+                    let code = split.next().unwrap();
+                    let code = code.replace("_", "-");
+                    let mut split = code.rsplitn(2, '-');
+                    let country = split.next().unwrap();
+                    Some(match split.next() {
+                        Some(next) => format!("{}-{}", next, country.to_uppercase()),
+                        None => country.to_string(),
+                    })
+                }
+            })
+            .collect();
+        log_debug!("{:?}", res);
+    }
+}
diff --git a/ng-repo/tests/test.jpg b/ng-repo/tests/test.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..345a6d2c1ed520e2986dd9b483cd7ab9de4dbc03
GIT binary patch
literal 29454
[29454 bytes of base85-encoded JPEG data omitted]
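Closing the loop with EventContentV0 from types.rs above: its key field holds the commit's ObjectKey encrypted with ChaCha20 under a BLAKE3-derived key, with the publisher's commit_seq as nonce. A sketch built on the encrypt_in_place helper from this utils.rs (the key-material concatenation order and the little-endian packing of the 12-byte nonce are assumptions; only the context string comes from the doc):

fn encrypt_event_commit_key(
    commit_key: &mut Vec<u8>, // serde-serialized ObjectKey of the commit block
    repo_id: &RepoId,
    branch_id: &BranchId,
    branch_secret: &ReadCapSecret,
    publisher: &PeerId,
    seq: u64, // commit sequence number of the publisher
) {
    // key material assumed: RepoId + BranchId + branch_secret + publisher
    let mut key_material = serde_bare::to_vec(repo_id).unwrap();
    key_material.extend(serde_bare::to_vec(branch_id).unwrap());
    key_material.extend(serde_bare::to_vec(branch_secret).unwrap());
    key_material.extend(serde_bare::to_vec(publisher).unwrap());
    let key = blake3::derive_key(
        "NextGraph Event Commit ObjectKey ChaCha20 key",
        key_material.as_slice(),
    );
    // nonce: commit_seq (assumed packed little-endian into the low 8 bytes)
    let mut nonce = [0u8; 12];
    nonce[..8].copy_from_slice(&seq.to_le_bytes());
    encrypt_in_place(commit_key, key, nonce);
}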
diff --git a/ng-repo/tests/test.jpg b/ng-repo/tests/test.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..345a6d2c1ed520e2986dd9b483cd7ab9de4dbc03
GIT binary patch
literal 29454
[base85-encoded binary JPEG data (29454 bytes) omitted]
z$GsM?Sad99ErKw5lj}f;{N-8l>ykLd08Y2K=Vsf}C)Cu1$z2`9eKVf8=qR{c ztvNBqTWcz?;FFy5{{Ysba1_b=knfPK%IB%Y3q;4p7pivWF(Gd zg(Hiuar1FN#wtjxs^o^<)OV^8o1C68K}7MaNOwCDJ99`yxOu~NJo8Kkw193KKME`W zo>!JOuOq%_U{al#q$r0V9t{I6#Un(!0X+2w)}4Tc;^eawP&enVYCt|5zYUVdjwlfJ zh(-W0pKf|n0<^|ZOMTQGDOdq+7jPhC;~bu}SP7(%6aiEb{nJ3f8*j(}AoGLToxprD z70&WLqNRX&`F?rD01`|J@-dEvgat1=h9L9JP|3F=Fc<)ukS2&ZQyMl~sRDvnra8mL zT!1s4wCpVi??wo9=O26Cn9*E0I8ZWAN<$<_oE$Lqt1)a?zp+KreFuCA+ z)P}%rNJIpN9oTlDRx8VL(@D9(5hp>81v`v$kF-a*LXHB0cmj|a?Uaxx=Z>{S61PKd zOwa|9X%=@OJF~~VAzYp)Vc1UYX^4!%vn=hPq z#RDG$4l(JRnm|eBeBEdfBr>SvbH@UJu-0_|$f`H=$o_P~#cz{)Zv*D^Gywz>68lKo zoQ(QX9gPUSSIhKK*WQz0OlKZe^(3+CPwP@3X%I)W5Jpb}wE#wEQ3ljev~>5T78|=N ztgLd!arEY$^c9BO7A_8c=-_(P?leO$5N9~YL+e67F^Q5j3U(ptJ!-9C*kgtPcM?Zj zQD6eazWKq&7^h$t=VBvFpXW&cNEDzPh6lAf0JvW#J5MLA05pB&>-cx3GI>>ZY*B&_ z8T6?{m6w)bRE9bo*usO;C+SImNc&TqhWf#N(cn)r8wDOh5yUX)r3tNM5-5QW+Q|p7g*{NIgEZ3~>dQ zK9m5v%3~aldTEl;B1~W9W5qj)))cnR2+5!aB!m)Aev|-t#JC@=A&I~D;1kDskSA+` z4itr6w7`mMlGt_N_ofyT&SnH~IiLyJQc$Wf+y_HI5<=^>e8ZMJ&C493aWb=|gEDise!+F_BcAz=?<`0}<~`2*53kqa^ec0V1*8=)iND zhP47Rp@{Y+W=ETAHqZeug%<= zKn$gr7TOmaf!dIZNi=b}F~$l3#~jkIAqxWk04kG=AEg6hakybHFmg%ieW~mW$LAEr zO#>Y5{Amf2G5#Fq+|m&Yj&Zb#p^@a~fF%(*T!E3-G_C@NP&S%i3>feiAoMt-A*h6~ z1QAZ)PRa`?IU}+4rk%+jEDFtt!vl_<^*4P$V%o}}yY}o4YA#jCSr7#_7lBHe@3XSdRT4eD|m zB=)8>I#~*k0KmZQNMNtOCvg}Q005h_l2;rG1#!Vy<~eh;_ssxUSOD5dEDvU&R~`Bial-9sY3YL#K!9BpChx~7T-B&{%>kYvUMCJ>K6;??N4Eid2n`-^&nAT zum)1=k<+D8*a`f^LO9NQ(QBX^GqXl;GmZ@_bqfp$5l{f}WFiLhH?xq8c=a^Ghzoh|kX}@!g`E9G7J08@mE0ONro3=Ld zouq#{k&!w&KblDg8CqZ}I0)Gzp&)hbK!^l{xFleYPSnO(B^d&MGBfj2wt~45GorTA zIvf$-ik5(A51*ZJk%5W^I1-T^ry%4GDTr9ne5{aqm0wCX0yvzz?PG;4)Y1ZIeAx>G zao>*g0Oo`g3P8y{M>(daa2X35dA?-MI~u2`!r*TO!(g7Pnm|6D{qyjEhL6f=jz#ZrjW?0%X3&t_}QUVbqVm490=m&E{ z83s<)BRf>p2xLm0PSeTjpGu4BCiM}Yem5y9IHO<`vZ%_g-1~Q?3gp3+SxIc1Wa6Y& zxhRaeBrradu1&~UTsb5TDaOTIk8wS+GuE$^W8J_QAOlmVtT>if!xk7nr6CbJ4E(&G zT4{RSP2coG5wL6$17LH{TA)Jk&I`F!1F+605cg_zy$U6#Rx+l7@9DC>yQ^1 z?t9Z)j_i^J5d4yW0UNmQNsDtXL%iTF4roflEGn+gEOKgZ)Soa!9T*Ne(Ax$%V@wmD z!kx!<1cZ?M$E7XMuXH>(1N+8=wGz~edayqy*A(?b7z9kkvG`E#JweAg+Hhz>xP>X%HHAtdgHCawTC#OnHbqf-x{m>cBLwbpXw&}1QNyk3a zl$fqiAQ##X=UNs>%_zVbz~FH}6M}wKkuq_SPU18)`9Y3XE1468Q;&aI0F@;s@&l<^p#wB({EK)KC;k#%Z zK8G|78TYm^TcbQX*_J@3b7OCObmzYy_Mm2&hTOFF=&K}_>ZFKQ4UjOyJ@Uujr2s2f zK(ey#kiX2uc*qI{M^3rOpjI6on{Ja8+anNDa=FIP*w1cxJt=_On;S%FZXKOex%-2R z5xXM*@zbt5(t3d$mN7zJLS=FkGRB{Ikl-)#Ka~Mt(`k|JWiKXMC0yfb;kK@Qd8PzU zV7^;P0G(uA!BvMYLnsFr;BtQ|ObM=Z=)_37UP7|%J1`iMNaW+7+J8C#&eUw;hB)3W z@&e2YF~&&t$EG>W0A~`TIabGN5D6PP9l)HaroehJ-LNiL^fas@IUE-xpup`+Vyw9x z!6Ouy0g;>J-NEfnz%f%j&mD0{2NH3D0A8Yi9wuQKKmZJLpYWz3rdQ>8E1vX_Sb0An zSqL7OpkpMEE<%mR*EG-*WSIMBwt9*{C(OKnN$5>9#5@6j0|UV%fkl8~znI%I&}0+p zD6#@j>gqR=9Ag8vedx7-l)Q>bVYr1H)QDrpu|X)Hbga`049Gm31KOUbOgH54N3}E? z5g9Nrcoj`{7>y$5)7Fwgu^5m5PH<|F> zgj29`PhJIAEr7Dh91oVF{e%Z@(4jiQs7m6s!PeQIV6E#Je$*t#Vynq2vbs4!dEnx$F`_lPmN#cv7&-nd z@@bC4p8DZ}D>=dp0iHVN^X7qx9n^~3gEIP%XbTK^9F-xMs5}CA6alAa8zMBUXHPVR?xDE)8U`ZDaLyTngMdNp zMeGCQx|FIUu6(_q0qKeu3-+^|?rff?Cph$^B0sb--gh`B0O0nfF`=Y(Ro>0V1d@26 z0O^A51RVQOa2fa80Z$_sqQEL65C%#1qhPS|8C8oSg(I)6E8GZ*x0FjS%1FVc7aHsW zl2-(Q*R?c&Y7vf33w7pz9de4hL}Z>hG+YRj{NOik)G!_Sbrh@wu5rqoudN6Mg&WHp zfJX*^7bkv4w_fxB=VrljOJ|{`AW2!v7Gg NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs. 
+>
+> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer-to-peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers.
+>
+> More info here [https://nextgraph.org](https://nextgraph.org)
+
+## Support
+
+Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org)
+
+And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org)
+
+## How to use the library
+
+NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/).
+
+This library is used internally by [ngcli](../ngcli/README.md), [ng-app](../ng-app/README.md) and by [nextgraph, the Rust client library](../nextgraph/README.md), which you should be using instead. It is not meant to be used by other programs as-is.
+
+## License
+
+Licensed under either of
+
+- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0)
+- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+  at your option.
+
+`SPDX-License-Identifier: Apache-2.0 OR MIT`
+
+### Contributions license
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you shall be dual licensed as above, without any
+additional terms or conditions.
+
+---
+
+NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively.
+
+
+[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg
+[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg
+[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2
+[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg
+[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT
diff --git a/ng-storage-rocksdb/build.rs b/ng-storage-rocksdb/build.rs
new file mode 100644
index 0000000..bd0dfc1
--- /dev/null
+++ b/ng-storage-rocksdb/build.rs
@@ -0,0 +1,5 @@
+fn main() {
+    if std::env::var("DOCS_RS").is_ok() {
+        println!("cargo:rustc-cfg=docsrs");
+    }
+}
diff --git a/ng-storage-rocksdb/src/block_storage.rs b/ng-storage-rocksdb/src/block_storage.rs
new file mode 100644
index 0000000..632563b
--- /dev/null
+++ b/ng-storage-rocksdb/src/block_storage.rs
@@ -0,0 +1,167 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+use std::path::Path;
+use std::thread::available_parallelism;
+
+#[allow(unused_imports)]
+use ng_rocksdb::{
+    BlockBasedOptions, ColumnFamily, ColumnFamilyDescriptor, DBCompressionType, Direction, Env,
+    ErrorKind, IteratorMode, Options, TransactionDB, TransactionDBOptions,
+};
+
+use ng_repo::block_storage::BlockStorage;
+use ng_repo::errors::StorageError;
+use ng_repo::log::*;
+use ng_repo::types::*;
+
+#[allow(dead_code)]
+pub struct RocksDbBlockStorage {
+    /// the main store where all the properties of keys are stored
+    db: TransactionDB,
+    /// path for the storage backend data
+    path: String,
+}
+
+impl RocksDbBlockStorage {
+    /// Opens the store and returns a KCVStorage object that should be kept and used to manipulate the properties
+    /// The key is the encryption key for the data at rest.
+    pub fn open<'a>(path: &Path, key: [u8; 32]) -> Result<RocksDbBlockStorage, StorageError> {
+        let mut opts = Options::default();
+        let default_parallelism_approx = available_parallelism()
+            .unwrap_or(std::num::NonZeroUsize::new(1).unwrap())
+            .get();
+        //opts.set_use_fsync(true);
+        opts.set_max_background_jobs(default_parallelism_approx as i32);
+        opts.increase_parallelism(default_parallelism_approx as i32);
+
+        // the default WAL size is CF_nbr * write_buffer_size * max_write_buffer_number * 4
+        opts.set_max_total_wal_size(256 * 1024 * 1024);
+        opts.set_write_buffer_size(64 * 1024 * 1024); // which is the default. might have to reduce this on smartphones.
+        opts.set_target_file_size_base(1024 * 1024);
+        opts.set_max_write_buffer_number(2); // the default
+        opts.set_level_zero_file_num_compaction_trigger(4); // the default
+        opts.set_max_bytes_for_level_base(16 * 1024 * 1024);
+        opts.set_target_file_size_multiplier(10);
+        opts.set_level_compaction_dynamic_level_bytes(true);
+        opts.set_num_levels(7); // the default
+
+        opts.create_if_missing(true);
+        opts.create_missing_column_families(false);
+        opts.set_enable_blob_files(true);
+        // all values are going to BlobStore
+        opts.set_min_blob_size(0);
+        // set a low value (16MB) for file_size to reduce space amplification
+        opts.set_blob_file_size(16 * 1024 * 1024);
+        // no need for compression, as the data is encrypted (it won't compress)
+        opts.set_blob_compression_type(DBCompressionType::None);
+        opts.set_enable_blob_gc(true);
+        // blob files in the oldest 75% by age will be selected for GC
+        opts.set_blob_gc_age_cutoff(0.75);
+        // in those oldest blob files, if 50% of a file (8MB) is garbage, a forced compaction will occur.
+        // this way we are reducing the space amplification by small decrements of 8MB
+        opts.set_blob_gc_force_threshold(0.5);
+
+        let mut block_based_opts = BlockBasedOptions::default();
+        // we will have a cache of decrypted objects, so there is no point in caching also the encrypted blocks.
+        block_based_opts.disable_cache();
+        block_based_opts.set_block_size(16 * 1024);
+        block_based_opts.set_bloom_filter(10.0, false);
+        block_based_opts.set_format_version(6);
+        opts.set_block_based_table_factory(&block_based_opts);
+
+        let env = Env::enc_env(key).unwrap();
+        opts.set_env(&env);
+        let tx_options = TransactionDBOptions::new();
+        let db: TransactionDB = TransactionDB::open(&opts, &tx_options, &path).map_err(|e| {
+            log_err!("{e}");
+            if e.into_string().starts_with("IO error: While lock file") {
+                StorageError::ServerAlreadyRunningInOtherProcess
+            } else {
+                StorageError::BackendError
+            }
+        })?;
+
+        log_info!(
+            "created blockstorage with Rocksdb Version: {}",
+            Env::version()
+        );
+
+        Ok(RocksDbBlockStorage {
+            db: db,
+            path: path.to_str().unwrap().to_string(),
+        })
+    }
+
+    fn compute_key(overlay: &OverlayId, id: &BlockId) -> Vec<u8> {
+        let mut key: Vec<u8> = Vec::with_capacity(34 + 33);
+        key.append(&mut serde_bare::to_vec(overlay).unwrap());
+        key.append(&mut serde_bare::to_vec(id).unwrap());
+        key
+    }
+}
+
+impl BlockStorage for RocksDbBlockStorage {
+    /// Load a block from the storage.
+    fn get(&self, overlay: &OverlayId, id: &BlockId) -> Result<Block, StorageError> {
+        let block_ser = self
+            .db
+            .get(Self::compute_key(overlay, id))
+            .map_err(|_e| StorageError::BackendError)?
+            .ok_or(StorageError::NotFound)?;
+        let block: Block = serde_bare::from_slice(&block_ser)?;
+        Ok(block)
+    }
+
+    fn has(&self, overlay: &OverlayId, id: &BlockId) -> Result<(), StorageError> {
+        let _block_ser = self
+            .db
+            .get(Self::compute_key(overlay, id))
+            .map_err(|_e| StorageError::BackendError)?
+            .ok_or(StorageError::NotFound)?;
+        Ok(())
+    }
+
+    /// Save a block to the storage.
+    fn put(&self, overlay: &OverlayId, block: &Block, lazy: bool) -> Result<BlockId, StorageError> {
+        // TODO? return an error if already present in blockstorage and !lazy ?
+        let block_id = block.id();
+        let ser = serde_bare::to_vec(block)?;
+        let tx = self.db.transaction();
+        let key = Self::compute_key(overlay, &block_id);
+        if lazy {
+            if let Some(block_ser) = tx
+                .get(key.clone())
+                .map_err(|_e| StorageError::BackendError)?
+            {
+                let block: Block = serde_bare::from_slice(&block_ser)?;
+                return Ok(block.id());
+            }
+        }
+        tx.put(key, &ser).map_err(|_e| StorageError::BackendError)?;
+        tx.commit().map_err(|_| StorageError::BackendError)?;
+        Ok(block_id)
+    }
+
+    /// Delete a block from the storage.
+    fn del(&self, overlay: &OverlayId, id: &BlockId) -> Result<usize, StorageError> {
+        let tx = self.db.transaction();
+        tx.delete(Self::compute_key(overlay, id))
+            .map_err(|_e| StorageError::BackendError)?;
+        tx.commit().map_err(|_| StorageError::BackendError)?;
+        // TODO, return real size
+        Ok(0)
+    }
+
+    /// number of Blocks in the storage
+    fn len(&self) -> Result<usize, StorageError> {
+        //TODO return number of blocks
+        Ok(0)
+    }
+}
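+
+// A minimal usage sketch of this backend (illustrative only; the path and the
+// all-zero key are made-up values): open an encrypted store, put a block under
+// an overlay, then read it back by its id.
+//
+// fn demo(overlay: &OverlayId, block: &Block) -> Result<(), StorageError> {
+//     let key: [u8; 32] = [0; 32]; // data-at-rest encryption key (use a real one)
+//     let storage = RocksDbBlockStorage::open(Path::new("/tmp/blocks"), key)?;
+//     let id = storage.put(overlay, block, false)?;
+//     let loaded = storage.get(overlay, &id)?;
+//     assert_eq!(loaded.id(), id);
+//     Ok(())
+// }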
diff --git a/ng-storage-rocksdb/src/kcv_storage.rs b/ng-storage-rocksdb/src/kcv_storage.rs
new file mode 100644
index 0000000..e78c6dc
--- /dev/null
+++ b/ng-storage-rocksdb/src/kcv_storage.rs
@@ -0,0 +1,887 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+use std::collections::HashMap;
+use std::path::Path;
+use std::path::PathBuf;
+use std::thread::available_parallelism;
+
+use ng_rocksdb::BlockBasedOptions;
+use ng_rocksdb::Cache;
+use ng_rocksdb::DBIteratorWithThreadMode;
+
+use ng_repo::errors::*;
+use ng_repo::kcv_storage::*;
+use ng_repo::log::*;
+
+#[allow(unused_imports)]
+use ng_rocksdb::{
+    ColumnFamily, ColumnFamilyDescriptor, Direction, Env, ErrorKind, IteratorMode, Options,
+    TransactionDB, TransactionDBOptions,
+};
+
+pub struct RocksdbTransaction<'a> {
+    store: &'a RocksDbKCVStorage,
+    tx: Option<ng_rocksdb::Transaction<'a, TransactionDB>>,
+}
+
+impl<'a> RocksdbTransaction<'a> {
+    fn commit(&mut self) {
+        self.tx.take().unwrap().commit().unwrap();
+    }
+    fn tx(&self) -> &ng_rocksdb::Transaction<'a, TransactionDB> {
+        self.tx.as_ref().unwrap()
+    }
+    fn get_iterator(
+        &self,
+        property_start: &[u8],
+        family: &Option<String>,
+    ) -> Result<
+        DBIteratorWithThreadMode<'_, ng_rocksdb::Transaction<'a, TransactionDB>>,
+        StorageError,
+    > {
+        Ok(match family {
+            Some(cf) => self.tx().iterator_cf(
+                self.store
+                    .db
+                    .cf_handle(&cf)
+                    .ok_or(StorageError::UnknownColumnFamily)?,
+                IteratorMode::From(property_start, Direction::Forward),
+            ),
+            None => self
+                .tx()
+                .iterator(IteratorMode::From(property_start, Direction::Forward)),
+        })
+    }
+}
+
+impl<'a> ReadTransaction for RocksdbTransaction<'a> {
+    fn get_all_keys_and_values(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<Vec<(Vec<u8>, Vec<u8>)>, StorageError> {
+        let property_start =
+            RocksDbKCVStorage::calc_key_start(prefix, key_size, &key_prefix, &suffix);
+        let iter = self.get_iterator(&property_start, &family)?;
+        self.store
+            .get_all_keys_and_values_(prefix, key_size, key_prefix, suffix, iter)
+    }
+
+    fn get_first_key_value(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<(Vec<u8>, Vec<u8>), StorageError> {
+        let property_start =
+            RocksDbKCVStorage::calc_key_start(prefix, key_size, &key_prefix, &suffix);
+        let iter = self.get_iterator(&property_start, &family)?;
+        self.store
+            .get_first_key_value_(prefix, key_size, key_prefix, suffix, iter)
+    }
+
+    fn get_all_properties_of_key(
+        &self,
+        prefix: u8,
+        key: Vec<u8>,
+        properties: Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<HashMap<u8, Vec<u8>>, StorageError> {
+        let key_size = key.len();
+        let prop_values = self.get_all_keys_and_values(prefix, key_size, key, None, family)?;
+        Ok(RocksDbKCVStorage::get_all_properties_of_key(
+            prop_values,
+            key_size,
+            &properties,
+        ))
+    }
+
+    /// Load a single value property from the store.
+    fn get(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<Vec<u8>, StorageError> {
+        let property = RocksDbKCVStorage::compute_property(prefix, key, &suffix);
+        let res = match family {
+            Some(cf) => self.tx().get_for_update_cf(
+                self.store
+                    .db
+                    .cf_handle(&cf)
+                    .ok_or(StorageError::UnknownColumnFamily)?,
+                property,
+                true,
+            ),
+            None => self.tx().get_for_update(property, true),
+        }
+        .map_err(|_e| StorageError::BackendError)?;
+        match res {
+            Some(val) => Ok(val),
+            None => Err(StorageError::NotFound),
+        }
+    }
+
+    /// Load all the values of a property from the store.
+    fn get_all(
+        &self,
+        _prefix: u8,
+        _key: &Vec<u8>,
+        _suffix: Option<u8>,
+        _family: &Option<String>,
+    ) -> Result<Vec<Vec<u8>>, StorageError> {
+        unimplemented!();
+    }
+
+    /// Check if a specific value exists for a property from the store.
+    fn has_property_value(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        let exists = self.get(prefix, key, suffix, family)?;
+        if exists.eq(value) {
+            Ok(())
+        } else {
+            Err(StorageError::DifferentValue)
+        }
+    }
+}
+
+impl<'a> WriteTransaction for RocksdbTransaction<'a> {
+    /// Save a property value to the store.
+    fn put(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        let property = RocksDbKCVStorage::compute_property(prefix, key, &suffix);
+        match family {
+            Some(cf) => self.tx().put_cf(
+                self.store
+                    .db
+                    .cf_handle(&cf)
+                    .ok_or(StorageError::UnknownColumnFamily)?,
+                property,
+                value,
+            ),
+            None => self.tx().put(property, value),
+        }
+        .map_err(|_e| StorageError::BackendError)?;
+
+        Ok(())
+    }
+
+    /// Replace the property of a key (single value) to the store.
+    fn replace(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        self.put(prefix, key, suffix, value, family)
+    }
+
+    /// Delete a property from the store.
+    fn del(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        let property = RocksDbKCVStorage::compute_property(prefix, key, &suffix);
+        let res = match family {
+            Some(cf) => self.tx().delete_cf(
+                self.store
+                    .db
+                    .cf_handle(&cf)
+                    .ok_or(StorageError::UnknownColumnFamily)?,
+                property,
+            ),
+            None => self.tx().delete(property),
+        };
+        if res.is_err() {
+            if let ErrorKind::NotFound = res.unwrap_err().kind() {
+                return Ok(());
+            }
+            return Err(StorageError::BackendError);
+        }
+        Ok(())
+    }
+
+    fn take_first_value(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<Vec<u8>, StorageError> {
+        let (key, value) = self.get_first_key_value(prefix, key_size, key_prefix, suffix, family)?;
+        let key_without_prefix = key[1..].to_vec();
+        self.del(prefix, &key_without_prefix, suffix, family)?;
+        Ok(value)
+    }
+
+    /// Delete a specific value for a property from the store.
+    fn del_property_value(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        let exists = self.get(prefix, key, suffix, family)?;
+        if exists.eq(value) {
+            self.del(prefix, key, suffix, family)
+        } else {
+            Err(StorageError::DifferentValue)
+        }
+    }
+    /// Delete all properties of a key from the store.
+    // TODO: this could be optimized with an iterator
+    fn del_all(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        all_suffixes: &[u8],
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        for suffix in all_suffixes {
+            self.del(prefix, key, Some(*suffix), family)?;
+        }
+        if all_suffixes.is_empty() {
+            self.del(prefix, key, None, family)?;
+        }
+        Ok(())
+    }
+
+    fn del_all_values(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        property_size: usize,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        let key_size = key.len() + property_size;
+        let property_start = RocksDbKCVStorage::calc_key_start(prefix, key_size, &key, &suffix);
+        let mut iter = self.get_iterator(&property_start, &family)?;
+
+        let mut vec_key_end = key.clone();
+        let mut trailing_max = vec![255u8; property_size];
+        vec_key_end.append(&mut trailing_max);
+
+        // let property_start = Self::compute_property(prefix, &vec_key_start, suffix);
+        let property_end = RocksDbKCVStorage::compute_property(
+            prefix,
+            &vec_key_end,
+            &Some(suffix.unwrap_or(255u8)),
+        );
+
+        loop {
+            let res = iter.next();
+            match res {
+                Some(Ok(val)) => {
+                    match compare(&val.0, property_end.as_slice()) {
+                        std::cmp::Ordering::Less | std::cmp::Ordering::Equal => {
+                            if suffix.is_some() {
+                                if val.0.len() < (key_size + 2)
+                                    || val.0[1 + key_size] != suffix.unwrap()
+                                {
+                                    continue;
+                                }
+                            // } else if val.0.len() > (key_size + 1) {
+                            //     continue;
+                            }
+                            self.tx()
+                                .delete(val.0)
+                                .map_err(|_| StorageError::BackendError)?;
+                        }
+                        _ => {}
+                    }
+                }
+                Some(Err(_e)) => return Err(StorageError::BackendError),
+                None => {
+                    break;
+                }
+            }
+        }
+        Ok(())
+    }
+}
+
+pub struct RocksDbKCVStorage {
+    /// the main store where all the properties of keys are stored
+    db: TransactionDB,
+    /// path for the storage backend data
+    path: String,
+
+    #[cfg(debug_assertions)]
+    pub classes: Vec<(String, Vec<u8>)>,
+}
+
+fn compare<T: Ord>(a: &[T], b: &[T]) -> std::cmp::Ordering {
+    let mut iter_b = b.iter();
+    for v in a {
+        match iter_b.next() {
+            Some(w) => match v.cmp(w) {
+                std::cmp::Ordering::Equal => continue,
+                ord => return ord,
+            },
+            None => break,
+        }
+    }
+    return a.len().cmp(&b.len());
+}
+
+impl ReadTransaction for RocksDbKCVStorage {
+    /// returns a list of (key,value) that are in the range specified in the request
+    fn get_all_keys_and_values(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<Vec<(Vec<u8>, Vec<u8>)>, StorageError> {
+        let property_start = Self::calc_key_start(prefix, key_size, &key_prefix, &suffix);
+        let iter = self.get_iterator(&property_start, &family)?;
+        self.get_all_keys_and_values_(prefix, key_size, key_prefix, suffix, iter)
+    }
+
+    fn get_first_key_value(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<(Vec<u8>, Vec<u8>), StorageError> {
+        let property_start =
+            RocksDbKCVStorage::calc_key_start(prefix, key_size, &key_prefix, &suffix);
+        let iter = self.get_iterator(&property_start, &family)?;
+        self.get_first_key_value_(prefix, key_size, key_prefix, suffix, iter)
+    }
+
+    /// returns a map of found properties and their value. If `properties` is empty, then all the properties are returned.
+    /// Otherwise, only the properties in the list are returned (if found in backend storage)
+    fn get_all_properties_of_key(
+        &self,
+        prefix: u8,
+        key: Vec<u8>,
+        properties: Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<HashMap<u8, Vec<u8>>, StorageError> {
+        let key_size = key.len();
+        let prop_values = self.get_all_keys_and_values(prefix, key_size, key, None, family)?;
+        Ok(Self::get_all_properties_of_key(
+            prop_values,
+            key_size,
+            &properties,
+        ))
+    }
+
+    /// Load a single value property from the store.
+    fn get(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<Vec<u8>, StorageError> {
+        let property = Self::compute_property(prefix, key, &suffix);
+        let res = match family {
+            Some(cf) => self.db.get_cf(
+                self.db
+                    .cf_handle(&cf)
+                    .ok_or(StorageError::UnknownColumnFamily)?,
+                property,
+            ),
+            None => self.db.get(property),
+        }
+        .map_err(|_e| StorageError::BackendError)?;
+        match res {
+            Some(val) => Ok(val),
+            None => Err(StorageError::NotFound),
+        }
+    }
+
+    /// Load all the values of a property from the store.
+    fn get_all(
+        &self,
+        _prefix: u8,
+        _key: &Vec<u8>,
+        _suffix: Option<u8>,
+        _family: &Option<String>,
+    ) -> Result<Vec<Vec<u8>>, StorageError> {
+        unimplemented!();
+    }
+
+    /// Check if a specific value exists for a property from the store.
+    fn has_property_value(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        let exists = self.get(prefix, key, suffix, family)?;
+        if exists.eq(value) {
+            Ok(())
+        } else {
+            Err(StorageError::DifferentValue)
+        }
+    }
+}
+
+impl KCVStorage for RocksDbKCVStorage {
+    fn write_transaction(
+        &self,
+        method: &mut dyn FnMut(&mut dyn WriteTransaction) -> Result<(), StorageError>,
+    ) -> Result<(), StorageError> {
+        let tx = self.db.transaction();
+
+        let mut transaction = RocksdbTransaction {
+            store: self,
+            tx: Some(tx),
+        };
+        let res = method(&mut transaction);
+        if res.is_ok() {
+            transaction.commit();
+            //lock.sync(true);
+        }
+        res
+    }
+}
+
+impl WriteTransaction for RocksDbKCVStorage {
+    /// Save a property value to the store.
+    fn put(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        self.write_transaction(&mut |tx| tx.put(prefix, key, suffix, value, family))
+    }
+
+    /// Replace the property of a key (single value) to the store.
+    fn replace(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        self.write_transaction(&mut |tx| tx.replace(prefix, key, suffix, value, family))
+    }
+
+    /// Delete a property from the store.
+    fn del(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        self.write_transaction(&mut |tx| tx.del(prefix, key, suffix, family))
+    }
+
+    fn take_first_value(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<Vec<u8>, StorageError> {
+        let mut value: Option<Vec<u8>> = None;
+        self.write_transaction(&mut |tx| {
+            let val = tx.take_first_value(prefix, key_size, key_prefix.clone(), suffix, family)?;
+            value = Some(val);
+            Ok(())
+        })?;
+        Ok(value.unwrap())
+    }
+
+    /// Delete a specific value for a property from the store.
+    fn del_property_value(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        suffix: Option<u8>,
+        value: &Vec<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        self.write_transaction(&mut |tx| tx.del_property_value(prefix, key, suffix, value, family))
+    }
+    /// Delete all properties of a key from the store.
+    fn del_all(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        all_suffixes: &[u8],
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        self.write_transaction(&mut |tx| {
+            for suffix in all_suffixes {
+                tx.del(prefix, key, Some(*suffix), family)?;
+            }
+            if all_suffixes.is_empty() {
+                tx.del(prefix, key, None, family)?;
+            }
+            Ok(())
+        })
+    }
+
+    fn del_all_values(
+        &self,
+        prefix: u8,
+        key: &Vec<u8>,
+        property_size: usize,
+        suffix: Option<u8>,
+        family: &Option<String>,
+    ) -> Result<(), StorageError> {
+        self.write_transaction(&mut |tx| {
+            tx.del_all_values(prefix, key, property_size, suffix, family)
+        })
+    }
+}
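+
+// A minimal sketch of how the two layers above compose (illustrative only;
+// the prefix, keys and values are made-up): grouping several writes in one
+// write_transaction closure makes them atomic, since the transaction is only
+// committed when the closure returns Ok.
+//
+// fn atomic_update(store: &RocksDbKCVStorage) -> Result<(), StorageError> {
+//     const PREFIX: u8 = b'x';
+//     store.write_transaction(&mut |tx| {
+//         tx.put(PREFIX, &vec![1u8, 2, 3], None, &vec![42u8], &None)?;
+//         tx.del(PREFIX, &vec![4u8, 5, 6], None, &None)?;
+//         Ok(())
+//     })
+// }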
+
+impl RocksDbKCVStorage {
+    pub fn path(&self) -> PathBuf {
+        PathBuf::from(&self.path)
+    }
+
+    fn get_all_properties_of_key(
+        prop_values: Vec<(Vec<u8>, Vec<u8>)>,
+        key_size: usize,
+        properties: &Vec<u8>,
+    ) -> HashMap<u8, Vec<u8>> {
+        let mut res = HashMap::new();
+        for prop_val in prop_values {
+            let prop = prop_val.0[1 + key_size];
+            if properties.len() > 0 && !properties.contains(&prop) {
+                continue;
+            }
+            res.insert(prop, prop_val.1);
+        }
+        res
+    }
+
+    fn get_all_keys_and_values_(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        mut iter: DBIteratorWithThreadMode<'_, impl ng_rocksdb::DBAccess>,
+    ) -> Result<Vec<(Vec<u8>, Vec<u8>)>, StorageError> {
+        if key_prefix.len() > key_size {
+            return Err(StorageError::InvalidValue);
+        }
+
+        // let mut vec_key_start = key_prefix.clone();
+        // let mut trailing_zeros = vec![0u8; key_size - key_prefix.len()];
+        // vec_key_start.append(&mut trailing_zeros);
+
+        let mut vec_key_end = key_prefix.clone();
+        let mut trailing_max = vec![255u8; key_size - key_prefix.len()];
+        vec_key_end.append(&mut trailing_max);
+
+        // let property_start = Self::compute_property(prefix, &vec_key_start, suffix);
+        let property_end =
+            Self::compute_property(prefix, &vec_key_end, &Some(suffix.unwrap_or(255u8)));
+
+        // let mut iter = match family {
+        //     Some(cf) => self.db.iterator_cf(
+        //         self.db
+        //             .cf_handle(&cf)
+        //             .ok_or(StorageError::UnknownColumnFamily)?,
+        //         IteratorMode::From(&property_start, Direction::Forward),
+        //     ),
+        //     None => self
+        //         .db
+        //         .iterator(IteratorMode::From(&property_start, Direction::Forward)),
+        // };
+        let mut vector: Vec<(Vec<u8>, Vec<u8>)> = vec![];
+        loop {
+            let res = iter.next();
+            match res {
+                Some(Ok(val)) => {
+                    //log_info!("{:?} {:?}", val.0, val.1);
+                    match compare(&val.0, property_end.as_slice()) {
+                        std::cmp::Ordering::Less | std::cmp::Ordering::Equal => {
+                            if suffix.is_some() {
+                                if val.0.len() < key_size + 2
+                                    || val.0[1 + key_size] != suffix.unwrap()
+                                {
+                                    // log_info!(
+                                    //     "SKIPPED cause suffix {} {} {} {}",
+                                    //     val.0.len(),
+                                    //     key_size + 2,
+                                    //     val.0[1 + key_size],
+                                    //     suffix.unwrap()
+                                    // );
+                                    continue;
+                                }
+                            // } else if val.0.len() > (key_size + 1) {
+                            //     continue;
+                            }
+                            vector.push((val.0.to_vec(), val.1.to_vec()));
+                        }
+                        _ => {
+                            //log_info!("SKIPPED cause above END");
+                            break;
+                        }
+                    }
+                }
+                Some(Err(_e)) => return Err(StorageError::BackendError),
+                None => {
+                    break;
+                }
+            }
+        }
+        Ok(vector)
+    }
+
+    fn get_first_key_value_(
+        &self,
+        prefix: u8,
+        key_size: usize,
+        key_prefix: Vec<u8>,
+        suffix: Option<u8>,
+        mut iter: DBIteratorWithThreadMode<'_, impl ng_rocksdb::DBAccess>,
+    ) -> Result<(Vec<u8>, Vec<u8>), StorageError> {
+        if key_prefix.len() > key_size {
+            return Err(StorageError::InvalidValue);
+        }
+
+        // let mut vec_key_start = key_prefix.clone();
+        // let mut trailing_zeros = vec![0u8; key_size - key_prefix.len()];
+        // vec_key_start.append(&mut trailing_zeros);
+
+        let mut vec_key_end = key_prefix.clone();
+        let mut trailing_max = vec![255u8; key_size - key_prefix.len()];
+        vec_key_end.append(&mut trailing_max);
+
+        // let property_start = Self::compute_property(prefix, &vec_key_start, suffix);
+        let property_end =
+            Self::compute_property(prefix, &vec_key_end, &Some(suffix.unwrap_or(255u8)));
+
+        loop {
+            let res = iter.next();
+            match res {
+                Some(Ok(val)) => {
+                    //log_info!("{:?} {:?}", val.0, val.1);
+                    match compare(&val.0, property_end.as_slice()) {
+                        std::cmp::Ordering::Less | std::cmp::Ordering::Equal => {
+                            if suffix.is_some() {
+                                if val.0.len() < key_size + 2
+                                    || val.0[1 + key_size] != suffix.unwrap()
+                                {
+                                    continue;
+                                }
+                            // } else if val.0.len() > (key_size + 1) {
+                            //     continue;
+                            }
+                            return Ok((val.0.to_vec(), val.1.to_vec()));
+                        }
+                        _ => {
+                            //log_info!("SKIPPED cause above END");
+                            break;
+                        }
+                    }
+                }
+                Some(Err(_e)) => return Err(StorageError::BackendError),
+                None => {
+                    break;
+                }
+            }
+        }
+        Err(StorageError::NotFound)
+    }
+
+    fn calc_key_start(
+        prefix: u8,
+        key_size: usize,
+        key_prefix: &Vec<u8>,
+        suffix: &Option<u8>,
+    ) -> Vec<u8> {
+        let mut vec_key_start = key_prefix.clone();
+        let mut trailing_zeros = vec![0u8; key_size - key_prefix.len()];
+        vec_key_start.append(&mut trailing_zeros);
+
+        // let mut vec_key_end = key_prefix.clone();
+        // let mut trailing_max = vec![255u8; key_size - key_prefix.len()];
+        // vec_key_end.append(&mut trailing_max);
+
+        Self::compute_property(prefix, &vec_key_start, suffix)
+    }
+
+    fn get_iterator(
+        &self,
+        property_start: &[u8],
+        family: &Option<String>,
+    ) -> Result<DBIteratorWithThreadMode<'_, TransactionDB>, StorageError> {
+        Ok(match family {
+            Some(cf) => self.db.iterator_cf(
+                self.db
+                    .cf_handle(&cf)
+                    .ok_or(StorageError::UnknownColumnFamily)?,
+                IteratorMode::From(property_start, Direction::Forward),
+            ),
+            None => self
+                .db
+                .iterator(IteratorMode::From(property_start, Direction::Forward)),
+        })
+    }
+
+    fn compute_property(prefix: u8, key: &Vec<u8>, suffix: &Option<u8>) -> Vec<u8> {
+        let mut new: Vec<u8> = Vec::with_capacity(key.len() + 2);
+        new.push(prefix);
+        new.extend(key);
+        if suffix.is_some() {
+            new.push(suffix.unwrap())
+        }
+        new
+    }
+
+    /// Opens the store and returns a KCVStorage object that should be kept and used to manipulate the properties
+    /// The key is the encryption key for the data at rest.
+    pub fn open<'a>(path: &Path, key: [u8; 32]) -> Result<RocksDbKCVStorage, StorageError> {
+        let mut opts = Options::default();
+        let default_parallelism_approx = available_parallelism()
+            .unwrap_or(std::num::NonZeroUsize::new(1).unwrap())
+            .get();
+        //opts.set_use_fsync(true);
+        opts.set_max_background_jobs(default_parallelism_approx as i32);
+        opts.increase_parallelism(default_parallelism_approx as i32);
+
+        // the default WAL size is CF_nbr * write_buffer_size * max_write_buffer_number * 4
+        // we limit it to 1GB
+        opts.set_max_total_wal_size(1024 * 1024 * 1024);
+        opts.set_write_buffer_size(64 * 1024 * 1024); // which is the default. might have to reduce this on smartphones.
+        let nbr_of_cf = 1;
+        opts.set_db_write_buffer_size(64 * 1024 * 1024 * nbr_of_cf);
+        opts.set_target_file_size_base(64 * 1024 * 1024); // the default
+        opts.set_max_write_buffer_number(2); // the default
+        opts.set_level_zero_file_num_compaction_trigger(4); // the default
+        opts.set_max_bytes_for_level_base(256 * 1024 * 1024);
+        opts.set_target_file_size_multiplier(10);
+        opts.set_level_compaction_dynamic_level_bytes(true);
+        opts.set_num_levels(7); // the default
+
+        opts.create_if_missing(true);
+        opts.create_missing_column_families(true);
+
+        let mut block_based_opts = BlockBasedOptions::default();
+        // we will have a cache of decrypted objects, so there is no point in caching also the encrypted blocks.
+        let cache = Cache::new_lru_cache(64 * 1024 * 1024);
+        block_based_opts.set_block_cache(&cache);
+        block_based_opts.set_cache_index_and_filter_blocks(true);
+        block_based_opts.set_pin_l0_filter_and_index_blocks_in_cache(true);
+        block_based_opts.set_block_size(16 * 1024);
+        block_based_opts.set_bloom_filter(10.0, false);
+        block_based_opts.set_format_version(6);
+        opts.set_block_based_table_factory(&block_based_opts);
+
+        let env = Env::enc_env(key).unwrap();
+        opts.set_env(&env);
+
+        // TODO: use open_cf and choose which column family to create/ versus using set_prefix_extractor and doing prefix seek
+
+        let tx_options = TransactionDBOptions::new();
+        let db: TransactionDB = TransactionDB::open(&opts, &tx_options, &path).map_err(|e| {
+            log_err!("{e}");
+            if e.into_string().starts_with("IO error: While lock file") {
+                StorageError::ServerAlreadyRunningInOtherProcess
+            } else {
+                StorageError::BackendError
+            }
+        })?;
+
+        log_info!(
+            "created kcv storage with Rocksdb Version: {}",
+            Env::version()
+        );
+
+        Ok(RocksDbKCVStorage {
+            db: db,
+            path: path.to_str().unwrap().to_string(),
+            #[cfg(debug_assertions)]
+            classes: Vec::new(),
+        })
+    }
+
+    #[cfg(debug_assertions)]
+    pub fn add_class(&mut self, class: &Class) {
+        class.check();
+        self.classes
+            .push((class.name.to_string(), class.prefixes()));
+    }
+
+    #[cfg(debug_assertions)]
+    pub fn check_prefixes(&self) {
+        use std::collections::HashSet;
+        //log_debug!("CHECKING PREFIXES");
+        let mut all_prefixes = HashSet::new();
+        for (class, prefixes) in self.classes.iter() {
+            //log_debug!("CHECKING CLASS {class}");
+            for prefix in prefixes {
+                //log_debug!("CHECKING PREFIX {prefix}");
+                if !all_prefixes.insert(prefix) {
+                    panic!(
+                        "duplicate prefix {} for class {class} !!! check the code",
+                        *prefix as char
+                    );
+                }
+            }
+        }
+    }
+}
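+
+// The key layout used throughout this file, as a worked example (a sketch;
+// the byte values are made-up): compute_property() concatenates
+// [prefix][key bytes...][optional suffix], and calc_key_start() / the *_end
+// bounds pad the key with 0x00 / 0xFF so a forward iterator scans exactly the
+// requested range.
+//
+//   prefix = b'p' (0x70), key = [0xAA, 0xBB], suffix = Some(b's')
+//     property    = [0x70, 0xAA, 0xBB, 0x73]
+//   key_size = 4, key_prefix = [0xAA, 0xBB], suffix = None
+//     range start = [0x70, 0xAA, 0xBB, 0x00, 0x00]
+//     range end   = [0x70, 0xAA, 0xBB, 0xFF, 0xFF, 0xFF]  // 255u8 used as the suffix bound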
diff --git a/ng-storage-rocksdb/src/lib.rs b/ng-storage-rocksdb/src/lib.rs
new file mode 100644
index 0000000..87d6bc8
--- /dev/null
+++ b/ng-storage-rocksdb/src/lib.rs
@@ -0,0 +1,5 @@
+#[cfg(all(not(target_arch = "wasm32"), not(docsrs)))]
+pub mod block_storage;
+
+#[cfg(all(not(target_arch = "wasm32"), not(docsrs)))]
+pub mod kcv_storage;
diff --git a/ng-verifier/Cargo.toml b/ng-verifier/Cargo.toml
new file mode 100644
index 0000000..73ca9ee
--- /dev/null
+++ b/ng-verifier/Cargo.toml
@@ -0,0 +1,54 @@
+[package]
+name = "ng-verifier"
+version = "0.1.2"
+description = "Verifier library of NextGraph"
+edition.workspace = true
+license.workspace = true
+authors.workspace = true
+repository.workspace = true
+homepage.workspace = true
+keywords = ["crdt","e2ee","local-first","p2p","eventual-consistency"]
+documentation.workspace = true
+rust-version.workspace = true
+build = "build.rs"
+
+[badges]
+maintenance = { status = "actively-developed" }
+
+[features]
+testing = []
+
+[dependencies]
+serde = { version = "1.0", features = ["derive"] }
+serde_bare = "0.5.0"
+serde_bytes = "0.11.7"
+serde_json = "1.0"
+rand = { version = "0.7", features = ["getrandom"] }
+web-time = "0.2.0"
+either = "1.8.1"
+futures = "0.3.24"
+lazy_static = "1.4.0"
+async-trait = "0.1.64"
+base64-url = "2.0.0"
+async-std = { version = "1.12.0", features = [ "attributes", "unstable" ] }
+automerge = "0.5.11"
+yrs = "0.19.2"
+qrcode = { version = "0.14.1", default-features = false, features = ["svg"] }
+sbbf-rs-safe = "0.3.2"
+ng-repo = { path = "../ng-repo", version = "0.1.2" }
+ng-net = { path = "../ng-net", version = "0.1.2" }
+ng-oxigraph = { path = "../ng-oxigraph", version = "0.4.0-alpha.8-ngalpha" }
+
+[target.'cfg(target_family = "wasm")'.dependencies]
+ng-oxigraph = { path = "../ng-oxigraph", version = "0.4.0-alpha.8-ngalpha", features = ["js"] }
+
+[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom]
+version = "0.3.3"
+features = ["wasm_js"]
+
+[target.'cfg(all(not(target_family = "wasm"),not(docsrs)))'.dependencies]
+ng-storage-rocksdb = { path = "../ng-storage-rocksdb", version = "0.1.2" }
+getrandom = "0.3.3"
+
+[dev-dependencies]
+ng-repo = { path = "../ng-repo", version = "0.1.2", features = ["testing"] }
\ No newline at end of file
diff --git a/ng-verifier/README.md b/ng-verifier/README.md
new file mode 100644
index 0000000..2451814
--- /dev/null
+++ b/ng-verifier/README.md
@@ -0,0 +1,58 @@
+# Verifier library of NextGraph
+
+![MSRV][rustc-image]
+[![Apache 2.0 Licensed][license-image]][license-link]
+[![MIT Licensed][license-image2]][license-link2]
+
+The verifier locally decrypts the incoming commits and builds the materialized state of the documents.
+
+It serves an API to the Apps, which can read, write and query the materialized state.
+
+This repository is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and in order to join the dev team, please visit the link above and create an account (you can do so with a github account). The [github repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues.
+
+## NextGraph
+
+> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs.
+>
+> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer-to-peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers.
+>
+> More info here [https://nextgraph.org](https://nextgraph.org)
+
+## Support
+
+Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org)
+
+And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org)
+
+## How to use the library
+
+NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/).
+
+This library is used internally by [ngd](../ngd/README.md), [ngcli](../ngcli/README.md), [ng-app](../ng-app/README.md) and by [nextgraph, the Rust client library](../nextgraph/README.md), which you should be using instead. It is not meant to be used by other programs as-is.
+
+## License
+
+Licensed under either of
+
+- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0)
+- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+  at your option.
+
+`SPDX-License-Identifier: Apache-2.0 OR MIT`
+
+### Contributions license
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you shall be dual licensed as above, without any
+additional terms or conditions.
+
+---
+
+NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively.
+ + +[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg +[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg +[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2 +[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg +[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT diff --git a/ng-verifier/build.rs b/ng-verifier/build.rs new file mode 100644 index 0000000..bd0dfc1 --- /dev/null +++ b/ng-verifier/build.rs @@ -0,0 +1,5 @@ +fn main() { + if std::env::var("DOCS_RS").is_ok() { + println!("cargo:rustc-cfg=docsrs"); + } +} diff --git a/ng-verifier/src/commits/mod.rs b/ng-verifier/src/commits/mod.rs new file mode 100644 index 0000000..068e580 --- /dev/null +++ b/ng-verifier/src/commits/mod.rs @@ -0,0 +1,728 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Verifiers for each Commit type + +pub mod transaction; + +pub mod snapshot; + +use std::collections::HashMap; +use std::sync::Arc; + +use ng_net::broker::BROKER; +use ng_repo::errors::VerifierError; +#[allow(unused_imports)] +use ng_repo::log::*; +use ng_repo::object::Object; +use ng_repo::repo::{BranchInfo, CommitInfo, Repo}; +use ng_repo::store::Store; +use ng_repo::types::*; + +use ng_net::app_protocol::*; + +use crate::verifier::Verifier; + +#[async_trait::async_trait] +pub trait CommitVerifier { + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError>; +} + +pub(crate) fn list_dep_chain_until( + start: ObjectRef, + end: &ObjectId, + store: &Store, + with_body: bool, +) -> Result, VerifierError> { + let mut res = vec![]; + let mut pos = start; + loop { + let pos_id = pos.id.clone(); + if pos_id == *end { + break; + } + let commit = Commit::load(pos, &store, with_body)?; + let deps = commit.deps(); + if deps.len() != 1 { + return Err(VerifierError::MalformedSyncSignatureDeps); + } + res.push(commit); + pos = deps[0].clone(); + } + res.reverse(); + + Ok(res) +} + +#[async_trait::async_trait] +impl CommitVerifier for RootBranch { + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + _branch_id: &BranchId, + _repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + match self { + RootBranch::V0(root_branch) => { + let repository_commit = Commit::load(root_branch.repo.clone(), &store, true)?; + let repository = match repository_commit + .body() + .ok_or(VerifierError::CommitBodyNotFound)? + { + CommitBody::V0(CommitBodyV0::Repository(r)) => r, + _ => return Err(VerifierError::InvalidRepositoryCommit), + }; + //TODO: deal with quorum_type (verify signature) + + let user_priv = verifier.user_privkey(); + let user_id = verifier.user_id(); + let repo_write_cap_secret = if store.id() == &root_branch.id && store.is_private() { + Some(SymKey::nil()) + } else if let Some(pos) = root_branch.owners.iter().position(|o| o == user_id) { + let cryptobox = &root_branch.owners_write_cap[pos]; + Some(RootBranch::decrypt_write_cap(user_priv, cryptobox)?) 
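+                    // NOTE: write-cap resolution is three-way: the root branch of
+                    // the user's own private store uses a nil secret (first case
+                    // above); an owner decrypts the secret from the owners_write_cap
+                    // entry at their index in `owners` (just above); anyone else
+                    // falls through to None below, which also leaves topic_priv_key
+                    // unset (effectively read-only).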
+ } else { + None + }; + let topic_priv_key = if let Some(rwcs) = repo_write_cap_secret.as_ref() { + Branch::decrypt_branch_write_cap_secret( + root_branch.topic_privkey.clone(), + root_branch.topic.clone(), + root_branch.id.clone(), + rwcs, + ) + .map_or(None, |k| Some(k)) + } else { + None + }; + let reference = commit.reference().unwrap(); + let root_branch = BranchInfo { + id: root_branch.id.clone(), + branch_type: BranchType::Root, + topic: Some(root_branch.topic), + topic_priv_key, + read_cap: Some(reference.clone()), + fork_of: None, + crdt: BranchCrdt::None, + merged_in: None, + current_heads: vec![reference.clone()], + commits_nbr: 1, + }; + let id = root_branch.id; + let branches = vec![(root_branch.id, root_branch)]; + let signer = verifier + .user_storage() + .and_then(|storage| storage.get_signer_cap(&id).ok()); + let inbox = verifier + .user_storage() + .and_then(|storage| storage.get_inbox_cap(&id).ok()); + let repo = Repo { + id, + repo_def: repository.clone(), + signer, + inbox, + members: HashMap::new(), + store: Arc::clone(&store), + read_cap: Some(reference), + write_cap: repo_write_cap_secret, + branches: branches.into_iter().collect(), + opened_branches: HashMap::new(), + certificate_ref: verifier.temporary_repo_certificates.remove(&id), + }; + verifier.populate_topics(&repo); + let _repo_ref = verifier.add_repo_and_save(repo); + } + } + + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for Branch { + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + _branch_id: &BranchId, + _repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + match self { + Branch::V0(branch) => { + //TODO: deal with root_branch_readcap_id (the epoch) + + //TODO: deal with quorum_type (verify signature) + + let repository_commit: Commit = Commit::load(branch.repo.clone(), &store, true)?; + + let repository = match repository_commit + .body() + .ok_or(VerifierError::CommitBodyNotFound)? + { + CommitBody::V0(CommitBodyV0::Repository(r)) => r, + _ => return Err(VerifierError::InvalidRepositoryCommit), + }; + + // check that the repository exists + let repo = verifier.get_repo_mut(repository.id(), store.get_store_repo())?; + + let topic_priv_key = if let Some(rwcs) = repo.write_cap.as_ref() { + Branch::decrypt_branch_write_cap_secret( + branch.topic_privkey.clone(), + branch.topic.clone(), + branch.id.clone(), + rwcs, + ) + .map_or(None, |k| Some(k)) + } else { + None + }; + let reference = commit.reference().unwrap(); + + let branch_info = repo.branch_mut(&branch.id)?; + if branch_info.read_cap.as_ref().unwrap() != &reference { + return Err(VerifierError::InvalidBranch); + } + branch_info.topic_priv_key = topic_priv_key; + branch_info.current_heads = vec![reference]; + + verifier.update_branch(&repository.id(), &branch.id, store.get_store_repo())?; + Ok(()) + } + } + } +} +#[async_trait::async_trait] +impl CommitVerifier for SyncSignature { + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + match self { + SyncSignature::V0(signature_ref) => { + let sign = Object::load_ref(signature_ref, &store)?; + match sign.content_v0()? 
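+                // A SyncSignature points to a Signature object: it is loaded here
+                // (cryptographic verification itself is still a TODO below), then
+                // the commit chain from deps[0] back to acks[0] is collected by
+                // list_dep_chain_until() and replayed oldest-first, so that every
+                // commit covered by the signature is verified individually.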
{ + ObjectContentV0::Signature(sig) => { + //TODO: verify signature + verifier.update_repo_certificate(repo_id, sig.certificate_ref()); + } + _ => return Err(VerifierError::InvalidSignatureObject), + } + // process each deps + let acks = commit.acks(); + if acks.len() != 1 { + return Err(VerifierError::MalformedSyncSignatureAcks); + } + let ack = &acks[0]; + let deps = commit.deps(); + if deps.len() != 1 { + return Err(VerifierError::MalformedSyncSignatureDeps); + } + let commits = list_dep_chain_until(deps[0].clone(), &ack.id, &store, true)?; + for commit in commits { + verifier + .verify_commit(&commit, branch_id, repo_id, Arc::clone(&store)) + .await?; + } + } + } + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddBranch { + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + _branch_id: &BranchId, + _repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + match self { + AddBranch::V0(v0) => { + if v0.branch_type == BranchType::Root { + return Err(VerifierError::InvalidBranch); + } + + // TODO fetch the readcap and verify that crdt and other infos in Branch definition are the same as in AddBranch commit + let branch_info = BranchInfo { + id: v0.branch_id, + branch_type: v0.branch_type.clone(), + topic: v0.topic_id, + topic_priv_key: None, + read_cap: v0.branch_read_cap.clone(), + fork_of: v0.fork_of, + merged_in: v0.merged_in, + crdt: v0.crdt.clone(), + current_heads: vec![], + commits_nbr: 0, + }; + + verifier.add_branch_and_save( + commit.branch(), + branch_info, + store.get_store_repo(), + )?; + } + } + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for Repository { + async fn verify( + &self, + _commit: &Commit, + _verifier: &mut Verifier, + _branch_id: &BranchId, + _repo_id: &RepoId, + _store: Arc, + ) -> Result<(), VerifierError> { + // left empty intentionally + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for StoreUpdate { + async fn verify( + &self, + _commit: &Commit, + verifier: &mut Verifier, + _branch_id: &BranchId, + _repo_id: &RepoId, + _store: Arc, + ) -> Result<(), VerifierError> { + verifier.new_store_from_update(self) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddInboxCap { + async fn verify( + &self, + _commit: &Commit, + verifier: &mut Verifier, + _branch_id: &BranchId, + _repo_id: &RepoId, + _store: Arc, + ) -> Result<(), VerifierError> { + match self { + AddInboxCap::V0(v0) => verifier.update_inbox_cap_v0(&v0), + } + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddSignerCap { + async fn verify( + &self, + _commit: &Commit, + verifier: &mut Verifier, + _branch_id: &BranchId, + _repo_id: &RepoId, + _store: Arc, + ) -> Result<(), VerifierError> { + match self { + AddSignerCap::V0(v0) => verifier.update_signer_cap(&v0.cap), + } + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddMember { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemoveMember { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddPermission { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut 
Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemovePermission { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemoveBranch { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddName { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemoveName { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for () { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for Snapshot { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + let repo = verifier.get_repo(repo_id, store.get_store_repo())?; + verifier + .push_app_response( + branch_id, + AppResponse::V0(AppResponseV0::Patch(AppPatch { + commit_id: commit.id().unwrap().to_string(), + commit_info: (&commit.as_info(repo)).into(), + graph: None, + discrete: None, + other: Some(OtherPatch::Snapshot(self.snapshot_ref().clone())), + })), + ) + .await; + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddFile { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + let files = commit.files(); + + if files.len() == 1 { + let refe = commit.files().remove(0); + let filename = FileName { + name: self.name().clone(), + nuri: NuriV0::object_ref(&refe), + reference: refe, + }; + let commit_id = commit.id().unwrap(); + verifier.user_storage.as_ref().unwrap().branch_add_file( + commit.id().unwrap(), + *branch_id, + filename.clone(), + )?; + let repo = verifier.get_repo(repo_id, store.get_store_repo())?; + verifier + .push_app_response( + branch_id, + AppResponse::V0(AppResponseV0::Patch(AppPatch { + commit_id: commit_id.to_string(), + commit_info: (&commit.as_info(repo)).into(), + graph: None, + discrete: None, + other: Some(OtherPatch::FileAdd(filename)), + })), + ) + .await; + Ok(()) + } else { + Err(VerifierError::InvalidCommit) + } + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemoveFile { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for Compact { + 
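+    // NOTE: like the membership, permission and naming verifiers above, the
+    // verifiers below are stubs that accept the commit without further checks;
+    // #[allow(unused_variables)] silences warnings until the real logic lands.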
#[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AsyncSignature { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + match self { + AsyncSignature::V0(signature_ref) => { + let sign = Object::load_ref(signature_ref, &store)?; + let deps: Vec = commit.deps(); + match sign.content_v0()? { + ObjectContentV0::Signature(sig) => { + //TODO: verify signature (each deps should be in the sig.signed_commits()) + + // pushing AppResponse + let repo = verifier.get_repo(repo_id, store.get_store_repo())?; + verifier + .push_app_response( + branch_id, + AppResponse::V0(AppResponseV0::Patch(AppPatch { + commit_id: commit.id().unwrap().to_string(), + commit_info: (&commit.as_info(repo)).into(), + graph: None, + discrete: None, + other: Some(OtherPatch::AsyncSignature(( + NuriV0::signature_ref(&signature_ref), + sig.signed_commits() + .iter() + .map(|c| c.to_string()) + .collect(), + ))), + })), + ) + .await; + + Ok(()) + } + _ => return Err(VerifierError::InvalidSignatureObject), + } + } + } + } +} +#[async_trait::async_trait] +impl CommitVerifier for RootCapRefresh { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for BranchCapRefresh { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddRepo { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + let broker = BROKER.read().await; + let remote = (&verifier.connected_broker).into(); + let user = Some(verifier.user_id().clone()); + let read_cap = self.read_cap(); + let overlay_id = store.overlay_id; + verifier + .load_repo_from_read_cap(read_cap, &broker, &user, &remote, store, true) + .await?; + verifier.add_doc(repo_id, &overlay_id)?; + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemoveRepo { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for AddLink { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemoveLink { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for RemoveSignerCap { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut 
Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} +#[async_trait::async_trait] +impl CommitVerifier for WalletUpdate { + #[allow(unused_variables)] + async fn verify( + &self, + commit: &Commit, + verifier: &mut Verifier, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + Ok(()) + } +} diff --git a/ng-verifier/src/commits/snapshot.rs b/ng-verifier/src/commits/snapshot.rs new file mode 100644 index 0000000..c3b0af5 --- /dev/null +++ b/ng-verifier/src/commits/snapshot.rs @@ -0,0 +1,121 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +use crate::verifier::Verifier; +use ng_net::app_protocol::NuriTargetV0; +use ng_oxigraph::oxigraph::sparql::{Query, QueryResults}; +use ng_repo::errors::{StorageError, VerifierError}; +use ng_repo::types::*; +use serde_json::json; +use yrs::types::ToJson; +use yrs::updates::decoder::Decode; +use yrs::{GetString, Transact}; + +impl Verifier { + pub(crate) fn take_snapshot( + &self, + crdt: &BranchCrdt, + branch_id: &BranchId, + target: &NuriTargetV0, + ) -> Result { + let state = match self + .user_storage + .as_ref() + .unwrap() + .branch_get_discrete_state(branch_id) + { + Ok(s) => Ok(s), + Err(StorageError::NoDiscreteState) => Ok(vec![]), + Err(e) => Err(e), + }?; + + let discrete = if state.is_empty() { + serde_json::Value::Null + } else { + match crdt { + BranchCrdt::Automerge(_) => { + let doc = automerge::Automerge::load(&state) + .map_err(|e| VerifierError::AutomergeError(e.to_string()))?; + + serde_json::json!(automerge::AutoSerde::from(&doc)) + } + BranchCrdt::YText(_) => { + let doc = yrs::Doc::new(); + let text = doc.get_or_insert_text("ng"); + let mut txn = doc.transact_mut(); + let update = yrs::Update::decode_v1(&state) + .map_err(|e| VerifierError::YrsError(e.to_string()))?; + txn.apply_update(update); + serde_json::Value::from(text.get_string(&txn)) + } + BranchCrdt::YArray(_) => { + let doc = yrs::Doc::new(); + let array = doc.get_or_insert_array("ng"); + let mut txn = doc.transact_mut(); + let update = yrs::Update::decode_v1(&state) + .map_err(|e| VerifierError::YrsError(e.to_string()))?; + txn.apply_update(update); + let mut json = String::new(); + array.to_json(&txn).to_json(&mut json); + + serde_json::from_str(&json).map_err(|_| VerifierError::InvalidJson)? + } + BranchCrdt::YMap(_) => { + let doc = yrs::Doc::new(); + let map = doc.get_or_insert_map("ng"); + let mut txn = doc.transact_mut(); + let update = yrs::Update::decode_v1(&state) + .map_err(|e| VerifierError::YrsError(e.to_string()))?; + txn.apply_update(update); + let mut json = String::new(); + map.to_json(&txn).to_json(&mut json); + serde_json::from_str(&json).map_err(|_| VerifierError::InvalidJson)? 
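+                    // Every Y* arm follows the same recipe: rebuild a fresh
+                    // yrs::Doc by applying the stored v1 update, then serialize
+                    // the root shared type. Roughly (illustrative sketch only):
+                    //     let doc = yrs::Doc::new();
+                    //     let mut txn = doc.transact_mut();
+                    //     txn.apply_update(yrs::Update::decode_v1(&state)?);
+                    // with only the root type ("ng" text/array/map, or the
+                    // "prosemirror" XML fragment below) varying per arm.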
+                }
+                BranchCrdt::YXml(_) => {
+                    // TODO: if it is markdown, output the markdown instead of XML
+                    let doc = yrs::Doc::new();
+                    let xml = doc.get_or_insert_xml_fragment("prosemirror");
+                    let mut txn = doc.transact_mut();
+                    let update = yrs::Update::decode_v1(&state)
+                        .map_err(|e| VerifierError::YrsError(e.to_string()))?;
+                    txn.apply_update(update);
+                    serde_json::json!({"xml":xml.get_string(&txn)})
+                }
+                _ => return Err(VerifierError::InvalidBranch),
+            }
+        };
+
+        let store = self.graph_dataset.as_ref().unwrap();
+        let parsed = Query::parse("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }", None).unwrap();
+        let results = store
+            .query(parsed, self.resolve_target_for_sparql(target, true)?)
+            .map_err(|e| VerifierError::OxigraphError(e.to_string()))?;
+        let results = if let QueryResults::Graph(quads) = results {
+            let mut results = Vec::with_capacity(quads.size_hint().0);
+            for quad in quads {
+                match quad {
+                    Err(e) => return Err(VerifierError::OxigraphError(e.to_string())),
+                    Ok(triple) => results.push(triple.to_string()),
+                }
+            }
+            results
+        } else {
+            return Err(VerifierError::OxigraphError(
+                "Invalid Oxigraph query result".to_string(),
+            ));
+        };
+
+        let res = json!({
+            "discrete": discrete,
+            "graph": results,
+        });
+
+        Ok(serde_json::to_string(&res).unwrap())
+    }
+}
diff --git a/ng-verifier/src/commits/transaction.rs b/ng-verifier/src/commits/transaction.rs
new file mode 100644
index 0000000..1fe7c44
--- /dev/null
+++ b/ng-verifier/src/commits/transaction.rs
@@ -0,0 +1,831 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+//!
Verifiers for AsyncTransaction Commit + +use std::collections::{HashMap, HashSet}; +use std::sync::Arc; + +use ng_oxigraph::oxigraph::storage_ng::numeric_encoder::{EncodedQuad, EncodedTerm}; +use ng_oxigraph::oxigraph::storage_ng::*; +use ng_repo::repo::Repo; +use serde::{Deserialize, Serialize}; +use yrs::updates::decoder::Decode; +use yrs::{ReadTxn, StateVector, Transact, Update}; + +use ng_net::app_protocol::*; +use ng_oxigraph::oxrdf::{ + BlankNode, GraphName, GraphNameRef, NamedNode, Quad, Subject, Term, Triple, TripleRef, +}; +use ng_repo::errors::VerifierError; +use ng_repo::log::*; +use ng_repo::store::Store; +use ng_repo::types::*; + +use crate::types::*; +use crate::verifier::Verifier; + +struct BranchUpdateInfo { + branch_id: BranchId, + branch_type: BranchType, + repo_id: RepoId, + topic_id: TopicId, + token: Digest, + overlay_id: OverlayId, + previous_heads: HashSet, + commit_id: ObjectId, + transaction: GraphTransaction, + commit_info: CommitInfoJs, +} + +impl Verifier { + pub(crate) fn add_doc( + &self, + repo_id: &RepoId, + overlay_id: &OverlayId, + ) -> Result<(), VerifierError> { + self.doc_in_store(repo_id, overlay_id, false) + } + + pub(crate) fn remove_doc( + &self, + repo_id: &RepoId, + overlay_id: &OverlayId, + ) -> Result<(), VerifierError> { + self.doc_in_store(repo_id, overlay_id, true) + } + + fn doc_in_store( + &self, + repo_id: &RepoId, + overlay_id: &OverlayId, + remove: bool, + ) -> Result<(), VerifierError> { + let ov_graphname = NamedNode::new_unchecked(NuriV0::repo_graph_name(repo_id, overlay_id)); + + let overlay_encoded = numeric_encoder::StrHash::new(&NuriV0::overlay_id(overlay_id)); + + self.graph_dataset + .as_ref() + .unwrap() + .ng_transaction( + move |mut transaction| -> Result<(), ng_oxigraph::oxigraph::store::StorageError> { + transaction.doc_in_store(ov_graphname.as_ref(), &overlay_encoded, remove) + }, + ) + .map_err(|e| VerifierError::OxigraphError(e.to_string())) + } + + pub(crate) fn add_named_commit( + &self, + repo_id: &RepoId, + overlay_id: &OverlayId, + name: String, + commit_id: ObjectId, + ) -> Result<(), VerifierError> { + self.named_commit_or_branch( + repo_id, + overlay_id, + name, + false, + Some(format!("{commit_id}")), + ) + } + + pub(crate) fn add_named_branch( + &self, + repo_id: &RepoId, + overlay_id: &OverlayId, + name: String, + branch_id: BranchId, + ) -> Result<(), VerifierError> { + self.named_commit_or_branch( + repo_id, + overlay_id, + name, + true, + Some(format!("{branch_id}")), + ) + } + + pub(crate) fn remove_named( + &self, + repo_id: &RepoId, + overlay_id: &OverlayId, + name: String, + ) -> Result<(), VerifierError> { + self.named_commit_or_branch(repo_id, overlay_id, name, false, None) + } + + fn named_commit_or_branch( + &self, + repo_id: &RepoId, + overlay_id: &OverlayId, + name: String, + is_branch: bool, + base64_id: Option, + ) -> Result<(), VerifierError> { + let ov_graphname = NamedNode::new_unchecked(NuriV0::repo_graph_name(repo_id, overlay_id)); + + let value = if base64_id.is_none() { + None + } else { + if is_branch { + let overlay_encoded = + numeric_encoder::StrHash::new(&NuriV0::overlay_id(overlay_id)); + let branch_encoded = numeric_encoder::StrHash::new(&NuriV0::branch_id_from_base64( + base64_id.as_ref().unwrap(), + )); + let mut buffer = Vec::with_capacity(33); + buffer.push(BRANCH_PREFIX); + buffer.extend_from_slice(&branch_encoded.to_be_bytes()); + buffer.extend_from_slice(&overlay_encoded.to_be_bytes()); + Some(buffer) + } else { + let commit_name = + 
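+                // The value is a compact binary key: a one-byte tag (BRANCH_PREFIX
+                // or COMMIT_PREFIX) followed by 16-byte StrHash digests, hence the
+                // Vec capacities of 33 (1 + 16 + 16) above and 17 (1 + 16) below.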
NuriV0::commit_graph_name_from_base64(base64_id.as_ref().unwrap(), overlay_id); + let commit_encoded = numeric_encoder::StrHash::new(&commit_name); + let mut buffer = Vec::with_capacity(17); + buffer.push(COMMIT_PREFIX); + buffer.extend_from_slice(&commit_encoded.to_be_bytes()); + Some(buffer) + } + }; + + self.graph_dataset + .as_ref() + .unwrap() + .ng_transaction( + move |mut transaction: ng_oxigraph::oxigraph::store::Transaction<'_>| -> Result<(), ng_oxigraph::oxigraph::store::StorageError> { + transaction.named_commit_or_branch(ov_graphname.as_ref(), &name, &value) + }, + ) + .map_err(|e| VerifierError::OxigraphError(e.to_string())) + } + + pub(crate) async fn update_discrete( + &mut self, + patch: DiscreteTransaction, + crdt: &BranchCrdt, + branch_id: &BranchId, + commit_id: ObjectId, + commit_info: CommitInfoJs, + ) -> Result<(), VerifierError> { + let new_state = if let Ok(state) = self + .user_storage + .as_ref() + .unwrap() + .branch_get_discrete_state(branch_id) + { + match crdt { + BranchCrdt::Automerge(_) => { + let mut doc = automerge::Automerge::load(&state) + .map_err(|e| VerifierError::AutomergeError(e.to_string()))?; + let _ = doc + .load_incremental(patch.as_slice()) + .map_err(|e| VerifierError::AutomergeError(e.to_string()))?; + doc.save() + } + BranchCrdt::YArray(_) + | BranchCrdt::YMap(_) + | BranchCrdt::YText(_) + | BranchCrdt::YXml(_) => { + let doc = yrs::Doc::new(); + { + let mut txn = doc.transact_mut(); + let update = yrs::Update::decode_v1(&state) + .map_err(|e| VerifierError::YrsError(e.to_string()))?; + txn.apply_update(update); + let update = yrs::Update::decode_v1(patch.as_slice()) + .map_err(|e| VerifierError::YrsError(e.to_string()))?; + txn.apply_update(update); + txn.commit(); + } + let empty_state_vector = yrs::StateVector::default(); + let transac = doc.transact(); + transac.encode_state_as_update_v1(&empty_state_vector) + } + _ => return Err(VerifierError::InvalidBranch), + } + } else { + patch.to_vec() + }; + self.user_storage + .as_ref() + .unwrap() + .branch_set_discrete_state(*branch_id, new_state)?; + + let patch = match (crdt, patch) { + (BranchCrdt::Automerge(_), DiscreteTransaction::Automerge(v)) => { + DiscretePatch::Automerge(v) + } + (BranchCrdt::YArray(_), DiscreteTransaction::YArray(v)) => DiscretePatch::YArray(v), + (BranchCrdt::YMap(_), DiscreteTransaction::YMap(v)) => DiscretePatch::YMap(v), + (BranchCrdt::YText(_), DiscreteTransaction::YText(v)) => DiscretePatch::YText(v), + (BranchCrdt::YXml(_), DiscreteTransaction::YXml(v)) => DiscretePatch::YXml(v), + _ => { + //log_debug!("{:?} {:?}", crdt, patch); + return Err(VerifierError::InvalidCommit); + } + }; + self.push_app_response( + branch_id, + AppResponse::V0(AppResponseV0::Patch(AppPatch { + commit_id: commit_id.to_string(), + commit_info: commit_info, + graph: None, + discrete: Some(patch), + other: None, + })), + ) + .await; + Ok(()) + } + + pub(crate) fn get_triples_from_transaction(commit_body: &CommitBody) -> Result, VerifierError> { + match commit_body { + CommitBody::V0(CommitBodyV0::AsyncTransaction(Transaction::V0(v0))) => { + let transac: TransactionBody = serde_bare::from_slice(v0)?; + if let Some(graph_transac) = transac.graph { + return Ok(graph_transac.inserts); + } + }, + _ => {} + } + Err(VerifierError::InvalidCommit) + } + + pub(crate) async fn verify_async_transaction( + &mut self, + transaction: &Transaction, + commit: &Commit, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + let Transaction::V0(v0) = transaction; + 
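+        // The payload is a BARE-encoded TransactionBody with optional `graph` and
+        // `discrete` parts; as the TODO further below notes, a commit carrying
+        // both currently has only its graph part applied and notified to apps.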
let mut body: TransactionBody = serde_bare::from_slice(&v0)?; + + let repo = self.get_repo(repo_id, store.get_store_repo())?; + + let branch = repo.branch(branch_id)?; + let commit_id = commit.id().unwrap(); + let commit_info: CommitInfoJs = (&commit.as_info(repo)).into(); + + if body.graph.is_some() { + let mut transaction = body.graph.take().unwrap(); + transaction.tokenize_with_commit_id(commit_id, repo_id); + let info = BranchUpdateInfo { + branch_id: *branch_id, + branch_type: branch.branch_type.clone(), + repo_id: *repo_id, + topic_id: branch.topic.clone().unwrap(), + token: branch.read_cap.as_ref().unwrap().tokenize(), + overlay_id: store.overlay_id, + previous_heads: commit.direct_causal_past_ids(), + commit_id, + transaction, + commit_info, + }; + self.update_graph(vec![info]).await?; + } else + //TODO: change the logic here. transaction commits can have both a discrete and graph update. Only one AppResponse should be sent in this case, containing both updates. + if body.discrete.is_some() { + let patch = body.discrete.unwrap(); + let crdt = &repo.branch(branch_id)?.crdt.clone(); + self.update_discrete(patch, &crdt, branch_id, commit_id, commit_info) + .await?; + } + + Ok(()) + } + + // pub(crate) fn find_branch_and_repo_for_nuri( + // &self, + // nuri: &NuriV0, + // ) -> Result<(RepoId, BranchId, StoreRepo), VerifierError> { + // if !nuri.is_branch_identifier() { + // return Err(VerifierError::InvalidNuri); + // } + // let store = self.get_store_by_overlay_id(&OverlayId::Outer( + // nuri.overlay.as_ref().unwrap().outer().to_slice(), + // ))?; + // let repo = self.get_repo(nuri.target.repo_id(), store.get_store_repo())?; + // Ok(( + // match nuri.branch { + // None => { + // let b = repo.main_branch().ok_or(VerifierError::BranchNotFound)?; + // if b.topic_priv_key.is_none() { + // return Err(VerifierError::PermissionDenied); + // } + // b.id + // } + // Some(TargetBranchV0::BranchId(id)) => { + // let b = repo.branch(&id)?; + // //TODO: deal with named branch that is also the main branch + // if b.topic_priv_key.is_none() { + // return Err(VerifierError::PermissionDenied); + // } + // id + // } + // // TODO: implement TargetBranchV0::Named + // _ => unimplemented!(), + // }, + // repo.id, + // store.get_store_repo().clone(), + // )) + // } + + fn find_branch_and_repo_for_quad( + &self, + quad: &Quad, + branches: &mut HashMap< + BranchId, + (StoreRepo, RepoId, BranchType, TopicId, Digest, OverlayId), + >, + nuri_branches: &mut HashMap, + ) -> Result<(RepoId, BranchId, bool), VerifierError> { + match &quad.graph_name { + GraphName::NamedNode(named_node) => { + let graph_name = named_node.as_string(); + //log_debug!("graph_name {graph_name}"); + if let Some(branch_found) = nuri_branches.get(graph_name) { + return Ok(branch_found.clone()); + } + let nuri = NuriV0::new_from(graph_name)?; + if !nuri.is_branch_identifier() { + return Err(VerifierError::InvalidNamedGraph); + } + let store = self.get_store_by_overlay_id(&OverlayId::Outer( + nuri.overlay.unwrap().outer().to_slice(), + ))?; + let repo = self.get_repo(nuri.target.repo_id(), store.get_store_repo())?; + let (branch_id, is_publisher, branch_type, topic_id, token) = match nuri.branch { + None => { + let b = repo.main_branch().ok_or(VerifierError::BranchNotFound)?; + ( + b.id, + b.topic_priv_key.is_some(), + b.branch_type.clone(), + b.topic.clone().unwrap(), + b.read_cap.as_ref().unwrap().tokenize(), + ) + } + Some(TargetBranchV0::BranchId(id)) => { + let b = repo.branch(&id)?; + //TODO: deal with named branch that is also the 
main branch + ( + id, + b.topic_priv_key.is_some(), + b.branch_type.clone(), + b.topic.clone().unwrap(), + b.read_cap.as_ref().unwrap().tokenize(), + ) + } + // TODO: implement TargetBranchV0::Named + _ => unimplemented!(), + }; + let _ = branches.entry(branch_id).or_insert(( + store.get_store_repo().clone(), + repo.id, + branch_type, + topic_id, + token, + store.overlay_id, + )); + let _ = nuri_branches.entry(graph_name.clone()).or_insert(( + repo.id, + branch_id, + is_publisher, + )); + Ok((repo.id, branch_id, is_publisher)) + } + _ => Err(VerifierError::InvalidNamedGraph), + } + } + + pub(crate) async fn prepare_sparql_update( + &mut self, + inserts: Vec, + removes: Vec, + peer_id: Vec, + ) -> Result, VerifierError> { + // options when not a publisher on the repo: + // - skip + // - TODO: abort (the whole transaction) + // - TODO: inbox (sent to inbox of document for a suggested update) + // for now we just do skip, without giving option to user + let mut inserts_map: HashMap> = HashMap::with_capacity(1); + let mut removes_map: HashMap> = HashMap::with_capacity(1); + let mut branches: HashMap< + BranchId, + (StoreRepo, RepoId, BranchType, TopicId, Digest, OverlayId), + > = HashMap::with_capacity(1); + let mut nuri_branches: HashMap = + HashMap::with_capacity(1); + let mut inserts_len = inserts.len(); + let mut removes_len = removes.len(); + for mut insert in inserts { + let (repo_id, branch_id, is_publisher) = + self.find_branch_and_repo_for_quad(&insert, &mut branches, &mut nuri_branches)?; + if !is_publisher { + continue; + } + let set = inserts_map.entry(branch_id).or_insert_with(|| { + let set = HashSet::with_capacity(inserts_len); + inserts_len = 1; + set + }); + + // changing blank node to skolemized node + + //log_debug!("INSERTING BN {}", quad); + if insert.subject.is_blank_node() { + //log_debug!("INSERTING SUBJECT BN {}", insert.subject); + if let Subject::BlankNode(b) = &insert.subject { + let iri = + NuriV0::repo_skolem(&repo_id, &peer_id, b.as_ref().unique_id().unwrap())?; + insert.subject = Subject::NamedNode(NamedNode::new_unchecked(iri)); + } + } + if insert.object.is_blank_node() { + //log_debug!("INSERTING OBJECT BN {}", insert.object); + if let Term::BlankNode(b) = &insert.object { + let iri = + NuriV0::repo_skolem(&repo_id, &peer_id, b.as_ref().unique_id().unwrap())?; + insert.object = Term::NamedNode(NamedNode::new_unchecked(iri)); + } + } + // TODO deal with triples in subject and object (RDF-STAR) + + set.insert(insert.into()); + } + for remove in removes { + let (repo_id, branch_id, is_publisher) = + self.find_branch_and_repo_for_quad(&remove, &mut branches, &mut nuri_branches)?; + if !is_publisher { + continue; + } + let set = removes_map.entry(branch_id).or_insert_with(|| { + let set = HashSet::with_capacity(removes_len); + removes_len = 1; + set + }); + set.insert(remove.into()); + } + + let mut updates = Vec::with_capacity(branches.len()); + + for (branch_id, (store_repo, repo_id, branch_type, topic_id, token, overlay_id)) in branches + { + let graph_transac = GraphTransaction { + inserts: Vec::from_iter(inserts_map.remove(&branch_id).unwrap_or(HashSet::new())), + removes: Vec::from_iter(removes_map.remove(&branch_id).unwrap_or(HashSet::new())), + }; + + let mut transac = TransactionBody { + body_type: TransactionBodyType::Graph, + graph: Some(graph_transac), + discrete: None, + }; + + let transaction_commit_body = + CommitBodyV0::AsyncTransaction(Transaction::V0(serde_bare::to_vec(&transac)?)); + + let commit = self + .new_transaction_commit( + 
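+                    // One AsyncTransaction commit is produced per affected branch;
+                    // the first vec![] argument below is the deps list, left empty
+                    // for now (see the TODO there), and the second is presumably
+                    // the acks (not confirmed by this diff).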
transaction_commit_body, + &repo_id, + &branch_id, + &store_repo, + vec![], //TODO deps + vec![], + ) + .await?; + + let repo = self.get_repo(&repo_id, &store_repo)?; + let commit_info: CommitInfoJs = (&commit.as_info(repo)).into(); + + let mut graph_update = transac.graph.take().unwrap(); + graph_update.tokenize_with_commit_id(commit.id().unwrap(), &repo_id); + + let info = BranchUpdateInfo { + branch_id, + branch_type, + repo_id, + topic_id, + token, + overlay_id, + previous_heads: commit.direct_causal_past_ids(), + commit_id: commit.id().unwrap(), + transaction: graph_update, + commit_info, + }; + updates.push(info); + } + self.update_graph(updates).await + } + + async fn update_graph( + &mut self, + mut updates: Vec, + ) -> Result, VerifierError> { + let updates_ref = &mut updates; + let res = self + .graph_dataset + .as_ref() + .unwrap() + .ng_transaction( + move |mut transaction| -> Result<(), ng_oxigraph::oxigraph::store::StorageError> { + let reader = transaction.ng_get_reader(); + + for update in updates_ref.iter_mut() { + let branch_is_main = update.branch_type.is_main(); + + let commit_name = + NuriV0::commit_graph_name(&update.commit_id, &update.overlay_id); + let commit_encoded = numeric_encoder::StrHash::new(&commit_name); + let cv_graphname = NamedNode::new_unchecked(commit_name); + let cv_graphname_ref = GraphNameRef::NamedNode((&cv_graphname).into()); + let ov_main = if branch_is_main { + let ov_graphname = NamedNode::new_unchecked(NuriV0::repo_graph_name( + &update.repo_id, + &update.overlay_id, + )); + Some(ov_graphname) + } else { + None + }; + let value = if branch_is_main { + ADDED_IN_MAIN + } else { + ADDED_IN_OTHER + }; + for triple in update.transaction.inserts.iter() { + let triple_ref: TripleRef = triple.into(); + let quad_ref = triple_ref.in_graph(cv_graphname_ref); + transaction.insert(quad_ref, value, true)?; + if let Some(ov_graphname) = ov_main.as_ref() { + let ov_graphname_ref = GraphNameRef::NamedNode(ov_graphname.into()); + let triple_ref: TripleRef = triple.into(); + let quad_ref = triple_ref.in_graph(ov_graphname_ref); + transaction.insert(quad_ref, REPO_IN_MAIN, false)?; + } + } + + let topic_encoded = + numeric_encoder::StrHash::new(&NuriV0::topic_id(&update.topic_id)); + let overlay_encoded = + numeric_encoder::StrHash::new(&NuriV0::overlay_id(&update.overlay_id)); + + let branch_encoded = + numeric_encoder::StrHash::new(&NuriV0::branch_id(&update.branch_id)); + let token_encoded = + numeric_encoder::StrHash::new(&NuriV0::token(&update.token)); + + transaction.update_branch_and_token( + &overlay_encoded, + &branch_encoded, + &topic_encoded, + &token_encoded, + )?; + + let direct_causal_past_encoded: HashSet = + HashSet::from_iter(update.previous_heads.iter().map(|commit_id| { + numeric_encoder::StrHash::new(&NuriV0::commit_graph_name( + commit_id, + &update.overlay_id, + )) + })); + + let current_heads = + reader.ng_get_heads(&topic_encoded, &overlay_encoded)?; + + transaction.update_heads( + &topic_encoded, + &overlay_encoded, + &commit_encoded, + &direct_causal_past_encoded, + )?; + + if !direct_causal_past_encoded.is_empty() { + // adding past + transaction.update_past( + &commit_encoded, + &direct_causal_past_encoded, + false, + )?; + } + + if !update.transaction.removes.is_empty() { + if current_heads.is_empty() { + return Err(ng_oxigraph::oxigraph::store::StorageError::Other( + Box::new(VerifierError::CannotRemoveTriplesWhenNewBranch), + )); + } + + let at_current_heads = current_heads == direct_causal_past_encoded; + // if not, we need to base 
ourselves on the materialized state of the direct_causal_past of the commit + let value = if branch_is_main { + REMOVED_IN_MAIN + } else { + REMOVED_IN_OTHER + }; + let mut to_remove_from_removes: HashSet = HashSet::new(); + for (pos, remove) in update.transaction.removes.iter().enumerate() { + let encoded_subject = remove.subject.as_ref().into(); + let encoded_predicate = remove.predicate.as_ref().into(); + let encoded_object = remove.object.as_ref().into(); + let observed_adds = reader + .quads_for_subject_predicate_object_heads( + &encoded_subject, + &encoded_predicate, + &encoded_object, + &direct_causal_past_encoded, + at_current_heads, + )?; + + for removing in observed_adds { + let graph_encoded = EncodedTerm::NamedNode { iri_id: removing }; + let quad_encoded = EncodedQuad::new( + encoded_subject.clone(), + encoded_predicate.clone(), + encoded_object.clone(), + graph_encoded, + ); + transaction.insert_encoded(&quad_encoded, value, true)?; + transaction.ng_remove(&quad_encoded, &commit_encoded)?; + } + if let Some(ov_graphname) = ov_main.as_ref() { + let should_remove_ov_triples = at_current_heads || { + reader + .quads_for_subject_predicate_object_heads( + &encoded_subject, + &encoded_predicate, + &encoded_object, + ¤t_heads, + true, + )? + .is_empty() + }; + if should_remove_ov_triples { + let ov_graphname_ref = + GraphNameRef::NamedNode(ov_graphname.into()); + let triple_ref: TripleRef = remove.into(); + let quad_ref = triple_ref.in_graph(ov_graphname_ref); + transaction.remove(quad_ref)?; + } else { + to_remove_from_removes.insert(pos); + } + } + } + let mut idx: usize = 0; + update.transaction.removes.retain(|_| { + let retain = !to_remove_from_removes.remove(&idx); + idx += 1; + retain + }); + } + } + Ok(()) + }, + ) + .map_err(|e| VerifierError::OxigraphError(e.to_string())); + match res { + Ok(()) => { + let mut commit_nuris = Vec::with_capacity(updates.len()); + for update in updates { + if update.branch_type.is_header() { + let mut tab_doc_info = AppTabDocInfo::new(); + for removed in update.transaction.removes { + match removed.predicate.as_str() { + NG_ONTOLOGY_ABOUT => tab_doc_info.description = Some("".to_string()), + NG_ONTOLOGY_TITLE => tab_doc_info.title = Some("".to_string()), + _ => {} + } + } + for inserted in update.transaction.inserts { + match inserted.predicate.as_str() { + NG_ONTOLOGY_ABOUT => { + if let Term::Literal(l) = inserted.object { + tab_doc_info.description = Some(l.value().to_string()) + } + } + NG_ONTOLOGY_TITLE => { + if let Term::Literal(l) = inserted.object { + tab_doc_info.title = Some(l.value().to_string()) + } + } + _ => {} + } + } + self.push_app_response( + &update.branch_id, + AppResponse::V0(AppResponseV0::TabInfo(AppTabInfo { + branch: None, + doc: Some(tab_doc_info), + store: None, + })), + ) + .await; + } else { + let graph_patch = update.transaction.as_patch(); + commit_nuris.push(NuriV0::commit(&update.repo_id, &update.commit_id)); + self.push_app_response( + &update.branch_id, + AppResponse::V0(AppResponseV0::Patch(AppPatch { + commit_id: update.commit_id.to_string(), + commit_info: update.commit_info, + graph: Some(graph_patch), + discrete: None, + other: None, + })), + ) + .await; + } + } + Ok(commit_nuris) + }, + Err(e) => Err(e) + } + } + + pub(crate) async fn process_sparql_update( + &mut self, + nuri: &NuriV0, + query: &String, + base: &Option, + peer_id: Vec, + ) -> Result, String> { + let store = self.graph_dataset.as_ref().unwrap(); + + let update = ng_oxigraph::oxigraph::sparql::Update::parse(query, base.as_deref()) + 
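+            // Parse errors surface as plain strings; the parsed update is then
+            // executed below via store.ng_update(), which returns the raw
+            // (inserts, removes) quad sets, and only a non-empty delta is turned
+            // into commits by prepare_sparql_update().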
.map_err(|e| e.to_string())?; + + let res = store.ng_update( + update, + self.resolve_target_for_sparql(&nuri.target, true) + .map_err(|e| e.to_string())?, + ); + match res { + Err(e) => Err(e.to_string()), + Ok((inserts, removes)) => { + if inserts.is_empty() && removes.is_empty() { + Ok(vec![]) + } else { + self.prepare_sparql_update( + Vec::from_iter(inserts), + Vec::from_iter(removes), + peer_id, + ) + .await + .map_err(|e| e.to_string()) + } + } + } + } +} + +#[cfg(test)] +mod test { + + use super::{TransactionBody, TransactionBodyType}; + use ng_repo::log::*; + use serde_bare::to_vec; + + #[test] + pub fn test_transaction_body() { + let body = TransactionBody { + body_type: TransactionBodyType::Graph, + graph: None, + discrete: None, + }; + let ser = to_vec(&body).unwrap(); + + log_debug!("graph {:?}", ser); + + let body = TransactionBody { + body_type: TransactionBodyType::Discrete, + graph: None, + discrete: None, + }; + let ser = to_vec(&body).unwrap(); + + log_debug!("discrete {:?}", ser); + + let body = TransactionBody { + body_type: TransactionBodyType::Both, + graph: None, + discrete: None, + }; + let ser = to_vec(&body).unwrap(); + + log_debug!("both {:?}", ser); + } +} diff --git a/ng-verifier/src/inbox_processor.rs b/ng-verifier/src/inbox_processor.rs new file mode 100644 index 0000000..287bc58 --- /dev/null +++ b/ng-verifier/src/inbox_processor.rs @@ -0,0 +1,681 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Processor for each type of InboxMsgContent + +use std::sync::Arc; + +use base64_url::base64::read; +use ng_net::actor::SoS; +use ng_net::broker::BROKER; +use ng_oxigraph::oxigraph::sparql::QueryResults; +use ng_oxigraph::oxrdf::{NamedNode, Term, Triple}; +use ng_oxigraph::oxsdatatypes::DateTime; +use ng_repo::types::{Block, ObjectRef, OverlayId, PrivKey, ReadCap, RepoId, StoreRepo, StoreRepoV0}; +use ng_repo::{errors::*, store::Store, types::Commit}; +use ng_repo::log::*; + +use ng_net::types::{InboxMsg, InboxMsgContent, InboxPost, SocialQuery, SocialQueryResponse, SocialQueryResponseContent}; +use ng_net::app_protocol::*; + +use crate::verifier::*; + +impl Verifier { + + pub(crate) async fn post_to_inbox(&self, post: InboxPost) -> Result<(), VerifierError> { + //log_info!("post_to_inbox {:?}",post); + let res = match self.client_request::<_,()>(post).await + { + Err(e) => Err(VerifierError::InboxError(e.to_string())), + Ok(SoS::Stream(_)) => Err(VerifierError::InboxError(NgError::InvalidResponse.to_string())), + Ok(SoS::Single(_)) => Ok(()), + }; + //log_info!("res {:?}",res); + res + } + + pub(crate) async fn create_social_query_forwarder( + &mut self, + social_query_doc_nuri_string: &String, + from_forwarder_nuri_string: &String, + from_profile_nuri_string: &String, + from_inbox_nuri_string: &String, + ) -> Result<(String, NuriV0), VerifierError> { + // creating the ForwardedSocialQuery in the private store + let forwarder = self.doc_create_with_store_repo( + "Graph".to_string(), "social:query:forwarded".to_string(), + "store".to_string(), None // meaning in private store + ).await?; + let forwarder_nuri = NuriV0::new_from_repo_graph(&forwarder)?; + let forwarder_id = forwarder_nuri.target.repo_id().clone(); + let forwarder_nuri_string = NuriV0::repo_id(&forwarder_id); 
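+        // The forwarder doc records, via the SPARQL INSERT below, where the query
+        // came from (origin query doc, upstream forwarder, sender profile and
+        // inbox) plus a start timestamp, so that responses can later be routed
+        // back upstream and progress can be tracked.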
+ + // adding triples in forwarder doc : ng:social_query_id + let sparql_update = format!(" PREFIX ng: + PREFIX xsd: + INSERT DATA {{ <> ng:social_query_id <{social_query_doc_nuri_string}>. + <> ng:social_query_forwarder <{from_forwarder_nuri_string}>. + <> ng:social_query_from_inbox <{from_inbox_nuri_string}>. + <> ng:social_query_from_profile <{from_profile_nuri_string}>. + <> ng:social_query_started \"{}\"^^xsd:dateTime . }}",DateTime::now()); + let ret = self + .process_sparql_update(&forwarder_nuri, &sparql_update, &Some(forwarder_nuri_string.clone()), vec![]) + .await; + if let Err(e) = ret { + return Err(VerifierError::SparqlError(e)); + } + Ok((forwarder_nuri_string,forwarder_nuri)) + } + + pub(crate) async fn mark_social_query_forwarder(&mut self, forwarder_nuri_string: &String, forwarder_nuri: &NuriV0, predicate: String) -> Result<(), VerifierError> { + + // adding triples in forwarder doc : ng:social_query_id + let sparql_update = format!("INSERT DATA {{ <{forwarder_nuri_string}> \"{}\"^^ . }}",DateTime::now()); + let ret = self + .process_sparql_update(forwarder_nuri, &sparql_update, &None, vec![]) + .await; + if let Err(e) = ret { + return Err(VerifierError::SparqlError(e)); + } + Ok(()) + } + + pub(crate) fn get_privkey_of_inbox(&self, this_overlay: &OverlayId) -> Result { + let store = self.get_store_by_overlay_id(this_overlay)?; + let repo = self.repos.get(&store.id()).ok_or(NgError::RepoNotFound)?; + let from_inbox = repo.inbox.to_owned().ok_or(NgError::InboxNotFound)?; + Ok(from_inbox) + } + + fn get_profile_replying_to(&self, from_profile: &String) -> Result<(OverlayId, PrivKey) ,NgError> { + + let from_profile_id = if from_profile.starts_with("did:ng:b") { + self.config.protected_store_id.unwrap() + } else { + self.config.public_store_id.unwrap() + }; + + let repo = self.repos.get(&from_profile_id).ok_or(NgError::RepoNotFound)?; + let inbox = repo.inbox.to_owned().ok_or(NgError::InboxNotFound)?; + let overlay = repo.store.get_store_repo().outer_overlay(); + + Ok( (overlay, inbox.clone()) ) + } + + pub(crate) fn get_2_profiles(&self) -> Result<( + (StoreRepo, PrivKey), // public + (StoreRepo, PrivKey) // protected + ) ,NgError> { + + let protected_store_id = self.config.protected_store_id.unwrap(); + let protected_repo = self.repos.get(&protected_store_id).ok_or(NgError::RepoNotFound)?; + let protected_inbox = protected_repo.inbox.to_owned().ok_or(NgError::InboxNotFound)?; + let protected_store_repo = protected_repo.store.get_store_repo(); + + let public_store_id = self.config.public_store_id.unwrap(); + let public_repo = self.repos.get(&public_store_id).ok_or(NgError::RepoNotFound)?; + let public_inbox = public_repo.inbox.to_owned().ok_or(NgError::InboxNotFound)?; + let public_store_repo = public_repo.store.get_store_repo(); + + Ok(( + (*public_store_repo, public_inbox.clone()), + (*protected_store_repo, protected_inbox.clone()) + )) + } + + pub(crate) async fn social_query_dispatch( + &mut self, + to_profile_nuri: &String, + to_inbox_nuri: &String, + forwarder_nuri: &NuriV0, + forwarder_id: &RepoId, + from_profiles: &( + (StoreRepo, PrivKey), // public + (StoreRepo, PrivKey) // protected + ), + query_id: &RepoId, + definition_commit_body_ref: &ObjectRef, + blocks: &Vec, + degree: u16, + ) -> Result<(), VerifierError> { + + // first add an entry in the local forwarded social query, to monitor progress + let sparql_update = format!(" + PREFIX ng: + INSERT DATA {{ + ng:social_query_forwarded_to_profile <{to_profile_nuri}> . 
+ ng:social_query_forwarded_to_inbox <{to_inbox_nuri}> . + }}"); + let ret = self + .process_sparql_update(&forwarder_nuri, &sparql_update, &None, vec![]) + .await; + if let Err(e) = ret { + return Err(VerifierError::SparqlError(e)); + } + // then send InboxPost message. + + let from_profile = if to_profile_nuri.starts_with("did:ng:b") { + &from_profiles.1 + } else { + &from_profiles.0 + }; + + self.post_to_inbox(InboxPost::new_social_query_request( + from_profile.0, + from_profile.1.clone(), + *forwarder_id, + to_profile_nuri.clone(), + to_inbox_nuri.clone(), + None, + *query_id, + definition_commit_body_ref.clone(), + blocks.to_vec(), + degree, + )?).await?; + + Ok(()) + } + + pub(crate) async fn process_inbox( + &mut self, + msg: &InboxMsg, + content: InboxMsgContent, + ) -> Result<(), VerifierError> { + + match content { + InboxMsgContent::SocialQuery(SocialQuery::Request(req)) => { + + let profile_id_nuri = NuriV0::from_store_repo_string(&req.from_profile_store_repo); + + //TODO: check that msg.body.from_overlay matches with req.from_profile_store_repo + + //TODO: check that this contact is mutual req.from_profile_store_repo must be in our contact list + + // getting the privkey of the inbox because we will need it here below to send responses. + let reply_with_inbox = self.get_privkey_of_inbox(&msg.body.to_overlay)?; + + let social_query_doc_nuri_string: String = NuriV0::repo_id(&req.query_id); + + // checking that we didn't process this query ID yet. if we did, return a SocialQueryResponseContent::AlreadyRequested + match self.sparql_query( + &NuriV0::new_entire_user_site(), + format!("ASK {{ ?s <{social_query_doc_nuri_string}> }}"), None).await? + { + QueryResults::Boolean(true) => { + let post = InboxPost::new_social_query_response_replying_to( + &msg.body, + &req, + SocialQueryResponseContent::AlreadyRequested, + reply_with_inbox.clone() + )?; + self.post_to_inbox(post).await?; + return Ok(()); + } + _ => {} + } + + // otherwise, create the forwarder + let (forwarder_nuri_string, forwarder_nuri) = self.create_social_query_forwarder( + &social_query_doc_nuri_string, + &NuriV0::repo_id(&req.forwarder_id), + &NuriV0::from_store_repo_string(&req.from_profile_store_repo), + &NuriV0::inbox(&msg.body.from_inbox.unwrap()) + ).await?; + + let temp_mini_block_storage = Store::new_temp_in_mem(); + for block in msg.blocks.iter() { + let _id = temp_mini_block_storage.put(block)?; + } + let commit = Commit::load(req.definition_commit_body_ref.clone(), + &temp_mini_block_storage, true) + .map_err(|e| { + //log_err!("err : {:?}", e); + e + })?; + + let triples = Verifier::get_triples_from_transaction(commit.body().unwrap())?; + + let mut sparql: Option = None; + for triple in triples { + if triple.predicate.as_str() == "did:ng:x:ng#social_query_sparql" { + sparql = Some( + match triple.object { + Term::Literal(l) => l.value().into(), + _ => return Err(VerifierError::InvalidSocialQuery) + }); + break; + } + } + //TODO: in case of errors here below, mark the forwarder as ng:social_query_error + if sparql.is_none() { return Err(VerifierError::InvalidSocialQuery); } + + //log_info!("{}",sparql.as_ref().unwrap()); + + let res = self.sparql_query(&NuriV0::new_entire_user_site(), sparql.unwrap(), None).await?; + + let results = match res { + QueryResults::Boolean(_) | QueryResults::Solutions(_) => return Err(VerifierError::NotImplemented), + QueryResults::Graph(triples) => { + let mut results = vec![]; + for t in triples { + match t { + Err(e) => { log_err!("{}",e.to_string()); return 
Err(VerifierError::SparqlError(e.to_string()))}, + Ok(triple) => results.push(triple), + } + } + results + } + }; + + //log_info!("{:?}",results); + + // Do we have local results matching the request's query? If yes, we send them back to the forwarder right away + if !results.is_empty() { + let content = SocialQueryResponseContent::Graph(serde_bare::to_vec(&results).unwrap()); + let post = InboxPost::new_social_query_response_replying_to( + &msg.body, + &req, + content, + reply_with_inbox.clone() + )?; + self.post_to_inbox(post).await?; + } + + // only fan out if we have contacts (that match the grant selected by current user) + // and if degree is > to 1 or equal to zero + if req.degree == 1 { + + // ending here. + self.mark_social_query_forwarder(&forwarder_nuri_string, &forwarder_nuri, "social_query_ended".to_string()).await?; + let post = InboxPost::new_social_query_response_replying_to( + &msg.body, + &req, + SocialQueryResponseContent::EndOfReplies, + reply_with_inbox.clone() + )?; + self.post_to_inbox(post).await?; + + return Ok(()) + } + // fan out forwarded social queries to all contacts (except the one we received it from) + + // getting the contacts to forward to + let sparql = format!("PREFIX ng: + PREFIX vcard: + SELECT ?profile_id ?inbox_id WHERE + {{ ?c a vcard:Individual . + OPTIONAL {{ ?c ng:site ?profile_id . ?c ng:site_inbox ?inbox_id }} + OPTIONAL {{ ?c ng:protected ?profile_id . ?c ng:protected_inbox ?inbox_id }} + FILTER ( bound(?profile_id) && NOT EXISTS {{ ?c ng:site <{profile_id_nuri}> }} && NOT EXISTS {{ ?c ng:protected <{profile_id_nuri}> }} ) + }}"); + //log_info!("{sparql}"); + let sols = match self.sparql_query( + &NuriV0::new_entire_user_site(), + sparql, None).await? + { + QueryResults::Solutions(sols) => { sols } + _ => return Err(VerifierError::SparqlError(NgError::InvalidResponse.to_string())), + }; + + let degree = if req.degree == 0 { 0 } else { req.degree - 1 }; + //log_info!("new degree {degree}"); + let mut found_contact = false; + let forwarder_id = forwarder_nuri.target.repo_id().clone(); + + let from_profiles = self.get_2_profiles()?; + + for sol in sols { + match sol { + Err(e) => return Err(VerifierError::SparqlError(e.to_string())), + Ok(s) => { + if let Some(Term::NamedNode(profile_id)) = s.get("profile_id") { + let to_profile_nuri = profile_id.as_string(); + if let Some(Term::NamedNode(inbox_id)) = s.get("inbox_id") { + let to_inbox_nuri = inbox_id.as_string(); + + found_contact = true; + + self.social_query_dispatch( + to_profile_nuri, + to_inbox_nuri, + &forwarder_nuri, + &forwarder_id, + &from_profiles, + &req.query_id, + &req.definition_commit_body_ref, + &msg.blocks, + degree + ).await?; + } + } + } + } + } + // if not found any contact, we stop here + //log_info!("found contact {found_contact}"); + if !found_contact { + self.mark_social_query_forwarder(&forwarder_nuri_string, &forwarder_nuri, "social_query_ended".to_string()).await?; + let post = InboxPost::new_social_query_response_replying_to( + &msg.body, + &req, + SocialQueryResponseContent::EndOfReplies, + reply_with_inbox + )?; + self.post_to_inbox(post).await?; + } + + } + InboxMsgContent::SocialQuery(SocialQuery::Response(response)) => { + + if msg.body.from_inbox.is_none() { + // TODO log error + // we do nothing as this is invalid msg. it must have a from. 
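+                    // Responses without a sender inbox are rejected here; the code
+                    // further down relies on this guard when it unwraps
+                    // msg.body.from_inbox.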
+                    return Err(VerifierError::InvalidSocialQuery)
+                }
+
+                let forwarder_nuri = NuriV0::new_repo_target_from_id(&response.forwarder_id);
+
+                // first we open the response.forwarder_id (because in the webapp, it might not be loaded yet)
+                {
+                    let broker = BROKER.read().await;
+                    let user = Some(self.user_id().clone());
+                    //let remote = (&self.connected_broker).into();
+
+                    let (user_branch_id, private_store_id) = {
+                        let private_store = self
+                            .repos
+                            .get(self.private_store_id())
+                            .ok_or(NgError::StoreNotFound)?;
+
+                        (private_store.user_branch().unwrap().id, private_store.id)
+                    };
+
+                    // if self.repos.get(&response.forwarder_id).is_none() {
+
+                    //     // we need to load the forwarder
+                    //     self.load_repo_from_read_cap(
+                    //         &response.forwarder_readcap,
+                    //         &broker,
+                    //         &user,
+                    //         &remote,
+                    //         Arc::clone(&private_store.store),
+                    //         true,
+                    //     )
+                    //     .await?;
+                    //     self.open_for_target(&forwarder_nuri.target, false).await?;
+                    // }
+
+                    self.open_branch_(&private_store_id, &user_branch_id,
+                        false, &broker, &user, &self.connected_broker.clone(), true).await?;
+
+                    let main_branch_id = {
+                        self.repos.get(&response.forwarder_id).unwrap().main_branch().unwrap().id
+                    };
+
+                    self.open_branch_(&response.forwarder_id, &main_branch_id,
+                        false, &broker, &user, &self.connected_broker.clone(), true).await?;
+                }
+
+                let forwarder_nuri_string = NuriV0::repo_id(&response.forwarder_id);
+                // checking that we do have a running ForwardedSocialQuery, and that it didn't end, otherwise it must be spam.
+                match self.sparql_query( &forwarder_nuri, format!("ASK {{ <> <did:ng:x:ng#social_query_id> <{}> }} ",
+                    NuriV0::repo_id(&response.query_id)), Some(forwarder_nuri_string.clone())).await? {
+                    QueryResults::Boolean(true) => {}
+                    _ => { return Err(VerifierError::InvalidSocialQuery) }
+                }
+                let (forwarded_from_profile, forwarded_from_inbox, from_forwarder) = match self.sparql_query(
+                    &forwarder_nuri,
+                    "PREFIX ng: <did:ng:x:ng#>
+                    SELECT ?from_profile ?from_inbox ?from_forwarder ?ended WHERE
+                    { OPTIONAL { <> ng:social_query_from_profile ?from_profile . }
+                        OPTIONAL { <> ng:social_query_from_inbox ?from_inbox . }
+                        OPTIONAL { <> ng:social_query_forwarder ?from_forwarder . }
+                        OPTIONAL { <> ng:social_query_ended ?ended . }
+                    }".to_string(),
+                    Some(forwarder_nuri_string)).await?
+                {
+                    QueryResults::Solutions(mut sols) => {
+                        match sols.next() {
+                            None => {
+                                //log_info!("at origin and not ended");
+                                (None, None, None)
+                            }
+                            Some(Err(e)) => {
+                                // TODO log error
+                                // we do nothing, as we couldn't find the ForwardedSocialQuery
+                                return Err(VerifierError::SparqlError(e.to_string()));
+                            }
+                            Some(Ok(sol)) => {
+                                if let Some(Term::NamedNode(_)) = sol.get("ended") {
+                                    // TODO log error: someone is sending back results although the forwarder has ended
+                                    return Ok(())
+                                };
+                                let from_profile = if let Some(Term::NamedNode(nuri)) = sol.get("from_profile") {
+                                    Some(nuri.as_string().clone())
+                                } else {
+                                    None
+                                };
+                                let from_inbox = if let Some(Term::NamedNode(nuri)) = sol.get("from_inbox") {
+                                    Some(nuri.as_string().clone())
+                                } else {
+                                    None
+                                };
+                                let from_forwarder = if let Some(Term::NamedNode(nuri)) = sol.get("from_forwarder") {
+                                    Some(nuri.as_string().clone())
+                                } else {
+                                    None
+                                };
+
+                                (from_profile, from_inbox, from_forwarder)
+                            }
+                        }
+                    }
+                    _ => return Err(VerifierError::SparqlError(NgError::InvalidResponse.to_string())),
+                };
+
+                // searching for the tokenized commit that added this forwarding.
+                let spar = format!("PREFIX ng: <did:ng:x:ng#>
+                    SELECT ?token WHERE
+                    {{ ?token ng:social_query_forwarded_to_inbox <{}> .
+                        MINUS {{ ?token ng:social_query_ended ?t . }} .
+                    }}",
+                    NuriV0::inbox(&msg.body.from_inbox.unwrap())
+                );
+                //log_info!("{spar}");
+                let token = match self.sparql_query(
+                    &forwarder_nuri,
+                    //<> ng:social_query_id <{}>  NuriV0::inbox(&msg.body.from_inbox.unwrap()),
+                    spar,
+                    Some(NuriV0::repo_id(&response.forwarder_id))).await?
+                {
+                    QueryResults::Solutions(mut sols) => {
+                        match sols.next() {
+                            None => { return Err(VerifierError::SparqlError("Token not found".to_string())); }
+                            Some(Err(e)) => {
+                                // TODO log error
+                                // we do nothing, as we couldn't find the token
+                                return Err(VerifierError::SparqlError(e.to_string()));
+                            }
+                            Some(Ok(sol)) => {
+                                if let Some(Term::NamedNode(token)) = sol.get("token") {
+                                    token.as_string().clone()
+                                } else {
+                                    // TODO log error
+                                    // we do nothing, as we couldn't find the token
+                                    return Err(VerifierError::SparqlError(NgError::InvalidResponse.to_string()));
+                                }
+                            }
+                        }
+                    }
+                    _ => return Err(VerifierError::SparqlError(NgError::InvalidResponse.to_string())),
+                };
+                //log_info!("token = {token}");
+
+                let at_origin = forwarded_from_profile.is_none() || forwarded_from_inbox.is_none() || from_forwarder.is_none();
+
+                match response.content {
+                    SocialQueryResponseContent::AlreadyRequested
+                    | SocialQueryResponseContent::EndOfReplies
+                    | SocialQueryResponseContent::Error(_) => {
+                        // ending this forwarding here.
+                        self.mark_social_query_forwarder(&token, &forwarder_nuri, "social_query_ended".to_string()).await?;
+                        // TODO record error
+
+                        // if we are at the end of the whole ForwardedSocialQuery (no more pending responses)
+                        // we send EndOfReplies upstream, and mark it as ended.
+
+                        let the_end = match self.sparql_query(
+                            &forwarder_nuri,
+                            format!("PREFIX ng: <did:ng:x:ng#>
+                                SELECT ?token WHERE
+                                {{ ?token ng:social_query_forwarded_to_profile ?p .
+                                    MINUS {{ ?token ng:social_query_ended ?t . }}
+                                }}"),
+                            None).await?
+                        {
+                            QueryResults::Solutions(mut sols) => {
+                                match sols.next() {
+                                    None => true,
+                                    _ => false,
+                                }
+                            }
+                            _ => {
+                                // TODO: log error
+                                false
+                            }
+                        };
+                        if the_end {
+                            // marking the end
+                            self.mark_social_query_forwarder(&NuriV0::repo_id(&response.forwarder_id), &forwarder_nuri, "social_query_ended".to_string()).await?;
+
+                            if !at_origin {
+                                // getting the privkey of the inbox because we will need it below to send responses.
+                                let from = self.get_profile_replying_to(forwarded_from_profile.as_ref().unwrap())?;
+
+                                // sending EndOfReplies upstream
+                                let to_overlay = NuriV0::from_profile_into_overlay_id(forwarded_from_profile.as_ref().unwrap())?;
+                                let to_inbox_id = NuriV0::from_inbox_into_id(forwarded_from_inbox.as_ref().unwrap())?;
+                                let from_forwarder = NuriV0::from_repo_nuri_to_id(from_forwarder.as_ref().unwrap())?;
+                                let post = InboxPost::new_social_query_response(
+                                    to_overlay,
+                                    to_inbox_id,
+                                    Some(from),
+                                    response.query_id,
+                                    from_forwarder,
+                                    SocialQueryResponseContent::EndOfReplies
+                                )?;
+                                self.post_to_inbox(post).await?;
+                            }
+                        }
+                    }
+                    SocialQueryResponseContent::Graph(graph) => {
+
+                        if at_origin {
+
+                            // insert the triples in the query document
+                            let triples: Vec<Triple> = serde_bare::from_slice(&graph)?;
+
+                            if triples.is_empty() {
+                                return Err(VerifierError::InvalidResponse);
+                            }
+
+                            // for t in triples.iter() {
+                            //     log_info!("{}",t.to_string());
+                            // }
+
+                            let overlay_id = self.repos.get(&response.query_id).ok_or(VerifierError::RepoNotFound)?.store.outer_overlay();
+                            let nuri_ov = NuriV0::repo_graph_name(&response.query_id, &overlay_id);
+                            let graph_name = NamedNode::new_unchecked(&nuri_ov);
+                            let quads = triples.into_iter().map(|t| t.in_graph(graph_name.clone())).collect();
+                            let commits = self.prepare_sparql_update(quads, vec![], self.get_peer_id_for_skolem()).await?;
+
+                        } else {
+
+                            // we forward upstream
+
+                            // getting the privkey of the inbox because we will need it below to send responses.
+                            let from = self.get_profile_replying_to(forwarded_from_profile.as_ref().unwrap())?;
+
+                            let to_overlay = NuriV0::from_profile_into_overlay_id(forwarded_from_profile.as_ref().unwrap())?;
+                            let to_inbox_id = NuriV0::from_inbox_into_id(forwarded_from_inbox.as_ref().unwrap())?;
+                            let from_forwarder = NuriV0::from_repo_nuri_to_id(from_forwarder.as_ref().unwrap())?;
+                            let post = InboxPost::new_social_query_response(
+                                to_overlay,
+                                to_inbox_id,
+                                Some(from),
+                                response.query_id,
+                                from_forwarder,
+                                SocialQueryResponseContent::Graph(graph)
+                            )?;
+                            self.post_to_inbox(post).await?;
+                        }
+
+                    }
+                    SocialQueryResponseContent::QueryResult(_) | SocialQueryResponseContent::False | SocialQueryResponseContent::True => {
+                        // not implemented yet
+                        return Err(VerifierError::NotImplemented)
+                    }
+                }
+
+            }
+            InboxMsgContent::ContactDetails(details) => {
+                if msg.body.from_inbox.is_none() {
+                    // TODO log error
+                    // we do nothing, as this is an invalid msg: it must have a from_inbox.
+                    return Err(VerifierError::InvalidInboxPost);
+                }
+
+                let inbox_nuri_string: String = NuriV0::inbox(&msg.body.from_inbox.unwrap());
+                let profile_nuri_string: String = NuriV0::from_store_repo_string(&details.profile);
+                let a_or_b = if details.profile.is_public() { "site" } else { "protected" };
+
+                // checking if this contact has already been added
+                match self.sparql_query(
+                    &NuriV0::new_entire_user_site(),
+                    format!("ASK {{ ?s <{inbox_nuri_string}> . ?s <{profile_nuri_string}> }}"), None).await?
+                {
+                    QueryResults::Boolean(true) => {
+                        return Err(VerifierError::ContactAlreadyExists);
+                    }
+                    _ => {}
+                }
+
+                let contact = self.doc_create_with_store_repo(
+                    "Graph".to_string(), "social:contact".to_string(),
+                    "store".to_string(), None // meaning in private store
+                ).await?;
+                let contact_nuri = NuriV0::new_from_repo_graph(&contact)?;
+                let contact_id = contact_nuri.target.repo_id().clone();
+                let contact_nuri_string = NuriV0::repo_id(&contact_id);
+                let has_email = details.email.map_or("".to_string(), |email| format!("<> vcard:hasEmail \"{email}\"."));
+
+                // adding triples in contact doc
+                let sparql_update = format!("PREFIX ng: <did:ng:x:ng#>
+                    PREFIX vcard: <http://www.w3.org/2006/vcard/ns#>
+                    INSERT DATA {{ <> ng:{a_or_b} <{profile_nuri_string}>.
+                        <> ng:{a_or_b}_inbox <{inbox_nuri_string}>.
+                        <> a vcard:Individual .
+                        <> vcard:fn \"{}\".
+                        {has_email} }}", details.name);
+
+                let ret = self
+                    .process_sparql_update(&contact_nuri, &sparql_update, &Some(contact_nuri_string), vec![])
+                    .await;
+                if let Err(e) = ret {
+                    return Err(VerifierError::SparqlError(e));
+                }
+
+                self.update_header(&contact_nuri.target, Some(details.name), None).await?;
+
+            }
+            _ => return Err(VerifierError::NotImplemented)
+        }
+        Ok(())
+    }
+}
\ No newline at end of file
diff --git a/ng-verifier/src/lib.rs b/ng-verifier/src/lib.rs
new file mode 100644
index 0000000..dfe5d12
--- /dev/null
+++ b/ng-verifier/src/lib.rs
@@ -0,0 +1,91 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+pub mod types;
+
+pub mod site;
+
+#[doc(hidden)]
+pub mod verifier;
+
+mod user_storage;
+
+mod commits;
+
+mod request_processor;
+
+mod inbox_processor;
+
+#[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+mod rocksdb_user_storage;
+
+use ng_net::app_protocol::*;
+use ng_oxigraph::oxrdf::Triple;
+use ng_repo::errors::NgError;
+
+pub fn triples_ser_to_json_string(ser: &Vec<u8>) -> Result<String, String> {
+    let triples: Vec<Triple> = serde_bare::from_slice(ser)
+        .map_err(|_| "Deserialization error of Vec<Triple>".to_string())?;
+
+    let mut triples_json: Vec<serde_json::Value> = Vec::with_capacity(triples.len());
+    for insert in triples {
+        triples_json.push(serde_json::Value::String(insert.to_string()));
+    }
+    let triples_json = serde_json::Value::Array(triples_json);
+    serde_json::to_string(&triples_json)
+        .map_err(|_| "Cannot serialize Vec<Triple> to JSON".to_string())
+}
+
+fn triples_ser_to_json_ser(ser: &Vec<u8>) -> Result<Vec<u8>, String> {
+    let json = triples_ser_to_json_string(ser)?;
+    Ok(json.as_bytes().to_vec())
+}
+
+pub fn read_triples_in_app_response_from_rust(
+    mut app_response: AppResponse,
+) -> Result<(Vec<Triple>, Vec<Triple>), NgError> {
+    let mut inserts: Vec<Triple> = vec![];
+    let mut removes: Vec<Triple> = vec![];
+    if let AppResponse::V0(AppResponseV0::State(AppState { ref mut graph, .. })) = app_response {
+        if graph.is_some() {
+            let graph_state = graph.take().unwrap();
+            inserts = serde_bare::from_slice(&graph_state.triples)?;
+        };
+    } else if let AppResponse::V0(AppResponseV0::Patch(AppPatch { ref mut graph, .. })) =
+        app_response
+    {
+        if graph.is_some() {
+            let graph_patch = graph.take().unwrap();
+            inserts = serde_bare::from_slice(&graph_patch.inserts)?;
+            removes = serde_bare::from_slice(&graph_patch.removes)?;
+        };
+    }
+    Ok((inserts, removes))
+}
+
+pub fn prepare_app_response_for_js(mut app_response: AppResponse) -> Result<AppResponse, String> {
+    if let AppResponse::V0(AppResponseV0::State(AppState { ref mut graph, .. })) = app_response {
+        if graph.is_some() {
+            let graph_state = graph.take().unwrap();
+            *graph = Some(GraphState {
+                triples: triples_ser_to_json_ser(&graph_state.triples)?,
+            });
+        };
+    } else if let AppResponse::V0(AppResponseV0::Patch(AppPatch { ref mut graph, .. })) =
+        app_response
+    {
+        if graph.is_some() {
+            let mut graph_patch = graph.take().unwrap();
+            graph_patch.inserts = triples_ser_to_json_ser(&graph_patch.inserts)?;
+            graph_patch.removes = triples_ser_to_json_ser(&graph_patch.removes)?;
+            *graph = Some(graph_patch);
+        };
+    }
+    Ok(app_response)
+}
diff --git a/ng-verifier/src/request_processor.rs b/ng-verifier/src/request_processor.rs
new file mode 100644
index 0000000..8d92a07
--- /dev/null
+++ b/ng-verifier/src/request_processor.rs
@@ -0,0 +1,1359 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+//! Processor for each type of AppRequest
+
+use std::collections::HashSet;
+use std::sync::Arc;
+
+use futures::channel::mpsc;
+use futures::SinkExt;
+use futures::StreamExt;
+use ng_net::actor::SoS;
+use ng_net::types::InboxPost;
+use ng_net::types::NgQRCode;
+use ng_net::types::NgQRCodeProfileSharingV0;
+use ng_oxigraph::oxigraph::sparql::EvaluationError;
+use ng_oxigraph::oxigraph::sparql::{results::*, Query, QueryResults};
+use ng_oxigraph::oxrdf::{Literal, NamedNode, Quad, Term};
+use ng_oxigraph::oxsdatatypes::DateTime;
+
+use ng_repo::errors::*;
+use ng_repo::file::{RandomAccessFile, ReadFile};
+#[allow(unused_imports)]
+use ng_repo::log::*;
+use ng_repo::object::Object;
+use ng_repo::repo::CommitInfo;
+use ng_repo::store::Store;
+use ng_repo::types::BranchId;
+use ng_repo::types::StoreRepo;
+use ng_repo::types::*;
+use ng_repo::PublicKeySet;
+
+use ng_net::app_protocol::*;
+use ng_net::utils::ResultSend;
+use ng_net::utils::{spawn_and_log_error, Receiver, Sender};
+
+use crate::types::*;
+use crate::verifier::*;
+
+impl Verifier {
+    pub(crate) async fn process_stream(
+        &mut self,
+        command: &AppRequestCommandV0,
+        nuri: &NuriV0,
+        _payload: &Option<AppRequestPayload>,
+    ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
+        match command {
+            AppRequestCommandV0::Fetch(fetch) => match fetch {
+                AppFetchContentV0::Subscribe => {
+                    let (repo_id, branch_id, store_repo) =
+                        self.open_for_target(&nuri.target, false).await?;
+                    Ok(self
+                        .create_branch_subscription(repo_id, branch_id, store_repo)
+                        .await?)
+                }
+                _ => unimplemented!(),
+            },
+            AppRequestCommandV0::FileGet => {
+                if nuri.objects.len() < 1 {
+                    return Err(NgError::InvalidArgument);
+                }
+                let (repo_id, _, store_repo) = self.resolve_target(&nuri.target)?;
+                let obj = nuri.objects.get(0).unwrap();
+                let repo = self.get_repo(&repo_id, &store_repo)?;
+                if let Some(mut stream) = self
+                    .fetch_blocks_if_needed(&obj.id, &repo_id, &store_repo)
+                    .await?
+                {
+                    // TODO: start opening the file and running the sending_loop after we have received 10 blocks (3 mandatory and 7 depths max).
+                    // for files below 10MB we won't see a difference, but for big files, we can start sending out some AppResponse earlier.
+                    while let Some(block) = stream.next().await {
+                        repo.store.put(&block)?;
+                    }
+                }
+                let file =
+                    RandomAccessFile::open(obj.id, obj.key.clone(), Arc::clone(&repo.store))?;
+
+                let (mut tx, rx) = mpsc::unbounded::<AppResponse>();
+                tx.send(AppResponse::V0(AppResponseV0::FileMeta(FileMetaV0 {
+                    content_type: file.meta().content_type().clone(),
+                    size: file.meta().total_size(),
+                })))
+                .await
+                .map_err(|_| NgError::InternalError)?;
+
+                async fn sending_loop(
+                    file: Arc<RandomAccessFile>,
+                    mut tx: Sender<AppResponse>,
+                ) -> ResultSend<()> {
+                    let mut pos = 0;
+                    loop {
+                        let res = file.read(pos, 1048564);
+
+                        if res.is_err() {
+                            //log_info!("ERR={:?}", res.unwrap_err());
+                            let _ = tx.send(AppResponse::V0(AppResponseV0::EndOfStream)).await;
+                            tx.close_channel();
+                            break;
+                        }
+                        let res = res.unwrap();
+                        //log_info!("reading={} {}", pos, res.len());
+                        pos += res.len();
+                        if let Err(_) = tx
+                            .send(AppResponse::V0(AppResponseV0::FileBinary(res)))
+                            .await
+                        {
+                            break;
+                        }
+                    }
+                    Ok(())
+                }
+
+                spawn_and_log_error(sending_loop(Arc::new(file), tx.clone()));
+                let fnonce = Box::new(move || {
+                    //log_debug!("FileGet cancelled");
+                    tx.close_channel();
+                });
+                Ok((rx, fnonce))
+            }
+            _ => unimplemented!(),
+        }
+    }
+
+    fn resolve_target(
+        &self,
+        target: &NuriTargetV0,
+    ) -> Result<(RepoId, BranchId, StoreRepo), NgError> {
+        match target {
+            NuriTargetV0::PrivateStore => {
+                let repo_id = self.config.private_store_id.unwrap();
+                let (branch, store_repo) = {
+                    let repo = self.repos.get(&repo_id).ok_or(NgError::RepoNotFound)?;
+                    let branch = repo.main_branch().ok_or(NgError::BranchNotFound)?;
+                    (branch.id, repo.store.get_store_repo().clone())
+                };
+                Ok((repo_id, branch, store_repo))
+            }
+            NuriTargetV0::Repo(repo_id) => {
+                let (branch, store_repo) = {
+                    let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?;
+                    let branch = repo.main_branch().ok_or(NgError::BranchNotFound)?;
+                    (branch.id, repo.store.get_store_repo().clone())
+                };
+                Ok((*repo_id, branch, store_repo))
+            }
+            _ => unimplemented!(),
+        }
+    }
+
+    pub(crate) async fn update_header(&mut self, target: &NuriTargetV0, title: Option<String>, about: Option<String>) -> Result<(), VerifierError> {
+
+        let (repo_id, branch_id, store_repo) = self.resolve_header_branch(target)?;
+        let graph_name = NuriV0::branch_repo_graph_name(
+            &branch_id,
+            &repo_id,
+            &store_repo.overlay_id_for_storage_purpose(),
+        );
+
+        let base = NuriV0::repo_id(&repo_id);
+
+        let mut deletes = String::new();
+        let mut wheres = String::new();
+        let mut inserts = String::new();
+        if let Some(about) = about {
+            deletes += &format!("<> <{NG_ONTOLOGY_ABOUT}> ?a. ");
+            wheres += &format!("OPTIONAL {{ <> <{NG_ONTOLOGY_ABOUT}> ?a }} ");
+            if about.len() > 0 {
+                inserts += &format!(
+                    "<> <{NG_ONTOLOGY_ABOUT}> \"{}\". ",
+                    about.replace("\\", "\\\\").replace("\"", "\\\"")
+                );
+            }
+        }
+        if let Some(title) = title {
+            deletes += &format!("<> <{NG_ONTOLOGY_TITLE}> ?n. ");
+            wheres += &format!("OPTIONAL {{ <> <{NG_ONTOLOGY_TITLE}> ?n }} ");
+            if title.len() > 0 {
+                inserts += &format!(
+                    "<> <{NG_ONTOLOGY_TITLE}> \"{}\". ",
+                    title.replace("\\", "\\\\").replace("\"", "\\\"")
+                );
+            }
+        }
+        let query = format!(
+            "DELETE {{ {deletes} }} INSERT {{ {inserts} }} WHERE {{ {wheres} }}"
+        );
+
+        let oxistore = self.graph_dataset.as_ref().unwrap();
+
+        let update = ng_oxigraph::oxigraph::sparql::Update::parse(&query, Some(&base))
+            .map_err(|_e| NgError::InternalError)?;
+
+        let res = oxistore.ng_update(update, Some(graph_name));
+        match res {
+            Err(_e) => Err(VerifierError::InternalError),
+            Ok((inserts, removes)) => {
+                if inserts.is_empty() && removes.is_empty() {
+                    Ok(())
+                } else {
+                    self
+                        .prepare_sparql_update(
+                            Vec::from_iter(inserts),
+                            Vec::from_iter(removes),
+                            self.get_peer_id_for_skolem(),
+                        )
+                        .await?;
+                    Ok(())
+                }
+            }
+        }
+    }
+
+    fn resolve_header_branch(
+        &self,
+        target: &NuriTargetV0,
+    ) -> Result<(RepoId, BranchId, StoreRepo), NgError> {
+        Ok(match target {
+            NuriTargetV0::Repo(repo_id) => {
+                let (branch, store_repo) = {
+                    let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?;
+                    let branch = repo.header_branch().ok_or(NgError::BranchNotFound)?;
+                    (branch.id, repo.store.get_store_repo().clone())
+                };
+                (*repo_id, branch, store_repo)
+            }
+            _ => return Err(NgError::NotImplemented),
+        })
+    }
+
+    pub(crate) fn resolve_target_for_sparql(
+        &self,
+        target: &NuriTargetV0,
+        update: bool,
+    ) -> Result<Option<String>, NgError> {
+        match target {
+            NuriTargetV0::PrivateStore => {
+                let repo_id = self.config.private_store_id.unwrap();
+                let repo = self.repos.get(&repo_id).ok_or(NgError::RepoNotFound)?;
+                let overlay_id = repo.store.overlay_id;
+                Ok(Some(NuriV0::repo_graph_name(&repo_id, &overlay_id)))
+            }
+            NuriTargetV0::Repo(repo_id) => {
+                let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?;
+                Ok(Some(NuriV0::repo_graph_name(
+                    &repo_id,
+                    &repo.store.overlay_id,
+                )))
+            }
+            NuriTargetV0::UserSite | NuriTargetV0::None => {
+                if update {
+                    return Err(NgError::InvalidTarget);
+                } else {
+                    //log_info!("QUERYING UNION GRAPH");
+                    return Ok(None);
+                }
+            }
+            _ => unimplemented!(),
+        }
+    }
+
+    pub(crate) async fn open_for_target(
+        &mut self,
+        target: &NuriTargetV0,
+        as_publisher: bool,
+    ) -> Result<(RepoId, BranchId, StoreRepo), NgError> {
+        let (repo_id, branch, store_repo) = self.resolve_target(target)?;
+        self.open_branch(&repo_id, &branch, as_publisher).await?;
+        Ok((repo_id, branch, store_repo))
+    }
+
+    pub fn handle_query_results(results: QueryResults) -> Result<AppResponse, String> {
+        Ok(match results {
+            QueryResults::Solutions(solutions) => {
+                let serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json);
+
+                let mut solutions_writer = serializer
+                    .serialize_solutions_to_write(Vec::new(), solutions.variables().to_vec())
+                    .map_err(|_| "QueryResult serializer error")?;
+                for solution in solutions {
+                    solutions_writer
+                        .write(&solution.map_err(|e| e.to_string())?)
+ .map_err(|_| "QueryResult serializer error")?; + } + AppResponse::V0(AppResponseV0::QueryResult( + solutions_writer + .finish() + .map_err(|_| "QueryResult serializer error")?, + )) + } + QueryResults::Boolean(b) => { + if b { + AppResponse::V0(AppResponseV0::True) + } else { + AppResponse::V0(AppResponseV0::False) + } + } + QueryResults::Graph(quads) => { + let mut results = vec![]; + for quad in quads { + match quad { + Err(e) => return Ok(AppResponse::error(e.to_string())), + Ok(triple) => results.push(triple), + } + } + AppResponse::V0(AppResponseV0::Graph(serde_bare::to_vec(&results).unwrap())) + } + }) + } + + fn history_for_nuri( + &self, + target: &NuriTargetV0, + ) -> Result<(Vec<(ObjectId, CommitInfo)>, Vec>), VerifierError> { + let (repo_id, branch_id, store_repo) = self.resolve_target(target)?; // TODO deal with targets that are commit heads + let repo = self.get_repo(&repo_id, &store_repo)?; + let branch = repo.branch(&branch_id)?; + repo.history_at_heads(&branch.current_heads) + } + + async fn signed_snapshot_request( + &mut self, + target: &NuriTargetV0, + ) -> Result { + let (repo_id, branch_id, store_repo) = self.resolve_target(target)?; // TODO deal with targets that are commit heads + let repo = self.get_repo(&repo_id, &store_repo)?; + let branch = repo.branch(&branch_id)?; + + let snapshot_json = self.take_snapshot(&branch.crdt, &branch_id, target)?; + //log_debug!("snapshot created {snapshot_json}"); + let snapshot_object = Object::new( + ObjectContent::V0(ObjectContentV0::Snapshot(snapshot_json.as_bytes().to_vec())), + None, + 0, + &repo.store, + ); + + let snap_obj_blocks = snapshot_object.save(&repo.store)?; + + if self.connected_broker.is_some() { + let mut blocks = Vec::with_capacity(snap_obj_blocks.len()); + for block_id in snap_obj_blocks { + blocks.push(repo.store.get(&block_id)?); + } + self.put_blocks(blocks, repo).await?; + } + + let snapshot_commit_body = CommitBodyV0::Snapshot(Snapshot::V0(SnapshotV0 { + heads: branch.current_heads.iter().map(|h| h.id).collect(), + content: snapshot_object.reference().unwrap(), //TODO : content could be omitted as the ref is already in files + })); + + let mut proto_events = Vec::with_capacity(2); + + let snapshot_commit = Commit::new_with_body_and_save( + self.user_privkey(), + self.user_id(), + branch_id, + QuorumType::Owners, // TODO: deal with PartialOrder (when the snapshot is not requested by owners) + vec![], + vec![], + branch.current_heads.clone(), + vec![], + vec![snapshot_object.reference().unwrap()], + vec![], + vec![], + CommitBody::V0(snapshot_commit_body), + 0, + &repo.store, + )?; + + let snapshot_commit_id = snapshot_commit.id().unwrap(); + let snapshot_commit_ref = snapshot_commit.reference().unwrap(); + + let signature_content = SignatureContent::V0(SignatureContentV0 { + commits: vec![snapshot_commit_id], + }); + + let signature_content_ser = serde_bare::to_vec(&signature_content).unwrap(); + let sig_share = repo + .signer + .as_ref() + .unwrap() + .sign_with_owner(&signature_content_ser)?; + let sig = PublicKeySet::combine_signatures_with_threshold(0, [(0, &sig_share)]) + .map_err(|_| NgError::IncompleteSignature)?; + let threshold_sig = ThresholdSignatureV0::Owners(sig); + + let signature = Signature::V0(SignatureV0 { + content: signature_content, + threshold_sig, + certificate_ref: repo.certificate_ref.clone().unwrap(), + }); + + let signature_object = Object::new( + ObjectContent::V0(ObjectContentV0::Signature(signature)), + None, + 0, + &repo.store, + ); + + let sign_obj_blocks = 
signature_object.save(&repo.store)?; + + let signature_commit_body = + CommitBodyV0::AsyncSignature(AsyncSignature::V0(signature_object.reference().unwrap())); + + let signature_commit = Commit::new_with_body_and_save( + self.user_privkey(), + self.user_id(), + branch_id, + QuorumType::IamTheSignature, + vec![snapshot_commit_ref.clone()], + vec![], + vec![snapshot_commit_ref], + vec![], + vec![], + vec![], + vec![], + CommitBody::V0(signature_commit_body), + 0, + &repo.store, + )?; + + let store = Arc::clone(&repo.store); + self.verify_commit_( + &snapshot_commit, + &branch_id, + &repo_id, + Arc::clone(&store), + true, + ) + .await?; + self.verify_commit_(&signature_commit, &branch_id, &repo_id, store, true) + .await?; + + proto_events.push((snapshot_commit, vec![])); + proto_events.push((signature_commit, sign_obj_blocks)); + self.new_events(proto_events, repo_id, &store_repo).await?; + Ok(true) + } + + fn find_signable_commits( + heads: &[BlockRef], + store: &Store, + ) -> Result, VerifierError> { + let mut res = HashSet::with_capacity(heads.len()); + for head in heads { + let commit = Commit::load(head.clone(), store, true)?; + let commit_type = commit.get_type().unwrap(); + res.extend(match commit_type { + CommitType::SyncSignature => { + continue; // we shouldn't be signing asynchronously a SyncSignature + } + CommitType::AsyncSignature => { + Self::find_signable_commits(&commit.deps(), store)?.into_iter() + } + _ => HashSet::from([commit.reference().unwrap()]).into_iter(), + }); + } + Ok(res) + } + + async fn signature_request(&mut self, target: &NuriTargetV0) -> Result { + let (repo_id, branch_id, store_repo) = self.resolve_target(target)?; // TODO deal with targets that are commit heads + let repo = self.get_repo(&repo_id, &store_repo)?; + let branch = repo.branch(&branch_id)?; + + let commits = Vec::from_iter( + Verifier::find_signable_commits(&branch.current_heads, &repo.store)?.into_iter(), + ); + if commits.is_empty() { + return Err(VerifierError::NothingToSign); + } + + let signature_content = SignatureContent::V0(SignatureContentV0 { + commits: commits.iter().map(|h| h.id).collect(), + }); + + let signature_content_ser = serde_bare::to_vec(&signature_content).unwrap(); + let sig_share = repo + .signer + .as_ref() + .unwrap() + .sign_with_owner(&signature_content_ser)?; + let sig = PublicKeySet::combine_signatures_with_threshold(0, [(0, &sig_share)]) + .map_err(|_| NgError::IncompleteSignature)?; + let threshold_sig = ThresholdSignatureV0::Owners(sig); + + let signature = Signature::V0(SignatureV0 { + content: signature_content, + threshold_sig, + certificate_ref: repo.certificate_ref.clone().unwrap(), + }); + + let signature_object = Object::new( + ObjectContent::V0(ObjectContentV0::Signature(signature)), + None, + 0, + &repo.store, + ); + + let sign_obj_blocks = signature_object.save(&repo.store)?; + + let signature_commit_body = + CommitBodyV0::AsyncSignature(AsyncSignature::V0(signature_object.reference().unwrap())); + + let signature_commit = Commit::new_with_body_and_save( + self.user_privkey(), + self.user_id(), + branch_id, + QuorumType::IamTheSignature, + commits, + vec![], + branch.current_heads.clone(), + vec![], + vec![], + vec![], + vec![], + CommitBody::V0(signature_commit_body), + 0, + &repo.store, + )?; + + let store = Arc::clone(&repo.store); + + self.verify_commit_(&signature_commit, &branch_id, &repo_id, store, true) + .await?; + + self.new_event(&signature_commit, &sign_obj_blocks, repo_id, &store_repo) + .await?; + + Ok(true) + } + + fn find_signed_past( + 
commit: &Commit, + store: &Store, + ) -> Result, VerifierError> { + let commit_type = commit.get_type().unwrap(); + match commit_type { + CommitType::SyncSignature => { + let mut acks = commit.acks(); + if acks.len() != 1 { + return Err(VerifierError::MalformedSyncSignatureAcks); + } + let ack = &acks[0]; + let deps = commit.deps(); + if deps.len() != 1 { + return Err(VerifierError::MalformedSyncSignatureDeps); + } + let commits = + crate::commits::list_dep_chain_until(deps[0].clone(), &ack.id, &store, false)?; + let mut res = HashSet::with_capacity(commits.len() + 1); + res.extend(commits.into_iter().map(|c| c.reference().unwrap())); + res.insert(acks.pop().unwrap()); + Ok(res) + } + CommitType::AsyncSignature => Ok(HashSet::from_iter(commit.deps().into_iter())), + _ => Ok(HashSet::new()), + } + } + + fn signature_status( + &self, + target: &NuriTargetV0, + ) -> Result, bool)>, VerifierError> { + let (repo_id, branch_id, store_repo) = self.resolve_target(target)?; // TODO deal with targets that are commit heads + let repo = self.get_repo(&repo_id, &store_repo)?; + let branch = repo.branch(&branch_id)?; + let mut res = Vec::with_capacity(branch.current_heads.len()); + let is_unique_head = branch.current_heads.len() == 1; + for head in branch.current_heads.iter() { + let cobj = Commit::load(head.clone(), &repo.store, true)?; + let commit_type = cobj.get_type().unwrap(); + let mut is_snapshot = false; + let has_sig = match commit_type { + CommitType::SyncSignature => true, + CommitType::AsyncSignature => { + let mut past = cobj.acks(); + if is_unique_head && past.len() == 1 { + // we check if the signed commit is a snapshot + let signed_commit = Commit::load(past.pop().unwrap(), &repo.store, true)?; + is_snapshot = match signed_commit.get_type().unwrap() { + CommitType::Snapshot => true, + _ => false, + }; + } + true + } + _ => false, + }; + let sig = if has_sig { + Some(format!( + "{}:{}", + Verifier::find_signed_past(&cobj, &repo.store)? + .into_iter() + .map(|c| c.commit_nuri()) + .collect::>() + .join(":"), + NuriV0::signature_ref(&cobj.get_signature_reference().unwrap()) + )) + } else { + None + }; + res.push((head.id, sig, is_snapshot)); + } + Ok(res) + } + + pub(crate) async fn doc_create_with_store_repo( + &mut self, + crdt: String, + class_name: String, + destination: String, + store_repo: Option, + ) -> Result { + + let class = BranchCrdt::from(crdt, class_name)?; + + let nuri = if store_repo.is_none() { + NuriV0::new_private_store_target() + } else { + NuriV0::from_store_repo(&store_repo.unwrap()) + }; + + let destination = DocCreateDestination::from(destination)?; + + self.doc_create(nuri, DocCreate { + class, + destination, + }).await + } + + pub(crate) async fn sparql_query(&self, nuri: &NuriV0, sparql: String, base: Option) -> Result { + //log_debug!("query={}", query); + let store = self.graph_dataset.as_ref().unwrap(); + let mut parsed = Query::parse(&sparql, base.as_deref()) + .map_err(|e| VerifierError::SparqlError(e.to_string()))?; + let dataset = parsed.dataset_mut(); + //log_debug!("DEFAULTS {:?}", dataset.default_graph_graphs()); + if dataset.has_no_default_dataset() { + //log_info!("DEFAULT GRAPH AS UNION"); + dataset.set_default_graph_as_union(); + } + store + .query(parsed, self.resolve_target_for_sparql(&nuri.target, false)?) 
+            .map_err(|e| VerifierError::SparqlError(e.to_string()))
+    }
+
+    pub(crate) async fn doc_create(
+        &mut self,
+        nuri: NuriV0,
+        doc_create: DocCreate
+    ) -> Result<String, NgError> {
+        //TODO: deal with doc_create.destination
+
+        let user_id = self.user_id().clone();
+        let user_priv_key = self.user_privkey().clone();
+        let primary_class = doc_create.class.class().clone();
+        let (_, _, store) = self.resolve_target(&nuri.target)?;
+        let repo_id = self
+            .new_repo_default(
+                &user_id,
+                &user_priv_key,
+                &store,
+                doc_create.class,
+            )
+            .await?;
+
+        let header_branch_id = {
+            let repo = self.get_repo(&repo_id, &store)?;
+            repo.header_branch().ok_or(NgError::BranchNotFound)?.id
+        };
+
+        // adding an AddRepo commit to the Store branch of store.
+        self.send_add_repo_to_store(&repo_id, &store)
+            .await?;
+
+        // adding an ldp:contains triple to the store main branch
+        let overlay_id = store.outer_overlay();
+        let nuri = NuriV0::repo_id(&repo_id);
+        let nuri_result = NuriV0::repo_graph_name(&repo_id, &overlay_id);
+        let store_nuri = NuriV0::from_store_repo(&store);
+        let store_nuri_string = NuriV0::repo_id(store.repo_id());
+        let query = format!("INSERT DATA {{ <{store_nuri_string}> <http://www.w3.org/ns/ldp#contains> <{nuri}>. }}");
+
+        let ret = self
+            .process_sparql_update(&store_nuri, &query, &None, vec![])
+            .await;
+        if let Err(e) = ret {
+            return Err(NgError::SparqlError(e));
+        }
+
+        self.add_doc(&repo_id, &overlay_id)?;
+
+        // adding the class triple to the header branch
+        let header_branch_nuri = format!("{nuri_result}:b:{}", header_branch_id);
+        let quad = Quad {
+            subject: NamedNode::new_unchecked(&nuri).into(),
+            predicate: NG_ONTOLOGY_CLASS_NAME.clone().into(),
+            object: Literal::new_simple_literal(primary_class).into(),
+            graph_name: NamedNode::new_unchecked(&header_branch_nuri).into(),
+        };
+        let ret = self.prepare_sparql_update(vec![quad], vec![], vec![]).await;
+        if let Err(e) = ret {
+            return Err(NgError::SparqlError(e.to_string()));
+        }
+        Ok(nuri_result)
+    }
+
+    fn get_profile_for_inbox_post(&self, public: bool) -> Result<(StoreRepo, PrivKey), NgError> {
+
+        let from_profile_id = if !public {
+            self.config.protected_store_id.unwrap()
+        } else {
+            self.config.public_store_id.unwrap()
+        };
+
+        let repo = self.repos.get(&from_profile_id).ok_or(NgError::RepoNotFound)?;
+        let inbox = repo.inbox.to_owned().ok_or(NgError::InboxNotFound)?;
+        let store_repo = repo.store.get_store_repo();
+
+        Ok((store_repo.clone(), inbox.clone()))
+    }
+
+    async fn import_contact_from_qrcode(&mut self, repo_id: RepoId, contact: NgQRCodeProfileSharingV0) -> Result<(), VerifierError> {
+
+        let inbox_nuri_string: String = NuriV0::inbox(&contact.inbox);
+        let profile_nuri_string: String = NuriV0::from_store_repo_string(&contact.profile);
+        let a_or_b = if contact.profile.is_public() { "site" } else { "protected" };
+
+        // checking if this contact has already been added
+        match self.sparql_query(
+            &NuriV0::new_entire_user_site(),
+            format!("ASK {{ ?s <{inbox_nuri_string}> . ?s <{profile_nuri_string}> }}"), None).await?
+        {
+            QueryResults::Boolean(true) => {
+                return Err(VerifierError::ContactAlreadyExists);
+            }
+            _ => {}
+        }
+
+        // getting the privkey of the inbox and overlay because we will need it below to send responses.
+        let (from_profile, from_inbox) = self.get_profile_for_inbox_post(contact.profile.is_public())?;
+
+        // get the name and optional email address of the profile we will respond with.
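+        // (the name comes from the vcard:fn triple of our own profile document;
+        // the email, if any, from vcard:hasEmail)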
+        // if we don't have a name, we fail
+        let from_profile_nuri = NuriV0::repo_id(from_profile.repo_id());
+
+        let (name, email) = match self.sparql_query(
+            &NuriV0::from_store_repo(&from_profile),
+            format!("PREFIX vcard: <http://www.w3.org/2006/vcard/ns#>
+                SELECT ?name ?email WHERE {{ <> vcard:fn ?name . OPTIONAL {{ <> vcard:hasEmail ?email }} }}"), Some(from_profile_nuri)).await?
+        {
+            QueryResults::Solutions(mut sol) => {
+                let mut name = None;
+                let mut email = None;
+                if let Some(Ok(s)) = sol.next() {
+                    if let Some(Term::Literal(l)) = s.get("name") {
+                        name = Some(l.value().to_string());
+                    }
+                    if let Some(Term::Literal(l)) = s.get("email") {
+                        email = Some(l.value().to_string());
+                    }
+                }
+                if name.is_none() {
+                    return Err(VerifierError::InvalidProfile)
+                }
+                (name.unwrap(), email)
+            }
+            _ => return Err(VerifierError::InvalidResponse),
+        };
+
+        let contact_doc_nuri_string = NuriV0::repo_id(&repo_id);
+        let contact_doc_nuri = NuriV0::new_repo_target_from_id(&repo_id);
+        let has_email = contact.email.map_or("".to_string(), |email| format!("<> vcard:hasEmail \"{email}\"."));
+
+        let sparql_update = format!("PREFIX ng: <did:ng:x:ng#>
+            PREFIX vcard: <http://www.w3.org/2006/vcard/ns#>
+            INSERT DATA {{ <> ng:{a_or_b} <{profile_nuri_string}>.
+                <> ng:{a_or_b}_inbox <{inbox_nuri_string}>.
+                <> a vcard:Individual .
+                <> vcard:fn \"{}\".
+                {has_email} }}", contact.name);
+        let ret = self
+            .process_sparql_update(&contact_doc_nuri, &sparql_update, &Some(contact_doc_nuri_string), vec![])
+            .await;
+        if let Err(e) = ret {
+            return Err(VerifierError::SparqlError(e));
+        }
+
+        self.update_header(&contact_doc_nuri.target, Some(contact.name), None).await?;
+
+        self.post_to_inbox(InboxPost::new_contact_details(
+            from_profile,
+            from_inbox,
+            contact.profile.outer_overlay(),
+            contact.inbox,
+            None,
+            false,
+            name,
+            email,
+        )?).await?;
+
+        Ok(())
+    }
+
+    pub(crate) async fn search_for_contacts(&self, excluding_profile_id_nuri: Option<String>) -> Result<Vec<(String, String)>, VerifierError> {
+        let extra_conditions = if let Some(s) = excluding_profile_id_nuri {
+            format!("&& NOT EXISTS {{ ?c ng:site <{s}> }} && NOT EXISTS {{ ?c ng:protected <{s}> }}")
+        } else {
+            String::new()
+        };
+        let sparql = format!("PREFIX ng: <did:ng:x:ng#>
+            SELECT ?profile_id ?inbox_id WHERE
+            {{ ?c a <http://www.w3.org/2006/vcard/ns#Individual> .
+                OPTIONAL {{ ?c ng:site ?profile_id . ?c ng:site_inbox ?inbox_id }}
+                OPTIONAL {{ ?c ng:protected ?profile_id . ?c ng:protected_inbox ?inbox_id }}
+                FILTER ( bound(?profile_id) {extra_conditions} )
+            }}");
+        //log_info!("{sparql}");
+        let sols = match self.sparql_query(
+            &NuriV0::new_entire_user_site(),
+            sparql, None).await?
+        {
+            QueryResults::Solutions(sols) => { sols }
+            _ => return Err(VerifierError::SparqlError(NgError::InvalidResponse.to_string())),
+        };
+
+        let mut res = vec![];
+        for sol in sols {
+            match sol {
+                Err(e) => return Err(VerifierError::SparqlError(e.to_string())),
+                Ok(s) => {
+                    if let Some(Term::NamedNode(profile_id)) = s.get("profile_id") {
+                        let profile_nuri = profile_id.as_string();
+                        if let Some(Term::NamedNode(inbox_id)) = s.get("inbox_id") {
+                            let inbox_nuri = inbox_id.as_string();
+                            res.push((profile_nuri.clone(), inbox_nuri.clone()));
+                        }
+                    }
+                }
+            }
+        }
+        Ok(res)
+    }
+
+    pub(crate) async fn process(
+        &mut self,
+        command: &AppRequestCommandV0,
+        nuri: NuriV0,
+        payload: Option<AppRequestPayload>,
+    ) -> Result<AppResponse, NgError> {
+        match command {
+            AppRequestCommandV0::SocialQueryStart => {
+                let (from_profile, contacts_string, degree) = if let Some(AppRequestPayload::V0(AppRequestPayloadV0::SocialQueryStart {
+                    from_profile, contacts, degree
+                })) =
+                    payload
+                { (from_profile, contacts, degree) }
+                else {
+                    return Err(NgError::InvalidPayload);
+                };
+
+                let query_id = nuri.target.repo_id();
+
+                // checking that the query hasn't been started yet
+                match self.sparql_query(
+                    &NuriV0::new_repo_target_from_id(query_id),
+                    format!("ASK {{ <> <did:ng:x:ng#social_query_forwarder> ?forwarder }}"), Some(NuriV0::repo_id(query_id))).await?
+                {
+                    QueryResults::Boolean(true) => {
+                        return Err(NgError::SocialQueryAlreadyStarted);
+                    }
+                    _ => {}
+                }
+
+                // return an error if not connected
+                if self.connected_broker.is_none() {
+                    return Err(NgError::NotConnected);
+                }
+
+                // searching for contacts (all stores, one store, a sparql query, etc..)
+                // (profile_nuri, inbox_nuri)
+                let contacts = if contacts_string.as_str() == "did:ng:d:c" {
+                    self.search_for_contacts(None).await?
+                    // let mut res = vec![];
+                    // res.push(("did:ng:a:rjoQTS4LMBDcuh8CEjmTYrgALeApBg2cgKqyPEuQDUgA".to_string(),"did:ng:d:KMFdOcGjdFBQgA9QNEDWcgEErQ1isbvDe7d_xndNOUMA".to_string()));
+                    // res
+                } else {
+                    return Ok(AppResponse::error(NgError::NotImplemented.to_string()));
+                };
+
+                // if no contact was found, return here with an AppResponse::error
+                if contacts.is_empty() {
+                    return Ok(AppResponse::error(NgError::ContactNotFound.to_string()));
+                }
+
+                // resolve from_profile
+                let from_profile_id = match from_profile.target {
+                    NuriTargetV0::ProtectedProfile => {
+                        self.config.protected_store_id.unwrap()
+                    }
+                    NuriTargetV0::PublicProfile => {
+                        self.config.public_store_id.unwrap()
+                    },
+                    _ => return Err(NgError::InvalidNuri)
+                };
+                let store = {
+                    let repo = self.repos.get(&from_profile_id).ok_or(NgError::RepoNotFound)?;
+                    repo.store.clone()
+                };
+
+                let definition_commit_body_ref = nuri.get_first_commit_ref()?;
+                let block_ids = Commit::collect_block_ids(definition_commit_body_ref.clone(), &store, true)?;
+                let mut blocks = Vec::with_capacity(block_ids.len());
+                //log_info!("blocks nbr {}",block_ids.len());
+                for bid in block_ids.iter() {
+                    blocks.push(store.get(bid)?);
+                }
+
+                // creating the ForwardedSocialQuery in the private store
+                let forwarder = self.doc_create_with_store_repo(
+                    "Graph".to_string(), "social:query:forwarded".to_string(),
+                    "store".to_string(), None // meaning in private store
+                ).await?;
+                let forwarder_nuri = NuriV0::new_from_repo_graph(&forwarder)?;
+                let forwarder_id = forwarder_nuri.target.repo_id().clone();
+                let forwarder_nuri_string = NuriV0::repo_id(&forwarder_id);
+
+                // adding triples in the social_query doc: ng:social_query_forwarder
+                let social_query_doc_nuri_string = NuriV0::repo_id(query_id);
+                let sparql_update = format!("INSERT DATA {{ <{social_query_doc_nuri_string}> <did:ng:x:ng#social_query_forwarder> <{forwarder_nuri_string}>. }}");
+                let ret = self
+                    .process_sparql_update(&nuri, &sparql_update, &None, vec![])
+                    .await;
+                if let Err(e) = ret {
+                    return Err(NgError::SparqlError(e));
+                }
+
+                // adding triples in the forwarder doc: ng:social_query_id and ng:social_query_started
+                let sparql_update = format!("INSERT DATA {{ <> <did:ng:x:ng#social_query_id> <{social_query_doc_nuri_string}> .
+                    <> <did:ng:x:ng#social_query_started> \"{}\"^^<http://www.w3.org/2001/XMLSchema#dateTime> . }}", DateTime::now());
+                let ret = self
+                    .process_sparql_update(&forwarder_nuri, &sparql_update, &Some(forwarder_nuri_string), vec![])
+                    .await;
+                if let Err(e) = ret {
+                    log_err!("{sparql_update}");
+                    return Err(NgError::SparqlError(e));
+                }
+
+                let from_profiles: ((StoreRepo, PrivKey), (StoreRepo, PrivKey)) = self.get_2_profiles()?;
+
+                for (to_profile_nuri, to_inbox_nuri) in contacts {
+
+                    match self.social_query_dispatch(
+                        &to_profile_nuri,
+                        &to_inbox_nuri,
+                        &forwarder_nuri,
+                        &forwarder_id,
+                        &from_profiles,
+                        query_id,
+                        &definition_commit_body_ref,
+                        &blocks,
+                        degree
+                    ).await {
+                        Ok(_) => {}
+                        Err(e) => return Ok(AppResponse::error(e.to_string())),
+                    }
+                }
+
+                return Ok(AppResponse::ok());
+
+                // // FOR THE SAKE OF TESTING
+                // let to_profile_nuri = NuriV0::public_profile(&from_profile_id);
+                // let to_inbox_nuri: String = NuriV0::inbox(&from_inbox.to_pub());
+                // let post = InboxPost::new_social_query_request(
+                //     store.get_store_repo().clone(),
+                //     from_inbox,
+                //     forwarder_id,
+                //     to_profile_nuri,
+                //     to_inbox_nuri,
+                //     None,
+                //     *query_id,
+                //     definition_commit_body_ref,
+                //     blocks,
+                //     degree,
+                // )?;
+                // return match self.client_request::<_,()>(post).await
+                // {
+                //     Err(e) => Ok(AppResponse::error(e.to_string())),
+                //     Ok(SoS::Stream(_)) => Ok(AppResponse::error(NgError::InvalidResponse.to_string())),
+                //     Ok(SoS::Single(_)) => Ok(AppResponse::ok()),
+                // };
+            }
+            AppRequestCommandV0::QrCodeProfile => {
+                let size = if let Some(AppRequestPayload::V0(AppRequestPayloadV0::QrCodeProfile(size))) =
+                    payload
+                {
+                    size
+                } else {
+                    return Err(NgError::InvalidPayload);
+                };
+                let public = match nuri.target {
+                    NuriTargetV0::PublicProfile => true,
+                    NuriTargetV0::ProtectedProfile => false,
+                    _ => return Err(NgError::InvalidPayload)
+                };
+                return match self.get_qrcode_for_profile(public, size).await {
+                    Err(e) => Ok(AppResponse::error(e.to_string())),
+                    Ok(qrcode) => Ok(AppResponse::text(qrcode)),
+                };
+            }
+            AppRequestCommandV0::QrCodeProfileImport => {
+                let profile = if let Some(AppRequestPayload::V0(AppRequestPayloadV0::QrCodeProfileImport(text))) =
+                    payload
+                {
+                    let ser = base64_url::decode(&text).map_err(|_| NgError::SerializationError)?;
+                    let code: NgQRCode = serde_bare::from_slice(&ser)?;
+                    let profile = match code {
+                        NgQRCode::ProfileSharingV0(profile) => profile,
+                        _ => return Err(NgError::InvalidPayload)
+                    };
+                    profile
+                } else {
+                    return Err(NgError::InvalidPayload);
+                };
+                let repo_id = match nuri.target {
+                    NuriTargetV0::Repo(id) => id,
+                    _ => return Err(NgError::InvalidPayload)
+                };
+                return match self.import_contact_from_qrcode(repo_id, profile).await {
+                    Err(e) => Ok(AppResponse::error(e.to_string())),
+                    Ok(()) => Ok(AppResponse::ok()),
+                };
+            }
+            AppRequestCommandV0::Header => {
+                if let Some(AppRequestPayload::V0(AppRequestPayloadV0::Header(doc_header))) =
+                    payload
+                {
+                    return match self.update_header(&nuri.target, doc_header.title, doc_header.about).await {
+                        Ok(_) => Ok(AppResponse::ok()),
+                        Err(e) => Ok(AppResponse::error(e.to_string()))
+                    };
+                } else {
+                    return Err(NgError::InvalidPayload);
+                }
+            }
+            AppRequestCommandV0::Create => {
+                return if let Some(AppRequestPayload::V0(AppRequestPayloadV0::Create(doc_create))) =
+                    payload
+                {
+                    match self.doc_create(nuri, doc_create).await {
+                        Err(NgError::SparqlError(e)) => Ok(AppResponse::error(e)),
+                        Err(e) => Err(e),
+                        Ok(nuri_result) => Ok(AppResponse::V0(AppResponseV0::Nuri(nuri_result)))
+                    }
+                } else {
+                    Err(NgError::InvalidPayload)
+                };
+            }
+            AppRequestCommandV0::Fetch(fetch) => match fetch {
+                AppFetchContentV0::Header => {
+                    let (repo_id, branch_id, store_repo) =
+                        match self.resolve_header_branch(&nuri.target) {
+                            Err(e) => return Ok(AppResponse::error(e.to_string())),
+                            Ok(a) => a,
+                        };
+                    self.open_branch(&repo_id, &branch_id, true).await?;
+                    let graph_name = NuriV0::branch_repo_graph_name(
+                        &branch_id,
+                        &repo_id,
+                        &store_repo.overlay_id_for_storage_purpose(),
+                    );
+                    let base = NuriV0::repo_id(&repo_id);
+                    let oxistore = self.graph_dataset.as_ref().unwrap();
+                    let parsed = Query::parse(
+                        &format!("SELECT ?class ?title ?about WHERE {{ OPTIONAL {{ <> <{NG_ONTOLOGY_CLASS}> ?class }} OPTIONAL {{ <> <{NG_ONTOLOGY_ABOUT}> ?about }} OPTIONAL {{ <> <{NG_ONTOLOGY_TITLE}> ?title }} }}"), Some(&base));
+                    if parsed.is_err() {
+                        return Ok(AppResponse::error(parsed.unwrap_err().to_string()));
+                    }
+                    let results = oxistore.query(parsed.unwrap(), Some(graph_name));
+                    match results {
+                        Err(e) => return Ok(AppResponse::error(e.to_string())),
+                        Ok(QueryResults::Solutions(mut sol)) => {
+                            let mut title = None;
+                            let mut about = None;
+                            let mut class = None;
+                            if let Some(Ok(s)) = sol.next() {
+                                if let Some(Term::Literal(l)) = s.get("title") {
+                                    title = Some(l.value().to_string());
+                                }
+                                if let Some(Term::Literal(l)) = s.get("about") {
+                                    about = Some(l.value().to_string());
+                                }
+                                if let Some(Term::Literal(l)) = s.get("class") {
+                                    class = Some(l.value().to_string());
+                                }
+                            }
+                            return Ok(AppResponse::V0(AppResponseV0::Header(AppHeader {
+                                about,
+                                title,
+                                class,
+                            })));
+                        }
+                        _ => return Err(NgError::InvalidResponse),
+                    };
+                }
+                AppFetchContentV0::ReadQuery => {
+                    if let Some(AppRequestPayload::V0(AppRequestPayloadV0::Query(DocQuery::V0 {
+                        sparql,
+                        base,
+                    }))) = payload
+                    {
+                        let results = self.sparql_query(&nuri, sparql, base).await;
+                        return Ok(match results {
+                            Err(VerifierError::SparqlError(s)) => AppResponse::error(s),
+                            Err(e) => AppResponse::error(e.to_string()),
+                            Ok(qr) => {
+                                let res = Self::handle_query_results(qr);
+                                match res {
+                                    Ok(ok) => ok,
+                                    Err(s) => AppResponse::error(s),
+                                }
+                            }
+                        });
+                    } else {
+                        return Err(NgError::InvalidPayload);
+                    }
+                }
+                AppFetchContentV0::WriteQuery => {
+                    if !nuri.is_valid_for_sparql_update() {
+                        return Err(NgError::InvalidNuri);
+                    }
+                    return if let Some(AppRequestPayload::V0(AppRequestPayloadV0::Query(
+                        DocQuery::V0 { sparql, base },
+                    ))) = payload
+                    {
+                        Ok(
+                            match self
+                                .process_sparql_update(
+                                    &nuri,
+                                    &sparql,
+                                    &base,
+                                    self.get_peer_id_for_skolem(),
+                                )
+                                .await
+                            {
+                                Err(e) => AppResponse::error(e),
+                                Ok(commits) => AppResponse::commits(commits),
+                            },
+                        )
+                    } else {
+                        Err(NgError::InvalidPayload)
+                    };
+                }
+                AppFetchContentV0::Update => {
+                    if !nuri.is_valid_for_discrete_update() {
+                        return Err(NgError::InvalidNuri);
+                    }
+                    return if let Some(AppRequestPayload::V0(AppRequestPayloadV0::Update(update))) =
+                        payload
+                    {
+                        //TODO: deal with update.graph
+                        //TODO: verify that update.heads are the same as what the Verifier knows
+                        if let Some(discrete) = update.discrete {
+                            let (repo_id, branch_id, store_repo) =
+                                match self.resolve_target(&nuri.target) {
+                                    Err(e) => return Ok(AppResponse::error(e.to_string())),
+                                    Ok(a) => a,
+                                };
+
+                            let patch: DiscreteTransaction = discrete.into();
+
+                            let transac = TransactionBody {
+                                body_type: TransactionBodyType::Discrete,
+                                graph: None,
+                                discrete: Some(patch.clone()),
+                            };
+
+                            let transaction_commit_body = CommitBodyV0::AsyncTransaction(
+                                Transaction::V0(serde_bare::to_vec(&transac)?),
+                            );
+
+                            let commit = self
+                                .new_transaction_commit(
+                                    transaction_commit_body,
+                                    &repo_id,
+                                    &branch_id,
+                                    &store_repo,
+                                    vec![], //TODO deps
+                                    vec![],
+                                )
+                                .await?;
+
+                            let repo = self.get_repo(&repo_id, &store_repo)?;
+                            let commit_info: CommitInfoJs = (&commit.as_info(repo)).into();
+
+                            let crdt: &BranchCrdt = &repo.branch(&branch_id)?.crdt.clone();
+                            self.update_discrete(
+                                patch,
+                                &crdt,
+                                &branch_id,
+                                commit.id().unwrap(),
+                                commit_info,
+                            )
+                            .await?;
+                        }
+
+                        Ok(AppResponse::ok())
+                    } else {
+                        Err(NgError::InvalidPayload)
+                    };
+                }
+                AppFetchContentV0::RdfDump => {
+                    let store = self.graph_dataset.as_ref().unwrap();
+
+                    let results = store.iter();
+
+                    let vec: Vec<String> = results
+                        .map(|q| match q {
+                            Ok(o) => o.to_string(),
+                            Err(e) => e.to_string(),
+                        })
+                        .collect();
+
+                    return Ok(AppResponse::V0(AppResponseV0::Text(vec.join("\n"))));
+                }
+                AppFetchContentV0::CurrentHeads => {
+
+                    if nuri.target.is_repo_id() {
+                        if let Ok(s) = self.get_main_branch_current_heads_nuri(nuri.target.repo_id()) {
+                            return Ok(AppResponse::V0(AppResponseV0::Text(s)));
+                        }
+                    }
+                    return Ok(AppResponse::error(VerifierError::InvalidNuri.to_string()));
+                }
+
+                AppFetchContentV0::History => {
+                    if !nuri.is_valid_for_sparql_update() {
+                        return Err(NgError::InvalidNuri);
+                    }
+                    return Ok(match self.history_for_nuri(&nuri.target) {
+                        Err(e) => AppResponse::error(e.to_string()),
+                        Ok(history) => AppResponse::V0(AppResponseV0::History(AppHistory {
+                            history: history.0,
+                            swimlane_state: history.1,
+                        })),
+                    });
+                }
+                AppFetchContentV0::SignatureStatus => {
+                    if !nuri.is_valid_for_sparql_update() {
+                        return Err(NgError::InvalidNuri);
+                    }
+                    return Ok(match self.signature_status(&nuri.target) {
+                        Err(e) => AppResponse::error(e.to_string()),
+                        Ok(status) => AppResponse::V0(AppResponseV0::SignatureStatus(
+                            status
+                                .into_iter()
+                                .map(|(commitid, signature, is_snapshot)| {
+                                    (commitid.to_string(), signature, is_snapshot)
+                                })
+                                .collect(),
+                        )),
+                    });
+                }
+                AppFetchContentV0::SignedSnapshotRequest => {
+                    if !nuri.is_valid_for_sparql_update() {
+                        return Err(NgError::InvalidNuri);
+                    }
+                    return Ok(match self.signed_snapshot_request(&nuri.target).await {
+                        Err(e) => AppResponse::error(e.to_string()),
+                        Ok(immediate) => {
+                            if immediate {
+                                AppResponse::V0(AppResponseV0::True)
+                            } else {
+                                AppResponse::V0(AppResponseV0::False)
+                            }
+                        }
+                    });
+                }
+                AppFetchContentV0::SignatureRequest => {
+                    if !nuri.is_valid_for_sparql_update() {
+                        return Err(NgError::InvalidNuri);
+                    }
+                    return Ok(match self.signature_request(&nuri.target).await {
+                        Err(e) => AppResponse::error(e.to_string()),
+                        Ok(immediate) => {
+                            if immediate {
+                                AppResponse::V0(AppResponseV0::True)
+                            } else {
+                                AppResponse::V0(AppResponseV0::False)
+                            }
+                        }
+                    });
+                }
+                _ => unimplemented!(),
+            },
+            AppRequestCommandV0::FilePut => match payload {
+                None => return Err(NgError::InvalidPayload),
+                Some(AppRequestPayload::V0(v0)) => match v0 {
+                    AppRequestPayloadV0::AddFile(add) => {
+                        let (repo_id, branch, store_repo) =
+                            self.open_for_target(&nuri.target, true).await?;
+                        //log_info!("GOT ADD FILE {:?}", add);
+
+                        if self.connected_broker.is_some() {
+                            self.put_all_blocks_of_file(&add.object, &repo_id, &store_repo)
+                                .await?;
+                        }
+
+                        let add_file_commit_body = CommitBodyV0::AddFile(AddFile::V0(AddFileV0 {
CommitBodyV0::AddFile(AddFile::V0(AddFileV0 { + name: add.filename, + metadata: vec![], + })); + + self.new_commit( + add_file_commit_body, + &repo_id, + &branch, + &store_repo, + &vec![], + vec![], + vec![add.object], + ) + .await?; + } + AppRequestPayloadV0::SmallFilePut(_small) => { + unimplemented!(); + } + AppRequestPayloadV0::RandomAccessFilePut(content_type) => { + let (repo_id, _, store_repo) = self.resolve_target(&nuri.target)?; + let repo = self.get_repo(&repo_id, &store_repo)?; + let id = self.start_upload(content_type, Arc::clone(&repo.store)); + return Ok(AppResponse::V0(AppResponseV0::FileUploading(id))); + } + AppRequestPayloadV0::RandomAccessFilePutChunk((id, chunk)) => { + if chunk.len() > 0 { + self.continue_upload(id, &chunk)?; + } else { + let reference = self.finish_upload(id)?; + return Ok(AppResponse::V0(AppResponseV0::FileUploaded(reference))); + } + } + _ => return Err(NgError::InvalidPayload), + }, + }, + + _ => return Err(NgError::NotImplemented), + } + Ok(AppResponse::V0(AppResponseV0::Ok)) + } +} diff --git a/ng-verifier/src/rocksdb_user_storage.rs b/ng-verifier/src/rocksdb_user_storage.rs new file mode 100644 index 0000000..7c12fec --- /dev/null +++ b/ng-verifier/src/rocksdb_user_storage.rs @@ -0,0 +1,198 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! RocksDb Backend for UserStorage trait + +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::{Arc, RwLock}; + +use either::Either::{Left, Right}; + +use ng_net::app_protocol::{ + AppTabBranchInfo, AppTabDocInfo, AppTabInfo, AppTabStoreInfo, FileName, NuriV0, +}; +use ng_repo::block_storage::BlockStorage; +use ng_repo::log::*; +use ng_repo::repo::{BranchInfo, Repo}; +use ng_repo::store::Store; +use ng_repo::{errors::StorageError, types::*}; + +use ng_storage_rocksdb::kcv_storage::RocksDbKCVStorage; + +use crate::user_storage::branch::*; +use crate::user_storage::repo::*; +use crate::user_storage::*; + +pub(crate) struct RocksDbUserStorage { + user_storage: RocksDbKCVStorage, +} + +impl RocksDbUserStorage { + pub fn open(path: &PathBuf, master_key: [u8; 32]) -> Result { + Ok(RocksDbUserStorage { + user_storage: RocksDbKCVStorage::open(path, master_key)?, + }) + } +} + +impl UserStorage for RocksDbUserStorage { + // fn repo_id_to_store_overlay(&self, id: &RepoId) -> Result { + // unimplemented!(); + // } + + fn get_all_store_and_repo_ids(&self) -> Result>, StorageError> { + RepoStorage::get_all_store_and_repo_ids(&self.user_storage) + } + + fn load_store( + &self, + store_repo: &StoreRepo, + block_storage: Arc>, + ) -> Result { + RepoStorage::load( + store_repo.repo_id(), + Right(block_storage), + &self.user_storage, + ) + } + + fn load_repo(&self, repo_id: &RepoId, store: Arc) -> Result { + RepoStorage::load(repo_id, Left(store), &self.user_storage) + } + + fn save_repo(&self, repo: &Repo) -> Result<(), StorageError> { + RepoStorage::create_from_repo(repo, &self.user_storage)?; + Ok(()) + } + + fn add_branch(&self, repo_id: &RepoId, branch_info: &BranchInfo) -> Result<(), StorageError> { + RepoStorage::add_branch_from_info(repo_id, branch_info, &self.user_storage) + } + + fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError> { + RepoStorage::update_signer_cap(signer_cap, 
&self.user_storage) + } + + fn update_inbox_cap(&self, repo_id: &RepoId, overlay: &OverlayId, priv_key: &PrivKey) -> Result<(), StorageError> { + RepoStorage::update_inbox_cap(repo_id, overlay, priv_key, &self.user_storage) + } + + fn update_certificate( + &self, + repo_id: &RepoId, + certificate: &ObjectRef, + ) -> Result<(), StorageError> { + RepoStorage::update_certificate(repo_id, certificate, &self.user_storage) + } + + fn get_signer_cap(&self, repo_id: &RepoId) -> Result { + RepoStorage::open(repo_id, &self.user_storage)?.get_signer_cap() + } + + fn get_inbox_cap(&self, repo_id: &RepoId) -> Result { + RepoStorage::open(repo_id, &self.user_storage)?.get_inbox_cap() + } + + fn update_branch_current_heads( + &self, + _repo_id: &RepoId, + branch_id: &BranchId, + new_heads: Vec, + ) -> Result<(), StorageError> { + let branch = BranchStorage::new(branch_id, &self.user_storage)?; + if let Err(e) = branch.replace_current_heads(new_heads) { + log_err!("error while updating branch current head {:?}", e); + Err(e) + } else { + Ok(()) + } + } + + fn branch_set_discrete_state( + &self, + branch: BranchId, + state: Vec, + ) -> Result<(), StorageError> { + let branch = BranchStorage::open(&branch, &self.user_storage)?; + branch.set_discrete_state(state) + } + + fn branch_get_discrete_state(&self, branch: &BranchId) -> Result, StorageError> { + let branch = BranchStorage::new(&branch, &self.user_storage)?; + branch + .get_discrete_state() + .map_err(|_| StorageError::NoDiscreteState) + } + + fn branch_add_file( + &self, + commit_id: ObjectId, + branch: BranchId, + file: FileName, + ) -> Result<(), StorageError> { + let branch = BranchStorage::new(&branch, &self.user_storage)?; + branch.add_file(&commit_id, &file) + } + fn branch_get_all_files(&self, branch: &BranchId) -> Result, StorageError> { + BranchStorage::get_all_files(&branch, &self.user_storage) + } + + fn branch_get_tab_info( + &self, + branch: &BranchId, + repo: &RepoId, + store: &StoreRepo, + ) -> Result { + let branch_info = BranchStorage::load(branch, &self.user_storage)?; + + let branch_tab_info = AppTabBranchInfo { + id: Some(format!("b:{}", branch.to_string())), + readcap: Some(branch_info.read_cap.unwrap().readcap_nuri()), + class: Some(branch_info.crdt.class().clone()), + comment_branch: None, //TODO + }; + + let root_branch_info = BranchStorage::load(repo, &self.user_storage)?; + + let doc_tab_info = AppTabDocInfo { + nuri: Some(format!("o:{}", repo.to_string())), + is_store: Some(store.repo_id() == repo), + is_member: Some(root_branch_info.read_cap.unwrap().readcap_nuri()), // TODO + authors: None, // TODO + inbox: None, // TODO + can_edit: Some(true), + title: None, + icon: None, + description: None, + }; + + let store_tab_info = AppTabStoreInfo { + repo: Some(store.clone()), + overlay: Some(format!( + "v:{}", + store.overlay_id_for_read_purpose().to_string() + )), + store_type: Some(store.store_type_for_app()), + has_outer: None, //TODO + inner: None, //TODO + is_member: None, //TODO + readcap: None, //TODO + title: None, + icon: None, + description: None, + }; + + Ok(AppTabInfo { + branch: Some(branch_tab_info), + doc: Some(doc_tab_info), + store: Some(store_tab_info), + }) + } +} diff --git a/ng-verifier/src/site.rs b/ng-verifier/src/site.rs new file mode 100644 index 0000000..80801a3 --- /dev/null +++ b/ng-verifier/src/site.rs @@ -0,0 +1,333 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. 
+ * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. +*/ + +//! A Site of an Individual or Org (with 3P stores: Public, Protected, Private) + +use serde::{Deserialize, Serialize}; + +use ng_repo::errors::NgError; +use ng_repo::types::*; +use ng_repo::utils::generate_keypair; + +use crate::verifier::Verifier; + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct SiteV0 { + pub site_type: SiteType, + + pub id: PubKey, + + pub name: SiteName, + + // Identity::OrgPublicStore or Identity::IndividualPublicStore + pub public: SiteStore, + + // Identity::OrgProtectedStore or Identity::IndividualProtectedStore + pub protected: SiteStore, + + // Identity::OrgPrivateStore or Identity::IndividualPrivateStore + pub private: SiteStore, + + // Only for IndividualSite: TODO reorganize those 2 fields + #[doc(hidden)] + pub cores: Vec<(PubKey, Option<[u8; 32]>)>, + #[doc(hidden)] + pub bootstraps: Vec, +} + +impl SiteV0 { + pub fn get_individual_user_priv_key(&self) -> Option { + match &self.site_type { + SiteType::Individual((priv_key, _)) => Some(priv_key.clone()), + _ => None, + } + } + + pub fn get_individual_site_private_store_read_cap(&self) -> Option { + match &self.site_type { + SiteType::Individual((_, read_cap)) => Some(read_cap.clone()), + _ => None, + } + } + + fn site_store_to_store_repo(site_store: &SiteStore) -> StoreRepo { + StoreRepo::V0(match site_store.store_type { + SiteStoreType::Public => StoreRepoV0::PublicStore(site_store.id), + SiteStoreType::Protected => StoreRepoV0::ProtectedStore(site_store.id), + SiteStoreType::Private => StoreRepoV0::PrivateStore(site_store.id), + }) + } + + pub fn get_site_store_id(&self, store_type: SiteStoreType) -> PubKey { + match store_type { + SiteStoreType::Public => self.public.id, + SiteStoreType::Protected => self.protected.id, + SiteStoreType::Private => self.private.id, + } + } + + async fn create_individual_( + user_priv_key: PrivKey, + verifier: &mut Verifier, + site_name: SiteName, + ) -> Result { + let site_pubkey = user_priv_key.to_pub(); + + let (public_store_privkey, public_store_pubkey) = generate_keypair(); + + let (protected_store_privkey, protected_store_pubkey) = generate_keypair(); + + let (private_store_privkey, private_store_pubkey) = generate_keypair(); + + let public = SiteStore { + id: public_store_pubkey, + store_type: SiteStoreType::Public, + }; + + let protected = SiteStore { + id: protected_store_pubkey, + store_type: SiteStoreType::Protected, + }; + + let private = SiteStore { + id: private_store_pubkey, + store_type: SiteStoreType::Private, + }; + + let public_store = Self::site_store_to_store_repo(&public); + let protected_store = Self::site_store_to_store_repo(&protected); + let private_store = Self::site_store_to_store_repo(&private); + + verifier.reserve_more(37)?; + + let mut signer_caps = Vec::with_capacity(3); + + let public_repo = verifier + .new_store_default( + &site_pubkey, + &user_priv_key, + public_store_privkey, + &public_store, + false, + ) + .await?; + + let public_store_update: StoreUpdate = public_repo.store.as_ref().into(); + signer_caps.push(public_repo.signer.to_owned().unwrap()); + + // Creating the Inbox commit body about public store. 
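+ // The AddInboxCap body carries the repo id, its outer overlay and the inbox private key; + // like the StoreUpdate bodies, it is committed further below on the private store's user branch.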
+ let public_store_inbox_commit_body = + CommitBody::V0(CommitBodyV0::AddInboxCap( + AddInboxCap::new_v0(public_repo.id, public_repo.store.outer_overlay(), public_repo.inbox.to_owned().unwrap()))); + + let protected_repo = verifier + .new_store_default( + &site_pubkey, + &user_priv_key, + protected_store_privkey, + &protected_store, + false, + ) + .await?; + + let protected_store_update: StoreUpdate = protected_repo.store.as_ref().into(); + signer_caps.push(protected_repo.signer.to_owned().unwrap()); + + // Creating the Inbox commit body about protected store. + let protected_store_inbox_commit_body = + CommitBody::V0(CommitBodyV0::AddInboxCap( + AddInboxCap::new_v0(protected_repo.id, protected_repo.store.outer_overlay(),protected_repo.inbox.to_owned().unwrap()))); + + let private_repo = verifier + .new_store_default( + &site_pubkey, + &user_priv_key, + private_store_privkey, + &private_store, + true, + ) + .await?; + + signer_caps.push(private_repo.signer.to_owned().unwrap()); + let user_branch = private_repo.user_branch().unwrap(); + + // Creating the StoreUpdate about public store. + let public_store_update_commit_body = + CommitBody::V0(CommitBodyV0::StoreUpdate(public_store_update)); + + let public_store_update_commit = Commit::new_with_body_acks_deps_and_save( + &user_priv_key, + &site_pubkey, + user_branch.id, + QuorumType::NoSigning, + vec![], + user_branch.current_heads.clone(), + public_store_update_commit_body, + &private_repo.store, + )?; + + // Creating the StoreUpdate about protected store. + let protected_store_update_commit_body = + CommitBody::V0(CommitBodyV0::StoreUpdate(protected_store_update)); + + let protected_store_update_commit = Commit::new_with_body_acks_deps_and_save( + &user_priv_key, + &site_pubkey, + user_branch.id, + QuorumType::NoSigning, + vec![], + vec![public_store_update_commit.reference().unwrap()], + protected_store_update_commit_body, + &private_repo.store, + )?; + + // Creating the Inbox commit about public store. + let public_store_inbox_commit = Commit::new_with_body_acks_deps_and_save( + &user_priv_key, + &site_pubkey, + user_branch.id, + QuorumType::NoSigning, + vec![], + vec![protected_store_update_commit.reference().unwrap()], + public_store_inbox_commit_body, + &private_repo.store, + )?; + + // Creating the Inbox commit about protected store. 
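+ // Each commit references the one before it, so the two StoreUpdate and two AddInboxCap + // commits form one linear chain on the user branch; its tip becomes current_head, + // which the AddSignerCap commits created below extend in turn.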
+ let protected_store_inbox_commit = Commit::new_with_body_acks_deps_and_save( + &user_priv_key, + &site_pubkey, + user_branch.id, + QuorumType::NoSigning, + vec![], + vec![public_store_inbox_commit.reference().unwrap()], + protected_store_inbox_commit_body, + &private_repo.store, + )?; + + let mut current_head = protected_store_inbox_commit.reference().unwrap(); + + let private_repo_id = private_repo.id; + let private_store_repo = private_repo.store.get_store_repo().clone(); + let private_repo_read_cap = private_repo.read_cap.to_owned().unwrap(); + + // Creating the AddSignerCap for each store + let mut commits = Vec::with_capacity(7); + commits.push((public_store_update_commit, vec![])); + commits.push((protected_store_update_commit, vec![])); + commits.push((public_store_inbox_commit, vec![])); + commits.push((protected_store_inbox_commit, vec![])); + + for cap in signer_caps { + let add_signer_cap_commit_body = CommitBody::V0(CommitBodyV0::AddSignerCap( + AddSignerCap::V0(AddSignerCapV0 { + cap, + metadata: vec![], + }), + )); + + let add_signer_cap_commit = Commit::new_with_body_acks_deps_and_save( + &user_priv_key, + &site_pubkey, + user_branch.id, + QuorumType::NoSigning, + vec![], + vec![current_head], + add_signer_cap_commit_body, + &private_repo.store, + )?; + current_head = add_signer_cap_commit.reference().unwrap(); + commits.push((add_signer_cap_commit, vec![])); + } + + // update the current_heads + //verifier.update_current_heads(&private_repo_id, &user_branch_id, vec![current_head])?; + // this is now done in send_or_save_event_to_outbox + + // sending the additional events + verifier + .new_events(commits, private_repo_id, &private_store_repo) + .await?; + + let site = Self { + site_type: SiteType::Individual((user_priv_key, private_repo_read_cap)), + id: site_pubkey, + name: site_name, + public, + protected, + private, + cores: vec![], + bootstraps: vec![], + }; + + verifier.config.private_store_read_cap = site.get_individual_site_private_store_read_cap(); + verifier.config.private_store_id = Some(site.private.id); + verifier.config.protected_store_id = Some(site.protected.id); + verifier.config.public_store_id = Some(site.public.id); + + Ok(site) + } + + pub async fn create_individual( + name: String, + user_priv_key: PrivKey, + verifier: &mut Verifier, + ) -> Result { + Self::create_individual_(user_priv_key, verifier, SiteName::Name(name)).await + } + + pub async fn create_personal( + user_priv_key: PrivKey, + verifier: &mut Verifier, + ) -> Result { + let site = Self::create_individual_(user_priv_key, verifier, SiteName::Personal).await?; + + Ok(site) + } + + pub async fn create_org(name: String) -> Result { + // TODO: implement correctly. 
see create_personal/create_individual + + let (_site_privkey, site_pubkey) = generate_keypair(); + + let (_public_store_privkey, public_store_pubkey) = generate_keypair(); + + let (_protected_store_privkey, protected_store_pubkey) = generate_keypair(); + + let (_private_store_privkey, private_store_pubkey) = generate_keypair(); + + let public = SiteStore { + id: public_store_pubkey, + store_type: SiteStoreType::Public, + }; + + let protected = SiteStore { + id: protected_store_pubkey, + store_type: SiteStoreType::Protected, + }; + + let private = SiteStore { + id: private_store_pubkey, + store_type: SiteStoreType::Private, + }; + + Ok(Self { + site_type: SiteType::Org, + id: site_pubkey, + name: SiteName::Name(name), + public, + protected, + private, + cores: vec![], + bootstraps: vec![], + }) + } +} diff --git a/ng-verifier/src/types.rs b/ng-verifier/src/types.rs new file mode 100644 index 0000000..197e1e8 --- /dev/null +++ b/ng-verifier/src/types.rs @@ -0,0 +1,356 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Types for Verifier + +use core::fmt; +use std::path::PathBuf; + +use serde::{Deserialize, Serialize}; +//use oxigraph::io::{RdfFormat, RdfParser, RdfSerializer}; +//use oxigraph::store::Store; +//use oxigraph::model::GroundQuad; +//use yrs::{StateVector, Update}; +use lazy_static::lazy_static; + +use ng_net::{app_protocol::*, types::*}; +use ng_oxigraph::oxrdf::{ + GraphName, GraphNameRef, NamedNode, Quad, Subject, Term, Triple, TripleRef, +}; +use ng_repo::{errors::*, types::*}; + +pub const NG_ONTOLOGY: &str = "did:ng:x:ng#"; + +pub const NG_ONTOLOGY_ABOUT: &str = "did:ng:x:ng#a"; +pub const NG_ONTOLOGY_TITLE: &str = "did:ng:x:ng#n"; +pub const NG_ONTOLOGY_CLASS: &str = "did:ng:x:ng#c"; + +lazy_static! 
{ + pub static ref NG_ONTOLOGY_ABOUT_NAME: NamedNode = NamedNode::new_unchecked(NG_ONTOLOGY_ABOUT); + pub static ref NG_ONTOLOGY_TITLE_NAME: NamedNode = NamedNode::new_unchecked(NG_ONTOLOGY_TITLE); + pub static ref NG_ONTOLOGY_CLASS_NAME: NamedNode = NamedNode::new_unchecked(NG_ONTOLOGY_CLASS); +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct GraphTransaction { + pub inserts: Vec, + pub removes: Vec, +} + +const TOKENIZED_COMMIT: &str = "did:ng:_"; + +impl GraphTransaction { + pub(crate) fn as_patch(&self) -> GraphPatch { + GraphPatch { + inserts: serde_bare::to_vec(&self.inserts).unwrap(), + removes: serde_bare::to_vec(&self.removes).unwrap(), + } + } + pub(crate) fn tokenize_with_commit_id(&mut self, commit_id: ObjectId, repo_id: &RepoId) { + for triple in self.inserts.iter_mut() { + if let Subject::NamedNode(nn) = &triple.subject { + if nn.as_str().starts_with(TOKENIZED_COMMIT) { + let mut str = nn.as_string().clone(); + let new_iri = NuriV0::tokenized_commit(repo_id, &commit_id); + str.replace_range(..8, &new_iri); + triple.subject = NamedNode::new_unchecked(str).into(); + } + } + if triple.predicate.as_str().starts_with(TOKENIZED_COMMIT) { + let mut str = triple.predicate.as_string().clone(); + let new_iri = NuriV0::tokenized_commit(repo_id, &commit_id); + str.replace_range(..8, &new_iri); + triple.predicate = NamedNode::new_unchecked(str); + } + if let Term::NamedNode(nn) = &triple.object { + if nn.as_str().starts_with(TOKENIZED_COMMIT) { + let mut str = nn.as_string().clone(); + let new_iri = NuriV0::tokenized_commit(repo_id, &commit_id); + str.replace_range(..8, &new_iri); + triple.object = NamedNode::new_unchecked(str).into(); + } + } + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum DiscreteTransaction { + /// A serialization of a yrs::Update + #[serde(with = "serde_bytes")] + YMap(Vec), + #[serde(with = "serde_bytes")] + YArray(Vec), + #[serde(with = "serde_bytes")] + YXml(Vec), + #[serde(with = "serde_bytes")] + YText(Vec), + /// An automerge::Patch + #[serde(with = "serde_bytes")] + Automerge(Vec), +} + +impl From for DiscreteTransaction { + fn from(update: DiscreteUpdate) -> Self { + match update { + DiscreteUpdate::Automerge(v) => DiscreteTransaction::Automerge(v), + DiscreteUpdate::YMap(v) => DiscreteTransaction::YMap(v), + DiscreteUpdate::YArray(v) => DiscreteTransaction::YArray(v), + DiscreteUpdate::YXml(v) => DiscreteTransaction::YXml(v), + DiscreteUpdate::YText(v) => DiscreteTransaction::YText(v), + } + } +} + +impl DiscreteTransaction { + pub fn to_vec(&self) -> Vec { + match self { + Self::YMap(v) + | Self::YArray(v) + | Self::YXml(v) + | Self::YText(v) + | Self::Automerge(v) => v.to_vec(), + } + } + pub fn as_slice(&self) -> &[u8] { + match self { + Self::YMap(v) + | Self::YArray(v) + | Self::YXml(v) + | Self::YText(v) + | Self::Automerge(v) => v, + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum TransactionBodyType { + Graph, + Discrete, + Both, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct TransactionBody { + pub body_type: TransactionBodyType, + pub graph: Option, + pub discrete: Option, +} + +#[doc(hidden)] +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum SessionPeerLastSeq { + V0(u64), + V1((u64, Sig)), +} + +impl SessionPeerLastSeq { + pub fn ser(&self) -> Result, NgError> { + Ok(serde_bare::to_vec(self)?) + } + pub fn deser(ser: &[u8]) -> Result { + Ok(serde_bare::from_slice(ser).map_err(|_| NgError::SerializationError)?) 
+ } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum VerifierType { + /// nothing will be saved on disk during the session + Memory, + /// will save all user data locally, with RocksDb backend on native, and on webapp, will save only the session and wallet, not the data itself + Save, + /// the verifier will be remote. a Noise connection will be opened + /// optional peerId to connect to. If None, will try any that has the flag `can_verify` + Remote(Option), + /// IndexedDb based rocksdb compiled to WASM... not ready yet. obviously. only works in the browser + WebRocksDb, + // Server, this type is for Server Broker that act as verifier. They answer to VerifierType::Remote types of verifier. deprecated +} + +impl VerifierType { + pub fn is_memory(&self) -> bool { + match self { + Self::Memory => true, + _ => false, + } + } + pub fn is_persistent(&self) -> bool { + match self { + Self::Save => true, + _ => false, + } + } + + pub fn is_remote(&self) -> bool { + match self { + Self::Remote(_) => true, + _ => false, + } + } +} +#[doc(hidden)] +//type LastSeqFn = fn(peer_id: PubKey, qty: u16) -> Result; +pub type LastSeqFn = dyn Fn(PubKey, u16) -> Result + 'static + Sync + Send; +#[doc(hidden)] +// peer_id: PubKey, seq_num:u64, event_ser: vec, +pub type OutboxWriteFn = + dyn Fn(PubKey, u64, Vec) -> Result<(), NgError> + 'static + Sync + Send; +#[doc(hidden)] +// peer_id: PubKey, +pub type OutboxReadFn = dyn Fn(PubKey) -> Result>, NgError> + 'static + Sync + Send; + +#[doc(hidden)] +pub struct JsSaveSessionConfig { + pub last_seq_function: Box, + pub outbox_write_function: Box, + pub outbox_read_function: Box, +} + +impl fmt::Debug for JsSaveSessionConfig { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "JsSaveSessionConfig") + } +} + +#[doc(hidden)] +#[derive(Debug)] +pub enum VerifierConfigType { + /// nothing will be saved on disk during the session + Memory, + /// only the session information is saved locally. the UserStorage is not saved. + JsSaveSession(JsSaveSessionConfig), + /// will save all user data locally, with RocksDb backend + RocksDb(PathBuf), + /// the verifier will be remote. a Noise connection will be opened + /// optional peerId to connect to. If None, will try any that has the flag `can_verify` + /// // TODO: Pass the AppConfig + Remote(Option), + /// IndexedDb based rocksdb compiled to WASM... not ready yet. obviously. 
only works in the browser + WebRocksDb, + /// headless + Headless(Credentials), +} + +impl VerifierConfigType { + pub(crate) fn should_load_last_seq_num(&self) -> bool { + match self { + Self::JsSaveSession(_) | Self::RocksDb(_) => true, + _ => false, + } + } + + pub(crate) fn is_persistent(&self) -> bool { + match self { + Self::RocksDb(_) => true, + _ => false, + } + } + + #[allow(dead_code)] + pub(crate) fn is_in_memory(&self) -> bool { + match self { + Self::Memory | Self::JsSaveSession(_) => true, + _ => false, + } + } +} + +#[derive(Debug)] +pub struct VerifierConfig { + pub config_type: VerifierConfigType, + /// not used for Memory + pub user_master_key: [u8; 32], + /// not used for Memory + pub peer_priv_key: PrivKey, + pub user_priv_key: PrivKey, + pub private_store_read_cap: Option, + pub private_store_id: Option, + pub public_store_id: Option, + pub protected_store_id: Option, + pub locator: Locator, +} + +#[doc(hidden)] +pub type CancelFn = Box; + +#[doc(hidden)] +#[derive(Debug, Clone)] +pub enum BrokerPeerId { + Local(DirectPeerId), + Direct(DirectPeerId), + None, +} + +impl From<&BrokerPeerId> for Option { + fn from(bpi: &BrokerPeerId) -> Option { + match bpi { + BrokerPeerId::Local(_) => None, + BrokerPeerId::Direct(d) => Some(*d), + BrokerPeerId::None => panic!("cannot connect to a broker without a peerid"), + } + } +} + +impl From for Option { + fn from(bpi: BrokerPeerId) -> Option { + (&bpi).into() + } +} + +impl BrokerPeerId { + pub fn new_direct(peer: DirectPeerId) -> Self { + Self::Direct(peer) + } + pub fn is_some(&self) -> bool { + match self { + BrokerPeerId::Local(_) | BrokerPeerId::Direct(_) => true, + _ => false, + } + } + pub fn is_none(&self) -> bool { + !self.is_some() + } + pub fn connected_or_err(&self) -> Result, NgError> { + match self { + BrokerPeerId::None => Err(NgError::NotConnected), + _ => Ok(self.into()), + } + } + pub fn broker_peer_id(&self) -> &DirectPeerId { + match self { + BrokerPeerId::Local(p) | BrokerPeerId::Direct(p) => p, + _ => panic!("dont call broker_peer_id on a BrokerPeerId::None"), + } + } + pub fn is_local(&self) -> bool { + match self { + BrokerPeerId::Local(_) => true, + _ => false, + } + } + pub fn is_direct(&self) -> bool { + match self { + BrokerPeerId::Direct(_) => true, + _ => false, + } + } + pub fn is_direct_or_err(&self) -> Result<(), NgError> { + match self { + BrokerPeerId::Direct(_) => Ok(()), + _ => Err(NgError::NotConnected), + } + } + + pub fn to_direct_if_not_local(&self, peer: DirectPeerId) -> Result { + match self { + BrokerPeerId::Local(_) => Err(VerifierError::LocallyConnected), + _ => Ok(BrokerPeerId::Direct(peer)), + } + } +} diff --git a/ng-verifier/src/user_storage/branch.rs b/ng-verifier/src/user_storage/branch.rs new file mode 100644 index 0000000..ef11495 --- /dev/null +++ b/ng-verifier/src/user_storage/branch.rs @@ -0,0 +1,350 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Branch Storage (Object Key/Col/Value Mapping) + +#![allow(dead_code)] + +use serde_bare::from_slice; +use serde_bare::to_vec; + +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::prop; +use ng_repo::kcv_storage::KCVStorage; +#[allow(unused_imports)] +use ng_repo::log::*; +use ng_repo::repo::BranchInfo; +use ng_repo::types::*; + +use ng_net::app_protocol::FileName; + +pub struct BranchStorage<'a> { + storage: &'a dyn KCVStorage, + id: BranchId, +} + +impl<'a> BranchStorage<'a> { + const PREFIX: u8 = b'c'; + + // branch properties suffixes + const TYPE: u8 = b'b'; + const PUBLISHER: u8 = b'p'; + const READ_CAP: u8 = b'r'; + const TOPIC: u8 = b't'; + const COMMITS_NBR: u8 = b'n'; + const FORK_OF: u8 = b'f'; + const MERGED_IN: u8 = b'm'; + const CRDT: u8 = b'd'; + const CLASS: u8 = b'c'; + const DISCRETE_STATE: u8 = b's'; + + const ALL_PROPERTIES: [u8; 10] = [ + Self::TYPE, + Self::PUBLISHER, + Self::READ_CAP, + Self::TOPIC, + Self::COMMITS_NBR, + Self::FORK_OF, + Self::MERGED_IN, + Self::CRDT, + Self::CLASS, + Self::DISCRETE_STATE, + ]; + + const PREFIX_HEADS: u8 = b'h'; + + const PREFIX_FILES: u8 = b'f'; + + const SUFFIX_FOR_EXIST_CHECK: u8 = Self::TYPE; + + pub fn new( + id: &BranchId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + Ok(BranchStorage { + id: id.clone(), + storage, + }) + } + + pub fn open( + id: &BranchId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let opening = Self::new(id, storage)?; + if !opening.exists() { + return Err(StorageError::NotFound); + } + Ok(opening) + } + + pub fn create_from_info( + info: &BranchInfo, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + Self::create( + &info.id, + &info.read_cap, + &info.branch_type, + &info.topic, + &info.fork_of, + &info.merged_in, + &info.crdt, + info.topic_priv_key.as_ref(), + &info.current_heads, + storage, + ) + } + + //TODO: save all branch info under the repo_id (key prefix should be repo_id) + + pub fn create( + id: &BranchId, + read_cap: &Option, + branch_type: &BranchType, + topic: &Option, + fork_of: &Option, + merged_in: &Option, + crdt: &BranchCrdt, + publisher: Option<&BranchWriteCapSecret>, + current_heads: &Vec, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let bs = BranchStorage { + id: id.clone(), + storage, + }; + // if bs.exists() { + // return Err(StorageError::AlreadyExists); + // } + + storage.write_transaction(&mut |tx| { + let id_ser = to_vec(&id)?; + if read_cap.is_some() { + let value = to_vec(read_cap.as_ref().unwrap())?; + tx.put(Self::PREFIX, &id_ser, Some(Self::READ_CAP), &value, &None)?; + } + + let value = to_vec(branch_type)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::TYPE), &value, &None)?; + if topic.is_some() { + let value = to_vec(topic.as_ref().unwrap())?; + tx.put(Self::PREFIX, &id_ser, Some(Self::TOPIC), &value, &None)?; + } + if merged_in.is_some() { + let value = to_vec(merged_in.as_ref().unwrap())?; + tx.put(Self::PREFIX, &id_ser, Some(Self::MERGED_IN), &value, &None)?; + } + if fork_of.is_some() { + let value = to_vec(fork_of.as_ref().unwrap())?; + tx.put(Self::PREFIX, &id_ser, Some(Self::FORK_OF), &value, &None)?; + } + if *crdt != BranchCrdt::None { + let value = to_vec(&crdt.name())?; + tx.put(Self::PREFIX, &id_ser, Some(Self::CRDT), &value, &None)?; + let value = to_vec(&crdt.class())?; + tx.put(Self::PREFIX, &id_ser, Some(Self::CLASS), &value, &None)?; + } + if let Some(privkey) = publisher { + let value = to_vec(privkey)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::PUBLISHER), &value, &None)?; + 
} + for head in current_heads { + let mut head_ser = to_vec(head)?; + let mut key = Vec::with_capacity(id_ser.len() + head_ser.len()); + key.append(&mut id_ser.clone()); + key.append(&mut head_ser); + tx.put(Self::PREFIX_HEADS, &key, None, &vec![], &None)?; + } + Ok(()) + })?; + Ok(bs) + } + + pub fn load(id: &BranchId, storage: &'a dyn KCVStorage) -> Result { + let props = storage.get_all_properties_of_key( + Self::PREFIX, + to_vec(id).unwrap(), + Self::ALL_PROPERTIES.to_vec(), + &None, + )?; + + let crdt_name = prop(Self::CRDT, &props).ok(); + let class = prop(Self::CLASS, &props).ok(); + let crdt: BranchCrdt = if crdt_name.is_none() || class.is_none() { + BranchCrdt::None + } else { + BranchCrdt::from(crdt_name.unwrap(), class.unwrap())? + }; + + let bs = BranchInfo { + id: id.clone(), + branch_type: prop(Self::TYPE, &props)?, + read_cap: prop(Self::READ_CAP, &props).ok(), + topic: prop(Self::TOPIC, &props).ok(), + fork_of: prop(Self::FORK_OF, &props).ok(), + merged_in: prop(Self::MERGED_IN, &props).ok(), + crdt, + topic_priv_key: prop(Self::PUBLISHER, &props).ok(), + current_heads: Self::get_all_heads(id, storage)?, + commits_nbr: prop(Self::COMMITS_NBR, &props).unwrap_or(0), + }; + Ok(bs) + } + + pub fn get_all_heads( + id: &BranchId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let size = to_vec(&ObjectRef::nil())?.len(); + let key_prefix = to_vec(id).unwrap(); + let key_prefix_len = key_prefix.len(); + let mut res: Vec = vec![]; + let total_size = key_prefix_len + size; + for head in storage.get_all_keys_and_values( + Self::PREFIX_HEADS, + total_size, + key_prefix, + None, + &None, + )? { + if head.0.len() == total_size + 1 { + let head: ObjectRef = from_slice(&head.0[1 + key_prefix_len..total_size + 1])?; + res.push(head); + } + } + Ok(res) + } + + pub fn set_discrete_state(&self, state: Vec) -> Result<(), StorageError> { + self.storage.write_transaction(&mut |tx| { + let id_ser = &to_vec(&self.id)?; + tx.put( + Self::PREFIX, + &id_ser, + Some(Self::DISCRETE_STATE), + &state, + &None, + )?; + Ok(()) + }) + } + + pub fn get_discrete_state(&self) -> Result, StorageError> { + let id_ser = &to_vec(&self.id)?; + self.storage + .get(Self::PREFIX, &id_ser, Some(Self::DISCRETE_STATE), &None) + } + + pub fn add_file(&self, commit_id: &ObjectId, file: &FileName) -> Result<(), StorageError> { + self.storage.write_transaction(&mut |tx| { + let branch_id_ser = to_vec(&self.id)?; + let commit_id_ser = to_vec(commit_id)?; + let val = to_vec(file)?; + let mut key = Vec::with_capacity(branch_id_ser.len() + commit_id_ser.len()); + key.append(&mut branch_id_ser.clone()); + key.append(&mut commit_id_ser.clone()); + tx.put(Self::PREFIX_FILES, &key, None, &val, &None)?; + Ok(()) + }) + } + + pub fn get_all_files( + id: &BranchId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let size = to_vec(&ObjectId::nil())?.len(); + let key_prefix = to_vec(id).unwrap(); + let key_prefix_len = key_prefix.len(); + let mut res: Vec = vec![]; + let total_size = key_prefix_len + size; + for file in storage.get_all_keys_and_values( + Self::PREFIX_FILES, + total_size, + key_prefix, + None, + &None, + )? 
{ + if file.0.len() == total_size + 1 { + let file: FileName = from_slice(&file.1)?; + res.push(file); + } + } + Ok(res) + } + + pub fn exists(&self) -> bool { + self.storage + .get( + Self::PREFIX, + &to_vec(&self.id).unwrap(), + Some(Self::SUFFIX_FOR_EXIST_CHECK), + &None, + ) + .is_ok() + } + pub fn id(&self) -> &RepoId { + &self.id + } + + pub fn replace_current_heads(&self, new_heads: Vec) -> Result<(), StorageError> { + self.storage.write_transaction(&mut |tx| { + let id_ser = &to_vec(&self.id)?; + let size = to_vec(&ObjectRef::nil())?.len(); + tx.del_all_values(Self::PREFIX_HEADS, id_ser, size, None, &None)?; + for head in new_heads.iter() { + let mut head_ser = to_vec(head)?; + let mut key = Vec::with_capacity(id_ser.len() + head_ser.len()); + key.append(&mut id_ser.clone()); + key.append(&mut head_ser); + tx.put(Self::PREFIX_HEADS, &key, None, &vec![], &None)?; + } + + let mut val: u64 = match tx.get(Self::PREFIX, id_ser, Some(Self::COMMITS_NBR), &None) { + Ok(val_ser) => from_slice(&val_ser)?, + Err(StorageError::NotFound) => 0, + Err(e) => return Err(e), + }; + val += 1; + let val_ser = to_vec(&val)?; + tx.put( + Self::PREFIX, + id_ser, + Some(Self::COMMITS_NBR), + &val_ser, + &None, + )?; + // log_info!( + // "putting commit_nbr {} {:?} {} {:?}", + // Self::PREFIX as char, + // id_ser, + // Self::COMMITS_NBR as char, + // val_ser + // ); + Ok(()) + }) + } + + pub fn del(&self) -> Result<(), StorageError> { + self.storage.write_transaction(&mut |tx| { + let key = &to_vec(&self.id)?; + tx.del_all(Self::PREFIX, key, &Self::ALL_PROPERTIES, &None)?; + let size = to_vec(&ObjectRef::nil())?.len(); + tx.del_all_values(Self::PREFIX_HEADS, key, size, None, &None)?; + Ok(()) + }) + } +} + +#[cfg(test)] +mod test { + + #[test] + pub fn test_branch() {} +} diff --git a/ng-verifier/src/user_storage/mod.rs b/ng-verifier/src/user_storage/mod.rs new file mode 100644 index 0000000..e0066b7 --- /dev/null +++ b/ng-verifier/src/user_storage/mod.rs @@ -0,0 +1,18 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! Storage of user application data (RDF, rich-text, JSON, etc) and helper objects for mapping to KCV + +pub mod storage; + +pub use storage::*; + +pub mod repo; + +pub mod branch; diff --git a/ng-verifier/src/user_storage/repo.rs b/ng-verifier/src/user_storage/repo.rs new file mode 100644 index 0000000..3acbb25 --- /dev/null +++ b/ng-verifier/src/user_storage/repo.rs @@ -0,0 +1,499 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Repo Storage (Object Key/Col/Value Mapping) + +#![allow(dead_code)] + +use std::collections::HashMap; +use std::collections::HashSet; +use std::sync::{Arc, RwLock}; + +use either::{Either, Left, Right}; +use serde_bare::from_slice; +use serde_bare::to_vec; + +use ng_repo::block_storage::BlockStorage; +use ng_repo::errors::StorageError; +use ng_repo::kcv_storage::prop; +use ng_repo::kcv_storage::KCVStorage; +#[allow(unused_imports)] +use ng_repo::log::*; +use ng_repo::repo::BranchInfo; +use ng_repo::repo::Repo; +use ng_repo::store::Store; +use ng_repo::types::*; + +use super::branch::BranchStorage; + +pub struct RepoStorage<'a> { + storage: &'a dyn KCVStorage, + id: RepoId, +} + +impl<'a> RepoStorage<'a> { + const PREFIX: u8 = b'r'; + + // repo properties suffixes + const SIGNER_CAP: u8 = b'a'; + //const SIGNER_CAP_PARTIAL: u8 = b'b'; + const CHAT_BRANCH: u8 = b'c'; + const DEFINITION: u8 = b'd'; + const STORE_BRANCH: u8 = b'e'; + const CERTIFICATE: u8 = b'f'; + const INHERIT: u8 = b'i'; + const OVERLAY_BRANCH: u8 = b'l'; + const MAIN_BRANCH: u8 = b'm'; + const OWNERS: u8 = b'o'; + const PINNED: u8 = b'p'; + const QUORUM: u8 = b'q'; + const READ_CAP: u8 = b'r'; + const STORE_REPO: u8 = b's'; + //const SIGNER_CAP_TOTAL: u8 = b't'; + const USER_BRANCH: u8 = b'u'; + const WRITE_CAP_SECRET: u8 = b'w'; + const INBOX_CAP: u8 = b'x'; + + const ALL_PROPERTIES: [u8; 16] = [ + Self::SIGNER_CAP, + Self::INBOX_CAP, + //Self::SIGNER_CAP_PARTIAL, + Self::CHAT_BRANCH, + Self::DEFINITION, + Self::STORE_BRANCH, + Self::INHERIT, + Self::OVERLAY_BRANCH, + Self::MAIN_BRANCH, + Self::OWNERS, + Self::PINNED, + Self::QUORUM, + Self::READ_CAP, + Self::STORE_REPO, + //Self::SIGNER_CAP_TOTAL, + Self::USER_BRANCH, + Self::WRITE_CAP_SECRET, + Self::CERTIFICATE, + ]; + + const PREFIX_BRANCHES: u8 = b'b'; + + const SUFFIX_FOR_EXIST_CHECK: u8 = Self::READ_CAP; + + pub fn open(id: &RepoId, storage: &'a dyn KCVStorage) -> Result, StorageError> { + let opening = RepoStorage::new(id, storage); + if !opening.exists() { + return Err(StorageError::NotFound); + } + Ok(opening) + } + + pub fn new(id: &RepoId, storage: &'a dyn KCVStorage) -> RepoStorage<'a> { + RepoStorage { + id: id.clone(), + storage, + } + } + + pub fn create_from_repo( + repo: &Repo, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + Self::create( + &repo.id, + repo.read_cap.as_ref().unwrap(), + repo.write_cap.as_ref(), + repo.signer.as_ref(), + repo.inbox.as_ref(), + repo.store.get_store_repo(), + &repo.repo_def, + &repo.branches, + storage, + ) + } + + pub fn add_branch_from_info( + repo_id: &RepoId, + branch_info: &BranchInfo, + storage: &'a dyn KCVStorage, + ) -> Result<(), StorageError> { + BranchStorage::create_from_info(branch_info, storage)?; + storage.write_transaction(&mut |tx| { + let repo_id_ser = to_vec(&repo_id)?; + let branch_id_ser = to_vec(&branch_info.id)?; + let mut key = Vec::with_capacity(repo_id_ser.len() + branch_id_ser.len()); + key.append(&mut repo_id_ser.clone()); + key.append(&mut branch_id_ser.clone()); + tx.put(Self::PREFIX_BRANCHES, &key, None, &vec![], &None)?; + + if branch_info.branch_type == BranchType::Store { + tx.put( + Self::PREFIX, + &repo_id_ser, + Some(Self::STORE_BRANCH), + &branch_id_ser, + &None, + )?; + } + Ok(()) + })?; + Ok(()) + } + + pub fn update_signer_cap( + signer_cap: &SignerCap, + storage: &'a dyn KCVStorage, + ) -> Result<(), StorageError> { + let repo_id = signer_cap.repo; + storage.write_transaction(&mut |tx| { + let id_ser = to_vec(&repo_id)?; + let value = to_vec(signer_cap)?; + 
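// key = (PREFIX, serialized repo id) with suffix SIGNER_CAP; value = the serialized SignerCap. + 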
tx.put(Self::PREFIX, &id_ser, Some(Self::SIGNER_CAP), &value, &None)?; + Ok(()) + })?; + Ok(()) + } + + pub fn update_inbox_cap( + repo_id: &RepoId, + overlay: &OverlayId, + priv_key: &PrivKey, + storage: &'a dyn KCVStorage, + ) -> Result<(), StorageError> { + storage.write_transaction(&mut |tx| { + let id_ser = to_vec(repo_id)?; + let value = to_vec(priv_key)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::INBOX_CAP), &value, &None)?; + Ok(()) + })?; + Ok(()) + } + + pub fn update_certificate( + id: &RepoId, + certificate: &ObjectRef, + storage: &'a dyn KCVStorage, + ) -> Result<(), StorageError> { + storage.write_transaction(&mut |tx| { + let id_ser = to_vec(id)?; + let value = to_vec(certificate)?; + tx.put( + Self::PREFIX, + &id_ser, + Some(Self::CERTIFICATE), + &value, + &None, + )?; + Ok(()) + })?; + Ok(()) + } + + pub fn get_signer_cap(&self) -> Result { + let ser = self.storage.get( + Self::PREFIX, + &to_vec(&self.id).unwrap(), + Some(Self::SIGNER_CAP), + &None, + )?; + Ok(from_slice(&ser)?) + } + + pub fn get_inbox_cap(&self) -> Result { + let ser = self.storage.get( + Self::PREFIX, + &to_vec(&self.id).unwrap(), + Some(Self::INBOX_CAP), + &None, + )?; + Ok(from_slice(&ser)?) + } + + pub fn create( + id: &RepoId, + read_cap: &ReadCap, + write_cap: Option<&RepoWriteCapSecret>, + signer_cap: Option<&SignerCap>, + inbox_cap: Option<&PrivKey>, + store_repo: &StoreRepo, + repo_def: &Repository, + branches: &HashMap, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let repo = RepoStorage { + id: id.clone(), + storage, + }; + if repo.exists() { + return Err(StorageError::AlreadyExists); + } + + let mut store_branch = None; + + // FIXME: use the same transaction for all branches and the repo + for branch in branches.values() { + BranchStorage::create_from_info(branch, storage)?; + if branch.branch_type == BranchType::Store { + store_branch = Some(branch.id); + } + } + + storage.write_transaction(&mut |tx| { + let id_ser = to_vec(&id)?; + let value = to_vec(read_cap)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::READ_CAP), &value, &None)?; + let value = to_vec(store_repo)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::STORE_REPO), &value, &None)?; + let value = to_vec(repo_def)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::DEFINITION), &value, &None)?; + if let Some(wc) = write_cap { + let value = to_vec(wc)?; + tx.put( + Self::PREFIX, + &id_ser, + Some(Self::WRITE_CAP_SECRET), + &value, + &None, + )?; + } + if let Some(sb) = store_branch { + let value = to_vec(&sb)?; + tx.put( + Self::PREFIX, + &id_ser, + Some(Self::STORE_BRANCH), + &value, + &None, + )?; + } + if let Some(sc) = signer_cap { + let value = to_vec(sc)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::SIGNER_CAP), &value, &None)?; + } + if let Some(ic) = inbox_cap { + let value = to_vec(ic)?; + tx.put(Self::PREFIX, &id_ser, Some(Self::INBOX_CAP), &value, &None)?; + } + for branch in branches.keys() { + let mut branch_ser = to_vec(branch)?; + let mut key = Vec::with_capacity(id_ser.len() + branch_ser.len()); + key.append(&mut id_ser.clone()); + key.append(&mut branch_ser); + tx.put(Self::PREFIX_BRANCHES, &key, None, &vec![], &None)?; + } + Ok(()) + })?; + + Ok(repo) + } + + pub fn load( + id: &RepoId, + store: Either, Arc>>, + storage: &'a dyn KCVStorage, + ) -> Result { + //("LOADING repo {}", id); + let branch_ids = Self::get_all_branches(id, storage)?; + let mut branches = HashMap::new(); + let mut overlay_branch_read_cap = None; + let mut store_branch_id = None; + for branch in branch_ids { + let info = 
BranchStorage::load(&branch, storage)?; + if info.branch_type == BranchType::Overlay { + overlay_branch_read_cap = Some(info.read_cap.clone().unwrap()); + } + if info.branch_type == BranchType::Store { + store_branch_id = Some(info.id.clone()); + } + //log_info!("LOADING BRANCH INFO {}", branch); + //log_info!("TOPIC {}", info.topic); + let _ = branches.insert(branch, info); + } + + let props = storage.get_all_properties_of_key( + Self::PREFIX, + to_vec(id).unwrap(), + Self::ALL_PROPERTIES.to_vec(), + &None, + )?; + + let store = match store { + Left(s) => s, + Right(bs) => { + // we want to load a store. let's start by retrieving the store repo + // check that it has a STORE_BRANCH + if store_branch_id.is_none() { + return Err(StorageError::NotAStoreRepo); + } + let store_repo: StoreRepo = + prop(Self::STORE_REPO, &props).map_err(|_| StorageError::NotAStoreRepo)?; + let store_info = branches.get(id).ok_or(StorageError::NotFound)?; + let overlay_branch_read_cap = if store_repo.is_private() { + store_info.read_cap.clone().unwrap() + } else { + overlay_branch_read_cap.ok_or(StorageError::OverlayBranchNotFound)? + }; + Arc::new(Store::new( + store_repo, + store_info.read_cap.clone().unwrap(), + overlay_branch_read_cap, + bs, + )) + } + }; + + let opened_branches = if let Some(store_branch) = store_branch_id { + HashMap::from([(store_branch, true)]) + } else { + HashMap::new() + }; + + let repo = Repo { + id: id.clone(), + repo_def: prop(Self::DEFINITION, &props)?, + read_cap: prop(Self::READ_CAP, &props)?, + write_cap: prop(Self::WRITE_CAP_SECRET, &props).ok(), + signer: prop(Self::SIGNER_CAP, &props).ok(), + inbox: prop(Self::INBOX_CAP, &props).ok(), + //TODO: members + members: HashMap::new(), + branches, + opened_branches, + store, + certificate_ref: prop(Self::CERTIFICATE, &props).ok(), + }; + Ok(repo) + } + + pub fn exists(&self) -> bool { + self.storage + .get( + Self::PREFIX, + &to_vec(&self.id).unwrap(), + Some(Self::SUFFIX_FOR_EXIST_CHECK), + &None, + ) + .is_ok() + } + pub fn id(&self) -> &RepoId { + &self.id + } + + pub fn get_all_branches( + id: &RepoId, + storage: &'a dyn KCVStorage, + ) -> Result, StorageError> { + let size = to_vec(&BranchId::nil())?.len(); + let key_prefix = to_vec(id).unwrap(); + let mut res: Vec = vec![]; + let key_prefix_len = key_prefix.len(); + let total_size = key_prefix_len + size; + for branch in storage.get_all_keys_and_values( + Self::PREFIX_BRANCHES, + total_size, + key_prefix, + None, + &None, + )? { + if branch.0.len() == total_size + 1 { + let branch_id: BranchId = + from_slice(&branch.0[1 + key_prefix_len..total_size + 1])?; + res.push(branch_id); + } + } + Ok(res) + } + + pub fn get_all_store_and_repo_ids( + storage: &'a dyn KCVStorage, + ) -> Result>, StorageError> { + //log_info!("get_all_store_and_repo_ids"); + let mut res = HashMap::new(); + let size = to_vec(&RepoId::nil())?.len(); + let mut store_ids = HashSet::new(); + for (store_id_ser, _) in storage.get_all_keys_and_values( + Self::PREFIX, + size, + vec![], + Some(Self::STORE_BRANCH), + &None, + )? { + let store_id: RepoId = from_slice(&store_id_ser[1..1 + size])?; + //log_info!("FOUND store_id {}", store_id); + store_ids.insert(store_id); + } + let mut repo_ids = HashMap::new(); + for (repo_id_ser, store_repo_ser) in storage.get_all_keys_and_values( + Self::PREFIX, + size, + vec![], + Some(Self::STORE_REPO), + &None, + )? 
{ + let repo_id: RepoId = from_slice(&repo_id_ser[1..1 + size])?; + //log_info!("FOUND repo_id {}", repo_id); + let store_repo: StoreRepo = from_slice(&store_repo_ser)?; + repo_ids.insert(repo_id, store_repo); + } + + for store in store_ids.iter() { + let store_repo = repo_ids.get(store).ok_or(StorageError::NotAStoreRepo)?; + res.insert(*store_repo, vec![]); + //log_info!("INSERTED store_id {}", store); + } + + for (repo_id, store_repo) in repo_ids.iter() { + if store_ids.get(repo_id).is_none() { + let repos = res.get_mut(store_repo).ok_or(StorageError::NotFound)?; + repos.push(*repo_id); + //log_info!("INSERTED repo_id {}", repo_id); + } + } + + Ok(res) + } + + // pub fn get_type(&self) -> Result { + // let type_ser = self + // .store + // .get(Self::PREFIX, &to_vec(&self.id)?, Some(Self::TYPE), &None)?; + // let t: (u8, u32, Option) = from_slice(&type_ser)?; + // // if t.1 < now_timestamp() { + // // return Err(ProtocolError::Expired); + // // } + // Ok(t.0) + // } + + // pub fn is_expired(&self) -> Result { + // let expire_ser = + // self.store + // .get(Self::PREFIX, &to_vec(&self.id)?, Some(Self::TYPE), &None)?; + // let expire: (u8, u32, Option) = from_slice(&expire_ser)?; + // if expire.1 < now_timestamp() { + // return Ok(true); + // } + // Ok(false) + // } + + pub fn del(&self) -> Result<(), StorageError> { + self.storage.write_transaction(&mut |tx| { + let key = &to_vec(&self.id)?; + tx.del_all(Self::PREFIX, key, &Self::ALL_PROPERTIES, &None)?; + let size = to_vec(&BranchId::nil())?.len(); + tx.del_all_values(Self::PREFIX_BRANCHES, key, size, None, &None)?; + Ok(()) + }) + } +} + +#[cfg(test)] +mod test { + + #[test] + pub fn test_repo() {} +} diff --git a/ng-verifier/src/user_storage/storage.rs b/ng-verifier/src/user_storage/storage.rs new file mode 100644 index 0000000..25bac80 --- /dev/null +++ b/ng-verifier/src/user_storage/storage.rs @@ -0,0 +1,218 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Storage of user application data (RDF, content of rich-text document, etc) + +use std::{ + collections::HashMap, + sync::{Arc, RwLock}, +}; + +use ng_net::app_protocol::{AppTabInfo, FileName}; +use ng_repo::{ + block_storage::BlockStorage, + errors::StorageError, + repo::{BranchInfo, Repo}, + store::Store, + types::*, +}; + +pub trait UserStorage: Send + Sync { + //fn repo_id_to_store_overlay(&self, id: &RepoId) -> Result; + + fn get_all_store_and_repo_ids(&self) -> Result>, StorageError>; + + fn load_store( + &self, + store_repo: &StoreRepo, + block_storage: Arc>, + ) -> Result; + + fn load_repo(&self, repo_id: &RepoId, store: Arc) -> Result; + + fn save_repo(&self, repo: &Repo) -> Result<(), StorageError>; + + fn add_branch(&self, repo_id: &RepoId, branch_info: &BranchInfo) -> Result<(), StorageError>; + + fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError>; + + fn update_inbox_cap(&self, repo_id: &RepoId, overlay: &OverlayId, priv_key: &PrivKey) -> Result<(), StorageError>; + + fn update_certificate( + &self, + repo_id: &RepoId, + certificate: &ObjectRef, + ) -> Result<(), StorageError>; + + fn get_signer_cap(&self, repo_id: &RepoId) -> Result; + + fn get_inbox_cap(&self, repo_id: &RepoId) -> Result; + + fn branch_add_file( + &self, + commit_id: ObjectId, + branch: BranchId, + file: FileName, + ) -> Result<(), StorageError>; + + fn branch_get_all_files(&self, branch: &BranchId) -> Result, StorageError>; + + fn branch_set_discrete_state( + &self, + branch: BranchId, + state: Vec, + ) -> Result<(), StorageError>; + + fn branch_get_discrete_state(&self, branch: &BranchId) -> Result, StorageError>; + + fn branch_get_tab_info( + &self, + branch: &BranchId, + repo: &RepoId, + store: &StoreRepo, + ) -> Result; + + fn update_branch_current_heads( + &self, + repo_id: &RepoId, + branch_id: &BranchId, + new_heads: Vec, + ) -> Result<(), StorageError>; +} + +pub(crate) struct InMemoryUserStorage { + branch_files: RwLock>>, + branch_discrete_state: RwLock>>, + repo_signer_cap: RwLock>, + repo_inbox_cap: RwLock>, +} + +impl InMemoryUserStorage { + pub fn new() -> Self { + InMemoryUserStorage { + branch_files: RwLock::new(HashMap::new()), + branch_discrete_state: RwLock::new(HashMap::new()), + repo_signer_cap: RwLock::new(HashMap::new()), + repo_inbox_cap: RwLock::new(HashMap::new()), + } + } +} + +impl UserStorage for InMemoryUserStorage { + fn branch_add_file( + &self, + _commit_id: ObjectId, + branch: BranchId, + file: FileName, + ) -> Result<(), StorageError> { + let mut lock = self.branch_files.write().unwrap(); + let file_list = lock.entry(branch).or_insert_with(|| Vec::with_capacity(1)); + file_list.push(file); + Ok(()) + } + + fn branch_get_all_files(&self, branch: &BranchId) -> Result, StorageError> { + let lock = self.branch_files.read().unwrap(); + if let Some(file_list) = lock.get(&branch) { + Ok(file_list.to_vec()) + } else { + Ok(vec![]) + } + } + + fn branch_set_discrete_state( + &self, + branch: BranchId, + state: Vec, + ) -> Result<(), StorageError> { + let mut lock = self.branch_discrete_state.write().unwrap(); + let _ = lock.insert(branch, state); + Ok(()) + } + + fn branch_get_discrete_state(&self, branch: &BranchId) -> Result, StorageError> { + let lock = self.branch_discrete_state.read().unwrap(); + if let Some(state) = lock.get(&branch) { + Ok(state.to_vec()) + } else { + Err(StorageError::NoDiscreteState) + } + } + + fn branch_get_tab_info( + &self, + branch: &BranchId, + repo: &RepoId, + store: &StoreRepo, + ) -> Result { + unimplemented!(); + } + + 
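// The repo and store load/save methods below are left unimplemented: an in-memory + // session keeps its repos inside the Verifier itself and never reloads them from storage. + 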
fn get_all_store_and_repo_ids(&self) -> Result>, StorageError> { + unimplemented!(); + } + + fn load_store( + &self, + _store_repo: &StoreRepo, + _block_storage: Arc>, + ) -> Result { + unimplemented!(); + } + fn load_repo(&self, _repo_id: &RepoId, _store: Arc) -> Result { + unimplemented!(); + } + + fn save_repo(&self, _repo: &Repo) -> Result<(), StorageError> { + unimplemented!(); + } + + fn add_branch(&self, _repo_id: &RepoId, _branch_info: &BranchInfo) -> Result<(), StorageError> { + unimplemented!(); + } + + fn update_certificate( + &self, + repo_id: &RepoId, + certificate: &ObjectRef, + ) -> Result<(), StorageError> { + unimplemented!(); + } + + fn update_signer_cap(&self, signer_cap: &SignerCap) -> Result<(), StorageError> { + let mut lock = self.repo_signer_cap.write().unwrap(); + lock.insert(signer_cap.repo, signer_cap.clone()); + Ok(()) + } + + fn get_signer_cap(&self, repo_id: &RepoId) -> Result { + let mut lock = self.repo_signer_cap.write().unwrap(); + Ok(lock.remove(repo_id).ok_or(StorageError::NotFound)?) + } + + fn update_inbox_cap(&self, repo_id: &RepoId, overlay: &OverlayId, priv_key: &PrivKey) -> Result<(), StorageError> { + let mut lock = self.repo_inbox_cap.write().unwrap(); + lock.insert(*repo_id, priv_key.clone()); + Ok(()) + } + + fn get_inbox_cap(&self, repo_id: &RepoId) -> Result { + let mut lock = self.repo_inbox_cap.write().unwrap(); + Ok(lock.remove(repo_id).ok_or(StorageError::NotFound)?) + } + + fn update_branch_current_heads( + &self, + _repo_id: &RepoId, + _branch_id: &BranchId, + _new_heads: Vec, + ) -> Result<(), StorageError> { + unimplemented!(); + } +} diff --git a/ng-verifier/src/verifier.rs b/ng-verifier/src/verifier.rs new file mode 100644 index 0000000..494337e --- /dev/null +++ b/ng-verifier/src/verifier.rs @@ -0,0 +1,3051 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. +// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +//! 
Repo object (on heap) to handle a Repository + +use core::fmt; +use std::cmp::max; +use std::collections::BTreeMap; +use std::collections::HashSet; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::fs::create_dir_all; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::fs::{read, File, OpenOptions}; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use std::io::Write; +use std::{collections::HashMap, sync::Arc}; + +use async_std::stream::StreamExt; +use async_std::sync::{Mutex, RwLockReadGuard}; +use futures::channel::mpsc; +use futures::SinkExt; +use ng_oxigraph::oxigraph::sparql::Query; +use ng_oxigraph::oxigraph::sparql::QueryResults; +use ng_oxigraph::oxrdf::Term; +use ng_repo::utils::derive_key; +use qrcode::render::svg; +use qrcode::QrCode; +use sbbf_rs_safe::Filter; +use serde::{Deserialize, Serialize}; +use web_time::SystemTime; + +//use ng_oxigraph::oxigraph::io::{RdfFormat, RdfParser, RdfSerializer}; +//use ng_oxigraph::oxigraph::store::Store; +//use ng_oxigraph::oxigraph::model::GroundQuad; +//use yrs::{StateVector, Update}; + +use ng_oxigraph::oxrdf::{GraphNameRef, NamedNode, Triple}; + +use ng_repo::file::ReadFile; +use ng_repo::log::*; +#[cfg(any(test, feature = "testing"))] +use ng_repo::utils::generate_keypair; +use ng_repo::{ + block_storage::{store_max_value_size, BlockStorage, HashMapBlockStorage}, + errors::{NgError, ProtocolError, ServerError, StorageError, VerifierError}, + file::RandomAccessFile, + object::Object, + repo::{BranchInfo, Repo}, + store::Store, + types::*, +}; + +use ng_net::actor::SoS; +use ng_net::app_protocol::*; +use ng_net::broker::{Broker, BROKER}; +use ng_net::{ + connection::NoiseFSM, + types::*, + utils::{Receiver, Sender}, +}; + +use crate::commits::*; +#[cfg(all(not(target_family = "wasm"), not(docsrs)))] +use crate::rocksdb_user_storage::RocksDbUserStorage; +use crate::types::*; +use crate::user_storage::InMemoryUserStorage; +use crate::user_storage::UserStorage; + +// pub trait IVerifier { +// fn add_branch_and_save( +// &mut self, +// repo_id: &RepoId, +// branch_info: BranchInfo, +// store_repo: &StoreRepo, +// ) -> Result<(), VerifierError>; + +// fn add_repo_and_save(&mut self, repo: Repo) -> &Repo; + +// fn get_repo(&self, id: &RepoId, store_repo: &StoreRepo) -> Result<&Repo, NgError>; +// } + +pub struct Verifier { + pub(crate) config: VerifierConfig, + user_id: UserId, + pub connected_broker: BrokerPeerId, + pub(crate) graph_dataset: Option, + pub(crate) user_storage: Option>>, + block_storage: Option>>, + last_seq_num: u64, + peer_id: PubKey, + max_reserved_seq_num: u64, + last_reservation: SystemTime, + stores: HashMap>, + inner_to_outer: HashMap, + pub(crate) outer: String, + pub(crate) repos: HashMap, + inboxes: HashMap, + // TODO: deal with collided repo_ids. self.repos should be a HashMap enum Collision {Yes, No(Repo)} + // add a collided_repos: HashMap<(OverlayId, RepoId), Repo> + // only use get_repo() everywhere in the code (always passing the overlay) so that collisions can be handled. 
+ // also do the same in RocksdbStorage + /// (OverlayId, TopicId), (RepoId, BranchId) + pub(crate) topics: HashMap<(OverlayId, TopicId), (RepoId, BranchId)>, + /// only used for InMemory type, to store the outbox + in_memory_outbox: Vec, + uploads: BTreeMap, + branch_subscriptions: HashMap>, + pub(crate) temporary_repo_certificates: HashMap, +} + +impl fmt::Debug for Verifier { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "Verifier\nconfig: {:?}", self.config)?; + writeln!(f, "connected_broker: {:?}", self.connected_broker)?; + writeln!(f, "stores: {:?}", self.stores)?; + writeln!(f, "repos: {:?}", self.repos) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct EventOutboxStorage { + event: Event, + overlay: OverlayId, + file_blocks: Vec, +} + +impl Verifier { + pub(crate) fn get_peer_id_for_skolem(&self) -> Vec { + self.peer_id.to_dh_slice()[0..16].to_vec() + } + + pub fn complement_credentials(&self, creds: &mut Credentials) { + creds.private_store = self.private_store_id().clone(); + creds.protected_store = self.protected_store_id().clone(); + creds.public_store = self.public_store_id().clone(); + creds.read_cap = self.config.private_store_read_cap.to_owned().unwrap(); + } + + pub(crate) fn user_privkey(&self) -> &PrivKey { + &self.config.user_priv_key + } + + pub(crate) fn user_id(&self) -> &UserId { + &self.user_id + } + + pub fn private_store_id(&self) -> &RepoId { + self.config.private_store_id.as_ref().unwrap() + } + pub fn protected_store_id(&self) -> &RepoId { + self.config.protected_store_id.as_ref().unwrap() + } + pub fn public_store_id(&self) -> &RepoId { + self.config.public_store_id.as_ref().unwrap() + } + + pub fn update_locator(&mut self, locator: Locator) { + self.outer = NuriV0::locator(&locator); + self.config.locator = locator; + } + + pub async fn close(&self) { + log_debug!("VERIFIER CLOSED {}", self.user_id()); + BROKER + .write() + .await + .close_peer_connection_x(None, Some(self.user_id().clone())) + .await; + } + + pub(crate) fn start_upload(&mut self, content_type: String, store: Arc) -> u32 { + let mut first_available: u32 = 0; + for upload in self.uploads.keys() { + if *upload != first_available + 1 { + break; + } else { + first_available += 1; + } + } + first_available += 1; + + let ret = self.uploads.insert( + first_available, + RandomAccessFile::new_empty(store_max_value_size(), content_type, vec![], store), + ); + assert!(ret.is_none()); + first_available + } + + pub(crate) fn continue_upload( + &mut self, + upload_id: u32, + data: &Vec, + ) -> Result<(), NgError> { + let file = self + .uploads + .get_mut(&upload_id) + .ok_or(NgError::WrongUploadId)?; + Ok(file.write(data)?) + } + + pub(crate) fn finish_upload(&mut self, upload_id: u32) -> Result { + let mut file = self + .uploads + .remove(&upload_id) + .ok_or(NgError::WrongUploadId)?; + let _id = file.save()?; + Ok(file.reference().unwrap()) + } + + pub(crate) async fn put_all_blocks_of_file( + &self, + file_ref: &ObjectRef, + repo_id: &RepoId, + store_repo: &StoreRepo, + ) -> Result<(), NgError> { + let repo = self.get_repo(&repo_id, &store_repo)?; + // check that the referenced object exists locally. + repo.store.has(&file_ref.id)?; + // we send all the blocks to the broker. + let blocks = if let Ok(file) = RandomAccessFile::open( + file_ref.id.clone(), + file_ref.key.clone(), + Arc::clone(&repo.store), + ) { + file.get_all_blocks_ids()? 
+ } else { + let obj = Object::load_ref(file_ref, &repo.store)?; + obj.block_ids() + }; + let found = self.has_blocks(blocks, repo).await?; + for block_id in found.missing() { + let block = repo.store.get(block_id)?; + self.put_blocks(vec![block], repo).await?; + } + Ok(()) + } + + pub(crate) async fn push_app_response(&mut self, branch: &BranchId, response: AppResponse) { + // log_info!( + // "push_app_response {} {:?}", + // branch, + // self.branch_subscriptions + // ); + if let Some(sender) = self.branch_subscriptions.get_mut(branch) { + if sender.is_closed() { + log_debug!("closed so removed {}", branch); + self.branch_subscriptions.remove(branch); + } else { + let _ = sender.send(response).await; + } + } + } + + fn branch_get_tab_info( + &self, + repo: &Repo, + branch: &BranchId, + outer: String, + ) -> Result<(AppTabInfo, Option), NgError> { + let branch_info = repo.branch(branch)?; + + let branch_tab_info = AppTabBranchInfo { + id: Some(format!("b:{}", branch.to_string())), + readcap: Some(branch_info.read_cap.as_ref().unwrap().readcap_nuri()), + class: Some(branch_info.crdt.class().clone()), + comment_branch: None, //TODO + }; + + // Retrieve Header branch info (title and about) + let header_branch_info = repo.header_branch(); + let mut about = None; + let mut title = None; + if let Some(header_branch_info) = header_branch_info { + let oxistore = self.graph_dataset.as_ref().unwrap(); + let header_graph = NuriV0::branch_repo_graph_name( + &header_branch_info.id, + &repo.id, + &repo.store.overlay_id, + ); + let base = NuriV0::repo_id(&repo.id); + let parsed = Query::parse(&format!("SELECT ?title ?about WHERE {{ OPTIONAL {{ <> <{NG_ONTOLOGY_ABOUT}> ?about }} OPTIONAL {{ <> <{NG_ONTOLOGY_TITLE}> ?title }} }}"), + Some(&base)).map_err(|e| NgError::OxiGraphError(e.to_string()))?; + let results = oxistore + .query(parsed, Some(header_graph)) + .map_err(|e| NgError::OxiGraphError(e.to_string()))?; + match results { + QueryResults::Solutions(mut sol) => { + if let Some(Ok(s)) = sol.next() { + if let Some(Term::Literal(l)) = s.get("title") { + title = Some(l.value().to_string()); + } + if let Some(Term::Literal(l)) = s.get("about") { + about = Some(l.value().to_string()); + } + } + } + _ => return Err(NgError::InvalidResponse), + } + } + + let root_branch_info = repo.branch(&repo.id)?; + + let doc_tab_info = AppTabDocInfo { + nuri: Some(format!("o:{}", repo.id.to_string())), + is_store: Some(repo.store.id() == &repo.id), + is_member: Some(root_branch_info.read_cap.as_ref().unwrap().readcap_nuri()), // TODO + authors: None, // TODO + inbox: None, // TODO + can_edit: Some(true), + title, + icon: None, + description: about, + }; + + let store_tab_info = AppTabStoreInfo { + repo: Some(repo.store.get_store_repo().clone()), + overlay: Some(format!("v:{}", repo.store.overlay_id.to_string())), + store_type: Some(repo.store.get_store_repo().store_type_for_app()), + has_outer: Some(outer), //TODO + inner: None, //TODO + is_member: None, //TODO + readcap: None, //TODO + title: None, + icon: None, + description: None, + }; + + Ok(( + AppTabInfo { + branch: Some(branch_tab_info), + doc: Some(doc_tab_info), + store: Some(store_tab_info), + }, + header_branch_info.map(|i| i.id), + )) + } + + pub(crate) async fn create_branch_subscription( + &mut self, + repo_id: RepoId, + branch_id: BranchId, + store_repo: StoreRepo, + ) -> Result<(Receiver, CancelFn), VerifierError> { + //log_info!("#### create_branch_subscription {}", branch_id); + let (tx, rx) = mpsc::unbounded::(); + //log_info!("SUBSCRIBE"); + if let 
Some(returned) = self.branch_subscriptions.insert(branch_id, tx.clone()) { + //log_info!("RESUBSCRIBE"); + if !returned.is_closed() { + //log_info!("FORCE CLOSE"); + returned.close_channel(); + //return Err(VerifierError::DoubleBranchSubscription); + } + } + let (heads, head_keys, tab_info, header_branch_id, crdt) = { + let repo = self.get_repo(&repo_id, &store_repo)?; + let branch = repo.branch(&branch_id)?; + + let heads: Vec = branch.current_heads.iter().map(|h| h.id.clone()).collect(); + let head_keys: Vec = + branch.current_heads.iter().map(|h| h.key.clone()).collect(); + + //let tx = self.branch_subscriptions.entry(branch).or_insert_with(|| {}); + + let (tab_info, header_branch_id) = + self.branch_get_tab_info(repo, &branch_id, self.outer.clone())?; + + ( + heads, + head_keys, + tab_info, + header_branch_id, + branch.crdt.clone(), + ) + }; + + if let Some(header_branch_id) = header_branch_id { + if let Some(returned) = self + .branch_subscriptions + .insert(header_branch_id, tx.clone()) + { + if !returned.is_closed() { + returned.close_channel(); + } + } + } + + let files = self + .user_storage + .as_ref() + .unwrap() + .branch_get_all_files(&branch_id)?; + + // let tab_info = self.user_storage.as_ref().unwrap().branch_get_tab_info( + // &branch_id, + // &repo_id, + // &store_repo, + // )?; + + let store = self.graph_dataset.as_ref().unwrap(); + let graph_name = self + .resolve_target_for_sparql(&NuriTargetV0::Repo(repo_id), false)? + .unwrap(); //TODO: deal with branch + let quad_iter = store.quads_for_pattern( + None, + None, + None, + Some(GraphNameRef::NamedNode( + NamedNode::new_unchecked(graph_name).as_ref(), + )), + ); + let mut results = vec![]; + for quad in quad_iter { + match quad { + Err(e) => {} //return Err(VerifierError::OxigraphError(e.to_string())), + Ok(quad) => results.push(Triple::from(quad)), + } + } + + let discrete = if crdt.is_graph() { + None + } else { + match self + .user_storage + .as_ref() + .unwrap() + .branch_get_discrete_state(&branch_id) + { + Ok(state) => Some(match crdt { + BranchCrdt::Automerge(_) => DiscreteState::Automerge(state), + BranchCrdt::YArray(_) => DiscreteState::YArray(state), + BranchCrdt::YMap(_) => DiscreteState::YMap(state), + BranchCrdt::YText(_) => DiscreteState::YText(state), + BranchCrdt::YXml(_) => DiscreteState::YXml(state), + _ => return Err(VerifierError::InvalidBranch), + }), + Err(StorageError::NoDiscreteState) => None, + Err(e) => return Err(e.into()), + } + }; + + let state = AppState { + heads, + head_keys, + graph: if results.is_empty() { + None + } else { + Some(GraphState { + triples: serde_bare::to_vec(&results).unwrap(), + }) + }, + discrete, + files, + }; + + self.push_app_response( + &branch_id, + AppResponse::V0(AppResponseV0::TabInfo(tab_info)), + ) + .await; + + self.push_app_response(&branch_id, AppResponse::V0(AppResponseV0::State(state))) + .await; + + let fnonce = Box::new(move || { + log_debug!("CLOSE_CHANNEL of subscription for branch {}", branch_id); + if !tx.is_closed() { + tx.close_channel(); + } + }); + Ok((rx, fnonce)) + } + + #[allow(deprecated)] + #[cfg(any(test, feature = "testing"))] + pub fn new_dummy() -> Self { + let (peer_priv_key, peer_id) = generate_keypair(); + let block_storage = Arc::new(std::sync::RwLock::new(HashMapBlockStorage::new())) + as Arc>; + let user_priv_key = PrivKey::random_ed(); + let user_id = user_priv_key.to_pub(); + Verifier { + config: VerifierConfig { + config_type: VerifierConfigType::Memory, + user_master_key: [0; 32], + peer_priv_key, + user_priv_key, + 
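+            // --------------------------------------------------------------
+            // [Illustrative sketch, not part of the original commit] The
+            // insert/close dance in create_branch_subscription above keeps at
+            // most one live subscription per branch: inserting the new sender
+            // hands back the previous one, which is force-closed so its
+            // receiver stream terminates. Standalone reduction (assumes the
+            // `futures` 0.3 crate; `BranchId` is a stand-in type here):
+            //
+            // use std::collections::HashMap;
+            //
+            // use futures::channel::mpsc;
+            //
+            // type BranchId = u64;
+            //
+            // fn resubscribe(
+            //     subs: &mut HashMap<BranchId, mpsc::UnboundedSender<String>>,
+            //     branch: BranchId,
+            // ) -> mpsc::UnboundedReceiver<String> {
+            //     let (tx, rx) = mpsc::unbounded();
+            //     if let Some(old) = subs.insert(branch, tx) {
+            //         if !old.is_closed() {
+            //             // the old receiver now yields None and ends cleanly
+            //             old.close_channel();
+            //         }
+            //     }
+            //     rx
+            // }
+            // --------------------------------------------------------------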
private_store_read_cap: None, + private_store_id: None, + protected_store_id: None, + public_store_id: None, + locator: Locator::empty(), + }, + outer: "".to_string(), + user_id, + connected_broker: BrokerPeerId::None, + graph_dataset: None, + user_storage: None, + block_storage: Some(block_storage), + last_seq_num: 0, + peer_id, + max_reserved_seq_num: 1, + last_reservation: SystemTime::UNIX_EPOCH, + stores: HashMap::new(), + repos: HashMap::new(), + inboxes: HashMap::new(), + topics: HashMap::new(), + in_memory_outbox: vec![], + inner_to_outer: HashMap::new(), + uploads: BTreeMap::new(), + branch_subscriptions: HashMap::new(), + temporary_repo_certificates: HashMap::new(), + } + } + + pub fn load(&mut self) -> Result<(), NgError> { + // log_info!( + // "SHOULD LOAD? {} {} {}", + // self.is_persistent(), + // self.user_storage.is_some(), + // self.block_storage.is_some() + // ); + if self.is_persistent() && self.user_storage.is_some() && self.block_storage.is_some() { + let user_storage = Arc::clone(self.user_storage.as_ref().unwrap()); + let stores = user_storage.get_all_store_and_repo_ids()?; + + for (store, repos) in stores.iter() { + log_debug!("LOADING STORE: {}", store); + let repo = user_storage + .load_store(store, Arc::clone(self.block_storage.as_ref().unwrap()))?; + self.stores.insert( + store.overlay_id_for_storage_purpose(), + Arc::clone(&repo.store), + ); + let store = Arc::clone(&repo.store); + self.populate_topics(&repo); + self.add_repo_without_saving(repo); + + for repo_id in repos { + //log_info!("LOADING REPO: {}", repo_id); + let repo = user_storage.load_repo(repo_id, Arc::clone(&store))?; + self.populate_topics(&repo); + self.add_repo_without_saving(repo); + } + } + } + Ok(()) + } + + fn is_persistent(&self) -> bool { + self.config.config_type.is_persistent() + } + + #[allow(dead_code)] + fn is_in_memory(&self) -> bool { + self.config.config_type.is_in_memory() + } + + fn need_bootstrap(&self) -> bool { + self.stores.is_empty() + } + + fn get_arc_block_storage( + &self, + ) -> Result>, VerifierError> { + Ok(Arc::clone( + self.block_storage + .as_ref() + .ok_or(VerifierError::NoBlockStorageAvailable)?, + )) + } + + #[allow(dead_code)] + fn get_store_or_load(&mut self, store_repo: &StoreRepo) -> Arc { + let overlay_id = store_repo.overlay_id_for_storage_purpose(); + let block_storage = self + .get_arc_block_storage() + .expect("get_store_or_load cannot be called on Remote Verifier"); + let store = self.stores.entry(overlay_id).or_insert_with(|| { + // FIXME: get store_readcap and store_overlay_branch_readcap from user storage + let store_readcap = ReadCap::nil(); + let store_overlay_branch_readcap = ReadCap::nil(); + let store = Store::new( + *store_repo, + store_readcap, + store_overlay_branch_readcap, + block_storage, + ); + Arc::new(store) + }); + Arc::clone(store) + } + + fn complete_site_store( + &mut self, + store_repo: &StoreRepo, + mut repo: Repo, + ) -> Result { + let read_cap = repo.read_cap.to_owned().unwrap(); + let overlay_read_cap = repo.overlay_branch_read_cap().cloned(); + + let overlay_id = store_repo.overlay_id_for_storage_purpose(); + let store = self + .stores + .remove(&overlay_id) + .ok_or(NgError::StoreNotFound)?; + // if repo_already_inserted { + // let mut repo = self + // .repos + // .remove(store_repo.repo_id()) + // .ok_or(NgError::RepoNotFound)?; + // log_info!( + // "{}", + // Arc::::strong_count(&repo.store) + // ); + // } + drop(repo.store); + //log_info!("{}", Arc::::strong_count(&store)); + let mut mut_store = 
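+        // ------------------------------------------------------------------
+        // [Illustrative sketch, not part of the original commit]
+        // `Arc::into_inner` only succeeds while the strong count is exactly 1,
+        // which is why the repo's clone of the store is dropped first.
+        // Standalone reduction of the pattern (std only; `String` stands in
+        // for `Store`):
+        //
+        // use std::sync::Arc;
+        //
+        // fn set_caps_in_place(store: Arc<String>, repo_clone: Arc<String>) -> Arc<String> {
+        //     drop(repo_clone); // strong count must fall back to 1
+        //     let mut owned = Arc::into_inner(store).expect("another clone is still alive");
+        //     owned.push_str(" +caps"); // mutate with exclusive ownership
+        //     Arc::new(owned) // re-share the updated value
+        // }
+        // ------------------------------------------------------------------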
Arc::::into_inner(store).unwrap(); + mut_store.set_read_caps(read_cap, overlay_read_cap); + let new_store = Arc::new(mut_store); + let _ = self.stores.insert(overlay_id, Arc::clone(&new_store)); + repo.store = new_store; + // if repo_already_inserted { + // let _ = self.repos.insert(*store_repo.repo_id(), repo); + // } + + Ok(repo) + } + + #[allow(dead_code)] + fn complete_site_store_already_inserted( + &mut self, + store_repo: StoreRepo, + ) -> Result<(), NgError> { + let overlay_id = store_repo.overlay_id_for_storage_purpose(); + let store = self + .stores + .remove(&overlay_id) + .ok_or(NgError::StoreNotFound)?; + + let mut repo = self.repos.remove(store.id()).ok_or(NgError::RepoNotFound)?; + // log_info!( + // "{}", + // Arc::::strong_count(&repo.store) + // ); + let read_cap = repo.read_cap.to_owned().unwrap(); + let overlay_read_cap = repo.overlay_branch_read_cap().cloned(); + + drop(repo.store); + //log_info!("{}", Arc::::strong_count(&store)); + let mut mut_store = Arc::::into_inner(store).unwrap(); + mut_store.set_read_caps(read_cap, overlay_read_cap); + let new_store = Arc::new(mut_store); + let _ = self.stores.insert(overlay_id, Arc::clone(&new_store)); + repo.store = new_store; + + let _ = self.repos.insert(*store_repo.repo_id(), repo); + + Ok(()) + } + + #[allow(dead_code)] + pub(crate) fn get_store(&self, store_repo: &StoreRepo) -> Result, VerifierError> { + let overlay_id = store_repo.overlay_id_for_storage_purpose(); + let store = self + .stores + .get(&overlay_id) + .ok_or(VerifierError::StoreNotFound)?; + Ok(Arc::clone(store)) + } + + pub(crate) fn get_repo_mut( + &mut self, + id: &RepoId, + _store_repo: &StoreRepo, + ) -> Result<&mut Repo, VerifierError> { + //let store = self.get_store(store_repo); + let repo_ref = self.repos.get_mut(id).ok_or(VerifierError::RepoNotFound); + // .or_insert_with(|| { + // // load from storage + // Repo { + // id: *id, + // repo_def: Repository::new(&PubKey::nil(), &vec![]), + // read_cap: None, + // write_cap: None, + // signer: None, + // members: HashMap::new(), + // branches: HashMap::new(), + // store, + // } + // }); + repo_ref + } + + #[allow(dead_code)] + fn add_store(&mut self, store: Arc) { + let overlay_id = store.get_store_repo().overlay_id_for_storage_purpose(); + if self.stores.contains_key(&overlay_id) { + return; + } + // TODO: store in user_storage + self.stores.insert(overlay_id, store); + } + + // pub(crate) fn update_current_heads( + // &mut self, + // repo_id: &RepoId, + // branch_id: &BranchId, + // current_heads: Vec, + // ) -> Result<(), VerifierError> { + // let repo = self + // .repos + // .get_mut(repo_id) + // .ok_or(VerifierError::RepoNotFound)?; + // let branch = repo + // .branches + // .get_mut(branch_id) + // .ok_or(VerifierError::BranchNotFound)?; + // branch.current_heads = current_heads; + // Ok(()) + // } + + pub(crate) async fn new_event( + &mut self, + commit: &Commit, + additional_blocks: &Vec, + repo_id: RepoId, + store_repo: &StoreRepo, + ) -> Result<(), NgError> { + if self.last_seq_num + 1 >= self.max_reserved_seq_num { + self.reserve_more(1)?; + } + self.new_event_(commit, additional_blocks, repo_id, store_repo) + .await + } + + #[allow(dead_code)] + pub(crate) async fn new_event_with_repo( + &mut self, + commit: &Commit, + additional_blocks: &Vec, + repo: &Repo, + ) -> Result<(), NgError> { + if self.last_seq_num + 1 >= self.max_reserved_seq_num { + self.reserve_more(1)?; + } + self.new_event_with_repo_(commit, additional_blocks, repo) + .await + } + + async fn new_event_( + &mut self, + 
commit: &Commit, + additional_blocks: &Vec, + repo_id: RepoId, + store_repo: &StoreRepo, + ) -> Result<(), NgError> { + let publisher = self.config.peer_priv_key.clone(); + self.last_seq_num += 1; + let seq_num = self.last_seq_num; + let repo = self.get_repo(&repo_id, store_repo)?; + + let event = Event::new(&publisher, seq_num, commit, additional_blocks, repo)?; + let past = commit.direct_causal_past(); + self.send_or_save_event_to_outbox( + commit.reference().unwrap(), + past, + event, + repo.store.inner_overlay(), + ) + .await?; + Ok(()) + } + + async fn new_event_with_repo_( + &mut self, + commit: &Commit, + additional_blocks: &Vec, + repo: &Repo, + ) -> Result<(), NgError> { + let publisher = self.config.peer_priv_key.clone(); + self.last_seq_num += 1; + let seq_num = self.last_seq_num; + + let event = Event::new(&publisher, seq_num, commit, additional_blocks, repo)?; + self.send_or_save_event_to_outbox( + commit.reference().unwrap(), + commit.direct_causal_past(), + event, + repo.store.inner_overlay(), + ) + .await?; + Ok(()) + } + + #[allow(dead_code)] + pub(crate) fn last_seq_number(&mut self) -> Result { + if self.available_seq_nums() <= 1 { + self.reserve_more(1)?; + } + self.last_seq_num += 1; + Ok(self.last_seq_num) + } + + pub(crate) async fn new_commit( + &mut self, + commit_body: CommitBodyV0, + repo_id: &RepoId, + branch_id: &BranchId, + store_repo: &StoreRepo, + additional_blocks: &Vec, + deps: Vec, + files: Vec, + ) -> Result<(), NgError> { + let commit = { + let repo = self.get_repo(repo_id, &store_repo)?; + let branch = repo.branch(branch_id)?; + let commit = Commit::new_with_body_and_save( + self.user_privkey(), + self.user_id(), + *branch_id, + QuorumType::NoSigning, + deps, + vec![], + branch.current_heads.clone(), + vec![], + files, + vec![], + vec![], + CommitBody::V0(commit_body), + 0, + &repo.store, + )?; + self.verify_commit_(&commit, branch_id, repo_id, Arc::clone(&repo.store), true) + .await?; + commit + }; + //log_info!("{}", commit); + + self.new_event(&commit, additional_blocks, *repo_id, store_repo) + .await + } + + pub(crate) async fn new_commits( + &mut self, + // commit_body, quorum_type, additional_blocks, deps, files + proto_commits: Vec<( + CommitBodyV0, + QuorumType, + &Vec, + Vec, + Vec, + )>, + repo_id: &RepoId, + branch_id: &BranchId, + store_repo: &StoreRepo, + ) -> Result<(), NgError> { + let repo = self.get_repo(repo_id, &store_repo)?; + let branch = repo.branch(branch_id)?; + let mut acks = branch.current_heads.clone(); + let mut proto_events: Vec<(Commit, Vec)> = Vec::with_capacity(proto_commits.len()); + let store = Arc::clone(&repo.store); + for (commit_body, quorum_type, additional_blocks, deps, files) in proto_commits.into_iter() + { + let commit = { + let commit = Commit::new_with_body_and_save( + self.user_privkey(), + self.user_id(), + *branch_id, + quorum_type, + deps, + vec![], + acks, + vec![], + files, + vec![], + vec![], + CommitBody::V0(commit_body), + 0, + &store, + )?; + self.verify_commit_(&commit, branch_id, repo_id, Arc::clone(&store), true) + .await?; + commit + }; + acks = vec![commit.reference().unwrap()]; + proto_events.push((commit, additional_blocks.to_vec())); + } + + self.new_events(proto_events, *repo_id, store_repo).await + } + + pub(crate) async fn new_transaction_commit( + &mut self, + commit_body: CommitBodyV0, + repo_id: &RepoId, + branch_id: &BranchId, + store_repo: &StoreRepo, + deps: Vec, + files: Vec, + ) -> Result { + let commit = { + let repo = self.get_repo(repo_id, &store_repo)?; + let branch = 
repo.branch(branch_id)?;
+            let commit = Commit::new_with_body_and_save(
+                self.user_privkey(),
+                self.user_id(),
+                *branch_id,
+                QuorumType::NoSigning,
+                deps,
+                vec![],
+                branch.current_heads.clone(),
+                vec![],
+                files,
+                vec![],
+                vec![],
+                CommitBody::V0(commit_body),
+                0,
+                &repo.store,
+            )?;
+            commit
+        };
+        //log_info!("{}", commit);
+
+        self.new_event(&commit, &vec![], *repo_id, store_repo)
+            .await?;
+
+        Ok(commit)
+    }
+
+    #[allow(dead_code)]
+    pub(crate) async fn new_commit_simple(
+        &mut self,
+        commit_body: CommitBodyV0,
+        repo_id: &RepoId,
+        branch_id: &BranchId,
+        store_repo: &StoreRepo,
+        additional_blocks: &Vec<BlockId>,
+    ) -> Result<(), NgError> {
+        self.new_commit(
+            commit_body,
+            repo_id,
+            branch_id,
+            store_repo,
+            additional_blocks,
+            vec![],
+            vec![],
+        )
+        .await
+    }
+
+    pub(crate) fn update_repo_certificate(
+        &mut self,
+        repo_id: &RepoId,
+        certificate_ref: &ObjectRef,
+    ) {
+        match self.repos.get_mut(repo_id) {
+            Some(repo) => repo.certificate_ref = Some(certificate_ref.clone()),
+            None => {
+                self.temporary_repo_certificates
+                    .insert(*repo_id, certificate_ref.clone());
+            }
+        }
+        if let Some(user_storage) = self.user_storage_if_persistent() {
+            let _ = user_storage.update_certificate(repo_id, certificate_ref);
+        }
+        //TODO: verify the certificate against the previous one (chain), before changing it
+    }
+
+    #[allow(dead_code)]
+    pub(crate) async fn new_events_with_repo(
+        &mut self,
+        events: Vec<(Commit, Vec<BlockId>)>,
+        repo: &Repo,
+    ) -> Result<(), NgError> {
+        let missing_count = events.len() as i64 - self.available_seq_nums() as i64;
+        // the cast from u64 to i64 halves the usable range of reserved seq_nums,
+        // but we will never reach a situation where so many seq_nums are reserved, nor such a big list of events to process
+        if missing_count >= 0 {
+            self.reserve_more(missing_count as u64 + 1)?;
+        }
+        for event in events {
+            self.new_event_with_repo_(&event.0, &event.1, repo).await?;
+        }
+        Ok(())
+    }
+
+    pub(crate) async fn new_events(
+        &mut self,
+        events: Vec<(Commit, Vec<BlockId>)>,
+        repo_id: RepoId,
+        store_repo: &StoreRepo,
+    ) -> Result<(), NgError> {
+        let missing_count = events.len() as i64 - self.available_seq_nums() as i64;
+        // the cast from u64 to i64 halves the usable range of reserved seq_nums,
+        // but we will never reach a situation where so many seq_nums are reserved, nor such a big list of events to process
+        if missing_count >= 0 {
+            self.reserve_more(missing_count as u64 + 1)?;
+        }
+        for event in events {
+            self.new_event_(&event.0, &event.1, repo_id.clone(), store_repo)
+                .await?;
+        }
+        Ok(())
+    }
+
+    fn available_seq_nums(&self) -> u64 {
+        self.max_reserved_seq_num - self.last_seq_num
+    }
+
+    pub(crate) fn reserve_more(&mut self, at_least: u64) -> Result<(), NgError> {
+        // the qty is calculated based on the last_reservation: the closer it is to now, the higher the qty.
+        // below 1 sec  => 100
+        // below 5 sec  => 10
+        // above 5 sec  => 1
+        let qty = match self.last_reservation.elapsed().unwrap().as_secs() {
+            0..=1 => 100u16,
+            2..=5 => 10u16,
+            6..
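+            // --------------------------------------------------------------
+            // [Illustrative sketch, not part of the original commit]
+            // Reservation in a nutshell: seq nums are handed out from a local
+            // pool, and the batch size grows when reservations happen in quick
+            // succession. Standalone reduction (std only; persistence of the
+            // ceiling, done by take_some_peer_last_seq_numbers, is elided):
+            //
+            // use std::time::Instant;
+            //
+            // struct SeqPool {
+            //     last: u64,
+            //     max_reserved: u64,
+            //     last_reservation: Instant,
+            // }
+            //
+            // impl SeqPool {
+            //     fn batch_size(&self, at_least: u16) -> u16 {
+            //         let qty = match self.last_reservation.elapsed().as_secs() {
+            //             0..=1 => 100, // bursty writer: reserve a lot
+            //             2..=5 => 10,
+            //             _ => 1, // mostly idle: reserve the bare minimum
+            //         };
+            //         qty.max(at_least)
+            //     }
+            //     fn next(&mut self) -> u64 {
+            //         if self.last + 1 >= self.max_reserved {
+            //             self.max_reserved += self.batch_size(1) as u64;
+            //             self.last_reservation = Instant::now();
+            //         }
+            //         self.last += 1;
+            //         self.last
+            //     }
+            // }
+            // --------------------------------------------------------------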
=> 1u16, + }; + self.take_some_peer_last_seq_numbers(max(at_least as u16, qty)) + } + + fn take_events_from_outbox(&mut self) -> Result, NgError> { + match &self.config.config_type { + VerifierConfigType::JsSaveSession(js) => { + let events_ser = (js.outbox_read_function)(self.peer_id)?; + let mut res = Vec::with_capacity(events_ser.len()); + for event_ser in events_ser { + let event = serde_bare::from_slice(&event_ser)?; + res.push(event); + } + Ok(res) + } + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + VerifierConfigType::RocksDb(path) => { + let mut path = path.clone(); + path.push(format!("outbox{}", self.peer_id.to_hash_string())); + let file = read(path.clone()); + let mut res = vec![]; + match file { + Ok(ser) => { + if ser.len() > 0 { + let mut pos: usize = 0; + let usize_size = usize::BITS as usize / 8; + loop { + let size = usize::from_le_bytes( + ser[pos..pos + usize_size] + .try_into() + .map_err(|_| NgError::SerializationError)?, + ); + //log_info!("size={}", size); + pos += usize_size; + //let buff = &ser[pos..pos + size]; + //log_info!("EVENT={:?}", buff.len()); + let event = serde_bare::from_slice(&ser[pos..pos + size])?; + //log_info!("EVENT_DESER={:?}", event); + res.push(event); + pos += size; + if pos >= ser.len() { + break; + } + } + } + } + Err(_) => {} + } + let _ = std::fs::remove_file(path); + Ok(res) + } + VerifierConfigType::Memory => { + let res = self.in_memory_outbox.drain(..).collect(); + Ok(res) + } + _ => unimplemented!(), + } + } + + pub async fn client_request< + A: Into + std::fmt::Debug + Sync + Send + 'static, + B: TryFrom + std::fmt::Debug + Sync + Send + 'static, + >( + &self, + msg: A, + ) -> Result, NgError> { + if self.connected_broker.is_some() { + let connected_broker = self.connected_broker.clone(); + let broker = BROKER.read().await; + let user = self.user_id().clone(); + + broker + .request::(&Some(user), &connected_broker.into(), msg) + .await + } else { + Err(NgError::NotConnected) + } + } + + async fn send_or_save_event_to_outbox<'a>( + &'a mut self, + commit_ref: ObjectRef, + past: Vec, + event: Event, + overlay: OverlayId, + ) -> Result<(), NgError> { + //log_info!("========== EVENT {:03}: {}", event.seq_num(), event); + + let (repo_id, branch_id) = self + .topics + .get(&(overlay, *event.topic_id())) + .ok_or(NgError::TopicNotFound)? 
+ .to_owned(); + + self.update_branch_current_heads(&repo_id, &branch_id, past, commit_ref.clone())?; + + if self.connected_broker.is_some() { + // send the event to the server already + let connected_broker = self.connected_broker.clone(); + let broker = BROKER.read().await; + let user = self.user_id().clone(); + self.send_event(event, &broker, &Some(user), &connected_broker, overlay) + .await?; + } else { + match &self.config.config_type { + VerifierConfigType::JsSaveSession(js) => { + //log_info!("========== SAVING EVENT {:03}", event.seq_num()); + + let mut file_blocks = Vec::new(); + if !event.file_ids().is_empty() { + let store = &self.repos.get(&repo_id).unwrap().store; + let commit = Commit::load(commit_ref, store, false)?; + for file_ref in commit.files() { + let obj = Object::load_ref(&file_ref, store)?; + file_blocks.append(&mut obj.into_blocks()); + } + } + let e = EventOutboxStorage { + event, + overlay, + file_blocks, + }; + + (js.outbox_write_function)( + self.peer_id, + e.event.seq_num(), + serde_bare::to_vec(&e)?, + )?; + } + #[cfg(all(not(target_family = "wasm"), not(docsrs)))] + VerifierConfigType::RocksDb(path) => { + let mut path = path.clone(); + std::fs::create_dir_all(path.clone()).unwrap(); + path.push(format!("outbox{}", self.peer_id.to_hash_string())); + let mut file = OpenOptions::new() + .append(true) + .create(true) + .open(path) + .map_err(|_| NgError::IoError)?; + let e = EventOutboxStorage { + event, + overlay, + file_blocks: vec![], + }; + let event_ser = serde_bare::to_vec(&e)?; + //log_info!("EVENT size={}", event_ser.len()); + //log_info!("EVENT {:?}", event_ser); + let size_ser = event_ser.len().to_le_bytes().to_vec(); + file.write_all(&size_ser).map_err(|_| NgError::IoError)?; + file.flush().map_err(|_| NgError::IoError)?; + file.write_all(&event_ser).map_err(|_| NgError::IoError)?; + file.flush().map_err(|_| NgError::IoError)?; + file.sync_data().map_err(|_| NgError::IoError)?; + } + VerifierConfigType::Memory => { + self.in_memory_outbox.push(EventOutboxStorage { + event, + overlay, + file_blocks: vec![], + }); + } + _ => unimplemented!(), + } + } + Ok(()) + } + + pub fn connection_lost(&mut self) { + self.connected_broker = BrokerPeerId::None; + // for (_, repo) in self.repos.iter_mut() { + // repo.opened_branches = HashMap::new(); + // } + } + + pub async fn sync(&mut self) { + let mut branches = vec![]; + { + for (id, repo) in self.repos.iter_mut() { + for (branch, publisher) in repo.opened_branches.iter() { + branches.push((*id, *branch, *publisher)); + } + repo.opened_branches = HashMap::new(); + } + } + let connected_broker = self.connected_broker.clone(); + let user = self.user_id().clone(); + let broker = BROKER.read().await; + //log_info!("looping on branches {:?}", branches); + for (repo, branch, publisher) in branches { + //log_info!("open_branch_ repo {} branch {}", repo, branch); + let _e = self + .open_branch_( + &repo, + &branch, + publisher, + &broker, + &Some(user), + &connected_broker, + false, + ) + .await; + } + } + + pub async fn connection_opened(&mut self, peer: DirectPeerId) -> Result<(), NgError> { + self.connected_broker = self.connected_broker.to_direct_if_not_local(peer)?; + log_info!("CONNECTION ESTABLISHED WITH peer {}", peer); + if let Err(e) = self.bootstrap().await { + self.connected_broker = BrokerPeerId::None; + return Err(e); + } + + let connected_broker = self.connected_broker.clone(); + + let mut branches = vec![]; + { + for (id, repo) in self.repos.iter_mut() { + for (branch, publisher) in 
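+                // --------------------------------------------------------------
+                // [Illustrative sketch, not part of the original commit] The
+                // outbox file written above is a flat sequence of frames: a
+                // `usize` little-endian length prefix followed by a serde_bare
+                // payload, and take_events_from_outbox walks it back the same
+                // way. Appending with a length prefix means a torn write can
+                // only corrupt the final frame. Standalone round-trip of just
+                // the framing (std only; assumes a well-formed file):
+                //
+                // use std::io::Write;
+                //
+                // fn write_frame(out: &mut impl Write, payload: &[u8]) -> std::io::Result<()> {
+                //     out.write_all(&payload.len().to_le_bytes())?;
+                //     out.write_all(payload)
+                // }
+                //
+                // fn read_frames(mut data: &[u8]) -> Vec<Vec<u8>> {
+                //     let w = usize::BITS as usize / 8;
+                //     let mut frames = vec![];
+                //     while data.len() >= w {
+                //         let size = usize::from_le_bytes(data[..w].try_into().unwrap());
+                //         frames.push(data[w..w + size].to_vec());
+                //         data = &data[w + size..];
+                //     }
+                //     frames
+                // }
+                // --------------------------------------------------------------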
repo.opened_branches.iter() { + branches.push((*id, *branch, *publisher)); + } + repo.opened_branches = HashMap::new(); + } + } + + let res = self.send_outbox().await; + log_info!("SENDING EVENTS FROM OUTBOX RETURNED: {:?}", res); + + let user = self.user_id().clone(); + let broker = BROKER.read().await; + // log_debug!( + // "looping on branches {:?}", + // branches + // .iter() + // .map(|(_, b, _)| b.to_string()) + // .collect::>() + // ); + for (repo, branch, publisher) in branches { + //log_debug!("open_branch_ repo {} branch {}", repo, branch); + let _e = self + .open_branch_( + &repo, + &branch, + publisher, + &broker, + &Some(user), + &connected_broker, + false, + ) + .await; + // log_debug!( + // "END OF open_branch_ repo {} branch {} with {:?}", + // repo, + // branch, + // _e + // ); + // discarding error. + } + + // registering inbox for protected and public store. (FIXME: this should be done instead in the 1st connection during wallet creation) + let remote = self.connected_broker.connected_or_err()?; + let mut done = false; + for (_,store) in self.stores.iter() { + if store.id() == self.protected_store_id() || store.id() == self.public_store_id() { + let repo = self.get_repo( store.id(), &store.get_store_repo())?; + let inbox = repo.inbox.to_owned().unwrap(); + // sending InboxRegister + let msg = InboxRegister::new(inbox, store.outer_overlay())?; + broker + .request::(&Some(user), &remote, msg) + .await?; + if !done { + done = true; + } else { + break; + } + } + } + + Ok(()) + } + + pub(crate) async fn open_branch( + &mut self, + repo_id: &RepoId, + branch: &BranchId, + as_publisher: bool, + ) -> Result<(), NgError> { + if !self.connected_broker.is_some() { + let repo: &mut Repo = self.repos.get_mut(repo_id).ok_or(NgError::RepoNotFound)?; + repo.opened_branches.insert(*branch, as_publisher); + return Ok(()); + } + + let user = self.user_id().clone(); + let connected_broker = self.connected_broker.clone(); + self.open_branch_( + repo_id, + branch, + as_publisher, + &BROKER.read().await, + &Some(user), + &connected_broker, + false, + ) + .await + } + + pub(crate) async fn put_blocks(&self, blocks: Vec, repo: &Repo) -> Result<(), NgError> { + let overlay = repo.store.overlay_for_read_on_client_protocol(); + + let broker = BROKER.read().await; + let user = self.user_id().clone(); + let remote = self.connected_broker.connected_or_err()?; + + let msg = BlocksPut::V0(BlocksPutV0 { + blocks, + overlay: Some(overlay), + }); + broker + .request::(&Some(user), &remote, msg) + .await?; + Ok(()) + } + + pub(crate) async fn has_blocks( + &self, + blocks: Vec, + repo: &Repo, + ) -> Result { + let overlay = repo.store.overlay_for_read_on_client_protocol(); + + let broker = BROKER.read().await; + let user = self.user_id().clone(); + let remote = self.connected_broker.connected_or_err()?; + + let msg = BlocksExist::V0(BlocksExistV0 { + blocks, + overlay: Some(overlay), + }); + if let SoS::Single(found) = broker + .request::(&Some(user), &remote, msg) + .await? 
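+        // ------------------------------------------------------------------
+        // [Illustrative sketch, not part of the original commit]
+        // put_all_blocks_of_file (earlier in this file) combines has_blocks
+        // and put_blocks into a check-then-upload dedup: ask the broker which
+        // blocks already exist, then send only the missing ones. Reduction
+        // over plain sets (std only; u64 stands in for BlockId):
+        //
+        // use std::collections::HashSet;
+        //
+        // fn blocks_to_upload(local: &[u64], remote_has: &HashSet<u64>) -> Vec<u64> {
+        //     local.iter().copied().filter(|id| !remote_has.contains(id)).collect()
+        // }
+        // ------------------------------------------------------------------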
+ { + Ok(found) + } else { + Err(NgError::InvalidResponse) + } + } + + pub(crate) async fn open_branch_<'a>( + &mut self, + repo_id: &RepoId, + branch: &BranchId, + as_publisher: bool, + broker: &RwLockReadGuard<'static, Broker>, + user: &Option, + remote_broker: &BrokerPeerId, + force: bool, + ) -> Result<(), NgError> { + let (need_open, mut need_sub, overlay) = { + let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?; + let overlay = repo.store.overlay_for_read_on_client_protocol(); + if force { + (true, true, overlay) + } else { + match repo.opened_branches.get(branch) { + Some(val) => (false, as_publisher && !val, overlay), + None => (repo.opened_branches.is_empty(), true, overlay), + } + } + }; + // log_info!( + // "OPEN BRANCH {branch} need_open {} need_sub {}", + // need_open, + // need_sub + // ); + + let remote = remote_broker.into(); + + if need_open { + // TODO: implement OpenRepo. for now we always do a Pinning because OpenRepo is not implemented on the broker. + let msg = RepoPinStatusReq::V0(RepoPinStatusReqV0 { + hash: repo_id.into(), + overlay: Some(overlay), + }); + match broker + .request::(user, &remote, msg) + .await + { + Err(NgError::ServerError(ServerError::False)) + | Err(NgError::ServerError(ServerError::RepoAlreadyOpened)) => { + // pinning the repo on the server broker + let (pin_req, topic_id) = { + let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?; + let topic_id = repo.branch(branch).unwrap().topic.unwrap(); + // TODO only pinning the requested branch. + // let pin_req = + // PinRepo::for_branch(repo, branch, remote_broker.broker_peer_id()); + let pin_req = PinRepo::from_repo(repo, remote_broker.broker_peer_id()); + (pin_req, topic_id) + }; + + match broker + .request::(user, &remote, pin_req) + .await + { + Ok(SoS::Single(opened)) => { + self.repo_was_opened(repo_id, &opened)?; + //TODO: check that in the returned opened_repo, the branch we are interested in has effectively been subscribed as publisher by the broker. + + for topic in opened { + let (_, branch_id) = self + .topics + .get(&(overlay, *topic.topic_id())) + .ok_or(NgError::TopicNotFound)? 
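+                // --------------------------------------------------------------
+                // [Illustrative sketch, not part of the original commit] The
+                // (need_open, need_sub) computation at the top of open_branch_
+                // is a small decision table. Standalone reduction (std only;
+                // u64 stands in for BranchId):
+                //
+                // use std::collections::HashMap;
+                //
+                // /// Returns (need_open, need_sub).
+                // fn open_decision(
+                //     opened: &HashMap<u64, bool>, // branch -> opened as publisher?
+                //     branch: u64,
+                //     as_publisher: bool,
+                //     force: bool,
+                // ) -> (bool, bool) {
+                //     if force {
+                //         return (true, true);
+                //     }
+                //     match opened.get(&branch) {
+                //         // already open: re-subscribe only to gain missing publisher rights
+                //         Some(is_publisher) => (false, as_publisher && !is_publisher),
+                //         // first branch of this repo triggers pinning, and always subscribes
+                //         None => (opened.is_empty(), true),
+                //     }
+                // }
+                // --------------------------------------------------------------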
+ .to_owned(); + + //if topic.topic_id() == &topic_id { + self.do_sync_req_if_needed( + broker, + user, + &remote, + &branch_id, + repo_id, + topic.known_heads(), + topic.commits_nbr(), + ) + .await?; + //break; + //} + } + } + Ok(_) => return Err(NgError::InvalidResponse), + Err(e) => return Err(e), + } + } + Err(e) => return Err(e), + Ok(SoS::Single(pin_status)) => { + // checking that the branch is subscribed as publisher + let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?; + let branch_info = repo.branch(branch)?; + let topic_id = branch_info.topic.as_ref().unwrap(); + // log_info!( + // "as_publisher {} {}", + // as_publisher, + // pin_status.is_topic_subscribed_as_publisher(topic_id) + // ); + if as_publisher && !pin_status.is_topic_subscribed_as_publisher(topic_id) { + need_sub = true; + //log_info!("need_sub forced to true"); + } else { + for topic in pin_status.topics() { + if topic.topic_id() == topic_id { + self.do_sync_req_if_needed( + broker, + user, + &remote, + branch, + repo_id, + topic.known_heads(), + topic.commits_nbr(), + ) + .await?; + break; + } + } + } + } + _ => return Err(NgError::InvalidResponse), + } + } + if need_sub { + // we subscribe + let repo = self.repos.get(repo_id).ok_or(NgError::RepoNotFound)?; + let branch_info = repo.branch(branch)?; + + let broker_id = if as_publisher { + if branch_info.topic_priv_key.is_none() { + // we need to subscribe as publisher, but we cant + log_debug!("no topic_priv_key"); + return Err(NgError::PermissionDenied); + } + Some(remote_broker.broker_peer_id()) + } else { + None + }; + + let topic_sub = TopicSub::new(repo, branch_info, broker_id); + + match broker + .request::(user, &remote, topic_sub) + .await + { + Ok(SoS::Single(sub)) => { + let repo = self.repos.get_mut(&repo_id).ok_or(NgError::RepoNotFound)?; + Self::branch_was_opened(&self.topics, repo, &sub)?; + self.do_sync_req_if_needed( + broker, + user, + &remote, + branch, + repo_id, + sub.known_heads(), + sub.commits_nbr(), + ) + .await?; + } + Ok(_) => return Err(NgError::InvalidResponse), + Err(e) => { + return Err(e); + } + } + } + Ok(()) + } + + async fn send_event( + &mut self, + event: Event, + broker: &RwLockReadGuard<'static, Broker>, + user: &Option, + remote: &BrokerPeerId, + overlay: OverlayId, + ) -> Result<(), NgError> { + assert!(overlay.is_inner()); + let (repo_id, branch_id) = self + .topics + .get(&(overlay, *event.topic_id())) + .ok_or(NgError::TopicNotFound)? + .to_owned(); + + self.open_branch_(&repo_id, &branch_id, true, broker, user, remote, false) + .await?; + + let _ = broker + .request::(user, &remote.into(), PublishEvent::new(event, overlay)) + .await?; + + Ok(()) + } + + pub async fn get_qrcode_for_profile(&self, public: bool, size: u32) -> Result { + + let profile_id = if public { + self.public_store_id() + } else { + self.protected_store_id() + }; + + let repo = self.repos.get(&profile_id).ok_or(NgError::RepoNotFound)?; + let inbox = repo.inbox.to_owned().ok_or(NgError::InboxNotFound)?.to_pub(); + let profile = repo.store.get_store_repo().clone(); + + let sparql = format!(" + PREFIX vcard: + SELECT ?name ?email WHERE + {{ <> vcard:fn ?name . + <> vcard:hasEmail ?email . + }}"); + //log_info!("{sparql}"); + let (name, email) = match self.sparql_query( + &NuriV0::new_repo_target_from_id(profile_id), + sparql, Some(NuriV0::repo_id(profile_id))).await? 
+ { + QueryResults::Solutions(mut sols) => { + match sols.next() { + None => { + //log_info!("name or email not found"); + (None, None) + } + Some(Err(e)) => { + return Err(VerifierError::SparqlError(e.to_string())); + } + Some(Ok(sol)) => { + let name = if let Some(Term::Literal(l)) = sol.get("name") { + Some(l.value().to_string()) + } else { + None + }; + let email = if let Some(Term::Literal(l)) = sol.get("email") { + Some(l.value().to_string()) + } else { + None + }; + (name, email) + } + } + } + _ => return Err(VerifierError::SparqlError(NgError::InvalidResponse.to_string())), + }; + if name.is_none() { + return Err(VerifierError::InvalidProfile); + } + let profile_sharing = NgQRCode::ProfileSharingV0(NgQRCodeProfileSharingV0 { + inbox, + profile, + name: name.unwrap(), + email + }); + + let ser = serde_bare::to_vec(&profile_sharing)?; + let encoded = base64_url::encode(&ser); + log_info!("qrcode= {encoded}"); + match QrCode::with_error_correction_level(encoded.as_bytes(), qrcode::EcLevel::M) { + Ok(qr) => { + Ok(qr + .render() + .max_dimensions(size, size) + .dark_color(svg::Color("#000000")) + .light_color(svg::Color("#ffffff")) + .build() + ) + } + Err(e) => Err(VerifierError::QrCode(e.to_string())), + } + } + + pub async fn inbox(&mut self, msg: &InboxMsg, from_queue: bool) { + + //log_info!("RECEIVED INBOX MSG {:?}", msg); + + match self.inboxes.get(&msg.body.to_inbox) { + Some(repo_id) => { + match self.repos.get(repo_id) { + Some(repo) => { + if let Some(privkey) = &repo.inbox { + match msg.get_content(privkey) { + Ok(content) => { + //log_info!("received msg content {:?}", content); + let res = self.process_inbox(msg, content).await; + if let Err(e) = res { + log_err!("Error during process_inbox {e}"); + } + }, + Err(e) => { + log_err!("cannot unseal inbox msg {e}"); + } + } + } + }, + None => {} + } + }, + None => {} + } + + if from_queue && self.connected_broker.is_some(){ + log_info!("try to pop one more inbox msg"); + // try to pop inbox msg + let connected_broker = self.connected_broker.clone(); + let broker = BROKER.read().await; + let user = self.user_id().clone(); + let _ = broker + .send_client_event(&Some(user), &connected_broker.into(), ClientEvent::InboxPopRequest) + .await; + } + } + + pub async fn deliver(&mut self, event: Event, overlay: OverlayId) { + //let event_str = event.to_string(); + if let Err(e) = self.deliver_(event, overlay).await { + log_err!("DELIVERY ERROR {}", e); + } + } + + async fn deliver_(&mut self, event: Event, overlay: OverlayId) -> Result<(), NgError> { + let (repo_id, branch_id) = self + .topics + .get(&(overlay, *event.topic_id())) + .ok_or(NgError::TopicNotFound)? 
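+        // [Editor's note] Inbox draining above is pull-based: each queued
+        // message that gets processed triggers exactly one
+        // ClientEvent::InboxPopRequest for the next, so the broker releases
+        // queued inbox messages one at a time rather than pushing the whole
+        // backlog at once.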
+ .to_owned(); + + // let outer = self + // .inner_to_outer + // .get(&overlay) + // .ok_or(VerifierError::OverlayNotFound)?; + // let store = self + // .stores + // .get(outer) + // .ok_or(VerifierError::OverlayNotFound)?; + let repo = self + .repos + .get(&repo_id) + .ok_or(VerifierError::RepoNotFound)?; + repo.branch_is_opened(&branch_id) + .then_some(true) + .ok_or(VerifierError::BranchNotOpened)?; + let branch = repo.branch(&branch_id)?; + + let commit = event.open( + &repo.store, + &repo_id, + &branch_id, + &branch.read_cap.as_ref().unwrap().key, + )?; + + self.verify_commit(&commit, &branch_id, &repo_id, Arc::clone(&repo.store)) + .await?; + + Ok(()) + } + + pub(crate) async fn verify_commit( + &mut self, + commit: &Commit, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), VerifierError> { + self.verify_commit_(commit, branch_id, repo_id, store, false) + .await + } + + pub(crate) async fn verify_commit_( + &mut self, + commit: &Commit, + branch_id: &BranchId, + repo_id: &RepoId, + store: Arc, + skip_heads_update: bool, + ) -> Result<(), VerifierError> { + //let quorum_type = commit.quorum_type(); + // log_info!( + // "VERIFYING {} {} {:?}", + // store.get_store_repo(), + // commit, + // store + // ); + //log_info!("{}", commit); + // TODO: check that DAG is well formed. check the heads + + let res = match commit.body().ok_or(VerifierError::CommitBodyNotFound)? { + CommitBody::V0(v0) => match v0 { + CommitBodyV0::Repository(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::RootBranch(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::Branch(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::SyncSignature(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::AddBranch(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::StoreUpdate(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::AddInboxCap(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::AddSignerCap(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::AddFile(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::AddRepo(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::Snapshot(a) => a.verify(commit, self, branch_id, repo_id, store), + CommitBodyV0::AsyncSignature(a) => { + a.verify(commit, self, branch_id, repo_id, store) + } + CommitBodyV0::AsyncTransaction(a) => { + Box::pin(self.verify_async_transaction(a, commit, branch_id, repo_id, store)) + } + _ => { + log_err!("unimplemented verifier {}", commit); + return Err(VerifierError::NotImplemented); + } + }, + }; + let res = res.await; + if res.is_ok() && !skip_heads_update { + let commit_ref = commit.reference().unwrap(); + let past = commit.direct_causal_past(); + self.update_branch_current_heads(repo_id, branch_id, past, commit_ref)?; + Ok(()) + } else { + res + } + } + + pub(crate) fn get_main_branch_current_heads_nuri(&self, repo_id: &RepoId) -> Result { + if let Some(repo) = self.repos.get(repo_id) { + if let Some(info) = repo.main_branch() { + let mut res = NuriV0::repo_id(repo_id); + for head in info.current_heads.iter() { + res = [res,NuriV0::commit_ref(head)].join(":"); + } + return Ok(res); + } + } + Err(VerifierError::RepoNotFound) + } + + fn update_branch_current_heads( + &mut self, + repo_id: &RepoId, + branch: &BranchId, + direct_past: Vec, + commit_ref: ObjectRef, + ) -> Result<(), VerifierError> { + if let Some(repo) = 
self.repos.get_mut(repo_id) { + let new_heads = repo.update_branch_current_heads(branch, commit_ref, direct_past)?; + + //log_info!("NEW HEADS {} {:?}", branch, new_heads); + if let Some(user_storage) = self.user_storage_if_persistent() { + let _ = user_storage.update_branch_current_heads(repo_id, branch, new_heads); + } + } + Ok(()) + } + + fn user_storage_if_persistent(&self) -> Option>> { + if self.is_persistent() { + self.user_storage() + } else { + None + } + } + + pub(crate) fn user_storage(&self) -> Option>> { + if let Some(us) = self.user_storage.as_ref() { + Some(Arc::clone(us)) + } else { + None + } + } + + pub(crate) fn add_branch_and_save( + &mut self, + repo_id: &RepoId, + branch_info: BranchInfo, + store_repo: &StoreRepo, + ) -> Result<(), VerifierError> { + if let Some(user_storage) = self.user_storage_if_persistent() { + user_storage.add_branch(repo_id, &branch_info)?; + } + let branch_id = branch_info.id.clone(); + let topic_id = branch_info.topic.clone().unwrap(); + let repo = self.get_repo_mut(repo_id, store_repo)?; + let res = repo.branches.insert(branch_info.id.clone(), branch_info); + //assert!(res.is_none()); + + let overlay_id: OverlayId = repo.store.inner_overlay(); + let repo_id = repo_id.clone(); + let res = self + .topics + .insert((overlay_id, topic_id), (repo_id, branch_id)); + //assert_eq!(res, None); + + Ok(()) + } + + pub(crate) fn update_branch( + &self, + repo_id: &RepoId, + branch_id: &BranchId, + store_repo: &StoreRepo, + ) -> Result<(), VerifierError> { + if let Some(user_storage) = self.user_storage_if_persistent() { + let repo = self.get_repo(repo_id, store_repo)?; + user_storage.add_branch(repo_id, repo.branch(branch_id)?)?; + } + Ok(()) + } + + pub(crate) fn update_inbox_cap_v0( + &mut self, + inbox_cap: &AddInboxCapV0, + ) -> Result<(), VerifierError> { + let storage = match self.repos.get_mut(&inbox_cap.repo_id) { + Some(repo) => { + repo.inbox = Some(inbox_cap.priv_key.clone()); + log_info!("INBOX for {} : {}", inbox_cap.repo_id.to_string(), inbox_cap.priv_key.to_pub().to_string()); + self.inboxes.insert(inbox_cap.priv_key.to_pub(), repo.id); + self.user_storage_if_persistent() + } + None => self.user_storage(), + }; + if let Some(user_storage) = storage { + user_storage.update_inbox_cap(&inbox_cap.repo_id, &inbox_cap.overlay, &inbox_cap.priv_key)?; + } + + Ok(()) + } + + pub(crate) fn update_signer_cap( + &mut self, + signer_cap: &SignerCap, + ) -> Result<(), VerifierError> { + let storage = match self.repos.get_mut(&signer_cap.repo) { + Some(repo) => { + repo.signer = Some(signer_cap.clone()); + self.user_storage_if_persistent() + } + None => self.user_storage(), + }; + if let Some(user_storage) = storage { + user_storage.update_signer_cap(signer_cap)?; + } + + Ok(()) + } + + pub(crate) fn add_repo_and_save(&mut self, repo: Repo) -> &Repo { + let us = self.user_storage_if_persistent(); + let repo_ref: &Repo = self.add_repo_(repo); + // save in user_storage + if let Some(user_storage) = us { + let _ = user_storage.save_repo(repo_ref); + } + repo_ref + } + + pub(crate) fn get_repo( + &self, + id: &RepoId, + _store_repo: &StoreRepo, + ) -> Result<&Repo, VerifierError> { + //let store = self.get_store(store_repo); + let repo_ref: Result<&Repo, VerifierError> = + self.repos.get(id).ok_or(VerifierError::RepoNotFound); + repo_ref + } + + pub(crate) fn get_store_by_overlay_id( + &self, + id: &OverlayId, + ) -> Result, VerifierError> { + Ok(Arc::clone( + self.stores.get(id).ok_or(VerifierError::StoreNotFound)?, + )) + } + + async fn bootstrap(&mut 
self) -> Result<(), NgError> { + if let Err(e) = self.bootstrap_from_remote().await { + log_warn!("bootstrap_from_remote failed with {}", e); + // maybe it failed because the 3P stores are still in the outbox and haven't been sent yet. + // we immediately try to send the events present in the outbox + let res = self.send_outbox().await; + log_info!("SENDING 3P EVENTS FROM OUTBOX RETURNED: {:?}", res); + + return res; + } + Ok(()) + } + + async fn do_sync_req_if_needed( + &mut self, + broker: &RwLockReadGuard<'static, Broker>, + user: &Option, + remote: &Option, + branch_id: &BranchId, + repo_id: &RepoId, + remote_heads: &Vec, + remote_commits_nbr: u64, + ) -> Result<(), NgError> { + let (store, msg, branch_secret) = { + // log_info!( + // "do_sync_req_if_needed for branch {} {} {}", + // branch_id, + // remote_commits_nbr, + // Digest::print_all(remote_heads) + // ); + if remote_commits_nbr == 0 || remote_heads.is_empty() { + log_debug!("branch is new on the broker. doing nothing"); + return Ok(()); + } + + let repo = self.repos.get(repo_id).unwrap(); + let branch_info = repo.branch(branch_id)?; + + let store = Arc::clone(&repo.store); + + let ours = branch_info.current_heads.iter().map(|refe| refe.id); + let ours_set: HashSet = HashSet::from_iter(ours.clone()); + + let theirs = HashSet::from_iter(remote_heads.clone().into_iter()); + + if ours_set.difference(&theirs).count() == 0 + && theirs.difference(&ours_set).count() == 0 + { + // no need to sync + log_debug!( + "branch {} is up to date at heads {}", + branch_id, + Digest::print_iter(ours) + ); + return Ok(()); + } + + let mut theirs_found = HashSet::new(); + let mut visited = HashMap::new(); + + let mut recursor: Vec<(ObjectId, Option)> = + ours_set.iter().map(|h| (h.clone(), None)).collect(); + + // log_debug!( + // "SEARCHING FOR THEIR HEADS from OURS {}", + // Digest::print_iter(ours) + // ); + + let _ = Branch::load_causal_past( + &mut recursor, + &repo.store, + &theirs, + &mut visited, + &mut None, + &mut Some(&mut theirs_found), + &None, + ); + + // log_debug!( + // "FOUND THEIR HEADS {}", + // Digest::print_iter_ref(theirs_found.iter()) + // ); + + // for our in ours_set.iter() { + // //log_info!("OUR HEADS {}", our); + // if let Ok(cobj) = Object::load(*our, None, &repo.store) { + // let _ = + // } + // } + + let theirs_not_found: Vec = + theirs.difference(&theirs_found).cloned().collect(); + + let known_commits = if theirs_not_found.is_empty() { + //log_debug!("local heads are newer than remote"); + return Ok(()); + } else { + if visited.is_empty() { + None + } else { + // prepare bloom filter + let expected_elements = + remote_commits_nbr + max(visited.len() as u64, branch_info.commits_nbr); + let mut filter = Filter::new(27, expected_elements as usize); + for commit_id in visited.keys() { + filter.insert_hash(commit_id.get_hash()); + } + Some(BloomFilter::from_filter(&filter)) + } + }; + + let msg = TopicSyncReq::V0(TopicSyncReqV0 { + topic: branch_info.topic.unwrap(), + known_heads: ours_set.union(&theirs_found).into_iter().cloned().collect(), + target_heads: theirs_not_found, + known_commits, + overlay: Some(store.overlay_for_read_on_client_protocol()), + }); + ( + store, + msg, + branch_info.read_cap.as_ref().unwrap().key.clone(), + ) + }; + + match broker + .request::(user, remote, msg) + .await + { + Err(e) => return Err(e), + Ok(SoS::Stream(mut events)) => { + while let Some(event) = events.next().await { + let commit = event + .event() + .open(&store, repo_id, branch_id, &branch_secret)?; + + // TODO: deal with 
missing commits in the DAG (fetch them individually with CommitGet). This can happen because of false positive on BloomFilter + + self.verify_commit(&commit, branch_id, repo_id, Arc::clone(&store)) + .await?; + } + } + Ok(_) => return Err(NgError::InvalidResponse), + } + + Ok(()) + } + + async fn do_sync_req( + &mut self, + broker: &RwLockReadGuard<'static, Broker>, + user: &Option, + remote: &Option, + topic: &TopicId, + branch_id: &BranchId, + branch_secret: &ReadCapSecret, + repo_id: &RepoId, + store: Arc, + ) -> Result<(), NgError> { + let msg = TopicSyncReq::new_empty(*topic, &store.overlay_for_read_on_client_protocol()); + match broker + .request::(user, remote, msg) + .await + { + Err(e) => return Err(e), + Ok(SoS::Stream(mut events)) => { + while let Some(event) = events.next().await { + let commit = event + .event() + .open(&store, repo_id, branch_id, branch_secret)?; + + self.verify_commit(&commit, branch_id, repo_id, Arc::clone(&store)) + .await?; + } + } + Ok(_) => return Err(NgError::InvalidResponse), + } + Ok(()) + } + + pub(crate) async fn send_add_repo_to_store( + &mut self, + repo_id: &RepoId, + store_repo: &StoreRepo, + ) -> Result<(), VerifierError> { + let repo = self.get_repo(repo_id, store_repo)?; + + let transaction_commit_body = CommitBodyV0::AddRepo(AddRepo::V0(AddRepoV0 { + read_cap: repo.read_cap.clone().ok_or(VerifierError::RepoNotFound)?, + metadata: vec![], + })); + + let store_id = store_repo.repo_id(); + let store_branch_id = { + let store = self.get_repo(store_id, store_repo)?; + let store_branch = store.store_branch().ok_or(VerifierError::StoreNotFound)?; + store_branch.id.clone() + }; + + let _commit = self + .new_transaction_commit( + transaction_commit_body, + store_id, + &store_branch_id, + &store_repo, + vec![], + vec![], + ) + .await?; + + Ok(()) + } + + async fn load_store_from_read_cap<'a>( + &mut self, + broker: &RwLockReadGuard<'static, Broker>, + user: &Option, + remote: &Option, + store: Arc, + ) -> Result<(), NgError> { + let (repo_id, store_branch) = self + .load_repo_from_read_cap( + store.get_store_readcap(), + broker, + user, + remote, + Arc::clone(&store), + true, + ) + .await?; + + let repo = self.get_repo_mut(&repo_id, store.get_store_repo())?; + // for (b, _) in repo.branches.iter() { + // let _ = repo.opened_branches.insert(b.clone(), true); + // } + // adding the Store branch to the opened_branches + // TODO: only do it if the Store is 3P. + if let Some(store_branch_id) = store_branch { + let _ = repo.opened_branches.insert(store_branch_id, true); + } + + Ok(()) + } + + /// return the repo_id and an option branch_id of the Store branch, if any + pub(crate) async fn load_repo_from_read_cap<'a>( + &mut self, + read_cap: &ReadCap, + broker: &RwLockReadGuard<'static, Broker>, + user: &Option, + remote: &Option, + store: Arc, + load_branches: bool, + ) -> Result<(RepoId, Option), NgError> { + // first we fetch the read_cap commit of private store repo. + let root_branch_commit = Self::get_commit( + read_cap.clone(), + None, + &store.overlay_for_read_on_client_protocol(), + &broker, + user, + remote, + ) + .await?; + + match root_branch_commit + .body() + .ok_or(VerifierError::CommitBodyNotFound)? 
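+        // ------------------------------------------------------------------
+        // [Illustrative sketch, not part of the original commit] The
+        // `known_commits` field sent in TopicSyncReq above is a Bloom filter
+        // over every commit id already held locally, sized from the remote's
+        // commit count plus the local history; the broker tests candidates
+        // against it and skips probable hits. A from-scratch filter with the
+        // same contract (std only, k seeded hashes into a bit vector; the real
+        // code uses its own Filter type):
+        //
+        // use std::collections::hash_map::DefaultHasher;
+        // use std::hash::{Hash, Hasher};
+        //
+        // struct Bloom { bits: Vec<bool>, k: u64 }
+        //
+        // impl Bloom {
+        //     fn new(m: usize, k: u64) -> Self { Bloom { bits: vec![false; m], k } }
+        //     fn index(&self, item: &impl Hash, seed: u64) -> usize {
+        //         let mut h = DefaultHasher::new();
+        //         seed.hash(&mut h);
+        //         item.hash(&mut h);
+        //         h.finish() as usize % self.bits.len()
+        //     }
+        //     fn insert(&mut self, item: &impl Hash) {
+        //         for s in 0..self.k {
+        //             let i = self.index(item, s);
+        //             self.bits[i] = true;
+        //         }
+        //     }
+        //     /// False positives are possible, false negatives are not: which
+        //     /// is exactly why the sync reply may skip commits we still need,
+        //     /// and why the TODO above plans to fetch them with CommitGet.
+        //     fn maybe_contains(&self, item: &impl Hash) -> bool {
+        //         (0..self.k).all(|s| self.bits[self.index(item, s)])
+        //     }
+        // }
+        // ------------------------------------------------------------------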
+ { + CommitBody::V0(v0) => match v0 { + CommitBodyV0::RootBranch(root_branch) => { + // doing a SyncReq on the topic of root branch + + let topic = root_branch.topic(); + + let repo_id = root_branch.repo_id(); + self.do_sync_req( + &broker, + user, + remote, + topic, + repo_id, + &read_cap.key, + repo_id, + Arc::clone(&store), + ) + .await?; + + let mut store_branch = None; + + if load_branches { + let other_branches: Vec<(PubKey, PubKey, SymKey)> = self + .get_repo(repo_id, store.get_store_repo())? + .branches + .iter() + .map(|(branch_id, branch)| { + if branch.branch_type == BranchType::Store { + store_branch = Some(branch_id.clone()); + } + ( + branch_id.clone(), + branch.topic.clone().unwrap(), + branch.read_cap.as_ref().unwrap().key.clone(), + ) + }) + .collect(); + + // loading the other Branches of store + for (branch_id, topic, secret) in other_branches { + if branch_id == *repo_id { + // root branch of store is already synced + continue; + } + self.do_sync_req( + &broker, + user, + remote, + &topic, + &branch_id, + &secret, + repo_id, + Arc::clone(&store), + ) + .await?; + } + } + + log_debug!("loaded from read_cap {}", repo_id); + + return Ok((repo_id.clone(), store_branch)); + } + _ => return Err(VerifierError::RootBranchNotFound.into()), + }, + } + } + + async fn get_commit( + commit_ref: ObjectRef, + topic_id: Option, + overlay: &OverlayId, + broker: &RwLockReadGuard<'static, Broker>, + user: &Option, + remote: &Option, + ) -> Result { + let msg = CommitGet::V0(CommitGetV0 { + id: commit_ref.id, + topic: topic_id, // we dont have the topic (only available from RepoLink/BranchLink) but we are pretty sure the Broker has the commit anyway. + overlay: Some(*overlay), + }); + match broker.request::(user, remote, msg).await { + Err(NgError::ServerError(ServerError::NotFound)) => { + // TODO: fallback to BlocksGet, then Commit::load(with_body:true), which will return an Err(CommitLoadError::MissingBlocks), then do another BlocksGet with those, and then again Commit::load... + return Err(NgError::SiteNotFoundOnBroker); + } + Ok(SoS::Stream(blockstream)) => { + // we could use the in_memory block_storage of the verifier, but then we would have to remove the blocks from there. + // instead we just create a new temporary in memory block storage + let temp_mem_block_storage = + HashMapBlockStorage::from_block_stream(overlay, blockstream).await; + // creating a temporary store to access the blocks + let temp_store = Store::new_from_overlay_id( + overlay, + Arc::new(std::sync::RwLock::new(temp_mem_block_storage)), + ); + Ok(Commit::load(commit_ref, &temp_store, true)?) 
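+                // [Editor's note] The throwaway store is deliberate: these
+                // blocks belong to a repo that may not be open yet, so they
+                // must not land in the verifier's real block storage (from
+                // which they would later have to be removed). Dropping
+                // `temp_store` when this arm returns discards them wholesale.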
+ } + Ok(_) => return Err(NgError::InvalidResponse), + Err(e) => return Err(e), + } + } + + pub(crate) async fn fetch_blocks_if_needed( + &self, + id: &BlockId, + repo_id: &RepoId, + store_repo: &StoreRepo, + ) -> Result>, NgError> { + let repo = self.get_repo(repo_id, store_repo)?; + + let overlay = repo.store.overlay_for_read_on_client_protocol(); + + let broker = BROKER.read().await; + let user = Some(self.user_id().clone()); + let remote = &self.connected_broker; + + match repo.store.has(id) { + Err(StorageError::NotFound) => { + if remote.is_none() { + return Err(NgError::NotFound); + } + let msg = BlocksGet::V0(BlocksGetV0 { + ids: vec![*id], + topic: None, + include_children: true, + overlay: Some(overlay), + }); + match broker + .request::(&user, &remote.into(), msg) + .await + { + Ok(SoS::Stream(blockstream)) => Ok(Some(blockstream)), + Ok(_) => return Err(NgError::InvalidResponse), + Err(e) => return Err(e), + } + } + Err(e) => Err(e.into()), + Ok(()) => Ok(None), + } + } + + async fn bootstrap_from_remote(&mut self) -> Result<(), NgError> { + if self.need_bootstrap() { + let broker = BROKER.read().await; + let user = Some(self.user_id().clone()); + self.connected_broker.is_direct_or_err()?; + + let private_store_id = self.config.private_store_id.to_owned().unwrap(); + let private_store = self.create_private_store_from_credentials()?; + let remote = (&self.connected_broker).into(); + + self.load_store_from_read_cap(&broker, &user, &remote, private_store) + .await?; + + let other_stores: Vec> = self + .stores + .iter() + .map(|(_, store)| Arc::clone(store)) + .collect(); + + // load the other stores (protected and public) + for store in other_stores { + if *store.id() == private_store_id { + continue; + // we already loaded the private store + } + self.load_store_from_read_cap(&broker, &user, &remote, store) + .await?; + } + } + Ok(()) + } + + fn create_private_store_from_credentials(&mut self) -> Result, VerifierError> { + let private_store_id = self.config.private_store_id.to_owned().unwrap(); + let store_repo = StoreRepo::new_private(private_store_id); + + let store = Arc::new(Store::new( + store_repo, + self.config.private_store_read_cap.to_owned().unwrap(), + self.config.private_store_read_cap.to_owned().unwrap(), + self.get_arc_block_storage()?, + )); + + let store = self + .stores + .entry(store_repo.overlay_id_for_storage_purpose()) + .or_insert_with(|| store); + Ok(Arc::clone(store)) + } + + async fn load_from_credentials_and_outbox( + &mut self, + events: &Vec, + ) -> Result<(), VerifierError> { + let private_store_id = self.config.private_store_id.as_ref().unwrap(); + let private_inner_overlay_id = OverlayId::inner( + private_store_id, + &self.config.private_store_read_cap.as_ref().unwrap().key, + ); + + let private_store = self.create_private_store_from_credentials()?; + + // 2nd pass: load all the other branches of the private store repo. 
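+        // ------------------------------------------------------------------
+        // [Illustrative sketch, not part of the original commit] The two
+        // passes below replay the outbox in dependency order: private-store
+        // events first (with AddSignerCap commits postponed), then the events
+        // of every other store, and the postponed signer caps last. Standalone
+        // reduction of the partitioning (std only; the fields are stand-ins):
+        //
+        // struct Ev { overlay: u64, is_signer_cap: bool }
+        //
+        // fn replay_order(events: Vec<Ev>, private_overlay: u64) -> (Vec<Ev>, Vec<Ev>, Vec<Ev>) {
+        //     let (mut private_first, mut others, mut postponed_caps) = (vec![], vec![], vec![]);
+        //     for e in events {
+        //         if e.overlay != private_overlay {
+        //             others.push(e);
+        //         } else if e.is_signer_cap {
+        //             postponed_caps.push(e); // verified only once all branches exist
+        //         } else {
+        //             private_first.push(e);
+        //         }
+        //     }
+        //     (private_first, others, postponed_caps) // replay in this order
+        // }
+        // ------------------------------------------------------------------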
+ + // 1st pass: load all events about private store + let mut postponed_signer_caps = Vec::with_capacity(3); + let mut private_user_branch = None; + + for e in events { + if e.overlay == private_inner_overlay_id { + // it is an event about the private store + //log_info!("VERIFYING EVENT {} {}", e.overlay, e.event); + let (branch_id, branch_secret) = + match self.get_repo(private_store.id(), private_store.get_store_repo()) { + Err(_) => (private_store.id(), private_store.get_store_readcap_secret()), + Ok(repo) => { + let (_, branch_id) = self + .topics + .get(&(e.overlay, *e.event.topic_id())) + .ok_or(VerifierError::TopicNotFound)?; + let branch = repo.branch(branch_id)?; + (branch_id, &branch.read_cap.as_ref().unwrap().key) + } + }; + + let commit = + e.event + .open(&private_store, private_store.id(), branch_id, branch_secret)?; + + if commit + .body() + .ok_or(VerifierError::CommitBodyNotFound)? + .is_add_signer_cap() + { + private_user_branch = Some(branch_id.clone()); + postponed_signer_caps.push(commit); + } else { + self.verify_commit( + &commit, + &branch_id.clone(), + private_store.id(), + Arc::clone(&private_store), + ) + .await?; + } + } + } + + //log_info!("{:?}\n{:?}\n{:?}", self.repos, self.stores, self.topics); + //log_info!("SECOND PASS"); + // 2nd pass : load the other events (that are not from private store) + for (_, store) in self.stores.clone().iter() { + let store_inner_overlay_id = store.inner_overlay(); + + // log_info!( + // "TRYING OVERLAY {} {}", + // store_inner_overlay_id, + // private_inner_overlay_id + // ); + if store_inner_overlay_id == private_inner_overlay_id { + //log_info!("SKIPPED PRIVATE"); + continue; + // we skip the private store, as we already loaded it + } + + for e in events { + if e.overlay == store_inner_overlay_id { + // it is an event about the store we are loading + //log_info!("VERIFYING EVENT {} {}", e.overlay, e.event); + let (branch_id, branch_secret) = + match self.get_repo(store.id(), store.get_store_repo()) { + Err(_) => (store.id(), store.get_store_readcap_secret()), + Ok(repo) => { + let (_, branch_id) = self + .topics + .get(&(e.overlay, *e.event.topic_id())) + .ok_or(VerifierError::TopicNotFound)?; + let branch = repo.branch(branch_id)?; + (branch_id, &branch.read_cap.as_ref().unwrap().key) + } + }; + + let commit = e.event.open(store, store.id(), branch_id, branch_secret)?; + + self.verify_commit(&commit, &branch_id.clone(), store.id(), Arc::clone(store)) + .await?; + } else { + // log_info!( + // "SKIPPED wrong overlay {} {}", + // e.overlay, + // store_inner_overlay_id + // ); + } + } + } + + // finally, ingest the signer_caps. 
+ for signer_cap in postponed_signer_caps { + self.verify_commit( + &signer_cap, + private_user_branch.as_ref().unwrap(), + private_store.id(), + Arc::clone(&private_store), + ) + .await?; + } + + Ok(()) + } + + // fn display(heads: &Vec) -> String { + // let mut ret = String::new(); + // if heads.len() == 0 { + // ret = "0".to_string(); + // } + // for head in heads { + // ret.push_str(&format!("{} ", head.id)); + // } + // ret + // } + + pub async fn send_outbox(&mut self) -> Result<(), NgError> { + let ret = self.take_events_from_outbox(); + if ret.is_err() { + log_debug!( + "take_events_from_outbox returned {:}", + ret.as_ref().unwrap_err() + ); + } + let events: Vec = ret.unwrap_or(vec![]); + if events.is_empty() { + return Ok(()); + } + let broker = BROKER.read().await; + let user = Some(self.user_id().clone()); + self.connected_broker.connected_or_err()?; + let remote = self.connected_broker.clone(); + + // for all the events, check that they are valid (topic exists, current_heads match with event) + let mut need_replay = false; + let mut events_to_replay = Vec::with_capacity(events.len()); + //let mut branch_heads: HashMap> = HashMap::new(); + for e in events { + match self.topics.get(&(e.overlay, *e.event.topic_id())) { + Some((repo_id, branch_id)) => match self.repos.get(repo_id) { + Some(repo) => match repo.branches.get(branch_id) { + Some(_branch) => { + // let commit = e.event.open_with_info(repo, branch)?; + // let acks = commit.acks(); + // match branch_heads.get(branch_id) { + // Some(previous_heads) => { + // if *previous_heads != acks { + // // skip event, as it is outdated. + // continue; + // } else { + // branch_heads + // .insert(*branch_id, vec![commit.reference().unwrap()]); + // } + // } + // None => { + // if acks != branch.current_heads { + // // skip event, as it is outdated. + // continue; + // } else { + // branch_heads + // .insert(*branch_id, vec![commit.reference().unwrap()]); + // } + // } + // } + } + None => { + log_info!("REPLAY BRANCH NOT FOUND {}", branch_id); + need_replay = true; + } + }, + None => { + log_info!("REPLAY REPO NOT FOUND {}", repo_id); + need_replay = true; + } + }, + None => { + log_info!( + "REPLAY TOPIC NOT FOUND {} IN OVERLAY {}", + e.event.topic_id(), + e.overlay + ); + need_replay = true; + } + } + events_to_replay.push(e); + } + log_info!("NEED REPLAY {need_replay}"); + if need_replay { + self.load_from_credentials_and_outbox(&events_to_replay) + .await?; + log_info!("REPLAY DONE"); + } + log_info!("SENDING {} EVENTS FROM OUTBOX", events_to_replay.len()); + for e in events_to_replay { + let files = e.event.file_ids(); + if !files.is_empty() || !need_replay { + let (repo_id, branch_id) = self + .topics + .get(&(e.overlay, *e.event.topic_id())) + .ok_or(NgError::TopicNotFound)? 
+                    .to_owned();
+
+                let repo = self
+                    .repos
+                    .get(&repo_id)
+                    .ok_or(VerifierError::RepoNotFound)?;
+
+                let branch = repo.branch(&branch_id)?;
+
+                let commit = e.event.open_with_body(
+                    &repo.store,
+                    &repo_id,
+                    &branch_id,
+                    &branch.read_cap.as_ref().unwrap().key,
+                    !need_replay,
+                )?;
+
+                let store_repo = repo.store.get_store_repo().clone();
+                let store = Arc::clone(&repo.store);
+                for block in e.file_blocks {
+                    _ = store.put(&block);
+                }
+
+                self.open_branch_(&repo_id, &branch_id, true, &broker, &user, &remote, false)
+                    .await?;
+
+                for file in commit.files() {
+                    //log_debug!("PUT FILE {:?}", file.id);
+                    self.put_all_blocks_of_file(&file, &repo_id, &store_repo)
+                        .await?;
+                }
+
+                if !need_replay {
+                    self.verify_commit_(&commit, &branch_id, &repo_id, store, false)
+                        .await?;
+                }
+            }
+            self.send_event(e.event, &broker, &user, &remote, e.overlay)
+                .await?;
+        }
+        Ok(())
+    }
+
+    fn take_some_peer_last_seq_numbers(&mut self, qty: u16) -> Result<(), NgError> {
+        match &self.config.config_type {
+            VerifierConfigType::JsSaveSession(js) => {
+                let res = (js.last_seq_function)(self.peer_id, qty)?;
+                self.max_reserved_seq_num = res + qty as u64;
+            }
+            #[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+            VerifierConfigType::RocksDb(path) => {
+                let mut path = path.clone();
+                std::fs::create_dir_all(path.clone()).unwrap();
+                path.push(format!("lastseq{}", self.peer_id.to_hash_string()));
+                //log_debug!("last_seq path {}", path.display());
+
+                let file = read(path.clone());
+                let (mut file_save, val) = match file {
+                    Ok(ser) => {
+                        let old_val = if ser.len() > 0 {
+                            match SessionPeerLastSeq::deser(&ser)? {
+                                SessionPeerLastSeq::V0(v) => v,
+                                _ => unimplemented!(),
+                            }
+                        } else {
+                            0
+                        };
+                        (
+                            OpenOptions::new()
+                                .write(true)
+                                .open(path)
+                                .map_err(|_| NgError::IoError)?,
+                            old_val,
+                        )
+                    }
+                    Err(_) => (File::create(path).map_err(|_| NgError::IoError)?, 0),
+                };
+                if qty > 0 {
+                    let new_val = val + qty as u64;
+                    let spls = SessionPeerLastSeq::V0(new_val);
+                    let ser = spls.ser()?;
+                    file_save.write_all(&ser).map_err(|_| NgError::IoError)?;
+
+                    file_save.sync_data().map_err(|_| NgError::IoError)?;
+                }
+                self.max_reserved_seq_num = val + qty as u64;
+            }
+            _ => {
+                self.max_reserved_seq_num += qty as u64;
+            }
+        }
+        self.last_reservation = SystemTime::now();
+        log_debug!(
+            "reserving more {qty} seq_nums. now at {}",
+            self.max_reserved_seq_num
+        );
+        Ok(())
+    }
+
+    pub fn new(
+        config: VerifierConfig,
+        block_storage: Arc<std::sync::RwLock<dyn BlockStorage + Send + Sync>>,
+    ) -> Result<Self, NgError> {
+        let (graph, user, block) = match &config.config_type {
+            VerifierConfigType::Memory | VerifierConfigType::JsSaveSession(_) => (
+                Some(ng_oxigraph::oxigraph::store::Store::new().unwrap()),
+                Some(Box::new(InMemoryUserStorage::new()) as Box<dyn UserStorage>),
+                Some(block_storage),
+            ),
+            #[cfg(all(not(target_family = "wasm"), not(docsrs)))]
+            VerifierConfigType::RocksDb(path) => {
+                let mut path_oxi = path.clone();
+                path_oxi.push("graph");
+                create_dir_all(path_oxi.clone()).unwrap();
+                let mut path_user = path.clone();
+                path_user.push("user");
+                create_dir_all(path_user.clone()).unwrap();
+                let oxi_key = derive_key("NextGraph OxiGraph BLAKE3 key", &config.user_master_key);
+                let user_storage_key =
+                    derive_key("NextGraph UserStorage BLAKE3 key", &config.user_master_key);
+                (
+                    // FIXME: we are using 2 derived keys here.
+                    // this is temporary, until we remove the code in oxi_rocksdb of oxigraph,
+                    // and have oxigraph use directly the UserStorage
+                    Some(
+                        ng_oxigraph::oxigraph::store::Store::open_with_key(path_oxi, oxi_key)
+                            .map_err(|e| NgError::OxiGraphError(e.to_string()))?,
+                    ),
+                    Some(
+                        Box::new(RocksDbUserStorage::open(&path_user, user_storage_key)?)
+                            as Box<dyn UserStorage>,
+                    ),
+                    Some(block_storage),
+                )
+            }
+            VerifierConfigType::Remote(_) => (None, None, None),
+            _ => unimplemented!(), // can be WebRocksDb or RocksDb on wasm platforms, or Headless
+        };
+        let peer_id = config.peer_priv_key.to_pub();
+        let mut verif = Verifier {
+            user_id: config.user_priv_key.to_pub(),
+            outer: NuriV0::locator(&config.locator),
+            config,
+            connected_broker: BrokerPeerId::None,
+            graph_dataset: graph,
+            user_storage: user.map(|u| Arc::new(u)),
+            block_storage: block,
+            peer_id,
+            last_reservation: SystemTime::UNIX_EPOCH, // this is to avoid reserving 100 seq_nums at every start of a new session
+            max_reserved_seq_num: 0,
+            last_seq_num: 0,
+            stores: HashMap::new(),
+            repos: HashMap::new(),
+            inboxes: HashMap::new(),
+            topics: HashMap::new(),
+            in_memory_outbox: vec![],
+            inner_to_outer: HashMap::new(),
+            uploads: BTreeMap::new(),
+            branch_subscriptions: HashMap::new(),
+            temporary_repo_certificates: HashMap::new(),
+        };
+        // this is important as it will load the last seq from storage
+        if verif.config.config_type.should_load_last_seq_num() {
+            verif.take_some_peer_last_seq_numbers(0)?;
+            verif.last_seq_num = verif.max_reserved_seq_num;
+            verif.last_reservation = SystemTime::UNIX_EPOCH;
+        }
+        Ok(verif)
+    }
+
+    pub async fn app_request_stream(
+        &mut self,
+        req: AppRequest,
+    ) -> Result<(Receiver<AppResponse>, CancelFn), NgError> {
+        match req {
+            AppRequest::V0(v0) => {
+                self.process_stream(&v0.command, &v0.nuri, &v0.payload)
+                    .await
+            }
+        }
+    }
+
+    pub async fn app_request(&mut self, req: AppRequest) -> Result<AppResponse, NgError> {
+        match req {
+            AppRequest::V0(v0) => self.process(&v0.command, v0.nuri, v0.payload).await,
+        }
+    }
+
+    pub async fn respond(
+        &mut self,
+        _msg: ProtocolMessage,
+        _fsm: Arc<Mutex<NoiseFSM>>,
+    ) -> Result<(), ProtocolError> {
+        unimplemented!();
+    }
+
+    fn add_repo_without_saving(&mut self, repo: Repo) {
+        self.add_repo_(repo);
+    }
+
+    pub(crate) fn populate_topics(&mut self, repo: &Repo) {
+        for (branch_id, info) in repo.branches.iter() {
+            let overlay_id: OverlayId = repo.store.inner_overlay();
+            let topic_id = info.topic.clone().unwrap();
+            let repo_id = repo.id.clone();
+            let branch_id = branch_id.clone();
+            let _res = self
+                .topics
+                .insert((overlay_id, topic_id), (repo_id, branch_id));
+        }
+    }
+
+    fn add_repo_(&mut self, repo: Repo) -> &Repo {
+        //self.populate_topics(&repo);
+        let _ = self.add_doc(&repo.id, &repo.store.overlay_id);
+        if repo.inbox.is_some() {
+            log_info!("INBOX for {} : {}", repo.id.to_string(), repo.inbox.as_ref().unwrap().to_pub().to_string());
+            _ = self.inboxes.insert(repo.inbox.as_ref().unwrap().to_pub(), repo.id);
+        }
+        let repo_ref = self.repos.entry(repo.id).or_insert(repo);
+        repo_ref
+    }
+
+    fn branch_was_opened(
+        topics: &HashMap<(OverlayId, PubKey), (PubKey, PubKey)>,
+        repo: &mut Repo,
+        sub: &TopicSubRes,
+    ) -> Result<(), NgError> {
+        let overlay = repo.store.inner_overlay();
+        // log_info!(
+        //     "branch_was_opened topic {} overlay {}",
+        //     sub.topic_id(),
+        //     overlay
+        // );
+        let (_, branch_id) = topics
+            .get(&(overlay, *sub.topic_id()))
+            .ok_or(NgError::TopicNotFound)?;
+        // log_info!(
+        //     "branch_was_opened insert branch_id {} is_publisher {}",
+        //     branch_id,
+        //     sub.is_publisher()
+        // );
+        repo.opened_branches.insert(*branch_id, sub.is_publisher());
+        Ok(())
+    }
+
+    fn repo_was_opened(
+        &mut self,
+        repo_id: &RepoId,
+        opened_repo: &RepoOpened,
+    ) -> Result<(), NgError> {
+        let repo = self.repos.get_mut(repo_id).ok_or(NgError::RepoNotFound)?;
+        //TODO: improve the inner_to_outer insert. (should be done when store is created, not here. should work also for dialogs.)
+        self.inner_to_outer.insert(
+            repo.store.overlay_for_read_on_client_protocol(),
+            repo.store.overlay_id,
+        );
+        for sub in opened_repo {
+            Self::branch_was_opened(&self.topics, repo, sub)?;
+        }
+        Ok(())
+    }
+
+    pub(crate) fn new_store_from_update(
+        &mut self,
+        update: &StoreUpdate,
+    ) -> Result<(), VerifierError> {
+        let store = Store::new_from(update, self.get_arc_block_storage()?);
+        let overlay_id = store.get_store_repo().overlay_id_for_storage_purpose();
+        let _store = self
+            .stores
+            .entry(overlay_id)
+            .or_insert_with(|| Arc::new(store));
+        Ok(())
+    }
+
+    pub(crate) async fn new_store_default<'a>(
+        &'a mut self,
+        creator: &UserId,
+        creator_priv_key: &PrivKey,
+        priv_key: PrivKey,
+        store_repo: &StoreRepo,
+        private: bool,
+    ) -> Result<&'a Repo, NgError> {
+        let repo_write_cap_secret = match private {
+            false => SymKey::random(),
+            true => SymKey::nil(),
+        };
+        let overlay_id = store_repo.overlay_id_for_storage_purpose();
+        let block_storage = self.get_arc_block_storage()?;
+        let store: &mut Arc<Store> = self.stores.entry(overlay_id).or_insert_with(|| {
+            let store_readcap = ReadCap::nil();
+            // temporarily set the store_overlay_branch_readcap to an objectRef that has an empty id, and a key = to the repo_write_cap_secret
+            let store_overlay_branch_readcap =
+                ReadCap::from_id_key(ObjectId::nil(), repo_write_cap_secret.clone());
+            let store = Store::new(
+                *store_repo,
+                store_readcap,
+                store_overlay_branch_readcap,
+                block_storage,
+            );
+            Arc::new(store)
+        });
+        let (mut repo, proto_events) = Arc::clone(store).create_repo_with_keys(
+            creator,
+            creator_priv_key,
+            priv_key,
+            store_repo.repo_id().clone(),
+            repo_write_cap_secret,
+            None,
+            private,
+        )?;
+        if !private {
+            repo.inbox = Some(PrivKey::random_ed());
+        }
+        let repo = self.complete_site_store(store_repo, repo)?;
+        self.populate_topics(&repo);
+        self.new_events_with_repo(proto_events, &repo).await?;
+        let repo_ref = self.add_repo_and_save(repo);
+        Ok(repo_ref)
+    }
+
+    /// returns the RepoId of the newly created repo
+    pub(crate) async fn new_repo_default<'a>(
+        &'a mut self,
+        creator: &UserId,
+        creator_priv_key: &PrivKey,
+        store_repo: &StoreRepo,
+        branch_crdt: BranchCrdt,
+    ) -> Result<RepoId, NgError> {
+        let (repo_id, proto_events, add_signer_cap_commit, private_store_repo) = {
+            let store = self.get_store_or_load(store_repo);
+            let repo_write_cap_secret = SymKey::random();
+            let (repo, proto_events) = store.create_repo_default(
+                creator,
+                creator_priv_key,
+                repo_write_cap_secret,
+                branch_crdt,
+            )?;
+            self.populate_topics(&repo);
+
+            // send AddSignerCap to User branch of private store
+            let add_signer_cap_commit_body = CommitBody::V0(CommitBodyV0::AddSignerCap(
+                AddSignerCap::V0(AddSignerCapV0 {
+                    cap: repo.signer.to_owned().unwrap(),
+                    metadata: vec![],
+                }),
+            ));
+            let (add_signer_cap_commit, private_store_repo) = {
+                // find user_branch of private repo
+                let private_repo = self
+                    .repos
+                    .get(self.private_store_id())
+                    .ok_or(NgError::StoreNotFound)?;
+                let user_branch = private_repo.user_branch().ok_or(NgError::BranchNotFound)?;
+                (
+                    Commit::new_with_body_acks_deps_and_save(
+                        creator_priv_key,
+                        creator,
+                        user_branch.id,
+                        QuorumType::NoSigning,
+                        vec![],
+                        user_branch.current_heads.clone(),
+                        add_signer_cap_commit_body,
+                        &private_repo.store,
+                    )?,
+                    private_repo.store.get_store_repo().clone(),
+                )
+            };
+
+            let repo_ref = self.add_repo_and_save(repo);
+            (
+                repo_ref.id,
+                proto_events,
+                add_signer_cap_commit,
+                private_store_repo,
+            )
+        };
+        self.new_events(
+            vec![(add_signer_cap_commit, vec![])],
+            private_store_repo.repo_id().clone(),
+            &private_store_repo,
+        )
+        .await?;
+
+        self.new_events(proto_events, repo_id, store_repo).await?;
+        //let repo_ref = self.add_repo_and_save(repo);
+        Ok(repo_id)
+    }
+}
+#[cfg(test)]
+mod test {
+
+    use crate::verifier::*;
+    use ng_oxigraph::oxrdf::BlankNode;
+    use ng_repo::store::Store;
+    use std::str::FromStr;
+
+    #[test]
+    pub fn test_blank_node() {
+        let bn = BlankNode::from_str("_:____").expect("parse");
+        log_debug!("{:?}", bn);
+    }
+
+    #[async_std::test]
+    pub async fn test_new_repo_default() {
+        let (creator_priv_key, creator_pub_key) = generate_keypair();
+
+        let (_publisher_privkey, publisher_pubkey) = generate_keypair();
+        let _publisher_peer = PeerId::Forwarded(publisher_pubkey);
+
+        let store = Store::dummy_public_v0();
+        let store_repo = store.get_store_repo().clone();
+        let mut verifier = Verifier::new_dummy();
+        verifier.add_store(store);
+
+        let _repo_id = verifier
+            .new_repo_default(
+                &creator_pub_key,
+                &creator_priv_key,
+                &store_repo,
+                BranchCrdt::Graph("test".to_string()),
+            )
+            .await
+            .expect("new_default");
+
+        assert_eq!(verifier.last_seq_num, 5);
+    }
+}
diff --git a/ng-wallet/.gitignore b/ng-wallet/.gitignore
new file mode 100644
index 0000000..8574aec
--- /dev/null
+++ b/ng-wallet/.gitignore
@@ -0,0 +1,3 @@
+.DS_Store
+tests/generated_security_image.jpg
+tests/wallet.ngw
\ No newline at end of file
diff --git a/ng-wallet/Cargo.toml b/ng-wallet/Cargo.toml
new file mode 100644
index 0000000..1e37d58
--- /dev/null
+++ b/ng-wallet/Cargo.toml
@@ -0,0 +1,45 @@
+[package]
+name = "ng-wallet"
+version = "0.1.2"
+description = "Wallet library of NextGraph. It keeps the secret keys of all identities of the user in a safe wallet. NextGraph is a decentralized, secure and local-first web 3.0 ecosystem based on Semantic Web and CRDTs"
+categories = ["authentication"]
+edition.workspace = true
+license.workspace = true
+authors.workspace = true
+repository.workspace = true
+homepage.workspace = true
+keywords = ["crdt","e2ee","local-first","p2p","privacy-protection"]
+documentation.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+serde = { version = "1.0.142", features = ["derive"] }
+serde_bare = "0.5.0"
+serde_bytes = "0.11.7"
+serde-big-array = "0.5.1"
+lazy_static = "1.4.0"
+safe-transmute = "0.11.2"
+async-std = { version = "1.12.0", features = ["attributes","unstable"] }
+getrandom = { version = "0.3.3" }
+rand = { version = "0.7", features = ["getrandom"] }
+aes-gcm-siv = {version = "0.11.1", features = ["aes","heapless","getrandom","std"] }
+zeroize = { version = "1.7.0", features = ["zeroize_derive"] }
+crypto_box = { version = "0.8.2", features = ["seal"] }
+base64-url = "2.0.0"
+blake3 = "1.3.1"
+argon2 = "0.5.0"
+chacha20poly1305 = "0.10.1"
+#{version = "0.10.1", features = ["heapless","getrandom"] }
+image = "0.24.6"
+web-time = "0.2.0"
+ng-repo = { path = "../ng-repo", version = "0.1.2" }
+ng-net = { path = "../ng-net", version = "0.1.2" }
+ng-verifier = { path = "../ng-verifier", version = "0.1.2" }
+
+[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom]
+version = "0.3.3"
+features = ["wasm_js"]
+
+[dev-dependencies]
+ng-repo = { path = "../ng-repo", version = "0.1.2", features = ["testing"] }
+ng-verifier = { path = "../ng-verifier", version = "0.1.2", features = ["testing"] }
\ No newline at end of file
diff --git a/ng-wallet/README.md b/ng-wallet/README.md
new file mode 100644
index 0000000..99689c2
--- /dev/null
+++ b/ng-wallet/README.md
@@ -0,0 +1,56 @@
+# ng-wallet
+
+![MSRV][rustc-image]
+[![Apache 2.0 Licensed][license-image]][license-link]
+[![MIT Licensed][license-image2]][license-link2]
+
+Wallet library of NextGraph. It keeps the secret keys of all identities of the user in a safe wallet.
+
+This repository is in active development at [https://git.nextgraph.org/NextGraph/nextgraph-rs](https://git.nextgraph.org/NextGraph/nextgraph-rs), a Gitea instance. For bug reports, issues, merge requests, and in order to join the dev team, please visit the link above and create an account (you can do so with a GitHub account). The [github repo](https://github.com/nextgraph-org/nextgraph-rs) is just a read-only mirror that does not accept issues.
+
+## NextGraph
+
+> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs.
+>
+> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers.
+> +> More info here [https://nextgraph.org](https://nextgraph.org) + +## Support + +Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org) + +And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org) + +## How to use the library + +NextGraph is not ready yet. You can subscribe to [our newsletter](https://list.nextgraph.org/subscription/form) to get updates, and support us with a [donation](https://nextgraph.org/donate/). + +This library is used internally by [ngd](../ngd/README.md), [ngcli](../ngcli/README.md), [ng-app](../ng-app/README.md) and by [nextgraph, the Rust client library](../nextgraph/README.md) which you should be using instead. It is not meant to be used by other programs as-is. + +## License + +Licensed under either of + +- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) + at your option. + +`SPDX-License-Identifier: Apache-2.0 OR MIT` + +### Contributions license + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in the work by you shall be dual licensed as below, without any +additional terms or conditions. + +--- + +NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively. 
+ + +[rustc-image]: https://img.shields.io/badge/rustc-1.81+-blue.svg +[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg +[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2 +[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg +[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT diff --git a/ng-wallet/src/bip39.rs b/ng-wallet/src/bip39.rs new file mode 100644 index 0000000..e254073 --- /dev/null +++ b/ng-wallet/src/bip39.rs @@ -0,0 +1,241 @@ +use ng_repo::errors::NgError; +use std::collections::HashMap; + +#[allow(non_upper_case_globals)] +pub const bip39_wordlist: [&str; 2048] = [ + "abandon", "ability", "able", "about", "above", "absent", "absorb", "abstract", "absurd", + "abuse", "access", "accident", "account", "accuse", "achieve", "acid", "acoustic", "acquire", + "across", "act", "action", "actor", "actress", "actual", "adapt", "add", "addict", "address", + "adjust", "admit", "adult", "advance", "advice", "aerobic", "affair", "afford", "afraid", + "again", "age", "agent", "agree", "ahead", "aim", "air", "airport", "aisle", "alarm", "album", + "alcohol", "alert", "alien", "all", "alley", "allow", "almost", "alone", "alpha", "already", + "also", "alter", "always", "amateur", "amazing", "among", "amount", "amused", "analyst", + "anchor", "ancient", "anger", "angle", "angry", "animal", "ankle", "announce", "annual", + "another", "answer", "antenna", "antique", "anxiety", "any", "apart", "apology", "appear", + "apple", "approve", "april", "arch", "arctic", "area", "arena", "argue", "arm", "armed", + "armor", "army", "around", "arrange", "arrest", "arrive", "arrow", "art", "artefact", "artist", + "artwork", "ask", "aspect", "assault", "asset", "assist", "assume", "asthma", "athlete", + "atom", "attack", "attend", "attitude", "attract", "auction", "audit", "august", "aunt", + "author", "auto", "autumn", "average", "avocado", "avoid", "awake", "aware", "away", "awesome", + "awful", "awkward", "axis", "baby", "bachelor", "bacon", "badge", "bag", "balance", "balcony", + "ball", "bamboo", "banana", "banner", "bar", "barely", "bargain", "barrel", "base", "basic", + "basket", "battle", "beach", "bean", "beauty", "because", "become", "beef", "before", "begin", + "behave", "behind", "believe", "below", "belt", "bench", "benefit", "best", "betray", "better", + "between", "beyond", "bicycle", "bid", "bike", "bind", "biology", "bird", "birth", "bitter", + "black", "blade", "blame", "blanket", "blast", "bleak", "bless", "blind", "blood", "blossom", + "blouse", "blue", "blur", "blush", "board", "boat", "body", "boil", "bomb", "bone", "bonus", + "book", "boost", "border", "boring", "borrow", "boss", "bottom", "bounce", "box", "boy", + "bracket", "brain", "brand", "brass", "brave", "bread", "breeze", "brick", "bridge", "brief", + "bright", "bring", "brisk", "broccoli", "broken", "bronze", "broom", "brother", "brown", + "brush", "bubble", "buddy", "budget", "buffalo", "build", "bulb", "bulk", "bullet", "bundle", + "bunker", "burden", "burger", "burst", "bus", "business", "busy", "butter", "buyer", "buzz", + "cabbage", "cabin", "cable", "cactus", "cage", "cake", "call", "calm", "camera", "camp", "can", + "canal", "cancel", "candy", "cannon", "canoe", "canvas", "canyon", "capable", "capital", + "captain", "car", "carbon", "card", "cargo", "carpet", "carry", "cart", "case", "cash", + "casino", "castle", "casual", "cat", "catalog", "catch", "category", "cattle", "caught", + "cause", 
"caution", "cave", "ceiling", "celery", "cement", "census", "century", "cereal", + "certain", "chair", "chalk", "champion", "change", "chaos", "chapter", "charge", "chase", + "chat", "cheap", "check", "cheese", "chef", "cherry", "chest", "chicken", "chief", "child", + "chimney", "choice", "choose", "chronic", "chuckle", "chunk", "churn", "cigar", "cinnamon", + "circle", "citizen", "city", "civil", "claim", "clap", "clarify", "claw", "clay", "clean", + "clerk", "clever", "click", "client", "cliff", "climb", "clinic", "clip", "clock", "clog", + "close", "cloth", "cloud", "clown", "club", "clump", "cluster", "clutch", "coach", "coast", + "coconut", "code", "coffee", "coil", "coin", "collect", "color", "column", "combine", "come", + "comfort", "comic", "common", "company", "concert", "conduct", "confirm", "congress", + "connect", "consider", "control", "convince", "cook", "cool", "copper", "copy", "coral", + "core", "corn", "correct", "cost", "cotton", "couch", "country", "couple", "course", "cousin", + "cover", "coyote", "crack", "cradle", "craft", "cram", "crane", "crash", "crater", "crawl", + "crazy", "cream", "credit", "creek", "crew", "cricket", "crime", "crisp", "critic", "crop", + "cross", "crouch", "crowd", "crucial", "cruel", "cruise", "crumble", "crunch", "crush", "cry", + "crystal", "cube", "culture", "cup", "cupboard", "curious", "current", "curtain", "curve", + "cushion", "custom", "cute", "cycle", "dad", "damage", "damp", "dance", "danger", "daring", + "dash", "daughter", "dawn", "day", "deal", "debate", "debris", "decade", "december", "decide", + "decline", "decorate", "decrease", "deer", "defense", "define", "defy", "degree", "delay", + "deliver", "demand", "demise", "denial", "dentist", "deny", "depart", "depend", "deposit", + "depth", "deputy", "derive", "describe", "desert", "design", "desk", "despair", "destroy", + "detail", "detect", "develop", "device", "devote", "diagram", "dial", "diamond", "diary", + "dice", "diesel", "diet", "differ", "digital", "dignity", "dilemma", "dinner", "dinosaur", + "direct", "dirt", "disagree", "discover", "disease", "dish", "dismiss", "disorder", "display", + "distance", "divert", "divide", "divorce", "dizzy", "doctor", "document", "dog", "doll", + "dolphin", "domain", "donate", "donkey", "donor", "door", "dose", "double", "dove", "draft", + "dragon", "drama", "drastic", "draw", "dream", "dress", "drift", "drill", "drink", "drip", + "drive", "drop", "drum", "dry", "duck", "dumb", "dune", "during", "dust", "dutch", "duty", + "dwarf", "dynamic", "eager", "eagle", "early", "earn", "earth", "easily", "east", "easy", + "echo", "ecology", "economy", "edge", "edit", "educate", "effort", "egg", "eight", "either", + "elbow", "elder", "electric", "elegant", "element", "elephant", "elevator", "elite", "else", + "embark", "embody", "embrace", "emerge", "emotion", "employ", "empower", "empty", "enable", + "enact", "end", "endless", "endorse", "enemy", "energy", "enforce", "engage", "engine", + "enhance", "enjoy", "enlist", "enough", "enrich", "enroll", "ensure", "enter", "entire", + "entry", "envelope", "episode", "equal", "equip", "era", "erase", "erode", "erosion", "error", + "erupt", "escape", "essay", "essence", "estate", "eternal", "ethics", "evidence", "evil", + "evoke", "evolve", "exact", "example", "excess", "exchange", "excite", "exclude", "excuse", + "execute", "exercise", "exhaust", "exhibit", "exile", "exist", "exit", "exotic", "expand", + "expect", "expire", "explain", "expose", "express", "extend", "extra", "eye", "eyebrow", + "fabric", 
"face", "faculty", "fade", "faint", "faith", "fall", "false", "fame", "family", + "famous", "fan", "fancy", "fantasy", "farm", "fashion", "fat", "fatal", "father", "fatigue", + "fault", "favorite", "feature", "february", "federal", "fee", "feed", "feel", "female", + "fence", "festival", "fetch", "fever", "few", "fiber", "fiction", "field", "figure", "file", + "film", "filter", "final", "find", "fine", "finger", "finish", "fire", "firm", "first", + "fiscal", "fish", "fit", "fitness", "fix", "flag", "flame", "flash", "flat", "flavor", "flee", + "flight", "flip", "float", "flock", "floor", "flower", "fluid", "flush", "fly", "foam", + "focus", "fog", "foil", "fold", "follow", "food", "foot", "force", "forest", "forget", "fork", + "fortune", "forum", "forward", "fossil", "foster", "found", "fox", "fragile", "frame", + "frequent", "fresh", "friend", "fringe", "frog", "front", "frost", "frown", "frozen", "fruit", + "fuel", "fun", "funny", "furnace", "fury", "future", "gadget", "gain", "galaxy", "gallery", + "game", "gap", "garage", "garbage", "garden", "garlic", "garment", "gas", "gasp", "gate", + "gather", "gauge", "gaze", "general", "genius", "genre", "gentle", "genuine", "gesture", + "ghost", "giant", "gift", "giggle", "ginger", "giraffe", "girl", "give", "glad", "glance", + "glare", "glass", "glide", "glimpse", "globe", "gloom", "glory", "glove", "glow", "glue", + "goat", "goddess", "gold", "good", "goose", "gorilla", "gospel", "gossip", "govern", "gown", + "grab", "grace", "grain", "grant", "grape", "grass", "gravity", "great", "green", "grid", + "grief", "grit", "grocery", "group", "grow", "grunt", "guard", "guess", "guide", "guilt", + "guitar", "gun", "gym", "habit", "hair", "half", "hammer", "hamster", "hand", "happy", + "harbor", "hard", "harsh", "harvest", "hat", "have", "hawk", "hazard", "head", "health", + "heart", "heavy", "hedgehog", "height", "hello", "helmet", "help", "hen", "hero", "hidden", + "high", "hill", "hint", "hip", "hire", "history", "hobby", "hockey", "hold", "hole", "holiday", + "hollow", "home", "honey", "hood", "hope", "horn", "horror", "horse", "hospital", "host", + "hotel", "hour", "hover", "hub", "huge", "human", "humble", "humor", "hundred", "hungry", + "hunt", "hurdle", "hurry", "hurt", "husband", "hybrid", "ice", "icon", "idea", "identify", + "idle", "ignore", "ill", "illegal", "illness", "image", "imitate", "immense", "immune", + "impact", "impose", "improve", "impulse", "inch", "include", "income", "increase", "index", + "indicate", "indoor", "industry", "infant", "inflict", "inform", "inhale", "inherit", + "initial", "inject", "injury", "inmate", "inner", "innocent", "input", "inquiry", "insane", + "insect", "inside", "inspire", "install", "intact", "interest", "into", "invest", "invite", + "involve", "iron", "island", "isolate", "issue", "item", "ivory", "jacket", "jaguar", "jar", + "jazz", "jealous", "jeans", "jelly", "jewel", "job", "join", "joke", "journey", "joy", "judge", + "juice", "jump", "jungle", "junior", "junk", "just", "kangaroo", "keen", "keep", "ketchup", + "key", "kick", "kid", "kidney", "kind", "kingdom", "kiss", "kit", "kitchen", "kite", "kitten", + "kiwi", "knee", "knife", "knock", "know", "lab", "label", "labor", "ladder", "lady", "lake", + "lamp", "language", "laptop", "large", "later", "latin", "laugh", "laundry", "lava", "law", + "lawn", "lawsuit", "layer", "lazy", "leader", "leaf", "learn", "leave", "lecture", "left", + "leg", "legal", "legend", "leisure", "lemon", "lend", "length", "lens", "leopard", "lesson", + "letter", "level", 
"liar", "liberty", "library", "license", "life", "lift", "light", "like", + "limb", "limit", "link", "lion", "liquid", "list", "little", "live", "lizard", "load", "loan", + "lobster", "local", "lock", "logic", "lonely", "long", "loop", "lottery", "loud", "lounge", + "love", "loyal", "lucky", "luggage", "lumber", "lunar", "lunch", "luxury", "lyrics", "machine", + "mad", "magic", "magnet", "maid", "mail", "main", "major", "make", "mammal", "man", "manage", + "mandate", "mango", "mansion", "manual", "maple", "marble", "march", "margin", "marine", + "market", "marriage", "mask", "mass", "master", "match", "material", "math", "matrix", + "matter", "maximum", "maze", "meadow", "mean", "measure", "meat", "mechanic", "medal", "media", + "melody", "melt", "member", "memory", "mention", "menu", "mercy", "merge", "merit", "merry", + "mesh", "message", "metal", "method", "middle", "midnight", "milk", "million", "mimic", "mind", + "minimum", "minor", "minute", "miracle", "mirror", "misery", "miss", "mistake", "mix", "mixed", + "mixture", "mobile", "model", "modify", "mom", "moment", "monitor", "monkey", "monster", + "month", "moon", "moral", "more", "morning", "mosquito", "mother", "motion", "motor", + "mountain", "mouse", "move", "movie", "much", "muffin", "mule", "multiply", "muscle", "museum", + "mushroom", "music", "must", "mutual", "myself", "mystery", "myth", "naive", "name", "napkin", + "narrow", "nasty", "nation", "nature", "near", "neck", "need", "negative", "neglect", + "neither", "nephew", "nerve", "nest", "net", "network", "neutral", "never", "news", "next", + "nice", "night", "noble", "noise", "nominee", "noodle", "normal", "north", "nose", "notable", + "note", "nothing", "notice", "novel", "now", "nuclear", "number", "nurse", "nut", "oak", + "obey", "object", "oblige", "obscure", "observe", "obtain", "obvious", "occur", "ocean", + "october", "odor", "off", "offer", "office", "often", "oil", "okay", "old", "olive", "olympic", + "omit", "once", "one", "onion", "online", "only", "open", "opera", "opinion", "oppose", + "option", "orange", "orbit", "orchard", "order", "ordinary", "organ", "orient", "original", + "orphan", "ostrich", "other", "outdoor", "outer", "output", "outside", "oval", "oven", "over", + "own", "owner", "oxygen", "oyster", "ozone", "pact", "paddle", "page", "pair", "palace", + "palm", "panda", "panel", "panic", "panther", "paper", "parade", "parent", "park", "parrot", + "party", "pass", "patch", "path", "patient", "patrol", "pattern", "pause", "pave", "payment", + "peace", "peanut", "pear", "peasant", "pelican", "pen", "penalty", "pencil", "people", + "pepper", "perfect", "permit", "person", "pet", "phone", "photo", "phrase", "physical", + "piano", "picnic", "picture", "piece", "pig", "pigeon", "pill", "pilot", "pink", "pioneer", + "pipe", "pistol", "pitch", "pizza", "place", "planet", "plastic", "plate", "play", "please", + "pledge", "pluck", "plug", "plunge", "poem", "poet", "point", "polar", "pole", "police", + "pond", "pony", "pool", "popular", "portion", "position", "possible", "post", "potato", + "pottery", "poverty", "powder", "power", "practice", "praise", "predict", "prefer", "prepare", + "present", "pretty", "prevent", "price", "pride", "primary", "print", "priority", "prison", + "private", "prize", "problem", "process", "produce", "profit", "program", "project", "promote", + "proof", "property", "prosper", "protect", "proud", "provide", "public", "pudding", "pull", + "pulp", "pulse", "pumpkin", "punch", "pupil", "puppy", "purchase", "purity", "purpose", + 
"purse", "push", "put", "puzzle", "pyramid", "quality", "quantum", "quarter", "question", + "quick", "quit", "quiz", "quote", "rabbit", "raccoon", "race", "rack", "radar", "radio", + "rail", "rain", "raise", "rally", "ramp", "ranch", "random", "range", "rapid", "rare", "rate", + "rather", "raven", "raw", "razor", "ready", "real", "reason", "rebel", "rebuild", "recall", + "receive", "recipe", "record", "recycle", "reduce", "reflect", "reform", "refuse", "region", + "regret", "regular", "reject", "relax", "release", "relief", "rely", "remain", "remember", + "remind", "remove", "render", "renew", "rent", "reopen", "repair", "repeat", "replace", + "report", "require", "rescue", "resemble", "resist", "resource", "response", "result", + "retire", "retreat", "return", "reunion", "reveal", "review", "reward", "rhythm", "rib", + "ribbon", "rice", "rich", "ride", "ridge", "rifle", "right", "rigid", "ring", "riot", "ripple", + "risk", "ritual", "rival", "river", "road", "roast", "robot", "robust", "rocket", "romance", + "roof", "rookie", "room", "rose", "rotate", "rough", "round", "route", "royal", "rubber", + "rude", "rug", "rule", "run", "runway", "rural", "sad", "saddle", "sadness", "safe", "sail", + "salad", "salmon", "salon", "salt", "salute", "same", "sample", "sand", "satisfy", "satoshi", + "sauce", "sausage", "save", "say", "scale", "scan", "scare", "scatter", "scene", "scheme", + "school", "science", "scissors", "scorpion", "scout", "scrap", "screen", "script", "scrub", + "sea", "search", "season", "seat", "second", "secret", "section", "security", "seed", "seek", + "segment", "select", "sell", "seminar", "senior", "sense", "sentence", "series", "service", + "session", "settle", "setup", "seven", "shadow", "shaft", "shallow", "share", "shed", "shell", + "sheriff", "shield", "shift", "shine", "ship", "shiver", "shock", "shoe", "shoot", "shop", + "short", "shoulder", "shove", "shrimp", "shrug", "shuffle", "shy", "sibling", "sick", "side", + "siege", "sight", "sign", "silent", "silk", "silly", "silver", "similar", "simple", "since", + "sing", "siren", "sister", "situate", "six", "size", "skate", "sketch", "ski", "skill", "skin", + "skirt", "skull", "slab", "slam", "sleep", "slender", "slice", "slide", "slight", "slim", + "slogan", "slot", "slow", "slush", "small", "smart", "smile", "smoke", "smooth", "snack", + "snake", "snap", "sniff", "snow", "soap", "soccer", "social", "sock", "soda", "soft", "solar", + "soldier", "solid", "solution", "solve", "someone", "song", "soon", "sorry", "sort", "soul", + "sound", "soup", "source", "south", "space", "spare", "spatial", "spawn", "speak", "special", + "speed", "spell", "spend", "sphere", "spice", "spider", "spike", "spin", "spirit", "split", + "spoil", "sponsor", "spoon", "sport", "spot", "spray", "spread", "spring", "spy", "square", + "squeeze", "squirrel", "stable", "stadium", "staff", "stage", "stairs", "stamp", "stand", + "start", "state", "stay", "steak", "steel", "stem", "step", "stereo", "stick", "still", + "sting", "stock", "stomach", "stone", "stool", "story", "stove", "strategy", "street", + "strike", "strong", "struggle", "student", "stuff", "stumble", "style", "subject", "submit", + "subway", "success", "such", "sudden", "suffer", "sugar", "suggest", "suit", "summer", "sun", + "sunny", "sunset", "super", "supply", "supreme", "sure", "surface", "surge", "surprise", + "surround", "survey", "suspect", "sustain", "swallow", "swamp", "swap", "swarm", "swear", + "sweet", "swift", "swim", "swing", "switch", "sword", "symbol", "symptom", "syrup", 
"system", + "table", "tackle", "tag", "tail", "talent", "talk", "tank", "tape", "target", "task", "taste", + "tattoo", "taxi", "teach", "team", "tell", "ten", "tenant", "tennis", "tent", "term", "test", + "text", "thank", "that", "theme", "then", "theory", "there", "they", "thing", "this", + "thought", "three", "thrive", "throw", "thumb", "thunder", "ticket", "tide", "tiger", "tilt", + "timber", "time", "tiny", "tip", "tired", "tissue", "title", "toast", "tobacco", "today", + "toddler", "toe", "together", "toilet", "token", "tomato", "tomorrow", "tone", "tongue", + "tonight", "tool", "tooth", "top", "topic", "topple", "torch", "tornado", "tortoise", "toss", + "total", "tourist", "toward", "tower", "town", "toy", "track", "trade", "traffic", "tragic", + "train", "transfer", "trap", "trash", "travel", "tray", "treat", "tree", "trend", "trial", + "tribe", "trick", "trigger", "trim", "trip", "trophy", "trouble", "truck", "true", "truly", + "trumpet", "trust", "truth", "try", "tube", "tuition", "tumble", "tuna", "tunnel", "turkey", + "turn", "turtle", "twelve", "twenty", "twice", "twin", "twist", "two", "type", "typical", + "ugly", "umbrella", "unable", "unaware", "uncle", "uncover", "under", "undo", "unfair", + "unfold", "unhappy", "uniform", "unique", "unit", "universe", "unknown", "unlock", "until", + "unusual", "unveil", "update", "upgrade", "uphold", "upon", "upper", "upset", "urban", "urge", + "usage", "use", "used", "useful", "useless", "usual", "utility", "vacant", "vacuum", "vague", + "valid", "valley", "valve", "van", "vanish", "vapor", "various", "vast", "vault", "vehicle", + "velvet", "vendor", "venture", "venue", "verb", "verify", "version", "very", "vessel", + "veteran", "viable", "vibrant", "vicious", "victory", "video", "view", "village", "vintage", + "violin", "virtual", "virus", "visa", "visit", "visual", "vital", "vivid", "vocal", "voice", + "void", "volcano", "volume", "vote", "voyage", "wage", "wagon", "wait", "walk", "wall", + "walnut", "want", "warfare", "warm", "warrior", "wash", "wasp", "waste", "water", "wave", + "way", "wealth", "weapon", "wear", "weasel", "weather", "web", "wedding", "weekend", "weird", + "welcome", "west", "wet", "whale", "what", "wheat", "wheel", "when", "where", "whip", + "whisper", "wide", "width", "wife", "wild", "will", "win", "window", "wine", "wing", "wink", + "winner", "winter", "wire", "wisdom", "wise", "wish", "witness", "wolf", "woman", "wonder", + "wood", "wool", "word", "work", "world", "worry", "worth", "wrap", "wreck", "wrestle", "wrist", + "write", "wrong", "yard", "year", "yellow", "you", "young", "youth", "zebra", "zero", "zone", + "zoo", +]; + +lazy_static! { + pub static ref BIP39_WORD_MAP: HashMap = { + let mut m = HashMap::new(); + for (i, word) in bip39_wordlist.iter().enumerate() { + m.insert(word.to_string(), i as u16); + } + m + }; +} + +/// Taking a list of bip39 words, returns a list of u16 codes +pub fn encode_mnemonic(words: &Vec) -> Result<[u16; 12], NgError> { + if words.len() != 12 { + return Err(NgError::InvalidMnemonic); + } + let mut res = [0u16; 12]; + for (idx, word) in words.iter().enumerate() { + res[idx] = *BIP39_WORD_MAP + .get(word.as_str()) + .ok_or(NgError::InvalidMnemonic)?; + } + Ok(res) +} diff --git a/ng-wallet/src/emojis.rs b/ng-wallet/src/emojis.rs new file mode 100644 index 0000000..c117dc1 --- /dev/null +++ b/ng-wallet/src/emojis.rs @@ -0,0 +1,1302 @@ +// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers +// All rights reserved. 
+// Licensed under the Apache License, Version 2.0 +// +// or the MIT license , +// at your option. All files in the project carrying such +// notice may not be copied, modified, or distributed except +// according to those terms. + +use std::collections::HashMap; + +use ng_repo::errors::NgError; + +pub struct EmojiDef<'a> { + pub hexcode: &'a str, + pub shortcode: &'a str, + pub code: &'a str, +} +#[allow(non_upper_case_globals)] +const face: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f600", + shortcode: "grinning_face", + code: "happy", + }, + EmojiDef { + hexcode: "1f602", + shortcode: "face_with_tears_of_joy", + code: "happy_tears", + }, + EmojiDef { + hexcode: "1f607", + shortcode: "smiling_face_with_halo", + code: "halo", + }, + EmojiDef { + hexcode: "1f970", + shortcode: "smiling_face_with_hearts", + code: "three_hearts", + }, + EmojiDef { + hexcode: "1f60d", + shortcode: "smiling_face_with_heart_eyes", + code: "with_two_hearts", + }, + EmojiDef { + hexcode: "1f618", + shortcode: "face_blowing_a_kiss", + code: "one_heart", + }, + EmojiDef { + hexcode: "1f61d", + shortcode: "squinting_face_with_tongue", + code: "with_tongue", + }, + EmojiDef { + hexcode: "1f917", + shortcode: "hugging_face", + code: "with_two_hands", + }, + EmojiDef { + hexcode: "1f92d", + shortcode: "face_with_hand_over_mouth", + code: "one_hand", + }, + EmojiDef { + hexcode: "1f910", + shortcode: "zipper_mouth_face", + code: "silenced", + }, + EmojiDef { + hexcode: "1f973", + shortcode: "partying_face", + code: "celebrating", + }, + EmojiDef { + hexcode: "1f60e", + shortcode: "smiling_face_with_sunglasses", + code: "sunglasses", + }, + EmojiDef { + hexcode: "1f644", + shortcode: "face_with_rolling_eyes", + code: "eyes_up", + }, + EmojiDef { + hexcode: "1f9d0", + shortcode: "face_with_monocle", + code: "monocle", + }, + EmojiDef { + hexcode: "1f634", + shortcode: "sleeping_face", + code: "sleeping", + }, +]; +#[allow(non_upper_case_globals)] +const face_unwell: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f637", + shortcode: "face_with_medical_mask", + + code: "mask", + }, + EmojiDef { + hexcode: "1f912", + shortcode: "face_with_thermometer", + code: "fever", + }, + EmojiDef { + hexcode: "1f915", + shortcode: "face_with_head_bandage", + code: "bandage", + }, + EmojiDef { + hexcode: "1f92e", + shortcode: "face_vomiting", + code: "vomit", + }, + EmojiDef { + hexcode: "1f927", + shortcode: "sneezing_face", + code: "tissue", + }, + EmojiDef { + hexcode: "1f975", + shortcode: "hot_face", + code: "hot", + }, + EmojiDef { + hexcode: "1f976", + shortcode: "cold_face", + code: "cold", + }, + EmojiDef { + hexcode: "1f635", + shortcode: "knocked_out_face", + code: "crossed_eyes", + }, + EmojiDef { + hexcode: "1f92f", + shortcode: "exploding_head", + code: "exploding", + }, + EmojiDef { + hexcode: "2639", + shortcode: "frowning_face", + code: "sad", + }, + EmojiDef { + hexcode: "1f925", + shortcode: "lying_face", + code: "long_nose", + }, + EmojiDef { + hexcode: "1f62d", + shortcode: "loudly_crying_face", + code: "many_tears", + }, + EmojiDef { + hexcode: "1f631", + shortcode: "face_screaming_in_fear", + code: "fear", + }, + EmojiDef { + hexcode: "1f971", + shortcode: "yawning_face", + code: "tired", + }, + EmojiDef { + hexcode: "1f624", + shortcode: "face_with_steam_from_nose", + code: "annoyed", + }, +]; +#[allow(non_upper_case_globals)] +const face_costume: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f921", + shortcode: "clown_face", + code: "clown", + }, + EmojiDef { + hexcode: "1f47b", + 
shortcode: "ghost", + code: "ghost", + }, + EmojiDef { + hexcode: "1f436", + shortcode: "dog_face", + code: "dog", + }, + EmojiDef { + hexcode: "1f638", + shortcode: "grinning_cat_with_smiling_eyes", + code: "happy_cat", + }, + EmojiDef { + hexcode: "1f640", + shortcode: "weary_cat", + code: "scared_cat", + }, + EmojiDef { + hexcode: "1f63f", + shortcode: "crying_cat", + code: "sad_cat", + }, + EmojiDef { + hexcode: "1f648", + shortcode: "see_no_evil_monkey", + code: "monkey_no_see", + }, + EmojiDef { + hexcode: "1f649", + shortcode: "hear_no_evil_monkey", + code: "monkey_no_hear", + }, + EmojiDef { + hexcode: "1f64a", + shortcode: "speak_no_evil_monkey", + code: "monkey_no_talk", + }, + EmojiDef { + hexcode: "1f477", + shortcode: "construction_worker", + code: "builder", + }, + EmojiDef { + hexcode: "1f478", + shortcode: "princess", + code: "princess", + }, + EmojiDef { + hexcode: "1f9d1_200d_1f692", + shortcode: "firefighter", + code: "firefighter", + }, + EmojiDef { + hexcode: "1f9d9", + shortcode: "mage", + code: "mage", + }, + EmojiDef { + hexcode: "1f9dc", + shortcode: "merperson", + code: "mermaid", + }, + EmojiDef { + hexcode: "1f9da", + shortcode: "fairy", + code: "fairy", + }, +]; +#[allow(non_upper_case_globals)] +const emotion: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f48c", + shortcode: "love_letter", + code: "letter_heart", + }, + EmojiDef { + hexcode: "2764", + shortcode: "red_heart", + code: "red_heart", + }, + EmojiDef { + hexcode: "1f495", + shortcode: "two_hearts", + code: "two_hearts", + }, + EmojiDef { + hexcode: "1f48b", + shortcode: "kiss_mark", + code: "kiss", + }, + EmojiDef { + hexcode: "1f4af", + shortcode: "hundred_points", + code: "hundred", + }, + EmojiDef { + hexcode: "1f4a5", + shortcode: "collision", + code: "explosion", + }, + EmojiDef { + hexcode: "1f4a6", + shortcode: "sweat_droplets", + code: "drops", + }, + EmojiDef { + hexcode: "1f91d", + shortcode: "handshake", + code: "handshake", + }, + EmojiDef { + hexcode: "1f590", + shortcode: "hand_with_fingers_splayed", + code: "hand_five_fingers", + }, + EmojiDef { + hexcode: "270c", + shortcode: "victory_hand", + code: "hand_two_fingers", + }, + EmojiDef { + hexcode: "1f44d", + shortcode: "thumbs_up", + code: "thumbs_up", + }, + EmojiDef { + hexcode: "270a", + shortcode: "raised_fist", + code: "fist", + }, + EmojiDef { + hexcode: "1f450", + shortcode: "open_hands", + code: "two_hands", + }, + EmojiDef { + hexcode: "270d", + shortcode: "writing_hand", + code: "writing", + }, + EmojiDef { + hexcode: "1f64f", + shortcode: "folded_hands", + code: "praying", + }, +]; +#[allow(non_upper_case_globals)] +const body: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f4aa", + shortcode: "flexed_biceps", + code: "arm", + }, + EmojiDef { + hexcode: "1f9b5", + shortcode: "leg", + code: "leg", + }, + EmojiDef { + hexcode: "1f9b6", + shortcode: "foot", + code: "foot", + }, + EmojiDef { + hexcode: "1f442", + shortcode: "ear", + code: "ear", + }, + EmojiDef { + hexcode: "1f443", + shortcode: "nose", + code: "nose", + }, + EmojiDef { + hexcode: "1f9e0", + shortcode: "brain", + code: "brain", + }, + EmojiDef { + hexcode: "1f9b7", + shortcode: "tooth", + code: "tooth", + }, + EmojiDef { + hexcode: "1f9b4", + shortcode: "bone", + code: "bone", + }, + EmojiDef { + hexcode: "1f441", + shortcode: "eye", + code: "eye", + }, + EmojiDef { + hexcode: "1f445", + shortcode: "tongue", + code: "tongue", + }, + EmojiDef { + hexcode: "1f444", + shortcode: "mouth", + code: "mouth", + }, + EmojiDef { + hexcode: "1f455", + 
shortcode: "t_shirt", + code: "shirt", + }, + EmojiDef { + hexcode: "1f456", + shortcode: "jeans", + code: "pants", + }, + EmojiDef { + hexcode: "1f457", + shortcode: "dress", + code: "dress", + }, + EmojiDef { + hexcode: "1f45f", + shortcode: "running_shoe", + code: "shoe", + }, +]; +#[allow(non_upper_case_globals)] +const sport: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f93a", + shortcode: "person_fencing", + code: "fencing", + }, + EmojiDef { + hexcode: "1f3c7", + shortcode: "horse_racing", + code: "horse_riding", + }, + EmojiDef { + hexcode: "26f7", + shortcode: "skier", + code: "ski", + }, + EmojiDef { + hexcode: "1f6a3", + shortcode: "person_rowing_boat", + code: "rowing_boat", + }, + EmojiDef { + hexcode: "1f3ca", + shortcode: "person_swimming", + code: "swim", + }, + EmojiDef { + hexcode: "1f3c4", + shortcode: "person_surfing", + code: "surf", + }, + EmojiDef { + hexcode: "1f3cb", + shortcode: "person_lifting_weights", + code: "gym", + }, + EmojiDef { + hexcode: "1f93c", + shortcode: "people_wrestling", + code: "wrestling", + }, + EmojiDef { + hexcode: "1f6b4", + shortcode: "person_biking", + code: "bike", + }, + EmojiDef { + hexcode: "1fa82", + shortcode: "parachute", + code: "parachute", + }, + EmojiDef { + hexcode: "26bd", + shortcode: "soccer_ball", + code: "football", + }, + EmojiDef { + hexcode: "1f3c0", + shortcode: "basketball", + code: "basketball", + }, + EmojiDef { + hexcode: "1f3be", + shortcode: "tennis", + code: "tennis", + }, + EmojiDef { + hexcode: "1f3d3", + shortcode: "ping_pong", + code: "ping_pong", + }, + EmojiDef { + hexcode: "1f94b", + shortcode: "martial_arts_uniform", + code: "martial", + }, +]; +#[allow(non_upper_case_globals)] +const bigger_animal: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f981", + shortcode: "lion", + code: "lion", + }, + EmojiDef { + hexcode: "1f406", + shortcode: "leopard", + code: "leopard", + }, + EmojiDef { + hexcode: "1f434", + shortcode: "horse_face", + code: "horse", + }, + EmojiDef { + hexcode: "1f993", + shortcode: "zebra", + code: "zebra", + }, + EmojiDef { + hexcode: "1f416", + shortcode: "pig", + code: "pig", + }, + EmojiDef { + hexcode: "1f410", + shortcode: "goat", + code: "goat", + }, + EmojiDef { + hexcode: "1f411", + shortcode: "ewe", + code: "sheep", + }, + EmojiDef { + hexcode: "1f42a", + shortcode: "camel", + code: "camel", + }, + EmojiDef { + hexcode: "1f992", + shortcode: "giraffe", + code: "giraffe", + }, + EmojiDef { + hexcode: "1f418", + shortcode: "elephant", + code: "elephant", + }, + EmojiDef { + hexcode: "1f98f", + shortcode: "rhinoceros", + code: "rhinoceros", + }, + EmojiDef { + hexcode: "1f9a9", + shortcode: "flamingo", + code: "flamingo", + }, + EmojiDef { + hexcode: "1f433", + shortcode: "spouting_whale", + code: "whale", + }, + EmojiDef { + hexcode: "1f42c", + shortcode: "dolphin", + code: "dolphin", + }, + EmojiDef { + hexcode: "1f43b_200d_2744", + shortcode: "polar_bear", + code: "bear", + }, +]; + +#[allow(non_upper_case_globals)] +const smaller_animal: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f413", + shortcode: "rooster", + code: "rooster", + }, + EmojiDef { + hexcode: "1f423", + shortcode: "hatching_chick", + code: "chick", + }, + EmojiDef { + hexcode: "1f985", + shortcode: "eagle", + code: "eagle", + }, + EmojiDef { + hexcode: "1f986", + shortcode: "duck", + code: "duck", + }, + EmojiDef { + hexcode: "1f989", + shortcode: "owl", + code: "owl", + }, + EmojiDef { + hexcode: "1f407", + shortcode: "rabbit", + code: "rabbit", + }, + EmojiDef { + hexcode: "1f427", 
+ shortcode: "penguin", + code: "penguin", + }, + EmojiDef { + hexcode: "1f98e", + shortcode: "lizard", + code: "lizard", + }, + EmojiDef { + hexcode: "1f422", + shortcode: "turtle", + code: "turtle", + }, + EmojiDef { + hexcode: "1f40d", + shortcode: "snake", + code: "snake", + }, + EmojiDef { + hexcode: "1f994", + shortcode: "hedgehog", + code: "hedgehog", + }, + EmojiDef { + hexcode: "1f987", + shortcode: "bat", + code: "bat", + }, + EmojiDef { + hexcode: "1f41f", + shortcode: "fish", + code: "fish", + }, + EmojiDef { + hexcode: "1f41a", + shortcode: "spiral_shell", + code: "shell", + }, + EmojiDef { + hexcode: "1f419", + shortcode: "octopus", + code: "octopus", + }, +]; +#[allow(non_upper_case_globals)] +const plants: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f40c", + shortcode: "snail", + code: "snail", + }, + EmojiDef { + hexcode: "1f98b", + shortcode: "butterfly", + code: "butterfly", + }, + EmojiDef { + hexcode: "1f41c", + shortcode: "ant", + code: "ant", + }, + EmojiDef { + hexcode: "1f41d", + shortcode: "honeybee", + code: "bee", + }, + EmojiDef { + hexcode: "1f41e", + shortcode: "lady_beetle", + code: "beetle", + }, + EmojiDef { + hexcode: "1f339", + shortcode: "rose", + code: "rose", + }, + EmojiDef { + hexcode: "1f33b", + shortcode: "sunflower", + code: "sunflower", + }, + EmojiDef { + hexcode: "1f332", + shortcode: "evergreen_tree", + code: "fir", + }, + EmojiDef { + hexcode: "1f334", + shortcode: "palm_tree", + code: "palm_tree", + }, + EmojiDef { + hexcode: "1f335", + shortcode: "cactus", + code: "cactus", + }, + EmojiDef { + hexcode: "1f340", + shortcode: "four_leaf_clover", + code: "clover", + }, + EmojiDef { + hexcode: "1fab4", + shortcode: "potted_plant", + code: "potted_plant", + }, + EmojiDef { + hexcode: "1f490", + shortcode: "bouquet", + code: "bouquet", + }, + EmojiDef { + hexcode: "1f342", + shortcode: "fallen_leaf", + code: "three_leaves", + }, + EmojiDef { + hexcode: "1f344", + shortcode: "mushroom", + code: "mushroom", + }, +]; +#[allow(non_upper_case_globals)] +const fruits: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f347", + shortcode: "grapes", + code: "grapes", + }, + EmojiDef { + hexcode: "1f349", + shortcode: "watermelon", + code: "watermelon", + }, + EmojiDef { + hexcode: "1f34b", + shortcode: "lemon", + code: "lemon", + }, + EmojiDef { + hexcode: "1f34c", + shortcode: "banana", + code: "banana", + }, + EmojiDef { + hexcode: "1f34d", + shortcode: "pineapple", + code: "pineapple", + }, + EmojiDef { + hexcode: "1f34e", + shortcode: "red_apple", + code: "apple", + }, + EmojiDef { + hexcode: "1f352", + shortcode: "cherries", + code: "cherries", + }, + EmojiDef { + hexcode: "1f353", + shortcode: "strawberry", + code: "strawberry", + }, + EmojiDef { + hexcode: "1fad0", + shortcode: "blueberries", + code: "three_blueberries", + }, + EmojiDef { + hexcode: "1f95d", + shortcode: "kiwi_fruit", + code: "kiwi", + }, + EmojiDef { + hexcode: "1f951", + shortcode: "avocado", + code: "avocado", + }, + EmojiDef { + hexcode: "1f346", + shortcode: "eggplant", + code: "eggplant", + }, + EmojiDef { + hexcode: "1f955", + shortcode: "carrot", + code: "carrot", + }, + EmojiDef { + hexcode: "1f33d", + shortcode: "ear_of_corn", + code: "corn", + }, + EmojiDef { + hexcode: "1f336", + shortcode: "hot_pepper", + code: "pepper", + }, +]; +#[allow(non_upper_case_globals)] +const food: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f950", + shortcode: "croissant", + code: "croissant", + }, + EmojiDef { + hexcode: "1f956", + shortcode: "baguette_bread", + 
code: "bread", + }, + EmojiDef { + hexcode: "1f968", + shortcode: "pretzel", + code: "pretzel", + }, + EmojiDef { + hexcode: "1f9c0", + shortcode: "cheese_wedge", + code: "cheese", + }, + EmojiDef { + hexcode: "1f355", + shortcode: "pizza", + code: "pizza", + }, + EmojiDef { + hexcode: "1f373", + shortcode: "cooking", + code: "egg", + }, + EmojiDef { + hexcode: "1f366", + shortcode: "soft_ice_cream", + code: "ice_cream", + }, + EmojiDef { + hexcode: "1f36a", + shortcode: "cookie", + code: "cookie", + }, + EmojiDef { + hexcode: "1f370", + shortcode: "shortcake", + code: "cake", + }, + EmojiDef { + hexcode: "1f36b", + shortcode: "chocolate_bar", + code: "chocolate", + }, + EmojiDef { + hexcode: "1f36c", + shortcode: "candy", + code: "sweet", + }, + EmojiDef { + hexcode: "2615", + shortcode: "hot_beverage", + code: "coffee", + }, + EmojiDef { + hexcode: "1f37e", + shortcode: "bottle_with_popping_cork", + code: "champagne_bottle", + }, + EmojiDef { + hexcode: "1f377", + shortcode: "wine_glass", + code: "glass_wine", + }, + EmojiDef { + hexcode: "1f942", + shortcode: "clinking_glasses", + code: "two_glasses", + }, +]; +#[allow(non_upper_case_globals)] +const travel: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f3d4", + shortcode: "snow_capped_mountain", + code: "mountain", + }, + EmojiDef { + hexcode: "1f3d5", + shortcode: "camping", + code: "camping", + }, + EmojiDef { + hexcode: "1f3d6", + shortcode: "beach_with_umbrella", + code: "beach", + }, + EmojiDef { + hexcode: "1f9ed", + shortcode: "compass", + code: "compass", + }, + EmojiDef { + hexcode: "1f3db", + shortcode: "classical_building", + code: "museum", + }, + EmojiDef { + hexcode: "1f3e1", + shortcode: "house_with_garden", + code: "house", + }, + EmojiDef { + hexcode: "26f2", + shortcode: "fountain", + code: "fountain", + }, + EmojiDef { + hexcode: "1f3aa", + shortcode: "circus_tent", + code: "circus", + }, + EmojiDef { + hexcode: "1f682", + shortcode: "locomotive", + code: "train", + }, + EmojiDef { + hexcode: "1f695", + shortcode: "taxi", + code: "taxi", + }, + EmojiDef { + hexcode: "1f3cd", + shortcode: "motorcycle", + code: "motorcycle", + }, + EmojiDef { + hexcode: "26f5", + shortcode: "sailboat", + code: "sailboat", + }, + EmojiDef { + hexcode: "2708", + shortcode: "airplane", + code: "airplane", + }, + EmojiDef { + hexcode: "1f681", + shortcode: "helicopter", + code: "helicopter", + }, + EmojiDef { + hexcode: "1f680", + shortcode: "rocket", + code: "rocket", + }, +]; +#[allow(non_upper_case_globals)] +const sky: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "2600", + shortcode: "sun", + code: "sun", + }, + EmojiDef { + hexcode: "1f319", + shortcode: "crescent_moon", + code: "moon", + }, + EmojiDef { + hexcode: "1fa90", + shortcode: "ringed_planet", + code: "planet", + }, + EmojiDef { + hexcode: "2b50", + shortcode: "star", + code: "star", + }, + EmojiDef { + hexcode: "1f30c", + shortcode: "milky_way", + code: "night_sky", + }, + EmojiDef { + hexcode: "1f327", + shortcode: "cloud_with_rain", + code: "cloud", + }, + EmojiDef { + hexcode: "2614", + shortcode: "umbrella_with_rain_drops", + code: "umbrella", + }, + EmojiDef { + hexcode: "26a1", + shortcode: "high_voltage", + code: "lightning", + }, + EmojiDef { + hexcode: "2744", + shortcode: "snowflake", + code: "snowflake", + }, + EmojiDef { + hexcode: "26c4", + shortcode: "snowman_without_snow", + code: "snowman", + }, + EmojiDef { + hexcode: "1f321", + shortcode: "thermometer", + code: "thermometer", + }, + EmojiDef { + hexcode: "1f525", + shortcode: "fire", + code: 
"fire", + }, + EmojiDef { + hexcode: "1f388", + shortcode: "balloon", + code: "balloon", + }, + EmojiDef { + hexcode: "1fa81", + shortcode: "kite", + code: "kite", + }, + EmojiDef { + hexcode: "1f308", + shortcode: "rainbow", + code: "rainbow", + }, +]; +#[allow(non_upper_case_globals)] +const play: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f3b8", + shortcode: "guitar", + code: "guitar", + }, + EmojiDef { + hexcode: "1f3b7", + shortcode: "saxophone", + code: "saxophone", + }, + EmojiDef { + hexcode: "1f3b5", + shortcode: "musical_note", + code: "music", + }, + EmojiDef { + hexcode: "1f3a8", + shortcode: "artist_palette", + code: "painting", + }, + EmojiDef { + hexcode: "265f", + shortcode: "chess_pawn", + code: "chess", + }, + EmojiDef { + hexcode: "1f381", + shortcode: "wrapped_gift", + code: "gift", + }, + EmojiDef { + hexcode: "1f3b2", + shortcode: "game_die", + code: "die", + }, + EmojiDef { + hexcode: "1f9e9", + shortcode: "puzzle_piece", + code: "puzzle", + }, + EmojiDef { + hexcode: "1f9f8", + shortcode: "teddy_bear", + code: "teddy_bear", + }, + EmojiDef { + hexcode: "1f9e8", + shortcode: "firecracker", + code: "firecracker", + }, + EmojiDef { + hexcode: "1f3af", + shortcode: "bullseye", + code: "bullseye", + }, + EmojiDef { + hexcode: "1f6fc", + shortcode: "roller_skate", + code: "roller_skate", + }, + EmojiDef { + hexcode: "1f6f4", + shortcode: "kick_scooter", + code: "kick_scooter", + }, + EmojiDef { + hexcode: "2693", + shortcode: "anchor", + code: "anchor", + }, + EmojiDef { + hexcode: "1f93f", + shortcode: "diving_mask", + code: "scuba_diving", + }, +]; +#[allow(non_upper_case_globals)] +const house: [EmojiDef<'static>; 15] = [ + EmojiDef { + hexcode: "1f9f9", + shortcode: "broom", + code: "broom", + }, + EmojiDef { + hexcode: "1f50d", + shortcode: "magnifying_glass_tilted_left", + code: "magnifying_glass", + }, + EmojiDef { + hexcode: "1f4a1", + shortcode: "light_bulb", + code: "bulb", + }, + EmojiDef { + hexcode: "1f4da", + shortcode: "books", + code: "three_books", + }, + EmojiDef { + hexcode: "1f4e6", + shortcode: "package", + code: "package", + }, + EmojiDef { + hexcode: "270f", + shortcode: "pencil", + code: "pencil", + }, + EmojiDef { + hexcode: "1f4cc", + shortcode: "pushpin", + code: "pin", + }, + EmojiDef { + hexcode: "1f4ce", + shortcode: "paperclip", + code: "paperclip", + }, + EmojiDef { + hexcode: "2702", + shortcode: "scissors", + code: "scissors", + }, + EmojiDef { + hexcode: "1f511", + shortcode: "key", + code: "key", + }, + EmojiDef { + hexcode: "1f513", + shortcode: "unlocked", + code: "lock", + }, + EmojiDef { + hexcode: "1fa91", + shortcode: "chair", + code: "chair", + }, + EmojiDef { + hexcode: "1f6c1", + shortcode: "bathtub", + code: "bathtub", + }, + EmojiDef { + hexcode: "1f9fd", + shortcode: "sponge", + code: "sponge", + }, + EmojiDef { + hexcode: "1f6d2", + shortcode: "shopping_cart", + code: "shopping_cart", + }, +]; + +lazy_static! 
{
+    pub static ref EMOJIS: HashMap<&'static str, [EmojiDef<'static>; 15]> = vec![
+        ("face", face),
+        ("face_unwell", face_unwell),
+        ("face_costume", face_costume),
+        ("emotion", emotion),
+        ("body", body),
+        ("sport", sport),
+        ("bigger_animal", bigger_animal),
+        ("smaller_animal", smaller_animal),
+        ("plants", plants),
+        ("fruits", fruits),
+        ("food", food),
+        ("travel", travel),
+        ("sky", sky),
+        ("play", play),
+        ("house", house),
+    ]
+    .into_iter()
+    .collect();
+}
+
+pub const EMOJI_CAT: [&'static str; 15] = [
+    "face",
+    "sport",
+    "bigger_animal",
+    "smaller_animal",
+    "plants",
+    "fruits",
+    "food",
+    "travel",
+    "sky",
+    "body",
+    "face_unwell",
+    "house",
+    "play",
+    "face_costume",
+    "emotion",
+];
+
+lazy_static! {
+    pub static ref EMOJI_CODES: HashMap<&'static str, u8> = generate_tuples();
+}
+
+fn generate_tuples() -> HashMap<&'static str, u8> {
+    let mut tuples = vec![];
+    for (icat, cat_name) in EMOJI_CAT.iter().enumerate() {
+        for (iemoji, emoji) in EMOJIS.get(cat_name).unwrap().iter().enumerate() {
+            let nbr = (icat << 4) + iemoji;
+            tuples.push((emoji.code, nbr as u8));
+        }
+    }
+    // let mut map = HashMap::new();
+    // for t in tuples.into_iter() {
+    //     match map.insert(t.0, t.1) {
+    //         Some(double) => log_info!("{} {} {}", t.0, t.1, double),
+    //         None => {}
+    //     }
+    // }
+    // map
+    tuples.into_iter().collect()
+}
+
+/// returns a list of tuples of 2 strings (category, emoji)
+pub fn display_pazzle(pazzle: &Vec<u8>) -> Vec<(&'static str, &'static str)> {
+    let mut res = vec![];
+    for emoji in pazzle {
+        let cat = (emoji & 240) >> 4;
+        let idx = emoji & 15;
+        let cat_str = EMOJI_CAT[cat as usize];
+        res.push((cat_str, EMOJIS.get(cat_str).unwrap()[idx as usize].code));
+    }
+    res
+}
+
+pub fn display_pazzle_one(pazzle: &Vec<u8>) -> Vec<String> {
+    let res: Vec<String> = display_pazzle(pazzle)
+        .into_iter()
+        .map(|(cat, emoji)| format!("{cat}:{emoji}"))
+        .collect();
+    res
+}
+
+//use ng_repo::log::*;
+
+/// taking a list of pazzle words, returns a list of u8 codes
+pub fn encode_pazzle(words: &Vec<String>) -> Result<Vec<u8>, NgError> {
+    //assert_eq!(EMOJI_CODES.len(), 15 * 15);
+    let mut res = vec![];
+    for word in words {
+        res.push(
+            *EMOJI_CODES
+                .get(word.as_str())
+                .ok_or(NgError::InvalidPazzle)?,
+        );
+    }
+    //log_info!("{:?}", res);
+    Ok(res)
+}
+
+/// lists all the words available for a pazzle, together with their category and u8 code
+pub fn list_all_words() -> Vec<(&'static str, &'static str, u8)> {
+    let mut tuples = vec![];
+    for (icat, cat_name) in EMOJI_CAT.iter().enumerate() {
+        for (iemoji, emoji) in EMOJIS.get(cat_name).unwrap().iter().enumerate() {
+            let nbr = (icat << 4) + iemoji;
+            tuples.push((emoji.code, *cat_name, nbr as u8));
+        }
+    }
+    tuples
+}
diff --git a/ng-wallet/src/lib.rs b/ng-wallet/src/lib.rs
new file mode 100644
index 0000000..30c3a60
--- /dev/null
+++ b/ng-wallet/src/lib.rs
@@ -0,0 +1,911 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
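+
+// Worked example of the pazzle emoji code layout (see emojis.rs above, which this
+// crate re-exports): a code packs the category index in the high nibble and the
+// emoji index in the low nibble, e.g. (3 << 4) + 7 = 0x37 = 55 selects emoji 7 of
+// category 3, and display_pazzle() recovers them with (55 & 240) >> 4 == 3 and
+// 55 & 15 == 7.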
+
+#[macro_use]
+extern crate lazy_static;
+
+pub mod types;
+
+pub mod bip39;
+
+pub mod emojis;
+
+pub mod permissions;
+
+use std::{collections::HashMap, io::Cursor};
+
+use aes_gcm_siv::{
+    aead::{heapless::Vec as HeaplessVec, AeadInPlace, KeyInit},
+    Aes256GcmSiv, Nonce,
+};
+use argon2::{Algorithm, Argon2, AssociatedData, ParamsBuilder, Version};
+use chacha20poly1305::XChaCha20Poly1305;
+use image::{imageops::FilterType, io::Reader as ImageReader, ImageOutputFormat};
+use ng_net::types::Locator;
+use rand::distributions::{Distribution, Uniform};
+use rand::prelude::*;
+use safe_transmute::transmute_to_bytes;
+use serde_bare::{from_slice, to_vec};
+#[cfg(debug_assertions)]
+use web_time::Instant;
+use zeroize::Zeroize;
+
+use ng_repo::types::*;
+use ng_repo::utils::{generate_keypair, now_timestamp, sign, verify};
+use ng_repo::{log::*, types::PrivKey};
+
+use ng_verifier::{site::SiteV0, verifier::Verifier};
+
+use crate::bip39::bip39_wordlist;
+use crate::types::*;
+
+impl Wallet {
+    pub fn id(&self) -> WalletId {
+        match self {
+            Wallet::V0(v0) => v0.id,
+            _ => unimplemented!(),
+        }
+    }
+    pub fn content_as_bytes(&self) -> Vec<u8> {
+        match self {
+            Wallet::V0(v0) => serde_bare::to_vec(&v0.content).unwrap(),
+            _ => unimplemented!(),
+        }
+    }
+    pub fn sig(&self) -> Sig {
+        match self {
+            Wallet::V0(v0) => v0.sig,
+            _ => unimplemented!(),
+        }
+    }
+    pub fn pazzle_length(&self) -> u8 {
+        match self {
+            Wallet::V0(v0) => v0.content.pazzle_length,
+            _ => unimplemented!(),
+        }
+    }
+    pub fn name(&self) -> String {
+        match self {
+            Wallet::V0(v0) => v0.id.to_string(),
+            _ => unimplemented!(),
+        }
+    }
+
+    /// `nonce`: the current nonce used for encrypting this wallet by the user on this device.
+    /// It should be incremented BEFORE encrypting the wallet again,
+    /// when some new operations have been added to the log of the Wallet.
+    /// The nonce is per PeerId. It is saved together with the PeerId in the SessionPeerStorage.
+    /// If the session is not saved (in-memory), it is lost, but that is fine, as the PeerId is also lost, and a new one
+    /// will be generated for the next session.
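+    /// A minimal call sketch (the variable names are illustrative, not defined in this crate):
+    /// `let new_nonce = sps.last_wallet_nonce + 1;`
+    /// `let updated = wallet.encrypt(&wallet_log, &master_key, sps.peer_key.to_pub(), new_nonce, wallet_privkey)?;`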
+    pub fn encrypt(
+        &self,
+        wallet_log: &WalletLog,
+        master_key: &[u8; 32],
+        peer_id: PubKey,
+        nonce: u64,
+        wallet_privkey: PrivKey,
+    ) -> Result<Wallet, NgError> {
+        let timestamp = now_timestamp();
+        let wallet_id = self.id();
+        let encrypted =
+            enc_wallet_log(wallet_log, master_key, peer_id, nonce, timestamp, wallet_id)?;
+
+        let mut wallet_content = match self {
+            Wallet::V0(v0) => v0.content.clone(),
+            _ => unimplemented!(),
+        };
+
+        wallet_content.timestamp = timestamp;
+        wallet_content.peer_id = peer_id;
+        wallet_content.nonce = nonce;
+        wallet_content.encrypted = encrypted;
+
+        let ser_wallet = serde_bare::to_vec(&wallet_content).unwrap();
+
+        let sig = sign(&wallet_privkey, &wallet_id, &ser_wallet).unwrap();
+
+        let wallet_v0 = WalletV0 {
+            // ID
+            id: wallet_id,
+            // Content
+            content: wallet_content,
+            // Signature over content by wallet's private key
+            sig,
+        };
+
+        // let content = BootstrapContentV0 { servers: vec![] };
+        // let ser = serde_bare::to_vec(&content).unwrap();
+        // let sig = sign(wallet_key, wallet_id, &ser).unwrap();
+
+        // let bootstrap = Bootstrap::V0(BootstrapV0 {
+        //     id: wallet_id,
+        //     content,
+        //     sig,
+        // });
+
+        Ok(Wallet::V0(wallet_v0))
+    }
+}
+
+pub fn enc_master_key(
+    master_key: &[u8; 32],
+    key: &[u8; 32],
+    nonce: u8,
+    wallet_id: WalletId,
+) -> Result<[u8; 48], NgWalletError> {
+    let cipher = Aes256GcmSiv::new(key.into());
+    let mut nonce_buffer = [0u8; 12];
+    nonce_buffer[0] = nonce;
+    let nonce = Nonce::from_slice(&nonce_buffer);
+
+    let mut buffer: HeaplessVec<u8, 48> = HeaplessVec::new(); // Note: buffer needs 16 bytes of overhead for the auth tag
+    buffer
+        .extend_from_slice(master_key)
+        .map_err(|_| NgWalletError::InternalError)?;
+
+    // Encrypt `buffer` in-place, replacing the plaintext contents with ciphertext
+    cipher
+        .encrypt_in_place(nonce, &to_vec(&wallet_id).unwrap(), &mut buffer)
+        .map_err(|_e| NgWalletError::EncryptionError)?;
+
+    // `buffer` now contains the encrypted master key
+    // log_debug!("cipher {:?}", buffer);
+    Ok(buffer.into_array::<48>().unwrap())
+}
+
+pub fn dec_master_key(
+    ciphertext: [u8; 48],
+    key: &[u8; 32],
+    nonce: u8,
+    wallet_id: WalletId,
+) -> Result<[u8; 32], NgWalletError> {
+    let cipher = Aes256GcmSiv::new(key.into());
+    let mut nonce_buffer = [0u8; 12];
+    nonce_buffer[0] = nonce;
+    let nonce = Nonce::from_slice(&nonce_buffer);
+
+    let mut buffer: HeaplessVec<u8, 48> = HeaplessVec::from_slice(&ciphertext).unwrap(); // Note: buffer needs 16 bytes of overhead for the auth tag
+
+    // Decrypt `buffer` in-place, replacing its ciphertext contents with the original plaintext
+    cipher
+        .decrypt_in_place(nonce, &to_vec(&wallet_id).unwrap(), &mut buffer)
+        .map_err(|_e| NgWalletError::DecryptionError)?;
+    Ok(buffer.into_array::<32>().unwrap())
+}
+
+fn gen_nonce(peer_id: PubKey, nonce: u64) -> [u8; 24] {
+    let mut buffer = Vec::with_capacity(24);
+    buffer.extend_from_slice(&peer_id.slice()[0..16]);
+    buffer.extend_from_slice(&nonce.to_be_bytes());
+    buffer.try_into().unwrap()
+}
+
+fn gen_associated_data(timestamp: Timestamp, wallet_id: WalletId) -> Vec<u8> {
+    let ser_wallet = to_vec(&wallet_id).unwrap();
+    [ser_wallet, timestamp.to_be_bytes().to_vec()].concat()
+}
+
+pub fn enc_wallet_log(
+    log: &WalletLog,
+    master_key: &[u8; 32],
+    peer_id: PubKey,
+    nonce: u64,
+    timestamp: Timestamp,
+    wallet_id: WalletId,
+) -> Result<Vec<u8>, NgWalletError> {
+    let ser_log = to_vec(log).map_err(|_e| NgWalletError::InternalError)?;
+
+    let nonce_buffer: [u8; 24] = gen_nonce(peer_id, nonce);
+
+    let cipher = XChaCha20Poly1305::new(master_key.into());
+
+    let mut buffer: Vec<u8> = Vec::with_capacity(ser_log.len() + 16); // Note: buffer needs 16 bytes of overhead for the auth tag
+    buffer.extend_from_slice(&ser_log);
+
+    // Encrypt `buffer` in-place, replacing the plaintext contents with ciphertext
+    cipher
+        .encrypt_in_place(
+            &nonce_buffer.into(),
+            &gen_associated_data(timestamp, wallet_id),
+            &mut buffer,
+        )
+        .map_err(|_e| NgWalletError::EncryptionError)?;
+
+    // `buffer` now contains the message ciphertext
+    // log_debug!("encrypted_block ciphertext {:?}", buffer);
+
+    Ok(buffer)
+}
+
+// pub fn dec_session(key: PrivKey, vec: &Vec<u8>) -> Result<SessionWalletStorageV0, NgWalletError> {
+//     let session_ser = crypto_box::seal_open(&(*key.to_dh().slice()).into(), vec)
+//         .map_err(|_| NgWalletError::DecryptionError)?;
+//     let session: SessionWalletStorage =
+//         serde_bare::from_slice(&session_ser).map_err(|_| NgWalletError::SerializationError)?;
+//     let SessionWalletStorage::V0(v0) = session;
+//     Ok(v0)
+// }
+
+// pub fn create_new_session(
+//     wallet_id: PubKey,
+//     user: PubKey,
+// ) -> Result<(SessionWalletStorageV0, Vec<u8>), NgWalletError> {
+//     let peer = generate_keypair();
+//     let mut sws = SessionWalletStorageV0::new();
+//     let sps = SessionPeerStorageV0 {
+//         user,
+//         peer_key: peer.0,
+//         last_wallet_nonce: 0,
+//     };
+//     sws.users.insert(user.to_string(), sps);
+//     let sws_ser = serde_bare::to_vec(&SessionWalletStorage::V0(sws.clone())).unwrap();
+//     let mut rng = crypto_box::aead::OsRng {};
+//     let cipher = crypto_box::seal(&mut rng, &wallet_id.to_dh_slice().into(), &sws_ser)
+//         .map_err(|_| NgWalletError::EncryptionError)?;
+//     Ok((sws, cipher))
+// }
+
+pub fn dec_encrypted_block(
+    mut ciphertext: Vec<u8>,
+    master_key: [u8; 32],
+    peer_id: PubKey,
+    nonce: u64,
+    timestamp: Timestamp,
+    wallet_id: WalletId,
+) -> Result<SensitiveWalletV0, NgWalletError> {
+    let nonce_buffer: [u8; 24] = gen_nonce(peer_id, nonce);
+
+    let cipher = XChaCha20Poly1305::new(master_key.as_ref().into());
+
+    // Decrypt `ciphertext` in-place, replacing its ciphertext contents with the original plaintext
+    cipher
+        .decrypt_in_place(
+            &nonce_buffer.into(),
+            &gen_associated_data(timestamp, wallet_id),
+            &mut ciphertext,
+        )
+        .map_err(|_e| NgWalletError::DecryptionError)?;
+
+    let decrypted_log =
+        from_slice::<WalletLog>(&ciphertext).map_err(|_e| NgWalletError::DecryptionError)?;
+
+    //master_key.zeroize(); // this is now done in the SensitiveWalletV0
+
+    // `ciphertext` now contains the decrypted block
+    //log_debug!("decrypted_block {:?}", ciphertext);
+    ciphertext.zeroize();
+
+    match decrypted_log {
+        WalletLog::V0(v0) => v0.reduce(master_key),
+    }
+}
+
+// FIXME: An important note on the cost parameters !!!
+// Here they are set to quite high values because the code gets optimized (unfortunately), and the cost params take that into account.
+// On native apps in debug (dev) mode, the Rust code is not optimized and we get a timing above 1 min, which is way too much.
+// Once compiled for release (prod), the timing goes down to 8 sec on native apps thanks to the Rust optimization.
+// On the wasm32 target, wasm-pack has optimization disabled (wasm-opt = false), but we suspect the optimization happens in the V8 runtime, in the browser or node.
+// We get 10 secs on the same machine for the web-based app, which is acceptable.
+// We should have a look at https://blog.trailofbits.com/2022/01/26/part-1-the-life-of-an-optimization-barrier/
+// and https://blog.trailofbits.com/2022/02/01/part-2-rusty-crypto/
+// The memory size could be too high for iOS, which seems to have a limit of 120MB in total for the whole app.
+// We haven't tested it yet.
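+// Parameter note (derived from the builder below): the argon2 crate takes m_cost in KiB,
+// so m_cost(40 * 1024) requests 40 MiB of memory per derivation, with t_cost = 40 passes
+// and p_cost = 1 lane, producing a 32-byte output key.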
+// https://community.bitwarden.com/t/recommended-settings-for-argon2/50901/16?page=4
+pub fn derive_key_from_pass(mut pass: Vec<u8>, salt: [u8; 16], wallet_id: WalletId) -> [u8; 32] {
+    let params = ParamsBuilder::new()
+        .m_cost(40 * 1024)
+        .t_cost(40)
+        .p_cost(1)
+        .data(AssociatedData::new(wallet_id.slice()).unwrap())
+        .output_len(32)
+        .build()
+        .unwrap();
+    let argon = Argon2::new(Algorithm::Argon2id, Version::V0x13, params);
+    let mut out = [0u8; 32];
+    argon.hash_password_into(&pass, &salt, &mut out).unwrap();
+    pass.zeroize();
+    out
+}
+
+pub fn open_wallet_with_pazzle(
+    wallet: &Wallet,
+    mut pazzle: Vec<u8>,
+    mut pin: [u8; 4],
+) -> Result<SensitiveWallet, NgWalletError> {
+    // no digit should be greater than 9
+    if pin[0] > 9 || pin[1] > 9 || pin[2] > 9 || pin[3] > 9 {
+        return Err(NgWalletError::InvalidPin);
+    }
+
+    //log_info!("pazzle={:?}", pazzle);
+
+    #[cfg(debug_assertions)]
+    let opening_pazzle = Instant::now();
+
+    verify(&wallet.content_as_bytes(), wallet.sig(), wallet.id())
+        .map_err(|_e| NgWalletError::InvalidSignature)?;
+
+    match wallet {
+        Wallet::V0(v0) => {
+            pazzle.extend_from_slice(&pin);
+            let mut pazzle_key = derive_key_from_pass(pazzle, v0.content.salt_pazzle, v0.id);
+            // pazzle is zeroized in derive_key_from_pass
+            pin.zeroize();
+
+            let master_key = dec_master_key(
+                v0.content.enc_master_key_pazzle,
+                &pazzle_key,
+                v0.content.master_nonce,
+                v0.id,
+            )?;
+            pazzle_key.zeroize();
+
+            #[cfg(debug_assertions)]
+            log_debug!(
+                "opening of wallet with pazzle took: {} ms",
+                opening_pazzle.elapsed().as_millis()
+            );
+            let cipher = v0.content.encrypted.clone();
+            Ok(SensitiveWallet::V0(dec_encrypted_block(
+                cipher,
+                master_key,
+                v0.content.peer_id,
+                v0.content.nonce,
+                v0.content.timestamp,
+                v0.id,
+            )?))
+        }
+        _ => unimplemented!(),
+    }
+}
+
+pub fn open_wallet_with_mnemonic(
+    wallet: &Wallet,
+    mut mnemonic: [u16; 12],
+    mut pin: [u8; 4],
+) -> Result<SensitiveWallet, NgWalletError> {
+    verify(&wallet.content_as_bytes(), wallet.sig(), wallet.id())
+        .map_err(|_e| NgWalletError::InvalidSignature)?;
+
+    match wallet {
+        Wallet::V0(v0) => {
+            let mut mnemonic_key = derive_key_from_pass(
+                [transmute_to_bytes(&mnemonic), &pin].concat(),
+                v0.content.salt_mnemonic,
+                v0.id,
+            );
+            mnemonic.zeroize();
+            pin.zeroize();
+
+            let master_key = dec_master_key(
+                v0.content.enc_master_key_mnemonic,
+                &mnemonic_key,
+                v0.content.master_nonce,
+                v0.id,
+            )?;
+            mnemonic_key.zeroize();
+
+            Ok(SensitiveWallet::V0(dec_encrypted_block(
+                v0.content.encrypted.clone(),
+                master_key,
+                v0.content.peer_id,
+                v0.content.nonce,
+                v0.content.timestamp,
+                v0.id,
+            )?))
+        }
+        _ => unimplemented!(),
+    }
+}
+
+pub fn display_mnemonic(mnemonic: &[u16; 12]) -> Vec<String> {
+    let res: Vec<String> = mnemonic
+        .into_iter()
+        .map(|i| String::from(bip39_wordlist[*i as usize]))
+        .collect();
+    res
+}
+
+pub fn gen_shuffle_for_pazzle_opening(pazzle_length: u8) -> ShuffledPazzle {
+    let mut rng = rand::thread_rng();
+    let mut category_indices: Vec<u8> = (0..pazzle_length).collect();
+    //log_debug!("{:?}", category_indices);
+    category_indices.shuffle(&mut rng);
+    //log_debug!("{:?}", category_indices);
+
+    let mut emoji_indices: Vec<Vec<u8>> = Vec::with_capacity(pazzle_length.into());
+    for _ in 0..pazzle_length {
+        let mut idx: Vec<u8> = (0..15).collect();
+        //log_debug!("{:?}", idx);
+        idx.shuffle(&mut rng);
+        //log_debug!("{:?}", idx);
+        emoji_indices.push(idx)
+    }
+    ShuffledPazzle {
+        category_indices,
+        emoji_indices,
+    }
+}
+
+pub fn gen_shuffle_for_pin() -> Vec<u8> {
+    let mut rng = rand::thread_rng();
+    let mut digits: Vec<u8> = (0..10).collect();
+    //log_debug!("{:?}", digits);
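+    // (the shuffled 0..=9 permutation is presumably consumed by the UI to lay out
+    // a randomized PIN pad; this function itself only returns the permutation)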
+    digits.shuffle(&mut rng);
+    //log_debug!("{:?}", digits);
+    digits
+}
+
+/// creates a Wallet from a pin, a security text and image,
+/// and returns the Wallet, the pazzle and the mnemonic
+pub fn create_wallet_first_step_v0(
+    params: CreateWalletV0,
+) -> Result<CreateWalletIntermediaryV0, NgWalletError> {
+    // pazzle_length can currently only be 9, or 0 (the 12 and 15 options are disabled below)
+    if params.pazzle_length != 9
+        //&& params.pazzle_length != 12
+        //&& params.pazzle_length != 15
+        && params.pazzle_length != 0
+    {
+        return Err(NgWalletError::InvalidPazzleLength);
+    }
+
+    // check validity of PIN
+
+    // shouldn't start with 0
+    // if params.pin[0] == 0 {
+    //     return Err(NgWalletError::InvalidPin);
+    // }
+
+    // no digit should be greater than 9
+    if params.pin[0] > 9 || params.pin[1] > 9 || params.pin[2] > 9 || params.pin[3] > 9 {
+        return Err(NgWalletError::InvalidPin);
+    }
+
+    // check that the same digit doesn't appear 3 times
+    if (params.pin[0] == params.pin[1] && params.pin[0] == params.pin[2])
+        || (params.pin[0] == params.pin[1] && params.pin[0] == params.pin[3])
+        || (params.pin[0] == params.pin[2] && params.pin[0] == params.pin[3])
+        || (params.pin[1] == params.pin[2] && params.pin[1] == params.pin[3])
+    {
+        return Err(NgWalletError::InvalidPin);
+    }
+
+    // check for an ascending series
+    if params.pin[1] == params.pin[0] + 1
+        && params.pin[2] == params.pin[1] + 1
+        && params.pin[3] == params.pin[2] + 1
+    {
+        return Err(NgWalletError::InvalidPin);
+    }
+
+    // check for a descending series
+    if params.pin[3] >= 3
+        && params.pin[2] == params.pin[3] - 1
+        && params.pin[1] == params.pin[2] - 1
+        && params.pin[0] == params.pin[1] - 1
+    {
+        return Err(NgWalletError::InvalidPin);
+    }
+
+    // check validity of the security text
+    let words: Vec<_> = params.security_txt.split_whitespace().collect();
+    let new_string = words.join(" ");
+    let count = new_string.chars().count();
+    if count < 10 || count > 100 {
+        return Err(NgWalletError::InvalidSecurityText);
+    }
+
+    // check validity of the security image
+    let decoded_img = ImageReader::new(Cursor::new(&params.security_img))
+        .with_guessed_format()
+        .map_err(|_e| NgWalletError::InvalidSecurityImage)?
+        .decode()
+        .map_err(|_e| NgWalletError::InvalidSecurityImage)?;
+
+    if decoded_img.height() < 150 || decoded_img.width() < 150 {
+        return Err(NgWalletError::InvalidSecurityImage);
+    }
+
+    let resized_img = if decoded_img.height() == 400 && decoded_img.width() == 400 {
+        decoded_img
+    } else {
+        decoded_img.resize_to_fill(400, 400, FilterType::Triangle)
+    };
+
+    let buffer: Vec<u8> = Vec::with_capacity(100000);
+    let mut cursor = Cursor::new(buffer);
+    resized_img
+        .write_to(&mut cursor, ImageOutputFormat::Jpeg(72))
+        .map_err(|_e| NgWalletError::InvalidSecurityImage)?;
+
+    // creating the wallet keys
+
+    let (wallet_privkey, wallet_id) = generate_keypair();
+
+    // TODO: should be derived from OwnershipProof
+    let user_privkey = PrivKey::random_ed();
+
+    let user = user_privkey.to_pub();
+
+    let client = ClientV0::new_with_auto_open(user);
+
+    let intermediary = CreateWalletIntermediaryV0 {
+        wallet_privkey,
+        wallet_name: wallet_id.to_string(),
+        client,
+        user_privkey,
+        in_memory: !params.local_save,
+        security_img: cursor.into_inner(),
+        security_txt: new_string,
+        pazzle_length: params.pazzle_length,
+        pin: params.pin,
+        send_bootstrap: params.send_bootstrap,
+        send_wallet: params.send_wallet,
+        result_with_wallet_file: params.result_with_wallet_file,
+        core_bootstrap: params.core_bootstrap.clone(),
+        core_registration: params.core_registration,
+        additional_bootstrap: params.additional_bootstrap.clone(),
+        pdf: params.pdf,
+    };
+    Ok(intermediary)
+}
+
+pub async fn create_wallet_second_step_v0(
+    mut params: CreateWalletIntermediaryV0,
+    verifier: &mut Verifier,
+) -> Result<
+    (
+        CreateWalletResultV0,
+        SiteV0,
+        HashMap<String, Vec<BrokerInfoV0>>,
+    ),
+    NgWalletError,
+> {
+    #[cfg(debug_assertions)]
+    let creating_pazzle = Instant::now();
+
+    let mut site = SiteV0::create_personal(params.user_privkey.clone(), verifier)
+        .await
+        .map_err(|e| {
+            log_err!("create_personal failed with {e}");
+            NgWalletError::InternalError
+        })?;
+
+    let user = params.user_privkey.to_pub();
+
+    let wallet_id = params.wallet_privkey.to_pub();
+
+    let mut ran = thread_rng();
+
+    let mut category_indices: Vec<u8> = (0..params.pazzle_length).collect();
+    category_indices.shuffle(&mut ran);
+
+    let between = Uniform::try_from(0..15).unwrap();
+    let mut pazzle = vec![0u8; params.pazzle_length.into()];
+    for (ix, i) in pazzle.iter_mut().enumerate() {
+        //*i = ran.gen_range(0, 15) + (category_indices[ix] << 4);
+        *i = between.sample(&mut ran) + (category_indices[ix] << 4);
+    }
+
+    //log_debug!("pazzle {:?}", pazzle);
+    let between = Uniform::try_from(0..2048).unwrap();
+    let mut mnemonic = [0u16; 12];
+    for i in &mut mnemonic {
+        //*i = ran.gen_range(0, 2048);
+        *i = between.sample(&mut ran);
+    }
+
+    //log_debug!("mnemonic {:?}", display_mnemonic(&mnemonic));
+
+    //slice_as_array!(&mnemonic, [String; 12])
+    //    .ok_or(NgWalletError::InternalError)?
+    //    .clone(),
+
+    let create_op = WalletOpCreateV0 {
+        wallet_privkey: params.wallet_privkey.clone(),
+        // pazzle: pazzle.clone(),
+        // mnemonic,
+        // pin: params.pin,
+        personal_site: site.clone(),
+        save_recovery_kit: if params.send_wallet {
+            SaveToNGOne::Wallet
+        } else if params.send_bootstrap {
+            SaveToNGOne::Bootstrap
+        } else {
+            SaveToNGOne::No
+        },
+        //client: client.clone(),
+    };
+
+    // Creating a new peerId for this Client and User. We don't do that anymore.
+    //let peer = generate_keypair();
+
+    let mut wallet_log = WalletLog::new_v0(create_op);
+
+    // adding some more operations in the log
+
+    // pub core_bootstrap: BootstrapContentV0,
+    // #[zeroize(skip)]
+    // pub core_registration: Option<[u8; 32]>,
+    // #[zeroize(skip)]
+    // pub additional_bootstrap: Option<BootstrapContentV0>,
+
+    let mut brokers: HashMap<String, Vec<BrokerInfoV0>> = HashMap::new();
+
+    let core_pubkey = params
+        .core_bootstrap
+        .get_first_peer_id()
+        .ok_or(NgWalletError::InvalidBootstrap)?;
+    wallet_log.add(WalletOperation::AddSiteCoreV0((
+        user,
+        core_pubkey,
+        params.core_registration,
+    )));
+
+    site.cores.push((core_pubkey, params.core_registration));
+
+    if let Some(additional) = &params.additional_bootstrap {
+        params.core_bootstrap.merge(additional);
+    }
+    let mut locator = Locator::empty();
+    for server in &params.core_bootstrap.servers {
+        locator.add(server.clone());
+
+        wallet_log.add(WalletOperation::AddBrokerServerV0(server.clone()));
+        wallet_log.add(WalletOperation::AddSiteBootstrapV0((user, server.peer_id)));
+        site.bootstraps.push(server.peer_id);
+
+        let broker = BrokerInfoV0::ServerV0(server.clone());
+        let key = broker.get_id().to_string();
+        let mut list = brokers.get_mut(&key);
+        if list.is_none() {
+            let new_list = vec![];
+            brokers.insert(key.clone(), new_list);
+            list = brokers.get_mut(&key);
+        }
+        list.unwrap().push(broker);
+    }
+    verifier.update_locator(locator);
+
+    let mut master_key = [0u8; 32];
+    getrandom::fill(&mut master_key).map_err(|_e| NgWalletError::InternalError)?;
+
+    let mut salt_pazzle = [0u8; 16];
+    let mut enc_master_key_pazzle = [0u8; 48];
+    if params.pazzle_length > 0 {
+        getrandom::fill(&mut salt_pazzle).map_err(|_e| NgWalletError::InternalError)?;
+
+        let mut pazzle_key = derive_key_from_pass(
+            [pazzle.clone(), params.pin.to_vec()].concat(),
+            salt_pazzle,
+            wallet_id,
+        );
+
+        enc_master_key_pazzle = enc_master_key(&master_key, &pazzle_key, 0, wallet_id)?;
+        pazzle_key.zeroize();
+    }
+
+    let mut salt_mnemonic = [0u8; 16];
+    getrandom::fill(&mut salt_mnemonic).map_err(|_e| NgWalletError::InternalError)?;
+
+    //log_debug!("salt_pazzle {:?}", salt_pazzle);
+    //log_debug!("salt_mnemonic {:?}", salt_mnemonic);
+
+    let mut mnemonic_key = derive_key_from_pass(
+        [transmute_to_bytes(&mnemonic), &params.pin].concat(),
+        salt_mnemonic,
+        wallet_id,
+    );
+
+    let enc_master_key_mnemonic = enc_master_key(&master_key, &mnemonic_key, 0, wallet_id)?;
+    mnemonic_key.zeroize();
+
+    let timestamp = now_timestamp();
+
+    let encrypted = enc_wallet_log(
+        &wallet_log,
+        &master_key,
+        // the peer_id used to generate the nonce at creation time is always zero
+        PubKey::nil(),
+        0,
+        timestamp,
+        wallet_id,
+    )?;
+    master_key.zeroize();
+
+    let wallet_content = WalletContentV0 {
+        security_img: params.security_img.clone(),
+        security_txt: params.security_txt.clone(),
+        pazzle_length: params.pazzle_length,
+        salt_pazzle,
+        salt_mnemonic,
+        enc_master_key_pazzle,
+        enc_master_key_mnemonic,
+        master_nonce: 0,
+        timestamp,
+        peer_id: PubKey::nil(),
+        nonce: 0,
+        encrypted,
+    };
+
+    let ser_wallet = serde_bare::to_vec(&wallet_content).unwrap();
+
+    let sig = sign(&params.wallet_privkey, &wallet_id, &ser_wallet).unwrap();
+
+    let wallet_v0 = WalletV0 {
+        // ID
+        id: wallet_id,
+        // Content
+        content: wallet_content,
+        // Signature over content by wallet's private key
+        sig,
+    };
+
+    // let content = BootstrapContentV0 { servers: vec![] };
+    // let ser = serde_bare::to_vec(&content).unwrap();
+    // let sig = sign(wallet_key, wallet_id, &ser).unwrap();
+
+    // let bootstrap =
Bootstrap::V0(BootstrapV0 { + // id: wallet_id, + // content, + // sig, + // }); + + #[cfg(debug_assertions)] + log_debug!( + "creating of wallet took: {} ms", + creating_pazzle.elapsed().as_millis() + ); + + let wallet = Wallet::V0(wallet_v0); + let wallet_file = match params.result_with_wallet_file { + false => vec![], + true => to_vec(&NgFile::V0(NgFileV0::Wallet(wallet.clone()))).unwrap(), + }; + Ok(( + CreateWalletResultV0 { + wallet: wallet, + wallet_file, + pazzle, + mnemonic: mnemonic.clone(), + mnemonic_str: display_mnemonic(&mnemonic), + wallet_name: params.wallet_name.clone(), + client: params.client.clone(), + user, + in_memory: params.in_memory, + session_id: 0, + pdf_file: vec![], + }, + site, + brokers, + )) +} + +#[cfg(test)] +mod test { + use crate::emojis::display_pazzle_one; + + use super::*; + use ng_net::types::BootstrapContentV0; + use std::fs::File; + use std::io::BufReader; + use std::io::Read; + use std::io::Write; + use std::time::Instant; + + // #[test] + // fn random_pass() { + // super::random_pass() + // } + + #[test] + fn test_gen_shuffle() { + let _shuffle = gen_shuffle_for_pazzle_opening(9); + log_debug!("{:?}", _shuffle); + let _shuffle = gen_shuffle_for_pazzle_opening(12); + log_debug!("{:?}", _shuffle); + let _shuffle = gen_shuffle_for_pazzle_opening(15); + log_debug!("{:?}", _shuffle); + let _digits = gen_shuffle_for_pin(); + log_debug!("{:?}", _digits); + } + + #[async_std::test] + async fn create_wallet() { + // loading an image file from disk + let f = File::open("tests/valid_security_image.jpg") + .expect("open of tests/valid_security_image.jpg"); + let mut reader = BufReader::new(f); + let mut img_buffer = Vec::new(); + // Read file into vector. + reader + .read_to_end(&mut img_buffer) + .expect("read of valid_security_image.jpg"); + + let pin = [5, 2, 9, 1]; + + let _creation = Instant::now(); + + let res = create_wallet_first_step_v0(CreateWalletV0::new( + img_buffer, + " know yourself ".to_string(), + pin, + 9, + false, + false, + BootstrapContentV0::new_localhost(PubKey::nil()), + None, + None, + false, + "test".to_string(), + )) + .expect("create_wallet_first_step_v0"); + + let mut verifier = Verifier::new_dummy(); + let (res, _, _) = create_wallet_second_step_v0(res, &mut verifier) + .await + .expect("create_wallet_second_step_v0"); + + log_info!( + "creation of wallet took: {} ms", + _creation.elapsed().as_millis() + ); + log_debug!("-----------------------------"); + + let mut file = File::create("tests/wallet.ngw").expect("open wallet write file"); + let ser_wallet = to_vec(&NgFile::V0(NgFileV0::Wallet(res.wallet.clone()))).unwrap(); + let _ = file.write_all(&ser_wallet); + + log_debug!("wallet id: {}", res.wallet.id()); + log_debug!("pazzle {:?}", display_pazzle_one(&res.pazzle)); + log_debug!("mnemonic {:?}", display_mnemonic(&res.mnemonic)); + log_debug!("pin {:?}", pin); + + if let Wallet::V0(v0) = &res.wallet { + log_debug!("security text: {:?}", v0.content.security_txt); + + let mut file = + File::create("tests/generated_security_image.jpg").expect("open write file"); + let _ = file.write_all(&v0.content.security_img); + + let f = File::open("tests/generated_security_image.jpg.compare") + .expect("open of generated_security_image.jpg.compare"); + let mut reader = BufReader::new(f); + let mut generated_security_image_compare = Vec::new(); + // Read file into vector. 
+            reader
+                .read_to_end(&mut generated_security_image_compare)
+                .expect("read of generated_security_image.jpg.compare");
+
+            assert_eq!(v0.content.security_img, generated_security_image_compare);
+
+            let _opening_mnemonic = Instant::now();
+
+            let _w = open_wallet_with_mnemonic(&Wallet::V0(v0.clone()), res.mnemonic, pin.clone())
+                .expect("open with mnemonic");
+            //log_debug!("encrypted part {:?}", w);
+
+            log_info!(
+                "opening of wallet with mnemonic took: {} ms",
+                _opening_mnemonic.elapsed().as_millis()
+            );
+
+            if v0.content.pazzle_length > 0 {
+                let _opening_pazzle = Instant::now();
+                let _w = open_wallet_with_pazzle(&Wallet::V0(v0.clone()), res.pazzle.clone(), pin)
+                    .expect("open with pazzle");
+                log_info!(
+                    "opening of wallet with pazzle took: {} ms",
+                    _opening_pazzle.elapsed().as_millis()
+                );
+            }
+            log_debug!("encrypted part {:?}", _w);
+        }
+    }
+}
diff --git a/ng-wallet/src/permissions.rs b/ng-wallet/src/permissions.rs
new file mode 100644
index 0000000..6dd9c00
--- /dev/null
+++ b/ng-wallet/src/permissions.rs
@@ -0,0 +1,232 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+use serde::{Deserialize, Serialize};
+
+use std::collections::HashMap;
+
+use ng_repo::types::UserId;
+
+/// Access Mode
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum AccessMode {
+    Read,
+    Write,
+    Create,
+    HookCreate,
+    HookDelete,
+    Control,
+    Sign,
+    Run,
+    Cron,
+    Query,
+    SocialQuery,
+    Share,
+    DeviceCapability,
+}
+
+/// Access Scope
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum AccessScope {
+    Once,
+    OnceSub,
+    OnceMany,
+    OnceManySub,
+    Permanent,
+    Foreground,
+    Background,
+}
+
+/// Access Request Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct AccessRequestV0 {
+    /// ID of the Access Request. Should be the tokenized CommitID of the RDF AccessRequest in the App's manifest Document.
+    pub id: String,
+
+    pub mode: AccessMode,
+
+    /// allowed types for this access mode. Usually a PrimaryClass. Can be "any".
+    /// for Runs: name of the service
+    /// for Queries: Nuri of the Sparql, Fragment, ShapeTree or GraphQL
+    /// for Cron: the time interval
+    /// for Share: Stream, e:mail, e:xxx, Contact, Document
+    /// for DeviceCapability: camera, microphone, location, receiveSMS, scanQR, internet
+    pub types: Vec<String>,
+
+    /// allowed scopes for this access mode
+    pub scopes: Vec<AccessScope>,
+
+    /// is this access request optional?
+    pub optional: bool,
+
+    /// request depends on another request (only if optional)
+    pub depends_on: Option<String>,
+}
+
+impl AccessRequestV0 {
+    pub fn new_access_all() -> Self {
+        Self {
+            id: "".to_string(),
+            mode: AccessMode::Read,
+            types: vec!["any".to_string()],
+            scopes: vec![AccessScope::Permanent],
+            optional: false,
+            depends_on: None,
+        }
+    }
+}
+
+/// App Component type
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum AppComponentType {
+    Viewer,
+    Editor,
+    ReadService,
+    WriteService,
+    Model,
+}
+
+/// App Component Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct AppComponentV0 {
+    /// Name of the component; can be an official component of the form n:g:z, or custom ones n:xxx:z:yyy or o:xxx
+    pub name: String,
+
+    pub component_type: AppComponentType,
+}
+
+/// Primary Class Install Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct PrimaryClassInstallV0 {
+    /// Primary Class name; can be an official name or a custom name of the form app:n... or app:o:...
+    pub primary_class: String,
+
+    pub components: Vec<AppComponentV0>,
+}
+
+/// App Manifest Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct AppManifestV0 {
+    /// Nuri
+    pub nuri: Option<String>,
+
+    /// Origin (for webapps only)
+    pub origin: Option<String>,
+
+    /// cannot create Documents?
+    pub singleton: bool,
+
+    /// list of Access Requests
+    pub access_requests: Vec<AccessRequestV0>,
+
+    /// installs: list of Viewers, Editors, Services and Models, by PrimaryClass, that will be installed by this app
+    pub installs: HashMap<String, PrimaryClassInstallV0>,
+
+    /// dependencies: list of other apps (Nuri) that need to be installed before this app can be installed
+    pub dependencies: Vec<String>,
+
+    /// optional name. Only for registered or official apps
+    pub name: Option<String>,
+
+    /// optional title. The Broker will enter the domain's homepage title here, if any
+    pub title: Option<String>,
+
+    /// optional description. The Broker will enter the domain's homepage description here, if any
+    pub description: Option<String>,
+
+    /// optional icon. The Broker will enter the domain's homepage favicon here, if any
+    #[serde(with = "serde_bytes")]
+    pub icon: Vec<u8>,
+
+    /// optional image. The Broker will enter the domain's homepage main image here, if any
+    #[serde(with = "serde_bytes")]
+    pub image: Vec<u8>,
+}
+
+/// Web App Manifest
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum AppManifest {
+    V0(AppManifestV0),
+}
+
+impl AppManifest {
+    pub fn new_for_origin_all_access_v0(origin: String) -> Self {
+        AppManifest::V0(AppManifestV0 {
+            nuri: None,
+            origin: Some(origin),
+            singleton: true,
+            access_requests: vec![AccessRequestV0::new_access_all()],
+            installs: HashMap::new(),
+            dependencies: vec![],
+            name: None,
+            title: None,
+            description: None,
+            icon: vec![],
+            image: vec![],
+        })
+    }
+    pub fn new_v0(origin: String, singleton: bool, access_requests: Vec<AccessRequestV0>) -> Self {
+        AppManifest::V0(AppManifestV0 {
+            nuri: None,
+            origin: Some(origin),
+            singleton,
+            access_requests,
+            installs: HashMap::new(),
+            dependencies: vec![],
+            name: None,
+            title: None,
+            description: None,
+            icon: vec![],
+            image: vec![],
+        })
+    }
+    pub fn to_url_param(&self) -> String {
+        let ser = serde_bare::to_vec(self).unwrap();
+        base64_url::encode(&ser)
+    }
+}
+
+/// Access Grant Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct AccessGrantV0 {
+    /// Nuri of the tokenized commitID of this grant
+    pub id: String,
+
+    /// reference to the AccessRequest. Can be None for PermaCaps
+    pub request: Option<String>,
+
+    pub mode: AccessMode,
+
+    /// Usually a PrimaryClass.
+    /// for Runs: name of the service
+    /// for Queries: Nuri of the Sparql, Fragment, ShapeTree or GraphQL
+    /// for Cron: the time interval
+    /// for Share: Stream, e:mail, e:xxx, Contact, Document
+    /// for DeviceCapability: camera, microphone, location, receiveSMS, scanQR, internet
+    pub access_type: String,
+
+    pub scope: AccessScope,
+
+    /// Nuri of the target. Can be None for services
+    pub target: Option<String>,
+
+    /// UserId of the grantee (a user or a robot)
+    pub grantee: UserId,
+
+    /// grant depends on another grant
+    pub depends_on: Option<String>,
+}
+
+
+
diff --git a/ng-wallet/src/types.rs b/ng-wallet/src/types.rs
new file mode 100644
index 0000000..52dfd6f
--- /dev/null
+++ b/ng-wallet/src/types.rs
@@ -0,0 +1,1481 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+use serde::{Deserialize, Serialize};
+use serde_big_array::BigArray;
+use std::collections::hash_map::{DefaultHasher, Keys};
+use std::hash::{Hash, Hasher};
+use std::{collections::HashMap, fmt};
+use web_time::SystemTime;
+use zeroize::{Zeroize, ZeroizeOnDrop};
+
+use ng_repo::errors::NgError;
+#[allow(unused_imports)]
+use ng_repo::log::*;
+use ng_repo::types::*;
+use ng_repo::utils::{encrypt_in_place, generate_keypair};
+
+use ng_net::types::*;
+
+use ng_verifier::site::SiteV0;
+
+/// WalletId is a PubKey
+pub type WalletId = PubKey;
+
+/// BootstrapId is a WalletId
+pub type BootstrapId = WalletId;
+
+/// Bootstrap Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct BootstrapV0 {
+    /// ID
+    pub id: BootstrapId,
+
+    /// Content
+    pub content: BootstrapContentV0,
+
+    /// Signature over content by wallet's private key
+    pub sig: Sig,
+}
+
+/// Bootstrap info
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum Bootstrap {
+    V0(BootstrapV0),
+}
+
+impl Bootstrap {
+    pub fn id(&self) -> BootstrapId {
+        match self {
+            Bootstrap::V0(v0) => v0.id,
+        }
+    }
+    pub fn content_as_bytes(&self) -> Vec<u8> {
+        match self {
+            Bootstrap::V0(v0) => serde_bare::to_vec(&v0.content).unwrap(),
+        }
+    }
+    pub fn sig(&self) -> Sig {
+        match self {
+            Bootstrap::V0(v0) => v0.sig,
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct SessionWalletStorageV0 {
+    // the string is the base64_url encoding of the userId (pubkey)
+    pub users: HashMap<String, SessionPeerStorageV0>,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum SessionWalletStorage {
+    V0(SessionWalletStorageV0),
+}
+
+impl SessionWalletStorageV0 {
+    pub fn new() -> Self {
+        SessionWalletStorageV0 {
+            users: HashMap::new(),
+        }
+    }
+    pub fn dec_session(
+        wallet_key: PrivKey,
+        vec: &Vec<u8>,
+    ) -> Result<SessionWalletStorageV0, NgWalletError> {
+        let session_ser = crypto_box::seal_open(&(*wallet_key.to_dh().slice()).into(), vec)
+            .map_err(|_| NgWalletError::DecryptionError)?;
+        let session: SessionWalletStorage =
+            serde_bare::from_slice(&session_ser).map_err(|_| NgWalletError::SerializationError)?;
+        let SessionWalletStorage::V0(v0) = session;
+        Ok(v0)
+    }
+
+    pub fn create_new_session(
+        wallet_id: &PubKey,
+        user: PubKey,
+    ) -> Result<(SessionPeerStorageV0, Vec<u8>), NgWalletError> {
+        let mut sws = SessionWalletStorageV0::new();
+        let sps = SessionPeerStorageV0::new(user);
+        sws.users.insert(sps.user.to_string(), sps.clone());
+        let cipher = sws.enc_session(wallet_id)?;
+        Ok((sps, cipher))
+    }
+
+    pub fn enc_session(&self, wallet_id: &PubKey) -> Result<Vec<u8>, NgWalletError> {
+        let sws_ser = serde_bare::to_vec(&SessionWalletStorage::V0(self.clone())).unwrap();
+        let mut rng = crypto_box::aead::OsRng {};
+        let cipher = crypto_box::seal(&mut rng, &wallet_id.to_dh_slice().into(), &sws_ser)
+            .map_err(|_| NgWalletError::EncryptionError)?;
+        Ok(cipher)
+    }
+
+    // pub fn get_first_user_peer_nonce(&self) -> Result<(PubKey, u64), NgWalletError> {
+    //     if self.users.len() > 1 {
+    //         panic!("get_first_user_peer_nonce does not work as soon as there is more than one user in SessionWalletStorageV0")
+    //     };
+    //     let first = self.users.values().next();
+    //     if first.is_none() {
+    //         return Err(NgWalletError::InternalError);
+    //     }
+    //     let sps = first.unwrap();
+    //     Ok((sps.peer_key.to_pub(), sps.last_wallet_nonce))
+    // }
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct SessionInfoString {
+    pub session_id: u64,
+    pub user: String,
+    pub private_store_id: String,
+    pub protected_store_id: String,
+    pub public_store_id: String,
+}
+
+impl From<SessionInfo> for SessionInfoString {
+    fn from(f: SessionInfo) -> Self {
+        SessionInfoString {
+            session_id: f.session_id,
+            private_store_id: f.private_store_id,
+            protected_store_id: f.protected_store_id,
+            public_store_id: f.public_store_id,
+            user: f.user.to_string(),
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct SessionInfo {
+    pub session_id: u64,
+    pub user: UserId,
+    pub private_store_id: String,
+    pub protected_store_id: String,
+    pub public_store_id: String,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct SessionPeerStorageV0 {
+    pub user: UserId,
+    pub peer_key: PrivKey,
+    /// The current nonce used for encrypting this wallet by the user on this device.
+    /// It should be incremented BEFORE encrypting the wallet again,
+    /// when some new operations have been added to the log of the Wallet.
+    /// The nonce is per PeerId. It is saved together with the PeerId in the SessionPeerStorage.
+    /// If the session is not saved (in-memory), it is lost, but that is fine, as the PeerId is also lost, and a new one
+    /// will be generated for the next session.
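+    /// Illustrative update sequence (an assumption about the calling code, not enforced here):
+    /// increment `last_wallet_nonce` first, then call `Wallet::encrypt()` with the new value.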
+    pub last_wallet_nonce: u64,
+}
+
+impl SessionPeerStorageV0 {
+    pub fn new(user: UserId) -> Self {
+        let peer = generate_keypair();
+        SessionPeerStorageV0 {
+            user,
+            peer_key: peer.0,
+            last_wallet_nonce: 0,
+        }
+    }
+}
+
+#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop, Serialize, Deserialize)]
+pub struct LocalClientStorageV0 {
+    pub priv_key: PrivKey,
+    pub storage_master_key: SymKey,
+}
+
+impl LocalClientStorageV0 {
+    fn crypt(text: &mut Vec<u8>, client: ClientId, wallet_privkey: PrivKey) {
+        let client_ser = serde_bare::to_vec(&client).unwrap();
+        let wallet_privkey_ser = serde_bare::to_vec(&wallet_privkey).unwrap();
+        let mut key_material = [client_ser, wallet_privkey_ser].concat();
+
+        let mut key: [u8; 32] = blake3::derive_key(
+            "NextGraph LocalClientStorageV0 BLAKE3 key",
+            key_material.as_slice(),
+        );
+
+        encrypt_in_place(text, key, [0; 12]);
+        key.zeroize();
+        key_material.zeroize();
+    }
+
+    pub fn decrypt(
+        ciphertext: &mut Vec<u8>,
+        client: ClientId,
+        wallet_privkey: PrivKey,
+    ) -> Result<LocalClientStorageV0, NgWalletError> {
+        Self::crypt(ciphertext, client, wallet_privkey);
+
+        let res =
+            serde_bare::from_slice(&ciphertext).map_err(|_| NgWalletError::DecryptionError)?;
+
+        ciphertext.zeroize();
+
+        Ok(res)
+    }
+
+    pub fn encrypt(
+        &self,
+        client: ClientId,
+        wallet_privkey: PrivKey,
+    ) -> Result<Vec<u8>, NgWalletError> {
+        let mut ser = serde_bare::to_vec(self).map_err(|_| NgWalletError::EncryptionError)?;
+
+        Self::crypt(&mut ser, client, wallet_privkey);
+
+        Ok(ser)
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct LocalWalletStorageV0 {
+    pub in_memory: bool,
+    pub bootstrap: BootstrapContent,
+    pub wallet: Wallet,
+    pub client_id: ClientId,
+    pub client_auto_open: Vec<PubKey>,
+    pub client_name: Option<String>,
+    #[serde(with = "serde_bytes")]
+    pub encrypted_client_storage: Vec<u8>,
+}
+
+impl From<&CreateWalletIntermediaryV0> for LocalWalletStorageV0 {
+    fn from(res: &CreateWalletIntermediaryV0) -> Self {
+        LocalWalletStorageV0 {
+            bootstrap: BootstrapContent::V0(BootstrapContentV0::new_empty()),
+            wallet: Wallet::TemporarilyEmpty,
+            in_memory: res.in_memory,
+            client_id: res.client.id,
+            client_auto_open: res.client.auto_open.clone(),
+            client_name: res.client.name.clone(),
+            encrypted_client_storage: res
+                .client
+                .sensitive_client_storage
+                .encrypt(res.client.id, res.wallet_privkey.clone())
+                .unwrap(),
+        }
+    }
+}
+
+impl From<&CreateWalletIntermediaryV0> for SensitiveWalletV0 {
+    fn from(res: &CreateWalletIntermediaryV0) -> Self {
+        SensitiveWalletV0 {
+            wallet_privkey: res.wallet_privkey.clone(),
+            wallet_id: res.wallet_name.clone(),
+            save_recovery_kit: if res.send_wallet {
+                SaveToNGOne::Wallet
+            } else if res.send_bootstrap {
+                SaveToNGOne::Bootstrap
+            } else {
+                SaveToNGOne::No
+            },
+            // for now, personal_site is null. It will be replaced later.
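+            // (complete_with_site_and_brokers(), defined below, fills it in once the
+            // personal site has been created)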
+            personal_site: PubKey::nil(),
+            personal_site_id: "".to_string(),
+            sites: HashMap::new(),
+            brokers: HashMap::new(),
+            overlay_core_overrides: HashMap::new(),
+            third_parties: HashMap::new(),
+            log: None,
+            master_key: None,
+            client: None,
+        }
+    }
+}
+
+impl From<&CreateWalletIntermediaryV0> for SensitiveWallet {
+    fn from(res: &CreateWalletIntermediaryV0) -> SensitiveWallet {
+        SensitiveWallet::V0(res.into())
+    }
+}
+
+impl LocalWalletStorageV0 {
+    #[doc(hidden)]
+    pub fn new(
+        encrypted_wallet: Wallet,
+        wallet_priv_key: PrivKey,
+        client: &ClientV0,
+        in_memory: bool,
+    ) -> Result<Self, NgWalletError> {
+        Ok(LocalWalletStorageV0 {
+            bootstrap: BootstrapContent::V0(BootstrapContentV0::new_empty()),
+            wallet: encrypted_wallet,
+            in_memory,
+            client_id: client.id,
+            client_auto_open: client.auto_open.clone(),
+            client_name: client.name.clone(),
+            encrypted_client_storage: client
+                .sensitive_client_storage
+                .encrypt(client.id, wallet_priv_key)?,
+        })
+    }
+    #[doc(hidden)]
+    pub fn to_client_v0(&self, wallet_privkey: PrivKey) -> Result<ClientV0, NgWalletError> {
+        Ok(ClientV0 {
+            id: self.client_id,
+            auto_open: self.client_auto_open.clone(),
+            name: self.client_name.clone(),
+            sensitive_client_storage: self.local_client_storage_v0(wallet_privkey)?,
+        })
+    }
+
+    /// decrypts the client_storage field, given the wallet's PrivKey
+    pub fn local_client_storage_v0(
+        &self,
+        wallet_privkey: PrivKey,
+    ) -> Result<LocalClientStorageV0, NgWalletError> {
+        let mut cipher = self.encrypted_client_storage.clone();
+        LocalClientStorageV0::decrypt(&mut cipher, self.client_id, wallet_privkey)
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum LocalWalletStorage {
+    V0(HashMap<String, LocalWalletStorageV0>),
+}
+
+impl LocalWalletStorage {
+    pub fn v0_from_vec(vec: &Vec<u8>) -> Result<Self, NgError> {
+        let wallets: LocalWalletStorage = serde_bare::from_slice(vec)?;
+        Ok(wallets)
+    }
+    pub fn v0_to_vec(wallets: &HashMap<String, LocalWalletStorageV0>) -> Vec<u8> {
+        serde_bare::to_vec(&LocalWalletStorage::V0(wallets.clone())).unwrap()
+    }
+}
+
+/// Device info Version 0
+#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop, Serialize, Deserialize)]
+pub struct ClientV0 {
+    #[zeroize(skip)]
+    /// ClientID
+    pub id: PubKey,
+
+    /// list of users that should be opened automatically (at launch, after the wallet is opened) on this device
+    #[zeroize(skip)]
+    pub auto_open: Vec<PubKey>,
+
+    /// user-supplied device name. Can be useful to distinguish between several devices (phone, tablet, laptop, office desktop, etc.)
+    #[zeroize(skip)]
+    pub name: Option<String>,
+
+    /// contains the decrypted information needed when the user is opening their wallet on this client.
+    pub sensitive_client_storage: LocalClientStorageV0,
+}
+
+impl ClientV0 {
+    pub fn id(&self) -> String {
+        self.id.to_string()
+    }
+
+    #[deprecated(note = "**Don't use nil method**")]
+    #[allow(deprecated)]
+    pub fn nil() -> Self {
+        ClientV0 {
+            id: PubKey::nil(),
+            sensitive_client_storage: LocalClientStorageV0 {
+                priv_key: PrivKey::nil(),
+                storage_master_key: SymKey::nil(),
+            },
+            auto_open: vec![],
+            name: None,
+        }
+    }
+
+    #[cfg(test)]
+    #[allow(deprecated)]
+    pub fn dummy() -> Self {
+        Self::nil()
+    }
+
+    pub fn new_with_auto_open(user: PubKey) -> Self {
+        let (priv_key, id) = generate_keypair();
+        ClientV0 {
+            id,
+            sensitive_client_storage: LocalClientStorageV0 {
+                priv_key,
+                storage_master_key: SymKey::random(),
+            },
+            auto_open: vec![user],
+            name: None,
+        }
+    }
+
+    pub fn new() -> Self {
+        let (priv_key, id) = generate_keypair();
+        ClientV0 {
+            id,
+            sensitive_client_storage: LocalClientStorageV0 {
+                priv_key,
+                storage_master_key: SymKey::random(),
+            },
+            auto_open: vec![],
+            name: None,
+        }
+    }
+}
+
+/// Save to nextgraph.one
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum SaveToNGOne {
+    No,
+    Bootstrap,
+    Wallet,
+}
+
+/// SensitiveWallet block Version 0
+#[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize)]
+pub struct SensitiveWalletV0 {
+    pub wallet_privkey: PrivKey,
+
+    #[zeroize(skip)]
+    pub wallet_id: String,
+
+    //#[serde(with = "serde_bytes")]
+    //pub pazzle: Vec<u8>,
+
+    //pub mnemonic: [u16; 12],
+
+    //pub pin: [u8; 4],
+    #[zeroize(skip)]
+    pub save_recovery_kit: SaveToNGOne,
+
+    #[zeroize(skip)]
+    pub personal_site: PubKey,
+
+    #[zeroize(skip)]
+    pub personal_site_id: String,
+
+    #[zeroize(skip)]
+    pub sites: HashMap<String, SiteV0>,
+
+    // map of brokers and their connection details
+    #[zeroize(skip)]
+    pub brokers: HashMap<String, Vec<BrokerInfoV0>>,
+
+    // map of all devices of the user
+    //#[zeroize(skip)]
+    //pub clients: HashMap<String, ClientV0>,
+    #[zeroize(skip)]
+    pub overlay_core_overrides: HashMap<String, Vec<PubKey>>,
+
+    /// third-party data saved in the wallet. The string (key) in the hashmap should be unique among vendors.
+    /// The format of the byte array (value) is up to the vendor, to serde as needed.
+    #[zeroize(skip)]
+    pub third_parties: HashMap<String, serde_bytes::ByteBuf>,
+
+    #[zeroize(skip)]
+    pub log: Option<WalletLogV0>,
+
+    pub master_key: Option<[u8; 32]>,
+
+    pub client: Option<ClientV0>,
+}
+
+/// SensitiveWallet block
+#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop, Serialize, Deserialize)]
+pub enum SensitiveWallet {
+    V0(SensitiveWalletV0),
+}
+
+impl SensitiveWallet {
+    pub fn get_bootstrap_iframe_msgs(
+        brokers: HashMap<String, Vec<BrokerInfoV0>>,
+    ) -> Vec<BootstrapIframeMsg> {
+        brokers
+            .values()
+            .flatten()
+            .filter_map(|broker_info| match broker_info {
+                BrokerInfoV0::CoreV0(_) => None,
+                BrokerInfoV0::ServerV0(s) => Some(s.to_iframe_msg()),
+            })
+            .collect::<Vec<BootstrapIframeMsg>>()
+    }
+    pub fn privkey(&self) -> PrivKey {
+        match self {
+            Self::V0(v0) => v0.wallet_privkey.clone(),
+        }
+    }
+    pub fn id(&self) -> String {
+        match self {
+            Self::V0(v0) => v0.wallet_id.clone(),
+        }
+    }
+    // TODO: this is unfortunate; id() should return the PubKey, and name() should return the String.
+    pub fn name(&self) -> String {
+        self.id()
+    }
+    pub fn client(&self) -> &Option<ClientV0> {
+        match self {
+            Self::V0(v0) => &v0.client,
+        }
+    }
+    pub fn site_names(&self) -> Keys<String, SiteV0> {
+        match self {
+            Self::V0(v0) => v0.sites.keys(),
+        }
+    }
+    pub fn site(&self, user_id: &UserId) -> Result<&SiteV0, NgError> {
+        match self {
+            Self::V0(v0) => match v0.sites.get(&user_id.to_string()) {
+                Some(site) => Ok(site),
+                None => Err(NgError::UserNotFound),
+            },
+        }
+    }
+    pub fn broker(&self, id: DirectPeerId) -> Result<Vec<BrokerInfoV0>, NgError> {
+        match self {
+            Self::V0(v0) => match v0.brokers.get(&id.to_string()) {
+                Some(broker_info) => Ok(broker_info.to_vec()),
+                None => Err(NgError::BrokerNotFound),
+            },
+        }
+    }
+    pub fn set_client(&mut self, client: ClientV0) {
+        match self {
+            Self::V0(v0) => v0.client = Some(client),
+        }
+    }
+    pub fn individual_site(
+        &self,
+        user_id: &UserId,
+    ) -> Option<(
+        PrivKey,
+        Option<ReadCap>,
+        Option<RepoId>,
+        Option<RepoId>,
+        Option<RepoId>,
+    )> {
+        match self {
+            Self::V0(v0) => match v0.sites.get(&user_id.to_string()) {
+                Some(site) => match &site.site_type {
+                    SiteType::Individual((user, readcap)) => Some((
+                        user.clone(),
+                        Some(readcap.clone()),
+                        Some(site.private.id),
+                        Some(site.protected.id),
+                        Some(site.public.id),
+                    )),
+                    _ => None,
+                },
+                None => None,
+            },
+        }
+    }
+    pub fn has_user(&self, user_id: &UserId) -> bool {
+        match self {
+            Self::V0(v0) => v0.sites.get(&user_id.to_string()).is_some(),
+        }
+    }
+    pub fn personal_identity(&self) -> UserId {
+        match self {
+            Self::V0(v0) => v0.personal_site,
+        }
+    }
+    pub fn import_v0(
+        &mut self,
+        encrypted_wallet: Wallet,
+        in_memory: bool,
+    ) -> Result<LocalWalletStorageV0, NgWalletError> {
+        match self {
+            Self::V0(v0) => v0.import(encrypted_wallet, in_memory),
+        }
+    }
+
+    pub fn complete_with_site_and_brokers(
+        &mut self,
+        site: SiteV0,
+        brokers: HashMap<String, Vec<BrokerInfoV0>>,
+    ) {
+        match self {
+            Self::V0(v0) => v0.complete_with_site_and_brokers(site, brokers),
+        }
+    }
+}
+
+impl SensitiveWalletV0 {
+    pub fn import(
+        &mut self,
+        encrypted_wallet: Wallet,
+        in_memory: bool,
+    ) -> Result<LocalWalletStorageV0, NgWalletError> {
+        // Creating a new client
+        // TODO: create the client with auto_open taken from the wallet log?
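+        // (presumably invoked when an existing wallet is imported on a new device:
+        // a fresh ClientV0 keypair is generated below, and the encrypted wallet is
+        // wrapped into a LocalWalletStorageV0)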
+        let client = ClientV0::new_with_auto_open(self.personal_site);
+
+        let lws = LocalWalletStorageV0::new(
+            encrypted_wallet,
+            self.wallet_privkey.clone(),
+            &client,
+            in_memory,
+        )?;
+
+        self.client = Some(client);
+
+        Ok(lws)
+    }
+    pub fn add_site(&mut self, site: SiteV0) {
+        let site_id = site.id;
+        let _ = self.sites.insert(site_id.to_string(), site);
+    }
+    pub fn add_brokers(&mut self, brokers: Vec<BrokerInfoV0>) {
+        for broker in brokers {
+            let key = broker.get_id().to_string();
+            let mut list = self.brokers.get_mut(&key);
+            if list.is_none() {
+                let new_list = vec![];
+                self.brokers.insert(key.clone(), new_list);
+                list = self.brokers.get_mut(&key);
+            }
+            list.unwrap().push(broker);
+        }
+    }
+    // pub fn add_client(&mut self, client: ClientV0) {
+    //     let client_id = client.priv_key.to_pub().to_string();
+    //     let _ = self.clients.insert(client_id, client);
+    // }
+    pub fn add_overlay_core_overrides(&mut self, overlay: &OverlayId, cores: &Vec<PubKey>) {
+        let _ = self
+            .overlay_core_overrides
+            .insert(overlay.to_string(), cores.to_vec());
+    }
+
+    pub fn complete_with_site_and_brokers(
+        &mut self,
+        site: SiteV0,
+        brokers: HashMap<String, Vec<BrokerInfoV0>>,
+    ) {
+        let personal_site = site.id;
+        let personal_site_id = personal_site.to_string();
+        self.personal_site = personal_site;
+        self.personal_site_id = personal_site_id.clone();
+        self.sites.insert(personal_site_id, site);
+        self.brokers = brokers;
+    }
+}
+
+/// Wallet content Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct WalletContentV0 {
+    #[serde(with = "serde_bytes")]
+    pub security_img: Vec<u8>,
+
+    pub security_txt: String,
+
+    /// can be 9, 12 or 15 (or 0, in which case salt_pazzle and enc_master_key_pazzle are filled with zeros and should not be used)
+    pub pazzle_length: u8,
+
+    pub salt_pazzle: [u8; 16],
+
+    pub salt_mnemonic: [u8; 16],
+
+    // encrypted master keys. The first is encrypted with the pazzle, the second with the mnemonic.
+    // AD = wallet_id
+    #[serde(with = "BigArray")]
+    pub enc_master_key_pazzle: [u8; 48],
+    #[serde(with = "BigArray")]
+    pub enc_master_key_mnemonic: [u8; 48],
+
+    // nonce for the encryption of the master key,
+    // incremented only if the master key changes.
+    // Be very careful with incrementing this, as a conflict would result in a total loss of the crypto guarantees.
+    pub master_nonce: u8,
+
+    pub timestamp: Timestamp,
+
+    // the peerId that updated this version of the Wallet;
+    // this value is truncated by half and concatenated with the nonce
+    pub peer_id: PubKey,
+    pub nonce: u64,
+
+    // WalletLog content encrypted with XChaCha20Poly1305, AD = timestamp and walletID
+    #[serde(with = "serde_bytes")]
+    pub encrypted: Vec<u8>,
+}
+
+/// Wallet Log V0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct WalletLogV0 {
+    pub log: Vec<(u128, WalletOperation)>,
+}
+
+/// Wallet Log
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum WalletLog {
+    V0(WalletLogV0),
+}
+
+impl WalletLog {
+    pub fn new_v0(create_op: WalletOpCreateV0) -> Self {
+        WalletLog::V0(WalletLogV0::new(create_op))
+    }
+    pub fn add(&mut self, op: WalletOperation) {
+        match self {
+            Self::V0(v0) => v0.add(op),
+        }
+    }
+}
+
+impl WalletLogV0 {
+    pub fn new(create_op: WalletOpCreateV0) -> Self {
+        let mut wallet = WalletLogV0 { log: vec![] };
+        wallet.add(WalletOperation::CreateWalletV0(create_op));
+        wallet
+    }
+
+    pub fn add(&mut self, op: WalletOperation) {
+        let duration = SystemTime::now()
+            .duration_since(SystemTime::UNIX_EPOCH)
+            .unwrap()
+            .as_nanos();
+        self.log.push((duration, op));
+    }
+
+    /// applies all the operations and produces the SensitiveWalletV0 object.
+    pub fn reduce(self, master_key: [u8; 32]) -> Result<SensitiveWalletV0, NgWalletError> {
+        if self.log.len() < 1 {
+            Err(NgWalletError::NoCreateWalletPresent)
+        } else if let (_, WalletOperation::CreateWalletV0(create_op)) = &self.log[0] {
+            let mut wallet: SensitiveWalletV0 = create_op.into();
+            wallet.master_key = Some(master_key);
+
+            for op in &self.log {
+                match &op.1 {
+                    WalletOperation::CreateWalletV0(_) => { /* intentionally left blank. this op is already reduced */
+                    }
+                    WalletOperation::AddSiteV0(o) => {
+                        if self.is_first_and_not_deleted_afterwards(op, "RemoveSiteV0") {
+                            wallet.add_site(o.clone());
+                        }
+                    }
+                    WalletOperation::RemoveSiteV0(_) => {}
+                    WalletOperation::AddBrokerServerV0(o) => {
+                        if self.is_last_and_not_deleted_afterwards(op, "RemoveBrokerServerV0") {
+                            wallet.add_brokers(vec![BrokerInfoV0::ServerV0(o.clone())]);
+                        }
+                    }
+                    WalletOperation::RemoveBrokerServerV0(_) => {}
+                    WalletOperation::SetSaveToNGOneV0(o) => {
+                        if self.is_last_occurrence(op.0, &op.1) != 0 {
+                            wallet.save_recovery_kit = o.clone();
+                        }
+                    }
+                    WalletOperation::SetBrokerCoreV0(o) => {
+                        if self.is_last_occurrence(op.0, &op.1) != 0 {
+                            wallet.add_brokers(vec![BrokerInfoV0::CoreV0(o.clone())]);
+                        }
+                    }
+                    // WalletOperation::SetClientV0(o) => {
+                    //     if self.is_last_occurrence(op.0, &op.1) != 0 {
+                    //         wallet.add_client(o.clone());
+                    //     }
+                    // }
+                    WalletOperation::AddOverlayCoreOverrideV0((overlay, cores)) => {
+                        if self
+                            .is_last_and_not_deleted_afterwards(op, "RemoveOverlayCoreOverrideV0")
+                        {
+                            wallet.add_overlay_core_overrides(overlay, cores);
+                        }
+                    }
+                    WalletOperation::RemoveOverlayCoreOverrideV0(_) => {}
+                    WalletOperation::AddSiteCoreV0((site, core, registration)) => {
+                        if self.is_first_and_not_deleted_afterwards(op, "RemoveSiteCoreV0") {
+                            let _ = wallet.sites.get_mut(&site.to_string()).and_then(|site| {
+                                site.cores.push((*core, *registration));
+                                None::<SiteV0>
+                            });
+                        }
+                    }
+                    WalletOperation::RemoveSiteCoreV0(_) => {}
+                    WalletOperation::AddSiteBootstrapV0((site, server)) => {
+                        if self.is_first_and_not_deleted_afterwards(op, "RemoveSiteBootstrapV0") {
+                            let _ = wallet.sites.get_mut(&site.to_string()).and_then(|site| {
+                                site.bootstraps.push(*server);
+                                None::<SiteV0>
+                            });
+                        }
+                    }
+                    WalletOperation::RemoveSiteBootstrapV0(_) => {}
+                    WalletOperation::AddThirdPartyDataV0((key, value)) => {
+                        if self.is_last_and_not_deleted_afterwards(op, "RemoveThirdPartyDataV0") {
+                            let _ =
+                                wallet.third_parties.insert(key.to_string(), value.clone());
+                        }
+                    }
+                    WalletOperation::RemoveThirdPartyDataV0(_) => {}
+                    // WalletOperation::SetSiteRBDRefV0((site, store_type, rbdr)) => {
+                    //     if self.is_last_occurrence(op.0, &op.1) != 0 {
+                    //         let _ = wallet.sites.get_mut(&site.to_string()).and_then(|site| {
+                    //             match store_type {
+                    //                 SiteStoreType::Public => site.public.read_cap = rbdr.clone(),
+                    //                 SiteStoreType::Protected => {
+                    //                     site.protected.read_cap = rbdr.clone()
+                    //                 }
+                    //                 SiteStoreType::Private => site.private.read_cap = rbdr.clone(),
+                    //             };
+                    //             None::<SiteV0>
+                    //         });
+                    //     }
+                    // }
+                    // WalletOperation::SetSiteRepoSecretV0((site, store_type, secret)) => {
+                    //     if self.is_last_occurrence(op.0, &op.1) != 0 {
+                    //         let _ = wallet.sites.get_mut(&site.to_string()).and_then(|site| {
+                    //             match store_type {
+                    //                 SiteStoreType::Public => site.public.write_cap = secret.clone(),
+                    //                 SiteStoreType::Protected => {
+                    //                     site.protected.write_cap = secret.clone()
+                    //                 }
+                    //                 SiteStoreType::Private => {
+                    //                     site.private.write_cap = secret.clone()
+                    //                 }
+                    //             };
+                    //             None::<SiteV0>
+                    //         });
+                    //     }
+                    // }
+                }
+            }
+            //log_debug!("reduced {:?}", wallet);
+            wallet.log = Some(self);
+            Ok(wallet)
+        } else {
+            Err(NgWalletError::NoCreateWalletPresent)
+        }
+    }
+
+    pub fn is_first_and_not_deleted_afterwards(
+        &self,
+        item: &(u128, WalletOperation),
+        delete_type: &str,
+    ) -> bool {
+        let hash = self.is_first_occurrence(item.0, &item.1);
+        if hash != 0 {
+            // check that it hasn't been deleted since the first occurrence
+            let deleted = self.find_first_occurrence_of_type_and_hash_after_timestamp(
+                delete_type,
+                hash,
+                item.0,
+            );
+            return deleted.is_none();
+        }
+        false
+    }
+
+    pub fn is_last_and_not_deleted_afterwards(
+        &self,
+        item: &(u128, WalletOperation),
+        delete_type: &str,
+    ) -> bool {
+        let hash = self.is_last_occurrence(item.0, &item.1);
+        if hash != 0 {
+            // check that it hasn't been deleted since the last occurrence
+            let deleted = self.find_first_occurrence_of_type_and_hash_after_timestamp(
+                delete_type,
+                hash,
+                item.0,
+            );
+            return deleted.is_none();
+        }
+        false
+    }
+
+    pub fn is_first_occurrence(&self, timestamp: u128, searched_op: &WalletOperation) -> u64 {
+        let searched_hash = searched_op.hash();
+        //let mut timestamp = u128::MAX;
+        //let mut found = searched_op;
+        for op in &self.log {
+            let hash = op.1.hash();
+            if hash.0 == searched_hash.0 && op.0 < timestamp && hash.1 == searched_hash.1 {
+                //timestamp = op.0;
+                //found = &op.1;
+                return 0;
+            }
+        }
+        searched_hash.0
+    }
+
+    pub fn is_last_occurrence(&self, timestamp: u128, searched_op: &WalletOperation) -> u64 {
+        let searched_hash = searched_op.hash();
+        //let mut timestamp = 0u128;
+        //let mut found = searched_op;
+        for op in &self.log {
+            let hash = op.1.hash();
+            if hash.0 == searched_hash.0 && op.0 > timestamp && hash.1 == searched_hash.1 {
+                //timestamp = op.0;
+                //found = &op.1;
+                return 0;
+            }
+        }
+        searched_hash.0
+    }
+
+    pub fn find_first_occurrence_of_type_and_hash_after_timestamp(
+        &self,
+        searched_type: &str,
+        searched_hash: u64,
+        after: u128,
+    ) -> Option<(u128, &WalletOperation)> {
+        let mut timestamp = u128::MAX;
+        let mut found = None;
+        for op in &self.log {
+            let hash = op.1.hash();
+            if hash.0 == searched_hash
+                && op.0 > after
+                && op.0 < timestamp
+                && hash.1 == searched_type
+            {
+                timestamp = op.0;
+                found = Some(&op.1);
+            }
+        }
+        found.map(|f| (timestamp, f))
+    }
+}
+
+/// WalletOperation
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum WalletOperation {
+    CreateWalletV0(WalletOpCreateV0),
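+    // Reduction note (see WalletLogV0::reduce() above): AddSiteV0, AddSiteCoreV0 and
+    // AddSiteBootstrapV0 are applied on their FIRST occurrence (unless a matching
+    // Remove* op follows them), while AddBrokerServerV0, AddOverlayCoreOverrideV0 and
+    // AddThirdPartyDataV0 are applied on their LAST occurrence.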
+
+/// WalletOperation
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum WalletOperation {
+    CreateWalletV0(WalletOpCreateV0),
+    AddSiteV0(SiteV0),
+    RemoveSiteV0(PrivKey),
+    AddBrokerServerV0(BrokerServerV0),
+    RemoveBrokerServerV0(BrokerServerV0),
+    SetSaveToNGOneV0(SaveToNGOne),
+    SetBrokerCoreV0(BrokerCoreV0),
+    //SetClientV0(ClientV0),
+    AddOverlayCoreOverrideV0((OverlayId, Vec<PubKey>)),
+    RemoveOverlayCoreOverrideV0(OverlayId),
+    AddSiteCoreV0((PubKey, PubKey, Option<[u8; 32]>)),
+    RemoveSiteCoreV0((PubKey, PubKey)),
+    AddSiteBootstrapV0((PubKey, PubKey)),
+    RemoveSiteBootstrapV0((PubKey, PubKey)),
+    AddThirdPartyDataV0((String, serde_bytes::ByteBuf)),
+    RemoveThirdPartyDataV0(String),
+    //SetSiteRBDRefV0((PubKey, SiteStoreType, ObjectRef)),
+    //SetSiteRepoSecretV0((PubKey, SiteStoreType, RepoWriteCapSecret)),
+}
+
+impl WalletOperation {
+    pub fn hash(&self) -> (u64, &str) {
+        let mut s = DefaultHasher::new();
+        match self {
+            Self::CreateWalletV0(_t) => (0, "CreateWalletV0"),
+            Self::AddSiteV0(t) => {
+                t.id.hash(&mut s);
+                (s.finish(), "AddSiteV0")
+            }
+            Self::RemoveSiteV0(t) => {
+                t.hash(&mut s);
+                (s.finish(), "RemoveSiteV0")
+            }
+            Self::AddBrokerServerV0(t) => {
+                t.hash(&mut s);
+                (s.finish(), "AddBrokerServerV0")
+            }
+            Self::RemoveBrokerServerV0(t) => {
+                t.hash(&mut s);
+                (s.finish(), "RemoveBrokerServerV0")
+            }
+            Self::SetSaveToNGOneV0(_t) => (0, "SetSaveToNGOneV0"),
+            Self::SetBrokerCoreV0(t) => {
+                t.peer_id.hash(&mut s);
+                (s.finish(), "SetBrokerCoreV0")
+            }
+            // Self::SetClientV0(t) => {
+            //     t.priv_key.hash(&mut s);
+            //     (s.finish(), "SetClientV0")
+            // }
+            Self::AddOverlayCoreOverrideV0(t) => {
+                t.0.hash(&mut s);
+                (s.finish(), "AddOverlayCoreOverrideV0")
+            }
+            Self::RemoveOverlayCoreOverrideV0(t) => {
+                t.hash(&mut s);
+                (s.finish(), "RemoveOverlayCoreOverrideV0")
+            }
+            Self::AddSiteCoreV0(t) => {
+                t.0.hash(&mut s);
+                t.1.hash(&mut s);
+                (s.finish(), "AddSiteCoreV0")
+            }
+            Self::RemoveSiteCoreV0(t) => {
+                t.0.hash(&mut s);
+                t.1.hash(&mut s);
+                (s.finish(), "RemoveSiteCoreV0")
+            }
+            Self::AddSiteBootstrapV0(t) => {
+                t.0.hash(&mut s);
+                t.1.hash(&mut s);
+                (s.finish(), "AddSiteBootstrapV0")
+            }
+            Self::RemoveSiteBootstrapV0(t) => {
+                t.0.hash(&mut s);
+                t.1.hash(&mut s);
+                (s.finish(), "RemoveSiteBootstrapV0")
+            }
+            Self::AddThirdPartyDataV0(t) => {
+                t.0.hash(&mut s);
+                (s.finish(), "AddThirdPartyDataV0")
+            }
+            Self::RemoveThirdPartyDataV0(t) => {
+                t.hash(&mut s);
+                (s.finish(), "RemoveThirdPartyDataV0")
+            }
+            // Self::SetSiteRBDRefV0(t) => {
+            //     t.0.hash(&mut s);
+            //     t.1.hash(&mut s);
+            //     (s.finish(), "SetSiteRBDRefV0")
+            // }
+            // Self::SetSiteRepoSecretV0(t) => {
+            //     t.0.hash(&mut s);
+            //     t.1.hash(&mut s);
+            //     (s.finish(), "SetSiteRepoSecretV0")
+            // }
+        }
+    }
+}
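A detail worth noting in hash() above: only the identifying fields of a variant are digested (the site id, the broker peer_id, the third-party key), and the result is paired with the variant's name. Two operations on the same key therefore share one identity, while an add and its matching remove share the hash but differ in the type tag, which is exactly what find_first_occurrence_of_type_and_hash_after_timestamp matches on. A tiny illustration (hypothetical key strings, std hasher only):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn op_id(variant: &'static str, key: &str) -> (u64, &'static str) {
    let mut s = DefaultHasher::new();
    key.hash(&mut s);
    (s.finish(), variant)
}

fn main() {
    // Two AddThirdPartyDataV0 ops for the same key have the same identity:
    // the reducer keeps only the latest of them.
    assert_eq!(
        op_id("AddThirdPartyDataV0", "k1"),
        op_id("AddThirdPartyDataV0", "k1")
    );
    // The matching remove shares the key hash but carries a different type tag,
    // so it can be looked up as "the deletion of this entry".
    let add = op_id("AddThirdPartyDataV0", "k1");
    let del = op_id("RemoveThirdPartyDataV0", "k1");
    assert_eq!(add.0, del.0);
    assert_ne!(add.1, del.1);
}
```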
+
+/// WalletOp Create V0
+/// first operation in the log
+/// also serialized and encoded in the Rescue QRcode
+#[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize)]
+pub struct WalletOpCreateV0 {
+    pub wallet_privkey: PrivKey,
+
+    // #[serde(skip)]
+    // pub pazzle: Vec<u8>,
+
+    // #[serde(skip)]
+    // pub mnemonic: [u16; 12],
+
+    // #[serde(skip)]
+    // pub pin: [u8; 4],
+    #[zeroize(skip)]
+    pub save_recovery_kit: SaveToNGOne,
+
+    #[zeroize(skip)]
+    pub personal_site: SiteV0,
+    // list of brokers and their connection details
+    //#[zeroize(skip)]
+    //pub brokers: Vec<BrokerInfoV0>,
+    //#[serde(skip)]
+    //pub client: ClientV0,
+}
+
+impl From<&WalletOpCreateV0> for SensitiveWalletV0 {
+    fn from(op: &WalletOpCreateV0) -> Self {
+        let personal_site = op.personal_site.id;
+        let mut wallet = SensitiveWalletV0 {
+            wallet_privkey: op.wallet_privkey.clone(),
+            wallet_id: op.wallet_privkey.to_pub().to_string(),
+            //pazzle: op.pazzle.clone(),
+            //mnemonic: op.mnemonic.clone(),
+            //pin: op.pin.clone(),
+            save_recovery_kit: op.save_recovery_kit.clone(),
+            personal_site,
+            personal_site_id: personal_site.to_string(),
+            sites: HashMap::new(),
+            brokers: HashMap::new(),
+            //clients: HashMap::new(),
+            overlay_core_overrides: HashMap::new(),
+            third_parties: HashMap::new(),
+            log: None,
+            master_key: None,
+            client: None, //Some(op.client.clone()),
+        };
+        wallet.add_site(op.personal_site.clone());
+        //wallet.add_brokers(op.brokers.clone());
+        //wallet.add_client(op.client.clone());
+        wallet
+    }
+}
+
+/// Reduced Wallet content Version 0, for the Login QRcode
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ReducedWalletContentV0 {
+    /// can be 9, 12 or 15 (or 0, in which case salt_pazzle and enc_master_key_pazzle are filled with zeros and should not be used)
+    pub pazzle_length: u8,
+
+    pub salt_pazzle: [u8; 16],
+
+    pub salt_mnemonic: [u8; 16],
+
+    // encrypted master keys. first is encrypted with pazzle, second is encrypted with mnemonic
+    // AD = wallet_id
+    #[serde(with = "BigArray")]
+    pub enc_master_key_pazzle: [u8; 48],
+    #[serde(with = "BigArray")]
+    pub enc_master_key_mnemonic: [u8; 48],
+
+    // nonce for the encryption of the master key
+    // incremented only if the master key changes
+    // be very careful with incrementing this, as a conflict would result in total loss of crypto guarantees.
+    pub master_nonce: u8,
+
+    pub timestamp: Timestamp,
+
+    // the peerId that updated this version of the Wallet. this value is truncated by half and concatenated with the nonce
+    pub peer_id: PubKey,
+    pub nonce: u64,
+
+    // ReducedSensitiveWalletV0 content encrypted with XChaCha20Poly1305, AD = timestamp and walletID
+    #[serde(with = "serde_bytes")]
+    pub encrypted: Vec<u8>,
+}
+
+/// Broker Info Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum BrokerInfoV0 {
+    ServerV0(BrokerServerV0),
+    CoreV0(BrokerCoreV0),
+}
+
+impl BrokerInfoV0 {
+    pub fn get_id(&self) -> PubKey {
+        match self {
+            Self::CoreV0(c) => c.peer_id,
+            Self::ServerV0(s) => s.peer_id,
+        }
+    }
+    pub fn into_locator(&self) -> Locator {
+        match self {
+            Self::CoreV0(_) => panic!("BrokerCoreV0 cannot be made a Locator"),
+            Self::ServerV0(s) => s.clone().into(),
+        }
+    }
+    pub fn vec_into_locator(list: Vec<BrokerInfoV0>) -> Locator {
+        let mut loc = Locator::empty();
+        list.into_iter().for_each(|info| match info {
+            Self::CoreV0(_) => {}
+            Self::ServerV0(bs) => loc.add(bs),
+        });
+        loc
+    }
+}
+
+/// ReducedSensitiveWallet block Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ReducedSensitiveWalletV0 {
+    pub save_recovery_kit: SaveToNGOne,
+
+    // main Site (Personal)
+    pub personal_site: ReducedSiteV0,
+
+    // list of brokers and their connection details
+    pub brokers: Vec<BrokerInfoV0>,
+
+    pub client: ClientV0,
+}
+
+/// ReducedSensitiveWallet block
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum ReducedSensitiveWallet {
+    V0(ReducedSensitiveWalletV0),
+}
+
+/// Wallet Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct WalletV0 {
+    /// ID
+    pub id: WalletId,
+
+    /// Content
+    pub content: WalletContentV0,
+
+    /// Signature over content by wallet's private key
+    pub sig: Sig,
+}
+
+/// Wallet info
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum Wallet {
+    V0(WalletV0),
+    TemporarilyEmpty,
+}
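The comments on ReducedWalletContentV0 above pin down the AEAD layout: a 32-byte master key encrypted under XChaCha20Poly1305 yields exactly the 48 bytes stored in enc_master_key_* (ciphertext plus the 16-byte Poly1305 tag), with the wallet id as associated data; the encrypted payload likewise uses the timestamp and wallet id as AD. A sketch of that construction, assuming the `chacha20poly1305` crate (the key and nonce values here are placeholders, not the wallet's real derivations from the pazzle or mnemonic):

```rust
use chacha20poly1305::{
    aead::{Aead, KeyInit, Payload},
    Key, XChaCha20Poly1305, XNonce,
};

fn seal(key: &[u8; 32], nonce: &[u8; 24], msg: &[u8], aad: &[u8]) -> Vec<u8> {
    let cipher = XChaCha20Poly1305::new(Key::from_slice(key));
    cipher
        .encrypt(XNonce::from_slice(nonce), Payload { msg, aad })
        .expect("encryption failure")
}

fn main() {
    let kek = [1u8; 32]; // placeholder: derived from the pazzle or the mnemonic in the real flow
    let nonce = [2u8; 24]; // placeholder: built from master_nonce in the real scheme
    let master_key = [42u8; 32];
    let wallet_id = b"wallet-id-string"; // AD = wallet_id for the master-key blobs
    let enc = seal(&kek, &nonce, &master_key, wallet_id);
    assert_eq!(enc.len(), 48); // 32-byte key + 16-byte Poly1305 tag
}
```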
+/// Add Wallet Version 0
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct AddWalletV0 {
+    /// wallet. optional (for those who chose not to upload their wallet recovery kit to their broker)
+    pub wallet: Option<Wallet>,
+
+    /// bootstrap
+    pub bootstrap: Bootstrap,
+}
+
+/// Add Wallet
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum AddWallet {
+    V0(AddWalletV0),
+}
+
+impl AddWallet {
+    pub fn id(&self) -> BootstrapId {
+        match self {
+            AddWallet::V0(v0) => v0.bootstrap.id(),
+        }
+    }
+    pub fn bootstrap(&self) -> &Bootstrap {
+        match self {
+            AddWallet::V0(v0) => &v0.bootstrap,
+        }
+    }
+    pub fn wallet(&self) -> Option<&Wallet> {
+        match self {
+            AddWallet::V0(v0) => v0.wallet.as_ref(),
+        }
+    }
+}
+
+/// Create Wallet Version 0, used by the API create_wallet_v0 as a list of arguments
+#[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize)]
+pub struct CreateWalletV0 {
+    /// A vector containing the binary content of an image file that will be used at every login, displayed (on devices that can)
+    /// to the user so they can check that the wallet is theirs and that entering their pazzle and PIN is safe and there is no phishing attack.
+    /// An attacker would redirect the user to a clone of the wallet opener app and would try to steal what the user enters,
+    /// but this attacker would not possess the security_img of the user, as it is only present locally in the wallet file.
+    /// The image should be bigger than 150x150px. There is no need to provide more than 400x400px as it will be scaled down anyway.
+    /// We accept several formats like JPEG, PNG, GIF, WEBP and more.
+    /// The image should be unique to the user, but it should not be too personal either. Do not upload a picture of your face: this is not a profile pic.
+    /// The best choice is any picture that the user recognizes as unique.
+    /// Please be aware that other users who are sharing the same device will be able to see this image.
+    #[zeroize(skip)]
+    #[serde(with = "serde_bytes")]
+    pub security_img: Vec<u8>,
+    /// A string of characters of minimum length 10.
+    /// This phrase will be presented to the user every time they are about to enter their pazzle and PIN in order to unlock their wallet.
+    /// It should be something the user will remember, but not something too personal.
+    /// Do not enter a full name, an address, or a phone number.
+    /// Instead, the user can enter a quote, a small phrase that they like, or something meaningless to others, but unique to them.
+    /// Please be aware that other users who are sharing the same device will be able to see this phrase.
+    pub security_txt: String,
+    /// Choose a PIN code.
+    /// We recommend the user to choose a PIN code they already know very well (unlock phone, credit card).
+    /// The PIN and the rest of the Wallet will never be sent to NextGraph or any other third party (check the source code if you don't believe us).
+    /// It cannot be a series like 1234 or 8765. The same digit cannot repeat more than once; for example, 4484 is invalid.
+    /// Try to avoid birth dates, the last digits of a phone number, or a zip code, for privacy reasons.
+    pub pin: [u8; 4],
+    /// For now, only 9 is supported. 12 and 15 are planned.
+    /// A value of 0 will deactivate the pazzle mechanism on this Wallet, and only the mnemonic can be used to open it.
+    pub pazzle_length: u8,
+    #[zeroize(skip)]
+    /// Not implemented yet. Will send the bootstrap to our cloud servers, if needed.
+    pub send_bootstrap: bool,
+    #[zeroize(skip)]
+    /// Not implemented yet. Will send an encrypted Wallet file to our cloud servers, if needed
+    /// (and no, it does not contain the user's pazzle nor PIN).
+    pub send_wallet: bool,
+    #[zeroize(skip)]
+    /// Do you want a binary file containing the whole Wallet?
+    pub result_with_wallet_file: bool,
+    #[zeroize(skip)]
+    /// Should the wallet be saved locally on disk by the LocalBroker? It will not work on an in-memory LocalBroker, obviously.
+    pub local_save: bool,
+    #[zeroize(skip)]
+    /// Which Broker Server to contact when there is internet access and we want to sync.
+    pub core_bootstrap: BootstrapContentV0,
+    #[zeroize(skip)]
+    /// The registration code at that Broker Server. Only useful the first time you connect to the Server.
+    /// Can be None the rest of the time, or if your server does not need an Invitation.
+    pub core_registration: Option<[u8; 32]>,
+    #[zeroize(skip)]
+    /// Bootstrap of another server that you might use in order to connect to the NextGraph network. It can be another interface on the same `core` server.
+    pub additional_bootstrap: Option<BootstrapContentV0>,
+
+    #[zeroize(skip)]
+    /// Should we generate a recovery PDF containing all the information of the wallet in plain text?
+    pub pdf: bool,
+
+    #[zeroize(skip)]
+    /// short name of the device
+    pub device_name: String,
+}
+
+impl CreateWalletV0 {
+    pub fn new(
+        security_img: Vec<u8>,
+        security_txt: String,
+        pin: [u8; 4],
+        pazzle_length: u8,
+        send_bootstrap: bool,
+        send_wallet: bool,
+        core_bootstrap: BootstrapContentV0,
+        core_registration: Option<[u8; 32]>,
+        additional_bootstrap: Option<BootstrapContentV0>,
+        pdf: bool,
+        device_name: String,
+    ) -> Self {
+        CreateWalletV0 {
+            result_with_wallet_file: false,
+            local_save: true,
+            security_img,
+            security_txt,
+            pin,
+            pazzle_length,
+            send_bootstrap,
+            send_wallet,
+            core_bootstrap,
+            core_registration,
+            additional_bootstrap,
+            pdf,
+            device_name,
+        }
+    }
+}
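The PIN constraints documented above form a small, fully specified predicate: digits only, no ascending or descending run, and no digit used more than twice. A hedged sketch of such a check (an illustration of the documented rules, not the validator actually used by create_wallet_v0):

```rust
/// Illustration of the documented PIN rules: reject 4-digit ascending or
/// descending runs (1234, 8765) and any digit occurring more than twice (4484).
fn pin_is_valid(pin: &[u8; 4]) -> bool {
    if pin.iter().any(|d| *d > 9) {
        return false; // each entry must be a decimal digit
    }
    let ascending = pin.windows(2).all(|w| w[1] == w[0] + 1);
    let descending = pin.windows(2).all(|w| w[0] == w[1] + 1);
    let mut counts = [0u8; 10];
    for d in pin {
        counts[*d as usize] += 1;
    }
    let repeated = counts.iter().any(|c| *c > 2);
    !ascending && !descending && !repeated
}

fn main() {
    assert!(pin_is_valid(&[0, 3, 7, 7]));
    assert!(!pin_is_valid(&[1, 2, 3, 4])); // ascending series
    assert!(!pin_is_valid(&[8, 7, 6, 5])); // descending series
    assert!(!pin_is_valid(&[4, 4, 8, 4])); // '4' appears more than twice
}
```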
+
+// #[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize)]
+// pub struct WalletCreationSiteEventsV0 {
+//     store_id: RepoId,
+//     store_read_cap: ReadCap,
+//     topic_id: TopicId,
+//     topic_priv_key: BranchWriteCapSecret,
+//     events: Vec<(Commit, Vec)>,
+// }
+
+// #[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize)]
+// pub struct WalletCreationEventsV0 {}
+
+#[derive(Clone, Zeroize, ZeroizeOnDrop, Debug, Serialize, Deserialize)]
+pub struct CreateWalletResultV0 {
+    #[zeroize(skip)]
+    /// The encrypted form of the Wallet object that was created.
+    /// Basically the same as what the wallet file contains.
+    pub wallet: Wallet,
+    // #[serde(skip)]
+    // /// The private key of the Wallet. Used for signing the wallet and other internal purposes.
+    // /// it is contained in the opened wallet. No need to save it anywhere.
+    // pub wallet_privkey: PrivKey,
+    #[serde(with = "serde_bytes")]
+    #[zeroize(skip)]
+    /// The binary file that can be saved to disk and given to the user
+    pub wallet_file: Vec<u8>,
+    /// randomly generated pazzle
+    pub pazzle: Vec<u8>,
+    /// randomly generated mnemonic. It is an alternate way to open the wallet.
+    /// A BIP39 list of 12 words. We argue that the pazzle is easier to remember than this.
+    pub mnemonic: [u16; 12],
+    /// The words of the mnemonic, in a human readable form.
+    pub mnemonic_str: Vec<String>,
+    #[zeroize(skip)]
+    /// a string identifying uniquely the wallet
+    pub wallet_name: String,
+    /// newly created Client that uniquely identifies the device where the wallet has been created.
+    pub client: ClientV0,
+    #[zeroize(skip)]
+    /// UserId of the "personal identity" of the user
+    pub user: PubKey,
+    #[zeroize(skip)]
+    /// is this an in_memory wallet that should not be saved to disk by the LocalBroker?
+    pub in_memory: bool,
+
+    pub session_id: u64,
+
+    #[serde(with = "serde_bytes")]
+    /// The PDF file that can be printed by the user
+    pub pdf_file: Vec<u8>,
+}
+
+impl CreateWalletResultV0 {
+    pub fn personal_identity(&self) -> UserId {
+        self.user
+    }
+}
+
+#[derive(Clone, Zeroize, ZeroizeOnDrop, Debug)]
+pub struct CreateWalletIntermediaryV0 {
+    /// The private key of the Wallet. Used for signing the wallet and other internal purposes.
+    /// It is contained in the opened wallet. No need to save it anywhere.
+    pub wallet_privkey: PrivKey,
+    #[zeroize(skip)]
+    /// a string identifying uniquely the wallet
+    pub wallet_name: String,
+    /// newly created Client that uniquely identifies the device where the wallet has been created.
+    pub client: ClientV0,
+
+    /// User priv key of the "personal identity" of the user
+    pub user_privkey: PrivKey,
+    #[zeroize(skip)]
+    /// is this an in_memory wallet that should not be saved to disk by the LocalBroker?
+    pub in_memory: bool,
+
+    #[zeroize(skip)]
+    pub security_img: Vec<u8>,
+
+    pub security_txt: String,
+
+    pub pazzle_length: u8,
+
+    pub pin: [u8; 4],
+
+    #[zeroize(skip)]
+    pub send_bootstrap: bool,
+    #[zeroize(skip)]
+    pub send_wallet: bool,
+    #[zeroize(skip)]
+    pub result_with_wallet_file: bool,
+    #[zeroize(skip)]
+    pub core_bootstrap: BootstrapContentV0,
+    pub core_registration: Option<[u8; 32]>,
+    #[zeroize(skip)]
+    pub additional_bootstrap: Option<BootstrapContentV0>,
+    #[zeroize(skip)]
+    pub pdf: bool,
+}
+
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub enum NgWalletError {
+    InvalidPin,
+    InvalidPazzle,
+    InvalidPazzleLength,
+    InvalidMnemonic,
+    InvalidSecurityImage,
+    InvalidSecurityText,
+    SubmissionError,
+    InternalError,
+    EncryptionError,
+    DecryptionError,
+    InvalidSignature,
+    NoCreateWalletPresent,
+    InvalidBootstrap,
+    SerializationError,
+}
+
+impl From<NgWalletError> for NgError {
+    fn from(wallet_error: NgWalletError) -> NgError {
+        match wallet_error {
+            NgWalletError::SerializationError => NgError::SerializationError,
+            NgWalletError::InvalidSignature => NgError::InvalidSignature,
+            NgWalletError::EncryptionError | NgWalletError::DecryptionError => {
+                NgError::EncryptionError
+            }
+            _ => NgError::WalletError(wallet_error.to_string()),
+        }
+    }
+}
+
+impl fmt::Display for NgWalletError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{:?}", self)
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum NgFileV0 {
+    Wallet(Wallet),
+    Other,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub enum NgFile {
+    V0(NgFileV0),
+}
+
+impl TryFrom<Vec<u8>> for NgFile {
+    type Error = NgError;
+    fn try_from(file: Vec<u8>) -> Result<Self, Self::Error> {
+        let ngf: Self = serde_bare::from_slice(&file).map_err(|_| NgError::InvalidFileFormat)?;
+        Ok(ngf)
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ShuffledPazzle {
+    pub category_indices: Vec<u8>,
+    pub emoji_indices: Vec<Vec<u8>>,
+}
+
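Because NgFile is nothing more than BARE-encoded bytes, recognizing a wallet file is a single TryFrom conversion over the file's content. A minimal sketch building on the impl above (the `.ngw` file name is hypothetical):

```rust
use std::fs;

// Assumes NgFile, NgFileV0 and NgError from the definitions above are in scope.
fn is_wallet_file(path: &str) -> Result<bool, NgError> {
    let bytes: Vec<u8> = fs::read(path).map_err(|_| NgError::InvalidFileFormat)?;
    Ok(matches!(
        NgFile::try_from(bytes)?,
        NgFile::V0(NgFileV0::Wallet(_))
    ))
}

fn main() -> Result<(), NgError> {
    if is_wallet_file("my-wallet.ngw")? {
        println!("this is a NextGraph wallet file");
    }
    Ok(())
}
```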
diff --git a/ng-wallet/tests/generated_security_image.jpg.compare b/ng-wallet/tests/generated_security_image.jpg.compare
new file mode 100644
index 0000000000000000000000000000000000000000..63adde791ce7051da8ba4ad75d0efb5992bdd649
GIT binary patch
literal 29484
[base85-encoded binary image data omitted]

literal 0
HcmV?d00001

diff --git a/ng-wallet/tests/valid_security_image.jpg b/ng-wallet/tests/valid_security_image.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..345a6d2c1ed520e2986dd9b483cd7ab9de4dbc03
GIT binary patch
literal 29454
[base85-encoded binary image data omitted]
+> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs.
+>
+> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers.
+>
+> More info here [https://nextgraph.org](https://nextgraph.org)
+
+## JS/WASM module
+
+This crate is composed of:
+
+- the npm package `ng-sdk-js`, which is the SDK
+- an example of a web app using the vite bundler, `example-webapp-vite`
+- an example of a React web app, `app-react`
+- an example of a node-js app, `app-node`
+- `index.html`, an example of vanilla JS usage of the SDK
+
+## Support
+
+Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org)
+
+And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org)
+
+## For developers
+
+Read our [getting started guide](https://docs.nextgraph.org/en/getting-started/).
+
+```
+// for nodejs
+npm i nextgraph
+// or for browser
+npm i nextgraphweb
+```
+
+## For contributors
+
+First of all, run:
+
+```
+cargo install cargo-run-script
+```
+
+We recommend that contributors use the production build, as the creation and opening of wallets is very slow in the dev build.
+Only use the dev build when debugging the SDK; see the next chapter for the production build.
+Please note that the dev and prod builds share the same output folder, so they overwrite each other.
+When building the app, be sure to have the production build of the SDK in the output folder.
+
+```
+// for the app sdk (browser)
+cargo run-script appdev
+
+// for the nodejs sdk
+cargo run-script nodedev
+```
+
+For testing in vanilla JS:
+
+```
+cargo run-script webdev
+python3 -m http.server
+// open http://localhost:8000
+```
+
+Or automated testing with headless chrome:
+
+```
+wasm-pack test --chrome --headless
+```
+
+## Production build
+
+```
+cargo run-script app
+tar --exclude .DS_Store -zcvf pkg.tar.gz pkg
+cargo run-script node
+cargo run-script web
+```
+
+## Publishing to npm
+
+```
+cargo run-script node
+cd pkg-node
+npm login --auth-type legacy
+npm publish --auth-type legacy
+```
+
+### Example Plain JS web app (with Vite)
+
+See [README here](example-webapp-vite/README.md)
+
+### Example React web app
+
+```
+cd ../app-react
+npm run dev
+```
+
+This URL will open automatically in the browser: [http://localhost:8080](http://localhost:8080)
+
+### Example NodeJS app
+
+```
+cd ../app-node
+npm run start
+```
+
+### Contributions license
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you shall be dual licensed as below, without any
+additional terms or conditions.
+
+## License
+
+Licensed under either of
+
+- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0)
+- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+  at your option.
+
+`SPDX-License-Identifier: Apache-2.0 OR MIT`
+
+---
+
+NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively.
diff --git a/sdk/ng-sdk-js/LICENSE-APACHE2 b/sdk/ng-sdk-js/LICENSE-APACHE2
new file mode 100644
index 0000000..e362c4c
--- /dev/null
+++ b/sdk/ng-sdk-js/LICENSE-APACHE2
@@ -0,0 +1,16 @@
+Apache 2.0 License
+
+Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+All rights reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
\ No newline at end of file
diff --git a/sdk/ng-sdk-js/LICENSE-MIT b/sdk/ng-sdk-js/LICENSE-MIT
new file mode 100644
index 0000000..92fc43f
--- /dev/null
+++ b/sdk/ng-sdk-js/LICENSE-MIT
@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/sdk/ng-sdk-js/README.md b/sdk/ng-sdk-js/README.md
new file mode 100644
index 0000000..a92726d
--- /dev/null
+++ b/sdk/ng-sdk-js/README.md
@@ -0,0 +1,160 @@
+# ng-sdk-js
+
+[![Apache 2.0 Licensed][license-image]][license-link]
+[![MIT Licensed][license-image2]][license-link2]
+[![project chat](https://img.shields.io/badge/zulip-join_chat-brightgreen.svg)](https://forum.nextgraph.org)
+
+JavaScript/WASM package containing the SDK of NextGraph
+
+## NextGraph
+
+> NextGraph brings about the convergence of P2P and Semantic Web technologies, towards a decentralized, secure and privacy-preserving cloud, based on CRDTs.
+>
+> This open source ecosystem provides solutions for end-users (a platform) and software developers (a framework), wishing to use or create **decentralized** apps featuring: **live collaboration** on rich-text documents, peer to peer communication with **end-to-end encryption**, offline-first, **local-first**, portable and interoperable data, total ownership of data and software, security and privacy. Centered on repositories containing **semantic data** (RDF), **rich text**, and structured data formats like **JSON**, synced between peers belonging to permissioned groups of users, it offers strong eventual consistency, thanks to the use of **CRDTs**. Documents can be linked together, signed, shared securely, queried using the **SPARQL** language and organized into sites and containers.
+>
+> More info here [https://nextgraph.org](https://nextgraph.org)
+
+## Support
+
+Documentation can be found here [https://docs.nextgraph.org](https://docs.nextgraph.org)
+
+And our community forum where you can ask questions is here [https://forum.nextgraph.org](https://forum.nextgraph.org)
+
+## For developers
+
+Read our [getting started guide](https://docs.nextgraph.org/en/getting-started/).
+
+```
+npm i ng-sdk-js
+```
+
+The API is divided into 4 parts:
+
+- the wallet API, which lets users open and change their wallet and use its credentials
+- the LocalVerifier API, to open the documents locally
+- the RemoteVerifier API, which connects to the ngd server and runs the verifier on the server
+- a special mode of operation for ngd called `Headless`, where all the users of that server have given the server full control of their data
+
+All of those APIs share a common `Session API` (all the functions that take a session_id as their first argument).
+
+The wallet API is not documented, as it will be deprecated as soon as we have an Authorization/Capability Delegation mechanism between the NextGraph apps and the Wallet.
+Still, this API will always be available, as it is used internally by the NextGraph app, and it could also be used by the owner of a wallet to access their data with nodeJS or Rust.
+
+## Headless server (runs the verifiers of the users on the server)
+
+NextGraph daemon (ngd) is normally used only as a Broker of encrypted messages, but it can also be configured to run the verifiers of some or all of the users' data.
+The verifier is the service that opens the encrypted data and "materializes" it. In local-first/CRDT terminology, this means that the many commits that form the DAG of operations are reduced in order to obtain the current state of a document, which can then be read or edited locally by the user. Usually, the verifier runs locally in the native NextGraph app, and the materialized state is persisted locally (with encryption at rest). The web version of the app (available at https://nextgraph.app) does not persist the materialized state yet, because the "UserStorage for Web" feature is not ready. Programmers can also run a local verifier with the wallet API in Rust or nodeJS (not documented), or use the CLI to create a local materialized state.
+
+It is also possible to run a remote verifier on ngd; the user then has to give their credentials to the server (partially or fully) so the server can decrypt the data and process it. Obviously this breaks the end-to-end encryption. But depending on the use-case, it can be useful to have the verifier run on some server.
+
+## APIs
+
+The nodeJS API is limited, for now, to the following functions.
+
+All the functions are async. You must use them with `await` (or `.then()`).
+
+They can all throw errors. You must enclose them in `try {} catch(e) {}`.
+
+See the example [here](https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/ng-sdk-js/app-node).
+
+## Wallet API
+
+Opens and modifies the wallet.
+Not documented yet. We don't really want developers to use it, as the opening of a wallet is a sensitive operation that shouldn't be necessary for developers who create apps and ask permission to access the data of users.
+We will provide an ad-hoc API for Permission/Capability delegation, after which the wallet API will be deprecated.
+
+## LocalVerifier API
+
+Can manipulate partial access to the user's data. Coming soon.
+
+## RemoteVerifier API
+
+Entrusts the credentials of a user to an ngd server. Coming soon.
+
+## Headless API
+
+- `ng.init_headless(config)` must be called before any other call.
+- `ng.admin_create_user(config)` creates a new user on the server, and populates their 3P stores. Returns the user_id.
+- `ng.session_headless_start(user_id)` starts a new session for the user. Returns the session info, including the session_id.
+- `ng.sparql_query(session_id, "[SPARQL query]", base, nuri)` returns:
+  - for SELECT queries: a JSON SPARQL Query Result as a Javascript object ([SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/))
+  - for CONSTRUCT queries: a list of quads following the [RDF-JS data model](http://rdf.js.org/data-model-spec/) that can be used as input to RDFJS libraries
+  - for ASK queries: a boolean
+- `ng.sparql_update(session_id, "[SPARQL update]")` returns nothing, but can throw an error.
+- `ng.file_put_to_private_store(session_id,"[filename]","[mimetype]")` returns the Nuri (NextGraph URI) of the file, as a string.
+- `ng.file_get_from_private_store(session_id, "[nuri]", callback)` returns a cancel function. The `callback(file)` function will be called as follows:
+  - once at first, with some metadata information in `file.V0.FileMeta`
+  - one or more times, with all the blobs of data, in `file.V0.FileBinary`
+  - finally, one more time, with `file.V0 == 'EndOfStream'`. See the example on how to reconstruct a buffer out of this (there is also a sketch at the end of this section).
+- `ng.session_headless_stop(session_id, force_close)` stops the session, but doesn't close the remote verifier, except if force_close is true. If false, the verifier is detached from the session and continues to run on the server. A new session can then be reattached to it, by calling session_headless_start with the same user_id.
+
+Here is the format of the config object to be supplied in the calls to `init_headless` and `admin_create_user`:
+
+```js
+config = {
+  server_peer_id: "[your server ID]",
+  admin_user_key: "[your admin key]",
+  client_peer_key: "[the client key]",
+  server_addr: "[IP and PORT of the server]", // this one is optional. It will default to localhost:1440. Format is A.A.A.A:P for IPv4 or [AAAA:::]:P for IPv6
+};
+```
+
+Alternatively, you can use the environment variables:
+
+```
+NG_HEADLESS_SERVER_PEER_ID
+NG_HEADLESS_ADMIN_USER_KEY
+NG_HEADLESS_CLIENT_PEER_KEY
+NG_HEADLESS_SERVER_ADDR
+```
+
+If you supply both, the values passed in the API function call take precedence over the env vars.
+
+In order to generate those keys, you will first have to run the `ngd` server, by following the instructions below.
+
+## Install and configure ngd
+
+The binaries can be obtained from the [release page](https://git.nextgraph.org/NextGraph/nextgraph-rs/releases).
+
+You can also [compile](https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/DEV.md#first-run) them from source.
+
+After creating your wallet by following the above instructions, the NG_HEADLESS_ADMIN_USER_KEY is your user private key, which you can find in the app, under User Panel / Account.
+
+`SERVER_DOMAIN` can be anything you want. If you run a web server with some content at `server.com`, then the NextGraph web app could be served at the subdomain `app.server.com` or `ng.server.com`.
+This is what you should enter in `SERVER_DOMAIN`. You also have to set up your reverse proxy (haproxy, nginx, etc.) to forward incoming TLS connections to ngd. ngd listens for TCP connections on localhost port 1440 as configured above. The header `X-Forwarded-For` must be set by your reverse proxy. ngd does not handle TLS: your reverse proxy has to terminate the TLS connections and forward a plain TCP connection to ngd.
+You can use ngd in your internal network (Docker, etc.) without exposing it to the internet. In this case, remove the `-d` option. But the goal of ngd is to be a broker that connects to other brokers on the internet, so it should have a public interface configured at some point.
+
+The 2 API functions that need a config also need a `NG_HEADLESS_CLIENT_PEER_KEY` that we haven't created yet.
+
+You should create it with another call to:
+
+```bash
+ngcli gen-key
+# the private key is what goes into NG_HEADLESS_CLIENT_PEER_KEY. It identifies the client (the process that is using this library, i.e. a nodeJS process).
+# the public key will go into the ngd config for authorization (but this is not implemented yet; just keep it somewhere for now).
+```
+
+That's it. The broker is configured. You can create an entry in systemd/init.d for your system to start the daemon at every boot. Don't forget to change the working directory to where your data is, or use the `--base` option.
+
+If you have configured a domain, then the web app can be accessed at https://app.server.com, for example.
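+
+To tie everything together, here is a minimal nodeJS sketch of the Headless API flow described above. It is only a sketch: it assumes the `nextgraph` npm package exposes the functions under the names listed in the Headless API section and that the session info object carries a `session_id` field; the keys are the placeholders from the config example, and the query and file Nuri are invented for the illustration.
+
+```js
+const ng = require("nextgraph");
+
+const config = {
+  server_peer_id: "[your server ID]",
+  admin_user_key: "[your admin key]",
+  client_peer_key: "[the client key]",
+};
+
+async function main() {
+  await ng.init_headless(config); // must come before any other call
+
+  // create a user (or reuse an existing user_id) and open a session for them
+  const user_id = await ng.admin_create_user(config);
+  const session = await ng.session_headless_start(user_id);
+  const session_id = session.session_id;
+
+  // SELECT queries return a SPARQL Query Results JSON object
+  const results = await ng.sparql_query(
+    session_id,
+    "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10"
+  );
+  console.log(results);
+
+  // reconstructing a buffer from the file streaming callback
+  const chunks = [];
+  const done = new Promise((resolve) => {
+    ng.file_get_from_private_store(session_id, "[nuri]", (file) => {
+      if (file.V0 === "EndOfStream") {
+        resolve(Buffer.concat(chunks)); // the complete file content
+      } else if (file.V0.FileBinary) {
+        chunks.push(Buffer.from(file.V0.FileBinary)); // one blob of data
+      } else if (file.V0.FileMeta) {
+        console.log("metadata:", file.V0.FileMeta); // arrives first
+      }
+    }).catch(console.error); // resolves to a cancel function, unused here
+  });
+  const buffer = await done;
+  console.log("file size:", buffer.length);
+
+  // detach the session; the verifier keeps running on the server
+  await ng.session_headless_stop(session_id, false);
+}
+
+main().catch(console.error);
+```
+
+Remember that all of these functions can throw; in a real program you would wrap each step in `try {} catch(e) {}` rather than relying on the final `.catch()`.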
+
+---
+
+## License
+
+Licensed under either of
+
+- Apache License, Version 2.0 ([LICENSE-APACHE2](LICENSE-APACHE2) or http://www.apache.org/licenses/LICENSE-2.0)
+- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+  at your option.
+
+`SPDX-License-Identifier: Apache-2.0 OR MIT`
+
+---
+
+NextGraph received funding through the [NGI Assure Fund](https://nlnet.nl/assure) and the [NGI Zero Commons Fund](https://nlnet.nl/commonsfund/), both funds established by [NLnet](https://nlnet.nl/) Foundation with financial support from the European Commission's [Next Generation Internet](https://ngi.eu/) programme, under the aegis of DG Communications Networks, Content and Technology under grant agreements No 957073 and No 101092990, respectively.
+
+[license-image]: https://img.shields.io/badge/license-Apache2.0-blue.svg
+[license-link]: https://git.nextgraph.org/NextGraph/nextgraph-rs/raw/branch/master/LICENSE-APACHE2
+[license-image2]: https://img.shields.io/badge/license-MIT-blue.svg
+[license-link2]: https://git.nextgraph.org/NextGraph/nextgraph-rs/src/branch/master/LICENSE-MIT
diff --git a/sdk/ng-sdk-js/index.html b/sdk/ng-sdk-js/index.html
new file mode 100644
index 0000000..5b27873
--- /dev/null
+++ b/sdk/ng-sdk-js/index.html
@@ -0,0 +1,32 @@
+
+
+
+
+
+ NextGraph web sdk test
+

+ run python3 -m http.server to use it

+ + + diff --git a/sdk/ng-sdk-js/js/bowser.js b/sdk/ng-sdk-js/js/bowser.js new file mode 100644 index 0000000..2c684d2 --- /dev/null +++ b/sdk/ng-sdk-js/js/bowser.js @@ -0,0 +1,2233 @@ +// NOTE: this list must be up-to-date with browsers listed in +// test/acceptance/useragentstrings.yml +export const BROWSER_ALIASES_MAP = { + 'Amazon Silk': 'amazon_silk', + 'Android Browser': 'android', + Bada: 'bada', + BlackBerry: 'blackberry', + Chrome: 'chrome', + Chromium: 'chromium', + Electron: 'electron', + Epiphany: 'epiphany', + Firefox: 'firefox', + Focus: 'focus', + Generic: 'generic', + 'Google Search': 'google_search', + Googlebot: 'googlebot', + 'Internet Explorer': 'ie', + 'K-Meleon': 'k_meleon', + Maxthon: 'maxthon', + 'Microsoft Edge': 'edge', + 'MZ Browser': 'mz', + 'NAVER Whale Browser': 'naver', + Opera: 'opera', + 'Opera Coast': 'opera_coast', + PhantomJS: 'phantomjs', + Puffin: 'puffin', + QupZilla: 'qupzilla', + QQ: 'qq', + QQLite: 'qqlite', + Safari: 'safari', + Sailfish: 'sailfish', + 'Samsung Internet for Android': 'samsung_internet', + SeaMonkey: 'seamonkey', + Sleipnir: 'sleipnir', + Swing: 'swing', + Tizen: 'tizen', + 'UC Browser': 'uc', + Vivaldi: 'vivaldi', + 'WebOS Browser': 'webos', + WeChat: 'wechat', + 'Yandex Browser': 'yandex', + Roku: 'roku', + }; + + export const BROWSER_MAP = { + amazon_silk: 'Amazon Silk', + android: 'Android Browser', + bada: 'Bada', + blackberry: 'BlackBerry', + chrome: 'Chrome', + chromium: 'Chromium', + electron: 'Electron', + epiphany: 'Epiphany', + firefox: 'Firefox', + focus: 'Focus', + generic: 'Generic', + googlebot: 'Googlebot', + google_search: 'Google Search', + ie: 'Internet Explorer', + k_meleon: 'K-Meleon', + maxthon: 'Maxthon', + edge: 'Microsoft Edge', + mz: 'MZ Browser', + naver: 'NAVER Whale Browser', + opera: 'Opera', + opera_coast: 'Opera Coast', + phantomjs: 'PhantomJS', + puffin: 'Puffin', + qupzilla: 'QupZilla', + qq: 'QQ Browser', + qqlite: 'QQ Browser Lite', + safari: 'Safari', + sailfish: 'Sailfish', + samsung_internet: 'Samsung Internet for Android', + seamonkey: 'SeaMonkey', + sleipnir: 'Sleipnir', + swing: 'Swing', + tizen: 'Tizen', + uc: 'UC Browser', + vivaldi: 'Vivaldi', + webos: 'WebOS Browser', + wechat: 'WeChat', + yandex: 'Yandex Browser', + }; + + export const PLATFORMS_MAP = { + tablet: 'tablet', + mobile: 'mobile', + desktop: 'desktop', + tv: 'tv', + }; + + export const OS_MAP = { + WindowsPhone: 'Windows Phone', + Windows: 'Windows', + MacOS: 'macOS', + iOS: 'iOS', + Android: 'Android', + WebOS: 'WebOS', + BlackBerry: 'BlackBerry', + Bada: 'Bada', + Tizen: 'Tizen', + Linux: 'Linux', + ChromeOS: 'Chrome OS', + PlayStation4: 'PlayStation 4', + Roku: 'Roku', + }; + + export const ENGINE_MAP = { + EdgeHTML: 'EdgeHTML', + Blink: 'Blink', + Trident: 'Trident', + Presto: 'Presto', + Gecko: 'Gecko', + WebKit: 'WebKit', + }; + + class Utils { + /** + * Get first matched item for a string + * @param {RegExp} regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getFirstMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 0 && match[1]) || ''; + } + + /** + * Get second matched item for a string + * @param regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getSecondMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 1 && match[2]) || ''; + } + + /** + * Match a regexp and return a constant or undefined + * @param {RegExp} regexp + 
* @param {String} ua + * @param {*} _const Any const that will be returned if regexp matches the string + * @return {*} + */ + static matchAndReturnConst(regexp, ua, _const) { + if (regexp.test(ua)) { + return _const; + } + return void (0); + } + + static getWindowsVersionName(version) { + switch (version) { + case 'NT': return 'NT'; + case 'XP': return 'XP'; + case 'NT 5.0': return '2000'; + case 'NT 5.1': return 'XP'; + case 'NT 5.2': return '2003'; + case 'NT 6.0': return 'Vista'; + case 'NT 6.1': return '7'; + case 'NT 6.2': return '8'; + case 'NT 6.3': return '8.1'; + case 'NT 10.0': return '10'; + default: return undefined; + } + } + + /** + * Get macOS version name + * 10.5 - Leopard + * 10.6 - Snow Leopard + * 10.7 - Lion + * 10.8 - Mountain Lion + * 10.9 - Mavericks + * 10.10 - Yosemite + * 10.11 - El Capitan + * 10.12 - Sierra + * 10.13 - High Sierra + * 10.14 - Mojave + * 10.15 - Catalina + * + * @example + * getMacOSVersionName("10.14") // 'Mojave' + * + * @param {string} version + * @return {string} versionName + */ + static getMacOSVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] !== 10) { + switch (v[0]) { + case 11: return 'Big Sur'; + case 12: return 'Monterey'; + case 13: return 'Ventura'; + case 14: return 'Sonoma'; + } + } + else switch (v[1]) { + case 5: return 'Leopard'; + case 6: return 'Snow Leopard'; + case 7: return 'Lion'; + case 8: return 'Mountain Lion'; + case 9: return 'Mavericks'; + case 10: return 'Yosemite'; + case 11: return 'El Capitan'; + case 12: return 'Sierra'; + case 13: return 'High Sierra'; + case 14: return 'Mojave'; + case 15: return 'Catalina'; + default: return undefined; + } + } + + /** + * Get Android version name + * 1.5 - Cupcake + * 1.6 - Donut + * 2.0 - Eclair + * 2.1 - Eclair + * 2.2 - Froyo + * 2.x - Gingerbread + * 3.x - Honeycomb + * 4.0 - Ice Cream Sandwich + * 4.1 - Jelly Bean + * 4.4 - KitKat + * 5.x - Lollipop + * 6.x - Marshmallow + * 7.x - Nougat + * 8.x - Oreo + * 9.x - Pie + * + * @example + * getAndroidVersionName("7.0") // 'Nougat' + * + * @param {string} version + * @return {string} versionName + */ + static getAndroidVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] === 1 && v[1] < 5) return undefined; + if (v[0] === 1 && v[1] < 6) return 'Cupcake'; + if (v[0] === 1 && v[1] >= 6) return 'Donut'; + if (v[0] === 2 && v[1] < 2) return 'Eclair'; + if (v[0] === 2 && v[1] === 2) return 'Froyo'; + if (v[0] === 2 && v[1] > 2) return 'Gingerbread'; + if (v[0] === 3) return 'Honeycomb'; + if (v[0] === 4 && v[1] < 1) return 'Ice Cream Sandwich'; + if (v[0] === 4 && v[1] < 4) return 'Jelly Bean'; + if (v[0] === 4 && v[1] >= 4) return 'KitKat'; + if (v[0] === 5) return 'Lollipop'; + if (v[0] === 6) return 'Marshmallow'; + if (v[0] === 7) return 'Nougat'; + if (v[0] === 8) return 'Oreo'; + if (v[0] === 9) return 'Pie'; + if (v[0] === 10) return 'Android 10'; + if (v[0] === 11) return 'Android 11'; + if (v[0] === 12) return 'Android 12'; + if (v[0] === 13) return 'Android 13'; + if (v[0] === 14) return 'Android 14'; + if (v[0] === 15) return 'Android 15'; + return undefined; + } + + /** + * Get version precisions count + * + * @example + * getVersionPrecision("1.10.3") // 3 + * + * @param {string} version + * @return {number} + */ + static getVersionPrecision(version) { + return version.split('.').length; + } + + /** + * Calculate browser version weight + * + * @example + * 
compareVersions('1.10.2.1', '1.8.2.1.90') // 1 + * compareVersions('1.010.2.1', '1.09.2.1.90'); // 1 + * compareVersions('1.10.2.1', '1.10.2.1'); // 0 + * compareVersions('1.10.2.1', '1.0800.2'); // -1 + * compareVersions('1.10.2.1', '1.10', true); // 0 + * + * @param {String} versionA versions versions to compare + * @param {String} versionB versions versions to compare + * @param {boolean} [isLoose] enable loose comparison + * @return {Number} comparison result: -1 when versionA is lower, + * 1 when versionA is bigger, 0 when both equal + */ + /* eslint consistent-return: 1 */ + static compareVersions(versionA, versionB, isLoose = false) { + // 1) get common precision for both versions, for example for "10.0" and "9" it should be 2 + const versionAPrecision = Utils.getVersionPrecision(versionA); + const versionBPrecision = Utils.getVersionPrecision(versionB); + + let precision = Math.max(versionAPrecision, versionBPrecision); + let lastPrecision = 0; + + const chunks = Utils.map([versionA, versionB], (version) => { + const delta = precision - Utils.getVersionPrecision(version); + + // 2) "9" -> "9.0" (for precision = 2) + const _version = version + new Array(delta + 1).join('.0'); + + // 3) "9.0" -> ["000000000"", "000000009"] + return Utils.map(_version.split('.'), chunk => new Array(20 - chunk.length).join('0') + chunk).reverse(); + }); + + // adjust precision for loose comparison + if (isLoose) { + lastPrecision = precision - Math.min(versionAPrecision, versionBPrecision); + } + + // iterate in reverse order by reversed chunks array + precision -= 1; + while (precision >= lastPrecision) { + // 4) compare: "000000009" > "000000010" = false (but "9" > "10" = true) + if (chunks[0][precision] > chunks[1][precision]) { + return 1; + } + + if (chunks[0][precision] === chunks[1][precision]) { + if (precision === lastPrecision) { + // all version chunks are same + return 0; + } + + precision -= 1; + } else if (chunks[0][precision] < chunks[1][precision]) { + return -1; + } + } + + return undefined; + } + + /** + * Array::map polyfill + * + * @param {Array} arr + * @param {Function} iterator + * @return {Array} + */ + static map(arr, iterator) { + const result = []; + let i; + if (Array.prototype.map) { + return Array.prototype.map.call(arr, iterator); + } + for (i = 0; i < arr.length; i += 1) { + result.push(iterator(arr[i])); + } + return result; + } + + /** + * Array::find polyfill + * + * @param {Array} arr + * @param {Function} predicate + * @return {Array} + */ + static find(arr, predicate) { + let i; + let l; + if (Array.prototype.find) { + return Array.prototype.find.call(arr, predicate); + } + for (i = 0, l = arr.length; i < l; i += 1) { + const value = arr[i]; + if (predicate(value, i)) { + return value; + } + } + return undefined; + } + + /** + * Object::assign polyfill + * + * @param {Object} obj + * @param {Object} ...objs + * @return {Object} + */ + static assign(obj, ...assigners) { + const result = obj; + let i; + let l; + if (Object.assign) { + return Object.assign(obj, ...assigners); + } + for (i = 0, l = assigners.length; i < l; i += 1) { + const assigner = assigners[i]; + if (typeof assigner === 'object' && assigner !== null) { + const keys = Object.keys(assigner); + keys.forEach((key) => { + result[key] = assigner[key]; + }); + } + } + return obj; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('Microsoft Edge') // edge + * + * @param {string} browserName + * @return {string} + */ + static getBrowserAlias(browserName) { + 
return BROWSER_ALIASES_MAP[browserName]; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('edge') // Microsoft Edge + * + * @param {string} browserAlias + * @return {string} + */ + static getBrowserTypeByAlias(browserAlias) { + return BROWSER_MAP[browserAlias] || ''; + } + } + + const commonVersionIdentifier = /version\/(\d+(\.?_?\d+)+)/i; + +const browserParsersList = [ + /* Googlebot */ + { + test: [/googlebot/i], + describe(ua) { + const browser = { + name: 'Googlebot', + }; + const version = Utils.getFirstMatch(/googlebot\/(\d+(\.\d+))/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera < 13.0 */ + { + test: [/opera/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera > 13.0 */ + { + test: [/opr\/|opios/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/SamsungBrowser/i], + describe(ua) { + const browser = { + name: 'Samsung Internet for Android', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Whale/i], + describe(ua) { + const browser = { + name: 'NAVER Whale Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MZBrowser/i], + describe(ua) { + const browser = { + name: 'MZ Browser', + }; + const version = Utils.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/focus/i], + describe(ua) { + const browser = { + name: 'Focus', + }; + const version = Utils.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/swing/i], + describe(ua) { + const browser = { + name: 'Swing', + }; + const version = Utils.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/coast/i], + describe(ua) { + const browser = { + name: 'Opera Coast', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/opt\/\d+(?:.?_?\d+)+/i], + describe(ua) { + const browser = { + name: 'Opera Touch', + }; + const version = Utils.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/yabrowser/i], + 
describe(ua) { + const browser = { + name: 'Yandex Browser', + }; + const version = Utils.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/ucbrowser/i], + describe(ua) { + const browser = { + name: 'UC Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Maxthon|mxios/i], + describe(ua) { + const browser = { + name: 'Maxthon', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/epiphany/i], + describe(ua) { + const browser = { + name: 'Epiphany', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/puffin/i], + describe(ua) { + const browser = { + name: 'Puffin', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sleipnir/i], + describe(ua) { + const browser = { + name: 'Sleipnir', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/k-meleon/i], + describe(ua) { + const browser = { + name: 'K-Meleon', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/micromessenger/i], + describe(ua) { + const browser = { + name: 'WeChat', + }; + const version = Utils.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qqbrowser/i], + describe(ua) { + const browser = { + name: (/qqbrowserlite/i).test(ua) ? 
'QQ Browser Lite' : 'QQ Browser', + }; + const version = Utils.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/msie|trident/i], + describe(ua) { + const browser = { + name: 'Internet Explorer', + }; + const version = Utils.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/\sedg\//i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/edg([ea]|ios)/i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/vivaldi/i], + describe(ua) { + const browser = { + name: 'Vivaldi', + }; + const version = Utils.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/seamonkey/i], + describe(ua) { + const browser = { + name: 'SeaMonkey', + }; + const version = Utils.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sailfish/i], + describe(ua) { + const browser = { + name: 'Sailfish', + }; + + const version = Utils.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/silk/i], + describe(ua) { + const browser = { + name: 'Amazon Silk', + }; + const version = Utils.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/phantom/i], + describe(ua) { + const browser = { + name: 'PhantomJS', + }; + const version = Utils.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/slimerjs/i], + describe(ua) { + const browser = { + name: 'SlimerJS', + }; + const version = Utils.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const browser = { + name: 'BlackBerry', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const browser = { + name: 'WebOS Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/bada/i], + describe(ua) { + const browser = { + name: 'Bada', + }; + const version = Utils.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/tizen/i], + describe(ua) { + const browser = { + name: 'Tizen', + }; + const version = 
Utils.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qupzilla/i], + describe(ua) { + const browser = { + name: 'QupZilla', + }; + const version = Utils.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/firefox|iceweasel|fxios/i], + describe(ua) { + const browser = { + name: 'Firefox', + }; + const version = Utils.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/electron/i], + describe(ua) { + const browser = { + name: 'Electron', + }; + const version = Utils.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MiuiBrowser/i], + describe(ua) { + const browser = { + name: 'Miui', + }; + const version = Utils.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chromium/i], + describe(ua) { + const browser = { + name: 'Chromium', + }; + const version = Utils.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chrome|crios|crmo/i], + describe(ua) { + const browser = { + name: 'Chrome', + }; + const version = Utils.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/GSA/i], + describe(ua) { + const browser = { + name: 'Google Search', + }; + const version = Utils.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Android Browser */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const browser = { + name: 'Android Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* PlayStation 4 */ + { + test: [/playstation 4/i], + describe(ua) { + const browser = { + name: 'PlayStation 4', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Safari */ + { + test: [/safari|applewebkit/i], + describe(ua) { + const browser = { + name: 'Safari', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Something else */ + { + test: [/.*/i], + describe(ua) { + /* Here we try to make sure that there are explicit details about the device + * in order to decide what regexp exactly we want to apply + * (as there is a specific decision based on that conclusion) + */ + const regexpWithoutDeviceSpec = /^(.*)\/(.*) /; + const regexpWithDeviceSpec = /^(.*)\/(.*)[ \t]\((.*)/; + const hasDeviceSpec = ua.search('\\(') !== -1; + const regexp = hasDeviceSpec ? 
regexpWithDeviceSpec : regexpWithoutDeviceSpec; + return { + name: Utils.getFirstMatch(regexp, ua), + version: Utils.getSecondMatch(regexp, ua), + }; + }, + }, +]; + +const enginesParsersList = [ + /* EdgeHTML */ + { + test(parser) { + return parser.getBrowserName(true) === 'microsoft edge'; + }, + describe(ua) { + const isBlinkBased = /\sedg\//i.test(ua); + + // return blink if it's blink-based one + if (isBlinkBased) { + return { + name: ENGINE_MAP.Blink, + }; + } + + // otherwise match the version and return EdgeHTML + const version = Utils.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i, ua); + + return { + name: ENGINE_MAP.EdgeHTML, + version, + }; + }, + }, + + /* Trident */ + { + test: [/trident/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.Trident, + }; + + const version = Utils.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Presto */ + { + test(parser) { + return parser.test(/presto/i); + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Presto, + }; + + const version = Utils.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Gecko */ + { + test(parser) { + const isGecko = parser.test(/gecko/i); + const likeGecko = parser.test(/like gecko/i); + return isGecko && !likeGecko; + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Gecko, + }; + + const version = Utils.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Blink */ + { + test: [/(apple)?webkit\/537\.36/i], + describe() { + return { + name: ENGINE_MAP.Blink, + }; + }, + }, + + /* WebKit */ + { + test: [/(apple)?webkit/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.WebKit, + }; + + const version = Utils.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + ]; + + const platformParsersList = [ + /* Googlebot */ + { + test: [/googlebot/i], + describe() { + return { + type: 'bot', + vendor: 'Google', + }; + }, + }, + + /* Huawei */ + { + test: [/huawei/i], + describe(ua) { + const model = Utils.getFirstMatch(/(can-l01)/i, ua) && 'Nova'; + const platform = { + type: PLATFORMS_MAP.mobile, + vendor: 'Huawei', + }; + if (model) { + platform.model = model; + } + return platform; + }, + }, + + /* Nexus Tablet */ + { + test: [/nexus\s*(?:7|8|9|10).*/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Nexus', + }; + }, + }, + + /* iPad */ + { + test: [/ipad/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) FxiOS(.*?)\//], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Amazon Kindle Fire */ + { + test: [/kftt build/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + model: 'Kindle Fire HD 7', + }; + }, + }, + + /* Another Amazon Tablet with Silk */ + { + test: [/silk/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + }; + }, + }, + + /* Tablet */ + { + test: [/tablet(?! 
pc)/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* iPod/iPhone */ + { + test(parser) { + const iDevice = parser.test(/ipod|iphone/i); + const likeIDevice = parser.test(/like (ipod|iphone)/i); + return iDevice && !likeIDevice; + }, + describe(ua) { + const model = Utils.getFirstMatch(/(ipod|iphone)/i, ua); + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Apple', + model, + }; + }, + }, + + /* Nexus Mobile */ + { + test: [/nexus\s*[0-6].*/i, /galaxy nexus/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Nexus', + }; + }, + }, + + /* Mobile */ + { + test: [/[^-]mobi/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* BlackBerry */ + { + test(parser) { + return parser.getBrowserName(true) === 'blackberry'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'BlackBerry', + }; + }, + }, + + /* Bada */ + { + test(parser) { + return parser.getBrowserName(true) === 'bada'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* Windows Phone */ + { + test(parser) { + return parser.getBrowserName() === 'windows phone'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Microsoft', + }; + }, + }, + + /* Android Tablet */ + { + test(parser) { + const osMajorVersion = Number(String(parser.getOSVersion()).split('.')[0]); + return parser.getOSName(true) === 'android' && (osMajorVersion >= 3); + }, + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* Android Mobile */ + { + test(parser) { + return parser.getOSName(true) === 'android'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* desktop */ + { + test(parser) { + return parser.getOSName(true) === 'macos'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + vendor: 'Apple', + }; + }, + }, + + /* Windows */ + { + test(parser) { + return parser.getOSName(true) === 'windows'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* Linux */ + { + test(parser) { + return parser.getOSName(true) === 'linux'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* PlayStation 4 */ + { + test(parser) { + return parser.getOSName(true) === 'playstation 4'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, + + /* Roku */ + { + test(parser) { + return parser.getOSName(true) === 'roku'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, + ]; + + + const osParsersList = [ + /* Roku */ + { + test: [/Roku\/DVP/], + describe(ua) { + const version = Utils.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i, ua); + return { + name: OS_MAP.Roku, + version, + }; + }, + }, + + /* Windows Phone */ + { + test: [/windows phone/i], + describe(ua) { + const version = Utils.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.WindowsPhone, + version, + }; + }, + }, + + /* Windows */ + { + test: [/windows /i], + describe(ua) { + const version = Utils.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i, ua); + const versionName = Utils.getWindowsVersionName(version); + + return { + name: OS_MAP.Windows, + version, + versionName, + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) 
FxiOS(.*?)\//], + describe(ua) { + const result = { + name: OS_MAP.iOS, + }; + const version = Utils.getSecondMatch(/(Version\/)(\d[\d.]+)/, ua); + if (version) { + result.version = version; + } + return result; + }, + }, + + /* macOS */ + { + test: [/macintosh/i], + describe(ua) { + const version = Utils.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i, ua).replace(/[_\s]/g, '.'); + const versionName = Utils.getMacOSVersionName(version); + + const os = { + name: OS_MAP.MacOS, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* iOS */ + { + test: [/(ipod|iphone|ipad)/i], + describe(ua) { + const version = Utils.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i, ua).replace(/[_\s]/g, '.'); + + return { + name: OS_MAP.iOS, + version, + }; + }, + }, + + /* Android */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const version = Utils.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i, ua); + const versionName = Utils.getAndroidVersionName(version); + const os = { + name: OS_MAP.Android, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* WebOS */ + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const version = Utils.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i, ua); + const os = { + name: OS_MAP.WebOS, + }; + + if (version && version.length) { + os.version = version; + } + return os; + }, + }, + + /* BlackBerry */ + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const version = Utils.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i, ua) + || Utils.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i, ua) + || Utils.getFirstMatch(/\bbb(\d+)/i, ua); + + return { + name: OS_MAP.BlackBerry, + version, + }; + }, + }, + + /* Bada */ + { + test: [/bada/i], + describe(ua) { + const version = Utils.getFirstMatch(/bada\/(\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Bada, + version, + }; + }, + }, + + /* Tizen */ + { + test: [/tizen/i], + describe(ua) { + const version = Utils.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Tizen, + version, + }; + }, + }, + + /* Linux */ + { + test: [/linux/i], + describe() { + return { + name: OS_MAP.Linux, + }; + }, + }, + + /* Chrome OS */ + { + test: [/CrOS/], + describe() { + return { + name: OS_MAP.ChromeOS, + }; + }, + }, + + /* Playstation 4 */ + { + test: [/PlayStation 4/], + describe(ua) { + const version = Utils.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.PlayStation4, + version, + }; + }, + }, + ]; + + /** + * The main class that arranges the whole parsing process. 
+ */ +class Parser { + /** + * Create instance of Parser + * + * @param {String} UA User-Agent string + * @param {Boolean} [skipParsing=false] parser can skip parsing in purpose of performance + * improvements if you need to make a more particular parsing + * like {@link Parser#parseBrowser} or {@link Parser#parsePlatform} + * + * @throw {Error} in case of empty UA String + * + * @constructor + */ + constructor(UA, skipParsing = false) { + if (UA === void (0) || UA === null || UA === '') { + throw new Error("UserAgent parameter can't be empty"); + } + + this._ua = UA; + + /** + * @typedef ParsedResult + * @property {Object} browser + * @property {String|undefined} [browser.name] + * Browser name, like `"Chrome"` or `"Internet Explorer"` + * @property {String|undefined} [browser.version] Browser version as a String `"12.01.45334.10"` + * @property {Object} os + * @property {String|undefined} [os.name] OS name, like `"Windows"` or `"macOS"` + * @property {String|undefined} [os.version] OS version, like `"NT 5.1"` or `"10.11.1"` + * @property {String|undefined} [os.versionName] OS name, like `"XP"` or `"High Sierra"` + * @property {Object} platform + * @property {String|undefined} [platform.type] + * platform type, can be either `"desktop"`, `"tablet"` or `"mobile"` + * @property {String|undefined} [platform.vendor] Vendor of the device, + * like `"Apple"` or `"Samsung"` + * @property {String|undefined} [platform.model] Device model, + * like `"iPhone"` or `"Kindle Fire HD 7"` + * @property {Object} engine + * @property {String|undefined} [engine.name] + * Can be any of this: `WebKit`, `Blink`, `Gecko`, `Trident`, `Presto`, `EdgeHTML` + * @property {String|undefined} [engine.version] String version of the engine + */ + this.parsedResult = {}; + + if (skipParsing !== true) { + this.parse(); + } + } + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + * + * @public + */ + getUA() { + return this._ua; + } + + /** + * Test a UA string for a regexp + * @param {RegExp} regex + * @return {Boolean} + */ + test(regex) { + return regex.test(this._ua); + } + + /** + * Get parsed browser object + * @return {Object} + */ + parseBrowser() { + this.parsedResult.browser = {}; + + const browserDescriptor = Utils.find(browserParsersList, (_browser) => { + if (typeof _browser.test === 'function') { + return _browser.test(this); + } + + if (_browser.test instanceof Array) { + return _browser.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (browserDescriptor) { + this.parsedResult.browser = browserDescriptor.describe(this.getUA()); + } + + return this.parsedResult.browser; + } + + /** + * Get parsed browser object + * @return {Object} + * + * @public + */ + getBrowser() { + if (this.parsedResult.browser) { + return this.parsedResult.browser; + } + + return this.parseBrowser(); + } + + /** + * Get browser's name + * @return {String} Browser's name or an empty string + * + * @public + */ + getBrowserName(toLowerCase) { + if (toLowerCase) { + return String(this.getBrowser().name).toLowerCase() || ''; + } + return this.getBrowser().name || ''; + } + + + /** + * Get browser's version + * @return {String} version of browser + * + * @public + */ + getBrowserVersion() { + return this.getBrowser().version; + } + + /** + * Get OS + * @return {Object} + * + * @example + * this.getOS(); + * { + * name: 'macOS', + * version: '10.11.12' + * } + */ + getOS() { + if 
(this.parsedResult.os) { + return this.parsedResult.os; + } + + return this.parseOS(); + } + + /** + * Parse OS and save it to this.parsedResult.os + * @return {*|{}} + */ + parseOS() { + this.parsedResult.os = {}; + + const os = Utils.find(osParsersList, (_os) => { + if (typeof _os.test === 'function') { + return _os.test(this); + } + + if (_os.test instanceof Array) { + return _os.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (os) { + this.parsedResult.os = os.describe(this.getUA()); + } + + return this.parsedResult.os; + } + + /** + * Get OS name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + getOSName(toLowerCase) { + const { name } = this.getOS(); + + if (toLowerCase) { + return String(name).toLowerCase() || ''; + } + + return name || ''; + } + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + getOSVersion() { + return this.getOS().version; + } + + /** + * Get parsed platform + * @return {{}} + */ + getPlatform() { + if (this.parsedResult.platform) { + return this.parsedResult.platform; + } + + return this.parsePlatform(); + } + + /** + * Get platform name + * @param {Boolean} [toLowerCase=false] + * @return {*} + */ + getPlatformType(toLowerCase = false) { + const { type } = this.getPlatform(); + + if (toLowerCase) { + return String(type).toLowerCase() || ''; + } + + return type || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parsePlatform() { + this.parsedResult.platform = {}; + + const platform = Utils.find(platformParsersList, (_platform) => { + if (typeof _platform.test === 'function') { + return _platform.test(this); + } + + if (_platform.test instanceof Array) { + return _platform.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (platform) { + this.parsedResult.platform = platform.describe(this.getUA()); + } + + return this.parsedResult.platform; + } + + /** + * Get parsed engine + * @return {{}} + */ + getEngine() { + if (this.parsedResult.engine) { + return this.parsedResult.engine; + } + + return this.parseEngine(); + } + + /** + * Get engines's name + * @return {String} Engines's name or an empty string + * + * @public + */ + getEngineName(toLowerCase) { + if (toLowerCase) { + return String(this.getEngine().name).toLowerCase() || ''; + } + return this.getEngine().name || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parseEngine() { + this.parsedResult.engine = {}; + + const engine = Utils.find(enginesParsersList, (_engine) => { + if (typeof _engine.test === 'function') { + return _engine.test(this); + } + + if (_engine.test instanceof Array) { + return _engine.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (engine) { + this.parsedResult.engine = engine.describe(this.getUA()); + } + + return this.parsedResult.engine; + } + + /** + * Parse full information about the browser + * @returns {Parser} + */ + parse() { + this.parseBrowser(); + this.parseOS(); + this.parsePlatform(); + this.parseEngine(); + + return this; + } + + /** + * Get parsed result + * @return {ParsedResult} + */ + getResult() { + return Utils.assign({}, this.parsedResult); + } + + /** + * Check if parsed browser matches certain conditions + * + * @param {Object} checkTree It's one or two layered object, + * which 
can include a platform or an OS on the first layer + * and should have browsers specs on the bottom-laying layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is no described in the checkTree object. + * + * @example + * const browser = Bowser.getParser(window.navigator.userAgent); + * if (browser.satisfies({chrome: '>118.01.1322' })) + * // or with os + * if (browser.satisfies({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.satisfies({desktop: { chrome: '>118.01.1322' } })) + */ + satisfies(checkTree) { + const platformsAndOSes = {}; + let platformsAndOSCounter = 0; + const browsers = {}; + let browsersCounter = 0; + + const allDefinitions = Object.keys(checkTree); + + allDefinitions.forEach((key) => { + const currentDefinition = checkTree[key]; + if (typeof currentDefinition === 'string') { + browsers[key] = currentDefinition; + browsersCounter += 1; + } else if (typeof currentDefinition === 'object') { + platformsAndOSes[key] = currentDefinition; + platformsAndOSCounter += 1; + } + }); + + if (platformsAndOSCounter > 0) { + const platformsAndOSNames = Object.keys(platformsAndOSes); + const OSMatchingDefinition = Utils.find(platformsAndOSNames, name => (this.isOS(name))); + + if (OSMatchingDefinition) { + const osResult = this.satisfies(platformsAndOSes[OSMatchingDefinition]); + + if (osResult !== void 0) { + return osResult; + } + } + + const platformMatchingDefinition = Utils.find( + platformsAndOSNames, + name => (this.isPlatform(name)), + ); + if (platformMatchingDefinition) { + const platformResult = this.satisfies(platformsAndOSes[platformMatchingDefinition]); + + if (platformResult !== void 0) { + return platformResult; + } + } + } + + if (browsersCounter > 0) { + const browserNames = Object.keys(browsers); + const matchingDefinition = Utils.find(browserNames, name => (this.isBrowser(name, true))); + + if (matchingDefinition !== void 0) { + return this.compareVersion(browsers[matchingDefinition]); + } + } + + return undefined; + } + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {boolean} + */ + isBrowser(browserName, includingAlias = false) { + const defaultBrowserName = this.getBrowserName().toLowerCase(); + let browserNameLower = browserName.toLowerCase(); + const alias = Utils.getBrowserTypeByAlias(browserNameLower); + + if (includingAlias && alias) { + browserNameLower = alias.toLowerCase(); + } + return browserNameLower === defaultBrowserName; + } + + compareVersion(version) { + let expectedResults = [0]; + let comparableVersion = version; + let isLoose = false; + + const currentBrowserVersion = this.getBrowserVersion(); + + if (typeof currentBrowserVersion !== 'string') { + return void 0; + } + + if (version[0] === '>' || version[0] === '<') { + comparableVersion = version.substr(1); + if (version[1] === '=') { + isLoose = true; + comparableVersion = version.substr(2); + } else { + expectedResults = []; + } + if (version[0] === '>') { + expectedResults.push(1); + } else { + expectedResults.push(-1); + } + } else if (version[0] === '=') { + comparableVersion = version.substr(1); + } else if (version[0] === '~') { + isLoose = true; + comparableVersion = version.substr(1); + } + + return expectedResults.indexOf( + Utils.compareVersions(currentBrowserVersion, 
comparableVersion, isLoose),
+    ) > -1;
+  }
+
+  isOS(osName) {
+    return this.getOSName(true) === String(osName).toLowerCase();
+  }
+
+  isPlatform(platformType) {
+    return this.getPlatformType(true) === String(platformType).toLowerCase();
+  }
+
+  isEngine(engineName) {
+    return this.getEngineName(true) === String(engineName).toLowerCase();
+  }
+
+  /**
+   * Is anything? Check if the browser is called "anything",
+   * the OS called "anything" or the platform called "anything"
+   * @param {String} anything
+   * @param [includingAlias=false] The flag showing whether alias will be included into comparison
+   * @returns {Boolean}
+   */
+  is(anything, includingAlias = false) {
+    return this.isBrowser(anything, includingAlias) || this.isOS(anything)
+      || this.isPlatform(anything);
+  }
+
+  /**
+   * Check if any of the given values satisfies this.is(anything)
+   * @param {String[]} anythings
+   * @returns {Boolean}
+   */
+  some(anythings = []) {
+    return anythings.some(anything => this.is(anything));
+  }
+}
+
+/**
+ * Bowser class.
+ * Keep it as simple as it can be.
+ * It's supposed to work with collections of {@link Parser} instances
+ * rather than solve one-instance problems.
+ * All the one-instance stuff is located in the Parser class.
+ *
+ * @class
+ * @classdesc Bowser is a static object that provides an API to the Parsers
+ * @hideconstructor
+ */
+export class Bowser {
+  /**
+   * Creates a {@link Parser} instance
+   *
+   * @param {String} UA UserAgent string
+   * @param {Boolean} [skipParsing=false] Will make the Parser postpone parsing until you ask it
+   * explicitly. Same as `skipParsing` for {@link Parser}.
+   * @returns {Parser}
+   * @throws {Error} when UA is not a String
+   *
+   * @example
+   * const parser = Bowser.getParser(window.navigator.userAgent);
+   * const result = parser.getResult();
+   */
+  static getParser(UA, skipParsing = false) {
+    if (typeof UA !== 'string') {
+      throw new Error('UserAgent should be a string');
+    }
+    return new Parser(UA, skipParsing);
+  }
+
+  /**
+   * Creates a {@link Parser} instance and runs {@link Parser.getResult} immediately
+   *
+   * @param UA
+   * @return {ParsedResult}
+   *
+   * @example
+   * const result = Bowser.parse(window.navigator.userAgent);
+   */
+  static parse(UA) {
+    return (new Parser(UA)).getResult();
+  }
+
+  static get BROWSER_MAP() {
+    return BROWSER_MAP;
+  }
+
+  static get ENGINE_MAP() {
+    return ENGINE_MAP;
+  }
+
+  static get OS_MAP() {
+    return OS_MAP;
+  }
+
+  static get PLATFORMS_MAP() {
+    return PLATFORMS_MAP;
+  }
+}
+
\ No newline at end of file
diff --git a/sdk/ng-sdk-js/js/browser.js b/sdk/ng-sdk-js/js/browser.js
new file mode 100644
index 0000000..62b1b20
--- /dev/null
+++ b/sdk/ng-sdk-js/js/browser.js
@@ -0,0 +1,136 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
+
+let ls;
+let ss;
+let no_local_storage = false;
+
+(async () => {
+    try {
+        ls = localStorage;
+        ss = sessionStorage;
+
+        try {
+            // newer Chrome API: storage handles are returned by requestStorageAccess
+            let ret = await document.requestStorageAccess({ localStorage: true, sessionStorage: true });
+            ls = ret.localStorage;
+            ss = ret.sessionStorage;
+            console.log("REQUEST STORAGE ACCESS GRANTED by chrome");
+        }
+        catch(e) {
+            console.warn("requestStorageAccess of chrome failed. falling back to previous api", e);
+            try {
+                await document.requestStorageAccess();
+                localStorage; // touching it throws if access was not actually granted
+                console.log("REQUEST STORAGE ACCESS GRANTED");
+            } catch (e) {
+                console.error("REQUEST STORAGE ACCESS DENIED", e);
+                no_local_storage = true;
+            }
+        }
+
+    } catch (e) {
+        no_local_storage = true;
+        console.log("no access to localStorage");
+    }
+})();
+
+export function client_details() {
+    return window.navigator.userAgent;
+}
+
+export function client_details2(obj, version) {
+    //console.log("version",version)
+    obj.browser.appVersion = navigator?.appVersion;
+    obj.browser.arch = navigator?.platform;
+    obj.browser.vendor = navigator?.vendor;
+    obj.browser.ua = window.navigator.userAgent;
+    obj.engine.sdk = version;
+    return JSON.stringify(obj);
+}
+
+export function session_save(key, value) {
+    try {
+        ss.setItem(key, value);
+    } catch(e) {
+        console.error(e);
+        return convert_error(e.message);
+    }
+}
+
+export function is_browser() {
+    return true;
+}
+
+function convert_error(e) {
+    if (
+        e == "The operation is insecure." ||
+        e ==
+        "Failed to read the 'sessionStorage' property from 'Window': Access is denied for this document." ||
+        e ==
+        "Failed to read the 'localStorage' property from 'Window': Access is denied for this document."
+    ) {
+        return "Please allow this website to store cookies, session and local storage.";
+    } else {
+        return e;
+    }
+}
+
+export function session_get(key) {
+    try {
+        return ss.getItem(key);
+    } catch(e) {
+        console.error(e);
+    }
+}
+
+export function session_remove(key) {
+    try {
+        return ss.removeItem(key);
+    } catch(e) {
+        console.error(e);
+    }
+}
+
+export function local_save(key, value) {
+    try {
+        ls.setItem(key, value);
+    } catch(e) {
+        console.error(e);
+        return convert_error(e.message);
+    }
+}
+
+export function storage_clear() {
+    try {
+        ls.clear();
+        ss.clear();
+    } catch(e) {
+        console.error(e);
+    }
+}
+
+export function local_get(key) {
+    try {
+        return ls.getItem(key);
+    } catch(e) {
+        console.error(e);
+    }
+}
\ No newline at end of file
diff --git a/sdk/ng-sdk-js/js/node.js b/sdk/ng-sdk-js/js/node.js
new file mode 100644
index 0000000..7012672
--- /dev/null
+++ b/sdk/ng-sdk-js/js/node.js
@@ -0,0 +1,207 @@
+// Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers
+// All rights reserved.
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE2 or http://www.apache.org/licenses/LICENSE-2.0>
+// or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
+// at your option. All files in the project carrying such
+// notice may not be copied, modified, or distributed except
+// according to those terms.
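+
+// Node.js counterpart of browser.js: it backs the same extern "C" imports that
+// src/lib.rs declares with #[wasm_bindgen(module = "/js/node.js")]. It detects
+// the host OS and CPU architecture for client_details(), reads the headless-mode
+// configuration from the NG_HEADLESS_* environment variables, and streams local
+// files for upload. The session/local storage functions at the bottom of this
+// file are deliberate no-ops: web storage has no server-side equivalent.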
+
+const macNameMap = new Map([
+    [23, ['Sonoma', '14']],
+    [22, ['Ventura', '13']],
+    [21, ['Monterey', '12']],
+    [20, ['Big Sur', '11']],
+    [19, ['Catalina', '10.15']],
+    [18, ['Mojave', '10.14']],
+    [17, ['High Sierra', '10.13']],
+    [16, ['Sierra', '10.12']],
+    [15, ['El Capitan', '10.11']],
+    [14, ['Yosemite', '10.10']],
+    [13, ['Mavericks', '10.9']],
+    [12, ['Mountain Lion', '10.8']],
+    [11, ['Lion', '10.7']],
+    [10, ['Snow Leopard', '10.6']],
+    [9, ['Leopard', '10.5']],
+    [8, ['Tiger', '10.4']],
+    [7, ['Panther', '10.3']],
+    [6, ['Jaguar', '10.2']],
+    [5, ['Puma', '10.1']],
+]);
+
+function macosRelease(release) {
+    const split = release.split('.');
+    const rel = Number(split[0]);
+    let [name, version] = macNameMap.get(rel) || ['Unknown', release];
+    if (name != 'Unknown') {
+        if (split.length > 1) version += '.' + split[1];
+        //if (split.length>2 && split[2]) version += '.'+split[2];
+    }
+    return {
+        name: "macOS",
+        versionName: name,
+        version,
+        release
+    };
+}
+
+const winNames = new Map([
+    ['10.0.2', '11'], // It's unclear whether future Windows 11 versions will use this version scheme: https://github.com/sindresorhus/windows-release/pull/26/files#r744945281
+    ['10.0.22', 'Server 2022'],
+    ['10.0', '10 or Server 2016/2019'],
+    ['6.3', '8.1 or Server 2012 R2'],
+    ['6.2', '8 or Server 2012'],
+    ['6.1', '7 or Server 2008 R2'],
+    ['6.0', 'Vista or Server 2008'],
+    ['5.2', 'Server 2003'],
+    ['5.1', 'XP'],
+    ['5.0', '2000'],
+    ['4.90', 'ME'],
+    ['4.10', '98'],
+    ['4.03', '95'],
+    ['4.00', '95'],
+    ['3.00', 'NT'],
+]);
+
+function windowsRelease(release) {
+    // guard against release strings that do not match the expected pattern
+    const version = /(\d+\.\d+)(?:\.(\d+))?/.exec(release) || [];
+
+    let ver = version[1] || '';
+    const build = version[2] || '';
+
+    if (ver.startsWith('3.')) {
+        ver = '3.00';
+    }
+    if (ver === '10.0' && build.startsWith('20348')) {
+        // Windows Server 2022
+        ver = '10.0.22';
+    } else if (ver === '10.0' && build.startsWith('2')) {
+        // Windows 11
+        ver = '10.0.2';
+    }
+
+    return {
+        name: "Windows",
+        versionName: winNames.get(ver),
+        version: build,
+        release
+    };
+}
+
+function osName(platform, release) {
+    if (platform === 'darwin') {
+        return release ? macosRelease(release) : {name: "macOS"};
+    }
+
+    if (platform === 'linux') {
+        const id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : '';
+        return {name: 'Linux', version: id || release, release};
+    }
+
+    if (platform === 'win32') {
+        return release ? windowsRelease(release) : {name: "Windows"};
+    }
+    if (platform === 'aix') { platform = 'AIX'; }
+    else if (platform === 'freebsd') { platform = 'FreeBSD'; }
+    else if (platform === 'openbsd') { platform = 'OpenBSD'; }
+    else if (platform === 'android') { platform = 'Android'; }
+    else if (platform === 'sunos') { platform = 'SunOS'; }
+    return {name: platform, version: release};
+}
+
+module.exports.version = function () {
+    return require('../../../package.json').version;
+}
+
+module.exports.get_env_vars = function () {
+    return {
+        server_addr: process.env.NG_HEADLESS_SERVER_ADDR,
+        server_peer_id: process.env.NG_HEADLESS_SERVER_PEER_ID,
+        client_peer_key: process.env.NG_HEADLESS_CLIENT_PEER_KEY,
+        admin_user_key: process.env.NG_HEADLESS_ADMIN_USER_KEY
+    };
+}
+
+const path = require('path');
+const fs = require('fs');
+
+module.exports.upload_file = async (filename, callback, end) => {
+    // read the file in roughly 1MB chunks and hand each one to the wasm callback
+    let readStream = fs.createReadStream(filename, { highWaterMark: 1048564 });
+
+    return new Promise((resolve, reject) => {
+        readStream.on('data', async function(chunk) {
+            try {
+                await callback(chunk);
+            }
+            catch (e) {
+                readStream.destroy();
+                reject(e);
+            }
+        }).on('end', async function() {
+            try {
+                let reference = await end(path.basename(filename));
+                resolve(reference);
+            } catch (e) {
+                reject(e);
+            }
+        }).on('error', function(e) {
+            reject(e.message);
+        });
+    })
+}
+
+
+module.exports.client_details = function () {
+    const process = require('process');
+    const osnode = require('os');
+    let arch = osnode.machine ? osnode.machine() : process.arch;
+    if (arch == "ia32") { arch = "x86"; }
+    else if (arch == "x64") { arch = "x86_64"; }
+    else if (arch == "i386") { arch = "x86"; }
+    else if (arch == "i686") { arch = "x86"; }
+    else if (arch == "amd64") { arch = "x86_64"; }
+    else if (arch == "arm64") { arch = "aarch64"; }
+    let os = osName(osnode.platform(), osnode.release());
+    if (osnode.version) os.uname = osnode.version();
+    os.type = osnode.type();
+
+    return JSON.stringify({
+        platform: { type: "server", arch },
+        os,
+        engine: {
+            name: "nodejs",
+            version: process.version,
+            arch: process.arch,
+            machine: osnode.machine ?
osnode.machine() : undefined, + versions: process.versions + } + }); +}; + + +module.exports.is_browser = function() { + return false; +} + + +module.exports.session_save = function(key,value) { + +} + +module.exports.storage_clear = function() { + +} + +module.exports.session_get = function(key) { + +} + +module.exports.session_remove = function(key) { + +} + +module.exports.local_save = function(key,value) { + +} + +module.exports.local_get = function(key) { + +} diff --git a/sdk/ng-sdk-js/prepare-node.js b/sdk/ng-sdk-js/prepare-node.js new file mode 100644 index 0000000..3bb7b83 --- /dev/null +++ b/sdk/ng-sdk-js/prepare-node.js @@ -0,0 +1,24 @@ +const fs = require('fs'); + +const PATH = './pkg-node/package.json'; +const PATH_README = './pkg-node/README.md'; + +const pkg_json = fs.readFileSync(PATH); +let pkg = JSON.parse(pkg_json) +pkg.name = "nextgraph"; +pkg.version = "0.1.2"; +pkg.description = "nodeJS SDK of NextGraph"; +pkg.files.push("ng_sdk_js_bg.wasm.d.ts"); +pkg.files.push("snippets/**/*.js"); +fs.writeFileSync(PATH, JSON.stringify(pkg, null, 2), 'utf8'); + +fs.readFile(PATH_README, 'utf8', function (err,data) { + if (err) { + return console.log(err); + } + var result = data.replace(/ ng-sdk-js/g, ' nextgraph'); + + fs.writeFile(PATH_README, result, 'utf8', function (err) { + if (err) return console.log(err); + }); + }); \ No newline at end of file diff --git a/sdk/ng-sdk-js/prepare-web.js b/sdk/ng-sdk-js/prepare-web.js new file mode 100644 index 0000000..80432e0 --- /dev/null +++ b/sdk/ng-sdk-js/prepare-web.js @@ -0,0 +1,11 @@ +const fs = require('fs'); + +const PATH = './pkg/package.json'; +const PATH_README = './pkg/README.md'; + +const pkg_json = fs.readFileSync(PATH); +let pkg = JSON.parse(pkg_json) +pkg.name = "@nextgraph-monorepo/ng-sdk-js"; +pkg.files.push("ng_sdk_js_bg.wasm.d.ts"); +pkg.files.push("snippets/**/*.js"); +fs.writeFileSync(PATH, JSON.stringify(pkg, null, 2), 'utf8'); diff --git a/sdk/ng-sdk-js/src/lib.rs b/sdk/ng-sdk-js/src/lib.rs new file mode 100644 index 0000000..aaddf64 --- /dev/null +++ b/sdk/ng-sdk-js/src/lib.rs @@ -0,0 +1,2088 @@ +/* + * Copyright (c) 2022-2025 Niko Bonnieure, Par le Peuple, NextGraph.org developers + * All rights reserved. + * Licensed under the Apache License, Version 2.0 + * + * or the MIT license , + * at your option. All files in the project carrying such + * notice may not be copied, modified, or distributed except + * according to those terms. 
+*/ + +#![cfg(target_arch = "wasm32")] +#![allow(unused_imports)] + +mod model; + +use std::collections::HashMap; +use std::net::IpAddr; +use std::str::FromStr; +use std::sync::Arc; + +use nextgraph::net::app_protocol::AppRequest; +use ng_wallet::types::SensitiveWallet; +use once_cell::sync::Lazy; +use serde::{Deserialize, Serialize}; +// use js_sys::Reflect; +use async_std::stream::StreamExt; +use futures::channel::mpsc; +use futures::SinkExt; +use js_sys::{Array, Object}; +use oxrdf::Triple; +use sys_locale::get_locales; +use wasm_bindgen::prelude::*; +use wasm_bindgen_futures::future_to_promise; +use wasm_bindgen_futures::JsFuture; + +use ng_repo::errors::{NgError, ProtocolError}; +use ng_repo::log::*; +use ng_repo::types::*; +use ng_repo::utils::{decode_key, decode_priv_key}; + +use ng_net::app_protocol::*; +use ng_net::broker::*; +use ng_net::types::{BindAddress, ClientInfo, ClientInfoV0, ClientType, CreateAccountBSP, IP, BootstrapContentV0, InboxPost}; +use ng_net::utils::{ + decode_invitation_string, parse_ip_and_port_for, retrieve_local_bootstrap, retrieve_local_url, + spawn_and_log_error, Receiver, ResultSend, Sender, +}; +use ng_net::{actor::*, actors::admin::*}; +use ng_net::{WS_PORT, WS_PORT_REVERSE_PROXY}; + +use ng_client_ws::remote_ws_wasm::ConnectionWebSocket; + +use ng_wallet::types::*; +use ng_wallet::*; + +use nextgraph::local_broker::*; +use nextgraph::verifier::CancelFn; + + +use crate::model::*; + +#[wasm_bindgen] +pub async fn locales() -> Result { + Ok(serde_wasm_bindgen::to_value(&get_locales().collect::>()).unwrap()) +} + +#[wasm_bindgen] +pub async fn get_device_name() -> Result { + Ok(serde_wasm_bindgen::to_value(&nextgraph::get_device_name()).unwrap()) +} + +#[wasm_bindgen] +pub async fn bootstrap_to_iframe_msgs(bootstrap: JsValue) -> Result { + let content: BootstrapContentV0 = serde_wasm_bindgen::from_value::(bootstrap) + .map_err(|_| "Invalid BootstrapContentV0".to_string())?; + let iframe_msg = content.to_iframe_msgs(); + Ok(serde_wasm_bindgen::to_value(&iframe_msg).unwrap()) +} + +#[wasm_bindgen] +pub async fn get_bootstrap_iframe_msgs_for_brokers(brokers: JsValue) -> Result { + let brokers = serde_wasm_bindgen::from_value::>>(brokers) + .map_err(|_| "Invalid brokers".to_string())?; + let iframe_msgs = SensitiveWallet::get_bootstrap_iframe_msgs(brokers); + Ok(serde_wasm_bindgen::to_value(&iframe_msgs).unwrap()) +} + +#[wasm_bindgen] +pub async fn get_local_bootstrap(location: String, invite: JsValue) -> JsValue { + let res = retrieve_local_bootstrap(location, invite.as_string(), false).await; + if res.is_some() { + serde_wasm_bindgen::to_value(&res.unwrap()).unwrap() + } else { + JsValue::FALSE + } +} + +#[wasm_bindgen] +pub async fn get_local_bootstrap_and_domain(location: String) -> JsValue { + let res = retrieve_local_bootstrap(location, None, false).await; + if res.is_some() { + let domain = res.as_ref().unwrap().get_domain(); + serde_wasm_bindgen::to_value(&(res.unwrap(), domain)).unwrap() + } else { + serde_wasm_bindgen::to_value(&(false, false)).unwrap() + } +} + +#[wasm_bindgen] +pub async fn get_local_bootstrap_with_public( + location: String, + invite: JsValue, + must_be_public: bool, +) -> JsValue { + let res = retrieve_local_bootstrap(location, invite.as_string(), must_be_public).await; + if res.is_some() { + serde_wasm_bindgen::to_value(&res.unwrap()).unwrap() + } else { + JsValue::FALSE + } +} + +#[wasm_bindgen] +pub async fn decode_invitation(invite: String) -> JsValue { + let res = decode_invitation_string(invite); + if res.is_some() { + 
serde_wasm_bindgen::to_value(&res.unwrap()).unwrap() + } else { + JsValue::FALSE + } +} + +#[wasm_bindgen] +pub async fn get_local_url(location: String) -> JsValue { + let res = retrieve_local_url(location).await; + if res.is_some() { + serde_wasm_bindgen::to_value(&res.unwrap()).unwrap() + } else { + JsValue::FALSE + } +} + +#[wasm_bindgen] +pub async fn get_ngnet_url_of_invitation(invitation_string: String) -> JsValue { + let res = decode_invitation_string(invitation_string); + if res.is_some() { + serde_wasm_bindgen::to_value(&res.unwrap().get_urls()[0]).unwrap() + } else { + JsValue::FALSE + } +} + +#[wasm_bindgen] +pub fn wallet_gen_shuffle_for_pazzle_opening(pazzle_length: u8) -> JsValue { + let res = gen_shuffle_for_pazzle_opening(pazzle_length); + serde_wasm_bindgen::to_value(&res).unwrap() +} + +#[wasm_bindgen] +pub fn wallet_gen_shuffle_for_pin() -> Vec { + gen_shuffle_for_pin() +} + +#[wasm_bindgen] +pub fn privkey_to_string(privkey: JsValue) -> Result { + let p = serde_wasm_bindgen::from_value::(privkey) + .map_err(|_| "Deserialization error of privkey")?; + Ok(format!("{p}")) +} + +#[wasm_bindgen] +pub fn wallet_open_with_pazzle( + wallet: JsValue, + pazzle: Vec, + pin: JsValue, +) -> Result { + let encrypted_wallet = serde_wasm_bindgen::from_value::(wallet) + .map_err(|_| "Deserialization error of wallet")?; + let pin = serde_wasm_bindgen::from_value::<[u8; 4]>(pin) + .map_err(|_| "Deserialization error of pin")?; + let res = nextgraph::local_broker::wallet_open_with_pazzle(&encrypted_wallet, pazzle, pin); + match res { + Ok(r) => Ok(r + .serialize(&serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true)) + .unwrap()), + Err(e) => Err(e.to_string().into()), + } +} + +#[wasm_bindgen] +pub fn wallet_open_with_mnemonic( + wallet: JsValue, + mnemonic: JsValue, + pin: JsValue, +) -> Result { + let encrypted_wallet = serde_wasm_bindgen::from_value::(wallet) + .map_err(|_| "Deserialization error of wallet")?; + let pin = serde_wasm_bindgen::from_value::<[u8; 4]>(pin) + .map_err(|_| "Deserialization error of pin")?; + let mnemonic = serde_wasm_bindgen::from_value::<[u16; 12]>(mnemonic) + .map_err(|_| "Deserialization error of mnemonic")?; + let res = ng_wallet::open_wallet_with_mnemonic(&encrypted_wallet, mnemonic, pin); + match res { + Ok(r) => Ok(r + .serialize(&serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true)) + .unwrap()), + Err(e) => Err(e.to_string().into()), + } +} + +#[wasm_bindgen] +pub fn wallet_open_with_mnemonic_words( + wallet: JsValue, + mnemonic_words: Array, + pin: JsValue, +) -> Result { + let encrypted_wallet = serde_wasm_bindgen::from_value::(wallet) + .map_err(|_| "Deserialization error of wallet")?; + let pin = serde_wasm_bindgen::from_value::<[u8; 4]>(pin) + .map_err(|_| "Deserialization error of pin")?; + let mnemonic_vec: Vec = mnemonic_words + .iter() + .map(|word| word.as_string().unwrap()) + .collect(); + + let res = nextgraph::local_broker::wallet_open_with_mnemonic_words( + &encrypted_wallet, + &mnemonic_vec, + pin, + ); + match res { + Ok(r) => Ok(r + .serialize(&serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true)) + .unwrap()), + Err(e) => Err(e.to_string().into()), + } +} + +#[wasm_bindgen] +pub fn wallet_update(wallet_id: JsValue, operations: JsValue) -> Result { + let _wallet = serde_wasm_bindgen::from_value::(wallet_id) + .map_err(|_| "Deserialization error of WalletId")?; + let _operations = serde_wasm_bindgen::from_value::>(operations) + .map_err(|_| "Deserialization error of operations")?; + 
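+    // Not implemented yet: the two conversions above only validate the JS-side
+    // arguments (the wallet id and the list of operations); the update logic
+    // itself is still pending, as the commented-out result handling below shows.
+    // Calling this function currently panics.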
unimplemented!(); + // match res { + // Ok(r) => Ok(serde_wasm_bindgen::to_value(&r).unwrap()), + // Err(e) => Err(e.to_string().into()), + // } +} + +#[wasm_bindgen] +pub async fn get_wallets() -> Result { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + + let res = wallets_get_all().await.map_err(|e| { + log_err!("wallets_get_all error {}", e.to_string()); + }); + if res.is_ok() { + return Ok(serde_wasm_bindgen::to_value(&res.unwrap()).unwrap()); + } + Ok(JsValue::UNDEFINED) +} + +#[wasm_bindgen] +pub async fn session_start(wallet_name: String, user_id: JsValue) -> Result { + let user_id = serde_wasm_bindgen::from_value::(user_id) + .map_err(|_| "Deserialization error of user_id")?; + + let config = SessionConfig::new_save(&user_id, &wallet_name); + let res: SessionInfoString = nextgraph::local_broker::session_start(config) + .await + .map_err(|e: NgError| e.to_string())? + .into(); + + Ok(serde_wasm_bindgen::to_value(&res).unwrap()) +} + +#[wasm_bindgen] +pub async fn session_in_memory_start( + wallet_name: String, + user_id: JsValue, +) -> Result { + let user_id = serde_wasm_bindgen::from_value::(user_id) + .map_err(|_| "Deserialization error of user_id")?; + + let config = SessionConfig::new_in_memory(&user_id, &wallet_name); + let res: SessionInfoString = nextgraph::local_broker::session_start(config) + .await + .map_err(|e: NgError| e.to_string())? + .into(); + + Ok(serde_wasm_bindgen::to_value(&res).unwrap()) +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen] +pub async fn session_headless_start(user_id: String) -> Result { + let user_id = decode_key(&user_id).map_err(|_| "Invalid user_id")?; + + let config = SessionConfig::new_headless(user_id); + let res: SessionInfoString = nextgraph::local_broker::session_start(config) + .await + .map_err(|e: NgError| e.to_string())? + .into(); + + Ok(serde_wasm_bindgen::to_value(&res).unwrap()) +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen] +pub async fn session_headless_stop(session_id: JsValue, force_close: bool) -> Result<(), String> { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let _ = nextgraph::local_broker::session_headless_stop(session_id, force_close) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(()) +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen] +pub async fn sparql_query( + session_id: JsValue, + sparql: String, + base: JsValue, + nuri: JsValue, +) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + let nuri = if nuri.is_string() { + NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())? 
+ } else { + NuriV0::new_entire_user_site() + }; + + let base_opt = if base.is_string() { + Some(base.as_string().unwrap()) + } else { + None + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_read_query(), + nuri, + payload: Some(AppRequestPayload::new_sparql_query(sparql, base_opt)), + session_id, + }); + + let response = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = response; + match res { + AppResponseV0::False => return Ok(JsValue::FALSE), + AppResponseV0::True => return Ok(JsValue::TRUE), + AppResponseV0::Graph(graph) => { + let triples: Vec = serde_bare::from_slice(&graph) + .map_err(|_| "Deserialization error of graph".to_string())?; + + let results = Array::new(); + for triple in triples { + results.push(&JsQuad::from(triple).into()); + } + Ok(results.into()) + } + AppResponseV0::QueryResult(buf) => { + let string = String::from_utf8(buf) + .map_err(|_| "Deserialization error of JSON QueryResult String".to_string())?; + + js_sys::JSON::parse(&string) + } + AppResponseV0::Error(e) => Err(e.to_string().into()), + _ => Err("invalid response".to_string().into()), + } +} + +#[wasm_bindgen] +pub async fn discrete_update( + session_id: JsValue, + update: JsValue, + heads: Array, + crdt: String, + nuri: String, +) -> Result<(), String> { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + let nuri = NuriV0::new_from(&nuri).map_err(|e| e.to_string())?; + let mut head_strings = Vec::with_capacity(heads.length() as usize); + for head in heads.iter() { + if let Some(s) = head.as_string() { + head_strings.push(s) + } else { + return Err("Invalid HEADS".to_string()); + } + } + let update: serde_bytes::ByteBuf = + serde_wasm_bindgen::from_value::(update) + .map_err(|_| "Deserialization error of update".to_string())?; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_update(), + nuri, + payload: Some( + AppRequestPayload::new_discrete_update(head_strings, crdt, update.into_vec()) + .map_err(|e| format!("Deserialization error of heads: {e}"))?, + ), + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + if let AppResponse::V0(AppResponseV0::Error(e)) = res { + Err(e) + } else { + Ok(()) + } +} + +#[wasm_bindgen] +pub async fn sparql_update( + session_id: JsValue, + sparql: String, + nuri: JsValue, +) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let (nuri, base) = if nuri.is_string() { + let n = nuri.as_string().unwrap(); + let nuri = NuriV0::new_from(&n).map_err(|e| e.to_string())?; + let b = nuri.repo(); + (nuri, Some(b)) + } else { + (NuriV0::new_private_store_target(), None) + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_write_query(), + nuri, + payload: Some(AppRequestPayload::new_sparql_query(sparql, base)), + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + match res { + AppResponse::V0(AppResponseV0::Error(e)) => Err(e), + AppResponse::V0(AppResponseV0::Commits(commits)) => Ok(serde_wasm_bindgen::to_value(&commits).unwrap()), + _ => Err(NgError::InvalidResponse.to_string()) + } +} + +#[wasm_bindgen] +pub async fn update_header( + session_id: JsValue, + nuri: String, + title: JsValue, + 
about: JsValue, +) -> Result<(), String> { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let nuri = NuriV0::new_from(&nuri).map_err(|e| e.to_string())?; + + let title = if title.is_string() { + Some(title.as_string().unwrap()) + } else { + None + }; + + let about = if about.is_string() { + Some(about.as_string().unwrap()) + } else { + None + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_header(), + nuri, + payload: Some(AppRequestPayload::new_header(title, about)), + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + if let AppResponse::V0(AppResponseV0::Error(e)) = res { + Err(e) + } else { + Ok(()) + } +} + +#[wasm_bindgen] +pub async fn fetch_header(session_id: JsValue, nuri: String) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let nuri = NuriV0::new_from(&nuri).map_err(|e| e.to_string())?; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_fetch_header(), + nuri, + payload: None, + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + match res { + AppResponse::V0(AppResponseV0::Error(e)) => Err(e), + AppResponse::V0(AppResponseV0::Header(h)) => Ok(serde_wasm_bindgen::to_value(&h).unwrap()), + _ => Err("invalid response".to_string()), + } +} + +#[cfg(not(wasmpack_target = "nodejs"))] +#[wasm_bindgen] +pub async fn sparql_query( + session_id: JsValue, + sparql: String, + base: JsValue, + nuri: JsValue, +) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let nuri = if nuri.is_string() { + NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())? 
+ } else { + NuriV0::new_entire_user_site() + }; + let base_opt = if base.is_string() { + Some(base.as_string().unwrap()) + } else { + None + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_read_query(), + nuri, + payload: Some(AppRequestPayload::new_sparql_query(sparql, base_opt)), + session_id, + }); + + let response = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = response; + match res { + AppResponseV0::False => return Ok(JsValue::FALSE), + AppResponseV0::True => return Ok(JsValue::TRUE), + AppResponseV0::Graph(graph) => { + let triples: Vec = serde_bare::from_slice(&graph) + .map_err(|_| "Deserialization error of Vec".to_string())?; + + Ok(JsValue::from( + triples + .into_iter() + .map(|x| JsValue::from_str(&x.to_string())) + .collect::(), + )) + } + AppResponseV0::QueryResult(buf) => { + let string = String::from_utf8(buf) + .map_err(|_| "Deserialization error of JSON QueryResult String".to_string())?; + js_sys::JSON::parse(&string) + } + AppResponseV0::Error(e) => Err(e.to_string().into()), + _ => Err("invalid response".to_string().into()), + } +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen] +pub async fn rdf_dump(session_id: JsValue) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_rdf_dump(), + nuri: NuriV0::new_entire_user_site(), + payload: None, + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = res; + match res { + AppResponseV0::Text(s) => Ok(s), + _ => Err("invalid response".to_string()), + } +} + +/// from_profile_nuri = did:ng:a or did:ng:b +/// query_nuri = did:ng:o:c:k +/// contacts = did:ng:d:c or a sparql query +#[wasm_bindgen] +pub async fn social_query_start( + session_id: JsValue, + from_profile_nuri: String, + query_nuri: String, + contacts: String, + degree: JsValue, + ) -> Result<(), String> { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + let degree: u16 = serde_wasm_bindgen::from_value::(degree) + .map_err(|_| "Invalid degree".to_string())?; + + let query = NuriV0::new_from_commit(&query_nuri).map_err(|e| format!("Invalid query_nuri {e}"))?; + + let from_profile = match from_profile_nuri.as_str() { + "did:ng:a" => NuriV0::new_public_store_target(), + "did:ng:b" => NuriV0::new_protected_store_target(), + _ => return Err("Invalid from_profile_nuri".to_string()) + }; + + if ! 
(contacts == "did:ng:d:c" || contacts.starts_with("SELECT")) { return Err("Invalid contacts".to_string()); } + + let mut request = AppRequest::social_query_start(from_profile, query, contacts, degree); + request.set_session_id(session_id); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = res; + match res { + AppResponseV0::Ok => Ok(()), + AppResponseV0::Error(e) => Err(e.to_string()), + _ => Err("invalid response".to_string()), + } +} + +#[wasm_bindgen] +pub async fn branch_history(session_id: JsValue, nuri: JsValue) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let nuri = if nuri.is_string() { + NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())? + } else { + NuriV0::new_private_store_target() + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_history(), + nuri, + payload: None, + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = res; + //log_debug!("{:?}", res); + match res { + AppResponseV0::History(s) => Ok(serde_wasm_bindgen::to_value(&s.to_js()).unwrap()), + _ => Err("invalid response".to_string()), + } +} + +#[wasm_bindgen] +pub async fn signature_status(session_id: JsValue, nuri: JsValue) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let nuri = if nuri.is_string() { + NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())? + } else { + NuriV0::new_private_store_target() + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_signature_status(), + nuri, + payload: None, + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = res; + //log_debug!("{:?}", res); + match res { + AppResponseV0::SignatureStatus(s) => Ok(serde_wasm_bindgen::to_value(&s).unwrap()), + _ => Err("invalid response".to_string()), + } +} + +#[wasm_bindgen] +pub async fn signed_snapshot_request( + session_id: JsValue, + nuri: JsValue, +) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let nuri = if nuri.is_string() { + NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())? + } else { + NuriV0::new_private_store_target() + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_signed_snapshot_request(), + nuri, + payload: None, + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = res; + //log_debug!("{:?}", res); + match res { + AppResponseV0::True => Ok(JsValue::TRUE), + AppResponseV0::False => Ok(JsValue::FALSE), + AppResponseV0::Error(e) => Err(e), + _ => Err("invalid response".to_string()), + } +} + +#[wasm_bindgen] +pub async fn signature_request(session_id: JsValue, nuri: JsValue) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Invalid session_id".to_string())?; + + let nuri = if nuri.is_string() { + NuriV0::new_from(&nuri.as_string().unwrap()).map_err(|e| e.to_string())? 
+ } else { + NuriV0::new_private_store_target() + }; + + let request = AppRequest::V0(AppRequestV0 { + command: AppRequestCommandV0::new_signature_request(), + nuri, + payload: None, + session_id, + }); + + let res = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let AppResponse::V0(res) = res; + //log_debug!("{:?}", res); + match res { + AppResponseV0::True => Ok(JsValue::TRUE), + AppResponseV0::False => Ok(JsValue::FALSE), + AppResponseV0::Error(e) => Err(e), + _ => Err("invalid response".to_string()), + } +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen] +pub async fn admin_create_user(config: JsValue) -> Result { + let config = HeadLessConfigStrings::load(config)?; + let admin_user_key = config + .admin_user_key + .ok_or("No admin_user_key found in config nor env var.".to_string())?; + + let res = nextgraph::local_broker::admin_create_user( + config.server_peer_id, + admin_user_key, + config.server_addr, + ) + .await + .map_err(|e: ProtocolError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&res.to_string()).unwrap()) +} + +#[wasm_bindgen] +pub async fn session_start_remote( + wallet_name: String, + user_id: JsValue, + peer_id: JsValue, +) -> Result { + let user_id = serde_wasm_bindgen::from_value::(user_id) + .map_err(|_| "Deserialization error of user_id")?; + + let peer_id = serde_wasm_bindgen::from_value::>(peer_id) + .map_err(|_| "Deserialization error of peer_id")?; + + let config = SessionConfig::new_remote(&user_id, &wallet_name, peer_id); + let res: SessionInfoString = nextgraph::local_broker::session_start(config) + .await + .map_err(|e: NgError| e.to_string())? + .into(); + + Ok(serde_wasm_bindgen::to_value(&res).unwrap()) +} + +#[wasm_bindgen] +pub async fn wallets_reload() -> Result<(), String> { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + nextgraph::local_broker::wallets_reload() + .await + .map_err(|e: NgError| e.to_string()) +} + +#[wasm_bindgen] +pub async fn add_in_memory_wallet(lws: JsValue) -> Result<(), String> { + let lws = serde_wasm_bindgen::from_value::(lws) + .map_err(|_| "Deserialization error of lws")?; + if !lws.in_memory { + return Err("This is not an in memory wallet".to_string()); + } + match nextgraph::local_broker::wallet_add(lws).await { + Ok(_) => Ok(()), + Err(NgError::WalletAlreadyAdded) => Ok(()), + Err(e) => Err(e.to_string()), + } +} + +#[cfg(not(wasmpack_target = "nodejs"))] +#[wasm_bindgen(module = "/js/browser.js")] +extern "C" { + fn session_save(key: String, value: String) -> Option; + fn session_get(key: String) -> Option; + fn session_remove(key: String); + fn local_save(key: String, value: String) -> Option; + fn local_get(key: String) -> Option; + fn is_browser() -> bool; + fn storage_clear(); +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen(module = "/js/node.js")] +extern "C" { + fn session_save(key: String, value: String) -> Option; + fn session_get(key: String) -> Option; + fn session_remove(key: String); + fn local_save(key: String, value: String) -> Option; + fn local_get(key: String) -> Option; + fn is_browser() -> bool; + fn storage_clear(); + #[wasm_bindgen(catch)] + async fn upload_file( + filename: String, + cb_chunk: &Closure js_sys::Promise>, + cb_end: &Closure js_sys::Promise>, + ) -> Result; +} + +fn local_read(key: String) -> Result { + local_get(key).ok_or(NgError::JsStorageReadError) +} + +fn local_write(key: String, value: String) -> Result<(), NgError> { + match local_save(key, value) { + Some(err) => 
Err(NgError::JsStorageWriteError(err)), + None => Ok(()), + } +} + +fn session_read(key: String) -> Result { + session_get(key).ok_or(NgError::JsStorageReadError) +} + +fn session_write(key: String, value: String) -> Result<(), NgError> { + match session_save(key, value) { + Some(err) => Err(NgError::JsStorageWriteError(err)), + None => Ok(()), + } +} + +fn session_del(key: String) -> Result<(), NgError> { + session_remove(key); + Ok(()) +} + +fn clear() { + storage_clear(); +} + +static INIT_LOCAL_BROKER: Lazy> = Lazy::new(|| { + Box::new(|| { + LocalBrokerConfig::JsStorage(JsStorageConfig { + local_read: Box::new(local_read), + local_write: Box::new(local_write), + session_read: Arc::new(Box::new(session_read)), + session_write: Arc::new(Box::new(session_write)), + session_del: Arc::new(Box::new(session_del)), + clear: Arc::new(Box::new(clear)), + is_browser: is_browser(), + }) + }) +}); + +#[wasm_bindgen] +pub async fn wallet_create(params: JsValue) -> Result { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + let mut params = serde_wasm_bindgen::from_value::(params) + .map_err(|_| "Deserialization error of args")?; + params.result_with_wallet_file = true; + let res = nextgraph::local_broker::wallet_create_v0(params).await; + match res { + Ok(r) => Ok(serde_wasm_bindgen::to_value(&r).unwrap()), + Err(e) => Err(e.to_string().into()), + } +} + +#[wasm_bindgen] +pub async fn wallet_get_file(wallet_name: String) -> Result { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + + let res = nextgraph::local_broker::wallet_get_file(&wallet_name).await; + match res { + Ok(r) => Ok(serde_wasm_bindgen::to_value(&serde_bytes::ByteBuf::from(r)).unwrap()), + Err(e) => Err(e.to_string().into()), + } +} + +#[wasm_bindgen] +pub async fn wallet_read_file(file: JsValue) -> Result { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + let file = serde_wasm_bindgen::from_value::(file) + .map_err(|_| "Deserialization error of file".to_string())?; + + let wallet = nextgraph::local_broker::wallet_read_file(file.into_vec()) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&wallet).unwrap()) +} + +#[wasm_bindgen] +pub async fn wallet_import_from_code(code: JsValue) -> Result { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + let code = serde_wasm_bindgen::from_value::(code) + .map_err(|_| "Deserialization error of code".to_string())?; + + let wallet = nextgraph::local_broker::wallet_import_from_code(code) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&wallet).unwrap()) +} + +#[wasm_bindgen] +pub async fn wallet_import_rendezvous(size: JsValue) -> Result { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + let size: u32 = serde_wasm_bindgen::from_value::(size) + .map_err(|_| "Deserialization error of size".to_string())?; + + let res = nextgraph::local_broker::wallet_import_rendezvous(size) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&res).unwrap()) +} + +#[wasm_bindgen] +pub async fn wallet_export_get_qrcode( + session_id: JsValue, + size: JsValue, +) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Deserialization error of session_id".to_string())?; + let size: u32 = serde_wasm_bindgen::from_value::(size) + .map_err(|_| "Deserialization error of size".to_string())?; + + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + + let res = nextgraph::local_broker::wallet_export_get_qrcode(session_id, size) + 
.await + .map_err(|e: NgError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&res).unwrap()) +} + +#[wasm_bindgen] +pub async fn wallet_export_get_textcode(session_id: JsValue) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Deserialization error of session_id".to_string())?; + + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + + let res = nextgraph::local_broker::wallet_export_get_textcode(session_id) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&res).unwrap()) +} + +#[wasm_bindgen] +pub async fn wallet_export_rendezvous(session_id: JsValue, code: JsValue) -> Result<(), String> { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Deserialization error of session_id".to_string())?; + let code = serde_wasm_bindgen::from_value::(code) + .map_err(|_| "Deserialization error of code".to_string())?; + + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + + nextgraph::local_broker::wallet_export_rendezvous(session_id, code) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(()) +} + +#[wasm_bindgen] +pub async fn wallet_was_opened( + opened_wallet: JsValue, //SensitiveWallet +) -> Result { + let opened_wallet = serde_wasm_bindgen::from_value::(opened_wallet) + .map_err(|_| "Deserialization error of SensitiveWallet".to_string())?; + + let client = nextgraph::local_broker::wallet_was_opened(opened_wallet) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&client).unwrap()) +} + +#[wasm_bindgen] +pub async fn wallet_import( + encrypted_wallet: JsValue, //Wallet, + opened_wallet: JsValue, //SensitiveWallet + in_memory: bool, +) -> Result { + let encrypted_wallet = serde_wasm_bindgen::from_value::(encrypted_wallet) + .map_err(|_| "Deserialization error of Wallet".to_string())?; + let opened_wallet = serde_wasm_bindgen::from_value::(opened_wallet) + .map_err(|_| "Deserialization error of SensitiveWallet".to_string())?; + + let client = nextgraph::local_broker::wallet_import(encrypted_wallet, opened_wallet, in_memory) + .await + .map_err(|e: NgError| e.to_string())?; + + Ok(serde_wasm_bindgen::to_value(&client).unwrap()) +} + + +#[wasm_bindgen] +pub async fn import_contact_from_qrcode( + session_id: JsValue, + doc_nuri: String, + qrcode: String, +) -> Result<(), String> { + + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Deserialization error of session_id".to_string())?; + + let mut request = AppRequest::new( + AppRequestCommandV0::QrCodeProfileImport, + NuriV0::new_from_repo_nuri(&doc_nuri).map_err(|e| e.to_string())?, + Some(AppRequestPayload::V0(AppRequestPayloadV0::QrCodeProfileImport(qrcode))), + ); + request.set_session_id(session_id); + + let response = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + match response { + AppResponse::V0(AppResponseV0::Ok) => Ok(()), + AppResponse::V0(AppResponseV0::Error(e)) => Err(e), + _ => Err("invalid response".to_string()), + } +} + +#[wasm_bindgen] +pub async fn get_qrcode_for_profile( + session_id: JsValue, + public: bool, + size: JsValue, +) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Deserialization error of session_id".to_string())?; + let size: u32 = serde_wasm_bindgen::from_value::(size) + .map_err(|_| "Deserialization error of size".to_string())?; + + let nuri = if public { + NuriV0::new_public_store_target() + } else { + 
NuriV0::new_protected_store_target() + }; + + let mut request = AppRequest::new( + AppRequestCommandV0::QrCodeProfile, + nuri, + Some(AppRequestPayload::V0(AppRequestPayloadV0::QrCodeProfile(size))), + ); + request.set_session_id(session_id); + + let response = nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string())?; + + match response { + AppResponse::V0(AppResponseV0::Text(qrcode)) => Ok(qrcode), + AppResponse::V0(AppResponseV0::Error(e)) => Err(e), + _ => Err("invalid response".to_string()), + } +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen(module = "/js/node.js")] +extern "C" { + fn client_details() -> String; + fn version() -> String; + fn get_env_vars() -> JsValue; +} + +#[cfg(wasmpack_target = "nodejs")] +#[wasm_bindgen] +pub fn client_info() -> JsValue { + let res = ClientInfo::V0(client_info_()); + //res + serde_wasm_bindgen::to_value(&res).unwrap() +} + +#[wasm_bindgen] +pub fn encode_create_account(payload: JsValue) -> JsValue { + //log_debug!("{:?}", payload); + let create_account = serde_wasm_bindgen::from_value::(payload).unwrap(); + //log_debug!("create_account {:?}", create_account); + let res = create_account.encode(); + //log_debug!("res {:?}", res); + serde_wasm_bindgen::to_value(&res).unwrap() +} + +#[cfg(not(wasmpack_target = "nodejs"))] +#[wasm_bindgen(module = "/js/browser.js")] +extern "C" { + fn client_details() -> String; +} + +#[cfg(not(wasmpack_target = "nodejs"))] +#[wasm_bindgen(module = "/js/bowser.js")] +extern "C" { + type Bowser; + #[wasm_bindgen(static_method_of = Bowser)] + fn parse(UA: String) -> JsValue; +} + +#[cfg(not(wasmpack_target = "nodejs"))] +#[wasm_bindgen(module = "/js/browser.js")] +extern "C" { + fn client_details2(val: JsValue, version: String) -> String; +} + +#[cfg(all(not(wasmpack_target = "nodejs"), target_arch = "wasm32"))] +pub fn client_info_() -> ClientInfoV0 { + let ua = client_details(); + + let bowser = Bowser::parse(ua); + //log_debug!("{:?}", bowser); + + let details_string = client_details2(bowser, env!("CARGO_PKG_VERSION").to_string()); + + let res = ClientInfoV0 { + client_type: ClientType::Web, + details: details_string, + version: "".to_string(), + timestamp_install: 0, + timestamp_updated: 0, + }; + res + //serde_wasm_bindgen::to_value(&res).unwrap() +} + +#[cfg(all(wasmpack_target = "nodejs", target_arch = "wasm32"))] +pub fn client_info_() -> ClientInfoV0 { + let res = ClientInfoV0 { + client_type: ClientType::NodeService, + details: client_details(), + version: version(), + timestamp_install: 0, + timestamp_updated: 0, + }; + res + //serde_wasm_bindgen::to_value(&res).unwrap() +} + +#[cfg(all(not(wasmpack_target = "nodejs"), target_arch = "wasm32"))] +#[wasm_bindgen] +pub fn client_info() -> JsValue { + let res = ClientInfo::V0(client_info_()); + serde_wasm_bindgen::to_value(&res).unwrap() +} + +#[wasm_bindgen] +pub async fn test() { + init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await; + //log_debug!("test is {}", BROKER.read().await.test()); + #[cfg(debug_assertions)] + let client_info = client_info(); + log_debug!("{:?}", client_info); +} + +// #[wasm_bindgen] +// pub async fn app_request_stream_with_nuri_command( +// nuri: String, +// command: JsValue, +// session_id: JsValue, +// callback: &js_sys::Function, +// payload: JsValue, +// ) -> Result { +// let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) +// .map_err(|_| "Deserialization error of session_id".to_string())?; +// let nuri = NuriV0::new_from(&nuri).map_err(|e| e.to_string())?; + +// 
let command = serde_wasm_bindgen::from_value::(command) +// .map_err(|_| "Deserialization error of AppRequestCommandV0".to_string())?; + +// let payload = if !payload.is_undefined() && payload.is_object() { +// Some(AppRequestPayload::V0( +// serde_wasm_bindgen::from_value::(payload) +// .map_err(|_| "Deserialization error of AppRequestPayloadV0".to_string())?, +// )) +// } else { +// None +// }; + +// let request = AppRequest::V0(AppRequestV0 { +// session_id, +// command, +// nuri, +// payload, +// }); +// app_request_stream_(request, callback).await +// } + +// #[wasm_bindgen] +// pub async fn app_request_stream( +// // js_session_id: JsValue, +// request: JsValue, +// callback: &js_sys::Function, +// ) -> Result { +// let request = serde_wasm_bindgen::from_value::(request) +// .map_err(|_| "Deserialization error of AppRequest".to_string())?; + +// app_request_stream_(request, callback).await +// } +#[wasm_bindgen] +pub async fn app_request_stream( + // js_session_id: JsValue, + request: JsValue, + callback: &js_sys::Function, +) -> Result { + let request = serde_wasm_bindgen::from_value::(request) + .map_err(|_| "Deserialization error of AppRequest".to_string())?; + + app_request_stream_(request, callback).await +} + +async fn app_request_stream_( + request: AppRequest, + callback: &js_sys::Function, +) -> Result { + let (reader, cancel) = nextgraph::local_broker::app_request_stream(request) + .await + .map_err(|e: NgError| e.to_string())?; + + let (canceller_tx, canceller_rx) = mpsc::unbounded(); + + async fn inner_task( + mut reader: Receiver, + callback: js_sys::Function, + mut canceller_tx: Sender<()>, + ) -> ResultSend<()> { + while let Some(app_response) = reader.next().await { + let app_response = nextgraph::verifier::prepare_app_response_for_js(app_response)?; + //let mut graph_triples_js: Option = None; + // if let AppResponse::V0(AppResponseV0::State(AppState { ref mut graph, .. })) = + // app_response + // { + // if graph.is_some() { + // let graph_state = graph.take().unwrap(); + // let triples: Vec = serde_bare::from_slice(&graph_state.triples) + // .map_err(|_| "Deserialization error of graph".to_string())?; + + // let results = Array::new(); + // for triple in triples { + // results.push(&JsQuad::from(triple).into()); + // } + // let list:JsValue = results.into(); + // list. 
+#[wasm_bindgen]
+pub async fn app_request(request: JsValue) -> Result<JsValue, String> {
+    let request = serde_wasm_bindgen::from_value::<AppRequest>(request)
+        .map_err(|_| "Deserialization error of AppRequest".to_string())?;
+
+    let response = nextgraph::local_broker::app_request(request)
+        .await
+        .map_err(|e: NgError| e.to_string())?;
+
+    Ok(serde_wasm_bindgen::to_value(&response).unwrap())
+}
+
+#[wasm_bindgen]
+pub async fn app_request_with_nuri_command(
+    nuri: String,
+    command: JsValue,
+    session_id: JsValue,
+    payload: JsValue,
+) -> Result<JsValue, String> {
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+    let nuri = NuriV0::new_from(&nuri).map_err(|e| e.to_string())?;
+
+    let command = serde_wasm_bindgen::from_value::<AppRequestCommandV0>(command)
+        .map_err(|_| "Deserialization error of AppRequestCommandV0".to_string())?;
+
+    let payload = if !payload.is_undefined() && payload.is_object() {
+        Some(AppRequestPayload::V0(
+            serde_wasm_bindgen::from_value::<AppRequestPayloadV0>(payload)
+                .map_err(|_| "Deserialization error of AppRequestPayloadV0".to_string())?,
+        ))
+    } else {
+        None
+    };
+
+    let request = AppRequest::V0(AppRequestV0 {
+        session_id,
+        command,
+        nuri,
+        payload,
+    });
+
+    let response = nextgraph::local_broker::app_request(request)
+        .await
+        .map_err(|e: NgError| e.to_string())?;
+
+    Ok(serde_wasm_bindgen::to_value(&response).unwrap())
+}
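+
+// Hedged note on the two single-shot entry points above: `app_request` takes a
+// fully-formed AppRequest object, while `app_request_with_nuri_command` assembles
+// one from a Nuri string, a command and an optional payload. Illustrative JS call
+// (import name `ng` and argument values are assumptions):
+//
+//     const res = await ng.app_request_with_nuri_command(nuri, command, sessionId, payload);
+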
+#[cfg(not(wasmpack_target = "nodejs"))]
+#[wasm_bindgen]
+pub async fn doc_create(
+    session_id: JsValue,
+    crdt: String,
+    class_name: String,
+    destination: String,
+    store_repo: JsValue,
+) -> Result<JsValue, String> {
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+
+    let store_repo = serde_wasm_bindgen::from_value::<Option<StoreRepo>>(store_repo)
+        .map_err(|_| "Deserialization error of store_repo".to_string())?;
+
+    nextgraph::local_broker::doc_create_with_store_repo(
+        session_id, crdt, class_name, destination, store_repo,
+    )
+    .await
+    .map_err(|e| e.to_string())
+    .map(|nuri| serde_wasm_bindgen::to_value(&nuri).unwrap())
+}
+
+#[cfg(wasmpack_target = "nodejs")]
+#[wasm_bindgen]
+pub async fn doc_create(
+    session_id: JsValue,
+    crdt: String,
+    class_name: String,
+    destination: String,
+    store_type: JsValue,
+    store_repo: JsValue,
+) -> Result<JsValue, String> {
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+
+    let store_type = serde_wasm_bindgen::from_value::<Option<String>>(store_type)
+        .map_err(|_| "Deserialization error of store_type".to_string())?;
+
+    let store_repo = serde_wasm_bindgen::from_value::<Option<String>>(store_repo)
+        .map_err(|_| "Deserialization error of store_repo".to_string())?;
+
+    nextgraph::local_broker::doc_create(
+        session_id, crdt, class_name, destination, store_type, store_repo,
+    )
+    .await
+    .map_err(|e| e.to_string())
+    .map(|nuri| serde_wasm_bindgen::to_value(&nuri).unwrap())
+}
+
+#[wasm_bindgen]
+pub async fn file_get_from_private_store(
+    session_id: JsValue,
+    nuri: String,
+    callback: &js_sys::Function,
+) -> Result<JsValue, String> {
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+
+    let nuri = NuriV0::new_from(&nuri).map_err(|e| format!("nuri: {}", e))?;
+
+    let branch_nuri = NuriV0::new_private_store_target();
+
+    file_get_(session_id, nuri, branch_nuri, callback).await
+}
+
+#[wasm_bindgen]
+pub async fn file_get(
+    session_id: JsValue,
+    reference: JsValue,
+    branch_nuri: String,
+    callback: &js_sys::Function,
+) -> Result<JsValue, String> {
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+    let reference: BlockRef = serde_wasm_bindgen::from_value::<BlockRef>(reference)
+        .map_err(|_| "Deserialization error of file reference".to_string())?;
+
+    let branch_nuri =
+        NuriV0::new_from(&branch_nuri).map_err(|e| format!("branch_nuri: {}", e))?;
+
+    file_get_(
+        session_id,
+        NuriV0::new_from_obj_ref(&reference),
+        branch_nuri,
+        callback,
+    )
+    .await
+}
+
+async fn file_get_(
+    session_id: u64,
+    mut nuri: NuriV0,
+    branch_nuri: NuriV0,
+    callback: &js_sys::Function,
+) -> Result<JsValue, String> {
+    // Graft the branch target (e.g. the private store) onto the file's Nuri.
+    nuri.copy_target_from(&branch_nuri);
+
+    let mut request = AppRequest::new(AppRequestCommandV0::FileGet, nuri, None);
+    request.set_session_id(session_id);
+
+    app_request_stream_(request, callback).await
+}
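+
+// Hedged note on the `file_get*` helpers above: both wrappers funnel into
+// `file_get_`, which grafts the branch target (the private store, or an explicit
+// branch Nuri) onto the file's object Nuri and then reuses the streaming callback
+// machinery of `app_request_stream_`. Illustrative JS call (import name `ng` is
+// an assumption):
+//
+//     const cancel = await ng.file_get_from_private_store(sessionId, fileNuri,
+//         (chunk) => { /* collect binary chunks of the file */ });
+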
+async fn do_upload_done(
+    upload_id: u32,
+    session_id: u64,
+    nuri: NuriV0,
+    filename: String,
+) -> Result<BlockRef, String> {
+    // Sending an empty chunk finalizes the upload and returns the file reference.
+    let mut request = AppRequest::new(
+        AppRequestCommandV0::FilePut,
+        nuri.clone(),
+        Some(AppRequestPayload::V0(
+            AppRequestPayloadV0::RandomAccessFilePutChunk((upload_id, serde_bytes::ByteBuf::new())),
+        )),
+    );
+    request.set_session_id(session_id);
+
+    let response = nextgraph::local_broker::app_request(request)
+        .await
+        .map_err(|e: NgError| e.to_string())?;
+
+    let reference = match response {
+        AppResponse::V0(AppResponseV0::FileUploaded(refe)) => refe,
+        _ => return Err("invalid response".to_string()),
+    };
+
+    // Then attach the uploaded file to the document.
+    let mut request = AppRequest::new(
+        AppRequestCommandV0::FilePut,
+        nuri,
+        Some(AppRequestPayload::V0(AppRequestPayloadV0::AddFile(
+            DocAddFile {
+                filename: Some(filename),
+                object: reference.clone(),
+            },
+        ))),
+    );
+    request.set_session_id(session_id);
+
+    nextgraph::local_broker::app_request(request)
+        .await
+        .map_err(|e: NgError| e.to_string())?;
+
+    Ok(reference)
+}
+
+#[cfg(wasmpack_target = "nodejs")]
+async fn do_upload_done_(
+    upload_id: u32,
+    session_id: u64,
+    nuri: NuriV0,
+    filename: String,
+) -> Result<JsValue, JsValue> {
+    let response = do_upload_done(upload_id, session_id, nuri, filename)
+        .await
+        .map_err(JsValue::from)?;
+
+    Ok(serde_wasm_bindgen::to_value(&response).unwrap())
+}
+
+#[wasm_bindgen]
+pub async fn upload_done(
+    upload_id: JsValue,
+    session_id: JsValue,
+    nuri: JsValue,
+    filename: String,
+) -> Result<JsValue, String> {
+    let upload_id: u32 = serde_wasm_bindgen::from_value::<u32>(upload_id)
+        .map_err(|_| "Deserialization error of upload_id".to_string())?;
+    let nuri: NuriV0 = serde_wasm_bindgen::from_value::<NuriV0>(nuri)
+        .map_err(|_| "Deserialization error of nuri".to_string())?;
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+
+    let reference = do_upload_done(upload_id, session_id, nuri, filename).await?;
+
+    Ok(serde_wasm_bindgen::to_value(&reference).unwrap())
+}
+
+async fn do_upload_start(session_id: u64, nuri: NuriV0, mimetype: String) -> Result<u32, String> {
+    let mut request = AppRequest::new(
+        AppRequestCommandV0::FilePut,
+        nuri,
+        Some(AppRequestPayload::V0(
+            AppRequestPayloadV0::RandomAccessFilePut(mimetype),
+        )),
+    );
+    request.set_session_id(session_id);
+
+    let response = nextgraph::local_broker::app_request(request)
+        .await
+        .map_err(|e: NgError| e.to_string())?;
+
+    match response {
+        AppResponse::V0(AppResponseV0::FileUploading(upload_id)) => Ok(upload_id),
+        _ => Err("invalid response".to_string()),
+    }
+}
+
+#[wasm_bindgen]
+pub async fn upload_start(
+    session_id: JsValue,
+    nuri: String,
+    mimetype: String,
+) -> Result<JsValue, String> {
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+    let nuri: NuriV0 = NuriV0::new_from(&nuri).map_err(|e| e.to_string())?;
+
+    let upload_id = do_upload_start(session_id, nuri, mimetype).await?;
+
+    Ok(serde_wasm_bindgen::to_value(&upload_id).unwrap())
+}
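+
+// The upload protocol implemented by the helpers above and below, summarized:
+// `upload_start` sends RandomAccessFilePut(mimetype) and yields an upload_id;
+// each `upload_chunk` sends RandomAccessFilePutChunk((upload_id, bytes)); and
+// `upload_done` finalizes with an empty chunk, receives the file's BlockRef, and
+// attaches it to the document via an AddFile payload. Hedged JS sequence (import
+// name `ng` and the chunking loop are illustrative):
+//
+//     const uploadId = await ng.upload_start(sessionId, nuri, "image/png");
+//     for (const chunk of chunks) {
+//         await ng.upload_chunk(sessionId, uploadId, chunk, nuri);
+//     }
+//     const fileRef = await ng.upload_done(uploadId, sessionId, nuriObject, "photo.png");
+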
"Deserialization error of session_id".to_string())?; + + let upload_id = do_upload_start(session_id, target.clone(), mimetype).await?; + let target_for_chunk = target.clone(); + let cb_chunk = Closure::new(move |chunk| { + let chunk_res = serde_wasm_bindgen::from_value::(chunk); + match chunk_res { + Err(_e) => { + js_sys::Promise::reject(&JsValue::from_str("Deserialization error of chunk")) + } + Ok(chunk) => future_to_promise(do_upload_chunk_( + session_id, + upload_id, + chunk, + target_for_chunk.clone(), + )), + } + }); + + let cb_end = Closure::new(move |file| { + future_to_promise(do_upload_done_(upload_id, session_id, target.clone(), file)) + }); + + let reference = upload_file(filename, &cb_chunk, &cb_end) + .await + .map_err(|e| e.as_string().unwrap())?; + let reference = serde_wasm_bindgen::from_value::(reference) + .map_err(|_| "Deserialization error of reference".to_string())?; + let nuri = format!("did:ng:{}", reference.object_nuri()); + Ok(nuri) +} + +async fn do_upload_chunk( + session_id: u64, + upload_id: u32, + chunk: serde_bytes::ByteBuf, + nuri: NuriV0, +) -> Result { + let mut request = AppRequest::new( + AppRequestCommandV0::FilePut, + nuri, + Some(AppRequestPayload::V0( + AppRequestPayloadV0::RandomAccessFilePutChunk((upload_id, chunk)), + )), + ); + request.set_session_id(session_id); + + nextgraph::local_broker::app_request(request) + .await + .map_err(|e: NgError| e.to_string()) +} + +#[cfg(wasmpack_target = "nodejs")] +async fn do_upload_chunk_( + session_id: u64, + upload_id: u32, + chunk: serde_bytes::ByteBuf, + nuri: NuriV0, +) -> Result { + let response = do_upload_chunk(session_id, upload_id, chunk, nuri) + .await + .map_err(|e| { + let ee: JsValue = e.into(); + ee + })?; + + Ok(serde_wasm_bindgen::to_value(&response).unwrap()) +} + +#[wasm_bindgen] +pub async fn upload_chunk( + session_id: JsValue, + upload_id: JsValue, + chunk: JsValue, + nuri: String, +) -> Result { + //log_debug!("upload_chunk {:?}", js_nuri); + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Deserialization error of session_id".to_string())?; + let upload_id: u32 = serde_wasm_bindgen::from_value::(upload_id) + .map_err(|_| "Deserialization error of upload_id".to_string())?; + let chunk: serde_bytes::ByteBuf = serde_wasm_bindgen::from_value::(chunk) + .map_err(|_| "Deserialization error of chunk".to_string())?; + let nuri: NuriV0 = NuriV0::new_from(&nuri).map_err(|e| e.to_string())?; + + let response = do_upload_chunk(session_id, upload_id, chunk, nuri).await?; + + Ok(serde_wasm_bindgen::to_value(&response).unwrap()) +} + +#[wasm_bindgen] +pub async fn doc_fetch_private_subscribe() -> Result { + let request = AppRequest::new( + AppRequestCommandV0::Fetch(AppFetchContentV0::get_or_subscribe(true)), + NuriV0::new_private_store_target(), + None, + ); + Ok(serde_wasm_bindgen::to_value(&request).unwrap()) +} + +#[wasm_bindgen] +pub async fn doc_fetch_repo_subscribe(repo_o: String) -> Result { + Ok(serde_wasm_bindgen::to_value( + &AppRequest::doc_fetch_repo_subscribe(repo_o).map_err(|e| e.to_string())?, + ) + .unwrap()) +} + +#[wasm_bindgen] +pub async fn doc_subscribe( + repo_o: String, + session_id: JsValue, + callback: &js_sys::Function, +) -> Result { + let session_id: u64 = serde_wasm_bindgen::from_value::(session_id) + .map_err(|_| "Deserialization error of session_id".to_string())?; + let mut request = AppRequest::doc_fetch_repo_subscribe(repo_o).map_err(|e| e.to_string())?; + request.set_session_id(session_id); + app_request_stream_(request, 
+#[wasm_bindgen]
+pub async fn doc_fetch_private_subscribe() -> Result<JsValue, String> {
+    let request = AppRequest::new(
+        AppRequestCommandV0::Fetch(AppFetchContentV0::get_or_subscribe(true)),
+        NuriV0::new_private_store_target(),
+        None,
+    );
+    Ok(serde_wasm_bindgen::to_value(&request).unwrap())
+}
+
+#[wasm_bindgen]
+pub async fn doc_fetch_repo_subscribe(repo_o: String) -> Result<JsValue, String> {
+    Ok(serde_wasm_bindgen::to_value(
+        &AppRequest::doc_fetch_repo_subscribe(repo_o).map_err(|e| e.to_string())?,
+    )
+    .unwrap())
+}
+
+#[wasm_bindgen]
+pub async fn doc_subscribe(
+    repo_o: String,
+    session_id: JsValue,
+    callback: &js_sys::Function,
+) -> Result<JsValue, String> {
+    let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
+        .map_err(|_| "Deserialization error of session_id".to_string())?;
+    let mut request = AppRequest::doc_fetch_repo_subscribe(repo_o).map_err(|e| e.to_string())?;
+    request.set_session_id(session_id);
+    app_request_stream_(request, callback).await
+}
+
+#[wasm_bindgen]
+pub async fn disconnections_subscribe(callback: &js_sys::Function) -> Result<JsValue, JsValue> {
+    init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
+
+    let reader = nextgraph::local_broker::take_disconnections_receiver()
+        .await
+        .map_err(|_e: NgError| false)?;
+
+    async fn inner_task(
+        mut reader: Receiver<String>,
+        callback: js_sys::Function,
+    ) -> ResultSend<()> {
+        while let Some(user_id) = reader.next().await {
+            let this = JsValue::null();
+            let user_id_js = serde_wasm_bindgen::to_value(&user_id).unwrap();
+            match callback.call1(&this, &user_id_js) {
+                Ok(jsval) => {
+                    let promise_res: Result<js_sys::Promise, JsValue> = jsval.dyn_into();
+                    if let Ok(promise) = promise_res {
+                        let _ = JsFuture::from(promise).await;
+                    }
+                }
+                Err(e) => {
+                    log_err!(
+                        "JS callback for disconnections_subscribe failed with {:?}",
+                        e
+                    );
+                }
+            }
+        }
+        log_debug!("END OF disconnections reader");
+        Ok(())
+    }
+
+    spawn_and_log_error(inner_task(reader, callback.clone()));
+    Ok(true.into())
+}
+
+#[wasm_bindgen]
+pub async fn probe() {
+    let _res = BROKER
+        .write()
+        .await
+        .probe(
+            Box::new(ConnectionWebSocket {}),
+            IP::try_from(&IpAddr::from_str("127.0.0.1").unwrap()).unwrap(),
+            WS_PORT,
+        )
+        .await;
+    log_debug!("broker.probe : {:?}", _res);
+
+    let _ = Broker::join_shutdown_with_timeout(std::time::Duration::from_secs(5)).await;
+}
+
+#[cfg(wasmpack_target = "nodejs")]
+#[derive(Serialize, Deserialize)]
+struct HeadLessConfigStrings {
+    server_addr: Option<String>,
+    server_peer_id: Option<String>,
+    client_peer_key: Option<String>,
+    admin_user_key: Option<String>,
+}
+
+#[cfg(wasmpack_target = "nodejs")]
+impl HeadLessConfigStrings {
+    fn load(config: JsValue) -> Result<HeadlessConfig, String> {
+        let string_config = if config.is_object() {
+            serde_wasm_bindgen::from_value::<HeadLessConfigStrings>(config)
+                .map_err(|_| "Deserialization error of config object".to_string())?
+        } else {
+            HeadLessConfigStrings {
+                server_addr: None,
+                server_peer_id: None,
+                client_peer_key: None,
+                admin_user_key: None,
+            }
+        };
+        // Environment variables are the fallback for every field.
+        let var_env_config =
+            serde_wasm_bindgen::from_value::<HeadLessConfigStrings>(get_env_vars())
+                .map_err(|_| "Deserialization error of env vars".to_string())?;
+
+        let server_addr = if let Some(s) = string_config.server_addr {
+            parse_ip_and_port_for(s, "server_addr").map_err(|e: NgError| e.to_string())?
+        } else if let Some(s) = var_env_config.server_addr {
+            parse_ip_and_port_for(s, "server_addr from var env")
+                .map_err(|e: NgError| e.to_string())?
+        } else {
+            BindAddress::new_localhost_with_port(WS_PORT_REVERSE_PROXY)
+        };
+
+        let server_peer_id = if let Some(s) = string_config.server_peer_id {
+            Some(decode_key(&s).map_err(|e: NgError| e.to_string())?)
+        } else if let Some(s) = var_env_config.server_peer_id {
+            Some(decode_key(&s).map_err(|e: NgError| e.to_string())?)
+        } else {
+            None
+        }
+        .ok_or("No server_peer_id found in config or env vars.".to_string())?;
+
+        let client_peer_key = if let Some(s) = string_config.client_peer_key {
+            Some(decode_priv_key(&s).map_err(|e: NgError| e.to_string())?)
+        } else if let Some(s) = var_env_config.client_peer_key {
+            Some(decode_priv_key(&s).map_err(|e: NgError| e.to_string())?)
+        } else {
+            None
+        };
+
+        let admin_user_key = if let Some(s) = string_config.admin_user_key {
+            Some(decode_priv_key(&s).map_err(|e: NgError| e.to_string())?)
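+
+// Configuration precedence implemented by `load` above, as a hedged note: a field
+// set in the explicit config object wins over the same field coming from the
+// environment (read through `get_env_vars()`), and `server_addr` finally falls
+// back to localhost:WS_PORT_REVERSE_PROXY. Only `server_peer_id` is mandatory at
+// this stage. Illustrative call (the key strings are placeholders, not real keys):
+//
+//     await ng.init_headless({
+//         server_peer_id: "<base64 peer id>",
+//         client_peer_key: "<base64 private key>",
+//     });
+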
+        } else if let Some(s) = var_env_config.admin_user_key {
+            Some(decode_priv_key(&s).map_err(|e: NgError| e.to_string())?)
+        } else {
+            None
+        };
+
+        Ok(HeadlessConfig {
+            server_addr,
+            server_peer_id,
+            client_peer_key,
+            admin_user_key,
+        })
+    }
+}
+/*
+#[doc(hidden)]
+#[derive(Debug)]
+pub struct HeadlessConfig {
+    // parse_ip_and_port_for(string, "verifier_server")
+    pub server_addr: Option<BindAddress>,
+    // decode_key(string)
+    pub server_peer_id: PubKey,
+    // decode_priv_key(string)
+    pub client_peer_key: PrivKey,
+}
+*/
+
+#[cfg(wasmpack_target = "nodejs")]
+#[wasm_bindgen]
+pub async fn init_headless(config: JsValue) -> Result<(), String> {
+    //log_info!("{:?}", js_config);
+
+    let config = HeadLessConfigStrings::load(config)?;
+    let _ = config
+        .client_peer_key
+        .as_ref()
+        .ok_or("No client_peer_key found in config or env vars.".to_string())?;
+
+    init_local_broker(Box::new(move || {
+        LocalBrokerConfig::Headless(config.clone())
+    }))
+    .await;
+
+    Ok(())
+}
+
+#[wasm_bindgen]
+pub async fn start() {
+    async fn inner_task() -> ResultSend<()> {
+        Ok(())
+    }
+    spawn_and_log_error(inner_task()).await;
+}
+
+#[wasm_bindgen]
+pub async fn session_stop(user_id: String) -> Result<(), String> {
+    let user_id = decode_key(&user_id).map_err(|_| "Invalid user_id")?;
+
+    nextgraph::local_broker::session_stop(&user_id)
+        .await
+        .map_err(|e: NgError| e.to_string())
+}
+
+#[wasm_bindgen]
+pub async fn user_disconnect(user_id: String) -> Result<(), String> {
+    let user_id = decode_key(&user_id).map_err(|_| "Invalid user_id")?;
+
+    nextgraph::local_broker::user_disconnect(&user_id)
+        .await
+        .map_err(|e: NgError| e.to_string())
+}
+
+#[wasm_bindgen]
+pub async fn wallet_close(wallet_name: String) -> Result<(), String> {
+    nextgraph::local_broker::wallet_close(&wallet_name)
+        .await
+        .map_err(|e: NgError| e.to_string())
+}
+
+#[wasm_bindgen]
+pub async fn user_connect(
+    client_info: JsValue,
+    user_id: String,
+    location: Option<String>,
+) -> Result<JsValue, String> {
+    let info = serde_wasm_bindgen::from_value::<ClientInfo>(client_info)
+        .map_err(|_| "serde error on info")?;
+    let user_id = decode_key(&user_id).map_err(|_| "Invalid user_id")?;
+
+    #[derive(Serialize, Deserialize)]
+    struct ConnectionInfo {
+        pub server_id: String,
+        pub server_ip: String,
+        pub error: Option<String>,
+        #[serde(with = "serde_wasm_bindgen::preserve")]
+        pub since: js_sys::Date,
+    }
+
+    let mut opened_connections: HashMap<String, ConnectionInfo> = HashMap::new();
+
+    let results = nextgraph::local_broker::user_connect_with_device_info(info, &user_id, location)
+        .await
+        .map_err(|e: NgError| e.to_string())?;
+
+    log_debug!("{:?}", results);
+
+    for result in results {
+        let date = js_sys::Date::new_0();
+        date.set_time(result.4);
+        opened_connections.insert(
+            result.0,
+            ConnectionInfo {
+                server_id: result.1,
+                server_ip: result.2,
+                error: result.3,
+                since: date,
+            },
+        );
+    }
+
+    Ok(opened_connections
+        .serialize(&serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true))
+        .unwrap())
+}
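+
+// `user_connect` reports one entry per connection attempt, keyed by the first
+// element of each result tuple (an identifier string), with `since` kept as a
+// real JS Date through `serde_wasm_bindgen::preserve` and the map serialized as
+// a plain object. Hedged shape of the resolved value:
+//
+//     { "<id>": { server_id, server_ip, error, since } }
+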
+const EMPTY_IMG: [u8; 437] = [137, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13, 73, 72, 68, 82, 0, 0, 0, 150, 0, 0, 0, 150, 8, 6, 0, 0, 0, 60, 1, 113, 226, 0, 0, 0, 4, 103, 65, 77, 65, 0, 0, 177, 143, 11, 252, 97, 5, 0, 0, 0, 1, 115, 82, 71, 66, 1, 217, 201, 44, 127, 0, 0, 0, 32, 99, 72, 82, 77, 0, 0, 122, 38, 0, 0, 128, 132, 0, 0, 250, 0, 0, 0, 128, 232, 0, 0, 117, 48, 0, 0, 234, 96, 0, 0, 58, 152, 0, 0, 23, 112, 156, 186, 81, 60, 0, 0, 0, 9, 112, 72, 89, 115, 0, 0, 3, 0, 0, 0, 3, 0, 1, 217, 203, 178, 96, 0, 0, 1, 30, 73, 68, 65, 84, 120, 218, 237, 210, 49, 17, 0, 0, 8, 196, 48, 192, 191, 231, 199, 0, 35, 99, 34, 161, 215, 78, 146, 130, 103, 35, 1, 198, 194, 88, 24, 11, 140, 133, 177, 48, 22, 24, 11, 99, 97, 44, 48, 22, 198, 194, 88, 96, 44, 140, 133, 177, 192, 88, 24, 11, 99, 129, 177, 48, 22, 198, 2, 99, 97, 44, 140, 5, 198, 194, 88, 24, 11, 140, 133, 177, 48, 22, 24, 11, 99, 97, 44, 48, 22, 198, 194, 88, 96, 44, 140, 133, 177, 192, 88, 24, 11, 99, 129, 177, 48, 22, 198, 2, 99, 97, 44, 140, 5, 198, 194, 88, 24, 11, 140, 133, 177, 48, 22, 24, 11, 99, 97, 44, 48, 22, 198, 194, 88, 96, 44, 140, 133, 177, 48, 22, 24, 11, 99, 97, 44, 48, 22, 198, 194, 88, 96, 44, 140, 133, 177, 192, 88, 24, 11, 99, 129, 177, 48, 22, 198, 2, 99, 97, 44, 140, 5, 198, 194, 88, 24, 11, 140, 133, 177, 48, 22, 24, 11, 99, 97, 44, 48, 22, 198, 194, 88, 96, 44, 140, 133, 177, 192, 88, 24, 11, 99, 129, 177, 48, 22, 198, 2, 99, 97, 44, 140, 5, 198, 194, 88, 24, 11, 140, 133, 177, 48, 22, 24, 11, 99, 97, 44, 48, 22, 198, 194, 88, 96, 44, 140, 133, 177, 192, 88, 24, 11, 99, 193, 109, 1, 34, 65, 5, 40, 46, 151, 166, 52, 0, 0, 0, 0, 73, 69, 78, 68, 174, 66, 96, 130];
+
+#[wasm_bindgen]
+pub async fn gen_wallet_for_test(ngd_peer_id: String) -> Result<JsValue, String> {
+    init_local_broker_with_lazy(&INIT_LOCAL_BROKER).await;
+    //init_local_broker(Box::new(|| LocalBrokerConfig::InMemory)).await;
+
+    let peer_id_of_server_broker = decode_key(&ngd_peer_id).map_err(|e: NgError| e.to_string())?;
+
+    let wallet_result = wallet_create_v0(CreateWalletV0 {
+        security_img: Vec::from(EMPTY_IMG),
+        security_txt: "testsecurityphrase".to_string(),
+        pin: [1, 2, 1, 2],
+        pazzle_length: 9,
+        send_bootstrap: false,
+        send_wallet: false,
+        result_with_wallet_file: false,
+        local_save: false,
+        core_bootstrap: BootstrapContentV0::new_localhost(peer_id_of_server_broker),
+        core_registration: None,
+        additional_bootstrap: None,
+        pdf: false,
+        device_name: "test".to_string(),
+    })
+    .await
+    .expect("wallet_create_v0");
+
+    let mut mnemonic_words = Vec::with_capacity(12);
+    display_mnemonic(&wallet_result.mnemonic)
+        .iter()
+        .for_each(|word| {
+            mnemonic_words.push(word.clone());
+        });
+
+    let res = (wallet_result, mnemonic_words);
+    Ok(serde_wasm_bindgen::to_value(&res).unwrap())
+}
+
+#[cfg(test)]
+mod test {
+    use wasm_bindgen_test::*;
+    wasm_bindgen_test_configure!(run_in_browser);
+    //use crate::probe;
+    use crate::start;
+
+    #[wasm_bindgen_test]
+    pub async fn test_connection() {
+        //probe().await;
+        start().await;
+    }
+}
diff --git a/sdk/ng-sdk-js/src/model.rs b/sdk/ng-sdk-js/src/model.rs
new file mode 100644
index 0000000..eaabcd2
--- /dev/null
+++ b/sdk/ng-sdk-js/src/model.rs
@@ -0,0 +1,825 @@
+// Copyright (c) 2018 Oxigraph developers (Thomas Pellissier Tanon)
+// taken from Oxigraph https://github.com/oxigraph/oxigraph https://oxigraph.org
+// Licensed under Apache-2.0 and MIT
+
+#![allow(dead_code, clippy::inherent_to_string, clippy::unused_self)]
+
+use js_sys::{Error, Reflect, UriError};
+use oxrdf::Triple;
+use oxrdf::*;
+use wasm_bindgen::prelude::*;
+use wasm_bindgen::JsValue;
+
+#[macro_export]
+macro_rules! format_err {
+    ($msg:literal $(,)?) => {
+        ::wasm_bindgen::JsValue::from(::js_sys::Error::new($msg))
+    };
+    ($fmt:literal, $($arg:tt)*) => {
+        ::wasm_bindgen::JsValue::from(::js_sys::Error::new(&format!($fmt, $($arg)*)))
+    };
+}
+
+#[allow(clippy::needless_pass_by_value)]
+pub fn to_err(e: impl ToString) -> JsValue {
+    JsValue::from(Error::new(&e.to_string()))
+}
+
+thread_local! {
+    pub static FROM_JS: FromJsConverter = FromJsConverter::default();
+}
+
+#[wasm_bindgen(js_name = namedNode)]
+pub fn named_node(value: String) -> Result<JsNamedNode, JsValue> {
+    NamedNode::new(value)
+        .map(Into::into)
+        .map_err(|v| UriError::new(&v.to_string()).into())
+}
+
+#[wasm_bindgen(js_name = blankNode)]
+pub fn blank_node(value: Option<String>) -> Result<JsBlankNode, JsValue> {
+    Ok(if let Some(value) = value {
+        BlankNode::new(value).map_err(to_err)?
+    } else {
+        BlankNode::default()
+    }
+    .into())
+}
+
+#[wasm_bindgen]
+pub fn literal(
+    value: Option<String>,
+    language_or_datatype: &JsValue,
+) -> Result<JsLiteral, JsValue> {
+    if language_or_datatype.is_null() || language_or_datatype.is_undefined() {
+        Ok(Literal::new_simple_literal(value.unwrap_or_default()).into())
+    } else if language_or_datatype.is_string() {
+        Ok(Literal::new_language_tagged_literal(
+            value.unwrap_or_default(),
+            language_or_datatype.as_string().unwrap_or_default(),
+        )
+        .map_err(to_err)?
+        .into())
+    } else if let JsTerm::NamedNode(datatype) = FROM_JS.with(|c| c.to_term(language_or_datatype))? {
+        Ok(Literal::new_typed_literal(value.unwrap_or_default(), datatype).into())
+    } else {
+        Err(format_err!("The literal datatype should be a NamedNode"))
+    }
+}
+
+#[wasm_bindgen(js_name = defaultGraph)]
+pub fn default_graph() -> JsDefaultGraph {
+    JsDefaultGraph
+}
+
+#[wasm_bindgen(js_name = variable)]
+pub fn variable(value: String) -> Result<JsVariable, JsValue> {
+    Ok(Variable::new(value).map_err(to_err)?.into())
+}
+
+#[wasm_bindgen(js_name = triple)]
+pub fn triple(subject: &JsValue, predicate: &JsValue, object: &JsValue) -> Result<JsQuad, JsValue> {
+    quad(subject, predicate, object, &JsValue::UNDEFINED)
+}
+
+#[wasm_bindgen(js_name = quad)]
+pub fn quad(
+    subject: &JsValue,
+    predicate: &JsValue,
+    object: &JsValue,
+    graph: &JsValue,
+) -> Result<JsQuad, JsValue> {
+    Ok(FROM_JS
+        .with(|c| c.to_quad_from_parts(subject, predicate, object, graph))?
+        .into())
+}
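+
+// The constructors above mirror the factory functions of the RDF/JS Data Model
+// specification (https://rdf.js.org/data-model-spec/): namedNode, blankNode,
+// literal, defaultGraph, variable, triple and quad. Hedged JS illustration
+// (import name `ng` is an assumption):
+//
+//     const q = ng.quad(
+//         ng.namedNode("http://example.com/s"),
+//         ng.namedNode("http://example.com/p"),
+//         ng.literal("o"),
+//     );
+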
+#[wasm_bindgen(js_name = fromTerm)]
+pub fn from_term(original: &JsValue) -> Result<JsValue, JsValue> {
+    Ok(if original.is_null() {
+        JsValue::NULL
+    } else {
+        FROM_JS.with(|c| c.to_term(original))?.into()
+    })
+}
+
+#[wasm_bindgen(js_name = fromQuad)]
+pub fn from_quad(original: &JsValue) -> Result<JsValue, JsValue> {
+    Ok(if original.is_null() {
+        JsValue::NULL
+    } else {
+        JsQuad::from(FROM_JS.with(|c| c.to_quad(original))?).into()
+    })
+}
+
+#[wasm_bindgen(js_name = NamedNode)]
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct JsNamedNode {
+    inner: NamedNode,
+}
+
+#[wasm_bindgen(js_class = NamedNode)]
+impl JsNamedNode {
+    #[wasm_bindgen(getter = termType)]
+    pub fn term_type(&self) -> String {
+        "NamedNode".to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn value(&self) -> String {
+        self.inner.as_str().to_owned()
+    }
+
+    #[wasm_bindgen(js_name = toString)]
+    pub fn to_string(&self) -> String {
+        self.inner.to_string()
+    }
+
+    pub fn equals(&self, other: &JsValue) -> bool {
+        if let Ok(Some(JsTerm::NamedNode(other))) =
+            FromJsConverter::default().to_optional_term(other)
+        {
+            self == &other
+        } else {
+            false
+        }
+    }
+}
+
+impl From<NamedNode> for JsNamedNode {
+    fn from(inner: NamedNode) -> Self {
+        Self { inner }
+    }
+}
+
+impl From<JsNamedNode> for NamedNode {
+    fn from(node: JsNamedNode) -> Self {
+        node.inner
+    }
+}
+
+impl From<JsNamedNode> for NamedOrBlankNode {
+    fn from(node: JsNamedNode) -> Self {
+        node.inner.into()
+    }
+}
+
+impl From<JsNamedNode> for Subject {
+    fn from(node: JsNamedNode) -> Self {
+        node.inner.into()
+    }
+}
+
+impl From<JsNamedNode> for Term {
+    fn from(node: JsNamedNode) -> Self {
+        node.inner.into()
+    }
+}
+
+impl From<JsNamedNode> for GraphName {
+    fn from(node: JsNamedNode) -> Self {
+        node.inner.into()
+    }
+}
+
+#[wasm_bindgen(js_name = BlankNode)]
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct JsBlankNode {
+    inner: BlankNode,
+}
+
+#[wasm_bindgen(js_class = BlankNode)]
+impl JsBlankNode {
+    #[wasm_bindgen(getter = termType)]
+    pub fn term_type(&self) -> String {
+        "BlankNode".to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn value(&self) -> String {
+        self.inner.as_str().to_owned()
+    }
+
+    #[wasm_bindgen(js_name = toString)]
+    pub fn to_string(&self) -> String {
+        self.inner.to_string()
+    }
+
+    pub fn equals(&self, other: &JsValue) -> bool {
+        if let Ok(Some(JsTerm::BlankNode(other))) =
+            FromJsConverter::default().to_optional_term(other)
+        {
+            self == &other
+        } else {
+            false
+        }
+    }
+}
+
+impl From<BlankNode> for JsBlankNode {
+    fn from(inner: BlankNode) -> Self {
+        Self { inner }
+    }
+}
+
+impl From<JsBlankNode> for BlankNode {
+    fn from(node: JsBlankNode) -> Self {
+        node.inner
+    }
+}
+
+impl From<JsBlankNode> for NamedOrBlankNode {
+    fn from(node: JsBlankNode) -> Self {
+        node.inner.into()
+    }
+}
+
+impl From<JsBlankNode> for Subject {
+    fn from(node: JsBlankNode) -> Self {
+        node.inner.into()
+    }
+}
+
+impl From<JsBlankNode> for Term {
+    fn from(node: JsBlankNode) -> Self {
+        node.inner.into()
+    }
+}
+
+impl From<JsBlankNode> for GraphName {
+    fn from(node: JsBlankNode) -> Self {
+        node.inner.into()
+    }
+}
+
+#[wasm_bindgen(js_name = Literal)]
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct JsLiteral {
+    inner: Literal,
+}
+
+#[wasm_bindgen(js_class = Literal)]
+impl JsLiteral {
+    #[wasm_bindgen(getter = termType)]
+    pub fn term_type(&self) -> String {
+        "Literal".to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn value(&self) -> String {
+        self.inner.value().to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn language(&self) -> String {
+        self.inner.language().unwrap_or("").to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn datatype(&self) -> JsNamedNode {
+        self.inner.datatype().into_owned().into()
+    }
+
+    #[wasm_bindgen(js_name = toString)]
+    pub fn to_string(&self) -> String {
+        self.inner.to_string()
+    }
+
+    pub fn equals(&self, other: &JsValue) -> bool {
+        if let Ok(Some(JsTerm::Literal(other))) = FromJsConverter::default().to_optional_term(other)
+        {
+            self == &other
+        } else {
+            false
+        }
+    }
+}
+
+impl From<Literal> for JsLiteral {
+    fn from(inner: Literal) -> Self {
+        Self { inner }
+    }
+}
+
+impl From<JsLiteral> for Literal {
+    fn from(node: JsLiteral) -> Self {
+        node.inner
+    }
+}
+
+impl From<JsLiteral> for Term {
+    fn from(node: JsLiteral) -> Self {
+        node.inner.into()
+    }
+}
+
+#[wasm_bindgen(js_name = DefaultGraph)]
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct JsDefaultGraph;
+
+#[wasm_bindgen(js_class = DefaultGraph)]
+impl JsDefaultGraph {
+    #[wasm_bindgen(getter = termType)]
+    pub fn term_type(&self) -> String {
+        "DefaultGraph".to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn value(&self) -> String {
+        String::new()
+    }
+
+    #[wasm_bindgen(js_name = toString)]
+    pub fn to_string(&self) -> String {
+        "DEFAULT".to_owned()
+    }
+
+    pub fn equals(&self, other: &JsValue) -> bool {
+        if let Ok(Some(JsTerm::DefaultGraph(other))) =
+            FromJsConverter::default().to_optional_term(other)
+        {
+            self == &other
+        } else {
+            false
+        }
+    }
+}
+
+#[wasm_bindgen(js_name = Variable)]
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct JsVariable {
+    inner: Variable,
+}
+
+#[wasm_bindgen(js_class = Variable)]
+impl JsVariable {
+    #[wasm_bindgen(getter = termType)]
+    pub fn term_type(&self) -> String {
+        "Variable".to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn value(&self) -> String {
+        self.inner.as_str().to_owned()
+    }
+
+    #[wasm_bindgen(js_name = toString)]
+    pub fn to_string(&self) -> String {
+        self.inner.to_string()
+    }
+
+    pub fn equals(&self, other: &JsValue) -> bool {
+        if let Ok(Some(JsTerm::Variable(other))) =
+            FromJsConverter::default().to_optional_term(other)
+        {
+            self == &other
+        } else {
+            false
+        }
+    }
+}
+
+impl From<Variable> for JsVariable {
+    fn from(inner: Variable) -> Self {
+        Self { inner }
+    }
+}
+
+impl From<JsVariable> for Variable {
+    fn from(node: JsVariable) -> Self {
+        node.inner
+    }
+}
+
+#[wasm_bindgen(js_name = Quad)]
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub struct JsQuad {
+    inner: Quad,
+}
+
+#[wasm_bindgen(js_class = Quad)]
+impl JsQuad {
+    #[wasm_bindgen(getter = termType)]
+    pub fn term_type(&self) -> String {
+        "Quad".to_owned()
+    }
+
+    #[wasm_bindgen(getter)]
+    pub fn value(&self) -> String {
+        String::new()
+    }
+
+    #[wasm_bindgen(getter = subject)]
+    pub fn subject(&self) -> JsValue {
+        JsTerm::from(self.inner.subject.clone()).into()
+    }
+
+    #[wasm_bindgen(getter = predicate)]
+    pub fn predicate(&self) -> JsValue {
+        JsTerm::from(self.inner.predicate.clone()).into()
+    }
+
+    #[wasm_bindgen(getter = object)]
+    pub fn object(&self) -> JsValue {
+        JsTerm::from(self.inner.object.clone()).into()
+    }
+
+    #[wasm_bindgen(getter = graph)]
+    pub fn graph(&self) -> JsValue {
+        JsTerm::from(self.inner.graph_name.clone()).into()
+    }
+
+    #[wasm_bindgen(js_name = toString)]
+    pub fn to_string(&self) -> String {
+        self.inner.to_string()
+    }
+
+    pub fn equals(&self, other: &JsValue) -> bool {
+        if let Ok(Some(JsTerm::Quad(other))) = FromJsConverter::default().to_optional_term(other) {
+            self == &other
+        } else {
+            false
+        }
+    }
+}
+
+impl From<Triple> for JsQuad {
+    fn from(inner: Triple) -> Self {
+        Self {
+            inner: inner.in_graph(GraphName::DefaultGraph),
+        }
+    }
+}
+
+impl From<Quad> for JsQuad {
+    fn from(inner: Quad) -> Self {
+        Self { inner }
+    }
+}
+
+impl From<JsQuad> for Quad {
+    fn from(quad: JsQuad) -> Self {
+        quad.inner
+    }
+}
+
+impl From<JsQuad> for Triple {
+    fn from(quad: JsQuad) -> Self {
+        quad.inner.into()
+    }
+}
+
+#[derive(Eq, PartialEq, Debug, Clone, Hash)]
+pub enum JsTerm {
+    NamedNode(JsNamedNode),
+    BlankNode(JsBlankNode),
+    Literal(JsLiteral),
+    DefaultGraph(JsDefaultGraph),
+    Variable(JsVariable),
+    Quad(JsQuad),
+}
+
+impl From<JsTerm> for JsValue {
+    fn from(value: JsTerm) -> Self {
+        match value {
+            JsTerm::NamedNode(v) => v.into(),
+            JsTerm::BlankNode(v) => v.into(),
+            JsTerm::Literal(v) => v.into(),
+            JsTerm::DefaultGraph(v) => v.into(),
+            JsTerm::Variable(v) => v.into(),
+            JsTerm::Quad(v) => v.into(),
+        }
+    }
+}
+
+impl From<NamedNode> for JsTerm {
+    fn from(node: NamedNode) -> Self {
+        Self::NamedNode(node.into())
+    }
+}
+
+impl From<BlankNode> for JsTerm {
+    fn from(node: BlankNode) -> Self {
+        Self::BlankNode(node.into())
+    }
+}
+
+impl From<Literal> for JsTerm {
+    fn from(literal: Literal) -> Self {
+        Self::Literal(literal.into())
+    }
+}
+
+impl From<NamedOrBlankNode> for JsTerm {
+    fn from(node: NamedOrBlankNode) -> Self {
+        match node {
+            NamedOrBlankNode::NamedNode(node) => node.into(),
+            NamedOrBlankNode::BlankNode(node) => node.into(),
+        }
+    }
+}
+
+impl From<Subject> for JsTerm {
+    fn from(node: Subject) -> Self {
+        match node {
+            Subject::NamedNode(node) => node.into(),
+            Subject::BlankNode(node) => node.into(),
+            Subject::Triple(node) => node.into(),
+        }
+    }
+}
+
+impl From<Term> for JsTerm {
+    fn from(term: Term) -> Self {
+        match term {
+            Term::NamedNode(node) => node.into(),
+            Term::BlankNode(node) => node.into(),
+            Term::Literal(literal) => literal.into(),
+            Term::Triple(node) => node.into(),
+        }
+    }
+}
+
+impl From<GraphName> for JsTerm {
+    fn from(name: GraphName) -> Self {
+        match name {
+            GraphName::NamedNode(node) => node.into(),
+            GraphName::BlankNode(node) => node.into(),
+            GraphName::DefaultGraph => Self::DefaultGraph(JsDefaultGraph),
+        }
+    }
+}
+
+impl From<Variable> for JsTerm {
+    fn from(variable: Variable) -> Self {
+        Self::Variable(variable.into())
+    }
+}
+
+impl From<Triple> for JsTerm {
+    fn from(triple: Triple) -> Self {
+        Self::Quad(triple.into())
+    }
+}
+
+impl From<Box<Triple>> for JsTerm {
+    fn from(triple: Box<Triple>) -> Self {
+        triple.as_ref().clone().into()
+    }
+}
+
+impl From<Quad> for JsTerm {
+    fn from(quad: Quad) -> Self {
+        Self::Quad(quad.into())
+    }
+}
+
+impl TryFrom<JsTerm> for NamedNode {
+    type Error = JsValue;
+
+    fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
+        match value {
+            JsTerm::NamedNode(node) => Ok(node.into()),
+            JsTerm::BlankNode(node) => Err(format_err!(
+                "The blank node {} is not a named node",
+                node.inner
+            )),
+            JsTerm::Literal(literal) => Err(format_err!(
+                "The literal {} is not a named node",
+                literal.inner
+            )),
+            JsTerm::DefaultGraph(_) => Err(format_err!("The default graph is not a named node")),
+            JsTerm::Variable(variable) => Err(format_err!(
+                "The variable {} is not a named node",
+                variable.inner
+            )),
+            JsTerm::Quad(quad) => Err(format_err!("The quad {} is not a named node", quad.inner)),
+        }
+    }
+}
+
+impl TryFrom<JsTerm> for NamedOrBlankNode {
+    type Error = JsValue;
+
+    fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
+        match value {
+            JsTerm::NamedNode(node) => Ok(node.into()),
+            JsTerm::BlankNode(node) => Ok(node.into()),
+            JsTerm::Literal(literal) => Err(format_err!(
+                "The literal {} is not a possible named or blank node term",
+                literal.inner
+            )),
+            JsTerm::DefaultGraph(_) => Err(format_err!(
+                "The default graph is not a possible named or blank node term"
+            )),
+            JsTerm::Variable(variable) => Err(format_err!(
+                "The variable {} is not a possible named or blank node term",
+                variable.inner
+            )),
+            JsTerm::Quad(quad) => Err(format_err!(
+                "The quad {} is not a possible named or blank node term",
+                quad.inner
+            )),
+        }
+    }
+}
+
+impl TryFrom<JsTerm> for Subject {
+    type Error = JsValue;
+
+    fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
+        match value {
+            JsTerm::NamedNode(node) => Ok(node.into()),
+            JsTerm::BlankNode(node) => Ok(node.into()),
+            JsTerm::Literal(literal) => Err(format_err!(
+                "The literal {} is not a possible RDF subject",
+                literal.inner
+            )),
+            JsTerm::DefaultGraph(_) => Err(format_err!(
+                "The default graph is not a possible RDF subject"
+            )),
+            JsTerm::Variable(variable) => Err(format_err!(
+                "The variable {} is not a possible RDF subject",
+                variable.inner
+            )),
+            JsTerm::Quad(quad) => Ok(Triple::from(quad).into()),
+        }
+    }
+}
+
+impl TryFrom<JsTerm> for Term {
+    type Error = JsValue;
+
+    fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
+        match value {
+            JsTerm::NamedNode(node) => Ok(node.into()),
+            JsTerm::BlankNode(node) => Ok(node.into()),
+            JsTerm::Literal(literal) => Ok(literal.into()),
+            JsTerm::DefaultGraph(_) => {
+                Err(format_err!("The default graph is not a possible RDF term"))
+            }
+            JsTerm::Variable(variable) => Err(format_err!(
+                "The variable {} is not a possible RDF term",
+                variable.inner
+            )),
+            JsTerm::Quad(quad) => Ok(Triple::from(quad).into()),
+        }
+    }
+}
+
+impl TryFrom<JsTerm> for GraphName {
+    type Error = JsValue;
+
+    fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
+        match value {
+            JsTerm::NamedNode(node) => Ok(node.into()),
+            JsTerm::BlankNode(node) => Ok(node.into()),
+            JsTerm::Literal(literal) => Err(format_err!(
+                "The literal {} is not a possible graph name",
+                literal.inner
+            )),
+            JsTerm::DefaultGraph(_) => Ok(Self::DefaultGraph),
+            JsTerm::Variable(variable) => Err(format_err!(
+                "The variable {} is not a possible RDF term",
+                variable.inner
+            )),
+            JsTerm::Quad(quad) => Err(format_err!(
+                "The quad {} is not a possible RDF term",
+                quad.inner
+            )),
+        }
+    }
+}
+
+pub struct FromJsConverter {
+    term_type: JsValue,
+    value: JsValue,
+    language: JsValue,
+    datatype: JsValue,
+    subject: JsValue,
+    predicate: JsValue,
+    object: JsValue,
+    graph: JsValue,
+}
+
+impl Default for FromJsConverter {
+    fn default() -> Self {
+        Self {
+            term_type: JsValue::from_str("termType"),
+            value: JsValue::from_str("value"),
+            language: JsValue::from_str("language"),
+            datatype: JsValue::from_str("datatype"),
+            subject: JsValue::from_str("subject"),
+            predicate: JsValue::from_str("predicate"),
+            object: JsValue::from_str("object"),
+            graph: JsValue::from_str("graph"),
+        }
+    }
+}
+
+impl FromJsConverter {
+    pub fn to_term(&self, value: &JsValue) -> Result<JsTerm, JsValue> {
+        let term_type = Reflect::get(value, &self.term_type)?;
+        if let Some(term_type) = term_type.as_string() {
+            match term_type.as_str() {
+                "NamedNode" => Ok(NamedNode::new(
+                    Reflect::get(value, &self.value)?
+                        .as_string()
+                        .ok_or_else(|| format_err!("NamedNode should have a string value"))?,
+                )
+                .map_err(|v| UriError::new(&v.to_string()))?
+                .into()),
+                "BlankNode" => Ok(BlankNode::new(
+                    Reflect::get(value, &self.value)?
+                        .as_string()
+                        .ok_or_else(|| format_err!("BlankNode should have a string value"))?,
+                )
+                .map_err(to_err)?
+                .into()),
+                "Literal" => {
+                    if let JsTerm::NamedNode(datatype) =
+                        self.to_term(&Reflect::get(value, &self.datatype)?)?
+                    {
+                        let datatype = NamedNode::from(datatype);
+                        let literal_value = Reflect::get(value, &self.value)?
+                            .as_string()
+                            .ok_or_else(|| format_err!("Literal should have a string value"))?;
+                        Ok(match datatype.as_str() {
+                            "http://www.w3.org/2001/XMLSchema#string" => {
+                                Literal::new_simple_literal(literal_value)
+                            }
+                            "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString" => {
+                                Literal::new_language_tagged_literal(
+                                    literal_value,
+                                    Reflect::get(value, &self.language)?.as_string().ok_or_else(
+                                        || format_err!("Literal with rdf:langString datatype should have a language"),
+                                    )?,
+                                )
+                                .map_err(to_err)?
+                            }
+                            _ => Literal::new_typed_literal(literal_value, datatype),
+                        }
+                        .into())
+                    } else {
+                        Err(format_err!(
+                            "Literal should have a datatype that is a NamedNode"
+                        ))
+                    }
+                }
+                "DefaultGraph" => Ok(JsTerm::DefaultGraph(JsDefaultGraph)),
+                "Variable" => Ok(Variable::new(
+                    Reflect::get(value, &self.value)?
+                        .as_string()
+                        .ok_or_else(|| format_err!("Variable should have a string value"))?,
+                )
+                .map_err(to_err)?
+                .into()),
+                "Quad" => Ok(self.to_quad(value)?.into()),
+                _ => Err(format_err!(
+                    "The termType {term_type} is not supported by Oxigraph"
+                )),
+            }
+        } else if term_type.is_undefined() {
+            // It's a quad without the proper type
+            if Reflect::has(value, &self.subject)?
+                && Reflect::has(value, &self.predicate)?
+                && Reflect::has(value, &self.object)?
+            {
+                Ok(self.to_quad(value)?.into())
+            } else {
+                Err(format_err!(
+                    "RDF term objects should have a termType attribute"
+                ))
+            }
+        } else {
+            Err(format_err!("The object termType field should be a string"))
+        }
+    }
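+
+    // `to_term` above dispatches on the RDF/JS `termType` string. Literals are
+    // special-cased so that xsd:string stays a simple literal and rdf:langString
+    // requires a language tag; an object with no `termType` but with
+    // subject/predicate/object properties is accepted as a quad, matching how
+    // some RDF/JS libraries represent quads. For example, a hedged input of
+    // `{ termType: "NamedNode", value: "http://example.com" }` becomes a NamedNode.
+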
+    pub fn to_optional_term(&self, value: &JsValue) -> Result<Option<JsTerm>, JsValue> {
+        if value.is_null() || value.is_undefined() {
+            Ok(None)
+        } else {
+            self.to_term(value).map(Some)
+        }
+    }
+
+    pub fn to_quad(&self, value: &JsValue) -> Result<Quad, JsValue> {
+        self.to_quad_from_parts(
+            &Reflect::get(value, &self.subject)?,
+            &Reflect::get(value, &self.predicate)?,
+            &Reflect::get(value, &self.object)?,
+            &Reflect::get(value, &self.graph)?,
+        )
+    }
+
+    pub fn to_quad_from_parts(
+        &self,
+        subject: &JsValue,
+        predicate: &JsValue,
+        object: &JsValue,
+        graph_name: &JsValue,
+    ) -> Result<Quad, JsValue> {
+        Ok(Quad {
+            subject: Subject::try_from(self.to_term(subject)?)?,
+            predicate: NamedNode::try_from(self.to_term(predicate)?)?,
+            object: Term::try_from(self.to_term(object)?)?,
+            graph_name: if graph_name.is_undefined() {
+                GraphName::DefaultGraph
+            } else {
+                GraphName::try_from(self.to_term(graph_name)?)?
+            },
+        })
+    }
+}
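+
+// End-to-end hedged example of using the converter from the Rust side, assuming a
+// JsValue shaped like an RDF/JS quad has been received from a JS caller:
+//
+//     // let js_quad: JsValue = ...; // provided by JS
+//     // let quad: Quad = FROM_JS.with(|c| c.to_quad(&js_quad))?;
+//     // let round_trip: JsValue = JsQuad::from(quad).into();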