Compare commits
262 Commits
Author | SHA1 | Date |
---|---|---|
Niko PLP | c7f873f904 | 7 months ago |
Niko PLP | 7b0b60cda0 | 7 months ago |
Niko PLP | a4e2847810 | 7 months ago |
Niko PLP | 41e2e7206e | 7 months ago |
Niko PLP | aca204d9e5 | 7 months ago |
Niko PLP | b3ae51da82 | 9 months ago |
Niko PLP | c2d31daa1c | 9 months ago |
Niko PLP | f3ae4d8074 | 9 months ago |
Niko PLP | 53834396aa | 9 months ago |
Niko PLP | a9ff0524e2 | 9 months ago |
Niko PLP | 0d82c473f7 | 9 months ago |
Niko PLP | 6226e1fba6 | 9 months ago |
Niko PLP | fdcaf65a8d | 9 months ago |
Niko PLP | 77edc05ced | 9 months ago |
Tpt | 427d675c9b | 9 months ago |
Tpt | 1a40ab2017 | 9 months ago |
Tpt | cbb72c7be6 | 9 months ago |
Tpt | 58699f36f3 | 9 months ago |
Tpt | 83aa8170ea | 9 months ago |
Tpt | 1d5843fddc | 9 months ago |
Tpt | 0f0c1d2742 | 9 months ago |
Tpt | 184b8367dc | 9 months ago |
Tpt | ba396bad10 | 9 months ago |
Tpt | 75695dcdf7 | 9 months ago |
Tpt | 4c27b43e41 | 9 months ago |
Tpt | dcfcdd359c | 9 months ago |
Tpt | f7023a381e | 9 months ago |
Tpt | 2998f795fd | 9 months ago |
Tpt | 4705d75893 | 9 months ago |
Tpt | 9b9cf9cbcb | 9 months ago |
Tpt | 583d64e0c0 | 9 months ago |
Tpt | a0cc75b7cc | 9 months ago |
Tpt | be44451679 | 9 months ago |
Tpt | 43ef3e9e8a | 9 months ago |
Tpt | 0ac70e73dc | 10 months ago |
Tpt | f5b975e4d1 | 10 months ago |
Tpt | bd5e54a00a | 10 months ago |
Tpt | c57615519b | 10 months ago |
Tpt | 130f090555 | 10 months ago |
Tpt | bdde46b5c7 | 10 months ago |
Tpt | 2b656df6ee | 10 months ago |
Tpt | 8e2548467c | 10 months ago |
Tpt | fee7bf0d8a | 10 months ago |
Tpt | e0087c56b3 | 10 months ago |
Tpt | 0b5790a18f | 10 months ago |
Tpt | f7d132f317 | 10 months ago |
Jesse Wright | d361e1d283 | 10 months ago |
Tpt | 1424181379 | 10 months ago |
Tpt | 01d73fa62d | 10 months ago |
Tpt | accadaac34 | 10 months ago |
Tpt | 7d45ea43f5 | 10 months ago |
Tpt | c13cb8db7c | 10 months ago |
Tpt | 6c7514d058 | 10 months ago |
Tpt | e48b268fc5 | 10 months ago |
Tpt | c277804026 | 10 months ago |
Tpt | efae84b5f8 | 10 months ago |
Tpt | 269c73a7c2 | 10 months ago |
Yuri Astrakhan | ea300e9081 | 11 months ago |
Yuri Astrakhan | a078b12508 | 11 months ago |
Yuri Astrakhan | 0400f04915 | 11 months ago |
Tpt | 655ecd3e91 | 11 months ago |
Yuri Astrakhan | 1c3f054836 | 11 months ago |
Yuri Astrakhan | f5de5d3e98 | 11 months ago |
Yuri Astrakhan | 089875ad21 | 11 months ago |
Tpt | 9e3758e2c9 | 11 months ago |
Tpt | be26d210f1 | 11 months ago |
Yuri Astrakhan | a924df0e0a | 11 months ago |
Tpt | 0b1aabfcdd | 11 months ago |
Tpt | 70a4ff231b | 11 months ago |
Tpt | d49fb47767 | 11 months ago |
Yuri Astrakhan | c15233e964 | 11 months ago |
Yuri Astrakhan | 1e37577b71 | 11 months ago |
Yuri Astrakhan | 1e4326a2c5 | 11 months ago |
Tpt | c0d245871c | 11 months ago |
Tpt | 2b6ac5c195 | 11 months ago |
Tpt | ec030fb652 | 11 months ago |
Tpt | 2a81106c34 | 11 months ago |
Tpt | 46d3ed3f99 | 11 months ago |
Tpt | ef765666be | 11 months ago |
Tpt | 0a7cea5e25 | 11 months ago |
Yuri Astrakhan | 18bf383701 | 11 months ago |
Tpt | 54489aacfb | 11 months ago |
Yuri Astrakhan | 6494ba6e31 | 11 months ago |
Yuri Astrakhan | 185d83838c | 11 months ago |
Tpt | d838d55f02 | 11 months ago |
etiennept | f354bc7546 | 11 months ago |
Yuri Astrakhan | a976eb3efc | 11 months ago |
Yuri Astrakhan | 522bda2906 | 11 months ago |
Yuri Astrakhan | 5be6f55155 | 11 months ago |
Yuri Astrakhan | 405b95b4bd | 11 months ago |
Yuri Astrakhan | 5f603bc4fe | 11 months ago |
Yuri Astrakhan | 2b8df24b8b | 11 months ago |
Yuri Astrakhan | 51941c0dc5 | 11 months ago |
Yuri Astrakhan | d4bfcd3b24 | 11 months ago |
Tpt | df040400c5 | 11 months ago |
Tpt | b08c201074 | 11 months ago |
Tpt | c2040a30fd | 11 months ago |
Tpt | c2df0b829d | 12 months ago |
Tpt | cffc536eb9 | 12 months ago |
Tpt | 5cf8025aec | 12 months ago |
Tpt | d4eaa3c5ef | 12 months ago |
Tpt | dcabf50ab6 | 12 months ago |
Tpt | 0d23f4ae48 | 12 months ago |
Tpt | d1da94b08b | 12 months ago |
Tpt | f01796b1a4 | 12 months ago |
Tpt | 93eab63868 | 12 months ago |
Tpt | 42a66f62b9 | 12 months ago |
Tpt | f2a2bd5b5d | 12 months ago |
Tpt | 391e8d7662 | 12 months ago |
Tpt | a5781d1187 | 12 months ago |
Tpt | a84b898fda | 12 months ago |
Tpt | d170b53609 | 12 months ago |
Tpt | 790501e1b3 | 12 months ago |
Tpt | bdf5d593ee | 12 months ago |
Tpt | 1761672b41 | 12 months ago |
Tpt | bde73e5d72 | 12 months ago |
Tpt | 4c79e7ee78 | 12 months ago |
Tpt | 5cc3e37876 | 12 months ago |
Tpt | 8104f9e1de | 12 months ago |
Tpt | ed94f56ab4 | 12 months ago |
Tpt | 025bd2afd2 | 12 months ago |
Tpt | 4756217787 | 12 months ago |
Tpt | 2e9ac3cc1a | 12 months ago |
Tpt | 604d1bbe2e | 12 months ago |
Tpt | 4084acb9b8 | 12 months ago |
Tpt | 2a135283d5 | 12 months ago |
etiennept | 735db897ff | 12 months ago |
Tpt | 4b3f3f3278 | 1 year ago |
Tpt | 3241f47059 | 1 year ago |
Tpt | 4841f89072 | 1 year ago |
Tpt | efd5eec65d | 1 year ago |
Tpt | 899e553249 | 1 year ago |
Tpt | 4f404ab650 | 1 year ago |
Tpt | f445166942 | 1 year ago |
Tpt | 756c5394d0 | 1 year ago |
Tpt | d1cb4cecbd | 1 year ago |
Tpt | 99c3a4cce4 | 1 year ago |
Tpt | 48174cac12 | 1 year ago |
Tpt | a9fee4f6b8 | 1 year ago |
Tpt | f8034c68e9 | 1 year ago |
Tpt | 98caee8f92 | 1 year ago |
Tpt | ddf589ea14 | 1 year ago |
Tpt | d19947414e | 1 year ago |
Tpt | cc41448b18 | 1 year ago |
Tpt | e6d98445e6 | 1 year ago |
Tpt | 261f9c64a5 | 1 year ago |
Tpt | 48db7f872b | 1 year ago |
Tpt | 8a7c6cf2c1 | 1 year ago |
Tpt | ab5f5c1c60 | 1 year ago |
Tpt | a2a6c5a41e | 1 year ago |
Tpt | ea80c11d6e | 1 year ago |
Tpt | 5647624012 | 1 year ago |
Tpt | ef429e6d1b | 1 year ago |
Tpt | 517df6d59e | 1 year ago |
Tpt | 832a4ba27d | 1 year ago |
Tpt | 8d348b2a6f | 1 year ago |
Tpt | 64f45cd11b | 1 year ago |
Tpt | 70b1c52166 | 1 year ago |
Tpt | 38844f6436 | 1 year ago |
Tpt | d280f7d2f7 | 1 year ago |
Tpt | dbb39d867a | 1 year ago |
Tpt | 5e3a2fc89d | 1 year ago |
Tpt | c5f02d9263 | 1 year ago |
Tpt | 90b7b128f2 | 1 year ago |
Tpt | 9b985295ae | 1 year ago |
Tpt | 108721624f | 1 year ago |
Tpt | 67fd726f9d | 1 year ago |
Tpt | 412ca37b3c | 1 year ago |
Tpt | 7a3e07d98d | 1 year ago |
Tpt | 6a21cb0625 | 1 year ago |
Tpt | 0783d1dcda | 1 year ago |
Tpt | 8ee30cf001 | 1 year ago |
Tpt | 8c8ca54596 | 1 year ago |
Tpt | 7c4578f5f5 | 1 year ago |
Tpt | 4c97637e4b | 1 year ago |
Tpt | a6f32390df | 1 year ago |
Tpt | 180ae22293 | 1 year ago |
Tpt | a8f98a0056 | 1 year ago |
Tpt | 1b511ed018 | 1 year ago |
Tpt | 87d2006b6e | 1 year ago |
Tpt | be074000cc | 1 year ago |
Tpt | 3c51dd31bc | 1 year ago |
Tpt | 555f6b8d7c | 1 year ago |
Tpt | bdedcc47e3 | 1 year ago |
Tpt | 99abe69ba1 | 1 year ago |
Tpt | be002dd51e | 1 year ago |
Tpt | 6edfb7a2f4 | 1 year ago |
Tpt | 13c3515d7b | 1 year ago |
Tpt | 8193cac86d | 1 year ago |
Tpt | b1c90b599b | 1 year ago |
Tpt | 1d55635fe2 | 1 year ago |
Tpt | 1eaa77ad93 | 1 year ago |
Tpt | 7fe055d2b4 | 1 year ago |
Tpt | 9da26c6f95 | 1 year ago |
Tpt | f10e5a40a3 | 1 year ago |
Tpt | 024bc7b8e8 | 1 year ago |
Tpt | 6611b491b1 | 1 year ago |
Tpt | c9ec5f7c0c | 1 year ago |
Tpt | d44f9bee7a | 1 year ago |
Tpt | 570f21748d | 1 year ago |
Tpt | d2306cea52 | 1 year ago |
Tpt | 9e76323e2b | 1 year ago |
Tpt | 872111ab88 | 1 year ago |
Tpt | 3de3f9c4bc | 1 year ago |
Tpt | 010196c974 | 1 year ago |
Tpt | bbf184f7ae | 1 year ago |
Tpt | 788450932a | 1 year ago |
Tpt | f586cc048f | 1 year ago |
Tpt | 88e49f6c66 | 1 year ago |
Tpt | 807cf0d436 | 1 year ago |
Tpt | 5fee36e587 | 1 year ago |
Tpt | c6e55c706a | 1 year ago |
Tpt | 7c227830e9 | 1 year ago |
Tpt | 4a798ed3ea | 1 year ago |
Tpt | f183196859 | 1 year ago |
Tpt | 217abaf7ee | 1 year ago |
Tpt | 7cd383af79 | 1 year ago |
Tpt | 73af297b4c | 1 year ago |
Tpt | b06d6506cb | 1 year ago |
Tpt | 12a738279f | 1 year ago |
Tpt | 4cb377bda4 | 1 year ago |
Tpt | afaabf6110 | 1 year ago |
Tpt | 4f7445104a | 1 year ago |
Tpt | 3adf33d2f4 | 1 year ago |
Tpt | 922023b1da | 1 year ago |
Tpt | 077c1fc1a8 | 1 year ago |
Tpt | b22e74379a | 1 year ago |
Tpt | 1e1ed65d3b | 1 year ago |
Tpt | 8a398db20e | 1 year ago |
Tpt | 00f179058e | 1 year ago |
Thomas | 8e770fbb5d | 1 year ago |
Thomas | c31ba0e823 | 1 year ago |
Tpt | cdabe52847 | 1 year ago |
Thomas | 501f9ce6f9 | 1 year ago |
Thomas | 24a1dd2556 | 1 year ago |
Tpt | c8e718ed2d | 1 year ago |
Tpt | db7fab0f20 | 1 year ago |
Tpt | f6c8358b24 | 1 year ago |
Tpt | 69d8ce6b4e | 1 year ago |
Thomas | 94986a0d28 | 1 year ago |
Tpt | 98ac089984 | 2 years ago |
Tpt | 001b6e07b7 | 2 years ago |
Tpt | 86f14ce96f | 2 years ago |
Tpt | cb9922379c | 2 years ago |
Dan Yamamoto | 5085a60a87 | 2 years ago |
Dan Yamamoto | 43e6ce87f8 | 2 years ago |
Tpt | 71b1768d28 | 2 years ago |
Tpt | a1cbfdf67d | 2 years ago |
Tpt | 6cc7488905 | 2 years ago |
Tpt | a27f31b84e | 2 years ago |
Tpt | 785df9b00b | 2 years ago |
Tpt | 76deca135c | 2 years ago |
Tpt | 2281575c14 | 2 years ago |
Tpt | 5af06e926a | 2 years ago |
Tpt | 01caaa5d70 | 2 years ago |
Tpt | 81895cb6bc | 2 years ago |
Tpt | 40b10cdabc | 2 years ago |
Tpt | 7c0563cb1b | 2 years ago |
Tpt | a8abf26913 | 2 years ago |
Tpt | c016116b09 | 2 years ago |
Tpt | ae294683d6 | 2 years ago |
Tpt | ab17138f33 | 2 years ago |
@ -1,137 +0,0 @@ |
|||||||
[build] |
|
||||||
rustflags = [ |
|
||||||
"-Wtrivial-casts", |
|
||||||
"-Wtrivial-numeric-casts", |
|
||||||
"-Wunsafe-code", |
|
||||||
"-Wunused-lifetimes", |
|
||||||
"-Wunused-qualifications", |
|
||||||
# TODO: 1.63+ "-Wclippy::as-underscore", |
|
||||||
# TODO: 1.65+ ""-Wclippy::bool-to-int-with-if", |
|
||||||
"-Wclippy::borrow-as-ptr", |
|
||||||
"-Wclippy::case-sensitive-file-extension-comparisons", |
|
||||||
"-Wclippy::cast-lossless", |
|
||||||
"-Wclippy::cast-possible-truncation", |
|
||||||
"-Wclippy::cast-possible-wrap", |
|
||||||
"-Wclippy::cast-precision-loss", |
|
||||||
"-Wclippy::cast-ptr-alignment", |
|
||||||
"-Wclippy::cast-sign-loss", |
|
||||||
"-Wclippy::checked-conversions", |
|
||||||
"-Wclippy::clone-on-ref-ptr", |
|
||||||
"-Wclippy::cloned-instead-of-copied", |
|
||||||
"-Wclippy::copy-iterator", |
|
||||||
"-Wclippy::dbg-macro", |
|
||||||
"-Wclippy::decimal-literal-representation", |
|
||||||
"-Wclippy::default-trait-access", |
|
||||||
"-Wclippy::default-union-representation", |
|
||||||
# TODO: 1.61+ "-Wclippy::deref-by-slicing", |
|
||||||
# TODO: 1.63+ "-Wclippy::doc-link-with-quotes", |
|
||||||
# TODO: 1.62+ "-Wclippy::empty-drop", |
|
||||||
"-Wclippy::empty-enum", |
|
||||||
# TODO: on major version "-Wclippy::empty-structs-with-brackets", |
|
||||||
"-Wclippy::enum-glob-use", |
|
||||||
"-Wclippy::exit", |
|
||||||
"-Wclippy::expect-used", |
|
||||||
"-Wclippy::expl-impl-clone-on-copy", |
|
||||||
"-Wclippy::explicit-deref-methods", |
|
||||||
"-Wclippy::explicit-into-iter-loop", |
|
||||||
"-Wclippy::explicit-iter-loop", |
|
||||||
"-Wclippy::filter-map-next", |
|
||||||
"-Wclippy::flat-map-option", |
|
||||||
"-Wclippy::fn-to-numeric-cast-any", |
|
||||||
# TODO: 1.62+ "-Wclippy::format-push-string", |
|
||||||
"-Wclippy::from-iter-instead-of-collect", |
|
||||||
"-Wclippy::get-unwrap", |
|
||||||
"-Wclippy::if-not-else", |
|
||||||
"-Wclippy::if-then-some-else-none", |
|
||||||
"-Wclippy::implicit-clone", |
|
||||||
"-Wclippy::inconsistent-struct-constructor", |
|
||||||
"-Wclippy::index-refutable-slice", |
|
||||||
"-Wclippy::inefficient-to-string", |
|
||||||
"-Wclippy::inline-always", |
|
||||||
"-Wclippy::inline-asm-x86-att-syntax", |
|
||||||
"-Wclippy::inline-asm-x86-intel-syntax", |
|
||||||
"-Wclippy::invalid-upcast-comparisons", |
|
||||||
"-Wclippy::items-after-statements", |
|
||||||
"-Wclippy::large-digit-groups", |
|
||||||
# TODO: 1.68+ "-Wclippy::large-futures", |
|
||||||
"-Wclippy::large-stack-arrays", |
|
||||||
"-Wclippy::large-types-passed-by-value", |
|
||||||
"-Wclippy::let-underscore-must-use", |
|
||||||
"-Wclippy::let-unit-value", |
|
||||||
"-Wclippy::linkedlist", |
|
||||||
"-Wclippy::lossy-float-literal", |
|
||||||
"-Wclippy::macro-use-imports", |
|
||||||
"-Wclippy::manual-assert", |
|
||||||
# TODO: 1.65+ "-Wclippy::manual-instant-elapsed", |
|
||||||
# TODO: 1.67+ "-Wclippy::manual-let-else", |
|
||||||
"-Wclippy::manual-ok-or", |
|
||||||
# TODO: 1.65+ "-Wclippy::manual-string-new", |
|
||||||
"-Wclippy::many-single-char-names", |
|
||||||
"-Wclippy::map-unwrap-or", |
|
||||||
"-Wclippy::match-bool", |
|
||||||
"-Wclippy::match-same-arms", |
|
||||||
"-Wclippy::match-wildcard-for-single-variants", |
|
||||||
"-Wclippy::maybe-infinite-iter", |
|
||||||
"-Wclippy::mem-forget", |
|
||||||
# TODO: 1.63+ "-Wclippy::mismatching-type-param-order", |
|
||||||
"-Wclippy::multiple-inherent-impl", |
|
||||||
"-Wclippy::mut-mut", |
|
||||||
"-Wclippy::mutex-atomic", |
|
||||||
"-Wclippy::naive-bytecount", |
|
||||||
"-Wclippy::needless-bitwise-bool", |
|
||||||
"-Wclippy::needless-continue", |
|
||||||
"-Wclippy::needless-pass-by-value", |
|
||||||
"-Wclippy::no-effect-underscore-binding", |
|
||||||
# TODO: 1.69+ "-Wclippy::no-mangle-with-rust-abi", |
|
||||||
"-Wclippy::non-ascii-literal", |
|
||||||
"-Wclippy::print-stderr", |
|
||||||
"-Wclippy::print-stdout", |
|
||||||
"-Wclippy::ptr-as-ptr", |
|
||||||
"-Wclippy::range-minus-one", |
|
||||||
"-Wclippy::range-plus-one", |
|
||||||
"-Wclippy::rc-buffer", |
|
||||||
"-Wclippy::rc-mutex", |
|
||||||
"-Wclippy::redundant-closure-for-method-calls", |
|
||||||
"-Wclippy::redundant-else", |
|
||||||
"-Wclippy::redundant-feature-names", |
|
||||||
"-Wclippy::ref-binding-to-reference", |
|
||||||
"-Wclippy::ref-option-ref", |
|
||||||
"-Wclippy::rest-pat-in-fully-bound-structs", |
|
||||||
"-Wclippy::return-self-not-must-use", |
|
||||||
"-Wclippy::same-functions-in-if-condition", |
|
||||||
# TODO: strange failure on 1.60 "-Wclippy::same-name-method", |
|
||||||
# TODO: 1.68+ "-Wclippy::semicolon-outside-block", |
|
||||||
"-Wclippy::single-match-else", |
|
||||||
"-Wclippy::stable-sort-primitive", |
|
||||||
"-Wclippy::str-to-string", |
|
||||||
"-Wclippy::string-add", |
|
||||||
"-Wclippy::string-add-assign", |
|
||||||
"-Wclippy::string-lit-as-bytes", |
|
||||||
"-Wclippy::string-to-string", |
|
||||||
# TODO: 1.67+ "-Wclippy::suspicious-xor-used-as-pow", |
|
||||||
"-Wclippy::todo", |
|
||||||
"-Wclippy::transmute-ptr-to-ptr", |
|
||||||
"-Wclippy::trivially-copy-pass-by-ref", |
|
||||||
"-Wclippy::try-err", |
|
||||||
"-Wclippy::unicode-not-nfc", |
|
||||||
"-Wclippy::unimplemented", |
|
||||||
# TODO: 1.66+ "-Wclippy::uninlined-format-args", |
|
||||||
# TODO: 1.70+ "-Wclippy::unnecessary-box-returns", |
|
||||||
# TODO: 1.61+ "-Wclippy::unnecessary-join", |
|
||||||
# TODO: 1.67+ "-Wclippy::unnecessary-safety-comment", |
|
||||||
# TODO: 1.67+ "-Wclippy::unnecessary-safety-doc", |
|
||||||
"-Wclippy::unnecessary-self-imports", |
|
||||||
"-Wclippy::unnecessary-wraps", |
|
||||||
"-Wclippy::unneeded-field-pattern", |
|
||||||
"-Wclippy::unnested-or-patterns", |
|
||||||
"-Wclippy::unreadable-literal", |
|
||||||
"-Wclippy::unseparated-literal-suffix", |
|
||||||
"-Wclippy::unused-async", |
|
||||||
"-Wclippy::unused-self", |
|
||||||
"-Wclippy::use-debug", |
|
||||||
"-Wclippy::used-underscore-binding", |
|
||||||
"-Wclippy::verbose-bit-mask", |
|
||||||
"-Wclippy::verbose-file-reads", |
|
||||||
"-Wclippy::wildcard-dependencies", |
|
||||||
"-Wclippy::zero-sized-map-values", |
|
||||||
] |
|
@ -1,5 +1,4 @@ |
|||||||
FROM gcr.io/oss-fuzz-base/base-builder-rust:v1 |
FROM gcr.io/oss-fuzz-base/base-builder-rust:v1 |
||||||
RUN apt-get update && apt-get install -y llvm-dev libclang-dev clang && apt-get clean && rm --recursive --force /var/lib/apt/lists/* |
|
||||||
COPY . $SRC/oxigraph |
COPY . $SRC/oxigraph |
||||||
WORKDIR oxigraph |
WORKDIR oxigraph |
||||||
COPY .clusterfuzzlite/build.sh $SRC/ |
COPY .clusterfuzzlite/build.sh $SRC/ |
||||||
|
@ -1,21 +0,0 @@ |
|||||||
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/rust/.devcontainer/base.Dockerfile |
|
||||||
|
|
||||||
# [Choice] Debian OS version (use bullseye on local arm64/Apple Silicon): buster, bullseye |
|
||||||
ARG VARIANT="bullseye" |
|
||||||
FROM mcr.microsoft.com/vscode/devcontainers/rust:0-${VARIANT} |
|
||||||
|
|
||||||
# [Optional] Uncomment this section to install additional packages. |
|
||||||
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ |
|
||||||
&& apt-get -y install --no-install-recommends \ |
|
||||||
python3 \ |
|
||||||
python3-venv \ |
|
||||||
python-is-python3 \ |
|
||||||
libclang-dev |
|
||||||
|
|
||||||
ENV VIRTUAL_ENV=/opt/venv |
|
||||||
RUN python -m venv $VIRTUAL_ENV |
|
||||||
ENV PATH="$VIRTUAL_ENV/bin:$PATH" |
|
||||||
RUN pip install --no-cache-dir -r python/requirements.dev.txt |
|
||||||
|
|
||||||
# Change owner to the devcontainer user |
|
||||||
RUN chown -R 1000:1000 $VIRTUAL_ENV |
|
@ -1,69 +0,0 @@ |
|||||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: |
|
||||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/rust |
|
||||||
{ |
|
||||||
"name": "Rust", |
|
||||||
"build": { |
|
||||||
"dockerfile": "Dockerfile", |
|
||||||
"args": { |
|
||||||
// Use the VARIANT arg to pick a Debian OS version: buster, bullseye |
|
||||||
// Use bullseye when on local on arm64/Apple Silicon. |
|
||||||
"VARIANT": "bullseye" |
|
||||||
} |
|
||||||
}, |
|
||||||
"runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"], |
|
||||||
|
|
||||||
// Configure tool-specific properties. |
|
||||||
"customizations": { |
|
||||||
// Configure properties specific to VS Code. |
|
||||||
"vscode": { |
|
||||||
// Set *default* container specific settings.json values on container create. |
|
||||||
"settings": { |
|
||||||
"lldb.executable": "/usr/bin/lldb", |
|
||||||
// VS Code don't watch files under ./target |
|
||||||
"files.watcherExclude": { |
|
||||||
"**/target/**": true |
|
||||||
}, |
|
||||||
"rust-analyzer.checkOnSave.command": "clippy", |
|
||||||
|
|
||||||
"python.defaultInterpreterPath": "/opt/venv/bin/python", |
|
||||||
"python.linting.enabled": true, |
|
||||||
"python.linting.pylintEnabled": true, |
|
||||||
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", |
|
||||||
"python.formatting.blackPath": "/usr/local/py-utils/bin/black", |
|
||||||
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", |
|
||||||
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit", |
|
||||||
"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", |
|
||||||
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", |
|
||||||
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", |
|
||||||
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", |
|
||||||
"python.linting.pylintPath": "/opt/venv/bin/pylint", |
|
||||||
"python.testing.pytestPath": "/opt/venv/bin/pytest" |
|
||||||
}, |
|
||||||
|
|
||||||
// Add the IDs of extensions you want installed when the container is created. |
|
||||||
"extensions": [ |
|
||||||
"vadimcn.vscode-lldb", |
|
||||||
"mutantdino.resourcemonitor", |
|
||||||
"rust-lang.rust-analyzer", |
|
||||||
"tamasfe.even-better-toml", |
|
||||||
"serayuzgur.crates", |
|
||||||
"ms-python.python", |
|
||||||
"ms-python.vscode-pylance", |
|
||||||
"esbenp.prettier-vscode", |
|
||||||
"stardog-union.stardog-rdf-grammars" |
|
||||||
] |
|
||||||
} |
|
||||||
}, |
|
||||||
|
|
||||||
// Use 'forwardPorts' to make a list of ports inside the container available locally. |
|
||||||
// "forwardPorts": [], |
|
||||||
|
|
||||||
// Use 'postCreateCommand' to run commands after the container is created. |
|
||||||
"postCreateCommand": "git submodule update --init && cargo build", |
|
||||||
|
|
||||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. |
|
||||||
"remoteUser": "vscode", |
|
||||||
"features": { |
|
||||||
"python": "3.10" |
|
||||||
} |
|
||||||
} |
|
@ -0,0 +1,27 @@ |
|||||||
|
name: 'Setup Rust' |
||||||
|
description: 'Setup Rust using Rustup' |
||||||
|
inputs: |
||||||
|
version: |
||||||
|
description: 'Rust version to use. By default latest stable version' |
||||||
|
required: false |
||||||
|
default: 'stable' |
||||||
|
component: |
||||||
|
description: 'Rust extra component to install like clippy' |
||||||
|
required: false |
||||||
|
target: |
||||||
|
description: 'Rust extra target to install like wasm32-unknown-unknown' |
||||||
|
required: false |
||||||
|
runs: |
||||||
|
using: "composite" |
||||||
|
steps: |
||||||
|
- run: rustup update |
||||||
|
shell: bash |
||||||
|
- run: rustup default ${{ inputs.version }} |
||||||
|
shell: bash |
||||||
|
- run: rustup component add ${{ inputs.component }} |
||||||
|
shell: bash |
||||||
|
if: ${{ inputs.component }} |
||||||
|
- run: rustup target add ${{ inputs.target }} |
||||||
|
shell: bash |
||||||
|
if: ${{ inputs.target }} |
||||||
|
- uses: Swatinem/rust-cache@v2 |
@ -0,0 +1,11 @@ |
|||||||
|
if [ -f "rocksdb" ] |
||||||
|
then |
||||||
|
cd rocksdb || exit |
||||||
|
else |
||||||
|
git clone https://github.com/facebook/rocksdb.git |
||||||
|
cd rocksdb || exit |
||||||
|
git checkout v8.0.0 |
||||||
|
make shared_lib |
||||||
|
fi |
||||||
|
sudo make install-shared |
||||||
|
sudo ldconfig /usr/local/lib |
File diff suppressed because it is too large
Load Diff
@ -1,22 +1,272 @@ |
|||||||
[workspace] |
[workspace] |
||||||
members = [ |
members = [ |
||||||
"js", |
"js", |
||||||
"lib", |
"lib/oxigraph", |
||||||
"lib/oxrdf", |
"lib/oxrdf", |
||||||
|
"lib/oxrdfio", |
||||||
|
"lib/oxrdfxml", |
||||||
"lib/oxsdatatypes", |
"lib/oxsdatatypes", |
||||||
"lib/spargebra", |
"lib/oxttl", |
||||||
"lib/sparesults", |
"lib/sparesults", |
||||||
|
"lib/spargebra", |
||||||
|
"lib/sparopt", |
||||||
"lib/sparql-smith", |
"lib/sparql-smith", |
||||||
"oxrocksdb-sys", |
|
||||||
"python", |
|
||||||
"server", |
|
||||||
"testsuite" |
"testsuite" |
||||||
] |
] |
||||||
resolver = "2" |
resolver = "2" |
||||||
|
|
||||||
|
[workspace.package] |
||||||
|
version = "0.4.0-alpha.7-dev" |
||||||
|
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
||||||
|
license = "MIT OR Apache-2.0" |
||||||
|
edition = "2021" |
||||||
|
rust-version = "1.70" |
||||||
|
|
||||||
|
[workspace.dependencies] |
||||||
|
rocksdb = {git = "https://git.nextgraph.org/NextGraph/rust-rocksdb.git", branch = "master", features = [ ] } |
||||||
|
serde = { version = "1.0.142", features = ["derive"] } |
||||||
|
anyhow = "1.0.72" |
||||||
|
arbitrary = "1.3" |
||||||
|
assert_cmd = "2.0" |
||||||
|
assert_fs = "1.0" |
||||||
|
bindgen = ">=0.60, <0.70" |
||||||
|
cc = "1.0.73" |
||||||
|
clap = "4.0" |
||||||
|
codspeed-criterion-compat = "2.3.3" |
||||||
|
console_error_panic_hook = "0.1.7" |
||||||
|
digest = "0.10" |
||||||
|
flate2 = "1.0" |
||||||
|
getrandom = "0.2.8" |
||||||
|
hex = "0.4" |
||||||
|
js-sys = "0.3.60" |
||||||
|
json-event-parser = "0.2.0-alpha.2" |
||||||
|
md-5 = "0.10" |
||||||
|
memchr = "2.5" |
||||||
|
oxilangtag = "0.1" |
||||||
|
oxiri = "0.2.3" |
||||||
|
peg = "0.8" |
||||||
|
pkg-config = "0.3.25" |
||||||
|
predicates = ">=2.0, <4.0" |
||||||
|
pyo3 = "0.21.0" |
||||||
|
quick-xml = ">=0.29, <0.32" |
||||||
|
rand = "0.8" |
||||||
|
rayon-core = "1.11" |
||||||
|
regex = "1.7" |
||||||
|
sha1 = "0.10" |
||||||
|
sha2 = "0.10" |
||||||
|
siphasher = ">=0.3, <2.0" |
||||||
|
text-diff = "0.4" |
||||||
|
thiserror = "1.0.50" |
||||||
|
time = "0.3" |
||||||
|
tokio = "1.29" |
||||||
|
url = "2.4" |
||||||
|
wasm-bindgen = "0.2.83" |
||||||
|
zstd = ">=0.12, <0.14" |
||||||
|
|
||||||
|
# Internal dependencies |
||||||
|
oxigraph = { version = "=0.4.0-alpha.7-dev", path = "lib/oxigraph" } |
||||||
|
oxrdf = { version = "=0.2.0-alpha.4", path = "lib/oxrdf" } |
||||||
|
oxrdfio = { version = "=0.1.0-alpha.5", path = "lib/oxrdfio" } |
||||||
|
oxrdfxml = { version = "=0.1.0-alpha.5", path = "lib/oxrdfxml" } |
||||||
|
oxsdatatypes = { version = "=0.2.0-alpha.1", path = "lib/oxsdatatypes" } |
||||||
|
oxttl = { version = "=0.1.0-alpha.5", path = "lib/oxttl" } |
||||||
|
sparesults = { version = "=0.2.0-alpha.4", path = "lib/sparesults" } |
||||||
|
spargebra = { version = "=0.3.0-alpha.4", path = "lib/spargebra" } |
||||||
|
sparopt = { version = "=0.1.0-alpha.5-dev", path = "lib/sparopt" } |
||||||
|
|
||||||
|
[workspace.lints.rust] |
||||||
|
absolute_paths_not_starting_with_crate = "warn" |
||||||
|
elided_lifetimes_in_paths = "warn" |
||||||
|
explicit_outlives_requirements = "warn" |
||||||
|
let_underscore_drop = "warn" |
||||||
|
macro_use_extern_crate = "warn" |
||||||
|
# TODO missing_docs = "warn" |
||||||
|
trivial_casts = "warn" |
||||||
|
trivial_numeric_casts = "warn" |
||||||
|
unsafe_code = "warn" |
||||||
|
unused_import_braces = "warn" |
||||||
|
unused_lifetimes = "warn" |
||||||
|
unused_macro_rules = "warn" |
||||||
|
unused_qualifications = "warn" |
||||||
|
|
||||||
|
[workspace.lints.clippy] |
||||||
|
allow_attributes = "warn" |
||||||
|
allow_attributes_without_reason = "warn" |
||||||
|
as_underscore = "warn" |
||||||
|
assertions_on_result_states = "warn" |
||||||
|
bool_to_int_with_if = "warn" |
||||||
|
borrow_as_ptr = "warn" |
||||||
|
case_sensitive_file_extension_comparisons = "warn" |
||||||
|
cast_lossless = "warn" |
||||||
|
cast_possible_truncation = "warn" |
||||||
|
cast_possible_wrap = "warn" |
||||||
|
cast_precision_loss = "warn" |
||||||
|
cast_ptr_alignment = "warn" |
||||||
|
cast_sign_loss = "warn" |
||||||
|
checked_conversions = "warn" |
||||||
|
clone_on_ref_ptr = "warn" |
||||||
|
cloned_instead_of_copied = "warn" |
||||||
|
copy_iterator = "warn" |
||||||
|
create_dir = "warn" |
||||||
|
dbg_macro = "warn" |
||||||
|
decimal_literal_representation = "warn" |
||||||
|
default_trait_access = "warn" |
||||||
|
default_union_representation = "warn" |
||||||
|
deref_by_slicing = "warn" |
||||||
|
disallowed_script_idents = "warn" |
||||||
|
doc_link_with_quotes = "warn" |
||||||
|
empty_drop = "warn" |
||||||
|
empty_enum = "warn" |
||||||
|
empty_structs_with_brackets = "warn" |
||||||
|
enum_glob_use = "warn" |
||||||
|
error_impl_error = "warn" |
||||||
|
exit = "warn" |
||||||
|
expect_used = "warn" |
||||||
|
expl_impl_clone_on_copy = "warn" |
||||||
|
explicit_deref_methods = "warn" |
||||||
|
explicit_into_iter_loop = "warn" |
||||||
|
explicit_iter_loop = "warn" |
||||||
|
filetype_is_file = "warn" |
||||||
|
filter_map_next = "warn" |
||||||
|
flat_map_option = "warn" |
||||||
|
fn_params_excessive_bools = "warn" |
||||||
|
fn_to_numeric_cast_any = "warn" |
||||||
|
format_push_string = "warn" |
||||||
|
from_iter_instead_of_collect = "warn" |
||||||
|
get_unwrap = "warn" |
||||||
|
host_endian_bytes = "warn" |
||||||
|
if_not_else = "warn" |
||||||
|
if_then_some_else_none = "warn" |
||||||
|
ignored_unit_patterns = "warn" |
||||||
|
implicit_clone = "warn" |
||||||
|
implicit_hasher = "warn" |
||||||
|
inconsistent_struct_constructor = "warn" |
||||||
|
index_refutable_slice = "warn" |
||||||
|
inefficient_to_string = "warn" |
||||||
|
infinite_loop = "warn" |
||||||
|
inline_always = "warn" |
||||||
|
inline_asm_x86_att_syntax = "warn" |
||||||
|
inline_asm_x86_intel_syntax = "warn" |
||||||
|
into_iter_without_iter = "warn" |
||||||
|
invalid_upcast_comparisons = "warn" |
||||||
|
items_after_statements = "warn" |
||||||
|
iter_not_returning_iterator = "warn" |
||||||
|
iter_without_into_iter = "warn" |
||||||
|
large_digit_groups = "warn" |
||||||
|
large_futures = "warn" |
||||||
|
large_include_file = "warn" |
||||||
|
large_stack_arrays = "warn" |
||||||
|
large_types_passed_by_value = "warn" |
||||||
|
let_underscore_must_use = "warn" |
||||||
|
let_underscore_untyped = "warn" |
||||||
|
linkedlist = "warn" |
||||||
|
lossy_float_literal = "warn" |
||||||
|
macro_use_imports = "warn" |
||||||
|
manual_assert = "warn" |
||||||
|
manual_instant_elapsed = "warn" |
||||||
|
manual_let_else = "warn" |
||||||
|
manual_ok_or = "warn" |
||||||
|
manual_string_new = "warn" |
||||||
|
many_single_char_names = "warn" |
||||||
|
map_unwrap_or = "warn" |
||||||
|
match_bool = "warn" |
||||||
|
match_on_vec_items = "warn" |
||||||
|
match_same_arms = "warn" |
||||||
|
match_wild_err_arm = "warn" |
||||||
|
match_wildcard_for_single_variants = "warn" |
||||||
|
maybe_infinite_iter = "warn" |
||||||
|
mem_forget = "warn" |
||||||
|
mismatching_type_param_order = "warn" |
||||||
|
missing_assert_message = "warn" |
||||||
|
missing_asserts_for_indexing = "warn" |
||||||
|
missing_fields_in_debug = "warn" |
||||||
|
multiple_inherent_impl = "warn" |
||||||
|
mut_mut = "warn" |
||||||
|
mutex_atomic = "warn" |
||||||
|
naive_bytecount = "warn" |
||||||
|
needless_bitwise_bool = "warn" |
||||||
|
needless_continue = "warn" |
||||||
|
needless_for_each = "warn" |
||||||
|
needless_pass_by_value = "warn" |
||||||
|
needless_raw_string_hashes = "warn" |
||||||
|
needless_raw_strings = "warn" |
||||||
|
negative_feature_names = "warn" |
||||||
|
no_effect_underscore_binding = "warn" |
||||||
|
no_mangle_with_rust_abi = "warn" |
||||||
|
non_ascii_literal = "warn" |
||||||
|
panic = "warn" |
||||||
|
panic_in_result_fn = "warn" |
||||||
|
partial_pub_fields = "warn" |
||||||
|
print_stderr = "warn" |
||||||
|
print_stdout = "warn" |
||||||
|
ptr_as_ptr = "warn" |
||||||
|
ptr_cast_constness = "warn" |
||||||
|
pub_without_shorthand = "warn" |
||||||
|
range_minus_one = "warn" |
||||||
|
range_plus_one = "warn" |
||||||
|
rc_buffer = "warn" |
||||||
|
rc_mutex = "warn" |
||||||
|
redundant_closure_for_method_calls = "warn" |
||||||
|
redundant_else = "warn" |
||||||
|
redundant_feature_names = "warn" |
||||||
|
redundant_type_annotations = "warn" |
||||||
|
ref_binding_to_reference = "warn" |
||||||
|
ref_option_ref = "warn" |
||||||
|
ref_patterns = "warn" |
||||||
|
rest_pat_in_fully_bound_structs = "warn" |
||||||
|
return_self_not_must_use = "warn" |
||||||
|
same_functions_in_if_condition = "warn" |
||||||
|
same_name_method = "warn" |
||||||
|
semicolon_inside_block = "warn" |
||||||
|
shadow_same = "warn" |
||||||
|
should_panic_without_expect = "warn" |
||||||
|
single_match_else = "warn" |
||||||
|
stable_sort_primitive = "warn" |
||||||
|
str_to_string = "warn" |
||||||
|
string_add = "warn" |
||||||
|
string_add_assign = "warn" |
||||||
|
string_lit_chars_any = "warn" |
||||||
|
string_to_string = "warn" |
||||||
|
struct_excessive_bools = "warn" |
||||||
|
struct_field_names = "warn" |
||||||
|
suspicious_xor_used_as_pow = "warn" |
||||||
|
tests_outside_test_module = "warn" |
||||||
|
todo = "warn" |
||||||
|
transmute_ptr_to_ptr = "warn" |
||||||
|
trivially_copy_pass_by_ref = "warn" |
||||||
|
try_err = "warn" |
||||||
|
unchecked_duration_subtraction = "warn" |
||||||
|
undocumented_unsafe_blocks = "warn" |
||||||
|
unicode_not_nfc = "warn" |
||||||
|
unimplemented = "warn" |
||||||
|
uninlined_format_args = "warn" |
||||||
|
unnecessary_box_returns = "warn" |
||||||
|
unnecessary_join = "warn" |
||||||
|
unnecessary_safety_comment = "warn" |
||||||
|
unnecessary_safety_doc = "warn" |
||||||
|
unnecessary_self_imports = "warn" |
||||||
|
unnecessary_wraps = "warn" |
||||||
|
unneeded_field_pattern = "warn" |
||||||
|
unnested_or_patterns = "warn" |
||||||
|
unreadable_literal = "warn" |
||||||
|
unsafe_derive_deserialize = "warn" |
||||||
|
unseparated_literal_suffix = "warn" |
||||||
|
unused_async = "warn" |
||||||
|
unused_self = "warn" |
||||||
|
unwrap_in_result = "warn" |
||||||
|
use_debug = "warn" |
||||||
|
used_underscore_binding = "warn" |
||||||
|
verbose_bit_mask = "warn" |
||||||
|
verbose_file_reads = "warn" |
||||||
|
wildcard_dependencies = "warn" |
||||||
|
zero_sized_map_values = "warn" |
||||||
|
|
||||||
[profile.release] |
[profile.release] |
||||||
lto = true |
lto = true |
||||||
codegen-units = 1 |
codegen-units = 1 |
||||||
|
strip = "debuginfo" |
||||||
|
|
||||||
[profile.release.package.oxigraph_js] |
[profile.release.package.oxigraph-js] |
||||||
|
codegen-units = 1 |
||||||
opt-level = "z" |
opt-level = "z" |
||||||
|
strip = "debuginfo" |
||||||
|
@ -1,4 +1,4 @@ |
|||||||
avoid-breaking-exported-api = true |
avoid-breaking-exported-api = false |
||||||
cognitive-complexity-threshold = 50 |
cognitive-complexity-threshold = 50 |
||||||
too-many-arguments-threshold = 10 |
too-many-arguments-threshold = 10 |
||||||
type-complexity-threshold = 500 |
type-complexity-threshold = 500 |
After Width: | Height: | Size: 4.6 KiB |
@ -0,0 +1,35 @@ |
|||||||
|
+------------------+ +----------------+ +-----------------+ |
||||||
|
+ oxigraph CLI {r} + + pyoxigraph {p} + + oxigraph JS {j} + |
||||||
|
+------------------+ +----------------+ +-----------------+ |
||||||
|
|
||||||
|
+---------------------------------------------------------------------------+ |
||||||
|
+ oxigraph (Rust) {r} + |
||||||
|
+---------------------------------------------------------------------------+ |
||||||
|
|
||||||
|
+----------------------------+ +-------------+ |
||||||
|
+ oxrdfio {r} + + sparopt {r} + |
||||||
|
+----------------------------+ +-------------+ |
||||||
|
|
||||||
|
+-----------+ +--------------+ +-----------------+ +----------------+ |
||||||
|
+ oxttl {r} + + oxrdfxml {r} + + spargebra {r} + + sparesults {r} + |
||||||
|
+-----------+ +--------------+ +-----------------+ +----------------+ |
||||||
|
|
||||||
|
+-----------------------------------------------------------------------+ |
||||||
|
+ oxrdf {r} + |
||||||
|
+-----------------------------------------------------------------------+ |
||||||
|
|
||||||
|
+------------------+ |
||||||
|
+ oxsdatatypes {r} + |
||||||
|
+------------------+ |
||||||
|
|
||||||
|
|
||||||
|
# Legend: |
||||||
|
r = { |
||||||
|
fill: papayawhip; |
||||||
|
} |
||||||
|
p = { |
||||||
|
fill: lightyellow; |
||||||
|
} |
||||||
|
j = { |
||||||
|
fill: lightgreen; |
||||||
|
} |
@ -0,0 +1,28 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxttl::N3Parser; |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
let mut quads = Vec::new(); |
||||||
|
let mut parser = N3Parser::new() |
||||||
|
.with_base_iri("http://example.com/") |
||||||
|
.unwrap() |
||||||
|
.parse(); |
||||||
|
for chunk in data.split(|c| *c == 0xFF) { |
||||||
|
parser.extend_from_slice(chunk); |
||||||
|
while let Some(result) = parser.read_next() { |
||||||
|
if let Ok(quad) = result { |
||||||
|
quads.push(quad); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
parser.end(); |
||||||
|
while let Some(result) = parser.read_next() { |
||||||
|
if let Ok(quad) = result { |
||||||
|
quads.push(quad); |
||||||
|
} |
||||||
|
} |
||||||
|
assert!(parser.is_end()); |
||||||
|
//TODO: serialize
|
||||||
|
}); |
@ -0,0 +1,84 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxrdf::Quad; |
||||||
|
use oxttl::{NQuadsParser, NQuadsSerializer}; |
||||||
|
|
||||||
|
fn parse<'a>( |
||||||
|
chunks: impl IntoIterator<Item = &'a [u8]>, |
||||||
|
unchecked: bool, |
||||||
|
) -> (Vec<Quad>, Vec<String>) { |
||||||
|
let mut quads = Vec::new(); |
||||||
|
let mut errors = Vec::new(); |
||||||
|
let mut parser = NQuadsParser::new().with_quoted_triples(); |
||||||
|
if unchecked { |
||||||
|
parser = parser.unchecked(); |
||||||
|
} |
||||||
|
let mut reader = parser.parse(); |
||||||
|
for chunk in chunks { |
||||||
|
reader.extend_from_slice(chunk); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
reader.end(); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
assert!(reader.is_end()); |
||||||
|
(quads, errors) |
||||||
|
} |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
// We parse with splitting
|
||||||
|
let (quads, errors) = parse(data.split(|c| *c == 0xFF), false); |
||||||
|
// We parse without splitting
|
||||||
|
let (quads_without_split, errors_without_split) = parse( |
||||||
|
[data |
||||||
|
.iter() |
||||||
|
.copied() |
||||||
|
.filter(|c| *c != 0xFF) |
||||||
|
.collect::<Vec<_>>() |
||||||
|
.as_slice()], |
||||||
|
false, |
||||||
|
); |
||||||
|
assert_eq!(quads, quads_without_split); |
||||||
|
assert_eq!(errors, errors_without_split); |
||||||
|
|
||||||
|
// We test also unchecked if valid
|
||||||
|
if errors.is_empty() { |
||||||
|
let (quads_unchecked, errors_unchecked) = parse(data.split(|c| *c == 0xFF), true); |
||||||
|
assert!(errors_unchecked.is_empty()); |
||||||
|
assert_eq!(quads, quads_unchecked); |
||||||
|
} |
||||||
|
|
||||||
|
// We serialize
|
||||||
|
let mut writer = NQuadsSerializer::new().serialize_to_write(Vec::new()); |
||||||
|
for quad in &quads { |
||||||
|
writer.write_quad(quad).unwrap(); |
||||||
|
} |
||||||
|
let new_serialization = writer.finish(); |
||||||
|
|
||||||
|
// We parse the serialization
|
||||||
|
let new_quads = NQuadsParser::new() |
||||||
|
.with_quoted_triples() |
||||||
|
.parse_read(new_serialization.as_slice()) |
||||||
|
.collect::<Result<Vec<_>, _>>() |
||||||
|
.map_err(|e| { |
||||||
|
format!( |
||||||
|
"Error on {:?} from {quads:?} based on {:?}: {e}", |
||||||
|
String::from_utf8_lossy(&new_serialization), |
||||||
|
String::from_utf8_lossy(data) |
||||||
|
) |
||||||
|
}) |
||||||
|
.unwrap(); |
||||||
|
|
||||||
|
// We check the roundtrip has not changed anything
|
||||||
|
assert_eq!(new_quads, quads); |
||||||
|
}); |
@ -0,0 +1,35 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxrdfxml::{RdfXmlParser, RdfXmlSerializer}; |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
// We parse
|
||||||
|
let triples = RdfXmlParser::new() |
||||||
|
.parse_read(data) |
||||||
|
.flatten() |
||||||
|
.collect::<Vec<_>>(); |
||||||
|
|
||||||
|
// We serialize
|
||||||
|
let mut writer = RdfXmlSerializer::new().serialize_to_write(Vec::new()); |
||||||
|
for triple in &triples { |
||||||
|
writer.write_triple(triple).unwrap(); |
||||||
|
} |
||||||
|
let new_serialization = writer.finish().unwrap(); |
||||||
|
|
||||||
|
// We parse the serialization
|
||||||
|
let new_triples = RdfXmlParser::new() |
||||||
|
.parse_read(new_serialization.as_slice()) |
||||||
|
.collect::<Result<Vec<_>, _>>() |
||||||
|
.map_err(|e| { |
||||||
|
format!( |
||||||
|
"Error on {:?} from {triples:?} based on {:?}: {e}", |
||||||
|
String::from_utf8_lossy(&new_serialization), |
||||||
|
String::from_utf8_lossy(data) |
||||||
|
) |
||||||
|
}) |
||||||
|
.unwrap(); |
||||||
|
|
||||||
|
// We check the roundtrip has not changed anything
|
||||||
|
assert_eq!(new_triples, triples); |
||||||
|
}); |
@ -0,0 +1,166 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxrdf::graph::CanonicalizationAlgorithm; |
||||||
|
use oxrdf::{Dataset, GraphName, Quad, Subject, Term, Triple}; |
||||||
|
use oxttl::{TriGParser, TriGSerializer}; |
||||||
|
|
||||||
|
fn parse<'a>( |
||||||
|
chunks: impl IntoIterator<Item = &'a [u8]>, |
||||||
|
unchecked: bool, |
||||||
|
) -> (Vec<Quad>, Vec<String>, Vec<(String, String)>) { |
||||||
|
let mut quads = Vec::new(); |
||||||
|
let mut errors = Vec::new(); |
||||||
|
let mut parser = TriGParser::new() |
||||||
|
.with_quoted_triples() |
||||||
|
.with_base_iri("http://example.com/") |
||||||
|
.unwrap(); |
||||||
|
if unchecked { |
||||||
|
parser = parser.unchecked(); |
||||||
|
} |
||||||
|
let mut reader = parser.parse(); |
||||||
|
for chunk in chunks { |
||||||
|
reader.extend_from_slice(chunk); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
reader.end(); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
assert!(reader.is_end()); |
||||||
|
( |
||||||
|
quads, |
||||||
|
errors, |
||||||
|
reader |
||||||
|
.prefixes() |
||||||
|
.map(|(k, v)| (k.to_owned(), v.to_owned())) |
||||||
|
.collect(), |
||||||
|
) |
||||||
|
} |
||||||
|
|
||||||
|
fn count_triple_blank_nodes(triple: &Triple) -> usize { |
||||||
|
(match &triple.subject { |
||||||
|
Subject::BlankNode(_) => 1, |
||||||
|
Subject::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) + (match &triple.object { |
||||||
|
Term::BlankNode(_) => 1, |
||||||
|
Term::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) |
||||||
|
} |
||||||
|
|
||||||
|
fn count_quad_blank_nodes(quad: &Quad) -> usize { |
||||||
|
(match &quad.subject { |
||||||
|
Subject::BlankNode(_) => 1, |
||||||
|
Subject::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) + (match &quad.object { |
||||||
|
Term::BlankNode(_) => 1, |
||||||
|
Term::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) + usize::from(matches!(quad.graph_name, GraphName::BlankNode(_))) |
||||||
|
} |
||||||
|
|
||||||
|
fn serialize_quads(quads: &[Quad], prefixes: Vec<(String, String)>) -> Vec<u8> { |
||||||
|
let mut serializer = TriGSerializer::new(); |
||||||
|
for (prefix_name, prefix_iri) in prefixes { |
||||||
|
serializer = serializer.with_prefix(prefix_name, prefix_iri).unwrap(); |
||||||
|
} |
||||||
|
let mut writer = serializer.serialize_to_write(Vec::new()); |
||||||
|
for quad in quads { |
||||||
|
writer.write_quad(quad).unwrap(); |
||||||
|
} |
||||||
|
writer.finish().unwrap() |
||||||
|
} |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
// We parse with splitting
|
||||||
|
let (quads, errors, prefixes) = parse(data.split(|c| *c == 0xFF), false); |
||||||
|
// We parse without splitting
|
||||||
|
let (quads_without_split, errors_without_split, _) = parse( |
||||||
|
[data |
||||||
|
.iter() |
||||||
|
.copied() |
||||||
|
.filter(|c| *c != 0xFF) |
||||||
|
.collect::<Vec<_>>() |
||||||
|
.as_slice()], |
||||||
|
false, |
||||||
|
); |
||||||
|
let (quads_unchecked, errors_unchecked, _) = parse(data.split(|c| *c == 0xFF), true); |
||||||
|
if errors.is_empty() { |
||||||
|
assert!(errors_unchecked.is_empty()); |
||||||
|
} |
||||||
|
|
||||||
|
let bnodes_count = quads.iter().map(count_quad_blank_nodes).sum::<usize>(); |
||||||
|
if bnodes_count == 0 { |
||||||
|
assert_eq!( |
||||||
|
quads, |
||||||
|
quads_without_split, |
||||||
|
"With split:\n{}\nWithout split:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_without_split, Vec::new())) |
||||||
|
); |
||||||
|
if errors.is_empty() { |
||||||
|
assert_eq!( |
||||||
|
quads, |
||||||
|
quads_unchecked, |
||||||
|
"Validating:\n{}\nUnchecked:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_unchecked, Vec::new())) |
||||||
|
); |
||||||
|
} |
||||||
|
} else if bnodes_count <= 4 { |
||||||
|
let mut dataset_with_split = quads.iter().collect::<Dataset>(); |
||||||
|
let mut dataset_without_split = quads_without_split.iter().collect::<Dataset>(); |
||||||
|
dataset_with_split.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||||
|
dataset_without_split.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||||
|
assert_eq!( |
||||||
|
dataset_with_split, |
||||||
|
dataset_without_split, |
||||||
|
"With split:\n{}\nWithout split:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_without_split, Vec::new())) |
||||||
|
); |
||||||
|
if errors.is_empty() { |
||||||
|
let mut dataset_unchecked = quads_unchecked.iter().collect::<Dataset>(); |
||||||
|
dataset_unchecked.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||||
|
assert_eq!( |
||||||
|
dataset_with_split, |
||||||
|
dataset_unchecked, |
||||||
|
"Validating:\n{}\nUnchecked:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_unchecked, Vec::new())) |
||||||
|
); |
||||||
|
} |
||||||
|
} |
||||||
|
assert_eq!(errors, errors_without_split); |
||||||
|
|
||||||
|
// We serialize
|
||||||
|
let new_serialization = serialize_quads(&quads, prefixes); |
||||||
|
|
||||||
|
// We parse the serialization
|
||||||
|
let new_quads = TriGParser::new() |
||||||
|
.with_quoted_triples() |
||||||
|
.parse_read(new_serialization.as_slice()) |
||||||
|
.collect::<Result<Vec<_>, _>>() |
||||||
|
.map_err(|e| { |
||||||
|
format!( |
||||||
|
"Error on {:?} from {quads:?} based on {:?}: {e}", |
||||||
|
String::from_utf8_lossy(&new_serialization), |
||||||
|
String::from_utf8_lossy(data) |
||||||
|
) |
||||||
|
}) |
||||||
|
.unwrap(); |
||||||
|
|
||||||
|
// We check the roundtrip has not changed anything
|
||||||
|
assert_eq!(new_quads, quads); |
||||||
|
}); |
@ -1,20 +1,26 @@ |
|||||||
[package] |
[package] |
||||||
name = "oxigraph_js" |
name = "oxigraph-js" |
||||||
version = "0.3.22" |
version.workspace = true |
||||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
authors.workspace = true |
||||||
license = "MIT OR Apache-2.0" |
license.workspace = true |
||||||
readme = "README.md" |
readme = "README.md" |
||||||
keywords = ["RDF", "N-Triples", "Turtle", "RDF/XML", "SPARQL"] |
keywords = ["RDF", "N-Triples", "Turtle", "XML", "SPARQL"] |
||||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/js" |
repository = "https://github.com/oxigraph/oxigraph/tree/main/js" |
||||||
description = "JavaScript bindings of Oxigraph" |
description = "JavaScript bindings of Oxigraph" |
||||||
edition = "2021" |
edition.workspace = true |
||||||
|
rust-version.workspace = true |
||||||
|
publish = false |
||||||
|
|
||||||
[lib] |
[lib] |
||||||
crate-type = ["cdylib"] |
crate-type = ["cdylib"] |
||||||
name = "oxigraph" |
name = "oxigraph" |
||||||
|
doc = false |
||||||
|
|
||||||
[dependencies] |
[dependencies] |
||||||
oxigraph = { version = "0.3.22", path="../lib" } |
console_error_panic_hook.workspace = true |
||||||
wasm-bindgen = "0.2" |
js-sys.workspace = true |
||||||
js-sys = "0.3" |
oxigraph = { workspace = true, features = ["js"] } |
||||||
console_error_panic_hook = "0.1" |
wasm-bindgen.workspace = true |
||||||
|
|
||||||
|
[lints] |
||||||
|
workspace = true |
||||||
|
@ -1,7 +1,8 @@ |
|||||||
{ |
{ |
||||||
|
"$schema": "https://biomejs.dev/schemas/1.0.0/schema.json", |
||||||
"formatter": { |
"formatter": { |
||||||
"indentStyle": "space", |
"indentStyle": "space", |
||||||
"indentSize": 4, |
"indentWidth": 4, |
||||||
"lineWidth": 100 |
"lineWidth": 100 |
||||||
}, |
}, |
||||||
"linter": { |
"linter": { |
File diff suppressed because it is too large
Load Diff
@ -1,63 +0,0 @@ |
|||||||
[package] |
|
||||||
name = "oxigraph" |
|
||||||
version = "0.3.22" |
|
||||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
|
||||||
license = "MIT OR Apache-2.0" |
|
||||||
readme = "README.md" |
|
||||||
keywords = ["RDF", "SPARQL", "graph-database", "database"] |
|
||||||
categories = ["database-implementations"] |
|
||||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib" |
|
||||||
homepage = "https://oxigraph.org/" |
|
||||||
documentation = "https://docs.rs/oxigraph" |
|
||||||
description = """ |
|
||||||
a SPARQL database and RDF toolkit |
|
||||||
""" |
|
||||||
edition = "2021" |
|
||||||
rust-version = "1.60" |
|
||||||
|
|
||||||
[package.metadata.docs.rs] |
|
||||||
all-features = true |
|
||||||
|
|
||||||
[features] |
|
||||||
default = [] |
|
||||||
http_client = ["oxhttp", "oxhttp/rustls"] |
|
||||||
rocksdb_debug = [] |
|
||||||
|
|
||||||
[dependencies] |
|
||||||
rand = "0.8" |
|
||||||
md-5 = "0.10" |
|
||||||
sha-1 = "0.10" |
|
||||||
sha2 = "0.10" |
|
||||||
digest = "0.10" |
|
||||||
regex = "1" |
|
||||||
oxilangtag = "0.1" |
|
||||||
oxiri = "0.2" |
|
||||||
rio_api = "0.8" |
|
||||||
rio_turtle = "0.8" |
|
||||||
rio_xml = "0.8" |
|
||||||
hex = "0.4" |
|
||||||
siphasher = ">=0.3,<2.0" |
|
||||||
lazy_static = "1" |
|
||||||
json-event-parser = "0.1" |
|
||||||
oxrdf = { version = "0.1.7", path="oxrdf", features = ["rdf-star", "oxsdatatypes"] } |
|
||||||
oxsdatatypes = { version = "0.1.3", path="oxsdatatypes" } |
|
||||||
spargebra = { version = "0.2.8", path="spargebra", features = ["rdf-star", "sep-0002", "sep-0006"] } |
|
||||||
sparesults = { version = "0.1.8", path="sparesults", features = ["rdf-star"] } |
|
||||||
|
|
||||||
[target.'cfg(not(target_family = "wasm"))'.dependencies] |
|
||||||
libc = "0.2" |
|
||||||
oxrocksdb-sys = { version = "0.3.22", path="../oxrocksdb-sys" } |
|
||||||
oxhttp = { version = "0.1", optional = true } |
|
||||||
|
|
||||||
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies] |
|
||||||
getrandom = { version = "0.2", features = ["js"] } |
|
||||||
js-sys = "0.3" |
|
||||||
|
|
||||||
[target.'cfg(not(target_family = "wasm"))'.dev-dependencies] |
|
||||||
criterion = "0.4" |
|
||||||
oxhttp = "0.1" |
|
||||||
zstd = "0.12" |
|
||||||
|
|
||||||
[[bench]] |
|
||||||
name = "store" |
|
||||||
harness = false |
|
@ -1,72 +1,13 @@ |
|||||||
Oxigraph |
Oxigraph Rust crates |
||||||
======== |
==================== |
||||||
|
|
||||||
[![Latest Version](https://img.shields.io/crates/v/oxigraph.svg)](https://crates.io/crates/oxigraph) |
Oxigraph is implemented in Rust. |
||||||
[![Released API docs](https://docs.rs/oxigraph/badge.svg)](https://docs.rs/oxigraph) |
It is composed on a main library, [`oxigraph`](./oxigraph) and a set of smaller crates used by the `oxigraph` crate: |
||||||
[![Crates.io downloads](https://img.shields.io/crates/d/oxigraph)](https://crates.io/crates/oxigraph) |
* [`oxrdf`](./oxrdf), datastructures encoding RDF basic concepts (the `model` module of the `oxigraph` crate). |
||||||
[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) |
* [`oxrdfio`](./oxrdfio), a unified parser and serializer API for RDF formats (the `io` module of the `oxigraph` crate). It itself relies on: |
||||||
[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) |
* [`oxttl`](./oxttl), N-Triple, N-Quad, Turtle, TriG and N3 parsing and serialization. |
||||||
|
* [`oxrdfxml`](./oxrdfxml), RDF/XML parsing and serialization. |
||||||
Oxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. |
* [`spargebra`](./spargebra), a SPARQL parser. |
||||||
|
* [`sparesults`](./sparesults), parsers and serializers for SPARQL result formats (the `sparql::results` module of the `oxigraph` crate). |
||||||
Its goal is to provide a compliant, safe and fast on-disk graph database. |
* [`sparopt`](./sparesults), a SPARQL optimizer. |
||||||
It also provides a set of utility functions for reading, writing, and processing RDF files. |
* [`oxsdatatypes`](./oxsdatatypes), an implementation of some XML Schema datatypes. |
||||||
|
|
||||||
Oxigraph is in heavy development and SPARQL query evaluation has not been optimized yet. |
|
||||||
|
|
||||||
Oxigraph also provides [a standalone HTTP server](https://crates.io/crates/oxigraph_server) and [a Python library](https://pyoxigraph.readthedocs.io/) based on this library. |
|
||||||
|
|
||||||
|
|
||||||
Oxigraph implements the following specifications: |
|
||||||
* [SPARQL 1.1 Query](https://www.w3.org/TR/sparql11-query/), [SPARQL 1.1 Update](https://www.w3.org/TR/sparql11-update/), and [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/). |
|
||||||
* [Turtle](https://www.w3.org/TR/turtle/), [TriG](https://www.w3.org/TR/trig/), [N-Triples](https://www.w3.org/TR/n-triples/), [N-Quads](https://www.w3.org/TR/n-quads/), and [RDF XML](https://www.w3.org/TR/rdf-syntax-grammar/) RDF serialization formats for both data ingestion and retrieval using the [Rio library](https://github.com/oxigraph/rio). |
|
||||||
* [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/). |
|
||||||
|
|
||||||
A preliminary benchmark [is provided](../bench/README.md). Oxigraph internal design [is described on the wiki](https://github.com/oxigraph/oxigraph/wiki/Architecture). |
|
||||||
|
|
||||||
The main entry point of Oxigraph is the [`Store`](store::Store) struct: |
|
||||||
```rust |
|
||||||
use oxigraph::store::Store; |
|
||||||
use oxigraph::model::*; |
|
||||||
use oxigraph::sparql::QueryResults; |
|
||||||
|
|
||||||
let store = Store::new().unwrap(); |
|
||||||
|
|
||||||
// insertion |
|
||||||
let ex = NamedNode::new("http://example.com").unwrap(); |
|
||||||
let quad = Quad::new(ex.clone(), ex.clone(), ex.clone(), GraphName::DefaultGraph); |
|
||||||
store.insert(&quad).unwrap(); |
|
||||||
|
|
||||||
// quad filter |
|
||||||
let results = store.quads_for_pattern(Some(ex.as_ref().into()), None, None, None).collect::<Result<Vec<Quad>,_>>().unwrap(); |
|
||||||
assert_eq!(vec![quad], results); |
|
||||||
|
|
||||||
// SPARQL query |
|
||||||
if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }").unwrap() { |
|
||||||
assert_eq!(solutions.next().unwrap().unwrap().get("s"), Some(&ex.into())); |
|
||||||
} |
|
||||||
``` |
|
||||||
|
|
||||||
Some parts of this library are available as standalone crates: |
|
||||||
* [`oxrdf`](https://crates.io/crates/oxrdf) provides datastructures encoding RDF basic concepts (the `oxigraph::model` module). |
|
||||||
* [`spargebra`](https://crates.io/crates/spargebra) provides a SPARQL parser. |
|
||||||
* [`sparesults`](https://crates.io/crates/sparesults) provides parsers and serializers for SPARQL result formats. |
|
||||||
|
|
||||||
To build the library, don't forget to clone the submodules using `git clone --recursive https://github.com/oxigraph/oxigraph.git` to clone the repository including submodules or `git submodule update --init` to add submodules to the already cloned repository. |
|
||||||
|
|
||||||
|
|
||||||
## License |
|
||||||
|
|
||||||
This project is licensed under either of |
|
||||||
|
|
||||||
* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
|
||||||
`<http://www.apache.org/licenses/LICENSE-2.0>`) |
|
||||||
* MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
|
||||||
`<http://opensource.org/licenses/MIT>`) |
|
||||||
|
|
||||||
at your option. |
|
||||||
|
|
||||||
|
|
||||||
### Contribution |
|
||||||
|
|
||||||
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
|
||||||
|
@ -1,265 +0,0 @@ |
|||||||
use criterion::{criterion_group, criterion_main, Criterion, Throughput}; |
|
||||||
use oxhttp::model::{Method, Request, Status}; |
|
||||||
use oxigraph::io::GraphFormat; |
|
||||||
use oxigraph::model::GraphNameRef; |
|
||||||
use oxigraph::sparql::{Query, QueryResults, Update}; |
|
||||||
use oxigraph::store::Store; |
|
||||||
use rand::random; |
|
||||||
use std::env::temp_dir; |
|
||||||
use std::fs::{remove_dir_all, File}; |
|
||||||
use std::io::{BufRead, BufReader, Cursor, Read}; |
|
||||||
use std::path::{Path, PathBuf}; |
|
||||||
|
|
||||||
fn store_load(c: &mut Criterion) { |
|
||||||
{ |
|
||||||
let mut data = Vec::new(); |
|
||||||
read_data("explore-1000.nt.zst") |
|
||||||
.read_to_end(&mut data) |
|
||||||
.unwrap(); |
|
||||||
|
|
||||||
let mut group = c.benchmark_group("store load"); |
|
||||||
group.throughput(Throughput::Bytes(data.len() as u64)); |
|
||||||
group.sample_size(10); |
|
||||||
group.bench_function("load BSBM explore 1000 in memory", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let store = Store::new().unwrap(); |
|
||||||
do_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
group.bench_function("load BSBM explore 1000 in on disk", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let path = TempDir::default(); |
|
||||||
let store = Store::open(&path).unwrap(); |
|
||||||
do_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
group.bench_function("load BSBM explore 1000 in on disk with bulk load", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let path = TempDir::default(); |
|
||||||
let store = Store::open(&path).unwrap(); |
|
||||||
do_bulk_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
} |
|
||||||
|
|
||||||
{ |
|
||||||
let mut data = Vec::new(); |
|
||||||
read_data("explore-10000.nt.zst") |
|
||||||
.read_to_end(&mut data) |
|
||||||
.unwrap(); |
|
||||||
|
|
||||||
let mut group = c.benchmark_group("store load large"); |
|
||||||
group.throughput(Throughput::Bytes(data.len() as u64)); |
|
||||||
group.sample_size(10); |
|
||||||
group.bench_function("load BSBM explore 10000 in on disk with bulk load", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let path = TempDir::default(); |
|
||||||
let store = Store::open(&path).unwrap(); |
|
||||||
do_bulk_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
fn do_load(store: &Store, data: &[u8]) { |
|
||||||
store |
|
||||||
.load_graph( |
|
||||||
Cursor::new(&data), |
|
||||||
GraphFormat::NTriples, |
|
||||||
GraphNameRef::DefaultGraph, |
|
||||||
None, |
|
||||||
) |
|
||||||
.unwrap(); |
|
||||||
store.optimize().unwrap(); |
|
||||||
} |
|
||||||
|
|
||||||
fn do_bulk_load(store: &Store, data: &[u8]) { |
|
||||||
store |
|
||||||
.bulk_loader() |
|
||||||
.load_graph( |
|
||||||
Cursor::new(&data), |
|
||||||
GraphFormat::NTriples, |
|
||||||
GraphNameRef::DefaultGraph, |
|
||||||
None, |
|
||||||
) |
|
||||||
.unwrap(); |
|
||||||
store.optimize().unwrap(); |
|
||||||
} |
|
||||||
|
|
||||||
fn store_query_and_update(c: &mut Criterion) { |
|
||||||
let mut data = Vec::new(); |
|
||||||
read_data("explore-1000.nt.zst") |
|
||||||
.read_to_end(&mut data) |
|
||||||
.unwrap(); |
|
||||||
|
|
||||||
let operations = read_data("mix-exploreAndUpdate-1000.tsv.zst") |
|
||||||
.lines() |
|
||||||
.map(|l| { |
|
||||||
let l = l.unwrap(); |
|
||||||
let mut parts = l.trim().split('\t'); |
|
||||||
let kind = parts.next().unwrap(); |
|
||||||
let operation = parts.next().unwrap(); |
|
||||||
match kind { |
|
||||||
"query" => Operation::Query(Query::parse(operation, None).unwrap()), |
|
||||||
"update" => Operation::Update(Update::parse(operation, None).unwrap()), |
|
||||||
_ => panic!("Unexpected operation kind {kind}"), |
|
||||||
} |
|
||||||
}) |
|
||||||
.collect::<Vec<_>>(); |
|
||||||
let query_operations = operations |
|
||||||
.iter() |
|
||||||
.filter(|o| matches!(o, Operation::Query(_))) |
|
||||||
.cloned() |
|
||||||
.collect::<Vec<_>>(); |
|
||||||
|
|
||||||
let mut group = c.benchmark_group("store operations"); |
|
||||||
group.throughput(Throughput::Elements(operations.len() as u64)); |
|
||||||
group.sample_size(10); |
|
||||||
|
|
||||||
{ |
|
||||||
let memory_store = Store::new().unwrap(); |
|
||||||
do_bulk_load(&memory_store, &data); |
|
||||||
group.bench_function("BSBM explore 1000 query in memory", |b| { |
|
||||||
b.iter(|| run_operation(&memory_store, &query_operations)) |
|
||||||
}); |
|
||||||
group.bench_function("BSBM explore 1000 queryAndUpdate in memory", |b| { |
|
||||||
b.iter(|| run_operation(&memory_store, &operations)) |
|
||||||
}); |
|
||||||
} |
|
||||||
|
|
||||||
{ |
|
||||||
let path = TempDir::default(); |
|
||||||
let disk_store = Store::open(&path).unwrap(); |
|
||||||
do_bulk_load(&disk_store, &data); |
|
||||||
group.bench_function("BSBM explore 1000 query on disk", |b| { |
|
||||||
b.iter(|| run_operation(&disk_store, &query_operations)) |
|
||||||
}); |
|
||||||
group.bench_function("BSBM explore 1000 queryAndUpdate on disk", |b| { |
|
||||||
b.iter(|| run_operation(&disk_store, &operations)) |
|
||||||
}); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
fn run_operation(store: &Store, operations: &[Operation]) { |
|
||||||
for operation in operations { |
|
||||||
match operation { |
|
||||||
Operation::Query(q) => match store.query(q.clone()).unwrap() { |
|
||||||
QueryResults::Boolean(_) => (), |
|
||||||
QueryResults::Solutions(s) => { |
|
||||||
for s in s { |
|
||||||
s.unwrap(); |
|
||||||
} |
|
||||||
} |
|
||||||
QueryResults::Graph(g) => { |
|
||||||
for t in g { |
|
||||||
t.unwrap(); |
|
||||||
} |
|
||||||
} |
|
||||||
}, |
|
||||||
Operation::Update(u) => store.update(u.clone()).unwrap(), |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
fn sparql_parsing(c: &mut Criterion) { |
|
||||||
let mut data = Vec::new(); |
|
||||||
read_data("explore-1000.nt.zst") |
|
||||||
.read_to_end(&mut data) |
|
||||||
.unwrap(); |
|
||||||
|
|
||||||
let operations = read_data("mix-exploreAndUpdate-1000.tsv.zst") |
|
||||||
.lines() |
|
||||||
.map(|l| { |
|
||||||
let l = l.unwrap(); |
|
||||||
let mut parts = l.trim().split('\t'); |
|
||||||
let kind = parts.next().unwrap(); |
|
||||||
let operation = parts.next().unwrap(); |
|
||||||
match kind { |
|
||||||
"query" => RawOperation::Query(operation.to_owned()), |
|
||||||
"update" => RawOperation::Update(operation.to_owned()), |
|
||||||
_ => panic!("Unexpected operation kind {kind}"), |
|
||||||
} |
|
||||||
}) |
|
||||||
.collect::<Vec<_>>(); |
|
||||||
|
|
||||||
let mut group = c.benchmark_group("sparql parsing"); |
|
||||||
group.sample_size(10); |
|
||||||
group.throughput(Throughput::Bytes( |
|
||||||
operations |
|
||||||
.iter() |
|
||||||
.map(|o| match o { |
|
||||||
RawOperation::Query(q) => q.len(), |
|
||||||
RawOperation::Update(u) => u.len(), |
|
||||||
}) |
|
||||||
.sum::<usize>() as u64, |
|
||||||
)); |
|
||||||
group.bench_function("BSBM query and update set", |b| { |
|
||||||
b.iter(|| { |
|
||||||
for operation in &operations { |
|
||||||
match operation { |
|
||||||
RawOperation::Query(q) => { |
|
||||||
Query::parse(q, None).unwrap(); |
|
||||||
} |
|
||||||
RawOperation::Update(u) => { |
|
||||||
Update::parse(u, None).unwrap(); |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
}) |
|
||||||
}); |
|
||||||
} |
|
||||||
|
|
||||||
criterion_group!(store, sparql_parsing, store_query_and_update, store_load); |
|
||||||
|
|
||||||
criterion_main!(store); |
|
||||||
|
|
||||||
fn read_data(file: &str) -> impl BufRead { |
|
||||||
if !Path::new(file).exists() { |
|
||||||
let mut client = oxhttp::Client::new(); |
|
||||||
client.set_redirection_limit(5); |
|
||||||
let url = format!("https://github.com/Tpt/bsbm-tools/releases/download/v0.2/{file}"); |
|
||||||
let request = Request::builder(Method::GET, url.parse().unwrap()).build(); |
|
||||||
let response = client.request(request).unwrap(); |
|
||||||
assert_eq!( |
|
||||||
response.status(), |
|
||||||
Status::OK, |
|
||||||
"{}", |
|
||||||
response.into_body().to_string().unwrap() |
|
||||||
); |
|
||||||
std::io::copy(&mut response.into_body(), &mut File::create(file).unwrap()).unwrap(); |
|
||||||
} |
|
||||||
BufReader::new(zstd::Decoder::new(File::open(file).unwrap()).unwrap()) |
|
||||||
} |
|
||||||
|
|
||||||
#[derive(Clone)] |
|
||||||
enum RawOperation { |
|
||||||
Query(String), |
|
||||||
Update(String), |
|
||||||
} |
|
||||||
|
|
||||||
#[allow(clippy::large_enum_variant)] |
|
||||||
#[derive(Clone)] |
|
||||||
enum Operation { |
|
||||||
Query(Query), |
|
||||||
Update(Update), |
|
||||||
} |
|
||||||
|
|
||||||
struct TempDir(PathBuf); |
|
||||||
|
|
||||||
impl Default for TempDir { |
|
||||||
fn default() -> Self { |
|
||||||
Self(temp_dir().join(format!("oxigraph-bench-{}", random::<u128>()))) |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
impl AsRef<Path> for TempDir { |
|
||||||
fn as_ref(&self) -> &Path { |
|
||||||
&self.0 |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
impl Drop for TempDir { |
|
||||||
fn drop(&mut self) { |
|
||||||
remove_dir_all(&self.0).unwrap() |
|
||||||
} |
|
||||||
} |
|
@ -0,0 +1,59 @@ |
|||||||
|
[package] |
||||||
|
name = "oxigraph" |
||||||
|
version.workspace = true |
||||||
|
authors.workspace = true |
||||||
|
license.workspace = true |
||||||
|
readme = "README.md" |
||||||
|
keywords = ["RDF", "SPARQL", "graph-database", "database"] |
||||||
|
categories = ["database-implementations"] |
||||||
|
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib/oxigraph" |
||||||
|
homepage = "https://oxigraph.org/" |
||||||
|
documentation = "https://docs.rs/oxigraph" |
||||||
|
description = """ |
||||||
|
a SPARQL database and RDF toolkit |
||||||
|
""" |
||||||
|
edition.workspace = true |
||||||
|
rust-version.workspace = true |
||||||
|
|
||||||
|
[features] |
||||||
|
js = ["getrandom/js", "oxsdatatypes/js", "js-sys"] |
||||||
|
|
||||||
|
|
||||||
|
[dependencies] |
||||||
|
digest.workspace = true |
||||||
|
hex.workspace = true |
||||||
|
json-event-parser.workspace = true |
||||||
|
md-5.workspace = true |
||||||
|
oxilangtag.workspace = true |
||||||
|
oxiri.workspace = true |
||||||
|
oxrdf = { workspace = true, features = ["rdf-star", "oxsdatatypes"] } |
||||||
|
oxrdfio = { workspace = true, features = ["rdf-star"] } |
||||||
|
oxsdatatypes.workspace = true |
||||||
|
rand.workspace = true |
||||||
|
regex.workspace = true |
||||||
|
sha1.workspace = true |
||||||
|
sha2.workspace = true |
||||||
|
siphasher.workspace = true |
||||||
|
sparesults = { workspace = true, features = ["rdf-star"] } |
||||||
|
spargebra = { workspace = true, features = ["rdf-star", "sep-0002", "sep-0006"] } |
||||||
|
sparopt = { workspace = true, features = ["rdf-star", "sep-0002", "sep-0006"] } |
||||||
|
thiserror.workspace = true |
||||||
|
|
||||||
|
[target.'cfg(not(target_family = "wasm"))'.dependencies] |
||||||
|
libc = "0.2" |
||||||
|
rocksdb.workspace = true |
||||||
|
|
||||||
|
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies] |
||||||
|
getrandom.workspace = true |
||||||
|
js-sys = { workspace = true, optional = true } |
||||||
|
|
||||||
|
[target.'cfg(not(target_family = "wasm"))'.dev-dependencies] |
||||||
|
codspeed-criterion-compat.workspace = true |
||||||
|
zstd.workspace = true |
||||||
|
|
||||||
|
[lints] |
||||||
|
workspace = true |
||||||
|
|
||||||
|
[package.metadata.docs.rs] |
||||||
|
rustdoc-args = ["--cfg", "docsrs"] |
||||||
|
|
@ -0,0 +1,82 @@ |
|||||||
|
Oxigraph |
||||||
|
======== |
||||||
|
|
||||||
|
[![Latest Version](https://img.shields.io/crates/v/oxigraph.svg)](https://crates.io/crates/oxigraph) |
||||||
|
[![Released API docs](https://docs.rs/oxigraph/badge.svg)](https://docs.rs/oxigraph) |
||||||
|
[![Crates.io downloads](https://img.shields.io/crates/d/oxigraph)](https://crates.io/crates/oxigraph) |
||||||
|
[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) |
||||||
|
[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community) |
||||||
|
|
||||||
|
Oxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. |
||||||
|
|
||||||
|
Its goal is to provide a compliant, safe and fast on-disk graph database. |
||||||
|
It also provides a set of utility functions for reading, writing, and processing RDF files. |
||||||
|
|
||||||
|
Oxigraph is in heavy development and SPARQL query evaluation has not been optimized yet. |
||||||
|
|
||||||
|
Oxigraph also provides [a CLI tool](https://crates.io/crates/oxigraph-cli) and [a Python library](https://pyoxigraph.readthedocs.io/) based on this library. |
||||||
|
|
||||||
|
|
||||||
|
Oxigraph implements the following specifications: |
||||||
|
* [SPARQL 1.1 Query](https://www.w3.org/TR/sparql11-query/), [SPARQL 1.1 Update](https://www.w3.org/TR/sparql11-update/), and [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/). |
||||||
|
* [Turtle](https://www.w3.org/TR/turtle/), [TriG](https://www.w3.org/TR/trig/), [N-Triples](https://www.w3.org/TR/n-triples/), [N-Quads](https://www.w3.org/TR/n-quads/), and [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) RDF serialization formats for both data ingestion and retrieval. |
||||||
|
* [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/). |
||||||
|
|
||||||
|
A preliminary benchmark [is provided](../bench/README.md). Oxigraph internal design [is described on the wiki](https://github.com/oxigraph/oxigraph/wiki/Architecture). |
||||||
|
|
||||||
|
The main entry point of Oxigraph is the [`Store`](store::Store) struct: |
||||||
|
```rust |
||||||
|
use oxigraph::store::Store; |
||||||
|
use oxigraph::model::*; |
||||||
|
use oxigraph::sparql::QueryResults; |
||||||
|
|
||||||
|
let store = Store::new().unwrap(); |
||||||
|
|
||||||
|
// insertion |
||||||
|
let ex = NamedNode::new("http://example.com").unwrap(); |
||||||
|
let quad = Quad::new(ex.clone(), ex.clone(), ex.clone(), GraphName::DefaultGraph); |
||||||
|
store.insert(&quad).unwrap(); |
||||||
|
|
||||||
|
// quad filter |
||||||
|
let results = store.quads_for_pattern(Some(ex.as_ref().into()), None, None, None).collect::<Result<Vec<Quad>,_>>().unwrap(); |
||||||
|
assert_eq!(vec![quad], results); |
||||||
|
|
||||||
|
// SPARQL query |
||||||
|
if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }").unwrap() { |
||||||
|
assert_eq!(solutions.next().unwrap().unwrap().get("s"), Some(&ex.into())); |
||||||
|
} |
||||||
|
``` |
||||||
|
|
||||||
|
It is based on these crates that can be used separately: |
||||||
|
* [`oxrdf`](https://crates.io/crates/oxrdf), datastructures encoding RDF basic concepts (the [`oxigraph::model`](crate::model) module). |
||||||
|
* [`oxrdfio`](https://crates.io/crates/oxrdfio), a unified parser and serializer API for RDF formats (the [`oxigraph::io`](crate::io) module). It itself relies on: |
||||||
|
* [`oxttl`](https://crates.io/crates/oxttl), N-Triple, N-Quad, Turtle, TriG and N3 parsing and serialization. |
||||||
|
* [`oxrdfxml`](https://crates.io/crates/oxrdfxml), RDF/XML parsing and serialization. |
||||||
|
* [`spargebra`](https://crates.io/crates/spargebra), a SPARQL parser. |
||||||
|
* [`sparesults`](https://crates.io/crates/sparesults), parsers and serializers for SPARQL result formats (the [`oxigraph::sparql::results`](crate::sparql::results) module). |
||||||
|
* [`sparopt`](https://crates.io/crates/sparesults), a SPARQL optimizer. |
||||||
|
* [`oxsdatatypes`](https://crates.io/crates/oxsdatatypes), an implementation of some XML Schema datatypes. |
||||||
|
|
||||||
|
To build the library locally, don't forget to clone the submodules using `git clone --recursive https://github.com/oxigraph/oxigraph.git` to clone the repository including submodules or `git submodule update --init` to add submodules to the already cloned repository. |
||||||
|
|
||||||
|
It is possible to disable the RocksDB storage backend to only use the in-memory fallback by disabling the `rocksdb` default feature: |
||||||
|
```toml |
||||||
|
oxigraph = { version = "*", default-features = false } |
||||||
|
``` |
||||||
|
This is the default behavior when compiling Oxigraph to WASM. |
||||||
|
|
||||||
|
## License |
||||||
|
|
||||||
|
This project is licensed under either of |
||||||
|
|
||||||
|
* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
||||||
|
`<http://www.apache.org/licenses/LICENSE-2.0>`) |
||||||
|
* MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
||||||
|
`<http://opensource.org/licenses/MIT>`) |
||||||
|
|
||||||
|
at your option. |
||||||
|
|
||||||
|
|
||||||
|
### Contribution |
||||||
|
|
||||||
|
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
@ -0,0 +1,39 @@ |
|||||||
|
//! Utilities to read and write RDF graphs and datasets using [OxRDF I/O](https://crates.io/crates/oxrdfio).
|
||||||
|
//!
|
||||||
|
//! The entry points of this module are the two [`RdfParser`] and [`RdfSerializer`] structs.
|
||||||
|
//!
|
||||||
|
//! Usage example converting a Turtle file to a N-Triples file:
|
||||||
|
//! ```
|
||||||
|
//! use oxigraph::io::{RdfFormat, RdfParser, RdfSerializer};
|
||||||
|
//!
|
||||||
|
//! let turtle_file = b"@base <http://example.com/> .
|
||||||
|
//! @prefix schema: <http://schema.org/> .
|
||||||
|
//! <foo> a schema:Person ;
|
||||||
|
//! schema:name \"Foo\" .
|
||||||
|
//! <bar> a schema:Person ;
|
||||||
|
//! schema:name \"Bar\" .";
|
||||||
|
//!
|
||||||
|
//! let ntriples_file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
|
||||||
|
//! <http://example.com/foo> <http://schema.org/name> \"Foo\" .
|
||||||
|
//! <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
|
||||||
|
//! <http://example.com/bar> <http://schema.org/name> \"Bar\" .
|
||||||
|
//! ";
|
||||||
|
//!
|
||||||
|
//! let mut writer = RdfSerializer::from_format(RdfFormat::NTriples).serialize_to_write(Vec::new());
|
||||||
|
//! for quad in RdfParser::from_format(RdfFormat::Turtle).parse_read(turtle_file.as_ref()) {
|
||||||
|
//! writer.write_quad(&quad.unwrap()).unwrap();
|
||||||
|
//! }
|
||||||
|
//! assert_eq!(writer.finish().unwrap(), ntriples_file);
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
mod format; |
||||||
|
pub mod read; |
||||||
|
pub mod write; |
||||||
|
|
||||||
|
#[allow(deprecated)] |
||||||
|
pub use self::format::{DatasetFormat, GraphFormat}; |
||||||
|
#[allow(deprecated)] |
||||||
|
pub use self::read::{DatasetParser, GraphParser}; |
||||||
|
#[allow(deprecated)] |
||||||
|
pub use self::write::{DatasetSerializer, GraphSerializer}; |
||||||
|
pub use oxrdfio::*; |
@ -0,0 +1,199 @@ |
|||||||
|
#![allow(deprecated)] |
||||||
|
|
||||||
|
//! Utilities to read RDF graphs and datasets.
|
||||||
|
|
||||||
|
use crate::io::{DatasetFormat, GraphFormat}; |
||||||
|
use crate::model::*; |
||||||
|
use oxrdfio::{FromReadQuadReader, RdfParseError, RdfParser}; |
||||||
|
use std::io::Read; |
||||||
|
|
||||||
|
/// Parsers for RDF graph serialization formats.
|
||||||
|
///
|
||||||
|
/// It currently supports the following formats:
|
||||||
|
/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`])
|
||||||
|
/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`])
|
||||||
|
/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`])
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
|
||||||
|
///
|
||||||
|
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_triples(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[deprecated(note = "use RdfParser instead", since = "0.4.0")] |
||||||
|
pub struct GraphParser { |
||||||
|
inner: RdfParser, |
||||||
|
} |
||||||
|
|
||||||
|
impl GraphParser { |
||||||
|
/// Builds a parser for the given format.
|
||||||
|
#[inline] |
||||||
|
pub fn from_format(format: GraphFormat) -> Self { |
||||||
|
Self { |
||||||
|
inner: RdfParser::from_format(format.into()) |
||||||
|
.without_named_graphs() |
||||||
|
.rename_blank_nodes(), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Provides an IRI that could be used to resolve the file relative IRIs.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||||
|
///
|
||||||
|
/// let file = "</s> </p> </o> .";
|
||||||
|
///
|
||||||
|
/// let parser =
|
||||||
|
/// GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?;
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_triples(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[inline] |
||||||
|
pub fn with_base_iri(self, base_iri: impl Into<String>) -> Result<Self, IriParseError> { |
||||||
|
Ok(Self { |
||||||
|
inner: self.inner.with_base_iri(base_iri)?, |
||||||
|
}) |
||||||
|
} |
||||||
|
|
||||||
|
/// Executes the parsing itself on a [`Read`] implementation and returns an iterator of triples.
|
||||||
|
pub fn read_triples<R: Read>(self, reader: R) -> TripleReader<R> { |
||||||
|
TripleReader { |
||||||
|
parser: self.inner.parse_read(reader), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An iterator yielding read triples.
|
||||||
|
/// Could be built using a [`GraphParser`].
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
|
||||||
|
///
|
||||||
|
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_triples(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[must_use] |
||||||
|
pub struct TripleReader<R: Read> { |
||||||
|
parser: FromReadQuadReader<R>, |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read> Iterator for TripleReader<R> { |
||||||
|
type Item = Result<Triple, RdfParseError>; |
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> { |
||||||
|
Some(self.parser.next()?.map(Into::into).map_err(Into::into)) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// A parser for RDF dataset serialization formats.
|
||||||
|
///
|
||||||
|
/// It currently supports the following formats:
|
||||||
|
/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`])
|
||||||
|
/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`])
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";
|
||||||
|
///
|
||||||
|
/// let parser = DatasetParser::from_format(DatasetFormat::NQuads);
|
||||||
|
/// let quads = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(quads.len(), 1);
|
||||||
|
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[deprecated(note = "use RdfParser instead", since = "0.4.0")] |
||||||
|
pub struct DatasetParser { |
||||||
|
inner: RdfParser, |
||||||
|
} |
||||||
|
|
||||||
|
impl DatasetParser { |
||||||
|
/// Builds a parser for the given format.
|
||||||
|
#[inline] |
||||||
|
pub fn from_format(format: DatasetFormat) -> Self { |
||||||
|
Self { |
||||||
|
inner: RdfParser::from_format(format.into()).rename_blank_nodes(), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Provides an IRI that could be used to resolve the file relative IRIs.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||||
|
///
|
||||||
|
/// let file = "<g> { </s> </p> </o> }";
|
||||||
|
///
|
||||||
|
/// let parser =
|
||||||
|
/// DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?;
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_quads(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[inline] |
||||||
|
pub fn with_base_iri(self, base_iri: impl Into<String>) -> Result<Self, IriParseError> { |
||||||
|
Ok(Self { |
||||||
|
inner: self.inner.with_base_iri(base_iri)?, |
||||||
|
}) |
||||||
|
} |
||||||
|
|
||||||
|
/// Executes the parsing itself on a [`Read`] implementation and returns an iterator of quads.
|
||||||
|
pub fn read_quads<R: Read>(self, reader: R) -> QuadReader<R> { |
||||||
|
QuadReader { |
||||||
|
parser: self.inner.parse_read(reader), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An iterator yielding read quads.
|
||||||
|
/// Could be built using a [`DatasetParser`].
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";
|
||||||
|
///
|
||||||
|
/// let parser = DatasetParser::from_format(DatasetFormat::NQuads);
|
||||||
|
/// let quads = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(quads.len(), 1);
|
||||||
|
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[must_use] |
||||||
|
pub struct QuadReader<R: Read> { |
||||||
|
parser: FromReadQuadReader<R>, |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read> Iterator for QuadReader<R> { |
||||||
|
type Item = Result<Quad, RdfParseError>; |
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> { |
||||||
|
Some(self.parser.next()?.map_err(Into::into)) |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,185 @@ |
|||||||
|
#![allow(deprecated)] |
||||||
|
|
||||||
|
//! Utilities to write RDF graphs and datasets.
|
||||||
|
|
||||||
|
use crate::io::{DatasetFormat, GraphFormat}; |
||||||
|
use crate::model::*; |
||||||
|
use oxrdfio::{RdfSerializer, ToWriteQuadWriter}; |
||||||
|
use std::io::{self, Write}; |
||||||
|
|
||||||
|
/// A serializer for RDF graph serialization formats.
|
||||||
|
///
|
||||||
|
/// It currently supports the following formats:
|
||||||
|
/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`])
|
||||||
|
/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`])
|
||||||
|
/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`])
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphSerializer};
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
///
|
||||||
|
/// let mut buffer = Vec::new();
|
||||||
|
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
|
||||||
|
/// writer.write(&Triple {
|
||||||
|
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||||
|
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||||
|
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||||
|
/// })?;
|
||||||
|
/// writer.finish()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// buffer.as_slice(),
|
||||||
|
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
|
||||||
|
/// );
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")] |
||||||
|
pub struct GraphSerializer { |
||||||
|
inner: RdfSerializer, |
||||||
|
} |
||||||
|
|
||||||
|
impl GraphSerializer { |
||||||
|
/// Builds a serializer for the given format
|
||||||
|
#[inline] |
||||||
|
pub fn from_format(format: GraphFormat) -> Self { |
||||||
|
Self { |
||||||
|
inner: RdfSerializer::from_format(format.into()), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Returns a [`TripleWriter`] allowing writing triples into the given [`Write`] implementation
|
||||||
|
pub fn triple_writer<W: Write>(self, write: W) -> TripleWriter<W> { |
||||||
|
TripleWriter { |
||||||
|
writer: self.inner.serialize_to_write(write), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Allows writing triples.
|
||||||
|
/// Could be built using a [`GraphSerializer`].
|
||||||
|
///
|
||||||
|
/// <div class="warning">
|
||||||
|
///
|
||||||
|
/// Do not forget to run the [`finish`](TripleWriter::finish()) method to properly write the last bytes of the file.</div>
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphSerializer};
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
///
|
||||||
|
/// let mut buffer = Vec::new();
|
||||||
|
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
|
||||||
|
/// writer.write(&Triple {
|
||||||
|
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||||
|
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||||
|
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||||
|
/// })?;
|
||||||
|
/// writer.finish()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// buffer.as_slice(),
|
||||||
|
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
|
||||||
|
/// );
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[must_use] |
||||||
|
pub struct TripleWriter<W: Write> { |
||||||
|
writer: ToWriteQuadWriter<W>, |
||||||
|
} |
||||||
|
|
||||||
|
impl<W: Write> TripleWriter<W> { |
||||||
|
/// Writes a triple
|
||||||
|
pub fn write<'a>(&mut self, triple: impl Into<TripleRef<'a>>) -> io::Result<()> { |
||||||
|
self.writer.write_triple(triple) |
||||||
|
} |
||||||
|
|
||||||
|
/// Writes the last bytes of the file
|
||||||
|
pub fn finish(self) -> io::Result<()> { |
||||||
|
self.writer.finish()?.flush() |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// A serializer for RDF graph serialization formats.
|
||||||
|
///
|
||||||
|
/// It currently supports the following formats:
|
||||||
|
/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`])
|
||||||
|
/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`])
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
///
|
||||||
|
/// let mut buffer = Vec::new();
|
||||||
|
/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
|
||||||
|
/// writer.write(&Quad {
|
||||||
|
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||||
|
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||||
|
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||||
|
/// graph_name: NamedNode::new("http://example.com/g")?.into(),
|
||||||
|
/// })?;
|
||||||
|
/// writer.finish()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")] |
||||||
|
pub struct DatasetSerializer { |
||||||
|
inner: RdfSerializer, |
||||||
|
} |
||||||
|
|
||||||
|
impl DatasetSerializer { |
||||||
|
/// Builds a serializer for the given format
|
||||||
|
#[inline] |
||||||
|
pub fn from_format(format: DatasetFormat) -> Self { |
||||||
|
Self { |
||||||
|
inner: RdfSerializer::from_format(format.into()), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Returns a [`QuadWriter`] allowing writing triples into the given [`Write`] implementation
|
||||||
|
pub fn quad_writer<W: Write>(self, write: W) -> QuadWriter<W> { |
||||||
|
QuadWriter { |
||||||
|
writer: self.inner.serialize_to_write(write), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Allows writing triples.
|
||||||
|
/// Could be built using a [`DatasetSerializer`].
|
||||||
|
///
|
||||||
|
/// <div class="warning">
|
||||||
|
///
|
||||||
|
/// Do not forget to run the [`finish`](QuadWriter::finish()) method to properly write the last bytes of the file.</div>
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
///
|
||||||
|
/// let mut buffer = Vec::new();
|
||||||
|
/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
|
||||||
|
/// writer.write(&Quad {
|
||||||
|
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||||
|
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||||
|
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||||
|
/// graph_name: NamedNode::new("http://example.com/g")?.into(),
|
||||||
|
/// })?;
|
||||||
|
/// writer.finish()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[must_use] |
||||||
|
pub struct QuadWriter<W: Write> { |
||||||
|
writer: ToWriteQuadWriter<W>, |
||||||
|
} |
||||||
|
|
||||||
|
impl<W: Write> QuadWriter<W> { |
||||||
|
/// Writes a quad
|
||||||
|
pub fn write<'a>(&mut self, quad: impl Into<QuadRef<'a>>) -> io::Result<()> { |
||||||
|
self.writer.write_quad(quad) |
||||||
|
} |
||||||
|
|
||||||
|
/// Writes the last bytes of the file
|
||||||
|
pub fn finish(self) -> io::Result<()> { |
||||||
|
self.writer.finish()?.flush() |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,12 @@ |
|||||||
|
#![doc = include_str!("../README.md")] |
||||||
|
#![doc(test(attr(deny(warnings))))] |
||||||
|
#![doc(test(attr(allow(deprecated))))] |
||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))] |
||||||
|
#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] |
||||||
|
#![doc(html_logo_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] |
||||||
|
|
||||||
|
pub mod io; |
||||||
|
pub mod model; |
||||||
|
pub mod sparql; |
||||||
|
mod storage; |
||||||
|
pub mod store; |
@ -0,0 +1,22 @@ |
|||||||
|
//! Implements data structures for [RDF 1.1 Concepts](https://www.w3.org/TR/rdf11-concepts/) using [OxRDF](https://crates.io/crates/oxrdf).
|
||||||
|
//!
|
||||||
|
//! Usage example:
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! use oxigraph::model::*;
|
||||||
|
//!
|
||||||
|
//! let mut graph = Graph::default();
|
||||||
|
//!
|
||||||
|
//! // insertion
|
||||||
|
//! let ex = NamedNodeRef::new("http://example.com").unwrap();
|
||||||
|
//! let triple = TripleRef::new(ex, ex, ex);
|
||||||
|
//! graph.insert(triple);
|
||||||
|
//!
|
||||||
|
//! // simple filter
|
||||||
|
//! let results: Vec<_> = graph.triples_for_subject(ex).collect();
|
||||||
|
//! assert_eq!(vec![triple], results);
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
pub use oxrdf::*; |
||||||
|
|
||||||
|
pub use spargebra::term::GroundQuad; |
@ -0,0 +1,84 @@ |
|||||||
|
use crate::io::RdfParseError; |
||||||
|
use crate::model::NamedNode; |
||||||
|
use crate::sparql::results::QueryResultsParseError as ResultsParseError; |
||||||
|
use crate::sparql::SparqlSyntaxError; |
||||||
|
use crate::storage::StorageError; |
||||||
|
use std::convert::Infallible; |
||||||
|
use std::error::Error; |
||||||
|
use std::io; |
||||||
|
|
||||||
|
/// A SPARQL evaluation error.
|
||||||
|
#[derive(Debug, thiserror::Error)] |
||||||
|
#[non_exhaustive] |
||||||
|
pub enum EvaluationError { |
||||||
|
/// An error in SPARQL parsing.
|
||||||
|
#[error(transparent)] |
||||||
|
Parsing(#[from] SparqlSyntaxError), |
||||||
|
/// An error from the storage.
|
||||||
|
#[error(transparent)] |
||||||
|
Storage(#[from] StorageError), |
||||||
|
/// An error while parsing an external RDF file.
|
||||||
|
#[error(transparent)] |
||||||
|
GraphParsing(#[from] RdfParseError), |
||||||
|
/// An error while parsing an external result file (likely from a federated query).
|
||||||
|
#[error(transparent)] |
||||||
|
ResultsParsing(#[from] ResultsParseError), |
||||||
|
/// An error returned during results serialization.
|
||||||
|
#[error(transparent)] |
||||||
|
ResultsSerialization(#[from] io::Error), |
||||||
|
/// Error during `SERVICE` evaluation
|
||||||
|
#[error("{0}")] |
||||||
|
Service(#[source] Box<dyn Error + Send + Sync + 'static>), |
||||||
|
/// Error when `CREATE` tries to create an already existing graph
|
||||||
|
#[error("The graph {0} already exists")] |
||||||
|
GraphAlreadyExists(NamedNode), |
||||||
|
/// Error when `DROP` or `CLEAR` tries to remove a not existing graph
|
||||||
|
#[error("The graph {0} does not exist")] |
||||||
|
GraphDoesNotExist(NamedNode), |
||||||
|
/// The variable storing the `SERVICE` name is unbound
|
||||||
|
#[error("The variable encoding the service name is unbound")] |
||||||
|
UnboundService, |
||||||
|
/// The given `SERVICE` is not supported
|
||||||
|
#[error("The service {0} is not supported")] |
||||||
|
UnsupportedService(NamedNode), |
||||||
|
/// The given content media type returned from an HTTP response is not supported (`SERVICE` and `LOAD`)
|
||||||
|
#[error("The content media type {0} is not supported")] |
||||||
|
UnsupportedContentType(String), |
||||||
|
/// The `SERVICE` call has not returns solutions
|
||||||
|
#[error("The service is not returning solutions but a boolean or a graph")] |
||||||
|
ServiceDoesNotReturnSolutions, |
||||||
|
/// The results are not a RDF graph
|
||||||
|
#[error("The query results are not a RDF graph")] |
||||||
|
NotAGraph, |
||||||
|
} |
||||||
|
|
||||||
|
impl From<Infallible> for EvaluationError { |
||||||
|
#[inline] |
||||||
|
fn from(error: Infallible) -> Self { |
||||||
|
match error {} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl From<EvaluationError> for io::Error { |
||||||
|
#[inline] |
||||||
|
fn from(error: EvaluationError) -> Self { |
||||||
|
match error { |
||||||
|
EvaluationError::Parsing(error) => Self::new(io::ErrorKind::InvalidData, error), |
||||||
|
EvaluationError::GraphParsing(error) => error.into(), |
||||||
|
EvaluationError::ResultsParsing(error) => error.into(), |
||||||
|
EvaluationError::ResultsSerialization(error) => error, |
||||||
|
EvaluationError::Storage(error) => error.into(), |
||||||
|
EvaluationError::Service(error) => match error.downcast() { |
||||||
|
Ok(error) => *error, |
||||||
|
Err(error) => Self::new(io::ErrorKind::Other, error), |
||||||
|
}, |
||||||
|
EvaluationError::GraphAlreadyExists(_) |
||||||
|
| EvaluationError::GraphDoesNotExist(_) |
||||||
|
| EvaluationError::UnboundService |
||||||
|
| EvaluationError::UnsupportedService(_) |
||||||
|
| EvaluationError::UnsupportedContentType(_) |
||||||
|
| EvaluationError::ServiceDoesNotReturnSolutions |
||||||
|
| EvaluationError::NotAGraph => Self::new(io::ErrorKind::InvalidInput, error), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,9 @@ |
|||||||
|
#[cfg(not(feature = "http-client"))] |
||||||
|
mod dummy; |
||||||
|
#[cfg(feature = "http-client")] |
||||||
|
mod simple; |
||||||
|
|
||||||
|
#[cfg(not(feature = "http-client"))] |
||||||
|
pub use dummy::Client; |
||||||
|
#[cfg(feature = "http-client")] |
||||||
|
pub use simple::Client; |
@ -0,0 +1,371 @@ |
|||||||
|
use crate::io::{RdfFormat, RdfSerializer}; |
||||||
|
use crate::model::*; |
||||||
|
use crate::sparql::error::EvaluationError; |
||||||
|
use crate::sparql::results::{ |
||||||
|
FromReadQueryResultsReader, FromReadSolutionsReader, QueryResultsFormat, |
||||||
|
QueryResultsParseError, QueryResultsParser, QueryResultsSerializer, |
||||||
|
}; |
||||||
|
pub use sparesults::QuerySolution; |
||||||
|
use std::io::{Read, Write}; |
||||||
|
use std::sync::Arc; |
||||||
|
|
||||||
|
/// Results of a [SPARQL query](https://www.w3.org/TR/sparql11-query/).
|
||||||
|
pub enum QueryResults { |
||||||
|
/// Results of a [SELECT](https://www.w3.org/TR/sparql11-query/#select) query.
|
||||||
|
Solutions(QuerySolutionIter), |
||||||
|
/// Result of a [ASK](https://www.w3.org/TR/sparql11-query/#ask) query.
|
||||||
|
Boolean(bool), |
||||||
|
/// Results of a [CONSTRUCT](https://www.w3.org/TR/sparql11-query/#construct) or [DESCRIBE](https://www.w3.org/TR/sparql11-query/#describe) query.
|
||||||
|
Graph(QueryTripleIter), |
||||||
|
} |
||||||
|
|
||||||
|
impl QueryResults { |
||||||
|
/// Reads a SPARQL query results serialization.
|
||||||
|
pub fn read( |
||||||
|
read: impl Read + 'static, |
||||||
|
format: QueryResultsFormat, |
||||||
|
) -> Result<Self, QueryResultsParseError> { |
||||||
|
Ok(QueryResultsParser::from_format(format) |
||||||
|
.parse_read(read)? |
||||||
|
.into()) |
||||||
|
} |
||||||
|
|
||||||
|
/// Writes the query results (solutions or boolean).
|
||||||
|
///
|
||||||
|
/// This method fails if it is called on the `Graph` results.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::store::Store;
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
/// use oxigraph::sparql::results::QueryResultsFormat;
|
||||||
|
///
|
||||||
|
/// let store = Store::new()?;
|
||||||
|
/// let ex = NamedNodeRef::new("http://example.com")?;
|
||||||
|
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
|
||||||
|
///
|
||||||
|
/// let results = store.query("SELECT ?s WHERE { ?s ?p ?o }")?;
|
||||||
|
/// assert_eq!(
|
||||||
|
/// results.write(Vec::new(), QueryResultsFormat::Json)?,
|
||||||
|
/// r#"{"head":{"vars":["s"]},"results":{"bindings":[{"s":{"type":"uri","value":"http://example.com"}}]}}"#.as_bytes()
|
||||||
|
/// );
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
pub fn write<W: Write>( |
||||||
|
self, |
||||||
|
write: W, |
||||||
|
format: QueryResultsFormat, |
||||||
|
) -> Result<W, EvaluationError> { |
||||||
|
let serializer = QueryResultsSerializer::from_format(format); |
||||||
|
match self { |
||||||
|
Self::Boolean(value) => serializer.serialize_boolean_to_write(write, value), |
||||||
|
Self::Solutions(solutions) => { |
||||||
|
let mut writer = serializer |
||||||
|
.serialize_solutions_to_write(write, solutions.variables().to_vec()) |
||||||
|
.map_err(EvaluationError::ResultsSerialization)?; |
||||||
|
for solution in solutions { |
||||||
|
writer |
||||||
|
.write(&solution?) |
||||||
|
.map_err(EvaluationError::ResultsSerialization)?; |
||||||
|
} |
||||||
|
writer.finish() |
||||||
|
} |
||||||
|
Self::Graph(triples) => { |
||||||
|
let s = VariableRef::new_unchecked("subject"); |
||||||
|
let p = VariableRef::new_unchecked("predicate"); |
||||||
|
let o = VariableRef::new_unchecked("object"); |
||||||
|
let mut writer = serializer |
||||||
|
.serialize_solutions_to_write( |
||||||
|
write, |
||||||
|
vec![s.into_owned(), p.into_owned(), o.into_owned()], |
||||||
|
) |
||||||
|
.map_err(EvaluationError::ResultsSerialization)?; |
||||||
|
for triple in triples { |
||||||
|
let triple = triple?; |
||||||
|
writer |
||||||
|
.write([ |
||||||
|
(s, &triple.subject.into()), |
||||||
|
(p, &triple.predicate.into()), |
||||||
|
(o, &triple.object), |
||||||
|
]) |
||||||
|
.map_err(EvaluationError::ResultsSerialization)?; |
||||||
|
} |
||||||
|
writer.finish() |
||||||
|
} |
||||||
|
} |
||||||
|
.map_err(EvaluationError::ResultsSerialization) |
||||||
|
} |
||||||
|
|
||||||
|
/// Writes the graph query results.
|
||||||
|
///
|
||||||
|
/// This method fails if it is called on the `Solution` or `Boolean` results.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::RdfFormat;
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
/// use oxigraph::store::Store;
|
||||||
|
///
|
||||||
|
/// let graph = "<http://example.com> <http://example.com> <http://example.com> .\n";
|
||||||
|
///
|
||||||
|
/// let store = Store::new()?;
|
||||||
|
/// store.load_graph(
|
||||||
|
/// graph.as_bytes(),
|
||||||
|
/// RdfFormat::NTriples,
|
||||||
|
/// GraphName::DefaultGraph,
|
||||||
|
/// None,
|
||||||
|
/// )?;
|
||||||
|
///
|
||||||
|
/// let results = store.query("CONSTRUCT WHERE { ?s ?p ?o }")?;
|
||||||
|
/// assert_eq!(
|
||||||
|
/// results.write_graph(Vec::new(), RdfFormat::NTriples)?,
|
||||||
|
/// graph.as_bytes()
|
||||||
|
/// );
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
pub fn write_graph<W: Write>( |
||||||
|
self, |
||||||
|
write: W, |
||||||
|
format: impl Into<RdfFormat>, |
||||||
|
) -> Result<W, EvaluationError> { |
||||||
|
if let Self::Graph(triples) = self { |
||||||
|
let mut writer = RdfSerializer::from_format(format.into()).serialize_to_write(write); |
||||||
|
for triple in triples { |
||||||
|
writer |
||||||
|
.write_triple(&triple?) |
||||||
|
.map_err(EvaluationError::ResultsSerialization)?; |
||||||
|
} |
||||||
|
writer |
||||||
|
.finish() |
||||||
|
.map_err(EvaluationError::ResultsSerialization) |
||||||
|
} else { |
||||||
|
Err(EvaluationError::NotAGraph) |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl From<QuerySolutionIter> for QueryResults { |
||||||
|
#[inline] |
||||||
|
fn from(value: QuerySolutionIter) -> Self { |
||||||
|
Self::Solutions(value) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read + 'static> From<FromReadQueryResultsReader<R>> for QueryResults { |
||||||
|
fn from(reader: FromReadQueryResultsReader<R>) -> Self { |
||||||
|
match reader { |
||||||
|
FromReadQueryResultsReader::Solutions(s) => Self::Solutions(s.into()), |
||||||
|
FromReadQueryResultsReader::Boolean(v) => Self::Boolean(v), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An iterator over [`QuerySolution`]s.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::sparql::QueryResults;
|
||||||
|
/// use oxigraph::store::Store;
|
||||||
|
///
|
||||||
|
/// let store = Store::new()?;
|
||||||
|
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
|
||||||
|
/// for solution in solutions {
|
||||||
|
/// println!("{:?}", solution?.get("s"));
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
pub struct QuerySolutionIter { |
||||||
|
variables: Arc<[Variable]>, |
||||||
|
iter: Box<dyn Iterator<Item = Result<QuerySolution, EvaluationError>>>, |
||||||
|
} |
||||||
|
|
||||||
|
impl QuerySolutionIter { |
||||||
|
/// Construct a new iterator of solution from an ordered list of solution variables and an iterator of solution tuples
|
||||||
|
/// (each tuple using the same ordering as the variable list such that tuple element 0 is the value for the variable 0...)
|
||||||
|
pub fn new( |
||||||
|
variables: Arc<[Variable]>, |
||||||
|
iter: impl Iterator<Item = Result<Vec<Option<Term>>, EvaluationError>> + 'static, |
||||||
|
) -> Self { |
||||||
|
Self { |
||||||
|
variables: Arc::clone(&variables), |
||||||
|
iter: Box::new( |
||||||
|
iter.map(move |t| t.map(|values| (Arc::clone(&variables), values).into())), |
||||||
|
), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// The variables used in the solutions.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::sparql::{QueryResults, Variable};
|
||||||
|
/// use oxigraph::store::Store;
|
||||||
|
///
|
||||||
|
/// let store = Store::new()?;
|
||||||
|
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }")? {
|
||||||
|
/// assert_eq!(
|
||||||
|
/// solutions.variables(),
|
||||||
|
/// &[Variable::new("s")?, Variable::new("o")?]
|
||||||
|
/// );
|
||||||
|
/// }
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[inline] |
||||||
|
pub fn variables(&self) -> &[Variable] { |
||||||
|
&self.variables |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read + 'static> From<FromReadSolutionsReader<R>> for QuerySolutionIter { |
||||||
|
fn from(reader: FromReadSolutionsReader<R>) -> Self { |
||||||
|
Self { |
||||||
|
variables: reader.variables().into(), |
||||||
|
iter: Box::new(reader.map(|t| t.map_err(EvaluationError::from))), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl Iterator for QuerySolutionIter { |
||||||
|
type Item = Result<QuerySolution, EvaluationError>; |
||||||
|
|
||||||
|
#[inline] |
||||||
|
fn next(&mut self) -> Option<Self::Item> { |
||||||
|
self.iter.next() |
||||||
|
} |
||||||
|
|
||||||
|
#[inline] |
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) { |
||||||
|
self.iter.size_hint() |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An iterator over the triples that compose a graph solution.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::sparql::QueryResults;
|
||||||
|
/// use oxigraph::store::Store;
|
||||||
|
///
|
||||||
|
/// let store = Store::new()?;
|
||||||
|
/// if let QueryResults::Graph(triples) = store.query("CONSTRUCT WHERE { ?s ?p ?o }")? {
|
||||||
|
/// for triple in triples {
|
||||||
|
/// println!("{}", triple?);
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
pub struct QueryTripleIter { |
||||||
|
pub(crate) iter: Box<dyn Iterator<Item = Result<Triple, EvaluationError>>>, |
||||||
|
} |
||||||
|
|
||||||
|
impl Iterator for QueryTripleIter { |
||||||
|
type Item = Result<Triple, EvaluationError>; |
||||||
|
|
||||||
|
#[inline] |
||||||
|
fn next(&mut self) -> Option<Self::Item> { |
||||||
|
self.iter.next() |
||||||
|
} |
||||||
|
|
||||||
|
#[inline] |
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) { |
||||||
|
self.iter.size_hint() |
||||||
|
} |
||||||
|
|
||||||
|
#[inline] |
||||||
|
fn fold<Acc, G>(self, init: Acc, g: G) -> Acc |
||||||
|
where |
||||||
|
G: FnMut(Acc, Self::Item) -> Acc, |
||||||
|
{ |
||||||
|
self.iter.fold(init, g) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
#[cfg(test)]
#[allow(clippy::panic_in_result_fn)]
mod tests {
    use super::*;
    use std::io::Cursor;

    /// Serializes a set of representative results, parses the output back, and
    /// serializes again: both serializations must be byte-identical.
    #[test]
    fn test_serialization_roundtrip() -> Result<(), EvaluationError> {
        use std::str;

        for format in [
            QueryResultsFormat::Json,
            QueryResultsFormat::Xml,
            QueryResultsFormat::Tsv,
        ] {
            let variables: Arc<[Variable]> = [
                Variable::new_unchecked("foo"),
                Variable::new_unchecked("bar"),
            ]
            .as_ref()
            .into();
            // Tuples exercising every term kind: unbound, IRIs, blank nodes,
            // simple/language-tagged/typed literals and a quoted triple.
            let tuples: Vec<Result<Vec<Option<Term>>, EvaluationError>> = vec![
                Ok(vec![None, None]),
                Ok(vec![
                    Some(NamedNode::new_unchecked("http://example.com").into()),
                    None,
                ]),
                Ok(vec![
                    None,
                    Some(NamedNode::new_unchecked("http://example.com").into()),
                ]),
                Ok(vec![
                    Some(BlankNode::new_unchecked("foo").into()),
                    Some(BlankNode::new_unchecked("bar").into()),
                ]),
                Ok(vec![Some(Literal::new_simple_literal("foo").into()), None]),
                Ok(vec![
                    Some(Literal::new_language_tagged_literal_unchecked("foo", "fr").into()),
                    None,
                ]),
                Ok(vec![
                    Some(Literal::from(1).into()),
                    Some(Literal::from(true).into()),
                ]),
                Ok(vec![
                    Some(Literal::from(1.33).into()),
                    Some(Literal::from(false).into()),
                ]),
                Ok(vec![
                    Some(
                        Triple::new(
                            NamedNode::new_unchecked("http://example.com/s"),
                            NamedNode::new_unchecked("http://example.com/p"),
                            Triple::new(
                                NamedNode::new_unchecked("http://example.com/os"),
                                NamedNode::new_unchecked("http://example.com/op"),
                                NamedNode::new_unchecked("http://example.com/oo"),
                            ),
                        )
                        .into(),
                    ),
                    None,
                ]),
            ];
            let examples = vec![
                QueryResults::Boolean(true),
                QueryResults::Boolean(false),
                QueryResults::Solutions(QuerySolutionIter::new(variables, tuples.into_iter())),
            ];

            for example in examples {
                let mut first = Vec::new();
                example.write(&mut first, format)?;
                let reparsed = QueryResults::read(Cursor::new(first.clone()), format)?;
                let mut second = Vec::new();
                reparsed.write(&mut second, format)?;
                assert_eq!(
                    str::from_utf8(&first).unwrap(),
                    str::from_utf8(&second).unwrap()
                );
            }
        }

        Ok(())
    }
}
@ -0,0 +1,44 @@ |
|||||||
|
//! Utilities to read and write RDF results formats using [sparesults](https://crates.io/crates/sparesults).
|
||||||
|
//!
|
||||||
|
//! It supports [SPARQL Query Results XML Format (Second Edition)](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/).
|
||||||
|
//!
|
||||||
|
//! Usage example converting a JSON result file into a TSV result file:
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! use oxigraph::sparql::results::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader, QueryResultsSerializer};
|
||||||
|
//! use std::io::Result;
|
||||||
|
//!
|
||||||
|
//! fn convert_json_to_tsv(json_file: &[u8]) -> Result<Vec<u8>> {
|
||||||
|
//! let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json);
|
||||||
|
//! let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
|
||||||
|
//! // We start to read the JSON file and see which kind of results it is
|
||||||
|
//! match json_parser.parse_read(json_file)? {
|
||||||
|
//! FromReadQueryResultsReader::Boolean(value) => {
|
||||||
|
//! // it's a boolean result, we copy it in TSV to the output buffer
|
||||||
|
//! tsv_serializer.serialize_boolean_to_write(Vec::new(), value)
|
||||||
|
//! }
|
||||||
|
//! FromReadQueryResultsReader::Solutions(solutions_reader) => {
|
||||||
|
//! // it's a set of solutions, we create a writer and we write to it while reading in streaming from the JSON file
|
||||||
|
//! let mut serialize_solutions_to_write = tsv_serializer.serialize_solutions_to_write(Vec::new(), solutions_reader.variables().to_vec())?;
|
||||||
|
//! for solution in solutions_reader {
|
||||||
|
//! serialize_solutions_to_write.write(&solution?)?;
|
||||||
|
//! }
|
||||||
|
//! serialize_solutions_to_write.finish()
|
||||||
|
//! }
|
||||||
|
//! }
|
||||||
|
//! }
|
||||||
|
//!
|
||||||
|
//! // Let's test with a boolean
|
||||||
|
//! assert_eq!(
|
||||||
|
//! convert_json_to_tsv(br#"{"boolean":true}"#.as_slice()).unwrap(),
|
||||||
|
//! b"true"
|
||||||
|
//! );
|
||||||
|
//!
|
||||||
|
//! // And with a set of solutions
|
||||||
|
//! assert_eq!(
|
||||||
|
//! convert_json_to_tsv(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice()).unwrap(),
|
||||||
|
//! b"?foo\t?bar\n\"test\"\t\n"
|
||||||
|
//! );
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
pub use sparesults::*; |
@ -0,0 +1,12 @@ |
|||||||
|
//! A storage backend
|
||||||
|
//! RocksDB is available, if not in memory
|
||||||
|
|
||||||
|
#[cfg(any(target_family = "wasm"))] |
||||||
|
pub use fallback::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||||
|
#[cfg(all(not(target_family = "wasm")))] |
||||||
|
pub use oxi_rocksdb::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||||
|
|
||||||
|
#[cfg(any(target_family = "wasm"))] |
||||||
|
mod fallback; |
||||||
|
#[cfg(all(not(target_family = "wasm")))] |
||||||
|
mod oxi_rocksdb; |
@ -0,0 +1,139 @@ |
|||||||
|
use crate::io::{RdfFormat, RdfParseError}; |
||||||
|
use crate::storage::numeric_encoder::EncodedTerm; |
||||||
|
use oxiri::IriParseError; |
||||||
|
use oxrdf::TermRef; |
||||||
|
use std::error::Error; |
||||||
|
use std::io; |
||||||
|
|
||||||
|
/// An error related to storage operations (reads, writes...).
|
||||||
|
#[derive(Debug, thiserror::Error)] |
||||||
|
#[non_exhaustive] |
||||||
|
pub enum StorageError { |
||||||
|
/// Error from the OS I/O layer.
|
||||||
|
#[error(transparent)] |
||||||
|
Io(#[from] io::Error), |
||||||
|
/// Error related to data corruption.
|
||||||
|
#[error(transparent)] |
||||||
|
Corruption(#[from] CorruptionError), |
||||||
|
#[doc(hidden)] |
||||||
|
#[error("{0}")] |
||||||
|
Other(#[source] Box<dyn Error + Send + Sync + 'static>), |
||||||
|
} |
||||||
|
|
||||||
|
impl From<StorageError> for io::Error { |
||||||
|
#[inline] |
||||||
|
fn from(error: StorageError) -> Self { |
||||||
|
match error { |
||||||
|
StorageError::Io(error) => error, |
||||||
|
StorageError::Corruption(error) => error.into(), |
||||||
|
StorageError::Other(error) => Self::new(io::ErrorKind::Other, error), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An error return if some content in the database is corrupted.
|
||||||
|
#[derive(Debug, thiserror::Error)] |
||||||
|
#[error(transparent)] |
||||||
|
pub struct CorruptionError(#[from] CorruptionErrorKind); |
||||||
|
|
||||||
|
/// An error return if some content in the database is corrupted.
|
||||||
|
#[derive(Debug, thiserror::Error)] |
||||||
|
enum CorruptionErrorKind { |
||||||
|
#[error("{0}")] |
||||||
|
Msg(String), |
||||||
|
#[error("{0}")] |
||||||
|
Other(#[source] Box<dyn Error + Send + Sync + 'static>), |
||||||
|
} |
||||||
|
|
||||||
|
impl CorruptionError { |
||||||
|
/// Builds an error from a printable error message.
|
||||||
|
#[inline] |
||||||
|
pub(crate) fn new(error: impl Into<Box<dyn Error + Send + Sync + 'static>>) -> Self { |
||||||
|
Self(CorruptionErrorKind::Other(error.into())) |
||||||
|
} |
||||||
|
|
||||||
|
#[inline] |
||||||
|
pub(crate) fn from_encoded_term(encoded: &EncodedTerm, term: &TermRef<'_>) -> Self { |
||||||
|
// TODO: eventually use a dedicated error enum value
|
||||||
|
Self::msg(format!("Invalid term encoding {encoded:?} for {term}")) |
||||||
|
} |
||||||
|
|
||||||
|
#[inline] |
||||||
|
pub(crate) fn from_missing_column_family_name(name: &'static str) -> Self { |
||||||
|
// TODO: eventually use a dedicated error enum value
|
||||||
|
Self::msg(format!("Column family {name} does not exist")) |
||||||
|
} |
||||||
|
|
||||||
|
/// Builds an error from a printable error message.
|
||||||
|
#[inline] |
||||||
|
pub(crate) fn msg(msg: impl Into<String>) -> Self { |
||||||
|
Self(CorruptionErrorKind::Msg(msg.into())) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl From<CorruptionError> for io::Error { |
||||||
|
#[inline] |
||||||
|
fn from(error: CorruptionError) -> Self { |
||||||
|
Self::new(io::ErrorKind::InvalidData, error) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An error raised while loading a file into a [`Store`](crate::store::Store).
|
||||||
|
#[derive(Debug, thiserror::Error)] |
||||||
|
pub enum LoaderError { |
||||||
|
/// An error raised while reading the file.
|
||||||
|
#[error(transparent)] |
||||||
|
Parsing(#[from] RdfParseError), |
||||||
|
/// An error raised during the insertion in the store.
|
||||||
|
#[error(transparent)] |
||||||
|
Storage(#[from] StorageError), |
||||||
|
/// The base IRI is invalid.
|
||||||
|
#[error("Invalid base IRI '{iri}': {error}")] |
||||||
|
InvalidBaseIri { |
||||||
|
/// The IRI itself.
|
||||||
|
iri: String, |
||||||
|
/// The parsing error.
|
||||||
|
#[source] |
||||||
|
error: IriParseError, |
||||||
|
}, |
||||||
|
} |
||||||
|
|
||||||
|
impl From<LoaderError> for io::Error { |
||||||
|
#[inline] |
||||||
|
fn from(error: LoaderError) -> Self { |
||||||
|
match error { |
||||||
|
LoaderError::Storage(error) => error.into(), |
||||||
|
LoaderError::Parsing(error) => error.into(), |
||||||
|
LoaderError::InvalidBaseIri { .. } => { |
||||||
|
Self::new(io::ErrorKind::InvalidInput, error.to_string()) |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An error raised while writing a file from a [`Store`](crate::store::Store).
|
||||||
|
#[derive(Debug, thiserror::Error)] |
||||||
|
pub enum SerializerError { |
||||||
|
/// An error raised while writing the content.
|
||||||
|
#[error(transparent)] |
||||||
|
Io(#[from] io::Error), |
||||||
|
/// An error raised during the lookup in the store.
|
||||||
|
#[error(transparent)] |
||||||
|
Storage(#[from] StorageError), |
||||||
|
/// A format compatible with [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset) is required.
|
||||||
|
#[error("A RDF format supporting datasets was expected, {0} found")] |
||||||
|
DatasetFormatExpected(RdfFormat), |
||||||
|
} |
||||||
|
|
||||||
|
impl From<SerializerError> for io::Error { |
||||||
|
#[inline] |
||||||
|
fn from(error: SerializerError) -> Self { |
||||||
|
match error { |
||||||
|
SerializerError::Storage(error) => error.into(), |
||||||
|
SerializerError::Io(error) => error, |
||||||
|
SerializerError::DatasetFormatExpected(_) => { |
||||||
|
Self::new(io::ErrorKind::InvalidInput, error.to_string()) |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
File diff suppressed because it is too large
Load Diff
@ -1,28 +1,33 @@ |
|||||||
[package] |
[package] |
||||||
name = "oxrdf" |
name = "oxrdf" |
||||||
version = "0.1.7" |
version = "0.2.0-alpha.4" |
||||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
authors.workspace = true |
||||||
license = "MIT OR Apache-2.0" |
license.workspace = true |
||||||
readme = "README.md" |
readme = "README.md" |
||||||
keywords = ["RDF"] |
keywords = ["RDF"] |
||||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib/oxrdf" |
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib/oxrdf" |
||||||
homepage = "https://oxigraph.org/" |
|
||||||
description = """ |
description = """ |
||||||
A library providing basic data structures related to RDF |
A library providing basic data structures related to RDF |
||||||
""" |
""" |
||||||
documentation = "https://docs.rs/oxrdf" |
documentation = "https://docs.rs/oxrdf" |
||||||
edition = "2021" |
edition.workspace = true |
||||||
rust-version = "1.60" |
rust-version.workspace = true |
||||||
|
|
||||||
[features] |
[features] |
||||||
default = [] |
default = [] |
||||||
rdf-star = [] |
rdf-star = [] |
||||||
|
|
||||||
[dependencies] |
[dependencies] |
||||||
rand = "0.8" |
oxilangtag.workspace = true |
||||||
oxilangtag = "0.1" |
oxiri.workspace = true |
||||||
oxiri = "0.2" |
oxsdatatypes = { workspace = true, optional = true } |
||||||
oxsdatatypes = { version = "0.1.3", path="../oxsdatatypes", optional = true } |
rand.workspace = true |
||||||
|
thiserror.workspace = true |
||||||
|
serde.workspace = true |
||||||
|
|
||||||
|
[lints] |
||||||
|
workspace = true |
||||||
|
|
||||||
[package.metadata.docs.rs] |
[package.metadata.docs.rs] |
||||||
all-features = true |
all-features = true |
||||||
|
rustdoc-args = ["--cfg", "docsrs"] |
||||||
|
@ -0,0 +1,36 @@ |
|||||||
|
[package] |
||||||
|
name = "oxrdfio" |
||||||
|
version = "0.1.0-alpha.5" |
||||||
|
authors.workspace = true |
||||||
|
license.workspace = true |
||||||
|
readme = "README.md" |
||||||
|
keywords = ["RDF"] |
||||||
|
repository = "https://github.com/oxigraph/oxigraph/tree/master/lib/oxrdfio" |
||||||
|
documentation = "https://docs.rs/oxrdfio" |
||||||
|
description = """ |
||||||
|
Parser and serializer for various RDF formats |
||||||
|
""" |
||||||
|
edition.workspace = true |
||||||
|
rust-version.workspace = true |
||||||
|
|
||||||
|
[features] |
||||||
|
default = [] |
||||||
|
async-tokio = ["dep:tokio", "oxrdfxml/async-tokio", "oxttl/async-tokio"] |
||||||
|
rdf-star = ["oxrdf/rdf-star", "oxttl/rdf-star"] |
||||||
|
|
||||||
|
[dependencies] |
||||||
|
oxrdf.workspace = true |
||||||
|
oxrdfxml.workspace = true |
||||||
|
oxttl.workspace = true |
||||||
|
thiserror.workspace = true |
||||||
|
tokio = { workspace = true, optional = true, features = ["io-util"] } |
||||||
|
|
||||||
|
[dev-dependencies] |
||||||
|
tokio = { workspace = true, features = ["rt", "macros"] } |
||||||
|
|
||||||
|
[lints] |
||||||
|
workspace = true |
||||||
|
|
||||||
|
[package.metadata.docs.rs] |
||||||
|
all-features = true |
||||||
|
rustdoc-args = ["--cfg", "docsrs"] |
@ -0,0 +1,67 @@ |
|||||||
|
OxRDF I/O |
||||||
|
========= |
||||||
|
|
||||||
|
[![Latest Version](https://img.shields.io/crates/v/oxrdfio.svg)](https://crates.io/crates/oxrdfio) |
||||||
|
[![Released API docs](https://docs.rs/oxrdfio/badge.svg)](https://docs.rs/oxrdfio) |
||||||
|
[![Crates.io downloads](https://img.shields.io/crates/d/oxrdfio)](https://crates.io/crates/oxrdfio) |
||||||
|
[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) |
||||||
|
[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community) |
||||||
|
|
||||||
|
OxRDF I/O is a set of parsers and serializers for RDF. |
||||||
|
|
||||||
|
It supports: |
||||||
|
* [N3](https://w3c.github.io/N3/spec/) using [`oxttl`](https://crates.io/crates/oxttl) |
||||||
|
* [N-Quads](https://www.w3.org/TR/n-quads/) using [`oxttl`](https://crates.io/crates/oxttl) |
||||||
|
* [N-Triples](https://www.w3.org/TR/n-triples/) using [`oxttl`](https://crates.io/crates/oxttl) |
||||||
|
* [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) using [`oxrdfxml`](https://crates.io/crates/oxrdfxml) |
||||||
|
* [TriG](https://www.w3.org/TR/trig/) using [`oxttl`](https://crates.io/crates/oxttl) |
||||||
|
* [Turtle](https://www.w3.org/TR/turtle/) using [`oxttl`](https://crates.io/crates/oxttl) |
||||||
|
|
||||||
|
Support for [RDF-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html) is also available behind the `rdf-star` feature for [Turtle-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#turtle-star), [TriG-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#trig-star), [N-Triples-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-triples-star) and [N-Quads-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html#n-quads-star). |
||||||
|
|
||||||
|
It is designed as a low level parser compatible with both synchronous and asynchronous I/O (behind the `async-tokio` feature). |
||||||
|
|
||||||
|
The entry points of this library are the two [`RdfParser`] and [`RdfSerializer`] structs. |
||||||
|
|
||||||
|
Usage example converting a Turtle file to a N-Triples file: |
||||||
|
```rust |
||||||
|
use oxrdfio::{RdfFormat, RdfParser, RdfSerializer}; |
||||||
|
|
||||||
|
let turtle_file = b"@base <http://example.com/> . |
||||||
|
@prefix schema: <http://schema.org/> . |
||||||
|
<foo> a schema:Person ; |
||||||
|
schema:name \"Foo\" . |
||||||
|
<bar> a schema:Person ; |
||||||
|
schema:name \"Bar\" ."; |
||||||
|
|
||||||
|
let ntriples_file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . |
||||||
|
<http://example.com/foo> <http://schema.org/name> \"Foo\" . |
||||||
|
<http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . |
||||||
|
<http://example.com/bar> <http://schema.org/name> \"Bar\" . |
||||||
|
"; |
||||||
|
|
||||||
|
let mut writer = RdfSerializer::from_format(RdfFormat::NTriples).serialize_to_write(Vec::new()); |
||||||
|
for quad in RdfParser::from_format(RdfFormat::Turtle).parse_read(turtle_file.as_ref()) { |
||||||
|
writer.write_quad(&quad.unwrap()).unwrap(); |
||||||
|
} |
||||||
|
assert_eq!(writer.finish().unwrap(), ntriples_file); |
||||||
|
``` |
||||||
|
|
||||||
|
Parsers for other RDF formats exist in Rust, such as [graph-rdfa-processor](https://github.com/nbittich/graph-rdfa-processor) for RDFa and [json-ld](https://github.com/timothee-haudebourg/json-ld) for JSON-LD. |
||||||
|
|
||||||
|
|
||||||
|
## License |
||||||
|
|
||||||
|
This project is licensed under either of |
||||||
|
|
||||||
|
* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
||||||
|
`<http://www.apache.org/licenses/LICENSE-2.0>`) |
||||||
|
* MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
||||||
|
`<http://opensource.org/licenses/MIT>`) |
||||||
|
|
||||||
|
at your option. |
||||||
|
|
||||||
|
|
||||||
|
### Contribution |
||||||
|
|
||||||
|
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue