Compare commits
552 Commits
Author | SHA1 | Date |
---|---|---|
Niko PLP | c7f873f904 | 8 months ago |
Niko PLP | 7b0b60cda0 | 8 months ago |
Niko PLP | a4e2847810 | 8 months ago |
Niko PLP | 41e2e7206e | 8 months ago |
Niko PLP | aca204d9e5 | 8 months ago |
Niko PLP | b3ae51da82 | 9 months ago |
Niko PLP | c2d31daa1c | 9 months ago |
Niko PLP | f3ae4d8074 | 9 months ago |
Niko PLP | 53834396aa | 9 months ago |
Niko PLP | a9ff0524e2 | 9 months ago |
Niko PLP | 0d82c473f7 | 9 months ago |
Niko PLP | 6226e1fba6 | 9 months ago |
Niko PLP | fdcaf65a8d | 9 months ago |
Niko PLP | 77edc05ced | 9 months ago |
Tpt | 427d675c9b | 9 months ago |
Tpt | 1a40ab2017 | 9 months ago |
Tpt | cbb72c7be6 | 10 months ago |
Tpt | 58699f36f3 | 10 months ago |
Tpt | 83aa8170ea | 10 months ago |
Tpt | 1d5843fddc | 10 months ago |
Tpt | 0f0c1d2742 | 10 months ago |
Tpt | 184b8367dc | 10 months ago |
Tpt | ba396bad10 | 10 months ago |
Tpt | 75695dcdf7 | 10 months ago |
Tpt | 4c27b43e41 | 10 months ago |
Tpt | dcfcdd359c | 10 months ago |
Tpt | f7023a381e | 10 months ago |
Tpt | 2998f795fd | 10 months ago |
Tpt | 4705d75893 | 10 months ago |
Tpt | 9b9cf9cbcb | 10 months ago |
Tpt | 583d64e0c0 | 10 months ago |
Tpt | a0cc75b7cc | 10 months ago |
Tpt | be44451679 | 10 months ago |
Tpt | 43ef3e9e8a | 10 months ago |
Tpt | 0ac70e73dc | 10 months ago |
Tpt | f5b975e4d1 | 10 months ago |
Tpt | bd5e54a00a | 10 months ago |
Tpt | c57615519b | 10 months ago |
Tpt | 130f090555 | 10 months ago |
Tpt | bdde46b5c7 | 10 months ago |
Tpt | 2b656df6ee | 10 months ago |
Tpt | 8e2548467c | 10 months ago |
Tpt | fee7bf0d8a | 10 months ago |
Tpt | e0087c56b3 | 10 months ago |
Tpt | 0b5790a18f | 11 months ago |
Tpt | f7d132f317 | 11 months ago |
Jesse Wright | d361e1d283 | 11 months ago |
Tpt | 1424181379 | 11 months ago |
Tpt | 01d73fa62d | 11 months ago |
Tpt | accadaac34 | 11 months ago |
Tpt | 7d45ea43f5 | 11 months ago |
Tpt | c13cb8db7c | 11 months ago |
Tpt | 6c7514d058 | 11 months ago |
Tpt | e48b268fc5 | 11 months ago |
Tpt | c277804026 | 11 months ago |
Tpt | efae84b5f8 | 11 months ago |
Tpt | 269c73a7c2 | 11 months ago |
Yuri Astrakhan | ea300e9081 | 11 months ago |
Yuri Astrakhan | a078b12508 | 11 months ago |
Yuri Astrakhan | 0400f04915 | 11 months ago |
Tpt | 655ecd3e91 | 11 months ago |
Yuri Astrakhan | 1c3f054836 | 11 months ago |
Yuri Astrakhan | f5de5d3e98 | 11 months ago |
Yuri Astrakhan | 089875ad21 | 11 months ago |
Tpt | 9e3758e2c9 | 11 months ago |
Tpt | be26d210f1 | 11 months ago |
Yuri Astrakhan | a924df0e0a | 11 months ago |
Tpt | 0b1aabfcdd | 11 months ago |
Tpt | 70a4ff231b | 11 months ago |
Tpt | d49fb47767 | 11 months ago |
Yuri Astrakhan | c15233e964 | 11 months ago |
Yuri Astrakhan | 1e37577b71 | 11 months ago |
Yuri Astrakhan | 1e4326a2c5 | 11 months ago |
Tpt | c0d245871c | 12 months ago |
Tpt | 2b6ac5c195 | 12 months ago |
Tpt | ec030fb652 | 12 months ago |
Tpt | 2a81106c34 | 12 months ago |
Tpt | 46d3ed3f99 | 12 months ago |
Tpt | ef765666be | 12 months ago |
Tpt | 0a7cea5e25 | 12 months ago |
Yuri Astrakhan | 18bf383701 | 12 months ago |
Tpt | 54489aacfb | 12 months ago |
Yuri Astrakhan | 6494ba6e31 | 12 months ago |
Yuri Astrakhan | 185d83838c | 12 months ago |
Tpt | d838d55f02 | 12 months ago |
etiennept | f354bc7546 | 12 months ago |
Yuri Astrakhan | a976eb3efc | 12 months ago |
Yuri Astrakhan | 522bda2906 | 12 months ago |
Yuri Astrakhan | 5be6f55155 | 12 months ago |
Yuri Astrakhan | 405b95b4bd | 12 months ago |
Yuri Astrakhan | 5f603bc4fe | 12 months ago |
Yuri Astrakhan | 2b8df24b8b | 12 months ago |
Yuri Astrakhan | 51941c0dc5 | 12 months ago |
Yuri Astrakhan | d4bfcd3b24 | 12 months ago |
Tpt | df040400c5 | 1 year ago |
Tpt | b08c201074 | 1 year ago |
Tpt | c2040a30fd | 1 year ago |
Tpt | c2df0b829d | 1 year ago |
Tpt | cffc536eb9 | 1 year ago |
Tpt | 5cf8025aec | 1 year ago |
Tpt | d4eaa3c5ef | 1 year ago |
Tpt | dcabf50ab6 | 1 year ago |
Tpt | 0d23f4ae48 | 1 year ago |
Tpt | d1da94b08b | 1 year ago |
Tpt | f01796b1a4 | 1 year ago |
Tpt | 93eab63868 | 1 year ago |
Tpt | 42a66f62b9 | 1 year ago |
Tpt | f2a2bd5b5d | 1 year ago |
Tpt | 391e8d7662 | 1 year ago |
Tpt | a5781d1187 | 1 year ago |
Tpt | a84b898fda | 1 year ago |
Tpt | d170b53609 | 1 year ago |
Tpt | 790501e1b3 | 1 year ago |
Tpt | bdf5d593ee | 1 year ago |
Tpt | 1761672b41 | 1 year ago |
Tpt | bde73e5d72 | 1 year ago |
Tpt | 4c79e7ee78 | 1 year ago |
Tpt | 5cc3e37876 | 1 year ago |
Tpt | 8104f9e1de | 1 year ago |
Tpt | ed94f56ab4 | 1 year ago |
Tpt | 025bd2afd2 | 1 year ago |
Tpt | 4756217787 | 1 year ago |
Tpt | 2e9ac3cc1a | 1 year ago |
Tpt | 604d1bbe2e | 1 year ago |
Tpt | 4084acb9b8 | 1 year ago |
Tpt | 2a135283d5 | 1 year ago |
etiennept | 735db897ff | 1 year ago |
Tpt | 4b3f3f3278 | 1 year ago |
Tpt | 3241f47059 | 1 year ago |
Tpt | 4841f89072 | 1 year ago |
Tpt | efd5eec65d | 1 year ago |
Tpt | 899e553249 | 1 year ago |
Tpt | 03afe5c6c6 | 1 year ago |
Tpt | d88c2e0a8a | 1 year ago |
Tpt | 9979a3d503 | 1 year ago |
Dan Brickley | 389d993dc4 | 1 year ago |
Tpt | 4f404ab650 | 1 year ago |
Tpt | f445166942 | 1 year ago |
Tpt | e1ff1d919c | 1 year ago |
Tpt | 31733beda8 | 1 year ago |
Tpt | aa82fc8157 | 1 year ago |
Tpt | 6d1d752e01 | 1 year ago |
Tpt | 756c5394d0 | 1 year ago |
Tpt | d1cb4cecbd | 1 year ago |
Tpt | 99c3a4cce4 | 1 year ago |
Tpt | 48174cac12 | 1 year ago |
Tpt | 9af2717502 | 1 year ago |
Tpt | a9fee4f6b8 | 1 year ago |
Tpt | f8034c68e9 | 1 year ago |
Tpt | 98caee8f92 | 1 year ago |
Tpt | ddf589ea14 | 1 year ago |
Tpt | d19947414e | 1 year ago |
Tpt | cc41448b18 | 1 year ago |
Tpt | e6d98445e6 | 1 year ago |
Tpt | 261f9c64a5 | 1 year ago |
Tpt | 48db7f872b | 1 year ago |
Tpt | 8a7c6cf2c1 | 1 year ago |
Tpt | ab5f5c1c60 | 1 year ago |
Tpt | a2a6c5a41e | 1 year ago |
Tpt | a259879ef1 | 1 year ago |
Tpt | ea80c11d6e | 1 year ago |
Tpt | 1dfad23e4b | 1 year ago |
Tpt | 5647624012 | 1 year ago |
dependabot[bot] | e87bff6e6e | 1 year ago |
Tpt | b1e33293a5 | 1 year ago |
Tpt | ef429e6d1b | 1 year ago |
Tpt | b0a01e65fa | 1 year ago |
Tpt | 517df6d59e | 1 year ago |
Tpt | 832a4ba27d | 1 year ago |
Vilnis Termanis | c1b57f460b | 1 year ago |
Tpt | 8d348b2a6f | 1 year ago |
Tpt | b777d0110a | 1 year ago |
Tpt | 64f45cd11b | 1 year ago |
Tpt | 70b1c52166 | 1 year ago |
Tpt | 38844f6436 | 1 year ago |
Tpt | d280f7d2f7 | 1 year ago |
Tpt | dbb39d867a | 1 year ago |
Tpt | 5e3a2fc89d | 1 year ago |
Tpt | c5f02d9263 | 1 year ago |
Tpt | 90b7b128f2 | 1 year ago |
Tpt | 9b985295ae | 1 year ago |
Tpt | 108721624f | 1 year ago |
Tpt | 67fd726f9d | 1 year ago |
Tpt | 412ca37b3c | 1 year ago |
Tpt | 7a3e07d98d | 1 year ago |
Tpt | 6a21cb0625 | 1 year ago |
Tpt | 0783d1dcda | 1 year ago |
Tpt | 8ee30cf001 | 1 year ago |
Tpt | 8c8ca54596 | 1 year ago |
Tpt | 7c4578f5f5 | 1 year ago |
Tpt | 4c97637e4b | 1 year ago |
Tpt | a6f32390df | 1 year ago |
Tpt | 180ae22293 | 1 year ago |
Tpt | a8f98a0056 | 1 year ago |
Tpt | 1b511ed018 | 1 year ago |
Tpt | 87d2006b6e | 1 year ago |
Tpt | be074000cc | 1 year ago |
Tpt | 3c51dd31bc | 1 year ago |
Tpt | 555f6b8d7c | 1 year ago |
Tpt | bdedcc47e3 | 1 year ago |
Tpt | 99abe69ba1 | 1 year ago |
Tpt | be002dd51e | 1 year ago |
Tpt | 6edfb7a2f4 | 1 year ago |
Tpt | 13c3515d7b | 1 year ago |
Tpt | 8193cac86d | 1 year ago |
Tpt | b1c90b599b | 1 year ago |
Tpt | 1d55635fe2 | 1 year ago |
Tpt | 1eaa77ad93 | 1 year ago |
Tpt | 7fe055d2b4 | 1 year ago |
Tpt | 9da26c6f95 | 1 year ago |
Tpt | f10e5a40a3 | 1 year ago |
Tpt | 024bc7b8e8 | 1 year ago |
Tpt | 6611b491b1 | 1 year ago |
Tpt | 666a00cfab | 1 year ago |
Tpt | c9ec5f7c0c | 1 year ago |
Tpt | d44f9bee7a | 1 year ago |
Tpt | 570f21748d | 1 year ago |
Tpt | d2306cea52 | 1 year ago |
Tpt | 9e76323e2b | 1 year ago |
Tpt | 872111ab88 | 1 year ago |
Tpt | 3de3f9c4bc | 1 year ago |
Tpt | 010196c974 | 1 year ago |
Tpt | bbf184f7ae | 1 year ago |
Tpt | 4568ae4209 | 1 year ago |
Tpt | 788450932a | 1 year ago |
Tpt | f586cc048f | 1 year ago |
Tpt | 88e49f6c66 | 1 year ago |
Tpt | 807cf0d436 | 1 year ago |
Tpt | 5fee36e587 | 1 year ago |
Tpt | c6e55c706a | 1 year ago |
Tpt | 7c227830e9 | 1 year ago |
Tpt | f878463828 | 1 year ago |
Tpt | bb7379addb | 1 year ago |
Tpt | 382aa2e01f | 1 year ago |
Tpt | 3bb05e2af2 | 1 year ago |
Tpt | 4a798ed3ea | 1 year ago |
Tpt | f183196859 | 1 year ago |
Tpt | 217abaf7ee | 1 year ago |
Tpt | 7cd383af79 | 1 year ago |
Tpt | 73af297b4c | 1 year ago |
Tpt | b06d6506cb | 1 year ago |
Tpt | 12a738279f | 1 year ago |
Tpt | 4cb377bda4 | 1 year ago |
Tpt | afaabf6110 | 1 year ago |
Tpt | 4f7445104a | 1 year ago |
Tpt | 3adf33d2f4 | 1 year ago |
Tpt | 922023b1da | 1 year ago |
Tpt | 077c1fc1a8 | 1 year ago |
Tpt | b22e74379a | 1 year ago |
Tpt | 1e1ed65d3b | 2 years ago |
Tpt | 8a398db20e | 2 years ago |
Tpt | 00f179058e | 2 years ago |
Thomas | 8e770fbb5d | 2 years ago |
Thomas | c31ba0e823 | 2 years ago |
Tpt | cdabe52847 | 2 years ago |
Thomas | 501f9ce6f9 | 2 years ago |
Thomas | 24a1dd2556 | 2 years ago |
Tpt | c8e718ed2d | 2 years ago |
Tpt | db7fab0f20 | 2 years ago |
Tpt | f6c8358b24 | 2 years ago |
Tpt | 69d8ce6b4e | 2 years ago |
Thomas | 94986a0d28 | 2 years ago |
Thomas | b69e0d38f6 | 2 years ago |
Tpt | 98ac089984 | 2 years ago |
Tpt | 001b6e07b7 | 2 years ago |
Tpt | 86f14ce96f | 2 years ago |
Tpt | cb9922379c | 2 years ago |
Dan Yamamoto | 5085a60a87 | 2 years ago |
Dan Yamamoto | 43e6ce87f8 | 2 years ago |
Tpt | 71b1768d28 | 2 years ago |
Tpt | a1cbfdf67d | 2 years ago |
Tpt | 6cc7488905 | 2 years ago |
Tpt | a27f31b84e | 2 years ago |
Tpt | 785df9b00b | 2 years ago |
Tpt | 76deca135c | 2 years ago |
Tpt | 2281575c14 | 2 years ago |
Tpt | 5af06e926a | 2 years ago |
Tpt | 01caaa5d70 | 2 years ago |
Tpt | 81895cb6bc | 2 years ago |
Tpt | 40b10cdabc | 2 years ago |
Tpt | 7c0563cb1b | 2 years ago |
Tpt | a8abf26913 | 2 years ago |
Tpt | c016116b09 | 2 years ago |
Tpt | ae294683d6 | 2 years ago |
Tpt | ab17138f33 | 2 years ago |
Tpt | 8e76341bb2 | 2 years ago |
Tpt | f47306a4c5 | 2 years ago |
Tpt | acf83d4a31 | 2 years ago |
Tpt | f23ef514e4 | 2 years ago |
Tpt | 9a4f726aa4 | 2 years ago |
Tpt | 99186c1e7d | 2 years ago |
Tpt | 7a1cce527d | 2 years ago |
Tpt | 1c1531f640 | 2 years ago |
Tpt | 03f7641355 | 2 years ago |
Tpt | 8c68cf4041 | 2 years ago |
Tpt | 1f89bef860 | 2 years ago |
Tpt | ac61adc9c2 | 2 years ago |
Tpt | edec370f0a | 2 years ago |
Tpt | 5f2c9a3b92 | 2 years ago |
Tpt | 8c62137a01 | 2 years ago |
Tpt | f72a9600ae | 2 years ago |
Tpt | 57d39cad24 | 2 years ago |
Tpt | a7758484a5 | 2 years ago |
Tpt | 5d253c6afb | 2 years ago |
Tpt | 3e51020222 | 2 years ago |
Tpt | adda2d2d7e | 2 years ago |
Tpt | 22f990344f | 2 years ago |
Tpt | 4cc9e4008b | 2 years ago |
Tpt | 0a064a8704 | 2 years ago |
Thomas | d2804d8a8d | 2 years ago |
Tpt | d500614fcc | 2 years ago |
Tpt | 2650c5ed13 | 2 years ago |
Tpt | 7b9e9f9694 | 2 years ago |
Tpt | d992fb7545 | 2 years ago |
Tpt | b2d625e10e | 2 years ago |
Tpt | 63945638ea | 2 years ago |
Tpt | 5bfbbdbd3f | 2 years ago |
Tpt | eb40457d5c | 2 years ago |
Tpt | d24461fc42 | 2 years ago |
Tpt | 8bec2e2ff9 | 2 years ago |
Tpt | 9a6233b511 | 2 years ago |
Tpt | d26731432c | 2 years ago |
Tpt | 38af275451 | 2 years ago |
Tpt | cb89166380 | 2 years ago |
Tpt | 5ce24dda01 | 2 years ago |
Tpt | 05fbb0e071 | 2 years ago |
Thomas Tanon | 8f3af5a7fc | 2 years ago |
dependabot[bot] | a25bf55919 | 2 years ago |
Tpt | f9d7b93abf | 2 years ago |
Tpt | e96672a2a8 | 2 years ago |
Tpt | cfe52db3a3 | 2 years ago |
Tpt | 7175784356 | 2 years ago |
Tpt | a2d8bcaaa3 | 2 years ago |
Tpt | f520de8893 | 2 years ago |
Tpt | 8e3ee3b6dd | 2 years ago |
Yaroslav Halchenko | 6f37c4c9c9 | 2 years ago |
Yaroslav Halchenko | aeeabf5d1c | 2 years ago |
Yaroslav Halchenko | 029fbf470e | 2 years ago |
Yaroslav Halchenko | a3294a8abd | 2 years ago |
Yaroslav Halchenko | 80ce67e6dd | 2 years ago |
Yaroslav Halchenko | 38357dd9b5 | 2 years ago |
Thomas | 704440538d | 2 years ago |
Yaroslav Halchenko | bbe9bd0303 | 2 years ago |
Tpt | cf03da0fab | 2 years ago |
Tpt | b8c5628e3b | 2 years ago |
Tpt | 79c5e3918e | 2 years ago |
Tpt | b630ab4185 | 2 years ago |
Tpt | 3e0f6b5405 | 2 years ago |
Maximilian Goisser | 1d02098b70 | 2 years ago |
Tpt | c3cf8e2002 | 2 years ago |
Tpt | c6b8c754ee | 2 years ago |
Tpt | d653e0645b | 2 years ago |
Tpt | 56e105bc04 | 2 years ago |
Tpt | d587d3b2bb | 2 years ago |
Tpt | 284e79521d | 2 years ago |
Tpt | 86bbebf93c | 2 years ago |
Tpt | feeaf17fe6 | 2 years ago |
Thomas | f41c499ef3 | 2 years ago |
Tpt | a977adff91 | 2 years ago |
Tpt | d74fc58a1c | 2 years ago |
Tpt | 60ffd99ad8 | 2 years ago |
Tpt | dcd59ac4dd | 2 years ago |
Tpt | 81793bc221 | 2 years ago |
Tpt | 9dc1106b9a | 2 years ago |
Tpt | 6af6c9c0eb | 2 years ago |
Tpt | 7787be6e84 | 2 years ago |
Tpt | f4b99e6953 | 2 years ago |
Tpt | ef65d53190 | 2 years ago |
Tpt | 76dec0b6a8 | 2 years ago |
Tpt | 13976014e7 | 2 years ago |
Tpt | f29a49bcd2 | 2 years ago |
Tpt | 5ce23665f8 | 2 years ago |
Tpt | 51c896fe03 | 2 years ago |
Tpt | 0c407cd041 | 2 years ago |
Tpt | 9c32f07e87 | 2 years ago |
Tpt | 5852d0b4df | 2 years ago |
Tpt | 88732f7dc7 | 2 years ago |
Tpt | 5849c6fdbe | 2 years ago |
Tpt | 3fb6beb0ba | 2 years ago |
Tpt | f9c58602a0 | 2 years ago |
Tpt | cdf76307d9 | 2 years ago |
Tpt | a164b268c2 | 2 years ago |
Tpt | 21994d39fd | 2 years ago |
Tpt | 20dc1f26df | 2 years ago |
Tpt | 0f43ef19e3 | 2 years ago |
Tpt | bdb803dab5 | 2 years ago |
Tpt | c40c81447e | 2 years ago |
Tpt | d4e964ac47 | 2 years ago |
Tpt | 935e778db1 | 2 years ago |
Tpt | 7b74fa9b0a | 2 years ago |
Tpt | 28def4001b | 2 years ago |
Tpt | fbcbd60c0e | 2 years ago |
Tpt | 0e00e8209a | 2 years ago |
Tpt | e553b6374a | 2 years ago |
Tpt | 23e47bcc5e | 2 years ago |
Tpt | 3d61867386 | 2 years ago |
Tpt | bf36e60b34 | 2 years ago |
Tpt | beca5e88ca | 2 years ago |
Thomas Tanon | e90d98bb2c | 2 years ago |
Tpt | c4a5b65ac0 | 2 years ago |
Tpt | d8fa540b97 | 2 years ago |
Tpt | 9b20dbe6dc | 2 years ago |
Tpt | 85d4c70171 | 2 years ago |
Tpt | 9d6b72e9c4 | 2 years ago |
Tpt | 53edaf9d11 | 2 years ago |
Tpt | 5eaa388312 | 2 years ago |
Tpt | 26f4e2dc98 | 2 years ago |
Tpt | 5f68cb3746 | 2 years ago |
Tpt | 1ffb559ee2 | 2 years ago |
Tpt | 1570a3a4f1 | 2 years ago |
Tpt | 6d4a15d067 | 2 years ago |
Tpt | d42e2a818c | 2 years ago |
Tpt | 84d6d48b0e | 2 years ago |
Tpt | df55148355 | 2 years ago |
Tpt | b2385509a6 | 2 years ago |
Tpt | 855c39146d | 2 years ago |
Tpt | df2233c51c | 2 years ago |
Tpt | 9729ec8ed3 | 2 years ago |
Tpt | cdd8866fd3 | 2 years ago |
Tpt | f8486364b3 | 2 years ago |
Tpt | fab5db9511 | 2 years ago |
Tpt | 9063867ec9 | 2 years ago |
Tpt | 31c6bb7815 | 2 years ago |
Tpt | 7e7489499d | 2 years ago |
Tpt | 42cd6b0094 | 2 years ago |
Tpt | 6d09d77c61 | 2 years ago |
Tpt | a51509dcd3 | 2 years ago |
Tpt | a271e39fa0 | 2 years ago |
Thomas Tanon | cbc24950e3 | 2 years ago |
Tpt | c3f0aa94bf | 2 years ago |
Tpt | 306271df61 | 2 years ago |
Tpt | af02d5e1c4 | 2 years ago |
Tpt | c8caf805fa | 2 years ago |
Tpt | 03df957427 | 2 years ago |
Tpt | 4ce1b0e241 | 2 years ago |
Tpt | ef2701dc0c | 2 years ago |
Benedikt Seidl | 2b271e45ac | 2 years ago |
Benedikt Seidl | f7637ee5a5 | 2 years ago |
Benedikt Seidl | aa9476b9cc | 2 years ago |
Tpt | 62ff6ec138 | 2 years ago |
Tpt | c25a76c1f3 | 2 years ago |
Tpt | 7b81955d72 | 2 years ago |
Tpt | 909a906d2a | 2 years ago |
Tpt | cb2c891979 | 2 years ago |
Tpt | 7a0c457867 | 2 years ago |
Tpt | d80cdf3054 | 2 years ago |
Tpt | 0668983cd6 | 2 years ago |
Tpt | b267d5ea07 | 2 years ago |
Tpt | c60dd0d3ca | 2 years ago |
Tpt | 0786c40a5e | 2 years ago |
Tpt | 524903b03d | 2 years ago |
Tpt | f15101a2b3 | 2 years ago |
dependabot[bot] | 70d4eef803 | 2 years ago |
Tpt | afdb1f76e8 | 2 years ago |
dependabot[bot] | 6dc4aefe99 | 2 years ago |
dependabot[bot] | bd77bce2cd | 2 years ago |
Thomas | 339a619f28 | 2 years ago |
Tpt | 8684b82893 | 2 years ago |
Tpt | 2d19a19320 | 2 years ago |
Tpt | 7fcf9e1051 | 2 years ago |
Tpt | 6375481a80 | 2 years ago |
Tpt | 323ad73831 | 2 years ago |
Tpt | 6cabf6da15 | 2 years ago |
Tpt | 575bb8d253 | 2 years ago |
Tpt | ca415ec044 | 2 years ago |
Tpt | f47b2b1a7b | 2 years ago |
Tpt | 44fc4eef1a | 2 years ago |
Tpt | 54356f5273 | 2 years ago |
Tpt | 3d9cbc5d14 | 2 years ago |
Tpt | 0c23589187 | 2 years ago |
Tpt | 47e5ef329e | 2 years ago |
Thomas Tanon | c71f2d66b1 | 2 years ago |
Tpt | 22a3c21c4e | 2 years ago |
Tpt | 20928b82fa | 2 years ago |
Tpt | f969a66d05 | 2 years ago |
Thomas Tanon | ea0b4e22e7 | 2 years ago |
Tpt | 07e105e1be | 2 years ago |
Tpt | 0a78eacfcd | 2 years ago |
Tpt | a7bc31b446 | 2 years ago |
Tpt | 5c055e0d12 | 2 years ago |
Tpt | 9fe5436f94 | 2 years ago |
Tpt | 1fa0633db3 | 2 years ago |
Tpt | f6e9ceccc1 | 2 years ago |
Tpt | d97eb9eb31 | 2 years ago |
Tpt | 4927b3148e | 2 years ago |
Tpt | 54ce7410d2 | 2 years ago |
Tpt | d453721e8b | 2 years ago |
Tpt | 719cde2eac | 2 years ago |
Tpt | 3485833875 | 2 years ago |
Tpt | 78c4e750ae | 2 years ago |
Tpt | fe2b7c2e76 | 2 years ago |
Tpt | 6539f0a72e | 2 years ago |
Tpt | 76dd879ea6 | 2 years ago |
Tpt | 3f3523963d | 2 years ago |
Tpt | 14121b21f4 | 2 years ago |
Tpt | 854e29ee38 | 2 years ago |
Tpt | 027da6d639 | 2 years ago |
Tpt | 53913b7e96 | 2 years ago |
Tpt | de4c5abd9c | 2 years ago |
Tpt | 7fdd045516 | 2 years ago |
Tpt | 1ded5ac4b4 | 2 years ago |
Tpt | be3b009f5d | 2 years ago |
Tpt | 92feec7e98 | 2 years ago |
Tpt | 686e1edc8e | 2 years ago |
Tpt | 808c9db007 | 2 years ago |
Tpt | f21ab0ea6a | 2 years ago |
Tpt | e055c7b5f8 | 2 years ago |
Tpt | 59359b13d9 | 2 years ago |
Thomas Tanon | 149d600e65 | 2 years ago |
Tpt | a8f666fb5d | 2 years ago |
Tpt | 24371412b4 | 2 years ago |
Tpt | d7e4d5583f | 2 years ago |
Tpt | 2ca8bd19d3 | 2 years ago |
Tpt | d65e587756 | 2 years ago |
Tpt | b6c9a5b429 | 2 years ago |
Tpt | 07b5c32935 | 2 years ago |
Tpt | 576760e417 | 2 years ago |
Thomas Tanon | 65ed5471da | 2 years ago |
Tpt | e5b15031b6 | 2 years ago |
Tpt | 3712142e6f | 2 years ago |
Tpt | 45c541edad | 2 years ago |
Tpt | 45cd47d3c1 | 2 years ago |
Tpt | 7568aaab7b | 2 years ago |
Tpt | 6b02ac3c10 | 2 years ago |
Tpt | b7059d07e8 | 2 years ago |
Tpt | 0ccdea2ff1 | 2 years ago |
Thomas Tanon | a6de2e59a5 | 2 years ago |
Tpt | dd9201e466 | 2 years ago |
Tpt | ecd4b423dc | 2 years ago |
Tpt | 127ffc3547 | 2 years ago |
Tpt | 112631a0d7 | 2 years ago |
Tpt | 38fdffc147 | 2 years ago |
Tpt | 74c565a690 | 2 years ago |
Tpt | 3f7ff6843d | 2 years ago |
Tpt | 7f89baad87 | 2 years ago |
Tpt | 2de13a9498 | 2 years ago |
Tpt | 796780cd12 | 2 years ago |
Tpt | c9762fc280 | 2 years ago |
Tpt | ea4ae6bc48 | 2 years ago |
Tpt | 7581d9a6b2 | 2 years ago |
Tpt | cda6b09d79 | 2 years ago |
Tpt | c2fd3920b5 | 2 years ago |
Tpt | 54c66279f3 | 2 years ago |
Tpt | 841c9a2066 | 2 years ago |
Tpt | 579c876f98 | 2 years ago |
Tpt | 59aea75a30 | 2 years ago |
Tpt | 9dc8dce69c | 2 years ago |
Tpt | be51f90352 | 2 years ago |
Tpt | 1317fef237 | 2 years ago |
Tpt | 7a259955d2 | 2 years ago |
Thomas Tanon | ffa16b3afd | 2 years ago |
Maximilian Goisser | 325dc59f84 | 2 years ago |
@ -0,0 +1,4 @@ |
|||||||
|
FROM gcr.io/oss-fuzz-base/base-builder-rust:v1 |
||||||
|
COPY . $SRC/oxigraph |
||||||
|
WORKDIR oxigraph |
||||||
|
COPY .clusterfuzzlite/build.sh $SRC/ |
@ -0,0 +1,30 @@ |
|||||||
|
#!/bin/bash -eu |
||||||
|
shopt -s globstar |
||||||
|
|
||||||
|
function build_seed_corpus() { |
||||||
|
mkdir "/tmp/oxigraph_$1" |
||||||
|
for file in **/*."$2" |
||||||
|
do |
||||||
|
hash=$(sha256sum "$file" | awk '{print $1;}') |
||||||
|
cp "$file" "/tmp/oxigraph_$1/$hash" |
||||||
|
done |
||||||
|
zip "$1_seed_corpus.zip" /tmp/"oxigraph_$1"/* |
||||||
|
rm -r "/tmp/oxigraph_$1" |
||||||
|
} |
||||||
|
|
||||||
|
|
||||||
|
cd "$SRC"/oxigraph |
||||||
|
git submodule init |
||||||
|
git submodule update |
||||||
|
cargo fuzz build -O --debug-assertions |
||||||
|
for TARGET in sparql_eval sparql_results_json sparql_results_tsv sparql_results_xml n3 nquads trig rdf_xml |
||||||
|
do |
||||||
|
cp fuzz/target/x86_64-unknown-linux-gnu/release/$TARGET "$OUT"/ |
||||||
|
done |
||||||
|
build_seed_corpus sparql_results_json srj |
||||||
|
build_seed_corpus sparql_results_tsv tsv |
||||||
|
build_seed_corpus sparql_results_xml srx |
||||||
|
build_seed_corpus n3 n3 |
||||||
|
build_seed_corpus nquads nq |
||||||
|
build_seed_corpus trig trig |
||||||
|
build_seed_corpus rdf_xml rdf |
@ -0,0 +1 @@ |
|||||||
|
language: rust |
@ -1,21 +0,0 @@ |
|||||||
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/rust/.devcontainer/base.Dockerfile |
|
||||||
|
|
||||||
# [Choice] Debian OS version (use bullseye on local arm64/Apple Silicon): buster, bullseye |
|
||||||
ARG VARIANT="bullseye" |
|
||||||
FROM mcr.microsoft.com/vscode/devcontainers/rust:0-${VARIANT} |
|
||||||
|
|
||||||
# [Optional] Uncomment this section to install additional packages. |
|
||||||
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ |
|
||||||
&& apt-get -y install --no-install-recommends \ |
|
||||||
python3 \ |
|
||||||
python3-venv \ |
|
||||||
python-is-python3 \ |
|
||||||
libclang-dev |
|
||||||
|
|
||||||
ENV VIRTUAL_ENV=/opt/venv |
|
||||||
RUN python -m venv $VIRTUAL_ENV |
|
||||||
ENV PATH="$VIRTUAL_ENV/bin:$PATH" |
|
||||||
RUN pip install --no-cache-dir -r python/requirements.dev.txt |
|
||||||
|
|
||||||
# Change owner to the devcontainer user |
|
||||||
RUN chown -R 1000:1000 $VIRTUAL_ENV |
|
@ -1,69 +0,0 @@ |
|||||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: |
|
||||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/rust |
|
||||||
{ |
|
||||||
"name": "Rust", |
|
||||||
"build": { |
|
||||||
"dockerfile": "Dockerfile", |
|
||||||
"args": { |
|
||||||
// Use the VARIANT arg to pick a Debian OS version: buster, bullseye |
|
||||||
// Use bullseye when on local on arm64/Apple Silicon. |
|
||||||
"VARIANT": "bullseye" |
|
||||||
} |
|
||||||
}, |
|
||||||
"runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"], |
|
||||||
|
|
||||||
// Configure tool-specific properties. |
|
||||||
"customizations": { |
|
||||||
// Configure properties specific to VS Code. |
|
||||||
"vscode": { |
|
||||||
// Set *default* container specific settings.json values on container create. |
|
||||||
"settings": { |
|
||||||
"lldb.executable": "/usr/bin/lldb", |
|
||||||
// VS Code don't watch files under ./target |
|
||||||
"files.watcherExclude": { |
|
||||||
"**/target/**": true |
|
||||||
}, |
|
||||||
"rust-analyzer.checkOnSave.command": "clippy", |
|
||||||
|
|
||||||
"python.defaultInterpreterPath": "/opt/venv/bin/python", |
|
||||||
"python.linting.enabled": true, |
|
||||||
"python.linting.pylintEnabled": true, |
|
||||||
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", |
|
||||||
"python.formatting.blackPath": "/usr/local/py-utils/bin/black", |
|
||||||
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", |
|
||||||
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit", |
|
||||||
"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", |
|
||||||
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", |
|
||||||
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", |
|
||||||
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", |
|
||||||
"python.linting.pylintPath": "/opt/venv/bin/pylint", |
|
||||||
"python.testing.pytestPath": "/opt/venv/bin/pytest" |
|
||||||
}, |
|
||||||
|
|
||||||
// Add the IDs of extensions you want installed when the container is created. |
|
||||||
"extensions": [ |
|
||||||
"vadimcn.vscode-lldb", |
|
||||||
"mutantdino.resourcemonitor", |
|
||||||
"rust-lang.rust-analyzer", |
|
||||||
"tamasfe.even-better-toml", |
|
||||||
"serayuzgur.crates", |
|
||||||
"ms-python.python", |
|
||||||
"ms-python.vscode-pylance", |
|
||||||
"esbenp.prettier-vscode", |
|
||||||
"stardog-union.stardog-rdf-grammars" |
|
||||||
] |
|
||||||
} |
|
||||||
}, |
|
||||||
|
|
||||||
// Use 'forwardPorts' to make a list of ports inside the container available locally. |
|
||||||
// "forwardPorts": [], |
|
||||||
|
|
||||||
// Use 'postCreateCommand' to run commands after the container is created. |
|
||||||
"postCreateCommand": "git submodule update --init && cargo build", |
|
||||||
|
|
||||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. |
|
||||||
"remoteUser": "vscode", |
|
||||||
"features": { |
|
||||||
"python": "3.10" |
|
||||||
} |
|
||||||
} |
|
@ -0,0 +1,16 @@ |
|||||||
|
version: 2 |
||||||
|
updates: |
||||||
|
- package-ecosystem: "github-actions" |
||||||
|
directory: "/" |
||||||
|
schedule: |
||||||
|
interval: weekly |
||||||
|
- package-ecosystem: "pip" |
||||||
|
directory: "/python/" |
||||||
|
versioning-strategy: increase-if-necessary |
||||||
|
schedule: |
||||||
|
interval: weekly |
||||||
|
- package-ecosystem: "npm" |
||||||
|
directory: "/js/" |
||||||
|
versioning-strategy: increase-if-necessary |
||||||
|
schedule: |
||||||
|
interval: weekly |
@ -0,0 +1,16 @@ |
|||||||
|
--- |
||||||
|
name: Bug report |
||||||
|
about: Create a report to help us improve |
||||||
|
title: '' |
||||||
|
labels: bug |
||||||
|
assignees: '' |
||||||
|
|
||||||
|
--- |
||||||
|
|
||||||
|
**Describe the bug** |
||||||
|
A clear and concise description of what the bug is. |
||||||
|
|
||||||
|
**To Reproduce** |
||||||
|
Steps to reproduce the behavior: |
||||||
|
1. Which version of Oxigraph are you using? On which platform? |
||||||
|
2. A command-line or a code snippet that triggers the bug. |
@ -0,0 +1,20 @@ |
|||||||
|
--- |
||||||
|
name: Feature request |
||||||
|
about: Suggest an idea for this project |
||||||
|
title: '' |
||||||
|
labels: enhancement |
||||||
|
assignees: '' |
||||||
|
|
||||||
|
--- |
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.** |
||||||
|
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] |
||||||
|
|
||||||
|
**Describe the solution you'd like** |
||||||
|
A clear and concise description of what you want to happen. |
||||||
|
|
||||||
|
**Describe alternatives you've considered** |
||||||
|
A clear and concise description of any alternative solutions or features you've considered. |
||||||
|
|
||||||
|
**Additional context** |
||||||
|
Please link to other systems implementing the feature, specification of it if it exists and/or existing documentation about this feature. |
@ -0,0 +1,10 @@ |
|||||||
|
--- |
||||||
|
name: Question |
||||||
|
about: Please don't use issues but the Q&A section of the "discussions" space |
||||||
|
title: '' |
||||||
|
labels: question |
||||||
|
assignees: '' |
||||||
|
|
||||||
|
--- |
||||||
|
|
||||||
|
|
@ -0,0 +1,27 @@ |
|||||||
|
name: 'Setup Rust' |
||||||
|
description: 'Setup Rust using Rustup' |
||||||
|
inputs: |
||||||
|
version: |
||||||
|
description: 'Rust version to use. By default latest stable version' |
||||||
|
required: false |
||||||
|
default: 'stable' |
||||||
|
component: |
||||||
|
description: 'Rust extra component to install like clippy' |
||||||
|
required: false |
||||||
|
target: |
||||||
|
description: 'Rust extra target to install like wasm32-unknown-unknown' |
||||||
|
required: false |
||||||
|
runs: |
||||||
|
using: "composite" |
||||||
|
steps: |
||||||
|
- run: rustup update |
||||||
|
shell: bash |
||||||
|
- run: rustup default ${{ inputs.version }} |
||||||
|
shell: bash |
||||||
|
- run: rustup component add ${{ inputs.component }} |
||||||
|
shell: bash |
||||||
|
if: ${{ inputs.component }} |
||||||
|
- run: rustup target add ${{ inputs.target }} |
||||||
|
shell: bash |
||||||
|
if: ${{ inputs.target }} |
||||||
|
- uses: Swatinem/rust-cache@v2 |
@ -0,0 +1,11 @@ |
|||||||
|
if [ -f "rocksdb" ] |
||||||
|
then |
||||||
|
cd rocksdb || exit |
||||||
|
else |
||||||
|
git clone https://github.com/facebook/rocksdb.git |
||||||
|
cd rocksdb || exit |
||||||
|
git checkout v8.0.0 |
||||||
|
make shared_lib |
||||||
|
fi |
||||||
|
sudo make install-shared |
||||||
|
sudo ldconfig /usr/local/lib |
@ -0,0 +1,19 @@ |
|||||||
|
cd /workdir |
||||||
|
apk add clang-dev |
||||||
|
curl https://static.rust-lang.org/rustup/dist/%arch%-unknown-linux-musl/rustup-init --output rustup-init |
||||||
|
chmod +x rustup-init |
||||||
|
./rustup-init -y --profile minimal |
||||||
|
source "$HOME/.cargo/env" |
||||||
|
export PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin:/opt/python/cp39-cp39/bin:/opt/python/cp310-cp310/bin:/opt/python/cp311-cp311/bin" |
||||||
|
cd python |
||||||
|
python3.12 -m venv venv |
||||||
|
source venv/bin/activate |
||||||
|
pip install -r requirements.dev.txt |
||||||
|
maturin develop --release |
||||||
|
python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff |
||||||
|
maturin build --release --features abi3 --compatibility musllinux_1_2 |
||||||
|
if [ %for_each_version% ]; then |
||||||
|
for VERSION in 8 9 10 11 12; do |
||||||
|
maturin build --release --interpreter "python3.$VERSION" --compatibility musllinux_1_2 |
||||||
|
done |
||||||
|
fi |
@ -1,276 +0,0 @@ |
|||||||
name: Release artifacts |
|
||||||
|
|
||||||
on: |
|
||||||
release: |
|
||||||
types: [ published ] |
|
||||||
|
|
||||||
jobs: |
|
||||||
push_server_to_docker_registry: |
|
||||||
runs-on: ubuntu-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- uses: docker/setup-buildx-action@v2 |
|
||||||
- uses: docker/metadata-action@v4 |
|
||||||
id: docker_meta |
|
||||||
with: |
|
||||||
images: | |
|
||||||
${{ github.repository }} |
|
||||||
ghcr.io/${{ github.repository }} |
|
||||||
- uses: docker/login-action@v2 |
|
||||||
with: |
|
||||||
username: ${{ secrets.DOCKER_USERNAME }} |
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }} |
|
||||||
- uses: docker/login-action@v2 |
|
||||||
with: |
|
||||||
registry: ghcr.io |
|
||||||
username: ${{github.actor}} |
|
||||||
password: ${{secrets.GITHUB_TOKEN}} |
|
||||||
- uses: docker/build-push-action@v3 |
|
||||||
with: |
|
||||||
context: . |
|
||||||
file: server/Dockerfile |
|
||||||
pull: true |
|
||||||
push: true |
|
||||||
tags: ${{ steps.docker_meta.outputs.tags }} |
|
||||||
labels: ${{ steps.docker_meta.outputs.labels }} |
|
||||||
cache-from: type=gha |
|
||||||
cache-to: type=gha,mode=max |
|
||||||
|
|
||||||
publish_crates: |
|
||||||
runs-on: ubuntu-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- run: rustup update |
|
||||||
- run: cargo login $CRATES_IO_TOKEN |
|
||||||
env: |
|
||||||
CRATES_IO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} |
|
||||||
- run: cargo publish |
|
||||||
working-directory: ./oxrocksdb-sys |
|
||||||
continue-on-error: true |
|
||||||
- run: cargo publish |
|
||||||
working-directory: ./lib/oxrdf |
|
||||||
continue-on-error: true |
|
||||||
- run: sleep 60 |
|
||||||
- run: cargo publish |
|
||||||
working-directory: ./lib/sparesults |
|
||||||
continue-on-error: true |
|
||||||
- run: cargo publish |
|
||||||
working-directory: ./lib/spargebra |
|
||||||
continue-on-error: true |
|
||||||
- run: sleep 60 |
|
||||||
- run: cargo publish |
|
||||||
working-directory: ./lib |
|
||||||
continue-on-error: true |
|
||||||
- run: sleep 60 |
|
||||||
- run: cargo publish |
|
||||||
working-directory: ./server |
|
||||||
|
|
||||||
publish_pypi_linux: |
|
||||||
runs-on: ubuntu-latest |
|
||||||
strategy: |
|
||||||
matrix: |
|
||||||
architecture: [ "x86_64", "aarch64" ] |
|
||||||
continue-on-error: true |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- uses: docker/setup-qemu-action@v2 |
|
||||||
with: |
|
||||||
platforms: linux/${{ matrix.architecture }} |
|
||||||
if: matrix.architecture != 'x86_64' |
|
||||||
- run: sed 's/%arch%/${{ matrix.architecture }}/g' .github/workflows/manylinux_build.sh > .github/workflows/manylinux_build_script.sh |
|
||||||
- run: docker run -v "$(pwd)":/workdir --platform linux/${{ matrix.architecture }} quay.io/pypa/manylinux2014_${{ matrix.architecture }} /bin/bash /workdir/.github/workflows/manylinux_build_script.sh |
|
||||||
- uses: pypa/gh-action-pypi-publish@release/v1 |
|
||||||
with: |
|
||||||
user: __token__ |
|
||||||
password: ${{ secrets.PYPI_PASSWORD }} |
|
||||||
packages_dir: target/wheels |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: target/wheels/*.whl |
|
||||||
|
|
||||||
publish_pypi_mac: |
|
||||||
runs-on: macos-latest |
|
||||||
env: |
|
||||||
DEVELOPER_DIR: '/Applications/Xcode.app/Contents/Developer' |
|
||||||
SDKROOT: '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk' |
|
||||||
MACOSX_DEPLOYMENT_TARGET: '10.14' |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- uses: actions/setup-python@v4 |
|
||||||
with: |
|
||||||
python-version: "3.10" |
|
||||||
- run: rustup update && rustup target add aarch64-apple-darwin |
|
||||||
- run: pip install -r python/requirements.dev.txt |
|
||||||
- run: maturin build --release -m python/Cargo.toml |
|
||||||
- run: pip install --no-index --find-links=target/wheels/ pyoxigraph |
|
||||||
- run: rm -r target/wheels |
|
||||||
- run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black |
|
||||||
working-directory: ./python |
|
||||||
- run: maturin publish --no-sdist --universal2 -m python/Cargo.toml -u __token__ -p ${{ secrets.PYPI_PASSWORD }} |
|
||||||
- run: maturin publish --no-sdist -m python/Cargo.toml -u __token__ -p ${{ secrets.PYPI_PASSWORD }} |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: target/wheels/*.whl |
|
||||||
|
|
||||||
publish_pypi_windows: |
|
||||||
runs-on: windows-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- uses: actions/setup-python@v4 |
|
||||||
with: |
|
||||||
python-version: "3.10" |
|
||||||
- run: rustup update |
|
||||||
- run: Remove-Item -LiteralPath "C:\msys64\" -Force -Recurse |
|
||||||
- run: pip install -r python/requirements.dev.txt |
|
||||||
- run: maturin build --release -m python/Cargo.toml |
|
||||||
- run: pip install --no-index --find-links=target/wheels/ pyoxigraph |
|
||||||
- run: rm -r target/wheels |
|
||||||
- run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black |
|
||||||
working-directory: ./python |
|
||||||
- run: maturin publish --no-sdist -m python/Cargo.toml -u __token__ -p ${{ secrets.PYPI_PASSWORD }} |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: target/wheels/*.whl |
|
||||||
|
|
||||||
publish_pypi_stdist: |
|
||||||
runs-on: ubuntu-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- uses: actions/setup-python@v4 |
|
||||||
with: |
|
||||||
python-version: "3.10" |
|
||||||
- run: rustup update |
|
||||||
- run: pip install -r python/requirements.dev.txt |
|
||||||
- run: maturin build -m python/Cargo.toml |
|
||||||
- run: pip install --no-index --find-links=target/wheels/ pyoxigraph |
|
||||||
- run: rm -r target/wheels |
|
||||||
- run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black |
|
||||||
working-directory: ./python |
|
||||||
- run: maturin sdist -m python/Cargo.toml |
|
||||||
- uses: pypa/gh-action-pypi-publish@release/v1 |
|
||||||
with: |
|
||||||
user: __token__ |
|
||||||
password: ${{ secrets.PYPI_PASSWORD }} |
|
||||||
packages_dir: target/wheels |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: target/wheels/*.tar.gz |
|
||||||
|
|
||||||
publish_npm: |
|
||||||
runs-on: ubuntu-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- uses: actions/setup-node@v3 |
|
||||||
with: |
|
||||||
node-version: 16 |
|
||||||
registry-url: https://registry.npmjs.org |
|
||||||
- run: rustup update |
|
||||||
- run: cargo install wasm-pack |
|
||||||
- run: npm install |
|
||||||
working-directory: ./js |
|
||||||
- run: npm run release |
|
||||||
working-directory: ./js |
|
||||||
env: |
|
||||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} |
|
||||||
- run: npm run pack |
|
||||||
working-directory: ./js |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: js/*.tgz |
|
||||||
|
|
||||||
publish_full_archive: |
|
||||||
runs-on: ubuntu-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- run: | |
|
||||||
zip -r oxigraph_${{ github.event.release.tag_name }}.zip . |
|
||||||
tar -czf /tmp/oxigraph_${{ github.event.release.tag_name }}.tar.gz . |
|
||||||
mv /tmp/oxigraph_${{ github.event.release.tag_name }}.tar.gz . |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: | |
|
||||||
oxigraph_${{ github.event.release.tag_name }}.zip |
|
||||||
oxigraph_${{ github.event.release.tag_name }}.tar.gz |
|
||||||
|
|
||||||
publish_homebrew: |
|
||||||
if: "!contains('-', github.event.release.tag_name)" |
|
||||||
runs-on: ubuntu-latest |
|
||||||
needs: publish_full_archive |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
repository: oxigraph/homebrew-oxigraph |
|
||||||
token: ${{ secrets.FULL_ACCESS_TOKEN }} |
|
||||||
- run: | |
|
||||||
wget "https://github.com/oxigraph/oxigraph/releases/download/${{ github.event.release.tag_name }}/oxigraph_${{ github.event.release.tag_name }}.tar.gz" |
|
||||||
SHA=`shasum -a 256 "oxigraph_${{ github.event.release.tag_name }}.tar.gz" | awk '{ print $1 }'` |
|
||||||
rm "oxigraph_${{ github.event.release.tag_name }}.tar.gz" |
|
||||||
sed -i "s/download\/.*\.tar/download\/${{ github.event.release.tag_name }}\/oxigraph_${{ github.event.release.tag_name }}.tar/g" Formula/oxigraph.rb |
|
||||||
sed -i "s/sha256 \".*\"/sha256 \"$SHA\"/g" Formula/oxigraph.rb |
|
||||||
git config user.name github-actions |
|
||||||
git config user.email github-actions@github.com |
|
||||||
git add . |
|
||||||
git diff-index --quiet HEAD || git commit -m "Upgrades to ${{ github.event.release.tag_name }}" |
|
||||||
git push |
|
||||||
|
|
||||||
publish_binary_linux: |
|
||||||
runs-on: ubuntu-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- run: cargo build --release |
|
||||||
working-directory: ./server |
|
||||||
- run: mv target/release/oxigraph_server oxigraph_server_${{ github.event.release.tag_name }}_x86_64_linux_gnu |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: oxigraph_server_${{ github.event.release.tag_name }}_x86_64_linux_gnu |
|
||||||
|
|
||||||
publish_binary_mac: |
|
||||||
runs-on: macos-latest |
|
||||||
env: |
|
||||||
DEVELOPER_DIR: '/Applications/Xcode.app/Contents/Developer' |
|
||||||
SDKROOT: '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk' |
|
||||||
MACOSX_DEPLOYMENT_TARGET: '10.14' |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- run: rustup update |
|
||||||
- run: cargo build --release |
|
||||||
working-directory: ./server |
|
||||||
- run: mv target/release/oxigraph_server oxigraph_server_${{ github.event.release.tag_name }}_x86_64_apple |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: oxigraph_server_${{ github.event.release.tag_name }}_x86_64_apple |
|
||||||
|
|
||||||
publish_binary_windows: |
|
||||||
runs-on: windows-latest |
|
||||||
steps: |
|
||||||
- uses: actions/checkout@v3 |
|
||||||
with: |
|
||||||
submodules: true |
|
||||||
- run: rustup update |
|
||||||
- run: Remove-Item -LiteralPath "C:\msys64\" -Force -Recurse |
|
||||||
- run: cargo build --release |
|
||||||
working-directory: ./server |
|
||||||
- run: mv target/release/oxigraph_server.exe oxigraph_server_${{ github.event.release.tag_name }}_x86_64_windows_msvc.exe |
|
||||||
- uses: softprops/action-gh-release@v1 |
|
||||||
with: |
|
||||||
files: oxigraph_server_${{ github.event.release.tag_name }}_x86_64_windows_msvc.exe |
|
@ -0,0 +1,3 @@ |
|||||||
|
Thomas Tanon <thomas@pellissier-tanon.fr> <thomaspt@hotmail.fr> <Tpt@users.noreply.github.com> |
||||||
|
Thomas Tanon <thomas@pellissier-tanon.fr> |
||||||
|
Thomas Tanon <thomas.pellissier-tanon@helsing.ai> |
File diff suppressed because it is too large
Load Diff
@ -1,16 +1,272 @@ |
|||||||
[workspace] |
[workspace] |
||||||
members = [ |
members = [ |
||||||
"js", |
"js", |
||||||
"lib", |
"lib/oxigraph", |
||||||
"lib/oxrdf", |
"lib/oxrdf", |
||||||
"lib/spargebra", |
"lib/oxrdfio", |
||||||
|
"lib/oxrdfxml", |
||||||
|
"lib/oxsdatatypes", |
||||||
|
"lib/oxttl", |
||||||
"lib/sparesults", |
"lib/sparesults", |
||||||
"python", |
"lib/spargebra", |
||||||
"oxrocksdb-sys", |
"lib/sparopt", |
||||||
"server", |
"lib/sparql-smith", |
||||||
"testsuite" |
"testsuite" |
||||||
] |
] |
||||||
|
resolver = "2" |
||||||
|
|
||||||
|
[workspace.package] |
||||||
|
version = "0.4.0-alpha.7-dev" |
||||||
|
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
||||||
|
license = "MIT OR Apache-2.0" |
||||||
|
edition = "2021" |
||||||
|
rust-version = "1.70" |
||||||
|
|
||||||
|
[workspace.dependencies] |
||||||
|
rocksdb = {git = "https://git.nextgraph.org/NextGraph/rust-rocksdb.git", branch = "master", features = [ ] } |
||||||
|
serde = { version = "1.0.142", features = ["derive"] } |
||||||
|
anyhow = "1.0.72" |
||||||
|
arbitrary = "1.3" |
||||||
|
assert_cmd = "2.0" |
||||||
|
assert_fs = "1.0" |
||||||
|
bindgen = ">=0.60, <0.70" |
||||||
|
cc = "1.0.73" |
||||||
|
clap = "4.0" |
||||||
|
codspeed-criterion-compat = "2.3.3" |
||||||
|
console_error_panic_hook = "0.1.7" |
||||||
|
digest = "0.10" |
||||||
|
flate2 = "1.0" |
||||||
|
getrandom = "0.2.8" |
||||||
|
hex = "0.4" |
||||||
|
js-sys = "0.3.60" |
||||||
|
json-event-parser = "0.2.0-alpha.2" |
||||||
|
md-5 = "0.10" |
||||||
|
memchr = "2.5" |
||||||
|
oxilangtag = "0.1" |
||||||
|
oxiri = "0.2.3" |
||||||
|
peg = "0.8" |
||||||
|
pkg-config = "0.3.25" |
||||||
|
predicates = ">=2.0, <4.0" |
||||||
|
pyo3 = "0.21.0" |
||||||
|
quick-xml = ">=0.29, <0.32" |
||||||
|
rand = "0.8" |
||||||
|
rayon-core = "1.11" |
||||||
|
regex = "1.7" |
||||||
|
sha1 = "0.10" |
||||||
|
sha2 = "0.10" |
||||||
|
siphasher = ">=0.3, <2.0" |
||||||
|
text-diff = "0.4" |
||||||
|
thiserror = "1.0.50" |
||||||
|
time = "0.3" |
||||||
|
tokio = "1.29" |
||||||
|
url = "2.4" |
||||||
|
wasm-bindgen = "0.2.83" |
||||||
|
zstd = ">=0.12, <0.14" |
||||||
|
|
||||||
|
# Internal dependencies |
||||||
|
oxigraph = { version = "=0.4.0-alpha.7-dev", path = "lib/oxigraph" } |
||||||
|
oxrdf = { version = "=0.2.0-alpha.4", path = "lib/oxrdf" } |
||||||
|
oxrdfio = { version = "=0.1.0-alpha.5", path = "lib/oxrdfio" } |
||||||
|
oxrdfxml = { version = "=0.1.0-alpha.5", path = "lib/oxrdfxml" } |
||||||
|
oxsdatatypes = { version = "=0.2.0-alpha.1", path = "lib/oxsdatatypes" } |
||||||
|
oxttl = { version = "=0.1.0-alpha.5", path = "lib/oxttl" } |
||||||
|
sparesults = { version = "=0.2.0-alpha.4", path = "lib/sparesults" } |
||||||
|
spargebra = { version = "=0.3.0-alpha.4", path = "lib/spargebra" } |
||||||
|
sparopt = { version = "=0.1.0-alpha.5-dev", path = "lib/sparopt" } |
||||||
|
|
||||||
|
[workspace.lints.rust] |
||||||
|
absolute_paths_not_starting_with_crate = "warn" |
||||||
|
elided_lifetimes_in_paths = "warn" |
||||||
|
explicit_outlives_requirements = "warn" |
||||||
|
let_underscore_drop = "warn" |
||||||
|
macro_use_extern_crate = "warn" |
||||||
|
# TODO missing_docs = "warn" |
||||||
|
trivial_casts = "warn" |
||||||
|
trivial_numeric_casts = "warn" |
||||||
|
unsafe_code = "warn" |
||||||
|
unused_import_braces = "warn" |
||||||
|
unused_lifetimes = "warn" |
||||||
|
unused_macro_rules = "warn" |
||||||
|
unused_qualifications = "warn" |
||||||
|
|
||||||
|
[workspace.lints.clippy] |
||||||
|
allow_attributes = "warn" |
||||||
|
allow_attributes_without_reason = "warn" |
||||||
|
as_underscore = "warn" |
||||||
|
assertions_on_result_states = "warn" |
||||||
|
bool_to_int_with_if = "warn" |
||||||
|
borrow_as_ptr = "warn" |
||||||
|
case_sensitive_file_extension_comparisons = "warn" |
||||||
|
cast_lossless = "warn" |
||||||
|
cast_possible_truncation = "warn" |
||||||
|
cast_possible_wrap = "warn" |
||||||
|
cast_precision_loss = "warn" |
||||||
|
cast_ptr_alignment = "warn" |
||||||
|
cast_sign_loss = "warn" |
||||||
|
checked_conversions = "warn" |
||||||
|
clone_on_ref_ptr = "warn" |
||||||
|
cloned_instead_of_copied = "warn" |
||||||
|
copy_iterator = "warn" |
||||||
|
create_dir = "warn" |
||||||
|
dbg_macro = "warn" |
||||||
|
decimal_literal_representation = "warn" |
||||||
|
default_trait_access = "warn" |
||||||
|
default_union_representation = "warn" |
||||||
|
deref_by_slicing = "warn" |
||||||
|
disallowed_script_idents = "warn" |
||||||
|
doc_link_with_quotes = "warn" |
||||||
|
empty_drop = "warn" |
||||||
|
empty_enum = "warn" |
||||||
|
empty_structs_with_brackets = "warn" |
||||||
|
enum_glob_use = "warn" |
||||||
|
error_impl_error = "warn" |
||||||
|
exit = "warn" |
||||||
|
expect_used = "warn" |
||||||
|
expl_impl_clone_on_copy = "warn" |
||||||
|
explicit_deref_methods = "warn" |
||||||
|
explicit_into_iter_loop = "warn" |
||||||
|
explicit_iter_loop = "warn" |
||||||
|
filetype_is_file = "warn" |
||||||
|
filter_map_next = "warn" |
||||||
|
flat_map_option = "warn" |
||||||
|
fn_params_excessive_bools = "warn" |
||||||
|
fn_to_numeric_cast_any = "warn" |
||||||
|
format_push_string = "warn" |
||||||
|
from_iter_instead_of_collect = "warn" |
||||||
|
get_unwrap = "warn" |
||||||
|
host_endian_bytes = "warn" |
||||||
|
if_not_else = "warn" |
||||||
|
if_then_some_else_none = "warn" |
||||||
|
ignored_unit_patterns = "warn" |
||||||
|
implicit_clone = "warn" |
||||||
|
implicit_hasher = "warn" |
||||||
|
inconsistent_struct_constructor = "warn" |
||||||
|
index_refutable_slice = "warn" |
||||||
|
inefficient_to_string = "warn" |
||||||
|
infinite_loop = "warn" |
||||||
|
inline_always = "warn" |
||||||
|
inline_asm_x86_att_syntax = "warn" |
||||||
|
inline_asm_x86_intel_syntax = "warn" |
||||||
|
into_iter_without_iter = "warn" |
||||||
|
invalid_upcast_comparisons = "warn" |
||||||
|
items_after_statements = "warn" |
||||||
|
iter_not_returning_iterator = "warn" |
||||||
|
iter_without_into_iter = "warn" |
||||||
|
large_digit_groups = "warn" |
||||||
|
large_futures = "warn" |
||||||
|
large_include_file = "warn" |
||||||
|
large_stack_arrays = "warn" |
||||||
|
large_types_passed_by_value = "warn" |
||||||
|
let_underscore_must_use = "warn" |
||||||
|
let_underscore_untyped = "warn" |
||||||
|
linkedlist = "warn" |
||||||
|
lossy_float_literal = "warn" |
||||||
|
macro_use_imports = "warn" |
||||||
|
manual_assert = "warn" |
||||||
|
manual_instant_elapsed = "warn" |
||||||
|
manual_let_else = "warn" |
||||||
|
manual_ok_or = "warn" |
||||||
|
manual_string_new = "warn" |
||||||
|
many_single_char_names = "warn" |
||||||
|
map_unwrap_or = "warn" |
||||||
|
match_bool = "warn" |
||||||
|
match_on_vec_items = "warn" |
||||||
|
match_same_arms = "warn" |
||||||
|
match_wild_err_arm = "warn" |
||||||
|
match_wildcard_for_single_variants = "warn" |
||||||
|
maybe_infinite_iter = "warn" |
||||||
|
mem_forget = "warn" |
||||||
|
mismatching_type_param_order = "warn" |
||||||
|
missing_assert_message = "warn" |
||||||
|
missing_asserts_for_indexing = "warn" |
||||||
|
missing_fields_in_debug = "warn" |
||||||
|
multiple_inherent_impl = "warn" |
||||||
|
mut_mut = "warn" |
||||||
|
mutex_atomic = "warn" |
||||||
|
naive_bytecount = "warn" |
||||||
|
needless_bitwise_bool = "warn" |
||||||
|
needless_continue = "warn" |
||||||
|
needless_for_each = "warn" |
||||||
|
needless_pass_by_value = "warn" |
||||||
|
needless_raw_string_hashes = "warn" |
||||||
|
needless_raw_strings = "warn" |
||||||
|
negative_feature_names = "warn" |
||||||
|
no_effect_underscore_binding = "warn" |
||||||
|
no_mangle_with_rust_abi = "warn" |
||||||
|
non_ascii_literal = "warn" |
||||||
|
panic = "warn" |
||||||
|
panic_in_result_fn = "warn" |
||||||
|
partial_pub_fields = "warn" |
||||||
|
print_stderr = "warn" |
||||||
|
print_stdout = "warn" |
||||||
|
ptr_as_ptr = "warn" |
||||||
|
ptr_cast_constness = "warn" |
||||||
|
pub_without_shorthand = "warn" |
||||||
|
range_minus_one = "warn" |
||||||
|
range_plus_one = "warn" |
||||||
|
rc_buffer = "warn" |
||||||
|
rc_mutex = "warn" |
||||||
|
redundant_closure_for_method_calls = "warn" |
||||||
|
redundant_else = "warn" |
||||||
|
redundant_feature_names = "warn" |
||||||
|
redundant_type_annotations = "warn" |
||||||
|
ref_binding_to_reference = "warn" |
||||||
|
ref_option_ref = "warn" |
||||||
|
ref_patterns = "warn" |
||||||
|
rest_pat_in_fully_bound_structs = "warn" |
||||||
|
return_self_not_must_use = "warn" |
||||||
|
same_functions_in_if_condition = "warn" |
||||||
|
same_name_method = "warn" |
||||||
|
semicolon_inside_block = "warn" |
||||||
|
shadow_same = "warn" |
||||||
|
should_panic_without_expect = "warn" |
||||||
|
single_match_else = "warn" |
||||||
|
stable_sort_primitive = "warn" |
||||||
|
str_to_string = "warn" |
||||||
|
string_add = "warn" |
||||||
|
string_add_assign = "warn" |
||||||
|
string_lit_chars_any = "warn" |
||||||
|
string_to_string = "warn" |
||||||
|
struct_excessive_bools = "warn" |
||||||
|
struct_field_names = "warn" |
||||||
|
suspicious_xor_used_as_pow = "warn" |
||||||
|
tests_outside_test_module = "warn" |
||||||
|
todo = "warn" |
||||||
|
transmute_ptr_to_ptr = "warn" |
||||||
|
trivially_copy_pass_by_ref = "warn" |
||||||
|
try_err = "warn" |
||||||
|
unchecked_duration_subtraction = "warn" |
||||||
|
undocumented_unsafe_blocks = "warn" |
||||||
|
unicode_not_nfc = "warn" |
||||||
|
unimplemented = "warn" |
||||||
|
uninlined_format_args = "warn" |
||||||
|
unnecessary_box_returns = "warn" |
||||||
|
unnecessary_join = "warn" |
||||||
|
unnecessary_safety_comment = "warn" |
||||||
|
unnecessary_safety_doc = "warn" |
||||||
|
unnecessary_self_imports = "warn" |
||||||
|
unnecessary_wraps = "warn" |
||||||
|
unneeded_field_pattern = "warn" |
||||||
|
unnested_or_patterns = "warn" |
||||||
|
unreadable_literal = "warn" |
||||||
|
unsafe_derive_deserialize = "warn" |
||||||
|
unseparated_literal_suffix = "warn" |
||||||
|
unused_async = "warn" |
||||||
|
unused_self = "warn" |
||||||
|
unwrap_in_result = "warn" |
||||||
|
use_debug = "warn" |
||||||
|
used_underscore_binding = "warn" |
||||||
|
verbose_bit_mask = "warn" |
||||||
|
verbose_file_reads = "warn" |
||||||
|
wildcard_dependencies = "warn" |
||||||
|
zero_sized_map_values = "warn" |
||||||
|
|
||||||
[profile.release] |
[profile.release] |
||||||
lto = true |
lto = true |
||||||
codegen-units = 1 |
codegen-units = 1 |
||||||
|
strip = "debuginfo" |
||||||
|
|
||||||
|
[profile.release.package.oxigraph-js] |
||||||
|
codegen-units = 1 |
||||||
|
opt-level = "z" |
||||||
|
strip = "debuginfo" |
||||||
|
@ -0,0 +1,49 @@ |
|||||||
|
#!/usr/bin/env bash |
||||||
|
|
||||||
|
DATASET_SIZE=100000 |
||||||
|
PARALLELISM=16 |
||||||
|
VERSION="4.2.2" |
||||||
|
TOMCAT_VERSION="9.0.71" |
||||||
|
|
||||||
|
set -eu |
||||||
|
wget -nc -O "rdf4j-${VERSION}.zip" "https://www.eclipse.org/downloads/download.php?file=/rdf4j/eclipse-rdf4j-${VERSION}-sdk.zip&mirror_id=1" |
||||||
|
wget -nc -O "tomcat-${TOMCAT_VERSION}.zip" "https://dlcdn.apache.org/tomcat/tomcat-9/v${TOMCAT_VERSION}/bin/apache-tomcat-${TOMCAT_VERSION}.zip" |
||||||
|
cd bsbm-tools || exit |
||||||
|
./generate -fc -pc ${DATASET_SIZE} -s nt -fn "explore-${DATASET_SIZE}" -ud -ufn "explore-update-${DATASET_SIZE}" |
||||||
|
wget -nc -O "rdf4j-${VERSION}.zip" "https://www.eclipse.org/downloads/download.php?file=/rdf4j/eclipse-rdf4j-${VERSION}-sdk.zip&mirror_id=1" |
||||||
|
unzip ../"rdf4j-${VERSION}.zip" |
||||||
|
unzip ../"tomcat-${TOMCAT_VERSION}.zip" |
||||||
|
CATALINA_HOME="$(pwd)/apache-tomcat-${TOMCAT_VERSION}" |
||||||
|
export CATALINA_HOME |
||||||
|
export JAVA_OPTS="-Dorg.eclipse.rdf4j.appdata.basedir=${CATALINA_HOME}/rdf4j" |
||||||
|
cp "eclipse-rdf4j-${VERSION}"/war/rdf4j-server.war "${CATALINA_HOME}"/webapps/ |
||||||
|
chmod +x "${CATALINA_HOME}"/bin/*.sh |
||||||
|
"${CATALINA_HOME}"/bin/startup.sh |
||||||
|
sleep 30 |
||||||
|
curl -f -X PUT http://localhost:8080/rdf4j-server/repositories/bsbm -H 'Content-Type:text/turtle' -d ' |
||||||
|
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>. |
||||||
|
@prefix rep: <http://www.openrdf.org/config/repository#>. |
||||||
|
@prefix sr: <http://www.openrdf.org/config/repository/sail#>. |
||||||
|
@prefix sail: <http://www.openrdf.org/config/sail#>. |
||||||
|
|
||||||
|
[] a rep:Repository ; |
||||||
|
rep:repositoryID "bsbm" ; |
||||||
|
rdfs:label "BSBM" ; |
||||||
|
rep:repositoryImpl [ |
||||||
|
rep:repositoryType "openrdf:SailRepository" ; |
||||||
|
sr:sailImpl [ |
||||||
|
sail:sailType "rdf4j:LmdbStore" |
||||||
|
] |
||||||
|
] . |
||||||
|
' |
||||||
|
sleep 10 |
||||||
|
curl -f -X PUT -H 'Content-Type:application/n-triples' -T "explore-${DATASET_SIZE}.nt" http://localhost:8080/rdf4j-server/repositories/bsbm/statements |
||||||
|
./testdriver -mt ${PARALLELISM} -ucf usecases/explore/sparql.txt -o "../bsbm.explore.rdf4j-lmdb.${VERSION}.${DATASET_SIZE}.${PARALLELISM}.xml" http://localhost:8080/rdf4j-server/repositories/bsbm |
||||||
|
./testdriver -mt ${PARALLELISM} -ucf usecases/exploreAndUpdate/sparql.txt -o "../bsbm.exploreAndUpdate.rdf4j-lmdb.${VERSION}.${DATASET_SIZE}.${PARALLELISM}.xml" http://localhost:8080/rdf4j-server/repositories/bsbm -u http://localhost:8080/rdf4j-server/repositories/bsbm/statements -udataset "explore-update-${DATASET_SIZE}.nt" |
||||||
|
#./testdriver -mt ${PARALLELISM} -ucf usecases/businessIntelligence/sparql.txt -o "../bsbm.businessIntelligence.rdf4j-lmdb.${VERSION}.${DATASET_SIZE}.${PARALLELISM}.xml" http://localhost:8080/rdf4j-server/repositories/bsbm |
||||||
|
"${CATALINA_HOME}"/bin/shutdown.sh |
||||||
|
rm -f "explore-${DATASET_SIZE}.nt" |
||||||
|
rm -f "explore-update-${DATASET_SIZE}.nt" |
||||||
|
rm -rf td_data |
||||||
|
rm -rf "eclipse-rdf4j-${VERSION}" |
||||||
|
rm -rf "apache-tomcat-${TOMCAT_VERSION}" |
@ -0,0 +1,63 @@ |
|||||||
|
""" |
||||||
|
Converts a SPARQL query JSON explanation file to a flamegraph. |
||||||
|
Usage: python explanation_to_flamegraph.py explanation.json flamegraph.svg |
||||||
|
""" |
||||||
|
import json |
||||||
|
import subprocess |
||||||
|
from argparse import ArgumentParser |
||||||
|
from pathlib import Path |
||||||
|
from shutil import which |
||||||
|
from tempfile import NamedTemporaryFile |
||||||
|
|
||||||
|
parser = ArgumentParser( |
||||||
|
prog='OxigraphFlamegraph', |
||||||
|
description='Builds a flamegraph from the Oxigraph query explanation JSON format', |
||||||
|
epilog='Text at the bottom of help') |
||||||
|
parser.add_argument('json_explanation', type=Path) |
||||||
|
parser.add_argument('flamegraph_svg', type=Path) |
||||||
|
args = parser.parse_args() |
||||||
|
|
||||||
|
|
||||||
|
def trace_line(label: str, value: float): |
||||||
|
return f"{label} {int(value * 1_000_000)}" |
||||||
|
|
||||||
|
|
||||||
|
with args.json_explanation.open('rt') as fp: |
||||||
|
explanation = json.load(fp) |
||||||
|
trace = [] |
||||||
|
if "parsing duration in seconds" in explanation: |
||||||
|
trace.append(trace_line("parsing", explanation['parsing duration in seconds'])) |
||||||
|
if "planning duration in seconds" in explanation: |
||||||
|
trace.append(trace_line("planning", explanation['planning duration in seconds'])) |
||||||
|
already_used_names = {} |
||||||
|
|
||||||
|
|
||||||
|
def add_to_trace(node, path): |
||||||
|
path = f"{path};{node['name'].replace(' ', '`')}" |
||||||
|
if path in already_used_names: |
||||||
|
already_used_names[path] += 1 |
||||||
|
path = f"{path}`{already_used_names[path]}" |
||||||
|
else: |
||||||
|
already_used_names[path] = 0 |
||||||
|
samples = node['duration in seconds'] - sum(child['duration in seconds'] for child in node.get("children", ())) |
||||||
|
if int(samples * 1_000_000) > 0: |
||||||
|
trace.append(trace_line(path, samples)) |
||||||
|
for i, child in enumerate(node.get("children", ())): |
||||||
|
add_to_trace(child, path) |
||||||
|
|
||||||
|
|
||||||
|
add_to_trace(explanation["plan"], 'eval') |
||||||
|
inferno = which('inferno-flamegraph') |
||||||
|
flamegraph_pl = which('flamegraph.pl') |
||||||
|
if inferno: |
||||||
|
args.flamegraph_svg.write_text( |
||||||
|
subprocess.run([inferno], input='\n'.join(trace), stdout=subprocess.PIPE, text=True).stdout) |
||||||
|
elif flamegraph_pl: |
||||||
|
with NamedTemporaryFile('w+t') as fp: |
||||||
|
fp.write('\n'.join(trace)) |
||||||
|
fp.flush() |
||||||
|
args.flamegraph_svg.write_text( |
||||||
|
subprocess.run([flamegraph_pl, fp.name], stdout=subprocess.PIPE, text=True).stdout) |
||||||
|
else: |
||||||
|
raise Exception( |
||||||
|
'This script requires either the inferno-flamegraph from https://github.com/jonhoo/inferno either the flamegraph.pl script from https://github.com/brendangregg/FlameGraph to be installed and be in $PATH.') |
@ -0,0 +1,52 @@ |
|||||||
|
""" |
||||||
|
Converts a SPARQL query JSON explanation file to a tracing event file compatible with Chrome. |
||||||
|
Usage: python explanation_to_trace.py explanation.json trace.json |
||||||
|
""" |
||||||
|
import json |
||||||
|
from argparse import ArgumentParser |
||||||
|
from pathlib import Path |
||||||
|
|
||||||
|
parser = ArgumentParser( |
||||||
|
prog='OxigraphTracing', |
||||||
|
description='Builds a Trace Event Format file from the Oxigraph query explanation JSON format') |
||||||
|
parser.add_argument('json_explanation', type=Path) |
||||||
|
parser.add_argument('json_trace_event', type=Path) |
||||||
|
args = parser.parse_args() |
||||||
|
|
||||||
|
with args.json_explanation.open('rt') as fp: |
||||||
|
explanation = json.load(fp) |
||||||
|
trace = [] |
||||||
|
|
||||||
|
|
||||||
|
def trace_element(name: str, cat: str, start_s: float, duration_s: float): |
||||||
|
return { |
||||||
|
"name": name, |
||||||
|
"cat": cat, |
||||||
|
"ph": "X", |
||||||
|
"ts": int(start_s * 1_000_000), |
||||||
|
"dur": int(duration_s * 1_000_000), |
||||||
|
"pid": 1 |
||||||
|
} |
||||||
|
|
||||||
|
|
||||||
|
def add_to_trace(node, path, start_time: float): |
||||||
|
path = f"{path};{node['name'].replace(' ', '`')}" |
||||||
|
trace.append(trace_element(node["name"], node["name"].split("(")[0], start_time, node["duration in seconds"])) |
||||||
|
for child in node.get("children", ()): |
||||||
|
add_to_trace(child, path, start_time) |
||||||
|
start_time += child["duration in seconds"] |
||||||
|
|
||||||
|
|
||||||
|
current_time = 0 |
||||||
|
if "parsing duration in seconds" in explanation: |
||||||
|
d = explanation["parsing duration in seconds"] |
||||||
|
trace.append(trace_element(f"parsing", "parsing", current_time, d)) |
||||||
|
current_time += d |
||||||
|
if "planning duration in seconds" in explanation: |
||||||
|
d = explanation["planning duration in seconds"] |
||||||
|
trace.append(trace_element(f"planning", "planning", current_time, d)) |
||||||
|
current_time += d |
||||||
|
add_to_trace(explanation["plan"], 'eval', current_time) |
||||||
|
|
||||||
|
with args.json_trace_event.open("wt") as fp: |
||||||
|
json.dump(trace, fp) |
@ -1,4 +1,4 @@ |
|||||||
avoid-breaking-exported-api = true |
avoid-breaking-exported-api = false |
||||||
cognitive-complexity-threshold = 50 |
cognitive-complexity-threshold = 50 |
||||||
too-many-arguments-threshold = 10 |
too-many-arguments-threshold = 10 |
||||||
type-complexity-threshold = 500 |
type-complexity-threshold = 500 |
After Width: | Height: | Size: 4.6 KiB |
@ -0,0 +1,35 @@ |
|||||||
|
+------------------+ +----------------+ +-----------------+ |
||||||
|
+ oxigraph CLI {r} + + pyoxigraph {p} + + oxigraph JS {j} + |
||||||
|
+------------------+ +----------------+ +-----------------+ |
||||||
|
|
||||||
|
+---------------------------------------------------------------------------+ |
||||||
|
+ oxigraph (Rust) {r} + |
||||||
|
+---------------------------------------------------------------------------+ |
||||||
|
|
||||||
|
+----------------------------+ +-------------+ |
||||||
|
+ oxrdfio {r} + + sparopt {r} + |
||||||
|
+----------------------------+ +-------------+ |
||||||
|
|
||||||
|
+-----------+ +--------------+ +-----------------+ +----------------+ |
||||||
|
+ oxttl {r} + + oxrdfxml {r} + + spargebra {r} + + sparesults {r} + |
||||||
|
+-----------+ +--------------+ +-----------------+ +----------------+ |
||||||
|
|
||||||
|
+-----------------------------------------------------------------------+ |
||||||
|
+ oxrdf {r} + |
||||||
|
+-----------------------------------------------------------------------+ |
||||||
|
|
||||||
|
+------------------+ |
||||||
|
+ oxsdatatypes {r} + |
||||||
|
+------------------+ |
||||||
|
|
||||||
|
|
||||||
|
# Legend: |
||||||
|
r = { |
||||||
|
fill: papayawhip; |
||||||
|
} |
||||||
|
p = { |
||||||
|
fill: lightyellow; |
||||||
|
} |
||||||
|
j = { |
||||||
|
fill: lightgreen; |
||||||
|
} |
@ -0,0 +1,28 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxttl::N3Parser; |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
let mut quads = Vec::new(); |
||||||
|
let mut parser = N3Parser::new() |
||||||
|
.with_base_iri("http://example.com/") |
||||||
|
.unwrap() |
||||||
|
.parse(); |
||||||
|
for chunk in data.split(|c| *c == 0xFF) { |
||||||
|
parser.extend_from_slice(chunk); |
||||||
|
while let Some(result) = parser.read_next() { |
||||||
|
if let Ok(quad) = result { |
||||||
|
quads.push(quad); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
parser.end(); |
||||||
|
while let Some(result) = parser.read_next() { |
||||||
|
if let Ok(quad) = result { |
||||||
|
quads.push(quad); |
||||||
|
} |
||||||
|
} |
||||||
|
assert!(parser.is_end()); |
||||||
|
//TODO: serialize
|
||||||
|
}); |
@ -0,0 +1,84 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxrdf::Quad; |
||||||
|
use oxttl::{NQuadsParser, NQuadsSerializer}; |
||||||
|
|
||||||
|
fn parse<'a>( |
||||||
|
chunks: impl IntoIterator<Item = &'a [u8]>, |
||||||
|
unchecked: bool, |
||||||
|
) -> (Vec<Quad>, Vec<String>) { |
||||||
|
let mut quads = Vec::new(); |
||||||
|
let mut errors = Vec::new(); |
||||||
|
let mut parser = NQuadsParser::new().with_quoted_triples(); |
||||||
|
if unchecked { |
||||||
|
parser = parser.unchecked(); |
||||||
|
} |
||||||
|
let mut reader = parser.parse(); |
||||||
|
for chunk in chunks { |
||||||
|
reader.extend_from_slice(chunk); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
reader.end(); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
assert!(reader.is_end()); |
||||||
|
(quads, errors) |
||||||
|
} |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
// We parse with splitting
|
||||||
|
let (quads, errors) = parse(data.split(|c| *c == 0xFF), false); |
||||||
|
// We parse without splitting
|
||||||
|
let (quads_without_split, errors_without_split) = parse( |
||||||
|
[data |
||||||
|
.iter() |
||||||
|
.copied() |
||||||
|
.filter(|c| *c != 0xFF) |
||||||
|
.collect::<Vec<_>>() |
||||||
|
.as_slice()], |
||||||
|
false, |
||||||
|
); |
||||||
|
assert_eq!(quads, quads_without_split); |
||||||
|
assert_eq!(errors, errors_without_split); |
||||||
|
|
||||||
|
// We test also unchecked if valid
|
||||||
|
if errors.is_empty() { |
||||||
|
let (quads_unchecked, errors_unchecked) = parse(data.split(|c| *c == 0xFF), true); |
||||||
|
assert!(errors_unchecked.is_empty()); |
||||||
|
assert_eq!(quads, quads_unchecked); |
||||||
|
} |
||||||
|
|
||||||
|
// We serialize
|
||||||
|
let mut writer = NQuadsSerializer::new().serialize_to_write(Vec::new()); |
||||||
|
for quad in &quads { |
||||||
|
writer.write_quad(quad).unwrap(); |
||||||
|
} |
||||||
|
let new_serialization = writer.finish(); |
||||||
|
|
||||||
|
// We parse the serialization
|
||||||
|
let new_quads = NQuadsParser::new() |
||||||
|
.with_quoted_triples() |
||||||
|
.parse_read(new_serialization.as_slice()) |
||||||
|
.collect::<Result<Vec<_>, _>>() |
||||||
|
.map_err(|e| { |
||||||
|
format!( |
||||||
|
"Error on {:?} from {quads:?} based on {:?}: {e}", |
||||||
|
String::from_utf8_lossy(&new_serialization), |
||||||
|
String::from_utf8_lossy(data) |
||||||
|
) |
||||||
|
}) |
||||||
|
.unwrap(); |
||||||
|
|
||||||
|
// We check the roundtrip has not changed anything
|
||||||
|
assert_eq!(new_quads, quads); |
||||||
|
}); |
@ -0,0 +1,35 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxrdfxml::{RdfXmlParser, RdfXmlSerializer}; |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
// We parse
|
||||||
|
let triples = RdfXmlParser::new() |
||||||
|
.parse_read(data) |
||||||
|
.flatten() |
||||||
|
.collect::<Vec<_>>(); |
||||||
|
|
||||||
|
// We serialize
|
||||||
|
let mut writer = RdfXmlSerializer::new().serialize_to_write(Vec::new()); |
||||||
|
for triple in &triples { |
||||||
|
writer.write_triple(triple).unwrap(); |
||||||
|
} |
||||||
|
let new_serialization = writer.finish().unwrap(); |
||||||
|
|
||||||
|
// We parse the serialization
|
||||||
|
let new_triples = RdfXmlParser::new() |
||||||
|
.parse_read(new_serialization.as_slice()) |
||||||
|
.collect::<Result<Vec<_>, _>>() |
||||||
|
.map_err(|e| { |
||||||
|
format!( |
||||||
|
"Error on {:?} from {triples:?} based on {:?}: {e}", |
||||||
|
String::from_utf8_lossy(&new_serialization), |
||||||
|
String::from_utf8_lossy(data) |
||||||
|
) |
||||||
|
}) |
||||||
|
.unwrap(); |
||||||
|
|
||||||
|
// We check the roundtrip has not changed anything
|
||||||
|
assert_eq!(new_triples, triples); |
||||||
|
}); |
@ -0,0 +1,61 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxigraph::io::RdfFormat; |
||||||
|
use oxigraph::sparql::{Query, QueryOptions, QueryResults, QuerySolutionIter}; |
||||||
|
use oxigraph::store::Store; |
||||||
|
use std::sync::OnceLock; |
||||||
|
|
||||||
|
fuzz_target!(|data: sparql_smith::Query| { |
||||||
|
static STORE: OnceLock<Store> = OnceLock::new(); |
||||||
|
let store = STORE.get_or_init(|| { |
||||||
|
let store = Store::new().unwrap(); |
||||||
|
store |
||||||
|
.load_from_read(RdfFormat::TriG, sparql_smith::DATA_TRIG.as_bytes()) |
||||||
|
.unwrap(); |
||||||
|
store |
||||||
|
}); |
||||||
|
|
||||||
|
let query_str = data.to_string(); |
||||||
|
if let Ok(query) = Query::parse(&query_str, None) { |
||||||
|
let options = QueryOptions::default(); |
||||||
|
let with_opt = store.query_opt(query.clone(), options.clone()).unwrap(); |
||||||
|
let without_opt = store |
||||||
|
.query_opt(query, options.without_optimizations()) |
||||||
|
.unwrap(); |
||||||
|
match (with_opt, without_opt) { |
||||||
|
(QueryResults::Solutions(with_opt), QueryResults::Solutions(without_opt)) => { |
||||||
|
assert_eq!( |
||||||
|
query_solutions_key(with_opt, query_str.contains(" REDUCED ")), |
||||||
|
query_solutions_key(without_opt, query_str.contains(" REDUCED ")) |
||||||
|
) |
||||||
|
} |
||||||
|
(QueryResults::Graph(_), QueryResults::Graph(_)) => unimplemented!(), |
||||||
|
(QueryResults::Boolean(with_opt), QueryResults::Boolean(without_opt)) => { |
||||||
|
assert_eq!(with_opt, without_opt) |
||||||
|
} |
||||||
|
_ => panic!("Different query result types"), |
||||||
|
} |
||||||
|
} |
||||||
|
}); |
||||||
|
|
||||||
|
fn query_solutions_key(iter: QuerySolutionIter, is_reduced: bool) -> String { |
||||||
|
// TODO: ordering
|
||||||
|
let mut b = iter |
||||||
|
.into_iter() |
||||||
|
.map(|t| { |
||||||
|
let mut b = t |
||||||
|
.unwrap() |
||||||
|
.iter() |
||||||
|
.map(|(var, val)| format!("{var}: {val}")) |
||||||
|
.collect::<Vec<_>>(); |
||||||
|
b.sort_unstable(); |
||||||
|
b.join(" ") |
||||||
|
}) |
||||||
|
.collect::<Vec<_>>(); |
||||||
|
b.sort_unstable(); |
||||||
|
if is_reduced { |
||||||
|
b.dedup(); |
||||||
|
} |
||||||
|
b.join("\n") |
||||||
|
} |
@ -1,10 +1,7 @@ |
|||||||
#![no_main] |
#![no_main] |
||||||
use libfuzzer_sys::fuzz_target; |
use libfuzzer_sys::fuzz_target; |
||||||
use spargebra::Query; |
use spargebra::Query; |
||||||
use std::str; |
|
||||||
|
|
||||||
fuzz_target!(|data: &[u8]| { |
fuzz_target!(|data: &str| { |
||||||
if let Ok(data) = str::from_utf8(data) { |
let _ = Query::parse(data, None); |
||||||
Query::parse(data, None); |
|
||||||
} |
|
||||||
}); |
}); |
||||||
|
@ -1,15 +1,6 @@ |
|||||||
#![no_main] |
#![no_main] |
||||||
use libfuzzer_sys::fuzz_target; |
use libfuzzer_sys::fuzz_target; |
||||||
use sparesults::{QueryResultsFormat, QueryResultsParser, QueryResultsReader}; |
use oxigraph_fuzz::result_format::fuzz_result_format; |
||||||
|
use sparesults::QueryResultsFormat; |
||||||
|
|
||||||
fuzz_target!(|data: &[u8]| { |
fuzz_target!(|data: &[u8]| fuzz_result_format(QueryResultsFormat::Json, data)); |
||||||
let parser = QueryResultsParser::from_format(QueryResultsFormat::Json); |
|
||||||
if let Ok(QueryResultsReader::Solutions(solutions)) = parser.read_results(data) { |
|
||||||
for s in solutions { |
|
||||||
if s.is_err() { |
|
||||||
// TODO: avoid infinite loop of errors
|
|
||||||
break; |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
}); |
|
||||||
|
@ -1,10 +1,6 @@ |
|||||||
#![no_main] |
#![no_main] |
||||||
use libfuzzer_sys::fuzz_target; |
use libfuzzer_sys::fuzz_target; |
||||||
use sparesults::{QueryResultsFormat, QueryResultsParser, QueryResultsReader}; |
use oxigraph_fuzz::result_format::fuzz_result_format; |
||||||
|
use sparesults::QueryResultsFormat; |
||||||
|
|
||||||
fuzz_target!(|data: &[u8]| { |
fuzz_target!(|data: &[u8]| fuzz_result_format(QueryResultsFormat::Tsv, data)); |
||||||
let parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv); |
|
||||||
if let Ok(QueryResultsReader::Solutions(solutions)) = parser.read_results(data) { |
|
||||||
for _ in solutions {} |
|
||||||
} |
|
||||||
}); |
|
||||||
|
@ -1,10 +1,6 @@ |
|||||||
#![no_main] |
#![no_main] |
||||||
use libfuzzer_sys::fuzz_target; |
use libfuzzer_sys::fuzz_target; |
||||||
use sparesults::{QueryResultsFormat, QueryResultsParser, QueryResultsReader}; |
use oxigraph_fuzz::result_format::fuzz_result_format; |
||||||
|
use sparesults::QueryResultsFormat; |
||||||
|
|
||||||
fuzz_target!(|data: &[u8]| { |
fuzz_target!(|data: &[u8]| fuzz_result_format(QueryResultsFormat::Xml, data)); |
||||||
let parser = QueryResultsParser::from_format(QueryResultsFormat::Xml); |
|
||||||
if let Ok(QueryResultsReader::Solutions(solutions)) = parser.read_results(data) { |
|
||||||
for _ in solutions {} |
|
||||||
} |
|
||||||
}); |
|
||||||
|
@ -0,0 +1,166 @@ |
|||||||
|
#![no_main] |
||||||
|
|
||||||
|
use libfuzzer_sys::fuzz_target; |
||||||
|
use oxrdf::graph::CanonicalizationAlgorithm; |
||||||
|
use oxrdf::{Dataset, GraphName, Quad, Subject, Term, Triple}; |
||||||
|
use oxttl::{TriGParser, TriGSerializer}; |
||||||
|
|
||||||
|
fn parse<'a>( |
||||||
|
chunks: impl IntoIterator<Item = &'a [u8]>, |
||||||
|
unchecked: bool, |
||||||
|
) -> (Vec<Quad>, Vec<String>, Vec<(String, String)>) { |
||||||
|
let mut quads = Vec::new(); |
||||||
|
let mut errors = Vec::new(); |
||||||
|
let mut parser = TriGParser::new() |
||||||
|
.with_quoted_triples() |
||||||
|
.with_base_iri("http://example.com/") |
||||||
|
.unwrap(); |
||||||
|
if unchecked { |
||||||
|
parser = parser.unchecked(); |
||||||
|
} |
||||||
|
let mut reader = parser.parse(); |
||||||
|
for chunk in chunks { |
||||||
|
reader.extend_from_slice(chunk); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
reader.end(); |
||||||
|
while let Some(result) = reader.read_next() { |
||||||
|
match result { |
||||||
|
Ok(quad) => quads.push(quad), |
||||||
|
Err(error) => errors.push(error.to_string()), |
||||||
|
} |
||||||
|
} |
||||||
|
assert!(reader.is_end()); |
||||||
|
( |
||||||
|
quads, |
||||||
|
errors, |
||||||
|
reader |
||||||
|
.prefixes() |
||||||
|
.map(|(k, v)| (k.to_owned(), v.to_owned())) |
||||||
|
.collect(), |
||||||
|
) |
||||||
|
} |
||||||
|
|
||||||
|
fn count_triple_blank_nodes(triple: &Triple) -> usize { |
||||||
|
(match &triple.subject { |
||||||
|
Subject::BlankNode(_) => 1, |
||||||
|
Subject::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) + (match &triple.object { |
||||||
|
Term::BlankNode(_) => 1, |
||||||
|
Term::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) |
||||||
|
} |
||||||
|
|
||||||
|
fn count_quad_blank_nodes(quad: &Quad) -> usize { |
||||||
|
(match &quad.subject { |
||||||
|
Subject::BlankNode(_) => 1, |
||||||
|
Subject::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) + (match &quad.object { |
||||||
|
Term::BlankNode(_) => 1, |
||||||
|
Term::Triple(t) => count_triple_blank_nodes(t), |
||||||
|
_ => 0, |
||||||
|
}) + usize::from(matches!(quad.graph_name, GraphName::BlankNode(_))) |
||||||
|
} |
||||||
|
|
||||||
|
fn serialize_quads(quads: &[Quad], prefixes: Vec<(String, String)>) -> Vec<u8> { |
||||||
|
let mut serializer = TriGSerializer::new(); |
||||||
|
for (prefix_name, prefix_iri) in prefixes { |
||||||
|
serializer = serializer.with_prefix(prefix_name, prefix_iri).unwrap(); |
||||||
|
} |
||||||
|
let mut writer = serializer.serialize_to_write(Vec::new()); |
||||||
|
for quad in quads { |
||||||
|
writer.write_quad(quad).unwrap(); |
||||||
|
} |
||||||
|
writer.finish().unwrap() |
||||||
|
} |
||||||
|
|
||||||
|
fuzz_target!(|data: &[u8]| { |
||||||
|
// We parse with splitting
|
||||||
|
let (quads, errors, prefixes) = parse(data.split(|c| *c == 0xFF), false); |
||||||
|
// We parse without splitting
|
||||||
|
let (quads_without_split, errors_without_split, _) = parse( |
||||||
|
[data |
||||||
|
.iter() |
||||||
|
.copied() |
||||||
|
.filter(|c| *c != 0xFF) |
||||||
|
.collect::<Vec<_>>() |
||||||
|
.as_slice()], |
||||||
|
false, |
||||||
|
); |
||||||
|
let (quads_unchecked, errors_unchecked, _) = parse(data.split(|c| *c == 0xFF), true); |
||||||
|
if errors.is_empty() { |
||||||
|
assert!(errors_unchecked.is_empty()); |
||||||
|
} |
||||||
|
|
||||||
|
let bnodes_count = quads.iter().map(count_quad_blank_nodes).sum::<usize>(); |
||||||
|
if bnodes_count == 0 { |
||||||
|
assert_eq!( |
||||||
|
quads, |
||||||
|
quads_without_split, |
||||||
|
"With split:\n{}\nWithout split:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_without_split, Vec::new())) |
||||||
|
); |
||||||
|
if errors.is_empty() { |
||||||
|
assert_eq!( |
||||||
|
quads, |
||||||
|
quads_unchecked, |
||||||
|
"Validating:\n{}\nUnchecked:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_unchecked, Vec::new())) |
||||||
|
); |
||||||
|
} |
||||||
|
} else if bnodes_count <= 4 { |
||||||
|
let mut dataset_with_split = quads.iter().collect::<Dataset>(); |
||||||
|
let mut dataset_without_split = quads_without_split.iter().collect::<Dataset>(); |
||||||
|
dataset_with_split.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||||
|
dataset_without_split.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||||
|
assert_eq!( |
||||||
|
dataset_with_split, |
||||||
|
dataset_without_split, |
||||||
|
"With split:\n{}\nWithout split:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_without_split, Vec::new())) |
||||||
|
); |
||||||
|
if errors.is_empty() { |
||||||
|
let mut dataset_unchecked = quads_unchecked.iter().collect::<Dataset>(); |
||||||
|
dataset_unchecked.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||||
|
assert_eq!( |
||||||
|
dataset_with_split, |
||||||
|
dataset_unchecked, |
||||||
|
"Validating:\n{}\nUnchecked:\n{}", |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||||
|
String::from_utf8_lossy(&serialize_quads(&quads_unchecked, Vec::new())) |
||||||
|
); |
||||||
|
} |
||||||
|
} |
||||||
|
assert_eq!(errors, errors_without_split); |
||||||
|
|
||||||
|
// We serialize
|
||||||
|
let new_serialization = serialize_quads(&quads, prefixes); |
||||||
|
|
||||||
|
// We parse the serialization
|
||||||
|
let new_quads = TriGParser::new() |
||||||
|
.with_quoted_triples() |
||||||
|
.parse_read(new_serialization.as_slice()) |
||||||
|
.collect::<Result<Vec<_>, _>>() |
||||||
|
.map_err(|e| { |
||||||
|
format!( |
||||||
|
"Error on {:?} from {quads:?} based on {:?}: {e}", |
||||||
|
String::from_utf8_lossy(&new_serialization), |
||||||
|
String::from_utf8_lossy(data) |
||||||
|
) |
||||||
|
}) |
||||||
|
.unwrap(); |
||||||
|
|
||||||
|
// We check the roundtrip has not changed anything
|
||||||
|
assert_eq!(new_quads, quads); |
||||||
|
}); |
@ -0,0 +1 @@ |
|||||||
|
pub mod result_format; |
@ -0,0 +1,63 @@ |
|||||||
|
use anyhow::Context; |
||||||
|
use sparesults::{ |
||||||
|
FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser, QueryResultsSerializer, |
||||||
|
}; |
||||||
|
|
||||||
|
pub fn fuzz_result_format(format: QueryResultsFormat, data: &[u8]) { |
||||||
|
let parser = QueryResultsParser::from_format(format); |
||||||
|
let serializer = QueryResultsSerializer::from_format(format); |
||||||
|
|
||||||
|
let Ok(reader) = parser.parse_read(data) else { |
||||||
|
return; |
||||||
|
}; |
||||||
|
match reader { |
||||||
|
FromReadQueryResultsReader::Solutions(solutions) => { |
||||||
|
let Ok(solutions) = solutions.collect::<Result<Vec<_>, _>>() else { |
||||||
|
return; |
||||||
|
}; |
||||||
|
|
||||||
|
// We try to write again
|
||||||
|
let mut writer = serializer |
||||||
|
.serialize_solutions_to_write( |
||||||
|
Vec::new(), |
||||||
|
solutions |
||||||
|
.first() |
||||||
|
.map_or_else(Vec::new, |s| s.variables().to_vec()), |
||||||
|
) |
||||||
|
.unwrap(); |
||||||
|
for solution in &solutions { |
||||||
|
writer.write(solution).unwrap(); |
||||||
|
} |
||||||
|
let serialized = String::from_utf8(writer.finish().unwrap()).unwrap(); |
||||||
|
|
||||||
|
// And to parse again
|
||||||
|
if let FromReadQueryResultsReader::Solutions(roundtrip_solutions) = parser |
||||||
|
.parse_read(serialized.as_bytes()) |
||||||
|
.with_context(|| format!("Parsing {serialized:?}")) |
||||||
|
.unwrap() |
||||||
|
{ |
||||||
|
assert_eq!( |
||||||
|
roundtrip_solutions |
||||||
|
.collect::<Result<Vec<_>, _>>() |
||||||
|
.with_context(|| format!("Parsing {serialized:?}")) |
||||||
|
.unwrap(), |
||||||
|
solutions |
||||||
|
) |
||||||
|
} |
||||||
|
} |
||||||
|
FromReadQueryResultsReader::Boolean(value) => { |
||||||
|
// We try to write again
|
||||||
|
let mut serialized = Vec::new(); |
||||||
|
serializer |
||||||
|
.serialize_boolean_to_write(&mut serialized, value) |
||||||
|
.unwrap(); |
||||||
|
|
||||||
|
// And to parse again
|
||||||
|
if let FromReadQueryResultsReader::Boolean(roundtrip_value) = |
||||||
|
parser.parse_read(serialized.as_slice()).unwrap() |
||||||
|
{ |
||||||
|
assert_eq!(roundtrip_value, value) |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
@ -1,23 +1,26 @@ |
|||||||
[package] |
[package] |
||||||
name = "oxigraph_js" |
name = "oxigraph-js" |
||||||
version = "0.4.0-alpha" |
version.workspace = true |
||||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
authors.workspace = true |
||||||
license = "MIT OR Apache-2.0" |
license.workspace = true |
||||||
readme = "README.md" |
readme = "README.md" |
||||||
keywords = ["RDF", "N-Triples", "Turtle", "RDF/XML", "SPARQL"] |
keywords = ["RDF", "N-Triples", "Turtle", "XML", "SPARQL"] |
||||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/js" |
repository = "https://github.com/oxigraph/oxigraph/tree/main/js" |
||||||
description = "JavaScript bindings of Oxigraph" |
description = "JavaScript bindings of Oxigraph" |
||||||
edition = "2021" |
edition.workspace = true |
||||||
|
rust-version.workspace = true |
||||||
|
publish = false |
||||||
|
|
||||||
[lib] |
[lib] |
||||||
crate-type = ["cdylib"] |
crate-type = ["cdylib"] |
||||||
name = "oxigraph" |
name = "oxigraph" |
||||||
|
doc = false |
||||||
|
|
||||||
[dependencies] |
[dependencies] |
||||||
oxigraph = { version = "0.4.0-alpha", path="../lib" } |
console_error_panic_hook.workspace = true |
||||||
wasm-bindgen = "0.2" |
js-sys.workspace = true |
||||||
js-sys = "0.3" |
oxigraph = { workspace = true, features = ["js"] } |
||||||
console_error_panic_hook = "0.1" |
wasm-bindgen.workspace = true |
||||||
|
|
||||||
[dev-dependencies] |
[lints] |
||||||
wasm-bindgen-test = "0.3" |
workspace = true |
||||||
|
@ -0,0 +1,14 @@ |
|||||||
|
{ |
||||||
|
"$schema": "https://biomejs.dev/schemas/1.0.0/schema.json", |
||||||
|
"formatter": { |
||||||
|
"indentStyle": "space", |
||||||
|
"indentWidth": 4, |
||||||
|
"lineWidth": 100 |
||||||
|
}, |
||||||
|
"linter": { |
||||||
|
"ignore": ["pkg"] |
||||||
|
}, |
||||||
|
"organizeImports": { |
||||||
|
"enabled": true |
||||||
|
} |
||||||
|
} |
@ -1,31 +1,19 @@ |
|||||||
#! /usr/bin/env node
|
#! /usr/bin/env node
|
||||||
|
|
||||||
const fs = require('fs') |
const fs = require("node:fs"); |
||||||
|
const pkg = JSON.parse(fs.readFileSync("./pkg/package.json")); |
||||||
// We copy file to the new directory
|
pkg.name = "oxigraph"; |
||||||
fs.mkdirSync('pkg') |
pkg.main = "node.js"; |
||||||
for (const file of fs.readdirSync('./pkg-web')) { |
pkg.browser = "web.js"; |
||||||
fs.copyFileSync('./pkg-web/' + file, './pkg/' + file) |
pkg.files = ["*.{js,wasm,d.ts}"]; |
||||||
} |
pkg.homepage = "https://github.com/oxigraph/oxigraph/tree/main/js"; |
||||||
for (const file of fs.readdirSync('./pkg-node')) { |
|
||||||
fs.copyFileSync('./pkg-node/' + file, './pkg/' + file) |
|
||||||
} |
|
||||||
|
|
||||||
const pkg = JSON.parse(fs.readFileSync('./pkg/package.json')) |
|
||||||
pkg.name = 'oxigraph' |
|
||||||
pkg.main = 'node.js' |
|
||||||
pkg.browser = 'web.js' |
|
||||||
pkg.files = [ |
|
||||||
'*.{js,wasm,d.ts}' |
|
||||||
] |
|
||||||
pkg.homepage = 'https://github.com/oxigraph/oxigraph/tree/main/js' |
|
||||||
pkg.bugs = { |
pkg.bugs = { |
||||||
url: 'https://github.com/oxigraph/oxigraph/issues' |
url: "https://github.com/oxigraph/oxigraph/issues", |
||||||
} |
}; |
||||||
pkg.collaborators = undefined |
pkg.collaborators = undefined; |
||||||
pkg.repository = { |
pkg.repository = { |
||||||
type: 'git', |
type: "git", |
||||||
url: 'https://github.com/oxigraph/oxigraph.git', |
url: "https://github.com/oxigraph/oxigraph.git", |
||||||
directory: 'js' |
directory: "js", |
||||||
} |
}; |
||||||
fs.writeFileSync('./pkg/package.json', JSON.stringify(pkg, null, 2)) |
fs.writeFileSync("./pkg/package.json", JSON.stringify(pkg, null, 2)); |
||||||
|
File diff suppressed because it is too large
Load Diff
@ -1,21 +1,20 @@ |
|||||||
{ |
{ |
||||||
"name": "oxigraph_tests", |
"name": "oxigraph_tests", |
||||||
"description": "Oxigraph JS build and tests", |
"description": "Oxigraph JS build and tests", |
||||||
"private": true, |
"private": true, |
||||||
"devDependencies": { |
"devDependencies": { |
||||||
"mocha": "^10.0.0", |
"@biomejs/biome": "^1.0.0", |
||||||
"@rdfjs/data-model": "^2.0.1", |
"@rdfjs/data-model": "^2.0.1", |
||||||
"standard": "^17.0.0" |
"mocha": "^10.0.0" |
||||||
}, |
}, |
||||||
"scripts": { |
"scripts": { |
||||||
"test": "standard && wasm-pack build --debug --target nodejs && mocha", |
"fmt": "biome format . --write && biome check . --apply-unsafe && biome format . --write", |
||||||
"build": "rm -rf pkg && wasm-pack build --release --target web --out-name web && mv pkg pkg-web && wasm-pack build --release --target nodejs --out-name node && mv pkg pkg-node && node build_package.js && rm -r pkg-web && rm -r pkg-node", |
"test": "biome ci . && wasm-pack build --debug --target nodejs --weak-refs --reference-types && mocha", |
||||||
"release": "npm run build && npm publish ./pkg", |
"build": "wasm-pack build --release --target web --out-name web --weak-refs --reference-types && wasm-pack build --release --target nodejs --out-name node --weak-refs --reference-types && node build_package.js", |
||||||
"pack": "npm run build && npm pack ./pkg" |
"release": "npm run build && npm publish ./pkg", |
||||||
}, |
"pack": "npm run build && npm pack ./pkg" |
||||||
"standard": { |
}, |
||||||
"ignore": [ |
"standard": { |
||||||
"pkg*" |
"ignore": ["pkg*"] |
||||||
] |
} |
||||||
} |
|
||||||
} |
} |
||||||
|
@ -1,38 +1,52 @@ |
|||||||
/* global describe, it */ |
/* global describe, it */ |
||||||
|
|
||||||
import oxigraph from '../pkg/oxigraph.js' |
import assert from "node:assert"; |
||||||
import assert from 'assert' |
import runTests from "../node_modules/@rdfjs/data-model/test/index.js"; |
||||||
import runTests from '../node_modules/@rdfjs/data-model/test/index.js' |
import oxigraph from "../pkg/oxigraph.js"; |
||||||
|
|
||||||
runTests({ factory: oxigraph }) |
runTests({ factory: oxigraph }); |
||||||
|
|
||||||
describe('DataModel', function () { |
describe("DataModel", () => { |
||||||
describe('#toString()', function () { |
describe("#toString()", () => { |
||||||
it('namedNode().toString() should return SPARQL compatible syntax', function () { |
it("namedNode().toString() should return SPARQL compatible syntax", () => { |
||||||
assert.strictEqual('<http://example.com>', oxigraph.namedNode('http://example.com').toString()) |
assert.strictEqual( |
||||||
}) |
"<http://example.com>", |
||||||
|
oxigraph.namedNode("http://example.com").toString(), |
||||||
it('blankNode().toString() should return SPARQL compatible syntax', function () { |
); |
||||||
assert.strictEqual('_:a', oxigraph.blankNode('a').toString()) |
}); |
||||||
}) |
|
||||||
|
it("blankNode().toString() should return SPARQL compatible syntax", () => { |
||||||
it('literal().toString() should return SPARQL compatible syntax', function () { |
assert.strictEqual("_:a", oxigraph.blankNode("a").toString()); |
||||||
assert.strictEqual('"a\\"b"@en', oxigraph.literal('a"b', 'en').toString()) |
}); |
||||||
}) |
|
||||||
|
it("literal().toString() should return SPARQL compatible syntax", () => { |
||||||
it('defaultGraph().toString() should return SPARQL compatible syntax', function () { |
assert.strictEqual('"a\\"b"@en', oxigraph.literal('a"b', "en").toString()); |
||||||
assert.strictEqual('DEFAULT', oxigraph.defaultGraph().toString()) |
}); |
||||||
}) |
|
||||||
|
it("defaultGraph().toString() should return SPARQL compatible syntax", () => { |
||||||
it('variable().toString() should return SPARQL compatible syntax', function () { |
assert.strictEqual("DEFAULT", oxigraph.defaultGraph().toString()); |
||||||
assert.strictEqual('?a', oxigraph.variable('a').toString()) |
}); |
||||||
}) |
|
||||||
|
it("variable().toString() should return SPARQL compatible syntax", () => { |
||||||
it('quad().toString() should return SPARQL compatible syntax', function () { |
assert.strictEqual("?a", oxigraph.variable("a").toString()); |
||||||
assert.strictEqual( |
}); |
||||||
'<http://example.com/s> <http://example.com/p> <<<http://example.com/s1> <http://example.com/p1> <http://example.com/o1>>> <http://example.com/g>', |
|
||||||
oxigraph.quad(oxigraph.namedNode('http://example.com/s'), oxigraph.namedNode('http://example.com/p'), oxigraph.quad(oxigraph.namedNode('http://example.com/s1'), oxigraph.namedNode('http://example.com/p1'), oxigraph.namedNode('http://example.com/o1')), oxigraph.namedNode('http://example.com/g')).toString() |
it("quad().toString() should return SPARQL compatible syntax", () => { |
||||||
) |
assert.strictEqual( |
||||||
}) |
"<http://example.com/s> <http://example.com/p> <<<http://example.com/s1> <http://example.com/p1> <http://example.com/o1>>> <http://example.com/g>", |
||||||
}) |
oxigraph |
||||||
}) |
.quad( |
||||||
|
oxigraph.namedNode("http://example.com/s"), |
||||||
|
oxigraph.namedNode("http://example.com/p"), |
||||||
|
oxigraph.quad( |
||||||
|
oxigraph.namedNode("http://example.com/s1"), |
||||||
|
oxigraph.namedNode("http://example.com/p1"), |
||||||
|
oxigraph.namedNode("http://example.com/o1"), |
||||||
|
), |
||||||
|
oxigraph.namedNode("http://example.com/g"), |
||||||
|
) |
||||||
|
.toString(), |
||||||
|
); |
||||||
|
}); |
||||||
|
}); |
||||||
|
}); |
||||||
|
@ -1,161 +1,208 @@ |
|||||||
/* global describe, it */ |
/* global describe, it */ |
||||||
|
|
||||||
import { Store } from '../pkg/oxigraph.js' |
import assert from "node:assert"; |
||||||
import assert from 'assert' |
import dataModel from "@rdfjs/data-model"; |
||||||
import dataModel from '@rdfjs/data-model' |
import { Store } from "../pkg/oxigraph.js"; |
||||||
|
|
||||||
const ex = dataModel.namedNode('http://example.com') |
const ex = dataModel.namedNode("http://example.com"); |
||||||
const triple = dataModel.quad( |
const triple = dataModel.quad( |
||||||
dataModel.blankNode('s'), |
dataModel.blankNode("s"), |
||||||
dataModel.namedNode('http://example.com/p'), |
dataModel.namedNode("http://example.com/p"), |
||||||
dataModel.literal('o') |
dataModel.literal("o"), |
||||||
) |
); |
||||||
|
|
||||||
describe('Store', function () { |
describe("Store", () => { |
||||||
describe('#add()', function () { |
describe("#add()", () => { |
||||||
it('an added quad should be in the store', function () { |
it("an added quad should be in the store", () => { |
||||||
const store = new Store() |
const store = new Store(); |
||||||
store.add(dataModel.quad(ex, ex, triple)) |
store.add(dataModel.quad(ex, ex, triple)); |
||||||
assert(store.has(dataModel.quad(ex, ex, triple))) |
assert(store.has(dataModel.quad(ex, ex, triple))); |
||||||
}) |
}); |
||||||
}) |
}); |
||||||
|
|
||||||
describe('#delete()', function () { |
describe("#delete()", () => { |
||||||
it('an removed quad should not be in the store anymore', function () { |
it("an removed quad should not be in the store anymore", () => { |
||||||
const store = new Store([dataModel.quad(triple, ex, ex)]) |
const store = new Store([dataModel.quad(triple, ex, ex)]); |
||||||
assert(store.has(dataModel.quad(triple, ex, ex))) |
assert(store.has(dataModel.quad(triple, ex, ex))); |
||||||
store.delete(dataModel.quad(triple, ex, ex)) |
store.delete(dataModel.quad(triple, ex, ex)); |
||||||
assert(!store.has(dataModel.quad(triple, ex, ex))) |
assert(!store.has(dataModel.quad(triple, ex, ex))); |
||||||
}) |
}); |
||||||
}) |
}); |
||||||
|
|
||||||
describe('#has()', function () { |
describe("#has()", () => { |
||||||
it('an added quad should be in the store', function () { |
it("an added quad should be in the store", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
assert(store.has(dataModel.quad(ex, ex, ex))) |
assert(store.has(dataModel.quad(ex, ex, ex))); |
||||||
}) |
}); |
||||||
}) |
}); |
||||||
|
|
||||||
describe('#size()', function () { |
describe("#size()", () => { |
||||||
it('A store with one quad should have 1 for size', function () { |
it("A store with one quad should have 1 for size", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
assert.strictEqual(1, store.size) |
assert.strictEqual(1, store.size); |
||||||
}) |
}); |
||||||
}) |
}); |
||||||
|
|
||||||
describe('#match_quads()', function () { |
describe("#match_quads()", () => { |
||||||
it('blank pattern should return all quads', function () { |
it("blank pattern should return all quads", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
const results = store.match() |
const results = store.match(); |
||||||
assert.strictEqual(1, results.length) |
assert.strictEqual(1, results.length); |
||||||
assert(dataModel.quad(ex, ex, ex).equals(results[0])) |
assert(dataModel.quad(ex, ex, ex).equals(results[0])); |
||||||
}) |
}); |
||||||
}) |
}); |
||||||
|
|
||||||
describe('#query()', function () { |
describe("#query()", () => { |
||||||
it('ASK true', function () { |
it("ASK true", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
assert.strictEqual(true, store.query('ASK { ?s ?s ?s }')) |
assert.strictEqual(true, store.query("ASK { ?s ?s ?s }")); |
||||||
}) |
}); |
||||||
|
|
||||||
it('ASK false', function () { |
it("ASK false", () => { |
||||||
const store = new Store() |
const store = new Store(); |
||||||
assert.strictEqual(false, store.query('ASK { FILTER(false)}')) |
assert.strictEqual(false, store.query("ASK { FILTER(false)}")); |
||||||
}) |
}); |
||||||
|
|
||||||
it('CONSTRUCT', function () { |
it("CONSTRUCT", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
const results = store.query('CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }') |
const results = store.query("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }"); |
||||||
assert.strictEqual(1, results.length) |
assert.strictEqual(1, results.length); |
||||||
assert(dataModel.quad(ex, ex, ex).equals(results[0])) |
assert(dataModel.quad(ex, ex, ex).equals(results[0])); |
||||||
}) |
}); |
||||||
|
|
||||||
it('SELECT', function () { |
it("SELECT", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
const results = store.query('SELECT ?s WHERE { ?s ?p ?o }') |
const results = store.query("SELECT ?s WHERE { ?s ?p ?o }"); |
||||||
assert.strictEqual(1, results.length) |
assert.strictEqual(1, results.length); |
||||||
assert(ex.equals(results[0].get('s'))) |
assert(ex.equals(results[0].get("s"))); |
||||||
}) |
}); |
||||||
|
|
||||||
it('SELECT with NOW()', function () { |
it("SELECT with NOW()", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
const results = store.query('SELECT (YEAR(NOW()) AS ?y) WHERE {}') |
const results = store.query( |
||||||
assert.strictEqual(1, results.length) |
"SELECT * WHERE { FILTER(2022 <= YEAR(NOW()) && YEAR(NOW()) <= 2100) }", |
||||||
}) |
); |
||||||
|
assert.strictEqual(1, results.length); |
||||||
it('SELECT with RAND()', function () { |
}); |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
|
||||||
const results = store.query('SELECT (RAND() AS ?y) WHERE {}') |
it("SELECT with RAND()", () => { |
||||||
assert.strictEqual(1, results.length) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
}) |
const results = store.query("SELECT (RAND() AS ?y) WHERE {}"); |
||||||
}) |
assert.strictEqual(1, results.length); |
||||||
|
}); |
||||||
describe('#update()', function () { |
|
||||||
it('INSERT DATA', function () { |
it("SELECT with base IRI", () => { |
||||||
const store = new Store() |
const store = new Store(); |
||||||
store.update('INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }') |
const results = store.query("SELECT * WHERE { BIND(<t> AS ?t) }", { |
||||||
assert.strictEqual(1, store.size) |
base_iri: "http://example.com/", |
||||||
}) |
}); |
||||||
|
assert.strictEqual(1, results.length); |
||||||
it('DELETE DATA', function () { |
}); |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
|
||||||
store.update('DELETE DATA { <http://example.com> <http://example.com> <http://example.com> }') |
it("SELECT with union graph", () => { |
||||||
assert.strictEqual(0, store.size) |
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||||
}) |
const results = store.query("SELECT * WHERE { ?s ?p ?o }", { |
||||||
|
use_default_graph_as_union: true, |
||||||
it('DELETE WHERE', function () { |
}); |
||||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
assert.strictEqual(1, results.length); |
||||||
store.update('DELETE WHERE { ?v ?v ?v }') |
}); |
||||||
assert.strictEqual(0, store.size) |
}); |
||||||
}) |
|
||||||
}) |
describe("#update()", () => { |
||||||
|
it("INSERT DATA", () => { |
||||||
describe('#load()', function () { |
const store = new Store(); |
||||||
it('load NTriples in the default graph', function () { |
store.update( |
||||||
const store = new Store() |
"INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }", |
||||||
store.load('<http://example.com> <http://example.com> <http://example.com> .', 'application/n-triples') |
); |
||||||
assert(store.has(dataModel.quad(ex, ex, ex))) |
assert.strictEqual(1, store.size); |
||||||
}) |
}); |
||||||
|
|
||||||
it('load NTriples in an other graph', function () { |
it("DELETE DATA", () => { |
||||||
const store = new Store() |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
store.load('<http://example.com> <http://example.com> <http://example.com> .', 'application/n-triples', null, ex) |
store.update( |
||||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))) |
"DELETE DATA { <http://example.com> <http://example.com> <http://example.com> }", |
||||||
}) |
); |
||||||
|
assert.strictEqual(0, store.size); |
||||||
it('load Turtle with a base IRI', function () { |
}); |
||||||
const store = new Store() |
|
||||||
store.load('<http://example.com> <http://example.com> <> .', 'text/turtle', 'http://example.com') |
it("DELETE WHERE", () => { |
||||||
assert(store.has(dataModel.quad(ex, ex, ex))) |
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||||
}) |
store.update("DELETE WHERE { ?v ?v ?v }"); |
||||||
|
assert.strictEqual(0, store.size); |
||||||
it('load NQuads', function () { |
}); |
||||||
const store = new Store() |
}); |
||||||
store.load('<http://example.com> <http://example.com> <http://example.com> <http://example.com> .', 'application/n-quads') |
|
||||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))) |
describe("#load()", () => { |
||||||
}) |
it("load NTriples in the default graph", () => { |
||||||
|
const store = new Store(); |
||||||
it('load TriG with a base IRI', function () { |
store.load( |
||||||
const store = new Store() |
"<http://example.com> <http://example.com> <http://example.com> .", |
||||||
store.load('GRAPH <> { <http://example.com> <http://example.com> <> }', 'application/trig', 'http://example.com') |
"application/n-triples", |
||||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))) |
); |
||||||
}) |
assert(store.has(dataModel.quad(ex, ex, ex))); |
||||||
}) |
}); |
||||||
|
|
||||||
describe('#dump()', function () { |
it("load NTriples in an other graph", () => { |
||||||
it('dump dataset content', function () { |
const store = new Store(); |
||||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]) |
store.load( |
||||||
assert.strictEqual('<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n', store.dump('application/n-quads')) |
"<http://example.com> <http://example.com> <http://example.com> .", |
||||||
}) |
"application/n-triples", |
||||||
|
null, |
||||||
it('dump named graph content', function () { |
ex, |
||||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]) |
); |
||||||
assert.strictEqual('<http://example.com> <http://example.com> <http://example.com> .\n', store.dump('application/n-triples', ex)) |
assert(store.has(dataModel.quad(ex, ex, ex, ex))); |
||||||
}) |
}); |
||||||
|
|
||||||
it('dump default graph content', function () { |
it("load Turtle with a base IRI", () => { |
||||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]) |
const store = new Store(); |
||||||
assert.strictEqual('', store.dump('application/n-triples')) |
store.load( |
||||||
}) |
"<http://example.com> <http://example.com> <> .", |
||||||
}) |
"text/turtle", |
||||||
}) |
"http://example.com", |
||||||
|
); |
||||||
|
assert(store.has(dataModel.quad(ex, ex, ex))); |
||||||
|
}); |
||||||
|
|
||||||
|
it("load NQuads", () => { |
||||||
|
const store = new Store(); |
||||||
|
store.load( |
||||||
|
"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .", |
||||||
|
"application/n-quads", |
||||||
|
); |
||||||
|
assert(store.has(dataModel.quad(ex, ex, ex, ex))); |
||||||
|
}); |
||||||
|
|
||||||
|
it("load TriG with a base IRI", () => { |
||||||
|
const store = new Store(); |
||||||
|
store.load( |
||||||
|
"GRAPH <> { <http://example.com> <http://example.com> <> }", |
||||||
|
"application/trig", |
||||||
|
"http://example.com", |
||||||
|
); |
||||||
|
assert(store.has(dataModel.quad(ex, ex, ex, ex))); |
||||||
|
}); |
||||||
|
}); |
||||||
|
|
||||||
|
describe("#dump()", () => { |
||||||
|
it("dump dataset content", () => { |
||||||
|
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||||
|
assert.strictEqual( |
||||||
|
"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n", |
||||||
|
store.dump("application/n-quads"), |
||||||
|
); |
||||||
|
}); |
||||||
|
|
||||||
|
it("dump named graph content", () => { |
||||||
|
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||||
|
assert.strictEqual( |
||||||
|
"<http://example.com> <http://example.com> <http://example.com> .\n", |
||||||
|
store.dump("application/n-triples", ex), |
||||||
|
); |
||||||
|
}); |
||||||
|
|
||||||
|
it("dump default graph content", () => { |
||||||
|
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||||
|
assert.strictEqual("", store.dump("application/n-triples", dataModel.defaultGraph())); |
||||||
|
}); |
||||||
|
}); |
||||||
|
}); |
||||||
|
@ -1,63 +0,0 @@ |
|||||||
[package] |
|
||||||
name = "oxigraph" |
|
||||||
version = "0.4.0-alpha" |
|
||||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
|
||||||
license = "MIT OR Apache-2.0" |
|
||||||
readme = "README.md" |
|
||||||
keywords = ["RDF", "SPARQL", "graph-database", "database"] |
|
||||||
categories = ["database-implementations"] |
|
||||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib" |
|
||||||
homepage = "https://oxigraph.org/" |
|
||||||
description = """ |
|
||||||
a SPARQL database and RDF toolkit |
|
||||||
""" |
|
||||||
edition = "2021" |
|
||||||
|
|
||||||
[package.metadata.docs.rs] |
|
||||||
all-features = true |
|
||||||
|
|
||||||
[features] |
|
||||||
default = [] |
|
||||||
http_client = ["oxhttp", "oxhttp/rustls"] |
|
||||||
|
|
||||||
[dependencies] |
|
||||||
rand = "0.8" |
|
||||||
md-5 = "0.10" |
|
||||||
sha-1 = "0.10" |
|
||||||
sha2 = "0.10" |
|
||||||
digest = "0.10" |
|
||||||
regex = "1" |
|
||||||
oxilangtag = "0.1" |
|
||||||
oxiri = "0.2" |
|
||||||
rio_api = "0.7" |
|
||||||
rio_turtle = "0.7" |
|
||||||
rio_xml = "0.7" |
|
||||||
hex = "0.4" |
|
||||||
nom = "7" |
|
||||||
siphasher = "0.3" |
|
||||||
lazy_static = "1" |
|
||||||
sysinfo = "0.26" |
|
||||||
oxrdf = { version = "0.1.0", path="oxrdf", features = ["rdf-star"] } |
|
||||||
spargebra = { version = "0.3.0-alpha", path="spargebra", features = ["rdf-star", "ex-lateral"] } |
|
||||||
sparesults = { version = "0.1.1", path="sparesults", features = ["rdf-star"] } |
|
||||||
|
|
||||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] |
|
||||||
libc = "0.2" |
|
||||||
oxrocksdb-sys = { version = "0.3.7", path="../oxrocksdb-sys" } |
|
||||||
oxhttp = { version = "0.1", optional = true } |
|
||||||
|
|
||||||
[target.'cfg(target_arch = "wasm32")'.dependencies] |
|
||||||
js-sys = "0.3" |
|
||||||
getrandom = {version="0.2", features=["js"]} |
|
||||||
|
|
||||||
[dev-dependencies] |
|
||||||
criterion = "0.4" |
|
||||||
oxhttp = "0.1" |
|
||||||
zstd = "0.11" |
|
||||||
|
|
||||||
[target.'cfg(target_arch = "wasm32")'.dev-dependencies] |
|
||||||
wasm-bindgen-test = "0.3" |
|
||||||
|
|
||||||
[[bench]] |
|
||||||
name = "store" |
|
||||||
harness = false |
|
@ -1,72 +1,13 @@ |
|||||||
Oxigraph |
Oxigraph Rust crates |
||||||
======== |
==================== |
||||||
|
|
||||||
[![Latest Version](https://img.shields.io/crates/v/oxigraph.svg)](https://crates.io/crates/oxigraph) |
Oxigraph is implemented in Rust. |
||||||
[![Released API docs](https://docs.rs/oxigraph/badge.svg)](https://docs.rs/oxigraph) |
It is composed on a main library, [`oxigraph`](./oxigraph) and a set of smaller crates used by the `oxigraph` crate: |
||||||
[![Crates.io downloads](https://img.shields.io/crates/d/oxigraph)](https://crates.io/crates/oxigraph) |
* [`oxrdf`](./oxrdf), datastructures encoding RDF basic concepts (the `model` module of the `oxigraph` crate). |
||||||
[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) |
* [`oxrdfio`](./oxrdfio), a unified parser and serializer API for RDF formats (the `io` module of the `oxigraph` crate). It itself relies on: |
||||||
[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) |
* [`oxttl`](./oxttl), N-Triple, N-Quad, Turtle, TriG and N3 parsing and serialization. |
||||||
|
* [`oxrdfxml`](./oxrdfxml), RDF/XML parsing and serialization. |
||||||
Oxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. |
* [`spargebra`](./spargebra), a SPARQL parser. |
||||||
|
* [`sparesults`](./sparesults), parsers and serializers for SPARQL result formats (the `sparql::results` module of the `oxigraph` crate). |
||||||
Its goal is to provide a compliant, safe and fast on-disk graph database. |
* [`sparopt`](./sparesults), a SPARQL optimizer. |
||||||
It also provides a set of utility functions for reading, writing, and processing RDF files. |
* [`oxsdatatypes`](./oxsdatatypes), an implementation of some XML Schema datatypes. |
||||||
|
|
||||||
Oxigraph is in heavy development and SPARQL query evaluation has not been optimized yet. |
|
||||||
|
|
||||||
Oxigraph also provides [a standalone HTTP server](https://crates.io/crates/oxigraph_server) and [a Python library](https://pyoxigraph.readthedocs.io/) based on this library. |
|
||||||
|
|
||||||
|
|
||||||
Oxigraph implements the following specifications: |
|
||||||
* [SPARQL 1.1 Query](https://www.w3.org/TR/sparql11-query/), [SPARQL 1.1 Update](https://www.w3.org/TR/sparql11-update/), and [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/). |
|
||||||
* [Turtle](https://www.w3.org/TR/turtle/), [TriG](https://www.w3.org/TR/trig/), [N-Triples](https://www.w3.org/TR/n-triples/), [N-Quads](https://www.w3.org/TR/n-quads/), and [RDF XML](https://www.w3.org/TR/rdf-syntax-grammar/) RDF serialization formats for both data ingestion and retrieval using the [Rio library](https://github.com/oxigraph/rio). |
|
||||||
* [SPARQL Query Results XML Format](http://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/). |
|
||||||
|
|
||||||
A preliminary benchmark [is provided](../bench/README.md). Oxigraph internal design [is described on the wiki](https://github.com/oxigraph/oxigraph/wiki/Architecture). |
|
||||||
|
|
||||||
The main entry point of Oxigraph is the [`Store`](store::Store) struct: |
|
||||||
```rust |
|
||||||
use oxigraph::store::Store; |
|
||||||
use oxigraph::model::*; |
|
||||||
use oxigraph::sparql::QueryResults; |
|
||||||
|
|
||||||
let store = Store::new().unwrap(); |
|
||||||
|
|
||||||
// insertion |
|
||||||
let ex = NamedNode::new("http://example.com").unwrap(); |
|
||||||
let quad = Quad::new(ex.clone(), ex.clone(), ex.clone(), GraphName::DefaultGraph); |
|
||||||
store.insert(&quad).unwrap(); |
|
||||||
|
|
||||||
// quad filter |
|
||||||
let results = store.quads_for_pattern(Some(ex.as_ref().into()), None, None, None).collect::<Result<Vec<Quad>,_>>().unwrap(); |
|
||||||
assert_eq!(vec![quad], results); |
|
||||||
|
|
||||||
// SPARQL query |
|
||||||
if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }").unwrap() { |
|
||||||
assert_eq!(solutions.next().unwrap().unwrap().get("s"), Some(&ex.into())); |
|
||||||
} |
|
||||||
``` |
|
||||||
|
|
||||||
Some parts of this library are available as standalone crates: |
|
||||||
* [`oxrdf`](https://crates.io/crates/oxrdf) provides datastructures encoding RDF basic concepts (the `oxigraph::model` module). |
|
||||||
* [`spargebra`](https://crates.io/crates/spargebra) provides a SPARQL parser. |
|
||||||
* [`sparesults`](https://crates.io/crates/sparesults) provides parsers and serializers for SPARQL result formats. |
|
||||||
|
|
||||||
To build the library, don't forget to clone the submodules using `git clone --recursive https://github.com/oxigraph/oxigraph.git` to clone the repository including submodules or `git submodule update --init` to add submodules to the already cloned repository. |
|
||||||
|
|
||||||
|
|
||||||
## License |
|
||||||
|
|
||||||
This project is licensed under either of |
|
||||||
|
|
||||||
* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
|
||||||
`<http://www.apache.org/licenses/LICENSE-2.0>`) |
|
||||||
* MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
|
||||||
`<http://opensource.org/licenses/MIT>`) |
|
||||||
|
|
||||||
at your option. |
|
||||||
|
|
||||||
|
|
||||||
### Contribution |
|
||||||
|
|
||||||
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
|
||||||
|
@ -1,208 +0,0 @@ |
|||||||
use criterion::{criterion_group, criterion_main, Criterion, Throughput}; |
|
||||||
use oxhttp::model::{Method, Request, Status}; |
|
||||||
use oxigraph::io::GraphFormat; |
|
||||||
use oxigraph::model::GraphNameRef; |
|
||||||
use oxigraph::sparql::{Query, QueryResults, Update}; |
|
||||||
use oxigraph::store::Store; |
|
||||||
use rand::random; |
|
||||||
use std::env::temp_dir; |
|
||||||
use std::fs::{remove_dir_all, File}; |
|
||||||
use std::io::{BufRead, BufReader, Cursor, Read}; |
|
||||||
use std::path::{Path, PathBuf}; |
|
||||||
|
|
||||||
fn store_load(c: &mut Criterion) { |
|
||||||
{ |
|
||||||
let mut data = Vec::new(); |
|
||||||
read_data("explore-1000.nt.zst") |
|
||||||
.read_to_end(&mut data) |
|
||||||
.unwrap(); |
|
||||||
|
|
||||||
let mut group = c.benchmark_group("store load"); |
|
||||||
group.throughput(Throughput::Bytes(data.len() as u64)); |
|
||||||
group.sample_size(10); |
|
||||||
group.bench_function("load BSBM explore 1000 in memory", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let store = Store::new().unwrap(); |
|
||||||
do_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
group.bench_function("load BSBM explore 1000 in on disk", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let path = TempDir::default(); |
|
||||||
let store = Store::open(&path.0).unwrap(); |
|
||||||
do_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
group.bench_function("load BSBM explore 1000 in on disk with bulk load", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let path = TempDir::default(); |
|
||||||
let store = Store::open(&path.0).unwrap(); |
|
||||||
do_bulk_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
} |
|
||||||
|
|
||||||
{ |
|
||||||
let mut data = Vec::new(); |
|
||||||
read_data("explore-10000.nt.zst") |
|
||||||
.read_to_end(&mut data) |
|
||||||
.unwrap(); |
|
||||||
|
|
||||||
let mut group = c.benchmark_group("store load large"); |
|
||||||
group.throughput(Throughput::Bytes(data.len() as u64)); |
|
||||||
group.sample_size(10); |
|
||||||
group.bench_function("load BSBM explore 10000 in on disk with bulk load", |b| { |
|
||||||
b.iter(|| { |
|
||||||
let path = TempDir::default(); |
|
||||||
let store = Store::open(&path.0).unwrap(); |
|
||||||
do_bulk_load(&store, &data); |
|
||||||
}) |
|
||||||
}); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
fn do_load(store: &Store, data: &[u8]) { |
|
||||||
store |
|
||||||
.load_graph( |
|
||||||
Cursor::new(&data), |
|
||||||
GraphFormat::NTriples, |
|
||||||
GraphNameRef::DefaultGraph, |
|
||||||
None, |
|
||||||
) |
|
||||||
.unwrap(); |
|
||||||
store.optimize().unwrap(); |
|
||||||
} |
|
||||||
|
|
||||||
fn do_bulk_load(store: &Store, data: &[u8]) { |
|
||||||
store |
|
||||||
.bulk_loader() |
|
||||||
.load_graph( |
|
||||||
Cursor::new(&data), |
|
||||||
GraphFormat::NTriples, |
|
||||||
GraphNameRef::DefaultGraph, |
|
||||||
None, |
|
||||||
) |
|
||||||
.unwrap(); |
|
||||||
store.optimize().unwrap(); |
|
||||||
} |
|
||||||
|
|
||||||
fn store_query_and_update(c: &mut Criterion) { |
|
||||||
let mut data = Vec::new(); |
|
||||||
read_data("explore-1000.nt.zst") |
|
||||||
.read_to_end(&mut data) |
|
||||||
.unwrap(); |
|
||||||
|
|
||||||
let operations = read_data("mix-exploreAndUpdate-1000.tsv.zst") |
|
||||||
.lines() |
|
||||||
.map(|l| { |
|
||||||
let l = l.unwrap(); |
|
||||||
let mut parts = l.trim().split('\t'); |
|
||||||
let kind = parts.next().unwrap(); |
|
||||||
let operation = parts.next().unwrap(); |
|
||||||
match kind { |
|
||||||
"query" => Operation::Query(Query::parse(operation, None).unwrap()), |
|
||||||
"update" => Operation::Update(Update::parse(operation, None).unwrap()), |
|
||||||
_ => panic!("Unexpected operation kind {}", kind), |
|
||||||
} |
|
||||||
}) |
|
||||||
.collect::<Vec<_>>(); |
|
||||||
let query_operations = operations |
|
||||||
.iter() |
|
||||||
.filter(|o| matches!(o, Operation::Query(_))) |
|
||||||
.cloned() |
|
||||||
.collect::<Vec<_>>(); |
|
||||||
|
|
||||||
let mut group = c.benchmark_group("store operations"); |
|
||||||
group.throughput(Throughput::Elements(operations.len() as u64)); |
|
||||||
group.sample_size(10); |
|
||||||
|
|
||||||
{ |
|
||||||
let memory_store = Store::new().unwrap(); |
|
||||||
do_bulk_load(&memory_store, &data); |
|
||||||
group.bench_function("BSBM explore 1000 query in memory", |b| { |
|
||||||
b.iter(|| run_operation(&memory_store, &query_operations)) |
|
||||||
}); |
|
||||||
group.bench_function("BSBM explore 1000 queryAndUpdate in memory", |b| { |
|
||||||
b.iter(|| run_operation(&memory_store, &operations)) |
|
||||||
}); |
|
||||||
} |
|
||||||
|
|
||||||
{ |
|
||||||
let path = TempDir::default(); |
|
||||||
let disk_store = Store::open(&path.0).unwrap(); |
|
||||||
do_bulk_load(&disk_store, &data); |
|
||||||
group.bench_function("BSBM explore 1000 query on disk", |b| { |
|
||||||
b.iter(|| run_operation(&disk_store, &query_operations)) |
|
||||||
}); |
|
||||||
group.bench_function("BSBM explore 1000 queryAndUpdate on disk", |b| { |
|
||||||
b.iter(|| run_operation(&disk_store, &operations)) |
|
||||||
}); |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
fn run_operation(store: &Store, operations: &[Operation]) { |
|
||||||
for operation in operations { |
|
||||||
match operation { |
|
||||||
Operation::Query(q) => match store.query(q.clone()).unwrap() { |
|
||||||
QueryResults::Boolean(_) => (), |
|
||||||
QueryResults::Solutions(s) => { |
|
||||||
for s in s { |
|
||||||
s.unwrap(); |
|
||||||
} |
|
||||||
} |
|
||||||
QueryResults::Graph(g) => { |
|
||||||
for t in g { |
|
||||||
t.unwrap(); |
|
||||||
} |
|
||||||
} |
|
||||||
}, |
|
||||||
Operation::Update(u) => store.update(u.clone()).unwrap(), |
|
||||||
} |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
criterion_group!(store, store_query_and_update, store_load); |
|
||||||
|
|
||||||
criterion_main!(store); |
|
||||||
|
|
||||||
fn read_data(file: &str) -> impl BufRead { |
|
||||||
if !Path::new(file).exists() { |
|
||||||
let mut client = oxhttp::Client::new(); |
|
||||||
client.set_redirection_limit(5); |
|
||||||
let url = format!( |
|
||||||
"https://github.com/Tpt/bsbm-tools/releases/download/v0.2/{}", |
|
||||||
file |
|
||||||
); |
|
||||||
let request = Request::builder(Method::GET, url.parse().unwrap()).build(); |
|
||||||
let response = client.request(request).unwrap(); |
|
||||||
assert_eq!( |
|
||||||
response.status(), |
|
||||||
Status::OK, |
|
||||||
"{}", |
|
||||||
response.into_body().to_string().unwrap() |
|
||||||
); |
|
||||||
std::io::copy(&mut response.into_body(), &mut File::create(file).unwrap()).unwrap(); |
|
||||||
} |
|
||||||
BufReader::new(zstd::Decoder::new(File::open(file).unwrap()).unwrap()) |
|
||||||
} |
|
||||||
|
|
||||||
#[allow(clippy::large_enum_variant)] |
|
||||||
#[derive(Clone)] |
|
||||||
enum Operation { |
|
||||||
Query(Query), |
|
||||||
Update(Update), |
|
||||||
} |
|
||||||
|
|
||||||
struct TempDir(PathBuf); |
|
||||||
|
|
||||||
impl Default for TempDir { |
|
||||||
fn default() -> Self { |
|
||||||
Self(temp_dir().join(format!("oxigraph-bench-{}", random::<u128>()))) |
|
||||||
} |
|
||||||
} |
|
||||||
|
|
||||||
impl Drop for TempDir { |
|
||||||
fn drop(&mut self) { |
|
||||||
remove_dir_all(&self.0).unwrap() |
|
||||||
} |
|
||||||
} |
|
@ -0,0 +1,59 @@ |
|||||||
|
[package] |
||||||
|
name = "oxigraph" |
||||||
|
version.workspace = true |
||||||
|
authors.workspace = true |
||||||
|
license.workspace = true |
||||||
|
readme = "README.md" |
||||||
|
keywords = ["RDF", "SPARQL", "graph-database", "database"] |
||||||
|
categories = ["database-implementations"] |
||||||
|
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib/oxigraph" |
||||||
|
homepage = "https://oxigraph.org/" |
||||||
|
documentation = "https://docs.rs/oxigraph" |
||||||
|
description = """ |
||||||
|
a SPARQL database and RDF toolkit |
||||||
|
""" |
||||||
|
edition.workspace = true |
||||||
|
rust-version.workspace = true |
||||||
|
|
||||||
|
[features] |
||||||
|
js = ["getrandom/js", "oxsdatatypes/js", "js-sys"] |
||||||
|
|
||||||
|
|
||||||
|
[dependencies] |
||||||
|
digest.workspace = true |
||||||
|
hex.workspace = true |
||||||
|
json-event-parser.workspace = true |
||||||
|
md-5.workspace = true |
||||||
|
oxilangtag.workspace = true |
||||||
|
oxiri.workspace = true |
||||||
|
oxrdf = { workspace = true, features = ["rdf-star", "oxsdatatypes"] } |
||||||
|
oxrdfio = { workspace = true, features = ["rdf-star"] } |
||||||
|
oxsdatatypes.workspace = true |
||||||
|
rand.workspace = true |
||||||
|
regex.workspace = true |
||||||
|
sha1.workspace = true |
||||||
|
sha2.workspace = true |
||||||
|
siphasher.workspace = true |
||||||
|
sparesults = { workspace = true, features = ["rdf-star"] } |
||||||
|
spargebra = { workspace = true, features = ["rdf-star", "sep-0002", "sep-0006"] } |
||||||
|
sparopt = { workspace = true, features = ["rdf-star", "sep-0002", "sep-0006"] } |
||||||
|
thiserror.workspace = true |
||||||
|
|
||||||
|
[target.'cfg(not(target_family = "wasm"))'.dependencies] |
||||||
|
libc = "0.2" |
||||||
|
rocksdb.workspace = true |
||||||
|
|
||||||
|
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies] |
||||||
|
getrandom.workspace = true |
||||||
|
js-sys = { workspace = true, optional = true } |
||||||
|
|
||||||
|
[target.'cfg(not(target_family = "wasm"))'.dev-dependencies] |
||||||
|
codspeed-criterion-compat.workspace = true |
||||||
|
zstd.workspace = true |
||||||
|
|
||||||
|
[lints] |
||||||
|
workspace = true |
||||||
|
|
||||||
|
[package.metadata.docs.rs] |
||||||
|
rustdoc-args = ["--cfg", "docsrs"] |
||||||
|
|
@ -0,0 +1,82 @@ |
|||||||
|
Oxigraph |
||||||
|
======== |
||||||
|
|
||||||
|
[![Latest Version](https://img.shields.io/crates/v/oxigraph.svg)](https://crates.io/crates/oxigraph) |
||||||
|
[![Released API docs](https://docs.rs/oxigraph/badge.svg)](https://docs.rs/oxigraph) |
||||||
|
[![Crates.io downloads](https://img.shields.io/crates/d/oxigraph)](https://crates.io/crates/oxigraph) |
||||||
|
[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) |
||||||
|
[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community) |
||||||
|
|
||||||
|
Oxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. |
||||||
|
|
||||||
|
Its goal is to provide a compliant, safe and fast on-disk graph database. |
||||||
|
It also provides a set of utility functions for reading, writing, and processing RDF files. |
||||||
|
|
||||||
|
Oxigraph is in heavy development and SPARQL query evaluation has not been optimized yet. |
||||||
|
|
||||||
|
Oxigraph also provides [a CLI tool](https://crates.io/crates/oxigraph-cli) and [a Python library](https://pyoxigraph.readthedocs.io/) based on this library. |
||||||
|
|
||||||
|
|
||||||
|
Oxigraph implements the following specifications: |
||||||
|
* [SPARQL 1.1 Query](https://www.w3.org/TR/sparql11-query/), [SPARQL 1.1 Update](https://www.w3.org/TR/sparql11-update/), and [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/). |
||||||
|
* [Turtle](https://www.w3.org/TR/turtle/), [TriG](https://www.w3.org/TR/trig/), [N-Triples](https://www.w3.org/TR/n-triples/), [N-Quads](https://www.w3.org/TR/n-quads/), and [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) RDF serialization formats for both data ingestion and retrieval. |
||||||
|
* [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/). |
||||||
|
|
||||||
|
A preliminary benchmark [is provided](../bench/README.md). Oxigraph internal design [is described on the wiki](https://github.com/oxigraph/oxigraph/wiki/Architecture). |
||||||
|
|
||||||
|
The main entry point of Oxigraph is the [`Store`](store::Store) struct: |
||||||
|
```rust |
||||||
|
use oxigraph::store::Store; |
||||||
|
use oxigraph::model::*; |
||||||
|
use oxigraph::sparql::QueryResults; |
||||||
|
|
||||||
|
let store = Store::new().unwrap(); |
||||||
|
|
||||||
|
// insertion |
||||||
|
let ex = NamedNode::new("http://example.com").unwrap(); |
||||||
|
let quad = Quad::new(ex.clone(), ex.clone(), ex.clone(), GraphName::DefaultGraph); |
||||||
|
store.insert(&quad).unwrap(); |
||||||
|
|
||||||
|
// quad filter |
||||||
|
let results = store.quads_for_pattern(Some(ex.as_ref().into()), None, None, None).collect::<Result<Vec<Quad>,_>>().unwrap(); |
||||||
|
assert_eq!(vec![quad], results); |
||||||
|
|
||||||
|
// SPARQL query |
||||||
|
if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }").unwrap() { |
||||||
|
assert_eq!(solutions.next().unwrap().unwrap().get("s"), Some(&ex.into())); |
||||||
|
} |
||||||
|
``` |
||||||
|
|
||||||
|
It is based on these crates that can be used separately: |
||||||
|
* [`oxrdf`](https://crates.io/crates/oxrdf), datastructures encoding RDF basic concepts (the [`oxigraph::model`](crate::model) module). |
||||||
|
* [`oxrdfio`](https://crates.io/crates/oxrdfio), a unified parser and serializer API for RDF formats (the [`oxigraph::io`](crate::io) module). It itself relies on: |
||||||
|
* [`oxttl`](https://crates.io/crates/oxttl), N-Triple, N-Quad, Turtle, TriG and N3 parsing and serialization. |
||||||
|
* [`oxrdfxml`](https://crates.io/crates/oxrdfxml), RDF/XML parsing and serialization. |
||||||
|
* [`spargebra`](https://crates.io/crates/spargebra), a SPARQL parser. |
||||||
|
* [`sparesults`](https://crates.io/crates/sparesults), parsers and serializers for SPARQL result formats (the [`oxigraph::sparql::results`](crate::sparql::results) module). |
||||||
|
* [`sparopt`](https://crates.io/crates/sparesults), a SPARQL optimizer. |
||||||
|
* [`oxsdatatypes`](https://crates.io/crates/oxsdatatypes), an implementation of some XML Schema datatypes. |
||||||
|
|
||||||
|
To build the library locally, don't forget to clone the submodules using `git clone --recursive https://github.com/oxigraph/oxigraph.git` to clone the repository including submodules or `git submodule update --init` to add submodules to the already cloned repository. |
||||||
|
|
||||||
|
It is possible to disable the RocksDB storage backend to only use the in-memory fallback by disabling the `rocksdb` default feature: |
||||||
|
```toml |
||||||
|
oxigraph = { version = "*", default-features = false } |
||||||
|
``` |
||||||
|
This is the default behavior when compiling Oxigraph to WASM. |
||||||
|
|
||||||
|
## License |
||||||
|
|
||||||
|
This project is licensed under either of |
||||||
|
|
||||||
|
* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
||||||
|
`<http://www.apache.org/licenses/LICENSE-2.0>`) |
||||||
|
* MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
||||||
|
`<http://opensource.org/licenses/MIT>`) |
||||||
|
|
||||||
|
at your option. |
||||||
|
|
||||||
|
|
||||||
|
### Contribution |
||||||
|
|
||||||
|
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
@ -0,0 +1,39 @@ |
|||||||
|
//! Utilities to read and write RDF graphs and datasets using [OxRDF I/O](https://crates.io/crates/oxrdfio).
|
||||||
|
//!
|
||||||
|
//! The entry points of this module are the two [`RdfParser`] and [`RdfSerializer`] structs.
|
||||||
|
//!
|
||||||
|
//! Usage example converting a Turtle file to a N-Triples file:
|
||||||
|
//! ```
|
||||||
|
//! use oxigraph::io::{RdfFormat, RdfParser, RdfSerializer};
|
||||||
|
//!
|
||||||
|
//! let turtle_file = b"@base <http://example.com/> .
|
||||||
|
//! @prefix schema: <http://schema.org/> .
|
||||||
|
//! <foo> a schema:Person ;
|
||||||
|
//! schema:name \"Foo\" .
|
||||||
|
//! <bar> a schema:Person ;
|
||||||
|
//! schema:name \"Bar\" .";
|
||||||
|
//!
|
||||||
|
//! let ntriples_file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
|
||||||
|
//! <http://example.com/foo> <http://schema.org/name> \"Foo\" .
|
||||||
|
//! <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
|
||||||
|
//! <http://example.com/bar> <http://schema.org/name> \"Bar\" .
|
||||||
|
//! ";
|
||||||
|
//!
|
||||||
|
//! let mut writer = RdfSerializer::from_format(RdfFormat::NTriples).serialize_to_write(Vec::new());
|
||||||
|
//! for quad in RdfParser::from_format(RdfFormat::Turtle).parse_read(turtle_file.as_ref()) {
|
||||||
|
//! writer.write_quad(&quad.unwrap()).unwrap();
|
||||||
|
//! }
|
||||||
|
//! assert_eq!(writer.finish().unwrap(), ntriples_file);
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
mod format; |
||||||
|
pub mod read; |
||||||
|
pub mod write; |
||||||
|
|
||||||
|
#[allow(deprecated)] |
||||||
|
pub use self::format::{DatasetFormat, GraphFormat}; |
||||||
|
#[allow(deprecated)] |
||||||
|
pub use self::read::{DatasetParser, GraphParser}; |
||||||
|
#[allow(deprecated)] |
||||||
|
pub use self::write::{DatasetSerializer, GraphSerializer}; |
||||||
|
pub use oxrdfio::*; |
@ -0,0 +1,199 @@ |
|||||||
|
#![allow(deprecated)] |
||||||
|
|
||||||
|
//! Utilities to read RDF graphs and datasets.
|
||||||
|
|
||||||
|
use crate::io::{DatasetFormat, GraphFormat}; |
||||||
|
use crate::model::*; |
||||||
|
use oxrdfio::{FromReadQuadReader, RdfParseError, RdfParser}; |
||||||
|
use std::io::Read; |
||||||
|
|
||||||
|
/// Parsers for RDF graph serialization formats.
|
||||||
|
///
|
||||||
|
/// It currently supports the following formats:
|
||||||
|
/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`])
|
||||||
|
/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`])
|
||||||
|
/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`])
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
|
||||||
|
///
|
||||||
|
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_triples(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[deprecated(note = "use RdfParser instead", since = "0.4.0")] |
||||||
|
pub struct GraphParser { |
||||||
|
inner: RdfParser, |
||||||
|
} |
||||||
|
|
||||||
|
impl GraphParser { |
||||||
|
/// Builds a parser for the given format.
|
||||||
|
#[inline] |
||||||
|
pub fn from_format(format: GraphFormat) -> Self { |
||||||
|
Self { |
||||||
|
inner: RdfParser::from_format(format.into()) |
||||||
|
.without_named_graphs() |
||||||
|
.rename_blank_nodes(), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Provides an IRI that could be used to resolve the file relative IRIs.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||||
|
///
|
||||||
|
/// let file = "</s> </p> </o> .";
|
||||||
|
///
|
||||||
|
/// let parser =
|
||||||
|
/// GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?;
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_triples(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[inline] |
||||||
|
pub fn with_base_iri(self, base_iri: impl Into<String>) -> Result<Self, IriParseError> { |
||||||
|
Ok(Self { |
||||||
|
inner: self.inner.with_base_iri(base_iri)?, |
||||||
|
}) |
||||||
|
} |
||||||
|
|
||||||
|
/// Executes the parsing itself on a [`Read`] implementation and returns an iterator of triples.
|
||||||
|
pub fn read_triples<R: Read>(self, reader: R) -> TripleReader<R> { |
||||||
|
TripleReader { |
||||||
|
parser: self.inner.parse_read(reader), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An iterator yielding read triples.
|
||||||
|
/// Could be built using a [`GraphParser`].
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
|
||||||
|
///
|
||||||
|
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_triples(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[must_use] |
||||||
|
pub struct TripleReader<R: Read> { |
||||||
|
parser: FromReadQuadReader<R>, |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read> Iterator for TripleReader<R> { |
||||||
|
type Item = Result<Triple, RdfParseError>; |
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> { |
||||||
|
Some(self.parser.next()?.map(Into::into).map_err(Into::into)) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// A parser for RDF dataset serialization formats.
|
||||||
|
///
|
||||||
|
/// It currently supports the following formats:
|
||||||
|
/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`])
|
||||||
|
/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`])
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";
|
||||||
|
///
|
||||||
|
/// let parser = DatasetParser::from_format(DatasetFormat::NQuads);
|
||||||
|
/// let quads = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(quads.len(), 1);
|
||||||
|
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[deprecated(note = "use RdfParser instead", since = "0.4.0")] |
||||||
|
pub struct DatasetParser { |
||||||
|
inner: RdfParser, |
||||||
|
} |
||||||
|
|
||||||
|
impl DatasetParser { |
||||||
|
/// Builds a parser for the given format.
|
||||||
|
#[inline] |
||||||
|
pub fn from_format(format: DatasetFormat) -> Self { |
||||||
|
Self { |
||||||
|
inner: RdfParser::from_format(format.into()).rename_blank_nodes(), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Provides an IRI that could be used to resolve the file relative IRIs.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||||
|
///
|
||||||
|
/// let file = "<g> { </s> </p> </o> }";
|
||||||
|
///
|
||||||
|
/// let parser =
|
||||||
|
/// DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?;
|
||||||
|
/// let triples = parser
|
||||||
|
/// .read_quads(file.as_bytes())
|
||||||
|
/// .collect::<Result<Vec<_>, _>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(triples.len(), 1);
|
||||||
|
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[inline] |
||||||
|
pub fn with_base_iri(self, base_iri: impl Into<String>) -> Result<Self, IriParseError> { |
||||||
|
Ok(Self { |
||||||
|
inner: self.inner.with_base_iri(base_iri)?, |
||||||
|
}) |
||||||
|
} |
||||||
|
|
||||||
|
/// Executes the parsing itself on a [`Read`] implementation and returns an iterator of quads.
|
||||||
|
pub fn read_quads<R: Read>(self, reader: R) -> QuadReader<R> { |
||||||
|
QuadReader { |
||||||
|
parser: self.inner.parse_read(reader), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An iterator yielding read quads.
|
||||||
|
/// Could be built using a [`DatasetParser`].
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||||
|
///
|
||||||
|
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";
|
||||||
|
///
|
||||||
|
/// let parser = DatasetParser::from_format(DatasetFormat::NQuads);
|
||||||
|
/// let quads = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(quads.len(), 1);
|
||||||
|
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
|
||||||
|
/// # std::io::Result::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[must_use] |
||||||
|
pub struct QuadReader<R: Read> { |
||||||
|
parser: FromReadQuadReader<R>, |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read> Iterator for QuadReader<R> { |
||||||
|
type Item = Result<Quad, RdfParseError>; |
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> { |
||||||
|
Some(self.parser.next()?.map_err(Into::into)) |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,185 @@ |
|||||||
|
#![allow(deprecated)] |
||||||
|
|
||||||
|
//! Utilities to write RDF graphs and datasets.
|
||||||
|
|
||||||
|
use crate::io::{DatasetFormat, GraphFormat}; |
||||||
|
use crate::model::*; |
||||||
|
use oxrdfio::{RdfSerializer, ToWriteQuadWriter}; |
||||||
|
use std::io::{self, Write}; |
||||||
|
|
||||||
|
/// A serializer for RDF graph serialization formats.
|
||||||
|
///
|
||||||
|
/// It currently supports the following formats:
|
||||||
|
/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`])
|
||||||
|
/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`])
|
||||||
|
/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`])
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphSerializer};
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
///
|
||||||
|
/// let mut buffer = Vec::new();
|
||||||
|
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
|
||||||
|
/// writer.write(&Triple {
|
||||||
|
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||||
|
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||||
|
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||||
|
/// })?;
|
||||||
|
/// writer.finish()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// buffer.as_slice(),
|
||||||
|
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
|
||||||
|
/// );
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")] |
||||||
|
pub struct GraphSerializer { |
||||||
|
inner: RdfSerializer, |
||||||
|
} |
||||||
|
|
||||||
|
impl GraphSerializer { |
||||||
|
/// Builds a serializer for the given format
|
||||||
|
#[inline] |
||||||
|
pub fn from_format(format: GraphFormat) -> Self { |
||||||
|
Self { |
||||||
|
inner: RdfSerializer::from_format(format.into()), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Returns a [`TripleWriter`] allowing writing triples into the given [`Write`] implementation
|
||||||
|
pub fn triple_writer<W: Write>(self, write: W) -> TripleWriter<W> { |
||||||
|
TripleWriter { |
||||||
|
writer: self.inner.serialize_to_write(write), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// Allows writing triples.
|
||||||
|
/// Could be built using a [`GraphSerializer`].
|
||||||
|
///
|
||||||
|
/// <div class="warning">
|
||||||
|
///
|
||||||
|
/// Do not forget to run the [`finish`](TripleWriter::finish()) method to properly write the last bytes of the file.</div>
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::io::{GraphFormat, GraphSerializer};
|
||||||
|
/// use oxigraph::model::*;
|
||||||
|
///
|
||||||
|
/// let mut buffer = Vec::new();
|
||||||
|
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
|
||||||
|
/// writer.write(&Triple {
|
||||||
|
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||||
|
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||||
|
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||||
|
/// })?;
|
||||||
|
/// writer.finish()?;
|
||||||
|
///
|
||||||
|
/// assert_eq!(
|
||||||
|
/// buffer.as_slice(),
|
||||||
|
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
|
||||||
|
/// );
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[must_use] |
||||||
|
pub struct TripleWriter<W: Write> { |
||||||
|
writer: ToWriteQuadWriter<W>, |
||||||
|
} |
||||||
|
|
||||||
|
impl<W: Write> TripleWriter<W> { |
||||||
|
/// Writes a triple
|
||||||
|
pub fn write<'a>(&mut self, triple: impl Into<TripleRef<'a>>) -> io::Result<()> { |
||||||
|
self.writer.write_triple(triple) |
||||||
|
} |
||||||
|
|
||||||
|
/// Writes the last bytes of the file
|
||||||
|
pub fn finish(self) -> io::Result<()> { |
||||||
|
self.writer.finish()?.flush() |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// A serializer for RDF dataset serialization formats.
///
/// It currently supports the following formats:
/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`])
/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`])
///
/// ```
/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
/// use oxigraph::model::*;
///
/// let mut buffer = Vec::new();
/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
/// writer.write(&Quad {
///     subject: NamedNode::new("http://example.com/s")?.into(),
///     predicate: NamedNode::new("http://example.com/p")?,
///     object: NamedNode::new("http://example.com/o")?.into(),
///     graph_name: NamedNode::new("http://example.com/g")?.into(),
/// })?;
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")]
pub struct DatasetSerializer {
    // Everything is delegated to the non-deprecated serializer.
    inner: RdfSerializer,
}

impl DatasetSerializer {
    /// Builds a serializer for the given format
    #[inline]
    pub fn from_format(format: DatasetFormat) -> Self {
        Self {
            inner: RdfSerializer::from_format(format.into()),
        }
    }

    /// Returns a [`QuadWriter`] allowing writing quads into the given [`Write`] implementation
    pub fn quad_writer<W: Write>(self, write: W) -> QuadWriter<W> {
        QuadWriter {
            writer: self.inner.serialize_to_write(write),
        }
    }
}
||||||
|
|
||||||
|
/// Allows writing quads.
/// Could be built using a [`DatasetSerializer`].
///
/// <div class="warning">
///
/// Do not forget to run the [`finish`](QuadWriter::finish()) method to properly write the last bytes of the file.</div>
///
/// ```
/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
/// use oxigraph::model::*;
///
/// let mut buffer = Vec::new();
/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
/// writer.write(&Quad {
///     subject: NamedNode::new("http://example.com/s")?.into(),
///     predicate: NamedNode::new("http://example.com/p")?,
///     object: NamedNode::new("http://example.com/o")?.into(),
///     graph_name: NamedNode::new("http://example.com/g")?.into(),
/// })?;
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[must_use]
pub struct QuadWriter<W: Write> {
    writer: ToWriteQuadWriter<W>,
}

impl<W: Write> QuadWriter<W> {
    /// Writes a quad
    pub fn write<'a>(&mut self, quad: impl Into<QuadRef<'a>>) -> io::Result<()> {
        self.writer.write_quad(quad)
    }

    /// Writes the last bytes of the file
    pub fn finish(self) -> io::Result<()> {
        // Flush the underlying writer so buffered bytes are not lost on drop.
        self.writer.finish()?.flush()
    }
}
@ -0,0 +1,12 @@ |
|||||||
|
#![doc = include_str!("../README.md")] |
||||||
|
#![doc(test(attr(deny(warnings))))] |
||||||
|
#![doc(test(attr(allow(deprecated))))] |
||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))] |
||||||
|
#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] |
||||||
|
#![doc(html_logo_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] |
||||||
|
|
||||||
|
pub mod io; |
||||||
|
pub mod model; |
||||||
|
pub mod sparql; |
||||||
|
mod storage; |
||||||
|
pub mod store; |
@ -0,0 +1,22 @@ |
|||||||
|
//! Implements data structures for [RDF 1.1 Concepts](https://www.w3.org/TR/rdf11-concepts/) using [OxRDF](https://crates.io/crates/oxrdf).
|
||||||
|
//!
|
||||||
|
//! Usage example:
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! use oxigraph::model::*;
|
||||||
|
//!
|
||||||
|
//! let mut graph = Graph::default();
|
||||||
|
//!
|
||||||
|
//! // insertion
|
||||||
|
//! let ex = NamedNodeRef::new("http://example.com").unwrap();
|
||||||
|
//! let triple = TripleRef::new(ex, ex, ex);
|
||||||
|
//! graph.insert(triple);
|
||||||
|
//!
|
||||||
|
//! // simple filter
|
||||||
|
//! let results: Vec<_> = graph.triples_for_subject(ex).collect();
|
||||||
|
//! assert_eq!(vec![triple], results);
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
pub use oxrdf::*; |
||||||
|
|
||||||
|
pub use spargebra::term::GroundQuad; |
@ -0,0 +1,84 @@ |
|||||||
|
use crate::io::RdfParseError; |
||||||
|
use crate::model::NamedNode; |
||||||
|
use crate::sparql::results::QueryResultsParseError as ResultsParseError; |
||||||
|
use crate::sparql::SparqlSyntaxError; |
||||||
|
use crate::storage::StorageError; |
||||||
|
use std::convert::Infallible; |
||||||
|
use std::error::Error; |
||||||
|
use std::io; |
||||||
|
|
||||||
|
/// A SPARQL evaluation error.
#[derive(Debug, thiserror::Error)]
#[non_exhaustive]
pub enum EvaluationError {
    /// An error in SPARQL parsing.
    #[error(transparent)]
    Parsing(#[from] SparqlSyntaxError),
    /// An error from the storage.
    #[error(transparent)]
    Storage(#[from] StorageError),
    /// An error while parsing an external RDF file.
    #[error(transparent)]
    GraphParsing(#[from] RdfParseError),
    /// An error while parsing an external result file (likely from a federated query).
    #[error(transparent)]
    ResultsParsing(#[from] ResultsParseError),
    /// An error returned during results serialization.
    #[error(transparent)]
    ResultsSerialization(#[from] io::Error),
    /// Error during `SERVICE` evaluation.
    #[error("{0}")]
    Service(#[source] Box<dyn Error + Send + Sync + 'static>),
    /// Error when `CREATE` tries to create an already existing graph.
    #[error("The graph {0} already exists")]
    GraphAlreadyExists(NamedNode),
    /// Error when `DROP` or `CLEAR` tries to remove a graph that does not exist.
    #[error("The graph {0} does not exist")]
    GraphDoesNotExist(NamedNode),
    /// The variable storing the `SERVICE` name is unbound.
    #[error("The variable encoding the service name is unbound")]
    UnboundService,
    /// The given `SERVICE` is not supported.
    #[error("The service {0} is not supported")]
    UnsupportedService(NamedNode),
    /// The given content media type returned from an HTTP response is not supported (`SERVICE` and `LOAD`).
    #[error("The content media type {0} is not supported")]
    UnsupportedContentType(String),
    /// The `SERVICE` call did not return a set of solutions.
    #[error("The service is not returning solutions but a boolean or a graph")]
    ServiceDoesNotReturnSolutions,
    /// The query results are not an RDF graph.
    #[error("The query results are not a RDF graph")]
    NotAGraph,
}
||||||
|
|
||||||
|
impl From<Infallible> for EvaluationError {
    #[inline]
    fn from(error: Infallible) -> Self {
        // `Infallible` has no values, so this conversion can never actually run;
        // matching on the empty enum proves that to the compiler.
        match error {}
    }
}
||||||
|
|
||||||
|
impl From<EvaluationError> for io::Error {
    /// Maps each evaluation error onto an [`io::Error`], preserving the
    /// original error as the source where possible.
    #[inline]
    fn from(error: EvaluationError) -> Self {
        match error {
            // A malformed query is invalid input data.
            EvaluationError::Parsing(error) => Self::new(io::ErrorKind::InvalidData, error),
            // These variants already know how to convert themselves.
            EvaluationError::GraphParsing(error) => error.into(),
            EvaluationError::ResultsParsing(error) => error.into(),
            // Already an `io::Error`: pass it through unchanged.
            EvaluationError::ResultsSerialization(error) => error,
            EvaluationError::Storage(error) => error.into(),
            // Unwrap a boxed `io::Error` if the service produced one;
            // otherwise wrap the opaque error.
            EvaluationError::Service(error) => match error.downcast() {
                Ok(error) => *error,
                Err(error) => Self::new(io::ErrorKind::Other, error),
            },
            // All remaining variants describe invalid requests from the caller.
            EvaluationError::GraphAlreadyExists(_)
            | EvaluationError::GraphDoesNotExist(_)
            | EvaluationError::UnboundService
            | EvaluationError::UnsupportedService(_)
            | EvaluationError::UnsupportedContentType(_)
            | EvaluationError::ServiceDoesNotReturnSolutions
            | EvaluationError::NotAGraph => Self::new(io::ErrorKind::InvalidInput, error),
        }
    }
}
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,9 @@ |
|||||||
|
#[cfg(not(feature = "http-client"))] |
||||||
|
mod dummy; |
||||||
|
#[cfg(feature = "http-client")] |
||||||
|
mod simple; |
||||||
|
|
||||||
|
#[cfg(not(feature = "http-client"))] |
||||||
|
pub use dummy::Client; |
||||||
|
#[cfg(feature = "http-client")] |
||||||
|
pub use simple::Client; |
@ -0,0 +1,328 @@ |
|||||||
|
//! [SPARQL](https://www.w3.org/TR/sparql11-overview/) implementation.
|
||||||
|
//!
|
||||||
|
//! Stores execute SPARQL. See [`Store`](crate::store::Store::query()) for an example.
|
||||||
|
|
||||||
|
mod algebra; |
||||||
|
mod dataset; |
||||||
|
mod error; |
||||||
|
mod eval; |
||||||
|
mod http; |
||||||
|
mod model; |
||||||
|
pub mod results; |
||||||
|
mod service; |
||||||
|
mod update; |
||||||
|
|
||||||
|
use crate::model::{NamedNode, Term}; |
||||||
|
pub use crate::sparql::algebra::{Query, QueryDataset, Update}; |
||||||
|
use crate::sparql::dataset::DatasetView; |
||||||
|
pub use crate::sparql::error::EvaluationError; |
||||||
|
use crate::sparql::eval::{EvalNodeWithStats, SimpleEvaluator, Timer}; |
||||||
|
pub use crate::sparql::model::{QueryResults, QuerySolution, QuerySolutionIter, QueryTripleIter}; |
||||||
|
pub use crate::sparql::service::ServiceHandler; |
||||||
|
use crate::sparql::service::{EmptyServiceHandler, ErrorConversionServiceHandler}; |
||||||
|
pub(crate) use crate::sparql::update::evaluate_update; |
||||||
|
use crate::storage::StorageReader; |
||||||
|
use json_event_parser::{JsonEvent, ToWriteJsonWriter}; |
||||||
|
pub use oxrdf::{Variable, VariableNameParseError}; |
||||||
|
use oxsdatatypes::{DayTimeDuration, Float}; |
||||||
|
pub use spargebra::SparqlSyntaxError; |
||||||
|
use sparopt::algebra::GraphPattern; |
||||||
|
use sparopt::Optimizer; |
||||||
|
use std::collections::HashMap; |
||||||
|
use std::rc::Rc; |
||||||
|
use std::sync::Arc; |
||||||
|
use std::time::Duration; |
||||||
|
use std::{fmt, io}; |
||||||
|
|
||||||
|
#[allow(clippy::needless_pass_by_value)]
/// Parses (if needed), optimizes and evaluates a SPARQL query against the
/// given storage snapshot.
///
/// Returns the (possibly failed) query results together with a
/// [`QueryExplanation`] carrying parsing/planning timings and, when
/// `run_stats` is true, per-node evaluation statistics.
pub(crate) fn evaluate_query(
    reader: StorageReader,
    query: impl TryInto<Query, Error = impl Into<EvaluationError>>,
    options: QueryOptions,
    run_stats: bool,
) -> Result<(Result<QueryResults, EvaluationError>, QueryExplanation), EvaluationError> {
    let query = query.try_into().map_err(Into::into)?;
    // Snapshot of the dataset restricted to the query's FROM/FROM NAMED clauses.
    let dataset = DatasetView::new(reader, &query.dataset);
    // Planning time is measured from here up to just before evaluation starts.
    let start_planning = Timer::now();
    let (results, plan_node_with_stats, planning_duration) = match query.inner {
        spargebra::Query::Select {
            pattern, base_iri, ..
        } => {
            let mut pattern = GraphPattern::from(&pattern);
            if !options.without_optimizations {
                pattern = Optimizer::optimize_graph_pattern(pattern);
            }
            let planning_duration = start_planning.elapsed();
            let (results, explanation) = SimpleEvaluator::new(
                Rc::new(dataset),
                base_iri.map(Rc::new),
                options.service_handler(),
                Arc::new(options.custom_functions),
                run_stats,
            )
            .evaluate_select(&pattern);
            (Ok(results), explanation, planning_duration)
        }
        spargebra::Query::Ask {
            pattern, base_iri, ..
        } => {
            let mut pattern = GraphPattern::from(&pattern);
            if !options.without_optimizations {
                // ASK only needs existence, so the pattern is wrapped in
                // Reduced before optimization (duplicates don't matter).
                pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced {
                    inner: Box::new(pattern),
                });
            }
            let planning_duration = start_planning.elapsed();
            let (results, explanation) = SimpleEvaluator::new(
                Rc::new(dataset),
                base_iri.map(Rc::new),
                options.service_handler(),
                Arc::new(options.custom_functions),
                run_stats,
            )
            .evaluate_ask(&pattern);
            (results, explanation, planning_duration)
        }
        spargebra::Query::Construct {
            template,
            pattern,
            base_iri,
            ..
        } => {
            let mut pattern = GraphPattern::from(&pattern);
            if !options.without_optimizations {
                // CONSTRUCT output is a graph (a set), so duplicates in the
                // matched solutions are irrelevant: wrap in Reduced.
                pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced {
                    inner: Box::new(pattern),
                });
            }
            let planning_duration = start_planning.elapsed();
            let (results, explanation) = SimpleEvaluator::new(
                Rc::new(dataset),
                base_iri.map(Rc::new),
                options.service_handler(),
                Arc::new(options.custom_functions),
                run_stats,
            )
            .evaluate_construct(&pattern, &template);
            (Ok(results), explanation, planning_duration)
        }
        spargebra::Query::Describe {
            pattern, base_iri, ..
        } => {
            let mut pattern = GraphPattern::from(&pattern);
            if !options.without_optimizations {
                // DESCRIBE also produces a graph: duplicates are irrelevant.
                pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced {
                    inner: Box::new(pattern),
                });
            }
            let planning_duration = start_planning.elapsed();
            let (results, explanation) = SimpleEvaluator::new(
                Rc::new(dataset),
                base_iri.map(Rc::new),
                options.service_handler(),
                Arc::new(options.custom_functions),
                run_stats,
            )
            .evaluate_describe(&pattern);
            (Ok(results), explanation, planning_duration)
        }
    };
    let explanation = QueryExplanation {
        inner: plan_node_with_stats,
        with_stats: run_stats,
        parsing_duration: query.parsing_duration,
        planning_duration,
    };
    Ok((results, explanation))
}
||||||
|
|
||||||
|
/// Options for SPARQL query evaluation.
///
///
/// If the `"http-client"` optional feature is enabled,
/// a simple HTTP 1.1 client is used to execute [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/) SERVICE calls.
///
/// Usage example disabling the federated query support:
/// ```
/// use oxigraph::sparql::QueryOptions;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// store.query_opt(
///     "SELECT * WHERE { SERVICE <https://query.wikidata.org/sparql> {} }",
///     QueryOptions::default().without_service_handler(),
/// )?;
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[derive(Clone, Default)]
pub struct QueryOptions {
    // `None` means the default handler is chosen lazily (HTTP-based when the
    // "http-client" feature is enabled, empty otherwise).
    service_handler: Option<Arc<dyn ServiceHandler<Error = EvaluationError>>>,
    // Custom SPARQL functions keyed by their IRI.
    custom_functions: CustomFunctionRegistry,
    // Timeout applied to HTTP requests issued during evaluation, if any.
    http_timeout: Option<Duration>,
    // Maximum number of HTTP redirections followed per request.
    http_redirection_limit: usize,
    // When true, graph patterns are evaluated as written, skipping the optimizer.
    without_optimizations: bool,
}

// Registry of custom SPARQL functions: IRI -> evaluator returning `None` on error.
pub(crate) type CustomFunctionRegistry =
    HashMap<NamedNode, Arc<dyn (Fn(&[Term]) -> Option<Term>) + Send + Sync>>;
||||||
|
|
||||||
|
impl QueryOptions {
    /// Use a given [`ServiceHandler`] to execute [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/) SERVICE calls.
    #[inline]
    #[must_use]
    pub fn with_service_handler(mut self, service_handler: impl ServiceHandler + 'static) -> Self {
        // Wrap the handler so that its error type is converted to `EvaluationError`.
        self.service_handler = Some(Arc::new(ErrorConversionServiceHandler::wrap(
            service_handler,
        )));
        self
    }

    /// Disables the `SERVICE` calls: any `SERVICE` evaluation will fail.
    #[inline]
    #[must_use]
    pub fn without_service_handler(mut self) -> Self {
        self.service_handler = Some(Arc::new(EmptyServiceHandler));
        self
    }

    /// Sets a timeout for HTTP requests done during SPARQL evaluation.
    #[cfg(feature = "http-client")]
    #[inline]
    #[must_use]
    pub fn with_http_timeout(mut self, timeout: Duration) -> Self {
        self.http_timeout = Some(timeout);
        self
    }

    /// Sets an upper bound of the number of HTTP redirection followed per HTTP request done during SPARQL evaluation.
    ///
    /// By default this value is `0` (no redirection followed).
    #[cfg(feature = "http-client")]
    #[inline]
    #[must_use]
    pub fn with_http_redirection_limit(mut self, redirection_limit: usize) -> Self {
        self.http_redirection_limit = redirection_limit;
        self
    }

    /// Adds a custom SPARQL evaluation function.
    ///
    /// Example with a function serializing terms to N-Triples:
    /// ```
    /// use oxigraph::model::*;
    /// use oxigraph::sparql::{QueryOptions, QueryResults};
    /// use oxigraph::store::Store;
    ///
    /// let store = Store::new()?;
    ///
    /// if let QueryResults::Solutions(mut solutions) = store.query_opt(
    ///     "SELECT (<http://www.w3.org/ns/formats/N-Triples>(1) AS ?nt) WHERE {}",
    ///     QueryOptions::default().with_custom_function(
    ///         NamedNode::new("http://www.w3.org/ns/formats/N-Triples")?,
    ///         |args| args.get(0).map(|t| Literal::from(t.to_string()).into()),
    ///     ),
    /// )? {
    ///     assert_eq!(
    ///         solutions.next().unwrap()?.get("nt"),
    ///         Some(&Literal::from("\"1\"^^<http://www.w3.org/2001/XMLSchema#integer>").into())
    ///     );
    /// }
    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
    /// ```
    #[inline]
    #[must_use]
    pub fn with_custom_function(
        mut self,
        name: NamedNode,
        evaluator: impl Fn(&[Term]) -> Option<Term> + Send + Sync + 'static,
    ) -> Self {
        self.custom_functions.insert(name, Arc::new(evaluator));
        self
    }

    /// Returns the configured service handler, falling back to a default one:
    /// an HTTP-based handler when the `http-client` feature is enabled,
    /// a handler that rejects all `SERVICE` calls otherwise.
    fn service_handler(&self) -> Arc<dyn ServiceHandler<Error = EvaluationError>> {
        self.service_handler.clone().unwrap_or_else(|| {
            if cfg!(feature = "http-client") {
                Arc::new(service::SimpleServiceHandler::new(
                    self.http_timeout,
                    self.http_redirection_limit,
                ))
            } else {
                Arc::new(EmptyServiceHandler)
            }
        })
    }

    /// Disables query optimizations. Hidden: intended for testing/debugging only.
    #[doc(hidden)]
    #[inline]
    #[must_use]
    pub fn without_optimizations(mut self) -> Self {
        self.without_optimizations = true;
        self
    }
}
||||||
|
|
||||||
|
/// Options for SPARQL update evaluation.
|
||||||
|
#[derive(Clone, Default)] |
||||||
|
pub struct UpdateOptions { |
||||||
|
query_options: QueryOptions, |
||||||
|
} |
||||||
|
|
||||||
|
impl From<QueryOptions> for UpdateOptions { |
||||||
|
#[inline] |
||||||
|
fn from(query_options: QueryOptions) -> Self { |
||||||
|
Self { query_options } |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// The explanation of a query.
|
||||||
|
/// The explanation of a query.
#[derive(Clone)]
pub struct QueryExplanation {
    /// Root of the evaluation plan tree, with optional per-node statistics.
    inner: Rc<EvalNodeWithStats>,
    /// Whether per-node statistics were collected and should be emitted.
    with_stats: bool,
    /// Time spent parsing the query, if measured.
    parsing_duration: Option<DayTimeDuration>,
    /// Time spent planning/optimizing the query, if measured.
    planning_duration: Option<DayTimeDuration>,
}
||||||
|
|
||||||
|
impl QueryExplanation {
    /// Writes the explanation as JSON.
    ///
    /// The object contains optional `"parsing duration in seconds"` and
    /// `"planning duration in seconds"` keys followed by the `"plan"` tree.
    pub fn write_in_json(&self, write: impl io::Write) -> io::Result<()> {
        let mut writer = ToWriteJsonWriter::new(write);
        writer.write_event(JsonEvent::StartObject)?;
        if let Some(parsing_duration) = self.parsing_duration {
            writer.write_event(JsonEvent::ObjectKey("parsing duration in seconds".into()))?;
            writer.write_event(JsonEvent::Number(
                parsing_duration.as_seconds().to_string().into(),
            ))?;
        }
        if let Some(planning_duration) = self.planning_duration {
            writer.write_event(JsonEvent::ObjectKey("planning duration in seconds".into()))?;
            writer.write_event(JsonEvent::Number(
                planning_duration.as_seconds().to_string().into(),
            ))?;
        }
        writer.write_event(JsonEvent::ObjectKey("plan".into()))?;
        // Delegate the plan subtree; `with_stats` controls whether counters are included.
        self.inner.json_node(&mut writer, self.with_stats)?;
        writer.write_event(JsonEvent::EndObject)
    }
}
||||||
|
|
||||||
|
impl fmt::Debug for QueryExplanation { |
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
||||||
|
let mut obj = f.debug_struct("QueryExplanation"); |
||||||
|
if let Some(parsing_duration) = self.parsing_duration { |
||||||
|
obj.field( |
||||||
|
"parsing duration in seconds", |
||||||
|
&f32::from(Float::from(parsing_duration.as_seconds())), |
||||||
|
); |
||||||
|
} |
||||||
|
if let Some(planning_duration) = self.planning_duration { |
||||||
|
obj.field( |
||||||
|
"planning duration in seconds", |
||||||
|
&f32::from(Float::from(planning_duration.as_seconds())), |
||||||
|
); |
||||||
|
} |
||||||
|
obj.field("tree", &self.inner); |
||||||
|
obj.finish_non_exhaustive() |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,371 @@ |
|||||||
|
use crate::io::{RdfFormat, RdfSerializer}; |
||||||
|
use crate::model::*; |
||||||
|
use crate::sparql::error::EvaluationError; |
||||||
|
use crate::sparql::results::{ |
||||||
|
FromReadQueryResultsReader, FromReadSolutionsReader, QueryResultsFormat, |
||||||
|
QueryResultsParseError, QueryResultsParser, QueryResultsSerializer, |
||||||
|
}; |
||||||
|
pub use sparesults::QuerySolution; |
||||||
|
use std::io::{Read, Write}; |
||||||
|
use std::sync::Arc; |
||||||
|
|
||||||
|
/// Results of a [SPARQL query](https://www.w3.org/TR/sparql11-query/).
pub enum QueryResults {
    /// Results of a [SELECT](https://www.w3.org/TR/sparql11-query/#select) query.
    Solutions(QuerySolutionIter),
    /// Result of an [ASK](https://www.w3.org/TR/sparql11-query/#ask) query.
    Boolean(bool),
    /// Results of a [CONSTRUCT](https://www.w3.org/TR/sparql11-query/#construct) or [DESCRIBE](https://www.w3.org/TR/sparql11-query/#describe) query.
    Graph(QueryTripleIter),
}
||||||
|
|
||||||
|
impl QueryResults {
    /// Reads a SPARQL query results serialization.
    ///
    /// Only solutions and boolean results are supported (a graph cannot be
    /// produced by this parser, see [`FromReadQueryResultsReader`]).
    pub fn read(
        read: impl Read + 'static,
        format: QueryResultsFormat,
    ) -> Result<Self, QueryResultsParseError> {
        Ok(QueryResultsParser::from_format(format)
            .parse_read(read)?
            .into())
    }

    /// Writes the query results (solutions or boolean).
    ///
    /// This method fails if it is called on the `Graph` results.
    ///
    /// ```
    /// use oxigraph::store::Store;
    /// use oxigraph::model::*;
    /// use oxigraph::sparql::results::QueryResultsFormat;
    ///
    /// let store = Store::new()?;
    /// let ex = NamedNodeRef::new("http://example.com")?;
    /// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
    ///
    /// let results = store.query("SELECT ?s WHERE { ?s ?p ?o }")?;
    /// assert_eq!(
    ///     results.write(Vec::new(), QueryResultsFormat::Json)?,
    ///     r#"{"head":{"vars":["s"]},"results":{"bindings":[{"s":{"type":"uri","value":"http://example.com"}}]}}"#.as_bytes()
    /// );
    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
    /// ```
    pub fn write<W: Write>(
        self,
        write: W,
        format: QueryResultsFormat,
    ) -> Result<W, EvaluationError> {
        let serializer = QueryResultsSerializer::from_format(format);
        match self {
            Self::Boolean(value) => serializer.serialize_boolean_to_write(write, value),
            Self::Solutions(solutions) => {
                let mut writer = serializer
                    .serialize_solutions_to_write(write, solutions.variables().to_vec())
                    .map_err(EvaluationError::ResultsSerialization)?;
                for solution in solutions {
                    writer
                        .write(&solution?)
                        .map_err(EvaluationError::ResultsSerialization)?;
                }
                writer.finish()
            }
            Self::Graph(triples) => {
                // Graph results are encoded as solutions over the fixed
                // ?subject/?predicate/?object variables, one row per triple.
                let s = VariableRef::new_unchecked("subject");
                let p = VariableRef::new_unchecked("predicate");
                let o = VariableRef::new_unchecked("object");
                let mut writer = serializer
                    .serialize_solutions_to_write(
                        write,
                        vec![s.into_owned(), p.into_owned(), o.into_owned()],
                    )
                    .map_err(EvaluationError::ResultsSerialization)?;
                for triple in triples {
                    let triple = triple?;
                    writer
                        .write([
                            (s, &triple.subject.into()),
                            (p, &triple.predicate.into()),
                            (o, &triple.object),
                        ])
                        .map_err(EvaluationError::ResultsSerialization)?;
                }
                writer.finish()
            }
        }
        // `finish()`/`serialize_boolean_to_write` return io-level errors; wrap once here.
        .map_err(EvaluationError::ResultsSerialization)
    }

    /// Writes the graph query results.
    ///
    /// This method fails if it is called on the `Solution` or `Boolean` results.
    ///
    /// ```
    /// use oxigraph::io::RdfFormat;
    /// use oxigraph::model::*;
    /// use oxigraph::store::Store;
    ///
    /// let graph = "<http://example.com> <http://example.com> <http://example.com> .\n";
    ///
    /// let store = Store::new()?;
    /// store.load_graph(
    ///     graph.as_bytes(),
    ///     RdfFormat::NTriples,
    ///     GraphName::DefaultGraph,
    ///     None,
    /// )?;
    ///
    /// let results = store.query("CONSTRUCT WHERE { ?s ?p ?o }")?;
    /// assert_eq!(
    ///     results.write_graph(Vec::new(), RdfFormat::NTriples)?,
    ///     graph.as_bytes()
    /// );
    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
    /// ```
    pub fn write_graph<W: Write>(
        self,
        write: W,
        format: impl Into<RdfFormat>,
    ) -> Result<W, EvaluationError> {
        if let Self::Graph(triples) = self {
            let mut writer = RdfSerializer::from_format(format.into()).serialize_to_write(write);
            for triple in triples {
                writer
                    .write_triple(&triple?)
                    .map_err(EvaluationError::ResultsSerialization)?;
            }
            writer
                .finish()
                .map_err(EvaluationError::ResultsSerialization)
        } else {
            Err(EvaluationError::NotAGraph)
        }
    }
}
||||||
|
|
||||||
|
impl From<QuerySolutionIter> for QueryResults { |
||||||
|
#[inline] |
||||||
|
fn from(value: QuerySolutionIter) -> Self { |
||||||
|
Self::Solutions(value) |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read + 'static> From<FromReadQueryResultsReader<R>> for QueryResults { |
||||||
|
fn from(reader: FromReadQueryResultsReader<R>) -> Self { |
||||||
|
match reader { |
||||||
|
FromReadQueryResultsReader::Solutions(s) => Self::Solutions(s.into()), |
||||||
|
FromReadQueryResultsReader::Boolean(v) => Self::Boolean(v), |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// An iterator over [`QuerySolution`]s.
///
/// ```
/// use oxigraph::sparql::QueryResults;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
///     for solution in solutions {
///         println!("{:?}", solution?.get("s"));
///     }
/// }
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub struct QuerySolutionIter {
    /// Variable list shared (via `Arc`) with every yielded solution.
    variables: Arc<[Variable]>,
    /// Underlying lazy stream of solutions (or evaluation errors).
    iter: Box<dyn Iterator<Item = Result<QuerySolution, EvaluationError>>>,
}
||||||
|
|
||||||
|
impl QuerySolutionIter { |
||||||
|
/// Construct a new iterator of solution from an ordered list of solution variables and an iterator of solution tuples
|
||||||
|
/// (each tuple using the same ordering as the variable list such that tuple element 0 is the value for the variable 0...)
|
||||||
|
pub fn new( |
||||||
|
variables: Arc<[Variable]>, |
||||||
|
iter: impl Iterator<Item = Result<Vec<Option<Term>>, EvaluationError>> + 'static, |
||||||
|
) -> Self { |
||||||
|
Self { |
||||||
|
variables: Arc::clone(&variables), |
||||||
|
iter: Box::new( |
||||||
|
iter.map(move |t| t.map(|values| (Arc::clone(&variables), values).into())), |
||||||
|
), |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/// The variables used in the solutions.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// use oxigraph::sparql::{QueryResults, Variable};
|
||||||
|
/// use oxigraph::store::Store;
|
||||||
|
///
|
||||||
|
/// let store = Store::new()?;
|
||||||
|
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }")? {
|
||||||
|
/// assert_eq!(
|
||||||
|
/// solutions.variables(),
|
||||||
|
/// &[Variable::new("s")?, Variable::new("o")?]
|
||||||
|
/// );
|
||||||
|
/// }
|
||||||
|
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||||
|
/// ```
|
||||||
|
#[inline] |
||||||
|
pub fn variables(&self) -> &[Variable] { |
||||||
|
&self.variables |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
impl<R: Read + 'static> From<FromReadSolutionsReader<R>> for QuerySolutionIter {
    /// Adapts a parsed solutions reader, converting its parse errors into
    /// [`EvaluationError`]s.
    fn from(reader: FromReadSolutionsReader<R>) -> Self {
        Self {
            variables: reader.variables().into(),
            iter: Box::new(reader.map(|t| t.map_err(EvaluationError::from))),
        }
    }
}

impl Iterator for QuerySolutionIter {
    type Item = Result<QuerySolution, EvaluationError>;

    // Pure delegation to the boxed inner iterator.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
||||||
|
|
||||||
|
/// An iterator over the triples that compose a graph solution.
///
/// ```
/// use oxigraph::sparql::QueryResults;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// if let QueryResults::Graph(triples) = store.query("CONSTRUCT WHERE { ?s ?p ?o }")? {
///     for triple in triples {
///         println!("{}", triple?);
///     }
/// }
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub struct QueryTripleIter {
    /// Underlying lazy stream of triples (or evaluation errors).
    pub(crate) iter: Box<dyn Iterator<Item = Result<Triple, EvaluationError>>>,
}

impl Iterator for QueryTripleIter {
    type Item = Result<Triple, EvaluationError>;

    // Pure delegation to the boxed inner iterator.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    // Forwarded so the inner iterator's (possibly specialized) fold is used.
    #[inline]
    fn fold<Acc, G>(self, init: Acc, g: G) -> Acc
    where
        G: FnMut(Acc, Self::Item) -> Acc,
    {
        self.iter.fold(init, g)
    }
}
||||||
|
|
||||||
|
#[cfg(test)]
#[allow(clippy::panic_in_result_fn)]
mod tests {
    use super::*;
    use std::io::Cursor;

    // Round-trip check: serializing, re-parsing and re-serializing results must
    // be byte-identical for every self-describing format (CSV is lossy, so it
    // is intentionally not covered here).
    #[test]
    fn test_serialization_roundtrip() -> Result<(), EvaluationError> {
        use std::str;

        for format in [
            QueryResultsFormat::Json,
            QueryResultsFormat::Xml,
            QueryResultsFormat::Tsv,
        ] {
            // Fixtures cover booleans plus solutions with unbound values, IRIs,
            // blank nodes, plain/tagged/typed literals and a quoted triple.
            let results = vec![
                QueryResults::Boolean(true),
                QueryResults::Boolean(false),
                QueryResults::Solutions(QuerySolutionIter::new(
                    [
                        Variable::new_unchecked("foo"),
                        Variable::new_unchecked("bar"),
                    ]
                    .as_ref()
                    .into(),
                    Box::new(
                        vec![
                            Ok(vec![None, None]),
                            Ok(vec![
                                Some(NamedNode::new_unchecked("http://example.com").into()),
                                None,
                            ]),
                            Ok(vec![
                                None,
                                Some(NamedNode::new_unchecked("http://example.com").into()),
                            ]),
                            Ok(vec![
                                Some(BlankNode::new_unchecked("foo").into()),
                                Some(BlankNode::new_unchecked("bar").into()),
                            ]),
                            Ok(vec![Some(Literal::new_simple_literal("foo").into()), None]),
                            Ok(vec![
                                Some(
                                    Literal::new_language_tagged_literal_unchecked("foo", "fr")
                                        .into(),
                                ),
                                None,
                            ]),
                            Ok(vec![
                                Some(Literal::from(1).into()),
                                Some(Literal::from(true).into()),
                            ]),
                            Ok(vec![
                                Some(Literal::from(1.33).into()),
                                Some(Literal::from(false).into()),
                            ]),
                            Ok(vec![
                                Some(
                                    Triple::new(
                                        NamedNode::new_unchecked("http://example.com/s"),
                                        NamedNode::new_unchecked("http://example.com/p"),
                                        Triple::new(
                                            NamedNode::new_unchecked("http://example.com/os"),
                                            NamedNode::new_unchecked("http://example.com/op"),
                                            NamedNode::new_unchecked("http://example.com/oo"),
                                        ),
                                    )
                                    .into(),
                                ),
                                None,
                            ]),
                        ]
                        .into_iter(),
                    ),
                )),
            ];

            for ex in results {
                // write -> read -> write again, then compare the two serializations.
                let mut buffer = Vec::new();
                ex.write(&mut buffer, format)?;
                let ex2 = QueryResults::read(Cursor::new(buffer.clone()), format)?;
                let mut buffer2 = Vec::new();
                ex2.write(&mut buffer2, format)?;
                assert_eq!(
                    str::from_utf8(&buffer).unwrap(),
                    str::from_utf8(&buffer2).unwrap()
                );
            }
        }

        Ok(())
    }
}
@ -0,0 +1,44 @@ |
|||||||
|
//! Utilities to read and write RDF results formats using [sparesults](https://crates.io/crates/sparesults).
|
||||||
|
//!
|
||||||
|
//! It supports [SPARQL Query Results XML Format (Second Edition)](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/).
|
||||||
|
//!
|
||||||
|
//! Usage example converting a JSON result file into a TSV result file:
|
||||||
|
//!
|
||||||
|
//! ```
|
||||||
|
//! use oxigraph::sparql::results::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader, QueryResultsSerializer};
|
||||||
|
//! use std::io::Result;
|
||||||
|
//!
|
||||||
|
//! fn convert_json_to_tsv(json_file: &[u8]) -> Result<Vec<u8>> {
|
||||||
|
//! let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json);
|
||||||
|
//! let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
|
||||||
|
//! // We start to read the JSON file and see which kind of results it is
|
||||||
|
//! match json_parser.parse_read(json_file)? {
|
||||||
|
//! FromReadQueryResultsReader::Boolean(value) => {
|
||||||
|
//! // it's a boolean result, we copy it in TSV to the output buffer
|
||||||
|
//! tsv_serializer.serialize_boolean_to_write(Vec::new(), value)
|
||||||
|
//! }
|
||||||
|
//! FromReadQueryResultsReader::Solutions(solutions_reader) => {
|
||||||
|
//! // it's a set of solutions, we create a writer and we write to it while reading in streaming from the JSON file
|
||||||
|
//! let mut serialize_solutions_to_write = tsv_serializer.serialize_solutions_to_write(Vec::new(), solutions_reader.variables().to_vec())?;
|
||||||
|
//! for solution in solutions_reader {
|
||||||
|
//! serialize_solutions_to_write.write(&solution?)?;
|
||||||
|
//! }
|
||||||
|
//! serialize_solutions_to_write.finish()
|
||||||
|
//! }
|
||||||
|
//! }
|
||||||
|
//! }
|
||||||
|
//!
|
||||||
|
//! // Let's test with a boolean
|
||||||
|
//! assert_eq!(
|
||||||
|
//! convert_json_to_tsv(br#"{"boolean":true}"#.as_slice()).unwrap(),
|
||||||
|
//! b"true"
|
||||||
|
//! );
|
||||||
|
//!
|
||||||
|
//! // And with a set of solutions
|
||||||
|
//! assert_eq!(
|
||||||
|
//! convert_json_to_tsv(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice()).unwrap(),
|
||||||
|
//! b"?foo\t?bar\n\"test\"\t\n"
|
||||||
|
//! );
|
||||||
|
//! ```
|
||||||
|
|
||||||
|
// Re-export the whole `sparesults` crate API under this module.
pub use sparesults::*;
@ -0,0 +1,12 @@ |
|||||||
|
//! A storage backend
|
||||||
|
//! RocksDB is available, if not in memory
|
||||||
|
|
||||||
|
#[cfg(any(target_family = "wasm"))] |
||||||
|
pub use fallback::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||||
|
#[cfg(all(not(target_family = "wasm")))] |
||||||
|
pub use oxi_rocksdb::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||||
|
|
||||||
|
#[cfg(any(target_family = "wasm"))] |
||||||
|
mod fallback; |
||||||
|
#[cfg(all(not(target_family = "wasm")))] |
||||||
|
mod oxi_rocksdb; |
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,139 @@ |
|||||||
|
use crate::io::{RdfFormat, RdfParseError}; |
||||||
|
use crate::storage::numeric_encoder::EncodedTerm; |
||||||
|
use oxiri::IriParseError; |
||||||
|
use oxrdf::TermRef; |
||||||
|
use std::error::Error; |
||||||
|
use std::io; |
||||||
|
|
||||||
|
/// An error related to storage operations (reads, writes...).
#[derive(Debug, thiserror::Error)]
#[non_exhaustive]
pub enum StorageError {
    /// Error from the OS I/O layer.
    #[error(transparent)]
    Io(#[from] io::Error),
    /// Error related to data corruption.
    #[error(transparent)]
    Corruption(#[from] CorruptionError),
    /// Catch-all for backend-specific errors; hidden from the public API surface.
    #[doc(hidden)]
    #[error("{0}")]
    Other(#[source] Box<dyn Error + Send + Sync + 'static>),
}

impl From<StorageError> for io::Error {
    /// Converts back to `io::Error`, unwrapping the `Io` variant losslessly.
    #[inline]
    fn from(error: StorageError) -> Self {
        match error {
            StorageError::Io(error) => error,
            StorageError::Corruption(error) => error.into(),
            StorageError::Other(error) => Self::new(io::ErrorKind::Other, error),
        }
    }
}
||||||
|
|
||||||
|
/// An error returned if some content in the database is corrupted.
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct CorruptionError(#[from] CorruptionErrorKind);

/// Private payload of [`CorruptionError`]: either a plain message or a wrapped source error.
#[derive(Debug, thiserror::Error)]
enum CorruptionErrorKind {
    #[error("{0}")]
    Msg(String),
    #[error("{0}")]
    Other(#[source] Box<dyn Error + Send + Sync + 'static>),
}

impl CorruptionError {
    /// Builds a corruption error wrapping another error as its source.
    #[inline]
    pub(crate) fn new(error: impl Into<Box<dyn Error + Send + Sync + 'static>>) -> Self {
        Self(CorruptionErrorKind::Other(error.into()))
    }

    /// Builds an error for a term whose stored encoding does not match the term.
    #[inline]
    pub(crate) fn from_encoded_term(encoded: &EncodedTerm, term: &TermRef<'_>) -> Self {
        // TODO: eventually use a dedicated error enum value
        Self::msg(format!("Invalid term encoding {encoded:?} for {term}"))
    }

    /// Builds an error for a column family missing from the database.
    #[inline]
    pub(crate) fn from_missing_column_family_name(name: &'static str) -> Self {
        // TODO: eventually use a dedicated error enum value
        Self::msg(format!("Column family {name} does not exist"))
    }

    /// Builds an error from a printable error message.
    #[inline]
    pub(crate) fn msg(msg: impl Into<String>) -> Self {
        Self(CorruptionErrorKind::Msg(msg.into()))
    }
}

impl From<CorruptionError> for io::Error {
    /// Corruption maps to `InvalidData` on the `io::Error` side.
    #[inline]
    fn from(error: CorruptionError) -> Self {
        Self::new(io::ErrorKind::InvalidData, error)
    }
}
||||||
|
|
||||||
|
/// An error raised while loading a file into a [`Store`](crate::store::Store).
#[derive(Debug, thiserror::Error)]
pub enum LoaderError {
    /// An error raised while reading the file.
    #[error(transparent)]
    Parsing(#[from] RdfParseError),
    /// An error raised during the insertion in the store.
    #[error(transparent)]
    Storage(#[from] StorageError),
    /// The base IRI is invalid.
    #[error("Invalid base IRI '{iri}': {error}")]
    InvalidBaseIri {
        /// The IRI itself.
        iri: String,
        /// The parsing error.
        #[source]
        error: IriParseError,
    },
}

impl From<LoaderError> for io::Error {
    /// Converts to `io::Error`, delegating to the inner errors' conversions;
    /// an invalid base IRI maps to `InvalidInput`.
    #[inline]
    fn from(error: LoaderError) -> Self {
        match error {
            LoaderError::Storage(error) => error.into(),
            LoaderError::Parsing(error) => error.into(),
            LoaderError::InvalidBaseIri { .. } => {
                Self::new(io::ErrorKind::InvalidInput, error.to_string())
            }
        }
    }
}
||||||
|
|
||||||
|
/// An error raised while writing a file from a [`Store`](crate::store::Store).
#[derive(Debug, thiserror::Error)]
pub enum SerializerError {
    /// An error raised while writing the content.
    #[error(transparent)]
    Io(#[from] io::Error),
    /// An error raised during the lookup in the store.
    #[error(transparent)]
    Storage(#[from] StorageError),
    /// A format compatible with [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset) is required.
    #[error("A RDF format supporting datasets was expected, {0} found")]
    DatasetFormatExpected(RdfFormat),
}

impl From<SerializerError> for io::Error {
    /// Converts to `io::Error`, unwrapping `Io` losslessly; a graph-only
    /// format where a dataset format was needed maps to `InvalidInput`.
    #[inline]
    fn from(error: SerializerError) -> Self {
        match error {
            SerializerError::Storage(error) => error.into(),
            SerializerError::Io(error) => error,
            SerializerError::DatasetFormatExpected(_) => {
                Self::new(io::ErrorKind::InvalidInput, error.to_string())
            }
        }
    }
}
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue