Compare commits
619 Commits
The comparison spans 619 commits, from `325b2c8f58` (3 years ago) up to `c7f873f904` (1 year ago).
@@ -0,0 +1,4 @@
FROM gcr.io/oss-fuzz-base/base-builder-rust:v1
COPY . $SRC/oxigraph
WORKDIR oxigraph
COPY .clusterfuzzlite/build.sh $SRC/
@@ -0,0 +1,30 @@
#!/bin/bash -eu
shopt -s globstar

function build_seed_corpus() {
  mkdir "/tmp/oxigraph_$1"
  for file in **/*."$2"
  do
    hash=$(sha256sum "$file" | awk '{print $1;}')
    cp "$file" "/tmp/oxigraph_$1/$hash"
  done
  zip "$1_seed_corpus.zip" /tmp/"oxigraph_$1"/*
  rm -r "/tmp/oxigraph_$1"
}


cd "$SRC"/oxigraph
git submodule init
git submodule update
cargo fuzz build -O --debug-assertions
for TARGET in sparql_eval sparql_results_json sparql_results_tsv sparql_results_xml n3 nquads trig rdf_xml
do
  cp fuzz/target/x86_64-unknown-linux-gnu/release/$TARGET "$OUT"/
done
build_seed_corpus sparql_results_json srj
build_seed_corpus sparql_results_tsv tsv
build_seed_corpus sparql_results_xml srx
build_seed_corpus n3 n3
build_seed_corpus nquads nq
build_seed_corpus trig trig
build_seed_corpus rdf_xml rdf
@@ -0,0 +1 @@
language: rust
@@ -0,0 +1,16 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: weekly
  - package-ecosystem: "pip"
    directory: "/python/"
    versioning-strategy: increase-if-necessary
    schedule:
      interval: weekly
  - package-ecosystem: "npm"
    directory: "/js/"
    versioning-strategy: increase-if-necessary
    schedule:
      interval: weekly
@@ -0,0 +1,16 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Which version of Oxigraph are you using? On which platform?
2. A command-line or a code snippet that triggers the bug.
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Please link to other systems implementing the feature, specification of it if it exists and/or existing documentation about this feature.
@@ -0,0 +1,10 @@
---
name: Question
about: Please don't use issues but the Q&A section of the "discussions" space
title: ''
labels: question
assignees: ''

---


@@ -0,0 +1,27 @@
name: 'Setup Rust'
description: 'Setup Rust using Rustup'
inputs:
  version:
    description: 'Rust version to use. By default latest stable version'
    required: false
    default: 'stable'
  component:
    description: 'Rust extra component to install like clippy'
    required: false
  target:
    description: 'Rust extra target to install like wasm32-unknown-unknown'
    required: false
runs:
  using: "composite"
  steps:
    - run: rustup update
      shell: bash
    - run: rustup default ${{ inputs.version }}
      shell: bash
    - run: rustup component add ${{ inputs.component }}
      shell: bash
      if: ${{ inputs.component }}
    - run: rustup target add ${{ inputs.target }}
      shell: bash
      if: ${{ inputs.target }}
    - uses: Swatinem/rust-cache@v2
@@ -0,0 +1,11 @@
if [ -f "rocksdb" ]
then
  cd rocksdb || exit
else
  git clone https://github.com/facebook/rocksdb.git
  cd rocksdb || exit
  git checkout v8.0.0
  make shared_lib
fi
sudo make install-shared
sudo ldconfig /usr/local/lib
@@ -0,0 +1,19 @@
cd /workdir
apk add clang-dev
curl https://static.rust-lang.org/rustup/dist/%arch%-unknown-linux-musl/rustup-init --output rustup-init
chmod +x rustup-init
./rustup-init -y --profile minimal
source "$HOME/.cargo/env"
export PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin:/opt/python/cp39-cp39/bin:/opt/python/cp310-cp310/bin:/opt/python/cp311-cp311/bin"
cd python
python3.12 -m venv venv
source venv/bin/activate
pip install -r requirements.dev.txt
maturin develop --release
python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
maturin build --release --features abi3 --compatibility musllinux_1_2
if [ %for_each_version% ]; then
  for VERSION in 8 9 10 11 12; do
    maturin build --release --interpreter "python3.$VERSION" --compatibility musllinux_1_2
  done
fi
@ -1,286 +0,0 @@ |
||||
name: Release artifacts |
||||
|
||||
on: |
||||
release: |
||||
types: [ published ] |
||||
|
||||
jobs: |
||||
push_server_to_docker_registry: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- uses: docker/setup-buildx-action@v1 |
||||
- uses: docker/metadata-action@v3 |
||||
id: docker_meta |
||||
with: |
||||
images: | |
||||
${{ github.repository }} |
||||
ghcr.io/${{ github.repository }} |
||||
- uses: docker/login-action@v1 |
||||
with: |
||||
username: ${{ secrets.DOCKER_USERNAME }} |
||||
password: ${{ secrets.DOCKER_PASSWORD }} |
||||
- uses: docker/login-action@v1 |
||||
with: |
||||
registry: ghcr.io |
||||
username: ${{github.actor}} |
||||
password: ${{secrets.GITHUB_TOKEN}} |
||||
- uses: docker/build-push-action@v2 |
||||
with: |
||||
context: . |
||||
file: server/Dockerfile |
||||
pull: true |
||||
push: true |
||||
tags: ${{ steps.docker_meta.outputs.tags }} |
||||
labels: ${{ steps.docker_meta.outputs.labels }} |
||||
cache-from: type=gha |
||||
cache-to: type=gha,mode=max |
||||
|
||||
publish_crates: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- run: rustup update |
||||
- run: cargo login $CRATES_IO_TOKEN |
||||
env: |
||||
CRATES_IO_TOKEN: ${{ secrets.CRATES_IO_TOKEN }} |
||||
- run: cargo publish |
||||
working-directory: ./oxrocksdb-sys |
||||
- run: cargo publish |
||||
working-directory: ./lib/oxrdf |
||||
- run: sleep 60 |
||||
- run: cargo publish |
||||
working-directory: ./lib/sparesults |
||||
- run: cargo publish |
||||
working-directory: ./lib/spargebra |
||||
- run: sleep 60 |
||||
- run: cargo publish |
||||
working-directory: ./lib |
||||
- run: sleep 60 |
||||
- run: cargo publish |
||||
working-directory: ./server |
||||
|
||||
publish_pypi_linux: |
||||
runs-on: ubuntu-latest |
||||
strategy: |
||||
matrix: |
||||
architecture: [ "x86_64", "aarch64" ] |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- uses: docker/setup-qemu-action@v1 |
||||
with: |
||||
platforms: linux/${{ matrix.architecture }} |
||||
if: matrix.architecture != 'x86_64' |
||||
- run: sed 's/%arch%/${{ matrix.architecture }}/g' .github/workflows/manylinux_build.sh > .github/workflows/manylinux_build_script.sh |
||||
- run: docker run -v "$(pwd)":/workdir --platform linux/${{ matrix.architecture }} quay.io/pypa/manylinux2014_${{ matrix.architecture }} /bin/bash /workdir/.github/workflows/manylinux_build_script.sh |
||||
- uses: pypa/gh-action-pypi-publish@release/v1 |
||||
with: |
||||
user: __token__ |
||||
password: ${{ secrets.PYPI_PASSWORD }} |
||||
packages_dir: target/wheels |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: target/wheels/* |
||||
|
||||
publish_pypi_mac: |
||||
runs-on: macos-latest |
||||
env: |
||||
DEVELOPER_DIR: '/Applications/Xcode.app/Contents/Developer' |
||||
SDKROOT: '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk' |
||||
MACOSX_DEPLOYMENT_TARGET: '10.14' |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- uses: actions/setup-python@v2 |
||||
with: |
||||
python-version: 3.7 |
||||
- run: rustup update && rustup target add aarch64-apple-darwin |
||||
- run: pip install maturin |
||||
- run: maturin publish --no-sdist --universal2 -m python/Cargo.toml -u __token__ -p ${{ secrets.PYPI_PASSWORD }} |
||||
- run: maturin publish --no-sdist -m python/Cargo.toml -u __token__ -p ${{ secrets.PYPI_PASSWORD }} |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: target/wheels/* |
||||
|
||||
publish_pypi_windows: |
||||
runs-on: windows-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- uses: actions/setup-python@v2 |
||||
with: |
||||
python-version: 3.7 |
||||
- run: rustup update |
||||
- run: pip install maturin |
||||
- run: Remove-Item -LiteralPath "C:\msys64\" -Force -Recurse |
||||
- run: maturin publish --no-sdist -m python/Cargo.toml -u __token__ -p ${{ secrets.PYPI_PASSWORD }} |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: target/wheels/* |
||||
|
||||
publish_pypi_stdist: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- run: pip install maturin |
||||
- run: maturin sdist -m python/Cargo.toml |
||||
- uses: pypa/gh-action-pypi-publish@release/v1 |
||||
with: |
||||
user: __token__ |
||||
password: ${{ secrets.PYPI_PASSWORD }} |
||||
packages_dir: target/wheels |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: target/wheels/* |
||||
|
||||
publish_npm: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- uses: actions/setup-node@v2 |
||||
with: |
||||
node-version: 14 |
||||
registry-url: https://registry.npmjs.org |
||||
- run: rustup update |
||||
- run: cargo install wasm-pack |
||||
- run: npm install |
||||
working-directory: ./js |
||||
- run: npm run release |
||||
working-directory: ./js |
||||
env: |
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} |
||||
- run: npm run pack |
||||
working-directory: ./js |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: js/*.tgz |
||||
|
||||
publish_python_doc: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- run: rustup update |
||||
- uses: actions/setup-python@v2 |
||||
with: |
||||
python-version: 3.8 |
||||
- run: python -m venv python/venv |
||||
- run: source python/venv/bin/activate && pip install --upgrade maturin sphinx |
||||
- run: source venv/bin/activate && maturin develop |
||||
working-directory: ./python |
||||
- run: source ../venv/bin/activate && sphinx-build -M doctest . build |
||||
working-directory: ./python/docs |
||||
- run: source ../venv/bin/activate && sphinx-build -M html . build |
||||
working-directory: ./python/docs |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
repository: oxigraph/oxigraph.github.io |
||||
path: website |
||||
token: ${{ secrets.FULL_ACCESS_TOKEN }} |
||||
- run: echo "RELEASE_VERSION=$(echo ${GITHUB_REF:10})" >> $GITHUB_ENV |
||||
- run: rm -rf ./website/pyoxigraph/stable && mkdir -p ./website/pyoxigraph/stable && cp -r ./python/docs/build/html/* ./website/pyoxigraph/stable/ |
||||
if: "!contains('-', github.event.release.tag_name)" |
||||
- run: mkdir -p ./website/pyoxigraph/$RELEASE_VERSION && cp -r ./python/docs/build/html/* ./website/pyoxigraph/$RELEASE_VERSION/ |
||||
- run: | |
||||
git config user.name github-actions |
||||
git config user.email github-actions@github.com |
||||
git add . |
||||
git diff-index --quiet HEAD || git commit -m "Updates pyoxigraph documentation" |
||||
git push |
||||
working-directory: ./website |
||||
|
||||
publish_full_archive: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- run: | |
||||
zip -r oxigraph_${{ github.event.release.tag_name }}.zip . |
||||
tar -czf /tmp/oxigraph_${{ github.event.release.tag_name }}.tar.gz . |
||||
mv /tmp/oxigraph_${{ github.event.release.tag_name }}.tar.gz . |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: | |
||||
oxigraph_${{ github.event.release.tag_name }}.zip |
||||
oxigraph_${{ github.event.release.tag_name }}.tar.gz |
||||
|
||||
publish_homebrew: |
||||
if: "!contains('-', github.event.release.tag_name)" |
||||
runs-on: ubuntu-latest |
||||
needs: publish_full_archive |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
repository: oxigraph/homebrew-oxigraph |
||||
token: ${{ secrets.FULL_ACCESS_TOKEN }} |
||||
- run: | |
||||
wget "https://github.com/oxigraph/oxigraph/releases/download/${{ github.event.release.tag_name }}/oxigraph_${{ github.event.release.tag_name }}.tar.gz" |
||||
SHA=`shasum -a 256 "oxigraph_${{ github.event.release.tag_name }}.tar.gz" | awk '{ print $1 }'` |
||||
rm "oxigraph_${{ github.event.release.tag_name }}.tar.gz" |
||||
sed -i "s/download\/.*\.tar/download\/${{ github.event.release.tag_name }}\/oxigraph_${{ github.event.release.tag_name }}.tar/g" Formula/oxigraph.rb |
||||
sed -i "s/sha256 \".*\"/sha256 \"$SHA\"/g" Formula/oxigraph.rb |
||||
git config user.name github-actions |
||||
git config user.email github-actions@github.com |
||||
git add . |
||||
git diff-index --quiet HEAD || git commit -m "Upgrades to ${{ github.event.release.tag_name }}" |
||||
git push |
||||
|
||||
publish_binary_linux: |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- run: cargo build --release |
||||
working-directory: ./server |
||||
- run: mv target/release/oxigraph_server oxigraph_server_${{ github.event.release.tag_name }}_x86_64_linux_gnu |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: oxigraph_server_${{ github.event.release.tag_name }}_x86_64_linux_gnu |
||||
|
||||
publish_binary_mac: |
||||
runs-on: macos-latest |
||||
env: |
||||
DEVELOPER_DIR: '/Applications/Xcode.app/Contents/Developer' |
||||
SDKROOT: '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk' |
||||
MACOSX_DEPLOYMENT_TARGET: '10.14' |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- run: rustup update |
||||
- run: cargo build --release |
||||
working-directory: ./server |
||||
- run: mv target/release/oxigraph_server oxigraph_server_${{ github.event.release.tag_name }}_x86_64_apple |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: oxigraph_server_${{ github.event.release.tag_name }}_x86_64_apple |
||||
|
||||
publish_binary_windows: |
||||
runs-on: windows-latest |
||||
steps: |
||||
- uses: actions/checkout@v2 |
||||
with: |
||||
submodules: true |
||||
- run: rustup update |
||||
- run: Remove-Item -LiteralPath "C:\msys64\" -Force -Recurse |
||||
- run: cargo build --release |
||||
working-directory: ./server |
||||
- run: mv target/release/oxigraph_server.exe oxigraph_server_${{ github.event.release.tag_name }}_x86_64_windows_msvc.exe |
||||
- uses: softprops/action-gh-release@v1 |
||||
with: |
||||
files: oxigraph_server_${{ github.event.release.tag_name }}_x86_64_windows_msvc.exe |
@@ -0,0 +1,3 @@
Thomas Tanon <thomas@pellissier-tanon.fr> <thomaspt@hotmail.fr> <Tpt@users.noreply.github.com>
Thomas Tanon <thomas@pellissier-tanon.fr>
Thomas Tanon <thomas.pellissier-tanon@helsing.ai>
@@ -0,0 +1,21 @@
version: 2

sphinx:
  builder: html

build:
  os: "ubuntu-22.04"
  tools:
    python: "3"
    rust: "1.70"
  apt_packages:
    - clang

python:
  install:
    - requirements: python/requirements.dev.txt
    - method: pip
      path: python

submodules:
  include: all
@@ -0,0 +1,16 @@
cff-version: 1.2.0
abstract: "Oxigraph is a graph database implementing the SPARQL standard."
authors:
  - given-names: Thomas
    family-names: Pellissier Tanon
    email: thomas@pellissier-tanon.fr
    orcid: "https://orcid.org/0000-0002-0620-6486"
doi: 10.5281/zenodo.7408022
license:
  - Apache-2.0
  - MIT
message: "If you use this software, please cite it as below."
repository-code: "https://github.com/oxigraph/oxigraph"
title: Oxigraph
type: software
url: "https://oxigraph.org"
File diff suppressed because it is too large
@ -1,16 +1,272 @@ |
||||
[workspace] |
||||
members = [ |
||||
"js", |
||||
"lib", |
||||
"lib/oxigraph", |
||||
"lib/oxrdf", |
||||
"lib/spargebra", |
||||
"lib/oxrdfio", |
||||
"lib/oxrdfxml", |
||||
"lib/oxsdatatypes", |
||||
"lib/oxttl", |
||||
"lib/sparesults", |
||||
"python", |
||||
"oxrocksdb-sys", |
||||
"server", |
||||
"lib/spargebra", |
||||
"lib/sparopt", |
||||
"lib/sparql-smith", |
||||
"testsuite" |
||||
] |
||||
resolver = "2" |
||||
|
||||
[workspace.package] |
||||
version = "0.4.0-alpha.7-dev" |
||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
||||
license = "MIT OR Apache-2.0" |
||||
edition = "2021" |
||||
rust-version = "1.70" |
||||
|
||||
[workspace.dependencies] |
||||
rocksdb = {git = "https://git.nextgraph.org/NextGraph/rust-rocksdb.git", branch = "master", features = [ ] } |
||||
serde = { version = "1.0.142", features = ["derive"] } |
||||
anyhow = "1.0.72" |
||||
arbitrary = "1.3" |
||||
assert_cmd = "2.0" |
||||
assert_fs = "1.0" |
||||
bindgen = ">=0.60, <0.70" |
||||
cc = "1.0.73" |
||||
clap = "4.0" |
||||
codspeed-criterion-compat = "2.3.3" |
||||
console_error_panic_hook = "0.1.7" |
||||
digest = "0.10" |
||||
flate2 = "1.0" |
||||
getrandom = "0.2.8" |
||||
hex = "0.4" |
||||
js-sys = "0.3.60" |
||||
json-event-parser = "0.2.0-alpha.2" |
||||
md-5 = "0.10" |
||||
memchr = "2.5" |
||||
oxilangtag = "0.1" |
||||
oxiri = "0.2.3" |
||||
peg = "0.8" |
||||
pkg-config = "0.3.25" |
||||
predicates = ">=2.0, <4.0" |
||||
pyo3 = "0.21.0" |
||||
quick-xml = ">=0.29, <0.32" |
||||
rand = "0.8" |
||||
rayon-core = "1.11" |
||||
regex = "1.7" |
||||
sha1 = "0.10" |
||||
sha2 = "0.10" |
||||
siphasher = ">=0.3, <2.0" |
||||
text-diff = "0.4" |
||||
thiserror = "1.0.50" |
||||
time = "0.3" |
||||
tokio = "1.29" |
||||
url = "2.4" |
||||
wasm-bindgen = "0.2.83" |
||||
zstd = ">=0.12, <0.14" |
||||
|
||||
# Internal dependencies |
||||
oxigraph = { version = "=0.4.0-alpha.7-dev", path = "lib/oxigraph" } |
||||
oxrdf = { version = "=0.2.0-alpha.4", path = "lib/oxrdf" } |
||||
oxrdfio = { version = "=0.1.0-alpha.5", path = "lib/oxrdfio" } |
||||
oxrdfxml = { version = "=0.1.0-alpha.5", path = "lib/oxrdfxml" } |
||||
oxsdatatypes = { version = "=0.2.0-alpha.1", path = "lib/oxsdatatypes" } |
||||
oxttl = { version = "=0.1.0-alpha.5", path = "lib/oxttl" } |
||||
sparesults = { version = "=0.2.0-alpha.4", path = "lib/sparesults" } |
||||
spargebra = { version = "=0.3.0-alpha.4", path = "lib/spargebra" } |
||||
sparopt = { version = "=0.1.0-alpha.5-dev", path = "lib/sparopt" } |
||||
|
||||
[workspace.lints.rust] |
||||
absolute_paths_not_starting_with_crate = "warn" |
||||
elided_lifetimes_in_paths = "warn" |
||||
explicit_outlives_requirements = "warn" |
||||
let_underscore_drop = "warn" |
||||
macro_use_extern_crate = "warn" |
||||
# TODO missing_docs = "warn" |
||||
trivial_casts = "warn" |
||||
trivial_numeric_casts = "warn" |
||||
unsafe_code = "warn" |
||||
unused_import_braces = "warn" |
||||
unused_lifetimes = "warn" |
||||
unused_macro_rules = "warn" |
||||
unused_qualifications = "warn" |
||||
|
||||
[workspace.lints.clippy] |
||||
allow_attributes = "warn" |
||||
allow_attributes_without_reason = "warn" |
||||
as_underscore = "warn" |
||||
assertions_on_result_states = "warn" |
||||
bool_to_int_with_if = "warn" |
||||
borrow_as_ptr = "warn" |
||||
case_sensitive_file_extension_comparisons = "warn" |
||||
cast_lossless = "warn" |
||||
cast_possible_truncation = "warn" |
||||
cast_possible_wrap = "warn" |
||||
cast_precision_loss = "warn" |
||||
cast_ptr_alignment = "warn" |
||||
cast_sign_loss = "warn" |
||||
checked_conversions = "warn" |
||||
clone_on_ref_ptr = "warn" |
||||
cloned_instead_of_copied = "warn" |
||||
copy_iterator = "warn" |
||||
create_dir = "warn" |
||||
dbg_macro = "warn" |
||||
decimal_literal_representation = "warn" |
||||
default_trait_access = "warn" |
||||
default_union_representation = "warn" |
||||
deref_by_slicing = "warn" |
||||
disallowed_script_idents = "warn" |
||||
doc_link_with_quotes = "warn" |
||||
empty_drop = "warn" |
||||
empty_enum = "warn" |
||||
empty_structs_with_brackets = "warn" |
||||
enum_glob_use = "warn" |
||||
error_impl_error = "warn" |
||||
exit = "warn" |
||||
expect_used = "warn" |
||||
expl_impl_clone_on_copy = "warn" |
||||
explicit_deref_methods = "warn" |
||||
explicit_into_iter_loop = "warn" |
||||
explicit_iter_loop = "warn" |
||||
filetype_is_file = "warn" |
||||
filter_map_next = "warn" |
||||
flat_map_option = "warn" |
||||
fn_params_excessive_bools = "warn" |
||||
fn_to_numeric_cast_any = "warn" |
||||
format_push_string = "warn" |
||||
from_iter_instead_of_collect = "warn" |
||||
get_unwrap = "warn" |
||||
host_endian_bytes = "warn" |
||||
if_not_else = "warn" |
||||
if_then_some_else_none = "warn" |
||||
ignored_unit_patterns = "warn" |
||||
implicit_clone = "warn" |
||||
implicit_hasher = "warn" |
||||
inconsistent_struct_constructor = "warn" |
||||
index_refutable_slice = "warn" |
||||
inefficient_to_string = "warn" |
||||
infinite_loop = "warn" |
||||
inline_always = "warn" |
||||
inline_asm_x86_att_syntax = "warn" |
||||
inline_asm_x86_intel_syntax = "warn" |
||||
into_iter_without_iter = "warn" |
||||
invalid_upcast_comparisons = "warn" |
||||
items_after_statements = "warn" |
||||
iter_not_returning_iterator = "warn" |
||||
iter_without_into_iter = "warn" |
||||
large_digit_groups = "warn" |
||||
large_futures = "warn" |
||||
large_include_file = "warn" |
||||
large_stack_arrays = "warn" |
||||
large_types_passed_by_value = "warn" |
||||
let_underscore_must_use = "warn" |
||||
let_underscore_untyped = "warn" |
||||
linkedlist = "warn" |
||||
lossy_float_literal = "warn" |
||||
macro_use_imports = "warn" |
||||
manual_assert = "warn" |
||||
manual_instant_elapsed = "warn" |
||||
manual_let_else = "warn" |
||||
manual_ok_or = "warn" |
||||
manual_string_new = "warn" |
||||
many_single_char_names = "warn" |
||||
map_unwrap_or = "warn" |
||||
match_bool = "warn" |
||||
match_on_vec_items = "warn" |
||||
match_same_arms = "warn" |
||||
match_wild_err_arm = "warn" |
||||
match_wildcard_for_single_variants = "warn" |
||||
maybe_infinite_iter = "warn" |
||||
mem_forget = "warn" |
||||
mismatching_type_param_order = "warn" |
||||
missing_assert_message = "warn" |
||||
missing_asserts_for_indexing = "warn" |
||||
missing_fields_in_debug = "warn" |
||||
multiple_inherent_impl = "warn" |
||||
mut_mut = "warn" |
||||
mutex_atomic = "warn" |
||||
naive_bytecount = "warn" |
||||
needless_bitwise_bool = "warn" |
||||
needless_continue = "warn" |
||||
needless_for_each = "warn" |
||||
needless_pass_by_value = "warn" |
||||
needless_raw_string_hashes = "warn" |
||||
needless_raw_strings = "warn" |
||||
negative_feature_names = "warn" |
||||
no_effect_underscore_binding = "warn" |
||||
no_mangle_with_rust_abi = "warn" |
||||
non_ascii_literal = "warn" |
||||
panic = "warn" |
||||
panic_in_result_fn = "warn" |
||||
partial_pub_fields = "warn" |
||||
print_stderr = "warn" |
||||
print_stdout = "warn" |
||||
ptr_as_ptr = "warn" |
||||
ptr_cast_constness = "warn" |
||||
pub_without_shorthand = "warn" |
||||
range_minus_one = "warn" |
||||
range_plus_one = "warn" |
||||
rc_buffer = "warn" |
||||
rc_mutex = "warn" |
||||
redundant_closure_for_method_calls = "warn" |
||||
redundant_else = "warn" |
||||
redundant_feature_names = "warn" |
||||
redundant_type_annotations = "warn" |
||||
ref_binding_to_reference = "warn" |
||||
ref_option_ref = "warn" |
||||
ref_patterns = "warn" |
||||
rest_pat_in_fully_bound_structs = "warn" |
||||
return_self_not_must_use = "warn" |
||||
same_functions_in_if_condition = "warn" |
||||
same_name_method = "warn" |
||||
semicolon_inside_block = "warn" |
||||
shadow_same = "warn" |
||||
should_panic_without_expect = "warn" |
||||
single_match_else = "warn" |
||||
stable_sort_primitive = "warn" |
||||
str_to_string = "warn" |
||||
string_add = "warn" |
||||
string_add_assign = "warn" |
||||
string_lit_chars_any = "warn" |
||||
string_to_string = "warn" |
||||
struct_excessive_bools = "warn" |
||||
struct_field_names = "warn" |
||||
suspicious_xor_used_as_pow = "warn" |
||||
tests_outside_test_module = "warn" |
||||
todo = "warn" |
||||
transmute_ptr_to_ptr = "warn" |
||||
trivially_copy_pass_by_ref = "warn" |
||||
try_err = "warn" |
||||
unchecked_duration_subtraction = "warn" |
||||
undocumented_unsafe_blocks = "warn" |
||||
unicode_not_nfc = "warn" |
||||
unimplemented = "warn" |
||||
uninlined_format_args = "warn" |
||||
unnecessary_box_returns = "warn" |
||||
unnecessary_join = "warn" |
||||
unnecessary_safety_comment = "warn" |
||||
unnecessary_safety_doc = "warn" |
||||
unnecessary_self_imports = "warn" |
||||
unnecessary_wraps = "warn" |
||||
unneeded_field_pattern = "warn" |
||||
unnested_or_patterns = "warn" |
||||
unreadable_literal = "warn" |
||||
unsafe_derive_deserialize = "warn" |
||||
unseparated_literal_suffix = "warn" |
||||
unused_async = "warn" |
||||
unused_self = "warn" |
||||
unwrap_in_result = "warn" |
||||
use_debug = "warn" |
||||
used_underscore_binding = "warn" |
||||
verbose_bit_mask = "warn" |
||||
verbose_file_reads = "warn" |
||||
wildcard_dependencies = "warn" |
||||
zero_sized_map_values = "warn" |
||||
|
||||
[profile.release] |
||||
lto = true |
||||
codegen-units = 1 |
||||
strip = "debuginfo" |
||||
|
||||
[profile.release.package.oxigraph-js] |
||||
codegen-units = 1 |
||||
opt-level = "z" |
||||
strip = "debuginfo" |
||||
|
@@ -1,10 +0,0 @@
This project is licensed under either of

* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
  http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
  http://opensource.org/licenses/MIT)

at your option.

SPDX-License-Identifier: MIT OR Apache-2.0
@@ -0,0 +1,49 @@
#!/usr/bin/env bash

DATASET_SIZE=100000
PARALLELISM=16
VERSION="4.2.2"
TOMCAT_VERSION="9.0.71"

set -eu
wget -nc -O "rdf4j-${VERSION}.zip" "https://www.eclipse.org/downloads/download.php?file=/rdf4j/eclipse-rdf4j-${VERSION}-sdk.zip&mirror_id=1"
wget -nc -O "tomcat-${TOMCAT_VERSION}.zip" "https://dlcdn.apache.org/tomcat/tomcat-9/v${TOMCAT_VERSION}/bin/apache-tomcat-${TOMCAT_VERSION}.zip"
cd bsbm-tools || exit
./generate -fc -pc ${DATASET_SIZE} -s nt -fn "explore-${DATASET_SIZE}" -ud -ufn "explore-update-${DATASET_SIZE}"
wget -nc -O "rdf4j-${VERSION}.zip" "https://www.eclipse.org/downloads/download.php?file=/rdf4j/eclipse-rdf4j-${VERSION}-sdk.zip&mirror_id=1"
unzip ../"rdf4j-${VERSION}.zip"
unzip ../"tomcat-${TOMCAT_VERSION}.zip"
CATALINA_HOME="$(pwd)/apache-tomcat-${TOMCAT_VERSION}"
export CATALINA_HOME
export JAVA_OPTS="-Dorg.eclipse.rdf4j.appdata.basedir=${CATALINA_HOME}/rdf4j"
cp "eclipse-rdf4j-${VERSION}"/war/rdf4j-server.war "${CATALINA_HOME}"/webapps/
chmod +x "${CATALINA_HOME}"/bin/*.sh
"${CATALINA_HOME}"/bin/startup.sh
sleep 30
curl -f -X PUT http://localhost:8080/rdf4j-server/repositories/bsbm -H 'Content-Type:text/turtle' -d '
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>.
@prefix rep: <http://www.openrdf.org/config/repository#>.
@prefix sr: <http://www.openrdf.org/config/repository/sail#>.
@prefix sail: <http://www.openrdf.org/config/sail#>.

[] a rep:Repository ;
   rep:repositoryID "bsbm" ;
   rdfs:label "BSBM" ;
   rep:repositoryImpl [
      rep:repositoryType "openrdf:SailRepository" ;
      sr:sailImpl [
         sail:sailType "rdf4j:LmdbStore"
      ]
   ] .
'
sleep 10
curl -f -X PUT -H 'Content-Type:application/n-triples' -T "explore-${DATASET_SIZE}.nt" http://localhost:8080/rdf4j-server/repositories/bsbm/statements
./testdriver -mt ${PARALLELISM} -ucf usecases/explore/sparql.txt -o "../bsbm.explore.rdf4j-lmdb.${VERSION}.${DATASET_SIZE}.${PARALLELISM}.xml" http://localhost:8080/rdf4j-server/repositories/bsbm
./testdriver -mt ${PARALLELISM} -ucf usecases/exploreAndUpdate/sparql.txt -o "../bsbm.exploreAndUpdate.rdf4j-lmdb.${VERSION}.${DATASET_SIZE}.${PARALLELISM}.xml" http://localhost:8080/rdf4j-server/repositories/bsbm -u http://localhost:8080/rdf4j-server/repositories/bsbm/statements -udataset "explore-update-${DATASET_SIZE}.nt"
#./testdriver -mt ${PARALLELISM} -ucf usecases/businessIntelligence/sparql.txt -o "../bsbm.businessIntelligence.rdf4j-lmdb.${VERSION}.${DATASET_SIZE}.${PARALLELISM}.xml" http://localhost:8080/rdf4j-server/repositories/bsbm
"${CATALINA_HOME}"/bin/shutdown.sh
rm -f "explore-${DATASET_SIZE}.nt"
rm -f "explore-update-${DATASET_SIZE}.nt"
rm -rf td_data
rm -rf "eclipse-rdf4j-${VERSION}"
rm -rf "apache-tomcat-${TOMCAT_VERSION}"
@@ -0,0 +1,63 @@
"""
Converts a SPARQL query JSON explanation file to a flamegraph.
Usage: python explanation_to_flamegraph.py explanation.json flamegraph.svg
"""
import json
import subprocess
from argparse import ArgumentParser
from pathlib import Path
from shutil import which
from tempfile import NamedTemporaryFile

parser = ArgumentParser(
    prog='OxigraphFlamegraph',
    description='Builds a flamegraph from the Oxigraph query explanation JSON format',
    epilog='Text at the bottom of help')
parser.add_argument('json_explanation', type=Path)
parser.add_argument('flamegraph_svg', type=Path)
args = parser.parse_args()


def trace_line(label: str, value: float):
    return f"{label} {int(value * 1_000_000)}"


with args.json_explanation.open('rt') as fp:
    explanation = json.load(fp)
trace = []
if "parsing duration in seconds" in explanation:
    trace.append(trace_line("parsing", explanation['parsing duration in seconds']))
if "planning duration in seconds" in explanation:
    trace.append(trace_line("planning", explanation['planning duration in seconds']))
already_used_names = {}


def add_to_trace(node, path):
    path = f"{path};{node['name'].replace(' ', '`')}"
    if path in already_used_names:
        already_used_names[path] += 1
        path = f"{path}`{already_used_names[path]}"
    else:
        already_used_names[path] = 0
    samples = node['duration in seconds'] - sum(child['duration in seconds'] for child in node.get("children", ()))
    if int(samples * 1_000_000) > 0:
        trace.append(trace_line(path, samples))
    for i, child in enumerate(node.get("children", ())):
        add_to_trace(child, path)


add_to_trace(explanation["plan"], 'eval')
inferno = which('inferno-flamegraph')
flamegraph_pl = which('flamegraph.pl')
if inferno:
    args.flamegraph_svg.write_text(
        subprocess.run([inferno], input='\n'.join(trace), stdout=subprocess.PIPE, text=True).stdout)
elif flamegraph_pl:
    with NamedTemporaryFile('w+t') as fp:
        fp.write('\n'.join(trace))
        fp.flush()
        args.flamegraph_svg.write_text(
            subprocess.run([flamegraph_pl, fp.name], stdout=subprocess.PIPE, text=True).stdout)
else:
    raise Exception(
        'This script requires either the inferno-flamegraph from https://github.com/jonhoo/inferno either the flamegraph.pl script from https://github.com/brendangregg/FlameGraph to be installed and be in $PATH.')
@@ -0,0 +1,52 @@
"""
Converts a SPARQL query JSON explanation file to a tracing event file compatible with Chrome.
Usage: python explanation_to_trace.py explanation.json trace.json
"""
import json
from argparse import ArgumentParser
from pathlib import Path

parser = ArgumentParser(
    prog='OxigraphTracing',
    description='Builds a Trace Event Format file from the Oxigraph query explanation JSON format')
parser.add_argument('json_explanation', type=Path)
parser.add_argument('json_trace_event', type=Path)
args = parser.parse_args()

with args.json_explanation.open('rt') as fp:
    explanation = json.load(fp)
trace = []


def trace_element(name: str, cat: str, start_s: float, duration_s: float):
    return {
        "name": name,
        "cat": cat,
        "ph": "X",
        "ts": int(start_s * 1_000_000),
        "dur": int(duration_s * 1_000_000),
        "pid": 1
    }


def add_to_trace(node, path, start_time: float):
    path = f"{path};{node['name'].replace(' ', '`')}"
    trace.append(trace_element(node["name"], node["name"].split("(")[0], start_time, node["duration in seconds"]))
    for child in node.get("children", ()):
        add_to_trace(child, path, start_time)
        start_time += child["duration in seconds"]


current_time = 0
if "parsing duration in seconds" in explanation:
    d = explanation["parsing duration in seconds"]
    trace.append(trace_element(f"parsing", "parsing", current_time, d))
    current_time += d
if "planning duration in seconds" in explanation:
    d = explanation["planning duration in seconds"]
    trace.append(trace_element(f"planning", "planning", current_time, d))
    current_time += d
add_to_trace(explanation["plan"], 'eval', current_time)

with args.json_trace_event.open("wt") as fp:
    json.dump(trace, fp)
(binary image file added, 4.6 KiB)
@ -0,0 +1,35 @@ |
||||
+------------------+ +----------------+ +-----------------+ |
||||
+ oxigraph CLI {r} + + pyoxigraph {p} + + oxigraph JS {j} + |
||||
+------------------+ +----------------+ +-----------------+ |
||||
|
||||
+---------------------------------------------------------------------------+ |
||||
+ oxigraph (Rust) {r} + |
||||
+---------------------------------------------------------------------------+ |
||||
|
||||
+----------------------------+ +-------------+ |
||||
+ oxrdfio {r} + + sparopt {r} + |
||||
+----------------------------+ +-------------+ |
||||
|
||||
+-----------+ +--------------+ +-----------------+ +----------------+ |
||||
+ oxttl {r} + + oxrdfxml {r} + + spargebra {r} + + sparesults {r} + |
||||
+-----------+ +--------------+ +-----------------+ +----------------+ |
||||
|
||||
+-----------------------------------------------------------------------+ |
||||
+ oxrdf {r} + |
||||
+-----------------------------------------------------------------------+ |
||||
|
||||
+------------------+ |
||||
+ oxsdatatypes {r} + |
||||
+------------------+ |
||||
|
||||
|
||||
# Legend: |
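# r = Rust crates, p = the Python binding, j = the JavaScript binding (the classes below only set fill colors)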
||||
r = { |
||||
fill: papayawhip; |
||||
} |
||||
p = { |
||||
fill: lightyellow; |
||||
} |
||||
j = { |
||||
fill: lightgreen; |
||||
} |
@ -0,0 +1,28 @@ |
||||
#![no_main] |
||||
|
||||
use libfuzzer_sys::fuzz_target; |
||||
use oxttl::N3Parser; |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
let mut quads = Vec::new(); |
||||
let mut parser = N3Parser::new() |
||||
.with_base_iri("http://example.com/") |
||||
.unwrap() |
||||
.parse(); |
||||
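// 0xFF bytes in the fuzz input delimit chunks, so this loop exercises the incremental parser API (extend_from_slice + read_next).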
for chunk in data.split(|c| *c == 0xFF) { |
||||
parser.extend_from_slice(chunk); |
||||
while let Some(result) = parser.read_next() { |
||||
if let Ok(quad) = result { |
||||
quads.push(quad); |
||||
} |
||||
} |
||||
} |
||||
parser.end(); |
||||
while let Some(result) = parser.read_next() { |
||||
if let Ok(quad) = result { |
||||
quads.push(quad); |
||||
} |
||||
} |
||||
assert!(parser.is_end()); |
||||
//TODO: serialize
|
||||
}); |
@ -0,0 +1,84 @@ |
||||
#![no_main] |
||||
|
||||
use libfuzzer_sys::fuzz_target; |
||||
use oxrdf::Quad; |
||||
use oxttl::{NQuadsParser, NQuadsSerializer}; |
||||
|
||||
fn parse<'a>( |
||||
chunks: impl IntoIterator<Item = &'a [u8]>, |
||||
unchecked: bool, |
||||
) -> (Vec<Quad>, Vec<String>) { |
||||
let mut quads = Vec::new(); |
||||
let mut errors = Vec::new(); |
||||
let mut parser = NQuadsParser::new().with_quoted_triples(); |
||||
if unchecked { |
||||
parser = parser.unchecked(); |
||||
} |
||||
let mut reader = parser.parse(); |
||||
for chunk in chunks { |
||||
reader.extend_from_slice(chunk); |
||||
while let Some(result) = reader.read_next() { |
||||
match result { |
||||
Ok(quad) => quads.push(quad), |
||||
Err(error) => errors.push(error.to_string()), |
||||
} |
||||
} |
||||
} |
||||
reader.end(); |
||||
while let Some(result) = reader.read_next() { |
||||
match result { |
||||
Ok(quad) => quads.push(quad), |
||||
Err(error) => errors.push(error.to_string()), |
||||
} |
||||
} |
||||
assert!(reader.is_end()); |
||||
(quads, errors) |
||||
} |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
// We parse with splitting
|
||||
let (quads, errors) = parse(data.split(|c| *c == 0xFF), false); |
||||
// We parse without splitting
|
||||
let (quads_without_split, errors_without_split) = parse( |
||||
[data |
||||
.iter() |
||||
.copied() |
||||
.filter(|c| *c != 0xFF) |
||||
.collect::<Vec<_>>() |
||||
.as_slice()], |
||||
false, |
||||
); |
||||
assert_eq!(quads, quads_without_split); |
||||
assert_eq!(errors, errors_without_split); |
||||
|
||||
// We test also unchecked if valid
|
||||
if errors.is_empty() { |
||||
let (quads_unchecked, errors_unchecked) = parse(data.split(|c| *c == 0xFF), true); |
||||
assert!(errors_unchecked.is_empty()); |
||||
assert_eq!(quads, quads_unchecked); |
||||
} |
||||
|
||||
// We serialize
|
||||
let mut writer = NQuadsSerializer::new().serialize_to_write(Vec::new()); |
||||
for quad in &quads { |
||||
writer.write_quad(quad).unwrap(); |
||||
} |
||||
let new_serialization = writer.finish(); |
||||
|
||||
// We parse the serialization
|
||||
let new_quads = NQuadsParser::new() |
||||
.with_quoted_triples() |
||||
.parse_read(new_serialization.as_slice()) |
||||
.collect::<Result<Vec<_>, _>>() |
||||
.map_err(|e| { |
||||
format!( |
||||
"Error on {:?} from {quads:?} based on {:?}: {e}", |
||||
String::from_utf8_lossy(&new_serialization), |
||||
String::from_utf8_lossy(data) |
||||
) |
||||
}) |
||||
.unwrap(); |
||||
|
||||
// We check the roundtrip has not changed anything
|
||||
assert_eq!(new_quads, quads); |
||||
}); |
@ -0,0 +1,35 @@ |
||||
#![no_main] |
||||
|
||||
use libfuzzer_sys::fuzz_target; |
||||
use oxrdfxml::{RdfXmlParser, RdfXmlSerializer}; |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
// We parse
|
||||
let triples = RdfXmlParser::new() |
||||
.parse_read(data) |
||||
.flatten() |
||||
.collect::<Vec<_>>(); |
||||
|
||||
// We serialize
|
||||
let mut writer = RdfXmlSerializer::new().serialize_to_write(Vec::new()); |
||||
for triple in &triples { |
||||
writer.write_triple(triple).unwrap(); |
||||
} |
||||
let new_serialization = writer.finish().unwrap(); |
||||
|
||||
// We parse the serialization
|
||||
let new_triples = RdfXmlParser::new() |
||||
.parse_read(new_serialization.as_slice()) |
||||
.collect::<Result<Vec<_>, _>>() |
||||
.map_err(|e| { |
||||
format!( |
||||
"Error on {:?} from {triples:?} based on {:?}: {e}", |
||||
String::from_utf8_lossy(&new_serialization), |
||||
String::from_utf8_lossy(data) |
||||
) |
||||
}) |
||||
.unwrap(); |
||||
|
||||
// We check the roundtrip has not changed anything
|
||||
assert_eq!(new_triples, triples); |
||||
}); |
@ -0,0 +1,61 @@ |
||||
#![no_main] |
||||
|
||||
use libfuzzer_sys::fuzz_target; |
||||
use oxigraph::io::RdfFormat; |
||||
use oxigraph::sparql::{Query, QueryOptions, QueryResults, QuerySolutionIter}; |
||||
use oxigraph::store::Store; |
||||
use std::sync::OnceLock; |
||||
|
||||
fuzz_target!(|data: sparql_smith::Query| { |
||||
static STORE: OnceLock<Store> = OnceLock::new(); |
||||
let store = STORE.get_or_init(|| { |
||||
let store = Store::new().unwrap(); |
||||
store |
||||
.load_from_read(RdfFormat::TriG, sparql_smith::DATA_TRIG.as_bytes()) |
||||
.unwrap(); |
||||
store |
||||
}); |
||||
|
||||
let query_str = data.to_string(); |
||||
if let Ok(query) = Query::parse(&query_str, None) { |
||||
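// Evaluate the same query with and without the optimizer and check that both runs agree.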
let options = QueryOptions::default(); |
||||
let with_opt = store.query_opt(query.clone(), options.clone()).unwrap(); |
||||
let without_opt = store |
||||
.query_opt(query, options.without_optimizations()) |
||||
.unwrap(); |
||||
match (with_opt, without_opt) { |
||||
(QueryResults::Solutions(with_opt), QueryResults::Solutions(without_opt)) => { |
||||
assert_eq!( |
||||
query_solutions_key(with_opt, query_str.contains(" REDUCED ")), |
||||
query_solutions_key(without_opt, query_str.contains(" REDUCED ")) |
||||
) |
||||
} |
||||
(QueryResults::Graph(_), QueryResults::Graph(_)) => unimplemented!(), |
||||
(QueryResults::Boolean(with_opt), QueryResults::Boolean(without_opt)) => { |
||||
assert_eq!(with_opt, without_opt) |
||||
} |
||||
_ => panic!("Different query result types"), |
||||
} |
||||
} |
||||
}); |
||||
|
||||
fn query_solutions_key(iter: QuerySolutionIter, is_reduced: bool) -> String { |
||||
// TODO: ordering
|
||||
let mut b = iter |
||||
.into_iter() |
||||
.map(|t| { |
||||
let mut b = t |
||||
.unwrap() |
||||
.iter() |
||||
.map(|(var, val)| format!("{var}: {val}")) |
||||
.collect::<Vec<_>>(); |
||||
b.sort_unstable(); |
||||
b.join(" ") |
||||
}) |
||||
.collect::<Vec<_>>(); |
||||
b.sort_unstable(); |
||||
if is_reduced { |
||||
b.dedup(); |
||||
} |
||||
b.join("\n") |
||||
} |
@ -1,10 +1,7 @@ |
||||
#![no_main] |
||||
use libfuzzer_sys::fuzz_target; |
||||
use spargebra::Query; |
||||
use std::str; |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
if let Ok(data) = str::from_utf8(data) { |
||||
Query::parse(data, None); |
||||
} |
||||
fuzz_target!(|data: &str| { |
||||
let _ = Query::parse(data, None); |
||||
}); |
||||
|
@ -1,15 +1,6 @@ |
||||
#![no_main] |
||||
use libfuzzer_sys::fuzz_target; |
||||
use sparesults::{QueryResultsFormat, QueryResultsParser, QueryResultsReader}; |
||||
use oxigraph_fuzz::result_format::fuzz_result_format; |
||||
use sparesults::QueryResultsFormat; |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
let parser = QueryResultsParser::from_format(QueryResultsFormat::Json); |
||||
if let Ok(QueryResultsReader::Solutions(solutions)) = parser.read_results(data) { |
||||
for s in solutions { |
||||
if s.is_err() { |
||||
// TODO: avoid infinite loop of errors
|
||||
break; |
||||
} |
||||
} |
||||
} |
||||
}); |
||||
fuzz_target!(|data: &[u8]| fuzz_result_format(QueryResultsFormat::Json, data)); |
||||
|
@ -1,10 +1,6 @@ |
||||
#![no_main] |
||||
use libfuzzer_sys::fuzz_target; |
||||
use sparesults::{QueryResultsFormat, QueryResultsParser, QueryResultsReader}; |
||||
use oxigraph_fuzz::result_format::fuzz_result_format; |
||||
use sparesults::QueryResultsFormat; |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
let parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv); |
||||
if let Ok(QueryResultsReader::Solutions(solutions)) = parser.read_results(data) { |
||||
for _ in solutions {} |
||||
} |
||||
}); |
||||
fuzz_target!(|data: &[u8]| fuzz_result_format(QueryResultsFormat::Tsv, data)); |
||||
|
@ -1,10 +1,6 @@ |
||||
#![no_main] |
||||
use libfuzzer_sys::fuzz_target; |
||||
use sparesults::{QueryResultsFormat, QueryResultsParser, QueryResultsReader}; |
||||
use oxigraph_fuzz::result_format::fuzz_result_format; |
||||
use sparesults::QueryResultsFormat; |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
let parser = QueryResultsParser::from_format(QueryResultsFormat::Xml); |
||||
if let Ok(QueryResultsReader::Solutions(solutions)) = parser.read_results(data) { |
||||
for _ in solutions {} |
||||
} |
||||
}); |
||||
fuzz_target!(|data: &[u8]| fuzz_result_format(QueryResultsFormat::Xml, data)); |
||||
|
@ -0,0 +1,166 @@ |
||||
#![no_main] |
||||
|
||||
use libfuzzer_sys::fuzz_target; |
||||
use oxrdf::graph::CanonicalizationAlgorithm; |
||||
use oxrdf::{Dataset, GraphName, Quad, Subject, Term, Triple}; |
||||
use oxttl::{TriGParser, TriGSerializer}; |
||||
|
||||
fn parse<'a>( |
||||
chunks: impl IntoIterator<Item = &'a [u8]>, |
||||
unchecked: bool, |
||||
) -> (Vec<Quad>, Vec<String>, Vec<(String, String)>) { |
||||
let mut quads = Vec::new(); |
||||
let mut errors = Vec::new(); |
||||
let mut parser = TriGParser::new() |
||||
.with_quoted_triples() |
||||
.with_base_iri("http://example.com/") |
||||
.unwrap(); |
||||
if unchecked { |
||||
parser = parser.unchecked(); |
||||
} |
||||
let mut reader = parser.parse(); |
||||
for chunk in chunks { |
||||
reader.extend_from_slice(chunk); |
||||
while let Some(result) = reader.read_next() { |
||||
match result { |
||||
Ok(quad) => quads.push(quad), |
||||
Err(error) => errors.push(error.to_string()), |
||||
} |
||||
} |
||||
} |
||||
reader.end(); |
||||
while let Some(result) = reader.read_next() { |
||||
match result { |
||||
Ok(quad) => quads.push(quad), |
||||
Err(error) => errors.push(error.to_string()), |
||||
} |
||||
} |
||||
assert!(reader.is_end()); |
||||
( |
||||
quads, |
||||
errors, |
||||
reader |
||||
.prefixes() |
||||
.map(|(k, v)| (k.to_owned(), v.to_owned())) |
||||
.collect(), |
||||
) |
||||
} |
||||
|
||||
fn count_triple_blank_nodes(triple: &Triple) -> usize { |
||||
(match &triple.subject { |
||||
Subject::BlankNode(_) => 1, |
||||
Subject::Triple(t) => count_triple_blank_nodes(t), |
||||
_ => 0, |
||||
}) + (match &triple.object { |
||||
Term::BlankNode(_) => 1, |
||||
Term::Triple(t) => count_triple_blank_nodes(t), |
||||
_ => 0, |
||||
}) |
||||
} |
||||
|
||||
fn count_quad_blank_nodes(quad: &Quad) -> usize { |
||||
(match &quad.subject { |
||||
Subject::BlankNode(_) => 1, |
||||
Subject::Triple(t) => count_triple_blank_nodes(t), |
||||
_ => 0, |
||||
}) + (match &quad.object { |
||||
Term::BlankNode(_) => 1, |
||||
Term::Triple(t) => count_triple_blank_nodes(t), |
||||
_ => 0, |
||||
}) + usize::from(matches!(quad.graph_name, GraphName::BlankNode(_))) |
||||
} |
||||
|
||||
fn serialize_quads(quads: &[Quad], prefixes: Vec<(String, String)>) -> Vec<u8> { |
||||
let mut serializer = TriGSerializer::new(); |
||||
for (prefix_name, prefix_iri) in prefixes { |
||||
serializer = serializer.with_prefix(prefix_name, prefix_iri).unwrap(); |
||||
} |
||||
let mut writer = serializer.serialize_to_write(Vec::new()); |
||||
for quad in quads { |
||||
writer.write_quad(quad).unwrap(); |
||||
} |
||||
writer.finish().unwrap() |
||||
} |
||||
|
||||
fuzz_target!(|data: &[u8]| { |
||||
// We parse with splitting
|
||||
let (quads, errors, prefixes) = parse(data.split(|c| *c == 0xFF), false); |
||||
// We parse without splitting
|
||||
let (quads_without_split, errors_without_split, _) = parse( |
||||
[data |
||||
.iter() |
||||
.copied() |
||||
.filter(|c| *c != 0xFF) |
||||
.collect::<Vec<_>>() |
||||
.as_slice()], |
||||
false, |
||||
); |
||||
let (quads_unchecked, errors_unchecked, _) = parse(data.split(|c| *c == 0xFF), true); |
||||
if errors.is_empty() { |
||||
assert!(errors_unchecked.is_empty()); |
||||
} |
||||
|
||||
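// Blank node labels are not stable across parses, so quads containing blank nodes are only compared after canonicalization below, and only when there are few of them to keep canonicalization cheap.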
let bnodes_count = quads.iter().map(count_quad_blank_nodes).sum::<usize>(); |
||||
if bnodes_count == 0 { |
||||
assert_eq!( |
||||
quads, |
||||
quads_without_split, |
||||
"With split:\n{}\nWithout split:\n{}", |
||||
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||
String::from_utf8_lossy(&serialize_quads(&quads_without_split, Vec::new())) |
||||
); |
||||
if errors.is_empty() { |
||||
assert_eq!( |
||||
quads, |
||||
quads_unchecked, |
||||
"Validating:\n{}\nUnchecked:\n{}", |
||||
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||
String::from_utf8_lossy(&serialize_quads(&quads_unchecked, Vec::new())) |
||||
); |
||||
} |
||||
} else if bnodes_count <= 4 { |
||||
let mut dataset_with_split = quads.iter().collect::<Dataset>(); |
||||
let mut dataset_without_split = quads_without_split.iter().collect::<Dataset>(); |
||||
dataset_with_split.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||
dataset_without_split.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||
assert_eq!( |
||||
dataset_with_split, |
||||
dataset_without_split, |
||||
"With split:\n{}\nWithout split:\n{}", |
||||
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||
String::from_utf8_lossy(&serialize_quads(&quads_without_split, Vec::new())) |
||||
); |
||||
if errors.is_empty() { |
||||
let mut dataset_unchecked = quads_unchecked.iter().collect::<Dataset>(); |
||||
dataset_unchecked.canonicalize(CanonicalizationAlgorithm::Unstable); |
||||
assert_eq!( |
||||
dataset_with_split, |
||||
dataset_unchecked, |
||||
"Validating:\n{}\nUnchecked:\n{}", |
||||
String::from_utf8_lossy(&serialize_quads(&quads, Vec::new())), |
||||
String::from_utf8_lossy(&serialize_quads(&quads_unchecked, Vec::new())) |
||||
); |
||||
} |
||||
} |
||||
assert_eq!(errors, errors_without_split); |
||||
|
||||
// We serialize
|
||||
let new_serialization = serialize_quads(&quads, prefixes); |
||||
|
||||
// We parse the serialization
|
||||
let new_quads = TriGParser::new() |
||||
.with_quoted_triples() |
||||
.parse_read(new_serialization.as_slice()) |
||||
.collect::<Result<Vec<_>, _>>() |
||||
.map_err(|e| { |
||||
format!( |
||||
"Error on {:?} from {quads:?} based on {:?}: {e}", |
||||
String::from_utf8_lossy(&new_serialization), |
||||
String::from_utf8_lossy(data) |
||||
) |
||||
}) |
||||
.unwrap(); |
||||
|
||||
// We check the roundtrip has not changed anything
|
||||
assert_eq!(new_quads, quads); |
||||
}); |
@ -0,0 +1 @@ |
||||
pub mod result_format; |
@ -0,0 +1,63 @@ |
||||
use anyhow::Context; |
||||
use sparesults::{ |
||||
FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser, QueryResultsSerializer, |
||||
}; |
||||
|
||||
pub fn fuzz_result_format(format: QueryResultsFormat, data: &[u8]) { |
||||
let parser = QueryResultsParser::from_format(format); |
||||
let serializer = QueryResultsSerializer::from_format(format); |
||||
|
||||
let Ok(reader) = parser.parse_read(data) else { |
||||
return; |
||||
}; |
||||
match reader { |
||||
FromReadQueryResultsReader::Solutions(solutions) => { |
||||
let Ok(solutions) = solutions.collect::<Result<Vec<_>, _>>() else { |
||||
return; |
||||
}; |
||||
|
||||
// We try to write again
|
||||
let mut writer = serializer |
||||
.serialize_solutions_to_write( |
||||
Vec::new(), |
||||
solutions |
||||
.first() |
||||
.map_or_else(Vec::new, |s| s.variables().to_vec()), |
||||
) |
||||
.unwrap(); |
||||
for solution in &solutions { |
||||
writer.write(solution).unwrap(); |
||||
} |
||||
let serialized = String::from_utf8(writer.finish().unwrap()).unwrap(); |
||||
|
||||
// And to parse again
|
||||
if let FromReadQueryResultsReader::Solutions(roundtrip_solutions) = parser |
||||
.parse_read(serialized.as_bytes()) |
||||
.with_context(|| format!("Parsing {serialized:?}")) |
||||
.unwrap() |
||||
{ |
||||
assert_eq!( |
||||
roundtrip_solutions |
||||
.collect::<Result<Vec<_>, _>>() |
||||
.with_context(|| format!("Parsing {serialized:?}")) |
||||
.unwrap(), |
||||
solutions |
||||
) |
||||
} |
||||
} |
||||
FromReadQueryResultsReader::Boolean(value) => { |
||||
// We try to write again
|
||||
let mut serialized = Vec::new(); |
||||
serializer |
||||
.serialize_boolean_to_write(&mut serialized, value) |
||||
.unwrap(); |
||||
|
||||
// And to parse again
|
||||
if let FromReadQueryResultsReader::Boolean(roundtrip_value) = |
||||
parser.parse_read(serialized.as_slice()).unwrap() |
||||
{ |
||||
assert_eq!(roundtrip_value, value) |
||||
} |
||||
} |
||||
} |
||||
} |
@ -1,23 +1,26 @@ |
||||
[package] |
||||
name = "oxigraph_js" |
||||
version = "0.3.1" |
||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
||||
license = "MIT OR Apache-2.0" |
||||
name = "oxigraph-js" |
||||
version.workspace = true |
||||
authors.workspace = true |
||||
license.workspace = true |
||||
readme = "README.md" |
||||
keywords = ["RDF", "N-Triples", "Turtle", "RDF/XML", "SPARQL"] |
||||
keywords = ["RDF", "N-Triples", "Turtle", "XML", "SPARQL"] |
||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/js" |
||||
description = "JavaScript bindings of Oxigraph" |
||||
edition = "2021" |
||||
edition.workspace = true |
||||
rust-version.workspace = true |
||||
publish = false |
||||
|
||||
[lib] |
||||
crate-type = ["cdylib"] |
||||
name = "oxigraph" |
||||
doc = false |
||||
|
||||
[dependencies] |
||||
oxigraph = { version = "0.3.1", path="../lib" } |
||||
wasm-bindgen = "0.2" |
||||
js-sys = "0.3" |
||||
console_error_panic_hook = "0.1" |
||||
console_error_panic_hook.workspace = true |
||||
js-sys.workspace = true |
||||
oxigraph = { workspace = true, features = ["js"] } |
||||
wasm-bindgen.workspace = true |
||||
|
||||
[dev-dependencies] |
||||
wasm-bindgen-test = "0.3" |
||||
[lints] |
||||
workspace = true |
||||
|
@ -0,0 +1,14 @@ |
||||
{ |
||||
"$schema": "https://biomejs.dev/schemas/1.0.0/schema.json", |
||||
"formatter": { |
||||
"indentStyle": "space", |
||||
"indentWidth": 4, |
||||
"lineWidth": 100 |
||||
}, |
||||
"linter": { |
||||
"ignore": ["pkg"] |
||||
}, |
||||
"organizeImports": { |
||||
"enabled": true |
||||
} |
||||
} |
@ -1,31 +1,19 @@ |
||||
#! /usr/bin/env node
|
||||
|
||||
const fs = require('fs') |
||||
|
||||
// We copy file to the new directory
|
||||
fs.mkdirSync('pkg') |
||||
for (const file of fs.readdirSync('./pkg-web')) { |
||||
fs.copyFileSync('./pkg-web/' + file, './pkg/' + file) |
||||
} |
||||
for (const file of fs.readdirSync('./pkg-node')) { |
||||
fs.copyFileSync('./pkg-node/' + file, './pkg/' + file) |
||||
} |
||||
|
||||
const pkg = JSON.parse(fs.readFileSync('./pkg/package.json')) |
||||
pkg.name = 'oxigraph' |
||||
pkg.main = 'node.js' |
||||
pkg.browser = 'web.js' |
||||
pkg.files = [ |
||||
'*.{js,wasm,d.ts}' |
||||
] |
||||
pkg.homepage = 'https://github.com/oxigraph/oxigraph/tree/main/js' |
||||
const fs = require("node:fs"); |
||||
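// Patch the generated package.json with the final npm metadata before publishing.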
const pkg = JSON.parse(fs.readFileSync("./pkg/package.json")); |
||||
pkg.name = "oxigraph"; |
||||
pkg.main = "node.js"; |
||||
pkg.browser = "web.js"; |
||||
pkg.files = ["*.{js,wasm,d.ts}"]; |
||||
pkg.homepage = "https://github.com/oxigraph/oxigraph/tree/main/js"; |
||||
pkg.bugs = { |
||||
url: 'https://github.com/oxigraph/oxigraph/issues' |
||||
} |
||||
pkg.collaborators = undefined |
||||
url: "https://github.com/oxigraph/oxigraph/issues", |
||||
}; |
||||
pkg.collaborators = undefined; |
||||
pkg.repository = { |
||||
type: 'git', |
||||
url: 'https://github.com/oxigraph/oxigraph.git', |
||||
directory: 'js' |
||||
} |
||||
fs.writeFileSync('./pkg/package.json', JSON.stringify(pkg, null, 2)) |
||||
type: "git", |
||||
url: "https://github.com/oxigraph/oxigraph.git", |
||||
directory: "js", |
||||
}; |
||||
fs.writeFileSync("./pkg/package.json", JSON.stringify(pkg, null, 2)); |
||||
|
File diff suppressed because it is too large
@ -1,38 +1,52 @@ |
||||
/* global describe, it */ |
||||
|
||||
import oxigraph from '../pkg/oxigraph.js' |
||||
import assert from 'assert' |
||||
import runTests from '../node_modules/@rdfjs/data-model/test/index.js' |
||||
import assert from "node:assert"; |
||||
import runTests from "../node_modules/@rdfjs/data-model/test/index.js"; |
||||
import oxigraph from "../pkg/oxigraph.js"; |
||||
|
||||
runTests({ factory: oxigraph }) |
||||
runTests({ factory: oxigraph }); |
||||
|
||||
describe('DataModel', function () { |
||||
describe('#toString()', function () { |
||||
it('namedNode().toString() should return SPARQL compatible syntax', function () { |
||||
assert.strictEqual('<http://example.com>', oxigraph.namedNode('http://example.com').toString()) |
||||
}) |
||||
describe("DataModel", () => { |
||||
describe("#toString()", () => { |
||||
it("namedNode().toString() should return SPARQL compatible syntax", () => { |
||||
assert.strictEqual( |
||||
"<http://example.com>", |
||||
oxigraph.namedNode("http://example.com").toString(), |
||||
); |
||||
}); |
||||
|
||||
it('blankNode().toString() should return SPARQL compatible syntax', function () { |
||||
assert.strictEqual('_:a', oxigraph.blankNode('a').toString()) |
||||
}) |
||||
it("blankNode().toString() should return SPARQL compatible syntax", () => { |
||||
assert.strictEqual("_:a", oxigraph.blankNode("a").toString()); |
||||
}); |
||||
|
||||
it('literal().toString() should return SPARQL compatible syntax', function () { |
||||
assert.strictEqual('"a\\"b"@en', oxigraph.literal('a"b', 'en').toString()) |
||||
}) |
||||
it("literal().toString() should return SPARQL compatible syntax", () => { |
||||
assert.strictEqual('"a\\"b"@en', oxigraph.literal('a"b', "en").toString()); |
||||
}); |
||||
|
||||
it('defaultGraph().toString() should return SPARQL compatible syntax', function () { |
||||
assert.strictEqual('DEFAULT', oxigraph.defaultGraph().toString()) |
||||
}) |
||||
it("defaultGraph().toString() should return SPARQL compatible syntax", () => { |
||||
assert.strictEqual("DEFAULT", oxigraph.defaultGraph().toString()); |
||||
}); |
||||
|
||||
it('variable().toString() should return SPARQL compatible syntax', function () { |
||||
assert.strictEqual('?a', oxigraph.variable('a').toString()) |
||||
}) |
||||
it("variable().toString() should return SPARQL compatible syntax", () => { |
||||
assert.strictEqual("?a", oxigraph.variable("a").toString()); |
||||
}); |
||||
|
||||
it('quad().toString() should return SPARQL compatible syntax', function () { |
||||
it("quad().toString() should return SPARQL compatible syntax", () => { |
||||
assert.strictEqual( |
||||
'<http://example.com/s> <http://example.com/p> <<<http://example.com/s1> <http://example.com/p1> <http://example.com/o1>>> <http://example.com/g>', |
||||
oxigraph.quad(oxigraph.namedNode('http://example.com/s'), oxigraph.namedNode('http://example.com/p'), oxigraph.quad(oxigraph.namedNode('http://example.com/s1'), oxigraph.namedNode('http://example.com/p1'), oxigraph.namedNode('http://example.com/o1')), oxigraph.namedNode('http://example.com/g')).toString() |
||||
"<http://example.com/s> <http://example.com/p> <<<http://example.com/s1> <http://example.com/p1> <http://example.com/o1>>> <http://example.com/g>", |
||||
oxigraph |
||||
.quad( |
||||
oxigraph.namedNode("http://example.com/s"), |
||||
oxigraph.namedNode("http://example.com/p"), |
||||
oxigraph.quad( |
||||
oxigraph.namedNode("http://example.com/s1"), |
||||
oxigraph.namedNode("http://example.com/p1"), |
||||
oxigraph.namedNode("http://example.com/o1"), |
||||
), |
||||
oxigraph.namedNode("http://example.com/g"), |
||||
) |
||||
}) |
||||
}) |
||||
}) |
||||
.toString(), |
||||
); |
||||
}); |
||||
}); |
||||
}); |
||||
|
@ -1,161 +1,208 @@ |
||||
/* global describe, it */ |
||||
|
||||
import { Store } from '../pkg/oxigraph.js' |
||||
import assert from 'assert' |
||||
import dataModel from '@rdfjs/data-model' |
||||
import assert from "node:assert"; |
||||
import dataModel from "@rdfjs/data-model"; |
||||
import { Store } from "../pkg/oxigraph.js"; |
||||
|
||||
const ex = dataModel.namedNode('http://example.com') |
||||
const ex = dataModel.namedNode("http://example.com"); |
||||
const triple = dataModel.quad( |
||||
dataModel.blankNode('s'), |
||||
dataModel.namedNode('http://example.com/p'), |
||||
dataModel.literal('o') |
||||
) |
||||
|
||||
describe('Store', function () { |
||||
describe('#add()', function () { |
||||
it('an added quad should be in the store', function () { |
||||
const store = new Store() |
||||
store.add(dataModel.quad(ex, ex, triple)) |
||||
assert(store.has(dataModel.quad(ex, ex, triple))) |
||||
}) |
||||
}) |
||||
|
||||
describe('#delete()', function () { |
||||
it('an removed quad should not be in the store anymore', function () { |
||||
const store = new Store([dataModel.quad(triple, ex, ex)]) |
||||
assert(store.has(dataModel.quad(triple, ex, ex))) |
||||
store.delete(dataModel.quad(triple, ex, ex)) |
||||
assert(!store.has(dataModel.quad(triple, ex, ex))) |
||||
}) |
||||
}) |
||||
|
||||
describe('#has()', function () { |
||||
it('an added quad should be in the store', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
assert(store.has(dataModel.quad(ex, ex, ex))) |
||||
}) |
||||
}) |
||||
|
||||
describe('#size()', function () { |
||||
it('A store with one quad should have 1 for size', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
assert.strictEqual(1, store.size) |
||||
}) |
||||
}) |
||||
|
||||
describe('#match_quads()', function () { |
||||
it('blank pattern should return all quads', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
const results = store.match() |
||||
assert.strictEqual(1, results.length) |
||||
assert(dataModel.quad(ex, ex, ex).equals(results[0])) |
||||
}) |
||||
}) |
||||
|
||||
describe('#query()', function () { |
||||
it('ASK true', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
assert.strictEqual(true, store.query('ASK { ?s ?s ?s }')) |
||||
}) |
||||
|
||||
it('ASK false', function () { |
||||
const store = new Store() |
||||
assert.strictEqual(false, store.query('ASK { FILTER(false)}')) |
||||
}) |
||||
|
||||
it('CONSTRUCT', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
const results = store.query('CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }') |
||||
assert.strictEqual(1, results.length) |
||||
assert(dataModel.quad(ex, ex, ex).equals(results[0])) |
||||
}) |
||||
|
||||
it('SELECT', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
const results = store.query('SELECT ?s WHERE { ?s ?p ?o }') |
||||
assert.strictEqual(1, results.length) |
||||
assert(ex.equals(results[0].get('s'))) |
||||
}) |
||||
|
||||
it('SELECT with NOW()', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
const results = store.query('SELECT (YEAR(NOW()) AS ?y) WHERE {}') |
||||
assert.strictEqual(1, results.length) |
||||
}) |
||||
|
||||
it('SELECT with RAND()', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
const results = store.query('SELECT (RAND() AS ?y) WHERE {}') |
||||
assert.strictEqual(1, results.length) |
||||
}) |
||||
}) |
||||
|
||||
describe('#update()', function () { |
||||
it('INSERT DATA', function () { |
||||
const store = new Store() |
||||
store.update('INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }') |
||||
assert.strictEqual(1, store.size) |
||||
}) |
||||
|
||||
it('DELETE DATA', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
store.update('DELETE DATA { <http://example.com> <http://example.com> <http://example.com> }') |
||||
assert.strictEqual(0, store.size) |
||||
}) |
||||
|
||||
it('DELETE WHERE', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]) |
||||
store.update('DELETE WHERE { ?v ?v ?v }') |
||||
assert.strictEqual(0, store.size) |
||||
}) |
||||
}) |
||||
|
||||
describe('#load()', function () { |
||||
it('load NTriples in the default graph', function () { |
||||
const store = new Store() |
||||
store.load('<http://example.com> <http://example.com> <http://example.com> .', 'application/n-triples') |
||||
assert(store.has(dataModel.quad(ex, ex, ex))) |
||||
}) |
||||
|
||||
it('load NTriples in an other graph', function () { |
||||
const store = new Store() |
||||
store.load('<http://example.com> <http://example.com> <http://example.com> .', 'application/n-triples', null, ex) |
||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))) |
||||
}) |
||||
|
||||
it('load Turtle with a base IRI', function () { |
||||
const store = new Store() |
||||
store.load('<http://example.com> <http://example.com> <> .', 'text/turtle', 'http://example.com') |
||||
assert(store.has(dataModel.quad(ex, ex, ex))) |
||||
}) |
||||
|
||||
it('load NQuads', function () { |
||||
const store = new Store() |
||||
store.load('<http://example.com> <http://example.com> <http://example.com> <http://example.com> .', 'application/n-quads') |
||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))) |
||||
}) |
||||
|
||||
it('load TriG with a base IRI', function () { |
||||
const store = new Store() |
||||
store.load('GRAPH <> { <http://example.com> <http://example.com> <> }', 'application/trig', 'http://example.com') |
||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))) |
||||
}) |
||||
}) |
||||
|
||||
describe('#dump()', function () { |
||||
it('dump dataset content', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]) |
||||
assert.strictEqual('<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n', store.dump('application/n-quads')) |
||||
}) |
||||
|
||||
it('dump named graph content', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]) |
||||
assert.strictEqual('<http://example.com> <http://example.com> <http://example.com> .\n', store.dump('application/n-triples', ex)) |
||||
}) |
||||
|
||||
it('dump default graph content', function () { |
||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]) |
||||
assert.strictEqual('', store.dump('application/n-triples')) |
||||
}) |
||||
}) |
||||
}) |
||||
dataModel.blankNode("s"), |
||||
dataModel.namedNode("http://example.com/p"), |
||||
dataModel.literal("o"), |
||||
); |
||||
|
||||
describe("Store", () => { |
||||
describe("#add()", () => { |
||||
it("an added quad should be in the store", () => { |
||||
const store = new Store(); |
||||
store.add(dataModel.quad(ex, ex, triple)); |
||||
assert(store.has(dataModel.quad(ex, ex, triple))); |
||||
}); |
||||
}); |
||||
|
||||
describe("#delete()", () => { |
||||
it("an removed quad should not be in the store anymore", () => { |
||||
const store = new Store([dataModel.quad(triple, ex, ex)]); |
||||
assert(store.has(dataModel.quad(triple, ex, ex))); |
||||
store.delete(dataModel.quad(triple, ex, ex)); |
||||
assert(!store.has(dataModel.quad(triple, ex, ex))); |
||||
}); |
||||
}); |
||||
|
||||
describe("#has()", () => { |
||||
it("an added quad should be in the store", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
assert(store.has(dataModel.quad(ex, ex, ex))); |
||||
}); |
||||
}); |
||||
|
||||
describe("#size()", () => { |
||||
it("A store with one quad should have 1 for size", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
assert.strictEqual(1, store.size); |
||||
}); |
||||
}); |
||||
|
||||
describe("#match_quads()", () => { |
||||
it("blank pattern should return all quads", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
const results = store.match(); |
||||
assert.strictEqual(1, results.length); |
||||
assert(dataModel.quad(ex, ex, ex).equals(results[0])); |
||||
}); |
||||
}); |
||||
|
||||
describe("#query()", () => { |
||||
it("ASK true", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
assert.strictEqual(true, store.query("ASK { ?s ?s ?s }")); |
||||
}); |
||||
|
||||
it("ASK false", () => { |
||||
const store = new Store(); |
||||
assert.strictEqual(false, store.query("ASK { FILTER(false)}")); |
||||
}); |
||||
|
||||
it("CONSTRUCT", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
const results = store.query("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }"); |
||||
assert.strictEqual(1, results.length); |
||||
assert(dataModel.quad(ex, ex, ex).equals(results[0])); |
||||
}); |
||||
|
||||
it("SELECT", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
const results = store.query("SELECT ?s WHERE { ?s ?p ?o }"); |
||||
assert.strictEqual(1, results.length); |
||||
assert(ex.equals(results[0].get("s"))); |
||||
}); |
||||
|
||||
it("SELECT with NOW()", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
const results = store.query( |
||||
"SELECT * WHERE { FILTER(2022 <= YEAR(NOW()) && YEAR(NOW()) <= 2100) }", |
||||
); |
||||
assert.strictEqual(1, results.length); |
||||
}); |
||||
|
||||
it("SELECT with RAND()", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
const results = store.query("SELECT (RAND() AS ?y) WHERE {}"); |
||||
assert.strictEqual(1, results.length); |
||||
}); |
||||
|
||||
it("SELECT with base IRI", () => { |
||||
const store = new Store(); |
||||
const results = store.query("SELECT * WHERE { BIND(<t> AS ?t) }", { |
||||
base_iri: "http://example.com/", |
||||
}); |
||||
assert.strictEqual(1, results.length); |
||||
}); |
||||
|
||||
it("SELECT with union graph", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||
const results = store.query("SELECT * WHERE { ?s ?p ?o }", { |
||||
use_default_graph_as_union: true, |
||||
}); |
||||
assert.strictEqual(1, results.length); |
||||
}); |
||||
}); |
||||
|
||||
describe("#update()", () => { |
||||
it("INSERT DATA", () => { |
||||
const store = new Store(); |
||||
store.update( |
||||
"INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }", |
||||
); |
||||
assert.strictEqual(1, store.size); |
||||
}); |
||||
|
||||
it("DELETE DATA", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
store.update( |
||||
"DELETE DATA { <http://example.com> <http://example.com> <http://example.com> }", |
||||
); |
||||
assert.strictEqual(0, store.size); |
||||
}); |
||||
|
||||
it("DELETE WHERE", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex)]); |
||||
store.update("DELETE WHERE { ?v ?v ?v }"); |
||||
assert.strictEqual(0, store.size); |
||||
}); |
||||
}); |
||||
|
||||
describe("#load()", () => { |
||||
it("load NTriples in the default graph", () => { |
||||
const store = new Store(); |
||||
store.load( |
||||
"<http://example.com> <http://example.com> <http://example.com> .", |
||||
"application/n-triples", |
||||
); |
||||
assert(store.has(dataModel.quad(ex, ex, ex))); |
||||
}); |
||||
|
||||
it("load NTriples in an other graph", () => { |
||||
const store = new Store(); |
||||
store.load( |
||||
"<http://example.com> <http://example.com> <http://example.com> .", |
||||
"application/n-triples", |
||||
null, |
||||
ex, |
||||
); |
||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))); |
||||
}); |
||||
|
||||
it("load Turtle with a base IRI", () => { |
||||
const store = new Store(); |
||||
store.load( |
||||
"<http://example.com> <http://example.com> <> .", |
||||
"text/turtle", |
||||
"http://example.com", |
||||
); |
||||
assert(store.has(dataModel.quad(ex, ex, ex))); |
||||
}); |
||||
|
||||
it("load NQuads", () => { |
||||
const store = new Store(); |
||||
store.load( |
||||
"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .", |
||||
"application/n-quads", |
||||
); |
||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))); |
||||
}); |
||||
|
||||
it("load TriG with a base IRI", () => { |
||||
const store = new Store(); |
||||
store.load( |
||||
"GRAPH <> { <http://example.com> <http://example.com> <> }", |
||||
"application/trig", |
||||
"http://example.com", |
||||
); |
||||
assert(store.has(dataModel.quad(ex, ex, ex, ex))); |
||||
}); |
||||
}); |
||||
|
||||
describe("#dump()", () => { |
||||
it("dump dataset content", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||
assert.strictEqual( |
||||
"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n", |
||||
store.dump("application/n-quads"), |
||||
); |
||||
}); |
||||
|
||||
it("dump named graph content", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||
assert.strictEqual( |
||||
"<http://example.com> <http://example.com> <http://example.com> .\n", |
||||
store.dump("application/n-triples", ex), |
||||
); |
||||
}); |
||||
|
||||
it("dump default graph content", () => { |
||||
const store = new Store([dataModel.quad(ex, ex, ex, ex)]); |
||||
assert.strictEqual("", store.dump("application/n-triples", dataModel.defaultGraph())); |
||||
}); |
||||
}); |
||||
}); |
||||
|
@ -1,63 +0,0 @@ |
||||
[package] |
||||
name = "oxigraph" |
||||
version = "0.3.1" |
||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
||||
license = "MIT OR Apache-2.0" |
||||
readme = "README.md" |
||||
keywords = ["RDF", "SPARQL", "graph-database", "database"] |
||||
categories = ["database-implementations"] |
||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib" |
||||
homepage = "https://oxigraph.org/" |
||||
description = """ |
||||
a SPARQL database and RDF toolkit |
||||
""" |
||||
edition = "2021" |
||||
|
||||
[package.metadata.docs.rs] |
||||
all-features = true |
||||
|
||||
[features] |
||||
default = [] |
||||
http_client = ["oxhttp", "oxhttp/rustls"] |
||||
|
||||
[dependencies] |
||||
rand = "0.8" |
||||
md-5 = "0.10" |
||||
sha-1 = "0.10" |
||||
sha2 = "0.10" |
||||
digest = "0.10" |
||||
regex = "1" |
||||
oxilangtag = "0.1" |
||||
oxiri = "0.2" |
||||
rio_api = "0.7" |
||||
rio_turtle = "0.7" |
||||
rio_xml = "0.7" |
||||
hex = "0.4" |
||||
nom = "7" |
||||
siphasher = "0.3" |
||||
lazy_static = "1" |
||||
sysinfo = "0.23" |
||||
oxrdf = { version = "0.1.0", path="oxrdf", features = ["rdf-star"] } |
||||
spargebra = { version = "0.2.0", path="spargebra", features = ["rdf-star"] } |
||||
sparesults = { version = "0.1.0", path="sparesults", features = ["rdf-star"] } |
||||
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies] |
||||
libc = "0.2" |
||||
oxrocksdb-sys = { version = "0.3.1", path="../oxrocksdb-sys" } |
||||
oxhttp = { version = "0.1", optional = true } |
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies] |
||||
js-sys = "0.3" |
||||
getrandom = {version="0.2", features=["js"]} |
||||
|
||||
[dev-dependencies] |
||||
criterion = "0.3" |
||||
oxhttp = "0.1" |
||||
zstd = "0.11" |
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dev-dependencies] |
||||
wasm-bindgen-test = "0.3" |
||||
|
||||
[[bench]] |
||||
name = "store" |
||||
harness = false |
@ -1,69 +1,13 @@ |
||||
Oxigraph |
||||
======== |
||||
|
||||
[](https://crates.io/crates/oxigraph) |
||||
[](https://docs.rs/oxigraph) |
||||
[](https://crates.io/crates/oxigraph) |
||||
[](https://github.com/oxigraph/oxigraph/actions) |
||||
[](https://gitter.im/oxigraph/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) |
||||
|
||||
Oxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. |
||||
|
||||
Its goal is to provide a compliant, safe and fast on-disk graph database. |
||||
It also provides a set of utility functions for reading, writing, and processing RDF files. |
||||
|
||||
Oxigraph is in heavy development and SPARQL query evaluation has not been optimized yet. |
||||
|
||||
Oxigraph also provides [a standalone HTTP server](https://crates.io/crates/oxigraph_server) and [a Python library](https://oxigraph.org/pyoxigraph/) based on this library. |
||||
|
||||
|
||||
Oxigraph implements the following specifications: |
||||
* [SPARQL 1.1 Query](https://www.w3.org/TR/sparql11-query/), [SPARQL 1.1 Update](https://www.w3.org/TR/sparql11-update/), and [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/). |
||||
* [Turtle](https://www.w3.org/TR/turtle/), [TriG](https://www.w3.org/TR/trig/), [N-Triples](https://www.w3.org/TR/n-triples/), [N-Quads](https://www.w3.org/TR/n-quads/), and [RDF XML](https://www.w3.org/TR/rdf-syntax-grammar/) RDF serialization formats for both data ingestion and retrieval using the [Rio library](https://github.com/oxigraph/rio). |
||||
* [SPARQL Query Results XML Format](http://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/). |
||||
|
||||
A preliminary benchmark [is provided](../bench/README.md). Oxigraph internal design [is described on the wiki](https://github.com/oxigraph/oxigraph/wiki/Architecture). |
||||
|
||||
The main entry point of Oxigraph is the [`Store`](store::Store) struct: |
||||
```rust |
||||
use oxigraph::store::Store; |
||||
use oxigraph::model::*; |
||||
use oxigraph::sparql::QueryResults; |
||||
|
||||
let store = Store::new().unwrap(); |
||||
|
||||
// insertion |
||||
let ex = NamedNode::new("http://example.com").unwrap(); |
||||
let quad = Quad::new(ex.clone(), ex.clone(), ex.clone(), GraphName::DefaultGraph); |
||||
store.insert(&quad).unwrap(); |
||||
|
||||
// quad filter |
||||
let results = store.quads_for_pattern(Some(ex.as_ref().into()), None, None, None).collect::<Result<Vec<Quad>,_>>().unwrap(); |
||||
assert_eq!(vec![quad], results); |
||||
|
||||
// SPARQL query |
||||
if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }").unwrap() { |
||||
assert_eq!(solutions.next().unwrap().unwrap().get("s"), Some(&ex.into())); |
||||
} |
||||
``` |
||||
|
||||
Some parts of this library are available as standalone crates: |
||||
* [`oxrdf`](https://crates.io/crates/oxrdf) provides datastructures encoding RDF basic concepts (the `oxigraph::model` module). |
||||
* [`spargebra`](https://crates.io/crates/spargebra) provides a SPARQL parser. |
||||
* [`sparesults`](https://crates.io/crates/sparesults) provides parsers and serializers for SPARQL result formats. |
||||
|
||||
## License |
||||
|
||||
This project is licensed under either of |
||||
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
||||
`<http://www.apache.org/licenses/LICENSE-2.0>`) |
||||
* MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
||||
`<http://opensource.org/licenses/MIT>`) |
||||
|
||||
at your option. |
||||
|
||||
|
||||
### Contribution |
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
||||
Oxigraph Rust crates |
||||
==================== |
||||
|
||||
Oxigraph is implemented in Rust. |
||||
It is composed of a main library, [`oxigraph`](./oxigraph), and a set of smaller crates used by the `oxigraph` crate: |
||||
* [`oxrdf`](./oxrdf), datastructures encoding RDF basic concepts (the `model` module of the `oxigraph` crate). |
||||
* [`oxrdfio`](./oxrdfio), a unified parser and serializer API for RDF formats (the `io` module of the `oxigraph` crate). It relies in turn on: |
||||
* [`oxttl`](./oxttl), N-Triple, N-Quad, Turtle, TriG and N3 parsing and serialization. |
||||
* [`oxrdfxml`](./oxrdfxml), RDF/XML parsing and serialization. |
||||
* [`spargebra`](./spargebra), a SPARQL parser. |
||||
* [`sparesults`](./sparesults), parsers and serializers for SPARQL result formats (the `sparql::results` module of the `oxigraph` crate). |
||||
* [`sparopt`](./sparopt), a SPARQL optimizer. |
||||
* [`oxsdatatypes`](./oxsdatatypes), an implementation of some XML Schema datatypes. |
||||
|
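To make the mapping concrete, here is a minimal sketch (module paths and types as described above and as used elsewhere in this changeset; it assumes nothing beyond those re-exports):

```rust
use oxigraph::io::RdfFormat;    // re-exported from the `oxrdfio` crate
use oxigraph::model::NamedNode; // re-exported from the `oxrdf` crate

fn main() {
    // The smaller crates surface through the main crate's modules.
    let ex = NamedNode::new("http://example.com").unwrap();
    let format = RdfFormat::NTriples;
    println!("{ex} can be serialized as {format:?}");
}
```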
@ -1,208 +0,0 @@ |
||||
use criterion::{criterion_group, criterion_main, Criterion, Throughput}; |
||||
use oxhttp::model::{Method, Request, Status}; |
||||
use oxigraph::io::GraphFormat; |
||||
use oxigraph::model::GraphNameRef; |
||||
use oxigraph::sparql::{Query, QueryResults, Update}; |
||||
use oxigraph::store::Store; |
||||
use rand::random; |
||||
use std::env::temp_dir; |
||||
use std::fs::{remove_dir_all, File}; |
||||
use std::io::{BufRead, BufReader, Cursor, Read}; |
||||
use std::path::{Path, PathBuf}; |
||||
|
||||
fn store_load(c: &mut Criterion) { |
||||
{ |
||||
let mut data = Vec::new(); |
||||
read_data("explore-1000.nt.zst") |
||||
.read_to_end(&mut data) |
||||
.unwrap(); |
||||
|
||||
let mut group = c.benchmark_group("store load"); |
||||
group.throughput(Throughput::Bytes(data.len() as u64)); |
||||
group.sample_size(10); |
||||
group.bench_function("load BSBM explore 1000 in memory", |b| { |
||||
b.iter(|| { |
||||
let store = Store::new().unwrap(); |
||||
do_load(&store, &data); |
||||
}) |
||||
}); |
||||
group.bench_function("load BSBM explore 1000 in on disk", |b| { |
||||
b.iter(|| { |
||||
let path = TempDir::default(); |
||||
let store = Store::open(&path.0).unwrap(); |
||||
do_load(&store, &data); |
||||
}) |
||||
}); |
||||
group.bench_function("load BSBM explore 1000 in on disk with bulk load", |b| { |
||||
b.iter(|| { |
||||
let path = TempDir::default(); |
||||
let store = Store::open(&path.0).unwrap(); |
||||
do_bulk_load(&store, &data); |
||||
}) |
||||
}); |
||||
} |
||||
|
||||
{ |
||||
let mut data = Vec::new(); |
||||
read_data("explore-10000.nt.zst") |
||||
.read_to_end(&mut data) |
||||
.unwrap(); |
||||
|
||||
let mut group = c.benchmark_group("store load large"); |
||||
group.throughput(Throughput::Bytes(data.len() as u64)); |
||||
group.sample_size(10); |
||||
group.bench_function("load BSBM explore 10000 in on disk with bulk load", |b| { |
||||
b.iter(|| { |
||||
let path = TempDir::default(); |
||||
let store = Store::open(&path.0).unwrap(); |
||||
do_bulk_load(&store, &data); |
||||
}) |
||||
}); |
||||
} |
||||
} |
||||
|
||||
fn do_load(store: &Store, data: &[u8]) { |
||||
store |
||||
.load_graph( |
||||
Cursor::new(&data), |
||||
GraphFormat::NTriples, |
||||
GraphNameRef::DefaultGraph, |
||||
None, |
||||
) |
||||
.unwrap(); |
||||
store.optimize().unwrap(); |
||||
} |
||||
|
||||
fn do_bulk_load(store: &Store, data: &[u8]) { |
||||
store |
||||
.bulk_loader() |
||||
.load_graph( |
||||
Cursor::new(&data), |
||||
GraphFormat::NTriples, |
||||
GraphNameRef::DefaultGraph, |
||||
None, |
||||
) |
||||
.unwrap(); |
||||
store.optimize().unwrap(); |
||||
} |
||||
|
||||
fn store_query_and_update(c: &mut Criterion) { |
||||
let mut data = Vec::new(); |
||||
read_data("explore-1000.nt.zst") |
||||
.read_to_end(&mut data) |
||||
.unwrap(); |
||||
|
||||
let operations = read_data("mix-exploreAndUpdate-1000.tsv.zst") |
||||
.lines() |
||||
.map(|l| { |
||||
let l = l.unwrap(); |
||||
let mut parts = l.trim().split('\t'); |
||||
let kind = parts.next().unwrap(); |
||||
let operation = parts.next().unwrap(); |
||||
match kind { |
||||
"query" => Operation::Query(Query::parse(operation, None).unwrap()), |
||||
"update" => Operation::Update(Update::parse(operation, None).unwrap()), |
||||
_ => panic!("Unexpected operation kind {}", kind), |
||||
} |
||||
}) |
||||
.collect::<Vec<_>>(); |
||||
let query_operations = operations |
||||
.iter() |
||||
.filter(|o| matches!(o, Operation::Query(_))) |
||||
.cloned() |
||||
.collect::<Vec<_>>(); |
||||
|
||||
let mut group = c.benchmark_group("store operations"); |
||||
group.throughput(Throughput::Elements(operations.len() as u64)); |
||||
group.sample_size(10); |
||||
|
||||
{ |
||||
let memory_store = Store::new().unwrap(); |
||||
do_bulk_load(&memory_store, &data); |
||||
group.bench_function("BSBM explore 1000 query in memory", |b| { |
||||
b.iter(|| run_operation(&memory_store, &query_operations)) |
||||
}); |
||||
group.bench_function("BSBM explore 1000 queryAndUpdate in memory", |b| { |
||||
b.iter(|| run_operation(&memory_store, &operations)) |
||||
}); |
||||
} |
||||
|
||||
{ |
||||
let path = TempDir::default(); |
||||
let disk_store = Store::open(&path.0).unwrap(); |
||||
do_bulk_load(&disk_store, &data); |
||||
group.bench_function("BSBM explore 1000 query on disk", |b| { |
||||
b.iter(|| run_operation(&disk_store, &query_operations)) |
||||
}); |
||||
group.bench_function("BSBM explore 1000 queryAndUpdate on disk", |b| { |
||||
b.iter(|| run_operation(&disk_store, &operations)) |
||||
}); |
||||
} |
||||
} |
||||
|
||||
fn run_operation(store: &Store, operations: &[Operation]) { |
||||
for operation in operations { |
||||
match operation { |
||||
Operation::Query(q) => match store.query(q.clone()).unwrap() { |
||||
QueryResults::Boolean(_) => (), |
||||
QueryResults::Solutions(s) => { |
||||
for s in s { |
||||
s.unwrap(); |
||||
} |
||||
} |
||||
QueryResults::Graph(g) => { |
||||
for t in g { |
||||
t.unwrap(); |
||||
} |
||||
} |
||||
}, |
||||
Operation::Update(u) => store.update(u.clone()).unwrap(), |
||||
} |
||||
} |
||||
} |
||||
|
||||
criterion_group!(store, store_query_and_update, store_load); |
||||
|
||||
criterion_main!(store); |
||||
|
||||
fn read_data(file: &str) -> impl BufRead { |
||||
if !Path::new(file).exists() { |
||||
let mut client = oxhttp::Client::new(); |
||||
client.set_redirection_limit(5); |
||||
let url = format!( |
||||
"https://github.com/Tpt/bsbm-tools/releases/download/v0.2/{}", |
||||
file |
||||
); |
||||
let request = Request::builder(Method::GET, url.parse().unwrap()).build(); |
||||
let response = client.request(request).unwrap(); |
||||
assert_eq!( |
||||
response.status(), |
||||
Status::OK, |
||||
"{}", |
||||
response.into_body().to_string().unwrap() |
||||
); |
||||
std::io::copy(&mut response.into_body(), &mut File::create(file).unwrap()).unwrap(); |
||||
} |
||||
BufReader::new(zstd::Decoder::new(File::open(file).unwrap()).unwrap()) |
||||
} |
||||
|
||||
#[allow(clippy::large_enum_variant)] |
||||
#[derive(Clone)] |
||||
enum Operation { |
||||
Query(Query), |
||||
Update(Update), |
||||
} |
||||
|
||||
struct TempDir(PathBuf); |
||||
|
||||
impl Default for TempDir { |
||||
fn default() -> Self { |
||||
Self(temp_dir().join(format!("oxigraph-bench-{}", random::<u128>()))) |
||||
} |
||||
} |
||||
|
||||
impl Drop for TempDir { |
||||
fn drop(&mut self) { |
||||
remove_dir_all(&self.0).unwrap() |
||||
} |
||||
} |
@ -0,0 +1,59 @@ |
||||
[package] |
||||
name = "oxigraph" |
||||
version.workspace = true |
||||
authors.workspace = true |
||||
license.workspace = true |
||||
readme = "README.md" |
||||
keywords = ["RDF", "SPARQL", "graph-database", "database"] |
||||
categories = ["database-implementations"] |
||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib/oxigraph" |
||||
homepage = "https://oxigraph.org/" |
||||
documentation = "https://docs.rs/oxigraph" |
||||
description = """ |
||||
a SPARQL database and RDF toolkit |
||||
""" |
||||
edition.workspace = true |
||||
rust-version.workspace = true |
||||
|
||||
[features] |
||||
js = ["getrandom/js", "oxsdatatypes/js", "js-sys"] |
||||
|
||||
|
||||
[dependencies] |
||||
digest.workspace = true |
||||
hex.workspace = true |
||||
json-event-parser.workspace = true |
||||
md-5.workspace = true |
||||
oxilangtag.workspace = true |
||||
oxiri.workspace = true |
||||
oxrdf = { workspace = true, features = ["rdf-star", "oxsdatatypes"] } |
||||
oxrdfio = { workspace = true, features = ["rdf-star"] } |
||||
oxsdatatypes.workspace = true |
||||
rand.workspace = true |
||||
regex.workspace = true |
||||
sha1.workspace = true |
||||
sha2.workspace = true |
||||
siphasher.workspace = true |
||||
sparesults = { workspace = true, features = ["rdf-star"] } |
||||
spargebra = { workspace = true, features = ["rdf-star", "sep-0002", "sep-0006"] } |
||||
sparopt = { workspace = true, features = ["rdf-star", "sep-0002", "sep-0006"] } |
||||
thiserror.workspace = true |
||||
|
||||
[target.'cfg(not(target_family = "wasm"))'.dependencies] |
||||
libc = "0.2" |
||||
rocksdb.workspace = true |
||||
|
||||
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies] |
||||
getrandom.workspace = true |
||||
js-sys = { workspace = true, optional = true } |
||||
|
||||
[target.'cfg(not(target_family = "wasm"))'.dev-dependencies] |
||||
codspeed-criterion-compat.workspace = true |
||||
zstd.workspace = true |
||||
|
||||
[lints] |
||||
workspace = true |
||||
|
||||
[package.metadata.docs.rs] |
||||
rustdoc-args = ["--cfg", "docsrs"] |
||||
|
@ -0,0 +1,82 @@ |
||||
Oxigraph |
||||
======== |
||||
|
||||
[](https://crates.io/crates/oxigraph) |
||||
[](https://docs.rs/oxigraph) |
||||
[](https://crates.io/crates/oxigraph) |
||||
[](https://github.com/oxigraph/oxigraph/actions) |
||||
[](https://gitter.im/oxigraph/community) |
||||
|
||||
Oxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. |
||||
|
||||
Its goal is to provide a compliant, safe and fast on-disk graph database. |
||||
It also provides a set of utility functions for reading, writing, and processing RDF files. |
||||
|
||||
Oxigraph is in heavy development and SPARQL query evaluation has not been optimized yet. |
||||
|
||||
Oxigraph also provides [a CLI tool](https://crates.io/crates/oxigraph-cli) and [a Python library](https://pyoxigraph.readthedocs.io/) based on this library. |
||||
|
||||
|
||||
Oxigraph implements the following specifications: |
||||
* [SPARQL 1.1 Query](https://www.w3.org/TR/sparql11-query/), [SPARQL 1.1 Update](https://www.w3.org/TR/sparql11-update/), and [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/). |
||||
* [Turtle](https://www.w3.org/TR/turtle/), [TriG](https://www.w3.org/TR/trig/), [N-Triples](https://www.w3.org/TR/n-triples/), [N-Quads](https://www.w3.org/TR/n-quads/), and [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) RDF serialization formats for both data ingestion and retrieval. |
||||
* [SPARQL Query Results XML Format](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/). |
||||
|
||||
A preliminary benchmark [is provided](../bench/README.md). Oxigraph internal design [is described on the wiki](https://github.com/oxigraph/oxigraph/wiki/Architecture). |
||||
|
||||
The main entry point of Oxigraph is the [`Store`](store::Store) struct: |
||||
```rust |
||||
use oxigraph::store::Store; |
||||
use oxigraph::model::*; |
||||
use oxigraph::sparql::QueryResults; |
||||
|
||||
let store = Store::new().unwrap(); |
||||
|
||||
// insertion |
||||
let ex = NamedNode::new("http://example.com").unwrap(); |
||||
let quad = Quad::new(ex.clone(), ex.clone(), ex.clone(), GraphName::DefaultGraph); |
||||
store.insert(&quad).unwrap(); |
||||
|
||||
// quad filter |
||||
let results = store.quads_for_pattern(Some(ex.as_ref().into()), None, None, None).collect::<Result<Vec<Quad>,_>>().unwrap(); |
||||
assert_eq!(vec![quad], results); |
||||
|
||||
// SPARQL query |
||||
if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }").unwrap() { |
||||
assert_eq!(solutions.next().unwrap().unwrap().get("s"), Some(&ex.into())); |
||||
} |
||||
``` |
||||
|
||||
It is based on these crates that can be used separately: |
||||
* [`oxrdf`](https://crates.io/crates/oxrdf), datastructures encoding RDF basic concepts (the [`oxigraph::model`](crate::model) module). |
||||
* [`oxrdfio`](https://crates.io/crates/oxrdfio), a unified parser and serializer API for RDF formats (the [`oxigraph::io`](crate::io) module). It relies in turn on: |
||||
* [`oxttl`](https://crates.io/crates/oxttl), N-Triple, N-Quad, Turtle, TriG and N3 parsing and serialization. |
||||
* [`oxrdfxml`](https://crates.io/crates/oxrdfxml), RDF/XML parsing and serialization. |
||||
* [`spargebra`](https://crates.io/crates/spargebra), a SPARQL parser. |
||||
* [`sparesults`](https://crates.io/crates/sparesults), parsers and serializers for SPARQL result formats (the [`oxigraph::sparql::results`](crate::sparql::results) module). |
||||
* [`sparopt`](https://crates.io/crates/sparopt), a SPARQL optimizer. |
||||
* [`oxsdatatypes`](https://crates.io/crates/oxsdatatypes), an implementation of some XML Schema datatypes. |
||||
|
||||
To build the library locally, don't forget to clone the submodules: use `git clone --recursive https://github.com/oxigraph/oxigraph.git` to clone the repository together with its submodules, or run `git submodule update --init` inside an already cloned repository. |
||||
|
||||
It is possible to disable the RocksDB storage backend and use only the in-memory fallback by disabling the `rocksdb` default feature: |
||||
```toml |
||||
oxigraph = { version = "*", default-features = false } |
||||
``` |
||||
This is the default behavior when compiling Oxigraph to WASM. |
||||
|
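The `Store` API stays the same whichever backend is compiled in. A minimal sketch (the on-disk constructor assumes the default `rocksdb` feature, and the path below is only an illustration):

```rust
use oxigraph::store::Store;

// Always available: a transient in-memory store.
let _memory_store = Store::new().unwrap();

// Requires the default `rocksdb` feature (not available on WASM): a persistent on-disk store.
let _disk_store = Store::open("example_data.oxigraph").unwrap();
```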
||||
## License |
||||
|
||||
This project is licensed under either of |
||||
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
||||
`<http://www.apache.org/licenses/LICENSE-2.0>`) |
||||
* MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
||||
`<http://opensource.org/licenses/MIT>`) |
||||
|
||||
at your option. |
||||
|
||||
|
||||
### Contribution |
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
@ -0,0 +1,39 @@ |
||||
//! Utilities to read and write RDF graphs and datasets using [OxRDF I/O](https://crates.io/crates/oxrdfio).
|
||||
//!
|
||||
//! The entry points of this module are the two [`RdfParser`] and [`RdfSerializer`] structs.
|
||||
//!
|
||||
//! Usage example converting a Turtle file to an N-Triples file:
|
||||
//! ```
|
||||
//! use oxigraph::io::{RdfFormat, RdfParser, RdfSerializer};
|
||||
//!
|
||||
//! let turtle_file = b"@base <http://example.com/> .
|
||||
//! @prefix schema: <http://schema.org/> .
|
||||
//! <foo> a schema:Person ;
|
||||
//! schema:name \"Foo\" .
|
||||
//! <bar> a schema:Person ;
|
||||
//! schema:name \"Bar\" .";
|
||||
//!
|
||||
//! let ntriples_file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
|
||||
//! <http://example.com/foo> <http://schema.org/name> \"Foo\" .
|
||||
//! <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
|
||||
//! <http://example.com/bar> <http://schema.org/name> \"Bar\" .
|
||||
//! ";
|
||||
//!
|
||||
//! let mut writer = RdfSerializer::from_format(RdfFormat::NTriples).serialize_to_write(Vec::new());
|
||||
//! for quad in RdfParser::from_format(RdfFormat::Turtle).parse_read(turtle_file.as_ref()) {
|
||||
//! writer.write_quad(&quad.unwrap()).unwrap();
|
||||
//! }
|
||||
//! assert_eq!(writer.finish().unwrap(), ntriples_file);
|
||||
//! ```
|
||||
|
||||
mod format; |
||||
pub mod read; |
||||
pub mod write; |
||||
|
||||
#[allow(deprecated)] |
||||
pub use self::format::{DatasetFormat, GraphFormat}; |
||||
#[allow(deprecated)] |
||||
pub use self::read::{DatasetParser, GraphParser}; |
||||
#[allow(deprecated)] |
||||
pub use self::write::{DatasetSerializer, GraphSerializer}; |
||||
pub use oxrdfio::*; |
@ -0,0 +1,199 @@ |
||||
#![allow(deprecated)] |
||||
|
||||
//! Utilities to read RDF graphs and datasets.
|
||||
|
||||
use crate::io::{DatasetFormat, GraphFormat}; |
||||
use crate::model::*; |
||||
use oxrdfio::{FromReadQuadReader, RdfParseError, RdfParser}; |
||||
use std::io::Read; |
||||
|
||||
/// A parser for RDF graph serialization formats.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`])
|
||||
/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`])
|
||||
/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`])
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||
///
|
||||
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
|
||||
///
|
||||
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
|
||||
/// let triples = parser
|
||||
/// .read_triples(file.as_bytes())
|
||||
/// .collect::<Result<Vec<_>, _>>()?;
|
||||
///
|
||||
/// assert_eq!(triples.len(), 1);
|
||||
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||
/// # std::io::Result::Ok(())
|
||||
/// ```
|
||||
#[deprecated(note = "use RdfParser instead", since = "0.4.0")] |
||||
pub struct GraphParser { |
||||
inner: RdfParser, |
||||
} |
||||
|
||||
impl GraphParser { |
||||
/// Builds a parser for the given format.
|
||||
#[inline] |
||||
pub fn from_format(format: GraphFormat) -> Self { |
||||
Self { |
||||
inner: RdfParser::from_format(format.into()) |
||||
.without_named_graphs() |
||||
.rename_blank_nodes(), |
||||
} |
||||
} |
||||
|
||||
/// Provides an IRI that could be used to resolve the file's relative IRIs.
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||
///
|
||||
/// let file = "</s> </p> </o> .";
|
||||
///
|
||||
/// let parser =
|
||||
/// GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?;
|
||||
/// let triples = parser
|
||||
/// .read_triples(file.as_bytes())
|
||||
/// .collect::<Result<Vec<_>, _>>()?;
|
||||
///
|
||||
/// assert_eq!(triples.len(), 1);
|
||||
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[inline] |
||||
pub fn with_base_iri(self, base_iri: impl Into<String>) -> Result<Self, IriParseError> { |
||||
Ok(Self { |
||||
inner: self.inner.with_base_iri(base_iri)?, |
||||
}) |
||||
} |
||||
|
||||
/// Executes the parsing itself on a [`Read`] implementation and returns an iterator of triples.
|
||||
pub fn read_triples<R: Read>(self, reader: R) -> TripleReader<R> { |
||||
TripleReader { |
||||
parser: self.inner.parse_read(reader), |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// An iterator yielding read triples.
|
||||
/// Could be built using a [`GraphParser`].
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{GraphFormat, GraphParser};
|
||||
///
|
||||
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
|
||||
///
|
||||
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
|
||||
/// let triples = parser
|
||||
/// .read_triples(file.as_bytes())
|
||||
/// .collect::<Result<Vec<_>, _>>()?;
|
||||
///
|
||||
/// assert_eq!(triples.len(), 1);
|
||||
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||
/// # std::io::Result::Ok(())
|
||||
/// ```
|
||||
#[must_use] |
||||
pub struct TripleReader<R: Read> { |
||||
parser: FromReadQuadReader<R>, |
||||
} |
||||
|
||||
impl<R: Read> Iterator for TripleReader<R> { |
||||
type Item = Result<Triple, RdfParseError>; |
||||
|
||||
fn next(&mut self) -> Option<Self::Item> { |
||||
Some(self.parser.next()?.map(Into::into).map_err(Into::into)) |
||||
} |
||||
} |
||||
|
||||
/// A parser for RDF dataset serialization formats.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`])
|
||||
/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`])
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||
///
|
||||
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";
|
||||
///
|
||||
/// let parser = DatasetParser::from_format(DatasetFormat::NQuads);
|
||||
/// let quads = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
|
||||
///
|
||||
/// assert_eq!(quads.len(), 1);
|
||||
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
|
||||
/// # std::io::Result::Ok(())
|
||||
/// ```
|
||||
#[deprecated(note = "use RdfParser instead", since = "0.4.0")] |
||||
pub struct DatasetParser { |
||||
inner: RdfParser, |
||||
} |
||||
|
||||
impl DatasetParser { |
||||
/// Builds a parser for the given format.
|
||||
#[inline] |
||||
pub fn from_format(format: DatasetFormat) -> Self { |
||||
Self { |
||||
inner: RdfParser::from_format(format.into()).rename_blank_nodes(), |
||||
} |
||||
} |
||||
|
||||
/// Provides an IRI that could be used to resolve the file's relative IRIs.
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||
///
|
||||
/// let file = "<g> { </s> </p> </o> }";
|
||||
///
|
||||
/// let parser =
|
||||
/// DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?;
|
||||
/// let triples = parser
|
||||
/// .read_quads(file.as_bytes())
|
||||
/// .collect::<Result<Vec<_>, _>>()?;
|
||||
///
|
||||
/// assert_eq!(triples.len(), 1);
|
||||
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[inline] |
||||
pub fn with_base_iri(self, base_iri: impl Into<String>) -> Result<Self, IriParseError> { |
||||
Ok(Self { |
||||
inner: self.inner.with_base_iri(base_iri)?, |
||||
}) |
||||
} |
||||
|
||||
/// Executes the parsing itself on a [`Read`] implementation and returns an iterator of quads.
|
||||
pub fn read_quads<R: Read>(self, reader: R) -> QuadReader<R> { |
||||
QuadReader { |
||||
parser: self.inner.parse_read(reader), |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// An iterator yielding read quads.
|
||||
/// Could be built using a [`DatasetParser`].
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{DatasetFormat, DatasetParser};
|
||||
///
|
||||
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";
|
||||
///
|
||||
/// let parser = DatasetParser::from_format(DatasetFormat::NQuads);
|
||||
/// let quads = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
|
||||
///
|
||||
/// assert_eq!(quads.len(), 1);
|
||||
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
|
||||
/// # std::io::Result::Ok(())
|
||||
/// ```
|
||||
#[must_use] |
||||
pub struct QuadReader<R: Read> { |
||||
parser: FromReadQuadReader<R>, |
||||
} |
||||
|
||||
impl<R: Read> Iterator for QuadReader<R> { |
||||
type Item = Result<Quad, RdfParseError>; |
||||
|
||||
fn next(&mut self) -> Option<Self::Item> { |
||||
Some(self.parser.next()?.map_err(Into::into)) |
||||
} |
||||
} |
@ -0,0 +1,185 @@ |
||||
#![allow(deprecated)] |
||||
|
||||
//! Utilities to write RDF graphs and datasets.
|
||||
|
||||
use crate::io::{DatasetFormat, GraphFormat}; |
||||
use crate::model::*; |
||||
use oxrdfio::{RdfSerializer, ToWriteQuadWriter}; |
||||
use std::io::{self, Write}; |
||||
|
||||
/// A serializer for RDF graph serialization formats.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
/// * [N-Triples](https://www.w3.org/TR/n-triples/) ([`GraphFormat::NTriples`])
|
||||
/// * [Turtle](https://www.w3.org/TR/turtle/) ([`GraphFormat::Turtle`])
|
||||
/// * [RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/) ([`GraphFormat::RdfXml`])
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{GraphFormat, GraphSerializer};
|
||||
/// use oxigraph::model::*;
|
||||
///
|
||||
/// let mut buffer = Vec::new();
|
||||
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
|
||||
/// writer.write(&Triple {
|
||||
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||
/// })?;
|
||||
/// writer.finish()?;
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// buffer.as_slice(),
|
||||
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
|
||||
/// );
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")] |
||||
pub struct GraphSerializer { |
||||
inner: RdfSerializer, |
||||
} |
||||
|
||||
impl GraphSerializer { |
||||
/// Builds a serializer for the given format
|
||||
#[inline] |
||||
pub fn from_format(format: GraphFormat) -> Self { |
||||
Self { |
||||
inner: RdfSerializer::from_format(format.into()), |
||||
} |
||||
} |
||||
|
||||
/// Returns a [`TripleWriter`] allowing writing triples into the given [`Write`] implementation
|
||||
pub fn triple_writer<W: Write>(self, write: W) -> TripleWriter<W> { |
||||
TripleWriter { |
||||
writer: self.inner.serialize_to_write(write), |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// Allows writing triples.
|
||||
/// Could be built using a [`GraphSerializer`].
|
||||
///
|
||||
/// <div class="warning">
|
||||
///
|
||||
/// Do not forget to run the [`finish`](TripleWriter::finish()) method to properly write the last bytes of the file.</div>
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{GraphFormat, GraphSerializer};
|
||||
/// use oxigraph::model::*;
|
||||
///
|
||||
/// let mut buffer = Vec::new();
|
||||
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
|
||||
/// writer.write(&Triple {
|
||||
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||
/// })?;
|
||||
/// writer.finish()?;
|
||||
///
|
||||
/// assert_eq!(
|
||||
/// buffer.as_slice(),
|
||||
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
|
||||
/// );
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[must_use] |
||||
pub struct TripleWriter<W: Write> { |
||||
writer: ToWriteQuadWriter<W>, |
||||
} |
||||
|
||||
impl<W: Write> TripleWriter<W> { |
||||
/// Writes a triple
|
||||
pub fn write<'a>(&mut self, triple: impl Into<TripleRef<'a>>) -> io::Result<()> { |
||||
self.writer.write_triple(triple) |
||||
} |
||||
|
||||
/// Writes the last bytes of the file
|
||||
pub fn finish(self) -> io::Result<()> { |
||||
self.writer.finish()?.flush() |
||||
} |
||||
} |
||||
|
||||
/// A serializer for RDF dataset serialization formats.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
/// * [N-Quads](https://www.w3.org/TR/n-quads/) ([`DatasetFormat::NQuads`])
|
||||
/// * [TriG](https://www.w3.org/TR/trig/) ([`DatasetFormat::TriG`])
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
|
||||
/// use oxigraph::model::*;
|
||||
///
|
||||
/// let mut buffer = Vec::new();
|
||||
/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
|
||||
/// writer.write(&Quad {
|
||||
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||
/// graph_name: NamedNode::new("http://example.com/g")?.into(),
|
||||
/// })?;
|
||||
/// writer.finish()?;
|
||||
///
|
||||
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")] |
||||
pub struct DatasetSerializer { |
||||
inner: RdfSerializer, |
||||
} |
||||
|
||||
impl DatasetSerializer { |
||||
/// Builds a serializer for the given format
|
||||
#[inline] |
||||
pub fn from_format(format: DatasetFormat) -> Self { |
||||
Self { |
||||
inner: RdfSerializer::from_format(format.into()), |
||||
} |
||||
} |
||||
|
||||
/// Returns a [`QuadWriter`] allowing writing quads into the given [`Write`] implementation
|
||||
pub fn quad_writer<W: Write>(self, write: W) -> QuadWriter<W> { |
||||
QuadWriter { |
||||
writer: self.inner.serialize_to_write(write), |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// Allows writing quads.
|
||||
/// Could be built using a [`DatasetSerializer`].
|
||||
///
|
||||
/// <div class="warning">
|
||||
///
|
||||
/// Do not forget to run the [`finish`](QuadWriter::finish()) method to properly write the last bytes of the file.</div>
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::{DatasetFormat, DatasetSerializer};
|
||||
/// use oxigraph::model::*;
|
||||
///
|
||||
/// let mut buffer = Vec::new();
|
||||
/// let mut writer = DatasetSerializer::from_format(DatasetFormat::NQuads).quad_writer(&mut buffer);
|
||||
/// writer.write(&Quad {
|
||||
/// subject: NamedNode::new("http://example.com/s")?.into(),
|
||||
/// predicate: NamedNode::new("http://example.com/p")?,
|
||||
/// object: NamedNode::new("http://example.com/o")?.into(),
|
||||
/// graph_name: NamedNode::new("http://example.com/g")?.into(),
|
||||
/// })?;
|
||||
/// writer.finish()?;
|
||||
///
|
||||
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .\n".as_bytes());
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[must_use] |
||||
pub struct QuadWriter<W: Write> { |
||||
writer: ToWriteQuadWriter<W>, |
||||
} |
||||
|
||||
impl<W: Write> QuadWriter<W> { |
||||
/// Writes a quad
|
||||
pub fn write<'a>(&mut self, quad: impl Into<QuadRef<'a>>) -> io::Result<()> { |
||||
self.writer.write_quad(quad) |
||||
} |
||||
|
||||
/// Writes the last bytes of the file
|
||||
pub fn finish(self) -> io::Result<()> { |
||||
self.writer.finish()?.flush() |
||||
} |
||||
} |
@ -0,0 +1,12 @@ |
||||
#![doc = include_str!("../README.md")] |
||||
#![doc(test(attr(deny(warnings))))] |
||||
#![doc(test(attr(allow(deprecated))))] |
||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))] |
||||
#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] |
||||
#![doc(html_logo_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")] |
||||
|
||||
pub mod io; |
||||
pub mod model; |
||||
pub mod sparql; |
||||
mod storage; |
||||
pub mod store; |
@ -0,0 +1,22 @@ |
||||
//! Implements data structures for [RDF 1.1 Concepts](https://www.w3.org/TR/rdf11-concepts/) using [OxRDF](https://crates.io/crates/oxrdf).
|
||||
//!
|
||||
//! Usage example:
|
||||
//!
|
||||
//! ```
|
||||
//! use oxigraph::model::*;
|
||||
//!
|
||||
//! let mut graph = Graph::default();
|
||||
//!
|
||||
//! // insertion
|
||||
//! let ex = NamedNodeRef::new("http://example.com").unwrap();
|
||||
//! let triple = TripleRef::new(ex, ex, ex);
|
||||
//! graph.insert(triple);
|
||||
//!
|
||||
//! // simple filter
|
||||
//! let results: Vec<_> = graph.triples_for_subject(ex).collect();
|
||||
//! assert_eq!(vec![triple], results);
|
||||
//! ```
|
||||
|
||||
pub use oxrdf::*; |
||||
|
||||
pub use spargebra::term::GroundQuad; |
@ -0,0 +1,84 @@ |
||||
use crate::io::RdfParseError; |
||||
use crate::model::NamedNode; |
||||
use crate::sparql::results::QueryResultsParseError as ResultsParseError; |
||||
use crate::sparql::SparqlSyntaxError; |
||||
use crate::storage::StorageError; |
||||
use std::convert::Infallible; |
||||
use std::error::Error; |
||||
use std::io; |
||||
|
||||
/// A SPARQL evaluation error.
|
||||
#[derive(Debug, thiserror::Error)] |
||||
#[non_exhaustive] |
||||
pub enum EvaluationError { |
||||
/// An error in SPARQL parsing.
|
||||
#[error(transparent)] |
||||
Parsing(#[from] SparqlSyntaxError), |
||||
/// An error from the storage.
|
||||
#[error(transparent)] |
||||
Storage(#[from] StorageError), |
||||
/// An error while parsing an external RDF file.
|
||||
#[error(transparent)] |
||||
GraphParsing(#[from] RdfParseError), |
||||
/// An error while parsing an external result file (likely from a federated query).
|
||||
#[error(transparent)] |
||||
ResultsParsing(#[from] ResultsParseError), |
||||
/// An error returned during results serialization.
|
||||
#[error(transparent)] |
||||
ResultsSerialization(#[from] io::Error), |
||||
/// Error during `SERVICE` evaluation
|
||||
#[error("{0}")] |
||||
Service(#[source] Box<dyn Error + Send + Sync + 'static>), |
||||
/// Error when `CREATE` tries to create an already existing graph
|
||||
#[error("The graph {0} already exists")] |
||||
GraphAlreadyExists(NamedNode), |
||||
/// Error when `DROP` or `CLEAR` tries to remove a graph that does not exist
|
||||
#[error("The graph {0} does not exist")] |
||||
GraphDoesNotExist(NamedNode), |
||||
/// The variable storing the `SERVICE` name is unbound
|
||||
#[error("The variable encoding the service name is unbound")] |
||||
UnboundService, |
||||
/// The given `SERVICE` is not supported
|
||||
#[error("The service {0} is not supported")] |
||||
UnsupportedService(NamedNode), |
||||
/// The given content media type returned from an HTTP response is not supported (`SERVICE` and `LOAD`)
|
||||
#[error("The content media type {0} is not supported")] |
||||
UnsupportedContentType(String), |
||||
/// The `SERVICE` call did not return solutions
|
||||
#[error("The service is not returning solutions but a boolean or a graph")] |
||||
ServiceDoesNotReturnSolutions, |
||||
/// The results are not an RDF graph
|
||||
#[error("The query results are not a RDF graph")] |
||||
NotAGraph, |
||||
} |
||||
|
||||
impl From<Infallible> for EvaluationError { |
||||
#[inline] |
||||
fn from(error: Infallible) -> Self { |
||||
match error {} |
||||
} |
||||
} |
||||
|
||||
impl From<EvaluationError> for io::Error { |
||||
#[inline] |
||||
fn from(error: EvaluationError) -> Self { |
||||
match error { |
||||
EvaluationError::Parsing(error) => Self::new(io::ErrorKind::InvalidData, error), |
||||
EvaluationError::GraphParsing(error) => error.into(), |
||||
EvaluationError::ResultsParsing(error) => error.into(), |
||||
EvaluationError::ResultsSerialization(error) => error, |
||||
EvaluationError::Storage(error) => error.into(), |
||||
EvaluationError::Service(error) => match error.downcast() { |
||||
Ok(error) => *error, |
||||
Err(error) => Self::new(io::ErrorKind::Other, error), |
||||
}, |
||||
EvaluationError::GraphAlreadyExists(_) |
||||
| EvaluationError::GraphDoesNotExist(_) |
||||
| EvaluationError::UnboundService |
||||
| EvaluationError::UnsupportedService(_) |
||||
| EvaluationError::UnsupportedContentType(_) |
||||
| EvaluationError::ServiceDoesNotReturnSolutions |
||||
| EvaluationError::NotAGraph => Self::new(io::ErrorKind::InvalidInput, error), |
||||
} |
||||
} |
||||
} |
File diff suppressed because it is too large
@ -0,0 +1,9 @@ |
||||
#[cfg(not(feature = "http-client"))] |
||||
mod dummy; |
||||
#[cfg(feature = "http-client")] |
||||
mod simple; |
||||
|
||||
#[cfg(not(feature = "http-client"))] |
||||
pub use dummy::Client; |
||||
#[cfg(feature = "http-client")] |
||||
pub use simple::Client; |
@ -0,0 +1,328 @@ |
||||
//! [SPARQL](https://www.w3.org/TR/sparql11-overview/) implementation.
|
||||
//!
|
||||
//! Stores execute SPARQL. See [`Store`](crate::store::Store::query()) for an example.
|
||||
|
||||
mod algebra; |
||||
mod dataset; |
||||
mod error; |
||||
mod eval; |
||||
mod http; |
||||
mod model; |
||||
pub mod results; |
||||
mod service; |
||||
mod update; |
||||
|
||||
use crate::model::{NamedNode, Term}; |
||||
pub use crate::sparql::algebra::{Query, QueryDataset, Update}; |
||||
use crate::sparql::dataset::DatasetView; |
||||
pub use crate::sparql::error::EvaluationError; |
||||
use crate::sparql::eval::{EvalNodeWithStats, SimpleEvaluator, Timer}; |
||||
pub use crate::sparql::model::{QueryResults, QuerySolution, QuerySolutionIter, QueryTripleIter}; |
||||
pub use crate::sparql::service::ServiceHandler; |
||||
use crate::sparql::service::{EmptyServiceHandler, ErrorConversionServiceHandler}; |
||||
pub(crate) use crate::sparql::update::evaluate_update; |
||||
use crate::storage::StorageReader; |
||||
use json_event_parser::{JsonEvent, ToWriteJsonWriter}; |
||||
pub use oxrdf::{Variable, VariableNameParseError}; |
||||
use oxsdatatypes::{DayTimeDuration, Float}; |
||||
pub use spargebra::SparqlSyntaxError; |
||||
use sparopt::algebra::GraphPattern; |
||||
use sparopt::Optimizer; |
||||
use std::collections::HashMap; |
||||
use std::rc::Rc; |
||||
use std::sync::Arc; |
||||
use std::time::Duration; |
||||
use std::{fmt, io}; |
||||
|
||||
#[allow(clippy::needless_pass_by_value)] |
||||
pub(crate) fn evaluate_query( |
||||
reader: StorageReader, |
||||
query: impl TryInto<Query, Error = impl Into<EvaluationError>>, |
||||
options: QueryOptions, |
||||
run_stats: bool, |
||||
) -> Result<(Result<QueryResults, EvaluationError>, QueryExplanation), EvaluationError> { |
||||
let query = query.try_into().map_err(Into::into)?; |
||||
let dataset = DatasetView::new(reader, &query.dataset); |
||||
let start_planning = Timer::now(); |
||||
let (results, plan_node_with_stats, planning_duration) = match query.inner { |
||||
spargebra::Query::Select { |
||||
pattern, base_iri, .. |
||||
} => { |
||||
let mut pattern = GraphPattern::from(&pattern); |
||||
if !options.without_optimizations { |
||||
pattern = Optimizer::optimize_graph_pattern(pattern); |
||||
} |
||||
let planning_duration = start_planning.elapsed(); |
||||
let (results, explanation) = SimpleEvaluator::new( |
||||
Rc::new(dataset), |
||||
base_iri.map(Rc::new), |
||||
options.service_handler(), |
||||
Arc::new(options.custom_functions), |
||||
run_stats, |
||||
) |
||||
.evaluate_select(&pattern); |
||||
(Ok(results), explanation, planning_duration) |
||||
} |
||||
spargebra::Query::Ask { |
||||
pattern, base_iri, .. |
||||
} => { |
||||
let mut pattern = GraphPattern::from(&pattern); |
||||
if !options.without_optimizations { |
||||
pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced { |
||||
inner: Box::new(pattern), |
||||
}); |
||||
} |
||||
let planning_duration = start_planning.elapsed(); |
||||
let (results, explanation) = SimpleEvaluator::new( |
||||
Rc::new(dataset), |
||||
base_iri.map(Rc::new), |
||||
options.service_handler(), |
||||
Arc::new(options.custom_functions), |
||||
run_stats, |
||||
) |
||||
.evaluate_ask(&pattern); |
||||
(results, explanation, planning_duration) |
||||
} |
||||
spargebra::Query::Construct { |
||||
template, |
||||
pattern, |
||||
base_iri, |
||||
.. |
||||
} => { |
||||
let mut pattern = GraphPattern::from(&pattern); |
||||
if !options.without_optimizations { |
||||
pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced { |
||||
inner: Box::new(pattern), |
||||
}); |
||||
} |
||||
let planning_duration = start_planning.elapsed(); |
||||
let (results, explanation) = SimpleEvaluator::new( |
||||
Rc::new(dataset), |
||||
base_iri.map(Rc::new), |
||||
options.service_handler(), |
||||
Arc::new(options.custom_functions), |
||||
run_stats, |
||||
) |
||||
.evaluate_construct(&pattern, &template); |
||||
(Ok(results), explanation, planning_duration) |
||||
} |
||||
spargebra::Query::Describe { |
||||
pattern, base_iri, .. |
||||
} => { |
||||
let mut pattern = GraphPattern::from(&pattern); |
||||
if !options.without_optimizations { |
||||
pattern = Optimizer::optimize_graph_pattern(GraphPattern::Reduced { |
||||
inner: Box::new(pattern), |
||||
}); |
||||
} |
||||
let planning_duration = start_planning.elapsed(); |
||||
let (results, explanation) = SimpleEvaluator::new( |
||||
Rc::new(dataset), |
||||
base_iri.map(Rc::new), |
||||
options.service_handler(), |
||||
Arc::new(options.custom_functions), |
||||
run_stats, |
||||
) |
||||
.evaluate_describe(&pattern); |
||||
(Ok(results), explanation, planning_duration) |
||||
} |
||||
}; |
||||
let explanation = QueryExplanation { |
||||
inner: plan_node_with_stats, |
||||
with_stats: run_stats, |
||||
parsing_duration: query.parsing_duration, |
||||
planning_duration, |
||||
}; |
||||
Ok((results, explanation)) |
||||
} |
||||
|
||||
/// Options for SPARQL query evaluation.
|
||||
///
|
||||
///
|
||||
/// If the `"http-client"` optional feature is enabled,
|
||||
/// a simple HTTP 1.1 client is used to execute [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/) SERVICE calls.
|
||||
///
|
||||
/// Usage example disabling the federated query support:
|
||||
/// ```
|
||||
/// use oxigraph::sparql::QueryOptions;
|
||||
/// use oxigraph::store::Store;
|
||||
///
|
||||
/// let store = Store::new()?;
|
||||
/// store.query_opt(
|
||||
/// "SELECT * WHERE { SERVICE <https://query.wikidata.org/sparql> {} }",
|
||||
/// QueryOptions::default().without_service_handler(),
|
||||
/// )?;
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[derive(Clone, Default)] |
||||
pub struct QueryOptions { |
||||
service_handler: Option<Arc<dyn ServiceHandler<Error = EvaluationError>>>, |
||||
custom_functions: CustomFunctionRegistry, |
||||
http_timeout: Option<Duration>, |
||||
http_redirection_limit: usize, |
||||
without_optimizations: bool, |
||||
} |
||||
|
||||
pub(crate) type CustomFunctionRegistry = |
||||
HashMap<NamedNode, Arc<dyn (Fn(&[Term]) -> Option<Term>) + Send + Sync>>; |
||||
|
||||
impl QueryOptions { |
||||
/// Use a given [`ServiceHandler`] to execute [SPARQL 1.1 Federated Query](https://www.w3.org/TR/sparql11-federated-query/) SERVICE calls.
|
||||
#[inline] |
||||
#[must_use] |
||||
pub fn with_service_handler(mut self, service_handler: impl ServiceHandler + 'static) -> Self { |
||||
self.service_handler = Some(Arc::new(ErrorConversionServiceHandler::wrap( |
||||
service_handler, |
||||
))); |
||||
self |
||||
} |
||||
|
||||
/// Disables the `SERVICE` calls
|
||||
#[inline] |
||||
#[must_use] |
||||
pub fn without_service_handler(mut self) -> Self { |
||||
self.service_handler = Some(Arc::new(EmptyServiceHandler)); |
||||
self |
||||
} |
||||
|
||||
/// Sets a timeout for HTTP requests done during SPARQL evaluation.
|
||||
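    ///
    /// Usage sketch (assumes the `http-client` feature is enabled, since this method is only compiled with it):
    /// ```
    /// use oxigraph::sparql::QueryOptions;
    /// use std::time::Duration;
    ///
    /// let _options = QueryOptions::default().with_http_timeout(Duration::from_secs(10));
    /// ```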
#[cfg(feature = "http-client")] |
||||
#[inline] |
||||
#[must_use] |
||||
pub fn with_http_timeout(mut self, timeout: Duration) -> Self { |
||||
self.http_timeout = Some(timeout); |
||||
self |
||||
} |
||||
|
||||
/// Sets an upper bound on the number of HTTP redirections followed per HTTP request done during SPARQL evaluation.
|
||||
///
|
||||
/// By default this value is `0`.
|
||||
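    ///
    /// Usage sketch (again assuming the `http-client` feature is enabled):
    /// ```
    /// use oxigraph::sparql::QueryOptions;
    ///
    /// let _options = QueryOptions::default().with_http_redirection_limit(5);
    /// ```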
#[cfg(feature = "http-client")] |
||||
#[inline] |
||||
#[must_use] |
||||
pub fn with_http_redirection_limit(mut self, redirection_limit: usize) -> Self { |
||||
self.http_redirection_limit = redirection_limit; |
||||
self |
||||
} |
||||
|
||||
/// Adds a custom SPARQL evaluation function.
|
||||
///
|
||||
/// Example with a function serializing terms to N-Triples:
|
||||
/// ```
|
||||
/// use oxigraph::model::*;
|
||||
/// use oxigraph::sparql::{QueryOptions, QueryResults};
|
||||
/// use oxigraph::store::Store;
|
||||
///
|
||||
/// let store = Store::new()?;
|
||||
///
|
||||
/// if let QueryResults::Solutions(mut solutions) = store.query_opt(
|
||||
/// "SELECT (<http://www.w3.org/ns/formats/N-Triples>(1) AS ?nt) WHERE {}",
|
||||
/// QueryOptions::default().with_custom_function(
|
||||
/// NamedNode::new("http://www.w3.org/ns/formats/N-Triples")?,
|
||||
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()),
|
||||
/// ),
|
||||
/// )? {
|
||||
/// assert_eq!(
|
||||
/// solutions.next().unwrap()?.get("nt"),
|
||||
/// Some(&Literal::from("\"1\"^^<http://www.w3.org/2001/XMLSchema#integer>").into())
|
||||
/// );
|
||||
/// }
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[inline] |
||||
#[must_use] |
||||
pub fn with_custom_function( |
||||
mut self, |
||||
name: NamedNode, |
||||
evaluator: impl Fn(&[Term]) -> Option<Term> + Send + Sync + 'static, |
||||
) -> Self { |
||||
self.custom_functions.insert(name, Arc::new(evaluator)); |
||||
self |
||||
} |
||||
|
||||
fn service_handler(&self) -> Arc<dyn ServiceHandler<Error = EvaluationError>> { |
||||
self.service_handler.clone().unwrap_or_else(|| { |
||||
if cfg!(feature = "http-client") { |
||||
Arc::new(service::SimpleServiceHandler::new( |
||||
self.http_timeout, |
||||
self.http_redirection_limit, |
||||
)) |
||||
} else { |
||||
Arc::new(EmptyServiceHandler) |
||||
} |
||||
}) |
||||
} |
||||
|
||||
#[doc(hidden)] |
||||
#[inline] |
||||
#[must_use] |
||||
pub fn without_optimizations(mut self) -> Self { |
||||
self.without_optimizations = true; |
||||
self |
||||
} |
||||
} |
||||
|
||||
/// Options for SPARQL update evaluation.
|
||||
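///
/// A minimal sketch using the `From<QueryOptions>` conversion defined below:
/// ```
/// use oxigraph::sparql::{QueryOptions, UpdateOptions};
///
/// let _options: UpdateOptions = QueryOptions::default().into();
/// ```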
#[derive(Clone, Default)] |
||||
pub struct UpdateOptions { |
||||
query_options: QueryOptions, |
||||
} |
||||
|
||||
impl From<QueryOptions> for UpdateOptions { |
||||
#[inline] |
||||
fn from(query_options: QueryOptions) -> Self { |
||||
Self { query_options } |
||||
} |
||||
} |
||||
|
||||
/// The explanation of a query.
|
||||
#[derive(Clone)] |
||||
pub struct QueryExplanation { |
||||
inner: Rc<EvalNodeWithStats>, |
||||
with_stats: bool, |
||||
parsing_duration: Option<DayTimeDuration>, |
||||
planning_duration: Option<DayTimeDuration>, |
||||
} |
||||
|
||||
impl QueryExplanation { |
||||
/// Writes the explanation as JSON.
|
||||
pub fn write_in_json(&self, write: impl io::Write) -> io::Result<()> { |
||||
let mut writer = ToWriteJsonWriter::new(write); |
||||
writer.write_event(JsonEvent::StartObject)?; |
||||
if let Some(parsing_duration) = self.parsing_duration { |
||||
writer.write_event(JsonEvent::ObjectKey("parsing duration in seconds".into()))?; |
||||
writer.write_event(JsonEvent::Number( |
||||
parsing_duration.as_seconds().to_string().into(), |
||||
))?; |
||||
} |
||||
if let Some(planning_duration) = self.planning_duration { |
||||
writer.write_event(JsonEvent::ObjectKey("planning duration in seconds".into()))?; |
||||
writer.write_event(JsonEvent::Number( |
||||
planning_duration.as_seconds().to_string().into(), |
||||
))?; |
||||
} |
||||
writer.write_event(JsonEvent::ObjectKey("plan".into()))?; |
||||
self.inner.json_node(&mut writer, self.with_stats)?; |
||||
writer.write_event(JsonEvent::EndObject) |
||||
} |
||||
} |
||||
|
||||
impl fmt::Debug for QueryExplanation { |
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { |
||||
let mut obj = f.debug_struct("QueryExplanation"); |
||||
if let Some(parsing_duration) = self.parsing_duration { |
||||
obj.field( |
||||
"parsing duration in seconds", |
||||
&f32::from(Float::from(parsing_duration.as_seconds())), |
||||
); |
||||
} |
||||
if let Some(planning_duration) = self.planning_duration { |
||||
obj.field( |
||||
"planning duration in seconds", |
||||
&f32::from(Float::from(planning_duration.as_seconds())), |
||||
); |
||||
} |
||||
obj.field("tree", &self.inner); |
||||
obj.finish_non_exhaustive() |
||||
} |
||||
} |
@ -0,0 +1,371 @@ |
||||
use crate::io::{RdfFormat, RdfSerializer}; |
||||
use crate::model::*; |
||||
use crate::sparql::error::EvaluationError; |
||||
use crate::sparql::results::{ |
||||
FromReadQueryResultsReader, FromReadSolutionsReader, QueryResultsFormat, |
||||
QueryResultsParseError, QueryResultsParser, QueryResultsSerializer, |
||||
}; |
||||
pub use sparesults::QuerySolution; |
||||
use std::io::{Read, Write}; |
||||
use std::sync::Arc; |
||||
|
||||
/// Results of a [SPARQL query](https://www.w3.org/TR/sparql11-query/).
|
||||
pub enum QueryResults { |
||||
/// Results of a [SELECT](https://www.w3.org/TR/sparql11-query/#select) query.
|
||||
Solutions(QuerySolutionIter), |
||||
/// Result of a [ASK](https://www.w3.org/TR/sparql11-query/#ask) query.
|
||||
Boolean(bool), |
||||
/// Results of a [CONSTRUCT](https://www.w3.org/TR/sparql11-query/#construct) or [DESCRIBE](https://www.w3.org/TR/sparql11-query/#describe) query.
|
||||
Graph(QueryTripleIter), |
||||
} |
||||
|
||||
impl QueryResults { |
||||
/// Reads a SPARQL query results serialization.
|
||||
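    ///
    /// Usage sketch parsing a boolean result serialized in the SPARQL JSON results format (the inline document is illustrative):
    /// ```
    /// use oxigraph::sparql::results::QueryResultsFormat;
    /// use oxigraph::sparql::QueryResults;
    ///
    /// let results = QueryResults::read(br#"{"boolean":true}"#.as_slice(), QueryResultsFormat::Json)?;
    /// assert!(matches!(results, QueryResults::Boolean(true)));
    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
    /// ```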
pub fn read( |
||||
read: impl Read + 'static, |
||||
format: QueryResultsFormat, |
||||
) -> Result<Self, QueryResultsParseError> { |
||||
Ok(QueryResultsParser::from_format(format) |
||||
.parse_read(read)? |
||||
.into()) |
||||
} |
||||
|
||||
/// Writes the query results (solutions or boolean).
|
||||
///
|
||||
/// This method fails if it is called on the `Graph` results.
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::store::Store;
|
||||
/// use oxigraph::model::*;
|
||||
/// use oxigraph::sparql::results::QueryResultsFormat;
|
||||
///
|
||||
/// let store = Store::new()?;
|
||||
/// let ex = NamedNodeRef::new("http://example.com")?;
|
||||
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
|
||||
///
|
||||
/// let results = store.query("SELECT ?s WHERE { ?s ?p ?o }")?;
|
||||
/// assert_eq!(
|
||||
/// results.write(Vec::new(), QueryResultsFormat::Json)?,
|
||||
/// r#"{"head":{"vars":["s"]},"results":{"bindings":[{"s":{"type":"uri","value":"http://example.com"}}]}}"#.as_bytes()
|
||||
/// );
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
pub fn write<W: Write>( |
||||
self, |
||||
write: W, |
||||
format: QueryResultsFormat, |
||||
) -> Result<W, EvaluationError> { |
||||
let serializer = QueryResultsSerializer::from_format(format); |
||||
match self { |
||||
Self::Boolean(value) => serializer.serialize_boolean_to_write(write, value), |
||||
Self::Solutions(solutions) => { |
||||
let mut writer = serializer |
||||
.serialize_solutions_to_write(write, solutions.variables().to_vec()) |
||||
.map_err(EvaluationError::ResultsSerialization)?; |
||||
for solution in solutions { |
||||
writer |
||||
.write(&solution?) |
||||
.map_err(EvaluationError::ResultsSerialization)?; |
||||
} |
||||
writer.finish() |
||||
} |
||||
Self::Graph(triples) => { |
||||
let s = VariableRef::new_unchecked("subject"); |
||||
let p = VariableRef::new_unchecked("predicate"); |
||||
let o = VariableRef::new_unchecked("object"); |
||||
let mut writer = serializer |
||||
.serialize_solutions_to_write( |
||||
write, |
||||
vec![s.into_owned(), p.into_owned(), o.into_owned()], |
||||
) |
||||
.map_err(EvaluationError::ResultsSerialization)?; |
||||
for triple in triples { |
||||
let triple = triple?; |
||||
writer |
||||
.write([ |
||||
(s, &triple.subject.into()), |
||||
(p, &triple.predicate.into()), |
||||
(o, &triple.object), |
||||
]) |
||||
.map_err(EvaluationError::ResultsSerialization)?; |
||||
} |
||||
writer.finish() |
||||
} |
||||
} |
||||
.map_err(EvaluationError::ResultsSerialization) |
||||
} |
||||
|
||||
/// Writes the graph query results.
|
||||
///
|
||||
/// This method fails if it is called on the `Solution` or `Boolean` results.
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::io::RdfFormat;
|
||||
/// use oxigraph::model::*;
|
||||
/// use oxigraph::store::Store;
|
||||
///
|
||||
/// let graph = "<http://example.com> <http://example.com> <http://example.com> .\n";
|
||||
///
|
||||
/// let store = Store::new()?;
|
||||
/// store.load_graph(
|
||||
/// graph.as_bytes(),
|
||||
/// RdfFormat::NTriples,
|
||||
/// GraphName::DefaultGraph,
|
||||
/// None,
|
||||
/// )?;
|
||||
///
|
||||
/// let results = store.query("CONSTRUCT WHERE { ?s ?p ?o }")?;
|
||||
/// assert_eq!(
|
||||
/// results.write_graph(Vec::new(), RdfFormat::NTriples)?,
|
||||
/// graph.as_bytes()
|
||||
/// );
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
pub fn write_graph<W: Write>( |
||||
self, |
||||
write: W, |
||||
format: impl Into<RdfFormat>, |
||||
) -> Result<W, EvaluationError> { |
||||
if let Self::Graph(triples) = self { |
||||
let mut writer = RdfSerializer::from_format(format.into()).serialize_to_write(write); |
||||
for triple in triples { |
||||
writer |
||||
.write_triple(&triple?) |
||||
.map_err(EvaluationError::ResultsSerialization)?; |
||||
} |
||||
writer |
||||
.finish() |
||||
.map_err(EvaluationError::ResultsSerialization) |
||||
} else { |
||||
Err(EvaluationError::NotAGraph) |
||||
} |
||||
} |
||||
} |
||||
|
||||
impl From<QuerySolutionIter> for QueryResults { |
||||
#[inline] |
||||
fn from(value: QuerySolutionIter) -> Self { |
||||
Self::Solutions(value) |
||||
} |
||||
} |
||||
|
||||
impl<R: Read + 'static> From<FromReadQueryResultsReader<R>> for QueryResults { |
||||
fn from(reader: FromReadQueryResultsReader<R>) -> Self { |
||||
match reader { |
||||
FromReadQueryResultsReader::Solutions(s) => Self::Solutions(s.into()), |
||||
FromReadQueryResultsReader::Boolean(v) => Self::Boolean(v), |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// An iterator over [`QuerySolution`]s.
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::sparql::QueryResults;
|
||||
/// use oxigraph::store::Store;
|
||||
///
|
||||
/// let store = Store::new()?;
|
||||
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
|
||||
/// for solution in solutions {
|
||||
/// println!("{:?}", solution?.get("s"));
|
||||
/// }
|
||||
/// }
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
pub struct QuerySolutionIter { |
||||
variables: Arc<[Variable]>, |
||||
iter: Box<dyn Iterator<Item = Result<QuerySolution, EvaluationError>>>, |
||||
} |
||||
|
||||
impl QuerySolutionIter { |
||||
/// Constructs a new iterator of solutions from an ordered list of solution variables and an iterator of solution tuples
|
||||
/// (each tuple uses the same ordering as the variable list, so tuple element 0 is the value of variable 0, and so on).
|
||||
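    ///
    /// A small constructed example (one variable, one solution row) illustrating that ordering contract:
    /// ```
    /// use oxigraph::model::NamedNode;
    /// use oxigraph::sparql::{QuerySolutionIter, Variable};
    /// use std::sync::Arc;
    ///
    /// let variables: Arc<[Variable]> = [Variable::new_unchecked("s")].as_ref().into();
    /// let mut solutions = QuerySolutionIter::new(
    ///     Arc::clone(&variables),
    ///     std::iter::once(Ok(vec![Some(NamedNode::new_unchecked("http://example.com").into())])),
    /// );
    /// let first = solutions.next().unwrap()?;
    /// assert_eq!(
    ///     first.get("s"),
    ///     Some(&NamedNode::new_unchecked("http://example.com").into())
    /// );
    /// # Result::<_, Box<dyn std::error::Error>>::Ok(())
    /// ```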
pub fn new( |
||||
variables: Arc<[Variable]>, |
||||
iter: impl Iterator<Item = Result<Vec<Option<Term>>, EvaluationError>> + 'static, |
||||
) -> Self { |
||||
Self { |
||||
variables: Arc::clone(&variables), |
||||
iter: Box::new( |
||||
iter.map(move |t| t.map(|values| (Arc::clone(&variables), values).into())), |
||||
), |
||||
} |
||||
} |
||||
|
||||
/// The variables used in the solutions.
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::sparql::{QueryResults, Variable};
|
||||
/// use oxigraph::store::Store;
|
||||
///
|
||||
/// let store = Store::new()?;
|
||||
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }")? {
|
||||
/// assert_eq!(
|
||||
/// solutions.variables(),
|
||||
/// &[Variable::new("s")?, Variable::new("o")?]
|
||||
/// );
|
||||
/// }
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
#[inline] |
||||
pub fn variables(&self) -> &[Variable] { |
||||
&self.variables |
||||
} |
||||
} |
||||
|
||||
impl<R: Read + 'static> From<FromReadSolutionsReader<R>> for QuerySolutionIter { |
||||
fn from(reader: FromReadSolutionsReader<R>) -> Self { |
||||
Self { |
||||
variables: reader.variables().into(), |
||||
iter: Box::new(reader.map(|t| t.map_err(EvaluationError::from))), |
||||
} |
||||
} |
||||
} |
||||
|
||||
impl Iterator for QuerySolutionIter { |
||||
type Item = Result<QuerySolution, EvaluationError>; |
||||
|
||||
#[inline] |
||||
fn next(&mut self) -> Option<Self::Item> { |
||||
self.iter.next() |
||||
} |
||||
|
||||
#[inline] |
||||
fn size_hint(&self) -> (usize, Option<usize>) { |
||||
self.iter.size_hint() |
||||
} |
||||
} |
||||
|
||||
/// An iterator over the triples that compose a graph solution.
|
||||
///
|
||||
/// ```
|
||||
/// use oxigraph::sparql::QueryResults;
|
||||
/// use oxigraph::store::Store;
|
||||
///
|
||||
/// let store = Store::new()?;
|
||||
/// if let QueryResults::Graph(triples) = store.query("CONSTRUCT WHERE { ?s ?p ?o }")? {
|
||||
/// for triple in triples {
|
||||
/// println!("{}", triple?);
|
||||
/// }
|
||||
/// }
|
||||
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
|
||||
/// ```
|
||||
pub struct QueryTripleIter { |
||||
pub(crate) iter: Box<dyn Iterator<Item = Result<Triple, EvaluationError>>>, |
||||
} |
||||
|
||||
impl Iterator for QueryTripleIter { |
||||
type Item = Result<Triple, EvaluationError>; |
||||
|
||||
#[inline] |
||||
fn next(&mut self) -> Option<Self::Item> { |
||||
self.iter.next() |
||||
} |
||||
|
||||
#[inline] |
||||
fn size_hint(&self) -> (usize, Option<usize>) { |
||||
self.iter.size_hint() |
||||
} |
||||
|
||||
#[inline] |
||||
fn fold<Acc, G>(self, init: Acc, g: G) -> Acc |
||||
where |
||||
G: FnMut(Acc, Self::Item) -> Acc, |
||||
{ |
||||
self.iter.fold(init, g) |
||||
} |
||||
} |
||||
|
||||
#[cfg(test)] |
||||
#[allow(clippy::panic_in_result_fn)] |
||||
mod tests { |
||||
use super::*; |
||||
use std::io::Cursor; |
||||
|
||||
#[test] |
||||
fn test_serialization_roundtrip() -> Result<(), EvaluationError> { |
||||
use std::str; |
||||
|
||||
for format in [ |
||||
QueryResultsFormat::Json, |
||||
QueryResultsFormat::Xml, |
||||
QueryResultsFormat::Tsv, |
||||
] { |
||||
let results = vec![ |
||||
QueryResults::Boolean(true), |
||||
QueryResults::Boolean(false), |
||||
QueryResults::Solutions(QuerySolutionIter::new( |
||||
[ |
||||
Variable::new_unchecked("foo"), |
||||
Variable::new_unchecked("bar"), |
||||
] |
||||
.as_ref() |
||||
.into(), |
||||
Box::new( |
||||
vec![ |
||||
Ok(vec![None, None]), |
||||
Ok(vec![ |
||||
Some(NamedNode::new_unchecked("http://example.com").into()), |
||||
None, |
||||
]), |
||||
Ok(vec![ |
||||
None, |
||||
Some(NamedNode::new_unchecked("http://example.com").into()), |
||||
]), |
||||
Ok(vec![ |
||||
Some(BlankNode::new_unchecked("foo").into()), |
||||
Some(BlankNode::new_unchecked("bar").into()), |
||||
]), |
||||
Ok(vec![Some(Literal::new_simple_literal("foo").into()), None]), |
||||
Ok(vec![ |
||||
Some( |
||||
Literal::new_language_tagged_literal_unchecked("foo", "fr") |
||||
.into(), |
||||
), |
||||
None, |
||||
]), |
||||
Ok(vec![ |
||||
Some(Literal::from(1).into()), |
||||
Some(Literal::from(true).into()), |
||||
]), |
||||
Ok(vec![ |
||||
Some(Literal::from(1.33).into()), |
||||
Some(Literal::from(false).into()), |
||||
]), |
||||
Ok(vec![ |
||||
Some( |
||||
Triple::new( |
||||
NamedNode::new_unchecked("http://example.com/s"), |
||||
NamedNode::new_unchecked("http://example.com/p"), |
||||
Triple::new( |
||||
NamedNode::new_unchecked("http://example.com/os"), |
||||
NamedNode::new_unchecked("http://example.com/op"), |
||||
NamedNode::new_unchecked("http://example.com/oo"), |
||||
), |
||||
) |
||||
.into(), |
||||
), |
||||
None, |
||||
]), |
||||
] |
||||
.into_iter(), |
||||
), |
||||
)), |
||||
]; |
||||
|
||||
for ex in results { |
||||
let mut buffer = Vec::new(); |
||||
ex.write(&mut buffer, format)?; |
||||
let ex2 = QueryResults::read(Cursor::new(buffer.clone()), format)?; |
||||
let mut buffer2 = Vec::new(); |
||||
ex2.write(&mut buffer2, format)?; |
||||
assert_eq!( |
||||
str::from_utf8(&buffer).unwrap(), |
||||
str::from_utf8(&buffer2).unwrap() |
||||
); |
||||
} |
||||
} |
||||
|
||||
Ok(()) |
||||
} |
||||
} |
@ -0,0 +1,44 @@ |
||||
//! Utilities to read and write SPARQL query results formats using [sparesults](https://crates.io/crates/sparesults).
|
||||
//!
|
||||
//! It supports [SPARQL Query Results XML Format (Second Edition)](https://www.w3.org/TR/rdf-sparql-XMLres/), [SPARQL 1.1 Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) and [SPARQL 1.1 Query Results CSV and TSV Formats](https://www.w3.org/TR/sparql11-results-csv-tsv/).
|
||||
//!
|
||||
//! Usage example converting a JSON result file into a TSV result file:
|
||||
//!
|
||||
//! ```
|
||||
//! use oxigraph::sparql::results::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader, QueryResultsSerializer};
|
||||
//! use std::io::Result;
|
||||
//!
|
||||
//! fn convert_json_to_tsv(json_file: &[u8]) -> Result<Vec<u8>> {
|
||||
//! let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json);
|
||||
//! let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
|
||||
//! // We start to read the JSON file and see which kind of results it is
|
||||
//! match json_parser.parse_read(json_file)? {
|
||||
//! FromReadQueryResultsReader::Boolean(value) => {
|
||||
//! // it's a boolean result, we copy it in TSV to the output buffer
|
||||
//! tsv_serializer.serialize_boolean_to_write(Vec::new(), value)
|
||||
//! }
|
||||
//! FromReadQueryResultsReader::Solutions(solutions_reader) => {
|
||||
//! // it's a set of solutions, we create a writer and we write to it while reading in streaming from the JSON file
|
||||
//! let mut serialize_solutions_to_write = tsv_serializer.serialize_solutions_to_write(Vec::new(), solutions_reader.variables().to_vec())?;
|
||||
//! for solution in solutions_reader {
|
||||
//! serialize_solutions_to_write.write(&solution?)?;
|
||||
//! }
|
||||
//! serialize_solutions_to_write.finish()
|
||||
//! }
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! // Let's test with a boolean
|
||||
//! assert_eq!(
|
||||
//! convert_json_to_tsv(br#"{"boolean":true}"#.as_slice()).unwrap(),
|
||||
//! b"true"
|
||||
//! );
|
||||
//!
|
||||
//! // And with a set of solutions
|
||||
//! assert_eq!(
|
||||
//! convert_json_to_tsv(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice()).unwrap(),
|
||||
//! b"?foo\t?bar\n\"test\"\t\n"
|
||||
//! );
|
||||
//! ```
|
||||
|
||||
pub use sparesults::*; |
@ -0,0 +1,12 @@ |
||||
//! A storage backend.
|
||||
//! RocksDB is used when available (non-WASM targets); otherwise an in-memory fallback is used.
|
||||
|
||||
#[cfg(any(target_family = "wasm"))] |
||||
pub use fallback::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||
#[cfg(all(not(target_family = "wasm")))] |
||||
pub use oxi_rocksdb::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||
|
||||
#[cfg(any(target_family = "wasm"))] |
||||
mod fallback; |
||||
#[cfg(all(not(target_family = "wasm")))] |
||||
mod oxi_rocksdb; |
File diff suppressed because it is too large
@ -0,0 +1,139 @@ |
||||
use crate::io::{RdfFormat, RdfParseError}; |
||||
use crate::storage::numeric_encoder::EncodedTerm; |
||||
use oxiri::IriParseError; |
||||
use oxrdf::TermRef; |
||||
use std::error::Error; |
||||
use std::io; |
||||
|
||||
/// An error related to storage operations (reads, writes...).
|
||||
#[derive(Debug, thiserror::Error)] |
||||
#[non_exhaustive] |
||||
pub enum StorageError { |
||||
/// Error from the OS I/O layer.
|
||||
#[error(transparent)] |
||||
Io(#[from] io::Error), |
||||
/// Error related to data corruption.
|
||||
#[error(transparent)] |
||||
Corruption(#[from] CorruptionError), |
||||
#[doc(hidden)] |
||||
#[error("{0}")] |
||||
Other(#[source] Box<dyn Error + Send + Sync + 'static>), |
||||
} |
||||
|
||||
impl From<StorageError> for io::Error { |
||||
#[inline] |
||||
fn from(error: StorageError) -> Self { |
||||
match error { |
||||
StorageError::Io(error) => error, |
||||
StorageError::Corruption(error) => error.into(), |
||||
StorageError::Other(error) => Self::new(io::ErrorKind::Other, error), |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// An error returned if some content in the database is corrupted.
|
||||
#[derive(Debug, thiserror::Error)] |
||||
#[error(transparent)] |
||||
pub struct CorruptionError(#[from] CorruptionErrorKind); |
||||
|
||||
/// An error returned if some content in the database is corrupted.
|
||||
#[derive(Debug, thiserror::Error)] |
||||
enum CorruptionErrorKind { |
||||
#[error("{0}")] |
||||
Msg(String), |
||||
#[error("{0}")] |
||||
Other(#[source] Box<dyn Error + Send + Sync + 'static>), |
||||
} |
||||
|
||||
impl CorruptionError { |
||||
/// Builds an error from another error.
|
||||
#[inline] |
||||
pub(crate) fn new(error: impl Into<Box<dyn Error + Send + Sync + 'static>>) -> Self { |
||||
Self(CorruptionErrorKind::Other(error.into())) |
||||
} |
||||
|
||||
#[inline] |
||||
pub(crate) fn from_encoded_term(encoded: &EncodedTerm, term: &TermRef<'_>) -> Self { |
||||
// TODO: eventually use a dedicated error enum value
|
||||
Self::msg(format!("Invalid term encoding {encoded:?} for {term}")) |
||||
} |
||||
|
||||
#[inline] |
||||
pub(crate) fn from_missing_column_family_name(name: &'static str) -> Self { |
||||
// TODO: eventually use a dedicated error enum value
|
||||
Self::msg(format!("Column family {name} does not exist")) |
||||
} |
||||
|
||||
/// Builds an error from a printable error message.
|
||||
#[inline] |
||||
pub(crate) fn msg(msg: impl Into<String>) -> Self { |
||||
Self(CorruptionErrorKind::Msg(msg.into())) |
||||
} |
||||
} |
||||
|
||||
impl From<CorruptionError> for io::Error { |
||||
#[inline] |
||||
fn from(error: CorruptionError) -> Self { |
||||
Self::new(io::ErrorKind::InvalidData, error) |
||||
} |
||||
} |
||||
|
||||
/// An error raised while loading a file into a [`Store`](crate::store::Store).
|
||||
#[derive(Debug, thiserror::Error)] |
||||
pub enum LoaderError { |
||||
/// An error raised while reading the file.
|
||||
#[error(transparent)] |
||||
Parsing(#[from] RdfParseError), |
||||
/// An error raised during the insertion in the store.
|
||||
#[error(transparent)] |
||||
Storage(#[from] StorageError), |
||||
/// The base IRI is invalid.
|
||||
#[error("Invalid base IRI '{iri}': {error}")] |
||||
InvalidBaseIri { |
||||
/// The IRI itself.
|
||||
iri: String, |
||||
/// The parsing error.
|
||||
#[source] |
||||
error: IriParseError, |
||||
}, |
||||
} |
||||
|
||||
impl From<LoaderError> for io::Error { |
||||
#[inline] |
||||
fn from(error: LoaderError) -> Self { |
||||
match error { |
||||
LoaderError::Storage(error) => error.into(), |
||||
LoaderError::Parsing(error) => error.into(), |
||||
LoaderError::InvalidBaseIri { .. } => { |
||||
Self::new(io::ErrorKind::InvalidInput, error.to_string()) |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// An error raised while writing a file from a [`Store`](crate::store::Store).
|
||||
#[derive(Debug, thiserror::Error)] |
||||
pub enum SerializerError { |
||||
/// An error raised while writing the content.
|
||||
#[error(transparent)] |
||||
Io(#[from] io::Error), |
||||
/// An error raised during the lookup in the store.
|
||||
#[error(transparent)] |
||||
Storage(#[from] StorageError), |
||||
/// A format compatible with [RDF dataset](https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset) is required.
|
||||
#[error("A RDF format supporting datasets was expected, {0} found")] |
||||
DatasetFormatExpected(RdfFormat), |
||||
} |
||||
|
||||
impl From<SerializerError> for io::Error { |
||||
#[inline] |
||||
fn from(error: SerializerError) -> Self { |
||||
match error { |
||||
SerializerError::Storage(error) => error.into(), |
||||
SerializerError::Io(error) => error, |
||||
SerializerError::DatasetFormatExpected(_) => { |
||||
Self::new(io::ErrorKind::InvalidInput, error.to_string()) |
||||
} |
||||
} |
||||
} |
||||
} |
File diff suppressed because it is too large
@ -1,26 +1,33 @@ |
||||
[package] |
||||
name = "oxrdf" |
||||
version = "0.1.0" |
||||
authors = ["Tpt <thomas@pellissier-tanon.fr>"] |
||||
license = "MIT OR Apache-2.0" |
||||
version = "0.2.0-alpha.4" |
||||
authors.workspace = true |
||||
license.workspace = true |
||||
readme = "README.md" |
||||
keywords = ["RDF"] |
||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/lib/oxrdf" |
||||
homepage = "https://oxigraph.org/" |
||||
description = """ |
||||
A library providing basic data structures related to RDF |
||||
""" |
||||
edition = "2021" |
||||
documentation = "https://docs.rs/oxrdf" |
||||
edition.workspace = true |
||||
rust-version.workspace = true |
||||
|
||||
[features] |
||||
default = [] |
||||
rdf-star = [] |
||||
|
||||
[dependencies] |
||||
rand = "0.8" |
||||
oxilangtag = "0.1" |
||||
oxiri = "0.2" |
||||
lasso = { version = "0.6", features = ["inline-more"] } |
||||
oxilangtag.workspace = true |
||||
oxiri.workspace = true |
||||
oxsdatatypes = { workspace = true, optional = true } |
||||
rand.workspace = true |
||||
thiserror.workspace = true |
||||
serde.workspace = true |
||||
|
||||
[lints] |
||||
workspace = true |
||||
|
||||
[package.metadata.docs.rs] |
||||
all-features = true |
||||
rustdoc-args = ["--cfg", "docsrs"] |
||||
|