diff --git a/package-lock.json b/package-lock.json index e9f36ff..5f02dca 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5181,6 +5181,10 @@ "resolved": "packages/demo-react", "link": true }, + "node_modules/@ldo/jsonld-dataset-proxy": { + "resolved": "packages/jsonld-dataset-proxy", + "link": true + }, "node_modules/@ldo/ldo": { "resolved": "packages/ldo", "link": true @@ -13609,6 +13613,33 @@ "node": ">= 0.6" } }, + "node_modules/event-stream": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/event-stream/-/event-stream-3.3.4.tgz", + "integrity": "sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g==", + "dev": true, + "dependencies": { + "duplexer": "~0.1.1", + "from": "~0", + "map-stream": "~0.1.0", + "pause-stream": "0.0.11", + "split": "0.3", + "stream-combiner": "~0.0.4", + "through": "~2.3.1" + } + }, + "node_modules/event-stream/node_modules/split": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/split/-/split-0.3.3.tgz", + "integrity": "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA==", + "dev": true, + "dependencies": { + "through": "2" + }, + "engines": { + "node": "*" + } + }, "node_modules/event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -14833,6 +14864,12 @@ "node": ">= 0.6" } }, + "node_modules/from": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/from/-/from-0.1.7.tgz", + "integrity": "sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g==", + "dev": true + }, "node_modules/fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", @@ -18187,16 +18224,6 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-18.17.12.tgz", "integrity": 
"sha512-d6xjC9fJ/nSnfDeU0AMDsaJyb1iHsqCSOdi84w4u+SlN/UgQdY5tRhpMzaFYsI4mnpvgTivEaQd0yOUhAtOnEQ==" }, - "node_modules/jsonld-dataset-proxy": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/jsonld-dataset-proxy/-/jsonld-dataset-proxy-1.2.3.tgz", - "integrity": "sha512-TcKHylUSeGe3wr39L06nszpk7MFResd5TLetqlBh9rWVMpJxg96hsgSpjV8I8skgWVOx2ch638xUPhD3pXsOFQ==", - "dependencies": { - "@rdfjs/data-model": "^1.2.0", - "jsonld2graphobject": "^0.0.4", - "o-dataset-pack": "^0.2.14" - } - }, "node_modules/jsonld-streaming-parser": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/jsonld-streaming-parser/-/jsonld-streaming-parser-3.2.0.tgz", @@ -19408,6 +19435,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/map-stream": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/map-stream/-/map-stream-0.1.0.tgz", + "integrity": "sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==", + "dev": true + }, "node_modules/md5": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", @@ -21326,6 +21359,12 @@ "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", "dev": true }, + "node_modules/node-cleanup": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/node-cleanup/-/node-cleanup-2.1.2.tgz", + "integrity": "sha512-qN8v/s2PAJwGUtr1/hYTpNKlD6Y9rc4p8KSmJXyGdYGZsDGKXrGThikLFP9OCHFeLeEpQzPwiAtdIvBLqm//Hw==", + "dev": true + }, "node_modules/node-dir": { "version": "0.1.17", "resolved": "https://registry.npmjs.org/node-dir/-/node-dir-0.1.17.tgz", @@ -22881,6 +22920,15 @@ "node": ">=8" } }, + "node_modules/pause-stream": { + "version": "0.0.11", + "resolved": "https://registry.npmjs.org/pause-stream/-/pause-stream-0.0.11.tgz", + "integrity": "sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==", + "dev": true, + "dependencies": { + 
"through": "~2.3" + } + }, "node_modules/performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -24515,6 +24563,21 @@ "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", "dev": true }, + "node_modules/ps-tree": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/ps-tree/-/ps-tree-1.2.0.tgz", + "integrity": "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA==", + "dev": true, + "dependencies": { + "event-stream": "=3.3.4" + }, + "bin": { + "ps-tree": "bin/ps-tree.js" + }, + "engines": { + "node": ">= 0.10" + } + }, "node_modules/pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", @@ -27075,6 +27138,15 @@ "node": ">= 0.10.0" } }, + "node_modules/stream-combiner": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/stream-combiner/-/stream-combiner-0.0.4.tgz", + "integrity": "sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw==", + "dev": true, + "dependencies": { + "duplexer": "~0.1.1" + } + }, "node_modules/streamsearch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", @@ -27118,6 +27190,15 @@ } ] }, + "node_modules/string-argv": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz", + "integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==", + "dev": true, + "engines": { + "node": ">=0.6.19" + } + }, "node_modules/string-length": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", @@ -28393,6 +28474,27 @@ "node": ">=0.4.0" } }, + "node_modules/tsc-watch": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/tsc-watch/-/tsc-watch-6.0.4.tgz", + 
"integrity": "sha512-cHvbvhjO86w2aGlaHgSCeQRl+Aqw6X6XN4sQMPZKF88GoP30O+oTuh5lRIJr5pgFWrRpF1AgXnJJ2DoFEIPHyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "node-cleanup": "^2.1.2", + "ps-tree": "^1.2.0", + "string-argv": "^0.3.1" + }, + "bin": { + "tsc-watch": "dist/lib/tsc-watch.js" + }, + "engines": { + "node": ">=12.12.0" + }, + "peerDependencies": { + "typescript": "*" + } + }, "node_modules/tsconfig-paths": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", @@ -30697,14 +30799,116 @@ "node": ">= 6" } }, + "packages/jsonld-dataset-proxy": { + "name": "@ldo/jsonld-dataset-proxy", + "version": "0.0.0", + "license": "MIT", + "dependencies": { + "@rdfjs/data-model": "^1.2.0", + "jsonld2graphobject": "^0.0.4", + "o-dataset-pack": "^0.2.14" + }, + "devDependencies": { + "@rdfjs/types": "^1.0.1", + "@types/jest": "^27.0.3", + "@types/jsonld": "^1.5.6", + "@types/n3": "^1.10.4", + "@types/rdfjs__dataset": "^1.0.5", + "@types/shexj": "2.1.4", + "jest": "^27.4.5", + "shex-test": "^0.5.5", + "ts-jest": "^27.1.2", + "ts-node": "^10.4.0", + "tsc-watch": "^6.0.0" + } + }, + "packages/jsonld-dataset-proxy/node_modules/n3": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/n3/-/n3-0.4.5.tgz", + "integrity": "sha512-sv4bFeqVTTj9hT/OAdndpHpECxlkmpHxdnHUkhNgx3P3Tnw2WqpTUzMEeY+ELEoeW1q6Xqq9LNO0lu/zqogIZA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "packages/jsonld-dataset-proxy/node_modules/shex-test": { + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/shex-test/-/shex-test-0.5.8.tgz", + "integrity": "sha512-wfrhu/lb2zrr4MANpoGbqLbPaoL0yOoDTJp6n/Ewtk2iEeVQD8RLYXVCMLYdmo6ccue/I/yubcIqv4p2uarCbg==", + "dev": true, + "dependencies": { + "n3": "^0.4.5", + "xlsx": "^0.8.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "packages/jsonld-dataset-proxy/node_modules/ts-jest": { + "version": "27.1.5", + "resolved": 
"https://registry.npmjs.org/ts-jest/-/ts-jest-27.1.5.tgz", + "integrity": "sha512-Xv6jBQPoBEvBq/5i2TeSG9tt/nqkbpcurrEG1b+2yfBrcJelOZF9Ml6dmyMh7bcW9JyFbRYpR5rxROSlBLTZHA==", + "dev": true, + "dependencies": { + "bs-logger": "0.x", + "fast-json-stable-stringify": "2.x", + "jest-util": "^27.0.0", + "json5": "2.x", + "lodash.memoize": "4.x", + "make-error": "1.x", + "semver": "7.x", + "yargs-parser": "20.x" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@types/jest": "^27.0.0", + "babel-jest": ">=27.0.0 <28", + "jest": "^27.0.0", + "typescript": ">=3.8 <5.0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@types/jest": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "packages/jsonld-dataset-proxy/node_modules/typescript": { + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "dev": true, + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, "packages/ldo": { "name": "@ldo/ldo", "version": "0.0.0", "license": "MIT", "dependencies": { + "@ldo/jsonld-dataset-proxy": "^0.0.0", "@rdfjs/data-model": "^1.2.0", "buffer": "^6.0.3", - "jsonld-dataset-proxy": "^1.2.1", "n3": "^1.16.2", "o-dataset-pack": "^0.2.11", "readable-stream": "^4.3.0" @@ -30897,9 +31101,9 @@ "license": "MIT", "dependencies": { "@inrupt/solid-client": "^1.29.0", + "@ldo/jsonld-dataset-proxy": "^0.0.0", "@ldo/ldo": "^0.0.0", "cross-fetch": "^3.1.6", - "jsonld-dataset-proxy": "^1.2.3", "o-dataset-pack": "^0.2.14", "solid-authn-react-native": "^2.0.3", "stream": "^0.0.2" @@ -36128,9 +36332,70 @@ } } }, + "@ldo/jsonld-dataset-proxy": { + 
"version": "file:packages/jsonld-dataset-proxy", + "requires": { + "@rdfjs/data-model": "^1.2.0", + "@rdfjs/types": "^1.0.1", + "@types/jest": "^27.0.3", + "@types/jsonld": "^1.5.6", + "@types/n3": "^1.10.4", + "@types/rdfjs__dataset": "^1.0.5", + "@types/shexj": "2.1.4", + "jest": "^27.4.5", + "jsonld2graphobject": "^0.0.4", + "o-dataset-pack": "^0.2.14", + "shex-test": "^0.5.5", + "ts-jest": "^27.1.2", + "ts-node": "^10.4.0", + "tsc-watch": "^6.0.0" + }, + "dependencies": { + "n3": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/n3/-/n3-0.4.5.tgz", + "integrity": "sha512-sv4bFeqVTTj9hT/OAdndpHpECxlkmpHxdnHUkhNgx3P3Tnw2WqpTUzMEeY+ELEoeW1q6Xqq9LNO0lu/zqogIZA==", + "dev": true + }, + "shex-test": { + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/shex-test/-/shex-test-0.5.8.tgz", + "integrity": "sha512-wfrhu/lb2zrr4MANpoGbqLbPaoL0yOoDTJp6n/Ewtk2iEeVQD8RLYXVCMLYdmo6ccue/I/yubcIqv4p2uarCbg==", + "dev": true, + "requires": { + "n3": "^0.4.5", + "xlsx": "^0.8.0" + } + }, + "ts-jest": { + "version": "27.1.5", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-27.1.5.tgz", + "integrity": "sha512-Xv6jBQPoBEvBq/5i2TeSG9tt/nqkbpcurrEG1b+2yfBrcJelOZF9Ml6dmyMh7bcW9JyFbRYpR5rxROSlBLTZHA==", + "dev": true, + "requires": { + "bs-logger": "0.x", + "fast-json-stable-stringify": "2.x", + "jest-util": "^27.0.0", + "json5": "2.x", + "lodash.memoize": "4.x", + "make-error": "1.x", + "semver": "7.x", + "yargs-parser": "20.x" + } + }, + "typescript": { + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "dev": true, + "peer": true + } + } + }, "@ldo/ldo": { "version": "file:packages/ldo", "requires": { + "@ldo/jsonld-dataset-proxy": "^0.0.0", "@rdfjs/data-model": "^1.2.0", "@rdfjs/types": "^1.1.0", "@types/jest": "^27.0.3", @@ -36141,7 +36406,6 @@ "buffer": "^6.0.3", "cross-fetch": 
"^3.1.5", "jest": "^27.4.5", - "jsonld-dataset-proxy": "^1.2.1", "n3": "^1.16.2", "o-dataset-pack": "^0.2.11", "readable-stream": "^4.3.0", @@ -36252,6 +36516,7 @@ "@babel/preset-typescript": "^7.22.11", "@inrupt/solid-client": "^1.29.0", "@ldo/cli": "^0.0.0", + "@ldo/jsonld-dataset-proxy": "^0.0.0", "@ldo/ldo": "^0.0.0", "@rdfjs/types": "^1.1.0", "@types/jest": "^29.0.3", @@ -36259,7 +36524,6 @@ "@types/n3": "^1.10.4", "@types/shexj": "2.1.4", "cross-fetch": "^3.1.6", - "jsonld-dataset-proxy": "^1.2.3", "o-dataset-pack": "^0.2.14", "solid-authn-react-native": "^2.0.3", "stream": "^0.0.2", @@ -43461,6 +43725,32 @@ "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" }, + "event-stream": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/event-stream/-/event-stream-3.3.4.tgz", + "integrity": "sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g==", + "dev": true, + "requires": { + "duplexer": "~0.1.1", + "from": "~0", + "map-stream": "~0.1.0", + "pause-stream": "0.0.11", + "split": "0.3", + "stream-combiner": "~0.0.4", + "through": "~2.3.1" + }, + "dependencies": { + "split": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/split/-/split-0.3.3.tgz", + "integrity": "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA==", + "dev": true, + "requires": { + "through": "2" + } + } + } + }, "event-target-shim": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", @@ -44398,6 +44688,12 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, + "from": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/from/-/from-0.1.7.tgz", + "integrity": 
"sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g==", + "dev": true + }, "fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", @@ -46920,16 +47216,6 @@ } } }, - "jsonld-dataset-proxy": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/jsonld-dataset-proxy/-/jsonld-dataset-proxy-1.2.3.tgz", - "integrity": "sha512-TcKHylUSeGe3wr39L06nszpk7MFResd5TLetqlBh9rWVMpJxg96hsgSpjV8I8skgWVOx2ch638xUPhD3pXsOFQ==", - "requires": { - "@rdfjs/data-model": "^1.2.0", - "jsonld2graphobject": "^0.0.4", - "o-dataset-pack": "^0.2.14" - } - }, "jsonld-streaming-parser": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/jsonld-streaming-parser/-/jsonld-streaming-parser-3.2.0.tgz", @@ -47799,6 +48085,12 @@ "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", "dev": true }, + "map-stream": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/map-stream/-/map-stream-0.1.0.tgz", + "integrity": "sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==", + "dev": true + }, "md5": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", @@ -49333,6 +49625,12 @@ "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", "dev": true }, + "node-cleanup": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/node-cleanup/-/node-cleanup-2.1.2.tgz", + "integrity": "sha512-qN8v/s2PAJwGUtr1/hYTpNKlD6Y9rc4p8KSmJXyGdYGZsDGKXrGThikLFP9OCHFeLeEpQzPwiAtdIvBLqm//Hw==", + "dev": true + }, "node-dir": { "version": "0.1.17", "resolved": "https://registry.npmjs.org/node-dir/-/node-dir-0.1.17.tgz", @@ -50529,6 +50827,15 @@ "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", "integrity": 
"sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" }, + "pause-stream": { + "version": "0.0.11", + "resolved": "https://registry.npmjs.org/pause-stream/-/pause-stream-0.0.11.tgz", + "integrity": "sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==", + "dev": true, + "requires": { + "through": "~2.3" + } + }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -51527,6 +51834,15 @@ "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", "dev": true }, + "ps-tree": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/ps-tree/-/ps-tree-1.2.0.tgz", + "integrity": "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA==", + "dev": true, + "requires": { + "event-stream": "=3.3.4" + } + }, "pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", @@ -53542,6 +53858,15 @@ "integrity": "sha512-uyQK/mx5QjHun80FLJTfaWE7JtwfRMKBLkMne6udYOmvH0CawotVa7TfgYHzAnpphn4+TweIx1QKMnRIbipmUg==", "peer": true }, + "stream-combiner": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/stream-combiner/-/stream-combiner-0.0.4.tgz", + "integrity": "sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw==", + "dev": true, + "requires": { + "duplexer": "~0.1.1" + } + }, "streamsearch": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", @@ -53567,6 +53892,12 @@ } } }, + "string-argv": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz", + "integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==", + "dev": true + }, "string-length": { "version": "4.0.2", "resolved": 
"https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", @@ -54540,6 +54871,18 @@ } } }, + "tsc-watch": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/tsc-watch/-/tsc-watch-6.0.4.tgz", + "integrity": "sha512-cHvbvhjO86w2aGlaHgSCeQRl+Aqw6X6XN4sQMPZKF88GoP30O+oTuh5lRIJr5pgFWrRpF1AgXnJJ2DoFEIPHyg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "node-cleanup": "^2.1.2", + "ps-tree": "^1.2.0", + "string-argv": "^0.3.1" + } + }, "tsconfig-paths": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", diff --git a/packages/jsonld-dataset-proxy/.eslintrc b/packages/jsonld-dataset-proxy/.eslintrc new file mode 100644 index 0000000..83c51a9 --- /dev/null +++ b/packages/jsonld-dataset-proxy/.eslintrc @@ -0,0 +1,3 @@ +{ + "extends": ["../../.eslintrc"] +} \ No newline at end of file diff --git a/packages/jsonld-dataset-proxy/README.md b/packages/jsonld-dataset-proxy/README.md new file mode 100644 index 0000000..e4c19cc --- /dev/null +++ b/packages/jsonld-dataset-proxy/README.md @@ -0,0 +1,809 @@ +# JSONLD Dataset Proxy + +Edit RDFJS Dataset just like regular JavaScript Object Literals. 
+ +Just a few lines of familiar code: +```typescript +const personNode = namedNode("http://example.com/Person1"); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +person.age = 23; +person.name.push("John"); +``` + +are equivalent to: +```typescript +dataset.deleteMatches( + namedNode("http://example.com/Person1"), + namedNode("http://xmlns.com/foaf/0.1/age") +); +dataset.add( + quad( + namedNode("http://example.com/Person1"), + namedNode("http://xmlns.com/foaf/0.1/age"), + literal("23", "http://www.w3.org/2001/XMLSchema#integer") + ) +); +dataset.add( + quad( + namedNode("http://example.com/Person1"), + namedNode("http://xmlns.com/foaf/0.1/name"), + literal("John", "http://www.w3.org/2001/XMLSchema#string") + ) +); +``` + +Plus, you get IntelliSense typescript suggestions to help you write your code! + +![Intellisense Example](./readme-images/Intellisense.png) + + +## Installation +```bash +npm install jsonld-dataset-proxy +``` + +## Simple Example +```typescript +import jsonldDatasetProxy, { write } from "jsonld-dataset-proxy"; +import { ContextDefinition } from "jsonld"; +import { serializedToDataset } from "o-dataset-pack"; +import { namedNode } from "@rdfjs/data-model"; + +async function start() { + // Define initial data + const initialData = ` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person1 + foaf:name "Johnathan"^^xsd:string; + foaf:age "22"^^xsd:integer. + `; + // Create a dataset loaded with initial data + const dataset = await serializedToDataset(initialData); + // Make a JSONLD Dataset Proxy + const person = jsonldDatasetProxy( + dataset, + PersonContext + ).fromSubject(namedNode("http://example.com/Person1")); + // Make Modifications + person.age = 23; + person.name.push("John"); + write(namedNode("http://example.com/otherGraph")).using(person); + person.name.push("Smith"); + + console.log(dataset.toString()); + // Logs: + // "Johnathan" . + // "John" . 
+ // "23"^^ . + // "Smith" . +} + +// Person Typescript Typing +interface IPerson { + name: string[]; + age: number; +} + +// Person JSONLD Context +const PersonContext: ContextDefinition = { + name: { + "@id": "http://xmlns.com/foaf/0.1/name", + "@type": "http://www.w3.org/2001/XMLSchema#string", + "@container": "@set", + }, + age: { + "@id": "http://xmlns.com/foaf/0.1/age", + "@type": "http://www.w3.org/2001/XMLSchema#integer", + }, +}; + +start(); +``` + +## Full Usage + + - [Defining a Context and Type](#defining-a-context-and-type) + - [Getting a Jsonld DatasetProxy](#getting-a-jsonld-dataset-proxy) + - [`.fromSubject(entryNode)`](#fromsubjecttentrynode) + - [`.matchSubject(predicate?, object?, graph?)`](#matchsubjecttpredicate-object-graph) + - [`.matchObject(subject?, predicate?, graph?)`](#matchobjecttsubject-predicate-object) + - [`.fromJson(inputData)`](#fromjsontinputdata) + - [Getting Field Values and Traversing](#getting-field-values-and-traversing) + - [Setting a Primitive](#setting-a-primitive) + - [Setting an Object](#setting-an-object) + - [Array Methods](#array-methods) + - [Overwriting an Object](#overwriting-an-object) + - [Changing an Object's Id](#changing-an-objects-id) + - [Removing an Object Connection](#removing-an-object-connection) + - [Deleting an Entire Object](#deleting-an-entire-object) + - [Using Blank Nodes](#using-blank-nodes) + - [Writing Information to a Specific Graph](#writing-information-to-a-specific-graph) + - [`jsonldDatasetProxy(...).write(...graphs)`](#jsonlddatasetproxywritegraphs) + - [`write(...graphs).using(...jsonldDatasetProxies)`](#writegraphsusingjsonlddatasetproxies) + - [`write(...graphs).usingCopy(...jsonldDatasetProxies)`](#writegraphsusingcopyjsonlddatasetproxies) + - [Detecting a the graph of specific information](#detecting-a-the-graph-of-specific-information) + +For the most part, a JSONLD Dataset Proxy has parity with JavaScript Object Literals. However, there are a few differences to highlight. 
This section details how you would do different tasks. + +### Defining a Context and Type +The first step to getting a JSONLD Dataset Proxy is defining the JSONLD Context and TypeScript Typings. This can either be done through a [generator](https://github.com/o-development/shexj2typeandcontext) or defining them manually. + +In this example typescript typing `IPerson` is an interface that represents a person. Notice the `@id` and `@context` fields. Be sure to include them in your interfaces if you wish to use those properties. + +```typescript +import { ContextDefinition } from "jsonld"; + +interface IPerson { + "@id"?: string; + "@context"?: ContextDefinition; + name?: string[]; + age?: number; + bestFriend?: IPerson; + knows?: IPerson[]; +} +``` + +We can make a [JSONLD context](https://w3c.github.io/json-ld-syntax/#the-context) to match this type: + +```typescript +import { ContextDefinition } from "jsonld"; + +const PersonContext: ContextDefinition = { + name: { + "@id": "http://xmlns.com/foaf/0.1/name", + "@type": "http://www.w3.org/2001/XMLSchema#string", + "@container": "@set", + }, + age: { + "@id": "http://xmlns.com/foaf/0.1/age", + "@type": "http://www.w3.org/2001/XMLSchema#integer", + }, + bestFriend: { + "@id": "http://xmlns.com/foaf/0.1/bestFriend", + "@type": "@id", + }, + knows: { + "@id": "http://xmlns.com/foaf/0.1/knows", + "@type": "@id", + "@container": "@set", + }, +}; +``` + +To do this, create an object that has corresponding fields to your type. Each field is an object that contains the following properties: + - `@id`: indicates the URI of the corresponding predicate + - `@type`: If the corresponding type is a pimitive (Like a number or string), use this field to list the RDF Literal type (Most often this is an XMLSchema type). If the corresponding type is an object, list `@id` here. + - `@container`: If the corresponding type is an array of items, set `@container` to `@set`, if not, do not include the `@container` property. 
+ +Note that only the features described here work with JSONLD Dataset Proxy. Other features of JSONLD Contexts are not yet supported. + +### Getting a Jsonld Dataset Proxy +Once the Typescript Typings and Context have been defined, we can get the JSONLD Dataset Proxy for a specific dataset. + +```typescript +import jsonldDatasetProxy from "jsonld-dataset-proxy"; +import { createDataset } from "o-dataset-pack"; + +const dataset = await createDataset(); +// Make a JSONLD Dataset Proxy +const person = jsonldDatasetProxy( + dataset, + PersonContext +) // ... +``` + +The functon `jsonldDatasetProxy` takes in three parameters: + - `dataset`: The dataset you wish to traverse and manipulate. This can be any dataset that follows the [RDFJS Dataset Interface](https://rdf.js.org/dataset-spec/#dataset-interface). Note that this is not to be confused with the RDFJS Dataset ***Core*** interface. This example uses the "o-dataset-pack", but any implementation of the RDFJS Dataset Interface is acceptable. + - `context`: The JSONLD context. + + After defining the `dataset` and `context` there are a few ways to get Jsonld Dataset Proxies: + +#### `.fromSubject(entryNode)` +`fromSubject` lets you define a an `entryNode`, the place of entry for the graph. The object returned by `jsonldDatasetProxy` will represent the given node. This parameter accepts both `namedNode`s and `blankNode`s. `fromSubject` takes a generic type representing the typescript type of the given subject. + +```typescript +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +``` + +#### `.matchSubject(predicate?, object?, graph?)` +`matchSubject` returns a Jsonld Dataset Proxy representing all subjects in the dataset matching the given predicate, object, and graph. 
+ +```typescript +const people = jsonldDatasetProxy( + dataset, + PersonContext +).matchSubject( + namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + namedNode("http://xmlns.com/foaf/0.1/Person") +); +people.forEach((person) => { + console.log(person.name); +}); +``` + +#### `.matchObject(subject?, predicate?, object?)` +`matchObject` returns a Jsonld Dataset Proxy representing all objects in the dataset matching the given subject, predicate, and graph. + +```typescript +const friendsOfPerson1 = jsonldDatasetProxy( + dataset, + PersonContext +).matchSubject( + namedNode("http://example.com/Person1"), + namedNode("http://xmlns.com/foaf/0.1/knows") +); +friendsOfPerson1.forEach((person) => { + console.log(person.name); +}); +``` + +#### `.fromJson(inputData)` +`fromJson` will take any regular Json, add the information to the dataset, and return a Jsonld Dataset Proxy representing the given data. + +```typescript +const person2 = jsonldDatasetProxy( + dataset, + PersonContext +).fromJson({ + "@id": "http://example.com/Person2", + name: ["Jane", "Doe"], + birthdate: "1990/11/03", + age: 33, +}); +``` + +### Getting Field Values and Traversing +Getting a field and traversing the object is just as easy as getting data out of a standard JavaScript Object Literal. + +In all the following example, we will use a dataset loaded with the following data: + +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person1 + foaf:name "Johnathan"^^xsd:string, "John"^^xsd:string; + foaf:age "22"^^xsd:integer; + foaf:bestFriend example:Person2; + foaf:friends example:Person2, example:Person3. + + example:Person2 + foaf:name "Alice"^^xsd:string; + foaf:age "28"^^xsd:integer. + + example:Person3 + foaf:name "Dave"^^xsd:string; + foaf:age "33"^^xsd:integer. 
+`); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +// Get primitives +console.log(person.age); // 22 +// Get nested primitives +console.log(person?.bestFriend?.age); // 28 +// All array methods work +console.log(person.name?.reduce((agg, cur) => agg + cur, "")); // JonathanJohn +// You can also access array items via their index +// But this isn't recommened. The library will do its best to maintain the +// ordering in the array, but as datasets have no concept of order, this is +// not always accurate. +console.log(person.name?.[1]); // John +// Get the id of the object +// (If the node is a blankNode the @id will be undefined) +console.log(person.bestFriend?.["@id"]); // "http://example.com/Person2" +// Finally, you can retrieve the context +console.log(person["@context"]); // { "name": { "@id": ... }} +``` + + + +### Setting a Primitive +Setting a non-array primitive will remove the existing triple from the dataset and add a new triple. + +```typescript +const dataset = createDataset(); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")) +person.age = 23; +console.log(dataset.toString()); +// "23"^^ . +``` + +### Setting an Object +Setting a field to a JavaScript object literal will recursively add all parts of the object literal to the dataset. + +```typescript +const dataset = createDataset(); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +person.bestFriend = { + "@id": "http://example.com/Person2", + name: ["Alice"], + bestFriend: { + "@id": "http://example.com/Person3", + name: ["Bob"], + }, +}; +console.log(dataset.toString()); +// . +// . +// "Alice" . +// "Bob" . +``` + +### Array Methods +Any methods that modify arrays work as expected. + +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . 
+ @prefix xsd: . + + example:Person1 + foaf:name "Garrett"^^xsd:string, "Bobby"^^xsd:string. +`); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +person.name?.push("Ferguson"); +console.log(dataset.toString()); +// "Garrett" . +// "Bobby" . +// "Ferguson" . +``` + +### Overwriting an Object +If an object literal is set and the id is equivalent to an existing id, that node will be overwritten. All triples from the previous object are removed and replaced with triples from the new object. + +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person2 + foaf:name "Alice"^^xsd:string; + foaf:age "28"^^xsd:integer. +`); +const person = jsonldDatasetProxy( + dataset, + PersonContext, +).fromSubject(namedNode("http://example.com/Person1")); +person.bestFriend = { + "@id": "http://example.com/Person2", + name: ["Jane"], +}; +console.log(dataset.toString()); +// "Jane" . +// . +``` + +### Changing an Object's Id +You can rename an object by setting its `@id` field. This will update all triples that reference the id to the new id. + +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person1 + foaf:name "Alice"^^xsd:string; + foaf:bestFriend example:Person2. + + example:Person2 + foaf:bestFriend example:Person1. +`); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")) +person["@id"] = "http://example.com/NewPersonId"; +console.log(dataset.toString()); +// . +// "Alice" . +// . +``` + +### Removing an Object Connection +Removing one triple can be done by setting a property to `undefined`; + +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person1 + foaf:name "Alice"^^xsd:string; + foaf:bestFriend example:Person2. 
+ + example:Person2 + foaf:name "Bob"^^xsd:string; + foaf:bestFriend example:Person1. +`); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +person.bestFriend = undefined; +console.log(dataset.toString()); +// "Alice" . +// "Bob" . +// . +``` + +### Deleting an Entire Object +If you want to delete all triples represented by an object, there are two ways using the `delete` operator. + +First, you can call `delete` on a specific property: +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person1 + foaf:name "Alice"^^xsd:string; + foaf:bestFriend example:Person2. + + example:Person2 + foaf:name "Bob"^^xsd:string; + foaf:bestFriend example:Person1. +`); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +delete person.bestFriend; +console.log(dataset.toString()); +// "Alice" . +``` + +And secondly, you can call `delete` on the `@id` property. +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person1 + foaf:name "Alice"^^xsd:string; + foaf:bestFriend example:Person2. + + example:Person2 + foaf:name "Bob"^^xsd:string; + foaf:bestFriend example:Person1. +`); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +delete person["@id"]; +console.log(dataset.toString()); +// "Bob" . +``` + +### Using Blank Nodes +If you want to create an object with a blankNode subject, simply omit the `@id` field when you're making the object. +```typescript +const dataset = await createDataset(); +const person = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +person.bestFriend = { + name: ["Charlie"], +}; +console.log(dataset.toString()); +// _:b1 . +// _:b1 "Charlie" . 
+``` + +If your dataset has blank nodes and you want to assign that blank node as a triple's object, you can retrieve it from the JSONLD Dataset Proxy and assign it. +```typescript +const dataset = await serializedToDataset(` + @prefix example: . + @prefix foaf: . + @prefix xsd: . + + example:Person1 + foaf:knows [ + foaf:name "Alice"^^xsd:string; + ]. +`); +const person = jsonldDatasetProxy( + dataset, + PersonContext, +).fromSubject(namedNode("http://example.com/Person1")); + +const alice = person.knows?.[0]; +person.bestFriend = alice; +console.log(dataset.toString()); +// _:n3-0 "Alice" . +// _:n3-0 . +// _:n3-0 . +``` + +### Writing Information to a Specific Graph +By default, all new quads are added to the default graph, but you can change the graph to which new quads are added in a few different ways: + +NOTE: These operations only dictate the graph for new triples. Any operations that delete triples will delete triples regardless of their graph. + +#### `jsonldDatasetProxy(...).write(...graphs)` +The write graph can be set upon creating a jsonld dataset proxy by using the `write` method. This method takes in any number of graphs. + +```typescript +const person1 = jsonldDatasetProxy(dataset, PersonContext) + .write(namedNode("http://example.com/ExampleGraph")) + .fromSubject(namedNode("http://example.com/Person1")); +person1.name.push("Jack"); +console.log(dataset.toString()); +// Logs: +// "Jack" . +``` + +#### `write(...graphs).using(...jsonldDatasetProxies)` +The `write(...).using(...)` function lets you define the graphs you wish to write to using specific jsonldDatasetProxies. 
+ +```typescript +import jsonldDatasetProxy, { write } from "jsonld-dataset-proxy"; + +const person1 = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +// Now all additions with person1 will be on ExampleGraph1 +write(namedNode("http://example.com/ExampleGraph1")).using(person1); +person1.name.push("Jack"); +// Now all additions with person1 will be on ExampleGraph2 +write(namedNode("http://example.com/ExampleGraph2")).using(person1); +person1.name.push("Spicer"); + +console.log(dataset.toString()); +// Logs: +// "Jack" . +// "Spicer" . +``` + +The function also returns an `end` function that will reset the graph to what it was before. This is useful for nesting graph modifications. + +```typescript +const person1 = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +person1.name.push("default"); +const end1 = write(namedNode("http://example.com/Graph1")).using(person1); +person1.name.push("1"); +const end2 = write(namedNode("http://example.com/Graph2")).using(person1); +person1.name.push("2"); +const end3 = write(namedNode("http://example.com/Graph3")).using(person1); +person1.name.push("3"); +end3(); +person1.name.push("2 again"); +end2(); +person1.name.push("1 again"); +end1(); +person1.name.push("default again"); +console.log(dataset.toString()); +// Logs: +// "default" . +// "default again" . +// "1" . +// "1 again" . +// "2" . +// "2 again" . +// "3" . +``` + +#### `write(...graphs).usingCopy(...jsonldDatasetProxies)` +If you would like a new variable to write to without modifying the original Jsonld Dataset Proxy, you can use `write(...).usingCopy(...)`. 
+ +```typescript +const person1 = jsonldDatasetProxy( + dataset, + PersonContext +).fromSubject(namedNode("http://example.com/Person1")); +const [person1WritingToNewGraph] = write( + namedNode("http://example.com/NewGraph") +).usingCopy(person1); +person1WritingToNewGraph.name.push("Brandon"); +person1.name.push("Sanderson"); +console.log(dataset.toString()); +// Logs: +// "Brandon" . +// "Sanderson" . +``` + +### Detecting the graph of specific information + +The graph of specific information can be detected using the `graphOf(subject, predicate, object)` function. The `graphOf` function takes in two to three arguments. + + - `subject`: A Jsonld Dataset Proxy that represents the subject of a quad. + - `predicate`: A string key + - `object?`: An optional parameter that represents the direct object of a statement. This could be a Jsonld Dataset Proxy or a number to indicate the location in an array. This argument can be left blank if the given field is not an array. + +```typescript +graphOf(person, "name", 0); // returns defaultGraph() +graphOf(person, "age"); // returns defaultGraph() +``` + +## Managing Language Tags + +RDF includes a special attribute for string literals called a [language tag](https://www.w3.org/TR/rdf11-concepts/#section-Graph-Literal). Language tags let developers provide string representations for many different translations and JSON-LD Dataset Proxy helps you manage them. + +To use language tags, RDF requires the datatype of a literal to be `http://www.w3.org/1999/02/22-rdf-syntax-ns#langString`, and LDO's functions will only work on literals of that type. + +For the following examples, we'll use this context and dataset, typescript typing and JSON-LD Context. Notice that there is a field called "label" with translations for French and Korean and one language string that doesn't have a language tag. There's also a field called "description" that holds multiple strings per language. 
+ +```typescript +// Define initial data +const initialData = ` + @prefix example: . + @prefix rdfs: . + @prefix ns: . + + example:Hospital + rdfs:label "Hospital"^^ns:langString; + rdfs:label "Hôpital"@fr; + rdfs:label "병원"@ko; + rdfs:description "Heals patients"^^ns:langString; + rdfs:description "Has doctors"^^ns:langString; + rdfs:description "Guérit les malades"@fr; + rdfs:description "A des médecins"@fr; + rdfs:description "환자를 치료하다"@ko; + rdfs:description "의사 있음"@ko. +`; + +// Typescript Typing +interface IThing { + label: string; + description: string[]; +} + +// Define JSON-LD Context +const PersonContext: ContextDefinition = { + label: { + "@id": "http://www.w3.org/2000/01/rdf-schema#label", + "@type": "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", + }, + description: { + "@id": "http://www.w3.org/2000/01/rdf-schema#description", + "@type": "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", + "@container": "@set", + }, +}; +``` + +### Language Preferences +A language preference is an ordered list telling the JSON-LD Dataset Proxy the language you prefer as well as callbacks. + +Valid values for the language preferences includes any [IETF Language Tag](https://en.wikipedia.org/wiki/IETF_language_tag) as well as the special tags `@none` and `@other`. `@none` represents any language literal that doesn't have a language tag. `@other` represents any language literal that isn't listed among the language preferences. + +For read operations, the JSON-LD Dataset Proxy will search for values in order of the preference. Write operations will choose the first language in the language preference, unless that language is `@other`, in which case it will choose the next language. 
+ +```typescript +// Read Spanish first, then Korean, then language strings with no language +// New writes are in Spanish +["es", "ko", "@none"] + +// Read any language other than french, then french +// New writes are in French +["@other", "fr"] +``` + +Language preferences can be set when a JSON-LD Dataset Proxy is created using the `setLanguagePreferences` method. + +```typescript +// Create a dataset loaded with initial data +const dataset = await serializedToDataset(initialData); +// Make a JSONLD Dataset Proxy +const hospitalInfo = jsonldDatasetProxy(dataset, PersonContext) + .setLanguagePreferences("es", "ko", "@none") + .fromSubject(namedNode("http://example.com/Hospital")); + +console.log(hospitalInfo.label); // Logs "병원" +console.log(hospitalInfo.description.length); // Logs "2" for the 2 korean entries +console.log(hospitalInfo.description[0]); // Logs "환자를 치료하다" +console.log(hospitalInfo.description[1]); // Logs "의사 있음" + +// Adds a string to the description in Spanish, because Spanish is the first +// language in the language preference +hospitalInfo.description.push("Cura a las pacientes"); + +// Now that a spanish entry exists, JSON-LD dataset proxy focuses on that +console.log(hospitalInfo.description.length); // Logs "1" for the 1 spanish entry +console.log(hospitalInfo.description[0]); // Logs "Cura a las pacientes" +``` + +### `setLanguagePreferences(...languagePreferences).using(...jsonldDatasetProxies)` +The `setLanguagePreferences(...).using(...)` function sets the language preferences for a set of JSON-LD Dataset Proxies. It follows roughly the same paradigms as the `write(...).using(...)` function. 
+ +```typescript +import { setLanguagePreferences } from "jsonld-dataset-proxy"; + +setLanguagePreferences("fr", "ko").using(hospitalInfo); +console.log(hospitalInfo.label); // Logs "Hôpital" +setLanguagePreferences("@none").using(hospitalInfo); +console.log(hospitalInfo.label); // Logs "Hospital" +``` + +### `setLanguagePreferences(...languagePreferences).usingCopy(...jsonldDatasetProxies)` +The `setLanguagePreferences(...).usingCopy(...)` function returns a copy of the provided JSON-LD Dataset Proxies with the given language preferences. It follows roughly the same paradigms as the `write(...).usingCopy(...)` function. + +```typescript +import { setLanguagePreferences } from "jsonld-dataset-proxy"; + +// ... + +const [frenchPreference] = setLanguagePreferences("fr").usingCopy(hospitalInfo); +const [koreanPreference] = setLanguagePreferences("ko").usingCopy(hospitalInfo); +console.log(frenchPreference.label); // Logs "Hôpital" +console.log(koreanPreference.label); // Logs "병원" +``` + +### `languagesOf(jsonldDatasetProxy, key)` +The `languagesOf` function lets you view and modify the languages more directly. `languagesOf` takes two properties: + + - `jsonldDatasetProxy`: A JSON-LD dataset proxy + - `key`: A key on the JSON-LD dataset proxy pointing to a language string. + +It returns a mapping of languages to strings or sets of strings depending on the cardinality of the JSON-LD context. + +```typescript +const labelLanguages = languagesOf(hospitalInfo, "label"); +// labelLanguages: { '@none': 'Hospital', fr: 'Hôpital', ko: '병원' } +const descriptionLanguages = languagesOf(hospitalInfo, "description"); +// descriptionLanguages: +// { +// '@none': Set(2) { 'Heals patients', 'Has doctors' }, +// fr: Set(2) { 'Guérit les malades', 'A des médecins' }, +// ko: Set(2) { '환자를 치료하다', '의사 있음' } +// } +``` + +You can also modify languages by changing the mappings. 
Mappings with sets of strings follow the JavaScript [`Set` interface](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set). + +```typescript +// Adds a Chinese label +labelLanguages.zh = "医院"; +// Changes the no-language label to "Super Hospital" +labelLanguages["@none"] = "Super Hospital"; +// Removes the French label +delete labelLanguages.fr; +// Adds a Hindi description +descriptionLanguages.hi?.add("रोगियों को ठीक करता है"); +// Checks to see if the korean label contains "의사 있음" +descriptionLanguages.ko?.has("의사 있음"); // returns true +// Removes "Has Doctors" from the no-language description +descriptionLanguages["@none"]?.delete("Has Doctors"); +``` + +## Limitations + - Currently this library only supports the following features of JSON-LD context: + - "@id", + - "@type", + - "@container": "@set" + +## License +MIT \ No newline at end of file diff --git a/packages/jsonld-dataset-proxy/jest.config.js b/packages/jsonld-dataset-proxy/jest.config.js new file mode 100644 index 0000000..bad5f64 --- /dev/null +++ b/packages/jsonld-dataset-proxy/jest.config.js @@ -0,0 +1,6 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires +const sharedConfig = require("../../jest.config.js"); +module.exports = { + ...sharedConfig, + rootDir: "./", +}; diff --git a/packages/jsonld-dataset-proxy/package.json b/packages/jsonld-dataset-proxy/package.json new file mode 100644 index 0000000..5a72ad9 --- /dev/null +++ b/packages/jsonld-dataset-proxy/package.json @@ -0,0 +1,46 @@ +{ + "name": "@ldo/jsonld-dataset-proxy", + "version": "0.0.0", + "description": "", + "main": "dist/index.js", + "scripts": { + "build": "tsc --project tsconfig.build.json", + "build:watch": "tsc-watch", + "test": "jest --coverage", + "prepublishOnly": "npm run test && npm run build", + "start": "ts-node ./example/example.ts", + "start:lang": "ts-node ./example/languageExample.ts", + "lint": "eslint src/** --fix --no-error-on-unmatched-pattern" + }, + 
"repository": { + "type": "git", + "url": "git+https://github.com/o-development/jsonld-dataset-proxy.git" + }, + "author": "Jackson Morgan", + "license": "MIT", + "bugs": { + "url": "https://github.com/o-development/jsonld-dataset-proxy/issues" + }, + "homepage": "https://github.com/o-development/jsonld-dataset-proxy#readme", + "devDependencies": { + "@rdfjs/types": "^1.0.1", + "@types/jest": "^27.0.3", + "@types/jsonld": "^1.5.6", + "@types/n3": "^1.10.4", + "@types/rdfjs__dataset": "^1.0.5", + "@types/shexj": "2.1.4", + "jest": "^27.4.5", + "shex-test": "^0.5.5", + "ts-jest": "^27.1.2", + "ts-node": "^10.4.0", + "tsc-watch": "^6.0.0" + }, + "files": [ + "dist" + ], + "dependencies": { + "@rdfjs/data-model": "^1.2.0", + "jsonld2graphobject": "^0.0.4", + "o-dataset-pack": "^0.2.14" + } +} diff --git a/packages/jsonld-dataset-proxy/readme-images/Intellisense.png b/packages/jsonld-dataset-proxy/readme-images/Intellisense.png new file mode 100644 index 0000000..9c25bec Binary files /dev/null and b/packages/jsonld-dataset-proxy/readme-images/Intellisense.png differ diff --git a/packages/jsonld-dataset-proxy/src/ContextUtil.ts b/packages/jsonld-dataset-proxy/src/ContextUtil.ts new file mode 100644 index 0000000..887f7cf --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/ContextUtil.ts @@ -0,0 +1,90 @@ +import type { ContextDefinition, ExpandedTermDefinition } from "jsonld"; + +// Create JSONLD Shorthands +const shorthandToIriMap: Record = { + "@type": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type", +}; + +/** + * Context Util + * Handles the JSON-LD context and allows conversion between IRIs and terms + */ +export class ContextUtil { + public readonly context: ContextDefinition; + private iriToKeyMap: Record; + + constructor(context: ContextDefinition) { + this.context = context; + this.iriToKeyMap = {}; + Object.entries(context).forEach(([contextKey, contextValue]) => { + if (typeof contextValue === "string") { + this.iriToKeyMap[this.keyIdToIri(contextValue)] = 
contextKey; + } else if ( + typeof contextValue === "object" && + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (contextValue as any)["@id"] + ) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.iriToKeyMap[this.keyIdToIri((contextValue as any)["@id"])] = + contextKey; + } + }); + } + + public keyToIri(key: string): string { + if (!this.context[key]) { + return key; + } else if (typeof this.context[key] === "string") { + return this.keyIdToIri(this.context[key] as string); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } else if (this.context[key] && (this.context[key] as any)["@id"]) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this.keyIdToIri((this.context[key] as any)["@id"]); + } + return key; + } + + private keyIdToIri(keyId: string) { + if (shorthandToIriMap[keyId]) { + return shorthandToIriMap[keyId]; + } else { + return keyId; + } + } + + public iriToKey(iri: string): string { + if (this.iriToKeyMap[iri]) { + return this.iriToKeyMap[iri]; + } + return iri; + } + + public getType(key: string): string { + if ( + typeof this.context[key] === "object" && + (this.context[key] as ExpandedTermDefinition)["@type"] + ) { + return (this.context[key] as ExpandedTermDefinition)["@type"] as string; + } + return "http://www.w3.org/2001/XMLSchema#string"; + } + + public isArray(key: string): boolean { + return !!( + this.context[key] && + typeof this.context[key] === "object" && + (this.context[key] as ExpandedTermDefinition)["@container"] && + (this.context[key] as ExpandedTermDefinition)["@container"] === "@set" + ); + } + + public isLangString(key: string): boolean { + return !!( + this.context[key] && + typeof this.context[key] === "object" && + (this.context[key] as ExpandedTermDefinition)["@type"] && + (this.context[key] as ExpandedTermDefinition)["@type"] === + "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString" + ); + } +} diff --git 
a/packages/jsonld-dataset-proxy/src/JsonldDatasetProxyBuilder.ts b/packages/jsonld-dataset-proxy/src/JsonldDatasetProxyBuilder.ts new file mode 100644 index 0000000..5c29f75 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/JsonldDatasetProxyBuilder.ts @@ -0,0 +1,105 @@ +import { blankNode, namedNode } from "@rdfjs/data-model"; +import type { BlankNode, NamedNode } from "@rdfjs/types"; +import type { LanguageOrdering } from "./language/languageTypes"; +import type { ProxyContext } from "./ProxyContext"; +import type { GraphType, ObjectLike, QuadMatch } from "./types"; + +/** + * Helps build JSON LD Dataset Proxies for a specific dataset and context + */ +export class JsonldDatasetProxyBuilder { + private proxyContext: ProxyContext; + + constructor(proxyContext: ProxyContext) { + this.proxyContext = proxyContext; + } + + /** + * Designates that all Jsonld Dataset Proxies created should write to the + * specified graphs + */ + write(...graphs: GraphType[]): JsonldDatasetProxyBuilder { + return new JsonldDatasetProxyBuilder( + this.proxyContext.duplicate({ writeGraphs: graphs }), + ); + } + + /** + * List the language tags in the order they should be used. When a langString + * is accessed, LDO will search for values in the order of language given. + * @param languageOrdering The ordering of languages. For example + * ("en", "fr", "none", "other"). Defaults to + * ("none", "en", "other") + */ + setLanguagePreferences( + ...languageOrdering: LanguageOrdering + ): JsonldDatasetProxyBuilder { + return new JsonldDatasetProxyBuilder( + this.proxyContext.duplicate({ languageOrdering }), + ); + } + + /** + * Creates a JSON LD Dataset Proxy that matches the given subject + * @param subject The node to match + */ + fromSubject(subject: NamedNode | BlankNode): T { + return this.proxyContext.createSubjectProxy(subject) as unknown as T; + } + + /** + * Matches Subjects to provided predicates, objects, and graphs. Returns a + * JSON LD Dataset that can be read and modified. 
+ * @param predicate The predicate to match + * @param object The object to match + * @param graph The graph to match + */ + matchSubject( + predicate?: QuadMatch[1], + object?: QuadMatch[2], + graph?: QuadMatch[3], + ): T[] { + return this.proxyContext.createArrayProxy( + [null, predicate, object, graph], + true, + ) as unknown as T[]; + } + + /** + * Matches Objects to provided subjects, predicates, and graphs. Returns a + * JSON LD Dataset that can be read and modified. + * @param subject The subject to match + * @param predicate The predicate to match + * @param graph The graph to match + */ + matchObject( + subject?: QuadMatch[0], + predicate?: QuadMatch[1], + graph?: QuadMatch[3], + ): T[] { + return this.proxyContext.createArrayProxy([ + subject, + predicate, + null, + graph, + ]) as unknown as T[]; + } + + /** + * Takes a given object and places it in the dataset while returning a JSON LD + * Dataset Proxy representing the object. + * + * @param inputData Initial Data + * @param graph Optional graph to save this data to + */ + fromJson(inputData: T): T { + const entryNode = inputData["@id"] + ? 
+ namedNode(inputData["@id"]) + : blankNode(); + const proxy = this.fromSubject(entryNode); + Object.entries(inputData).forEach(([key, value]) => { + proxy[key] = value; + }); + return proxy; + } +} diff --git a/packages/jsonld-dataset-proxy/src/ProxyContext.ts b/packages/jsonld-dataset-proxy/src/ProxyContext.ts new file mode 100644 index 0000000..1fa43bb --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/ProxyContext.ts @@ -0,0 +1,110 @@ +import type { BlankNode, Dataset, NamedNode } from "@rdfjs/types"; +import type { ArrayProxyTarget } from "./arrayProxy/createArrayHandler"; +import { createArrayHandler } from "./arrayProxy/createArrayHandler"; +import { createSubjectHandler } from "./subjectProxy/createSubjectHandler"; +import type { SubjectProxy } from "./subjectProxy/SubjectProxy"; +import type { ArrayProxy } from "./arrayProxy/ArrayProxy"; +import type { GraphType, QuadMatch } from "./types"; +import { _getUnderlyingArrayTarget } from "./types"; +import type { ContextUtil } from "./ContextUtil"; +import type { LanguageOrdering } from "./language/languageTypes"; + +export interface ProxyContextOptions { + dataset: Dataset; + contextUtil: ContextUtil; + writeGraphs: GraphType[]; + languageOrdering: LanguageOrdering; + prefilledArrayTargets?: ArrayProxyTarget[]; + state?: Record; +} + +/** + * This file keeps track of the target objects used in the proxies. + * The reason is so that JSON.stringify does not recurse infinitely + * when it encounters a circular object. 
+ */ +export class ProxyContext { + private subjectMap: Map = new Map(); + private arrayMap: Map = new Map(); + + readonly dataset: Dataset; + readonly contextUtil: ContextUtil; + readonly writeGraphs: GraphType[]; + readonly languageOrdering: LanguageOrdering; + public state: Record; + + constructor(options: ProxyContextOptions) { + this.dataset = options.dataset; + this.contextUtil = options.contextUtil; + this.writeGraphs = options.writeGraphs; + this.languageOrdering = options.languageOrdering; + this.state = options.state || {}; + if (options.prefilledArrayTargets) { + options.prefilledArrayTargets.forEach((target) => { + this.createArrayProxy(target[0], target[2], target); + }); + } + } + + public createSubjectProxy(node: NamedNode | BlankNode): SubjectProxy { + if (!this.subjectMap.has(node.value)) { + const proxy = new Proxy( + { "@id": node }, + this.createSubjectHandler(), + ) as unknown as SubjectProxy; + this.subjectMap.set(node.value, proxy); + } + return this.subjectMap.get(node.value) as SubjectProxy; + } + + protected createSubjectHandler() { + return createSubjectHandler(this); + } + + private getArrayKey(...quadMatch: QuadMatch) { + return `${quadMatch[0]?.value || "undefined"}|${ + quadMatch[1]?.value || "undefined" + }|${quadMatch[2]?.value || "undefined"}|${ + quadMatch[3]?.value || "undefined" + }`; + } + + public createArrayProxy( + quadMatch: QuadMatch, + isSubjectOriented = false, + initialTarget?: ArrayProxyTarget, + isLangStringArray?: boolean, + ): ArrayProxy { + const key = this.getArrayKey(...quadMatch); + if (!this.arrayMap.has(key)) { + const proxy = new Proxy( + initialTarget || [quadMatch, [], isSubjectOriented, isLangStringArray], + this.createArrayHandler(), + ) as unknown as ArrayProxy; + this.arrayMap.set(key, proxy); + } + return this.arrayMap.get(key) as ArrayProxy; + } + + protected createArrayHandler() { + return createArrayHandler(this); + } + + public duplicate(alternativeOptions: Partial) { + const prefilledArrayTargets: 
ArrayProxyTarget[] = []; + this.arrayMap.forEach((value) => { + prefilledArrayTargets.push(value[_getUnderlyingArrayTarget]); + }); + const fullOptions: ProxyContextOptions = { + ...{ + dataset: this.dataset, + contextUtil: this.contextUtil, + writeGraphs: this.writeGraphs, + languageOrdering: this.languageOrdering, + prefilledArrayTargets, + }, + ...alternativeOptions, + }; + return new ProxyContext(fullOptions); + } +} diff --git a/packages/jsonld-dataset-proxy/src/arrayProxy/ArrayProxy.ts b/packages/jsonld-dataset-proxy/src/arrayProxy/ArrayProxy.ts new file mode 100644 index 0000000..e44b2ce --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/arrayProxy/ArrayProxy.ts @@ -0,0 +1,20 @@ +import type { Dataset } from "@rdfjs/types"; +import type { ArrayProxyTarget } from "./createArrayHandler"; +import type { + ObjectType, + _getNodeAtIndex, + _getUnderlyingArrayTarget, + _getUnderlyingDataset, + _getUnderlyingMatch, + _proxyContext, +} from "../types"; +import { _getUnderlyingNode } from "../types"; +import type { ProxyContext } from "../ProxyContext"; + +export type ArrayProxy = Array & { + readonly [_getUnderlyingDataset]: Dataset; + readonly [_getUnderlyingMatch]: ArrayProxyTarget[0]; + readonly [_getNodeAtIndex]: (index: number) => ObjectType | undefined; + readonly [_getUnderlyingArrayTarget]: ArrayProxyTarget; + [_proxyContext]: ProxyContext; +}; diff --git a/packages/jsonld-dataset-proxy/src/arrayProxy/arrayMethods.ts b/packages/jsonld-dataset-proxy/src/arrayProxy/arrayMethods.ts new file mode 100644 index 0000000..c474e15 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/arrayProxy/arrayMethods.ts @@ -0,0 +1,216 @@ +import type { ArrayProxyTarget } from "./createArrayHandler"; +import type { ObjectJsonRepresentation } from "../util/nodeToJsonldRepresentation"; +import { nodeToJsonldRepresentation } from "../util/nodeToJsonldRepresentation"; +import { modifyArray } from "./modifyArray"; +import type { ProxyContext } from "../ProxyContext"; + +export 
type methodBuilder = ( + target: ArrayProxyTarget, + key: string, + proxyContext: ProxyContext, +) => Return; + +export interface ArrayMethodBuildersType { + copyWithin: methodBuilder["copyWithin"]>; + fill: methodBuilder["fill"]>; + pop: methodBuilder["pop"]>; + push: methodBuilder["push"]>; + reverse: methodBuilder["reverse"]>; + shift: methodBuilder["shift"]>; + sort: methodBuilder["sort"]>; + splice: methodBuilder["splice"]>; + unshift: methodBuilder["unshift"]>; +} + +export const methodNames: Set = new Set([ + "copyWithin", + "fill", + "pop", + "push", + "reverse", + "shift", + "sort", + "splice", + "unshift", +]); + +export const arrayMethodsBuilders: ArrayMethodBuildersType = { + copyWithin: (target, key, proxyContext) => { + return (targetIndex, start, end) => { + return modifyArray( + { + target, + key, + quadsToDelete: (quads) => { + const realEnd = end || quads.length; + return quads.slice(targetIndex, targetIndex + (realEnd - start)); + }, + modifyCoreArray: (coreArray) => { + coreArray.copyWithin(targetIndex, start, end); + return proxyContext.createArrayProxy( + target[0], + target[2], + ) as ObjectJsonRepresentation[]; + }, + }, + proxyContext, + ); + }; + }, + fill: (target, key, proxyContext) => { + return (value, start, end) => { + return modifyArray( + { + target, + key, + toAdd: [value], + quadsToDelete: (quads) => { + return quads.slice(start, end); + }, + modifyCoreArray: (coreArray, addedValues) => { + coreArray.fill(addedValues[0], start, end); + return proxyContext.createArrayProxy( + target[0], + target[2], + ) as ObjectJsonRepresentation[]; + }, + }, + proxyContext, + ); + }; + }, + pop: (target, key, proxyContext) => { + return () => { + return modifyArray( + { + target, + key, + quadsToDelete: (quads) => { + return quads[quads.length - 1] ? [quads[quads.length - 1]] : []; + }, + modifyCoreArray: (coreArray) => { + const popped = coreArray.pop(); + return popped + ? 
nodeToJsonldRepresentation(popped, proxyContext) + : undefined; + }, + }, + proxyContext, + ); + }; + }, + push: (target, key, proxyContext) => { + return (...args) => { + return modifyArray( + { + target, + key, + toAdd: args, + modifyCoreArray: (coreArray, addedValues) => { + coreArray.push(...addedValues); + return proxyContext.createArrayProxy(target[0], target[2]).length; + }, + }, + proxyContext, + ); + }; + }, + reverse: (target, _key, proxyContext) => { + return () => { + target[1].reverse(); + return proxyContext.createArrayProxy( + target[0], + target[2], + ) as ObjectJsonRepresentation[]; + }; + }, + shift: (target, key, proxyContext) => { + return () => { + return modifyArray( + { + target, + key, + quadsToDelete: (quads) => { + return quads[0] ? [quads[0]] : []; + }, + modifyCoreArray: (coreArray) => { + const shifted = coreArray.shift(); + return shifted + ? nodeToJsonldRepresentation(shifted, proxyContext) + : undefined; + }, + }, + proxyContext, + ); + }; + }, + sort: (target, _key, proxyContext) => { + return (compareFunction) => { + if (compareFunction) { + target[1].sort((a, b) => { + return compareFunction( + nodeToJsonldRepresentation(a, proxyContext), + nodeToJsonldRepresentation(b, proxyContext), + ); + }); + } else if (target) { + target[1].sort((a, b) => { + const aReal = nodeToJsonldRepresentation(a, proxyContext); + const bReal = nodeToJsonldRepresentation(b, proxyContext); + if (aReal > bReal) { + return 1; + } else if (bReal > aReal) { + return -1; + } else { + return 0; + } + }); + } + return proxyContext.createArrayProxy( + target[0], + target[2], + ) as ObjectJsonRepresentation[]; + }; + }, + splice: (target, key, proxyContext) => { + return (start, deleteCount, ...items: ObjectJsonRepresentation[]) => { + return modifyArray( + { + target, + key, + toAdd: items, + quadsToDelete: (quads) => { + return quads.splice(start, deleteCount); + }, + modifyCoreArray: (coreArray, addedValues) => { + const spliced = coreArray.splice( + start, + 
deleteCount || 0, + ...addedValues, + ); + return spliced.map((node) => { + return nodeToJsonldRepresentation(node, proxyContext); + }); + }, + }, + proxyContext, + ); + }; + }, + unshift: (target, key, proxyContext) => { + return (...args) => { + return modifyArray( + { + target, + key, + toAdd: args, + modifyCoreArray: (coreArray, addedValues) => { + coreArray.unshift(...addedValues); + return proxyContext.createArrayProxy(target[0], target[2]).length; + }, + }, + proxyContext, + ); + }; + }, +}; diff --git a/packages/jsonld-dataset-proxy/src/arrayProxy/createArrayHandler.ts b/packages/jsonld-dataset-proxy/src/arrayProxy/createArrayHandler.ts new file mode 100644 index 0000000..375db9b --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/arrayProxy/createArrayHandler.ts @@ -0,0 +1,177 @@ +import type { NamedNode } from "@rdfjs/types"; +import type { ObjectJsonRepresentation } from "../util/nodeToJsonldRepresentation"; +import { nodeToJsonldRepresentation } from "../util/nodeToJsonldRepresentation"; +import { quad } from "@rdfjs/data-model"; +import type { ArrayMethodBuildersType } from "./arrayMethods"; +import { arrayMethodsBuilders, methodNames } from "./arrayMethods"; +import type { ObjectType, QuadMatch, SubjectType } from "../types"; +import { + _getNodeAtIndex, + _getUnderlyingArrayTarget, + _getUnderlyingDataset, + _getUnderlyingMatch, + _isSubjectOriented, + _proxyContext, +} from "../types"; +import { modifyArray } from "./modifyArray"; +import type { ProxyContext } from "../ProxyContext"; +import { NodeSet } from "../util/NodeSet"; +import { filterQuadsByLanguageOrdering } from "../language/languageUtils"; + +export type ArrayProxyTarget = [ + quadMatch: QuadMatch, + curArray: ObjectType[], + isSubjectOriented?: boolean, + isLangStringArray?: boolean, +]; + +function updateArrayOrder( + target: ArrayProxyTarget, + proxyContext: ProxyContext, +): void { + let quads = proxyContext.dataset.match(...target[0]); + if (target[3]) { + // Is lang string array 
+ quads = filterQuadsByLanguageOrdering(quads, proxyContext.languageOrdering); + } + const datasetObjects = new NodeSet(); + quads.toArray().forEach((quad) => { + // If this this a subject-oriented document + if (target[2]) { + datasetObjects.add(quad.subject as SubjectType); + } else { + datasetObjects.add(quad.object as ObjectType); + } + }); + const processedObjects: ObjectType[] = []; + target[1].forEach((arrItem) => { + if (datasetObjects.has(arrItem)) { + processedObjects.push(arrItem); + datasetObjects.delete(arrItem); + } + }); + datasetObjects.toArray().forEach((datasetObject) => { + processedObjects.push(datasetObject); + }); + target[1] = processedObjects; +} + +function getProcessedArray( + target: ArrayProxyTarget, + proxyContext: ProxyContext, +): ObjectJsonRepresentation[] { + return target[1].map((node) => { + return nodeToJsonldRepresentation(node, proxyContext); + }); +} + +export function createArrayHandler( + proxyContext: ProxyContext, +): ProxyHandler { + return { + get(target, key, ...rest) { + switch (key) { + case _getUnderlyingDataset: + return proxyContext.dataset; + case _getUnderlyingMatch: + return target[0]; + case _isSubjectOriented: + return target[2]; + case _getUnderlyingArrayTarget: + return target; + case _proxyContext: + return proxyContext; + case _getNodeAtIndex: + return (index: number): ObjectType | undefined => { + updateArrayOrder(target, proxyContext); + return target[1][index]; + }; + } + + // TODO: Because of this, every get operation is O(n). 
Consider changing + // this + updateArrayOrder(target, proxyContext); + const processedArray = getProcessedArray(target, proxyContext); + if (methodNames.has(key as keyof ArrayMethodBuildersType)) { + return arrayMethodsBuilders[key as keyof ArrayMethodBuildersType]( + target, + key as string, + proxyContext, + ); + } + return Reflect.get(processedArray, key, ...rest); + }, + getOwnPropertyDescriptor(target, key, ...rest) { + updateArrayOrder(target, proxyContext); + const processedArray = getProcessedArray(target, proxyContext); + return Reflect.getOwnPropertyDescriptor(processedArray, key, ...rest); + }, + ownKeys(target, ...rest) { + updateArrayOrder(target, proxyContext); + const processedArray = getProcessedArray(target, proxyContext); + return Reflect.ownKeys(processedArray, ...rest); + }, + getPrototypeOf(target, ...rest) { + updateArrayOrder(target, proxyContext); + const processedObjects = getProcessedArray(target, proxyContext); + return Reflect.getPrototypeOf(processedObjects, ...rest); + }, + has(target, ...rest) { + updateArrayOrder(target, proxyContext); + const processedObjects = getProcessedArray(target, proxyContext); + return Reflect.has(processedObjects, ...rest); + }, + set(target, key, value, ...rest) { + if (key === _proxyContext) { + proxyContext = value; + return true; + } + updateArrayOrder(target, proxyContext); + if (typeof key !== "symbol" && !isNaN(parseInt(key as string))) { + const index = parseInt(key); + return modifyArray( + { + target, + key, + toAdd: [value], + quadsToDelete(allQuads) { + return allQuads[index] ? 
[allQuads[index]] : []; + }, + modifyCoreArray(coreArray, addedValues) { + coreArray[index] = addedValues[0]; + return true; + }, + }, + proxyContext, + ); + } + return Reflect.set(target[1], key, ...rest); + }, + deleteProperty(target, key) { + const { dataset } = proxyContext; + if (typeof key !== "symbol" && !isNaN(parseInt(key as string))) { + const objectQuad = dataset.match(...target[0]).toArray()[parseInt(key)]; + if (!objectQuad) { + return true; + } + const term = target[2] ? objectQuad.subject : objectQuad.object; + if (term.termType === "Literal") { + const subject = target[0][0] as NamedNode; + const predicate = target[0][1] as NamedNode; + if (subject && predicate) { + dataset.delete(quad(subject, predicate, term)); + } + return true; + } else if ( + term.termType === "NamedNode" || + term.termType === "BlankNode" + ) { + dataset.deleteMatches(term, undefined, undefined); + dataset.deleteMatches(undefined, undefined, term); + return true; + } + } + return true; + }, + }; +} diff --git a/packages/jsonld-dataset-proxy/src/arrayProxy/isArrayProxy.ts b/packages/jsonld-dataset-proxy/src/arrayProxy/isArrayProxy.ts new file mode 100644 index 0000000..17ce027 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/arrayProxy/isArrayProxy.ts @@ -0,0 +1,23 @@ +import { + _getNodeAtIndex, + _getUnderlyingArrayTarget, + _getUnderlyingDataset, + _getUnderlyingMatch, + _getUnderlyingNode, + _proxyContext, + _writeGraphs, +} from "../types"; +import type { ArrayProxy } from "./ArrayProxy"; + +export function isArrayProxy(someObject?: unknown): someObject is ArrayProxy { + if (!someObject) return false; + if (typeof someObject !== "object") return false; + const potentialArrayProxy = someObject as ArrayProxy; + + return !( + typeof potentialArrayProxy[_getUnderlyingDataset] !== "object" || + typeof potentialArrayProxy[_getUnderlyingMatch] !== "object" || + typeof potentialArrayProxy[_getNodeAtIndex] !== "function" || + typeof 
potentialArrayProxy[_getUnderlyingArrayTarget] !== "object" + ); +} diff --git a/packages/jsonld-dataset-proxy/src/arrayProxy/modifyArray.ts b/packages/jsonld-dataset-proxy/src/arrayProxy/modifyArray.ts new file mode 100644 index 0000000..fabdf2c --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/arrayProxy/modifyArray.ts @@ -0,0 +1,129 @@ +import { defaultGraph } from "@rdfjs/data-model"; +import type { Quad } from "@rdfjs/types"; +import { ProxyTransactionalDataset } from "o-dataset-pack"; +import { createExtendedDatasetFactory } from "o-dataset-pack/dist/createExtendedDataset"; +import type { ProxyContext } from "../ProxyContext"; +import type { ObjectType } from "../types"; +import { addObjectToDataset } from "../util/addObjectToDataset"; +import { + getNodeFromRawObject, + getNodeFromRawValue, +} from "../util/getNodeFromRaw"; +import { nodeToString } from "../util/NodeSet"; +import type { ObjectJsonRepresentation } from "../util/nodeToJsonldRepresentation"; +import type { RawObject, RawValue } from "../util/RawObject"; +import type { ArrayProxyTarget } from "./createArrayHandler"; + +export function checkArrayModification( + target: ArrayProxyTarget, + objectsToAdd: RawValue[], + proxyContext: ProxyContext, +) { + if (target[2]) { + for (const objectToAdd of objectsToAdd) { + // Undefined is fine no matter what + if (objectToAdd === undefined) { + return; + } + if (typeof objectToAdd !== "object") { + throw new Error( + `Cannot add a literal "${objectToAdd}"(${typeof objectToAdd}) to a subject-oriented collection.`, + ); + } + // Create a test dataset to see if the inputted data is valid + const testDataset = new ProxyTransactionalDataset( + proxyContext.dataset, + createExtendedDatasetFactory(), + ); + addObjectToDataset( + objectToAdd as RawObject, + false, + proxyContext.duplicate({ + writeGraphs: [defaultGraph()], + }), + ); + const isValidAddition = + testDataset.match( + getNodeFromRawObject(objectToAdd, proxyContext.contextUtil), + target[0][1], + 
target[0][2], + ).size !== 0; + if (!isValidAddition) { + throw new Error( + `Cannot add value to collection. This must contain a quad that matches (${nodeToString( + target[0][0], + )}, ${nodeToString(target[0][1])}, ${nodeToString( + target[0][2], + )}, ${nodeToString(target[0][3])})`, + ); + } + } + } else if (!target[0][0] || !target[0][1]) { + throw new Error( + "A collection that does not specify a match for both a subject or predicate cannot be modified directly.", + ); + } +} + +export function modifyArray( + config: { + target: ArrayProxyTarget; + key: string; + toAdd?: RawValue[]; + quadsToDelete?: (quads: Quad[]) => Quad[]; + modifyCoreArray: ( + coreArray: ArrayProxyTarget[1], + addedValues: ArrayProxyTarget[1], + ) => ReturnType; + }, + proxyContext: ProxyContext, +): ReturnType { + const { target, toAdd, quadsToDelete, modifyCoreArray, key } = config; + const { dataset, contextUtil } = proxyContext; + checkArrayModification(target, toAdd || [], proxyContext); + + // Remove appropriate Quads + if (quadsToDelete) { + const quadArr = dataset.match(...target[0]).toArray(); + const deleteQuadArr = quadsToDelete(quadArr); + // Filter out overlapping items + deleteQuadArr.forEach((delQuad) => { + if (target[2]) { + dataset.deleteMatches(delQuad.subject, undefined, undefined); + } else { + dataset.delete(delQuad); + } + }); + } + + // Add new items to the dataset + const added = toAdd + ?.map((item) => { + return typeof item === "object" + ? addObjectToDataset(item, false, proxyContext) + : item; + }) + .filter( + (val) => val != undefined, + ) as NonNullable[]; + if (!target[2] && target[0][0] && target[0][1] && added) { + addObjectToDataset( + { + "@id": target[0][0], + [contextUtil.iriToKey(target[0][1].value)]: added, + } as RawObject, + false, + proxyContext, + ); + } + const addedNodes = added + ? 
(added + .map((addedValue) => { + return getNodeFromRawValue(key, addedValue, proxyContext); + }) + .filter((val) => val != undefined) as ObjectType[]) + : []; + + // Allow the base array to be modified + return modifyCoreArray(target[1], addedNodes); +} diff --git a/packages/jsonld-dataset-proxy/src/graphOf.ts b/packages/jsonld-dataset-proxy/src/graphOf.ts new file mode 100644 index 0000000..97afba5 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/graphOf.ts @@ -0,0 +1,62 @@ +import { namedNode } from "@rdfjs/data-model"; +import { + getSubjectProxyFromObject, + isSubjectProxy, +} from "./subjectProxy/isSubjectProxy"; +import type { GraphType, ObjectLike, ObjectType } from "./types"; +import { + _getNodeAtIndex, + _getUnderlyingDataset, + _getUnderlyingMatch, + _getUnderlyingNode, + _proxyContext, +} from "./types"; + +/** + * Returns the graph for which a defined triple is a member + * @param subject A JsonldDatasetProxy that represents the subject + * @param predicate The key on the JsonldDatasetProxy + * @param object The direct object. This can be a JsonldDatasetProxy or the index + * @returns a list of graphs for which the triples are members + */ +export function graphOf( + subject: Subject, + predicate: Key, + object?: NonNullable extends Array + ? 
number | ObjectLike + : ObjectLike, +): GraphType[] { + const subjectProxy = getSubjectProxyFromObject(subject); + const proxyContext = subjectProxy[_proxyContext]; + const subjectNode = subjectProxy[_getUnderlyingNode]; + const predicateNode = namedNode( + proxyContext.contextUtil.keyToIri(predicate as string), + ); + let objectNode: ObjectType | null; + if (object == null) { + objectNode = null; + } else if (typeof object === "number") { + const proxyArray = subject[predicate]; + if (!proxyArray[_getUnderlyingMatch]) { + throw new Error( + `Key "${String(predicate)}" of ${subject} is not an array.`, + ); + } + if (!proxyArray[object]) { + throw new Error(`Index ${object} does not exist.`); + } + if (isSubjectProxy(proxyArray[object])) { + objectNode = proxyArray[object][1]; + } + objectNode = proxyArray[_getNodeAtIndex](object); + } else { + const objectProxy = getSubjectProxyFromObject(object); + objectNode = objectProxy[_getUnderlyingNode]; + } + const quads = subjectProxy[_getUnderlyingDataset].match( + subjectNode, + predicateNode, + objectNode, + ); + return quads.toArray().map((quad): GraphType => quad.graph as GraphType); +} diff --git a/packages/jsonld-dataset-proxy/src/index.ts b/packages/jsonld-dataset-proxy/src/index.ts new file mode 100644 index 0000000..a226868 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/index.ts @@ -0,0 +1,37 @@ +import { jsonldDatasetProxy } from "./jsonldDatasetProxy"; + +export default jsonldDatasetProxy; +export * from "./types"; +export * from "./ContextUtil"; +export * from "./ProxyContext"; +export * from "./JsonldDatasetProxyBuilder"; +export * from "./jsonldDatasetProxy"; +export * from "./write"; +export * from "./graphOf"; +export * from "./setLanguagePreferences"; + +export * from "./language/languagesOf"; +export * from "./language/languageMapProxy"; +export * from "./language/languageSet"; +export * from "./language/languageTypes"; +export * from "./language/languageUtils"; + +export * from 
"./arrayProxy/createArrayHandler"; +export * from "./arrayProxy/arrayMethods"; +export * from "./arrayProxy/ArrayProxy"; +export * from "./arrayProxy/modifyArray"; +export * from "./arrayProxy/isArrayProxy"; + +export * from "./subjectProxy/createSubjectHandler"; +export * from "./subjectProxy/SubjectProxy"; +export * from "./subjectProxy/getValueForKey"; +export * from "./subjectProxy/deleteFromDataset"; +export * from "./subjectProxy/isSubjectProxy"; + +export * from "./util/addObjectToDataset"; +export * from "./util/nodeToJsonldRepresentation"; +export * from "./util/RawObject"; +export * from "./util/getNodeFromRaw"; +export * from "./util/NodeSet"; +export * from "./util/isProxy"; +export * from "./util/createInteractOptions"; diff --git a/packages/jsonld-dataset-proxy/src/jsonldDatasetProxy.ts b/packages/jsonld-dataset-proxy/src/jsonldDatasetProxy.ts new file mode 100644 index 0000000..9c49be3 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/jsonldDatasetProxy.ts @@ -0,0 +1,27 @@ +import { defaultGraph } from "@rdfjs/data-model"; +import type { Dataset } from "@rdfjs/types"; +import type { ContextDefinition } from "jsonld"; +import { ContextUtil } from "./ContextUtil"; +import { JsonldDatasetProxyBuilder } from "./JsonldDatasetProxyBuilder"; +import { ProxyContext } from "./ProxyContext"; + +/** + * Creates a JSON-LD Dataset Proxy + * + * @param inputDataset the source dataset + * @param context JSON-LD Context + * @returns a JSON-LD Dataset proxy + */ +export function jsonldDatasetProxy( + inputDataset: Dataset, + context: ContextDefinition, +): JsonldDatasetProxyBuilder { + const contextUtil = new ContextUtil(context); + const proxyContext = new ProxyContext({ + dataset: inputDataset, + contextUtil, + writeGraphs: [defaultGraph()], + languageOrdering: ["none", "en", "other"], + }); + return new JsonldDatasetProxyBuilder(proxyContext); +} diff --git a/packages/jsonld-dataset-proxy/src/language/languageMapProxy.ts 
b/packages/jsonld-dataset-proxy/src/language/languageMapProxy.ts new file mode 100644 index 0000000..44fa2f8 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/language/languageMapProxy.ts @@ -0,0 +1,75 @@ +import { literal, quad } from "@rdfjs/data-model"; +import type { ProxyContext } from "../ProxyContext"; +import type { PredicateType, SubjectType } from "../types"; +import { + languageKeyToLiteralLanguage, + quadsToLanguageQuadMap, + languageDeleteMatch, +} from "./languageUtils"; +import type { LanguageMap, LanguageSetMap } from "./languagesOf"; +import LanguageSet from "./languageSet"; + +export function createLanguageMapProxy< + Target extends LanguageMap | LanguageSetMap, +>( + subject: SubjectType, + predicate: PredicateType, + proxyContext: ProxyContext, + isArray: boolean, +): Target { + const target: Target = {} as Target; + // Function to call to update the target to represent what's in the dataset + const targetSetter = (target: Target) => { + // Clear the target + Object.keys(target).forEach((key) => delete target[key]); + // Add current language map to target + const allQuads = proxyContext.dataset.match(subject, predicate); + const languageQuadMap = quadsToLanguageQuadMap(allQuads); + Object.entries(languageQuadMap).forEach(([language, quads]) => { + const stringArray = quads.toArray().map((quad) => quad.object.value); + if (isArray) { + target[language] = new Set(stringArray); + } else { + target[language] = stringArray[0]; + } + }); + }; + + targetSetter(target); + + return new Proxy(target, { + get: (target, key) => { + targetSetter(target); + if (typeof key !== "string") { + return Reflect.get(target, key); + } + if (isArray) { + return new LanguageSet(subject, predicate, key, proxyContext); + } + return Reflect.get(target, key); + }, + set: (target, key, value) => { + const language = languageKeyToLiteralLanguage(key); + // Delete all quads with the language currently + if (!isArray) { + languageDeleteMatch(proxyContext.dataset, subject, 
predicate, language); + } + // Add the new quad for the language + proxyContext.writeGraphs.forEach((writeGraph) => { + proxyContext.dataset.add( + quad(subject, predicate, literal(value, language), writeGraph), + ); + }); + return Reflect.set(target, key, value); + }, + deleteProperty: (target, key) => { + languageDeleteMatch( + proxyContext.dataset, + subject, + predicate, + languageKeyToLiteralLanguage(key), + ); + return Reflect.deleteProperty(target, key); + }, + }) as Target; +} diff --git a/packages/jsonld-dataset-proxy/src/language/languageSet.ts b/packages/jsonld-dataset-proxy/src/language/languageSet.ts new file mode 100644 index 0000000..8026424 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/language/languageSet.ts @@ -0,0 +1,129 @@ +import type { Dataset, Literal } from "@rdfjs/types"; +import type { PredicateType, SubjectType } from "../types"; +import type { LanguageKey } from "./languageTypes"; +import type { LiteralObjectQuad } from "./languageUtils"; +import { languageDeleteMatch, languageMatch } from "./languageUtils"; +import { literal, quad } from "@rdfjs/data-model"; +import type { ProxyContext } from "../ProxyContext"; + +export default class LanguageSet implements Set { + private subject: SubjectType; + private predicate: PredicateType; + private languageKey: LanguageKey; + private proxyContext: ProxyContext; + + constructor( + subject: SubjectType, + predicate: PredicateType, + languageKey: LanguageKey, + proxyContext: ProxyContext, + ) { + this.subject = subject; + this.predicate = predicate; + this.languageKey = languageKey; + this.proxyContext = proxyContext; + } + + private matchThis(): Dataset { + return languageMatch( + this.proxyContext.dataset, + this.subject, + this.predicate, + this.languageKey, + ); + } + + private getLiteral(value: string): Literal { + return this.languageKey === "@none" + ? 
literal(value) + : literal(value, this.languageKey); + } + + public get size(): number { + return this.matchThis().size; + } + + add(value: string): this { + this.proxyContext.writeGraphs.forEach((graph) => { + this.proxyContext.dataset.add( + quad( + this.subject, + this.predicate, + literal(value, this.languageKey), + graph, + ), + ); + }); + return this; + } + + clear(): void { + languageDeleteMatch( + this.proxyContext.dataset, + this.subject, + this.predicate, + this.languageKey, + ); + } + + delete(value: string): boolean { + const hadValue = this.has(value); + this.proxyContext.dataset.deleteMatches( + this.subject, + this.predicate, + this.getLiteral(value), + ); + return hadValue; + } + + forEach( + callbackfn: (value: string, value2: string, set: Set) => void, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + thisArg?: any, + ): void { + const quads = this.matchThis(); + quads.forEach((curQuad) => { + callbackfn(curQuad.object.value, curQuad.object.value, thisArg || this); + }); + } + + has(item: string): boolean { + return ( + this.proxyContext.dataset.match( + this.subject, + this.predicate, + this.getLiteral(item), + ).size > 0 + ); + } + + *entries(): IterableIterator<[string, string]> { + const quads = this.matchThis(); + for (const curQuad of quads) { + yield [curQuad.object.value, curQuad.object.value]; + } + } + + *keys(): IterableIterator { + const quads = this.matchThis(); + for (const curQuad of quads) { + yield curQuad.object.value; + } + } + + *values(): IterableIterator { + const quads = this.matchThis(); + for (const curQuad of quads) { + yield curQuad.object.value; + } + } + + *[Symbol.iterator](): IterableIterator { + const quads = this.matchThis(); + for (const curQuad of quads) { + yield curQuad.object.value; + } + } + + [Symbol.toStringTag] = "LanguageSet"; +} diff --git a/packages/jsonld-dataset-proxy/src/language/languageTypes.ts b/packages/jsonld-dataset-proxy/src/language/languageTypes.ts new file mode 100644 index 
0000000..0efff8e --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/language/languageTypes.ts @@ -0,0 +1,3 @@ +export type LanguageOrdering = ("@none" | "@other" | string)[]; + +export type LanguageKey = "@none" | string; diff --git a/packages/jsonld-dataset-proxy/src/language/languageUtils.ts b/packages/jsonld-dataset-proxy/src/language/languageUtils.ts new file mode 100644 index 0000000..35cb639 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/language/languageUtils.ts @@ -0,0 +1,159 @@ +import type { Dataset, Literal, Quad, Quad_Object } from "@rdfjs/types"; +import { createDataset } from "o-dataset-pack"; +import type { PredicateType, SubjectType } from "../types"; +import type { LanguageKey, LanguageOrdering } from "./languageTypes"; + +/** + * + * @param dataset + * @param subject + * @param predicate + * @param languageKey + * @returns + */ +export function languageMatch( + dataset: Dataset, + subject: SubjectType, + predicate: PredicateType, + languageKey: LanguageKey, +): Dataset { + const literalLanguage = languageKeyToLiteralLanguage(languageKey); + return dataset.match(subject, predicate).filter((quad) => { + return ( + isLanguageLiteral(quad.object) && quad.object.language === literalLanguage + ); + }) as Dataset; +} + +/** + * + * @param dataset + * @param subject + * @param predicate + * @param languageKey + */ +export function languageDeleteMatch( + dataset: Dataset, + subject: SubjectType, + predicate: PredicateType, + languageKey: LanguageKey, +): void { + const quadsToDelete = languageMatch(dataset, subject, predicate, languageKey); + quadsToDelete.forEach((quad) => { + dataset.delete(quad); + }); +} + +/** + * Given a node, will return true if that node is a literal that could have a + * language. This does not guarantee that it is a language literal. 
+ * @param node the node to test + * @returns boolean + */ +export function isLanguageLiteral(node: Quad_Object): node is Literal { + return ( + node.termType === "Literal" && + (node.datatype.value === + "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString" || + node.datatype.value === "http://www.w3.org/2001/XMLSchema#string") + ); +} + +export interface LiteralObjectQuad extends Quad { + object: Literal; +} + +export function quadsToLanguageQuadMap( + quads: Dataset, +): Record> { + const languageQuadMap: Record> = {}; + quads.forEach((quad) => { + const literal = quad.object; + if (isLanguageLiteral(literal)) { + const languageKey = literalLanguageToLanguageKey(literal.language); + if (!languageQuadMap[languageKey]) { + languageQuadMap[languageKey] = + createDataset() as Dataset; + } + languageQuadMap[languageKey].add(quad as LiteralObjectQuad); + } + }); + return languageQuadMap; +} + +export function filterQuadsByLanguageOrdering( + quads: Dataset, + languageOrdering: LanguageOrdering, +): Dataset { + const languageQuadMap = quadsToLanguageQuadMap(quads); + const validLanguages = new Set(languageOrdering); + const presentLanguages = new Set(Object.keys(languageQuadMap)); + for (const currentLanguageKey of languageOrdering) { + if (presentLanguages.has(currentLanguageKey)) { + return languageQuadMap[currentLanguageKey]; + } + if (currentLanguageKey === "@other") { + for (const presentLang of presentLanguages) { + if (!validLanguages.has(presentLang)) { + return languageQuadMap[presentLang]; + } + } + } + } + return createDataset(); +} + +export function getLanguageKeyForWriteOperation( + languageOrdering: LanguageOrdering, +): LanguageKey | undefined { + return languageOrdering.find((lang) => lang !== "@other"); +} + +// function addToDatasetMap( +// key: string, +// value: Quad, +// map: Record +// ) { +// if (!map[key]) { +// map[key] = createDataset(); +// } +// map[key].add(value); +// } + +// export function filterDatasetByLanguageOrdering( +// dataset: 
Dataset, +// proxyContext: ProxyContext +// ): Dataset { +// // TODO: This is an O(n) task that could be reduced to O(1) if we cached some +// // of the processing +// const validLangs = new Set(proxyContext.languageOrdering); +// const sortedLangs: Record = {}; +// dataset.forEach((quad) => { +// const literal = quad.object; +// if (isLangStringNode(literal)) { +// if (literal.language === "") { +// addToDatasetMap("@none", quad, sortedLangs); +// } else if (validLangs.has(literal.language)) { +// addToDatasetMap(literal.language, quad, sortedLangs); +// } else { +// addToDatasetMap("@other", quad, sortedLangs); +// } +// } +// }); +// for (const language of proxyContext.languageOrdering) { +// if (sortedLangs[language]) { +// return sortedLangs[language]; +// } +// } +// return createDataset(); +// } + +export function languageKeyToLiteralLanguage( + languageKey: string | symbol, +): string { + return (languageKey === "@none" ? "" : languageKey) as string; +} + +export function literalLanguageToLanguageKey(literalLanguage: string): string { + return literalLanguage === "" ? 
"@none" : literalLanguage; +} diff --git a/packages/jsonld-dataset-proxy/src/language/languagesOf.ts b/packages/jsonld-dataset-proxy/src/language/languagesOf.ts new file mode 100644 index 0000000..0a111b2 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/language/languagesOf.ts @@ -0,0 +1,61 @@ +import { namedNode } from "@rdfjs/data-model"; +import { getSubjectProxyFromObject } from "../subjectProxy/isSubjectProxy"; +import type { ObjectLike } from "../types"; +import { _getUnderlyingNode, _proxyContext } from "../types"; +import { createLanguageMapProxy } from "./languageMapProxy"; + +/** + * ----------------------------------------------------------------------------- + * Types + * ----------------------------------------------------------------------------- + */ + +export type LanguageMap = { + "@none"?: string; + [language: string]: string | undefined; +}; + +export type LanguageSetMap = { + "@none"?: LanguageSet; + [language: string]: LanguageSet | undefined; +}; + +export type LanguageSet = Set; + +export type LanguageOfConditionalReturn< + SubjectObject extends ObjectLike, + Key extends keyof SubjectObject, +> = NonNullable extends Array + ? 
LanguageSetMap + : LanguageMap; + +/** + * ----------------------------------------------------------------------------- + * Functions + * ----------------------------------------------------------------------------- + */ + +/** + * + * @param subject + * @param predicate + * @returns + */ +export function languagesOf< + SubjectObject extends ObjectLike, + Key extends keyof SubjectObject, +>( + subjectObject: SubjectObject, + key: Key, +): LanguageOfConditionalReturn { + const proxy = getSubjectProxyFromObject(subjectObject); + const proxyContext = proxy[_proxyContext]; + const subject = proxy[_getUnderlyingNode]; + const predicate = namedNode(proxyContext.contextUtil.keyToIri(key as string)); + return createLanguageMapProxy( + subject, + predicate, + proxyContext, + proxyContext.contextUtil.isArray(key as string), + ) as LanguageOfConditionalReturn; +} diff --git a/packages/jsonld-dataset-proxy/src/setLanguagePreferences.ts b/packages/jsonld-dataset-proxy/src/setLanguagePreferences.ts new file mode 100644 index 0000000..a6541c3 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/setLanguagePreferences.ts @@ -0,0 +1,14 @@ +import type { LanguageOrdering } from "./language/languageTypes"; +import type { InteractOptions } from "./util/createInteractOptions"; +import { createInteractOptions } from "./util/createInteractOptions"; + +/** + * Set the default language pr + * @param graphs The graphs that should be written to + * @returns a write builder + */ +export function setLanguagePreferences( + ...languageOrdering: LanguageOrdering +): InteractOptions { + return createInteractOptions("languageOrdering", languageOrdering); +} diff --git a/packages/jsonld-dataset-proxy/src/subjectProxy/SubjectProxy.ts b/packages/jsonld-dataset-proxy/src/subjectProxy/SubjectProxy.ts new file mode 100644 index 0000000..1c22472 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/subjectProxy/SubjectProxy.ts @@ -0,0 +1,20 @@ +import type { BlankNode, Dataset, NamedNode } from 
"@rdfjs/types"; +import type { ContextDefinition } from "jsonld"; +import type { ProxyContext } from "../ProxyContext"; +import type { + GraphType, + _getUnderlyingDataset, + _getUnderlyingNode, + _proxyContext, + _writeGraphs, +} from "../types"; + +export type SubjectProxy = { + "@id"?: string; + "@context": ContextDefinition; + readonly [key: string | number | symbol]: unknown; + readonly [_getUnderlyingDataset]: Dataset; + readonly [_getUnderlyingNode]: NamedNode | BlankNode; + [_proxyContext]: ProxyContext; + readonly [_writeGraphs]: GraphType[]; +}; diff --git a/packages/jsonld-dataset-proxy/src/subjectProxy/createSubjectHandler.ts b/packages/jsonld-dataset-proxy/src/subjectProxy/createSubjectHandler.ts new file mode 100644 index 0000000..bbf9df0 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/subjectProxy/createSubjectHandler.ts @@ -0,0 +1,106 @@ +import type { BlankNode, NamedNode } from "@rdfjs/types"; +import { namedNode, quad } from "@rdfjs/data-model"; +import { addObjectToDataset } from "../util/addObjectToDataset"; +import { deleteValueFromDataset } from "./deleteFromDataset"; +import { + _getUnderlyingDataset, + _getUnderlyingNode, + _proxyContext, + _writeGraphs, +} from "../types"; +import { getValueForKey } from "./getValueForKey"; +import type { ProxyContext } from "../ProxyContext"; + +export interface SubjectProxyTarget { + "@id": NamedNode | BlankNode; +} + +export function createSubjectHandler( + initialProxyContext: ProxyContext, +): ProxyHandler { + let proxyContext = initialProxyContext; + return { + get(target: SubjectProxyTarget, key: string | symbol) { + switch (key) { + case _getUnderlyingDataset: + return proxyContext.dataset; + case _getUnderlyingNode: + return target["@id"]; + case _proxyContext: + return proxyContext; + case _writeGraphs: + return proxyContext.writeGraphs; + case "@context": + return proxyContext.contextUtil.context; + } + return getValueForKey(target, key, proxyContext); + }, + 
getOwnPropertyDescriptor(target: SubjectProxyTarget, key: string) { + return { + value: getValueForKey(target, key, proxyContext), + writable: true, + enumerable: true, + configurable: true, + }; + }, + ownKeys(target) { + const subject = target["@id"]; + const tripleDataset = proxyContext.dataset.match(subject); + const keys: Set = new Set(["@id"]); + tripleDataset.toArray().forEach((quad) => { + keys.add(proxyContext.contextUtil.iriToKey(quad.predicate.value)); + }); + return Array.from(keys); + }, + set: (target: SubjectProxyTarget, key, value) => { + if (key === _proxyContext) { + proxyContext = value; + return true; + } + if (key === "@id" && typeof value === "string") { + // Replace Subject Quads + const currentSubjectQuads = proxyContext.dataset + .match(target["@id"]) + .toArray(); + const newSubjectQuads = currentSubjectQuads.map((curQuad) => + quad( + namedNode(value), + curQuad.predicate, + curQuad.object, + curQuad.graph, + ), + ); + currentSubjectQuads.forEach((curQuad) => + proxyContext.dataset.delete(curQuad), + ); + proxyContext.dataset.addAll(newSubjectQuads); + // Replace Object Quads + const currentObjectQuads = proxyContext.dataset + .match(undefined, undefined, target["@id"]) + .toArray(); + const newObjectQuads = currentObjectQuads.map((curQuad) => + quad( + curQuad.subject, + curQuad.predicate, + namedNode(value), + curQuad.graph, + ), + ); + currentObjectQuads.forEach((curQuad) => + proxyContext.dataset.delete(curQuad), + ); + proxyContext.dataset.addAll(newObjectQuads); + target["@id"] = namedNode(value); + } + addObjectToDataset( + { "@id": target["@id"], [key]: value }, + true, + proxyContext, + ); + return true; + }, + deleteProperty(target, key) { + return deleteValueFromDataset(target, key, proxyContext); + }, + }; +} diff --git a/packages/jsonld-dataset-proxy/src/subjectProxy/deleteFromDataset.ts b/packages/jsonld-dataset-proxy/src/subjectProxy/deleteFromDataset.ts new file mode 100644 index 0000000..381c226 --- /dev/null +++ 
b/packages/jsonld-dataset-proxy/src/subjectProxy/deleteFromDataset.ts @@ -0,0 +1,44 @@ +import type { Term } from "@rdfjs/types"; +import { namedNode, quad } from "@rdfjs/data-model"; +import type { SubjectProxyTarget } from "./createSubjectHandler"; +import type { ProxyContext } from "../ProxyContext"; + +export function deleteValueFromDataset( + target: SubjectProxyTarget, + key: string | symbol, + proxyContext: ProxyContext, +) { + const nodesToRemove: Term[] = []; + if (key === "@context") { + return true; + } + if (key === "toString" || key === Symbol.toStringTag) { + return true; + } + if (typeof key === "symbol") { + return true; + } + const subject = target["@id"]; + const predicate = namedNode(proxyContext.contextUtil.keyToIri(key)); + if (key === "@id") { + nodesToRemove.push(target["@id"]); + } else { + const objectDataset = proxyContext.dataset.match(subject, predicate); + if (objectDataset.size === 0) { + return true; + } else { + nodesToRemove.push(...objectDataset.toArray().map((quad) => quad.object)); + } + } + nodesToRemove.forEach((term) => { + if (term.termType === "Literal") { + proxyContext.dataset.delete(quad(subject, predicate, term)); + return true; + } else if (term.termType === "NamedNode") { + proxyContext.dataset.deleteMatches(term, undefined, undefined); + proxyContext.dataset.deleteMatches(undefined, undefined, term); + return true; + } + }); + return true; +} diff --git a/packages/jsonld-dataset-proxy/src/subjectProxy/getValueForKey.ts b/packages/jsonld-dataset-proxy/src/subjectProxy/getValueForKey.ts new file mode 100644 index 0000000..37f5141 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/subjectProxy/getValueForKey.ts @@ -0,0 +1,62 @@ +import type { SubjectProxyTarget } from "./createSubjectHandler"; +import { namedNode } from "@rdfjs/data-model"; +import { nodeToJsonldRepresentation } from "../util/nodeToJsonldRepresentation"; +import type { SubjectProxy } from "./SubjectProxy"; +import type { ArrayProxy } from 
"../arrayProxy/ArrayProxy"; +import type { ProxyContext } from "../ProxyContext"; +import { filterQuadsByLanguageOrdering } from "../language/languageUtils"; + +/** + * Given a subject target and a key return the correct value + */ +export function getValueForKey( + target: SubjectProxyTarget, + key: string | symbol, + proxyContext: ProxyContext, +): SubjectProxy | ArrayProxy | string | number | boolean | undefined { + const { contextUtil, dataset } = proxyContext; + if (key === "@id") { + if (target["@id"].termType === "BlankNode") { + return undefined; + } + return contextUtil.iriToKey(target["@id"].value); + } + if (key === "toString" || key === Symbol.toStringTag) { + // TODO: this toString method right now returns [object Object], + // which is correct, but it could be more descriptive, especially + // because console.log doesn't return anything helpful due to the proxy. + return Reflect.get(target, "toString"); + } + if (typeof key === "symbol") { + return; + } + const subject = target["@id"]; + const predicate = namedNode(contextUtil.keyToIri(key)); + if (contextUtil.isArray(key)) { + const arrayProxy = proxyContext.createArrayProxy( + [subject, predicate, null, null], + false, + undefined, + contextUtil.isLangString(key), + ); + return arrayProxy; + } + let objectDataset = dataset.match(subject, predicate); + if (contextUtil.isLangString(key)) { + objectDataset = filterQuadsByLanguageOrdering( + objectDataset, + proxyContext.languageOrdering, + ); + } + if (objectDataset.size === 0) { + return undefined; + } else if (objectDataset.size === 1) { + const thing = nodeToJsonldRepresentation( + objectDataset.toArray()[0].object, + proxyContext, + ); + return thing; + } else { + return proxyContext.createArrayProxy([subject, predicate, null, null]); + } +} diff --git a/packages/jsonld-dataset-proxy/src/subjectProxy/isSubjectProxy.ts b/packages/jsonld-dataset-proxy/src/subjectProxy/isSubjectProxy.ts new file mode 100644 index 0000000..d10aac3 --- /dev/null +++
b/packages/jsonld-dataset-proxy/src/subjectProxy/isSubjectProxy.ts @@ -0,0 +1,30 @@ +import type { ObjectLike } from "../types"; +import { + _getUnderlyingDataset, + _getUnderlyingNode, + _proxyContext, + _writeGraphs, +} from "../types"; +import type { SubjectProxy } from "./SubjectProxy"; + +export function isSubjectProxy( + someObject?: unknown, +): someObject is SubjectProxy { + if (!someObject) return false; + if (typeof someObject !== "object") return false; + const potentialSubjectProxy = someObject as SubjectProxy; + return !( + typeof potentialSubjectProxy[_writeGraphs] !== "object" || + typeof potentialSubjectProxy[_getUnderlyingDataset] !== "object" || + typeof potentialSubjectProxy[_getUnderlyingNode] !== "object" || + typeof potentialSubjectProxy[_proxyContext] !== "object" + ); +} + +export function getSubjectProxyFromObject(object: ObjectLike): SubjectProxy { + const potentialSubjectProxy = object as SubjectProxy; + if (!isSubjectProxy(potentialSubjectProxy)) { + throw new Error(`${object} is not a Jsonld Dataset Proxy Subject`); + } + return potentialSubjectProxy; +} diff --git a/packages/jsonld-dataset-proxy/src/types.ts b/packages/jsonld-dataset-proxy/src/types.ts new file mode 100644 index 0000000..b012d5e --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/types.ts @@ -0,0 +1,25 @@ +import type { BlankNode, DefaultGraph, Literal, NamedNode } from "@rdfjs/types"; + +export const _getUnderlyingNode = Symbol("_getUnderlyingNode"); +export const _getUnderlyingMatch = Symbol("_getUnderlyingMatch"); +export const _isSubjectOriented = Symbol("_isSubjectOriented"); +export const _getNodeAtIndex = Symbol("_getNodeAtIndex"); +export const _getUnderlyingDataset = Symbol("_getUnderlyingDataset"); +export const _getUnderlyingArrayTarget = Symbol("_getUnderlyingArrayTarget"); +export const _proxyContext = Symbol("_proxyContext"); +export const _writeGraphs = Symbol("_writeGraphs"); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export 
type ObjectLike = Record; + +export type SubjectType = NamedNode | BlankNode; +export type PredicateType = NamedNode; +export type ObjectType = NamedNode | BlankNode | Literal; +export type GraphType = NamedNode | BlankNode | DefaultGraph; + +export type QuadMatch = [ + SubjectType | undefined | null, + PredicateType | undefined | null, + ObjectType | undefined | null, + GraphType | undefined | null, +]; diff --git a/packages/jsonld-dataset-proxy/src/util/NodeSet.ts b/packages/jsonld-dataset-proxy/src/util/NodeSet.ts new file mode 100644 index 0000000..271c07b --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/util/NodeSet.ts @@ -0,0 +1,47 @@ +import type { BlankNode, DefaultGraph, Literal, NamedNode } from "@rdfjs/types"; +import type { ObjectType } from "../types"; + +export function nodeToString( + node: NamedNode | BlankNode | DefaultGraph | Literal | null | undefined, +): string { + if (node == null) { + return "null"; + } + switch (node.termType) { + case "NamedNode": + return `namedNode(${node.value})`; + case "BlankNode": + return `blankNode(${node.value})`; + case "Literal": + return `literal(${node.value},${node.datatype.value})`; + case "DefaultGraph": + return "defaultGraph()"; + } +} + +export class NodeSet { + private set: Set = new Set(); + private map: Record = {}; + + add(node: ObjectType) { + const key = nodeToString(node); + this.set.add(key); + this.map[key] = node; + } + + has(node: ObjectType): boolean { + return this.set.has(nodeToString(node)); + } + + delete(node: ObjectType) { + const key = nodeToString(node); + delete this.map[key]; + return this.set.delete(nodeToString(node)); + } + + toArray() { + return Array.from(this.set).map((stringVal) => { + return this.map[stringVal]; + }); + } +} diff --git a/packages/jsonld-dataset-proxy/src/util/RawObject.ts b/packages/jsonld-dataset-proxy/src/util/RawObject.ts new file mode 100644 index 0000000..2d710b4 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/util/RawObject.ts @@ -0,0 +1,13 
@@ +import type { BlankNode, NamedNode } from "@rdfjs/types"; +import { _getUnderlyingNode } from "../types"; +import type { SubjectProxy } from "../subjectProxy/SubjectProxy"; + +export type RawObject = + | ({ + "@id"?: string | NamedNode | BlankNode; + } & { + [key: string | symbol | number]: RawValue | RawValue[]; + }) + | SubjectProxy; + +export type RawValue = string | boolean | number | RawObject | undefined; diff --git a/packages/jsonld-dataset-proxy/src/util/addObjectToDataset.ts b/packages/jsonld-dataset-proxy/src/util/addObjectToDataset.ts new file mode 100644 index 0000000..7252503 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/util/addObjectToDataset.ts @@ -0,0 +1,140 @@ +import type { BlankNode, NamedNode } from "@rdfjs/types"; +import { literal, namedNode, quad } from "@rdfjs/data-model"; +import { _getUnderlyingNode } from "../types"; +import type { SubjectProxy } from "../subjectProxy/SubjectProxy"; +import { getNodeFromRawObject, getNodeFromRawValue } from "./getNodeFromRaw"; +import type { RawObject, RawValue } from "./RawObject"; +import type { ProxyContext } from "../ProxyContext"; +import { isSubjectProxy } from "../subjectProxy/isSubjectProxy"; +import { NodeSet } from "./NodeSet"; +import { + getLanguageKeyForWriteOperation, + languageDeleteMatch, + languageKeyToLiteralLanguage, +} from "../language/languageUtils"; + +export function addRawValueToDatasetRecursive( + subject: NamedNode | BlankNode, + key: string, + value: RawValue, + visitedObjects: NodeSet, + shouldDeleteOldTriples: boolean, + proxyContext: ProxyContext, +): void { + const { dataset, contextUtil } = proxyContext; + const predicate = namedNode(contextUtil.keyToIri(key)); + // Get the Object Node + const object = getNodeFromRawValue(key, value, proxyContext); + if (object == undefined) { + dataset.deleteMatches(subject, predicate); + } else if (object.termType === "Literal") { + let languageAppliedObject = object; + // Handle language use case + if 
(contextUtil.isLangString(key)) { + const languageKey = getLanguageKeyForWriteOperation( + proxyContext.languageOrdering, + ); + if (!languageKey) return; + languageAppliedObject = literal( + object.value, + languageKeyToLiteralLanguage(languageKey), + ); + } + proxyContext.writeGraphs.forEach((graph) => { + proxyContext.dataset.add( + quad(subject, predicate, languageAppliedObject, graph), + ); + }); + } else { + // Delete any triples if the id is the same + if (!visitedObjects.has(object) && !isSubjectProxy(value)) { + dataset.deleteMatches(object, undefined, undefined); + } + proxyContext.writeGraphs.forEach((graph) => { + dataset.add(quad(subject, predicate, object, graph)); + }); + if (!isSubjectProxy(value)) { + const updateData: RawObject = ( + typeof value === "object" + ? { ...value, "@id": object } + : { "@id": object } + ) as RawObject; + addRawObjectToDatasetRecursive( + updateData, + visitedObjects, + shouldDeleteOldTriples, + proxyContext, + ); + } + } +} + +export function addRawObjectToDatasetRecursive( + item: RawObject, + visitedObjects: NodeSet, + shouldDeleteOldTriples: boolean, + proxyContext: ProxyContext, +): SubjectProxy { + if (isSubjectProxy(item)) { + return item as SubjectProxy; + } + const { dataset } = proxyContext; + const subject = getNodeFromRawObject(item, proxyContext.contextUtil); + if (visitedObjects.has(subject)) { + return proxyContext.createSubjectProxy(subject); + } + visitedObjects.add(subject); + Object.entries(item).forEach(([key, value]) => { + if (key === "@id") { + return; + } + const predicate = namedNode(proxyContext.contextUtil.keyToIri(key)); + if (shouldDeleteOldTriples) { + if (proxyContext.contextUtil.isLangString(key)) { + const languageKey = getLanguageKeyForWriteOperation( + proxyContext.languageOrdering, + ); + if (languageKey) { + languageDeleteMatch(dataset, subject, predicate, languageKey); + } + } else { + dataset.deleteMatches(subject, predicate); + } + } + if (Array.isArray(value)) { + 
value.forEach((valueItem) => { + addRawValueToDatasetRecursive( + subject, + key, + valueItem, + visitedObjects, + true, + proxyContext, + ); + }); + } else { + addRawValueToDatasetRecursive( + subject, + key, + value as RawValue, + visitedObjects, + true, + proxyContext, + ); + } + }); + return proxyContext.createSubjectProxy(subject); +} + +export function addObjectToDataset( + item: RawObject, + shouldDeleteOldTriples: boolean, + proxyContext: ProxyContext, +): SubjectProxy { + return addRawObjectToDatasetRecursive( + item, + new NodeSet(), + shouldDeleteOldTriples, + proxyContext, + ); +} diff --git a/packages/jsonld-dataset-proxy/src/util/createInteractOptions.ts b/packages/jsonld-dataset-proxy/src/util/createInteractOptions.ts new file mode 100644 index 0000000..7821695 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/util/createInteractOptions.ts @@ -0,0 +1,55 @@ +import { getSubjectProxyFromObject } from "../subjectProxy/isSubjectProxy"; +import type { ObjectLike } from "../types"; +import { _getUnderlyingNode, _proxyContext } from "../types"; +import { getProxyFromObject } from "./isProxy"; + +export interface InteractOptions { + /** + * Given a dataset proxy, this will set the action on that dataset proxy + * @param objects Any number of dataset proxies + * @returns An end function. 
Call this to reset the interaction + */ + using(...objects: ObjectLike[]): () => void; + /** + * Given a dataset proxy this will copy the dataset proxy and set the action + * on the copy + * @param objects Any number of dataset proxies + * @returns cloned dataset proxies + */ + usingCopy(...objects: T[]): T[]; +} + +export function createInteractOptions( + paramKey: string, + parameter: unknown, +): InteractOptions { + return { + using(...objects: ObjectLike[]): () => void { + const onEndFunctions: (() => void)[] = []; + objects.forEach((object) => { + const proxy = getProxyFromObject(object); + const oldProxyContext = proxy[_proxyContext]; + proxy[_proxyContext] = proxy[_proxyContext].duplicate({ + [paramKey]: parameter, + }); + onEndFunctions.push(() => { + proxy[_proxyContext] = oldProxyContext; + }); + }); + return function endWrite() { + onEndFunctions.forEach((func) => func()); + }; + }, + usingCopy(...objects: T[]): T[] { + return objects.map((object) => { + const proxy = getSubjectProxyFromObject(object); + const newProxyContext = proxy[_proxyContext].duplicate({ + [paramKey]: parameter, + }); + return newProxyContext.createSubjectProxy( + proxy[_getUnderlyingNode], + ) as unknown as T; + }); + }, + }; +} diff --git a/packages/jsonld-dataset-proxy/src/util/getNodeFromRaw.ts b/packages/jsonld-dataset-proxy/src/util/getNodeFromRaw.ts new file mode 100644 index 0000000..4ea9a73 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/util/getNodeFromRaw.ts @@ -0,0 +1,45 @@ +import type { BlankNode, Literal, NamedNode } from "@rdfjs/types"; +import { namedNode, literal, blankNode } from "@rdfjs/data-model"; +import type { ContextUtil } from "../ContextUtil"; +import { _getUnderlyingNode } from "../types"; +import type { RawObject, RawValue } from "./RawObject"; +import type { ProxyContext } from "../ProxyContext"; + +export function getNodeFromRawObject( + item: RawObject, + contextUtil: ContextUtil, +): NamedNode | BlankNode { + if (item[_getUnderlyingNode]) {
+ return item[_getUnderlyingNode] as NamedNode | BlankNode; + } else if (!item["@id"]) { + return blankNode(); + } else if (typeof item["@id"] === "string") { + return namedNode(contextUtil.keyToIri(item["@id"])); + } else { + return item["@id"]; + } +} + +export function getNodeFromRawValue( + key: string, + value: RawValue, + proxyContext: ProxyContext, +): BlankNode | NamedNode | Literal | undefined { + // Get the Object Node + if (value == undefined) { + return undefined; + } else if ( + typeof value === "string" || + typeof value === "boolean" || + typeof value === "number" + ) { + const datatype = proxyContext.contextUtil.getType(key); + if (datatype === "@id") { + return namedNode(value.toString()); + } else { + return literal(value.toString(), datatype); + } + } else { + return getNodeFromRawObject(value, proxyContext.contextUtil); + } +} diff --git a/packages/jsonld-dataset-proxy/src/util/isProxy.ts b/packages/jsonld-dataset-proxy/src/util/isProxy.ts new file mode 100644 index 0000000..776cbcb --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/util/isProxy.ts @@ -0,0 +1,20 @@ +import type { ArrayProxy } from "../arrayProxy/ArrayProxy"; +import { isArrayProxy } from "../arrayProxy/isArrayProxy"; +import { isSubjectProxy } from "../subjectProxy/isSubjectProxy"; +import type { SubjectProxy } from "../subjectProxy/SubjectProxy"; +import type { ObjectLike } from "../types"; + +export function isProxy( + someObject?: unknown, +): someObject is ArrayProxy | SubjectProxy { + return isSubjectProxy(someObject) || isArrayProxy(someObject); +} + +export function getProxyFromObject( + object: ObjectLike | ObjectLike[], +): SubjectProxy | ArrayProxy { + if (!isProxy(object)) { + throw new Error(`${object} is not a Jsonld Dataset Proxy`); + } + return object; +} diff --git a/packages/jsonld-dataset-proxy/src/util/nodeToJsonldRepresentation.ts b/packages/jsonld-dataset-proxy/src/util/nodeToJsonldRepresentation.ts new file mode 100644 index 0000000..d1dcd08 --- 
/dev/null +++ b/packages/jsonld-dataset-proxy/src/util/nodeToJsonldRepresentation.ts @@ -0,0 +1,76 @@ +import type { Literal, Quad_Object } from "@rdfjs/types"; +import type { ProxyContext } from "../ProxyContext"; +import type { SubjectProxy } from "../subjectProxy/SubjectProxy"; + +export type ObjectJsonRepresentation = string | number | boolean | SubjectProxy; + +export function literalToJsonldRepresentation(literal: Literal) { + switch (literal.datatype.value) { + case "http://www.w3.org/2001/XMLSchema#string": + case "http://www.w3.org/2001/XMLSchema#ENTITIES": + case "http://www.w3.org/2001/XMLSchema#ENTITY": + case "http://www.w3.org/2001/XMLSchema#ID": + case "http://www.w3.org/2001/XMLSchema#IDREF": + case "http://www.w3.org/2001/XMLSchema#IDREFS": + case "http://www.w3.org/2001/XMLSchema#language": + case "http://www.w3.org/2001/XMLSchema#Name": + case "http://www.w3.org/2001/XMLSchema#NCName": + case "http://www.w3.org/2001/XMLSchema#NMTOKEN": + case "http://www.w3.org/2001/XMLSchema#NMTOKENS": + case "http://www.w3.org/2001/XMLSchema#normalizedString": + case "http://www.w3.org/2001/XMLSchema#QName": + case "http://www.w3.org/2001/XMLSchema#token": + return literal.value; + case "http://www.w3.org/2001/XMLSchema#date": + case "http://www.w3.org/2001/XMLSchema#dateTime": + case "http://www.w3.org/2001/XMLSchema#duration": + case "http://www.w3.org/2001/XMLSchema#gDay": + case "http://www.w3.org/2001/XMLSchema#gMonth": + case "http://www.w3.org/2001/XMLSchema#gMonthDay": + case "http://www.w3.org/2001/XMLSchema#gYear": + case "http://www.w3.org/2001/XMLSchema#gYearMonth": + case "http://www.w3.org/2001/XMLSchema#time": + return literal.value; + case "http://www.w3.org/2001/XMLSchema#integer": + case "http://www.w3.org/2001/XMLSchema#byte": + case "http://www.w3.org/2001/XMLSchema#decimal": + case "http://www.w3.org/2001/XMLSchema#int": + case "http://www.w3.org/2001/XMLSchema#long": + case "http://www.w3.org/2001/XMLSchema#negativeInteger": + case 
"http://www.w3.org/2001/XMLSchema#nonNegativeInteger": + case "http://www.w3.org/2001/XMLSchema#nonPositiveInteger": + case "http://www.w3.org/2001/XMLSchema#positiveInteger": + case "http://www.w3.org/2001/XMLSchema#short": + case "http://www.w3.org/2001/XMLSchema#unsignedLong": + case "http://www.w3.org/2001/XMLSchema#unsignedInt": + case "http://www.w3.org/2001/XMLSchema#unsignedShort": + case "http://www.w3.org/2001/XMLSchema#unsignedByte": + return parseFloat(literal.value); + case "http://www.w3.org/2001/XMLSchema#boolean": + return literal.value === "true"; + case "http://www.w3.org/2001/XMLSchema#hexBinary": + return literal.value; + case "http://www.w3.org/2001/XMLSchema#anyURI": + return literal.value; + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#HTML": + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#PlainLiteral": + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral": + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#JSON": + return literal.value; + default: + return literal.value; + } +} + +export function nodeToJsonldRepresentation( + node: Quad_Object, + proxyContext: ProxyContext, +): string | number | boolean | SubjectProxy { + if (node.termType === "Literal") { + return literalToJsonldRepresentation(node); + } else if (node.termType === "NamedNode" || node.termType === "BlankNode") { + return proxyContext.createSubjectProxy(node); + } else { + throw new Error("Can only convert NamedNodes or Literals or BlankNodes"); + } +} diff --git a/packages/jsonld-dataset-proxy/src/write.ts b/packages/jsonld-dataset-proxy/src/write.ts new file mode 100644 index 0000000..b0d90d4 --- /dev/null +++ b/packages/jsonld-dataset-proxy/src/write.ts @@ -0,0 +1,12 @@ +import type { GraphType } from "./types"; +import type { InteractOptions } from "./util/createInteractOptions"; +import { createInteractOptions } from "./util/createInteractOptions"; + +/** + * Set the graphs that should be written to + * @param graphs The graphs that should be written to 
+ * @returns a write builder + */ +export function write(...graphs: GraphType[]): InteractOptions { + return createInteractOptions("writeGraphs", graphs); +} diff --git a/packages/jsonld-dataset-proxy/test/ContextUtil.test.ts b/packages/jsonld-dataset-proxy/test/ContextUtil.test.ts new file mode 100644 index 0000000..eafff63 --- /dev/null +++ b/packages/jsonld-dataset-proxy/test/ContextUtil.test.ts @@ -0,0 +1,39 @@ +import { ContextUtil } from "../src/ContextUtil"; + +describe("ContextUtil", () => { + describe("keyToIri and iriToKey", () => { + it("handles a context that is simply a string map", () => { + const fakeContext = { + name: "http://hl7.org/fhir/name", + }; + const contextUtil = new ContextUtil(fakeContext); + expect(contextUtil.keyToIri("name")).toBe("http://hl7.org/fhir/name"); + }); + + it("returns the given key if it is not in the context", () => { + const contextUtil = new ContextUtil({}); + expect(contextUtil.keyToIri("name")).toBe("name"); + expect(contextUtil.iriToKey("http://hl7.org/fhir/name")).toBe( + "http://hl7.org/fhir/name", + ); + }); + + it("handles a context that existsm, but does not have an id", () => { + const contextUtil = new ContextUtil({ + name: { "@type": "http://www.w3.org/2001/XMLSchema#string" }, + }); + expect(contextUtil.keyToIri("name")).toBe("name"); + }); + }); + + describe("getType", () => { + it("returns xsd:string if no type is provided", () => { + const contextUtil = new ContextUtil({ + name: { "@id": "http://hl7.org/fhir/name" }, + }); + expect(contextUtil.getType("name")).toBe( + "http://www.w3.org/2001/XMLSchema#string", + ); + }); + }); +}); diff --git a/packages/jsonld-dataset-proxy/test/isProxy.test.ts b/packages/jsonld-dataset-proxy/test/isProxy.test.ts new file mode 100644 index 0000000..133a2a0 --- /dev/null +++ b/packages/jsonld-dataset-proxy/test/isProxy.test.ts @@ -0,0 +1,36 @@ +import { + getProxyFromObject, + getSubjectProxyFromObject, + isArrayProxy, + isSubjectProxy, +} from "../src"; + 
+describe("isSubjectProxy", () => { + it("returns false if undefined is passed as a parameter", () => { + expect(isSubjectProxy(undefined)).toBe(false); + }); + + it("throws an error if the given object isn't a subject proxy", () => { + expect(() => getSubjectProxyFromObject({ cool: "bean" })).toThrowError( + `[object Object] is not a Jsonld Dataset Proxy`, + ); + }); +}); + +describe("isProxy", () => { + it("throws an error if the given object isn't a proxy", () => { + expect(() => getProxyFromObject({ cool: "bean" })).toThrowError( + `[object Object] is not a Jsonld Dataset Proxy`, + ); + }); +}); + +describe("isArrayProxy", () => { + it("returns false if undefined is passed as a parameter", () => { + expect(isArrayProxy(undefined)).toBe(false); + }); + + it("returns false if string is passed as a parameter", () => { + expect(isArrayProxy("hello")).toBe(false); + }); +}); diff --git a/packages/jsonld-dataset-proxy/test/jsonldDatasetProxy.test.ts b/packages/jsonld-dataset-proxy/test/jsonldDatasetProxy.test.ts new file mode 100644 index 0000000..aacb30e --- /dev/null +++ b/packages/jsonld-dataset-proxy/test/jsonldDatasetProxy.test.ts @@ -0,0 +1,1659 @@ +import { createDataset, serializedToDataset } from "o-dataset-pack"; +import type { JsonldDatasetProxyBuilder, LanguageSet } from "../src"; +import { + graphOf, + jsonldDatasetProxy, + languagesOf, + setLanguagePreferences, + write, + _getNodeAtIndex, + _getUnderlyingArrayTarget, + _getUnderlyingDataset, + _getUnderlyingMatch, + _getUnderlyingNode, + _isSubjectOriented, + _proxyContext, + _writeGraphs, +} from "../src"; +import type { ObservationShape, PatientShape } from "./patientExampleData"; +import { + patientData, + patientContext, + tinyPatientData, + tinyArrayPatientData, + patientDataWithBlankNodes, + tinyPatientDataWithBlankNodes, + tinyPatientDataWithLanguageTags, +} from "./patientExampleData"; +import { namedNode, quad, literal, defaultGraph } from "@rdfjs/data-model"; +import type { Dataset, NamedNode 
} from "@rdfjs/types"; +import type { ContextDefinition } from "jsonld"; + +describe("jsonldDatasetProxy", () => { + async function getLoadedDataset(): Promise< + [Dataset, ObservationShape, JsonldDatasetProxyBuilder] + > { + const dataset = await serializedToDataset(patientData); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Observation1")), + builder, + ]; + } + + async function getLoadedDatasetWithBlankNodes(): Promise< + [Dataset, ObservationShape, JsonldDatasetProxyBuilder] + > { + const dataset = await serializedToDataset(patientDataWithBlankNodes); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Observation1")), + builder, + ]; + } + + async function getTinyLoadedDataset(): Promise< + [Dataset, ObservationShape, JsonldDatasetProxyBuilder] + > { + const dataset = await serializedToDataset(tinyPatientData); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Observation1")), + builder, + ]; + } + + async function getGraphLoadedDataset(): Promise< + [Dataset, ObservationShape, JsonldDatasetProxyBuilder] + > { + const tempDataset = await serializedToDataset(patientData); + const dataset = createDataset(); + const subjectGraphMap: Record = { + "http://example.com/Observation1": namedNode( + "http://example.com/Observation1Doc", + ), + "http://example.com/Patient1": namedNode( + "http://example.com/Patient1Doc", + ), + "http://example.com/Patient2": namedNode( + "http://example.com/Patient2Doc", + ), + "http://example.com/Patient3": namedNode( + "http://example.com/Patient3Doc", + ), + }; + tempDataset.forEach((tempQuad) => { + dataset.add( + quad( + tempQuad.subject, + tempQuad.predicate, + tempQuad.object, + subjectGraphMap[tempQuad.subject.value], + ), + ); + }); + const builder = 
await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Observation1")), + builder, + ]; + } + + async function getTinyLoadedDatasetWithBlankNodes(): Promise< + [Dataset, ObservationShape, JsonldDatasetProxyBuilder] + > { + const dataset = await serializedToDataset(tinyPatientDataWithBlankNodes); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Observation1")), + builder, + ]; + } + + async function getTinyLoadedDatasetWithLanguageTags(): Promise< + [Dataset, ObservationShape, JsonldDatasetProxyBuilder] + > { + const dataset = await serializedToDataset(tinyPatientDataWithLanguageTags); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Observation1")), + builder, + ]; + } + + async function getArrayLoadedDataset(): Promise< + [Dataset, PatientShape, JsonldDatasetProxyBuilder] + > { + const dataset = await serializedToDataset(tinyArrayPatientData); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Patient1")), + builder, + ]; + } + + async function getEmptyObservationDataset(): Promise< + [Dataset, ObservationShape, JsonldDatasetProxyBuilder] + > { + const dataset = await createDataset(); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Observation1")), + builder, + ]; + } + + async function getEmptyPatientDataset(): Promise< + [Dataset, PatientShape, JsonldDatasetProxyBuilder] + > { + const dataset = await createDataset(); + const builder = await jsonldDatasetProxy(dataset, patientContext); + return [ + dataset, + builder.fromSubject(namedNode("http://example.com/Patient1")), + builder, + ]; + } + + describe("read", () => { + 
it("retreives a primitive", async () => { + const [, observation] = await getLoadedDataset(); + expect(observation["@id"]).toBe("http://example.com/Observation1"); + expect(observation.notes).toBe("Cool Notes"); + }); + + it("retreives a primitive with blank nodes", async () => { + const [, observation] = await getLoadedDatasetWithBlankNodes(); + expect(observation.subject?.age).toBe(35); + }); + + it("retrieves a nested primitive", async () => { + const [, observation] = await getLoadedDataset(); + expect(observation?.subject && observation.subject["@id"]).toBe( + "http://example.com/Patient1", + ); + expect(observation?.subject?.age).toBe(35); + expect(observation?.subject?.birthdate).toBe("1986-01-01"); + expect(observation?.subject?.isHappy).toBe(true); + }); + + it("retrieves a nested primitive with a blank node", async () => { + const [, observation] = await getLoadedDatasetWithBlankNodes(); + expect(observation?.subject?.roommate?.[0].age).toBe(34); + }); + + it("retreives a @type value as rdf:type", async () => { + const [, observation] = await getLoadedDataset(); + expect(observation.subject?.type?.["@id"]).toBe("Patient"); + }); + + it("simulates the getter behavior of an array of primitives", async () => { + const [, observation] = await getLoadedDataset(); + const arr = observation?.subject?.name as string[]; + expect(Array.isArray(arr)).toBe(true); + expect(arr.length).toBe(3); + expect(arr[0]).toBe("Garrett"); + expect(arr[1]).toBe("Bobby"); + expect(arr[2]).toBe("Ferguson"); + expect(arr.at(0)).toBe("Garrett"); + expect(arr.at(-1)).toBe("Ferguson"); + expect(arr.concat(["Mimoey"])).toEqual([ + "Garrett", + "Bobby", + "Ferguson", + "Mimoey", + ]); + const entriesIterator = arr.entries(); + expect(entriesIterator.next()).toEqual({ + value: [0, "Garrett"], + done: false, + }); + expect(entriesIterator.next()).toEqual({ + value: [1, "Bobby"], + done: false, + }); + expect(entriesIterator.next()).toEqual({ + value: [2, "Ferguson"], + done: false, + }); + 
expect(entriesIterator.next()).toEqual({ + value: undefined, + done: true, + }); + expect(arr.every((val) => val.length > 2)).toBe(true); + expect(arr.every((val) => val.length > 6)).toBe(false); + expect(arr.filter((val) => val.length > 6)).toEqual([ + "Garrett", + "Ferguson", + ]); + expect(arr.find((val) => val.length < 6)).toBe("Bobby"); + expect(arr.findIndex((val) => val.length < 6)).toBe(1); + // arr.flat (Not included because there should never be nested arrays) + let concatTest = ""; + arr.forEach((value) => (concatTest += value)); + expect(concatTest).toBe("GarrettBobbyFerguson"); + expect(arr.includes("Bobby")).toBe(true); + expect(arr.indexOf("Bobby")).toBe(1); + expect(arr.join("-")).toBe("Garrett-Bobby-Ferguson"); + const keysIterator = arr.keys(); + expect(keysIterator.next()).toEqual({ + value: 0, + done: false, + }); + expect(keysIterator.next()).toEqual({ + value: 1, + done: false, + }); + expect(keysIterator.next()).toEqual({ + value: 2, + done: false, + }); + expect(keysIterator.next()).toEqual({ + value: undefined, + done: true, + }); + expect(arr.lastIndexOf("Bobby")).toBe(1); + expect(arr.map((val) => val.toUpperCase())).toEqual([ + "GARRETT", + "BOBBY", + "FERGUSON", + ]); + expect(arr.reduce((agg, val) => agg + val, "")).toBe( + "GarrettBobbyFerguson", + ); + expect(arr.slice(2)).toEqual(["Ferguson"]); + expect(arr.some((val) => val.startsWith("G"))).toBe(true); + expect(arr.toString()).toBe("Garrett,Bobby,Ferguson"); + const valuesIterator = arr.values(); + expect(valuesIterator.next()).toEqual({ + value: "Garrett", + done: false, + }); + expect(valuesIterator.next()).toEqual({ + value: "Bobby", + done: false, + }); + expect(valuesIterator.next()).toEqual({ + value: "Ferguson", + done: false, + }); + expect(valuesIterator.next()).toEqual({ + value: undefined, + done: true, + }); + expect(JSON.stringify(arr)).toBe(`["Garrett","Bobby","Ferguson"]`); + expect(arr.toString()).toBe("Garrett,Bobby,Ferguson"); + }); + + it("can traverse a 
circular graph", async () => { + const [, observation] = await getLoadedDataset(); + expect(observation.subject?.roommate?.[0].roommate?.[0]?.name?.[0]).toBe( + "Garrett", + ); + }); + + it("simulates getter object properties", async () => { + const [, observation] = await getLoadedDataset(); + const obj = observation.subject as PatientShape; + + expect(obj["@id"]).toEqual("http://example.com/Patient1"); + expect(obj.type).toEqual({ "@id": "Patient" }); + expect(obj.name).toEqual(["Garrett", "Bobby", "Ferguson"]); + expect(obj.birthdate).toEqual("1986-01-01"); + expect(obj.age).toEqual(35); + expect(obj.isHappy).toEqual(true); + const entries = Object.entries(obj); + expect(entries[0]).toEqual(["@id", "http://example.com/Patient1"]); + expect(entries[1]).toEqual(["type", { "@id": "Patient" }]); + expect(entries[2]).toEqual(["name", ["Garrett", "Bobby", "Ferguson"]]); + expect(entries[3]).toEqual(["birthdate", "1986-01-01"]); + expect(entries[4]).toEqual(["age", 35]); + expect(entries[5]).toEqual(["isHappy", true]); + expect(entries[6][0]).toEqual("roommate"); + expect(Object.keys(obj)).toEqual([ + "@id", + "type", + "name", + "birthdate", + "age", + "isHappy", + "roommate", + ]); + const values = Object.values(obj); + expect(values[0]).toEqual("http://example.com/Patient1"); + expect(values[1]).toEqual({ "@id": "Patient" }); + expect(values[2]).toEqual(["Garrett", "Bobby", "Ferguson"]); + expect(values[3]).toEqual("1986-01-01"); + expect(values[4]).toEqual(35); + expect(values[5]).toEqual(true); + }); + + it("handles stringification of a non circular object", async () => { + const [, observation] = await getLoadedDataset(); + const obj = observation.subject?.roommate?.[1] as PatientShape; + expect(obj.toString()).toBe("[object Object]"); + expect(JSON.stringify(obj)).toBe( + `{"@id":"http://example.com/Patient3","type":{"@id":"Patient"},"name":["Amy"],"birthdate":"1988-01-01","age":33,"isHappy":true}`, + ); + }); + + it("Returns an array for required array fields 
even if no data is in the dataset", async () => { + const [, observation] = await getLoadedDataset(); + const obj = observation.subject?.roommate?.[1] as PatientShape; + expect(obj.roommate).toEqual([]); + }); + + it("updates when the dataset is updated", async () => { + const [dataset, observation] = await getLoadedDataset(); + expect(observation.notes).toBe("Cool Notes"); + dataset.delete( + quad( + namedNode("http://example.com/Observation1"), + namedNode("http://hl7.org/fhir/notes"), + literal("Cool Notes", "http://www.w3.org/2001/XMLSchema#string"), + ), + ); + dataset.add( + quad( + namedNode("http://example.com/Observation1"), + namedNode("http://hl7.org/fhir/notes"), + literal("Bad Notes", "http://www.w3.org/2001/XMLSchema#string"), + ), + ); + expect(observation.notes).toBe("Bad Notes"); + }); + + it("handles stringfication of a circular object", async () => { + const [, observation] = await getLoadedDataset(); + const obj = observation.subject as PatientShape; + expect(obj.toString()).toBe("[object Object]"); + + expect(() => JSON.stringify(obj)).toThrow( + "Converting circular structure to JSON", + ); + }); + + it("returns undefined if called with an unrecognized symbol", async () => { + const [, observation] = await getLoadedDataset(); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + expect(observation[Symbol.toPrimitive]).toBe(undefined); + }); + + it("returns an array object if multiple triples exist, even if @container is not @set", async () => { + const dataset = await serializedToDataset(patientData); + const fakePatientSContext: ContextDefinition = { + name: { + "@id": "http://hl7.org/fhir/name", + "@type": "http://www.w3.org/2001/XMLSchema#string", + }, + }; + const builder = jsonldDatasetProxy(dataset, fakePatientSContext); + const patient = builder.fromSubject( + namedNode("http://example.com/Patient1"), + ); + expect(patient.name).toEqual(["Garrett", "Bobby", "Ferguson"]); + }); + + it("returns context when the 
@context key is called", async () => { + const [, observation] = await getLoadedDataset(); + expect(observation["@context"]).toEqual(patientContext); + }); + }); + + describe("write", () => { + it("sets a primitive value that doesn't exist yet", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + observation.notes = "Cool Notes"; + expect(dataset.toString()).toBe( + ' "Cool Notes" .\n', + ); + }); + + it("sets primitive number and boolean values", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + patient.age = 35; + patient.isHappy = true; + expect(dataset.toString()).toBe( + ' "35"^^ .\n "true"^^ .\n', + ); + }); + + it("sets a @type value as rdf:type", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + patient.type = { "@id": "Patient" }; + expect(dataset.toString()).toBe( + " .\n", + ); + }); + + it("replaces a primitive value that currently exists", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + dataset.add( + quad( + namedNode("http://example.com/Observation1"), + namedNode("http://hl7.org/fhir/notes"), + literal("Cool Notes"), + ), + ); + observation.notes = "Lame Notes"; + expect(dataset.toString()).toBe( + ' "Lame Notes" .\n', + ); + }); + + it("adds all quads from a set object", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + const patient: PatientShape = { + "@id": "http://example.com/Patient1", + birthdate: "2001-01-01", + }; + observation.subject = patient; + expect(dataset.toString()).toBe( + ' .\n "2001-01-01"^^ .\n', + ); + }); + + it("sets a retrieved blank node object", async () => { + const [, observation] = await getTinyLoadedDatasetWithBlankNodes(); + const patient2 = observation.subject?.roommate?.[0] as PatientShape; + observation.subject = patient2; + expect(observation.subject.name).toEqual(["Rob"]); + expect(observation.subject.roommate?.[0]?.name).toEqual(["Garrett"]); + 
expect(observation.subject.roommate?.[0]?.roommate?.[0].name).toEqual([ + "Rob", + ]); + }); + + it("only removes the connection when a value is set to undefined", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + observation.subject = undefined; + expect(dataset.toString()).toBe( + ' "Garrett" .\n .\n "Rob" .\n .\n', + ); + }); + + it("Creates a blank node if the id is blank during set", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + observation.subject = { name: ["Joe"] }; + expect(observation.subject?.["@id"]).toBeUndefined(); + expect(observation.subject.name).toEqual(["Joe"]); + expect( + dataset + .match( + namedNode("http://example.com/Observation1"), + namedNode("http://hl7.org/fhir/subject"), + ) + .toArray()[0].object.termType, + ).toBe("BlankNode"); + }); + + it("adds all quads from a set object that includes an array", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + const patient: PatientShape = { + "@id": "http://example.com/Patient1", + birthdate: "2001-01-01", + name: ["Jon", "Bon", "Jovi"], + }; + observation.subject = patient; + expect(dataset.toString()).toBe( + ' .\n "2001-01-01"^^ .\n "Jon" .\n "Bon" .\n "Jovi" .\n', + ); + }); + + it("does not infinitely recurse if there is a loop when setting an object", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + const patient1: PatientShape = { + "@id": "http://example.com/Patient1", + name: ["jon"], + }; + const patient2: PatientShape = { + "@id": "http://example.com/patient2", + name: ["jane"], + roommate: [patient1], + }; + patient1.roommate = [patient2]; + observation.subject = patient1; + expect(dataset.toString()).toBe( + ' .\n "jon" .\n .\n "jane" .\n .\n', + ); + }); + + it("adds a proxy object to the array", async () => { + const [, , builder] = await getTinyLoadedDataset(); + const patient3 = builder.fromSubject( + namedNode("http://example.com/Patient3"), 
+ ); + const patient1 = builder.fromSubject( + namedNode("http://example.com/Patient1"), + ); + patient3.roommate.push(patient1); + }); + + it("sets a primitive on an array", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + (patient.name as string[])[0] = "jon"; + expect(dataset.toString()).toBe( + ' "jon" .\n', + ); + }); + + it("sets a primitive on an array and overwrites one that already is there", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + dataset.add( + quad( + namedNode("http://example.com/Patient1"), + namedNode("http://hl7.org/fhir/name"), + literal("jon", "http://www.w3.org/2001/XMLSchema#string"), + ), + ); + (patient.name as string[])[0] = "not jon"; + expect(dataset.toString()).toBe( + ' "not jon" .\n', + ); + }); + + it("sets an array", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + patient.name = ["Joe", "Mama"]; + expect(dataset.toString()).toBe( + ' "Joe" .\n "Mama" .\n', + ); + }); + + it("Does not remove the full object when it is replaced on an object", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + const replacementPatient: PatientShape = { + "@id": "http://example.com/ReplacementPatient", + name: ["Jackson"], + }; + observation.subject = replacementPatient; + expect(dataset.toString()).toBe( + ' .\n "Garrett" .\n .\n "Rob" .\n .\n "Jackson" .\n', + ); + }); + + it("Does not remove the full object when it is replaced on an array", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + const replacementPatient: PatientShape = { + "@id": "http://example.com/ReplacementPatient", + name: ["Jackson"], + }; + const roommateArr = observation?.subject?.roommate as PatientShape[]; + roommateArr[0] = replacementPatient; + expect(dataset.toString()).toBe( + ' .\n "Garrett" .\n .\n "Rob" .\n .\n "Jackson" .\n', + ); + }); + + it("Keeps the correct array index when setting an index", async () => { + const [, 
observation] = await getLoadedDataset(); + const roommateArr = observation.subject?.roommate as PatientShape[]; + roommateArr[0] = { + "@id": "http://example.com/ReplacementPatient", + name: ["Jackson"], + }; + expect(roommateArr.length).toBe(2); + expect(roommateArr[0].name?.[0]).toBe("Jackson"); + }); + + it("Changes the subject name if the @id is changed", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + const patient = observation?.subject as PatientShape; + patient["@id"] = "http://example.com/RenamedPatient"; + expect(patient["@id"]).toBe("http://example.com/RenamedPatient"); + expect(dataset.toString()).toBe( + ' .\n "Rob" .\n .\n "Garrett" .\n .\n', + ); + }); + + it("Removes all adjoining triples when garbage collection is indicated via the delete operator on an object", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + delete observation.subject; + expect(dataset.toString()).toBe( + ' "Rob" .\n', + ); + }); + + it("Removes all adjoining triples in an array when garbage collection is indicated via the delete operator on an object", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + delete observation.subject?.name; + expect(dataset.toString()).toBe( + ' .\n .\n "Rob" .\n .\n', + ); + }); + + it("Removes all adjoining triples when garbage collection is indicated via the delete operator on an array", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + delete observation.subject?.roommate?.[0]; + expect(dataset.toString()).toBe( + ' .\n "Garrett" .\n', + ); + }); + + it("Removes all adjoining triples when garbage collection is indicated via the delete operator on an array with blank nodes", async () => { + const [dataset, observation] = await getTinyLoadedDatasetWithBlankNodes(); + delete observation.subject?.roommate?.[0]; + expect(dataset.toString()).toBe( + ' _:b25_Patient1 .\n_:b25_Patient1 "Garrett" .\n', + ); + }); + + it("Removes a literal 
in an array when using the delete operator", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + delete observation.subject?.name?.[0]; + expect(dataset.toString()).toBe( + ' .\n .\n "Rob" .\n .\n', + ); + }); + + it("Deletes itself if @id is deleted", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + delete observation["@id"]; + expect(observation).toEqual({ "@id": "http://example.com/Observation1" }); + expect(dataset.toString()).toBe( + ' "Garrett" .\n .\n "Rob" .\n .\n', + ); + }); + + it("Does nothing when deleting triples that don't exist", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + delete observation.subject; + expect(dataset.toString()).toBe(""); + }); + + it("Does nothing when deleting context", async () => { + const [, observation] = await getTinyLoadedDataset(); + delete observation["@context"]; + expect(observation["@context"]).toEqual(patientContext); + }); + + it("Does nothing when deleting toString", async () => { + const [, observation] = await getTinyLoadedDataset(); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + delete observation.toString; + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + delete observation[Symbol.toStringTag]; + expect(typeof observation.toString).toBe("function"); + }); + + it("Does nothing when deleting any symbol", async () => { + const [, observation] = await getTinyLoadedDataset(); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + delete observation[Symbol.search]; + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + expect(observation[Symbol.search]).toBe(undefined); + }); + + it("Removes old triples from a node that has the same id as the one it replaced", async () => { + const [dataset, observation] = await 
getTinyLoadedDataset(); + const replacementPatient: PatientShape = { + "@id": "http://example.com/Patient1", + name: ["Mister Sneaky"], + }; + observation.subject = replacementPatient; + expect(dataset.toString()).toBe( + ' .\n "Mister Sneaky" .\n "Rob" .\n .\n', + ); + }); + + it("handles Object.assign", async () => { + const [dataset, observation] = await getTinyLoadedDataset(); + Object.assign(observation, { + age: 35, + isHappy: true, + }); + expect(dataset.toString()).toBe( + ' .\n "35"^^ .\n "true"^^ .\n "Garrett" .\n .\n "Rob" .\n .\n', + ); + }); + + it("Adds elements to the array even if they were modified by the datastore", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + patient.name = ["Joe", "Blow"]; + dataset.add( + quad( + namedNode("http://example.com/Patient1"), + namedNode("http://hl7.org/fhir/name"), + literal("Tow"), + ), + ); + expect(patient.name).toEqual(["Joe", "Blow", "Tow"]); + }); + + it("Removes elements from the array even if they were modified by the datastore", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + patient.name = ["Joe", "Blow"]; + dataset.delete( + quad( + namedNode("http://example.com/Patient1"), + namedNode("http://hl7.org/fhir/name"), + literal("Blow"), + ), + ); + expect(patient.name).toEqual(["Joe"]); + }); + + it("Removes and adds from the array even if they were modified by the datastore", async () => { + const [dataset, patient] = await getEmptyPatientDataset(); + patient.name = ["Joe", "Blow"]; + dataset.delete( + quad( + namedNode("http://example.com/Patient1"), + namedNode("http://hl7.org/fhir/name"), + literal("Blow"), + ), + ); + dataset.add( + quad( + namedNode("http://example.com/Patient1"), + namedNode("http://hl7.org/fhir/name"), + literal("Tow"), + ), + ); + expect(patient.name).toEqual(["Joe", "Tow"]); + }); + + it("Prevents duplicates from being added to the array", async () => { + const [, patient] = await getArrayLoadedDataset(); + const arr = 
patient.name as string[]; + arr[3] = "Garrett"; + expect(arr).toEqual(["Garrett", "Bobby", "Ferguson"]); + }); + + it("Prevents duplicates from being added when a value is overwritten", async () => { + const [, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr[1] = "Garrett"; + expect(arr).toEqual(["Garrett", "Ferguson"]); + }); + + it("Prevents duplicates for Objects", async () => { + const [, observation] = await getLoadedDataset(); + const roommates = observation.subject?.roommate as PatientShape[]; + roommates[0] = { "@id": "http://example.com/Patient3" }; + expect(roommates.length).toBe(1); + expect(roommates[0].name?.[0]).toBe("Amy"); + }); + + it("Does nothing when you try to set a symbol on an array", async () => { + const [, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + expect(() => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + arr[Symbol.search] = "Cool"; + }).not.toThrowError(); + }); + + it("Does nothing when you try to delete a symbol on an array", async () => { + const [, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + expect(() => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + delete arr[Symbol.search]; + }).not.toThrowError(); + }); + + it("Does nothing when you try to delete an index of the array that doesn't exist", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + delete arr[5]; + expect(arr).toEqual(["Garrett", "Bobby", "Ferguson"]); + expect(dataset.toString()).toEqual( + ' "Garrett" .\n "Bobby" .\n "Ferguson" .\n', + ); + }); + + it("Can set a triple object named node with just a string", async () => { + const [dataset, observation] = await getEmptyObservationDataset(); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + observation.subject = 
"http://example.com/Patient1"; + expect(observation.subject).toEqual({ + "@id": "http://example.com/Patient1", + }); + expect(dataset.toString()).toBe( + " .\n", + ); + }); + + describe("Array Methods", () => { + it("handles copyWithin", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr.copyWithin(0, 2, 3); + expect(arr).toEqual(["Ferguson", "Bobby"]); + expect(dataset.toString()).toEqual( + ' "Bobby" .\n "Ferguson" .\n', + ); + }); + + it("handles copyWithin with the optional end variable missing", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr.copyWithin(0, 2); + expect(arr).toEqual(["Ferguson", "Bobby"]); + expect(dataset.toString()).toEqual( + ' "Bobby" .\n "Ferguson" .\n', + ); + }); + + it("handles fill", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr.fill("Beepy", 2, 5); + expect(arr).toEqual(["Garrett", "Bobby", "Beepy"]); + expect(dataset.toString()).toEqual( + ' "Garrett" .\n "Bobby" .\n "Beepy" .\n', + ); + }); + + it("handles pop", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + expect(arr.pop()).toBe("Ferguson"); + expect(arr).toEqual(["Garrett", "Bobby"]); + expect(dataset.toString()).toEqual( + ' "Garrett" .\n "Bobby" .\n', + ); + }); + + it("returns undefined for pop on an empty collection", async () => { + const [, patient] = await getArrayLoadedDataset(); + patient.name = []; + expect(patient.name.pop()).toBe(undefined); + }); + + it("handles push", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr.push("Beepy"); + expect(arr).toEqual(["Garrett", "Bobby", "Ferguson", "Beepy"]); + expect(dataset.toString()).toEqual( + ' "Garrett" .\n "Bobby" .\n "Ferguson" .\n "Beepy" .\n', + ); + }); + + it("handles 
reverse", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + patient.name?.reverse(); + expect(patient.name).toEqual(["Ferguson", "Bobby", "Garrett"]); + expect(dataset.toString()).toBe( + ' "Garrett" .\n "Bobby" .\n "Ferguson" .\n', + ); + }); + + it("handles shift", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + expect(arr.shift()).toEqual("Garrett"); + expect(arr).toEqual(["Bobby", "Ferguson"]); + expect(dataset.toString()).toEqual( + ' "Bobby" .\n "Ferguson" .\n', + ); + }); + + it("returns undefined for shift on an empty collection", async () => { + const [, patient] = await getArrayLoadedDataset(); + patient.name = []; + expect(patient.name.shift()).toBe(undefined); + }); + + it("handles sort", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + patient.name?.sort((a, b) => { + return a.length - b.length; + }); + expect(patient.name).toEqual(["Bobby", "Garrett", "Ferguson"]); + expect(dataset.toString()).toBe( + ' "Garrett" .\n "Bobby" .\n "Ferguson" .\n', + ); + }); + + it("handles sort without a sort function", async () => { + const [, patient] = await getArrayLoadedDataset(); + patient.name?.sort(); + expect(patient.name).toEqual(["Bobby", "Ferguson", "Garrett"]); + }); + + it("handles sort without a sort function and there are two equal values", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + dataset.add( + quad( + namedNode("http://example.com/Patient1"), + namedNode("http://hl7.org/fhir/name"), + literal( + "Bobby", + namedNode("http://www.w3.org/2001/XMLSchema#token"), + ), + ), + ); + patient.name?.sort(); + expect(patient.name).toEqual(["Bobby", "Bobby", "Ferguson", "Garrett"]); + }); + + it("handles splice", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr.splice(1, 0, "Beepy"); + expect(arr).toEqual(["Garrett", "Beepy", "Bobby", 
"Ferguson"]); + expect(dataset.toString()).toEqual( + ' "Garrett" .\n "Bobby" .\n "Ferguson" .\n "Beepy" .\n', + ); + }); + + it("handles splice with objects", async () => { + const [, observation] = await getLoadedDataset(); + const roommates = observation.subject?.roommate as PatientShape[]; + roommates.splice( + 0, + 1, + { + "@id": "http://example.com/Patient4", + type: { "@id": "Patient" }, + name: ["Dippy"], + age: 2, + }, + { + "@id": "http://example.com/Patient5", + type: { "@id": "Patient" }, + name: ["Licky"], + age: 3, + }, + ); + expect(roommates[0].name?.[0]).toBe("Dippy"); + expect(roommates[1].name?.[0]).toBe("Licky"); + expect(roommates[2].name?.[0]).toBe("Amy"); + }); + + it("handles splice with only two params", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr.splice(1, 1); + expect(arr).toEqual(["Garrett", "Ferguson"]); + expect(dataset.toString()).toEqual( + ' "Garrett" .\n "Ferguson" .\n', + ); + }); + + it("handles unshift", async () => { + const [dataset, patient] = await getArrayLoadedDataset(); + const arr = patient.name as string[]; + arr.unshift("Beepy"); + expect(arr).toEqual(["Beepy", "Garrett", "Bobby", "Ferguson"]); + expect(dataset.toString()).toEqual( + ' "Garrett" .\n "Bobby" .\n "Ferguson" .\n "Beepy" .\n', + ); + }); + }); + }); + + describe("underlying data", () => { + it("retrieves underlying data", async () => { + const dataset = await serializedToDataset(patientData); + const entryNode = namedNode("http://example.com/Observation1"); + const context = patientContext; + const builder = jsonldDatasetProxy(dataset, context); + const observation = builder.fromSubject(entryNode); + expect(observation[_getUnderlyingDataset]).toBe(dataset); + expect(observation[_getUnderlyingNode].value).toBe( + "http://example.com/Observation1", + ); + expect(observation[_writeGraphs][0].termType).toBe("DefaultGraph"); + 
expect(observation[_proxyContext].writeGraphs[0].termType).toBe( + "DefaultGraph", + ); + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const roommateArr = observation.subject!.roommate!; + expect(roommateArr[_getUnderlyingDataset]).toBe(dataset); + expect(roommateArr[_isSubjectOriented]).toBe(false); + const match = roommateArr[_getUnderlyingMatch]; + expect(match[0].value).toBe("http://example.com/Patient1"); + expect(match[1].value).toBe("http://hl7.org/fhir/roommate"); + expect(roommateArr[_getNodeAtIndex](0).value).toBe( + "http://example.com/Patient2", + ); + expect(roommateArr[_getNodeAtIndex](10)).toBe(undefined); + expect(observation.subject.name[_getNodeAtIndex](0).value).toBe( + "Garrett", + ); + const underlyingArrayTarget = roommateArr[_getUnderlyingArrayTarget]; + expect(underlyingArrayTarget[1][0].value).toBe( + "http://example.com/Patient2", + ); + }); + }); + + describe("matchSubject", () => { + let patients: PatientShape[]; + let dataset: Dataset; + + beforeEach(async () => { + const [receivedDataset, , builder] = await getLoadedDataset(); + dataset = receivedDataset; + patients = builder.matchSubject( + namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + namedNode("http://hl7.org/fhir/Patient"), + ); + }); + + it("creates a list of subjects that match a certain pattern", async () => { + expect(patients[0].name?.[0]).toBe("Garrett"); + expect(patients[1].name?.[0]).toBe("Rob"); + expect(patients[2].name?.[0]).toBe("Amy"); + }); + + it("Successfully adds a node to the list", async () => { + patients.push({ + "@id": "http://example.com/Patient4", + type: { "@id": "Patient" }, + name: ["Dippy"], + age: 2, + }); + expect( + dataset + .match( + null, + namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + namedNode("http://hl7.org/fhir/Patient"), + ) + .some((quad) => { + return quad.subject.value === "http://example.com/Patient4"; + }), + ).toBe(true); + expect(patients[3].name?.[0]).toBe("Dippy"); + }); 
+ + it("will read a new object if something has been added to the dataset after object creation", async () => { + dataset.add( + quad( + namedNode("http://example.com/Patient4"), + namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + namedNode("http://hl7.org/fhir/Patient"), + ), + ); + dataset.add( + quad( + namedNode("http://example.com/Patient4"), + namedNode("http://hl7.org/fhir/name"), + literal("Dippy"), + ), + ); + + expect( + dataset + .match( + null, + namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + namedNode("http://hl7.org/fhir/Patient"), + ) + .some((quad) => { + return quad.subject.value === "http://example.com/Patient4"; + }), + ).toBe(true); + expect(patients[3].name?.[0]).toBe("Dippy"); + }); + + it("errors if an object is added without the correct parameters", async () => { + expect(() => + patients.push({ + "@id": "http://example.com/Patient4", + name: ["Dippy"], + age: 2, + }), + ).toThrowError( + `Cannot add value to collection. This must contain a quad that matches (null, namedNode(http://www.w3.org/1999/02/22-rdf-syntax-ns#type), namedNode(http://hl7.org/fhir/Patient), null)`, + ); + }); + + it("errors if a literal is added to the collection", async () => { + // @ts-expect-error Purposely pushing an incorrect value to trigger an error + expect(() => patients.push("some string")).toThrowError( + `Cannot add a literal "some string"(string) to a subject-oriented collection.`, + ); + }); + + it("Removes all an object and replaces in upon set", async () => { + patients[0] = { + "@id": "http://example.com/Patient4", + type: { "@id": "Patient" }, + name: ["Dippy"], + age: 2, + }; + + expect(dataset.match(namedNode("http://example.com/Patient1")).size).toBe( + 0, + ); + expect(patients[0].name?.[0]).toBe("Dippy"); + }); + + it("Removes an object and replaces it upon splice", async () => { + patients.splice( + 1, + 1, + { + "@id": "http://example.com/Patient4", + type: { "@id": "Patient" }, + name: ["Dippy"], + age: 2, + }, + 
{ + "@id": "http://example.com/Patient5", + type: { "@id": "Patient" }, + name: ["Licky"], + age: 3, + }, + ); + + expect(dataset.match(namedNode("http://example.com/Patient2")).size).toBe( + 0, + ); + expect(patients[1].name?.[0]).toBe("Dippy"); + expect(patients[2].name?.[0]).toBe("Licky"); + }); + + it("Removes an object completely when assigning it to undefined", async () => { + // @ts-expect-error This violates the typings + patients[0] = undefined; + + expect(dataset.match(namedNode("http://example.com/Patient1")).size).toBe( + 0, + ); + expect(patients[0].name?.[0]).toBe("Rob"); + }); + + it("Removes an object completely when using the delete parameter", async () => { + delete patients[0]; + + expect(dataset.match(namedNode("http://example.com/Patient1")).size).toBe( + 0, + ); + expect(patients[0].name?.[0]).toBe("Rob"); + }); + + it("creates a collection that matches only collections in a certain graph", async () => { + const [, , builder] = await getGraphLoadedDataset(); + patients = builder.matchSubject( + namedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"), + namedNode("http://hl7.org/fhir/Patient"), + namedNode("http://example.com/Patient1Doc"), + ); + expect(patients.length).toBe(1); + expect(patients[0]["@id"]).toBe("http://example.com/Patient1"); + }); + }); + + describe("matchObject", () => { + let patients: PatientShape[]; + let builder: JsonldDatasetProxyBuilder; + + beforeEach(async () => { + const [, , receivedBuilder] = await getLoadedDataset(); + builder = receivedBuilder; + patients = builder.matchObject( + null, + namedNode("http://hl7.org/fhir/roommate"), + null, + ); + }); + + it("create a collection that matches the null, predicate, null pattern", async () => { + expect(patients[0].name?.[0]).toBe("Garrett"); + expect(patients[1].name?.[0]).toBe("Amy"); + expect(patients[2].name?.[0]).toBe("Rob"); + }); + + it("cannot write to a collection that matches the null, predicate, null pattern", () => { + expect( + () => (patients[1] = { 
"@id": "http://example.com/Patient4" }), + ).toThrow( + "A collection that does not specify a match for both a subject or predicate cannot be modified directly.", + ); + }); + + it("creates a collection that matches the subject, null, null pattern", () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const hodgePodge = builder.matchObject( + namedNode("http://example.com/Patient3"), + null, + null, + ); + expect(hodgePodge.length).toBe(5); + expect(hodgePodge[0]["@id"]).toBe("Patient"); + expect(hodgePodge[1]).toBe("Amy"); + expect(hodgePodge[2]).toBe("1988-01-01"); + expect(hodgePodge[3]).toBe(33); + expect(hodgePodge[4]).toBe(true); + }); + }); + + describe("fromJson", () => { + it("initializes a patient using the fromJSON method", async () => { + const [, , builder] = await getEmptyPatientDataset(); + const patient = builder.fromJson({ + name: ["Jack", "Horner"], + birthdate: "1725/11/03", + age: 298, + roommate: [ + { + name: ["Ethical", "Bug"], + }, + ], + }); + expect(patient.name?.[0]).toBe("Jack"); + expect(patient.name?.[1]).toBe("Horner"); + expect(patient.birthdate).toBe("1725/11/03"); + expect(patient.age).toBe(298); + expect(patient.roommate?.[0].name?.[0]).toBe("Ethical"); + expect(patient.roommate?.[0].name?.[1]).toBe("Bug"); + }); + + it("initializes a patient using the fromJSON method with a named node", async () => { + const [, , builder] = await getEmptyPatientDataset(); + const patient = builder.fromJson({ + "@id": "http://example.com/Patient13", + name: ["Jack", "Horner"], + birthdate: "1725/11/03", + age: 298, + roommate: [ + { + name: ["Ethical", "Bug"], + }, + ], + }); + expect(patient["@id"]).toBe("http://example.com/Patient13"); + expect(patient.name?.[0]).toBe("Jack"); + expect(patient.name?.[1]).toBe("Horner"); + expect(patient.birthdate).toBe("1725/11/03"); + expect(patient.age).toBe(298); + expect(patient.roommate?.[0].name?.[0]).toBe("Ethical"); + expect(patient.roommate?.[0].name?.[1]).toBe("Bug"); + }); + }); 
+ + describe("Graph Methods", () => { + describe("builder", () => { + it("sets write graph", async () => { + const [dataset, , builder] = await getEmptyObservationDataset(); + const patient4 = builder + .write(namedNode("http://example.com/Patient4Doc")) + .fromSubject(namedNode("https://example.com/Patient4")); + patient4.name = ["Jackson"]; + expect(dataset.toString()).toBe( + ' "Jackson" .\n', + ); + }); + }); + + describe("graphOf", () => { + it("detects the graph of a single value", async () => { + const [, observation] = await getGraphLoadedDataset(); + expect(graphOf(observation, "subject")[0].value).toBe( + "http://example.com/Observation1Doc", + ); + expect( + graphOf(observation, "subject", observation.subject)[0].value, + ).toBe("http://example.com/Observation1Doc"); + expect( + graphOf(observation.subject as PatientShape, "age")[0].value, + ).toBe("http://example.com/Patient1Doc"); + }); + + it("detects the graph of an array value", async () => { + const [, observation] = await getGraphLoadedDataset(); + const patient1 = observation.subject as PatientShape; + expect(graphOf(patient1, "name", 0)[0].value).toBe( + "http://example.com/Patient1Doc", + ); + expect(graphOf(patient1, "roommate", 0)[0].value).toBe( + "http://example.com/Patient1Doc", + ); + expect( + graphOf(patient1, "roommate", patient1.roommate?.[1])[0].value, + ).toBe("http://example.com/Patient1Doc"); + }); + + it("detects the graph of a value in multiple graphs", async () => { + const [dataset, observation] = await getGraphLoadedDataset(); + dataset.add( + quad( + namedNode("http://example.com/Observation1"), + namedNode("http://hl7.org/fhir/subject"), + namedNode("http://example.com/Patient1"), + namedNode("http://example.com/SomeOtherDoc"), + ), + ); + expect(graphOf(observation, "subject")[0].value).toBe( + "http://example.com/Observation1Doc", + ); + expect(graphOf(observation, "subject")[1].value).toBe( + "http://example.com/SomeOtherDoc", + ); + }); + + it("throws an error if a 
number is provided as an object and the object is not an array", async () => { + const [, observation] = await getGraphLoadedDataset(); + // @ts-expect-error this should not be allowed + expect(() => graphOf(observation, "subject", 0)).toThrowError( + `Key "subject" of [object Object] is not an array.`, + ); + }); + + it("throws an error if the index is out of bounds", async () => { + const [, observation] = await getGraphLoadedDataset(); + expect(() => + graphOf(observation.subject as PatientShape, "name", 10), + ).toThrowError(`Index 10 does not exist.`); + }); + }); + + describe("write method", () => { + it("changes the write graph", async () => { + const [, observation] = await getGraphLoadedDataset(); + write(namedNode("http://example.com/SomeOtherDoc")).using(observation); + observation.notes = "Cool Notes"; + expect(graphOf(observation, "notes")[0].value).toBe( + "http://example.com/SomeOtherDoc", + ); + }); + + it("allows the write graph to be reset", async () => { + const doc1 = namedNode("http://example.com/Doc1"); + const doc2 = namedNode("http://example.com/Doc2"); + const doc3 = namedNode("http://example.com/Doc3"); + + const [, patient] = await getEmptyPatientDataset(); + patient.name?.push("default"); + const end1 = write(doc1).using(patient); + patient.name?.push("1"); + const end2 = write(doc2).using(patient); + patient.name?.push("2"); + const end3 = write(doc3).using(patient); + patient.name?.push("3"); + end3(); + patient.name?.push("2 again"); + end2(); + patient.name?.push("1 again"); + end1(); + patient.name?.push("default again"); + + expect(graphOf(patient, "name", 0)[0].value).toBe(defaultGraph().value); + expect(graphOf(patient, "name", 1)[0].value).toBe(doc1.value); + expect(graphOf(patient, "name", 2)[0].value).toBe(doc2.value); + expect(graphOf(patient, "name", 3)[0].value).toBe(doc3.value); + expect(graphOf(patient, "name", 4)[0].value).toBe(doc2.value); + expect(graphOf(patient, "name", 5)[0].value).toBe(doc1.value); + 
expect(graphOf(patient, "name", 6)[0].value).toBe(defaultGraph().value); + }); + + it("copies the proxy and changes the write graphs without modifying the original", async () => { + const doc1 = namedNode("http://example.com/Doc1"); + + const [, patient] = await getEmptyPatientDataset(); + patient.name?.push("Default"); + const [patientOnDoc1] = write(doc1).usingCopy(patient); + patientOnDoc1.name?.push("Doc1"); + expect(graphOf(patient, "name", 0)[0].value).toBe(defaultGraph().value); + expect(graphOf(patient, "name", 1)[0].value).toBe(doc1.value); + }); + + it("works with array proxies", async () => { + const [, , builder] = await getTinyLoadedDataset(); + const allRoommates = builder.matchObject( + namedNode("http://example.com/Patient1"), + namedNode("http://hl7.org/fhir/roommate"), + ); + write(namedNode("http://example.com/SomeGraph")).using( + allRoommates, + allRoommates, + ); + allRoommates[0].age = 20; + expect(graphOf(allRoommates[0], "age")[0].value).toBe( + "http://example.com/SomeGraph", + ); + }); + }); + }); + + describe("languageTag Support", () => { + it("Retrieves the proper language given the languageOrdering", async () => { + const [, , builder] = await getTinyLoadedDatasetWithLanguageTags(); + + const observation = builder + .setLanguagePreferences("fr", "en") + .fromSubject( + namedNode("http://example.com/Observation1"), + ); + + const patient = observation.subject as PatientShape; + + expect(observation.langNotes).toBe("Notes Sympas"); + expect(patient.langName?.[0]).toBe("Jean"); + + setLanguagePreferences("ru", "zh").using(observation, patient); + + expect(observation.langNotes).toBeUndefined(); + expect(patient.langName?.length).toBe(0); + + setLanguagePreferences("@other", "fr").using(observation, patient); + expect(observation.langNotes).not.toBe("Notes Sympas"); + expect(patient.langName?.[0]).not.toBe("Jean"); + + setLanguagePreferences().using(observation, patient); + expect(observation.langNotes).toBe(undefined); + 
expect(patient.langName?.length).toBe(0); + }); + + it("sets language strings based on the default language", async () => { + const [, , builder] = await getTinyLoadedDatasetWithLanguageTags(); + const observation = builder + .setLanguagePreferences("fr", "en") + .fromSubject( + namedNode("http://example.com/Observation1"), + ); + observation.langNotes = "quelques notes"; + expect(languagesOf(observation, "langNotes")).toEqual({ + fr: "quelques notes", + "@none": "Cool Notes", + en: "Cooler Notes", + es: "Notas Geniales", + }); + const patient = observation.subject as PatientShape; + patient.langName?.push("Luc"); + expect(languagesOf(patient, "langName").fr?.has("Jean")).toBe(true); + expect(languagesOf(patient, "langName").fr?.has("Luc")).toBe(true); + expect(languagesOf(patient, "langName")["@none"]?.has("Jon")).toBe(true); + expect(languagesOf(patient, "langName").en?.has("John")).toBe(true); + expect(languagesOf(patient, "langName").es?.has("Juan")).toBe(true); + + // Skips other in favor of setting the next language + setLanguagePreferences("@other", "es").using(observation, patient); + observation.langNotes = "algunas notas"; + expect(languagesOf(observation, "langNotes")).toEqual({ + fr: "quelques notes", + "@none": "Cool Notes", + en: "Cooler Notes", + es: "algunas notas", + }); + + // Does not set a language if only other + setLanguagePreferences("@other").using(observation, patient); + observation.langNotes = "Some Notes that will never be written"; + expect(languagesOf(observation, "langNotes")).toEqual({ + fr: "quelques notes", + "@none": "Cool Notes", + en: "Cooler Notes", + es: "algunas notas", + }); + + // Does not set a language if empty + setLanguagePreferences().using(observation, patient); + observation.langNotes = "Some Notes that will never be written"; + expect(languagesOf(observation, "langNotes")).toEqual({ + fr: "quelques notes", + "@none": "Cool Notes", + en: "Cooler Notes", + es: "algunas notas", + }); + + // Sets @none + 
setLanguagePreferences("@none").using(observation, patient); + observation.langNotes = "Other notes"; + expect(languagesOf(observation, "langNotes")).toEqual({ + fr: "quelques notes", + "@none": "Other notes", + en: "Cooler Notes", + es: "algunas notas", + }); + }); + + it("uses languageOf to make a languageMap", async () => { + const [, observation] = await getTinyLoadedDatasetWithLanguageTags(); + const languageMap = languagesOf(observation, "langNotes"); + expect(languageMap).toEqual({ + "@none": "Cool Notes", + en: "Cooler Notes", + es: "Notas Geniales", + fr: "Notes Sympas", + }); + }); + + it("uses languageOf to set values on a languageMap", async () => { + const [dataset, observation] = + await getTinyLoadedDatasetWithLanguageTags(); + const languageMap = languagesOf(observation, "langNotes"); + languageMap.zh = "很酷的笔记"; + languageMap.fr = "notes plus fraîches"; + expect(languageMap).toEqual({ + "@none": "Cool Notes", + en: "Cooler Notes", + es: "Notas Geniales", + fr: "notes plus fraîches", + zh: "很酷的笔记", + }); + const langNoteQuads = dataset.match( + namedNode("http://example.com/Observation1"), + namedNode("http://hl7.org/fhir/langNotes"), + ); + expect(langNoteQuads.size).toBe(5); + expect( + langNoteQuads.some( + (quad) => + quad.object.termType === "Literal" && + quad.object.language === "fr" && + quad.object.value === "notes plus fraîches", + ), + ).toBe(true); + expect( + langNoteQuads.some( + (quad) => + quad.object.termType === "Literal" && + quad.object.language === "zh" && + quad.object.value === "很酷的笔记", + ), + ).toBe(true); + }); + + it("uses languageOf to delete values on a languageMap", async () => { + const [dataset, observation] = + await getTinyLoadedDatasetWithLanguageTags(); + const languageMap = languagesOf(observation, "langNotes"); + delete languageMap.fr; + expect(languageMap).toEqual({ + "@none": "Cool Notes", + en: "Cooler Notes", + es: "Notas Geniales", + }); + const langNoteQuads = dataset.match( + 
namedNode("http://example.com/Observation1"), + namedNode("http://hl7.org/fhir/langNotes"), + ); + expect(langNoteQuads.size).toBe(3); + expect( + langNoteQuads.every( + (quad) => + !( + quad.object.termType === "Literal" && + quad.object.language === "fr" + ), + ), + ).toBe(true); + }); + + it("executes the methods of the LanguageSet", async () => { + const [dataset, observation] = + await getTinyLoadedDatasetWithLanguageTags(); + + const subject = namedNode("http://example.com/Patient1"); + const predicate = namedNode("http://hl7.org/fhir/langName"); + + const patient = observation.subject as PatientShape; + + const enSet = languagesOf(patient, "langName").en as LanguageSet; + + expect(enSet.size).toBe(1); + + enSet.add("Doe"); + expect(enSet.size).toBe(2); + expect(enSet.has("Doe")).toBe(true); + expect(dataset.has(quad(subject, predicate, literal("Doe", "en")))).toBe( + true, + ); + + const callbackMock = jest.fn(); + enSet.forEach(callbackMock); + expect(callbackMock).toHaveBeenCalledTimes(2); + expect(callbackMock).toHaveBeenCalledWith("John", "John", enSet); + + const entries = enSet.entries(); + const entriesVal1 = entries.next(); + const entriesVal2 = entries.next(); + const entriesVal3 = entries.next(); + expect(entriesVal1.value).toEqual(["John", "John"]); + expect(entriesVal2.value).toEqual(["Doe", "Doe"]); + expect(entriesVal3.done).toBe(true); + + const keys = enSet.keys(); + const keysVal1 = keys.next(); + const keysVal2 = keys.next(); + const keysVal3 = keys.next(); + expect(keysVal1.value).toBe("John"); + expect(keysVal2.value).toBe("Doe"); + expect(keysVal3.done).toBe(true); + + const values = enSet.values(); + const valuesVal1 = values.next(); + const valuesVal2 = values.next(); + const valuesVal3 = values.next(); + expect(valuesVal1.value).toBe("John"); + expect(valuesVal2.value).toBe("Doe"); + expect(valuesVal3.done).toBe(true); + + enSet.delete("John"); + expect(enSet.size).toBe(1); + expect(enSet.has("John")).toBe(false); + 
expect(dataset.has(quad(subject, predicate, literal("John", "en")))).toBe( + false, + ); + + enSet.clear(); + expect(enSet.size).toBe(0); + }); + }); +}); diff --git a/packages/jsonld-dataset-proxy/test/nodeToJsonRepresentation.test.ts b/packages/jsonld-dataset-proxy/test/nodeToJsonRepresentation.test.ts new file mode 100644 index 0000000..46707f1 --- /dev/null +++ b/packages/jsonld-dataset-proxy/test/nodeToJsonRepresentation.test.ts @@ -0,0 +1,63 @@ +import { createDataset } from "o-dataset-pack"; +import { ContextUtil } from "../src/ContextUtil"; +import { nodeToJsonldRepresentation } from "../src/util/nodeToJsonldRepresentation"; +import { literal, defaultGraph } from "@rdfjs/data-model"; +import { ProxyContext } from "../src"; + +describe("objectToJsonRepresentation", () => { + const extraParams: ProxyContext = new ProxyContext({ + dataset: createDataset(), + contextUtil: new ContextUtil({}), + writeGraphs: [defaultGraph()], + languageOrdering: ["@none", "@other"], + }); + + it("returns a string for hexBinary", () => { + expect( + nodeToJsonldRepresentation( + literal("F03493", "http://www.w3.org/2001/XMLSchema#hexBinary"), + extraParams, + ), + ).toBe("F03493"); + }); + + it("returns a string for HTML", () => { + expect( + nodeToJsonldRepresentation( + literal( + "", + "http://www.w3.org/1999/02/22-rdf-syntax-ns#HTML", + ), + extraParams, + ), + ).toBe(""); + }); + + it("returns a string for anyUri", () => { + expect( + nodeToJsonldRepresentation( + literal( + "http://example.com", + "http://www.w3.org/2001/XMLSchema#anyURI", + ), + extraParams, + ), + ).toBe("http://example.com"); + }); + + it("returns a string for an unrecognized datatype", () => { + expect( + nodeToJsonldRepresentation( + literal("meh", "http://weirddatatype.com"), + extraParams, + ), + ).toBe("meh"); + }); + + it("throws an error when it encoutners a quad that is not a Liter, NamedNode, or BlankNode", () => { + expect(() => + // @ts-expect-error defaultGraph is not allowed + 
nodeToJsonldRepresentation(defaultGraph(), extraParams), + ).toThrow("Can only convert NamedNodes or Literals or BlankNodes"); + }); +}); diff --git a/packages/jsonld-dataset-proxy/test/nodeToString.test.ts b/packages/jsonld-dataset-proxy/test/nodeToString.test.ts new file mode 100644 index 0000000..1d05021 --- /dev/null +++ b/packages/jsonld-dataset-proxy/test/nodeToString.test.ts @@ -0,0 +1,15 @@ +import { blankNode, defaultGraph, literal, namedNode } from "@rdfjs/data-model"; +import { nodeToString } from "../src"; + +describe("nodeToString", () => { + it("returns all the correct values for nodeToString", () => { + expect(nodeToString(namedNode("http://example.com"))).toBe( + "namedNode(http://example.com)", + ); + expect(nodeToString(blankNode("_b1"))).toBe("blankNode(_b1)"); + expect(nodeToString(literal("Hello"))).toBe( + "literal(Hello,http://www.w3.org/2001/XMLSchema#string)", + ); + expect(nodeToString(defaultGraph())).toBe("defaultGraph()"); + }); +}); diff --git a/packages/jsonld-dataset-proxy/test/patientExampleData.ts b/packages/jsonld-dataset-proxy/test/patientExampleData.ts new file mode 100644 index 0000000..b199c06 --- /dev/null +++ b/packages/jsonld-dataset-proxy/test/patientExampleData.ts @@ -0,0 +1,196 @@ +import type { ContextDefinition } from "jsonld"; +import type { Schema } from "shexj"; + +export interface ObservationShape { + "@id"?: string; + "@context"?: ContextDefinition; + subject?: PatientShape; + notes?: string; + langNotes?: string; +} + +export type PatientShape = { + "@id"?: string; + "@context"?: ContextDefinition; + type?: { "@id": "Patient" }; + name?: string[]; + langName?: string[]; + birthdate?: string; + age?: number; + isHappy?: boolean; + roommate?: PatientShape[]; +}; + +// No need to fully define the schema because this library doesn't use it +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +export const patientSchema: Schema = {}; + +export const patientContext: ContextDefinition = { + 
type: { + "@id": "@type", + }, + Patient: "http://hl7.org/fhir/Patient", + subject: { "@id": "http://hl7.org/fhir/subject", "@type": "@id" }, + name: { + "@id": "http://hl7.org/fhir/name", + "@type": "http://www.w3.org/2001/XMLSchema#string", + "@container": "@set", + }, + langName: { + "@id": "http://hl7.org/fhir/langName", + "@type": "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", + "@container": "@set", + }, + birthdate: { + "@id": "http://hl7.org/fhir/birthdate", + "@type": "http://www.w3.org/2001/XMLSchema#date", + }, + age: { + "@id": "http://hl7.org/fhir/age", + "@type": "http://www.w3.org/2001/XMLSchema#integer", + }, + isHappy: { + "@id": "http://hl7.org/fhir/isHappy", + "@type": "http://www.w3.org/2001/XMLSchema#boolean", + }, + roommate: { + "@id": "http://hl7.org/fhir/roommate", + "@type": "@id", + "@container": "@set", + }, + notes: { + "@id": "http://hl7.org/fhir/notes", + "@type": "http://www.w3.org/2001/XMLSchema#string", + }, + langNotes: { + "@id": "http://hl7.org/fhir/langNotes", + "@type": "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", + }, +}; + +export const patientData = ` +@prefix example: . +@prefix fhir: . +@prefix xsd: . +@prefix rdf: . + +example:Observation1 + fhir:notes "Cool Notes"^^xsd:string ; + fhir:subject example:Patient1 . + +example:Patient1 + rdf:type fhir:Patient ; + fhir:name "Garrett"^^xsd:string, "Bobby"^^xsd:string, "Ferguson"^^xsd:string ; + fhir:birthdate "1986-01-01"^^xsd:date ; + fhir:age "35"^^xsd:integer ; + fhir:isHappy "true"^^xsd:boolean ; + fhir:roommate example:Patient2, example:Patient3 . + +example:Patient2 + rdf:type fhir:Patient ; + fhir:name "Rob"^^xsd:string ; + fhir:birthdate "1987-01-01"^^xsd:date ; + fhir:age "34"^^xsd:integer ; + fhir:isHappy "false"^^xsd:boolean ; + fhir:roommate example:Patient1, example:Patient3 . 
+ +example:Patient3 + rdf:type fhir:Patient ; + fhir:name "Amy"^^xsd:string ; + fhir:birthdate "1988-01-01"^^xsd:date ; + fhir:age "33"^^xsd:integer ; + fhir:isHappy "true"^^xsd:boolean . +`; + +export const patientDataWithBlankNodes = ` +@prefix example: . +@prefix fhir: . +@prefix xsd: . + +example:Observation1 + fhir:notes "Cool Notes"^^xsd:string ; + fhir:subject _:Patient1 . + +_:Patient1 + fhir:name "Garrett"^^xsd:string, "Bobby"^^xsd:string, "Ferguson"^^xsd:string ; + fhir:birthdate "1986-01-01"^^xsd:date ; + fhir:age "35"^^xsd:integer ; + fhir:isHappy "true"^^xsd:boolean ; + fhir:roommate _:Patient2, _:Patient3 . + +_:Patient2 + fhir:name "Rob"^^xsd:string ; + fhir:birthdate "1987-01-01"^^xsd:date ; + fhir:age "34"^^xsd:integer ; + fhir:isHappy "false"^^xsd:boolean ; + fhir:roommate _:Patient1, _:Patient3 . + +_:Patient3 + fhir:name "Amy"^^xsd:string ; + fhir:birthdate "1988-01-01"^^xsd:date ; + fhir:age "33"^^xsd:integer ; + fhir:isHappy "true"^^xsd:boolean . +`; + +export const tinyPatientData = ` +@prefix example: . +@prefix fhir: . +@prefix xsd: . + +example:Observation1 + fhir:subject example:Patient1 . + +example:Patient1 + fhir:name "Garrett"^^xsd:string ; + fhir:roommate example:Patient2 . + +example:Patient2 + fhir:name "Rob"^^xsd:string ; + fhir:roommate example:Patient1 . +`; + +export const tinyArrayPatientData = ` +@prefix example: . +@prefix fhir: . +@prefix xsd: . + +example:Patient1 + fhir:name "Garrett"^^xsd:string, "Bobby"^^xsd:string, "Ferguson"^^xsd:string . +`; + +export const tinyPatientDataWithBlankNodes = ` +@prefix example: . +@prefix fhir: . +@prefix xsd: . + +example:Observation1 + fhir:subject _:Patient1 . + +_:Patient1 + fhir:name "Garrett"^^xsd:string ; + fhir:roommate _:Patient2 . + +_:Patient2 + fhir:name "Rob"^^xsd:string ; + fhir:roommate _:Patient1 . +`; + +export const tinyPatientDataWithLanguageTags = ` +@prefix example: . +@prefix fhir: . +@prefix xsd: . 
+ +example:Observation1 + fhir:subject example:Patient1 ; + fhir:langNotes "Cool Notes" ; + fhir:langNotes "Cooler Notes"@en ; + fhir:langNotes "Notas Geniales"@es ; + fhir:langNotes "Notes Sympas"@fr . + +example:Patient1 + fhir:langName "Jon" ; + fhir:langName "John"@en ; + fhir:langName "Juan"@es ; + fhir:langName "Jean"@fr . +`; diff --git a/packages/jsonld-dataset-proxy/tsconfig.build.json b/packages/jsonld-dataset-proxy/tsconfig.build.json new file mode 100644 index 0000000..ce7be9c --- /dev/null +++ b/packages/jsonld-dataset-proxy/tsconfig.build.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist" + }, + "include": ["./src"] +} \ No newline at end of file diff --git a/packages/ldo/jest.config.js b/packages/ldo/jest.config.js index b28362a..bad5f64 100644 --- a/packages/ldo/jest.config.js +++ b/packages/ldo/jest.config.js @@ -1,5 +1,6 @@ -const sharedConfig = require('../../jest.config.js'); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const sharedConfig = require("../../jest.config.js"); module.exports = { ...sharedConfig, - 'rootDir': './', -} \ No newline at end of file + rootDir: "./", +}; diff --git a/packages/ldo/package.json b/packages/ldo/package.json index d6933a5..c901b64 100644 --- a/packages/ldo/package.json +++ b/packages/ldo/package.json @@ -34,9 +34,9 @@ "ts-node": "^10.4.0" }, "dependencies": { + "@ldo/jsonld-dataset-proxy": "^0.0.0", "@rdfjs/data-model": "^1.2.0", "buffer": "^6.0.3", - "jsonld-dataset-proxy": "^1.2.1", "n3": "^1.16.2", "o-dataset-pack": "^0.2.11", "readable-stream": "^4.3.0" diff --git a/packages/ldo/src/LdoBuilder.ts b/packages/ldo/src/LdoBuilder.ts index a0a3bab..5e1862d 100644 --- a/packages/ldo/src/LdoBuilder.ts +++ b/packages/ldo/src/LdoBuilder.ts @@ -4,7 +4,7 @@ import type { QuadMatch, SubjectType, LanguageOrdering, -} from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; import type { ShapeType } from "./ShapeType"; import type { 
LdoBase } from "./util"; import { normalizeNodeName, normalizeNodeNames } from "./util"; diff --git a/packages/ldo/src/LdoDataset.ts b/packages/ldo/src/LdoDataset.ts index 7407566..78ad4df 100644 --- a/packages/ldo/src/LdoDataset.ts +++ b/packages/ldo/src/LdoDataset.ts @@ -1,5 +1,5 @@ import type { Quad } from "@rdfjs/types"; -import jsonldDatasetProxy from "jsonld-dataset-proxy"; +import jsonldDatasetProxy from "@ldo/jsonld-dataset-proxy"; import { WrapperSubscribableDataset } from "o-dataset-pack"; import { LdoBuilder } from "./LdoBuilder"; import type { ShapeType } from "./ShapeType"; diff --git a/packages/ldo/src/methods.ts b/packages/ldo/src/methods.ts index 315786b..15e45ed 100644 --- a/packages/ldo/src/methods.ts +++ b/packages/ldo/src/methods.ts @@ -1,12 +1,12 @@ import type { Dataset } from "@rdfjs/types"; import type { JsonLdDocument } from "jsonld"; -import type { GraphType, InteractOptions } from "jsonld-dataset-proxy"; +import type { GraphType, InteractOptions } from "@ldo/jsonld-dataset-proxy"; import { getProxyFromObject, _getUnderlyingDataset, _proxyContext, write as writeDependency, -} from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; import type { Quad, WriterOptions } from "n3"; import type { DatasetChanges, SubscribableDataset } from "o-dataset-pack"; import { datasetToString } from "./datasetConverters"; @@ -21,7 +21,7 @@ export { graphOf, languagesOf, setLanguagePreferences, -} from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; export function write(...graphs: (GraphType | string)[]): InteractOptions { return writeDependency(...normalizeNodeNames(graphs)); diff --git a/packages/ldo/src/util.ts b/packages/ldo/src/util.ts index a38ec6d..ba35adb 100644 --- a/packages/ldo/src/util.ts +++ b/packages/ldo/src/util.ts @@ -7,12 +7,12 @@ import type { PredicateType, SubjectProxy, SubjectType, -} from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; import { getProxyFromObject, _getUnderlyingDataset, 
_proxyContext, -} from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; import type { Quad } from "n3"; import type { SubscribableDataset, TransactionalDataset } from "o-dataset-pack"; diff --git a/packages/ldo/test/LdoDataset.test.ts b/packages/ldo/test/LdoDataset.test.ts index e6f5ed8..7b2fa2b 100644 --- a/packages/ldo/test/LdoDataset.test.ts +++ b/packages/ldo/test/LdoDataset.test.ts @@ -5,8 +5,8 @@ import { ProfileShapeType } from "./profileData"; import type { LdoBuilder, LdoDataset } from "../src"; import { createLdoDataset, graphOf, parseRdf, toTurtle } from "../src"; import { sampleJsonld, sampleTurtle } from "./sampleData"; -import type { SubjectProxy } from "jsonld-dataset-proxy"; -import { _proxyContext } from "jsonld-dataset-proxy"; +import type { SubjectProxy } from "@ldo/jsonld-dataset-proxy"; +import { _proxyContext } from "@ldo/jsonld-dataset-proxy"; describe("LdoDataset", () => { let ldoDataset: LdoDataset; diff --git a/packages/ldo/test/methods.test.ts b/packages/ldo/test/methods.test.ts index 7a85304..a202d9e 100644 --- a/packages/ldo/test/methods.test.ts +++ b/packages/ldo/test/methods.test.ts @@ -1,11 +1,11 @@ import { namedNode } from "@rdfjs/data-model"; -import type { SubjectProxy } from "jsonld-dataset-proxy"; +import type { SubjectProxy } from "@ldo/jsonld-dataset-proxy"; import { getProxyFromObject, graphOf, _getUnderlyingDataset, _proxyContext, -} from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; import { createDataset } from "o-dataset-pack"; import type { SolidProfileShape } from "./profileData"; import { ProfileShapeType } from "./profileData"; diff --git a/packages/schema-converter-shex/jest.config.js b/packages/schema-converter-shex/jest.config.js index b28362a..bad5f64 100644 --- a/packages/schema-converter-shex/jest.config.js +++ b/packages/schema-converter-shex/jest.config.js @@ -1,5 +1,6 @@ -const sharedConfig = require('../../jest.config.js'); +// eslint-disable-next-line 
@typescript-eslint/no-var-requires +const sharedConfig = require("../../jest.config.js"); module.exports = { ...sharedConfig, - 'rootDir': './', -} \ No newline at end of file + rootDir: "./", +}; diff --git a/packages/schema-converter-shex/package.json b/packages/schema-converter-shex/package.json index 2eb8f7c..d76a637 100644 --- a/packages/schema-converter-shex/package.json +++ b/packages/schema-converter-shex/package.json @@ -6,7 +6,8 @@ "scripts": { "build": "tsc --project tsconfig.build.json", "test": "jest --coverage", - "prepublishOnly": "npm run test && npm run build" + "prepublishOnly": "npm run test && npm run build", + "lint": "eslint src/** --fix --no-error-on-unmatched-pattern" }, "repository": { "type": "git", diff --git a/packages/schema-converter-shex/src/context/JsonLdContextBuilder.ts b/packages/schema-converter-shex/src/context/JsonLdContextBuilder.ts index aaa2776..5fdd2ed 100644 --- a/packages/schema-converter-shex/src/context/JsonLdContextBuilder.ts +++ b/packages/schema-converter-shex/src/context/JsonLdContextBuilder.ts @@ -1,5 +1,5 @@ -import { Annotation } from "shexj"; -import { ContextDefinition, ExpandedTermDefinition } from "jsonld"; +import type { Annotation } from "shexj"; +import type { ContextDefinition, ExpandedTermDefinition } from "jsonld"; /** * Name functions @@ -24,7 +24,7 @@ export function nameFromObject(obj: { }): string | undefined { const labelAnnotationObject = obj.annotations?.find( (annotation) => - annotation.predicate === "http://www.w3.org/2000/01/rdf-schema#label" + annotation.predicate === "http://www.w3.org/2000/01/rdf-schema#label", )?.object; if (labelAnnotationObject && typeof labelAnnotationObject === "string") { return toCamelCase(iriToName(labelAnnotationObject)); @@ -68,7 +68,7 @@ export class JsonLdContextBuilder { iri: string, expandedTermDefinition: ExpandedTermDefinition, isContainer: boolean, - annotations?: Annotation[] + annotations?: Annotation[], ) { this.addSubject(iri, annotations); if 
(!this.iriTypes[iri]) { @@ -115,7 +115,7 @@ export class JsonLdContextBuilder { const labelAnnotationObject = annotations.find( (annotation) => annotation.predicate === - "http://www.w3.org/2000/01/rdf-schema#label" + "http://www.w3.org/2000/01/rdf-schema#label", )?.object; if ( labelAnnotationObject && diff --git a/packages/schema-converter-shex/src/context/ShexJContextVisitor.ts b/packages/schema-converter-shex/src/context/ShexJContextVisitor.ts index a175a70..8651df1 100644 --- a/packages/schema-converter-shex/src/context/ShexJContextVisitor.ts +++ b/packages/schema-converter-shex/src/context/ShexJContextVisitor.ts @@ -1,5 +1,5 @@ import ShexJTraverser from "shexj-traverser"; -import { JsonLdContextBuilder } from "./JsonLdContextBuilder"; +import type { JsonLdContextBuilder } from "./JsonLdContextBuilder"; /** * Visitor @@ -7,9 +7,7 @@ import { JsonLdContextBuilder } from "./JsonLdContextBuilder"; export const ShexJNameVisitor = ShexJTraverser.createVisitor({ Shape: { - visitor: async (shape, context) => { - - } + visitor: async (_shape, _context) => {}, }, TripleConstraint: { visitor: async (tripleConstraint, context) => { @@ -26,7 +24,7 @@ export const ShexJNameVisitor = "@type": tripleConstraint.valueExpr.datatype, }, isContainer, - tripleConstraint.annotations + tripleConstraint.annotations, ); } else if ( tripleConstraint.valueExpr.nodeKind && @@ -36,14 +34,14 @@ export const ShexJNameVisitor = tripleConstraint.predicate, { "@type": "@id" }, isContainer, - tripleConstraint.annotations + tripleConstraint.annotations, ); } else { context.addPredicate( tripleConstraint.predicate, {}, isContainer, - tripleConstraint.annotations + tripleConstraint.annotations, ); } } else { @@ -53,13 +51,13 @@ export const ShexJNameVisitor = "@type": "@id", }, isContainer, - tripleConstraint.annotations + tripleConstraint.annotations, ); } } else { context.addSubject( tripleConstraint.predicate, - tripleConstraint.annotations + tripleConstraint.annotations, ); } }, diff --git 
a/packages/schema-converter-shex/src/context/shexjToContext.ts b/packages/schema-converter-shex/src/context/shexjToContext.ts index 32cc1f5..6a99aeb 100644 --- a/packages/schema-converter-shex/src/context/shexjToContext.ts +++ b/packages/schema-converter-shex/src/context/shexjToContext.ts @@ -1,11 +1,11 @@ -import { ContextDefinition } from "jsonld"; -import { Schema } from "shexj"; +import type { ContextDefinition } from "jsonld"; +import type { Schema } from "shexj"; import { JsonLdContextBuilder } from "./JsonLdContextBuilder"; import { ShexJNameVisitor } from "./ShexJContextVisitor"; import { jsonld2graphobject } from "jsonld2graphobject"; export async function shexjToContext( - shexj: Schema + shexj: Schema, ): Promise { const processedShexj: Schema = (await jsonld2graphobject( { @@ -13,7 +13,7 @@ export async function shexjToContext( "@id": "SCHEMA", "@context": "http://www.w3.org/ns/shex.jsonld", }, - "SCHEMA" + "SCHEMA", )) as unknown as Schema; const jsonLdContextBuilder = new JsonLdContextBuilder(); await ShexJNameVisitor.visit(processedShexj, "Schema", jsonLdContextBuilder); diff --git a/packages/schema-converter-shex/src/typing/ShapeInterfaceDeclaration.ts b/packages/schema-converter-shex/src/typing/ShapeInterfaceDeclaration.ts index ed1e3bb..037f300 100644 --- a/packages/schema-converter-shex/src/typing/ShapeInterfaceDeclaration.ts +++ b/packages/schema-converter-shex/src/typing/ShapeInterfaceDeclaration.ts @@ -1,4 +1,4 @@ -import { InterfaceDeclaration } from "dts-dom"; +import type { InterfaceDeclaration } from "dts-dom"; export interface ShapeInterfaceDeclaration extends InterfaceDeclaration { shapeId?: string; diff --git a/packages/schema-converter-shex/src/typing/ShexJTypingTransformer.ts b/packages/schema-converter-shex/src/typing/ShexJTypingTransformer.ts index cf1dabe..3b662d1 100644 --- a/packages/schema-converter-shex/src/typing/ShexJTypingTransformer.ts +++ b/packages/schema-converter-shex/src/typing/ShexJTypingTransformer.ts @@ -1,19 +1,19 
@@ import ShexJTraverser from "shexj-traverser"; import * as dom from "dts-dom"; -import { Annotation } from "shexj"; +import type { Annotation } from "shexj"; import { nameFromObject } from "../context/JsonLdContextBuilder"; -import { ShapeInterfaceDeclaration } from "./ShapeInterfaceDeclaration"; +import type { ShapeInterfaceDeclaration } from "./ShapeInterfaceDeclaration"; export interface ShexJTypeTransformerContext { getNameFromIri: (iri: string) => string; } export function commentFromAnnotations( - annotations?: Annotation[] + annotations?: Annotation[], ): string | undefined { const commentAnnotationObject = annotations?.find( (annotation) => - annotation.predicate === "http://www.w3.org/2000/01/rdf-schema#comment" + annotation.predicate === "http://www.w3.org/2000/01/rdf-schema#comment", )?.object; if (typeof commentAnnotationObject === "string") { // It's an IRI @@ -49,7 +49,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< Schema: { transformer: async ( _schema, - getTransformedChildren + getTransformedChildren, ): Promise => { const transformedChildren = await getTransformedChildren(); const interfaces: dom.TopLevelDeclaration[] = []; @@ -67,7 +67,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< ShapeDecl: { transformer: async ( shapeDecl, - getTransformedChildren + getTransformedChildren, ): Promise => { const shapeName = nameFromObject(shapeDecl) || "Shape"; const { shapeExpr } = await getTransformedChildren(); @@ -80,7 +80,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< } else { // TODO: Handle other items throw new Error( - "Cannot handle ShapeOr, ShapeAnd, ShapeNot, ShapeExternal, or NodeConstraint" + "Cannot handle ShapeOr, ShapeAnd, ShapeNot, ShapeExternal, or NodeConstraint", ); } }, @@ -95,15 +95,15 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< dom.create.property( "@id", dom.type.string, - dom.DeclarationFlags.Optional - ) + 
dom.DeclarationFlags.Optional, + ), ); newInterface.members.push( dom.create.property( "@context", dom.create.namedTypeReference("ContextDefinition"), - dom.DeclarationFlags.Optional - ) + dom.DeclarationFlags.Optional, + ), ); if (typeof transformedChildren.expression === "string") { // TODO: handle string @@ -113,14 +113,14 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< "interface" ) { newInterface.members.push( - ...(transformedChildren.expression as dom.ObjectType).members + ...(transformedChildren.expression as dom.ObjectType).members, ); } else if ( (transformedChildren.expression as dom.PropertyDeclaration).kind === "property" ) { newInterface.members.push( - transformedChildren.expression as dom.PropertyDeclaration + transformedChildren.expression as dom.PropertyDeclaration, ); } // Use EXTENDS @@ -129,7 +129,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< transformedChildren.extends.forEach((extendsItem) => { if ((extendsItem as dom.InterfaceDeclaration).kind === "interface") { newInterface.baseTypes?.push( - extendsItem as dom.InterfaceDeclaration + extendsItem as dom.InterfaceDeclaration, ); } }); @@ -151,21 +151,21 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< transformedChildren.expressions .filter( ( - expression + expression, ): expression is dom.ObjectType | dom.PropertyDeclaration => { return ( (expression as dom.PropertyDeclaration).kind === "property" || (expression as dom.ObjectType).kind === "object" || (expression as dom.InterfaceDeclaration).kind === "interface" ); - } + }, ) .forEach( ( expression: | dom.ObjectType | dom.InterfaceDeclaration - | dom.PropertyDeclaration + | dom.PropertyDeclaration, ) => { if (expression.kind === "property") { inputPropertyExpressions.push(expression); @@ -176,7 +176,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< } }); } - } + }, ); // Merge property expressions @@ -206,7 +206,7 @@ export const 
ShexJTypingTransformer = ShexJTraverser.createTransformer< dom.type.array(dom.create.union([oldProeprtyType, propertyType])), isOptional ? dom.DeclarationFlags.Optional - : dom.DeclarationFlags.None + : dom.DeclarationFlags.None, ); // Set JS Comment properties[propertyDeclaration.name].jsDocComment = @@ -229,7 +229,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< tripleConstraint, getTransformedChildren, setReturnPointer, - context + context, ) => { const transformedChildren = await getTransformedChildren(); const propertyName = context.getNameFromIri(tripleConstraint.predicate); @@ -244,11 +244,11 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< const propertyDeclaration = dom.create.property( propertyName, isArray ? dom.type.array(type) : type, - isOptional ? dom.DeclarationFlags.Optional : dom.DeclarationFlags.None + isOptional ? dom.DeclarationFlags.Optional : dom.DeclarationFlags.None, ); propertyDeclaration.jsDocComment = commentFromAnnotations( - tripleConstraint.annotations + tripleConstraint.annotations, ); return propertyDeclaration; }, @@ -258,7 +258,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< nodeConstraint, _getTransformedChildren, setReturnPointer, - context + context, ) => { if (nodeConstraint.datatype) { switch (nodeConstraint.datatype) { @@ -325,7 +325,7 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< dom.create.property( "@id", dom.type.string, - dom.DeclarationFlags.Optional + dom.DeclarationFlags.Optional, ), ]); case "literal": @@ -341,9 +341,9 @@ export const ShexJTypingTransformer = ShexJTraverser.createTransformer< dom.create.objectType([ dom.create.property( "@id", - dom.type.stringLiteral(context.getNameFromIri(value)) + dom.type.stringLiteral(context.getNameFromIri(value)), ), - ]) + ]), ); } }); diff --git a/packages/schema-converter-shex/src/typing/shexjToTyping.ts 
b/packages/schema-converter-shex/src/typing/shexjToTyping.ts index 455eea5..9f99b3d 100644 --- a/packages/schema-converter-shex/src/typing/shexjToTyping.ts +++ b/packages/schema-converter-shex/src/typing/shexjToTyping.ts @@ -1,5 +1,5 @@ -import { ContextDefinition } from "jsonld"; -import { Schema } from "shexj"; +import type { ContextDefinition } from "jsonld"; +import type { Schema } from "shexj"; import { JsonLdContextBuilder } from "../context/JsonLdContextBuilder"; import { ShexJNameVisitor } from "../context/ShexJContextVisitor"; import { jsonld2graphobject } from "jsonld2graphobject"; @@ -15,7 +15,7 @@ export interface TypeingReturn { } export async function shexjToTyping( - shexj: Schema + shexj: Schema, ): Promise<[TypeingReturn, ContextDefinition]> { const processedShexj: Schema = (await jsonld2graphobject( { @@ -23,7 +23,7 @@ export async function shexjToTyping( "@id": "SCHEMA", "@context": "http://www.w3.org/ns/shex.jsonld", }, - "SCHEMA" + "SCHEMA", )) as unknown as Schema; const jsonLdContextBuilder = new JsonLdContextBuilder(); await ShexJNameVisitor.visit(processedShexj, "Schema", jsonLdContextBuilder); @@ -34,7 +34,7 @@ export async function shexjToTyping( { getNameFromIri: jsonLdContextBuilder.getNameFromIri.bind(jsonLdContextBuilder), - } + }, ); const typings = declarations.map((declaration) => { return { diff --git a/packages/solid-react/package.json b/packages/solid-react/package.json index b8b8c79..7884bf1 100644 --- a/packages/solid-react/package.json +++ b/packages/solid-react/package.json @@ -36,9 +36,9 @@ }, "dependencies": { "@inrupt/solid-client": "^1.29.0", + "@ldo/jsonld-dataset-proxy": "^0.0.0", "@ldo/ldo": "^0.0.0", "cross-fetch": "^3.1.6", - "jsonld-dataset-proxy": "^1.2.3", "o-dataset-pack": "^0.2.14", "solid-authn-react-native": "^2.0.3", "stream": "^0.0.2" diff --git a/packages/solid-react/src/ldoHooks/helpers/TrackingProxyContext.ts b/packages/solid-react/src/ldoHooks/helpers/TrackingProxyContext.ts index 7659f49..d127177 
100644 --- a/packages/solid-react/src/ldoHooks/helpers/TrackingProxyContext.ts +++ b/packages/solid-react/src/ldoHooks/helpers/TrackingProxyContext.ts @@ -2,8 +2,8 @@ import type { ArrayProxyTarget, SubjectProxyTarget, ProxyContextOptions, -} from "jsonld-dataset-proxy"; -import { ProxyContext } from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; +import { ProxyContext } from "@ldo/jsonld-dataset-proxy"; import type { UpdateManager } from "./UpdateManager"; import { namedNode } from "@rdfjs/data-model"; diff --git a/packages/solid-react/src/ldoHooks/helpers/UpdateManager.ts b/packages/solid-react/src/ldoHooks/helpers/UpdateManager.ts index 0ad004f..dbdbd3e 100644 --- a/packages/solid-react/src/ldoHooks/helpers/UpdateManager.ts +++ b/packages/solid-react/src/ldoHooks/helpers/UpdateManager.ts @@ -5,8 +5,8 @@ import type { SubjectType, PredicateType, ObjectType, -} from "jsonld-dataset-proxy"; -import { nodeToString } from "jsonld-dataset-proxy"; +} from "@ldo/jsonld-dataset-proxy"; +import { nodeToString } from "@ldo/jsonld-dataset-proxy"; import type { Quad } from "@rdfjs/types"; export type TripleMatch = [QuadMatch[0], QuadMatch[1], QuadMatch[2]]; diff --git a/packages/solid-react/src/ldoHooks/useSubject.ts b/packages/solid-react/src/ldoHooks/useSubject.ts index 5909efa..704842a 100644 --- a/packages/solid-react/src/ldoHooks/useSubject.ts +++ b/packages/solid-react/src/ldoHooks/useSubject.ts @@ -1,5 +1,8 @@ -import type { SubjectType } from "jsonld-dataset-proxy"; -import { ContextUtil, JsonldDatasetProxyBuilder } from "jsonld-dataset-proxy"; +import type { SubjectType } from "@ldo/jsonld-dataset-proxy"; +import { + ContextUtil, + JsonldDatasetProxyBuilder, +} from "@ldo/jsonld-dataset-proxy"; import type { ShapeType, LdoBase } from "@ldo/ldo"; import { LdoBuilder } from "@ldo/ldo"; import { useLdoContext } from "../LdoContext"; diff --git a/packages/solid-react/src/useLdo.ts b/packages/solid-react/src/useLdo.ts index d2e4744..69158db 100644 --- 
a/packages/solid-react/src/useLdo.ts +++ b/packages/solid-react/src/useLdo.ts @@ -8,7 +8,7 @@ import type { DataResource } from "./document/resource/dataResource/DataResource import type { BinaryResource } from "./document/resource/binaryResource/BinaryResource"; import type { ContainerResource } from "./document/resource/dataResource/containerResource/ContainerResource"; import type { AccessRules } from "./document/accessRules/AccessRules"; -import type { SubjectType } from "jsonld-dataset-proxy"; +import type { SubjectType } from "@ldo/jsonld-dataset-proxy"; import type { DatasetChanges } from "o-dataset-pack"; import type { Quad } from "@rdfjs/types"; diff --git a/packages/solid-react/src/util/splitChangesByGraph.ts b/packages/solid-react/src/util/splitChangesByGraph.ts index 0d3184d..99fa321 100644 --- a/packages/solid-react/src/util/splitChangesByGraph.ts +++ b/packages/solid-react/src/util/splitChangesByGraph.ts @@ -1,6 +1,6 @@ import type { DatasetChanges } from "o-dataset-pack"; import { createDataset } from "o-dataset-pack"; -import type { GraphType } from "jsonld-dataset-proxy"; +import type { GraphType } from "@ldo/jsonld-dataset-proxy"; import type { Quad } from "@rdfjs/types"; import { defaultGraph, namedNode, quad as createQuad } from "@rdfjs/data-model";