Modified how bulk editing works by adding an `after` callback to queued processes

main
jaxoncreed 2 years ago
parent d89ac86b0b
commit 81e145c319
  1. 22
      packages/solid/src/SolidLdoDataset.ts
  2. 10
      packages/solid/src/methods.ts
  3. 60
      packages/solid/src/package.json
  4. 15
      packages/solid/src/requester/ContainerRequester.ts
  5. 16
      packages/solid/src/requester/LeafRequester.ts
  6. 35
      packages/solid/src/requester/Requester.ts
  7. 5
      packages/solid/src/requester/requests/requestOptions.ts
  8. 13
      packages/solid/src/requester/requests/updateDataResource.ts
  9. 16
      packages/solid/src/requester/util/modifyQueueFuntions.ts
  10. 1
      packages/solid/src/resource/Resource.ts
  11. 6
      packages/solid/src/util/RequestBatcher.ts
  12. 125
      packages/solid/test/Integration.test.ts
  13. 4
      packages/subscribable-dataset/src/ProxyTransactionalDataset.ts
  14. 2
      packages/subscribable-dataset/src/types.ts

@ -22,6 +22,7 @@ import { splitChangesByGraph } from "./util/splitChangesByGraph";
import type { ContainerUri, LeafUri } from "./util/uriTypes";
import { isContainerUri } from "./util/uriTypes";
import type { Resource } from "./resource/Resource";
import { quad as createQuad } from "@rdfjs/data-model";
export class SolidLdoDataset extends LdoDataset {
public context: SolidLdoDatasetContext;
@ -42,6 +43,9 @@ export class SolidLdoDataset extends LdoDataset {
return this.context.resourceStore.get(uri, options);
}
/**
 * Commits the given dataset changes to the Pod, grouped per graph/resource.
 * NOTE(review): appears to return an aggregate of per-resource results — confirm.
 */
async commitChangesToPod(
changes: DatasetChanges<Quad>,
): Promise<
@ -50,6 +54,8 @@ export class SolidLdoDataset extends LdoDataset {
>
| AggregateError<UpdateResultError | InvalidUriError>
> {
// Optimistically add changes to the datastore
// this.bulk(changes);
const changesByGraph = splitChangesByGraph(changes);
// Iterate through all changes by graph in
@ -90,7 +96,23 @@ export class SolidLdoDataset extends LdoDataset {
// If one has errored, return error
const errors = results.filter((result) => result[2].isError);
if (errors.length > 0) {
// // Rollback errors
// errors.forEach((error) => {
// // Add the graph back to the quads
// const added = error[1].added?.map((quad) =>
// createQuad(quad.subject, quad.predicate, quad.object, error[0]),
// );
// const removed = error[1].removed?.map((quad) =>
// createQuad(quad.subject, quad.predicate, quad.object, error[0]),
// );
// this.bulk({
// added: removed,
// removed: added,
// });
// });
return new AggregateError(
errors.map(
(result) => result[2] as UpdateResultError | InvalidUriError,

@ -4,12 +4,13 @@ import {
write,
transactionChanges,
getDataset,
commitTransaction,
} from "@ldo/ldo";
import type { DatasetChanges } from "@ldo/rdf-utils";
import type { Resource } from "./resource/Resource";
import type { SolidLdoDataset } from "./SolidLdoDataset";
import type { Quad } from "@rdfjs/types";
import { _proxyContext, getProxyFromObject } from "@ldo/jsonld-dataset-proxy";
import type { SubscribableDataset } from "@ldo/subscribable-dataset";
/**
* Begins tracking changes to eventually commit
@ -40,7 +41,12 @@ export function commitData(
input: LdoBase,
): ReturnType<SolidLdoDataset["commitChangesToPod"]> {
const changes = transactionChanges(input);
commitTransaction(input);
// Take the LdoProxy out of commit mode. This uses hidden methods of JSONLD-DATASET-PROXY
const proxy = getProxyFromObject(input);
proxy[_proxyContext] = proxy[_proxyContext].duplicate({
dataset: proxy[_proxyContext].state
.parentDataset as SubscribableDataset<Quad>,
});
const dataset = getDataset(input) as SolidLdoDataset;
return dataset.commitChangesToPod(changes as DatasetChanges<Quad>);
}

@ -0,0 +1,60 @@
{
"name": "@ldo/solid",
"version": "0.0.1-alpha.17",
"description": "A library for LDO and Solid",
"main": "dist/index.js",
"scripts": {
"example": "ts-node ./example/example.ts",
"build": "tsc --project tsconfig.build.json",
"watch": "tsc --watch",
"test": "jest --coverage",
"test:watch": "jest --watch",
"prepublishOnly": "npm run test && npm run build",
"build:ldo": "ldo build --input src/.shapes --output src/.ldo",
"lint": "eslint src/** --fix --no-error-on-unmatched-pattern",
"docs": "typedoc --plugin typedoc-plugin-markdown"
},
"repository": {
"type": "git",
"url": "git+https://github.com/o-development/ldobjects.git"
},
"author": "Jackson Morgan",
"license": "MIT",
"bugs": {
"url": "https://github.com/o-development/ldobjects/issues"
},
"homepage": "https://github.com/o-development/ldobjects/tree/main/packages/solid#readme",
"devDependencies": {
"@inrupt/solid-client-authn-core": "^1.17.1",
"@ldo/cli": "^0.0.1-alpha.17",
"@rdfjs/data-model": "^1.2.0",
"@rdfjs/types": "^1.0.1",
"@solid/community-server": "^6.0.2",
"@types/jest": "^29.0.3",
"dotenv": "^16.3.1",
"jest-rdf": "^1.8.0",
"ts-jest": "^29.0.2",
"ts-node": "^10.9.1",
"typed-emitter": "^2.1.0",
"typedoc": "^0.25.4",
"typedoc-plugin-markdown": "^3.17.1"
},
"dependencies": {
"@inrupt/solid-client": "^1.30.0",
"@ldo/dataset": "^0.0.1-alpha.17",
"@ldo/ldo": "^0.0.1-alpha.17",
"@ldo/rdf-utils": "^0.0.1-alpha.17",
"@types/parse-link-header": "^2.0.1",
"cross-fetch": "^3.1.6",
"http-link-header": "^1.1.1",
"ts-mixer": "^6.0.3"
},
"files": [
"dist",
"src"
],
"publishConfig": {
"access": "public"
},
"gitHead": "1c242d645fc488f8d0e9f4da7425d9928abf1e9d"
}

@ -8,6 +8,7 @@ import type {
ContainerCreateIfAbsentResult,
} from "./requests/createDataResource";
import type { ReadContainerResult } from "./requests/readResource";
import { modifyQueueByMergingEventsWithTheSameKeys } from "./util/modifyQueueFuntions";
export const IS_ROOT_CONTAINER_KEY = "isRootContainer";
@ -45,17 +46,9 @@ export class ContainerRequester extends Requester {
name: IS_ROOT_CONTAINER_KEY,
args: [this.uri as ContainerUri, { fetch: this.context.fetch }],
perform: checkRootContainer,
modifyQueue: (queue, currentlyLoading) => {
if (
queue.length === 0 &&
currentlyLoading?.name === IS_ROOT_CONTAINER_KEY
) {
return currentlyLoading;
} else if (queue[queue.length - 1]?.name === IS_ROOT_CONTAINER_KEY) {
return queue[queue.length - 1];
}
return undefined;
},
modifyQueue: modifyQueueByMergingEventsWithTheSameKeys(
IS_ROOT_CONTAINER_KEY,
),
});
}
}

@ -56,18 +56,12 @@ export class LeafRequester extends Requester {
async updateDataResource(
changes: DatasetChanges<Quad>,
): Promise<UpdateResult> {
const transaction = this.context.solidLdoDataset.startTransaction();
transaction.addAll(changes.added || []);
changes.removed?.forEach((quad) => transaction.delete(quad));
// Commit data optimistically
transaction.commit();
const result = await this.requestBatcher.queueProcess({
name: UPDATE_KEY,
args: [
this.uri,
changes,
{ fetch: this.context.fetch, onRollback: () => transaction.rollback() },
{ fetch: this.context.fetch, dataset: this.context.solidLdoDataset },
],
perform: updateDataResource,
modifyQueue: (queue, currentlyProcessing, [, changes]) => {
@ -138,10 +132,12 @@ export class LeafRequester extends Requester {
}
return undefined;
},
after: (result) => {
if (!result.isError) {
transaction.commit();
}
},
});
if (!result.isError) {
transaction.commit();
}
return result;
}
}

@ -14,6 +14,7 @@ import type {
import { readResource } from "./requests/readResource";
import type { DeleteResult } from "./requests/deleteResource";
import { deleteResource } from "./requests/deleteResource";
import { modifyQueueByMergingEventsWithTheSameKeys } from "./util/modifyQueueFuntions";
const READ_KEY = "read";
const CREATE_KEY = "createDataResource";
@ -52,18 +53,13 @@ export abstract class Requester {
name: READ_KEY,
args: [this.uri, { dataset: transaction, fetch: this.context.fetch }],
perform: readResource,
modifyQueue: (queue, currentlyLoading) => {
if (queue.length === 0 && currentlyLoading?.name === READ_KEY) {
return currentlyLoading;
} else if (queue[queue.length - 1]?.name === READ_KEY) {
return queue[queue.length - 1];
modifyQueue: modifyQueueByMergingEventsWithTheSameKeys(READ_KEY),
after: (result) => {
if (!result.isError) {
transaction.commit();
}
return undefined;
},
});
if (!result.isError) {
transaction.commit();
}
return result;
}
@ -76,18 +72,13 @@ export abstract class Requester {
name: DELETE_KEY,
args: [this.uri, { dataset: transaction, fetch: this.context.fetch }],
perform: deleteResource,
modifyQueue: (queue, currentlyLoading) => {
if (queue.length === 0 && currentlyLoading?.name === DELETE_KEY) {
return currentlyLoading;
} else if (queue[queue.length - 1]?.name === DELETE_KEY) {
return queue[queue.length - 1];
modifyQueue: modifyQueueByMergingEventsWithTheSameKeys(DELETE_KEY),
after: (result) => {
if (!result.isError) {
transaction.commit();
}
return undefined;
},
});
if (!result.isError) {
transaction.commit();
}
return result;
}
@ -145,10 +136,12 @@ export abstract class Requester {
}
return undefined;
},
after: (result) => {
if (!result.isError) {
transaction.commit();
}
},
});
if (!result.isError) {
transaction.commit();
}
return result;
}
}

@ -1,9 +1,10 @@
import type { Dataset, Quad } from "@rdfjs/types";
import type { BulkEditableDataset } from "@ldo/subscribable-dataset";
import type { Quad } from "@rdfjs/types";
export interface BasicRequestOptions {
fetch?: typeof fetch;
}
export interface DatasetRequestOptions extends BasicRequestOptions {
dataset?: Dataset<Quad>;
dataset?: BulkEditableDataset<Quad>;
}

@ -7,7 +7,7 @@ import { UnexpectedResourceError } from "../results/error/ErrorResult";
import type { HttpErrorResultType } from "../results/error/HttpErrorResult";
import { HttpErrorResult } from "../results/error/HttpErrorResult";
import type { UpdateSuccess } from "../results/success/UpdateSuccess";
import type { BasicRequestOptions } from "./requestOptions";
import type { DatasetRequestOptions } from "./requestOptions";
export type UpdateResult = UpdateSuccess | UpdateResultError;
export type UpdateResultError = HttpErrorResultType | UnexpectedResourceError;
@ -15,9 +15,11 @@ export type UpdateResultError = HttpErrorResultType | UnexpectedResourceError;
export async function updateDataResource(
uri: LeafUri,
datasetChanges: DatasetChanges<Quad>,
options?: BasicRequestOptions & { onRollback?: () => void },
options?: DatasetRequestOptions,
): Promise<UpdateResult> {
try {
// Optimistically add data
options?.dataset?.bulk(datasetChanges);
const fetch = guaranteeFetch(options?.fetch);
// Make request
@ -32,8 +34,11 @@ export async function updateDataResource(
const httpError = HttpErrorResult.checkResponse(uri, response);
if (httpError) {
// Handle error rollback
if (options?.onRollback) {
options.onRollback();
if (options?.dataset) {
options.dataset.bulk({
added: datasetChanges.removed,
removed: datasetChanges.added,
});
}
return httpError;
}

@ -0,0 +1,16 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import type { WaitingProcess } from "../../util/RequestBatcher";
/**
 * Builds a `modifyQueue` callback that coalesces duplicate requests sharing
 * the same process key.
 *
 * The returned callback reuses the in-flight process when the queue is empty
 * and the currently-loading process matches `key`, or reuses the most
 * recently queued process when it matches `key`. Otherwise it returns
 * `undefined`, signalling that a new process should be enqueued.
 *
 * @param key - The process name that identical requests are merged under.
 * @returns A `modifyQueue` function for the RequestBatcher.
 */
export function modifyQueueByMergingEventsWithTheSameKeys(key: string) {
  return (
    queue: WaitingProcess<any[], any>[],
    currentlyLoading: WaitingProcess<any[], any> | undefined,
  ) => {
    // Nothing queued yet: piggyback on the matching in-flight process.
    if (currentlyLoading?.name === key && queue.length === 0) {
      return currentlyLoading;
    }
    // Otherwise merge into the tail of the queue when it has the same key.
    const lastQueued = queue[queue.length - 1];
    if (lastQueued?.name === key) {
      return lastQueued;
    }
    return undefined;
  };
}

@ -194,6 +194,7 @@ export abstract class Resource extends (EventEmitter as new () => TypedEmitter<{
// async getAccessRules(): Promise<AccessRuleResult | AccessRuleFetchError> {
// return getAccessRules({ uri: this.uri, fetch: this.context.fetch });
// }
/* istanbul ignore next */
async setAccessRules(
newAccessRules: AccessRule,
): Promise<ResourceResult<SetAccessRulesResult, Leaf | Container>> {

@ -5,6 +5,7 @@ export interface WaitingProcess<Args extends any[], Return> {
perform: (...args: Args) => Promise<Return>;
awaitingResolutions: ((returnValue: Return) => void)[];
awaitingRejections: ((err: any) => void)[];
after?: (result: Return) => void;
}
export const ANY_KEY = "any";
@ -25,6 +26,7 @@ export interface WaitingProcessOptions<Args extends any[], Return> {
currentlyProcessing: WaitingProcess<any[], any> | undefined,
args: Args,
) => WaitingProcess<any[], any> | undefined;
after?: (result: Return) => void;
}
/**
@ -77,6 +79,9 @@ export class RequestBatcher {
const returnValue = await processToTrigger.perform(
...processToTrigger.args,
);
if (processToTrigger.after) {
processToTrigger.after(returnValue);
}
processToTrigger.awaitingResolutions.forEach((callback) => {
callback(returnValue);
});
@ -120,6 +125,7 @@ export class RequestBatcher {
perform: options.perform,
awaitingResolutions: [resolve],
awaitingRejections: [reject],
after: options.after,
};
// HACK: Ugly cast
this.processQueue.push(

@ -103,6 +103,7 @@ async function testRequestLoads<ReturnVal>(
isReloading: boolean;
isDeleting: boolean;
isUpdating: boolean;
isDoingInitialFetch: boolean;
}>,
): Promise<ReturnVal> {
const allLoadingValues = {
@ -113,6 +114,7 @@ async function testRequestLoads<ReturnVal>(
isReloading: false,
isDeleting: false,
isUpdating: false,
isDoingInitialFetch: false,
...loadingValues,
};
const [returnVal] = await Promise.all([
@ -207,6 +209,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.type).toBe("dataReadSuccess");
expect(
@ -218,6 +221,7 @@ describe("SolidLdoDataset", () => {
).toBe(1);
expect(resource.isBinary()).toBe(false);
expect(resource.isDataResource()).toBe(true);
expect(resource.isPresent()).toBe(true);
});
it("Auto reads a resource", async () => {
@ -247,6 +251,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.type).toBe("containerReadSuccess");
expect(resource.children().length).toBe(2);
@ -257,6 +262,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.type).toBe("binaryReadSuccess");
expect(resource.isBinary()).toBe(true);
@ -268,6 +274,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.type).toBe("absentReadSuccess");
if (result.type !== "absentReadSuccess") return;
@ -280,6 +287,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.isError).toBe(true);
expect(result.type).toBe("serverError");
@ -291,6 +299,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.isError).toBe(true);
expect(result.type).toBe("unauthenticatedError");
@ -302,6 +311,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.isError).toBe(true);
expect(result.type).toBe("unexpectedHttpError");
@ -315,6 +325,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.isError).toBe(true);
if (!result.isError) return;
@ -335,6 +346,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.isError).toBe(true);
if (!result.isError) return;
@ -350,6 +362,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.isError).toBe(true);
if (!result.isError) return;
@ -368,6 +381,7 @@ describe("SolidLdoDataset", () => {
const result = await testRequestLoads(() => resource.read(), resource, {
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
});
expect(result.isError).toBe(true);
if (!result.isError) return;
@ -381,6 +395,8 @@ describe("SolidLdoDataset", () => {
const resource = solidLdoDataset.getResource(SAMPLE_DATA_URI);
expect(resource.isBinary()).toBe(undefined);
expect(resource.isDataResource()).toBe(undefined);
expect(resource.isUnfetched()).toBe(true);
expect(resource.isPresent()).toBe(undefined);
});
});
@ -396,6 +412,7 @@ describe("SolidLdoDataset", () => {
{
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
},
);
expect(result.type).toBe("containerReadSuccess");
@ -410,6 +427,7 @@ describe("SolidLdoDataset", () => {
{
isLoading: true,
isReading: true,
isDoingInitialFetch: true,
},
);
expect(result.type).toBe("dataReadSuccess");
@ -952,7 +970,54 @@ describe("SolidLdoDataset", () => {
namedNode(SAMPLE_DATA_URI),
),
),
).toBe(true);
expect(
solidLdoDataset.has(
createQuad(
namedNode("http://example.org/#green-goblin"),
namedNode("http://xmlns.com/foaf/0.1/name"),
literal("Green Goblin"),
namedNode(SAMPLE_DATA_URI),
),
),
).toBe(false);
});
it("applies only remove changes to the Pod", async () => {
const changes: DatasetChanges<Quad> = {
removed: createDataset([
createQuad(
namedNode("http://example.org/#green-goblin"),
namedNode("http://xmlns.com/foaf/0.1/name"),
literal("Green Goblin"),
namedNode(SAMPLE_DATA_URI),
),
]),
};
const result = await testRequestLoads(
() => solidLdoDataset.commitChangesToPod(changes),
solidLdoDataset.getResource(SAMPLE_DATA_URI),
{
isLoading: true,
isUpdating: true,
},
);
expect(result.type).toBe("aggregateSuccess");
const aggregateSuccess = result as AggregateSuccess<
ResourceSuccess<UpdateSuccess, Leaf>
>;
expect(aggregateSuccess.results.length).toBe(1);
expect(aggregateSuccess.results[0].type === "updateSuccess").toBe(true);
expect(
solidLdoDataset.has(
createQuad(
namedNode("http://example.org/#green-goblin"),
namedNode("http://xmlns.com/foaf/0.1/name"),
literal("Green Goblin"),
namedNode(SAMPLE_DATA_URI),
),
),
).toBe(false);
});
it("handles an HTTP error", async () => {
@ -1031,7 +1096,40 @@ describe("SolidLdoDataset", () => {
defaultGraph(),
),
),
);
).toBe(true);
});
it("batches data update changes", async () => {
const resource = solidLdoDataset.getResource(SAMPLE_DATA_URI);
const [, updateResult1, updateResult2] = await Promise.all([
resource.read(),
solidLdoDataset.commitChangesToPod({ removed: changes.removed }),
solidLdoDataset.commitChangesToPod({ added: changes.added }),
]);
expect(updateResult1.type).toBe("aggregateSuccess");
expect(updateResult2.type).toBe("aggregateSuccess");
expect(fetchMock).toHaveBeenCalledTimes(2);
expect(
solidLdoDataset.has(
createQuad(
namedNode("http://example.org/#green-goblin"),
namedNode("http://xmlns.com/foaf/0.1/name"),
literal("Norman Osborn"),
namedNode(SAMPLE_DATA_URI),
),
),
).toBe(true);
expect(
solidLdoDataset.has(
createQuad(
namedNode("http://example.org/#green-goblin"),
namedNode("http://xmlns.com/foaf/0.1/name"),
literal("Green Goblin"),
namedNode(SAMPLE_DATA_URI),
),
),
).toBe(false);
});
});
@ -1159,6 +1257,31 @@ describe("SolidLdoDataset", () => {
expect(result.isError).toBe(true);
expect(result.type).toBe("unexpectedResourceError");
});
it("batches the upload request while waiting on another request", async () => {
const resource = solidLdoDataset.getResource(SAMPLE2_DATA_URI);
const [, result1, result2] = await Promise.all([
resource.read(),
resource.createAndOverwrite(),
resource.createAndOverwrite(),
]);
expect(result1.type).toBe("createSuccess");
expect(result2.type).toBe("createSuccess");
expect(fetchMock).toHaveBeenCalledTimes(1);
});
it("batches the upload request while waiting on a similar request", async () => {
const resource = solidLdoDataset.getResource(SAMPLE2_DATA_URI);
const [result1, result2] = await Promise.all([
resource.createAndOverwrite(),
resource.createAndOverwrite(),
]);
expect(result1.type).toBe("createSuccess");
expect(result2.type).toBe("createSuccess");
expect(fetchMock).toHaveBeenCalledTimes(1);
});
});
describe("uploadIfAbsent", () => {

@ -11,9 +11,7 @@ import { mergeDatasetChanges } from "./mergeDatasetChanges";
*/
export class ProxyTransactionalDataset<InAndOutQuad extends BaseQuad = BaseQuad>
extends ExtendedDataset<InAndOutQuad>
implements
BulkEditableDataset<InAndOutQuad>,
TransactionalDataset<InAndOutQuad>
implements TransactionalDataset<InAndOutQuad>
{
/**
* The parent dataset that will be updated upon commit

@ -20,7 +20,7 @@ export interface BulkEditableDataset<InAndOutQuad extends BaseQuad = BaseQuad>
* A dataset that allows you to modify the dataset and
*/
export interface TransactionalDataset<InAndOutQuad extends BaseQuad = BaseQuad>
extends Dataset<InAndOutQuad, InAndOutQuad> {
extends BulkEditableDataset<InAndOutQuad> {
rollback(): void;
commit(): void;
getChanges(): DatasetChanges<InAndOutQuad>;

Loading…
Cancel
Save