Merge branch 'feat/orm-diffs' into refactor

refactor
Niko PLP 2 days ago
commit 26f6f3dc2a
  1. 1
      app/nextgraph/src/native-api.ts
  2. 2
      engine/net/src/orm.rs
  3. 42
      engine/verifier/src/orm/add_remove_triples.rs
  4. 327
      engine/verifier/src/orm/handle_backend_update.rs
  5. 217
      engine/verifier/src/orm/handle_frontend_update.rs
  6. 1
      engine/verifier/src/orm/initialize.rs
  7. 272
      engine/verifier/src/orm/process_changes.rs
  8. 14
      engine/verifier/src/orm/query.rs
  9. 49
      engine/verifier/src/orm/shape_validation.rs
  10. 11
      engine/verifier/src/orm/types.rs
  11. 2
      engine/verifier/src/request_processor.rs
  12. 90
      sdk/js/alien-deepsignals/src/deepSignal.ts
  13. 94
      sdk/js/alien-deepsignals/src/test/deepSignalOptions.test.ts
  14. 78
      sdk/js/alien-deepsignals/src/test/watchPatches.test.ts
  15. 5
      sdk/js/api-web/worker.js
  16. 6
      sdk/js/examples/multi-framework-signals/src/app/pages/index.astro
  17. 245
      sdk/js/examples/multi-framework-signals/src/frontends/react/HelloWorld.tsx
  18. 78
      sdk/js/examples/multi-framework-signals/src/frontends/svelte/HelloWorld.svelte
  19. 36
      sdk/js/examples/multi-framework-signals/src/frontends/utils/flattenObject.ts
  20. 24
      sdk/js/examples/multi-framework-signals/src/frontends/vue/HelloWorld.vue
  21. 37
      sdk/js/examples/multi-framework-signals/src/shapes/orm/basic.schema.ts
  22. 9
      sdk/js/examples/multi-framework-signals/src/shapes/orm/basic.shapeTypes.ts
  23. 22
      sdk/js/examples/multi-framework-signals/src/shapes/orm/basic.typings.ts
  24. 10
      sdk/js/examples/multi-framework-signals/src/shapes/orm/catShape.schema.ts
  25. 2
      sdk/js/examples/multi-framework-signals/src/shapes/orm/catShape.shapeTypes.ts
  26. 2
      sdk/js/examples/multi-framework-signals/src/shapes/orm/catShape.typings.ts
  27. 10
      sdk/js/examples/multi-framework-signals/src/shapes/orm/personShape.schema.ts
  28. 2
      sdk/js/examples/multi-framework-signals/src/shapes/orm/personShape.shapeTypes.ts
  29. 2
      sdk/js/examples/multi-framework-signals/src/shapes/orm/personShape.typings.ts
  30. 22
      sdk/js/examples/multi-framework-signals/src/shapes/orm/testShape.schema.ts
  31. 2
      sdk/js/examples/multi-framework-signals/src/shapes/orm/testShape.shapeTypes.ts
  32. 4
      sdk/js/examples/multi-framework-signals/src/shapes/orm/testShape.typings.ts
  33. 8
      sdk/js/examples/multi-framework-signals/src/shapes/shex/basic.shex
  34. 2
      sdk/js/examples/multi-framework-signals/src/shapes/shex/catShape.shex
  35. 2
      sdk/js/examples/multi-framework-signals/src/shapes/shex/personShape.shex
  36. 4
      sdk/js/examples/multi-framework-signals/src/shapes/shex/testShape.shex
  37. 18
      sdk/js/lib-wasm/src/lib.rs
  38. 542
      sdk/js/shex-orm/dist/ShexJTypes.d.ts
  39. 1
      sdk/js/shex-orm/dist/ShexJTypes.d.ts.map
  40. 1
      sdk/js/shex-orm/dist/ShexJTypes.js
  41. 8
      sdk/js/shex-orm/dist/build.d.ts
  42. 1
      sdk/js/shex-orm/dist/build.d.ts.map
  43. 62
      sdk/js/shex-orm/dist/build.js
  44. 3
      sdk/js/shex-orm/dist/cli.d.ts
  45. 1
      sdk/js/shex-orm/dist/cli.d.ts.map
  46. 15
      sdk/js/shex-orm/dist/cli.js
  47. 2
      sdk/js/shex-orm/dist/index.d.ts
  48. 1
      sdk/js/shex-orm/dist/index.d.ts.map
  49. 1
      sdk/js/shex-orm/dist/index.js
  50. 12
      sdk/js/shex-orm/dist/schema-converter/converter.d.ts
  51. 1
      sdk/js/shex-orm/dist/schema-converter/converter.d.ts.map
  52. 69
      sdk/js/shex-orm/dist/schema-converter/converter.js
  53. 8
      sdk/js/shex-orm/dist/schema-converter/templates/schema.ejs
  54. 14
      sdk/js/shex-orm/dist/schema-converter/templates/shapeTypes.ejs
  55. 14
      sdk/js/shex-orm/dist/schema-converter/templates/typings.ejs
  56. 348
      sdk/js/shex-orm/dist/schema-converter/transformers/ShexJSchemaTransformer.d.ts
  57. 1
      sdk/js/shex-orm/dist/schema-converter/transformers/ShexJSchemaTransformer.d.ts.map
  58. 208
      sdk/js/shex-orm/dist/schema-converter/transformers/ShexJSchemaTransformer.js
  59. 366
      sdk/js/shex-orm/dist/schema-converter/transformers/ShexJTypingTransformer.d.ts
  60. 1
      sdk/js/shex-orm/dist/schema-converter/transformers/ShexJTypingTransformer.d.ts.map
  61. 550
      sdk/js/shex-orm/dist/schema-converter/transformers/ShexJTypingTransformer.js
  62. 5
      sdk/js/shex-orm/dist/schema-converter/util/ShapeInterfaceDeclaration.d.ts
  63. 1
      sdk/js/shex-orm/dist/schema-converter/util/ShapeInterfaceDeclaration.d.ts.map
  64. 1
      sdk/js/shex-orm/dist/schema-converter/util/ShapeInterfaceDeclaration.js
  65. 8
      sdk/js/shex-orm/dist/schema-converter/util/annotateReadablePredicates.d.ts
  66. 1
      sdk/js/shex-orm/dist/schema-converter/util/annotateReadablePredicates.d.ts.map
  67. 129
      sdk/js/shex-orm/dist/schema-converter/util/annotateReadablePredicates.js
  68. 3
      sdk/js/shex-orm/dist/schema-converter/util/dedupeObjectTypeMembers.d.ts
  69. 1
      sdk/js/shex-orm/dist/schema-converter/util/dedupeObjectTypeMembers.d.ts.map
  70. 38
      sdk/js/shex-orm/dist/schema-converter/util/dedupeObjectTypeMembers.js
  71. 4
      sdk/js/shex-orm/dist/schema-converter/util/getRdfTypesForTripleConstraint.d.ts
  72. 1
      sdk/js/shex-orm/dist/schema-converter/util/getRdfTypesForTripleConstraint.d.ts.map
  73. 89
      sdk/js/shex-orm/dist/schema-converter/util/getRdfTypesForTripleConstraint.js
  74. 37
      sdk/js/shex-orm/dist/types.d.ts
  75. 1
      sdk/js/shex-orm/dist/types.d.ts.map
  76. 1
      sdk/js/shex-orm/dist/types.js
  77. 2
      sdk/js/shex-orm/dist/util/forAllShapes.d.ts
  78. 1
      sdk/js/shex-orm/dist/util/forAllShapes.d.ts.map
  79. 17
      sdk/js/shex-orm/dist/util/forAllShapes.js
  80. 26
      sdk/js/shex-orm/src/schema-converter/transformers/ShexJSchemaTransformer.ts
  81. 12
      sdk/js/signals/src/connector/applyDiff.test.ts
  82. 2
      sdk/js/signals/src/connector/applyDiff.ts
  83. 113
      sdk/js/signals/src/connector/ormConnectionHandler.ts
  84. 12
      sdk/js/signals/src/frontendAdapters/react/useShape.ts
  85. 113
      sdk/js/signals/src/frontendAdapters/vue/useDeepSignal.ts
  86. 147
      sdk/rust/src/tests/orm_apply_patches.rs
  87. 58
      sdk/rust/src/tests/orm_create_patches.rs

@ -18,6 +18,7 @@ const mapping = {
"wallet_gen_shuffle_for_pazzle_opening": ["pazzle_length"], "wallet_gen_shuffle_for_pazzle_opening": ["pazzle_length"],
"wallet_gen_shuffle_for_pin": [], "wallet_gen_shuffle_for_pin": [],
"wallet_open_with_pazzle": ["wallet","pazzle","pin"], "wallet_open_with_pazzle": ["wallet","pazzle","pin"],
"wallet_open_with_password": ["wallet","password"],
"wallet_open_with_mnemonic_words": ["wallet","mnemonic_words","pin"], "wallet_open_with_mnemonic_words": ["wallet","mnemonic_words","pin"],
"wallet_open_with_mnemonic": ["wallet","mnemonic","pin"], "wallet_open_with_mnemonic": ["wallet","mnemonic","pin"],
"wallet_was_opened": ["opened_wallet"], "wallet_was_opened": ["opened_wallet"],

@ -41,8 +41,10 @@ pub enum OrmPatchType {
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OrmPatch { pub struct OrmPatch {
pub op: OrmPatchOp, pub op: OrmPatchOp,
#[serde(skip_serializing_if = "Option::is_none")]
pub valType: Option<OrmPatchType>, pub valType: Option<OrmPatchType>,
pub path: String, pub path: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub value: Option<Value>, // TODO: Improve type pub value: Option<Value>, // TODO: Improve type
} }

@ -44,7 +44,6 @@ pub fn add_remove_triples(
tracked_predicates: HashMap::new(), tracked_predicates: HashMap::new(),
parents: HashMap::new(), parents: HashMap::new(),
valid: OrmTrackedSubjectValidity::Pending, valid: OrmTrackedSubjectValidity::Pending,
prev_valid: OrmTrackedSubjectValidity::Pending,
subject_iri: subject_iri.to_string(), subject_iri: subject_iri.to_string(),
shape: shape.clone(), shape: shape.clone(),
})) }))
@ -208,47 +207,8 @@ pub fn add_remove_triples(
} else { } else {
panic!("tracked_predicate.current_literals must not be None."); panic!("tracked_predicate.current_literals must not be None.");
} }
} else if tracked_predicate
.schema
.dataTypes
.iter()
.any(|dt| dt.valType == OrmSchemaValType::shape)
{
// Remove parent from child and child from tracked children.
// If predicate is of type shape, register (parent -> child) links so that
// nested subjects can later be (lazily) fetched / validated.
let shapes_to_process: Vec<_> = tracked_predicate
.schema
.dataTypes
.iter()
.filter_map(|dt| {
if dt.valType == OrmSchemaValType::shape {
dt.shape.clone()
} else {
None
}
})
.collect();
if let BasicType::Str(obj_iri) = &val_removed {
// Remove link to children
tracked_predicate
.tracked_children
.retain(|ts| *obj_iri != ts.read().unwrap().subject_iri);
for shape_iri in shapes_to_process {
// Get or create object's tracked subject struct.
let child_shape = schema.get(&shape_iri).unwrap();
// Remove self from parent
get_or_create_tracked_subject(&obj_iri, child_shape, tracked_subjects)
.write()
.unwrap()
.parents
.remove(subject_iri);
}
}
} }
// Parent-child link removal is handled during cleanup since we need to keep them for creating patches.
} }
Ok(()) Ok(())
} }

@ -68,10 +68,29 @@ impl Verifier {
}) })
.collect(); .collect();
log_info!(
"[orm_backend_update] called with #adds, #removes: {}, {}",
triple_inserts.len(),
triple_removes.len()
);
log_info!(
"[orm_backend_update] Total subscriptions scopes: {}",
self.orm_subscriptions.len()
);
let mut scopes = vec![]; let mut scopes = vec![];
for (scope, subs) in self.orm_subscriptions.iter_mut() { for (scope, subs) in self.orm_subscriptions.iter_mut() {
// Remove old subscriptions // Remove old subscriptions
let initial_sub_count = subs.len();
subs.retain(|sub| !sub.sender.is_closed()); subs.retain(|sub| !sub.sender.is_closed());
let retained_sub_count = subs.len();
log_info!(
"[orm_backend_update] Scope {:?}: {} subs ({} retained after cleanup)",
scope,
initial_sub_count,
retained_sub_count
);
if !(scope.target == NuriTargetV0::UserSite if !(scope.target == NuriTargetV0::UserSite
|| scope || scope
@ -80,11 +99,22 @@ impl Verifier {
.map_or(false, |ol| overlaylink == *ol) .map_or(false, |ol| overlaylink == *ol)
|| scope.target == NuriTargetV0::Repo(repo_id)) || scope.target == NuriTargetV0::Repo(repo_id))
{ {
log_info!(
"[orm_backend_update] SKIPPING scope {:?} - does not match repo_id={:?} or overlay={:?}",
scope,
repo_id,
overlay_id
);
continue; continue;
} }
log_info!(
"[orm_backend_update] PROCESSING scope {:?} - matches criteria",
scope
);
// prepare to apply updates to tracked subjects and record the changes. // prepare to apply updates to tracked subjects and record the changes.
let root_shapes_and_tracked_subjects = subs let root_shapes_and_tracked_shapes = subs
.iter() .iter()
.map(|sub| { .map(|sub| {
( (
@ -98,33 +128,76 @@ impl Verifier {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
scopes.push((scope.clone(), root_shapes_and_tracked_subjects)); scopes.push((scope.clone(), root_shapes_and_tracked_shapes));
} }
log_debug!( log_debug!(
"[orm_backend_update], creating patch objects for #scopes {}", "[orm_backend_update], creating patch objects for #scopes {}",
scopes.len() scopes.len()
); );
if scopes.is_empty() {
log_info!("[orm_backend_update] NO SCOPES MATCHED - returning early without patches");
return;
}
for (scope, shapes_zip) in scopes { for (scope, shapes_zip) in scopes {
let mut orm_changes: OrmChanges = HashMap::new(); let mut orm_changes: OrmChanges = HashMap::new();
log_info!(
"[orm_backend_update] Processing scope {:?} with {} shape types",
scope,
shapes_zip.len()
);
// Apply the changes to tracked subjects. // Apply the changes to tracked subjects.
for (root_shape_arc, all_shapes) in shapes_zip { for (root_shape_arc, all_tracked_shapes) in shapes_zip {
let shape_iri = root_shape_arc.iri.clone(); let shape_iri = root_shape_arc.iri.clone();
log_info!(
"[orm_backend_update] Calling process_changes_for_shape_and_session for shape={}, session={}",
shape_iri,
session_id
);
let _ = self.process_changes_for_shape_and_session( let _ = self.process_changes_for_shape_and_session(
&scope, &scope,
&shape_iri, &shape_iri,
all_shapes, if all_tracked_shapes.len() > 0 {
all_tracked_shapes
} else {
// If all tracked subjects are empty, we need to add the root shape manually.
vec![root_shape_arc]
},
session_id, session_id,
&triple_inserts, &triple_inserts,
&triple_removes, &triple_removes,
&mut orm_changes, &mut orm_changes,
false, false,
); );
log_info!(
"[orm_backend_update] After process_changes_for_shape_and_session: orm_changes has {} shapes",
orm_changes.len()
);
}
log_info!(
"[orm_backend_update] Total orm_changes for scope: {} shapes with changes",
orm_changes.len()
);
for (shape_iri, subject_changes) in &orm_changes {
log_info!(
"[orm_backend_update] Shape {}: {} subjects changed",
shape_iri,
subject_changes.len()
);
} }
let subs = self.orm_subscriptions.get(&scope).unwrap(); let subs = self.orm_subscriptions.get_mut(&scope).unwrap();
for sub in subs.iter() { log_info!(
"[orm_backend_update] Processing {} subscriptions for this scope",
subs.len()
);
for sub in subs.iter_mut() {
log_debug!( log_debug!(
"Applying changes to subscription with nuri {} and shape {}", "Applying changes to subscription with nuri {} and shape {}",
sub.nuri.repo(), sub.nuri.repo(),
@ -158,41 +231,86 @@ impl Verifier {
// Process changes for this subscription // Process changes for this subscription
// Iterate over all changes and create patches // Iterate over all changes and create patches
log_info!(
"[orm_backend_update] Iterating over {} shapes in orm_changes",
orm_changes.len()
);
for (shape_iri, subject_changes) in &orm_changes { for (shape_iri, subject_changes) in &orm_changes {
log_info!(
"[orm_backend_update] Processing shape {}: {} subject changes",
shape_iri,
subject_changes.len()
);
for (subject_iri, change) in subject_changes { for (subject_iri, change) in subject_changes {
log_debug!( log_debug!(
"Patch creating for subject change {}. #changed preds: {}", "Patch creating for subject change x shape {} x {}. #changed preds: {}",
subject_iri, subject_iri,
shape_iri,
change.predicates.len() change.predicates.len()
); );
// Get the tracked subject for this (subject, shape) pair // Get the tracked subject for this (subject, shape) pair
let tracked_subject = sub let Some(tracked_subject) = sub
.tracked_subjects .tracked_subjects
.get(subject_iri) .get(subject_iri)
.and_then(|shapes| shapes.get(shape_iri)) .and_then(|shapes| shapes.get(shape_iri))
.map(|ts| ts.read().unwrap()) .map(|ts| ts.read().unwrap())
.unwrap(); else {
// We might not be tracking this subject x shape combination. Then, there is nothing to do.
log_info!(
"[orm_backend_update] SKIPPING subject {} x shape {} - not tracked in this subscription",
subject_iri,
shape_iri
);
continue;
};
log_debug!(
" - Validity check: prev_valid={:?}, valid={:?}",
change.prev_valid,
tracked_subject.valid
);
// Now we have the tracked predicate (containing the shape) and the change. // Now we have the tracked predicate (containing the shape) and the change.
// Check validity changes // Check validity changes
if tracked_subject.prev_valid == OrmTrackedSubjectValidity::Invalid if change.prev_valid == OrmTrackedSubjectValidity::Invalid
&& tracked_subject.valid == OrmTrackedSubjectValidity::Invalid && tracked_subject.valid == OrmTrackedSubjectValidity::Invalid
{ {
// Is the subject invalid and was it before? There is nothing we need to inform about. // Is the subject invalid and was it before? There is nothing we need to inform about.
log_info!(
"[orm_backend_update] SKIPPING subject {} - was and still is Invalid",
subject_iri
);
continue; continue;
} else if tracked_subject.prev_valid == OrmTrackedSubjectValidity::Valid } else if change.prev_valid == OrmTrackedSubjectValidity::Valid
&& tracked_subject.valid == OrmTrackedSubjectValidity::Invalid && tracked_subject.valid != OrmTrackedSubjectValidity::Valid
|| tracked_subject.valid == OrmTrackedSubjectValidity::Untracked
{ {
log_info!(
"[orm_backend_update] Subject {} became invalid or untracked (prev={:?}, now={:?})",
subject_iri,
change.prev_valid,
tracked_subject.valid
);
// Has the subject become invalid or untracked? // Has the subject become invalid or untracked?
// Check if any parent is also being deleted - if so, skip this deletion patch
// because the parent deletion will implicitly delete the children
let has_parent_being_deleted =
tracked_subject.parents.values().any(|parent_arc| {
let parent_ts = parent_arc.read().unwrap();
parent_ts.valid == OrmTrackedSubjectValidity::ToDelete
});
log_info!(
"[orm_backend_update] has_parent_being_deleted={}",
has_parent_being_deleted
);
if !has_parent_being_deleted {
// We add a patch, deleting the object at its root. // We add a patch, deleting the object at its root.
let mut path: Vec<String>; // Start with an empty path - the subject IRI will be added in build_path_to_root_and_create_patches
if tracked_subject.parents.is_empty() { let mut path = vec![];
// If this is a root object, we need to add the object's id itself.
path = vec![tracked_subject.subject_iri.clone()];
} else {
path = vec![];
}
build_path_to_root_and_create_patches( build_path_to_root_and_create_patches(
&tracked_subject, &tracked_subject,
@ -202,8 +320,20 @@ impl Verifier {
(OrmPatchOp::remove, Some(OrmPatchType::object), None, None), (OrmPatchOp::remove, Some(OrmPatchType::object), None, None),
&mut patches, &mut patches,
&mut objects_to_create, &mut objects_to_create,
&change.prev_valid,
&orm_changes,
&tracked_subject.subject_iri,
); );
}
} else { } else {
log_info!(
"[orm_backend_update] Subject {} is valid or became valid (prev={:?}, now={:?}), processing {} predicate changes",
subject_iri,
change.prev_valid,
tracked_subject.valid,
change.predicates.len()
);
// The subject is valid or has become valid. // The subject is valid or has become valid.
// Process each predicate change // Process each predicate change
for (_pred_iri, pred_change) in &change.predicates { for (_pred_iri, pred_change) in &change.predicates {
@ -221,6 +351,12 @@ impl Verifier {
// Get the diff operations for this predicate change // Get the diff operations for this predicate change
let diff_ops = create_diff_ops_from_predicate_change(pred_change); let diff_ops = create_diff_ops_from_predicate_change(pred_change);
log_info!(
"[orm_backend_update] Created {} diff_ops for predicate {}",
diff_ops.len(),
_pred_iri
);
// For each diff operation, traverse up to the root to build the path // For each diff operation, traverse up to the root to build the path
for diff_op in diff_ops { for diff_op in diff_ops {
let mut path = vec![pred_name.clone()]; let mut path = vec![pred_name.clone()];
@ -234,6 +370,9 @@ impl Verifier {
diff_op, diff_op,
&mut patches, &mut patches,
&mut objects_to_create, &mut objects_to_create,
&change.prev_valid,
&orm_changes,
&tracked_subject.subject_iri,
); );
} }
} }
@ -241,12 +380,18 @@ impl Verifier {
} }
} }
log_info!(
"[orm_backend_update] Finished iterating shapes. Created {} patches, {} objects_to_create",
patches.len(),
objects_to_create.len()
);
// Create patches for objects that need to be created // Create patches for objects that need to be created
// These are patches with {op: add, valType: object, value: Null, path: ...} // These are patches with {op: add, valType: object, value: Null, path: ...}
// Sort by path length (shorter first) to ensure parent objects are created before children // Sort by path length (shorter first) to ensure parent objects are created before children
let mut sorted_objects: Vec<_> = objects_to_create.iter().collect(); let mut sorted_objects: Vec<_> = objects_to_create.iter().collect();
sorted_objects.sort_by_key(|(path_segments, _)| path_segments.len()); sorted_objects.sort_by_key(|(path_segments, _)| path_segments.len());
let mut object_create_patches = vec![];
for (path_segments, maybe_iri) in sorted_objects { for (path_segments, maybe_iri) in sorted_objects {
let escaped_path: Vec<String> = path_segments let escaped_path: Vec<String> = path_segments
.iter() .iter()
@ -254,8 +399,8 @@ impl Verifier {
.collect(); .collect();
let json_pointer = format!("/{}", escaped_path.join("/")); let json_pointer = format!("/{}", escaped_path.join("/"));
// Always create the object itself // Always create the object itself.
patches.push(OrmPatch { object_create_patches.push(OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
valType: Some(OrmPatchType::object), valType: Some(OrmPatchType::object),
path: json_pointer.clone(), path: json_pointer.clone(),
@ -264,7 +409,7 @@ impl Verifier {
// If this object has an IRI (it's a real subject), add the id field // If this object has an IRI (it's a real subject), add the id field
if let Some(iri) = maybe_iri { if let Some(iri) = maybe_iri {
patches.push(OrmPatch { object_create_patches.push(OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
valType: None, valType: None,
path: format!("{}/@id", json_pointer), path: format!("{}/@id", json_pointer),
@ -273,53 +418,73 @@ impl Verifier {
} }
} }
log_info!(
"[orm_backend_update] Created {} object_create_patches",
object_create_patches.len()
);
let total_patches = object_create_patches.len() + patches.len();
log_info!(
"[orm_backend_update] SENDING {} total patches to frontend (session={}, nuri={}, shape={})",
total_patches,
session_id,
sub.nuri.repo(),
sub.shape_type.shape
);
// Send response with patches. // Send response with patches.
let _ = sub let _ = sub
.sender .sender
.clone() .clone()
.send(AppResponse::V0(AppResponseV0::OrmUpdate(patches.to_vec()))) .send(AppResponse::V0(AppResponseV0::OrmUpdate(
[object_create_patches, patches].concat(),
)))
.await; .await;
log_info!("[orm_backend_update] Patches sent successfully");
// Cleanup (remove tracked subjects to be deleted).
Verifier::cleanup_tracked_subjects(sub);
} }
log_info!(
"[orm_backend_update] Finished processing all subscriptions for scope {:?}",
scope
);
} }
log_info!("[orm_backend_update] COMPLETE - processed all scopes");
} }
} }
/// Queue patches for a newly valid tracked subject. /// Queue patches for a newly valid tracked subject.
/// This handles creating object patches and id field patches for subjects that have become valid. /// This handles creating object patches and id field patches for subjects that have become valid.
fn queue_patches_for_newly_valid_subject( fn queue_objects_to_create(
tracked_subject: &OrmTrackedSubject, current_ts: &OrmTrackedSubject,
tracked_subjects: &HashMap<String, HashMap<String, Arc<RwLock<OrmTrackedSubject>>>>, tracked_subjects: &HashMap<String, HashMap<String, Arc<RwLock<OrmTrackedSubject>>>>,
root_shape: &String, root_shape: &String,
path: &[String], path: &[String],
patches: &mut Vec<OrmPatch>,
objects_to_create: &mut HashSet<(Vec<String>, Option<SubjectIri>)>, objects_to_create: &mut HashSet<(Vec<String>, Option<SubjectIri>)>,
orm_changes: &OrmChanges,
child_iri: &String,
) { ) {
// Check if we're at a root subject or need to traverse to parents // Check if we're at a root subject or need to traverse to parents
if tracked_subject.parents.is_empty() || tracked_subject.shape.iri == *root_shape { if current_ts.parents.is_empty() || current_ts.shape.iri == *root_shape {
// Register object for creation. // We are at the root. Insert without the last element (which is the property name).
// Path to object consists of this subject's iri and the path except for the last element. objects_to_create.insert((path[..path.len() - 1].to_vec(), Some(child_iri.clone())));
let mut path_to_subject = vec![tracked_subject.subject_iri.clone()];
if path.len() > 1 {
path_to_subject.extend_from_slice(&path[..path.len() - 1]);
}
// log_debug!("Queuing object creation for path: {:?}", path_to_subject);
// Always create the object itself with its IRI
objects_to_create.insert((
path_to_subject.clone(),
Some(tracked_subject.subject_iri.clone()),
));
} else { } else {
// Not at root: traverse to parents and create object patches along the way // Not at root: traverse to parents and create object patches along the way
for (_parent_iri, parent_tracked_subject) in tracked_subject.parents.iter() { for (_parent_iri, parent_tracked_subject) in current_ts.parents.iter() {
let parent_ts = parent_tracked_subject.read().unwrap(); let parent_ts = parent_tracked_subject.read().unwrap();
if let Some(new_path) = build_path_segment_for_parent(tracked_subject, &parent_ts, path) if let Some(new_path) = build_path_segment_for_parent(current_ts, &parent_ts, path) {
{
// Check if the parent's predicate is multi-valued and if no siblings were previously valid // Check if the parent's predicate is multi-valued and if no siblings were previously valid
let should_create_parent_predicate_object = let should_create_parent_predicate_object =
check_should_create_parent_predicate_object(tracked_subject, &parent_ts); check_should_create_parent_predicate_object(
current_ts,
&parent_ts,
orm_changes,
);
if should_create_parent_predicate_object { if should_create_parent_predicate_object {
// Need to create an intermediate object for the multi-valued predicate // Need to create an intermediate object for the multi-valued predicate
@ -331,18 +496,18 @@ fn queue_patches_for_newly_valid_subject(
} }
// Recurse to the parent first // Recurse to the parent first
queue_patches_for_newly_valid_subject( queue_objects_to_create(
&parent_ts, &parent_ts,
tracked_subjects, tracked_subjects,
root_shape, root_shape,
&new_path, &new_path,
patches,
objects_to_create, objects_to_create,
orm_changes,
child_iri,
); );
// Register this object for creation with its IRI // Register this object for creation with its IRI
objects_to_create objects_to_create.insert((new_path.clone(), Some(current_ts.subject_iri.clone())));
.insert((new_path.clone(), Some(tracked_subject.subject_iri.clone())));
} }
} }
} }
@ -353,6 +518,7 @@ fn queue_patches_for_newly_valid_subject(
fn check_should_create_parent_predicate_object( fn check_should_create_parent_predicate_object(
tracked_subject: &OrmTrackedSubject, tracked_subject: &OrmTrackedSubject,
parent_ts: &OrmTrackedSubject, parent_ts: &OrmTrackedSubject,
orm_changes: &OrmChanges,
) -> bool { ) -> bool {
// Find the predicate schema linking parent to this subject // Find the predicate schema linking parent to this subject
for pred_arc in &parent_ts.shape.predicates { for pred_arc in &parent_ts.shape.predicates {
@ -369,11 +535,22 @@ fn check_should_create_parent_predicate_object(
let is_multi = pred_arc.maxCardinality > 1 || pred_arc.maxCardinality == -1; let is_multi = pred_arc.maxCardinality > 1 || pred_arc.maxCardinality == -1;
if is_multi { if is_multi {
// Check if any siblings were previously valid // Check if any siblings were previously valid.
// If not, the intermediate object does not exist yet.
let any_sibling_was_valid = tp.tracked_children.iter().any(|child| { let any_sibling_was_valid = tp.tracked_children.iter().any(|child| {
let child_read = child.read().unwrap(); let child_read = child.read().unwrap();
child_read.subject_iri != tracked_subject.subject_iri if child_read.subject_iri == tracked_subject.subject_iri {
&& child_read.prev_valid == OrmTrackedSubjectValidity::Valid return false;
}
// Look up the prev_valid from orm_changes
let prev_valid = orm_changes
.get(&child_read.shape.iri)
.and_then(|subjects| subjects.get(&child_read.subject_iri))
.map(|change| &change.prev_valid)
.unwrap_or(&OrmTrackedSubjectValidity::Valid);
*prev_valid == OrmTrackedSubjectValidity::Valid
}); });
return !any_sibling_was_valid; return !any_sibling_was_valid;
@ -442,14 +619,21 @@ fn build_path_to_root_and_create_patches(
), ),
patches: &mut Vec<OrmPatch>, patches: &mut Vec<OrmPatch>,
objects_to_create: &mut HashSet<(Vec<String>, Option<SubjectIri>)>, objects_to_create: &mut HashSet<(Vec<String>, Option<SubjectIri>)>,
prev_valid: &OrmTrackedSubjectValidity,
orm_changes: &OrmChanges,
child_iri: &String,
) { ) {
log_debug!( log_debug!(
" - build path, ts: {}, path {:?}", " - build path, ts: {}, path {:?}, #parents: {}, shape: {}",
tracked_subject.subject_iri, tracked_subject.subject_iri,
path path,
tracked_subject.parents.len(),
tracked_subject.shape.iri
); );
// If the tracked subject is not valid, we don't create patches for it // If the tracked subject is not valid, we don't create patches for it
if tracked_subject.valid != OrmTrackedSubjectValidity::Valid { // EXCEPT when we're removing the object itself (indicated by op == remove and valType == object)
let is_delete_op = diff_op.0 == OrmPatchOp::remove && diff_op.1 == Some(OrmPatchType::object);
if tracked_subject.valid != OrmTrackedSubjectValidity::Valid && !is_delete_op {
return; return;
} }
@ -457,12 +641,19 @@ fn build_path_to_root_and_create_patches(
if tracked_subject.parents.is_empty() || tracked_subject.shape.iri == *root_shape { if tracked_subject.parents.is_empty() || tracked_subject.shape.iri == *root_shape {
// Build the final JSON Pointer path // Build the final JSON Pointer path
let escaped_path: Vec<String> = path.iter().map(|seg| escape_json_pointer(seg)).collect(); let escaped_path: Vec<String> = path.iter().map(|seg| escape_json_pointer(seg)).collect();
// Always add the root subject to the path.
let json_pointer = format!( // Create the JSON pointer path
let json_pointer = if escaped_path.is_empty() {
// For root object operations (no path elements), just use the subject IRI
format!("/{}", escape_json_pointer(&tracked_subject.subject_iri))
} else {
// For nested operations, include both subject and path
format!(
"/{}/{}", "/{}/{}",
escape_json_pointer(&tracked_subject.subject_iri), escape_json_pointer(&tracked_subject.subject_iri),
escaped_path.join("/") escaped_path.join("/")
); )
};
// Create the patch for the actual value change // Create the patch for the actual value change
patches.push(OrmPatch { patches.push(OrmPatch {
@ -473,16 +664,17 @@ fn build_path_to_root_and_create_patches(
}); });
// If the subject is newly valid, now we have the full path to queue its creation. // If the subject is newly valid, now we have the full path to queue its creation.
if tracked_subject.prev_valid != OrmTrackedSubjectValidity::Valid { if *prev_valid != OrmTrackedSubjectValidity::Valid {
let mut final_path = vec![tracked_subject.subject_iri.clone()]; let mut final_path = vec![tracked_subject.subject_iri.clone()];
final_path.extend_from_slice(path); final_path.extend_from_slice(path);
queue_patches_for_newly_valid_subject( queue_objects_to_create(
tracked_subject, tracked_subject,
tracked_subjects, tracked_subjects,
root_shape, root_shape,
&final_path, &final_path,
patches,
objects_to_create, objects_to_create,
orm_changes,
child_iri,
); );
} }
@ -505,6 +697,15 @@ fn build_path_to_root_and_create_patches(
diff_op.clone(), diff_op.clone(),
patches, patches,
objects_to_create, objects_to_create,
prev_valid,
orm_changes,
child_iri,
);
} else {
log_debug!(
" - build_path_segment_for_parent returned None for parent: {}, child: {}",
parent_ts.subject_iri,
tracked_subject.subject_iri
); );
} }
} }

@ -24,10 +24,7 @@ use crate::types::GraphQuadsPatch;
use crate::verifier::*; use crate::verifier::*;
impl Verifier { impl Verifier {
/// After creating new objects (without an id) in JS-land, ///
/// we send the generated id for those back.
/// If something went wrong (revert_inserts / revert_removes not empty),
/// we send a JSON patch back to revert the made changes.
pub(crate) async fn orm_update_self( pub(crate) async fn orm_update_self(
&mut self, &mut self,
scope: &NuriV0, scope: &NuriV0,
@ -40,15 +37,18 @@ impl Verifier {
let (mut sender, _orm_subscription) = let (mut sender, _orm_subscription) =
self.get_first_orm_subscription_sender_for(scope, Some(&shape_iri), Some(&session_id))?; self.get_first_orm_subscription_sender_for(scope, Some(&shape_iri), Some(&session_id))?;
log_info!("[orm_update_self] got subscription");
// Revert changes, if there. // Revert changes, if there.
if revert_inserts.len() > 0 || revert_removes.len() > 0 { if revert_inserts.len() > 0 || revert_removes.len() > 0 {
let revert_changes = GraphQuadsPatch { let revert_changes = GraphQuadsPatch {
inserts: revert_removes, inserts: revert_removes,
removes: revert_inserts, removes: revert_inserts,
}; };
log_info!("[orm_update_self] Reverting triples, calling orm_backend_update. TODO");
// TODO: Call with correct params. // TODO
// self.orm_backend_update(session_id, scope, "", revert_changes) // self.orm_backend_update(session_id, scope, "", revert_changes);
log_info!("[orm_update_self] Triples reverted.");
} }
Ok(()) Ok(())
@ -63,7 +63,7 @@ impl Verifier {
diff: OrmPatches, diff: OrmPatches,
) -> Result<(), String> { ) -> Result<(), String> {
log_info!( log_info!(
"frontend_update_orm session={} shape={} diff={:?}", "[orm_frontend_update] session={} shape={} diff={:?}",
session_id, session_id,
shape_iri, shape_iri,
diff diff
@ -74,12 +74,17 @@ impl Verifier {
self.get_first_orm_subscription_for(scope, Some(&shape_iri), Some(&session_id)); self.get_first_orm_subscription_for(scope, Some(&shape_iri), Some(&session_id));
let doc_nuri = orm_subscription.nuri.clone(); let doc_nuri = orm_subscription.nuri.clone();
log_info!("[orm_frontend_update] got subscription");
let sparql_update = create_sparql_update_query_for_diff(orm_subscription, diff); let sparql_update = create_sparql_update_query_for_diff(orm_subscription, diff);
log_info!(
"[orm_frontend_update] created sparql_update query:\n{}",
sparql_update
);
(doc_nuri, sparql_update) (doc_nuri, sparql_update)
}; };
log_debug!("Created SPARQL query for patches:\n{}", sparql_update);
match self match self
.process_sparql_update( .process_sparql_update(
&doc_nuri, &doc_nuri,
@ -90,8 +95,17 @@ impl Verifier {
) )
.await .await
{ {
Err(e) => Err(e), Err(e) => {
log_info!("[orm_frontend_update] query failed");
Err(e)
}
Ok((_, revert_inserts, revert_removes, skolemnized_blank_nodes)) => { Ok((_, revert_inserts, revert_removes, skolemnized_blank_nodes)) => {
log_info!(
"[orm_frontend_update] query successful. Reverts? {}",
revert_inserts.len()
);
if !revert_inserts.is_empty() if !revert_inserts.is_empty()
|| !revert_removes.is_empty() || !revert_removes.is_empty()
|| !skolemnized_blank_nodes.is_empty() || !skolemnized_blank_nodes.is_empty()
@ -117,6 +131,11 @@ fn create_sparql_update_query_for_diff(
orm_subscription: &OrmSubscription, orm_subscription: &OrmSubscription,
diff: OrmPatches, diff: OrmPatches,
) -> String { ) -> String {
log_info!(
"[create_sparql_update_query_for_diff] Starting with {} patches",
diff.len()
);
// First sort patches. // First sort patches.
// - Process delete patches first. // - Process delete patches first.
// - Process object creation add operations before rest, to ensure potential blank nodes are created. // - Process object creation add operations before rest, to ensure potential blank nodes are created.
@ -124,17 +143,27 @@ fn create_sparql_update_query_for_diff(
.iter() .iter()
.filter(|patch| patch.op == OrmPatchOp::remove) .filter(|patch| patch.op == OrmPatchOp::remove)
.collect(); .collect();
let add_object_patches: Vec<_> = diff log_info!(
.iter() "[create_sparql_update_query_for_diff] Found {} delete patches",
.filter(|patch| { delete_patches.len()
patch.op == OrmPatchOp::add );
&& match &patch.valType {
Some(vt) => *vt == OrmPatchType::object, // let add_object_patches: Vec<_> = diff
_ => false, // .iter()
} // .filter(|patch| {
}) // patch.op == OrmPatchOp::add
.collect(); // && match &patch.valType {
let add_literal_patches: Vec<_> = diff // Some(vt) => *vt == OrmPatchType::object,
// _ => false,
// }
// })
// .collect();
// log_info!(
// "[create_sparql_update_query_for_diff] Found {} add object patches",
// add_object_patches.len()
// );
let add_primitive_patches: Vec<_> = diff
.iter() .iter()
.filter(|patch| { .filter(|patch| {
patch.op == OrmPatchOp::add patch.op == OrmPatchOp::add
@ -144,26 +173,37 @@ fn create_sparql_update_query_for_diff(
} }
}) })
.collect(); .collect();
log_info!(
"[create_sparql_update_query_for_diff] Found {} add primitive patches",
add_primitive_patches.len()
);
// For each diff op, we create a separate INSERT or DELETE block. // For each diff op, we create a separate INSERT or DELETE block.
let mut sparql_sub_queries: Vec<String> = vec![]; let mut sparql_sub_queries: Vec<String> = vec![];
// Create delete statements. // Create delete statements.
// //
for del_patch in delete_patches.iter() { for (idx, del_patch) in delete_patches.iter().enumerate() {
log_info!(
"[create_sparql_update_query_for_diff] Processing delete patch {}/{}: path={}",
idx + 1,
delete_patches.len(),
del_patch.path
);
let mut var_counter: i32 = 0; let mut var_counter: i32 = 0;
let (where_statements, target, _pred_schema) = let (where_statements, target, _pred_schema) =
create_where_statements_for_patch(&del_patch, &mut var_counter, &orm_subscription); create_where_statements_for_patch(&del_patch, &mut var_counter, &orm_subscription);
let (subject_var, target_predicate, target_object) = target; let (subject_var, target_predicate, target_object) = target;
log_info!("[create_sparql_update_query_for_diff] Delete patch where_statements: {:?}, subject_var={}, target_predicate={}, target_object={:?}",
where_statements, subject_var, target_predicate, target_object);
let delete_statement; let delete_statement;
if let Some(target_object) = target_object { if let Some(target_object) = target_object {
// Delete the link to exactly one object (IRI referenced in path, i.e. target_object) // Delete the link to exactly one object (IRI referenced in path, i.e. target_object)
delete_statement = format!( delete_statement = format!(" {} {} {} .", subject_var, target_predicate, target_object)
" {} <{}> <{}> .",
subject_var, target_predicate, target_object
)
} else { } else {
// Delete object or literal referenced by property name. // Delete object or literal referenced by property name.
let delete_val = match &del_patch.value { let delete_val = match &del_patch.value {
@ -175,7 +215,7 @@ fn create_sparql_update_query_for_diff(
// Delete the specific values only. // Delete the specific values only.
Some(val) => json_to_sparql_val(&val), // Can be one or more (joined with ", "). Some(val) => json_to_sparql_val(&val), // Can be one or more (joined with ", ").
}; };
delete_statement = format!(" {} <{}> {} .", subject_var, target_predicate, delete_val); delete_statement = format!(" {} {} {} .", subject_var, target_predicate, delete_val);
} }
sparql_sub_queries.push(format!( sparql_sub_queries.push(format!(
@ -183,20 +223,22 @@ fn create_sparql_update_query_for_diff(
delete_statement, delete_statement,
where_statements.join(" .\n ") where_statements.join(" .\n ")
)); ));
log_info!(
"[create_sparql_update_query_for_diff] Added delete query #{}",
sparql_sub_queries.len()
);
} }
// Process add object patches (might need blank nodes) // Process primitive add patches
// //
for _add_obj_patch in add_object_patches { for (idx, add_patch) in add_primitive_patches.iter().enumerate() {
// Creating objects without an id field is only supported in one circumstance: log_info!(
// An object is added to a property which has a max cardinality of one, e.g. `painting.artist`. "[create_sparql_update_query_for_diff] Processing add primitive patch {}/{}: path={}",
// In that case, we create a blank node. idx + 1,
// TODO: We need to set up a list of created blank nodes and where they belong to. add_primitive_patches.len(),
} add_patch.path
);
// Process literal add patches
//
for add_patch in add_literal_patches {
let mut var_counter: i32 = 0; let mut var_counter: i32 = 0;
// Create WHERE statements from path. // Create WHERE statements from path.
@ -204,8 +246,12 @@ fn create_sparql_update_query_for_diff(
create_where_statements_for_patch(&add_patch, &mut var_counter, &orm_subscription); create_where_statements_for_patch(&add_patch, &mut var_counter, &orm_subscription);
let (subject_var, target_predicate, target_object) = target; let (subject_var, target_predicate, target_object) = target;
log_info!("[create_sparql_update_query_for_diff] Add patch where_statements: {:?}, subject_var={}, target_predicate={}, target_object={:?}",
where_statements, subject_var, target_predicate, target_object);
if let Some(_target_object) = target_object { if let Some(_target_object) = target_object {
// Reference to exactly one object found. This is invalid when inserting literals. // Reference to exactly one object found. This is invalid when inserting literals.
log_info!("[create_sparql_update_query_for_diff] SKIPPING: target_object found for literal add (invalid)");
// TODO: Return error? // TODO: Return error?
continue; continue;
} else { } else {
@ -215,6 +261,7 @@ fn create_sparql_update_query_for_diff(
Some(val) => json_to_sparql_val(&val), // Can be one or more (joined with ", "). Some(val) => json_to_sparql_val(&val), // Can be one or more (joined with ", ").
None => { None => {
// A value must be set. This patch is invalid. // A value must be set. This patch is invalid.
log_info!("[create_sparql_update_query_for_diff] SKIPPING: No value in add patch (invalid)");
// TODO: Return error? // TODO: Return error?
continue; continue;
} }
@ -225,8 +272,9 @@ fn create_sparql_update_query_for_diff(
// If the schema only has max one value, // If the schema only has max one value,
// then `add` can also overwrite values, so we need to delete the previous one // then `add` can also overwrite values, so we need to delete the previous one
if !pred_schema.unwrap().is_multi() { if !pred_schema.unwrap().is_multi() {
log_info!("[create_sparql_update_query_for_diff] Single-value predicate, adding DELETE before INSERT");
let remove_statement = let remove_statement =
format!(" {} <{}> ?o{}", subject_var, target_predicate, var_counter); format!(" {} {} ?o{}", subject_var, target_predicate, var_counter);
let mut wheres = where_statements.clone(); let mut wheres = where_statements.clone();
wheres.push(remove_statement.clone()); wheres.push(remove_statement.clone());
@ -236,18 +284,24 @@ fn create_sparql_update_query_for_diff(
remove_statement, remove_statement,
wheres.join(" .\n ") wheres.join(" .\n ")
)); ));
log_info!("[create_sparql_update_query_for_diff] Added delete query.");
// var_counter += 1; // Not necessary because not used afterwards. // var_counter += 1; // Not necessary because not used afterwards.
} }
// The actual INSERT. // The actual INSERT.
let add_statement = format!(" {} <{}> {} .", subject_var, target_predicate, add_val); let add_statement = format!(" {} {} {} .", subject_var, target_predicate, add_val);
sparql_sub_queries.push(format!( sparql_sub_queries.push(format!(
"INSERT {{\n{}\n}} WHERE {{\n {}\n}}", "INSERT {{\n{}\n}} WHERE {{\n {}\n}}",
add_statement, add_statement,
where_statements.join(". \n ") where_statements.join(". \n ")
)); ));
log_info!("[create_sparql_update_query_for_diff] Added insert query.");
} }
} }
log_info!(
"[create_sparql_update_query_for_diff] Finished. Generated {} sub-queries",
sparql_sub_queries.len()
);
return sparql_sub_queries.join(";\n"); return sparql_sub_queries.join(";\n");
} }
@ -294,6 +348,12 @@ fn create_where_statements_for_patch(
(String, String, Option<String>), (String, String, Option<String>),
Option<Arc<OrmSchemaPredicate>>, Option<Arc<OrmSchemaPredicate>>,
) { ) {
log_info!(
"[create_where_statements_for_patch] Starting. patch.path={}, patch.op={:?}",
patch.path,
patch.op
);
let mut body_statements: Vec<String> = vec![]; let mut body_statements: Vec<String> = vec![];
let mut where_statements: Vec<String> = vec![]; let mut where_statements: Vec<String> = vec![];
@ -303,9 +363,20 @@ fn create_where_statements_for_patch(
.map(|s| decode_json_pointer(&s.to_string())) .map(|s| decode_json_pointer(&s.to_string()))
.collect(); .collect();
log_info!(
"[create_where_statements_for_patch] Decoded path into {} segments: {:?}",
path.len(),
path
);
path.remove(0);
// Handle special case: The whole object is deleted. // Handle special case: The whole object is deleted.
if path.len() == 1 { if path.len() == 1 {
let root_iri = &path[0]; let root_iri = &path[0];
log_info!(
"[create_where_statements_for_patch] Special case: whole object deletion for root_iri={}",
root_iri
);
body_statements.push(format!("<{}> ?p ?o", root_iri)); body_statements.push(format!("<{}> ?p ?o", root_iri));
where_statements.push(format!("<{}> ?p ?o", root_iri)); where_statements.push(format!("<{}> ?p ?o", root_iri));
return ( return (
@ -315,27 +386,58 @@ fn create_where_statements_for_patch(
); );
} }
log_info!(
"[create_where_statements_for_patch] Getting root schema for shape={}",
orm_subscription.shape_type.shape
);
let subj_schema: &Arc<OrmSchemaShape> = orm_subscription let subj_schema: &Arc<OrmSchemaShape> = orm_subscription
.shape_type .shape_type
.schema .schema
.get(&orm_subscription.shape_type.shape) .get(&orm_subscription.shape_type.shape)
.unwrap(); .unwrap();
log_info!("[create_where_statements_for_patch] Root schema found");
let mut current_subj_schema: Arc<OrmSchemaShape> = subj_schema.clone(); let mut current_subj_schema: Arc<OrmSchemaShape> = subj_schema.clone();
// The root IRI might change, if the parent path segment was an IRI. // The root IRI might change, if the parent path segment was an IRI.
let root_iri = path.remove(0); let root_iri = path.remove(0);
let mut subject_ref = format!("<{}>", root_iri); let mut subject_ref = format!("<{}>", root_iri);
log_info!(
"[create_where_statements_for_patch] Starting traversal from root_iri={}, remaining path segments={}",
root_iri,
path.len()
);
while path.len() > 0 { while path.len() > 0 {
let pred_name = path.remove(0); let pred_name = path.remove(0);
log_info!(
"[create_where_statements_for_patch] Processing path segment: pred_name={}, remaining={}",
pred_name,
path.len()
);
log_info!(
"[create_where_statements_for_patch] Looking up predicate schema for name={}",
pred_name
);
let pred_schema = find_pred_schema_by_name(&pred_name, &current_subj_schema); let pred_schema = find_pred_schema_by_name(&pred_name, &current_subj_schema);
log_info!(
"[create_where_statements_for_patch] Found predicate schema: iri={}, is_object={}, is_multi={}",
pred_schema.iri,
pred_schema.is_object(),
pred_schema.is_multi()
);
// Case: We arrived at a leaf value. // Case: We arrived at a leaf value.
if path.len() == 0 { if path.len() == 0 {
log_info!(
"[create_where_statements_for_patch] Reached leaf value. Returning target: subject_ref={}, predicate={}",
subject_ref,
pred_schema.iri
);
return ( return (
where_statements, where_statements,
(subject_ref, pred_schema.iri.clone(), None), (subject_ref, format!("<{}>", pred_schema.iri.clone()), None),
Some(pred_schema), Some(pred_schema),
); );
} }
@ -346,6 +448,12 @@ fn create_where_statements_for_patch(
"{} <{}> ?o{}", "{} <{}> ?o{}",
subject_ref, pred_schema.iri, var_counter, subject_ref, pred_schema.iri, var_counter,
)); ));
log_info!(
"[create_where_statements_for_patch] Added where statement for nested object: {} <{}> ?o{}",
subject_ref,
pred_schema.iri,
var_counter
);
// Update the subject_ref for traversal (e.g. <bob> <hasCat> ?o1 . ?o1 <type> Cat); // Update the subject_ref for traversal (e.g. <bob> <hasCat> ?o1 . ?o1 <type> Cat);
subject_ref = format!("?o{}", var_counter); subject_ref = format!("?o{}", var_counter);
@ -358,31 +466,58 @@ fn create_where_statements_for_patch(
); );
} }
if pred_schema.is_multi() { if pred_schema.is_multi() {
log_info!("[create_where_statements_for_patch] Predicate is multi-valued, expecting object IRI in path");
let object_iri = path.remove(0); let object_iri = path.remove(0);
log_info!(
"[create_where_statements_for_patch] Got object_iri={}, remaining path={}",
object_iri,
path.len()
);
// Path ends on an object IRI, which we return here as well. // Path ends on an object IRI, which we return here as well.
if path.len() == 0 { if path.len() == 0 {
log_info!(
"[create_where_statements_for_patch] Path ends on object IRI. Returning target with object={}",
object_iri
);
return ( return (
where_statements, where_statements,
(subject_ref, pred_schema.iri.clone(), Some(object_iri)), (
subject_ref,
format!("<{}>", pred_schema.iri.clone()),
Some(format!("<{}>", object_iri)),
),
Some(pred_schema), Some(pred_schema),
); );
} }
log_info!(
"[create_where_statements_for_patch] Getting child schema for object_iri={}",
object_iri
);
current_subj_schema = current_subj_schema =
get_first_child_schema(Some(&object_iri), &pred_schema, &orm_subscription); get_first_child_schema(Some(&object_iri), &pred_schema, &orm_subscription);
log_info!("[create_where_statements_for_patch] Child schema found");
// Since we have new IRI that we can use as root, we replace the current one with it. // Since we have new IRI that we can use as root, we replace the current one with it.
subject_ref = format!("<{object_iri}>"); subject_ref = format!("<{object_iri}>");
// And can clear all, now unnecessary where statements. // And can clear all, now unnecessary where statements.
where_statements.clear(); where_statements.clear();
log_info!(
"[create_where_statements_for_patch] Reset subject_ref to <{}> and cleared where statements",
object_iri
);
} else { } else {
// Set to child subject schema. // Set to child subject schema.
// TODO: Actually, we should get the tracked subject and check for the correct shape there. // TODO: Actually, we should get the tracked subject and check for the correct shape there.
// As long as there is only one allowed shape or the first one is valid, this is fine. // As long as there is only one allowed shape or the first one is valid, this is fine.
log_info!("[create_where_statements_for_patch] Predicate is single-valued, getting child schema");
current_subj_schema = get_first_child_schema(None, &pred_schema, &orm_subscription); current_subj_schema = get_first_child_schema(None, &pred_schema, &orm_subscription);
log_info!("[create_where_statements_for_patch] Child schema found");
} }
} }
// Can't happen. // Can't happen.
log_err!("[create_where_statements_for_patch] PANIC: Reached end of function unexpectedly (should be impossible)");
panic!(); panic!();
} }

@ -62,7 +62,6 @@ impl Verifier {
.push(orm_subscription); .push(orm_subscription);
let orm_objects = self.create_orm_object_for_shape(nuri, session_id, &shape_type)?; let orm_objects = self.create_orm_object_for_shape(nuri, session_id, &shape_type)?;
// log_debug!("create_orm_object_for_shape return {:?}", orm_objects);
let _ = tx let _ = tx
.send(AppResponse::V0(AppResponseV0::OrmInitial(orm_objects))) .send(AppResponse::V0(AppResponseV0::OrmInitial(orm_objects)))

@ -96,18 +96,39 @@ impl Verifier {
orm_changes: &mut OrmChanges, orm_changes: &mut OrmChanges,
data_already_fetched: bool, data_already_fetched: bool,
) -> Result<(), NgError> { ) -> Result<(), NgError> {
log_info!(
"[process_changes_for_shape_and_session] Starting processing for nuri, root_shape: {}, session: {}, {} shapes, {} triples added, {} triples removed, data_already_fetched: {}",
root_shape_iri,
session_id,
shapes.len(),
triples_added.len(),
triples_removed.len(),
data_already_fetched
);
// First in, last out stack to keep track of objects to validate (nested objects first). Strings are object IRIs. // First in, last out stack to keep track of objects to validate (nested objects first). Strings are object IRIs.
let mut shape_validation_stack: Vec<(Arc<OrmSchemaShape>, Vec<String>)> = vec![]; let mut shape_validation_stack: Vec<(Arc<OrmSchemaShape>, Vec<String>)> = vec![];
// Track (shape_iri, subject_iri) pairs currently being validated to prevent cycles and double evaluation. // Track (shape_iri, subject_iri) pairs currently being validated to prevent cycles and double evaluation.
let mut currently_validating: HashSet<(String, String)> = HashSet::new(); let mut currently_validating: HashSet<(String, String)> = HashSet::new();
// Add root shape for first validation run. // Add root shape for first validation run.
for shape in shapes { for shape in shapes {
log_info!(
"[process_changes_for_shape_and_session] Adding root shape to validation stack: {}",
shape.iri
);
shape_validation_stack.push((shape, vec![])); shape_validation_stack.push((shape, vec![]));
} }
// Process queue of shapes and subjects to validate. // Process queue of shapes and subjects to validate.
// For a given shape, we evaluate every subject against that shape. // For a given shape, we evaluate every subject against that shape.
while let Some((shape, objects_to_validate)) = shape_validation_stack.pop() { while let Some((shape, objects_to_validate)) = shape_validation_stack.pop() {
log_info!(
"[process_changes_for_shape_and_session] Processing shape from stack: {}, with {} objects to validate: {:?}",
shape.iri,
objects_to_validate.len(),
objects_to_validate
);
// Collect triples relevant for validation. // Collect triples relevant for validation.
let added_triples_by_subject = let added_triples_by_subject =
group_by_subject_for_shape(&shape, triples_added, &objects_to_validate); group_by_subject_for_shape(&shape, triples_added, &objects_to_validate);
@ -118,13 +139,20 @@ impl Verifier {
.chain(removed_triples_by_subject.keys()) .chain(removed_triples_by_subject.keys())
.collect(); .collect();
log_info!(
"[process_changes_for_shape_and_session] Found {} modified subjects for shape {}: {:?}",
modified_subject_iris.len(),
shape.iri,
modified_subject_iris
);
// Variable to collect nested objects that need validation. // Variable to collect nested objects that need validation.
let mut nested_objects_to_eval: HashMap<ShapeIri, Vec<(SubjectIri, bool)>> = let mut nested_objects_to_eval: HashMap<ShapeIri, Vec<(SubjectIri, bool)>> =
HashMap::new(); HashMap::new();
// For each subject, add/remove triples and validate. // For each subject, add/remove triples and validate.
log_debug!( log_info!(
"processing modified subjects: {:?} against shape: {}", "[process_changes_for_shape_and_session] processing modified subjects: {:?} against shape: {}",
modified_subject_iris, modified_subject_iris,
shape.iri shape.iri
); );
@ -136,7 +164,7 @@ impl Verifier {
// Cycle detection: Check if this (shape, subject) pair is already being validated // Cycle detection: Check if this (shape, subject) pair is already being validated
if currently_validating.contains(&validation_key) { if currently_validating.contains(&validation_key) {
log_warn!( log_warn!(
"Cycle detected: subject '{}' with shape '{}' is already being validated. Marking as invalid.", "[process_changes_for_shape_and_session] Cycle detected: subject '{}' with shape '{}' is already being validated. Marking as invalid.",
subject_iri, subject_iri,
shape.iri shape.iri
); );
@ -162,7 +190,7 @@ impl Verifier {
// Mark as currently validating // Mark as currently validating
currently_validating.insert(validation_key.clone()); currently_validating.insert(validation_key.clone());
// Get triples of subject (added & removed). // Get triple changes for subject (added & removed).
let triples_added_for_subj = added_triples_by_subject let triples_added_for_subj = added_triples_by_subject
.get(*subject_iri) .get(*subject_iri)
.map(|v| v.as_slice()) .map(|v| v.as_slice())
@ -177,31 +205,36 @@ impl Verifier {
.entry(shape.iri.clone()) .entry(shape.iri.clone())
.or_insert_with(HashMap::new) .or_insert_with(HashMap::new)
.entry((*subject_iri).clone()) .entry((*subject_iri).clone())
.or_insert_with(|| OrmTrackedSubjectChange { .or_insert_with(|| {
subject_iri: (*subject_iri).clone(), // Create a new change record.
predicates: HashMap::new(), // This includes the previous validity and triple changes.
data_applied: false,
});
// Apply all triples for that subject to the tracked (shape, subject) pair.
// Record the changes.
{
let orm_subscription = self let orm_subscription = self
.orm_subscriptions .orm_subscriptions
.get_mut(nuri) .get_mut(nuri)
.unwrap() .unwrap()
.iter_mut() .iter_mut()
.find(|sub| { .find(|sub| {
sub.shape_type.shape == *root_shape_iri && sub.session_id == session_id sub.shape_type.shape == *root_shape_iri
&& sub.session_id == session_id
}) })
.unwrap(); .unwrap();
// Update tracked subjects and modify change objects. log_info!("[process_changes_for_shape_and_session] Creating change object for {}, {}", subject_iri, shape.iri);
if !change.data_applied { let prev_valid = match orm_subscription
log_debug!( .tracked_subjects
"Adding triples to change tracker for subject {}", .get(*subject_iri)
subject_iri .and_then(|shapes| shapes.get(&shape.iri))
); {
Some(tracked_subject) => tracked_subject.read().unwrap().valid.clone(),
None => OrmTrackedSubjectValidity::Pending,
};
let mut change = OrmTrackedSubjectChange {
subject_iri: (*subject_iri).clone(),
predicates: HashMap::new(),
is_validated: false,
prev_valid,
};
if let Err(e) = add_remove_triples( if let Err(e) = add_remove_triples(
shape.clone(), shape.clone(),
@ -209,46 +242,42 @@ impl Verifier {
triples_added_for_subj, triples_added_for_subj,
triples_removed_for_subj, triples_removed_for_subj,
orm_subscription, orm_subscription,
change, &mut change,
) { ) {
log_err!("apply_changes_from_triples add/remove error: {:?}", e); log_err!("apply_changes_from_triples add/remove error: {:?}", e);
panic!(); panic!();
} }
change.data_applied = true;
}
// Check if this is the first evaluation round - In that case, set old validity to new one. change
// if the object was already validated, don't do so again. });
{
let tracked_subject = &mut orm_subscription
.tracked_subjects
.get(*subject_iri)
.unwrap()
.get(&shape.iri)
.unwrap()
.write()
.unwrap();
// First run // If validation took place already, there's nothing more to do...
if !change.data_applied if change.is_validated {
&& tracked_subject.valid != OrmTrackedSubjectValidity::Pending log_info!(
{ "[process_changes_for_shape_and_session] Subject {} already validated for shape {}, skipping",
tracked_subject.prev_valid = tracked_subject.valid.clone(); subject_iri,
shape.iri
);
continue;
} }
if change.data_applied { log_info!(
log_debug!("not applying triples again for subject {subject_iri}"); "[process_changes_for_shape_and_session] Running validation for subject {} against shape {}",
subject_iri,
shape.iri
);
// Has this subject already been validated? // Run validation and record objects that need to be re-evaluated.
if change.data_applied
&& tracked_subject.valid != OrmTrackedSubjectValidity::Pending
{ {
log_debug!("Not evaluating subject again {subject_iri}"); let orm_subscription = self
.orm_subscriptions
continue; .get_mut(nuri)
} .unwrap()
} .iter_mut()
} .find(|sub| {
sub.shape_type.shape == *root_shape_iri && sub.session_id == session_id
})
.unwrap();
// Validate the subject. // Validate the subject.
// need_eval contains elements in reverse priority (last element to be validated first) // need_eval contains elements in reverse priority (last element to be validated first)
@ -257,21 +286,47 @@ impl Verifier {
// We add the need_eval to be processed next after loop. // We add the need_eval to be processed next after loop.
// Filter out subjects already in the validation stack to prevent double evaluation. // Filter out subjects already in the validation stack to prevent double evaluation.
log_info!(
"[process_changes_for_shape_and_session] Validation returned {} objects that need evaluation",
need_eval.len()
);
for (iri, schema_shape, needs_refetch) in need_eval { for (iri, schema_shape, needs_refetch) in need_eval {
let eval_key = (schema_shape.clone(), iri.clone()); let eval_key = (schema_shape.clone(), iri.clone());
if !currently_validating.contains(&eval_key) { if !currently_validating.contains(&eval_key) {
log_info!(
"[process_changes_for_shape_and_session] Adding nested object to eval: {} with shape {}, needs_refetch: {}",
iri,
schema_shape,
needs_refetch
);
// Only add if not currently being validated // Only add if not currently being validated
nested_objects_to_eval nested_objects_to_eval
.entry(schema_shape) .entry(schema_shape)
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push((iri.clone(), needs_refetch)); .push((iri.clone(), needs_refetch));
} else {
log_info!(
"[process_changes_for_shape_and_session] Skipping nested object {} with shape {} - already validating",
iri,
schema_shape
);
} }
} }
} }
} }
// Now, we queue all non-evaluated objects // Now, we queue all non-evaluated objects
log_info!(
"[process_changes_for_shape_and_session] Processing {} nested shape groups",
nested_objects_to_eval.len()
);
for (shape_iri, objects_to_eval) in &nested_objects_to_eval { for (shape_iri, objects_to_eval) in &nested_objects_to_eval {
log_info!(
"[process_changes_for_shape_and_session] Processing nested shape: {} with {} objects",
shape_iri,
objects_to_eval.len()
);
// Extract schema and shape Arc first (before any borrows) // Extract schema and shape Arc first (before any borrows)
let schema = { let schema = {
let orm_sub = self.get_first_orm_subscription_for( let orm_sub = self.get_first_orm_subscription_for(
@ -285,18 +340,29 @@ impl Verifier {
// Data might need to be fetched (if it has not been during initialization or nested shape fetch). // Data might need to be fetched (if it has not been during initialization or nested shape fetch).
if !data_already_fetched { if !data_already_fetched {
let objects_to_fetch = objects_to_eval let objects_to_fetch: Vec<String> = objects_to_eval
.iter() .iter()
.filter(|(_iri, needs_fetch)| *needs_fetch) .filter(|(_iri, needs_fetch)| *needs_fetch)
.map(|(s, _)| s.clone()) .map(|(s, _)| s.clone())
.collect(); .collect();
log_info!(
"[process_changes_for_shape_and_session] Fetching data for {} objects that need refetch",
objects_to_fetch.len()
);
if objects_to_fetch.len() > 0 {
// Create sparql query // Create sparql query
let shape_query = let shape_query =
shape_type_to_sparql(&schema, &shape_iri, Some(objects_to_fetch))?; shape_type_to_sparql(&schema, &shape_iri, Some(objects_to_fetch))?;
let new_triples = let new_triples =
self.query_sparql_construct(shape_query, Some(nuri_to_string(nuri)))?; self.query_sparql_construct(shape_query, Some(nuri_to_string(nuri)))?;
log_info!(
"[process_changes_for_shape_and_session] Fetched {} triples, recursively processing nested objects",
new_triples.len()
);
// Recursively process nested objects. // Recursively process nested objects.
self.process_changes_for_shape_and_session( self.process_changes_for_shape_and_session(
nuri, nuri,
@ -309,6 +375,7 @@ impl Verifier {
true, true,
)?; )?;
} }
}
// Add objects // Add objects
let objects_not_to_fetch: Vec<String> = objects_to_eval let objects_not_to_fetch: Vec<String> = objects_to_eval
@ -317,16 +384,35 @@ impl Verifier {
.map(|(s, _)| s.clone()) .map(|(s, _)| s.clone())
.collect(); .collect();
if objects_not_to_fetch.len() > 0 { if objects_not_to_fetch.len() > 0 {
log_info!(
"[process_changes_for_shape_and_session] Queueing {} objects that don't need fetching for shape {}",
objects_not_to_fetch.len(),
shape_iri
);
// Queue all objects that don't need fetching. // Queue all objects that don't need fetching.
shape_validation_stack.push((shape_arc, objects_not_to_fetch)); shape_validation_stack.push((shape_arc, objects_not_to_fetch));
} else {
log_info!(
"[process_changes_for_shape_and_session] No objects to queue for shape {} (all needed fetching)",
shape_iri
);
} }
} }
log_info!(
"[process_changes_for_shape_and_session] Cleaning up validation tracking for {} modified subjects",
modified_subject_iris.len()
);
for subject_iri in modified_subject_iris { for subject_iri in modified_subject_iris {
let validation_key = (shape.iri.clone(), subject_iri.to_string()); let validation_key = (shape.iri.clone(), subject_iri.to_string());
currently_validating.remove(&validation_key); currently_validating.remove(&validation_key);
} }
} }
log_info!(
"[process_changes_for_shape_and_session] Finished processing. Validation stack empty."
);
Ok(()) Ok(())
} }
@ -415,4 +501,88 @@ impl Verifier {
Some(subscription) => Ok((subscription.sender.clone(), subscription)), Some(subscription) => Ok((subscription.sender.clone(), subscription)),
} }
} }
/// Purges all tracked subjects marked `ToDelete` from the subscription and
/// detaches them from the parent/child tracking graph.
///
/// Runs in three passes over `orm_subscription.tracked_subjects`
/// (map: subject IRI -> map: shape IRI -> `Arc<RwLock<OrmTrackedSubject>>`):
///   1. For each `ToDelete` subject: demote orphaned children to `Untracked`,
///      unlink this subject from its children's `parents`, and remove it from
///      its parents' `tracked_children` lists.
///   2. Collect `(subject_iri, shape_iri)` keys of `ToDelete` subjects
///      (removal is deferred because pass 1/2 iterate the map immutably).
///   3. Remove the collected entries, dropping a subject's whole map entry
///      once its last shape is gone.
pub fn cleanup_tracked_subjects(orm_subscription: &mut OrmSubscription) {
    let tracked_subjects = &mut orm_subscription.tracked_subjects;

    // First pass: Clean up relationships for subjects being deleted
    for (subject_iri, subjects_for_shape) in tracked_subjects.iter() {
        for (_shape_iri, tracked_subject_lock) in subjects_for_shape.iter() {
            let tracked_subject = tracked_subject_lock.read().unwrap();

            // Only process subjects that are marked for deletion
            if tracked_subject.valid != OrmTrackedSubjectValidity::ToDelete {
                continue;
            }

            let has_parents = !tracked_subject.parents.is_empty();

            // Set all children to `untracked` that don't have other parents.
            // NOTE(review): child locks are acquired for writing while this
            // subject's read lock is held — looks safe only if a subject can
            // never appear among its own tracked_children (self-referencing
            // triple would deadlock). TODO confirm with triple-insertion code.
            for tracked_predicate in tracked_subject.tracked_predicates.values() {
                let tracked_pred_read = tracked_predicate.read().unwrap();
                for child in &tracked_pred_read.tracked_children {
                    let mut tracked_child = child.write().unwrap();
                    // "No other parents" means: no parents at all, or exactly
                    // one parent and that parent is this (deleted) subject.
                    if tracked_child.parents.is_empty()
                        || (tracked_child.parents.len() == 1
                            && tracked_child
                                .parents
                                .contains_key(&tracked_subject.subject_iri))
                    {
                        // Don't downgrade a child that is itself scheduled for
                        // deletion; pass 3 will remove it entirely.
                        if tracked_child.valid != OrmTrackedSubjectValidity::ToDelete {
                            tracked_child.valid = OrmTrackedSubjectValidity::Untracked;
                        }
                    }
                }
            }

            // Remove this subject from its children's parent lists
            // (Only if this is not a root subject - root subjects keep child relationships)
            if has_parents {
                for tracked_pred in tracked_subject.tracked_predicates.values() {
                    let tracked_pred_read = tracked_pred.read().unwrap();
                    for child in &tracked_pred_read.tracked_children {
                        child.write().unwrap().parents.remove(subject_iri);
                    }
                }
            }

            // Also remove this subject from its parents' children lists.
            // NOTE(review): parent write locks are taken while holding this
            // subject's read lock — same lock-ordering caveat as above.
            for (_parent_iri, parent_tracked_subject) in &tracked_subject.parents {
                let mut parent_ts = parent_tracked_subject.write().unwrap();
                for tracked_pred in parent_ts.tracked_predicates.values_mut() {
                    let mut tracked_pred_mut = tracked_pred.write().unwrap();
                    tracked_pred_mut
                        .tracked_children
                        .retain(|child| child.read().unwrap().subject_iri != *subject_iri);
                }
            }
        }
    }

    // Second pass: Collect subjects to remove (we can't remove while iterating)
    let mut subjects_to_remove: Vec<(String, String)> = vec![];
    for (subject_iri, subjects_for_shape) in tracked_subjects.iter() {
        for (shape_iri, tracked_subject) in subjects_for_shape.iter() {
            let tracked_subject = tracked_subject.read().unwrap();
            // Only cleanup subjects that are marked for deletion
            if tracked_subject.valid == OrmTrackedSubjectValidity::ToDelete {
                subjects_to_remove.push((subject_iri.clone(), shape_iri.clone()));
            }
        }
    }

    // Third pass: Remove the subjects marked for deletion
    for (subject_iri, shape_iri) in subjects_to_remove {
        if let Some(shapes_map) = tracked_subjects.get_mut(&subject_iri) {
            shapes_map.remove(&shape_iri);
            // If this was the last shape for this subject, remove the subject entry entirely
            if shapes_map.is_empty() {
                tracked_subjects.remove(&subject_iri);
            }
        }
    }
}
} }

@ -35,15 +35,14 @@ impl Verifier {
// &update.overlay_id, // &update.overlay_id,
// ); // );
//let base = NuriV0::repo_id(&repo.id); //let base = NuriV0::repo_id(&repo.id);
let binding = nuri.unwrap();
let nuri = binding.split_at(53).0;
log_info!("querying construct\n{}\n{}\n", nuri, query); let nuri_str = nuri.as_ref().map(|s| s.as_str());
log_debug!("querying construct\n{}\n{}\n", nuri_str.unwrap(), query);
let parsed = Query::parse(&query, Some(nuri.clone())) let parsed =
.map_err(|e| NgError::OxiGraphError(e.to_string()))?; Query::parse(&query, nuri_str).map_err(|e| NgError::OxiGraphError(e.to_string()))?;
let results = oxistore let results = oxistore
.query(parsed, Some(nuri.to_string())) .query(parsed, nuri)
.map_err(|e| NgError::OxiGraphError(e.to_string()))?; .map_err(|e| NgError::OxiGraphError(e.to_string()))?;
match results { match results {
QueryResults::Graph(triples) => { QueryResults::Graph(triples) => {
@ -51,8 +50,7 @@ impl Verifier {
for t in triples { for t in triples {
match t { match t {
Err(e) => { Err(e) => {
log_info!("Error: {:?}n", e); log_err!("{}", e.to_string());
return Err(NgError::SparqlError(e.to_string())); return Err(NgError::SparqlError(e.to_string()));
} }
Ok(triple) => { Ok(triple) => {

@ -19,7 +19,7 @@ impl Verifier {
/// Might return nested objects that need to be validated. /// Might return nested objects that need to be validated.
/// Assumes all triples to be of same subject. /// Assumes all triples to be of same subject.
pub fn update_subject_validity( pub fn update_subject_validity(
s_change: &OrmTrackedSubjectChange, s_change: &mut OrmTrackedSubjectChange,
shape: &OrmSchemaShape, shape: &OrmSchemaShape,
orm_subscription: &mut OrmSubscription, orm_subscription: &mut OrmSubscription,
) -> Vec<(SubjectIri, ShapeIri, NeedsFetchBool)> { ) -> Vec<(SubjectIri, ShapeIri, NeedsFetchBool)> {
@ -32,7 +32,7 @@ impl Verifier {
return vec![]; return vec![];
}; };
let mut tracked_subject = tracked_subject.write().unwrap(); let mut tracked_subject = tracked_subject.write().unwrap();
let previous_validity = tracked_subject.prev_valid.clone(); let previous_validity = s_change.prev_valid.clone();
// Keep track of objects that need to be validated against a shape to fetch and validate. // Keep track of objects that need to be validated against a shape to fetch and validate.
let mut need_evaluation: Vec<(String, String, bool)> = vec![]; let mut need_evaluation: Vec<(String, String, bool)> = vec![];
@ -348,41 +348,17 @@ impl Verifier {
tracked_subject.valid = new_validity.clone(); tracked_subject.valid = new_validity.clone();
if new_validity == OrmTrackedSubjectValidity::Invalid { // First, if we have a definite decision, we set is_validated to true.
// For invalid subjects, we need to to cleanup. if new_validity != OrmTrackedSubjectValidity::Pending {
s_change.is_validated = true;
let has_parents = !tracked_subject.parents.is_empty();
if has_parents {
// This object is not a root object. Tracked child objects can be dropped.
// We therefore delete the child -> parent links.
// Untracked objects (with no parents) will be deleted in the subsequent child validation.
for tracked_predicate in tracked_subject.tracked_predicates.values() {
for child in &tracked_predicate.write().unwrap().tracked_children {
child
.write()
.unwrap()
.parents
.remove(&tracked_subject.subject_iri);
}
}
} else {
// This is a root objects, we will set the children to untracked
// but don't delete the child > parent relationship.
} }
// Set all children to `untracked` that don't have other parents. if new_validity == OrmTrackedSubjectValidity::Invalid {
for tracked_predicate in tracked_subject.tracked_predicates.values() { // For invalid subjects, we schedule cleanup.
for child in &tracked_predicate.write().unwrap().tracked_children { if tracked_subject.parents.len() == 0 {
let mut tracked_child = child.write().unwrap(); tracked_subject.valid = OrmTrackedSubjectValidity::Invalid;
if tracked_child.parents.is_empty() } else {
|| (tracked_child.parents.len() == 1 tracked_subject.valid = OrmTrackedSubjectValidity::ToDelete;
&& tracked_child
.parents
.contains_key(&tracked_subject.subject_iri))
{
tracked_child.valid = OrmTrackedSubjectValidity::Untracked;
}
}
} }
// Add all children to need_evaluation for their cleanup. // Add all children to need_evaluation for their cleanup.
@ -396,9 +372,6 @@ impl Verifier {
)); ));
} }
} }
// Remove all tracked_predicates.
tracked_subject.tracked_predicates.clear();
} else if new_validity == OrmTrackedSubjectValidity::Valid } else if new_validity == OrmTrackedSubjectValidity::Valid
&& previous_validity != OrmTrackedSubjectValidity::Valid && previous_validity != OrmTrackedSubjectValidity::Valid
{ {

@ -25,8 +25,6 @@ pub struct OrmTrackedSubject {
pub parents: HashMap<String, Arc<RwLock<OrmTrackedSubject>>>, pub parents: HashMap<String, Arc<RwLock<OrmTrackedSubject>>>,
/// Validity. When untracked, triple updates are not processed for this tracked subject. /// Validity. When untracked, triple updates are not processed for this tracked subject.
pub valid: OrmTrackedSubjectValidity, pub valid: OrmTrackedSubjectValidity,
/// Previous validity. Used for validation and creating JSON Patch diffs from changes.
pub prev_valid: OrmTrackedSubjectValidity,
/// Subject IRI /// Subject IRI
pub subject_iri: String, pub subject_iri: String,
/// The shape for which the predicates are tracked. /// The shape for which the predicates are tracked.
@ -39,6 +37,7 @@ pub enum OrmTrackedSubjectValidity {
Invalid, Invalid,
Pending, Pending,
Untracked, Untracked,
ToDelete,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -60,10 +59,10 @@ pub struct OrmTrackedSubjectChange {
pub subject_iri: String, pub subject_iri: String,
/// Predicates that were changed. /// Predicates that were changed.
pub predicates: HashMap<String, OrmTrackedPredicateChanges>, pub predicates: HashMap<String, OrmTrackedPredicateChanges>,
/// If the new triples have been added to the tracked predicates /// If the validation has taken place
/// (values_added / values_removed) already. This is to prevent pub is_validated: bool,
/// double-application. /// The validity before the new validation.
pub data_applied: bool, pub prev_valid: OrmTrackedSubjectValidity,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct OrmTrackedPredicateChanges { pub struct OrmTrackedPredicateChanges {

@ -15,11 +15,9 @@ use std::sync::Arc;
use futures::channel::mpsc; use futures::channel::mpsc;
use futures::SinkExt; use futures::SinkExt;
use futures::StreamExt; use futures::StreamExt;
use ng_net::actor::SoS;
use ng_net::types::InboxPost; use ng_net::types::InboxPost;
use ng_net::types::NgQRCode; use ng_net::types::NgQRCode;
use ng_net::types::NgQRCodeProfileSharingV0; use ng_net::types::NgQRCodeProfileSharingV0;
use ng_oxigraph::oxigraph::sparql::EvaluationError;
use ng_oxigraph::oxigraph::sparql::{results::*, Query, QueryResults}; use ng_oxigraph::oxigraph::sparql::{results::*, Query, QueryResults};
use ng_oxigraph::oxrdf::{Literal, NamedNode, Quad, Term}; use ng_oxigraph::oxrdf::{Literal, NamedNode, Quad, Term};
use ng_oxigraph::oxsdatatypes::DateTime; use ng_oxigraph::oxsdatatypes::DateTime;

@ -11,6 +11,16 @@ import { computed, signal, isSignal } from "./core";
/** A batched deep mutation (set/add/remove) from a deepSignal root. */ /** A batched deep mutation (set/add/remove) from a deepSignal root. */
export type DeepPatch = { export type DeepPatch = {
/** Property path (array indices, object keys, synthetic Set entry ids) from the root to the mutated location. */
path: (string | number)[];
} & (
| DeepSetAddPatch
| DeepSetRemovePatch
| DeepObjectAddPatch
| DeepRemovePatch
| DeepLiteralAddPatch
);
export type DeepPatchInternal = {
/** Unique identifier for the deep signal root which produced this patch. */ /** Unique identifier for the deep signal root which produced this patch. */
root: symbol; root: symbol;
/** Property path (array indices, object keys, synthetic Set entry ids) from the root to the mutated location. */ /** Property path (array indices, object keys, synthetic Set entry ids) from the root to the mutated location. */
@ -22,6 +32,7 @@ export type DeepPatch = {
| DeepRemovePatch | DeepRemovePatch
| DeepLiteralAddPatch | DeepLiteralAddPatch
); );
export interface DeepSetAddPatch { export interface DeepSetAddPatch {
/** Mutation kind applied at the resolved `path`. */ /** Mutation kind applied at the resolved `path`. */
op: "add"; op: "add";
@ -105,7 +116,7 @@ function buildPath(
return path; return path;
} }
function queuePatch(patch: DeepPatch) { function queuePatch(patch: DeepPatchInternal) {
if (!pendingPatches) pendingPatches = new Map(); if (!pendingPatches) pendingPatches = new Map();
const root = patch.root; const root = patch.root;
let list = pendingPatches.get(root); let list = pendingPatches.get(root);
@ -113,6 +124,9 @@ function queuePatch(patch: DeepPatch) {
list = []; list = [];
pendingPatches.set(root, list); pendingPatches.set(root, list);
} }
// Remove root, we do not send that back.
// @ts-ignore
delete patch.root;
list.push(patch); list.push(patch);
if (!microtaskScheduled) { if (!microtaskScheduled) {
microtaskScheduled = true; microtaskScheduled = true;
@ -124,7 +138,7 @@ function queuePatch(patch: DeepPatch) {
for (const [rootId, patches] of groups) { for (const [rootId, patches] of groups) {
if (!patches.length) continue; if (!patches.length) continue;
const subs = mutationSubscribers.get(rootId); const subs = mutationSubscribers.get(rootId);
if (subs) subs.forEach((cb) => cb(patches)); if (subs) subs.forEach((callback) => callback(patches));
} }
}); });
} }
@ -244,6 +258,8 @@ export function getDeepSignalRootId(obj: any): symbol | undefined {
const proxyToSignals = new WeakMap(); const proxyToSignals = new WeakMap();
// Raw object/array/Set -> stable proxy // Raw object/array/Set -> stable proxy
const objToProxy = new WeakMap(); const objToProxy = new WeakMap();
// Proxy -> raw object/array/Set (reverse lookup)
const proxyToRaw = new WeakMap();
// Raw array -> `$` meta proxy with index signals // Raw array -> `$` meta proxy with index signals
const arrayToArrayOfSignals = new WeakMap(); const arrayToArrayOfSignals = new WeakMap();
// Objects already proxied or marked shallow // Objects already proxied or marked shallow
@ -367,22 +383,25 @@ export function setSetEntrySyntheticId(obj: object, id: string | number) {
} }
const getSetEntryKey = (val: any): string | number => { const getSetEntryKey = (val: any): string | number => {
if (val && typeof val === "object") { if (val && typeof val === "object") {
// If val is a proxy, get the raw object first
const rawVal = proxyToRaw.get(val) || val;
// First check for explicitly assigned synthetic ID // First check for explicitly assigned synthetic ID
if (setObjectIds.has(val)) return setObjectIds.get(val)!; if (setObjectIds.has(rawVal)) return setObjectIds.get(rawVal)!;
// Then check for @id property (primary identifier) // Then check for @id property (primary identifier)
if ( if (
typeof (val as any)["@id"] === "string" || typeof (rawVal as any)["@id"] === "string" ||
typeof (val as any)["@id"] === "number" typeof (rawVal as any)["@id"] === "number"
) )
return (val as any)["@id"]; return (rawVal as any)["@id"];
// Then check for id property (backward compatibility) // Then check for id property (backward compatibility)
if ( if (
typeof (val as any).id === "string" || typeof (rawVal as any).id === "string" ||
typeof (val as any).id === "number" typeof (rawVal as any).id === "number"
) )
return (val as any).id; return (rawVal as any).id;
// Fall back to generating a blank node ID // Fall back to generating a blank node ID
return assignBlankNodeId(val); return assignBlankNodeId(rawVal);
} }
return val as any; return val as any;
}; };
@ -437,6 +456,7 @@ export const deepSignal = <T extends object>(
// Pre-register an empty signals map so isDeepSignal() is true before any property access. // Pre-register an empty signals map so isDeepSignal() is true before any property access.
if (!proxyToSignals.has(proxy)) proxyToSignals.set(proxy, new Map()); if (!proxyToSignals.has(proxy)) proxyToSignals.set(proxy, new Map());
objToProxy.set(obj, proxy); objToProxy.set(obj, proxy);
proxyToRaw.set(proxy, obj);
} }
return objToProxy.get(obj); return objToProxy.get(obj);
}; };
@ -509,6 +529,7 @@ function getFromSet(
childMeta.parent = receiver; childMeta.parent = receiver;
childMeta.key = synthetic; childMeta.key = synthetic;
objToProxy.set(entry, childProxy); objToProxy.set(entry, childProxy);
proxyToRaw.set(childProxy, entry);
return childProxy; return childProxy;
} }
if (objToProxy.has(entry)) return objToProxy.get(entry); if (objToProxy.has(entry)) return objToProxy.get(entry);
@ -520,19 +541,27 @@ function getFromSet(
if (key === "add" || key === "delete" || key === "clear") { if (key === "add" || key === "delete" || key === "clear") {
const fn: Function = (raw as any)[key]; const fn: Function = (raw as any)[key];
return function (this: any, ...args: any[]) { return function (this: any, ...args: any[]) {
// For delete, keep track of the original entry for patch emission
const originalEntry = key === "delete" ? args[0] : undefined;
// For delete, if the argument is a proxy, get the raw object for the actual Set operation
if (key === "delete" && args[0] && typeof args[0] === "object") {
const rawArg = proxyToRaw.get(args[0]);
if (rawArg) {
args = [rawArg];
}
}
const sizeBefore = raw.size; const sizeBefore = raw.size;
const result = fn.apply(raw, args); const result = fn.apply(raw, args);
if (raw.size !== sizeBefore) { if (raw.size !== sizeBefore) {
const metaNow = proxyMeta.get(receiver); const metaNow = proxyMeta.get(receiver);
if ( if (metaNow) {
metaNow && // For root Set, containerPath is empty; for nested Set, build path from parent
const containerPath =
metaNow.parent !== undefined && metaNow.parent !== undefined &&
metaNow.key !== undefined metaNow.key !== undefined
) { ? buildPath(metaNow.parent, metaNow.key)
const containerPath = buildPath( : [];
metaNow.parent,
metaNow.key
);
if (key === "add") { if (key === "add") {
const entry = args[0]; const entry = args[0];
@ -587,6 +616,7 @@ function getFromSet(
childMeta.parent = receiver; childMeta.parent = receiver;
childMeta.key = synthetic; childMeta.key = synthetic;
objToProxy.set(entryVal, childProxy); objToProxy.set(entryVal, childProxy);
proxyToRaw.set(childProxy, entryVal);
entryVal = childProxy; entryVal = childProxy;
} }
// Set entry add: emit object vs primitive variant. // Set entry add: emit object vs primitive variant.
@ -609,7 +639,8 @@ function getFromSet(
}); });
} }
} else if (key === "delete") { } else if (key === "delete") {
const entry = args[0]; // Use the original entry (before proxy-to-raw conversion) for getting the synthetic key
const entry = originalEntry;
const synthetic = getSetEntryKey(entry); const synthetic = getSetEntryKey(entry);
// Check if entry is primitive or object // Check if entry is primitive or object
if (entry && typeof entry === "object") { if (entry && typeof entry === "object") {
@ -664,9 +695,9 @@ function getFromSet(
const makeIterator = (pair: boolean) => { const makeIterator = (pair: boolean) => {
return function thisIter(this: any) { return function thisIter(this: any) {
const iterable = raw.values(); const iterable = raw.values();
return { // Create an Iterator that inherits Iterator.prototype methods (map, filter, etc.)
[Symbol.iterator]() { // Wrap the iterator to proxy entries on-demand
return { const wrappedIterator = {
next() { next() {
const n = iterable.next(); const n = iterable.next();
if (n.done) return n; if (n.done) return n;
@ -677,8 +708,11 @@ function getFromSet(
}; };
}, },
}; };
}, // Set the prototype to Iterator.prototype if available (ES2023+ Iterator Helpers)
} as Iterable<any>; if (typeof Iterator !== "undefined" && Iterator.prototype) {
Object.setPrototypeOf(wrappedIterator, Iterator.prototype);
}
return wrappedIterator;
}; };
}; };
if (key === "values" || key === "keys") return makeIterator(false); if (key === "values" || key === "keys") return makeIterator(false);
@ -813,6 +847,10 @@ const get =
if (target instanceof Set) { if (target instanceof Set) {
return getFromSet(target as Set<any>, fullKey as any, receiver); return getFromSet(target as Set<any>, fullKey as any, receiver);
} }
// Special case: accessing `$` on a non-array object returns the raw target
if (fullKey === "$" && !Array.isArray(target)) {
return target;
}
const norm = normalizeKey(target, fullKey, isArrayMeta, receiver); const norm = normalizeKey(target, fullKey, isArrayMeta, receiver);
if ((norm as any).shortCircuit) return (norm as any).shortCircuit; // returned meta proxy if ((norm as any).shortCircuit) return (norm as any).shortCircuit; // returned meta proxy
const { key, returnSignal } = norm as { const { key, returnSignal } = norm as {
@ -839,9 +877,9 @@ const objectHandlers = {
get: get(false), get: get(false),
set(target: object, fullKey: string, val: any, receiver: object): boolean { set(target: object, fullKey: string, val: any, receiver: object): boolean {
// Prevent modification of @id property // Prevent modification of @id property
if (fullKey === "@id") { // if (fullKey === "@id") {
throw new Error("Cannot modify readonly property '@id'"); // throw new Error("Cannot modify readonly property '@id'");
} // }
// Respect original getter/setter semantics // Respect original getter/setter semantics
if (typeof descriptor(target, fullKey)?.set === "function") if (typeof descriptor(target, fullKey)?.set === "function")
return Reflect.set(target, fullKey, val, receiver); return Reflect.set(target, fullKey, val, receiver);

@ -336,5 +336,99 @@ describe("deepSignal options", () => {
stop(); stop();
}); });
// Verifies that deleting a *proxied* entry from a deepSignal Set resolves the
// entry's synthetic key via its "@id", so the emitted `remove` patch carries
// the path `s.<id>` for exactly the deleted object and no other.
it("emits delete patch when removing objects with @id from Sets", async () => {
    const options: DeepSignalOptions = {
        addIdToObjects: true,
    };
    const state = deepSignal({ s: new Set<any>() }, options);

    const patches: DeepPatch[][] = [];
    const { stopListening: stop } = watch(state, ({ patches: batch }) =>
        patches.push(batch)
    );

    // Add objects with @id
    const obj1 = { "@id": "obj-1", value: 1 };
    const obj2 = { "@id": "obj-2", value: 2 };
    const obj3 = { "@id": "obj-3", value: 3 };
    state.s.add(obj1);
    state.s.add(obj2);
    state.s.add(obj3);
    // Patches are batched per microtask; flush the pending batch.
    await Promise.resolve();

    // Get the proxied objects from the Set
    const proxiedObjs = Array.from(state.s);
    const proxiedObj2 = proxiedObjs.find(
        (o: any) => o["@id"] === "obj-2"
    );

    // Clear patches from additions
    patches.length = 0;

    // Delete one object using the proxied object (not the raw one) —
    // exercises the proxy-to-raw lookup inside the Set `delete` wrapper.
    state.s.delete(proxiedObj2);
    await Promise.resolve();

    // Check that delete patch was emitted with correct path
    const deletePaths = patches
        .flat()
        .filter((p) => p.op === "remove")
        .map((p) => p.path.join("."));
    expect(deletePaths).toContain("s.obj-2");
    expect(deletePaths).not.toContain("s.obj-1");
    expect(deletePaths).not.toContain("s.obj-3");

    stop();
});
// Verifies that Set entries added *without* an explicit "@id" receive an id
// from the configured `idGenerator`, and that delete patches emitted later
// use that generated id in their path.
it("emits delete patches when removing objects without explicit @id from Sets", async () => {
    const options: DeepSignalOptions = {
        // `String.prototype.substr` is deprecated (Annex B); `slice(2, 11)`
        // extracts the same 9 characters starting at index 2.
        idGenerator: () =>
            `gen-${Math.random().toString(36).slice(2, 11)}`,
        addIdToObjects: true,
    };
    const state = deepSignal({ s: new Set<any>() }, options);

    // Add objects without @id - they should get generated IDs
    const obj1 = { value: 1 };
    const obj2 = { value: 2 };
    state.s.add(obj1);
    state.s.add(obj2);

    // Get the proxied objects and their generated IDs.
    // NOTE(review): relies on Set insertion order ([0] == obj1's proxy),
    // which JS guarantees for Set iteration.
    const proxiedObjs = Array.from(state.s);
    const proxiedObj1 = proxiedObjs[0];
    const proxiedObj2 = proxiedObjs[1];
    const id1 = (proxiedObj1 as any)["@id"];
    const id2 = (proxiedObj2 as any)["@id"];
    expect(id1).toBeDefined();
    expect(id2).toBeDefined();

    const patches: DeepPatch[][] = [];
    const { stopListening: stop } = watch(state, ({ patches: batch }) =>
        patches.push(batch)
    );

    // Delete one object using the proxied object
    state.s.delete(proxiedObj1);
    // Patches are batched per microtask; flush the pending batch.
    await Promise.resolve();

    // Check that delete patch was emitted with the generated ID
    const deletePaths = patches
        .flat()
        .filter((p) => p.op === "remove")
        .map((p) => p.path.join("."));
    expect(deletePaths).toContain(`s.${id1}`);
    expect(deletePaths).not.toContain(`s.${id2}`);

    stop();
});
}); });
}); });

@ -493,6 +493,84 @@ describe("watch (patch mode)", () => {
expect(flat.some((p) => p.endsWith("eIter.inner.v"))).toBe(true); expect(flat.some((p) => p.endsWith("eIter.inner.v"))).toBe(true);
stop(); stop();
}); });
// Root-level Set: primitive adds must produce `add`/`type: "set"` patches
// whose path is the empty array (there is no parent container to build from).
it("generates correct patches when root is a Set (primitive entries)", async () => {
    const rootSet = deepSignal(new Set<any>());
    const batches: DeepPatch[][] = [];
    const { stopListening: stop } = watch(rootSet, ({ patches }) =>
        batches.push(patches)
    );

    rootSet.add(1);
    rootSet.add("test");
    rootSet.add(true);
    // All three adds happen in one microtask, so they batch together.
    await Promise.resolve();

    expect(batches.length).toBe(1);
    const patches = batches[0];
    expect(patches.length).toBe(3);

    // When root is a Set, path should be empty array for primitive adds
    patches.forEach((p) => {
        expect(p.path).toEqual([]);
        expect(p.op).toBe("add");
        expect((p as any).type).toBe("set");
    });

    const values = patches.map((p: any) => p.value[0]);
    expect(values).toContain(1);
    expect(values).toContain("test");
    expect(values).toContain(true);

    stop();
});
// Root-level Set: object adds must use the entry's synthetic id ("@id") as
// the first path segment, both for the entry itself and its properties.
it("generates correct patches when root is a Set (object entries)", async () => {
    const rootSet = deepSignal(new Set<any>());
    const batches: DeepPatch[][] = [];
    const { stopListening: stop } = watch(rootSet, ({ patches }) =>
        batches.push(patches)
    );

    const obj1 = { "@id": "obj1", value: 1 };
    const obj2 = { "@id": "obj2", value: 2 };
    rootSet.add(obj1);
    rootSet.add(obj2);
    // Flush the microtask-batched patches.
    await Promise.resolve();

    const flat = batches.flat().map((p) => p.path.join("."));
    // When root is a Set, first element of path should be synthetic id
    expect(flat).toContain("obj1");
    expect(flat).toContain("obj1.@id");
    expect(flat).toContain("obj1.value");
    expect(flat).toContain("obj2");
    expect(flat).toContain("obj2.@id");
    expect(flat).toContain("obj2.value");

    stop();
});
// Root-level Set: mutating a nested property of a proxied entry must emit a
// patch whose path starts with the entry's synthetic key (here its `id`).
it("tracks nested mutations when root is a Set", async () => {
    const rootSet = deepSignal(new Set<any>());
    const obj = { id: "nested", data: { x: 1 } };
    rootSet.add(obj);

    const batches: DeepPatch[][] = [];
    const { stopListening: stop } = watch(rootSet, ({ patches }) =>
        batches.push(patches)
    );

    // Get the proxied entry — iteration yields proxies, not raw objects.
    let proxied: any;
    for (const e of rootSet.values()) {
        proxied = e;
    }

    proxied.data.x = 2;
    // Flush the microtask-batched patches.
    await Promise.resolve();

    const flat = batches.flat().map((p) => p.path.join("."));
    expect(flat.some((p) => p === "nested.data.x")).toBe(true);

    stop();
});
}); });
describe("Arrays & mixed batch", () => { describe("Arrays & mixed batch", () => {

@ -24,6 +24,11 @@ onmessage = (e) => {
e.data.pazzle, e.data.pazzle,
e.data.pin_code e.data.pin_code
); );
} else if (e.data.password) {
secret_wallet = await ng.wallet_open_with_password(
e.data.wallet,
e.data.password
);
} else if (e.data.mnemonic_words) { } else if (e.data.mnemonic_words) {
secret_wallet = await ng.wallet_open_with_mnemonic_words( secret_wallet = await ng.wallet_open_with_mnemonic_words(
e.data.wallet, e.data.wallet,

@ -5,7 +5,6 @@ import Highlight from "../components/Highlight.astro";
import VueRoot from "../components/VueRoot.vue"; import VueRoot from "../components/VueRoot.vue";
import ReactRoot from "../components/ReactRoot"; import ReactRoot from "../components/ReactRoot";
import SvelteRoot from "../components/SvelteRoot.svelte"; import SvelteRoot from "../components/SvelteRoot.svelte";
import { initNg } from "@ng-org/signals"
const title = "Multi-framework app"; const title = "Multi-framework app";
--- ---
@ -28,10 +27,14 @@ const title = "Multi-framework app";
let info = await ng.client_info(); let info = await ng.client_info();
console.log(info.V0.details); console.log(info.V0.details);
initNg(ng, event.session); initNg(ng, event.session);
window.ng = ng;
window.session = event.session;
}, },
true, true,
[] []
); );
</script> </script>
<Layout title={title}> <Layout title={title}>
<Highlight vue> <Highlight vue>
@ -45,4 +48,5 @@ const title = "Multi-framework app";
<Highlight svelte> <Highlight svelte>
<SvelteRoot client:only /> <SvelteRoot client:only />
</Highlight> </Highlight>
</Layout> </Layout>

@ -1,32 +1,117 @@
import React from "react"; import React, { useEffect, useState } from "react";
import { useShape } from "@ng-org/signals/react"; import { useShape } from "@ng-org/signals/react";
import flattenObject from "../utils/flattenObject"; import flattenObject from "../utils/flattenObject";
import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes"; import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes";
import { BasicShapeType } from "../../shapes/orm/basic.shapeTypes";
import type { ShapeType } from "@ng-org/shex-orm";
import type { Basic } from "../../shapes/orm/basic.typings";
import { deepSignal, watch } from "@ng-org/alien-deepsignals";
const sparqlExampleData = `
PREFIX ex: <http://example.org/>
INSERT DATA {
<urn:test:obj1> a ex:TestObject ;
ex:stringValue "hello world" ;
ex:numValue 42 ;
ex:boolValue true ;
ex:arrayValue 1,2,3 ;
ex:objectValue <urn:test:id3> ;
ex:anotherObject <urn:test:id1>, <urn:test:id2> ;
ex:numOrStr "either" ;
ex:lit1Or2 "lit1" ;
ex:unrelated "some value" ;
ex:anotherUnrelated 4242 .
<urn:test:id3>
ex:nestedString "nested" ;
ex:nestedNum 7 ;
ex:nestedArray 5,6 .
<urn:test:id1>
ex:prop1 "one" ;
ex:prop2 1 .
<urn:test:id2>
ex:prop1 "two" ;
ex:prop2 2 .
<urn:test:obj2> a ex:TestObject ;
ex:stringValue "hello world #2" ;
ex:numValue 422 ;
ex:boolValue false ;
ex:arrayValue 4,5,6 ;
ex:objectValue <urn:test:id6> ;
ex:anotherObject <urn:test:id4>, <urn:test:id5> ;
ex:numOrStr 4 ;
ex:lit1Or2 "lit2" ;
ex:unrelated "some value2" ;
ex:anotherUnrelated 42422 .
<urn:test:id6>
ex:nestedString "nested2" ;
ex:nestedNum 72 ;
ex:nestedArray 7,8,9 .
<urn:test:id4>
ex:prop1 "one2" ;
ex:prop2 12 .
<urn:test:id5>
ex:prop1 "two2" ;
ex:prop2 22 .
<urn:basicObject4>
a <http://example.org/Basic> ;
ex:basicString "string of object 1" .
<urn:basicObject5>
a <http://example.org/Basic> ;
ex:basicString "string of object 2" .
}
`;
export function HelloWorldReact() { export function HelloWorldReact() {
const state = useShape(TestObjectShapeType)?.entries().next(); const state = useShape(TestObjectShapeType);
const objects = [...(state || [])];
// @ts-expect-error // @ts-expect-error
window.reactState = state; window.reactState = state;
if (!state) return <>Loading state</>;
// Create a table from the state object: One column for keys, one for values, one with an input to change the value. // Create a table from the state object: One column for keys, one for values, one with an input to change the value.
return ( return (
<div> <div>
<p>Rendered in React</p> <p>Rendered in React</p>
<button <button
onClick={() => { onClick={() => {
state.boolValue = !state.boolValue; window.ng.sparql_update(
state.numValue += 2; window.session.session_id,
sparqlExampleData,
"did:ng:" + window.session.private_store_id
);
}} }}
> >
click me to change multiple props Add example data
</button>
<button
onClick={() => {
window.ng.sparql_update(
window.session.session_id,
`DELETE WHERE { ?s ?p ?o .};`,
"did:ng:" + window.session.private_store_id
);
}}
>
Remove all data
</button> </button>
<table border={1} cellPadding={5}> {!state ? (
<div>Loading...</div>
) : (
<div>
{objects.map((ormObj) => (
<table border={1} cellPadding={5} key={ormObj["@id"]}>
<thead> <thead>
<tr> <tr>
<th>Key</th> <th>Key</th>
@ -37,70 +122,72 @@ export function HelloWorldReact() {
<tbody> <tbody>
{(() => { {(() => {
const setNestedValue = ( const setNestedValue = (
obj: any, targetObj: any,
path: string, lastKey: string,
value: any value: any
) => { ) => {
const keys = path.split("."); // targetObj is the direct parent object containing the property
let current = obj; // lastKey is the property name to set
targetObj[lastKey] = value;
for (let i = 0; i < keys.length - 1; i++) {
current = current[keys[i]];
}
current[keys[keys.length - 1]] = value;
};
const getNestedValue = (obj: any, path: string) => {
return path
.split(".")
.reduce((current, key) => current[key], obj);
}; };
return flattenObject(state).map(([key, value]) => ( return flattenObject(ormObj).map(
([key, value, lastKey, parentObj]) => (
<tr key={key}> <tr key={key}>
<td>{key}</td> <td>{key}</td>
<td> <td>
{value instanceof Set {value instanceof Set
? Array.from(value).join(", ") ? Array.from(
value
).join(", ")
: Array.isArray(value) : Array.isArray(value)
? `[${value.join(", ")}]` ? `[${value.join(", ")}]`
: JSON.stringify(value)} : JSON.stringify(
value
)}
</td> </td>
<td> <td>
{typeof value === "string" ? ( {typeof value ===
"string" ? (
<input <input
type="text" type="text"
value={value} value={value}
onChange={(e) => { onChange={(e) => {
setNestedValue( setNestedValue(
state, parentObj,
key, lastKey,
e.target.value e.target
.value
); );
}} }}
/> />
) : typeof value === "number" ? ( ) : typeof value ===
"number" ? (
<input <input
type="number" type="number"
value={value} value={value}
onChange={(e) => { onChange={(e) => {
setNestedValue( setNestedValue(
state, parentObj,
key, lastKey,
Number(e.target.value) Number(
e.target
.value
)
); );
}} }}
/> />
) : typeof value === "boolean" ? ( ) : typeof value ===
"boolean" ? (
<input <input
type="checkbox" type="checkbox"
checked={value} checked={value}
onChange={(e) => { onChange={(e) => {
setNestedValue( setNestedValue(
state, parentObj,
key, lastKey,
e.target.checked e.target
.checked
); );
}} }}
/> />
@ -108,33 +195,29 @@ export function HelloWorldReact() {
<div> <div>
<button <button
onClick={() => { onClick={() => {
const currentArray = setNestedValue(
getNestedValue( parentObj,
state, lastKey,
key [
...value,
value.length +
1,
]
); );
setNestedValue(state, key, [
...currentArray,
currentArray.length + 1,
]);
}} }}
> >
Add Add
</button> </button>
<button <button
onClick={() => { onClick={() => {
const currentArray =
getNestedValue(
state,
key
);
if ( if (
currentArray.length > 0 value.length >
0
) { ) {
setNestedValue( setNestedValue(
state, parentObj,
key, lastKey,
currentArray.slice( value.slice(
0, 0,
-1 -1
) )
@ -149,13 +232,17 @@ export function HelloWorldReact() {
<div> <div>
<button <button
onClick={() => { onClick={() => {
const currentSet = const newSet =
getNestedValue( new Set(
state, value
key );
newSet.add(
`item${newSet.size + 1}`
); );
currentSet.add( setNestedValue(
`item${currentSet.size + 1}` parentObj,
lastKey,
newSet
); );
}} }}
> >
@ -163,18 +250,24 @@ export function HelloWorldReact() {
</button> </button>
<button <button
onClick={() => { onClick={() => {
const currentSet = const arr =
getNestedValue( Array.from(
state, value
key
); );
const lastItem = const lastItem =
Array.from( arr.pop();
currentSet if (
).pop(); lastItem !==
if (lastItem) { undefined
currentSet.delete( ) {
lastItem const newSet =
new Set(
arr
);
setNestedValue(
parentObj,
lastKey,
newSet
); );
} }
}} }}
@ -187,10 +280,14 @@ export function HelloWorldReact() {
)} )}
</td> </td>
</tr> </tr>
)); )
);
})()} })()}
</tbody> </tbody>
</table> </table>
))}
</div>
)}
</div> </div>
); );
} }

@ -1,37 +1,38 @@
<script lang="ts"> <script lang="ts">
import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes";
import { useShape } from "@ng-org/signals/svelte"; import { useShape } from "@ng-org/signals/svelte";
import flattenObject from "../utils/flattenObject"; import flattenObject from "../utils/flattenObject";
import { BasicShapeType } from "../../shapes/orm/basic.shapeTypes";
import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes";
const shapeObject = useShape(TestObjectShapeType); const shapeObjects = useShape(TestObjectShapeType);
function getNestedValue(obj: any, path: string) { function getNestedValue(obj: any, path: string) {
return path return path
.split(".") .split(".")
.reduce((cur, k) => (cur == null ? cur : cur[k]), obj); .reduce((cur, k) => (cur == null ? cur : cur[k]), obj);
} }
function setNestedValue(obj: any, path: string, value: any) { function setNestedValue(targetObj: any, lastKey: string, value: any) {
const keys = path.split("."); // targetObj is the direct parent object containing the property
let cur = obj; // lastKey is the property name to set
for (let i = 0; i < keys.length - 1; i++) { targetObj[lastKey] = value;
cur = cur[keys[i]];
if (cur == null) return;
} }
cur[keys[keys.length - 1]] = value; const flattenedObjects = $derived(
} $shapeObjects
const flatEntries = $derived( ? Array.from($shapeObjects.values()).map((o) => {
$shapeObject const flattened = flattenObject(o);
? flattenObject($shapeObject.entries().next() || ({} as any)) (window as any).svelteFlattened = flattened;
return { entries: flattened, rootObj: o };
})
: [] : []
); );
$effect(() => { (window as any).svelteState = $shapeObjects;
(window as any).svelteState = $shapeObject;
});
</script> </script>
{#if $shapeObject} {#if $shapeObjects}
<div> <div>
<p>Rendered in Svelte</p> <p>Rendered in Svelte</p>
{#each flattenedObjects as { entries: flatEntries, rootObj }}
<table border="1" cellpadding="5"> <table border="1" cellpadding="5">
<thead> <thead>
<tr> <tr>
@ -41,7 +42,7 @@
</tr> </tr>
</thead> </thead>
<tbody> <tbody>
{#each flatEntries as [key, value] (key)} {#each flatEntries as [key, value, lastKey, parentObj]}
<tr> <tr>
<td style="white-space:nowrap;">{key}</td> <td style="white-space:nowrap;">{key}</td>
<td> <td>
@ -59,38 +60,44 @@
type="text" type="text"
{value} {value}
oninput={(e: any) => oninput={(e: any) =>
setNestedValue($shapeObject, key, e.target.value)} setNestedValue(parentObj, lastKey, e.target.value)}
/> />
{:else if typeof value === "number"} {:else if typeof value === "number"}
<input <input
type="number" type="number"
{value} {value}
oninput={(e: any) => oninput={(e: any) =>
setNestedValue($shapeObject, key, Number(e.target.value))} setNestedValue(
parentObj,
lastKey,
Number(e.target.value)
)}
/> />
{:else if typeof value === "boolean"} {:else if typeof value === "boolean"}
<input <input
type="checkbox" type="checkbox"
checked={value} checked={value}
onchange={(e: any) => onchange={(e: any) =>
setNestedValue($shapeObject, key, e.target.checked)} setNestedValue(parentObj, lastKey, e.target.checked)}
/> />
{:else if Array.isArray(value)} {:else if Array.isArray(value)}
<div style="display:flex; gap:.5rem;"> <div style="display:flex; gap:.5rem;">
<button <button
onclick={() => { onclick={() => {
const cur = getNestedValue($shapeObject, key) || []; setNestedValue(parentObj, lastKey, [
setNestedValue($shapeObject, key, [ ...value,
...cur, value.length + 1,
cur.length + 1,
]); ]);
}}>Add</button }}>Add</button
> >
<button <button
onclick={() => { onclick={() => {
const cur = getNestedValue($shapeObject, key) || []; if (value.length)
if (cur.length) setNestedValue(
setNestedValue($shapeObject, key, cur.slice(0, -1)); parentObj,
lastKey,
value.slice(0, -1)
);
}}>Remove</button }}>Remove</button
> >
</div> </div>
@ -98,15 +105,19 @@
<div style="display:flex; gap:.5rem;"> <div style="display:flex; gap:.5rem;">
<button <button
onclick={() => { onclick={() => {
const cur: Set<any> = getNestedValue($shapeObject, key); const newSet = new Set(value);
cur.add(`item${cur.size + 1}`); newSet.add(`item${newSet.size + 1}`);
setNestedValue(parentObj, lastKey, newSet);
}}>Add</button }}>Add</button
> >
<button <button
onclick={() => { onclick={() => {
const cur: Set<any> = getNestedValue($shapeObject, key); const arr = Array.from(value);
const last = Array.from(cur).pop(); const last = arr.pop();
if (last !== undefined) cur.delete(last); if (last !== undefined) {
const newSet = new Set(arr);
setNestedValue(parentObj, lastKey, newSet);
}
}}>Remove</button }}>Remove</button
> >
</div> </div>
@ -118,6 +129,7 @@
{/each} {/each}
</tbody> </tbody>
</table> </table>
{/each}
</div> </div>
{:else} {:else}
<p>Loading state</p> <p>Loading state</p>

@ -25,14 +25,46 @@ const flattenObject = (
for (const [key, value] of Object.entries(obj)) { for (const [key, value] of Object.entries(obj)) {
if (skipDollarKeys && key.startsWith("$")) continue; if (skipDollarKeys && key.startsWith("$")) continue;
const fullKey = prefix ? `${prefix}.${key}` : key; const fullKey = prefix ? `${prefix}.${key}` : key;
// Handle Sets containing objects with @id
if (value instanceof Set) {
const setItems = Array.from(value);
// Check if Set contains objects with @id
if ( if (
setItems.length > 0 &&
setItems.some(
(item) => item && typeof item === "object" && "@id" in item
)
) {
// Flatten each object in the Set
setItems.forEach((item) => {
if (item && typeof item === "object" && "@id" in item) {
const itemId = item["@id"];
const itemPrefix = `${fullKey}[@id=${itemId}]`;
result.push(
...flattenObject(
item,
itemPrefix,
options,
seen,
depth + 1
)
);
}
});
} else {
// Set doesn't contain objects with @id, treat as leaf
result.push([fullKey, value, key, obj]);
}
} else if (
value && value &&
typeof value === "object" && typeof value === "object" &&
!Array.isArray(value) && !Array.isArray(value) &&
!(value instanceof Set) &&
isPlainObject(value) isPlainObject(value)
) { ) {
result.push(...flattenObject(value, fullKey, options, seen, depth + 1)); result.push(
...flattenObject(value, fullKey, options, seen, depth + 1)
);
} else { } else {
result.push([fullKey, value, key, obj]); result.push([fullKey, value, key, obj]);
} }

@ -3,26 +3,24 @@ import { computed } from "vue";
import { useShape } from "@ng-org/signals/vue"; import { useShape } from "@ng-org/signals/vue";
import flattenObject from "../utils/flattenObject"; import flattenObject from "../utils/flattenObject";
import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes"; import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes";
import { BasicShapeType } from "../../shapes/orm/basic.shapeTypes";
// Acquire deep signal object (proxy) for a shape; scope second arg left empty string for parity // Acquire deep signal object (proxy) for a shape; scope second arg left empty string for parity
const shapeObj = useShape(TestObjectShapeType); const shapeObjects = useShape(TestObjectShapeType);
// Expose for devtools exploration // Expose for devtools exploration
// @ts-ignore // @ts-ignore
window.vueState = shapeObj; window.vueState = shapeObjects;
console.log("vue loaded")
const flatEntries = computed(() => flattenObject(shapeObj));
</script> </script>
<template> <template>
<div class="vue"> <div class="vue">
<p>Rendered in Vue</p> <p>Rendered in Vue</p>
<template v-if="shapeObj && 'type' in shapeObj"> <template v-if="shapeObjects">
<!-- Direct property access --> <template v-for="obj in shapeObjects" :key="obj">
<input type="text" v-model="shapeObj.type" /> <template v-for="flatEntries in [flattenObject(obj)]">
<input type="text" v-model="shapeObj.objectValue.nestedString" />
<!-- Property access through object recursion --> <!-- Property access through object recursion -->
<table <table
border="1" border="1"
@ -65,7 +63,7 @@ const flatEntries = computed(() => flattenObject(shapeObj));
<template v-if="path.indexOf('.') === -1"> <template v-if="path.indexOf('.') === -1">
<input <input
type="text" type="text"
v-model="shapeObj[key]" v-model="obj[key]"
/> />
</template> </template>
<template v-else> <template v-else>
@ -87,7 +85,7 @@ const flatEntries = computed(() => flattenObject(shapeObj));
<template v-if="path.indexOf('.') === -1"> <template v-if="path.indexOf('.') === -1">
<input <input
type="number" type="number"
v-model="shapeObj[key]" v-model="obj[key]"
/> />
</template> </template>
<template v-else> <template v-else>
@ -108,7 +106,7 @@ const flatEntries = computed(() => flattenObject(shapeObj));
<template v-if="path.indexOf('.') === -1"> <template v-if="path.indexOf('.') === -1">
<input <input
type="checkbox" type="checkbox"
v-model="shapeObj[key]" v-model="obj[key]"
/> />
</template> </template>
<template v-else> <template v-else>
@ -215,6 +213,8 @@ const flatEntries = computed(() => flattenObject(shapeObj));
</tbody> </tbody>
</table> </table>
</template> </template>
</template>
</template>
<template v-else> <template v-else>
<p>Loading state</p> <p>Loading state</p>
</template> </template>

@ -0,0 +1,37 @@
import type { Schema } from "@ng-org/shex-orm";
/**
* =============================================================================
* basicSchema: Schema for basic
* =============================================================================
*/
export const basicSchema: Schema = {
"http://example.org/BasicShape": {
iri: "http://example.org/BasicShape",
predicates: [
{
dataTypes: [
{
valType: "literal",
literals: ["http://example.org/Basic"],
},
],
maxCardinality: 1,
minCardinality: 1,
iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#type",
readablePredicate: "@type",
},
{
dataTypes: [
{
valType: "string",
},
],
maxCardinality: 1,
minCardinality: 1,
iri: "http://example.org/basicString",
readablePredicate: "basicString",
},
],
},
};

@ -0,0 +1,9 @@
import type { ShapeType } from "@ng-org/shex-orm";
import { basicSchema } from "./basic.schema";
import type { Basic } from "./basic.typings";
// ShapeTypes for basic
export const BasicShapeType: ShapeType<Basic> = {
schema: basicSchema,
shape: "http://example.org/BasicShape",
};

@ -0,0 +1,22 @@
export type IRI = string;
/**
* =============================================================================
* Typescript Typings for basic
* =============================================================================
*/
/**
* Basic Type
*/
export interface Basic {
readonly "@id": IRI;
/**
* Original IRI: http://www.w3.org/1999/02/22-rdf-syntax-ns#type
*/
"@type": "http://example.org/Basic";
/**
* Original IRI: http://example.org/basicString
*/
basicString: string;
}

@ -6,8 +6,8 @@ import type { Schema } from "@ng-org/shex-orm";
* ============================================================================= * =============================================================================
*/ */
export const catShapeSchema: Schema = { export const catShapeSchema: Schema = {
"http://example.org/Cat": { "http://example.org/CatShape": {
iri: "http://example.org/Cat", iri: "http://example.org/CatShape",
predicates: [ predicates: [
{ {
dataTypes: [ dataTypes: [
@ -58,7 +58,7 @@ export const catShapeSchema: Schema = {
dataTypes: [ dataTypes: [
{ {
valType: "shape", valType: "shape",
shape: "http://example.org/Cat||http://example.org/address", shape: "http://example.org/CatShape||http://example.org/address",
}, },
], ],
maxCardinality: 1, maxCardinality: 1,
@ -68,8 +68,8 @@ export const catShapeSchema: Schema = {
}, },
], ],
}, },
"http://example.org/Cat||http://example.org/address": { "http://example.org/CatShape||http://example.org/address": {
iri: "http://example.org/Cat||http://example.org/address", iri: "http://example.org/CatShape||http://example.org/address",
predicates: [ predicates: [
{ {
dataTypes: [ dataTypes: [

@ -5,5 +5,5 @@ import type { Cat } from "./catShape.typings";
// ShapeTypes for catShape // ShapeTypes for catShape
export const CatShapeType: ShapeType<Cat> = { export const CatShapeType: ShapeType<Cat> = {
schema: catShapeSchema, schema: catShapeSchema,
shape: "http://example.org/Cat", shape: "http://example.org/CatShape",
}; };

@ -14,7 +14,7 @@ export interface Cat {
/** /**
* Original IRI: http://www.w3.org/1999/02/22-rdf-syntax-ns#type * Original IRI: http://www.w3.org/1999/02/22-rdf-syntax-ns#type
*/ */
"@type": string; "@type": "http://example.org/Cat";
/** /**
* Original IRI: http://example.org/name * Original IRI: http://example.org/name
*/ */

@ -6,8 +6,8 @@ import type { Schema } from "@ng-org/shex-orm";
* ============================================================================= * =============================================================================
*/ */
export const personShapeSchema: Schema = { export const personShapeSchema: Schema = {
"http://example.org/Person": { "http://example.org/PersonShape": {
iri: "http://example.org/Person", iri: "http://example.org/PersonShape",
predicates: [ predicates: [
{ {
dataTypes: [ dataTypes: [
@ -36,7 +36,7 @@ export const personShapeSchema: Schema = {
dataTypes: [ dataTypes: [
{ {
valType: "shape", valType: "shape",
shape: "http://example.org/Person||http://example.org/address", shape: "http://example.org/PersonShape||http://example.org/address",
}, },
], ],
maxCardinality: 1, maxCardinality: 1,
@ -68,8 +68,8 @@ export const personShapeSchema: Schema = {
}, },
], ],
}, },
"http://example.org/Person||http://example.org/address": { "http://example.org/PersonShape||http://example.org/address": {
iri: "http://example.org/Person||http://example.org/address", iri: "http://example.org/PersonShape||http://example.org/address",
predicates: [ predicates: [
{ {
dataTypes: [ dataTypes: [

@ -5,5 +5,5 @@ import type { Person } from "./personShape.typings";
// ShapeTypes for personShape // ShapeTypes for personShape
export const PersonShapeType: ShapeType<Person> = { export const PersonShapeType: ShapeType<Person> = {
schema: personShapeSchema, schema: personShapeSchema,
shape: "http://example.org/Person", shape: "http://example.org/PersonShape",
}; };

@ -14,7 +14,7 @@ export interface Person {
/** /**
* Original IRI: http://www.w3.org/1999/02/22-rdf-syntax-ns#type * Original IRI: http://www.w3.org/1999/02/22-rdf-syntax-ns#type
*/ */
"@type": string; "@type": "http://example.org/Person";
/** /**
* Original IRI: http://example.org/name * Original IRI: http://example.org/name
*/ */

@ -6,8 +6,8 @@ import type { Schema } from "@ng-org/shex-orm";
* ============================================================================= * =============================================================================
*/ */
export const testShapeSchema: Schema = { export const testShapeSchema: Schema = {
"http://example.org/TestObject": { "http://example.org/TestObjectShape": {
iri: "http://example.org/TestObject", iri: "http://example.org/TestObjectShape",
predicates: [ predicates: [
{ {
dataTypes: [ dataTypes: [
@ -71,7 +71,7 @@ export const testShapeSchema: Schema = {
{ {
valType: "shape", valType: "shape",
shape: shape:
"http://example.org/TestObject||http://example.org/objectValue", "http://example.org/TestObjectShape||http://example.org/objectValue",
}, },
], ],
maxCardinality: 1, maxCardinality: 1,
@ -84,7 +84,7 @@ export const testShapeSchema: Schema = {
{ {
valType: "shape", valType: "shape",
shape: shape:
"http://example.org/TestObject||http://example.org/anotherObject", "http://example.org/TestObjectShape||http://example.org/anotherObject",
}, },
], ],
maxCardinality: -1, maxCardinality: -1,
@ -110,7 +110,11 @@ export const testShapeSchema: Schema = {
dataTypes: [ dataTypes: [
{ {
valType: "literal", valType: "literal",
literals: ["lit1", "lit2"], literals: ["lit1"],
},
{
valType: "literal",
literals: ["lit2"],
}, },
], ],
maxCardinality: 1, maxCardinality: 1,
@ -120,8 +124,8 @@ export const testShapeSchema: Schema = {
}, },
], ],
}, },
"http://example.org/TestObject||http://example.org/objectValue": { "http://example.org/TestObjectShape||http://example.org/objectValue": {
iri: "http://example.org/TestObject||http://example.org/objectValue", iri: "http://example.org/TestObjectShape||http://example.org/objectValue",
predicates: [ predicates: [
{ {
dataTypes: [ dataTypes: [
@ -158,8 +162,8 @@ export const testShapeSchema: Schema = {
}, },
], ],
}, },
"http://example.org/TestObject||http://example.org/anotherObject": { "http://example.org/TestObjectShape||http://example.org/anotherObject": {
iri: "http://example.org/TestObject||http://example.org/anotherObject", iri: "http://example.org/TestObjectShape||http://example.org/anotherObject",
predicates: [ predicates: [
{ {
dataTypes: [ dataTypes: [

@ -5,5 +5,5 @@ import type { TestObject } from "./testShape.typings";
// ShapeTypes for testShape // ShapeTypes for testShape
export const TestObjectShapeType: ShapeType<TestObject> = { export const TestObjectShapeType: ShapeType<TestObject> = {
schema: testShapeSchema, schema: testShapeSchema,
shape: "http://example.org/TestObject", shape: "http://example.org/TestObjectShape",
}; };

@ -14,7 +14,7 @@ export interface TestObject {
/** /**
* Original IRI: http://www.w3.org/1999/02/22-rdf-syntax-ns#type * Original IRI: http://www.w3.org/1999/02/22-rdf-syntax-ns#type
*/ */
"@type": string; "@type": "http://example.org/TestObject";
/** /**
* Original IRI: http://example.org/stringValue * Original IRI: http://example.org/stringValue
*/ */
@ -73,5 +73,5 @@ export interface TestObject {
/** /**
* Original IRI: http://example.org/lit1Or2 * Original IRI: http://example.org/lit1Or2
*/ */
lit1Or2: "lit1" | "lit2"; lit1Or2: string | string;
} }

@ -0,0 +1,8 @@
PREFIX ex: <http://example.org/>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
ex:BasicShape {
a [ ex:Basic ] ;
ex:basicString xsd:string ;
}

@ -1,7 +1,7 @@
PREFIX ex: <http://example.org/> PREFIX ex: <http://example.org/>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
ex:Cat { ex:CatShape {
a [ ex:Cat ] ; a [ ex:Cat ] ;
ex:name xsd:string ; ex:name xsd:string ;
ex:age xsd:integer ; ex:age xsd:integer ;

@ -1,7 +1,7 @@
PREFIX ex: <http://example.org/> PREFIX ex: <http://example.org/>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
ex:Person { ex:PersonShape {
a [ ex:Person ] ; a [ ex:Person ] ;
ex:name xsd:string ; ex:name xsd:string ;
ex:address { ex:address {

@ -1,7 +1,7 @@
PREFIX ex: <http://example.org/> PREFIX ex: <http://example.org/>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
ex:TestObject EXTRA a { ex:TestObjectShape EXTRA a {
a [ ex:TestObject ] ; a [ ex:TestObject ] ;
ex:stringValue xsd:string ; ex:stringValue xsd:string ;
ex:numValue xsd:integer ; ex:numValue xsd:integer ;
@ -17,5 +17,5 @@ ex:TestObject EXTRA a {
ex:prop2 xsd:integer ; ex:prop2 xsd:integer ;
} * ; } * ;
ex:numOrStr xsd:string OR xsd:integer ; ex:numOrStr xsd:string OR xsd:integer ;
ex:lit1Or2 ["lit1" "lit2"] ; ex:lit1Or2 ["lit1"] OR ["lit2"] ;
} }

@ -21,6 +21,7 @@ use std::sync::Arc;
use nextgraph::net::app_protocol::AppRequest; use nextgraph::net::app_protocol::AppRequest;
use ng_net::orm::OrmPatch; use ng_net::orm::OrmPatch;
use ng_repo::log_info;
use ng_wallet::types::SensitiveWallet; use ng_wallet::types::SensitiveWallet;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -1314,7 +1315,9 @@ async fn app_request_stream_(
// list. // list.
// }; // };
// }; // };
let response_js = serde_wasm_bindgen::to_value(&app_response).unwrap(); let response_js = app_response
.serialize(&serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true))
.unwrap();
// if let Some(graph_triples) = graph_triples_js { // if let Some(graph_triples) = graph_triples_js {
// let response: Object = response_js.try_into().map_err(|_| { // let response: Object = response_js.try_into().map_err(|_| {
// "Error while adding triples to AppResponse.V0.State".to_string() // "Error while adding triples to AppResponse.V0.State".to_string()
@ -1818,7 +1821,6 @@ pub async fn orm_start(
) -> Result<JsValue, String> { ) -> Result<JsValue, String> {
let shape_type: OrmShapeType = serde_wasm_bindgen::from_value::<OrmShapeType>(shapeType) let shape_type: OrmShapeType = serde_wasm_bindgen::from_value::<OrmShapeType>(shapeType)
.map_err(|e| format!("Deserialization error of shapeType {e}"))?; .map_err(|e| format!("Deserialization error of shapeType {e}"))?;
log_info!("frontend_orm_start {:?}", shape_type);
let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id) let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
.map_err(|_| "Deserialization error of session_id".to_string())?; .map_err(|_| "Deserialization error of session_id".to_string())?;
let scope = if scope.is_empty() { let scope = if scope.is_empty() {
@ -1826,6 +1828,7 @@ pub async fn orm_start(
} else { } else {
NuriV0::new_from(&scope).map_err(|_| "Deserialization error of scope".to_string())? NuriV0::new_from(&scope).map_err(|_| "Deserialization error of scope".to_string())?
}; };
log_info!("[orm_start] parameters parsed, calling new_orm_start");
let mut request = AppRequest::new_orm_start(scope, shape_type); let mut request = AppRequest::new_orm_start(scope, shape_type);
request.set_session_id(session_id); request.set_session_id(session_id);
app_request_stream_(request, callback).await app_request_stream_(request, callback).await
@ -1833,21 +1836,24 @@ pub async fn orm_start(
#[wasm_bindgen] #[wasm_bindgen]
pub async fn orm_update( pub async fn orm_update(
scope: JsValue, scope: String,
shapeTypeName: String, shapeTypeName: String,
diff: JsValue, diff: JsValue,
session_id: JsValue, session_id: JsValue,
) -> Result<(), String> { ) -> Result<(), String> {
let diff: OrmPatches = serde_wasm_bindgen::from_value::<OrmPatches>(diff) let diff: OrmPatches = serde_wasm_bindgen::from_value::<OrmPatches>(diff)
.map_err(|e| format!("Deserialization error of diff {e}"))?; .map_err(|e| format!("Deserialization error of diff {e}"))?;
log_info!("frontend_update_orm {:?}", diff);
let scope: NuriV0 = serde_wasm_bindgen::from_value::<NuriV0>(scope) let scope = if scope.is_empty() {
.map_err(|_| "Deserialization error of scope".to_string())?; NuriV0::new_entire_user_site()
} else {
NuriV0::new_from(&scope).map_err(|_| "Deserialization error of scope".to_string())?
};
let mut request = AppRequest::new_orm_update(scope, shapeTypeName, diff); let mut request = AppRequest::new_orm_update(scope, shapeTypeName, diff);
let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id) let session_id: u64 = serde_wasm_bindgen::from_value::<u64>(session_id)
.map_err(|_| "Deserialization error of session_id".to_string())?; .map_err(|_| "Deserialization error of session_id".to_string())?;
request.set_session_id(session_id); request.set_session_id(session_id);
log_info!("[orm_update] calling orm_update");
let response = nextgraph::local_broker::app_request(request) let response = nextgraph::local_broker::app_request(request)
.await .await
.map_err(|e: NgError| e.to_string())?; .map_err(|e: NgError| e.to_string())?;

@ -1,542 +0,0 @@
export {};
/**
* Structure for expressing a Shape Expression schema.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapes-schema">ShEx Schema definition</a>
*/
export interface Schema {
/**
* Mandatory type "Schema".
*/
type: "Schema";
/**
* JSON-LD <a href="https://www.w3.org/TR/json-ld11/#the-context">@context</a> for ShEx.
*/
"@context"?: "http://www.w3.org/ns/shex.jsonld" | undefined;
/**
* List of semantic actions to be executed when evaluating conformance.
*/
startActs?: SemAct[] | undefined;
/**
* Identifies default starting shape expression.
*/
start?: shapeExprOrRef | undefined;
/**
* List of ShEx schemas to <a href="http://shex.io/shex-semantics/#import">import</a> when processing this schema.
*/
imports?: IRIREF[] | undefined;
/**
* The list of {@link ShapeDecl}s defined in this schema. Each MUST include and {@link ShapeOr#id}.
*/
shapes?: ShapeDecl[] | undefined;
}
export interface semactsAndAnnotations {
/**
* List of semantic actions to be executed when evaluating conformance.
*/
semActs?: SemAct[] | undefined;
/**
* List of {@link SemAct#predicate}/{@link SemAct#object} annotations.
*/
annotations?: Annotation[] | undefined;
}
/**
* A declaration for a shapeExpr with added inheritance constraints.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapedecl">ShEx ShapeDecl definition</a>
*/
export interface ShapeDecl {
/**
* Mandatory type "ShapeDecl".
*/
type: "ShapeDecl";
/**
* The identifier is an <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">IRI</a> or a <a href="https://www.w3.org/TR/json-ld11/#identifying-blank-nodes">BlankNode</a>
* as expressed in <a href="https://www.w3.org/TR/json-ld11/">JSON-LD 1.1</a>.
*/
id: shapeDeclLabel;
/**
* Whether this ShapeDecl participates in <a href="http://shex.io/shex-semantics/#dfn-inheritanceSubstitution">inheritance substitution</a>.
*/
abstract?: BOOL | undefined;
/**
* The list of {@link shapeExprOrRef}s that a neighborhood MUST conform to in order to conform to this ShapeDecl.
*/
restricts?: shapeExprOrRef[] | undefined;
/**
* The {@link shapeExpr} to which this neighborhood MUST also conform.
*/
shapeExpr: shapeExpr;
}
/**
* Union of shape expression types.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapeexpr">ShEx shapeExpr definition</a>
*/
export type shapeExpr = ShapeOr | ShapeAnd | ShapeNot | NodeConstraint | Shape | ShapeExternal;
/**
* Union of shapeExpr and shapeDeclRef.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapeexpr">ShEx shapeExpr definition</a>
*/
export type shapeExprOrRef = shapeExpr | shapeDeclRef;
/**
* A non-exclusive choice of shape expressions; considered conformant if any of {@link #shapeExprs} conforms.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapeor">ShEx shapeExpr definition</a>
*/
export interface ShapeOr {
/**
* Mandatory type "ShapeOr".
*/
type: "ShapeOr";
/**
* List of two or more {@link shapeExprOrRef}s in this disjunction.
*/
shapeExprs: shapeExprOrRef[];
}
/**
* A conjunction of shape expressions; considered conformant if each conjunct conforms.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapeor">ShEx shapeExpr definition</a>
*/
export interface ShapeAnd {
/**
* Mandatory type "ShapeAnd".
*/
type: "ShapeAnd";
/**
* List of two or more {@link shapeExprOrRef}s in this conjunction.
*/
shapeExprs: shapeExprOrRef[];
}
/**
* A negated shape expressions; considered conformant if {@link #shapeExpr} is not conformant.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapenot">ShEx shapeExpr definition</a>
*/
export interface ShapeNot {
/**
* Mandatory type "ShapeNot".
*/
type: "ShapeNot";
/**
* The {@link shapeExprOrRef} that must be non-conformant for this shape expression to be conformant.
*/
shapeExpr: shapeExprOrRef;
}
/**
* A shape expression not defined in this schema or in any imported schema. The definition of this shape expression is NOT defined by ShEx.
* @see <a href="http://shex.io/shex-semantics/#dfn-shapeexternal">ShEx shapeExpr definition</a>
*/
export interface ShapeExternal {
/**
* Mandatory type "ShapeExternal".
*/
type: "ShapeExternal";
}
/**
* A reference a shape expression.
* The reference is an <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">IRI</a> or a <a href="https://www.w3.org/TR/json-ld11/#identifying-blank-nodes">BlankNode</a>
* as expressed in <a href="https://www.w3.org/TR/json-ld11/">JSON-LD 1.1</a>.
* This is modified to also include the possibility of ShapeDecl
*/
export type shapeDeclRef = shapeDeclLabel | ShapeDecl;
/**
* An identifier for a shape expression.
* The identifier is an <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">IRI</a> or a <a href="https://www.w3.org/TR/json-ld11/#identifying-blank-nodes">BlankNode</a>
* as expressed in <a href="https://www.w3.org/TR/json-ld11/">JSON-LD 1.1</a>.
*/
export type shapeDeclLabel = IRIREF | BNODE;
export type nodeKind = "iri" | "bnode" | "nonliteral" | "literal";
/**
* A collection of constraints on <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-node">RDF Term</a>s expected for conformance.
* The identifier is an <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">IRI</a> or a <a href="https://www.w3.org/TR/json-ld11/#identifying-blank-nodes">BlankNode</a>
* as expressed in <a href="https://www.w3.org/TR/json-ld11/">JSON-LD 1.1</a>.
*/
export interface NodeConstraint extends xsFacets, semactsAndAnnotations {
/**
* Mandatory type "NodeConstraint".
*/
type: "NodeConstraint";
/**
* Type of <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-node">RDF Term</a> expected for a conformant RDF node.
* @see <a href="http://shex.io/shex-semantics/#nodeKind">ShEx nodeKind definition</a>
*/
nodeKind?: nodeKind | undefined;
/**
* The <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-datatype-iri">RDF Literal datatype IRITerm</a> expected for a conformant RDF node.
* @see <a href="http://shex.io/shex-semantics/#datatype">ShEx datatype definition</a>
*/
datatype?: IRIREF | undefined;
/**
* The set of permissible values.
* @see <a href="http://shex.io/shex-semantics/#values">ShEx values definition</a>
*/
values?: valueSetValue[] | undefined;
}
/**
 * The set of XML Schema Facets supported in ShEx; defers to {@link stringFacets} and {@link numericFacets}.
 * @see <a href="http://shex.io/shex-semantics/#xs-string">ShEx String Facet Constraints</a> and <a href="http://shex.io/shex-semantics/#xs-numeric">ShEx Numeric Facet Constraints</a>.
 */
export interface xsFacets extends stringFacets, numericFacets {
}
/**
 * The set of <a href="https://www.w3.org/TR/xmlschema-2/#facets">XML Schema Facets</a> applying to <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form">lexical forms of RDF terms</a>.
 * @see <a href="http://shex.io/shex-semantics/#xs-string">ShEx String Facet Constraints</a>.
 */
export interface stringFacets {
    /**
     * Expected length of the lexical form of an RDF Term.
     */
    length?: INTEGER | undefined;
    /**
     * Expected minimum length of the lexical form of an RDF Term.
     */
    minlength?: INTEGER | undefined;
    /**
     * Expected maximum length of the lexical form of an RDF Term.
     */
    maxlength?: INTEGER | undefined;
    /**
     * Regular expression which the lexical form of an RDF Term must match.
     */
    pattern?: STRING | undefined;
    /**
     * Optional flags for the regular expression in {@link pattern}.
     */
    flags?: STRING | undefined;
}
/**
 * The set of <a href="https://www.w3.org/TR/xmlschema-2/#facets">XML Schema Facets</a> applying to <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-value-space">numeric values of RDF terms</a>.
 * @see <a href="http://shex.io/shex-semantics/#xs-numeric">ShEx Numeric Facet Constraints</a>.
 */
export interface numericFacets {
    /**
     * Conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a> has a numeric value >= {@link mininclusive}.
     */
    mininclusive?: numericLiteral | undefined;
    /**
     * Conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a> has a numeric value > {@link minexclusive}.
     */
    minexclusive?: numericLiteral | undefined;
    /**
     * Conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a> has a numeric value <= {@link maxinclusive}.
     */
    maxinclusive?: numericLiteral | undefined;
    /**
     * Conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a> has a numeric value < {@link maxexclusive}.
     */
    maxexclusive?: numericLiteral | undefined;
    /**
     * Conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a> has a numeric value whose canonical form has {@link totaldigits} digits.
     * @see <a href="http://shex.io/shex-semantics/#nodeSatisfies-totaldigits">ShEx totalDigits definition</a>
     */
    totaldigits?: INTEGER | undefined;
    /**
     * Conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a> has a numeric value whose canonical form has {@link fractiondigits} digits.
     * @see <a href="http://shex.io/shex-semantics/#nodeSatisfies-fractiondigits">ShEx fractionDigits definition</a>
     */
    fractiondigits?: INTEGER | undefined;
}
/**
 * Union of numeric types in ShEx used in {@link numericFacets}.
 */
export type numericLiteral = INTEGER | DECIMAL | DOUBLE;
/**
 * Union of the value types that may appear in a value set.
 * @see {@link NodeConstraint#values}.
 */
export type valueSetValue = objectValue | IriStem | IriStemRange | LiteralStem | LiteralStemRange | Language | LanguageStem | LanguageStemRange;
/**
 * JSON-LD representation of a URL or a Literal.
 */
export type objectValue = IRIREF | ObjectLiteral;
/**
 * A <a href="https://www.w3.org/TR/json-ld11/#value-objects">JSON-LD Value Object</a> used to express an <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a>.
 */
export interface ObjectLiteral {
    /**
     * The <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form">lexical form</a> of an RDF Literal.
     */
    value: STRING;
    /**
     * The <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-language-tag">language tag</a> of an RDF Literal.
     */
    language?: STRING | undefined;
    /**
     * The <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-datatype">datatype</a> of an RDF Literal.
     */
    type?: STRING | undefined;
}
/**
 * Matches an <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-iri">RDF IRI</a> starting with the character sequence in {@link stem}.
 */
export interface IriStem {
    /**
     * Mandatory type "IriStem".
     */
    type: "IriStem";
    /**
     * substring of IRI to be matched.
     */
    stem: IRIREF;
}
/** Stem of an {@link IriStemRange}: an IRI prefix, or a {@link Wildcard} accepting any IRI. */
export type iriRangeStem = IRIREF | Wildcard;
/** Exclusion in an {@link IriStemRange}: a specific IRI, or an {@link IriStem} prefix. */
export type iriRangeExclusion = IRIREF | IriStem;
/**
 * Filters matching <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-iri">RDF IRI</a>s through a list of exclusions.
 * The initial match is made on an IRI stem per {@link IriStem} or a {@link Wildcard} to accept any IRI.
 * The {@link exclusions} are either specific IRIs or {@link IriStem}s.
 */
export interface IriStemRange {
    /**
     * Mandatory type "IriStemRange".
     */
    type: "IriStemRange";
    /**
     * substring of IRI to be matched or a {@link Wildcard} matching any IRI.
     */
    stem: iriRangeStem;
    /**
     * IRIs or {@link IriStem}s to exclude.
     */
    exclusions: iriRangeExclusion[];
}
/**
 * Matches an <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a> starting with the character sequence in {@link stem}.
 */
export interface LiteralStem {
    /**
     * Mandatory type "LiteralStem".
     */
    type: "LiteralStem";
    /**
     * substring of Literal to be matched.
     */
    stem: STRING;
}
/** Stem of a {@link LiteralStemRange}: a literal prefix, or a {@link Wildcard} accepting any Literal. */
export type literalRangeStem = string | Wildcard;
/** Exclusion in a {@link LiteralStemRange}: a specific Literal, or a {@link LiteralStem} prefix. */
export type literalRangeExclusion = string | LiteralStem;
/**
 * Filters matching <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-literal">RDF Literal</a>s through a list of exclusions.
 * The initial match is made on a Literal stem per {@link LiteralStem} or a {@link Wildcard} to accept any Literal.
 * The {@link exclusions} are either specific Literals or {@link LiteralStem}s.
 */
export interface LiteralStemRange {
    /**
     * Mandatory type "LiteralStemRange".
     */
    type: "LiteralStemRange";
    /**
     * substring of Literal to be matched or a {@link Wildcard} matching any Literal.
     */
    stem: literalRangeStem;
    /**
     * Literals or {@link LiteralStem}s to exclude.
     */
    exclusions: literalRangeExclusion[];
}
/**
 * An <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-language-tag">RDF Language Tag</a>.
 */
export interface Language {
    /**
     * Mandatory type "Language".
     */
    type: "Language";
    /**
     * The <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form">lexical representation</a> of an RDF Language Tag.
     */
    languageTag: LANGTAG;
}
/**
 * Matches an <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-language-tag">RDF Language Tag</a> starting with the character sequence in {@link stem}.
 */
export interface LanguageStem {
    /**
     * Mandatory type "LanguageStem".
     */
    type: "LanguageStem";
    /**
     * substring of Language Tag to be matched.
     */
    stem: LANGTAG;
}
/** Stem of a {@link LanguageStemRange}: a language-tag prefix, or a {@link Wildcard} accepting any Language Tag. */
export type languageRangeStem = string | Wildcard;
/** Exclusion in a {@link LanguageStemRange}: a specific Language Tag, or a {@link LanguageStem} prefix. */
export type languageRangeExclusion = string | LanguageStem;
/**
 * Filters matching <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-language-tag">RDF Language Tag</a>s through a list of exclusions.
 * The initial match is made on a Language Tag stem per {@link LanguageStem} or a {@link Wildcard} to accept any Language Tag.
 * The {@link exclusions} are either specific Language Tags or {@link LanguageStem}s.
 */
export interface LanguageStemRange {
    /**
     * Mandatory type "LanguageStemRange".
     */
    type: "LanguageStemRange";
    /**
     * substring of Language-Tag to be matched or a {@link Wildcard} matching any Language Tag.
     */
    stem: languageRangeStem;
    /**
     * Language Tags or {@link LanguageStem}s to exclude.
     */
    exclusions: languageRangeExclusion[];
}
/**
 * An empty object signifying that any item may be matched.
 * This is used in {@link IriStemRange}, {@link LiteralStemRange} and {@link LanguageStemRange}.
 */
export interface Wildcard {
    /**
     * Mandatory type "Wildcard".
     */
    type: "Wildcard";
}
/**
 * A collection of {@link tripleExpr}s which must be matched by <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-triple">RDF Triple</a>s in conformance data.
 */
export interface Shape extends semactsAndAnnotations {
    /**
     * Mandatory type "Shape".
     */
    type: "Shape";
    /**
     * Only the predicates mentioned in the {@link expression} may appear in conformant data.
     */
    closed?: BOOL | undefined;
    /**
     * Permit extra triples with these predicates to appear in triples which don't match any {@link TripleConstraint}s mentioned in the {@link expression}.
     */
    extra?: IRIREF[] | undefined;
    /**
     * List of one or more {@link shapeExprOrRef}s that a neighborhood must satisfy in order to conform to this shape.
     */
    extends?: shapeExprOrRef[];
    /**
     * A tree of {@link tripleExpr}s specifying a set of triples into or out of conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-node">RDF Nodes</a>.
     */
    expression?: tripleExprOrRef | undefined;
}
/**
 * Union of triple expression types.
 * @see <a href="http://shex.io/shex-semantics/#dfn-tripleexpr">ShEx tripleExpr definition</a>
 */
export type tripleExpr = EachOf | OneOf | TripleConstraint;
/**
 * A tripleExpr or a label to one.
 * @see <a href="http://shex.io/shex-semantics/#dfn-tripleexpr">ShEx tripleExpr definition</a>
 */
export type tripleExprOrRef = tripleExpr | tripleExprRef;
/**
 * Common attributes appearing in every form of {@link tripleExpr}.
 */
export interface tripleExprBase extends semactsAndAnnotations {
    /**
     * Optional identifier for {@link tripleExpr}s for reference by {@link tripleExprRef}.
     * The identifier is an <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">IRI</a> or a <a href="https://www.w3.org/TR/json-ld11/#identifying-blank-nodes">BlankNode</a>
     * as expressed in <a href="https://www.w3.org/TR/json-ld11/">JSON-LD 1.1</a>.
     */
    id?: tripleExprLabel | undefined;
    /**
     * Minimum number of times matching triples must appear in conformant data.
     */
    min?: INTEGER | undefined;
    /**
     * Maximum number of times matching triples may appear in conformant data.
     */
    max?: INTEGER | undefined;
}
/**
 * A list of triple expressions; considered conformant if there is some conforming mapping of the examined triples to the {@link #expressions}.
 * @see <a href="http://shex.io/shex-semantics/#dfn-eachof">ShEx EachOf definition</a>
 */
export interface EachOf extends tripleExprBase {
    /**
     * Mandatory type "EachOf".
     */
    type: "EachOf";
    /** The triple expressions which must all be matched. */
    expressions: tripleExprOrRef[];
}
/**
 * An exclusive choice of triple expressions; considered conformant if exactly one of {@link #expressions} conforms.
 * @see <a href="http://shex.io/shex-semantics/#dfn-oneof">ShEx OneOf definition</a>
 */
export interface OneOf extends tripleExprBase {
    /**
     * Mandatory type "OneOf".
     */
    type: "OneOf";
    /** The alternative triple expressions. */
    expressions: tripleExprOrRef[];
}
/**
 * A template matching a number of triples attached to the node being validated.
 */
export interface TripleConstraint extends tripleExprBase {
    /**
     * Mandatory type "TripleConstraint".
     */
    type: "TripleConstraint";
    /**
     * If false, the TripleConstraint matches a triple composed of the focus node, the {@link predicate} and an object matching the (optional) {@link valueExpr}.
     * If true, the TripleConstraint matches a triple composed of a subject matching the (optional) {@link valueExpr}, the {@link predicate} and the focus node.
     */
    inverse?: BOOL | undefined;
    /**
     * The predicate expected in a matching <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-triple">RDF Triple</a>.
     */
    predicate: IRIREF;
    /**
     * A {@link shapeExpr} matching a conformant <a href="https://www.w3.org/TR/rdf11-concepts/#dfn-triple">RDF Triple</a>'s subject or object, depending on the value of {@link inverse}.
     */
    valueExpr?: shapeExprOrRef | undefined;
    /**
     * The property name used for creating the JSON object.
     */
    readablePredicate: string;
}
/**
 * A reference to a triple expression.
 * The reference is an <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">IRI</a> or a <a href="https://www.w3.org/TR/json-ld11/#identifying-blank-nodes">BlankNode</a>
 * as expressed in <a href="https://www.w3.org/TR/json-ld11/">JSON-LD 1.1</a>.
 */
export type tripleExprRef = tripleExprLabel;
/**
 * An identifier for a triple expression.
 * The identifier is an <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">IRI</a> or a <a href="https://www.w3.org/TR/json-ld11/#identifying-blank-nodes">BlankNode</a>
 * as expressed in <a href="https://www.w3.org/TR/json-ld11/">JSON-LD 1.1</a>.
 */
export type tripleExprLabel = IRIREF | BNODE;
/**
 * An extension point for Shape Expressions allowing external code to be invoked during validation.
 */
export interface SemAct {
    /**
     * Mandatory type "SemAct".
     */
    type: "SemAct";
    /** IRI identifying the semantic action extension to invoke. */
    name: IRIREF;
    /** Optional program text handed to the extension named by {@link name}. */
    code?: STRING | undefined;
}
/**
 * An assertion about some part of a ShEx schema which has no effect on conformance checking.
 * These can be useful for documentation, provenance tracking, form generation, etc.
 */
export interface Annotation {
    /**
     * Mandatory type "Annotation".
     */
    type: "Annotation";
    /**
     * The <a href="https://www.w3.org/TR/json-ld11/#node-identifiers">RDF Predicate</a> of the annotation.
     */
    predicate: IRI;
    /**
     * A value for the above {@link predicate}.
     */
    object: objectValue;
}
/* Primitive lexical aliases used throughout the ShExJ typings above. */
export type IRIREF = string;
export type BNODE = string;
export type INTEGER = number;
export type STRING = string;
export type DECIMAL = number;
export type DOUBLE = number;
export type LANGTAG = string;
export type BOOL = boolean;
export type IRI = string;
//# sourceMappingURL=ShexJTypes.d.ts.map

File diff suppressed because one or more lines are too long

@ -1 +0,0 @@
export {};

@ -1,8 +0,0 @@
/** Options accepted by {@link build}. */
interface BuildOptions {
    // Folder containing the source ShEx shape files.
    input: string;
    // Destination folder; wiped and recreated on every build.
    output: string;
    // Base IRI handed to the ShEx parser.
    baseIRI?: string;
}
/**
 * Builds every ShEx schema found under `input` into generated
 * `.schema.ts`, `.typings.ts` and `.shapeTypes.ts` files in `output`.
 */
export declare function build({ input: inputFile, output: outputFile, baseIRI, }: BuildOptions): Promise<void>;
export {};
//# sourceMappingURL=build.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"build.d.ts","sourceRoot":"","sources":["../src/build.ts"],"names":[],"mappings":"AAiBA,UAAU,YAAY;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,wBAAsB,KAAK,CAAC,EACxB,KAAK,EAAE,SAAS,EAChB,MAAM,EAAE,UAAU,EAClB,OAAyC,GAC5C,EAAE,YAAY,iBAmEd"}

@ -1,62 +0,0 @@
import fs from "fs-extra";
import path from "path";
import parser from "@shexjs/parser";
import { shexJConverter } from "./schema-converter/converter.js";
import { renderFile } from "ejs";
import prettier from "prettier";
import loading from "loading-cli";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { forAllShapes } from "./util/forAllShapes.js";
import annotateReadablePredicates from "./schema-converter/util/annotateReadablePredicates.js";
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
 * Builds TypeScript shape artifacts from a folder of ShEx schemas.
 *
 * For every shape file found under `input`, the ShExC source is parsed to
 * ShExJ, annotated with readable predicate names, converted to typings plus a
 * compact schema, and rendered through the `schema`, `typings` and
 * `shapeTypes` EJS templates into `<fileName>.<templateName>.ts` files in
 * `output`. The output folder is deleted and recreated on every run.
 *
 * @param {object} options
 * @param {string} options.input - Folder containing the ShEx shape files.
 * @param {string} options.output - Destination folder (wiped before writing).
 * @param {string} [options.baseIRI] - Base IRI handed to the ShEx parser.
 * @returns {Promise<void>}
 */
export async function build({ input: inputFile, output: outputFile, baseIRI = "https://nextgraph.org/shapes#", }) {
    const load = loading("Preparing Environment");
    load.start();
    // Prepare new folder by clearing and/or creating it.
    if (fs.existsSync(outputFile)) {
        await fs.promises.rm(outputFile, { recursive: true });
    }
    await fs.promises.mkdir(outputFile);
    const fileTemplates = [];
    // One generated file per template name: <fileName>.schema.ts,
    // <fileName>.typings.ts and <fileName>.shapeTypes.ts.
    fileTemplates.push("schema", "typings", "shapeTypes");
    load.text = "Generating Schema Documents";
    await forAllShapes(inputFile, async (fileName, shexC) => {
        // Convert to ShexJ
        let schema;
        try {
            // Parse the ShExC schema to its JSON (ShExJ) representation.
            // TODO: Do we need the base IRI?
            // @ts-ignore ...
            schema = parser.construct(baseIRI).parse(shexC);
        }
        catch (err) {
            // Report the parse failure for this file and keep processing the rest.
            const errMessage = err instanceof Error
                ? err.message
                : typeof err === "string"
                    ? err
                    : "Unknown Error";
            console.error(`Error processing ${fileName}: ${errMessage}`);
            return;
        }
        // Add readable predicates to schema as the single source of truth.
        // @ts-ignore ...
        annotateReadablePredicates(schema);
        const [typings, compactSchema] = await shexJConverter(schema);
        // Render each template with the same data and write the result,
        // pretty-printed, next to its siblings in the output folder.
        await Promise.all(fileTemplates.map(async (templateName) => {
            const finalContent = await renderFile(path.join(__dirname, "schema-converter", "templates", `${templateName}.ejs`), {
                typings: typings.typings,
                fileName,
                schema: JSON.stringify(schema, null, 2),
                compactSchema: JSON.stringify(compactSchema, null, 2),
            });
            await fs.promises.writeFile(path.join(outputFile, `${fileName}.${templateName}.ts`), await prettier.format(finalContent, {
                parser: "typescript",
            }));
        }));
    });
    load.stop();
}

@ -1,3 +0,0 @@
#!/usr/bin/env node
export {};
//# sourceMappingURL=cli.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":""}

@ -1,15 +0,0 @@
#!/usr/bin/env node
import { program } from "commander";
import { build } from "./build.js";

// Top-level CLI definition for the NG-ORM shape-type generator.
program
    .name("NG-ORM")
    // Fixed: the previous description ("CLI to some JavaScript string
    // utilities") was leftover boilerplate from the commander README example.
    .description("CLI to build ShEx shapes into NextGraph ORM Shape Types")
    .version("0.1.0");

// `build` compiles every shape under --input into generated TypeScript
// artifacts under --output (see build.js).
program
    .command("build")
    .description("Build contents of a shex folder into Shape Types")
    .option("-i, --input <inputPath>", "Provide the input path", "./.shapes")
    .option("-o, --output <outputPath>", "Provide the output path", "./.orm")
    .option("-b, --baseIRI <baseIri>", "The base IRI for anonymous shapes", "https://nextgraph.org/shapes#")
    .action(build);
program.parse();

@ -1,2 +0,0 @@
export * from "../src/types.ts";
//# sourceMappingURL=index.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"}

@ -1 +0,0 @@
export * from "./types.js";

@ -1,12 +0,0 @@
import type { Schema } from "@ldo/traverser-shexj";
import * as dom from "dts-dom";
import type { Schema as ShapeSchema } from "../types.ts";
/** Result of typing generation for one schema. */
export interface TypingReturn {
    // Full generated source: an `export type IRI = string;` header followed by
    // every declaration below, each prefixed with `export`.
    typingsString: string;
    // One entry per generated declaration.
    typings: {
        // The declaration emitted as TypeScript source text.
        typingString: string;
        // The corresponding dts-dom declaration node.
        dts: dom.TopLevelDeclaration;
    }[];
}
/**
 * Converts a parsed ShExJ schema into its TypeScript typings and the compact
 * schema consumed by the ORM runtime.
 */
export declare function shexJConverter(shexj: Schema): Promise<[TypingReturn, ShapeSchema]>;
//# sourceMappingURL=converter.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"converter.d.ts","sourceRoot":"","sources":["../../src/schema-converter/converter.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAEnD,OAAO,KAAK,GAAG,MAAM,SAAS,CAAC;AAM/B,OAAO,KAAK,EAAE,MAAM,IAAI,WAAW,EAAS,MAAM,aAAa,CAAC;AAEhE,MAAM,WAAW,YAAY;IACzB,aAAa,EAAE,MAAM,CAAC;IACtB,OAAO,EAAE;QACL,YAAY,EAAE,MAAM,CAAC;QACrB,GAAG,EAAE,GAAG,CAAC,mBAAmB,CAAC;KAChC,EAAE,CAAC;CACP;AAED,wBAAsB,cAAc,CAChC,KAAK,EAAE,MAAM,GACd,OAAO,CAAC,CAAC,YAAY,EAAE,WAAW,CAAC,CAAC,CAkDtC"}

@ -1,69 +0,0 @@
import { jsonld2graphobject } from "jsonld2graphobject";
import * as dom from "dts-dom";
import { ShexJTypingTransformerCompact, additionalCompactEnumAliases, } from "./transformers/ShexJTypingTransformer.js";
import { ShexJSchemaTransformerCompact } from "./transformers/ShexJSchemaTransformer.js";
/**
 * Converts a ShExJ schema into `[typings, compactSchema]`:
 * - `typings`: the generated TypeScript declarations (full source string plus
 *   one entry per declaration), and
 * - `compactSchema`: a flat map of shape IRIs to compact shape descriptions.
 *
 * @param shexj - The parsed ShExJ schema to convert.
 */
export async function shexJConverter(shexj) {
    // Prepare processed schema (names still rely on context visitor)
    const processedShexj = (await jsonld2graphobject({
        ...shexj,
        "@id": "SCHEMA",
        "@context": "http://www.w3.org/ns/shex.jsonld",
    }, "SCHEMA"));
    // Reset the module-level alias accumulator so reruns don't leak aliases
    // from a previously converted schema.
    additionalCompactEnumAliases.clear();
    const declarations = await ShexJTypingTransformerCompact.transform(processedShexj, "Schema", null);
    const compactSchemaShapesUnflattened = await ShexJSchemaTransformerCompact.transform(processedShexj, "Schema", null);
    // Hoist nested anonymous shapes to the schema root (see flattenSchema).
    const compactSchema = flattenSchema(compactSchemaShapesUnflattened);
    // Append only enum aliases (no interface Id aliases in compact format now)
    const hasName = (d) => typeof d.name === "string";
    additionalCompactEnumAliases.forEach((alias) => {
        // Skip aliases that already exist as named declarations.
        const exists = declarations.some((d) => hasName(d) && d.name === alias);
        if (!exists)
            declarations.push(dom.create.alias(alias, dom.create.namedTypeReference("IRI")));
    });
    // Emit each declaration as normalized TypeScript source alongside its
    // dts-dom node (templates need both).
    const typings = declarations.map((declaration) => ({
        typingString: dom
            .emit(declaration, {
            rootFlags: dom.ContextFlags.InAmbientNamespace,
        })
            .replace(/\r\n/g, "\n"),
        dts: declaration,
    }));
    const header = `export type IRI = string;\n\n`;
    const typingsString = header + typings.map((t) => `export ${t.typingString}`).join("");
    return [{ typingsString, typings }, compactSchema];
}
/** Shapes may be nested. Put all to their root and give nested ones ids. */
function flattenSchema(shapes) {
let schema = {};
for (const shape of shapes) {
schema[shape.iri] = shape;
// Find nested, unflattened (i.e. anonymous) schemas in predicates' dataTypes.
for (const pred of shape.predicates) {
for (let i = 0; i < pred.dataTypes.length; i++) {
const dt = pred.dataTypes[i];
if (dt.valType === "shape" &&
typeof dt.shape === "object" &&
dt.shape !== null) {
// create a deterministic id for the nested shape; include index if multiple shape entries exist
const shapeCount = pred.dataTypes.filter((d) => d.valType === "shape").length;
const newId = shape.iri +
"||" +
pred.iri +
(shapeCount > 1 ? `||${i}` : "");
// Recurse
const flattened = flattenSchema([
{
...dt.shape,
iri: newId,
},
]);
// Replace the nested schema with its new id.
dt.shape = newId;
schema = { ...schema, ...flattened };
}
}
}
// Flatten / Recurse
}
return schema;
}

@ -1,8 +0,0 @@
import type { Schema } from "@ng-org/shex-orm";
/**
* =============================================================================
* <%- fileName %>Schema: Schema for <%- fileName %>
* =============================================================================
*/
export const <%- fileName %>Schema: Schema = <%- compactSchema %>;

@ -1,14 +0,0 @@
import type { ShapeType } from "@ng-org/shex-orm";
import { <%- fileName %>Schema } from "./<%- fileName %>.schema";
import type {
<% typings.forEach((typing)=> { if (!/Id$/.test(typing.dts.name)) { -%>
<%- typing.dts.name %>,
<% } }); -%>} from "./<%- fileName %>.typings";
// ShapeTypes for <%- fileName %>
<% typings.forEach((typing)=> { if (!/Id$/.test(typing.dts.name)) { -%>
export const <%- typing.dts.name %>ShapeType: ShapeType<<%- typing.dts.name %>> = {
schema: <%- fileName %>Schema,
shape: "<%- typing.dts.shapeId %>",
};
<% } }); -%>

@ -1,14 +0,0 @@
export type IRI = string;
/**
* =============================================================================
* Typescript Typings for <%- fileName %>
* =============================================================================
*/
<% typings.forEach((typing)=> { -%>
/**
* <%- typing.dts.name %> Type
*/
export <%- typing.typingString -%>
<% }); -%>

@ -1,348 +0,0 @@
import type { Predicate, DataType, Shape } from "../../types.ts";
export declare const ShexJSchemaTransformerCompact: import("@ldo/type-traverser").Transformer<{
Schema: {
kind: "interface";
type: import("@ldo/traverser-shexj").Schema;
properties: {
startActs: "SemAct";
start: "shapeExprOrRef";
imports: "IRIREF";
shapes: "ShapeDecl";
};
};
ShapeDecl: {
kind: "interface";
type: import("@ldo/traverser-shexj").ShapeDecl;
properties: {
id: "shapeDeclLabel";
abstract: "BOOL";
restricts: "shapeExprOrRef";
shapeExpr: "shapeExpr";
};
};
shapeExpr: {
kind: "union";
type: import("@ldo/traverser-shexj").shapeExpr;
typeNames: "ShapeOr" | "ShapeAnd" | "ShapeNot" | "NodeConstraint" | "Shape" | "ShapeExternal";
};
shapeExprOrRef: {
kind: "union";
type: import("@ldo/traverser-shexj").shapeExprOrRef;
typeNames: "shapeExpr" | "shapeDeclRef";
};
ShapeOr: {
kind: "interface";
type: import("@ldo/traverser-shexj").ShapeOr;
properties: {
shapeExprs: "shapeExprOrRef";
};
};
ShapeAnd: {
kind: "interface";
type: import("@ldo/traverser-shexj").ShapeAnd;
properties: {
shapeExprs: "shapeExprOrRef";
};
};
ShapeNot: {
kind: "interface";
type: import("@ldo/traverser-shexj").ShapeNot;
properties: {
shapeExpr: "shapeExprOrRef";
};
};
ShapeExternal: {
kind: "interface";
type: import("@ldo/traverser-shexj").ShapeExternal;
properties: Record<string, never>;
};
shapeDeclRef: {
kind: "union";
type: import("@ldo/traverser-shexj").shapeDeclRef;
typeNames: "shapeDeclLabel" | "ShapeDecl";
};
shapeDeclLabel: {
kind: "union";
type: import("@ldo/traverser-shexj").shapeDeclLabel;
typeNames: "IRIREF" | "BNODE";
};
NodeConstraint: {
kind: "interface";
type: import("@ldo/traverser-shexj").NodeConstraint;
properties: {
datatype: "IRIREF";
values: "valueSetValue";
length: "INTEGER";
minlength: "INTEGER";
maxlength: "INTEGER";
pattern: "STRING";
flags: "STRING";
mininclusive: "numericLiteral";
minexclusive: "numericLiteral";
totaldigits: "INTEGER";
fractiondigits: "INTEGER";
semActs: "SemAct";
annotations: "Annotation";
};
};
numericLiteral: {
kind: "union";
type: import("@ldo/traverser-shexj").numericLiteral;
typeNames: "INTEGER" | "DECIMAL" | "DOUBLE";
};
valueSetValue: {
kind: "union";
type: import("@ldo/traverser-shexj").valueSetValue;
typeNames: "objectValue" | "IriStem" | "IriStemRange" | "LiteralStem" | "LiteralStemRange" | "Language" | "LanguageStem" | "LanguageStemRange";
};
objectValue: {
kind: "union";
type: import("@ldo/traverser-shexj").objectValue;
typeNames: "IRIREF" | "ObjectLiteral";
};
ObjectLiteral: {
kind: "interface";
type: import("@ldo/traverser-shexj").ObjectLiteral;
properties: {
value: "STRING";
language: "STRING";
type: "STRING";
};
};
IriStem: {
kind: "interface";
type: import("@ldo/traverser-shexj").IriStem;
properties: {
stem: "IRIREF";
};
};
IriStemRange: {
kind: "interface";
type: import("@ldo/traverser-shexj").IriStemRange;
properties: {
stem: "IRIREF";
exclusions: "IriStemRangeExclusions";
};
};
IriStemRangeExclusions: {
kind: "union";
type: import("@ldo/traverser-shexj").IRIREF | import("@ldo/traverser-shexj").IriStem;
typeNames: "IRIREF" | "IriStem";
};
LiteralStem: {
kind: "interface";
type: import("@ldo/traverser-shexj").LiteralStem;
properties: {
stem: "STRING";
};
};
LiteralStemRange: {
kind: "interface";
type: import("@ldo/traverser-shexj").LiteralStemRange;
properties: {
stem: "LiteralStemRangeStem";
exclusions: "LiteralStemRangeExclusions";
};
};
LiteralStemRangeStem: {
kind: "union";
type: import("@ldo/traverser-shexj").STRING | import("@ldo/traverser-shexj").Wildcard;
typeNames: "STRING" | "Wildcard";
};
LiteralStemRangeExclusions: {
kind: "union";
type: import("@ldo/traverser-shexj").STRING | import("@ldo/traverser-shexj").LiteralStem;
typeNames: "STRING" | "LiteralStem";
};
Language: {
kind: "interface";
type: import("@ldo/traverser-shexj").Language;
properties: {
languageTag: "LANGTAG";
};
};
LanguageStem: {
kind: "interface";
type: import("@ldo/traverser-shexj").LanguageStem;
properties: {
stem: "LANGTAG";
};
};
LanguageStemRange: {
kind: "interface";
type: import("@ldo/traverser-shexj").LanguageStemRange;
properties: {
stem: "LanguageStemRangeStem";
exclusions: "LanguageStemRangeExclusions";
};
};
LanguageStemRangeStem: {
kind: "union";
type: import("@ldo/traverser-shexj").LANGTAG | import("@ldo/traverser-shexj").Wildcard;
typeNames: "LANGTAG" | "Wildcard";
};
LanguageStemRangeExclusions: {
kind: "union";
type: import("@ldo/traverser-shexj").LANGTAG | import("@ldo/traverser-shexj").LanguageStem;
typeNames: "LANGTAG" | "LanguageStem";
};
Wildcard: {
kind: "interface";
type: import("@ldo/traverser-shexj").Wildcard;
properties: Record<string, never>;
};
Shape: {
kind: "interface";
type: import("@ldo/traverser-shexj").Shape;
properties: {
closed: "BOOL";
extra: "IRIREF";
extends: "shapeExprOrRef";
expression: "tripleExprOrRef";
semActs: "SemAct";
annotations: "Annotation";
};
};
tripleExpr: {
kind: "union";
type: import("@ldo/traverser-shexj").tripleExpr;
typeNames: "EachOf" | "OneOf" | "TripleConstraint";
};
tripleExprOrRef: {
kind: "union";
type: import("@ldo/traverser-shexj").tripleExprOrRef;
typeNames: "tripleExpr" | "tripleExprRef";
};
EachOf: {
kind: "interface";
type: import("@ldo/traverser-shexj").EachOf;
properties: {
id: "tripleExprLabel";
min: "INTEGER";
max: "INTEGER";
expressions: "tripleExprOrRef";
semActs: "SemAct";
annotations: "Annotation";
};
};
OneOf: {
kind: "interface";
type: import("@ldo/traverser-shexj").OneOf;
properties: {
id: "tripleExprLabel";
min: "INTEGER";
max: "INTEGER";
expressions: "tripleExprOrRef";
semActs: "SemAct";
annotations: "Annotation";
};
};
TripleConstraint: {
kind: "interface";
type: import("@ldo/traverser-shexj").TripleConstraint;
properties: {
id: "tripleExprLabel";
min: "INTEGER";
max: "INTEGER";
inverse: "BOOL";
predicate: "IRIREF";
valueExpr: "shapeExprOrRef";
semActs: "SemAct";
annotations: "Annotation";
};
};
tripleExprRef: {
kind: "union";
type: import("@ldo/traverser-shexj").tripleExprRef;
typeNames: "tripleExprLabel";
};
tripleExprLabel: {
kind: "union";
type: import("@ldo/traverser-shexj").tripleExprLabel;
typeNames: "IRIREF" | "BNODE";
};
SemAct: {
kind: "interface";
type: import("@ldo/traverser-shexj").SemAct;
properties: {
name: "IRIREF";
code: "STRING";
};
};
Annotation: {
kind: "interface";
type: import("@ldo/traverser-shexj").Annotation;
properties: {
predicate: "IRI";
object: "objectValue";
};
};
IRIREF: {
kind: "primitive";
type: import("@ldo/traverser-shexj").IRIREF;
};
BNODE: {
kind: "primitive";
type: import("@ldo/traverser-shexj").BNODE;
};
INTEGER: {
kind: "primitive";
type: import("@ldo/traverser-shexj").INTEGER;
};
STRING: {
kind: "primitive";
type: import("@ldo/traverser-shexj").STRING;
};
DECIMAL: {
kind: "primitive";
type: import("@ldo/traverser-shexj").DECIMAL;
};
DOUBLE: {
kind: "primitive";
type: import("@ldo/traverser-shexj").DOUBLE;
};
LANGTAG: {
kind: "primitive";
type: import("@ldo/traverser-shexj").LANGTAG;
};
BOOL: {
kind: "primitive";
type: import("@ldo/traverser-shexj").BOOL;
};
IRI: {
kind: "primitive";
type: import("@ldo/traverser-shexj").IRI;
};
}, {
Schema: {
return: Shape[];
};
ShapeDecl: {
return: Shape;
};
Shape: {
return: Shape;
};
EachOf: {
return: Shape;
};
TripleConstraint: {
return: Predicate;
};
NodeConstraint: {
return: DataType;
};
ShapeOr: {
return: DataType[];
};
ShapeAnd: {
return: never;
};
ShapeNot: {
return: never;
};
ShapeExternal: {
return: never;
};
}, null>;
//# sourceMappingURL=ShexJSchemaTransformer.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"ShexJSchemaTransformer.d.ts","sourceRoot":"","sources":["../../../src/schema-converter/transformers/ShexJSchemaTransformer.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AA0DjE,eAAO,MAAM,6BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;YAE1B;QAAE,MAAM,EAAE,KAAK,EAAE,CAAA;KAAE;eAChB;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;WACrB;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;YAChB;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;sBACP;QAAE,MAAM,EAAE,SAAS,CAAA;KAAE;oBACvB;QAAE,MAAM,EAAE,QAAQ,CAAA;KAAE;aAC3B;QAAE,MAAM,EAAE,QAAQ,EAAE,CAAA;KAAE;cACrB;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;cACjB;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;mBACZ;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;QAqLtC,CAAC"}

@ -1,208 +0,0 @@
import ShexJTraverser from "@ldo/traverser-shexj";
/**
 * Maps an XSD datatype IRI to the basic value kind used by the compact
 * schema: "string", "number", "boolean" or "iri".
 * Any unrecognized datatype falls back to "string".
 */
const rdfDataTypeToBasic = (dataType) => {
    const XSD = "http://www.w3.org/2001/XMLSchema#";
    // Every XSD numeric type collapses to the JS "number" kind.
    const numericLocalNames = [
        "byte", "decimal", "double", "float", "int", "integer", "long",
        "negativeInteger", "nonNegativeInteger", "nonPositiveInteger",
        "positiveInteger", "short", "unsignedLong", "unsignedInt",
        "unsignedShort", "unsignedByte",
    ];
    if (dataType === `${XSD}boolean`) {
        return "boolean";
    }
    if (dataType === `${XSD}anyURI`) {
        return "iri";
    }
    if (numericLocalNames.some((local) => dataType === XSD + local)) {
        return "number";
    }
    // String-like, date/time and binary XSD types, plus anything unknown,
    // all serialize as plain strings.
    return "string";
};
/**
 * ShExJ -> compact schema transformer.
 * Produces an array of compact shapes, each `{ iri, predicates: [...] }`,
 * where every predicate carries min/max cardinalities and a `dataTypes` list.
 * Unsupported ShEx constructs (ShapeAnd/ShapeNot/ShapeExternal) throw.
 */
export const ShexJSchemaTransformerCompact = ShexJTraverser.createTransformer({
    Schema: {
        transformer: async (_schema, getTransformedChildren) => {
            const transformedChildren = await getTransformedChildren();
            // The compact schema is just the list of transformed shape decls.
            return transformedChildren.shapes || [];
        },
    },
    ShapeDecl: {
        transformer: async (shapeDecl, getTransformedChildren) => {
            const schema = await getTransformedChildren();
            const shape = schema.shapeExpr;
            // Tag the transformed shape with the declaration's IRI.
            return { ...shape, iri: shapeDecl.id };
        },
    },
    Shape: {
        transformer: async (_shape, getTransformedChildren) => {
            // TODO: We don't handle `closed` yet (intentional no-op reference).
            _shape.closed;
            const transformedChildren = await getTransformedChildren();
            const compactShape = transformedChildren.expression;
            // Flag EXTRA predicates so validation can tolerate extra triples.
            // NOTE(review): assumes `expression` exists whenever `extra` is
            // non-empty; a shape without an expression would throw here — confirm.
            for (const extra of _shape.extra || []) {
                const extraPredicate = compactShape.predicates.find((p) => p.iri === extra);
                if (extraPredicate)
                    extraPredicate.extra = true;
            }
            return compactShape;
        },
    },
    // EachOf contains the `expressions` array of properties (TripleConstraint)
    EachOf: {
        transformer: async (eachOf, getTransformedChildren) => {
            const transformedChildren = await getTransformedChildren();
            return {
                // Placeholder: the ShapeDecl transformer fills in the real IRI.
                iri: "",
                predicates: transformedChildren.expressions.map(
                // We disregard cases where properties are referenced (strings)
                // or where they consist of Unions or Intersections (not supported).
                (expr) => expr),
            };
        },
    },
    TripleConstraint: {
        transformer: async (tripleConstraint, getTransformedChildren, _setReturnPointer) => {
            const transformedChildren = await getTransformedChildren();
            // ShExJ omits min/max for exactly-one cardinality, hence the ?? 1.
            const commonProperties = {
                maxCardinality: tripleConstraint.max ?? 1,
                minCardinality: tripleConstraint.min ?? 1,
                iri: tripleConstraint.predicate,
                // @ts-expect-error The ldo library does not have our modded readablePredicate property.
                readablePredicate: tripleConstraint.readablePredicate,
            };
            // Make property based on object type which is either a parsed schema, literal or type.
            if (typeof transformedChildren.valueExpr === "string") {
                // Reference to nested object
                return {
                    dataTypes: [
                        {
                            valType: "shape",
                            shape: transformedChildren.valueExpr,
                        },
                    ],
                    ...commonProperties,
                };
            }
            else if (transformedChildren.valueExpr &&
                transformedChildren.valueExpr.predicates) {
                // Nested object
                return {
                    dataTypes: [
                        {
                            valType: "shape",
                            shape: transformedChildren.valueExpr,
                        },
                    ],
                    ...commonProperties,
                };
            }
            else if (Array.isArray(transformedChildren.valueExpr)) {
                // A ShapeOr produced a list of alternative data types.
                return {
                    dataTypes: transformedChildren.valueExpr, // DataType[]
                    ...commonProperties,
                };
            }
            else {
                // type or literal
                const nodeConstraint = transformedChildren.valueExpr;
                return {
                    dataTypes: [
                        {
                            valType: nodeConstraint.valType,
                            literals: nodeConstraint.literals,
                        },
                    ],
                    ...commonProperties,
                };
            }
        },
    },
    NodeConstraint: {
        transformer: async (nodeConstraint) => {
            if (nodeConstraint.datatype) {
                // Map the xsd datatype onto a basic value type (string/number/...).
                return {
                    valType: rdfDataTypeToBasic(nodeConstraint.datatype),
                };
            }
            if (nodeConstraint.nodeKind) {
                // Something reference-like.
                return { valType: "iri" };
            }
            if (nodeConstraint.values) {
                // Enumerated value set.
                return {
                    valType: "literal",
                    literals: nodeConstraint.values.map(
                    // TODO: We do not convert them to number or boolean or lang tag.
                    // And we don't have an annotation of the literal's type.
                    // @ts-expect-error
                    (valueRecord) => valueRecord.value || valueRecord.id),
                };
            }
            // Unrecognized constraint: surface the offending node for debugging.
            // NOTE(review): this throws a plain object, not an Error — callers
            // must not rely on `instanceof Error`.
            throw {
                error: new Error("Could not parse Node Constraint"),
                nodeConstraint,
            };
        },
    },
    // Transformer from ShapeOr
    ShapeOr: {
        transformer: async (shapeOr, getTransformedChildren) => {
            const { shapeExprs } = await getTransformedChildren();
            // Either a shape IRI, a nested shape or a node CompactSchemaValue (node constraint).
            return (Array.isArray(shapeExprs) ? shapeExprs : [shapeExprs]);
        },
    },
    // Transformer from ShapeAnd
    ShapeAnd: {
        transformer: async () => {
            throw new Error("ShapeAnd not supported (compact)");
        },
    },
    // Transformer from ShapeNot - not supported.
    ShapeNot: {
        transformer: async () => {
            throw new Error("ShapeNot not supported (compact)");
        },
    },
    // Transformer from ShapeExternal - not supported.
    ShapeExternal: {
        transformer: async () => {
            throw new Error("ShapeExternal not supported (compact)");
        },
    },
});

@ -1,366 +0,0 @@
import type { Annotation } from "shexj";
import * as dom from "dts-dom";
import type { InterfaceDeclaration } from "dts-dom";
/** dts-dom InterfaceDeclaration extended with the originating ShEx shape IRI. */
export interface ShapeInterfaceDeclaration extends InterfaceDeclaration {
    shapeId?: string;
}
/** Collected enum alias names (e.g. AuthenticatedAgentId) emitted at the end of generation. */
export declare const additionalCompactEnumAliases: Set<string>;
/** Context handed to compact transformers for IRI -> readable-name resolution. */
export interface CompactTransformerContext {
    getNameFromIri: (iri: string, rdfType?: string) => string;
}
/** Convert a delimited token (spaces, dashes, underscores) to camelCase. */
export declare function toCamelCase(text: string): string;
/**
 * Name functions
 */
/** Derive a short readable name from an IRI (fragment, else last path segment). */
export declare function iriToName(iri: string): string;
/** Pick a camelCased name from an rdfs:label annotation, falling back to the id. */
export declare function nameFromAnnotationOrId(obj: {
    id?: string;
    annotations?: Annotation[];
}): string | undefined;
/**
 * Declaration of the compact ShExJ -> dts-dom typing transformer.
 * The first type argument maps every ShExJ node kind to its traverser
 * definition; the second maps the handled node kinds to the dts-dom values
 * their transformers return; the third (null) is the transformer context.
 * NOTE(review): this file appears to be compiler-generated output (see the
 * sourceMappingURL trailer) — prefer regenerating over hand-editing.
 */
export declare const ShexJTypingTransformerCompact: import("@ldo/type-traverser").Transformer<{
    Schema: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").Schema;
        properties: {
            startActs: "SemAct";
            start: "shapeExprOrRef";
            imports: "IRIREF";
            shapes: "ShapeDecl";
        };
    };
    ShapeDecl: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").ShapeDecl;
        properties: {
            id: "shapeDeclLabel";
            abstract: "BOOL";
            restricts: "shapeExprOrRef";
            shapeExpr: "shapeExpr";
        };
    };
    shapeExpr: {
        kind: "union";
        type: import("@ldo/traverser-shexj").shapeExpr;
        typeNames: "ShapeOr" | "ShapeAnd" | "ShapeNot" | "NodeConstraint" | "Shape" | "ShapeExternal";
    };
    shapeExprOrRef: {
        kind: "union";
        type: import("@ldo/traverser-shexj").shapeExprOrRef;
        typeNames: "shapeExpr" | "shapeDeclRef";
    };
    ShapeOr: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").ShapeOr;
        properties: {
            shapeExprs: "shapeExprOrRef";
        };
    };
    ShapeAnd: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").ShapeAnd;
        properties: {
            shapeExprs: "shapeExprOrRef";
        };
    };
    ShapeNot: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").ShapeNot;
        properties: {
            shapeExpr: "shapeExprOrRef";
        };
    };
    ShapeExternal: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").ShapeExternal;
        properties: Record<string, never>;
    };
    shapeDeclRef: {
        kind: "union";
        type: import("@ldo/traverser-shexj").shapeDeclRef;
        typeNames: "shapeDeclLabel" | "ShapeDecl";
    };
    shapeDeclLabel: {
        kind: "union";
        type: import("@ldo/traverser-shexj").shapeDeclLabel;
        typeNames: "IRIREF" | "BNODE";
    };
    NodeConstraint: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").NodeConstraint;
        properties: {
            datatype: "IRIREF";
            values: "valueSetValue";
            length: "INTEGER";
            minlength: "INTEGER";
            maxlength: "INTEGER";
            pattern: "STRING";
            flags: "STRING";
            mininclusive: "numericLiteral";
            minexclusive: "numericLiteral";
            totaldigits: "INTEGER";
            fractiondigits: "INTEGER";
            semActs: "SemAct";
            annotations: "Annotation";
        };
    };
    numericLiteral: {
        kind: "union";
        type: import("@ldo/traverser-shexj").numericLiteral;
        typeNames: "INTEGER" | "DECIMAL" | "DOUBLE";
    };
    valueSetValue: {
        kind: "union";
        type: import("@ldo/traverser-shexj").valueSetValue;
        typeNames: "objectValue" | "IriStem" | "IriStemRange" | "LiteralStem" | "LiteralStemRange" | "Language" | "LanguageStem" | "LanguageStemRange";
    };
    objectValue: {
        kind: "union";
        type: import("@ldo/traverser-shexj").objectValue;
        typeNames: "IRIREF" | "ObjectLiteral";
    };
    ObjectLiteral: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").ObjectLiteral;
        properties: {
            value: "STRING";
            language: "STRING";
            type: "STRING";
        };
    };
    IriStem: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").IriStem;
        properties: {
            stem: "IRIREF";
        };
    };
    IriStemRange: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").IriStemRange;
        properties: {
            stem: "IRIREF";
            exclusions: "IriStemRangeExclusions";
        };
    };
    IriStemRangeExclusions: {
        kind: "union";
        type: import("@ldo/traverser-shexj").IRIREF | import("@ldo/traverser-shexj").IriStem;
        typeNames: "IRIREF" | "IriStem";
    };
    LiteralStem: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").LiteralStem;
        properties: {
            stem: "STRING";
        };
    };
    LiteralStemRange: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").LiteralStemRange;
        properties: {
            stem: "LiteralStemRangeStem";
            exclusions: "LiteralStemRangeExclusions";
        };
    };
    LiteralStemRangeStem: {
        kind: "union";
        type: import("@ldo/traverser-shexj").STRING | import("@ldo/traverser-shexj").Wildcard;
        typeNames: "STRING" | "Wildcard";
    };
    LiteralStemRangeExclusions: {
        kind: "union";
        type: import("@ldo/traverser-shexj").STRING | import("@ldo/traverser-shexj").LiteralStem;
        typeNames: "STRING" | "LiteralStem";
    };
    Language: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").Language;
        properties: {
            languageTag: "LANGTAG";
        };
    };
    LanguageStem: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").LanguageStem;
        properties: {
            stem: "LANGTAG";
        };
    };
    LanguageStemRange: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").LanguageStemRange;
        properties: {
            stem: "LanguageStemRangeStem";
            exclusions: "LanguageStemRangeExclusions";
        };
    };
    LanguageStemRangeStem: {
        kind: "union";
        type: import("@ldo/traverser-shexj").LANGTAG | import("@ldo/traverser-shexj").Wildcard;
        typeNames: "LANGTAG" | "Wildcard";
    };
    LanguageStemRangeExclusions: {
        kind: "union";
        type: import("@ldo/traverser-shexj").LANGTAG | import("@ldo/traverser-shexj").LanguageStem;
        typeNames: "LANGTAG" | "LanguageStem";
    };
    Wildcard: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").Wildcard;
        properties: Record<string, never>;
    };
    Shape: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").Shape;
        properties: {
            closed: "BOOL";
            extra: "IRIREF";
            extends: "shapeExprOrRef";
            expression: "tripleExprOrRef";
            semActs: "SemAct";
            annotations: "Annotation";
        };
    };
    tripleExpr: {
        kind: "union";
        type: import("@ldo/traverser-shexj").tripleExpr;
        typeNames: "EachOf" | "OneOf" | "TripleConstraint";
    };
    tripleExprOrRef: {
        kind: "union";
        type: import("@ldo/traverser-shexj").tripleExprOrRef;
        typeNames: "tripleExpr" | "tripleExprRef";
    };
    EachOf: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").EachOf;
        properties: {
            id: "tripleExprLabel";
            min: "INTEGER";
            max: "INTEGER";
            expressions: "tripleExprOrRef";
            semActs: "SemAct";
            annotations: "Annotation";
        };
    };
    OneOf: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").OneOf;
        properties: {
            id: "tripleExprLabel";
            min: "INTEGER";
            max: "INTEGER";
            expressions: "tripleExprOrRef";
            semActs: "SemAct";
            annotations: "Annotation";
        };
    };
    TripleConstraint: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").TripleConstraint;
        properties: {
            id: "tripleExprLabel";
            min: "INTEGER";
            max: "INTEGER";
            inverse: "BOOL";
            predicate: "IRIREF";
            valueExpr: "shapeExprOrRef";
            semActs: "SemAct";
            annotations: "Annotation";
        };
    };
    tripleExprRef: {
        kind: "union";
        type: import("@ldo/traverser-shexj").tripleExprRef;
        typeNames: "tripleExprLabel";
    };
    tripleExprLabel: {
        kind: "union";
        type: import("@ldo/traverser-shexj").tripleExprLabel;
        typeNames: "IRIREF" | "BNODE";
    };
    SemAct: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").SemAct;
        properties: {
            name: "IRIREF";
            code: "STRING";
        };
    };
    Annotation: {
        kind: "interface";
        type: import("@ldo/traverser-shexj").Annotation;
        properties: {
            predicate: "IRI";
            object: "objectValue";
        };
    };
    IRIREF: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").IRIREF;
    };
    BNODE: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").BNODE;
    };
    INTEGER: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").INTEGER;
    };
    STRING: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").STRING;
    };
    DECIMAL: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").DECIMAL;
    };
    DOUBLE: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").DOUBLE;
    };
    LANGTAG: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").LANGTAG;
    };
    BOOL: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").BOOL;
    };
    IRI: {
        kind: "primitive";
        type: import("@ldo/traverser-shexj").IRI;
    };
}, {
    Schema: {
        return: dom.TopLevelDeclaration[];
    };
    ShapeDecl: {
        return: dom.InterfaceDeclaration;
    };
    Shape: {
        return: dom.InterfaceDeclaration;
    };
    EachOf: {
        return: dom.ObjectType | dom.InterfaceDeclaration;
    };
    TripleConstraint: {
        return: dom.PropertyDeclaration;
    };
    NodeConstraint: {
        return: dom.Type;
    };
    ShapeOr: {
        return: dom.UnionType;
    };
    ShapeAnd: {
        return: dom.IntersectionType;
    };
    ShapeNot: {
        return: never;
    };
    ShapeExternal: {
        return: never;
    };
}, null>;
//# sourceMappingURL=ShexJTypingTransformer.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"ShexJTypingTransformer.d.ts","sourceRoot":"","sources":["../../../src/schema-converter/transformers/ShexJTypingTransformer.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACxC,OAAO,KAAK,GAAG,MAAM,SAAS,CAAC;AAC/B,OAAO,KAAK,EAAE,oBAAoB,EAAE,MAAM,SAAS,CAAC;AAEpD,MAAM,WAAW,yBAA0B,SAAQ,oBAAoB;IACnE,OAAO,CAAC,EAAE,MAAM,CAAC;CACpB;AAGD,eAAO,MAAM,4BAA4B,aAAoB,CAAC;AAE9D,MAAM,WAAW,yBAAyB;IACtC,cAAc,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,KAAK,MAAM,CAAC;CAC7D;AAeD,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,UAOvC;AAED;;GAEG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAc7C;AAED,wBAAgB,sBAAsB,CAAC,GAAG,EAAE;IACxC,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,WAAW,CAAC,EAAE,UAAU,EAAE,CAAC;CAC9B,GAAG,MAAM,GAAG,SAAS,CAgBrB;AAyMD,eAAO,MAAM,6BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;YAE1B;QAAE,MAAM,EAAE,GAAG,CAAC,mBAAmB,EAAE,CAAA;KAAE;eAClC;QAAE,MAAM,EAAE,GAAG,CAAC,oBAAoB,CAAA;KAAE;WACxC;QAAE,MAAM,EAAE,GAAG,CAAC,oBAAoB,CAAA;KAAE;YACnC;QAAE,MAAM,EAAE,GAAG,CAAC,UAAU,GAAG,GAAG,CAAC,oBAAoB,CAAA;KAAE;sBAC3C;QAAE,MAAM,EAAE,GAAG,CAAC,mBAAmB,CAAA;KAAE;oBACrC;QAAE,MAAM,EAAE,GAAG,CAAC,IAAI,CAAA;KAAE;aAC3B;QAAE,MAAM,EAAE,GAAG,CAAC,SAAS,CAAA;KAAE;cACxB;QAAE,MAAM,EAAE,GAAG,CAAC,gBAAgB,CAAA;KAAE;cAChC;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;mBACZ;QAAE,MAAM,EAAE,KAAK,CAAA;KAAE;QAmatC,CAAC"}

@ -1,550 +0,0 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import ShexJTraverser from "@ldo/traverser-shexj";
import * as dom from "dts-dom";
// Collected enum alias names (e.g., AuthenticatedAgentId) to emit at end.
// NOTE(review): nothing in this compiled file appends to this set — confirm
// it is populated elsewhere (e.g. by the schema generation pipeline) before
// the aliases are flushed into the output.
export const additionalCompactEnumAliases = new Set();
/**
 * Extract a human-readable comment from a list of ShEx annotations.
 * Looks for the rdfs:comment predicate; the annotation object may be a
 * plain string or an ObjectLiteral carrying the text in `value`.
 * Returns undefined when no comment annotation is present.
 */
function commentFromAnnotations(annotations) {
    const match = (annotations ?? []).find((annotation) => annotation.predicate === "http://www.w3.org/2000/01/rdf-schema#comment");
    const obj = match?.object;
    if (typeof obj === "string") {
        return obj;
    }
    return obj?.value;
}
/**
 * Convert a delimited token (spaces, dashes, underscores) to camelCase.
 *
 * The first segment keeps its original casing; each following non-empty
 * segment is capitalized. Empty segments are skipped: the previous
 * implementation crashed with `TypeError: acc[0] is undefined` on inputs
 * with a trailing separator (e.g. "foo-") or separator-only inputs ("_").
 *
 * @param text raw token possibly containing `-`, `_` or spaces
 * @returns the camelCased text ("" when no word characters remain)
 */
export function toCamelCase(text) {
    return text
        .replace(/([-_ ]){1,}/g, " ")
        .split(/[-_ ]/)
        .reduce((cur, acc) => {
        // Skip empty segments (trailing/only separators) instead of
        // dereferencing acc[0] on an empty string.
        if (acc.length === 0)
            return cur;
        return cur + acc[0].toUpperCase() + acc.substring(1);
    });
}
/**
* Name functions
*/
export function iriToName(iri) {
try {
const url = new URL(iri);
let name;
if (url.hash) {
name = url.hash.slice(1);
}
else {
const splitPathname = url.pathname.split("/");
name = splitPathname[splitPathname.length - 1];
}
return name.replace(/(?<!^)Shape$/, "");
}
catch (err) {
return iri;
}
}
/**
 * Pick a camelCased name for a shape: prefer an rdfs:label annotation
 * (string IRI or ObjectLiteral carrying `value`), falling back to the
 * shape's id. Returns undefined when neither is available.
 */
export function nameFromAnnotationOrId(obj) {
    const label = obj.annotations?.find((annotation) => annotation.predicate === "http://www.w3.org/2000/01/rdf-schema#label")?.object;
    if (label) {
        // String labels are treated as IRIs; ObjectLiterals carry plain text.
        return typeof label === "string"
            ? toCamelCase(iriToName(label))
            : toCamelCase(label.value);
    }
    if (obj.id) {
        return toCamelCase(iriToName(obj.id));
    }
    return undefined;
}
// Helper: a dom.Type counts as "object-like" when it is an anonymous
// object type or an interface declaration.
function isObjectLike(t) {
    const kind = t.kind;
    return kind === "object" || kind === "interface";
}
// A type reads as "primitive-like" when it is a named reference, a type
// parameter, a scalar intrinsic token, or a union made up entirely of
// such members. Anything else (objects, interfaces, mixed unions) is not.
function isPrimitiveLike(t) {
    switch (t?.kind) {
        case "name":
        case "type-parameter":
            // Named references and type parameters are treated as scalars.
            return true;
        case "union":
            return t.members.every((member) => isPrimitiveLike(member));
        case "string":
        case "number":
        case "boolean":
        case "undefined":
            // Scalar intrinsic tokens.
            return true;
        default:
            return false;
    }
}
// Small helper: true when the given dom type node is a union type.
function isUnionType(t) {
    if (!t)
        return false;
    return t.kind === "union";
}
// Build a flattened, de-duplicated union from the given types.
// Nested unions are expanded; two members are considered equal when they
// share a `name`, a literal `value`, or (failing both) a kind+JSON key.
// Degenerate cases: zero members -> any, one member -> that member itself.
function unionOf(types) {
    const flattened = [];
    const visit = (t) => {
        if (isUnionType(t)) {
            t.members.forEach(visit);
        }
        else {
            flattened.push(t);
        }
    };
    types.forEach(visit);
    // Map preserves insertion order, so the first occurrence wins.
    const byKey = new Map();
    for (const member of flattened) {
        const key = member.name ||
            member.value ||
            member.kind + JSON.stringify(member);
        if (!byKey.has(key))
            byKey.set(key, member);
    }
    const unique = [...byKey.values()];
    if (unique.length === 0)
        return dom.type.any;
    if (unique.length === 1)
        return unique[0];
    return dom.create.union(unique);
}
// Wrap a type in a `Set<...>` named type reference.
function setOf(inner) {
    const ref = {
        kind: "name",
        name: "Set",
        typeArguments: [inner],
    };
    return ref;
}
// Wrap key/value types in a `Record<K, V>` named type reference.
function recordOf(key, value) {
    const ref = {
        kind: "name",
        name: "Record",
        typeArguments: [key, value],
    };
    return ref;
}
// Note: aliasing helpers previously used in earlier versions were removed.
// Maps a generated dom property object to the predicate IRI it was built
// from, so the later merge/dedupe pass can tell same-named properties that
// originate from different predicates apart. A WeakMap keeps entries
// collectable once the property objects themselves are dropped.
const predicateIriByProp = new WeakMap();
// Note: collisions are handled by annotateReadablePredicates pre-pass.
// Merge duplicate properties without introducing LdSet. If a property appears multiple
// times (e.g., via EXTENDS or grouped expressions) we:
// - union the types (flattening existing unions)
// - if one side is Set<T> and the other is plain U, produce Set<T|U>
// - if both are Set<A>, Set<B> -> Set<A|B>
// - preserve optional flag if any occurrence optional
function dedupeCompactProperties(props) {
    // A `Set<...>` named reference as produced by setOf().
    const isSetRef = (t) => t.kind === "name" && t.name === "Set";
    const getSetInner = (t) => isSetRef(t) ? t.typeArguments[0] : t;
    // Group by composite key (name + predicate IRI)
    // so same-named properties from DIFFERENT predicates are never merged.
    const groups = new Map();
    for (const p of props) {
        const pred = predicateIriByProp.get(p) || "";
        // \u0000 separator cannot occur in a property name or an IRI.
        const key = `${p.name}\u0000${pred}`;
        if (!groups.has(key))
            groups.set(key, []);
        groups.get(key).push(p);
    }
    const merged = [];
    for (const [, group] of groups) {
        if (group.length === 1) {
            merged.push(group[0]);
            continue;
        }
        // Left-fold the group into a single merged property.
        let acc = group[0];
        for (let i = 1; i < group.length; i++) {
            const next = group[i];
            const accSet = isSetRef(acc.type);
            const nextSet = isSetRef(next.type);
            let mergedType;
            if (accSet && nextSet) {
                // Set<A> + Set<B> -> Set<A | B>
                mergedType = setOf(unionOf([getSetInner(acc.type), getSetInner(next.type)]));
            }
            else if (accSet && !nextSet) {
                // Set<T> + U -> Set<T | U>
                mergedType = setOf(unionOf([getSetInner(acc.type), next.type]));
            }
            else if (!accSet && nextSet) {
                // U + Set<T> -> Set<U | T>
                mergedType = setOf(unionOf([acc.type, getSetInner(next.type)]));
            }
            else {
                mergedType = unionOf([acc.type, next.type]);
            }
            // Optional wins when either occurrence was optional.
            const optional = acc.flags === dom.DeclarationFlags.Optional ||
                next.flags === dom.DeclarationFlags.Optional
                ? dom.DeclarationFlags.Optional
                : dom.DeclarationFlags.None;
            const mergedProp = dom.create.property(acc.name, mergedType, optional);
            // Join JSDoc comments when both sides carry one.
            mergedProp.jsDocComment =
                acc.jsDocComment && next.jsDocComment
                    ? `${acc.jsDocComment} | ${next.jsDocComment}`
                    : acc.jsDocComment || next.jsDocComment;
            // Carry the predicate IRI forward so later merges keep working.
            const pred = predicateIriByProp.get(acc) || predicateIriByProp.get(next);
            if (pred)
                predicateIriByProp.set(mergedProp, pred);
            acc = mergedProp;
        }
        merged.push(acc);
    }
    return merged;
}
// Helper for anonymous object(-union) types: mutates `members` in place,
// prepending a mandatory `id: IRI` property unless one is already declared.
// Tolerates null/undefined and non-array inputs (no-op).
function ensureIdOnMembers(members) {
    if (!members)
        return;
    const properties = members.filter?.((m) => m?.kind === "property") ?? [];
    const hasId = properties.some((p) => p.name === "id");
    if (!hasId) {
        members.unshift(dom.create.property("id", dom.create.namedTypeReference("IRI"), dom.DeclarationFlags.None));
    }
}
// For anonymous object types, make sure the member list declares `id`
// (mutating in place). Every other type kind passes through untouched.
function withIdOnAnonymousObject(t) {
    if (t?.kind !== "object")
        return t;
    ensureIdOnMembers(t.members);
    return t;
}
// For a union type, ensure each anonymous-object member declares `id`;
// non-union inputs are returned unchanged.
function withIdInUnionObjectMembers(t) {
    if (!isUnionType(t))
        return t;
    const patched = t.members.map((member) => member?.kind === "object" ? withIdOnAnonymousObject(member) : member);
    return dom.create.union(patched);
}
/**
 * Build a dom property, remember its predicate IRI for the dedupe pass,
 * and attach a JSDoc comment that combines any rdfs:comment annotation
 * with the original predicate IRI.
 */
function createProperty(name, type, flags, predicateIri, annotations) {
    const property = dom.create.property(name, type, flags);
    if (predicateIri) {
        predicateIriByProp.set(property, predicateIri);
    }
    const comment = commentFromAnnotations(annotations) || "";
    if (comment) {
        property.jsDocComment = `${comment}\n\nOriginal IRI: ${predicateIri ?? ""}`.trim();
    }
    else {
        property.jsDocComment = `Original IRI: ${predicateIri ?? ""}`;
    }
    return property;
}
/**
 * ShExJ -> dts-dom typing transformer (compact flavor).
 * Walks a ShExJ schema and emits dts-dom declarations: one interface per
 * ShapeDecl, with properties derived from TripleConstraints. Plural
 * object-valued predicates become Record<IRI, T>, plural scalars become
 * Set<T>. ShapeNot/ShapeExternal are rejected.
 */
export const ShexJTypingTransformerCompact = ShexJTraverser.createTransformer({
    // Transformer from Schema to interfaces
    Schema: {
        transformer: async (_schema, getTransformedChildren) => {
            const transformedChildren = await getTransformedChildren();
            const interfaces = [];
            // Keep only fully-built interface declarations; refs/strings drop out.
            transformedChildren.shapes?.forEach((shape) => {
                if (typeof shape !== "string" &&
                    shape.kind === "interface") {
                    interfaces.push(shape);
                }
            });
            return interfaces;
        },
    },
    // Transformer from ShapeDecl to interface
    ShapeDecl: {
        transformer: async (shapeDecl, getTransformedChildren) => {
            const shapeName = nameFromAnnotationOrId(shapeDecl) || "Shape";
            const { shapeExpr } = await getTransformedChildren();
            if (shapeExpr.kind === "interface") {
                const shapeInterface = shapeExpr;
                shapeInterface.name = shapeName;
                // Preserve shape id for downstream shapeTypes generation
                // (mirrors standard transformer behavior)
                shapeInterface.shapeId = shapeDecl.id;
                if (!shapeInterface.members.find((m) => m.kind === "property" && m.name === "id")) {
                    shapeInterface.members.unshift(dom.create.property("id", dom.create.namedTypeReference("IRI"),
                    // Root interfaces should have mandatory id
                    dom.DeclarationFlags.None));
                }
                return shapeInterface;
            }
            throw new Error("Unsupported direct shape expression on ShapeDecl for compact format.");
        },
    },
    // Transformer from Shape to interface
    Shape: {
        transformer: async (_shape, getTransformedChildren, setReturnPointer) => {
            // Publish the interface before recursing so circular shape
            // references resolve to this (still-empty) declaration.
            const newInterface = dom.create.interface("");
            setReturnPointer(newInterface);
            const transformedChildren = await getTransformedChildren();
            if (typeof transformedChildren.expression !== "string" &&
                transformedChildren.expression &&
                (transformedChildren.expression.kind ===
                    "object" ||
                    transformedChildren.expression
                        .kind === "interface")) {
                newInterface.members.push(...transformedChildren.expression
                    .members);
            }
            else if (transformedChildren.expression
                ?.kind === "property") {
                newInterface.members.push(transformedChildren.expression);
            }
            // Inline EXTENDS: merge base members (minus their id) before ours.
            if (transformedChildren.extends) {
                transformedChildren.extends.forEach((ext) => {
                    const extInt = ext;
                    if (extInt.kind === "interface") {
                        const merged = [
                            ...extInt.members.filter((m) => !(m.kind === "property" && m.name === "id")),
                            ...newInterface.members,
                        ].filter((m) => m.kind === "property");
                        newInterface.members = dedupeCompactProperties(merged);
                    }
                });
            }
            // Final pass: ensure only a single id property
            // (keep the first occurrence, normalize its type to IRI).
            const idSeen = new Set();
            newInterface.members = newInterface.members.filter((m, idx) => {
                if (m.kind !== "property" || m.name !== "id")
                    return true;
                if (idSeen.size === 0) {
                    idSeen.add(idx);
                    // normalize id type to IRI
                    m.type = dom.create.namedTypeReference("IRI");
                    return true;
                }
                return false;
            });
            return newInterface;
        },
    },
    // Transformer from EachOf to object type. EachOf contains the `expressions` array of properties (TripleConstraint)
    EachOf: {
        transformer: async (eachOf, getTransformedChildren, setReturnPointer) => {
            const transformedChildren = await getTransformedChildren();
            const name = nameFromAnnotationOrId(eachOf);
            // Labelled groups become named interfaces; anonymous ones stay
            // plain object types.
            const objectType = name
                ? dom.create.interface(name)
                : dom.create.objectType([]);
            setReturnPointer(objectType);
            const inputProps = [];
            // Flatten: accept direct properties and hoist the members of any
            // nested object/interface groups.
            transformedChildren.expressions.forEach((expr) => {
                if (!expr || typeof expr === "string")
                    return;
                const kind = expr.kind;
                if (kind === "property") {
                    inputProps.push(expr);
                }
                else if (kind === "object" || kind === "interface") {
                    const mlist = expr.members;
                    mlist.forEach((m) => {
                        if (m.kind === "property") {
                            inputProps.push(m);
                        }
                    });
                }
            });
            const deduped = dedupeCompactProperties(inputProps);
            objectType.members.push(...deduped);
            return objectType;
        },
    },
    // Transformer from triple constraints to type properties.
    TripleConstraint: {
        transformer: async (tripleConstraint, getTransformedChildren, _setReturnPointer, node) => {
            const transformedChildren = await getTransformedChildren();
            const baseName = tripleConstraint
                .readablePredicate;
            const max = tripleConstraint.max;
            // In ShExJ, max === -1 encodes unbounded cardinality; any max other
            // than (undefined or) 1 makes the property plural.
            const isPlural = max === -1 || (max !== undefined && max !== 1);
            const isOptional = tripleConstraint.min === 0;
            let valueType = dom.type.any;
            if (transformedChildren.valueExpr)
                valueType = transformedChildren.valueExpr;
            // Generic: If valueExpr is a NodeConstraint with concrete `values`,
            // build a union of named alias references derived from those values.
            // Works for any predicate (not only rdf:type).
            const originalValueExpr = tripleConstraint?.valueExpr;
            if (originalValueExpr &&
                typeof originalValueExpr === "object" &&
                originalValueExpr.type === "NodeConstraint" &&
                Array.isArray(originalValueExpr.values) &&
                originalValueExpr.values.length > 0) {
                const aliasRefs = [];
                for (const v of originalValueExpr.values) {
                    // valueSetValue can be string IRIREF or ObjectLiteral or other stems; handle IRIREF and ObjectLiteral
                    if (typeof v === "string") {
                        // For concrete IRIREF values, use a string literal of the IRI
                        aliasRefs.push(dom.type.stringLiteral(v));
                    }
                    else if (v && typeof v === "object") {
                        // ObjectLiteral has `value`; use that literal as alias base
                        const literalVal = v.value;
                        if (literalVal) {
                            // For explicit literal values, use a string literal type
                            aliasRefs.push(dom.type.stringLiteral(literalVal));
                        }
                        // For other union members (IriStem, ranges, Language, etc.), skip here; fall back covered below if none collected
                    }
                }
                if (aliasRefs.length > 0) {
                    const union = unionOf(aliasRefs);
                    const final = isPlural ? setOf(union) : union;
                    return createProperty(baseName, final, isOptional
                        ? dom.DeclarationFlags.Optional
                        : dom.DeclarationFlags.None, tripleConstraint.predicate, tripleConstraint.annotations);
                }
            }
            // Anonymous interfaces degrade to plain object types.
            if (valueType.kind === "interface" &&
                !valueType.name) {
                valueType = dom.create.objectType(valueType
                    .members);
            }
            // Normalize NodeConstraint returned object forms for IRIs into IRI
            // Heuristic: existing transformer (compact) returns string/number/boolean OR object/interface.
            // We treat any simple string/number/boolean/name as primitive.
            // Determine category
            const objLike = isObjectLike(valueType);
            const isUnion = valueType?.kind === "union";
            const unionMembers = isUnion
                ? valueType.members
                : [];
            const unionAllObjLike = isUnion &&
                unionMembers.length > 0 &&
                unionMembers.every(isObjectLike);
            const primLike = isPrimitiveLike(valueType);
            // Mixed plural unions cannot be represented as either Record or Set.
            if (!primLike &&
                !objLike &&
                valueType.kind === "union") {
                const u = valueType;
                const hasObj = u.members.some(isObjectLike);
                const hasPrim = u.members.some(isPrimitiveLike);
                if (isPlural && hasObj && hasPrim) {
                    throw new Error(`Mixed plural union (object + primitive) not supported for predicate ${tripleConstraint.predicate}`);
                }
            }
            let finalType;
            if (isPlural) {
                if (objLike || unionAllObjLike) {
                    if (valueType.kind ===
                        "interface" &&
                        valueType.name) {
                        const ifaceName = valueType.name;
                        // Dictionary of full object instances keyed by IRI
                        finalType = recordOf(dom.create.namedTypeReference("IRI"), dom.create.namedTypeReference(ifaceName));
                    }
                    else {
                        // Anonymous object or union of anonymous/interface objects
                        let valueForRecord = valueType;
                        if (unionAllObjLike) {
                            // Ensure each union member has id?: IRI if anonymous object
                            valueForRecord =
                                withIdInUnionObjectMembers(valueType);
                        }
                        else {
                            valueForRecord = withIdOnAnonymousObject(valueType);
                        }
                        finalType = recordOf(dom.create.namedTypeReference("IRI"), valueForRecord);
                    }
                }
                else {
                    // Plural scalar -> Set<T>
                    finalType = setOf(valueType);
                }
            }
            else {
                // Singular
                // If anonymous object or union of object-like types, ensure id: IRI is present (mandatory)
                if (objLike) {
                    if (valueType.kind === "object") {
                        valueType = withIdOnAnonymousObject(valueType);
                    }
                }
                else if (isUnion && unionAllObjLike) {
                    valueType = withIdInUnionObjectMembers(valueType);
                }
                // Singular: always the interface/object type itself (never Id union)
                if (valueType.kind ===
                    "interface" &&
                    valueType.name) {
                    finalType = dom.create.namedTypeReference(valueType.name);
                }
                else {
                    finalType = valueType;
                }
            }
            return createProperty(baseName, finalType, isOptional
                ? dom.DeclarationFlags.Optional
                : dom.DeclarationFlags.None, tripleConstraint.predicate, tripleConstraint.annotations);
        },
    },
    // Transformer from node constraint to type
    NodeConstraint: {
        transformer: async (nodeConstraint) => {
            if (nodeConstraint.datatype) {
                switch (nodeConstraint.datatype) {
                    case "http://www.w3.org/2001/XMLSchema#boolean":
                        return dom.type.boolean;
                    case "http://www.w3.org/2001/XMLSchema#byte":
                    case "http://www.w3.org/2001/XMLSchema#decimal":
                    case "http://www.w3.org/2001/XMLSchema#double":
                    case "http://www.w3.org/2001/XMLSchema#float":
                    case "http://www.w3.org/2001/XMLSchema#int":
                    case "http://www.w3.org/2001/XMLSchema#integer":
                    case "http://www.w3.org/2001/XMLSchema#long":
                    case "http://www.w3.org/2001/XMLSchema#negativeInteger":
                    case "http://www.w3.org/2001/XMLSchema#nonNegativeInteger":
                    case "http://www.w3.org/2001/XMLSchema#nonPositiveInteger":
                    case "http://www.w3.org/2001/XMLSchema#positiveInteger":
                    case "http://www.w3.org/2001/XMLSchema#short":
                    case "http://www.w3.org/2001/XMLSchema#unsignedLong":
                    case "http://www.w3.org/2001/XMLSchema#unsignedInt":
                    case "http://www.w3.org/2001/XMLSchema#unsignedShort":
                    case "http://www.w3.org/2001/XMLSchema#unsignedByte":
                        return dom.type.number;
                    default:
                        return dom.type.string; // treat most as string
                }
            }
            if (nodeConstraint.nodeKind) {
                switch (nodeConstraint.nodeKind) {
                    case "iri":
                        return dom.create.namedTypeReference("IRI");
                    case "bnode":
                        return dom.type.string; // opaque id as string
                    case "nonliteral":
                        return dom.create.namedTypeReference("IRI");
                    case "literal":
                    default:
                        return dom.type.string;
                }
            }
            if (nodeConstraint.values) {
                // Value set: union of string literals. Non-string members
                // (ObjectLiteral, stems, ...) are skipped here.
                const u = dom.create.union([]);
                nodeConstraint.values.forEach((v) => {
                    if (typeof v === "string")
                        u.members.push(dom.type.stringLiteral(v));
                });
                if (!u.members.length)
                    return dom.type.string;
                if (u.members.length === 1)
                    return u.members[0];
                return u;
            }
            return dom.type.any;
        },
    },
    // Transformer from ShapeOr to union type
    ShapeOr: {
        transformer: async (_shapeOr, getTransformedChildren) => {
            const tc = await getTransformedChildren();
            return dom.create.union(tc.shapeExprs);
        },
    },
    // Transformer from ShapeAnd to intersection type
    ShapeAnd: {
        transformer: async (_shapeAnd, getTransformedChildren) => {
            const tc = await getTransformedChildren();
            const valid = [];
            // Drop string refs; intersect only resolved type objects.
            tc.shapeExprs.forEach((t) => {
                if (typeof t === "object")
                    valid.push(t);
            });
            return dom.create.intersection(valid);
        },
    },
    // Transformer from ShapeNot to type - not supported.
    ShapeNot: {
        transformer: async () => {
            throw new Error("ShapeNot not supported (compact)");
        },
    },
    // Transformer from ShapeExternal to type - not supported.
    ShapeExternal: {
        transformer: async () => {
            throw new Error("ShapeExternal not supported (compact)");
        },
    },
});

@ -1,5 +0,0 @@
import type { InterfaceDeclaration } from "dts-dom";
/** dts-dom InterfaceDeclaration extended with the originating ShEx shape IRI. */
export interface ShapeInterfaceDeclaration extends InterfaceDeclaration {
    shapeId?: string;
}
//# sourceMappingURL=ShapeInterfaceDeclaration.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"ShapeInterfaceDeclaration.d.ts","sourceRoot":"","sources":["../../../src/schema-converter/util/ShapeInterfaceDeclaration.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,oBAAoB,EAAE,MAAM,SAAS,CAAC;AAEpD,MAAM,WAAW,yBAA0B,SAAQ,oBAAoB;IACrE,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB"}

@ -1,8 +0,0 @@
import type { Schema } from "shexj";
/**
 * Annotate EachOf-level TripleConstraints with a collision-free readablePredicate.
 * Rule: for any group that shares the same local token, rename all members using
 * prefix-first `${prefix}_${local}` from right to left; fallback to composite.
 * Mutates the schema in place; returns nothing.
 */
export default function annotateReadablePredicates(schema: Schema): void;
//# sourceMappingURL=annotateReadablePredicates.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"annotateReadablePredicates.d.ts","sourceRoot":"","sources":["../../../src/schema-converter/util/annotateReadablePredicates.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAA8C,MAAM,OAAO,CAAC;AAUhF;;;;GAIG;AACH,MAAM,CAAC,OAAO,UAAU,0BAA0B,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAoIvE"}

@ -1,129 +0,0 @@
// Split an IRI into tokens on runs of ":", "/" and "#", dropping empty parts.
const splitIriTokens = (iri) => {
    return iri.split(/[:/#]+/).filter((token) => token.length > 0);
};
// Replace every character that is not a word char, dot or dash with "_"
// (dots and dashes survive so version-like tokens such as 0.1 stay intact).
const sanitize = (s) => {
    return s.replace(/[^\w.\-]/g, "_");
};
/**
 * Annotate EachOf-level TripleConstraints with a collision-free readablePredicate.
 * Rule: for any group that shares the same local token, rename all members using
 * prefix-first `${prefix}_${local}` from right to left; fallback to composite.
 */
export default function annotateReadablePredicates(schema) {
    const shapes = schema.shapes ?? [];
    // Annotate all TripleConstraints directly inside one EachOf, then recurse into children.
    const annotateEachOf = (eachOf) => {
        if (!eachOf ||
            eachOf.type !== "EachOf" ||
            !Array.isArray(eachOf.expressions))
            return;
        // Only object-valued TripleConstraints participate; string refs are skipped.
        const tcs = eachOf.expressions.filter((e) => typeof e === "object" &&
            e !== null &&
            e.type === "TripleConstraint");
        if (tcs.length > 0) {
            // Group by local token (last segment of IRI) and set a base readablePredicate for all
            const groups = new Map();
            for (const tc of tcs) {
                const tokens = splitIriTokens(tc.predicate);
                const local = tokens.length
                    ? tokens[tokens.length - 1]
                    : tc.predicate;
                // default base name for non-colliders
                tc.readablePredicate = local;
                const arr = groups.get(local) ?? [];
                arr.push(tc);
                groups.set(local, arr);
            }
            // Resolve each group (rename all in collisions)
            for (const [, arr] of groups) {
                if (arr.length <= 1)
                    continue;
                // Names already taken within this collision group.
                const used = new Set();
                const local = splitIriTokens(arr[0].predicate).slice(-1)[0] ?? "";
                for (const tc of arr) {
                    const tokens = splitIriTokens(tc.predicate);
                    // Position of the shared local token within this predicate's own IRI.
                    let localIdx = tokens.lastIndexOf(local);
                    if (localIdx === -1)
                        localIdx = Math.max(tokens.length - 1, 0);
                    // Try `${prefix}_${local}` with prefixes scanned right-to-left.
                    let prefixIdx = localIdx - 1;
                    let assigned = false;
                    while (prefixIdx >= 0) {
                        const cand = `${sanitize(tokens[prefixIdx])}_${sanitize(tokens[localIdx])}`;
                        if (!used.has(cand)) {
                            tc.readablePredicate = cand;
                            used.add(cand);
                            assigned = true;
                            break;
                        }
                        prefixIdx -= 1;
                    }
                    // Fallback: composite built from the whole IRI path (minus protocol),
                    // suffixed with a growing counter until unique within the group.
                    if (!assigned) {
                        const iriNoProto = tc.predicate.replace(/^[a-z]+:\/\//i, "");
                        const composite = sanitize(iriNoProto
                            .split(/[:/#]+/)
                            .slice(0, -1)
                            .join("_") || "iri");
                        let cand = `${composite}_${sanitize(tokens[localIdx] || local)}`;
                        let n = 1;
                        while (used.has(cand))
                            cand = `${cand}_${n++}`;
                        tc.readablePredicate = cand;
                        used.add(cand);
                    }
                }
            }
            // Recurse into nested valueExpr shapes of each TC
            for (const tc of tcs) {
                const ve = tc.valueExpr;
                if (ve && typeof ve === "object") {
                    const t = ve.type;
                    if (t === "Shape" && ve.expression)
                        annotateEachOf(ve.expression);
                    else if (t === "EachOf")
                        annotateEachOf(ve);
                    else if (t === "ShapeOr" &&
                        Array.isArray(ve.shapeExprs)) {
                        for (const sub of ve.shapeExprs)
                            annotateFromExpr(sub);
                    }
                    else if (t === "ShapeAnd" &&
                        Array.isArray(ve.shapeExprs)) {
                        for (const sub of ve.shapeExprs)
                            annotateFromExpr(sub);
                    }
                }
            }
        }
        // Also recurse into any inline sub-EachOf/Shape expressions found directly in expressions
        for (const ex of eachOf.expressions) {
            if (ex && typeof ex === "object")
                annotateFromExpr(ex);
        }
    };
    // Dispatch on a shape-expression node and recurse into anything that can
    // contain an EachOf (Shape, EachOf, ShapeOr/ShapeAnd branches, TC valueExpr).
    const annotateFromExpr = (expr) => {
        if (!expr || typeof expr !== "object")
            return;
        const t = expr.type;
        if (t === "Shape" && expr.expression)
            annotateEachOf(expr.expression);
        else if (t === "EachOf")
            annotateEachOf(expr);
        else if (t === "ShapeOr" && Array.isArray(expr.shapeExprs)) {
            for (const sub of expr.shapeExprs)
                annotateFromExpr(sub);
        }
        else if (t === "ShapeAnd" &&
            Array.isArray(expr.shapeExprs)) {
            for (const sub of expr.shapeExprs)
                annotateFromExpr(sub);
        }
        else if (t === "TripleConstraint") {
            const ve = expr.valueExpr;
            if (ve && typeof ve === "object")
                annotateFromExpr(ve);
        }
    };
    // Entry point: walk every shape, unwrapping a ShapeDecl's shapeExpr when present.
    for (const s of shapes) {
        const sd = s;
        const shape = (sd.shapeExpr || sd);
        if (shape?.expression)
            annotateFromExpr(shape);
    }
}

@ -1,3 +0,0 @@
import type { ObjectTypeMember } from "dts-dom";
/**
 * Merge members that share a name into a single `LdSet` property whose type is
 * the union of the duplicates; members with unique names pass through unchanged.
 */
export declare function dedupeObjectTypeMembers(memberList: ObjectTypeMember[]): ObjectTypeMember[];
//# sourceMappingURL=dedupeObjectTypeMembers.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"dedupeObjectTypeMembers.d.ts","sourceRoot":"","sources":["../../../src/schema-converter/util/dedupeObjectTypeMembers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAGhD,wBAAgB,uBAAuB,CACrC,UAAU,EAAE,gBAAgB,EAAE,GAC7B,gBAAgB,EAAE,CAoCpB"}

@ -1,38 +0,0 @@
import * as dom from "dts-dom";
/**
 * Collapse duplicate members (keyed by name) into a single `LdSet` property
 * whose element type is the union of the duplicates' types. First-seen order
 * of names is preserved in the returned list.
 */
export function dedupeObjectTypeMembers(memberList) {
    // name -> current (possibly already merged) declaration
    const byName = {};
    for (const member of memberList) {
        const prop = member;
        const existing = byName[prop.name];
        if (!existing) {
            // First occurrence: keep as-is.
            byName[prop.name] = prop;
            continue;
        }
        // Unwrap LdSet<T> to T so the merged union is built from element types.
        const previousType = isLdSetType(existing.type)
            ? existing.type.typeArguments[0]
            : existing.type;
        const incomingType = isLdSetType(prop.type)
            ? prop.type.typeArguments[0]
            : prop.type;
        // The merged property is optional if either duplicate was optional.
        const optional = prop.flags === dom.DeclarationFlags.Optional ||
            existing.flags === dom.DeclarationFlags.Optional;
        const merged = dom.create.property(prop.name, {
            kind: "name",
            name: "LdSet",
            typeArguments: [dom.create.union([previousType, incomingType])],
        }, optional ? dom.DeclarationFlags.Optional : dom.DeclarationFlags.None);
        // Join JSDoc from both declarations when both exist, else keep whichever is set.
        merged.jsDocComment =
            existing.jsDocComment && prop.jsDocComment
                ? `${existing.jsDocComment} | ${prop.jsDocComment}`
                : existing.jsDocComment || prop.jsDocComment;
        byName[prop.name] = merged;
    }
    return Object.values(byName);
}
// True when the dts-dom type reference is a named reference to `LdSet`.
function isLdSetType(potentialLdSet) {
    if (potentialLdSet.kind !== "name")
        return false;
    return potentialLdSet.name === "LdSet";
}

@ -1,4 +0,0 @@
import type { ShexJTraverserTypes } from "@ldo/traverser-shexj";
import type { InterfaceInstanceNode } from "@ldo/type-traverser";
export declare function getRdfTypesForTripleConstraint(tripleConstraintNode: InterfaceInstanceNode<ShexJTraverserTypes, "TripleConstraint", ShexJTraverserTypes["TripleConstraint"]>): string[] | undefined[];
//# sourceMappingURL=getRdfTypesForTripleConstraint.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"getRdfTypesForTripleConstraint.d.ts","sourceRoot":"","sources":["../../../src/schema-converter/util/getRdfTypesForTripleConstraint.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACR,mBAAmB,EAEtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,qBAAqB,CAAC;AAwGjE,wBAAgB,8BAA8B,CAC1C,oBAAoB,EAAE,qBAAqB,CACvC,mBAAmB,EACnB,kBAAkB,EAClB,mBAAmB,CAAC,kBAAkB,CAAC,CAC1C,GACF,MAAM,EAAE,GAAG,SAAS,EAAE,CA4BxB"}

@ -1,89 +0,0 @@
// If `tripleExpr` is a TripleConstraint on rdf:type whose valueExpr is a
// NodeConstraint with enumerated values, add each string value to `rdfTypeSet`.
function addRdfTypeFromTripleExpr(tripleExpr, rdfTypeSet) {
    if (typeof tripleExpr !== "object")
        return;
    if (tripleExpr.type !== "TripleConstraint")
        return;
    if (tripleExpr.predicate !==
        "http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
        return;
    const valueExpr = tripleExpr.valueExpr;
    if (typeof valueExpr !== "object" ||
        valueExpr.type !== "NodeConstraint" ||
        !valueExpr.values)
        return;
    for (const val of valueExpr.values) {
        // Plain strings are IRIs; structured values are not handled.
        // TODO handle other edge cases like IRIStem
        if (typeof val === "string")
            rdfTypeSet.add(val);
    }
}
// Collect rdf:type IRIs declared on `shapeNode` itself, then climb the
// traverser graph to shapes that reference this one via `extends` and gather
// their types too. NOTE(review): the parent()/child() traversal semantics come
// from @ldo/type-traverser and are assumed here — confirm against that API.
function recursivelyGatherTypesFromShapeNodes(shapeNode, rdfTypeSet) {
    // The shape's own expression may directly be an rdf:type TripleConstraint.
    const tripleExpr = shapeNode.instance.expression;
    if (tripleExpr)
        addRdfTypeFromTripleExpr(tripleExpr, rdfTypeSet);
    // Walk up: shapeExpr -> ShapeDecl -> shapeDeclRef -> shapeExprOrRef -> Shape.extends
    shapeNode.parent("shapeExpr").forEach((parentShapeExpr) => {
        parentShapeExpr
            .parent("ShapeDecl", "shapeExpr")
            .forEach((parentShapeDecl) => {
            parentShapeDecl
                .parent("shapeDeclRef")
                .forEach((parentShapeDeclOrRef) => {
                parentShapeDeclOrRef
                    .parent("shapeExprOrRef")
                    .forEach((parentShapeExprOrRef) => {
                    parentShapeExprOrRef
                        .parent("Shape", "extends")
                        .forEach((parentShape) => {
                        // Recurse through the extending shape's own hierarchy first.
                        recursivelyGatherTypesFromShapeNodes(parentShape, rdfTypeSet);
                        const childExpressionNode = parentShape.child("expression");
                        if (!childExpressionNode)
                            return;
                        // Drill down two levels to reach a possible EachOf inside
                        // the extending shape's expression.
                        const childEachOf = childExpressionNode
                            .child()
                            .child();
                        if (childEachOf.typeName === "EachOf") {
                            recursivelyGatherTypesFromEachOfNodes(childEachOf, rdfTypeSet);
                        }
                    });
                });
            });
        });
    });
}
// Collect rdf:type IRIs from every TripleConstraint directly inside
// `eachOfNode`, then climb to any enclosing EachOf group or owning Shape so
// types declared at outer levels (or via shape extends) are included as well.
function recursivelyGatherTypesFromEachOfNodes(eachOfNode, rdfTypeSet) {
    const tripleExprs = eachOfNode.instance.expressions;
    tripleExprs.forEach((tripleExpr) => {
        addRdfTypeFromTripleExpr(tripleExpr, rdfTypeSet);
    });
    // Walk up: EachOf -> tripleExpr -> tripleExprOrRef -> containing EachOf/Shape.
    eachOfNode.parent("tripleExpr").forEach((tripleExprNode) => {
        const tripleExprOrRefNodes = tripleExprNode.parent("tripleExprOrRef");
        tripleExprOrRefNodes.forEach((tripleExprOrRdfNode) => {
            const parentEachOfs = tripleExprOrRdfNode.parent("EachOf", "expressions");
            parentEachOfs.forEach((parentEachOf) => {
                recursivelyGatherTypesFromEachOfNodes(parentEachOf, rdfTypeSet);
            });
            // Deal with shape extends
            const parentShapes = tripleExprOrRdfNode.parent("Shape", "expression");
            parentShapes.forEach((parentShape) => recursivelyGatherTypesFromShapeNodes(parentShape, rdfTypeSet));
        });
    });
}
/**
 * Determine the rdf:type IRIs that apply to `tripleConstraintNode` by scanning
 * sibling TripleConstraints in its enclosing EachOf/Shape (including extended
 * shapes). Returns `[undefined]` when no rdf:type constraint is found, so
 * callers can still iterate over one "typeless" entry.
 */
export function getRdfTypesForTripleConstraint(tripleConstraintNode) {
    // Check that there's a triple constraint that is a type at the
    // same level if there is, use that as an rdfType
    const rdfTypeSet = new Set();
    tripleConstraintNode.parent("tripleExpr").forEach((tripleExprParents) => {
        tripleExprParents
            .parent("tripleExprOrRef")
            .forEach((tripleExprOrRefParent) => {
            // Siblings grouped together inside an EachOf.
            tripleExprOrRefParent
                .parent("EachOf", "expressions")
                .forEach((eachOfParent) => {
                recursivelyGatherTypesFromEachOfNodes(eachOfParent, rdfTypeSet);
            });
            // Constraint attached directly as a Shape's expression.
            tripleExprOrRefParent
                .parent("Shape", "expression")
                .forEach((shapeParent) => {
                recursivelyGatherTypesFromShapeNodes(shapeParent, rdfTypeSet);
            });
        });
    });
    const rdfTypes = rdfTypeSet.size > 0 ? Array.from(rdfTypeSet) : [undefined];
    return rdfTypes;
}

@ -1,37 +0,0 @@
/** Binds a compiled `Schema` to one entry shape; `T` is the object shape it maps to. */
export interface ShapeType<T extends BaseType> {
    /** The compiled schema this shape type belongs to. */
    schema: Schema;
    // Identifier of the entry shape — presumably a key of `schema`; confirm with generator.
    shape: string;
}
/** Base constraint for mapped objects: an `id` plus arbitrary extra properties. */
export interface BaseType extends Record<string, any> {
    /** Subject identifier of the object. */
    id: string;
}
/** A compiled schema: shape definitions keyed by shape id. */
export type Schema = {
    [id: string]: Shape;
};
/** One shape definition: its IRI and the predicate constraints it declares. */
export interface Shape {
    /** IRI identifying this shape. */
    iri: string;
    /** Constraints on each predicate this shape allows. */
    predicates: Predicate[];
}
export type DataType = {
    /** The required literal value(s), if type is `literal`. Others are allowed, if `extra` is true. */
    // NOTE(review): the `boolean` arm is scalar while the others are arrays — confirm intentional.
    literals?: number[] | string[] | boolean;
    /** If `valType` is `"shape"`, the nested shape or its reference. Use reference for serialization. */
    shape?: string | Shape;
    /** The type of object value for a triple constraint. */
    valType: "number" | "string" | "boolean" | "iri" | "literal" | "shape";
};
export interface Predicate {
    /** Allowed type of object. If more than one is present, either of them is allowed. */
    dataTypes: DataType[];
    /** The RDF predicate URI. */
    iri: string;
    /** The alias of the `predicateUri` when serialized to a JSON object. */
    readablePredicate: string;
    /** Maximum allowed number of values. `-1` means infinite. */
    maxCardinality: number;
    /** Minimum required number of values */
    minCardinality: number;
    /** If other (additional) values are permitted. Useful for literals. */
    extra?: boolean;
}
//# sourceMappingURL=types.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,SAAS,CAAC,CAAC,SAAS,QAAQ;IACzC,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,QAAS,SAAQ,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC;IACjD,EAAE,EAAE,MAAM,CAAC;CACd;AAED,MAAM,MAAM,MAAM,GAAG;IACjB,CAAC,EAAE,EAAE,MAAM,GAAG,KAAK,CAAC;CACvB,CAAC;AAEF,MAAM,WAAW,KAAK;IAClB,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,EAAE,SAAS,EAAE,CAAC;CAC3B;AAED,MAAM,MAAM,QAAQ,GAAG;IACnB,mGAAmG;IACnG,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,GAAG,OAAO,CAAC;IACzC,qGAAqG;IACrG,KAAK,CAAC,EAAE,MAAM,GAAG,KAAK,CAAC;IACvB,wDAAwD;IACxD,OAAO,EAAE,QAAQ,GAAG,QAAQ,GAAG,SAAS,GAAG,KAAK,GAAG,SAAS,GAAG,OAAO,CAAC;CAC1E,CAAC;AAEF,MAAM,WAAW,SAAS;IACtB,sFAAsF;IACtF,SAAS,EAAE,QAAQ,EAAE,CAAC;IACtB,6BAA6B;IAC7B,GAAG,EAAE,MAAM,CAAC;IACZ,wEAAwE;IACxE,iBAAiB,EAAE,MAAM,CAAC;IAC1B,6DAA6D;IAC7D,cAAc,EAAE,MAAM,CAAC;IACvB,wCAAwC;IACxC,cAAc,EAAE,MAAM,CAAC;IACvB,uEAAuE;IACvE,KAAK,CAAC,EAAE,OAAO,CAAC;CACnB"}

@ -1 +0,0 @@
export {};

@ -1,2 +0,0 @@
/**
 * Invoke `callback` with the basename (without extension) and UTF-8 contents
 * of every `.shex` file directly inside `shapePath`; resolves when all done.
 */
export declare function forAllShapes(shapePath: string, callback: (filename: string, shape: string) => Promise<void>): Promise<void>;
//# sourceMappingURL=forAllShapes.d.ts.map

@ -1 +0,0 @@
{"version":3,"file":"forAllShapes.d.ts","sourceRoot":"","sources":["../../src/util/forAllShapes.ts"],"names":[],"mappings":"AAGA,wBAAsB,YAAY,CAC9B,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,GAC7D,OAAO,CAAC,IAAI,CAAC,CAuBf"}

@ -1,17 +0,0 @@
import fs from "fs";
import path from "node:path";
/**
 * Read every `.shex` file directly inside `shapePath` and invoke `callback`
 * with the file's basename (without extension) and its UTF-8 contents.
 * Files are processed concurrently; the returned promise resolves once every
 * callback has completed (and rejects on the first failure).
 *
 * @param shapePath directory to scan (non-recursive)
 * @param callback  async consumer receiving (fileName, shexContents)
 */
export async function forAllShapes(shapePath, callback) {
    const shapeDir = await fs.promises.readdir(shapePath, {
        withFileTypes: true,
    });
    // Filter out non-shex documents
    const shexFiles = shapeDir.filter((file) => file.isFile() && file.name.endsWith(".shex"));
    // Process all shape files in parallel. (Previously the single combined
    // promise was redundantly re-wrapped in another Promise.all.)
    await Promise.all(shexFiles.map(async (file) => {
        const fileName = path.parse(file.name).name;
        // Get the content of each document
        const shexC = await fs.promises.readFile(path.join(shapePath, file.name), "utf8");
        await callback(fileName, shexC);
    }));
    // Note: SHACL conversion omitted here.
}

@ -204,8 +204,30 @@ export const ShexJSchemaTransformerCompact = ShexJTraverser.createTransformer<
literals: nodeConstraint.values.map( literals: nodeConstraint.values.map(
// TODO: We do not convert them to number or boolean or lang tag. // TODO: We do not convert them to number or boolean or lang tag.
// And we don't have an annotation of the literal's type. // And we don't have an annotation of the literal's type.
// @ts-expect-error (valueRecord) => {
(valueRecord) => valueRecord.value || valueRecord.id // If valueRecord is a string (IRIREF), return it directly
if (typeof valueRecord === "string") {
return valueRecord;
}
// Handle ObjectLiteral (has .value property)
if ("value" in valueRecord) {
return valueRecord.value;
}
// Handle other types with .id property (if any)
if ("id" in valueRecord) {
return (valueRecord as any).id;
}
// Handle Language type (has .languageTag)
if ("languageTag" in valueRecord) {
return valueRecord.languageTag;
}
// Handle stem-based types (IriStem, LiteralStem, LanguageStem)
if ("stem" in valueRecord) {
return valueRecord.stem as string;
}
// Fallback - should not happen in well-formed ShEx
return undefined;
}
), ),
}; };
} }

@ -127,6 +127,18 @@ describe("applyDiff - multi-valued objects (Set-based)", () => {
expect(remaining["@id"]).toBe("urn:child2"); expect(remaining["@id"]).toBe("urn:child2");
}); });
test.only("remove object from root set", () => {
const obj1 = { "@id": "urn:child1", name: "Alice" };
const obj2 = { "@id": "urn:child2", name: "Bob" };
const state = new Set([
{ "@id": "urn:person1", children: [obj1] },
{ "@id": "urn:person2", children: [obj2] },
]);
const diff: Patch[] = [{ op: "remove", path: p("urn:person1") }];
applyDiff(state, diff);
expect(state.size).toBe(1);
});
test("create nested Set (multi-valued property within object in Set)", () => { test("create nested Set (multi-valued property within object in Set)", () => {
const parent: any = { "@id": "urn:parent1" }; const parent: any = { "@id": "urn:parent1" };
const state: any = { root: { parents: new Set([parent]) } }; const state: any = { root: { parents: new Set([parent]) } };

@ -232,7 +232,7 @@ export function applyDiff(
} }
// Handle remove from Set // Handle remove from Set
if (patch.op === "remove" && !patch.valType) { if (patch.op === "remove" && patch.valType !== "set") {
if (targetObj) { if (targetObj) {
parentVal.delete(targetObj); parentVal.delete(targetObj);
} }

@ -1,5 +1,5 @@
import type { Diff as Patches, Scope } from "../types.ts"; import type { Diff as Patches, Scope } from "../types.ts";
import { applyDiff } from "./applyDiff.ts"; import { applyDiff, applyDiffToDeepSignal, Patch } from "./applyDiff.ts";
import { ngSession } from "./initNg.ts"; import { ngSession } from "./initNg.ts";
@ -69,17 +69,14 @@ export class OrmConnection<T extends BaseType> {
this.resolveReady = resolve; this.resolveReady = resolve;
}); });
ngSession.then(({ ng, session }) => { ngSession.then(async ({ ng, session }) => {
console.log("ng and session", ng, session); console.log("Creating orm connection. ng and session", ng, session);
try { try {
const sc = ("did:ng:" + session.private_store_id).substring( await new Promise((resolve) => setTimeout(resolve, 4_000));
0,
53
);
console.log("calling orm_start with nuri", sc);
ng.orm_start( ng.orm_start(
sc, (scope.length == 0
? "did:ng:" + session.private_store_id
: scope) as string,
shapeType, shapeType,
session.session_id, session.session_id,
this.onBackendMessage this.onBackendMessage
@ -97,10 +94,10 @@ export class OrmConnection<T extends BaseType> {
* @param ng * @param ng
* @returns * @returns
*/ */
public static getConnection<T extends BaseType>( public static getConnection = <T extends BaseType>(
shapeType: ShapeType<T>, shapeType: ShapeType<T>,
scope: Scope scope: Scope
): OrmConnection<T> { ): OrmConnection<T> => {
const scopeKey = canonicalScope(scope); const scopeKey = canonicalScope(scope);
// Unique identifier for a given shape type and scope. // Unique identifier for a given shape type and scope.
@ -118,50 +115,64 @@ export class OrmConnection<T extends BaseType> {
OrmConnection.idToEntry.set(identifier, newConnection); OrmConnection.idToEntry.set(identifier, newConnection);
return newConnection; return newConnection;
} }
} };
public release() { public release = () => {
if (this.refCount > 0) this.refCount--; if (this.refCount > 0) this.refCount--;
if (this.refCount === 0) { if (this.refCount === 0) {
OrmConnection.idToEntry.delete(this.identifier); OrmConnection.idToEntry.delete(this.identifier);
OrmConnection.cleanupSignalRegistry?.unregister(this.signalObject); OrmConnection.cleanupSignalRegistry?.unregister(this.signalObject);
} }
} };
private onSignalObjectUpdate({ patches }: WatchPatchEvent<Set<T>>) { private onSignalObjectUpdate = ({ patches }: WatchPatchEvent<Set<T>>) => {
if (this.suspendDeepWatcher || !this.ready || !patches.length) return; if (this.suspendDeepWatcher || !this.ready || !patches.length) return;
console.debug("[onSignalObjectUpdate] got changes:", patches);
const ormPatches = deepPatchesToDiff(patches); const ormPatches = deepPatchesToDiff(patches);
ngSession.then(({ ng, session }) => { ngSession.then(({ ng, session }) => {
ng.orm_update( ng.orm_update(
("did:ng:" + session.private_store_id).substring(0, 53), (this.scope.length == 0
? "did:ng:" + session.private_store_id
: this.scope) as string,
this.shapeType.shape, this.shapeType.shape,
ormPatches, ormPatches,
session.session_id session.session_id
); );
}); });
} };
private onBackendMessage(...message: any) { private onBackendMessage = ({ V0: data }: any) => {
this.handleInitialResponse(message); if (data.OrmInitial) {
this.handleInitialResponse(data.OrmInitial);
} else if (data.OrmUpdate) {
this.onBackendUpdate(data.OrmUpdate);
} else {
console.warn("Received unknown ORM message from backend", data);
} }
};
private handleInitialResponse(...param: any) { private handleInitialResponse = (initialData: any) => {
console.log("RESPONSE FROM BACKEND", param); console.debug(
"[handleInitialResponse] handleInitialResponse called with",
// TODO: This will break, just provisionary. initialData
const wasmMessage: any = param; );
const { initialData } = wasmMessage;
// Assign initial data to empty signal object without triggering watcher at first. // Assign initial data to empty signal object without triggering watcher at first.
this.suspendDeepWatcher = true; this.suspendDeepWatcher = true;
batch(() => { batch(() => {
// Do this in case the there was any (incorrect) data added before initialization.
this.signalObject.clear();
// Convert arrays to sets and apply to signalObject (we only have sets but can only transport arrays). // Convert arrays to sets and apply to signalObject (we only have sets but can only transport arrays).
for (const newItem of recurseArrayToSet(initialData)) { for (const newItem of parseOrmInitialObject(initialData)) {
this.signalObject.add(newItem); this.signalObject.add(newItem);
} }
console.log(
"[handleInitialResponse] signal object:",
this.signalObject
);
}); });
queueMicrotask(() => { queueMicrotask(() => {
@ -171,17 +182,34 @@ export class OrmConnection<T extends BaseType> {
}); });
this.ready = true; this.ready = true;
} };
private onBackendUpdate(...params: any) { private onBackendUpdate = (patches: Patch[]) => {
// Apply diff console.log(
} "connectionHandler: onBackendUpdate. Got patches:",
patches
);
this.suspendDeepWatcher = true;
applyDiffToDeepSignal(this.signalObject, patches);
// Use queueMicrotask to ensure watcher is re-enabled _after_ batch completes
queueMicrotask(() => {
this.suspendDeepWatcher = false;
});
};
/** Function to create random subject IRIs for newly created nested objects. */ /** Function to create random subject IRIs for newly created nested objects. */
private generateSubjectIri(path: (string | number)[]): string { private generateSubjectIri = (path: (string | number)[]): string => {
// Generate random string. console.debug("Generating new random id for path", path);
let b = Buffer.alloc(33); // Generate 33 random bytes using Web Crypto API
const b = new Uint8Array(33);
crypto.getRandomValues(b); crypto.getRandomValues(b);
const randomString = b.toString("base64url"); // Convert to base64url
const base64url = (bytes: Uint8Array) =>
btoa(String.fromCharCode(...bytes))
.replace(/\+/g, "-")
.replace(/\//g, "_")
.replace(/=+$/, "");
const randomString = base64url(b);
if (path.length > 0 && path[0].toString().startsWith("did:ng:o:")) { if (path.length > 0 && path[0].toString().startsWith("did:ng:o:")) {
// If the root is a nuri, use that as a base IRI. // If the root is a nuri, use that as a base IRI.
@ -192,7 +220,7 @@ export class OrmConnection<T extends BaseType> {
// Else, just generate a random IRI. // Else, just generate a random IRI.
return "did:ng:q:" + randomString; return "did:ng:q:" + randomString;
} }
} };
} }
// //
@ -211,12 +239,19 @@ export function deepPatchesToDiff(patches: DeepPatch[]): Patches {
}) as Patches; }) as Patches;
} }
const recurseArrayToSet = (obj: any): any => { const parseOrmInitialObject = (obj: any): any => {
// Regular arrays become sets.
if (Array.isArray(obj)) { if (Array.isArray(obj)) {
return new Set(obj.map(recurseArrayToSet)); return new Set(obj.map(parseOrmInitialObject));
} else if (obj && typeof obj === "object") { } else if (obj && typeof obj === "object") {
if ("@id" in obj) {
// Regular object.
for (const key of Object.keys(obj)) { for (const key of Object.keys(obj)) {
obj[key] = recurseArrayToSet(obj[key]); obj[key] = parseOrmInitialObject(obj[key]);
}
} else {
// Object does not have @id, that means it's a set of objects.
return new Set(Object.values(obj));
} }
return obj; return obj;
} else { } else {

@ -9,14 +9,16 @@ const useShape = <T extends BaseType>(
shape: ShapeType<T>, shape: ShapeType<T>,
scope: Scope = "" scope: Scope = ""
) => { ) => {
const shapeSignalRef = useRef< const shapeSignalRef = useRef<ReturnType<
ReturnType<typeof createSignalObjectForShape<T>> typeof createSignalObjectForShape<T>
>(createSignalObjectForShape(shape, scope)); > | null>(null);
const [, setTick] = useState(0); const [, setTick] = useState(0);
useEffect(() => { useEffect(() => {
shapeSignalRef.current = createSignalObjectForShape(shape, scope);
const handle = shapeSignalRef.current; const handle = shapeSignalRef.current;
const deepSignalObj = handle.signalObject; const deepSignalObj = handle.signalObject;
const { stopListening } = watch(deepSignalObj, () => { const { stopListening } = watch(deepSignalObj, () => {
// trigger a React re-render when the deep signal updates // trigger a React re-render when the deep signal updates
setTick((t) => t + 1); setTick((t) => t + 1);
@ -31,9 +33,7 @@ const useShape = <T extends BaseType>(
}; };
}, []); }, []);
if ("@id" in shapeSignalRef.current.signalObject) return shapeSignalRef.current?.signalObject;
return shapeSignalRef.current.signalObject;
else return null;
}; };
export default useShape; export default useShape;

@ -2,97 +2,43 @@ import { ref, onBeforeUnmount } from "vue";
import { watch } from "@ng-org/alien-deepsignals"; import { watch } from "@ng-org/alien-deepsignals";
/** /**
* Bridge a deepSignal root into Vue with per top-level property granularity. * Bridge a deepSignal root into Vue with reactivity.
* Uses a single version counter that increments on any deep mutation,
* causing Vue to re-render when the deepSignal changes.
*
* TODO: Check performance and potentially improve.
*/ */
export function useDeepSignal<T extends Record<string | number | symbol, any>>( export function useDeepSignal<T>(deepProxy: T): T {
deepProxy: T const version = ref(0);
): T {
// Version per top-level key
const versionRefs = new Map<PropertyKey, ReturnType<typeof ref<number>>>();
// Version for the set of top-level keys (enumeration/in-operator)
const keysVersion = ref(0);
const ensureVersion = (key: PropertyKey) => {
if (!versionRefs.has(key)) versionRefs.set(key, ref(0));
return versionRefs.get(key)!;
};
const bump = (key: PropertyKey) => {
const vr = ensureVersion(key);
vr.value = (vr.value || 0) + 1;
};
const bumpAllTopKeys = () => {
for (const k of Reflect.ownKeys(deepProxy as object)) bump(k);
};
// Seed existing string keys (symbols will be created on demand)
for (const k of Object.keys(deepProxy as object)) ensureVersion(k);
// Normalize first path element to a JS property key compatible with Proxy traps
const normalizeTopKey = (k: unknown): PropertyKey =>
typeof k === "number" ? String(k) : (k as PropertyKey);
// Subscribe to deep patches (coalesced per batch to avoid redundant triggers)
const stopHandle = watch(deepProxy, ({ patches }) => { const stopHandle = watch(deepProxy, ({ patches }) => {
let sawRoot = false; if (patches.length > 0) {
let keysChanged = false; version.value++;
const touched = new Set<PropertyKey>();
for (const p of patches) {
if (!p || !Array.isArray(p.path)) continue;
if (p.path.length === 0) {
sawRoot = true;
break; // full invalidation; no need to examine the rest
}
touched.add(normalizeTopKey(p.path[0]));
const op = p.op as string | undefined;
if (p.path.length === 1 && (op === "add" || op === "remove")) {
keysChanged = true;
}
}
if (sawRoot) {
keysVersion.value++;
bumpAllTopKeys();
return;
} }
if (keysChanged) keysVersion.value++;
for (const k of touched) bump(k);
}); });
const proxy = new Proxy({} as T, { // Proxy that creates Vue dependency on version for any access
get(_t, key: PropertyKey) { const proxy = new Proxy(deepProxy as any, {
if (key === "__raw") return deepProxy; get(target, key: PropertyKey) {
// Track per-key dependence if (key === "__raw") return target;
ensureVersion(key).value; // Track version to create reactive dependency
return deepProxy[key]; version.value;
}, const value = target[key];
set(_t, key: PropertyKey, value: any) { // Bind methods to maintain correct `this` context
deepProxy[key] = value; return typeof value === "function" ? value.bind(target) : value;
return true;
},
deleteProperty(_t, key: PropertyKey) {
return delete deepProxy[key];
}, },
has(_t, key: PropertyKey) { has(target, key: PropertyKey) {
// Make `'foo' in proxy` reactive to key set changes version.value;
keysVersion.value; return key in target;
return key in deepProxy;
}, },
ownKeys() { ownKeys(target) {
// Make Object.keys/for...in/v-for over keys reactive version.value;
keysVersion.value; return Reflect.ownKeys(target);
return Reflect.ownKeys(deepProxy as object);
}, },
getOwnPropertyDescriptor(_t, key: PropertyKey) { getOwnPropertyDescriptor(target, key: PropertyKey) {
// Keep enumeration reactive; report a configurable, enumerable prop version.value;
keysVersion.value; const desc = Object.getOwnPropertyDescriptor(target, key);
return { configurable: true, enumerable: true }; return desc ? { ...desc, configurable: true } : undefined;
}, },
}); });
@ -102,10 +48,9 @@ export function useDeepSignal<T extends Record<string | number | symbol, any>>(
} catch { } catch {
// ignore // ignore
} }
versionRefs.clear();
}); });
return proxy; return proxy as T;
} }
export default useDeepSignal; export default useDeepSignal;

@ -8,8 +8,8 @@
// according to those terms. // according to those terms.
use crate::local_broker::{doc_sparql_construct, orm_start, orm_update}; use crate::local_broker::{doc_sparql_construct, orm_start, orm_update};
use crate::tests::create_doc_with_data;
use crate::tests::create_or_open_wallet::create_or_open_wallet; use crate::tests::create_or_open_wallet::create_or_open_wallet;
use crate::tests::{assert_json_eq, create_doc_with_data};
use async_std::stream::StreamExt; use async_std::stream::StreamExt;
use ng_net::app_protocol::{AppResponse, AppResponseV0, NuriV0}; use ng_net::app_protocol::{AppResponse, AppResponseV0, NuriV0};
use ng_net::orm::{ use ng_net::orm::{
@ -55,6 +55,9 @@ async fn test_orm_apply_patches() {
// Test 9: Nested object creation // Test 9: Nested object creation
test_patch_create_nested_object(session_id).await; test_patch_create_nested_object(session_id).await;
// Test 10: Object deleted after invalidating patch.
test_patch_invalidating_object(session_id).await
} }
/// Test adding a single literal value via ORM patch /// Test adding a single literal value via ORM patch
@ -130,7 +133,7 @@ INSERT DATA {
// Apply ORM patch: Add name // Apply ORM patch: Add name
let diff = vec![OrmPatch { let diff = vec![OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person1/name".to_string(), path: "/urn:test:person1/name".to_string(),
valType: None, valType: None,
value: Some(json!("Alice")), value: Some(json!("Alice")),
}]; }];
@ -229,7 +232,7 @@ INSERT DATA {
// Apply ORM patch: Remove name // Apply ORM patch: Remove name
let diff = vec![OrmPatch { let diff = vec![OrmPatch {
op: OrmPatchOp::remove, op: OrmPatchOp::remove,
path: "urn:test:person2/name".to_string(), path: "/urn:test:person2/name".to_string(),
valType: None, valType: None,
value: Some(json!("Bob")), value: Some(json!("Bob")),
}]; }];
@ -329,13 +332,13 @@ INSERT DATA {
let diff = vec![ let diff = vec![
// OrmDiffOp { // OrmDiffOp {
// op: OrmDiffOpType::remove, // op: OrmDiffOpType::remove,
// path: "urn:test:person3/name".to_string(), // path: "/urn:test:person3/name".to_string(),
// valType: None, // valType: None,
// value: Some(json!("Charlie")), // value: Some(json!("Charlie")),
// }, // },
OrmPatch { OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person3/name".to_string(), path: "/urn:test:person3/name".to_string(),
valType: None, valType: None,
value: Some(json!("Charles")), value: Some(json!("Charles")),
}, },
@ -443,7 +446,7 @@ INSERT DATA {
let diff = vec![OrmPatch { let diff = vec![OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
valType: Some(OrmPatchType::set), valType: Some(OrmPatchType::set),
path: "urn:test:person4/hobby".to_string(), path: "/urn:test:person4/hobby".to_string(),
value: Some(json!("Swimming")), value: Some(json!("Swimming")),
}]; }];
@ -543,7 +546,7 @@ INSERT DATA {
// Apply ORM patch: Remove hobby // Apply ORM patch: Remove hobby
let diff = vec![OrmPatch { let diff = vec![OrmPatch {
op: OrmPatchOp::remove, op: OrmPatchOp::remove,
path: "urn:test:person5/hobby".to_string(), path: "/urn:test:person5/hobby".to_string(),
valType: None, valType: None,
value: Some(json!("Swimming")), value: Some(json!("Swimming")),
}]; }];
@ -712,7 +715,7 @@ INSERT DATA {
// Apply ORM patch: Change city in nested address // Apply ORM patch: Change city in nested address
let diff = vec![OrmPatch { let diff = vec![OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person6/address/city".to_string(), path: "/urn:test:person6/address/city".to_string(),
valType: None, valType: None,
value: Some(json!("Shelbyville")), value: Some(json!("Shelbyville")),
}]; }];
@ -931,7 +934,7 @@ INSERT DATA {
// Apply ORM patch: Change street in company's headquarter address (3 levels deep) // Apply ORM patch: Change street in company's headquarter address (3 levels deep)
let diff = vec![OrmPatch { let diff = vec![OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person7/company/urn:test:company1/headquarter/street".to_string(), path: "/urn:test:person7/company/urn:test:company1/headquarter/street".to_string(),
valType: None, valType: None,
value: Some(json!("Rich Street")), value: Some(json!("Rich Street")),
}]; }];
@ -1038,7 +1041,7 @@ INSERT DATA {
let diff = vec![ let diff = vec![
OrmPatch { OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person8".to_string(), path: "/urn:test:person8".to_string(),
valType: Some(OrmPatchType::object), valType: Some(OrmPatchType::object),
value: None, value: None,
}, },
@ -1046,19 +1049,19 @@ INSERT DATA {
// This does nothing as it does not represent a triple. // This does nothing as it does not represent a triple.
// A subject is created when inserting data. // A subject is created when inserting data.
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person8/@id".to_string(), path: "/urn:test:person8/@id".to_string(),
valType: Some(OrmPatchType::object), valType: Some(OrmPatchType::object),
value: None, value: None,
}, },
OrmPatch { OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person8/type".to_string(), path: "/urn:test:person8/type".to_string(),
valType: None, valType: None,
value: Some(json!("http://example.org/Person")), value: Some(json!("http://example.org/Person")),
}, },
OrmPatch { OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person8/name".to_string(), path: "/urn:test:person8/name".to_string(),
valType: None, valType: None,
value: Some(json!("Alice")), value: Some(json!("Alice")),
}, },
@ -1218,13 +1221,13 @@ INSERT DATA {
let diff = vec![ let diff = vec![
OrmPatch { OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person9/address/http:~1~1example.org~1exampleAddress/type".to_string(), path: "/urn:test:person9/address/http:~1~1example.org~1exampleAddress/type".to_string(),
valType: None, valType: None,
value: Some(json!("http://example.org/Address")), value: Some(json!("http://example.org/Address")),
}, },
OrmPatch { OrmPatch {
op: OrmPatchOp::add, op: OrmPatchOp::add,
path: "urn:test:person9/address/http:~1~1example.org~1exampleAddress/street" path: "/urn:test:person9/address/http:~1~1example.org~1exampleAddress/street"
.to_string(), .to_string(),
valType: None, valType: None,
value: Some(json!("Heaven Avenue")), value: Some(json!("Heaven Avenue")),
@ -1264,3 +1267,117 @@ INSERT DATA {
log_info!("✓ Test passed: Nested object creation"); log_info!("✓ Test passed: Nested object creation");
} }
/// Test replacing an object's `type` with a value the schema does not allow,
/// thereby invalidating the object.
///
/// Flow: create a valid `ex:Person`, subscribe via ORM, patch its `type` to
/// an arbitrary string that fails the shape's literal constraint, and expect
/// the verifier to emit a single `remove` patch for the whole root object.
async fn test_patch_invalidating_object(session_id: u64) {
    log_info!("\n\n=== TEST: Patch Invalidating Object ===\n");

    // Seed a document with one person that conforms to the schema below.
    let doc_nuri = create_doc_with_data(
        session_id,
        r#"
        PREFIX ex: <http://example.org/>
        INSERT DATA {
            <urn:test:person2> a ex:Person ;
                ex:name "Bob" .
        }
        "#
        .to_string(),
    )
    .await;

    let mut schema = HashMap::new();
    schema.insert(
        "http://example.org/Person".to_string(),
        Arc::new(OrmSchemaShape {
            iri: "http://example.org/Person".to_string(),
            predicates: vec![
                // `type` is required (min/max cardinality 1) and constrained to
                // exactly "http://example.org/Person" with extra = false, so
                // replacing it with any other literal invalidates the object.
                Arc::new(OrmSchemaPredicate {
                    iri: "http://www.w3.org/1999/02/22-rdf-syntax-ns#type".to_string(),
                    extra: Some(false),
                    maxCardinality: 1,
                    minCardinality: 1,
                    readablePredicate: "type".to_string(),
                    dataTypes: vec![OrmSchemaDataType {
                        valType: OrmSchemaValType::literal,
                        literals: Some(vec![BasicType::Str(
                            "http://example.org/Person".to_string(),
                        )]),
                        shape: None,
                    }],
                }),
                // `name` is an optional plain string.
                Arc::new(OrmSchemaPredicate {
                    iri: "http://example.org/name".to_string(),
                    extra: Some(false),
                    readablePredicate: "name".to_string(),
                    minCardinality: 0,
                    maxCardinality: 1,
                    dataTypes: vec![OrmSchemaDataType {
                        valType: OrmSchemaValType::string,
                        literals: None,
                        shape: None,
                    }],
                }),
            ],
        }),
    );

    let shape_type = OrmShapeType {
        shape: "http://example.org/Person".to_string(),
        schema,
    };

    let nuri = NuriV0::new_from(&doc_nuri).expect("parse nuri");
    let (mut receiver, _cancel_fn) = orm_start(nuri.clone(), shape_type.clone(), session_id)
        .await
        .expect("orm_start failed");

    // Drain the subscription until the initial ORM state has arrived; its
    // contents are irrelevant here, we only need to be past initialization.
    while let Some(app_response) = receiver.next().await {
        if let AppResponse::V0(AppResponseV0::OrmInitial(_)) = app_response {
            break;
        }
    }

    // Apply ORM patch: change `type` to something invalid per the schema.
    let patch = vec![OrmPatch {
        op: OrmPatchOp::add,
        path: "/urn:test:person2/type".to_string(),
        valType: None,
        value: Some(json!("InvalidType")),
    }];
    orm_update(nuri.clone(), shape_type.shape.clone(), patch, session_id)
        .await
        .expect("orm_update failed");

    // Expect a delete patch for the (now invalid) root object.
    while let Some(app_response) = receiver.next().await {
        // Any response other than an OrmUpdate at this point is a test failure.
        let patches = match app_response {
            AppResponse::V0(AppResponseV0::OrmUpdate(json)) => json,
            _ => panic!("expected OrmUpdate response"),
        };

        log_info!("Patches arrived:\n");
        for patch in patches.iter() {
            log_info!("{:?}", patch);
        }

        let mut expected = json!([
            {
                "op": "remove",
                "valType": "object",
                "path": "/urn:test:person2",
            },
        ]);
        let mut actual = json!(patches);
        assert_json_eq(&mut expected, &mut actual);
        break;
    }

    log_info!("✓ Test passed: Received object remove patch after patch makes object invalid.");
}

@ -27,35 +27,13 @@ async fn test_orm_patch_creation() {
// Setup wallet and document // Setup wallet and document
let (_wallet, session_id) = create_or_open_wallet().await; let (_wallet, session_id) = create_or_open_wallet().await;
// Tests below all in this test, to prevent waiting times through wallet creation.
// // ===
// test_patch_add_array(session_id).await;
// test_patch_remove_array(session_id).await;
// // ===
// test_patch_add_nested_1(session_id).await;
// ===
test_patch_nested_house_inhabitants(session_id).await; test_patch_nested_house_inhabitants(session_id).await;
// // === test_patch_add_array(session_id).await;
// test_orm_literal(session_id).await;
// // ===
// test_orm_multi_type(session_id).await;
// // ===
// test_orm_nested_1(session_id).await;
// // // ===
// // test_orm_nested_2(session_id).await;
// // // === test_patch_remove_array(session_id).await;
// // test_orm_nested_3(session_id).await;
// // === // test_patch_add_nested_1(session_id).await; // TODO: Edge case not yet fully implemented
// test_orm_nested_4(session_id).await;
} }
async fn test_patch_add_array(session_id: u64) { async fn test_patch_add_array(session_id: u64) {
@ -204,19 +182,11 @@ INSERT DATA {
"op": "add", "op": "add",
"valType": "object", "valType": "object",
"path": "/urn:test:numArrayObj4", "path": "/urn:test:numArrayObj4",
"value": Value::Null
}, },
{ {
"op": "add", "op": "add",
"value": "urn:test:numArrayObj4", "value": "urn:test:numArrayObj4",
"path": "/urn:test:numArrayObj4/@id", "path": "/urn:test:numArrayObj4/@id",
"valType": Value::Null,
},
{
"op": "add",
"value": "http://example.org/TestObject",
"path": "/urn:test:numArrayObj4/type",
"valType": Value::Null,
}, },
{ {
"op": "add", "op": "add",
@ -224,6 +194,11 @@ INSERT DATA {
"value": [0.0], "value": [0.0],
"path": "/urn:test:numArrayObj4/numArray", "path": "/urn:test:numArrayObj4/numArray",
}, },
{
"op": "add",
"value": "http://example.org/TestObject",
"path": "/urn:test:numArrayObj4/type",
},
]); ]);
let mut actual = json!(patches); let mut actual = json!(patches);
@ -556,42 +531,29 @@ INSERT DATA {
{ {
"op": "remove", "op": "remove",
"path": "/urn:test:oj1/multiNest/urn:test:multiNested2/string2", "path": "/urn:test:oj1/multiNest/urn:test:multiNested2/string2",
// "valType": None,
// "value": None,
}, },
{ {
"op": "add", "op": "add",
// "valType": None,
"value": "replacing object shape view", "value": "replacing object shape view",
"path": "/urn:test:oj1/multiNest/urn:test:multiNested2/string1", "path": "/urn:test:oj1/multiNest/urn:test:multiNested2/string1",
}, },
{ {
"op": "add", "op": "add",
"valType": "object", "valType": "object",
// "value": None,
"path": "/urn:test:oj1/multiNest/urn:test:multiNested4", "path": "/urn:test:oj1/multiNest/urn:test:multiNested4",
}, },
{ {
"op": "add", "op": "add",
// "valType": None,
"value": "urn:test:multiNested4", "value": "urn:test:multiNested4",
"path": "/urn:test:oj1/multiNest/urn:test:multiNested4/@id", "path": "/urn:test:oj1/multiNest/urn:test:multiNested4/@id",
}, },
{ {
"op": "add", "op": "add",
// "valType": None,
"value": "multi 4 added", "value": "multi 4 added",
"path": "/urn:test:oj1/multiNest/urn:test:multiNested4/string2", "path": "/urn:test:oj1/multiNest/urn:test:multiNested4/string2",
}, },
{
"op": "remove",
// "valType": None,
// "value": None,
"path": "/urn:test:oj1/singleNest/str",
},
{ {
"op": "add", "op": "add",
// "valType": None,
"value": "Different nested val", "value": "Different nested val",
"path": "/urn:test:oj1/singleNest/str", "path": "/urn:test:oj1/singleNest/str",
}, },
@ -1013,10 +975,6 @@ INSERT DATA {
"path": "/urn:test:house1/inhabitants/urn:test:person1/cat", "path": "/urn:test:house1/inhabitants/urn:test:person1/cat",
}, },
// Bob's cat name changes // Bob's cat name changes
{
"op": "remove",
"path": "/urn:test:house1/inhabitants/urn:test:person2/cat/name",
},
{ {
"op": "add", "op": "add",
"value": "Mr. Mittens", "value": "Mr. Mittens",

Loading…
Cancel
Save