diff --git a/app/nextgraph/src/native-api.ts b/app/nextgraph/src/native-api.ts index 72ef9d20..23968eea 100644 --- a/app/nextgraph/src/native-api.ts +++ b/app/nextgraph/src/native-api.ts @@ -18,6 +18,7 @@ const mapping = { "wallet_gen_shuffle_for_pazzle_opening": ["pazzle_length"], "wallet_gen_shuffle_for_pin": [], "wallet_open_with_pazzle": ["wallet","pazzle","pin"], + "wallet_open_with_password": ["wallet","password"], "wallet_open_with_mnemonic_words": ["wallet","mnemonic_words","pin"], "wallet_open_with_mnemonic": ["wallet","mnemonic","pin"], "wallet_was_opened": ["opened_wallet"], diff --git a/engine/net/src/orm.rs b/engine/net/src/orm.rs index d36b0f60..7e2c166e 100644 --- a/engine/net/src/orm.rs +++ b/engine/net/src/orm.rs @@ -41,8 +41,10 @@ pub enum OrmPatchType { #[derive(Clone, Debug, Serialize, Deserialize)] pub struct OrmPatch { pub op: OrmPatchOp, + #[serde(skip_serializing_if = "Option::is_none")] pub valType: Option, pub path: String, + #[serde(skip_serializing_if = "Option::is_none")] pub value: Option, // TODO: Improve type } diff --git a/engine/verifier/src/orm/add_remove_triples.rs b/engine/verifier/src/orm/add_remove_triples.rs index 4823816b..378bdaff 100644 --- a/engine/verifier/src/orm/add_remove_triples.rs +++ b/engine/verifier/src/orm/add_remove_triples.rs @@ -44,7 +44,6 @@ pub fn add_remove_triples( tracked_predicates: HashMap::new(), parents: HashMap::new(), valid: OrmTrackedSubjectValidity::Pending, - prev_valid: OrmTrackedSubjectValidity::Pending, subject_iri: subject_iri.to_string(), shape: shape.clone(), })) @@ -208,47 +207,8 @@ pub fn add_remove_triples( } else { panic!("tracked_predicate.current_literals must not be None."); } - } else if tracked_predicate - .schema - .dataTypes - .iter() - .any(|dt| dt.valType == OrmSchemaValType::shape) - { - // Remove parent from child and child from tracked children. - // If predicate is of type shape, register (parent -> child) links so that - // nested subjects can later be (lazily) fetched / validated. - let shapes_to_process: Vec<_> = tracked_predicate - .schema - .dataTypes - .iter() - .filter_map(|dt| { - if dt.valType == OrmSchemaValType::shape { - dt.shape.clone() - } else { - None - } - }) - .collect(); - - if let BasicType::Str(obj_iri) = &val_removed { - // Remove link to children - tracked_predicate - .tracked_children - .retain(|ts| *obj_iri != ts.read().unwrap().subject_iri); - - for shape_iri in shapes_to_process { - // Get or create object's tracked subject struct. - let child_shape = schema.get(&shape_iri).unwrap(); - - // Remove self from parent - get_or_create_tracked_subject(&obj_iri, child_shape, tracked_subjects) - .write() - .unwrap() - .parents - .remove(subject_iri); - } - } } + // Parent-child link removal is handled during cleanup since we need to keep them for creating patches. 
} Ok(()) } diff --git a/engine/verifier/src/orm/handle_backend_update.rs b/engine/verifier/src/orm/handle_backend_update.rs index 41760e26..89a28adf 100644 --- a/engine/verifier/src/orm/handle_backend_update.rs +++ b/engine/verifier/src/orm/handle_backend_update.rs @@ -68,10 +68,29 @@ impl Verifier { }) .collect(); + log_info!( + "[orm_backend_update] called with #adds, #removes: {}, {}", + triple_inserts.len(), + triple_removes.len() + ); + + log_info!( + "[orm_backend_update] Total subscription scopes: {}", + self.orm_subscriptions.len() + ); + let mut scopes = vec![]; for (scope, subs) in self.orm_subscriptions.iter_mut() { // Remove old subscriptions + let initial_sub_count = subs.len(); subs.retain(|sub| !sub.sender.is_closed()); + let retained_sub_count = subs.len(); + log_info!( + "[orm_backend_update] Scope {:?}: {} subs ({} retained after cleanup)", + scope, + initial_sub_count, + retained_sub_count + ); if !(scope.target == NuriTargetV0::UserSite || scope @@ -80,11 +99,22 @@ impl Verifier { .map_or(false, |ol| overlaylink == *ol) || scope.target == NuriTargetV0::Repo(repo_id)) { + log_info!( + "[orm_backend_update] SKIPPING scope {:?} - does not match repo_id={:?} or overlay={:?}", + scope, + repo_id, + overlay_id + ); continue; } + log_info!( + "[orm_backend_update] PROCESSING scope {:?} - matches criteria", + scope + ); + // prepare to apply updates to tracked subjects and record the changes. - let root_shapes_and_tracked_subjects = subs + let root_shapes_and_tracked_shapes = subs .iter() .map(|sub| { ( @@ -98,33 +128,76 @@ impl Verifier { }) .collect::>(); - scopes.push((scope.clone(), root_shapes_and_tracked_subjects)); + scopes.push((scope.clone(), root_shapes_and_tracked_shapes)); } log_debug!( "[orm_backend_update], creating patch objects for #scopes {}", scopes.len() ); + + if scopes.is_empty() { + log_info!("[orm_backend_update] NO SCOPES MATCHED - returning early without patches"); + return; + } + for (scope, shapes_zip) in scopes { let mut orm_changes: OrmChanges = HashMap::new(); + log_info!( + "[orm_backend_update] Processing scope {:?} with {} shape types", + scope, + shapes_zip.len() + ); + // Apply the changes to tracked subjects. - for (root_shape_arc, all_shapes) in shapes_zip { let shape_iri = root_shape_arc.iri.clone(); + log_info!( + "[orm_backend_update] Calling process_changes_for_shape_and_session for shape={}, session={}", + shape_iri, + session_id + ); let _ = self.process_changes_for_shape_and_session( &scope, &shape_iri, - all_shapes, + if all_tracked_shapes.len() > 0 { + all_tracked_shapes + } else { + // If there are no tracked shapes yet, we need to add the root shape manually.
+ vec![root_shape_arc] + }, session_id, &triple_inserts, &triple_removes, &mut orm_changes, false, ); + log_info!( + "[orm_backend_update] After process_changes_for_shape_and_session: orm_changes has {} shapes", + orm_changes.len() + ); + } + + log_info!( + "[orm_backend_update] Total orm_changes for scope: {} shapes with changes", + orm_changes.len() + ); + for (shape_iri, subject_changes) in &orm_changes { + log_info!( + "[orm_backend_update] Shape {}: {} subjects changed", + shape_iri, + subject_changes.len() + ); } - let subs = self.orm_subscriptions.get(&scope).unwrap(); - for sub in subs.iter() { + let subs = self.orm_subscriptions.get_mut(&scope).unwrap(); + log_info!( + "[orm_backend_update] Processing {} subscriptions for this scope", + subs.len() + ); + + for sub in subs.iter_mut() { log_debug!( "Applying changes to subscription with nuri {} and shape {}", sub.nuri.repo(), @@ -158,52 +231,109 @@ impl Verifier { // Process changes for this subscription // Iterate over all changes and create patches + log_info!( + "[orm_backend_update] Iterating over {} shapes in orm_changes", + orm_changes.len() + ); + for (shape_iri, subject_changes) in &orm_changes { + log_info!( + "[orm_backend_update] Processing shape {}: {} subject changes", + shape_iri, + subject_changes.len() + ); + for (subject_iri, change) in subject_changes { log_debug!( - "Patch creating for subject change {}. #changed preds: {}", + "Patch creating for subject change x shape {} x {}. #changed preds: {}", subject_iri, + shape_iri, change.predicates.len() ); // Get the tracked subject for this (subject, shape) pair - let tracked_subject = sub + let Some(tracked_subject) = sub .tracked_subjects .get(subject_iri) .and_then(|shapes| shapes.get(shape_iri)) .map(|ts| ts.read().unwrap()) - .unwrap(); + else { + // We might not be tracking this subject x shape combination. Then, there is nothing to do. + log_info!( + "[orm_backend_update] SKIPPING subject {} x shape {} - not tracked in this subscription", + subject_iri, + shape_iri + ); + continue; + }; + + log_debug!( + " - Validity check: prev_valid={:?}, valid={:?}", + change.prev_valid, + tracked_subject.valid + ); // Now we have the tracked predicate (containing the shape) and the change. // Check validity changes - if tracked_subject.prev_valid == OrmTrackedSubjectValidity::Invalid + if change.prev_valid == OrmTrackedSubjectValidity::Invalid && tracked_subject.valid == OrmTrackedSubjectValidity::Invalid { // Is the subject invalid and was it before? There is nothing we need to inform about. + log_info!( + "[orm_backend_update] SKIPPING subject {} - was and still is Invalid", + subject_iri + ); continue; - } else if tracked_subject.prev_valid == OrmTrackedSubjectValidity::Valid - && tracked_subject.valid == OrmTrackedSubjectValidity::Invalid - || tracked_subject.valid == OrmTrackedSubjectValidity::Untracked + } else if change.prev_valid == OrmTrackedSubjectValidity::Valid + && tracked_subject.valid != OrmTrackedSubjectValidity::Valid { - // Has the subject become invalid or untracked? - // We add a patch, deleting the object at its root. - let mut path: Vec; - if tracked_subject.parents.is_empty() { - // If this is a root object, we need to add the object's id itself. 
- path = vec![tracked_subject.subject_iri.clone()]; - } else { - path = vec![]; - } + log_info!( + "[orm_backend_update] Subject {} became invalid or untracked (prev={:?}, now={:?})", + subject_iri, + change.prev_valid, + tracked_subject.valid + ); - build_path_to_root_and_create_patches( - &tracked_subject, - &sub.tracked_subjects, - &sub.shape_type.shape, - &mut path, - (OrmPatchOp::remove, Some(OrmPatchType::object), None, None), - &mut patches, - &mut objects_to_create, + // Has the subject become invalid or untracked? + // Check if any parent is also being deleted - if so, skip this deletion patch + // because the parent deletion will implicitly delete the children + let has_parent_being_deleted = + tracked_subject.parents.values().any(|parent_arc| { + let parent_ts = parent_arc.read().unwrap(); + parent_ts.valid == OrmTrackedSubjectValidity::ToDelete + }); + + log_info!( + "[orm_backend_update] has_parent_being_deleted={}", + has_parent_being_deleted ); + + if !has_parent_being_deleted { + // We add a patch, deleting the object at its root. + // Start with an empty path - the subject IRI will be added in build_path_to_root_and_create_patches + let mut path = vec![]; + + build_path_to_root_and_create_patches( + &tracked_subject, + &sub.tracked_subjects, + &sub.shape_type.shape, + &mut path, + (OrmPatchOp::remove, Some(OrmPatchType::object), None, None), + &mut patches, + &mut objects_to_create, + &change.prev_valid, + &orm_changes, + &tracked_subject.subject_iri, + ); + } } else { + log_info!( + "[orm_backend_update] Subject {} is valid or became valid (prev={:?}, now={:?}), processing {} predicate changes", + subject_iri, + change.prev_valid, + tracked_subject.valid, + change.predicates.len() + ); + // The subject is valid or has become valid. // Process each predicate change for (_pred_iri, pred_change) in &change.predicates { @@ -221,6 +351,12 @@ impl Verifier { // Get the diff operations for this predicate change let diff_ops = create_diff_ops_from_predicate_change(pred_change); + log_info!( + "[orm_backend_update] Created {} diff_ops for predicate {}", + diff_ops.len(), + _pred_iri + ); + // For each diff operation, traverse up to the root to build the path for diff_op in diff_ops { let mut path = vec![pred_name.clone()]; @@ -234,6 +370,9 @@ impl Verifier { diff_op, &mut patches, &mut objects_to_create, + &change.prev_valid, + &orm_changes, + &tracked_subject.subject_iri, ); } } @@ -241,12 +380,18 @@ impl Verifier { } } + log_info!( + "[orm_backend_update] Finished iterating shapes. Created {} patches, {} objects_to_create", + patches.len(), + objects_to_create.len() + ); + // Create patches for objects that need to be created // These are patches with {op: add, valType: object, value: Null, path: ...} // Sort by path length (shorter first) to ensure parent objects are created before children let mut sorted_objects: Vec<_> = objects_to_create.iter().collect(); sorted_objects.sort_by_key(|(path_segments, _)| path_segments.len()); - + let mut object_create_patches = vec![]; for (path_segments, maybe_iri) in sorted_objects { let escaped_path: Vec = path_segments .iter() @@ -254,8 +399,8 @@ impl Verifier { .collect(); let json_pointer = format!("/{}", escaped_path.join("/")); - // Always create the object itself - patches.push(OrmPatch { + // Always create the object itself. 
+ object_create_patches.push(OrmPatch { op: OrmPatchOp::add, valType: Some(OrmPatchType::object), path: json_pointer.clone(), @@ -264,7 +409,7 @@ impl Verifier { // If this object has an IRI (it's a real subject), add the id field if let Some(iri) = maybe_iri { - patches.push(OrmPatch { + object_create_patches.push(OrmPatch { op: OrmPatchOp::add, valType: None, path: format!("{}/@id", json_pointer), @@ -273,53 +418,73 @@ impl Verifier { } } + log_info!( + "[orm_backend_update] Created {} object_create_patches", + object_create_patches.len() + ); + + let total_patches = object_create_patches.len() + patches.len(); + log_info!( + "[orm_backend_update] SENDING {} total patches to frontend (session={}, nuri={}, shape={})", + total_patches, + session_id, + sub.nuri.repo(), + sub.shape_type.shape + ); + // Send response with patches. let _ = sub .sender .clone() - .send(AppResponse::V0(AppResponseV0::OrmUpdate(patches.to_vec()))) + .send(AppResponse::V0(AppResponseV0::OrmUpdate( + [object_create_patches, patches].concat(), + ))) .await; + + log_info!("[orm_backend_update] Patches sent successfully"); + + // Cleanup (remove tracked subjects to be deleted). + Verifier::cleanup_tracked_subjects(sub); } + + log_info!( + "[orm_backend_update] Finished processing all subscriptions for scope {:?}", + scope + ); } + + log_info!("[orm_backend_update] COMPLETE - processed all scopes"); } } /// Queue patches for a newly valid tracked subject. /// This handles creating object patches and id field patches for subjects that have become valid. -fn queue_patches_for_newly_valid_subject( - tracked_subject: &OrmTrackedSubject, +fn queue_objects_to_create( + current_ts: &OrmTrackedSubject, tracked_subjects: &HashMap>>>, root_shape: &String, path: &[String], - patches: &mut Vec, objects_to_create: &mut HashSet<(Vec, Option)>, + orm_changes: &OrmChanges, + child_iri: &String, ) { // Check if we're at a root subject or need to traverse to parents - if tracked_subject.parents.is_empty() || tracked_subject.shape.iri == *root_shape { - // Register object for creation. - // Path to object consists of this subject's iri and the path except for the last element. - let mut path_to_subject = vec![tracked_subject.subject_iri.clone()]; - if path.len() > 1 { - path_to_subject.extend_from_slice(&path[..path.len() - 1]); - } - - // log_debug!("Queuing object creation for path: {:?}", path_to_subject); - - // Always create the object itself with its IRI - objects_to_create.insert(( - path_to_subject.clone(), - Some(tracked_subject.subject_iri.clone()), - )); + if current_ts.parents.is_empty() || current_ts.shape.iri == *root_shape { + // We are at the root. Insert without the last element (which is the property name). 
+ objects_to_create.insert((path[..path.len() - 1].to_vec(), Some(child_iri.clone()))); } else { // Not at root: traverse to parents and create object patches along the way - for (_parent_iri, parent_tracked_subject) in tracked_subject.parents.iter() { + for (_parent_iri, parent_tracked_subject) in current_ts.parents.iter() { let parent_ts = parent_tracked_subject.read().unwrap(); - if let Some(new_path) = build_path_segment_for_parent(tracked_subject, &parent_ts, path) - { + if let Some(new_path) = build_path_segment_for_parent(current_ts, &parent_ts, path) { // Check if the parent's predicate is multi-valued and if no siblings were previously valid let should_create_parent_predicate_object = - check_should_create_parent_predicate_object(tracked_subject, &parent_ts); + check_should_create_parent_predicate_object( + current_ts, + &parent_ts, + orm_changes, + ); if should_create_parent_predicate_object { // Need to create an intermediate object for the multi-valued predicate @@ -331,18 +496,18 @@ fn queue_patches_for_newly_valid_subject( } // Recurse to the parent first - queue_patches_for_newly_valid_subject( + queue_objects_to_create( &parent_ts, tracked_subjects, root_shape, &new_path, - patches, objects_to_create, + orm_changes, + child_iri, ); // Register this object for creation with its IRI - objects_to_create - .insert((new_path.clone(), Some(tracked_subject.subject_iri.clone()))); + objects_to_create.insert((new_path.clone(), Some(current_ts.subject_iri.clone()))); } } } @@ -353,6 +518,7 @@ fn queue_patches_for_newly_valid_subject( fn check_should_create_parent_predicate_object( tracked_subject: &OrmTrackedSubject, parent_ts: &OrmTrackedSubject, + orm_changes: &OrmChanges, ) -> bool { // Find the predicate schema linking parent to this subject for pred_arc in &parent_ts.shape.predicates { @@ -369,11 +535,22 @@ fn check_should_create_parent_predicate_object( let is_multi = pred_arc.maxCardinality > 1 || pred_arc.maxCardinality == -1; if is_multi { - // Check if any siblings were previously valid + // Check if any siblings were previously valid. + // If not, the intermediate object does not exist yet. 
let any_sibling_was_valid = tp.tracked_children.iter().any(|child| { let child_read = child.read().unwrap(); - child_read.subject_iri != tracked_subject.subject_iri - && child_read.prev_valid == OrmTrackedSubjectValidity::Valid + if child_read.subject_iri == tracked_subject.subject_iri { + return false; + } + + // Look up the prev_valid from orm_changes + let prev_valid = orm_changes + .get(&child_read.shape.iri) + .and_then(|subjects| subjects.get(&child_read.subject_iri)) + .map(|change| &change.prev_valid) + .unwrap_or(&OrmTrackedSubjectValidity::Valid); + + *prev_valid == OrmTrackedSubjectValidity::Valid }); return !any_sibling_was_valid; @@ -442,14 +619,21 @@ fn build_path_to_root_and_create_patches( ), patches: &mut Vec, objects_to_create: &mut HashSet<(Vec, Option)>, + prev_valid: &OrmTrackedSubjectValidity, + orm_changes: &OrmChanges, + child_iri: &String, ) { log_debug!( - " - build path, ts: {}, path {:?}", + " - build path, ts: {}, path {:?}, #parents: {}, shape: {}", tracked_subject.subject_iri, - path + path, + tracked_subject.parents.len(), + tracked_subject.shape.iri ); // If the tracked subject is not valid, we don't create patches for it - if tracked_subject.valid != OrmTrackedSubjectValidity::Valid { + // EXCEPT when we're removing the object itself (indicated by op == remove and valType == object) + let is_delete_op = diff_op.0 == OrmPatchOp::remove && diff_op.1 == Some(OrmPatchType::object); + if tracked_subject.valid != OrmTrackedSubjectValidity::Valid && !is_delete_op { return; } @@ -457,12 +641,19 @@ fn build_path_to_root_and_create_patches( if tracked_subject.parents.is_empty() || tracked_subject.shape.iri == *root_shape { // Build the final JSON Pointer path let escaped_path: Vec = path.iter().map(|seg| escape_json_pointer(seg)).collect(); - // Always add the root subject to the path. - let json_pointer = format!( - "/{}/{}", - escape_json_pointer(&tracked_subject.subject_iri), - escaped_path.join("/") - ); + + // Create the JSON pointer path + let json_pointer = if escaped_path.is_empty() { + // For root object operations (no path elements), just use the subject IRI + format!("/{}", escape_json_pointer(&tracked_subject.subject_iri)) + } else { + // For nested operations, include both subject and path + format!( + "/{}/{}", + escape_json_pointer(&tracked_subject.subject_iri), + escaped_path.join("/") + ) + }; // Create the patch for the actual value change patches.push(OrmPatch { @@ -473,16 +664,17 @@ fn build_path_to_root_and_create_patches( }); // If the subject is newly valid, now we have the full path to queue its creation. 
- if tracked_subject.prev_valid != OrmTrackedSubjectValidity::Valid { + if *prev_valid != OrmTrackedSubjectValidity::Valid { let mut final_path = vec![tracked_subject.subject_iri.clone()]; final_path.extend_from_slice(path); - queue_patches_for_newly_valid_subject( + queue_objects_to_create( tracked_subject, tracked_subjects, root_shape, &final_path, - patches, objects_to_create, + orm_changes, + child_iri, ); } @@ -505,6 +697,15 @@ fn build_path_to_root_and_create_patches( diff_op.clone(), patches, objects_to_create, + prev_valid, + orm_changes, + child_iri, + ); + } else { + log_debug!( + " - build_path_segment_for_parent returned None for parent: {}, child: {}", + parent_ts.subject_iri, + tracked_subject.subject_iri ); } } diff --git a/engine/verifier/src/orm/handle_frontend_update.rs b/engine/verifier/src/orm/handle_frontend_update.rs index 6c6ce4c8..a85c472f 100644 --- a/engine/verifier/src/orm/handle_frontend_update.rs +++ b/engine/verifier/src/orm/handle_frontend_update.rs @@ -24,10 +24,7 @@ use crate::types::GraphQuadsPatch; use crate::verifier::*; impl Verifier { - /// After creating new objects (without an id) in JS-land, - /// we send the generated id for those back. - /// If something went wrong (revert_inserts / revert_removes not empty), - /// we send a JSON patch back to revert the made changes. + /// If the frontend update failed (revert_inserts / revert_removes not empty), revert the applied changes (currently a TODO). pub(crate) async fn orm_update_self( &mut self, scope: &NuriV0, @@ -40,15 +37,18 @@ impl Verifier { let (mut sender, _orm_subscription) = self.get_first_orm_subscription_sender_for(scope, Some(&shape_iri), Some(&session_id))?; + log_info!("[orm_update_self] got subscription"); + // Revert changes, if there. if revert_inserts.len() > 0 || revert_removes.len() > 0 { let revert_changes = GraphQuadsPatch { inserts: revert_removes, removes: revert_inserts, }; - - // TODO: Call with correct params. - // self.orm_backend_update(session_id, scope, "", revert_changes) + log_info!("[orm_update_self] Reverting triples, calling orm_backend_update. TODO"); + // TODO + // self.orm_backend_update(session_id, scope, "", revert_changes); + log_info!("[orm_update_self] Triples reverted."); } Ok(()) @@ -63,7 +63,7 @@ impl Verifier { diff: OrmPatches, ) -> Result<(), String> { log_info!( - "frontend_update_orm session={} shape={} diff={:?}", + "[orm_frontend_update] session={} shape={} diff={:?}", session_id, shape_iri, diff @@ -74,12 +74,17 @@ impl Verifier { self.get_first_orm_subscription_for(scope, Some(&shape_iri), Some(&session_id)); let doc_nuri = orm_subscription.nuri.clone(); + log_info!("[orm_frontend_update] got subscription"); + let sparql_update = create_sparql_update_query_for_diff(orm_subscription, diff); + log_info!( + "[orm_frontend_update] created sparql_update query:\n{}", + sparql_update + ); (doc_nuri, sparql_update) }; - log_debug!("Created SPARQL query for patches:\n{}", sparql_update); match self .process_sparql_update( &doc_nuri, @@ -90,8 +95,17 @@ impl Verifier { ) .await { - Err(e) => Err(e), + Err(e) => { + log_info!("[orm_frontend_update] query failed"); + + Err(e) + } Ok((_, revert_inserts, revert_removes, skolemnized_blank_nodes)) => { + log_info!( + "[orm_frontend_update] query successful. Reverts?
{}", + revert_inserts.len() + ); + if !revert_inserts.is_empty() || !revert_removes.is_empty() || !skolemnized_blank_nodes.is_empty() @@ -117,6 +131,11 @@ fn create_sparql_update_query_for_diff( orm_subscription: &OrmSubscription, diff: OrmPatches, ) -> String { + log_info!( + "[create_sparql_update_query_for_diff] Starting with {} patches", + diff.len() + ); + // First sort patches. // - Process delete patches first. // - Process object creation add operations before rest, to ensure potential blank nodes are created. @@ -124,17 +143,27 @@ fn create_sparql_update_query_for_diff( .iter() .filter(|patch| patch.op == OrmPatchOp::remove) .collect(); - let add_object_patches: Vec<_> = diff - .iter() - .filter(|patch| { - patch.op == OrmPatchOp::add - && match &patch.valType { - Some(vt) => *vt == OrmPatchType::object, - _ => false, - } - }) - .collect(); - let add_literal_patches: Vec<_> = diff + log_info!( + "[create_sparql_update_query_for_diff] Found {} delete patches", + delete_patches.len() + ); + + // let add_object_patches: Vec<_> = diff + // .iter() + // .filter(|patch| { + // patch.op == OrmPatchOp::add + // && match &patch.valType { + // Some(vt) => *vt == OrmPatchType::object, + // _ => false, + // } + // }) + // .collect(); + // log_info!( + // "[create_sparql_update_query_for_diff] Found {} add object patches", + // add_object_patches.len() + // ); + + let add_primitive_patches: Vec<_> = diff .iter() .filter(|patch| { patch.op == OrmPatchOp::add @@ -144,26 +173,37 @@ fn create_sparql_update_query_for_diff( } }) .collect(); + log_info!( + "[create_sparql_update_query_for_diff] Found {} add primitive patches", + add_primitive_patches.len() + ); // For each diff op, we create a separate INSERT or DELETE block. let mut sparql_sub_queries: Vec = vec![]; // Create delete statements. // - for del_patch in delete_patches.iter() { + for (idx, del_patch) in delete_patches.iter().enumerate() { + log_info!( + "[create_sparql_update_query_for_diff] Processing delete patch {}/{}: path={}", + idx + 1, + delete_patches.len(), + del_patch.path + ); + let mut var_counter: i32 = 0; let (where_statements, target, _pred_schema) = create_where_statements_for_patch(&del_patch, &mut var_counter, &orm_subscription); let (subject_var, target_predicate, target_object) = target; + log_info!("[create_sparql_update_query_for_diff] Delete patch where_statements: {:?}, subject_var={}, target_predicate={}, target_object={:?}", + where_statements, subject_var, target_predicate, target_object); + let delete_statement; if let Some(target_object) = target_object { // Delete the link to exactly one object (IRI referenced in path, i.e. target_object) - delete_statement = format!( - " {} <{}> <{}> .", - subject_var, target_predicate, target_object - ) + delete_statement = format!(" {} {} {} .", subject_var, target_predicate, target_object) } else { // Delete object or literal referenced by property name. let delete_val = match &del_patch.value { @@ -175,7 +215,7 @@ fn create_sparql_update_query_for_diff( // Delete the specific values only. Some(val) => json_to_sparql_val(&val), // Can be one or more (joined with ", "). 
}; - delete_statement = format!(" {} <{}> {} .", subject_var, target_predicate, delete_val); + delete_statement = format!(" {} {} {} .", subject_var, target_predicate, delete_val); } sparql_sub_queries.push(format!( @@ -183,20 +223,22 @@ fn create_sparql_update_query_for_diff( delete_statement, where_statements.join(" .\n ") )); + log_info!( + "[create_sparql_update_query_for_diff] Added delete query #{}", + sparql_sub_queries.len() + ); } - // Process add object patches (might need blank nodes) + // Process primitive add patches // - for _add_obj_patch in add_object_patches { - // Creating objects without an id field is only supported in one circumstance: - // An object is added to a property which has a max cardinality of one, e.g. `painting.artist`. - // In that case, we create a blank node. - // TODO: We need to set up a list of created blank nodes and where they belong to. - } + for (idx, add_patch) in add_primitive_patches.iter().enumerate() { + log_info!( + "[create_sparql_update_query_for_diff] Processing add primitive patch {}/{}: path={}", + idx + 1, + add_primitive_patches.len(), + add_patch.path + ); - // Process literal add patches - // - for add_patch in add_literal_patches { let mut var_counter: i32 = 0; // Create WHERE statements from path. @@ -204,8 +246,12 @@ fn create_sparql_update_query_for_diff( create_where_statements_for_patch(&add_patch, &mut var_counter, &orm_subscription); let (subject_var, target_predicate, target_object) = target; + log_info!("[create_sparql_update_query_for_diff] Add patch where_statements: {:?}, subject_var={}, target_predicate={}, target_object={:?}", + where_statements, subject_var, target_predicate, target_object); + if let Some(_target_object) = target_object { // Reference to exactly one object found. This is invalid when inserting literals. + log_info!("[create_sparql_update_query_for_diff] SKIPPING: target_object found for literal add (invalid)"); // TODO: Return error? continue; } else { @@ -215,6 +261,7 @@ fn create_sparql_update_query_for_diff( Some(val) => json_to_sparql_val(&val), // Can be one or more (joined with ", "). None => { // A value must be set. This patch is invalid. + log_info!("[create_sparql_update_query_for_diff] SKIPPING: No value in add patch (invalid)"); // TODO: Return error? continue; } @@ -225,8 +272,9 @@ fn create_sparql_update_query_for_diff( // If the schema only has max one value, // then `add` can also overwrite values, so we need to delete the previous one if !pred_schema.unwrap().is_multi() { + log_info!("[create_sparql_update_query_for_diff] Single-value predicate, adding DELETE before INSERT"); let remove_statement = - format!(" {} <{}> ?o{}", subject_var, target_predicate, var_counter); + format!(" {} {} ?o{}", subject_var, target_predicate, var_counter); let mut wheres = where_statements.clone(); wheres.push(remove_statement.clone()); @@ -236,18 +284,24 @@ fn create_sparql_update_query_for_diff( remove_statement, wheres.join(" .\n ") )); + log_info!("[create_sparql_update_query_for_diff] Added delete query."); // var_counter += 1; // Not necessary because not used afterwards. } // The actual INSERT. - let add_statement = format!(" {} <{}> {} .", subject_var, target_predicate, add_val); + let add_statement = format!(" {} {} {} .", subject_var, target_predicate, add_val); sparql_sub_queries.push(format!( "INSERT {{\n{}\n}} WHERE {{\n {}\n}}", add_statement, where_statements.join(". 
\n ") )); + log_info!("[create_sparql_update_query_for_diff] Added insert query."); } } + log_info!( + "[create_sparql_update_query_for_diff] Finished. Generated {} sub-queries", + sparql_sub_queries.len() + ); return sparql_sub_queries.join(";\n"); } @@ -294,6 +348,12 @@ fn create_where_statements_for_patch( (String, String, Option), Option>, ) { + log_info!( + "[create_where_statements_for_patch] Starting. patch.path={}, patch.op={:?}", + patch.path, + patch.op + ); + let mut body_statements: Vec = vec![]; let mut where_statements: Vec = vec![]; @@ -303,9 +363,20 @@ fn create_where_statements_for_patch( .map(|s| decode_json_pointer(&s.to_string())) .collect(); + log_info!( + "[create_where_statements_for_patch] Decoded path into {} segments: {:?}", + path.len(), + path + ); + path.remove(0); + // Handle special case: The whole object is deleted. if path.len() == 1 { let root_iri = &path[0]; + log_info!( + "[create_where_statements_for_patch] Special case: whole object deletion for root_iri={}", + root_iri + ); body_statements.push(format!("<{}> ?p ?o", root_iri)); where_statements.push(format!("<{}> ?p ?o", root_iri)); return ( @@ -315,27 +386,58 @@ fn create_where_statements_for_patch( ); } + log_info!( + "[create_where_statements_for_patch] Getting root schema for shape={}", + orm_subscription.shape_type.shape + ); let subj_schema: &Arc = orm_subscription .shape_type .schema .get(&orm_subscription.shape_type.shape) .unwrap(); + log_info!("[create_where_statements_for_patch] Root schema found"); let mut current_subj_schema: Arc = subj_schema.clone(); // The root IRI might change, if the parent path segment was an IRI. let root_iri = path.remove(0); let mut subject_ref = format!("<{}>", root_iri); + log_info!( + "[create_where_statements_for_patch] Starting traversal from root_iri={}, remaining path segments={}", + root_iri, + path.len() + ); while path.len() > 0 { let pred_name = path.remove(0); + log_info!( + "[create_where_statements_for_patch] Processing path segment: pred_name={}, remaining={}", + pred_name, + path.len() + ); + + log_info!( + "[create_where_statements_for_patch] Looking up predicate schema for name={}", + pred_name + ); let pred_schema = find_pred_schema_by_name(&pred_name, ¤t_subj_schema); + log_info!( + "[create_where_statements_for_patch] Found predicate schema: iri={}, is_object={}, is_multi={}", + pred_schema.iri, + pred_schema.is_object(), + pred_schema.is_multi() + ); // Case: We arrived at a leaf value. if path.len() == 0 { + log_info!( + "[create_where_statements_for_patch] Reached leaf value. Returning target: subject_ref={}, predicate={}", + subject_ref, + pred_schema.iri + ); return ( where_statements, - (subject_ref, pred_schema.iri.clone(), None), + (subject_ref, format!("<{}>", pred_schema.iri.clone()), None), Some(pred_schema), ); } @@ -346,6 +448,12 @@ fn create_where_statements_for_patch( "{} <{}> ?o{}", subject_ref, pred_schema.iri, var_counter, )); + log_info!( + "[create_where_statements_for_patch] Added where statement for nested object: {} <{}> ?o{}", + subject_ref, + pred_schema.iri, + var_counter + ); // Update the subject_ref for traversal (e.g. ?o1 . 
?o1 Cat); subject_ref = format!("?o{}", var_counter); @@ -358,31 +466,58 @@ fn create_where_statements_for_patch( ); } if pred_schema.is_multi() { + log_info!("[create_where_statements_for_patch] Predicate is multi-valued, expecting object IRI in path"); let object_iri = path.remove(0); + log_info!( + "[create_where_statements_for_patch] Got object_iri={}, remaining path={}", + object_iri, + path.len() + ); // Path ends on an object IRI, which we return here as well. if path.len() == 0 { + log_info!( + "[create_where_statements_for_patch] Path ends on object IRI. Returning target with object={}", + object_iri + ); return ( where_statements, - (subject_ref, pred_schema.iri.clone(), Some(object_iri)), + ( + subject_ref, + format!("<{}>", pred_schema.iri.clone()), + Some(format!("<{}>", object_iri)), + ), Some(pred_schema), ); } + log_info!( + "[create_where_statements_for_patch] Getting child schema for object_iri={}", + object_iri + ); current_subj_schema = get_first_child_schema(Some(&object_iri), &pred_schema, &orm_subscription); + log_info!("[create_where_statements_for_patch] Child schema found"); // Since we have new IRI that we can use as root, we replace the current one with it. subject_ref = format!("<{object_iri}>"); // And can clear all, now unnecessary where statements. where_statements.clear(); + log_info!( + "[create_where_statements_for_patch] Reset subject_ref to <{}> and cleared where statements", + object_iri + ); } else { // Set to child subject schema. // TODO: Actually, we should get the tracked subject and check for the correct shape there. // As long as there is only one allowed shape or the first one is valid, this is fine. + log_info!("[create_where_statements_for_patch] Predicate is single-valued, getting child schema"); + current_subj_schema = get_first_child_schema(None, &pred_schema, &orm_subscription); + log_info!("[create_where_statements_for_patch] Child schema found"); } } // Can't happen. + log_err!("[create_where_statements_for_patch] PANIC: Reached end of function unexpectedly (should be impossible)"); panic!(); } diff --git a/engine/verifier/src/orm/initialize.rs b/engine/verifier/src/orm/initialize.rs index 5e158e4e..aa796b9d 100644 --- a/engine/verifier/src/orm/initialize.rs +++ b/engine/verifier/src/orm/initialize.rs @@ -62,7 +62,6 @@ impl Verifier { .push(orm_subscription); let orm_objects = self.create_orm_object_for_shape(nuri, session_id, &shape_type)?; - // log_debug!("create_orm_object_for_shape return {:?}", orm_objects); let _ = tx .send(AppResponse::V0(AppResponseV0::OrmInitial(orm_objects))) diff --git a/engine/verifier/src/orm/process_changes.rs b/engine/verifier/src/orm/process_changes.rs index eee48788..4005985a 100644 --- a/engine/verifier/src/orm/process_changes.rs +++ b/engine/verifier/src/orm/process_changes.rs @@ -96,18 +96,39 @@ impl Verifier { orm_changes: &mut OrmChanges, data_already_fetched: bool, ) -> Result<(), NgError> { + log_info!( + "[process_changes_for_shape_and_session] Starting processing for nuri, root_shape: {}, session: {}, {} shapes, {} triples added, {} triples removed, data_already_fetched: {}", + root_shape_iri, + session_id, + shapes.len(), + triples_added.len(), + triples_removed.len(), + data_already_fetched + ); + // First in, last out stack to keep track of objects to validate (nested objects first). Strings are object IRIs. let mut shape_validation_stack: Vec<(Arc, Vec)> = vec![]; // Track (shape_iri, subject_iri) pairs currently being validated to prevent cycles and double evaluation. 
let mut currently_validating: HashSet<(String, String)> = HashSet::new(); // Add root shape for first validation run. for shape in shapes { + log_info!( + "[process_changes_for_shape_and_session] Adding root shape to validation stack: {}", + shape.iri + ); shape_validation_stack.push((shape, vec![])); } // Process queue of shapes and subjects to validate. // For a given shape, we evaluate every subject against that shape. while let Some((shape, objects_to_validate)) = shape_validation_stack.pop() { + log_info!( + "[process_changes_for_shape_and_session] Processing shape from stack: {}, with {} objects to validate: {:?}", + shape.iri, + objects_to_validate.len(), + objects_to_validate + ); + // Collect triples relevant for validation. let added_triples_by_subject = group_by_subject_for_shape(&shape, triples_added, &objects_to_validate); @@ -118,13 +139,20 @@ impl Verifier { .chain(removed_triples_by_subject.keys()) .collect(); + log_info!( + "[process_changes_for_shape_and_session] Found {} modified subjects for shape {}: {:?}", + modified_subject_iris.len(), + shape.iri, + modified_subject_iris + ); + // Variable to collect nested objects that need validation. let mut nested_objects_to_eval: HashMap> = HashMap::new(); // For each subject, add/remove triples and validate. - log_debug!( - "processing modified subjects: {:?} against shape: {}", + log_info!( + "[process_changes_for_shape_and_session] processing modified subjects: {:?} against shape: {}", modified_subject_iris, shape.iri ); @@ -136,7 +164,7 @@ impl Verifier { // Cycle detection: Check if this (shape, subject) pair is already being validated if currently_validating.contains(&validation_key) { log_warn!( - "Cycle detected: subject '{}' with shape '{}' is already being validated. Marking as invalid.", + "[process_changes_for_shape_and_session] Cycle detected: subject '{}' with shape '{}' is already being validated. Marking as invalid.", subject_iri, shape.iri ); @@ -162,7 +190,7 @@ impl Verifier { // Mark as currently validating currently_validating.insert(validation_key.clone()); - // Get triples of subject (added & removed). + // Get triple changes for subject (added & removed). let triples_added_for_subj = added_triples_by_subject .get(*subject_iri) .map(|v| v.as_slice()) @@ -177,31 +205,36 @@ impl Verifier { .entry(shape.iri.clone()) .or_insert_with(HashMap::new) .entry((*subject_iri).clone()) - .or_insert_with(|| OrmTrackedSubjectChange { - subject_iri: (*subject_iri).clone(), - predicates: HashMap::new(), - data_applied: false, - }); + .or_insert_with(|| { + // Create a new change record. + // This includes the previous validity and triple changes. + let orm_subscription = self + .orm_subscriptions + .get_mut(nuri) + .unwrap() + .iter_mut() + .find(|sub| { + sub.shape_type.shape == *root_shape_iri + && sub.session_id == session_id + }) + .unwrap(); - // Apply all triples for that subject to the tracked (shape, subject) pair. - // Record the changes. 
- { - let orm_subscription = self - .orm_subscriptions - .get_mut(nuri) - .unwrap() - .iter_mut() - .find(|sub| { - sub.shape_type.shape == *root_shape_iri && sub.session_id == session_id - }) - .unwrap(); + log_info!("[process_changes_for_shape_and_session] Creating change object for {}, {}", subject_iri, shape.iri); + let prev_valid = match orm_subscription + .tracked_subjects + .get(*subject_iri) + .and_then(|shapes| shapes.get(&shape.iri)) + { + Some(tracked_subject) => tracked_subject.read().unwrap().valid.clone(), + None => OrmTrackedSubjectValidity::Pending, + }; - // Update tracked subjects and modify change objects. - if !change.data_applied { - log_debug!( - "Adding triples to change tracker for subject {}", - subject_iri - ); + let mut change = OrmTrackedSubjectChange { + subject_iri: (*subject_iri).clone(), + predicates: HashMap::new(), + is_validated: false, + prev_valid, + }; if let Err(e) = add_remove_triples( shape.clone(), @@ -209,46 +242,42 @@ impl Verifier { triples_added_for_subj, triples_removed_for_subj, orm_subscription, - change, + &mut change, ) { log_err!("apply_changes_from_triples add/remove error: {:?}", e); panic!(); } - change.data_applied = true; - } - // Check if this is the first evaluation round - In that case, set old validity to new one. - // if the object was already validated, don't do so again. - { - let tracked_subject = &mut orm_subscription - .tracked_subjects - .get(*subject_iri) - .unwrap() - .get(&shape.iri) - .unwrap() - .write() - .unwrap(); - - // First run - if !change.data_applied - && tracked_subject.valid != OrmTrackedSubjectValidity::Pending - { - tracked_subject.prev_valid = tracked_subject.valid.clone(); - } + change + }); - if change.data_applied { - log_debug!("not applying triples again for subject {subject_iri}"); + // If validation took place already, there's nothing more to do... + if change.is_validated { + log_info!( + "[process_changes_for_shape_and_session] Subject {} already validated for shape {}, skipping", + subject_iri, + shape.iri + ); + continue; + } - // Has this subject already been validated? - if change.data_applied - && tracked_subject.valid != OrmTrackedSubjectValidity::Pending - { - log_debug!("Not evaluating subject again {subject_iri}"); + log_info!( + "[process_changes_for_shape_and_session] Running validation for subject {} against shape {}", + subject_iri, + shape.iri + ); - continue; - } - } - } + // Run validation and record objects that need to be re-evaluated. + { + let orm_subscription = self + .orm_subscriptions + .get_mut(nuri) + .unwrap() + .iter_mut() + .find(|sub| { + sub.shape_type.shape == *root_shape_iri && sub.session_id == session_id + }) + .unwrap(); // Validate the subject. // need_eval contains elements in reverse priority (last element to be validated first) @@ -257,21 +286,47 @@ impl Verifier { // We add the need_eval to be processed next after loop. // Filter out subjects already in the validation stack to prevent double evaluation. 
+ log_info!( + "[process_changes_for_shape_and_session] Validation returned {} objects that need evaluation", + need_eval.len() + ); for (iri, schema_shape, needs_refetch) in need_eval { let eval_key = (schema_shape.clone(), iri.clone()); if !currently_validating.contains(&eval_key) { + log_info!( + "[process_changes_for_shape_and_session] Adding nested object to eval: {} with shape {}, needs_refetch: {}", + iri, + schema_shape, + needs_refetch + ); // Only add if not currently being validated nested_objects_to_eval .entry(schema_shape) .or_insert_with(Vec::new) .push((iri.clone(), needs_refetch)); + } else { + log_info!( + "[process_changes_for_shape_and_session] Skipping nested object {} with shape {} - already validating", + iri, + schema_shape + ); } } } } // Now, we queue all non-evaluated objects + log_info!( + "[process_changes_for_shape_and_session] Processing {} nested shape groups", + nested_objects_to_eval.len() + ); for (shape_iri, objects_to_eval) in &nested_objects_to_eval { + log_info!( + "[process_changes_for_shape_and_session] Processing nested shape: {} with {} objects", + shape_iri, + objects_to_eval.len() + ); + // Extract schema and shape Arc first (before any borrows) let schema = { let orm_sub = self.get_first_orm_subscription_for( @@ -285,29 +340,41 @@ impl Verifier { // Data might need to be fetched (if it has not been during initialization or nested shape fetch). if !data_already_fetched { - let objects_to_fetch = objects_to_eval + let objects_to_fetch: Vec = objects_to_eval .iter() .filter(|(_iri, needs_fetch)| *needs_fetch) .map(|(s, _)| s.clone()) .collect(); - // Create sparql query - let shape_query = - shape_type_to_sparql(&schema, &shape_iri, Some(objects_to_fetch))?; - let new_triples = - self.query_sparql_construct(shape_query, Some(nuri_to_string(nuri)))?; + log_info!( + "[process_changes_for_shape_and_session] Fetching data for {} objects that need refetch", + objects_to_fetch.len() + ); - // Recursively process nested objects. - self.process_changes_for_shape_and_session( - nuri, - &root_shape_iri, - [shape_arc.clone()].to_vec(), - session_id, - &new_triples, - &vec![], - orm_changes, - true, - )?; + if objects_to_fetch.len() > 0 { + // Create sparql query + let shape_query = + shape_type_to_sparql(&schema, &shape_iri, Some(objects_to_fetch))?; + let new_triples = + self.query_sparql_construct(shape_query, Some(nuri_to_string(nuri)))?; + + log_info!( + "[process_changes_for_shape_and_session] Fetched {} triples, recursively processing nested objects", + new_triples.len() + ); + + // Recursively process nested objects. + self.process_changes_for_shape_and_session( + nuri, + &root_shape_iri, + [shape_arc.clone()].to_vec(), + session_id, + &new_triples, + &vec![], + orm_changes, + true, + )?; + } } // Add objects @@ -317,16 +384,35 @@ impl Verifier { .map(|(s, _)| s.clone()) .collect(); if objects_not_to_fetch.len() > 0 { + log_info!( + "[process_changes_for_shape_and_session] Queueing {} objects that don't need fetching for shape {}", + objects_not_to_fetch.len(), + shape_iri + ); // Queue all objects that don't need fetching. 
shape_validation_stack.push((shape_arc, objects_not_to_fetch)); + } else { + log_info!( + "[process_changes_for_shape_and_session] No objects to queue for shape {} (all needed fetching)", + shape_iri + ); } } + + log_info!( + "[process_changes_for_shape_and_session] Cleaning up validation tracking for {} modified subjects", + modified_subject_iris.len() + ); for subject_iri in modified_subject_iris { let validation_key = (shape.iri.clone(), subject_iri.to_string()); currently_validating.remove(&validation_key); } } + log_info!( + "[process_changes_for_shape_and_session] Finished processing. Validation stack empty." + ); + Ok(()) } @@ -415,4 +501,88 @@ impl Verifier { Some(subscription) => Ok((subscription.sender.clone(), subscription)), } } + + pub fn cleanup_tracked_subjects(orm_subscription: &mut OrmSubscription) { + let tracked_subjects = &mut orm_subscription.tracked_subjects; + + // First pass: Clean up relationships for subjects being deleted + for (subject_iri, subjects_for_shape) in tracked_subjects.iter() { + for (_shape_iri, tracked_subject_lock) in subjects_for_shape.iter() { + let tracked_subject = tracked_subject_lock.read().unwrap(); + + // Only process subjects that are marked for deletion + if tracked_subject.valid != OrmTrackedSubjectValidity::ToDelete { + continue; + } + + let has_parents = !tracked_subject.parents.is_empty(); + + // Set all children to `untracked` that don't have other parents + for tracked_predicate in tracked_subject.tracked_predicates.values() { + let tracked_pred_read = tracked_predicate.read().unwrap(); + for child in &tracked_pred_read.tracked_children { + let mut tracked_child = child.write().unwrap(); + if tracked_child.parents.is_empty() + || (tracked_child.parents.len() == 1 + && tracked_child + .parents + .contains_key(&tracked_subject.subject_iri)) + { + if tracked_child.valid != OrmTrackedSubjectValidity::ToDelete { + tracked_child.valid = OrmTrackedSubjectValidity::Untracked; + } + } + } + } + + // Remove this subject from its children's parent lists + // (Only if this is not a root subject - root subjects keep child relationships) + if has_parents { + for tracked_pred in tracked_subject.tracked_predicates.values() { + let tracked_pred_read = tracked_pred.read().unwrap(); + for child in &tracked_pred_read.tracked_children { + child.write().unwrap().parents.remove(subject_iri); + } + } + } + + // Also remove this subject from its parents' children lists + for (_parent_iri, parent_tracked_subject) in &tracked_subject.parents { + let mut parent_ts = parent_tracked_subject.write().unwrap(); + for tracked_pred in parent_ts.tracked_predicates.values_mut() { + let mut tracked_pred_mut = tracked_pred.write().unwrap(); + tracked_pred_mut + .tracked_children + .retain(|child| child.read().unwrap().subject_iri != *subject_iri); + } + } + } + } + + // Second pass: Collect subjects to remove (we can't remove while iterating) + let mut subjects_to_remove: Vec<(String, String)> = vec![]; + + for (subject_iri, subjects_for_shape) in tracked_subjects.iter() { + for (shape_iri, tracked_subject) in subjects_for_shape.iter() { + let tracked_subject = tracked_subject.read().unwrap(); + + // Only cleanup subjects that are marked for deletion + if tracked_subject.valid == OrmTrackedSubjectValidity::ToDelete { + subjects_to_remove.push((subject_iri.clone(), shape_iri.clone())); + } + } + } + + // Third pass: Remove the subjects marked for deletion + for (subject_iri, shape_iri) in subjects_to_remove { + if let Some(shapes_map) = 
tracked_subjects.get_mut(&subject_iri) { + shapes_map.remove(&shape_iri); + + // If this was the last shape for this subject, remove the subject entry entirely + if shapes_map.is_empty() { + tracked_subjects.remove(&subject_iri); + } + } + } + } } diff --git a/engine/verifier/src/orm/query.rs b/engine/verifier/src/orm/query.rs index 3cbc9b15..ea228ce0 100644 --- a/engine/verifier/src/orm/query.rs +++ b/engine/verifier/src/orm/query.rs @@ -35,15 +35,14 @@ impl Verifier { // &update.overlay_id, // ); //let base = NuriV0::repo_id(&repo.id); - let binding = nuri.unwrap(); - let nuri = binding.split_at(53).0; - log_info!("querying construct\n{}\n{}\n", nuri, query); + let nuri_str = nuri.as_ref().map(|s| s.as_str()); + log_debug!("querying construct\n{}\n{}\n", nuri_str.unwrap(), query); - let parsed = Query::parse(&query, Some(nuri.clone())) - .map_err(|e| NgError::OxiGraphError(e.to_string()))?; + let parsed = + Query::parse(&query, nuri_str).map_err(|e| NgError::OxiGraphError(e.to_string()))?; let results = oxistore - .query(parsed, Some(nuri.to_string())) + .query(parsed, nuri) .map_err(|e| NgError::OxiGraphError(e.to_string()))?; match results { QueryResults::Graph(triples) => { @@ -51,8 +50,7 @@ impl Verifier { for t in triples { match t { Err(e) => { - log_info!("Error: {:?}n", e); - + log_err!("{}", e.to_string()); return Err(NgError::SparqlError(e.to_string())); } Ok(triple) => { diff --git a/engine/verifier/src/orm/shape_validation.rs b/engine/verifier/src/orm/shape_validation.rs index 6465f005..deda65e5 100644 --- a/engine/verifier/src/orm/shape_validation.rs +++ b/engine/verifier/src/orm/shape_validation.rs @@ -19,7 +19,7 @@ impl Verifier { /// Might return nested objects that need to be validated. /// Assumes all triples to be of same subject. pub fn update_subject_validity( - s_change: &OrmTrackedSubjectChange, + s_change: &mut OrmTrackedSubjectChange, shape: &OrmSchemaShape, orm_subscription: &mut OrmSubscription, ) -> Vec<(SubjectIri, ShapeIri, NeedsFetchBool)> { @@ -32,7 +32,7 @@ impl Verifier { return vec![]; }; let mut tracked_subject = tracked_subject.write().unwrap(); - let previous_validity = tracked_subject.prev_valid.clone(); + let previous_validity = s_change.prev_valid.clone(); // Keep track of objects that need to be validated against a shape to fetch and validate. let mut need_evaluation: Vec<(String, String, bool)> = vec![]; @@ -348,41 +348,17 @@ impl Verifier { tracked_subject.valid = new_validity.clone(); - if new_validity == OrmTrackedSubjectValidity::Invalid { - // For invalid subjects, we need to to cleanup. + // First, if we have a definite decision, we set is_validated to true. + if new_validity != OrmTrackedSubjectValidity::Pending { + s_change.is_validated = true; + } - let has_parents = !tracked_subject.parents.is_empty(); - if has_parents { - // This object is not a root object. Tracked child objects can be dropped. - // We therefore delete the child -> parent links. - // Untracked objects (with no parents) will be deleted in the subsequent child validation. - for tracked_predicate in tracked_subject.tracked_predicates.values() { - for child in &tracked_predicate.write().unwrap().tracked_children { - child - .write() - .unwrap() - .parents - .remove(&tracked_subject.subject_iri); - } - } + if new_validity == OrmTrackedSubjectValidity::Invalid { + // For invalid subjects, we schedule cleanup. 
+ if tracked_subject.parents.len() == 0 { + tracked_subject.valid = OrmTrackedSubjectValidity::Invalid; } else { - // This is a root objects, we will set the children to untracked - // but don't delete the child > parent relationship. - } - - // Set all children to `untracked` that don't have other parents. - for tracked_predicate in tracked_subject.tracked_predicates.values() { - for child in &tracked_predicate.write().unwrap().tracked_children { - let mut tracked_child = child.write().unwrap(); - if tracked_child.parents.is_empty() - || (tracked_child.parents.len() == 1 - && tracked_child - .parents - .contains_key(&tracked_subject.subject_iri)) - { - tracked_child.valid = OrmTrackedSubjectValidity::Untracked; - } - } + tracked_subject.valid = OrmTrackedSubjectValidity::ToDelete; } // Add all children to need_evaluation for their cleanup. @@ -396,9 +372,6 @@ impl Verifier { )); } } - - // Remove all tracked_predicates. - tracked_subject.tracked_predicates.clear(); } else if new_validity == OrmTrackedSubjectValidity::Valid && previous_validity != OrmTrackedSubjectValidity::Valid { diff --git a/engine/verifier/src/orm/types.rs b/engine/verifier/src/orm/types.rs index 5021934f..9e93b75c 100644 --- a/engine/verifier/src/orm/types.rs +++ b/engine/verifier/src/orm/types.rs @@ -25,8 +25,6 @@ pub struct OrmTrackedSubject { pub parents: HashMap>>, /// Validity. When untracked, triple updates are not processed for this tracked subject. pub valid: OrmTrackedSubjectValidity, - /// Previous validity. Used for validation and creating JSON Patch diffs from changes. - pub prev_valid: OrmTrackedSubjectValidity, /// Subject IRI pub subject_iri: String, /// The shape for which the predicates are tracked. @@ -39,6 +37,7 @@ pub enum OrmTrackedSubjectValidity { Invalid, Pending, Untracked, + ToDelete, } #[derive(Clone, Debug)] @@ -60,10 +59,10 @@ pub struct OrmTrackedSubjectChange { pub subject_iri: String, /// Predicates that were changed. pub predicates: HashMap, - /// If the new triples have been added to the tracked predicates - /// (values_added / values_removed) already. This is to prevent - /// double-application. - pub data_applied: bool, + /// If the validation has taken place + pub is_validated: bool, + /// The validity before the new validation. + pub prev_valid: OrmTrackedSubjectValidity, } #[derive(Debug)] pub struct OrmTrackedPredicateChanges { diff --git a/engine/verifier/src/request_processor.rs b/engine/verifier/src/request_processor.rs index ab249524..b10db4cf 100644 --- a/engine/verifier/src/request_processor.rs +++ b/engine/verifier/src/request_processor.rs @@ -15,11 +15,9 @@ use std::sync::Arc; use futures::channel::mpsc; use futures::SinkExt; use futures::StreamExt; -use ng_net::actor::SoS; use ng_net::types::InboxPost; use ng_net::types::NgQRCode; use ng_net::types::NgQRCodeProfileSharingV0; -use ng_oxigraph::oxigraph::sparql::EvaluationError; use ng_oxigraph::oxigraph::sparql::{results::*, Query, QueryResults}; use ng_oxigraph::oxrdf::{Literal, NamedNode, Quad, Term}; use ng_oxigraph::oxsdatatypes::DateTime; diff --git a/sdk/js/alien-deepsignals/src/deepSignal.ts b/sdk/js/alien-deepsignals/src/deepSignal.ts index b37f99d3..eb7c0dbb 100644 --- a/sdk/js/alien-deepsignals/src/deepSignal.ts +++ b/sdk/js/alien-deepsignals/src/deepSignal.ts @@ -11,6 +11,16 @@ import { computed, signal, isSignal } from "./core"; /** A batched deep mutation (set/add/remove) from a deepSignal root. 
*/ export type DeepPatch = { + /** Property path (array indices, object keys, synthetic Set entry ids) from the root to the mutated location. */ + path: (string | number)[]; +} & ( + | DeepSetAddPatch + | DeepSetRemovePatch + | DeepObjectAddPatch + | DeepRemovePatch + | DeepLiteralAddPatch +); +export type DeepPatchInternal = { /** Unique identifier for the deep signal root which produced this patch. */ root: symbol; /** Property path (array indices, object keys, synthetic Set entry ids) from the root to the mutated location. */ @@ -22,6 +32,7 @@ export type DeepPatch = { | DeepRemovePatch | DeepLiteralAddPatch ); + export interface DeepSetAddPatch { /** Mutation kind applied at the resolved `path`. */ op: "add"; @@ -105,7 +116,7 @@ function buildPath( return path; } -function queuePatch(patch: DeepPatch) { +function queuePatch(patch: DeepPatchInternal) { if (!pendingPatches) pendingPatches = new Map(); const root = patch.root; let list = pendingPatches.get(root); @@ -113,6 +124,9 @@ function queuePatch(patch: DeepPatch) { list = []; pendingPatches.set(root, list); } + // Remove root, we do not send that back. + // @ts-ignore + delete patch.root; list.push(patch); if (!microtaskScheduled) { microtaskScheduled = true; @@ -124,7 +138,7 @@ function queuePatch(patch: DeepPatch) { for (const [rootId, patches] of groups) { if (!patches.length) continue; const subs = mutationSubscribers.get(rootId); - if (subs) subs.forEach((cb) => cb(patches)); + if (subs) subs.forEach((callback) => callback(patches)); } }); } @@ -244,6 +258,8 @@ export function getDeepSignalRootId(obj: any): symbol | undefined { const proxyToSignals = new WeakMap(); // Raw object/array/Set -> stable proxy const objToProxy = new WeakMap(); +// Proxy -> raw object/array/Set (reverse lookup) +const proxyToRaw = new WeakMap(); // Raw array -> `$` meta proxy with index signals const arrayToArrayOfSignals = new WeakMap(); // Objects already proxied or marked shallow @@ -367,22 +383,25 @@ export function setSetEntrySyntheticId(obj: object, id: string | number) { } const getSetEntryKey = (val: any): string | number => { if (val && typeof val === "object") { + // If val is a proxy, get the raw object first + const rawVal = proxyToRaw.get(val) || val; + // First check for explicitly assigned synthetic ID - if (setObjectIds.has(val)) return setObjectIds.get(val)!; + if (setObjectIds.has(rawVal)) return setObjectIds.get(rawVal)!; // Then check for @id property (primary identifier) if ( - typeof (val as any)["@id"] === "string" || - typeof (val as any)["@id"] === "number" + typeof (rawVal as any)["@id"] === "string" || + typeof (rawVal as any)["@id"] === "number" ) - return (val as any)["@id"]; + return (rawVal as any)["@id"]; // Then check for id property (backward compatibility) if ( - typeof (val as any).id === "string" || - typeof (val as any).id === "number" + typeof (rawVal as any).id === "string" || + typeof (rawVal as any).id === "number" ) - return (val as any).id; + return (rawVal as any).id; // Fall back to generating a blank node ID - return assignBlankNodeId(val); + return assignBlankNodeId(rawVal); } return val as any; }; @@ -437,6 +456,7 @@ export const deepSignal = ( // Pre-register an empty signals map so isDeepSignal() is true before any property access. 
if (!proxyToSignals.has(proxy)) proxyToSignals.set(proxy, new Map()); objToProxy.set(obj, proxy); + proxyToRaw.set(proxy, obj); } return objToProxy.get(obj); }; @@ -509,6 +529,7 @@ function getFromSet( childMeta.parent = receiver; childMeta.key = synthetic; objToProxy.set(entry, childProxy); + proxyToRaw.set(childProxy, entry); return childProxy; } if (objToProxy.has(entry)) return objToProxy.get(entry); @@ -520,19 +541,27 @@ function getFromSet( if (key === "add" || key === "delete" || key === "clear") { const fn: Function = (raw as any)[key]; return function (this: any, ...args: any[]) { + // For delete, keep track of the original entry for patch emission + const originalEntry = key === "delete" ? args[0] : undefined; + + // For delete, if the argument is a proxy, get the raw object for the actual Set operation + if (key === "delete" && args[0] && typeof args[0] === "object") { + const rawArg = proxyToRaw.get(args[0]); + if (rawArg) { + args = [rawArg]; + } + } const sizeBefore = raw.size; const result = fn.apply(raw, args); if (raw.size !== sizeBefore) { const metaNow = proxyMeta.get(receiver); - if ( - metaNow && - metaNow.parent !== undefined && - metaNow.key !== undefined - ) { - const containerPath = buildPath( - metaNow.parent, - metaNow.key - ); + if (metaNow) { + // For root Set, containerPath is empty; for nested Set, build path from parent + const containerPath = + metaNow.parent !== undefined && + metaNow.key !== undefined + ? buildPath(metaNow.parent, metaNow.key) + : []; if (key === "add") { const entry = args[0]; @@ -587,6 +616,7 @@ function getFromSet( childMeta.parent = receiver; childMeta.key = synthetic; objToProxy.set(entryVal, childProxy); + proxyToRaw.set(childProxy, entryVal); entryVal = childProxy; } // Set entry add: emit object vs primitive variant. @@ -609,7 +639,8 @@ function getFromSet( }); } } else if (key === "delete") { - const entry = args[0]; + // Use the original entry (before proxy-to-raw conversion) for getting the synthetic key + const entry = originalEntry; const synthetic = getSetEntryKey(entry); // Check if entry is primitive or object if (entry && typeof entry === "object") { @@ -664,21 +695,24 @@ function getFromSet( const makeIterator = (pair: boolean) => { return function thisIter(this: any) { const iterable = raw.values(); - return { - [Symbol.iterator]() { + // Create an Iterator that inherits Iterator.prototype methods (map, filter, etc.) + // Wrap the iterator to proxy entries on-demand + const wrappedIterator = { + next() { + const n = iterable.next(); + if (n.done) return n; + const entry = ensureEntryProxy(n.value); return { - next() { - const n = iterable.next(); - if (n.done) return n; - const entry = ensureEntryProxy(n.value); - return { - value: pair ? [entry, entry] : entry, - done: false, - }; - }, + value: pair ? 
[entry, entry] : entry, + done: false, }; }, - } as Iterable; + }; + // Set the prototype to Iterator.prototype if available (ES2023+ Iterator Helpers) + if (typeof Iterator !== "undefined" && Iterator.prototype) { + Object.setPrototypeOf(wrappedIterator, Iterator.prototype); + } + return wrappedIterator; }; }; if (key === "values" || key === "keys") return makeIterator(false); @@ -813,6 +847,10 @@ const get = if (target instanceof Set) { return getFromSet(target as Set, fullKey as any, receiver); } + // Special case: accessing `$` on a non-array object returns the raw target + if (fullKey === "$" && !Array.isArray(target)) { + return target; + } const norm = normalizeKey(target, fullKey, isArrayMeta, receiver); if ((norm as any).shortCircuit) return (norm as any).shortCircuit; // returned meta proxy const { key, returnSignal } = norm as { @@ -839,9 +877,9 @@ const objectHandlers = { get: get(false), set(target: object, fullKey: string, val: any, receiver: object): boolean { // Prevent modification of @id property - if (fullKey === "@id") { - throw new Error("Cannot modify readonly property '@id'"); - } + // if (fullKey === "@id") { + // throw new Error("Cannot modify readonly property '@id'"); + // } // Respect original getter/setter semantics if (typeof descriptor(target, fullKey)?.set === "function") return Reflect.set(target, fullKey, val, receiver); diff --git a/sdk/js/alien-deepsignals/src/test/deepSignalOptions.test.ts b/sdk/js/alien-deepsignals/src/test/deepSignalOptions.test.ts index 5b3f7c76..2b44e723 100644 --- a/sdk/js/alien-deepsignals/src/test/deepSignalOptions.test.ts +++ b/sdk/js/alien-deepsignals/src/test/deepSignalOptions.test.ts @@ -336,5 +336,99 @@ describe("deepSignal options", () => { stop(); }); + + it("emits delete patch when removing objects with @id from Sets", async () => { + const options: DeepSignalOptions = { + addIdToObjects: true, + }; + + const state = deepSignal({ s: new Set() }, options); + const patches: DeepPatch[][] = []; + const { stopListening: stop } = watch(state, ({ patches: batch }) => + patches.push(batch) + ); + + // Add objects with @id + const obj1 = { "@id": "obj-1", value: 1 }; + const obj2 = { "@id": "obj-2", value: 2 }; + const obj3 = { "@id": "obj-3", value: 3 }; + + state.s.add(obj1); + state.s.add(obj2); + state.s.add(obj3); + await Promise.resolve(); + + // Get the proxied objects from the Set + const proxiedObjs = Array.from(state.s); + const proxiedObj2 = proxiedObjs.find( + (o: any) => o["@id"] === "obj-2" + ); + + // Clear patches from additions + patches.length = 0; + + // Delete one object using the proxied object + state.s.delete(proxiedObj2); + await Promise.resolve(); + + // Check that delete patch was emitted with correct path + const deletePaths = patches + .flat() + .filter((p) => p.op === "remove") + .map((p) => p.path.join(".")); + + expect(deletePaths).toContain("s.obj-2"); + expect(deletePaths).not.toContain("s.obj-1"); + expect(deletePaths).not.toContain("s.obj-3"); + + stop(); + }); + + it("emits delete patches when removing objects without explicit @id from Sets", async () => { + const options: DeepSignalOptions = { + idGenerator: () => + `gen-${Math.random().toString(36).substr(2, 9)}`, + addIdToObjects: true, + }; + + const state = deepSignal({ s: new Set() }, options); + + // Add objects without @id - they should get generated IDs + const obj1 = { value: 1 }; + const obj2 = { value: 2 }; + + state.s.add(obj1); + state.s.add(obj2); + + // Get the proxied objects and their generated IDs + const proxiedObjs = 
Array.from(state.s); + const proxiedObj1 = proxiedObjs[0]; + const proxiedObj2 = proxiedObjs[1]; + const id1 = (proxiedObj1 as any)["@id"]; + const id2 = (proxiedObj2 as any)["@id"]; + + expect(id1).toBeDefined(); + expect(id2).toBeDefined(); + + const patches: DeepPatch[][] = []; + const { stopListening: stop } = watch(state, ({ patches: batch }) => + patches.push(batch) + ); + + // Delete one object using the proxied object + state.s.delete(proxiedObj1); + await Promise.resolve(); + + // Check that delete patch was emitted with the generated ID + const deletePaths = patches + .flat() + .filter((p) => p.op === "remove") + .map((p) => p.path.join(".")); + + expect(deletePaths).toContain(`s.${id1}`); + expect(deletePaths).not.toContain(`s.${id2}`); + + stop(); + }); }); }); diff --git a/sdk/js/alien-deepsignals/src/test/watchPatches.test.ts b/sdk/js/alien-deepsignals/src/test/watchPatches.test.ts index 26583406..28a425b9 100644 --- a/sdk/js/alien-deepsignals/src/test/watchPatches.test.ts +++ b/sdk/js/alien-deepsignals/src/test/watchPatches.test.ts @@ -493,6 +493,84 @@ describe("watch (patch mode)", () => { expect(flat.some((p) => p.endsWith("eIter.inner.v"))).toBe(true); stop(); }); + + it("generates correct patches when root is a Set (primitive entries)", async () => { + const rootSet = deepSignal(new Set()); + const batches: DeepPatch[][] = []; + const { stopListening: stop } = watch(rootSet, ({ patches }) => + batches.push(patches) + ); + rootSet.add(1); + rootSet.add("test"); + rootSet.add(true); + await Promise.resolve(); + + expect(batches.length).toBe(1); + const patches = batches[0]; + expect(patches.length).toBe(3); + + // When root is a Set, path should be empty array for primitive adds + patches.forEach((p) => { + expect(p.path).toEqual([]); + expect(p.op).toBe("add"); + expect((p as any).type).toBe("set"); + }); + + const values = patches.map((p: any) => p.value[0]); + expect(values).toContain(1); + expect(values).toContain("test"); + expect(values).toContain(true); + stop(); + }); + + it("generates correct patches when root is a Set (object entries)", async () => { + const rootSet = deepSignal(new Set()); + const batches: DeepPatch[][] = []; + const { stopListening: stop } = watch(rootSet, ({ patches }) => + batches.push(patches) + ); + + const obj1 = { "@id": "obj1", value: 1 }; + const obj2 = { "@id": "obj2", value: 2 }; + rootSet.add(obj1); + rootSet.add(obj2); + await Promise.resolve(); + + const flat = batches.flat().map((p) => p.path.join(".")); + + // When root is a Set, first element of path should be synthetic id + expect(flat).toContain("obj1"); + expect(flat).toContain("obj1.@id"); + expect(flat).toContain("obj1.value"); + expect(flat).toContain("obj2"); + expect(flat).toContain("obj2.@id"); + expect(flat).toContain("obj2.value"); + stop(); + }); + + it("tracks nested mutations when root is a Set", async () => { + const rootSet = deepSignal(new Set()); + const obj = { id: "nested", data: { x: 1 } }; + rootSet.add(obj); + + const batches: DeepPatch[][] = []; + const { stopListening: stop } = watch(rootSet, ({ patches }) => + batches.push(patches) + ); + + // Get the proxied entry + let proxied: any; + for (const e of rootSet.values()) { + proxied = e; + } + + proxied.data.x = 2; + await Promise.resolve(); + + const flat = batches.flat().map((p) => p.path.join(".")); + expect(flat.some((p) => p === "nested.data.x")).toBe(true); + stop(); + }); }); describe("Arrays & mixed batch", () => { diff --git a/sdk/js/api-web/worker.js b/sdk/js/api-web/worker.js index 
7839c95e..dec576c8 100644 --- a/sdk/js/api-web/worker.js +++ b/sdk/js/api-web/worker.js @@ -24,6 +24,11 @@ onmessage = (e) => { e.data.pazzle, e.data.pin_code ); + } else if (e.data.password) { + secret_wallet = await ng.wallet_open_with_password( + e.data.wallet, + e.data.password + ); } else if (e.data.mnemonic_words) { secret_wallet = await ng.wallet_open_with_mnemonic_words( e.data.wallet, diff --git a/sdk/js/examples/multi-framework-signals/src/app/pages/index.astro b/sdk/js/examples/multi-framework-signals/src/app/pages/index.astro index 71993e98..ad75816a 100644 --- a/sdk/js/examples/multi-framework-signals/src/app/pages/index.astro +++ b/sdk/js/examples/multi-framework-signals/src/app/pages/index.astro @@ -5,7 +5,6 @@ import Highlight from "../components/Highlight.astro"; import VueRoot from "../components/VueRoot.vue"; import ReactRoot from "../components/ReactRoot"; import SvelteRoot from "../components/SvelteRoot.svelte"; -import { initNg } from "@ng-org/signals" const title = "Multi-framework app"; --- @@ -28,10 +27,14 @@ const title = "Multi-framework app"; let info = await ng.client_info(); console.log(info.V0.details); initNg(ng, event.session); + + window.ng = ng; + window.session = event.session; }, true, [] ); + @@ -45,4 +48,5 @@ const title = "Multi-framework app"; + diff --git a/sdk/js/examples/multi-framework-signals/src/frontends/react/HelloWorld.tsx b/sdk/js/examples/multi-framework-signals/src/frontends/react/HelloWorld.tsx index 905cbf9d..b562a7c1 100644 --- a/sdk/js/examples/multi-framework-signals/src/frontends/react/HelloWorld.tsx +++ b/sdk/js/examples/multi-framework-signals/src/frontends/react/HelloWorld.tsx @@ -1,196 +1,293 @@ -import React from "react"; +import React, { useEffect, useState } from "react"; import { useShape } from "@ng-org/signals/react"; import flattenObject from "../utils/flattenObject"; import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes"; +import { BasicShapeType } from "../../shapes/orm/basic.shapeTypes"; +import type { ShapeType } from "@ng-org/shex-orm"; +import type { Basic } from "../../shapes/orm/basic.typings"; +import { deepSignal, watch } from "@ng-org/alien-deepsignals"; + +const sparqlExampleData = ` +PREFIX ex: +INSERT DATA { + a ex:TestObject ; + ex:stringValue "hello world" ; + ex:numValue 42 ; + ex:boolValue true ; + ex:arrayValue 1,2,3 ; + ex:objectValue ; + ex:anotherObject , ; + ex:numOrStr "either" ; + ex:lit1Or2 "lit1" ; + ex:unrelated "some value" ; + ex:anotherUnrelated 4242 . + + + ex:nestedString "nested" ; + ex:nestedNum 7 ; + ex:nestedArray 5,6 . + + + ex:prop1 "one" ; + ex:prop2 1 . + + + ex:prop1 "two" ; + ex:prop2 2 . + + a ex:TestObject ; + ex:stringValue "hello world #2" ; + ex:numValue 422 ; + ex:boolValue false ; + ex:arrayValue 4,5,6 ; + ex:objectValue ; + ex:anotherObject , ; + ex:numOrStr 4 ; + ex:lit1Or2 "lit2" ; + ex:unrelated "some value2" ; + ex:anotherUnrelated 42422 . + + + ex:nestedString "nested2" ; + ex:nestedNum 72 ; + ex:nestedArray 7,8,9 . + + + ex:prop1 "one2" ; + ex:prop2 12 . + + + ex:prop1 "two2" ; + ex:prop2 22 . + + + + a ; + ex:basicString "string of object 1" . + + a ; + ex:basicString "string of object 2" . 
+ +} + `; export function HelloWorldReact() { - const state = useShape(TestObjectShapeType)?.entries().next(); + const state = useShape(TestObjectShapeType); + const objects = [...(state || [])]; // @ts-expect-error window.reactState = state; - if (!state) return <>Loading state; - // Create a table from the state object: One column for keys, one for values, one with an input to change the value. return (

Rendered in React

- + - - - - - - - - - - {(() => { - const setNestedValue = ( - obj: any, - path: string, - value: any - ) => { - const keys = path.split("."); - let current = obj; - - for (let i = 0; i < keys.length - 1; i++) { - current = current[keys[i]]; - } - - current[keys[keys.length - 1]] = value; - }; - - const getNestedValue = (obj: any, path: string) => { - return path - .split(".") - .reduce((current, key) => current[key], obj); - }; - - return flattenObject(state).map(([key, value]) => ( - - - - - - )); - })()} - -
KeyValueEdit
{key} - {value instanceof Set - ? Array.from(value).join(", ") - : Array.isArray(value) - ? `[${value.join(", ")}]` - : JSON.stringify(value)} - - {typeof value === "string" ? ( - { - setNestedValue( - state, - key, - e.target.value - ); - }} - /> - ) : typeof value === "number" ? ( - { - setNestedValue( - state, - key, - Number(e.target.value) - ); - }} - /> - ) : typeof value === "boolean" ? ( - { - setNestedValue( - state, - key, - e.target.checked - ); - }} - /> - ) : Array.isArray(value) ? ( -
- - -
- ) : value instanceof Set ? ( -
- - -
- ) : ( - "N/A" - )} -
+ {!state ? ( +
Loading...
+ ) : ( +
+ {objects.map((ormObj) => ( + + + + + + + + + + {(() => { + const setNestedValue = ( + targetObj: any, + lastKey: string, + value: any + ) => { + // targetObj is the direct parent object containing the property + // lastKey is the property name to set + targetObj[lastKey] = value; + }; + + return flattenObject(ormObj).map( + ([key, value, lastKey, parentObj]) => ( + + + + + + ) + ); + })()} + +
KeyValueEdit
{key} + {value instanceof Set + ? Array.from( + value + ).join(", ") + : Array.isArray(value) + ? `[${value.join(", ")}]` + : JSON.stringify( + value + )} + + {typeof value === + "string" ? ( + { + setNestedValue( + parentObj, + lastKey, + e.target + .value + ); + }} + /> + ) : typeof value === + "number" ? ( + { + setNestedValue( + parentObj, + lastKey, + Number( + e.target + .value + ) + ); + }} + /> + ) : typeof value === + "boolean" ? ( + { + setNestedValue( + parentObj, + lastKey, + e.target + .checked + ); + }} + /> + ) : Array.isArray(value) ? ( +
+ + +
+ ) : value instanceof Set ? ( +
+ + +
+ ) : ( + "N/A" + )} +
+ ))} +
+ )}
); } diff --git a/sdk/js/examples/multi-framework-signals/src/frontends/svelte/HelloWorld.svelte b/sdk/js/examples/multi-framework-signals/src/frontends/svelte/HelloWorld.svelte index 24a9c876..445bec51 100644 --- a/sdk/js/examples/multi-framework-signals/src/frontends/svelte/HelloWorld.svelte +++ b/sdk/js/examples/multi-framework-signals/src/frontends/svelte/HelloWorld.svelte @@ -1,123 +1,135 @@ -{#if $shapeObject} +{#if $shapeObjects}

Rendered in Svelte

- - - - - - - - - - {#each flatEntries as [key, value] (key)} + + {#each flattenedObjects as { entries: flatEntries, rootObj }} +
KeyValueEdit
+ - - - + + + - {/each} - -
{key} - {#if value instanceof Set} - {Array.from(value).join(", ")} - {:else if Array.isArray(value)} - [{value.join(", ")}] - {:else} - {JSON.stringify(value)} - {/if} - - {#if typeof value === "string"} - - setNestedValue($shapeObject, key, e.target.value)} - /> - {:else if typeof value === "number"} - - setNestedValue($shapeObject, key, Number(e.target.value))} - /> - {:else if typeof value === "boolean"} - - setNestedValue($shapeObject, key, e.target.checked)} - /> - {:else if Array.isArray(value)} -
- - -
- {:else if value instanceof Set} -
- - -
- {:else} - N/A - {/if} -
KeyValueEdit
+ + + {#each flatEntries as [key, value, lastKey, parentObj]} + + {key} + + {#if value instanceof Set} + {Array.from(value).join(", ")} + {:else if Array.isArray(value)} + [{value.join(", ")}] + {:else} + {JSON.stringify(value)} + {/if} + + + {#if typeof value === "string"} + + setNestedValue(parentObj, lastKey, e.target.value)} + /> + {:else if typeof value === "number"} + + setNestedValue( + parentObj, + lastKey, + Number(e.target.value) + )} + /> + {:else if typeof value === "boolean"} + + setNestedValue(parentObj, lastKey, e.target.checked)} + /> + {:else if Array.isArray(value)} +
+ + +
+ {:else if value instanceof Set} +
+ + +
+ {:else} + N/A + {/if} + + + {/each} + + + {/each}
{:else}

Loading state

diff --git a/sdk/js/examples/multi-framework-signals/src/frontends/utils/flattenObject.ts b/sdk/js/examples/multi-framework-signals/src/frontends/utils/flattenObject.ts index 1b55bf3a..bed55f73 100644 --- a/sdk/js/examples/multi-framework-signals/src/frontends/utils/flattenObject.ts +++ b/sdk/js/examples/multi-framework-signals/src/frontends/utils/flattenObject.ts @@ -1,43 +1,75 @@ interface FlattenOptions { - /** Maximum depth to traverse (default: 8). */ - maxDepth?: number; - /** Skip keys that start with a dollar sign (deepSignal meta). Default: true */ - skipDollarKeys?: boolean; + /** Maximum depth to traverse (default: 8). */ + maxDepth?: number; + /** Skip keys that start with a dollar sign (deepSignal meta). Default: true */ + skipDollarKeys?: boolean; } const isPlainObject = (v: any) => - Object.prototype.toString.call(v) === "[object Object]"; + Object.prototype.toString.call(v) === "[object Object]"; const flattenObject = ( - obj: any, - prefix = "", - options: FlattenOptions = {}, - seen = new Set(), - depth = 0 + obj: any, + prefix = "", + options: FlattenOptions = {}, + seen = new Set(), + depth = 0 ): Array<[string, any, string, any]> => { - const { maxDepth = 8, skipDollarKeys = true } = options; - const result: Array<[string, any, string, any]> = []; - if (!obj || typeof obj !== "object") return result; - if (seen.has(obj)) return result; // cycle detected - seen.add(obj); - if (depth > maxDepth) return result; + const { maxDepth = 8, skipDollarKeys = true } = options; + const result: Array<[string, any, string, any]> = []; + if (!obj || typeof obj !== "object") return result; + if (seen.has(obj)) return result; // cycle detected + seen.add(obj); + if (depth > maxDepth) return result; - for (const [key, value] of Object.entries(obj)) { - if (skipDollarKeys && key.startsWith("$")) continue; - const fullKey = prefix ? `${prefix}.${key}` : key; - if ( - value && - typeof value === "object" && - !Array.isArray(value) && - !(value instanceof Set) && - isPlainObject(value) - ) { - result.push(...flattenObject(value, fullKey, options, seen, depth + 1)); - } else { - result.push([fullKey, value, key, obj]); + for (const [key, value] of Object.entries(obj)) { + if (skipDollarKeys && key.startsWith("$")) continue; + const fullKey = prefix ? 
`${prefix}.${key}` : key; + + // Handle Sets containing objects with @id + if (value instanceof Set) { + const setItems = Array.from(value); + // Check if Set contains objects with @id + if ( + setItems.length > 0 && + setItems.some( + (item) => item && typeof item === "object" && "@id" in item + ) + ) { + // Flatten each object in the Set + setItems.forEach((item) => { + if (item && typeof item === "object" && "@id" in item) { + const itemId = item["@id"]; + const itemPrefix = `${fullKey}[@id=${itemId}]`; + result.push( + ...flattenObject( + item, + itemPrefix, + options, + seen, + depth + 1 + ) + ); + } + }); + } else { + // Set doesn't contain objects with @id, treat as leaf + result.push([fullKey, value, key, obj]); + } + } else if ( + value && + typeof value === "object" && + !Array.isArray(value) && + isPlainObject(value) + ) { + result.push( + ...flattenObject(value, fullKey, options, seen, depth + 1) + ); + } else { + result.push([fullKey, value, key, obj]); + } } - } - return result; + return result; }; export default flattenObject; diff --git a/sdk/js/examples/multi-framework-signals/src/frontends/vue/HelloWorld.vue b/sdk/js/examples/multi-framework-signals/src/frontends/vue/HelloWorld.vue index 3f367371..86a2196e 100644 --- a/sdk/js/examples/multi-framework-signals/src/frontends/vue/HelloWorld.vue +++ b/sdk/js/examples/multi-framework-signals/src/frontends/vue/HelloWorld.vue @@ -3,217 +3,217 @@ import { computed } from "vue"; import { useShape } from "@ng-org/signals/vue"; import flattenObject from "../utils/flattenObject"; import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes"; +import { BasicShapeType } from "../../shapes/orm/basic.shapeTypes"; // Acquire deep signal object (proxy) for a shape; scope second arg left empty string for parity -const shapeObj = useShape(TestObjectShapeType); +const shapeObjects = useShape(TestObjectShapeType); // Expose for devtools exploration // @ts-ignore -window.vueState = shapeObj; - -const flatEntries = computed(() => flattenObject(shapeObj)); +window.vueState = shapeObjects; +console.log("vue loaded")
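
The deepSignal.ts changes above split the public DeepPatch type from the internal DeepPatchInternal (the root symbol is stripped before patches reach subscribers) and route Set deletions through the new proxyToRaw lookup, so deleting a proxied entry still mutates the raw Set. A minimal consumer-side sketch of that flow, mirroring the new tests; the DeepPatch export from the package root and the exact patch ordering shown in the trailing comment are assumptions:

```ts
import { deepSignal, watch, type DeepPatch } from "@ng-org/alien-deepsignals"; // DeepPatch export path assumed

async function demo() {
    const state = deepSignal({ s: new Set<object>() });

    const batches: DeepPatch[][] = [];
    const { stopListening } = watch(state, ({ patches }) => {
        batches.push(patches);
    });

    // Objects carrying "@id" are addressed by that id in patch paths.
    state.s.add({ "@id": "obj-1", value: 1 });
    await Promise.resolve(); // patches are flushed in a microtask batch

    // Iteration yields proxies; deleting the proxy is mapped back to the raw
    // entry, and a remove patch is emitted at ["s", "obj-1"].
    const proxied = Array.from(state.s).find((o: any) => o["@id"] === "obj-1");
    if (proxied) state.s.delete(proxied);
    await Promise.resolve();

    console.log(batches.flat().map((p) => [p.op, p.path.join(".")]));
    // expected to include ["add", "s.obj-1"] ... ["remove", "s.obj-1"]

    stopListening();
}

void demo();
```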
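
worker.js above gains a password branch next to the pazzle and mnemonic ones. A hypothetical caller-side sketch: only the { wallet, password } message fields come from that hunk; the helper name, the worker URL, and the assumption that the worker posts the opened wallet back are illustrative.

```ts
// Hypothetical helper; the message field names match what worker.js reads from e.data.
function openWalletWithPassword(wallet: unknown, password: string): Promise<unknown> {
    return new Promise((resolve, reject) => {
        const worker = new Worker(new URL("./worker.js", import.meta.url));
        worker.onmessage = (e) => {
            // Assumes the worker eventually posts the opened wallet back (not shown in the hunk).
            resolve(e.data);
            worker.terminate();
        };
        worker.onerror = reject;
        worker.postMessage({ wallet, password });
    });
}
```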
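
The React, Svelte, and Vue examples above switch from a single useShape object to an iterable of matching subjects that gets spread into an array before rendering. A stripped-down React sketch of the same pattern; the imports are the ones used in HelloWorld.tsx, while the component name, markup, and keying by "@id" are illustrative:

```tsx
import React from "react";
import { useShape } from "@ng-org/signals/react";
import { TestObjectShapeType } from "../../shapes/orm/testShape.shapeTypes";

export function TestObjectList() {
    const state = useShape(TestObjectShapeType);
    const objects = [...(state || [])]; // spread the iterable into a plain array for rendering

    if (!state) return <>Loading state</>;

    return (
        <ul>
            {objects.map((ormObj: any) => (
                <li key={ormObj["@id"]}>{JSON.stringify(ormObj)}</li>
            ))}
        </ul>
    );
}
```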
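
flattenObject above now returns [fullKey, value, lastKey, parentObj] tuples and descends into Sets whose entries carry an "@id", prefixing their paths with a [@id=...] segment; the editor components write changes straight back through parentObj[lastKey], which keeps the live proxy in sync. A small sketch under those assumptions, with made-up sample data and an assumed relative import path:

```ts
import flattenObject from "./flattenObject"; // relative path assumed

// Made-up data shaped roughly like the ORM objects rendered in the example tables.
const ormObj = {
    "@id": "ex:obj1",
    stringValue: "hello",
    objectValue: { nestedNum: 7 },
    anotherObject: new Set([{ "@id": "ex:other1", prop2: 1 }]),
};

for (const [fullKey, value, lastKey, parentObj] of flattenObject(ormObj)) {
    // Set entries with "@id" come out as e.g. "anotherObject[@id=ex:other1].prop2".
    console.log(fullKey, "=", value);

    // Edits go through the direct parent object, not the flattened key string.
    if (typeof value === "number") parentObj[lastKey] = value + 1;
}
```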