/**
 * Returns the children of the backing data node, each wrapped as a
 * {@link DataTreeCandidateNode}. Non-container data has no children.
 */
@Nonnull
@Override
public Collection<DataTreeCandidateNode> getChildNodes() {
    // Only container nodes can have children; everything else is a leaf.
    if (!(data instanceof NormalizedNodeContainer)) {
        return ImmutableList.of();
    }
    final NormalizedNodeContainer<?, ?, ?> container = (NormalizedNodeContainer<?, ?, ?>) data;
    // Lazy view: children are wrapped on access, nulls are passed through.
    return Collections2.transform(container.getValue(),
        child -> child == null ? null : new NormalizedNodeDataTreeCandidateNode(child));
}
"No old or new data, modification type should be NONE and deltaChildren() mustn't be called."); if (newData == null) { return Collections2.transform(oldData.getValue(), AbstractRecursiveCandidateNode::deleteNode); return Collections2.transform(newData.getValue(), AbstractRecursiveCandidateNode::writeNode); for (NormalizedNode<?, ?> child : newData.getValue()) { final DataTreeCandidateNode node; final Optional<NormalizedNode<?, ?>> maybeOldChild = oldData.getChild(child.getIdentifier()); for (NormalizedNode<?, ?> child : oldData.getValue()) { if (!newData.getChild(child.getIdentifier()).isPresent()) { result.add(AbstractRecursiveCandidateNode.deleteNode(child));
/**
 * Visits a mixin container node and recursively descends into any child that is
 * itself a mixin container; all other children are handed to
 * {@code navigateNormalizedNode} at the current level.
 *
 * @param level current nesting depth, incremented for each mixin descent
 * @param parentPath slash-separated path of the parent node
 * @param node the mixin container being visited
 */
private void navigateNormalizedNodeContainerMixin(final int level, final String parentPath,
        final NormalizedNodeContainer<?, ?, ?> node) {
    visitor.visitNode(level, parentPath, node);
    // String concatenation stringifies the identifier; no explicit toString() needed.
    final String newParentPath = parentPath + "/" + node.getIdentifier();
    for (final NormalizedNode<?, ?> child : node.getValue()) {
        if (child instanceof MixinNode && child instanceof NormalizedNodeContainer) {
            // Wildcard-parameterized cast instead of a raw type, avoiding a rawtypes warning.
            navigateNormalizedNodeContainerMixin(level + 1, newParentPath,
                (NormalizedNodeContainer<?, ?, ?>) child);
        } else {
            // Non-mixin children stay at the current level, as before.
            navigateNormalizedNode(level, newParentPath, child);
        }
    }
}
/**
 * Looks up a child by identifier, treating a null container as empty.
 *
 * @param container container to search, possibly null
 * @param identifier child identifier to look up
 * @return the child if present, otherwise {@link Optional#empty()}
 */
private static Optional<NormalizedNode<?, ?>> getChild(
        final NormalizedNodeContainer<?, PathArgument, NormalizedNode<?, ?>> container,
        final PathArgument identifier) {
    // A missing container trivially has no children.
    if (container == null) {
        return Optional.empty();
    }
    return container.getChild(identifier);
}
/**
 * Writes {@code data} and each of its immediate children into the current shard
 * through the cursor, then deletes any child entries that belong to a subshard
 * boundary so they are not duplicated in this shard.
 *
 * <p>NOTE(review): relies on fields {@code cursor} and {@code node} and helper
 * {@code write(...)} declared elsewhere in this class — behavior of subshard
 * routing is assumed from those, confirm against the enclosing class.
 *
 * @param data container node whose content is written into the current shard
 */
@Override
@SuppressWarnings("rawtypes")
public final void writeToCurrent(final NormalizedNodeContainer<?, ?, ?> data) {
    // write the entire thing into the cursor
    write(data.getIdentifier(), data);
    // write the children with subshard check and subshard write if we are going into subshard
    cursor.enter(data.getIdentifier());
    for (NormalizedNode<?, ?> writtenChild : data.getValue()) {
        write(writtenChild.getIdentifier(), writtenChild);
    }
    // Delete step - remove subshard data that was written into current shard
    // delete from current
    node.getChildrenWithSubshards().entrySet()
        .stream().filter(entry -> entry.getValue() instanceof WriteableSubshardBoundaryNode).forEach(entry -> {
            // Raw cast: the key type of the container is not statically known here.
            @SuppressWarnings("unchecked")
            Optional<NormalizedNode<?, ?>> writtenValue = ((NormalizedNodeContainer) data).getChild(entry.getKey());
            if (writtenValue.isPresent()) {
                // delete from current
                cursor.delete(entry.getKey());
            }
        });
    cursor.exit();
}
}
/**
 * Returns the path argument identifying this node, delegating to the backing data node.
 */
@Override
@Nonnull
public final PathArgument getIdentifier() {
    return data.getIdentifier();
}
/**
 * Null-safe child lookup: a null container yields an empty result rather than an NPE.
 *
 * @param container container to query, may be null
 * @param identifier identifier of the requested child
 * @return the matching child, or empty when absent or the container is null
 */
private static Optional<NormalizedNode<?, ?>> getChild(
        final NormalizedNodeContainer<?, PathArgument, NormalizedNode<?, ?>> container,
        final PathArgument identifier) {
    return container != null ? container.getChild(identifier) : Optional.empty();
}
/**
 * Returns this node's identifier, taken directly from the wrapped data node.
 */
@Override
@Nonnull
public final PathArgument getIdentifier() {
    return data.getIdentifier();
}
/**
 * Counts the immediate children of the given node.
 *
 * @param value node whose children are counted
 * @return number of direct children
 * @throws IllegalArgumentException if the node is neither a
 *         {@code NormalizedNodeContainer} nor an {@code UnkeyedListNode}
 */
private static int numOfChildrenFromValue(final NormalizedNode<?, ?> value) {
    // Containers expose children as a collection.
    if (value instanceof NormalizedNodeContainer) {
        return ((NormalizedNodeContainer<?, ?, ?>) value).getValue().size();
    }
    // Unkeyed lists track their size explicitly.
    if (value instanceof UnkeyedListNode) {
        return ((UnkeyedListNode) value).getSize();
    }
    throw new IllegalArgumentException(String.format(
        "Unexpected type '%s', expected types are NormalizedNodeContainer and UnkeyedListNode",
        value.getClass()));
}
}
"No old or new data, modification type should be NONE and deltaChildren() mustn't be called."); if (newData == null) { return Collections2.transform(oldData.getValue(), AbstractRecursiveCandidateNode::deleteNode); return Collections2.transform(newData.getValue(), AbstractRecursiveCandidateNode::writeNode); for (NormalizedNode<?, ?> child : newData.getValue()) { final DataTreeCandidateNode node; final Optional<NormalizedNode<?, ?>> maybeOldChild = oldData.getChild(child.getIdentifier()); for (NormalizedNode<?, ?> child : oldData.getValue()) { if (!newData.getChild(child.getIdentifier()).isPresent()) { result.add(AbstractRecursiveCandidateNode.deleteNode(child));
/**
 * Visits the given mixin container, then walks its children: mixin containers
 * are descended into recursively one level deeper, all other children are
 * visited in place via {@code navigateNormalizedNode}.
 *
 * @param level current traversal depth
 * @param parentPath path of the parent, extended with this node's identifier
 * @param node mixin container to visit
 */
private void navigateNormalizedNodeContainerMixin(int level, final String parentPath,
        NormalizedNodeContainer<?, ?, ?> node) {
    visitor.visitNode(level, parentPath, node);
    String newParentPath = parentPath + "/" + node.getIdentifier().toString();
    for (NormalizedNode<?, ?> child : node.getValue()) {
        final boolean isMixinContainer =
            child instanceof MixinNode && child instanceof NormalizedNodeContainer;
        if (isMixinContainer) {
            navigateNormalizedNodeContainerMixin(level + 1, newParentPath,
                (NormalizedNodeContainer<?, ?, ?>) child);
        } else {
            navigateNormalizedNode(level, newParentPath, child);
        }
    }
}
/**
 * Resolves a child of {@code data} into a {@link TreeNode} at the given version.
 *
 * @param data container holding the child
 * @param childId identifier of the requested child
 * @param version version to stamp on the created tree node
 * @return the materialized tree node, or null when the child is absent
 */
static TreeNode getChildFromData(final NormalizedNodeContainer<?, PathArgument, NormalizedNode<?, ?>> data,
        final PathArgument childId, final Version version) {
    // Absent child maps to null per the caller's lookup contract.
    return data.getChild(childId)
        .map(child -> TreeNodeFactory.createTreeNode(child, version))
        .orElse(null);
}
}
/**
 * Returns a lazy view of this node's children, each wrapped into a candidate
 * node by {@code createChild}.
 */
@Nonnull
@Override
public final Collection<DataTreeCandidateNode> getChildNodes() {
    // Transformation is applied on access, not eagerly.
    return Collections2.transform(getData().getValue(), child -> createChild(child));
}
/**
 * Collects all augmentations present in {@code data}, keyed by their binding class.
 *
 * <p>Two passes are made: first over the DOM children to decode explicit
 * {@code AugmentationNode}s, then over the stream-declared augmentation
 * prototypes to pick up augmentations addressed by their YANG argument.
 *
 * @param data DOM container to extract augmentations from
 * @return map from augmentation binding class to deserialized augmentation instance
 */
@SuppressWarnings("unchecked")
public Map<Class<? extends Augmentation<?>>, Augmentation<?>> getAllAugmentationsFrom(
        final NormalizedNodeContainer<?, PathArgument, NormalizedNode<?, ?>> data) {
    // Raw map: codec generics cannot express the key/value relationship statically.
    @SuppressWarnings("rawtypes")
    final Map map = new HashMap<>();
    for (final NormalizedNode<?, ?> childValue : data.getValue()) {
        if (childValue instanceof AugmentationNode) {
            final AugmentationNode augDomNode = (AugmentationNode) childValue;
            final DataContainerCodecPrototype<?> codecProto = yangAugmentationChild(augDomNode.getIdentifier());
            // Unknown augmentations (no codec) are silently skipped.
            if (codecProto != null) {
                final DataContainerCodecContext<?, ?> codec = codecProto.get();
                map.put(codec.getBindingClass(), codec.deserializeObject(augDomNode));
            }
        }
    }
    // Second pass: augmentations registered by stream name rather than DOM child.
    for (final DataContainerCodecPrototype<?> value : byStreamAugmented.values()) {
        final Optional<NormalizedNode<?, ?>> augData = data.getChild(value.getYangArg());
        if (augData.isPresent()) {
            map.put(value.getBindingClass(), value.get().deserializeObject(augData.get()));
        }
    }
    return map;
}
/**
 * Recursively verifies that every child of {@code value} is a valid child
 * according to the schema, descending into each child's own structure.
 *
 * @param value container node to verify; must be a {@code NormalizedNodeContainer}
 * @throws SchemaValidationFailedException if a child has no schema-derived operation
 */
@Override
protected void recursivelyVerifyStructure(final NormalizedNode<?, ?> value) {
    final NormalizedNodeContainer<?, ?, ?> container = (NormalizedNodeContainer<?, ?, ?>) value;
    for (final Object rawChild : container.getValue()) {
        checkArgument(rawChild instanceof NormalizedNode);
        final NormalizedNode<?, ?> child = (NormalizedNode<?, ?>) rawChild;
        final Optional<ModificationApplyOperation> childOp = getChild(child.getIdentifier());
        // Guard clause: a child without an apply operation violates the schema.
        if (!childOp.isPresent()) {
            throw new SchemaValidationFailedException(
                String.format("Node %s is not a valid child of %s according to the schema.",
                    child.getIdentifier(), container.getIdentifier()));
        }
        childOp.get().recursivelyVerifyStructure(child);
    }
}
/**
 * Looks up the child with the given identifier and wraps it as a candidate node.
 *
 * @param childIdentifier identifier of the requested child
 * @return wrapped child, or null when the data is not a container or has no such child
 */
@Override
public DataTreeCandidateNode getModifiedChild(final PathArgument childIdentifier) {
    // Leaf data cannot have modified children.
    if (!(data instanceof NormalizedNodeContainer)) {
        return null;
    }
    @SuppressWarnings({ "rawtypes", "unchecked" })
    final Optional<? extends NormalizedNode<?, ?>> child =
        ((NormalizedNodeContainer) data).getChild(childIdentifier);
    return child.map(NormalizedNodeDataTreeCandidateNode::new).orElse(null);
}
/**
 * Returns this node's children wrapped as candidate nodes; an empty collection
 * when the backing data is not a container.
 */
@Nonnull
@Override
public Collection<DataTreeCandidateNode> getChildNodes() {
    if (data instanceof NormalizedNodeContainer) {
        // Wrap each non-null child lazily; null entries are preserved as null.
        return Collections2.transform(((NormalizedNodeContainer<?, ?, ?>) data).getValue(),
            child -> child != null ? new NormalizedNodeDataTreeCandidateNode(child) : null);
    }
    return ImmutableList.of();
}
/**
 * Collects every augmentation carried by {@code data}, keyed by binding class.
 *
 * <p>First decodes explicit {@code AugmentationNode} children, then probes the
 * stream-registered augmentation prototypes by their YANG argument.
 *
 * @param data DOM container to extract augmentations from
 * @return map from augmentation binding class to deserialized augmentation
 */
@SuppressWarnings("unchecked")
Map<Class<? extends Augmentation<?>>, Augmentation<?>> getAllAugmentationsFrom(
        final NormalizedNodeContainer<?, PathArgument, NormalizedNode<?, ?>> data) {
    // Raw map: the codec API cannot relate key and value types statically.
    @SuppressWarnings("rawtypes")
    final Map map = new HashMap<>();
    for (final NormalizedNode<?, ?> childValue : data.getValue()) {
        if (childValue instanceof AugmentationNode) {
            final AugmentationNode augDomNode = (AugmentationNode) childValue;
            final DataContainerCodecPrototype<?> codecProto = yangAugmentationChild(augDomNode.getIdentifier());
            // Children without a registered codec are skipped.
            if (codecProto != null) {
                final DataContainerCodecContext<?, ?> codec = codecProto.get();
                map.put(codec.getBindingClass(), codec.deserializeObject(augDomNode));
            }
        }
    }
    // Second pass over stream-declared augmentations addressed by YANG argument.
    for (final DataContainerCodecPrototype<?> value : augmentations.byStream.values()) {
        final Optional<NormalizedNode<?, ?>> augData = data.getChild(value.getYangArg());
        if (augData.isPresent()) {
            map.put(value.getBindingClass(), value.get().deserializeObject(augData.get()));
        }
    }
    return map;
}
/**
 * Verifies a written value against this node's schema: the value must be of the
 * expected node class and a container; when child verification is enabled, each
 * child must map to a schema-derived apply operation and is verified recursively.
 *
 * @param writtenValue value being written
 * @param verifyChildren whether to recurse into children (combined with the
 *        {@code verifyChildrenStructure} flag of this operation)
 * @throws SchemaValidationFailedException if a child is not valid per the schema
 */
@Override
void verifyStructure(final NormalizedNode<?, ?> writtenValue, final boolean verifyChildren) {
    checkArgument(nodeClass.isInstance(writtenValue), "Node %s is not of type %s", writtenValue, nodeClass);
    checkArgument(writtenValue instanceof NormalizedNodeContainer);
    // Children are checked only when both the schema flag and the caller request it.
    if (verifyChildrenStructure && verifyChildren) {
        final NormalizedNodeContainer<?, ?, ?> container = (NormalizedNodeContainer<?, ?, ?>) writtenValue;
        for (final Object child : container.getValue()) {
            checkArgument(child instanceof NormalizedNode);
            final NormalizedNode<?, ?> castedChild = (NormalizedNode<?, ?>) child;
            final Optional<ModificationApplyOperation> childOp = getChild(castedChild.getIdentifier());
            if (childOp.isPresent()) {
                childOp.get().verifyStructure(castedChild, verifyChildren);
            } else {
                throw new SchemaValidationFailedException(String.format(
                    "Node %s is not a valid child of %s according to the schema.",
                    castedChild.getIdentifier(), container.getIdentifier()));
            }
        }
    }
}
/**
 * Materializes the child identified by {@code childId} as a {@link TreeNode}.
 *
 * @param data container to search
 * @param childId identifier of the child
 * @param version version assigned to the created tree node
 * @return the created tree node, or null when no such child exists
 */
static TreeNode getChildFromData(final NormalizedNodeContainer<?, PathArgument, NormalizedNode<?, ?>> data,
        final PathArgument childId, final Version version) {
    final Optional<NormalizedNode<?, ?>> child = data.getChild(childId);
    if (!child.isPresent()) {
        // Missing child is reported as null, matching the lookup contract.
        return null;
    }
    return TreeNodeFactory.createTreeNode(child.get(), version);
}
}