@Override public final void writeEndArray() throws IOException {
    // Reject a mismatched close: the innermost open write scope must be an array.
    // NOTE(review): the code after this check assumes _reportError always throws.
    if (!_writeContext.inArray()) {
        _reportError("Current context not Array but "+_writeContext.typeDesc());
    }
    // just to make sure we don't "leak" type ids
    _typeId = null;
    // Pop back to the enclosing context, then tell the YAML emitter the
    // sequence is finished (null marks: we are generating, not parsing).
    _writeContext = _writeContext.getParent();
    _emitter.emit(new SequenceEndEvent(null, null));
}
@Override
public void endSequence(SequenceEndEvent evt, YsldValidateContext context) {
    // A sequence end is only legal while this validator is mid-sequence.
    boolean started = (state == State.STARTED);
    if (!started) {
        context.error("Unexpected End of Sequence", evt.getStartMark());
    }
    // Whether or not the end was expected, this validator is finished:
    // record the terminal state and return control to the parent validator.
    state = State.DONE;
    context.pop();
}
@Override public final void writeEndArray() throws IOException {
    // Reject a mismatched close: the innermost open write scope must be an array.
    // NOTE(review): the code after this check assumes _reportError always throws.
    if (!_writeContext.inArray()) {
        _reportError("Current context not Array but "+_writeContext.typeDesc());
    }
    // just to make sure we don't "leak" type ids
    _typeId = null;
    // Pop back to the enclosing context, then tell the YAML emitter the
    // sequence is finished (null marks: we are generating, not parsing).
    _writeContext = _writeContext.getParent();
    _emitter.emit(new SequenceEndEvent(null, null));
}
@Override
public void endSequence(SequenceEndEvent evt, YsldValidateContext context) {
    if (state == State.STARTED) {
        // Sequence ended while active: check the tuple arity against the
        // number of configured sub-validators.
        int expected = getSubValidators().size();
        if (valuesValidated != expected) {
            context.error(
                    String.format(
                            "Expected tuple of size %d but was %d", expected, valuesValidated),
                    evt.getStartMark());
        }
    } else {
        // The sequence closed before this validator was ever started.
        context.error("Unexpected End of Sequence", evt.getStartMark());
    }
    // In every case the validator is finished; pop back to the parent.
    state = State.DONE;
    context.pop();
}
public Event produce() {
    // Indentless sequence: "-" entries appearing at the same indentation
    // level as the enclosing mapping key.
    if (scanner.checkToken(Token.ID.BlockEntry)) {
        Token token = scanner.getToken();
        // If the next token starts a real node, descend into it and return
        // here afterwards for the following entry; otherwise the entry is
        // empty and is represented by an empty scalar.
        if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            states.push(new ParseIndentlessSequenceEntry());
            return new ParseBlockNode().produce();
        } else {
            state = new ParseIndentlessSequenceEntry();
            return processEmptyScalar(token.getEndMark());
        }
    }
    // No more "-" entries: close the sequence. The current token is only
    // peeked (not consumed) because it belongs to the enclosing construct.
    Token token = scanner.peekToken();
    Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    return event;
}
}
@Override
public void endSequence(SequenceEndEvent evt, YsldValidateContext context) {
    // A sequence end is only legal while this validator is mid-sequence.
    boolean started = (state == State.STARTED);
    if (!started) {
        context.error("Unexpected End of Sequence", evt.getStartMark());
    }
    // Whether or not the end was expected, this validator is finished:
    // record the terminal state and return control to the parent validator.
    state = State.DONE;
    context.pop();
}
public SldTransformContext endSequence() throws IOException {
    // Close the currently open YAML sequence. Marks are null because we
    // are emitting output, so source positions are irrelevant.
    SequenceEndEvent end = new SequenceEndEvent(null, null);
    yaml.emit(end);
    // Return this context so calls can be chained fluently.
    return this;
}
@Override
public void endSequence(SequenceEndEvent evt, YsldValidateContext context) {
    if (state == State.STARTED) {
        // Sequence ended while active: check the tuple arity against the
        // number of configured sub-validators.
        int expected = getSubValidators().size();
        if (valuesValidated != expected) {
            context.error(
                    String.format(
                            "Expected tuple of size %d but was %d", expected, valuesValidated),
                    evt.getStartMark());
        }
    } else {
        // The sequence closed before this validator was ever started.
        context.error("Unexpected End of Sequence", evt.getStartMark());
    }
    // In every case the validator is finished; pop back to the parent.
    state = State.DONE;
    context.pop();
}
public Event produce() {
    // Block sequence entry: each "-" token introduces one item.
    if (scanner.checkToken(Token.ID.BlockEntry)) {
        BlockEntryToken token = (BlockEntryToken) scanner.getToken();
        // A following "-" or block-end means this entry is empty; anything
        // else is parsed as a full block node, returning here afterwards.
        if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.BlockEnd)) {
            states.push(new ParseBlockSequenceEntry());
            return new ParseBlockNode().produce();
        } else {
            state = new ParseBlockSequenceEntry();
            return processEmptyScalar(token.getEndMark());
        }
    }
    // The sequence must be terminated by a BlockEnd token; anything else is
    // a syntax error reported against the mark pushed at sequence start.
    if (!scanner.checkToken(Token.ID.BlockEnd)) {
        Token token = scanner.peekToken();
        throw new ParserException("while parsing a block collection", marks.pop(), "expected <block end>, but found '" + token.getTokenId() + "'", token.getStartMark());
    }
    // Consume the BlockEnd and emit the matching SequenceEndEvent.
    Token token = scanner.getToken();
    Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    marks.pop();
    return event;
}
}
public Event produce() {
    // Flow sequence: "[a, b, ...]". `first` is true only for the entry
    // immediately after '['; later entries require a ',' separator.
    if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
        if (!first) {
            if (scanner.checkToken(Token.ID.FlowEntry)) {
                scanner.getToken();
            } else {
                Token token = scanner.peekToken();
                throw new ParserException("while parsing a flow sequence", marks.pop(), "expected ',' or ']', but got " + token.getTokenId(), token.getStartMark());
            }
        }
        if (scanner.checkToken(Token.ID.Key)) {
            // A key token inside a flow sequence starts an implicit
            // single-pair mapping entry (YAML compact notation).
            Token token = scanner.peekToken();
            Event event = new MappingStartEvent(null, null, true, token.getStartMark(), token.getEndMark(), DumperOptions.FlowStyle.FLOW);
            state = new ParseFlowSequenceEntryMappingKey();
            return event;
        } else if (!scanner.checkToken(Token.ID.FlowSequenceEnd)) {
            // Ordinary entry: parse a flow node, then come back (not first).
            states.push(new ParseFlowSequenceEntry(false));
            return parseFlowNode();
        }
    }
    // ']' reached: consume it and close the sequence.
    Token token = scanner.getToken();
    Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    marks.pop();
    return event;
}
}
// Sequence case: serialize the child item, then emit the sequence-end
// event to close the open SequenceStartEvent. NOTE(review): fragment of a
// switch statement; the enclosing switch/case is outside this view.
serializeNode(item, node); this.emitter.emit(new SequenceEndEvent(null, null)); break; default:// instance of MappingNode
public SldTransformContext endSequence() throws IOException {
    // Close the currently open YAML sequence. Marks are null because we
    // are emitting output, so source positions are irrelevant.
    SequenceEndEvent end = new SequenceEndEvent(null, null);
    yaml.emit(end);
    // Return this context so calls can be chained fluently.
    return this;
}
@JRubyMethod
public IRubyObject end_sequence(ThreadContext context) {
    // Terminate the open YAML sequence; NULL_MARK placeholders are used
    // because the emitter does not need source positions.
    emit(context, new SequenceEndEvent(NULL_MARK, NULL_MARK));
    return this;
}
@JRubyMethod
public IRubyObject end_sequence(ThreadContext context) {
    // Terminate the open YAML sequence; NULL_MARK placeholders are used
    // because the emitter does not need source positions.
    emit(context, new SequenceEndEvent(NULL_MARK, NULL_MARK));
    return this;
}
@Override public final void writeEndArray() throws IOException {
    // Reject a mismatched close: the innermost open output scope must be an array.
    // NOTE(review): the code after this check assumes _reportError always throws.
    if (!_outputContext.inArray()) {
        _reportError("Current context not Array but "+_outputContext.typeDesc());
    }
    // just to make sure we don't "leak" type ids
    _typeId = null;
    // Pop back to the enclosing context, then tell the YAML emitter the
    // sequence is finished (null marks: we are generating, not parsing).
    _outputContext = _outputContext.getParent();
    _emitter.emit(new SequenceEndEvent(null, null));
}
@Override public final void writeEndArray() throws IOException {
    // Reject a mismatched close: the innermost open write scope must be an array.
    // NOTE(review): the code after this check assumes _reportError always throws.
    if (!_writeContext.inArray()) {
        _reportError("Current context not Array but "+_writeContext.typeDesc());
    }
    // just to make sure we don't "leak" type ids
    _typeId = null;
    // Pop back to the enclosing context, then tell the YAML emitter the
    // sequence is finished (null marks: we are generating, not parsing).
    _writeContext = _writeContext.getParent();
    _emitter.emit(new SequenceEndEvent(null, null));
}
public Event produce() {
    // Indentless sequence: "-" entries appearing at the same indentation
    // level as the enclosing mapping key.
    if (scanner.checkToken(Token.ID.BlockEntry)) {
        Token token = scanner.getToken();
        // If the next token starts a real node, descend into it and return
        // here afterwards for the following entry; otherwise the entry is
        // empty and is represented by an empty scalar.
        if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            states.push(new ParseIndentlessSequenceEntry());
            return new ParseBlockNode().produce();
        } else {
            state = new ParseIndentlessSequenceEntry();
            return processEmptyScalar(token.getEndMark());
        }
    }
    // No more "-" entries: close the sequence. The current token is only
    // peeked (not consumed) because it belongs to the enclosing construct.
    Token token = scanner.peekToken();
    Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    return event;
}
}
public Event produce() {
    // Indentless sequence: "-" entries appearing at the same indentation
    // level as the enclosing mapping key.
    if (scanner.checkToken(Token.ID.BlockEntry)) {
        Token token = scanner.getToken();
        // If the next token starts a real node, descend into it and return
        // here afterwards for the following entry; otherwise the entry is
        // empty and is represented by an empty scalar.
        if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            states.push(new ParseIndentlessSequenceEntry());
            return new ParseBlockNode().produce();
        } else {
            state = new ParseIndentlessSequenceEntry();
            return processEmptyScalar(token.getEndMark());
        }
    }
    // No more "-" entries: close the sequence. The current token is only
    // peeked (not consumed) because it belongs to the enclosing construct.
    Token token = scanner.peekToken();
    Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    return event;
}
}
public Event produce() {
    // Indentless sequence: "-" entries appearing at the same indentation
    // level as the enclosing mapping key.
    if (scanner.checkToken(Token.ID.BlockEntry)) {
        Token token = scanner.getToken();
        // If the next token starts a real node, descend into it and return
        // here afterwards for the following entry; otherwise the entry is
        // empty and is represented by an empty scalar.
        if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            states.push(new ParseIndentlessSequenceEntry());
            return new ParseBlockNode().produce();
        } else {
            state = new ParseIndentlessSequenceEntry();
            return processEmptyScalar(token.getEndMark());
        }
    }
    // No more "-" entries: close the sequence. The current token is only
    // peeked (not consumed) because it belongs to the enclosing construct.
    Token token = scanner.peekToken();
    Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    return event;
}
}
public Event produce() {
    // Block sequence entry: each "-" token introduces one item.
    if (scanner.checkToken(Token.ID.BlockEntry)) {
        BlockEntryToken token = (BlockEntryToken) scanner.getToken();
        // A following "-" or block-end means this entry is empty; anything
        // else is parsed as a full block node, returning here afterwards.
        if (!scanner.checkToken(Token.ID.BlockEntry, Token.ID.BlockEnd)) {
            states.push(new ParseBlockSequenceEntry());
            return new ParseBlockNode().produce();
        } else {
            state = new ParseBlockSequenceEntry();
            return processEmptyScalar(token.getEndMark());
        }
    }
    // The sequence must be terminated by a BlockEnd token; anything else is
    // a syntax error reported against the mark pushed at sequence start.
    if (!scanner.checkToken(Token.ID.BlockEnd)) {
        Token token = scanner.peekToken();
        throw new ParserException("while parsing a block collection", marks.pop(), "expected <block end>, but found " + token.getTokenId(), token.getStartMark());
    }
    // Consume the BlockEnd and emit the matching SequenceEndEvent.
    Token token = scanner.getToken();
    Event event = new SequenceEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    marks.pop();
    return event;
}
}