public Event produce() {
    // A flow mapping nested in a flow sequence has just finished:
    // emit its MappingEndEvent at the position of the upcoming token,
    // then hand parsing back to the sequence-entry state (not the first entry).
    Token tok = scanner.peekToken();
    state = new ParseFlowSequenceEntry(false);
    return new MappingEndEvent(tok.getStartMark(), tok.getEndMark());
} }
@Override public final void writeEndObject() throws IOException { if (!_writeContext.inObject()) { _reportError("Current context not Object but "+_writeContext.typeDesc()); } // just to make sure we don't "leak" type ids _typeId = null; _writeContext = _writeContext.getParent(); _emitter.emit(new MappingEndEvent(null, null)); }
@Override public final void writeEndObject() throws IOException { if (!_writeContext.inObject()) { _reportError("Current context not Object but "+_writeContext.typeDesc()); } // just to make sure we don't "leak" type ids _typeId = null; _writeContext = _writeContext.getParent(); _emitter.emit(new MappingEndEvent(null, null)); }
public Event produce() {
    // Block-mapping key state: either start the next key/value pair or
    // close the mapping.
    if (scanner.checkToken(Token.ID.Key)) {
        Token token = scanner.getToken();
        if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            // A real key node follows: queue the value state, then parse
            // the key node itself.
            states.push(new ParseBlockMappingValue());
            return parseBlockNodeOrIndentlessSequence();
        } else {
            // Key token immediately followed by Key/Value/BlockEnd means
            // the key itself is empty — substitute an empty scalar.
            state = new ParseBlockMappingValue();
            return processEmptyScalar(token.getEndMark());
        }
    }
    if (!scanner.checkToken(Token.ID.BlockEnd)) {
        // Neither a key nor the mapping terminator: syntax error, reported
        // against the mark saved when the mapping was opened.
        Token token = scanner.peekToken();
        throw new ParserException("while parsing a block mapping", marks.pop(),
                "expected <block end>, but found '" + token.getTokenId() + "'",
                token.getStartMark());
    }
    // Consume the BlockEnd token, emit the mapping end with its marks,
    // restore the previous parser state, and drop the saved start mark.
    Token token = scanner.getToken();
    Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    marks.pop();
    return event;
} }
public SldTransformContext endMapping() throws IOException {
    // Close the currently open YAML mapping; emitted events carry no marks.
    MappingEndEvent end = new MappingEndEvent(null, null);
    yaml.emit(end);
    return this;
}
// NOTE(review): fragment — enclosing method not visible here. Serializes a
// value node, then closes the surrounding mapping with an end event
// (null marks, as usual for emission). Presumably the tail of a
// mapping-serialization loop — confirm against the enclosing method.
serializeNode(value, mnode); this.emitter.emit(new MappingEndEvent(null, null));
// NOTE(review): fragment — enclosing method not visible here. Builds the
// mapping-end event from the consumed token's marks, restores the previous
// parser state, and pops the top saved mark (presumably the one pushed when
// the mapping was opened — confirm against the enclosing parser).
Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark()); state = states.pop(); marks.pop();
public SldTransformContext endMapping() throws IOException {
    // Close the currently open YAML mapping; emitted events carry no marks.
    MappingEndEvent end = new MappingEndEvent(null, null);
    yaml.emit(end);
    return this;
}
@JRubyMethod
public IRubyObject end_mapping(ThreadContext context) {
    // Emit a mapping-end event using the shared placeholder mark, then
    // return self so Ruby callers can chain.
    emit(context, new MappingEndEvent(NULL_MARK, NULL_MARK));
    return this;
}
@JRubyMethod
public IRubyObject end_mapping(ThreadContext context) {
    // Emit a mapping-end event using the shared placeholder mark, then
    // return self so Ruby callers can chain.
    emit(context, new MappingEndEvent(NULL_MARK, NULL_MARK));
    return this;
}
public Event produce() {
    // A flow mapping nested in a flow sequence has just finished:
    // emit its MappingEndEvent at the position of the upcoming token,
    // then hand parsing back to the sequence-entry state (not the first entry).
    Token tok = scanner.peekToken();
    state = new ParseFlowSequenceEntry(false);
    return new MappingEndEvent(tok.getStartMark(), tok.getEndMark());
} }
public Event produce() {
    // A flow mapping nested in a flow sequence has just finished:
    // emit its MappingEndEvent at the position of the upcoming token,
    // then hand parsing back to the sequence-entry state (not the first entry).
    Token tok = scanner.peekToken();
    state = new ParseFlowSequenceEntry(false);
    return new MappingEndEvent(tok.getStartMark(), tok.getEndMark());
} }
public Event produce() {
    // A flow mapping nested in a flow sequence has just finished:
    // emit its MappingEndEvent at the position of the upcoming token,
    // then hand parsing back to the sequence-entry state (not the first entry).
    Token tok = scanner.peekToken();
    state = new ParseFlowSequenceEntry(false);
    return new MappingEndEvent(tok.getStartMark(), tok.getEndMark());
} }
@Override public final void writeEndObject() throws IOException { if (!_outputContext.inObject()) { _reportError("Current context not Object but "+_outputContext.typeDesc()); } // just to make sure we don't "leak" type ids _typeId = null; _outputContext = _outputContext.getParent(); _emitter.emit(new MappingEndEvent(null, null)); }
@Override public final void writeEndObject() throws IOException { if (!_writeContext.inObject()) { _reportError("Current context not Object but "+_writeContext.typeDesc()); } // just to make sure we don't "leak" type ids _typeId = null; _writeContext = _writeContext.getParent(); _emitter.emit(new MappingEndEvent(null, null)); }
public Event produce() {
    // Block-mapping key state: either start the next key/value pair or
    // close the mapping.
    if (scanner.checkToken(Token.ID.Key)) {
        Token token = scanner.getToken();
        if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            // A real key node follows: queue the value state, then parse
            // the key node itself.
            states.push(new ParseBlockMappingValue());
            return parseBlockNodeOrIndentlessSequence();
        } else {
            // Key token immediately followed by Key/Value/BlockEnd means
            // the key itself is empty — substitute an empty scalar.
            state = new ParseBlockMappingValue();
            return processEmptyScalar(token.getEndMark());
        }
    }
    if (!scanner.checkToken(Token.ID.BlockEnd)) {
        // Neither a key nor the mapping terminator: syntax error, reported
        // against the mark saved when the mapping was opened.
        Token token = scanner.peekToken();
        throw new ParserException("while parsing a block mapping", marks.pop(),
                "expected <block end>, but found " + token.getTokenId(),
                token.getStartMark());
    }
    // Consume the BlockEnd token, emit the mapping end with its marks,
    // restore the previous parser state, and drop the saved start mark.
    Token token = scanner.getToken();
    Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    marks.pop();
    return event;
} }
public Event produce() {
    // Block-mapping key state: either start the next key/value pair or
    // close the mapping.
    if (scanner.checkToken(Token.ID.Key)) {
        Token token = scanner.getToken();
        if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            // A real key node follows: queue the value state, then parse
            // the key node itself.
            states.push(new ParseBlockMappingValue());
            return parseBlockNodeOrIndentlessSequence();
        } else {
            // Key token immediately followed by Key/Value/BlockEnd means
            // the key itself is empty — substitute an empty scalar.
            state = new ParseBlockMappingValue();
            return processEmptyScalar(token.getEndMark());
        }
    }
    if (!scanner.checkToken(Token.ID.BlockEnd)) {
        // Neither a key nor the mapping terminator: syntax error, reported
        // against the mark saved when the mapping was opened.
        Token token = scanner.peekToken();
        throw new ParserException("while parsing a block mapping", marks.pop(),
                "expected <block end>, but found " + token.getTokenId(),
                token.getStartMark());
    }
    // Consume the BlockEnd token, emit the mapping end with its marks,
    // restore the previous parser state, and drop the saved start mark.
    Token token = scanner.getToken();
    Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    marks.pop();
    return event;
} }
public Event produce() {
    // Block-mapping key state: either start the next key/value pair or
    // close the mapping.
    if (scanner.checkToken(Token.ID.Key)) {
        Token token = scanner.getToken();
        if (!scanner.checkToken(Token.ID.Key, Token.ID.Value, Token.ID.BlockEnd)) {
            // A real key node follows: queue the value state, then parse
            // the key node itself.
            states.push(new ParseBlockMappingValue());
            return parseBlockNodeOrIndentlessSequence();
        } else {
            // Key token immediately followed by Key/Value/BlockEnd means
            // the key itself is empty — substitute an empty scalar.
            state = new ParseBlockMappingValue();
            return processEmptyScalar(token.getEndMark());
        }
    }
    if (!scanner.checkToken(Token.ID.BlockEnd)) {
        // Neither a key nor the mapping terminator: syntax error, reported
        // against the mark saved when the mapping was opened.
        Token token = scanner.peekToken();
        throw new ParserException("while parsing a block mapping", marks.pop(),
                "expected <block end>, but found " + token.getTokenId(),
                token.getStartMark());
    }
    // Consume the BlockEnd token, emit the mapping end with its marks,
    // restore the previous parser state, and drop the saved start mark.
    Token token = scanner.getToken();
    Event event = new MappingEndEvent(token.getStartMark(), token.getEndMark());
    state = states.pop();
    marks.pop();
    return event;
} }
// NOTE(review): fragment — enclosing method not visible here. Serializes a
// value node, then closes the surrounding mapping with an end event
// (null marks, as usual for emission). Presumably the tail of a
// mapping-serialization loop — confirm against the enclosing method.
serializeNode(value, mnode); this.emitter.emit(new MappingEndEvent(null, null));
// NOTE(review): fragment — enclosing method not visible here. Serializes a
// value node, then closes the surrounding mapping with an end event
// (null marks, as usual for emission). Presumably the tail of a
// mapping-serialization loop — confirm against the enclosing method.
serializeNode(value, mnode); this.emitter.emit(new MappingEndEvent(null, null));