/**
 * Serializes every object produced by {@code data} to {@code output},
 * one YAML document per object, under the given root tag.
 */
private void dumpAll(Iterator<? extends Object> data, Writer output, Tag rootTag) {
    // A fresh emitter/serializer pair per call so each dump is a complete stream.
    Emitter emitter = new Emitter(output, dumperOptions);
    Serializer serializer = new Serializer(emitter, resolver, dumperOptions, rootTag);
    try {
        serializer.open();
        while (data.hasNext()) {
            // Represent the next object as a node graph and write it out.
            serializer.serialize(representer.represent(data.next()));
        }
        serializer.close();
    } catch (IOException e) {
        // Surface I/O failures as the library's unchecked exception type.
        throw new YAMLException(e);
    }
}
/**
 * Creates a generator that writes YAML to {@code out}, immediately emitting the
 * stream-start and document-start events so content writes can follow directly.
 *
 * @param version YAML version to advertise in the document-start event; may be null
 * @throws IOException if the emitter fails while writing the opening events
 */
public YAMLGenerator(IOContext ctxt, int jsonFeatures, int yamlFeatures, ObjectCodec codec,
        Writer out, org.yaml.snakeyaml.DumperOptions.Version version) throws IOException {
    super(jsonFeatures, codec);
    _ioContext = ctxt;
    _formatFeatures = yamlFeatures;
    _writer = out;
    // Dumper options are derived from both feature bitmasks plus the YAML version.
    _outputOptions = buildDumperOptions(jsonFeatures, yamlFeatures, version);
    _emitter = new Emitter(_writer, _outputOptions);
    // should we start output now, or try to defer?
    _emitter.emit(new StreamStartEvent(null, null));
    Map<String,String> noTags = Collections.emptyMap();
    // The "---" marker is only written when the corresponding feature is enabled.
    boolean startMarker = Feature.WRITE_DOC_START_MARKER.enabledIn(yamlFeatures);
    _emitter.emit(new DocumentStartEvent(null, null, startMarker, version,
            // for 1.10 was: ((version == null) ? null : version.getArray()),
            noTags));
}
/**
 * Creates a generator writing YAML to {@code out}. The stream-start and
 * document-start events are emitted eagerly, so the generator is ready for
 * content immediately after construction.
 *
 * @throws IOException if emitting the opening events fails
 */
public YAMLGenerator(IOContext ctxt, int jsonFeatures, int yamlFeatures, ObjectCodec codec,
        Writer out, org.yaml.snakeyaml.DumperOptions.Version version) throws IOException {
    super(jsonFeatures, codec);
    _ioContext = ctxt;
    _formatFeatures = yamlFeatures;
    _writer = out;
    _outputOptions = buildDumperOptions(jsonFeatures, yamlFeatures, version);
    _emitter = new Emitter(_writer, _outputOptions);
    // should we start output now, or try to defer?
    _emitter.emit(new StreamStartEvent(null, null));
    // No custom tag directives for the document.
    Map<String,String> noTags = Collections.emptyMap();
    boolean startMarker = Feature.WRITE_DOC_START_MARKER.enabledIn(yamlFeatures);
    _emitter.emit(new DocumentStartEvent(null, null, startMarker, version,
            // for 1.10 was: ((version == null) ? null : version.getArray()),
            noTags));
}
/**
 * Builds a transform context that emits block-style YAML to {@code output}.
 */
public SldTransformContext(Writer output) {
    this.output = output;
    // Block style produces indented YAML rather than inline flow syntax.
    DumperOptions options = new DumperOptions();
    options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
    this.yaml = new Emitter(output, options);
    this.handlers = new ArrayDeque<SldTransformHandler>();
}
public void write(Writer output, Map<String, Object> o, Model model) throws IOException { //TODO improve SnakeYAML API (A. Somov) DumperOptions dumperOptions = new DumperOptions(); dumperOptions.setIndent(2); dumperOptions.setWidth(80); Serializer serializer = new Serializer(new Emitter(output, dumperOptions), new ModelResolver(), dumperOptions, Tag.MAP); Representer representer = new ModelRepresenter(); try { serializer.open(); Node node = representer.represent(model); serializer.serialize(node); serializer.close(); } catch (IOException e) { throw new YAMLException(e); } } }
var request = require('request'); var cheerio = require('cheerio'); var Emitter = require('events').EventEmitter; var extractEmitter = new Emitter(); extractEmitter.on('extracted', function(extractedGames){ console.log(extractedGames); }); var Extractor = function(url) { var games = []; request(url, function (error, response, html) { if (!error && response.statusCode == 200) { var $ = cheerio.load(html); $('tr.game').each(function(i, v){ var game = { /* many attributes */ }; games.push(game); }); extractEmitter.emit('extracted', games); } }); this.extractedGames = games; }; module.exports = function(url) { return new Extractor(url); };
/**
 * Creates a context whose YAML emitter writes block-style output to {@code output}.
 */
public SldTransformContext(Writer output) {
    this.output = output;
    // Prefer indented block layout over inline flow layout.
    DumperOptions emitterOptions = new DumperOptions();
    emitterOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
    this.yaml = new Emitter(output, emitterOptions);
    this.handlers = new ArrayDeque<SldTransformHandler>();
}
/**
 * Serializes every object produced by {@code data} into {@code output} as a
 * sequence of YAML documents, using {@code rootTag} as the explicit root tag.
 *
 * @throws YAMLException wrapping any I/O failure from the underlying writer
 */
private void dumpAll(Iterator<? extends Object> data, Writer output, Tag rootTag) {
    Serializer serializer = new Serializer(new Emitter(output, dumperOptions), resolver, dumperOptions, rootTag);
    try {
        serializer.open();
        while (data.hasNext()) {
            // Turn each object into a node graph, then write it as one document.
            Node node = representer.represent(data.next());
            serializer.serialize(node);
        }
        serializer.close();
    } catch (IOException e) {
        // Convert the checked I/O failure into the library's unchecked exception.
        throw new YAMLException(e);
    }
}
/**
 * Dumps each object from the iterator to {@code output} as its own YAML
 * document under the given root tag.
 *
 * @throws YAMLException if writing to the underlying writer fails
 */
private void dumpAll(Iterator<? extends Object> data, Writer output, Tag rootTag) {
    Serializer serializer = new Serializer(new Emitter(output, dumperOptions), resolver, dumperOptions, rootTag);
    try {
        serializer.open();
        while (data.hasNext()) {
            Node node = representer.represent(data.next());
            serializer.serialize(node);
        }
        // close() ends the stream; must follow all serialize() calls.
        serializer.close();
    } catch (IOException e) {
        throw new YAMLException(e);
    }
}
/**
 * Writes all objects yielded by {@code data} to {@code output}, one YAML
 * document each, tagging the root with {@code rootTag}.
 *
 * @throws YAMLException wrapping I/O errors from the writer
 */
private void dumpAll(Iterator<? extends Object> data, Writer output, Tag rootTag) {
    Serializer serializer = new Serializer(new Emitter(output, dumperOptions), resolver, dumperOptions, rootTag);
    try {
        // open() emits the stream-start before any documents.
        serializer.open();
        while (data.hasNext()) {
            Node node = representer.represent(data.next());
            serializer.serialize(node);
        }
        serializer.close();
    } catch (IOException e) {
        throw new YAMLException(e);
    }
}
Writer writer = new OutputStreamWriter(os)) { Yaml yaml = new Yaml(); Emitter emitter = new Emitter(writer, new DumperOptions()); boolean rewrite = false; for (Event event : yaml.parse(reader)) {
/**
 * Serializes an already-built YAML node tree directly to {@code output},
 * bypassing the representer stage.
 *
 * @param rootNode root of the node tree; must not be null
 * @throws IllegalArgumentException if {@code rootNode} is null
 * @throws YAMLException wrapping any I/O failure
 */
public static void dumpFromAst(Node rootNode, Writer output) {
    if (rootNode == null) {
        throw new IllegalArgumentException("rootNode is null");
    }
    DumperOptions dumperOptions = new DumperOptions();
    // NOTE(review): the root tag argument is null here — presumably the
    // serializer falls back to the node's own tag; confirm against the
    // Serializer contract before relying on the emitted tag.
    Serializer serializer = new Serializer(new Emitter(output, dumperOptions), new Resolver(), dumperOptions, null);
    try {
        serializer.open();
        serializer.serialize(rootNode);
        serializer.close();
    } catch (IOException e) {
        throw new YAMLException(e);
    }
}
/**
 * Lazily creates the SnakeYAML emitter for this Psych emitter object,
 * writing to the wrapped Ruby IO with the requested encoding.
 *
 * @param _encoding ordinal into PsychLibrary.YAMLEncoding selecting the output encoding
 * @throws RuntimeError (Ruby) if the emitter was already initialized
 */
private void initEmitter(ThreadContext context, IRubyObject _encoding) {
    // Guard against double initialization from the Ruby side.
    if (emitter != null) throw context.runtime.newRuntimeError("already initialized emitter");

    // Map the Ruby-supplied ordinal to a JRuby Encoding, then to a JDK Charset.
    Encoding encoding = PsychLibrary.YAMLEncoding.values()[(int)_encoding.convertToInteger().getLongValue()].encoding;
    Charset charset = context.runtime.getEncodingService().charsetForEncoding(encoding);

    // Bridge the Ruby IO into a charset-aware Writer for SnakeYAML's Emitter.
    emitter = new Emitter(new OutputStreamWriter(new IOOutputStream(io, encoding), charset), options);
}
/**
 * Initializes the underlying SnakeYAML emitter exactly once, targeting the
 * wrapped Ruby IO in the encoding selected by {@code _encoding}.
 *
 * @param _encoding index into PsychLibrary.YAMLEncoding choosing the encoding
 */
private void initEmitter(ThreadContext context, IRubyObject _encoding) {
    // A second call is a programming error on the Ruby side.
    if (emitter != null) throw context.runtime.newRuntimeError("already initialized emitter");

    // Resolve ordinal -> JRuby Encoding -> JDK Charset for the output writer.
    Encoding encoding = PsychLibrary.YAMLEncoding.values()[(int)_encoding.convertToInteger().getLongValue()].encoding;
    Charset charset = context.runtime.getEncodingService().charsetForEncoding(encoding);

    emitter = new Emitter(new OutputStreamWriter(new IOOutputStream(io, encoding), charset), options);
}
/**
 * Constructs a YAML generator over {@code out}. Output begins immediately:
 * the stream-start and document-start events are emitted in the constructor.
 *
 * @throws IOException if the emitter fails writing the opening events
 */
public YAMLGenerator(IOContext ctxt, int jsonFeatures, int yamlFeatures, ObjectCodec codec,
        Writer out, org.yaml.snakeyaml.DumperOptions.Version version) throws IOException {
    super(jsonFeatures, codec);
    _ioContext = ctxt;
    _formatFeatures = yamlFeatures;
    _writer = out;
    _outputOptions = buildDumperOptions(jsonFeatures, yamlFeatures, version);
    _emitter = new Emitter(_writer, _outputOptions);
    // should we start output now, or try to defer?
    _emitter.emit(new StreamStartEvent(null, null));
    Map<String,String> noTags = Collections.emptyMap();
    // "---" document marker is feature-controlled.
    boolean startMarker = Feature.WRITE_DOC_START_MARKER.enabledIn(yamlFeatures);
    _emitter.emit(new DocumentStartEvent(null, null, startMarker, version,
            // for 1.10 was: ((version == null) ? null : version.getArray()),
            noTags));
}
/**
 * Serializes an already-built YAML node tree directly to {@code output},
 * using the root node's own tag as the explicit root tag.
 *
 * @param rootNode root of the node tree; must not be null
 * @throws IllegalArgumentException if {@code rootNode} is null
 * @throws YAMLException wrapping any I/O failure
 */
public static void dumpFromAst(Node rootNode, Writer output) {
    if (rootNode == null) {
        throw new IllegalArgumentException("rootNode is null");
    }
    DumperOptions dumperOptions = new DumperOptions();
    //HYBRIS start - replaced null param with rootTag
    Tag rootTag = rootNode.getTag();
    Serializer serializer = new Serializer(new Emitter(output, dumperOptions), new Resolver(), dumperOptions, rootTag);
    //HYBRIS end
    try {
        serializer.open();
        serializer.serialize(rootNode);
        serializer.close();
    } catch (IOException e) {
        throw new YAMLException(e);
    }
}
/**
 * Serializes a single YAML node tree to {@code writer}, choosing the indent
 * sizes from the {@code explicitSequencing} flag.
 *
 * @throws IOException if the serializer fails writing to {@code writer}
 */
public void serializeNode(final Writer writer, final Node node) throws IOException {
    final DumperOptions options = new DumperOptions();
    // Explicit sequencing gets deeper indents so sequence indicators line up.
    options.setIndicatorIndent(explicitSequencing ? 2 : 0);
    options.setIndent(explicitSequencing ? 4 : 2);
    final Serializer serializer =
            new Serializer(new Emitter(writer, options), new Resolver(), options, null);
    serializer.open();
    serializer.serialize(node);
    serializer.close();
}
/**
 * Creates a generator writing YAML to {@code out}. The stream-start and
 * document-start events are emitted eagerly in the constructor.
 *
 * @param version YAML version for the document-start event; may be null
 * @throws IOException if emitting the opening events fails
 */
public YAMLGenerator(ObjectWriteContext writeContext, IOContext ioCtxt, int streamWriteFeatures, int yamlFeatures,
        Writer out, org.yaml.snakeyaml.DumperOptions.Version version) throws IOException {
    super(writeContext, streamWriteFeatures);
    _ioContext = ioCtxt;
    _formatWriteFeatures = yamlFeatures;
    _writer = out;
    _outputOptions = buildDumperOptions(streamWriteFeatures, yamlFeatures, version);
    _emitter = new Emitter(_writer, _outputOptions);
    // should we start output now, or try to defer?
    _emitter.emit(new StreamStartEvent(null, null));
    // No custom tag directives.
    Map<String,String> noTags = Collections.emptyMap();
    boolean startMarker = Feature.WRITE_DOC_START_MARKER.enabledIn(yamlFeatures);
    _emitter.emit(new DocumentStartEvent(null, null, startMarker, version,
            // for 1.10 was: ((version == null) ? null : version.getArray()),
            noTags));
}
/**
 * Writes {@code model} to {@code output} as a YAML mapping with 2-space
 * indentation and an 80-column width.
 * The {@code o} parameter is unused here; it is kept for interface compatibility.
 *
 * @throws YAMLException wrapping any I/O failure during serialization
 */
public void write(Writer output, Map<String, Object> o, Model model) throws IOException {
    //TODO improve SnakeYAML API (A. Somov)
    DumperOptions dumperOptions = new DumperOptions();
    dumperOptions.setIndent(2);
    dumperOptions.setWidth(80);
    Serializer serializer = new Serializer(new Emitter(output, dumperOptions), new ModelResolver(), dumperOptions, Tag.MAP);
    Representer representer = new ModelRepresenter();
    try {
        serializer.open();
        // Represent the model as a node graph, then emit it as one document.
        Node node = representer.represent(model);
        serializer.serialize(node);
        serializer.close();
    } catch (IOException e) {
        throw new YAMLException(e);
    }
}
}