/**
 * Factory hook: the mapper instance produced here is a CSV-capable {@code CsvMapper}.
 *
 * @return a freshly constructed {@link CsvMapper}
 */
@Override
protected ObjectMapper createInstance() {
    return new CsvMapper();
}
/**
 * Wraps the given output stream for CSV export.
 *
 * <p>Fix: in the collapsed source the {@code //} comment swallowed the
 * {@code mapper.configure(...)} call (and the closing brace), so dates were never
 * configured as non-timestamps. The call is restored on its own line.
 *
 * @param delegate the underlying stream the CSV bytes are written to
 */
public CSVExportOutputStream(final OutputStream delegate) {
    this.delegate = delegate;
    // To be mysqlimport friendly with datetime type
    mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
}
@Override public void newTable(final String tableName, final List<ColumnInfo> columnsForTable) { currentTableName = tableName; final CsvSchema.Builder builder = CsvSchema.builder(); // Remove quoting of character which applies (somewhat arbitrarily, Tatu???) for string whose length is greater than MAX_QUOTE_CHECK = 24 -- See CVSWriter#_mayNeedQuotes builder.disableQuoteChar(); builder.setColumnSeparator('|'); for (final ColumnInfo columnInfo : columnsForTable) { builder.addColumn(columnInfo.getColumnName(), getColumnTypeFromSqlType(columnInfo.getDataType())); } currentCSVSchema = builder.build(); writer = mapper.writer(currentCSVSchema); shouldWriteHeader = true; }
// NOTE(review): fragment -- the while-loop body and the enclosing main() are not closed in
// the visible chunk; the remainder presumably follows elsewhere.
// Reads a tab-separated GeoNames-style dump row by row, each row materialized as a Map.
public static void main(String[] args) throws IOException, ScriptException, NoSuchMethodException {
    CsvMapper mapper = new CsvMapper();
    CsvSchema.Builder schema = new CsvSchema.Builder();
    // One column per field of the dump, declared in file order. Only the id and the
    // coordinates are treated as numbers; everything else stays a string.
    schema.addColumn("geonameid", CsvSchema.ColumnType.NUMBER);
    schema.addColumn("name", CsvSchema.ColumnType.STRING);
    schema.addColumn("asciiname", CsvSchema.ColumnType.STRING);
    schema.addColumn("alternatenames", CsvSchema.ColumnType.STRING);
    schema.addColumn("latitude", CsvSchema.ColumnType.NUMBER);
    schema.addColumn("longitude", CsvSchema.ColumnType.NUMBER);
    schema.addColumn("feature class", CsvSchema.ColumnType.STRING);
    schema.addColumn("feature code", CsvSchema.ColumnType.STRING);
    schema.addColumn("country code", CsvSchema.ColumnType.STRING);
    schema.addColumn("cc2", CsvSchema.ColumnType.STRING);
    schema.addColumn("admin1 code", CsvSchema.ColumnType.STRING);
    schema.addColumn("admin2 code", CsvSchema.ColumnType.STRING);
    schema.addColumn("admin3 code", CsvSchema.ColumnType.STRING);
    schema.addColumn("admin4 code", CsvSchema.ColumnType.STRING);
    schema.addColumn("population", CsvSchema.ColumnType.STRING);
    schema.addColumn("elevation", CsvSchema.ColumnType.STRING);
    schema.addColumn("dem", CsvSchema.ColumnType.STRING);
    schema.addColumn("timezone", CsvSchema.ColumnType.STRING);
    schema.addColumn("modification date", CsvSchema.ColumnType.STRING);
    schema.setColumnSeparator('\t');
    schema.setEscapeChar('"');
    // NOTE(review): raw type Map kept as-is; csvFile is defined outside this chunk.
    MappingIterator<Map> it = mapper.readerFor(Map.class).with(schema.build()).readValues(csvFile);
    while (it.hasNext()) {
        Map<String, Object> row = it.next();
// Fragment: mirrors an Apache Commons CSVFormat into a Jackson CsvSchema, then builds an
// ObjectReader that yields each record as a String[]. The enclosing method is outside
// this chunk; csvFormat, mapper and features are defined elsewhere.
CsvSchema.Builder csvSchemaBuilder = CsvSchema.builder()
        .setColumnSeparator(csvFormat.getDelimiter())
        .setLineSeparator(csvFormat.getRecordSeparator())
        // Comments are only enabled when the source format's comment marker is exactly '#'.
        .setAllowComments("#".equals(CharUtils.toString(csvFormat.getCommentMarker())))
        .setUseHeader(false);
// Quote/escape characters are optional in CSVFormat; copy them only when present.
csvSchemaBuilder = (csvFormat.getQuoteCharacter() == null)
        ? csvSchemaBuilder
        : csvSchemaBuilder.setQuoteChar(csvFormat.getQuoteCharacter());
csvSchemaBuilder = (csvFormat.getEscapeCharacter() == null)
        ? csvSchemaBuilder
        : csvSchemaBuilder.setEscapeChar(csvFormat.getEscapeCharacter());
// The first data row is skipped -- presumably it carries the header; confirm with caller.
csvSchemaBuilder = csvSchemaBuilder.setSkipFirstDataRow(true);
CsvSchema csvSchema = csvSchemaBuilder.build();
ObjectReader objReader = mapper.readerFor(String[].class)
        .with(csvSchema)
        .withFeatures(features.toArray(new CsvParser.Feature[features.size()]));
// NOTE(review): fragment appears garbled in extraction -- the unconditional throw makes
// builder.setColumnSeparator(...) unreachable (a validating `if`, e.g. on separator
// length, was presumably lost), and the .withSharedAttribute(...) chain has no visible
// receiver. Verify against the original source before relying on this text.
return jsonMapper.readValue(url, EventList.class);
} else if (query.type == CSV) {
    CsvSchema.Builder builder = CsvSchema.builder();
    if (request.headers().get("column_separator") != null) {
        String column_seperator = request.headers().get("column_separator");
        throw new RakamException("Invalid column separator", BAD_REQUEST);
        builder.setColumnSeparator(column_seperator.charAt(0));
        .withSharedAttribute("collection", query.collection)
        .withSharedAttribute("apiKey", masterKey))
        .with(builder.build()).readValue(url);
} else if (query.type == AVRO) {
    URLConnection conn = url.openConnection();
/**
 * Prepares a tab-separated, header-emitting CSV converter for {@code Comment} objects.
 *
 * <p>Bug fix: {@code CsvSchema} is immutable -- {@code withColumnSeparator()} and
 * {@code withHeader()} return <em>new</em> schema instances. The original discarded both
 * return values, leaving the default comma-separated, headerless schema in effect.
 *
 * @param csvDir     directory the generated CSV files are written to
 * @param inputFiles source files to convert
 */
public CsvConverter(File csvDir, List<File> inputFiles) {
    this.csvDir = csvDir;
    this.inputFiles = inputFiles;
    // Keep the underlying target stream open across successive writes.
    csvMapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    schema = csvMapper.schemaFor(Comment.class).withColumnSeparator('\t').withHeader();
}
// NOTE(review): test fragment -- the CSV payload string literal is dangling (its
// assignment/concatenation target is not visible) and the method body is not closed in
// this chunk; the trailing attribute chain continues elsewhere.
@Test
public void testName() throws Exception {
    CsvMapper mapper = new CsvMapper();
    // Route EventList deserialization through the project's CsvEventDeserializer.
    mapper.registerModule(new SimpleModule().addDeserializer(EventList.class,
            new CsvEventDeserializer(metastore, new ProjectConfig(), new TestingConfigManager(),
                    new SchemaChecker(metastore, build), build)));
    "1/2/09 4:53,Product2,1500\n";
    EventList actual = mapper.reader(EventList.class).with(ContextAttributes.getEmpty()
            .withSharedAttribute("project", "project")
            .withSharedAttribute("collection", "collection")
@Override public void write(final Map<String, Object> row) throws IOException { final byte[] bytes; if (shouldWriteHeader) { // Write the header once (mapper.writer will clone the writer). Add a small marker in front of the header // to easily split it write(String.format("-- %s ", currentTableName).getBytes()); bytes = mapper.writer(currentCSVSchema.withHeader()).writeValueAsBytes(row); shouldWriteHeader = false; } else { bytes = writer.writeValueAsBytes(row); } write(bytes); }
// Fragment: starts mid-branch and the closing of the else-block/method is outside this
// chunk. Writes an error as CSV (with header) to the response entity stream: one branch
// converts the generic error into an ApiError, the other serializes the error as-is
// using a schema typed for its runtime type.
final CsvSchema csvSchema = mapper.typedSchemaFor(ApiError.class).withHeader();
final ApiError apiError = ApiError.create(genericError.message());
mapper.writerFor(ApiError.class).with(csvSchema).writeValue(entityStream, apiError);
} else {
    final CsvSchema csvSchema = mapper.typedSchemaFor(type).withHeader();
    mapper.writerFor(type).with(csvSchema).writeValue(entityStream, genericError);
/**
 * Reads one TrueFX CSV record and materializes it as a {@link TrueFxTicker}.
 * Column order: pair, timestamp, bid, bidBP, ask, askBP, low, high, open.
 *
 * @throws IOException if the underlying parser fails
 */
@Override
public TrueFxTicker deserialize(JsonParser parser, DeserializationContext context)
        throws IOException, JsonProcessingException {
    ArrayNode columns = mapper.readerFor(TrueFxTicker.class).with(schema).readTree(parser);
    return new TrueFxTicker(
            columns.get(0).asText(),
            columns.get(1).asLong(),
            new BigDecimal(columns.get(2).asText()),
            new BigDecimal(columns.get(3).asText()),
            new BigDecimal(columns.get(4).asText()),
            new BigDecimal(columns.get(5).asText()),
            new BigDecimal(columns.get(6).asText()),
            new BigDecimal(columns.get(7).asText()),
            new BigDecimal(columns.get(8).asText()));
}
// closing brace of the enclosing class, carried over from the original chunk
}
/**
 * Overridable factory method that actually instantiates the desired parser.
 * Wraps the raw byte stream in a {@code Reader} before handing it to the parser.
 */
@Override
protected CsvParser _createParser(InputStream in, IOContext ctxt)
        throws IOException, JsonParseException {
    final Reader reader = _createReader(in, null, ctxt);
    return new CsvParser(ctxt, _getBufferRecycler(), _parserFeatures,
            _csvParserFeatures, _objectCodec, reader);
}
/**
 * Overridable factory method that actually instantiates desired
 * parser. This overload takes an already-decoded character stream, so no
 * Reader wrapping is needed before constructing the CsvParser.
 */
@Override
protected CsvParser _createParser(Reader r, IOContext ctxt) throws IOException, JsonParseException {
    return new CsvParser(ctxt, _getBufferRecycler(), _parserFeatures, _csvParserFeatures, _objectCodec, r);
}
// Fragment: loop header and first statement only -- the loop body and its closing brace
// are outside this chunk. Consumes consecutive string tokens from the parser; each is
// sanitized/validated as a header name via ValidationUtil.stripName.
while (jp.nextToken() == VALUE_STRING) {
    String name = ValidationUtil.stripName(jp.getValueAsString(), "header name");
// NOTE(review): fragment appears garbled in extraction -- the unconditional throw makes
// builder.setColumnSeparator(...) unreachable (a validating `if`, e.g. on separator
// length, was presumably lost), and the .withSharedAttribute(...) chain has no visible
// receiver. Verify against the original source before relying on this text.
String collection = getParam(request.params(), "collection");
CsvSchema.Builder builder = CsvSchema.builder();
if (request.params().get("column_separator") != null) {
    List<String> column_seperator = request.params().get("column_separator");
    throw new RakamException("Invalid column separator", BAD_REQUEST);
    builder.setColumnSeparator(column_seperator.get(0).charAt(0));
    .withSharedAttribute("collection", collection)
    .withSharedAttribute("apiKey", apiKey))
    .with(builder.build()).readValue(buff);
@Inject public EventCollectionHttpService( EventStore eventStore, ApiKeyService apiKeyService, JsonEventDeserializer deserializer, AvroEventDeserializer avroEventDeserializer, EventListDeserializer eventListDeserializer, CsvEventDeserializer csvEventDeserializer, Set<EventMapper> mappers) { this.eventStore = eventStore; this.eventMappers = ImmutableList.copyOf(mappers); this.apiKeyService = apiKeyService; jsonMapper = new ObjectMapper(); SimpleModule module = new SimpleModule(); module.addDeserializer(Event.class, deserializer); module.addDeserializer(EventList.class, eventListDeserializer); jsonMapper.registerModule(module); jsonMapper.registerModule(new SimpleModule("swagger", Version.unknownVersion()) { @Override public void setupModule(SetupContext context) { context.insertAnnotationIntrospector(new SwaggerJacksonAnnotationIntrospector()); } }); this.avroEventDeserializer = avroEventDeserializer; this.jsonEventDeserializer = deserializer; csvMapper = new CsvMapper(); csvMapper.registerModule(new SimpleModule().addDeserializer(EventList.class, csvEventDeserializer)); }
/**
 * Overridable factory method that actually instantiates the desired parser.
 * Decodes the given byte-array slice into a {@code Reader} before constructing the parser.
 */
@Override
protected CsvParser _createParser(byte[] data, int offset, int len, IOContext ctxt)
        throws IOException, JsonParseException {
    final Reader reader = _createReader(data, offset, len, null, ctxt);
    return new CsvParser(ctxt, _getBufferRecycler(), _parserFeatures,
            _csvParserFeatures, _objectCodec, reader);
}