@Override public void write(final Map<String, Object> row) throws IOException { final byte[] bytes; if (shouldWriteHeader) { // Write the header once (mapper.writer will clone the writer). Add a small marker in front of the header // to easily split it write(String.format("-- %s ", currentTableName).getBytes()); bytes = mapper.writer(currentCSVSchema.withHeader()).writeValueAsBytes(row); shouldWriteHeader = false; } else { bytes = writer.writeValueAsBytes(row); } write(bytes); }
final CsvSchema csvSchema = mapper.typedSchemaFor(ApiError.class).withHeader(); final ApiError apiError = ApiError.create(genericError.message()); mapper.writerFor(ApiError.class).with(csvSchema).writeValue(entityStream, apiError); } else { final CsvSchema csvSchema = mapper.typedSchemaFor(type).withHeader(); mapper.writerFor(type).with(csvSchema).writeValue(entityStream, genericError);
/**
 * Convenience method equivalent to:
 *<pre>
 * CsvSchema.emptySchema().withHeader();
 *</pre>
 * that is, a {@link CsvSchema} with all default settings except that the
 * first content line is taken to contain the intended column names.
 *
 * @since 2.5
 */
public CsvSchema schemaWithHeader() {
    final CsvSchema defaults = CsvSchema.emptySchema();
    return defaults.withHeader();
}
/**
 * Convenience method equivalent to:
 *<pre>
 * CsvSchema.emptySchema().withHeader();
 *</pre>
 * that is, a {@link CsvSchema} with all default settings except that the
 * first content line is taken to contain the intended column names.
 */
public CsvSchema schemaWithHeader() {
    final CsvSchema defaults = CsvSchema.emptySchema();
    return defaults.withHeader();
}
/**
 * Serializes a single bean to a CSV string, header line included.
 *
 * @param separator column separator; only its first character is used
 * @param v         the bean to serialize; its class drives the schema
 * @return the CSV text, or {@code null} if serialization fails
 */
public static <V> String toCsv(String separator, V v) {
    // Guard against a null/empty separator: charAt(0) would otherwise throw an
    // unchecked StringIndexOutOfBoundsException that escapes the catch below.
    if (separator == null || separator.isEmpty()) {
        throw new IllegalArgumentException("separator must contain at least one character");
    }
    try {
        CsvSchema schema = csvMapper.schemaFor(v.getClass())
                .withHeader()
                .withColumnSeparator(separator.charAt(0));
        return csvMapper.writer(schema).writeValueAsString(v);
    } catch (JsonProcessingException e) {
        log.error("jackson to csv error, obj: {}", v, e);
        return null;
    }
}
/**
 * Serializes a list of beans to a CSV string, header line included.
 * The schema is derived from the class of the first element.
 *
 * @param separator column separator; only its first character is used
 * @param list      the beans to serialize; must be non-empty
 * @return the CSV text, or {@code null} if the list is empty/null or serialization fails
 */
public static <V> String toCsv(String separator, List<V> list) {
    // Guard: list.get(0) on an empty/null list used to throw an unchecked
    // exception that escaped the catch below. Follow the method's existing
    // error convention (log and return null) instead.
    if (list == null || list.isEmpty()) {
        log.error("jackson to csv error, list is null or empty");
        return null;
    }
    try {
        Class<?> type = list.get(0).getClass();
        CsvSchema schema = csvMapper.schemaFor(type)
                .withHeader()
                .withColumnSeparator(separator.charAt(0));
        return csvMapper.writer(schema).writeValueAsString(list);
    } catch (JsonProcessingException e) {
        log.error("jackson to csv error, obj: {}", list, e);
        return null;
    }
}
/**
 * Appends a single bean to a CSV file, header line included.
 *
 * NOTE(review): because the file is opened in append mode, the header is
 * re-written on every call — confirm this is intended.
 * NOTE(review): FileWriter uses the platform default charset — confirm
 * UTF-8 is not required here.
 *
 * @param path      destination file path (opened in append mode)
 * @param separator column separator; only its first character is used
 * @param v         the bean to serialize; its class drives the schema
 */
public static <V> void toCsvFile(String path, String separator, V v) {
    try (Writer writer = new FileWriter(new File(path), true)) {
        CsvSchema schema = csvMapper.schemaFor(v.getClass())
                .withHeader()
                .withColumnSeparator(separator.charAt(0));
        // Close the SequenceWriter so its generator flushes buffered content;
        // flushing only the underlying Writer is not sufficient (the generator
        // keeps its own buffer). The original code leaked this writer.
        try (com.fasterxml.jackson.databind.SequenceWriter seq =
                csvMapper.writer(schema).writeValues(writer)) {
            seq.write(v);
        }
    } catch (Exception e) {
        log.error("jackson to csv file error, path: {}, separator: {}, obj: {}", path, separator, v, e);
    }
}
/**
 * Appends a list of beans to a CSV file, header line included.
 * The schema is derived from the class of the first element.
 *
 * NOTE(review): because the file is opened in append mode, the header is
 * re-written on every call — confirm this is intended.
 * NOTE(review): FileWriter uses the platform default charset — confirm
 * UTF-8 is not required here.
 *
 * @param path      destination file path (opened in append mode)
 * @param separator column separator; only its first character is used
 * @param list      the beans to serialize; must be non-empty
 */
public static <V> void toCsvFile(String path, String separator, List<V> list) {
    // Guard: list.get(0) on an empty/null list used to throw inside the try,
    // which the broad catch then logged with a misleading message.
    if (list == null || list.isEmpty()) {
        log.error("jackson to csv file error, list is null or empty, path: {}", path);
        return;
    }
    try (Writer writer = new FileWriter(new File(path), true)) {
        Class<?> type = list.get(0).getClass();
        CsvSchema schema = csvMapper.schemaFor(type)
                .withHeader()
                .withColumnSeparator(separator.charAt(0));
        // Close the SequenceWriter so its generator flushes buffered content;
        // flushing only the underlying Writer is not sufficient. The original
        // code leaked this writer.
        try (com.fasterxml.jackson.databind.SequenceWriter seq =
                csvMapper.writer(schema).writeValues(writer)) {
            seq.writeAll(list);
        }
    } catch (Exception e) {
        log.error("jackson to csv file error, path: {}, separator: {}, list: {}", path, separator, list, e);
    }
}
public long writeToCsv() { Class<T> persistentClass = getPersistentClass(); // create mapper and schema CsvMapper mapper = new CsvMapper(); CsvSchema schema = mapper.schemaFor(persistentClass).withHeader(); // write entities long count = 0; try { mapper.writer().with(schema).writeValue(new File(csvFile), entities); count = entities.size(); } catch (IOException e) { e.printStackTrace(); } return count; }
/**
 * Creates a converter writing tab-separated CSV (with a header line) for
 * {@link Comment} rows.
 *
 * @param csvDir     output directory for generated CSV files
 * @param inputFiles source files to convert
 */
public CsvConverter(File csvDir, List<File> inputFiles) {
    this.csvDir = csvDir;
    this.inputFiles = inputFiles;
    csvMapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    // CsvSchema is immutable: withColumnSeparator() and withHeader() return new
    // instances. The original code called them as statements and discarded the
    // results, leaving the schema with the default comma separator and no header.
    schema = csvMapper.schemaFor(Comment.class)
            .withColumnSeparator('\t')
            .withHeader();
}
/**
 * Serializes an object to a CSV string using a schema derived from its class.
 *
 * NOTE(review): a new CsvMapper is built per call, which is expensive; a
 * cached {@code static final} instance would work (mappers are thread-safe
 * once configured) and would likely make the {@code synchronized} unnecessary.
 *
 * @param object      the object to serialize; must not be null
 * @param withHeaders whether to emit a header line first; must not be null
 * @return the CSV text
 * @throws IOException          if serialization fails
 * @throws NullPointerException if {@code object} or {@code withHeaders} is null
 */
public static final synchronized <T> String serialize(final T object, final Boolean withHeaders) throws IOException {
    // Fail fast with clear messages; the boxed Boolean previously auto-unboxed
    // into a bare, message-less NPE on the if-condition below.
    java.util.Objects.requireNonNull(object, "object must not be null");
    java.util.Objects.requireNonNull(withHeaders, "withHeaders must not be null");
    CsvMapper csvMapper = new CsvMapper();
    CsvSchema csvSchema = csvMapper.schemaFor(object.getClass());
    if (withHeaders) {
        csvSchema = csvSchema.withHeader();
    } else {
        csvSchema = csvSchema.withoutHeader();
    }
    return csvMapper.writer(csvSchema).writeValueAsString(object);
}
@Override public void write(final Map<String, Object> row) throws IOException { final byte[] bytes; if (shouldWriteHeader) { // Write the header once (mapper.writer will clone the writer). Add a small marker in front of the header // to easily split it write(String.format("-- %s ", currentTableName).getBytes()); bytes = mapper.writer(currentCSVSchema.withHeader()).writeValueAsBytes(row); shouldWriteHeader = false; } else { bytes = writer.writeValueAsBytes(row); } write(bytes); }
@Override public void write(final Map<String, Object> row) throws IOException { final byte[] bytes; if (shouldWriteHeader) { // Write the header once (mapper.writer will clone the writer). Add a small marker in front of the header // to easily split it write(String.format("-- %s ", currentTableName).getBytes()); bytes = mapper.writer(currentCSVSchema.withHeader()).writeValueAsBytes(row); shouldWriteHeader = false; } else { bytes = writer.writeValueAsBytes(row); } write(bytes); }
/**
 * Writes positions [startIndex, endIndex) of the route to the target stream as CSV,
 * header line first. Column names are taken from the first position's row map.
 *
 * Assumes every position shares the first position's key set — TODO confirm.
 *
 * @param route      the route whose positions are written
 * @param target     destination stream (not closed here beyond the SequenceWriter's behavior)
 * @param startIndex first position index to write (inclusive)
 * @param endIndex   last position index (exclusive)
 * @throws IOException if writing fails
 */
public void write(CsvRoute route, OutputStream target, int startIndex, int endIndex) throws IOException {
    List<CsvPosition> positions = route.getPositions();
    CsvSchema.Builder builder = new CsvSchema.Builder();
    // Braces added (original used unbraced if/for) and size() > 0 replaced
    // with the clearer isEmpty() check; behavior is unchanged.
    if (!positions.isEmpty()) {
        for (String key : positions.get(0).getRowAsMap().keySet()) {
            builder = builder.addColumn(key);
        }
    }
    CsvSchema schema = builder.build().withHeader().withColumnSeparator(getColumnSeparator());
    try (SequenceWriter writer = new CsvMapper().writer(schema).writeValues(target)) {
        for (int i = startIndex; i < endIndex; i++) {
            CsvPosition position = positions.get(i);
            writer.write(position.getRowAsMap());
        }
    }
}
}
/**
 * Builds a CSV reader for {@link CsvMapperData} rows and starts iterating
 * over the given content.
 *
 * NOTE(review): the schema combines withHeader() with withSkipFirstDataRow(true),
 * i.e. the first line is the header AND the first data row after it is skipped —
 * confirm both are intended.
 *
 * @param csvContent raw CSV bytes
 * @return an iterator over the parsed rows
 * @throws TermCsvLoaderException if the content cannot be read
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws TermCsvLoaderException {
    final CsvMapper csvMapper = new CsvMapper();
    final CsvSchema csvSchema = csvMapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    final ObjectReader rowReader = csvMapper.reader(CsvMapperData.class).with(csvSchema);
    try {
        return rowReader.readValues(csvContent);
    } catch (IOException e) {
        throw new TermCsvLoaderException(e);
    }
}
}
/**
 * Builds a CSV reader for {@link CsvMapperData} rows and starts iterating
 * over the given content.
 *
 * NOTE(review): the schema combines withHeader() with withSkipFirstDataRow(true),
 * i.e. the first line is the header AND the first data row after it is skipped —
 * confirm both are intended.
 *
 * @param csvContent raw CSV bytes
 * @return an iterator over the parsed rows
 * @throws ReferenceCsvLoaderException if the content cannot be read
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws ReferenceCsvLoaderException {
    final CsvMapper csvMapper = new CsvMapper();
    final CsvSchema csvSchema = csvMapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    final ObjectReader rowReader = csvMapper.reader(CsvMapperData.class).with(csvSchema);
    try {
        return rowReader.readValues(csvContent);
    } catch (IOException e) {
        throw new ReferenceCsvLoaderException(e);
    }
}
}
/**
 * Builds a CSV reader for {@link CsvMapperData} rows and starts iterating
 * over the given content.
 *
 * NOTE(review): the schema combines withHeader() with withSkipFirstDataRow(true),
 * i.e. the first line is the header AND the first data row after it is skipped —
 * confirm both are intended.
 *
 * @param csvContent raw CSV bytes
 * @return an iterator over the parsed rows
 * @throws TermCsvLoaderException if the content cannot be read
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws TermCsvLoaderException {
    final CsvMapper csvMapper = new CsvMapper();
    final CsvSchema csvSchema = csvMapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    final ObjectReader rowReader = csvMapper.reader(CsvMapperData.class).with(csvSchema);
    try {
        return rowReader.readValues(csvContent);
    } catch (IOException e) {
        throw new TermCsvLoaderException(e);
    }
}
}
/**
 * Builds a CSV reader for {@link CsvMapperData} rows and starts iterating
 * over the given content.
 *
 * NOTE(review): the schema combines withHeader() with withSkipFirstDataRow(true),
 * i.e. the first line is the header AND the first data row after it is skipped —
 * confirm both are intended.
 *
 * @param csvContent raw CSV bytes
 * @return an iterator over the parsed rows
 * @throws ReferenceCsvLoaderException if the content cannot be read
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws ReferenceCsvLoaderException {
    final CsvMapper csvMapper = new CsvMapper();
    final CsvSchema csvSchema = csvMapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    final ObjectReader rowReader = csvMapper.reader(CsvMapperData.class).with(csvSchema);
    try {
        return rowReader.readValues(csvContent);
    } catch (IOException e) {
        throw new ReferenceCsvLoaderException(e);
    }
}
}
/**
 * Parses a semicolon-separated CSV string (first line = header) into a list
 * of {@code type} instances, applying the given Jackson mix-in.
 *
 * @param type        target row type
 * @param mixin       mix-in class applied to {@code type} during deserialization
 * @param rulesString CSV content to parse
 * @return all parsed rows
 * @throws Exception if parsing fails
 */
private static <T> List<T> importCsvRules(Class<T> type, Class<?> mixin, String rulesString) throws Exception {
    final CsvSchema schema = CsvSchema.emptySchema().withHeader().withColumnSeparator(';');

    final CsvMapper csvMapper = new CsvMapper();
    csvMapper.addMixIn(type, mixin);
    // Empty strings become null objects; unknown CSV columns are an error.
    csvMapper.enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT);
    csvMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true);

    final MappingIterator<T> rows = csvMapper.readerFor(type).with(schema).readValues(rulesString);
    return rows.readAll();
}
private MappingIterator<Map<String,String>> parseCsv(String csv) throws JsonProcessingException, IOException { return new CsvMapper().reader(Map.class) .with(CsvSchema.emptySchema().withHeader()) // use first row as header .readValues(csv); }