rep = new JacksonRepresentation<MyBean[]>(MediaType.TEXT_CSV, tab) {
    /**
     * Builds the writer used for CSV output: the schema (column layout)
     * is introspected from {@code MyBean}'s properties.
     */
    @Override
    protected ObjectWriter createObjectWriter() {
        final CsvMapper mapper = (CsvMapper) getObjectMapper();
        final CsvSchema schema = mapper.schemaFor(MyBean.class);
        return mapper.writer(schema);
    }
};
/**
 * Builds a Jackson CSV schema for the current object class, using the
 * supplied mapper's property introspection.
 *
 * @param csvMapper
 *            The source CSV mapper.
 * @return A Jackson CSV schema derived from {@code getObjectClass()}.
 */
protected CsvSchema createCsvSchema(CsvMapper csvMapper) {
    final CsvSchema result = csvMapper.schemaFor(getObjectClass());
    return result;
}
// Parse two header-less CSV rows ("1,2" and "3,4") into FooBar instances
// and print each parsed value.
String input = "1,2\n3,4";
StringReader reader = new StringReader(input);
CsvMapper csvMapper = new CsvMapper();
CsvSchema fooBarSchema = csvMapper.schemaFor(FooBar.class)
        .withoutHeader()
        .withLineSeparator("\n")
        .withColumnSeparator(',');
try {
    MappingIterator<FooBar> rows =
            csvMapper.reader(FooBar.class).with(fooBarSchema).readValues(reader);
    while (rows.hasNext()) {
        System.out.println(rows.nextValue());
    }
} catch (JsonProcessingException e) {
    e.printStackTrace();
} catch (IOException e) {
    e.printStackTrace();
}
/**
 * Serializes a single bean to CSV text, header row included. The schema is
 * introspected from the bean's runtime class.
 *
 * @param separator column separator; only the first character is used
 * @param v         bean to serialize
 * @return CSV text, or {@code null} on invalid input or serialization failure
 */
public static <V> String toCsv(String separator, V v) {
    // Guard: a null bean or null/empty separator previously escaped as an
    // unchecked NPE/StringIndexOutOfBoundsException (the catch below only
    // covers JsonProcessingException), breaking the null-on-error contract.
    if (v == null || separator == null || separator.isEmpty()) {
        log.error("jackson to csv error, invalid arguments, separator: {}, obj: {}", separator, v);
        return null;
    }
    try {
        CsvSchema schema = csvMapper.schemaFor(v.getClass())
                .withHeader()
                .withColumnSeparator(separator.charAt(0));
        return csvMapper.writer(schema).writeValueAsString(v);
    } catch (JsonProcessingException e) {
        log.error("jackson to csv error, obj: {}", v, e);
        return null;
    }
}
/**
 * Serializes a list of beans to CSV text, header row included. The schema is
 * introspected from the first element's runtime class, so all elements are
 * expected to share one type.
 *
 * @param separator column separator; only the first character is used
 * @param list      beans to serialize; must be non-empty
 * @return CSV text, or {@code null} on invalid input or serialization failure
 */
public static <V> String toCsv(String separator, List<V> list) {
    // Guard: an empty/null list made list.get(0) throw, and a null/empty
    // separator threw from charAt(0) — neither is caught below, breaking
    // the null-on-error contract.
    if (list == null || list.isEmpty() || separator == null || separator.isEmpty()) {
        log.error("jackson to csv error, invalid arguments, separator: {}, obj: {}", separator, list);
        return null;
    }
    try {
        // Use a wildcard instead of the original raw Class type.
        Class<?> type = list.get(0).getClass();
        CsvSchema schema = csvMapper.schemaFor(type)
                .withHeader()
                .withColumnSeparator(separator.charAt(0));
        return csvMapper.writer(schema).writeValueAsString(list);
    } catch (JsonProcessingException e) {
        log.error("jackson to csv error, obj: {}", list, e);
        return null;
    }
}
/**
 * Appends a list of beans to the CSV file at {@code path}, writing a header
 * row before the data. Failures are logged, not propagated.
 *
 * @param path      destination file path (opened in append mode)
 * @param separator column separator; only the first character is used
 * @param list      beans to serialize; must be non-empty
 */
public static <V> void toCsvFile(String path, String separator, List<V> list) {
    // Guard: with an empty/null list, list.get(0) used to throw AFTER the
    // file was opened, creating/appending nothing but still logging a
    // confusing exception.
    if (list == null || list.isEmpty()) {
        log.error("jackson to csv file error, empty list, path: {}, separator: {}", path, separator);
        return;
    }
    // NOTE(review): FileWriter uses the platform default charset — confirm
    // that is acceptable for consumers of these files.
    try (Writer writer = new FileWriter(new File(path), true)) {
        // Use a wildcard instead of the original raw Class type.
        Class<?> type = list.get(0).getClass();
        CsvSchema schema = csvMapper.schemaFor(type)
                .withHeader()
                .withColumnSeparator(separator.charAt(0));
        csvMapper.writer(schema).writeValues(writer).writeAll(list);
        writer.flush();
    } catch (Exception e) {
        log.error("jackson to csv file error, path: {}, separator: {}, list: {}", path, separator, list, e);
    }
}
/**
 * Appends a single bean to the CSV file at {@code path}, writing a header
 * row first. Any failure (including a bad separator) is caught by the broad
 * Exception handler and logged, not propagated.
 *
 * @param path      destination file path (opened in append mode)
 * @param separator column separator; only the first character is used
 * @param v         bean to serialize; its runtime class drives the schema
 */
public static <V> void toCsvFile(String path, String separator, V v) {
    try (Writer out = new FileWriter(new File(path), true)) {
        char sep = separator.charAt(0);
        CsvSchema schema = csvMapper.schemaFor(v.getClass())
                .withHeader()
                .withColumnSeparator(sep);
        csvMapper.writer(schema).writeValues(out).write(v);
        out.flush();
    } catch (Exception e) {
        log.error("jackson to csv file error, path: {}, separator: {}, obj: {}", path, separator, v, e);
    }
}
public long writeToCsv() { Class<T> persistentClass = getPersistentClass(); // create mapper and schema CsvMapper mapper = new CsvMapper(); CsvSchema schema = mapper.schemaFor(persistentClass).withHeader(); // write entities long count = 0; try { mapper.writer().with(schema).writeValue(new File(csvFile), entities); count = entities.size(); } catch (IOException e) { e.printStackTrace(); } return count; }
/**
 * Creates a converter writing tab-separated CSV with a header row.
 *
 * @param csvDir     directory CSV output is written to
 * @param inputFiles source files to convert
 */
public CsvConverter(File csvDir, List<File> inputFiles) {
    this.csvDir = csvDir;
    this.inputFiles = inputFiles;
    // Keep the underlying stream open across successive write calls.
    csvMapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    // BUG FIX: CsvSchema is immutable — with*() returns a NEW schema.
    // The original code discarded those return values, so the tab separator
    // and header settings were never actually applied.
    schema = csvMapper.schemaFor(Comment.class)
            .withColumnSeparator('\t')
            .withHeader();
}
/**
 * Serializes {@code object} to a CSV string, optionally with a header row.
 *
 * @param object      bean to serialize; its runtime class drives the schema
 * @param withHeaders whether to emit a header row (must be non-null; unboxed)
 * @return CSV text for the object
 * @throws IOException on serialization failure
 */
public static final synchronized <T> String serialize(final T object, final Boolean withHeaders) throws IOException {
    final CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(object.getClass());
    // Unboxing here NPEs on null, same as the original if-check did.
    schema = withHeaders ? schema.withHeader() : schema.withoutHeader();
    return mapper.writer(schema).writeValueAsString(object);
}
/**
 * formatQuoteToCSV - convert quote to CSV
 *
 * @param quote quote to serialize
 * @return CSV formatted Quote (with header row), or null if serialization failed
 */
private String formatQuoteToCSV(OHLCquote quote) {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(OHLCquote.class).withHeader().withColumnSeparator(',');
    String result = null;
    try {
        result = mapper.writer(schema).writeValueAsString(quote);
    } catch (JsonProcessingException e) {
        System.out.println("TimeRangeOHLCActionHandler.formatQuoteToCSV(): ERROR: JsonProcessingException on quote");
        e.printStackTrace();
    }
    return result;
}
/**
 * Renders {@code list} as semicolon-separated CSV bytes with a header row,
 * applying {@code mixin} annotations to {@code type} for the serialization.
 *
 * @param list  rows to write; each element is bound as {@code type}
 * @param type  the row class used for schema introspection and binding
 * @param mixin mixin class whose annotations override those of {@code type}
 * @return the CSV document as a byte array
 * @throws IOException if writing a row fails
 */
private byte[] rulesToCsv(List<?> list, Class<?> type, Class<?> mixin) throws IOException {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(type).withHeader().withColumnSeparator(';');
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (SequenceWriter rows = mapper
            .addMixIn(type, mixin)
            .writerWithDefaultPrettyPrinter()
            .with(schema)
            .forType(type)
            .writeValues(buffer)) {
        for (Object row : list) {
            rows.write(row);
        }
    }
    return buffer.toByteArray();
}
/**
 * Convenience method, functionally the same as:
 *<pre>
 *  reader(pojoType).withSchema(schemaFor(pojoType));
 *</pre>
 * i.e. it builds an {@link ObjectReader} that binds to {@code pojoType} and
 * uses a "loose" {@link CsvSchema} introspected from that same type (no
 * strict inferred typing).
 *<p>
 * @param pojoType Type used both for data-binding (result type) and for
 *   schema introspection. NOTE: must NOT be an array or Collection type —
 *   those make sense for data-binding but no CSV schema can be built for them.
 */
public ObjectReader readerWithSchemaFor(Class<?> pojoType)
{
    JavaType type = constructType(pojoType);
    // Structured types are rejected up front: the schema type would have to
    // differ from the data-bind type, which introspection cannot express.
    if (type.isArrayType() || type.isCollectionLikeType()) {
        throw new IllegalArgumentException("Type can NOT be a Collection or array type");
    }
    return readerFor(type).with(schemaFor(type));
}
/** * Convenience method which is functionally equivalent to: *<pre> * writer(pojoType).with(schemaFor(pojoType)); *</pre> * that is, constructs a {@link ObjectWriter} which both binds to * specified type and uses "loose" {@link CsvSchema} introspected from * specified type (one without strict inferred typing). *<p> * @param pojoType Type used both for data-binding (result type) and for * schema introspection. NOTE: must NOT be an array or Collection type, since * these only make sense for data-binding (like arrays of objects to bind), * but not for schema construction (no root-level CSV types can be mapped to arrays * or Collections) */ public ObjectWriter writerWithSchemaFor(Class<?> pojoType) { JavaType type = constructType(pojoType); // sanity check as per javadoc above if (type.isArrayType() || type.isCollectionLikeType()) { throw new IllegalArgumentException("Type can NOT be a Collection or array type"); } return writerFor(type).with(schemaFor(type)); }
// Build a sample Test bean holding a nested Stub, serialize it to CSV with
// a header row (withUseHeader), and print the result.
Test response = new Test();
response.setNum(1);
Stub stub = new Stub();
stub.setAge("12");
stub.setName("Colin");
response.setS(stub);
CsvMapper mapper = new CsvMapper();
CsvSchema schema = mapper.schemaFor(Test.class);
String csv = mapper.writer(schema.withUseHeader(true)).writeValueAsString(response);
System.out.println(csv);
/**
 * Builds an OffsetSerDe whose writer and reader share a CSV schema
 * introspected from OffsetInfo. The empty line separator keeps each record
 * on a single line, and strict quoting is enabled on the mapper.
 */
private static OffsetSerDe csvOffsetSerDe() {
    CsvMapper mapper =
            new CsvMapper().configure(CsvGenerator.Feature.STRICT_CHECK_FOR_QUOTING, true);
    CsvSchema schema = mapper.schemaFor(OffsetInfo.class).withLineSeparator("");
    return new OffsetSerDe(
            mapper.writer(schema),
            mapper.reader(schema).forType(OffsetInfo.class));
}
/**
 * Builds a MappingIterator over {@code csvContent}, binding rows to
 * CsvMapperData. The schema drops the introspected columns and enables the
 * header, so column names presumably come from the file's header line —
 * confirm against the input format. The first data row after the header is
 * skipped as well (withSkipFirstDataRow) — verify why with sample input.
 *
 * @param csvContent raw CSV bytes to iterate over
 * @return iterator yielding one CsvMapperData per row
 * @throws TermCsvLoaderException if the content cannot be read
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws TermCsvLoaderException {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    ObjectReader rowReader = mapper.reader(CsvMapperData.class).with(schema);
    try {
        return rowReader.readValues(csvContent);
    } catch (IOException e) {
        throw new TermCsvLoaderException(e);
    }
}
}
/**
 * Builds a MappingIterator over {@code csvContent}, binding rows to
 * CsvMapperData. The schema drops the introspected columns and enables the
 * header, so column names presumably come from the file's header line —
 * confirm against the input format. The first data row after the header is
 * skipped as well (withSkipFirstDataRow) — verify why with sample input.
 *
 * @param csvContent raw CSV bytes to iterate over
 * @return iterator yielding one CsvMapperData per row
 * @throws ReferenceCsvLoaderException if the content cannot be read
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws ReferenceCsvLoaderException {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    ObjectReader rowReader = mapper.reader(CsvMapperData.class).with(schema);
    try {
        return rowReader.readValues(csvContent);
    } catch (IOException e) {
        throw new ReferenceCsvLoaderException(e);
    }
}
}
/**
 * Creates a row iterator over {@code csvContent} bound to CsvMapperData.
 * Schema setup: explicit separator, header enabled, introspected columns
 * removed (names presumably taken from the header line — confirm), and the
 * first data row skipped — verify intent against sample input.
 *
 * @param csvContent raw CSV bytes
 * @return iterator producing one CsvMapperData per row
 * @throws TermCsvLoaderException if reading the content fails
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws TermCsvLoaderException {
    final CsvMapper mapper = new CsvMapper();
    final CsvSchema schema = mapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    try {
        return mapper.reader(CsvMapperData.class).with(schema).readValues(csvContent);
    } catch (IOException e) {
        throw new TermCsvLoaderException(e);
    }
}
}
/**
 * Creates a row iterator over {@code csvContent} bound to CsvMapperData.
 * Schema setup: explicit separator, header enabled, introspected columns
 * removed (names presumably taken from the header line — confirm), and the
 * first data row skipped — verify intent against sample input.
 *
 * @param csvContent raw CSV bytes
 * @return iterator producing one CsvMapperData per row
 * @throws ReferenceCsvLoaderException if reading the content fails
 */
private MappingIterator<CsvMapperData> readValues(byte[] csvContent) throws ReferenceCsvLoaderException {
    final CsvMapper mapper = new CsvMapper();
    final CsvSchema schema = mapper.schemaFor(CsvMapperData.class)
            .withColumnSeparator(CSV_SEPARATOR)
            .withSkipFirstDataRow(true)
            .withHeader()
            .withoutColumns();
    try {
        return mapper.reader(CsvMapperData.class).with(schema).readValues(csvContent);
    } catch (IOException e) {
        throw new ReferenceCsvLoaderException(e);
    }
}
}