/**
 * Instantiate a Java object from a JsonObject.
 * Faster than calling `Json.decodeValue(Json.encode(jsonObject), type)`.
 *
 * @param type the type to instantiate from the JsonObject.
 * @throws IllegalArgumentException if the type cannot be instantiated.
 */
public <T> T mapTo(Class<T> type) {
    return Json.mapper.convertValue(map, type);
}
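This `mapTo` helper (as in Vert.x's `JsonObject`) is faster because `convertValue` maps the in-memory `Map` directly onto the target class, skipping the encode/decode round trip through a JSON string. A minimal usage sketch; the `User` POJO is hypothetical, assuming public fields (or getters/setters) matching the JSON keys:

// Hypothetical POJO with fields matching the JSON keys.
public class User {
    public String name;
    public int age;
}

JsonObject json = new JsonObject().put("name", "Alice").put("age", 30);
User user = json.mapTo(User.class); // converted in memory, no re-serialization
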
@Override
public ObjType apply(Object input) {
    return mapper.convertValue(input, inObjTypeRef);
}
}

public static HadoopDruidConverterConfig fromMap(final Map<String, Object> map) {
    return jsonMapper.convertValue(map, HadoopDruidConverterConfig.class);
}

private List<SegmentDescriptor> getMissingSegments(final Map<String, Object> context) {
    final Object maybeMissingSegments = context.get(Result.MISSING_SEGMENTS_KEY);
    if (maybeMissingSegments == null) {
        return new ArrayList<>();
    }
    return jsonMapper.convertValue(
        maybeMissingSegments,
        new TypeReference<List<SegmentDescriptor>>() {}
    );
}
}

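Because Java erases generic type parameters at runtime, passing `List.class` here would produce a `List` of `LinkedHashMap`s; the anonymous `TypeReference` subclass captures `List<SegmentDescriptor>` so Jackson can materialize typed elements. A self-contained sketch of the same pattern (the `Point` class is illustrative, not from the source):

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

public class TypeRefDemo {
    public static class Point {
        public int x;
        public int y;
    }

    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        List<Map<String, Object>> raw = List.of(Map.of("x", 1, "y", 2));
        // The anonymous subclass preserves List<Point>, so elements are
        // materialized as Point instances rather than LinkedHashMaps.
        List<Point> points = mapper.convertValue(raw, new TypeReference<List<Point>>() {});
        System.out.println(points.get(0).x + "," + points.get(0).y); // 1,2
    }
}
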
@Override
protected SeekableStreamPartitions<Integer, Long> deserializeSeekableStreamPartitionsFromMetadata(
    ObjectMapper mapper,
    Object object
) {
    return mapper.convertValue(object, mapper.getTypeFactory().constructParametrizedType(
        SeekableStreamPartitions.class,
        SeekableStreamPartitions.class,
        Integer.class,
        Long.class
    ));
}

@Override
protected SeekableStreamPartitions<String, String> deserializeSeekableStreamPartitionsFromMetadata(
    ObjectMapper mapper,
    Object object
) {
    return mapper.convertValue(object, mapper.getTypeFactory().constructParametrizedType(
        SeekableStreamPartitions.class,
        SeekableStreamPartitions.class,
        String.class,
        String.class
    ));
}

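Both overrides build the target type programmatically because `SeekableStreamPartitions<K, V>` is generic and the key/value classes vary by subclass; `constructParametrizedType` is the older Jackson 2.x alias of `constructParametricType`. A minimal sketch of the same idea using plain JDK types:

import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;

public class JavaTypeDemo {
    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        // Runtime equivalent of new TypeReference<List<Integer>>() {}.
        JavaType listOfInt = mapper.getTypeFactory()
            .constructParametricType(List.class, Integer.class);
        List<Integer> ints = mapper.convertValue(List.of("1", "2"), listOfInt);
        System.out.println(ints.get(0) + ints.get(1)); // 3
    }
}
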
boolean sameLoadSpec(DataSegment s1, DataSegment s2) {
    final S3LoadSpec s1LoadSpec = (S3LoadSpec) mapper.convertValue(s1.getLoadSpec(), LoadSpec.class);
    final S3LoadSpec s2LoadSpec = (S3LoadSpec) mapper.convertValue(s2.getLoadSpec(), LoadSpec.class);
    return Objects.equal(s1LoadSpec.getBucket(), s2LoadSpec.getBucket())
        && Objects.equal(s1LoadSpec.getKey(), s2LoadSpec.getKey());
}
}

@Test(expected = IllegalArgumentException.class)
public void testTypeTypo() {
    Map<String, Object> greaterMap = ImmutableMap.of(
        "type", "nonExistingType",
        "aggregation", "agg",
        "value", 1.3
    );
    ObjectMapper mapper = new DefaultObjectMapper();
    // convertValue wraps the unknown-subtype failure in an IllegalArgumentException.
    // noinspection unused
    HavingSpec spec = mapper.convertValue(greaterMap, HavingSpec.class);
}

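The test works because `convertValue` rethrows any mapping failure, including an unresolvable polymorphic `type` id, as `IllegalArgumentException`. A self-contained sketch with a hypothetical `Shape` hierarchy:

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class PolymorphicDemo {
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
    @JsonSubTypes(@JsonSubTypes.Type(value = Circle.class, name = "circle"))
    public interface Shape {}

    public static class Circle implements Shape {
        public double radius;
    }

    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        Shape ok = mapper.convertValue(Map.of("type", "circle", "radius", 1.5), Shape.class);
        System.out.println(ok.getClass().getSimpleName()); // Circle
        try {
            mapper.convertValue(Map.of("type", "nonExistingType"), Shape.class);
        } catch (IllegalArgumentException e) {
            // The underlying unknown-type-id error arrives wrapped.
            System.out.println("rejected unknown type id");
        }
    }
}
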
public void setShardSpecs(Map<Long, List<HadoopyShardSpec>> shardSpecs) {
    this.schema = schema.withTuningConfig(schema.getTuningConfig().withShardSpecs(shardSpecs));
    this.pathSpec = JSON_MAPPER.convertValue(schema.getIOConfig().getPathSpec(), PathSpec.class);
}

public void setGranularitySpec(GranularitySpec granularitySpec) {
    this.schema = schema.withDataSchema(schema.getDataSchema().withGranularitySpec(granularitySpec));
    this.pathSpec = JSON_MAPPER.convertValue(schema.getIOConfig().getPathSpec(), PathSpec.class);
}

@Test
public void testSerde() {
    final SelectQueryConfig config = MAPPER.convertValue(CONFIG_MAP, SelectQueryConfig.class);
    Assert.assertEquals(false, config.getEnableFromNextDefault());

    final SelectQueryConfig config2 = MAPPER.convertValue(CONFIG_MAP2, SelectQueryConfig.class);
    Assert.assertEquals(true, config2.getEnableFromNextDefault());

    final SelectQueryConfig configEmpty = MAPPER.convertValue(CONFIG_MAP_EMPTY, SelectQueryConfig.class);
    Assert.assertEquals(true, configEmpty.getEnableFromNextDefault());
}
}

private Optional<NativeEntity<Collector>> findExisting(EntityV1 entity, Map<String, ValueReference> parameters) {
    final SidecarCollectorEntity collectorEntity = objectMapper.convertValue(entity.data(), SidecarCollectorEntity.class);
    final String name = collectorEntity.name().asString(parameters);
    final String os = collectorEntity.nodeOperatingSystem().asString(parameters);
    final Optional<Collector> existingCollector = Optional.ofNullable(collectorService.findByNameAndOs(name, os));
    return existingCollector.map(collector ->
        NativeEntity.create(entity.id(), collector.id(), TYPE_V1, collector.name(), collector));
}

private Optional<NativeEntity<GrokPattern>> findExisting(EntityV1 entity) {
    final GrokPatternEntity grokPatternEntity = objectMapper.convertValue(entity.data(), GrokPatternEntity.class);
    final String name = grokPatternEntity.name();
    final String pattern = grokPatternEntity.pattern();
    final Optional<GrokPattern> grokPattern = grokPatternService.loadByName(name);
    grokPattern.ifPresent(existingPattern -> compareGrokPatterns(name, pattern, existingPattern.pattern()));
    return grokPattern.map(gp -> NativeEntity.create(entity.id(), gp.id(), TYPE_V1, gp.name(), gp));
}

private Optional<NativeEntity<CacheDto>> findExisting(EntityV1 entity, Map<String, ValueReference> parameters) {
    final LookupCacheEntity cacheEntity = objectMapper.convertValue(entity.data(), LookupCacheEntity.class);
    final String name = cacheEntity.name().asString(parameters);
    final Optional<CacheDto> existingCache = cacheService.get(name);
    return existingCache.map(cache -> NativeEntity.create(entity.id(), cache.id(), TYPE_V1, cache.title(), cache));
}

@Test
public void testDefaults() {
    final Properties props = new Properties();
    final ObjectMapper objectMapper = new ObjectMapper();
    final LoggingEmitterConfig config = objectMapper.convertValue(
        Emitters.makeCustomFactoryMap(props),
        LoggingEmitterConfig.class
    );
    Assert.assertEquals("getLoggerClass", LoggingEmitter.class.getName(), config.getLoggerClass());
    Assert.assertEquals("getLogLevel", "info", config.getLogLevel());
}

@Test
public void testDefaultsLegacy() {
    final Properties props = new Properties();
    final ObjectMapper objectMapper = new ObjectMapper();
    final LoggingEmitterConfig config = objectMapper.convertValue(
        Emitters.makeLoggingMap(props),
        LoggingEmitterConfig.class
    );
    Assert.assertEquals("getLoggerClass", LoggingEmitter.class.getName(), config.getLoggerClass());
    Assert.assertEquals("getLogLevel", "debug", config.getLogLevel());
}

@Test
public void testSettingEverything() {
    final Properties props = new Properties();
    props.setProperty("org.apache.druid.java.util.emitter.loggerClass", "Foo");
    props.setProperty("org.apache.druid.java.util.emitter.logLevel", "INFO");
    final ObjectMapper objectMapper = new ObjectMapper();
    final LoggingEmitterConfig config = objectMapper.convertValue(
        Emitters.makeCustomFactoryMap(props),
        LoggingEmitterConfig.class
    );
    Assert.assertEquals("getLoggerClass", "Foo", config.getLoggerClass());
    Assert.assertEquals("getLogLevel", "INFO", config.getLogLevel());
}

@Test
public void testDimSelectorHavingClauseSerde() {
    HavingSpec dimHavingSpec = new DimensionSelectorHavingSpec("dim", "v", null);
    Map<String, Object> dimSelectMap = ImmutableMap.of(
        "type", "dimSelector",
        "dimension", "dim",
        "value", "v"
    );
    ObjectMapper mapper = new DefaultObjectMapper();
    assertEquals(dimHavingSpec, mapper.convertValue(dimSelectMap, DimensionSelectorHavingSpec.class));
}

@Test
public void testSerde() {
    final GroupByQueryConfig config = MAPPER.convertValue(CONFIG_MAP, GroupByQueryConfig.class);
    Assert.assertEquals(true, config.isSingleThreaded());
    Assert.assertEquals("v2", config.getDefaultStrategy());
    Assert.assertEquals(1, config.getBufferGrouperInitialBuckets());
    Assert.assertEquals(2, config.getMaxIntermediateRows());
    Assert.assertEquals(3, config.getMaxResults());
    Assert.assertEquals(4, config.getMaxOnDiskStorage());
    Assert.assertEquals(5, config.getMaxMergingDictionarySize());
    Assert.assertEquals(6.0, config.getBufferGrouperMaxLoadFactor(), 0.0);
}

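These config-serde tests lean on `convertValue` to bind a map of loosely typed values (often strings sourced from a properties file) onto a config POJO, coercing each value to the declared field type and leaving absent keys at their defaults. A minimal sketch with a hypothetical `DemoConfig`:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class ConfigBindingDemo {
    public static class DemoConfig {
        @JsonProperty
        private boolean singleThreaded = false;
        @JsonProperty
        private int maxResults = 10_000;

        public boolean isSingleThreaded() { return singleThreaded; }
        public int getMaxResults() { return maxResults; }
    }

    public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        // String values are coerced to the declared field types;
        // missing keys keep their defaults.
        DemoConfig config = mapper.convertValue(
            Map.of("singleThreaded", "true", "maxResults", "5"),
            DemoConfig.class
        );
        System.out.println(config.isSingleThreaded() + " " + config.getMaxResults()); // true 5
    }
}
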
private static class TestFirehoseFactory implements FirehoseFactory<InputRowParser> {
    public TestFirehoseFactory() {
    }

    @Override
    @SuppressWarnings("unchecked")
    public Firehose connect(InputRowParser parser, File temporaryDirectory) throws ParseException {
        return new TestFirehose(parser);
    }
}