/**
 * Returns a new {@link DataSchema} identical to this one except that the given
 * {@code transformSpec} replaces the current one; every other field is carried over.
 */
public DataSchema withTransformSpec(TransformSpec transformSpec)
{
  return new DataSchema(
      dataSource,
      parser,
      aggregators,
      granularitySpec,
      transformSpec,
      jsonMapper
  );
}
/**
 * Returns a new {@link DataSchema} identical to this one except that the given
 * {@code granularitySpec} replaces the current one; every other field is carried over.
 */
public DataSchema withGranularitySpec(GranularitySpec granularitySpec)
{
  return new DataSchema(
      dataSource,
      parser,
      aggregators,
      granularitySpec,
      transformSpec,
      jsonMapper
  );
}
final InputRowParser parser = new NoopInputRowParser(new TimeAndDimsParseSpec(null, finalDimensionsSpec)); return new DataSchema( dataSource, jsonMapper.convertValue(parser, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT),
/**
 * Jackson-deserializable constructor for the test task.
 *
 * Builds the superclass with a stub {@link FireDepartment}: a minimal
 * {@link DataSchema} (no parser, no aggregators, no granularity/transform spec),
 * a {@link RealtimeIOConfig} backed by a dummy local firehose, and a chat-handler
 * factory lambda that returns {@code null}. The injected {@code status} is stored
 * so the test can report it later.
 *
 * NOTE(review): all arguments are project types supplied via Jackson injection;
 * no statement may precede {@code super(...)}, hence the single nested call.
 */
@JsonCreator
public TestRealtimeTask(
    @JsonProperty("id") String id,
    @JsonProperty("resource") TaskResource taskResource,
    @JsonProperty("dataSource") String dataSource,
    @JsonProperty("taskStatus") TaskStatus status,
    @JacksonInject ObjectMapper mapper
)
{
  super(
      id,
      taskResource,
      new FireDepartment(
          // Empty aggregator array: equivalent to new AggregatorFactory[]{} in the original.
          new DataSchema(dataSource, null, new AggregatorFactory[0], null, null, mapper),
          new RealtimeIOConfig(
              new LocalFirehoseFactory(new File("lol"), "rofl", null),
              (schema, config, metrics) -> null,
              null
          ),
          null
      ),
      null
  );
  this.status = status;
}
new DataSchema( "dataSource", getObjectMapper().convertValue(
new DataSchema( "dataSource", getObjectMapper().convertValue(
new TaskResource("rofl", 2), new FireDepartment( new DataSchema("foo", null, new AggregatorFactory[0], null, null, new DefaultObjectMapper()), new RealtimeIOConfig( new LocalFirehoseFactory(new File("lol"), "rofl", null),
new DataSchema( "test", jsonMapper.convertValue(
); DataSchema schema = new DataSchema( "", parser,
new DataSchema( DATA_SOURCE, objectMapper.convertValue(
private RealtimeIndexTask newRealtimeIndexTask() DataSchema dataSchema = new DataSchema( "test_ds", null,
null, new IndexIngestionSpec( new DataSchema( "foo", null,
); DataSchema schema = new DataSchema( "test", parser,
); DataSchema schema = new DataSchema( "test", parser,
); DataSchema schema = new DataSchema( "test", parser,
null, new HadoopIngestionSpec( new DataSchema( "foo", null, new AggregatorFactory[0], new UniformGranularitySpec( Granularities.DAY,
/**
 * Verifies DataSchema's computed dimension exclusions: the timestamp column plus every
 * aggregator input and output column should appear in the parse spec's exclusion set.
 */
@Test
public void testDefaultExclusions()
{
  // Serialize a StringInputRowParser to its Map form, as DataSchema expects.
  final Map<String, Object> parserMap = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              new DimensionsSpec(
                  DimensionsSpec.getDefaultSchemas(ImmutableList.of("dimB", "dimA")),
                  null,
                  null
              ),
              null,
              null
          ),
          null
      ),
      JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
  );

  final DataSchema dataSchema = new DataSchema(
      "test",
      parserMap,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2"),
      },
      new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))),
      null,
      jsonMapper
  );

  // Timestamp column, aggregator inputs, and aggregator outputs are all excluded.
  Assert.assertEquals(
      ImmutableSet.of("time", "col1", "col2", "metric1", "metric2"),
      dataSchema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions()
  );
}
new TaskResource("rofl", 2), new IndexIngestionSpec( new DataSchema( "foo", null,
new DataSchema( "foo", jsonMapper.convertValue(
/**
 * Test stub {@link FirehoseFactory}: ignores the temporary directory and simply wraps
 * the supplied parser in a {@code TestFirehose}.
 */
private static class TestFirehoseFactory implements FirehoseFactory<InputRowParser>
{
  public TestFirehoseFactory()
  {
    // No state to initialize.
  }

  @Override
  @SuppressWarnings("unchecked")
  public Firehose connect(InputRowParser parser, File temporaryDirectory) throws ParseException
  {
    // temporaryDirectory is unused by this stub.
    return new TestFirehose(parser);
  }
}