@Override
public List<InputRow> parseBatch(final T row)
{
  // Delegate parsing to the wrapped parser, then run every resulting row
  // through the transformer before handing the batch back to the caller.
  final List<InputRow> parsed = parser.parseBatch(row);
  return parsed.stream()
               .map(inputRow -> transformer.transform(inputRow))
               .collect(Collectors.toList());
}
@Nullable
@Override
public InputRow nextRow()
{
  // Each line parses to exactly one InputRow; getOnlyElement throws if the
  // parser unexpectedly produces zero or multiple rows.
  return (InputRow) Iterators.getOnlyElement(parser.parseBatch(lineIterator.next()).iterator());
}
public static InputRow createRow(final ImmutableMap<String, ?> map)
{
  // The parser wants Map<String, Object>; the ImmutableMap's values are only
  // ever read, so this unchecked widening of the value type is safe.
  @SuppressWarnings("unchecked")
  final Map<String, Object> event = (Map<String, Object>) map;
  return PARSER.parseBatch(event).get(0);
}
public static InputRow createRow(final ImmutableMap<String, ?> map, InputRowParser<Map<String, Object>> parser)
{
  // Same as the single-argument overload, but with a caller-supplied parser.
  // The values are only read, so the unchecked value-type widening is safe.
  @SuppressWarnings("unchecked")
  final Map<String, Object> event = (Map<String, Object>) map;
  return parser.parseBatch(event).get(0);
}
// Returns the next parsed row. Buffered rows from the previously-parsed message
// are drained first; once exhausted, the first non-empty queue supplies the next
// message, whose body is parsed into a fresh row iterator. The message's queue
// offset is recorded in `windows` (presumably for offset-commit bookkeeping —
// confirm against the commit path). NOTE(review): the trailing throw assumes
// callers only invoke nextRow() after a successful hasMore()-style check.
@Nullable @Override public InputRow nextRow() { if (nextIterator.hasNext()) { return nextIterator.next(); } for (Map.Entry<MessageQueue, ConcurrentSkipListSet<MessageExt>> entry : messageQueueTreeSetMap.entrySet()) { if (!entry.getValue().isEmpty()) { MessageExt message = entry.getValue().pollFirst(); nextIterator = theParser.parseBatch(ByteBuffer.wrap(message.getBody())).iterator(); windows .computeIfAbsent(entry.getKey(), k -> new ConcurrentSkipListSet<>()) .add(message.getQueueOffset()); return nextIterator.next(); } } // should never happen. throw new RuntimeException("Unexpected Fatal Error! There should have been one row available."); }
// Returns the next parsed row, refilling the row iterator from the next raw
// message when the current batch is exhausted. Returns null when the message
// body is null or when the message fails its checksum (the corrupt message is
// logged and skipped rather than aborting ingestion).
@Nullable @Override public InputRow nextRow() { try { if (!nextIterator.hasNext()) { final byte[] message = iter.next().message(); if (message == null) { return null; } nextIterator = theParser.parseBatch(ByteBuffer.wrap(message)).iterator(); } return nextIterator.next(); } catch (InvalidMessageException e) { /* If the CRC error was introduced during wire transfer, skipping is not the best way to handle it. It would probably be better to shut down the firehose without committing and restart it. */ log.error(e, "Message failed its checksum and it is corrupt, will skip it"); return null; } }
// Re-point the row iterator at the batch of rows parsed from the next raw message.
nextIterator = firehoseParser.parseBatch(nextMsg).iterator();
// Parses a Hadoop record value into zero-or-more InputRows, dispatching on the
// runtime type of `value`. NOTE(review): `parser` is a raw InputRowParser;
// tightening it to a parameterized type would ripple to callers — left as-is.
private static List<InputRow> parseInputRow(Object value, InputRowParser parser) { if (parser instanceof StringInputRowParser && value instanceof Text) { //Note: This is to ensure backward compatibility with 0.7.0 and before //HadoopyStringInputRowParser can handle this and this special case is not needed //except for backward compatibility return Utils.nullableListOf(((StringInputRowParser) parser).parse(value.toString())); } else if (value instanceof InputRow) { return ImmutableList.of((InputRow) value); } else if (value == null) { // Pass through nulls so they get thrown away. return Utils.nullableListOf((InputRow) null); } else { return parser.parseBatch(value); } }
public static InputRow createRow(final Object t, final String dim1, final String dim2, final double m1)
{
  // Normalize the timestamp input (string/DateTime/etc.) to epoch millis in UTC,
  // then parse a single-event map into an InputRow.
  final long timestampMillis = new DateTime(t, ISOChronology.getInstanceUTC()).getMillis();
  final ImmutableMap<String, Object> event = ImmutableMap.of(
      "t", timestampMillis,
      "dim1", dim1,
      "dim2", dim2,
      "m1", m1
  );
  return PARSER.parseBatch(event).get(0);
}
@Override public boolean hasMore() { nextRow = null; try { if (nextIterator.hasNext()) { nextRow = nextIterator.next(); return true; } // Wait for the next delivery. This will block until something is available. final Delivery delivery = consumer.nextDelivery(); if (delivery != null) { lastDeliveryTag = delivery.getEnvelope().getDeliveryTag(); nextIterator = firehoseParser.parseBatch(ByteBuffer.wrap(delivery.getBody())).iterator(); if (nextIterator.hasNext()) { nextRow = nextIterator.next(); // If delivery is non-null, we report that there is something more to process. return true; } } } catch (InterruptedException e) { // A little unclear on how we should handle this. // At any rate, we're in an unknown state now so let's log something and return false. log.wtf(e, "Got interrupted while waiting for next delivery. Doubt this should ever happen."); } // This means that delivery is null or we caught the exception above so we report that we have // nothing more to process. return false; }
@Override
public InputRow nextRow()
{
  synchronized (this) {
    // Pop the next queued event and parse it; a row carrying FAIL_DIM simulates
    // an unparseable event for tests.
    final InputRow parsed = parser.parseBatch(queue.removeFirst().orElse(null)).get(0);
    if (parsed == null || parsed.getRaw(FAIL_DIM) == null) {
      return parsed;
    }
    throw new ParseException(FAIL_DIM);
  }
}
// Parse each raw event map into (possibly multiple) InputRows and accumulate them.
for (final Map<String, Object> event : events) { rows.addAll(parser.parseBatch(event));
@Override
public InputRow nextRow()
{
  synchronized (this) {
    // Dequeue and parse the next event under the lock so concurrent readers
    // each get a distinct row.
    final InputRow result = parser.parseBatch(queue.removeFirst().orElse(null)).get(0);
    // A non-null row tagged with FAIL_DIM deliberately simulates a parse failure.
    final boolean shouldFail = result != null && result.getRaw(FAIL_DIM) != null;
    if (shouldFail) {
      throw new ParseException(FAIL_DIM);
    }
    return result;
  }
}
// Parse the raw map into a single InputRow and add it to the index under test.
index.add(ROW_PARSER.parseBatch((Map<String, Object>) o).get(0));
@Test
public void testTransformTimeFromOtherFields()
{
  // __time can be synthesized purely from other fields: (a + b) hours as millis.
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(
          new ExpressionTransform("__time", "(a + b) * 3600000", TestExprMacroTable.INSTANCE)
      )
  );

  final InputRowParser<Map<String, Object>> decorated = transformSpec.decorate(PARSER);
  final InputRow actual = decorated.parseBatch(ROW1).get(0);
  Assert.assertNotNull(actual);
  Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z"), actual.getTimestamp());
  Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z").getMillis(), actual.getTimestampFromEpoch());
}
@Test
public void testTransformTimeFromTime()
{
  // A transform may read and rewrite __time itself: shift it forward one hour.
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(
          new ExpressionTransform("__time", "__time + 3600000", TestExprMacroTable.INSTANCE)
      )
  );

  final InputRowParser<Map<String, Object>> decorated = transformSpec.decorate(PARSER);
  final InputRow actual = decorated.parseBatch(ROW1).get(0);
  Assert.assertNotNull(actual);
  Assert.assertEquals(DateTimes.of("2000-01-01T01:00:00Z"), actual.getTimestamp());
  Assert.assertEquals(DateTimes.of("2000-01-01T01:00:00Z").getMillis(), actual.getTimestampFromEpoch());
}
@Test public void testFilterOnTransforms() { // Filters are allowed to refer to transformed fields; double-check this. final TransformSpec transformSpec = new TransformSpec( new AndDimFilter( ImmutableList.of( new SelectorDimFilter("x", "foo", null), new SelectorDimFilter("f", "foobar", null), new SelectorDimFilter("g", "5.0", null) ) ), ImmutableList.of( new ExpressionTransform("f", "concat(x,y)", TestExprMacroTable.INSTANCE), new ExpressionTransform("g", "a + b", TestExprMacroTable.INSTANCE) ) ); final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER); Assert.assertNotNull(parser.parseBatch(ROW1).get(0)); Assert.assertNull(parser.parseBatch(ROW2).get(0)); }
@Test public void testTransformOverwriteField() { // Transforms are allowed to overwrite fields, and to refer to the fields they overwrite; double-check this. final TransformSpec transformSpec = new TransformSpec( null, ImmutableList.of( new ExpressionTransform("x", "concat(x,y)", TestExprMacroTable.INSTANCE) ) ); final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER); final InputRow row = parser.parseBatch(ROW1).get(0); Assert.assertNotNull(row); Assert.assertEquals(DateTimes.of("2000-01-01").getMillis(), row.getTimestampFromEpoch()); Assert.assertEquals(DateTimes.of("2000-01-01"), row.getTimestamp()); Assert.assertEquals(ImmutableList.of("f", "x", "y"), row.getDimensions()); Assert.assertEquals(ImmutableList.of("foobar"), row.getDimension("x")); Assert.assertEquals(3.0, row.getMetric("b").doubleValue(), 0); Assert.assertNull(row.getRaw("f")); }
@Test
public void testTransforms()
{
  // Three transforms: string concat, arithmetic, and one ("h") that tries to
  // read other transformed fields.
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(
          new ExpressionTransform("f", "concat(x,y)", TestExprMacroTable.INSTANCE),
          new ExpressionTransform("g", "a + b", TestExprMacroTable.INSTANCE),
          new ExpressionTransform("h", "concat(f,g)", TestExprMacroTable.INSTANCE)
      )
  );

  final InputRowParser<Map<String, Object>> decorated = transformSpec.decorate(PARSER);
  final InputRow actual = decorated.parseBatch(ROW1).get(0);
  Assert.assertNotNull(actual);

  Assert.assertEquals(DateTimes.of("2000-01-01").getMillis(), actual.getTimestampFromEpoch());
  Assert.assertEquals(DateTimes.of("2000-01-01"), actual.getTimestamp());
  Assert.assertEquals(ImmutableList.of("f", "x", "y"), actual.getDimensions());
  Assert.assertEquals(ImmutableList.of("foo"), actual.getDimension("x"));
  Assert.assertEquals(3.0, actual.getMetric("b").doubleValue(), 0);
  Assert.assertEquals("foobar", actual.getRaw("f"));
  Assert.assertEquals(ImmutableList.of("foobar"), actual.getDimension("f"));
  Assert.assertEquals(ImmutableList.of("5.0"), actual.getDimension("g"));
  // "h" refers to other transforms' outputs, which are not visible as inputs,
  // so it comes out empty as a dimension.
  Assert.assertEquals(ImmutableList.of(), actual.getDimension("h"));
  Assert.assertEquals(5L, actual.getMetric("g").longValue());
}
// Build a synthetic row for timestamp i, parse it, and add it to the index.
index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0));