// Anonymous S3 access cannot perform deletes through jetS3t; always throws UOE.
// NOTE(review): trailing "};" closes an enclosing anonymous class not visible here.
@Override public boolean delete() { throw new UOE("Cannot delete S3 items anonymously. jetS3t doesn't support authenticated deletes easily."); } };
/**
 * Writing is not supported by this HDFS-backed file object; always throws {@code UOE}.
 */
@Override
public Writer openWriter()
{
  throw new UOE("HDFS Writer not supported");
}
/**
 * Streaming output to S3 is not supported here; always throws {@code UOE}.
 */
@Override
public OutputStream openOutputStream()
{
  throw new UOE("Cannot stream S3 output");
}
/**
 * Character content is not available from this file object; always throws {@code UOE}.
 *
 * @param ignoreEncodingErrors unused — no content can be produced
 */
@Override
public CharSequence getCharContent(boolean ignoreEncodingErrors)
{
  throw new UOE("Cannot open character sequence");
}
/**
 * Rollup at ingestion time is not supported for LongFirstAggregatorFactory,
 * so no combiner can be built; always throws {@code UOE}.
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("LongFirstAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Reading is not supported by this HDFS-backed file object; always throws {@code UOE}.
 *
 * @param ignoreEncodingErrors unused — no reader can be produced
 */
@Override
public Reader openReader(boolean ignoreEncodingErrors)
{
  throw new UOE("HDFS Reader not supported");
}
/**
 * Rollup at ingestion time is not supported for DoubleLastAggregatorFactory,
 * so no combiner can be built; always throws {@code UOE}.
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("DoubleLastAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Character content is not available from this HDFS-backed file object; always throws {@code UOE}.
 *
 * @param ignoreEncodingErrors unused — no content can be produced
 */
@Override
public CharSequence getCharContent(boolean ignoreEncodingErrors)
{
  throw new UOE("HDFS CharSequence not supported");
}
/**
 * Rollup at ingestion time is not supported for FloatLastAggregatorFactory,
 * so no combiner can be built; always throws {@code UOE}.
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("FloatLastAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Field names cannot be set on this implementation; always throws {@code UOE}.
 *
 * @param fieldNames unused
 */
@Override
public void setFieldNames(Iterable<String> fieldNames)
{
  throw new UOE("No field names available");
}
// Field names are unavailable from this implementation; always throws UOE.
// NOTE(review): trailing "};" closes an enclosing anonymous class not visible here.
@Override public List<String> getFieldNames() { throw new UOE("No field names available"); } };
/**
 * Rollup at ingestion time is not supported for FloatFirstAggregatorFactory,
 * so no combiner can be built; always throws {@code UOE}.
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("FloatFirstAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * The no-op implementation has no environment configuration; always throws {@code UOE}.
 */
@Override
public Void getEnvConfig()
{
  throw new UOE("No config for Noop!");
}
/**
 * Rollup at ingestion time is not supported for DoubleFirstAggregatorFactory,
 * so no combiner can be built; always throws {@code UOE}.
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("DoubleFirstAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Reading is not supported by this file object; always throws {@code UOE}.
 *
 * @param ignoreEncodingErrors unused — no reader can be produced
 */
@Override
public Reader openReader(boolean ignoreEncodingErrors)
{
  throw new UOE("Cannot open reader");
}
/**
 * Writing is not supported by this file object; always throws {@code UOE}.
 */
@Override
public Writer openWriter()
{
  throw new UOE("Cannot open writer");
}
/**
 * Rollup at ingestion time is not supported for LongLastAggregatorFactory,
 * so no combiner can be built; always throws {@code UOE}.
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("LongLastAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Character content is not available from this file object; always throws {@code UOE}.
 *
 * @param ignoreEncodingErrors unused — no content can be produced
 */
@Override
public CharSequence getCharContent(boolean ignoreEncodingErrors)
{
  throw new UOE("CharSequence not supported");
}
/**
 * Returns statistics for this Jobby.
 *
 * @return a map of statistic name to value, or null if this Jobby is unable to provide stats
 * @throws UOE if the implementation does not override this method
 */
@Nullable
default Map<String, Object> getStats()
{
  // Fixed: the message previously referenced a non-existent "getJobStats()" method,
  // which was misleading in logs; it now names the actual method, getStats().
  throw new UOE("This Jobby does not implement getStats(), Jobby class: [%s]", getClass());
}
/**
 * Creates an AggregateCombiner to fold rollup aggregation results from several "rows" of different indexes during
 * index merging. AggregateCombiner implements the same logic as {@link #combine}, with the difference that it uses
 * {@link org.apache.druid.segment.ColumnValueSelector} and its subinterfaces to get inputs and implements {@code
 * ColumnValueSelector} to provide output.
 *
 * @see AggregateCombiner
 * @see org.apache.druid.segment.IndexMerger
 */
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("[%s] does not implement makeAggregateCombiner()", this.getClass().getName());
}