/**
 * Not supported: this aggregator cannot participate in rollup at ingestion time.
 *
 * @throws UOE always
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("DoubleLastAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Not supported: this aggregator cannot participate in rollup at ingestion time.
 *
 * @throws UOE always
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("LongLastAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Not supported: this aggregator cannot participate in rollup at ingestion time.
 *
 * @throws UOE always
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("LongFirstAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Not supported: this aggregator cannot participate in rollup at ingestion time.
 *
 * @throws UOE always
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("FloatFirstAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Character-stream reads are not supported by this file object.
 *
 * @param ignoreEncodingErrors ignored; the method fails unconditionally
 * @throws UOE always
 */
@Override
public Reader openReader(boolean ignoreEncodingErrors) throws IOException
{
  throw new UOE("Cannot open reader");
}
/**
 * Not supported: this aggregator cannot participate in rollup at ingestion time.
 *
 * @throws UOE always
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("DoubleFirstAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Streaming writes are not supported for this S3-backed object.
 *
 * @throws UOE always
 */
@Override
public OutputStream openOutputStream() throws IOException
{
  throw new UOE("Cannot stream S3 output");
}
/**
 * Field names cannot be assigned on this implementation.
 *
 * @param fieldNames ignored; the method fails unconditionally
 * @throws UOE always
 */
@Override
public void setFieldNames(Iterable<String> fieldNames)
{
  throw new UOE("No field names available");
}
/**
 * Not supported: this aggregator cannot participate in rollup at ingestion time.
 *
 * @throws UOE always
 */
@Override
public AggregateCombiner makeAggregateCombiner()
{
  throw new UOE("FloatLastAggregatorFactory is not supported during ingestion for rollup");
}
/**
 * Character content is not available from this file object.
 *
 * @param ignoreEncodingErrors ignored; the method fails unconditionally
 * @throws UOE always
 */
@Override
public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException
{
  throw new UOE("CharSequence not supported");
}
/**
 * The no-op implementation carries no environment configuration.
 *
 * @throws UOE always
 */
@Override
public Void getEnvConfig()
{
  throw new UOE("No config for Noop!");
}
/**
 * Character content is not available from this file object.
 *
 * @param ignoreEncodingErrors ignored; the method fails unconditionally
 * @throws UOE always
 */
@Override
public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException
{
  throw new UOE("Cannot open character sequence");
}
/**
 * Character-stream writes are not supported by this file object.
 *
 * @throws UOE always
 */
@Override
public Writer openWriter() throws IOException
{
  throw new UOE("Cannot open writer");
}
/**
 * Deletion is not supported for anonymously accessed S3 objects.
 *
 * @throws UOE always
 */
@Override
public boolean delete()
{
  throw new UOE("Cannot delete S3 items anonymously. jetS3t doesn't support authenticated deletes easily.");
}
};
/**
 * Field names are not available from this implementation.
 *
 * @throws UOE always
 */
@Override
public List<String> getFieldNames()
{
  throw new UOE("No field names available");
}
};
/**
 * A single backing segment does not exist for this task type; callers must use
 * interval and datasource instead. Excluded from JSON serialization via
 * {@code @JsonIgnore}.
 *
 * @throws UOE always
 */
@Override
@JsonIgnore
public DataSegment getSegment()
{
  throw new UOE(
      "Sub-less data segment not supported for hadoop converter task. Specify interval and datasource instead"
  );
}
/** * Creates an AggregateCombiner to fold rollup aggregation results from serveral "rows" of different indexes during * index merging. AggregateCombiner implements the same logic as {@link #combine}, with the difference that it uses * {@link io.druid.segment.ColumnValueSelector} and it's subinterfaces to get inputs and implements {@code * ColumnValueSelector} to provide output. * * @see AggregateCombiner * @see io.druid.segment.IndexMerger */ @SuppressWarnings("unused") // Going to be used when https://github.com/druid-io/druid/projects/2 is complete public AggregateCombiner makeAggregateCombiner() { throw new UOE("[%s] does not implement makeAggregateCombiner()", this.getClass().getName()); }
// Fail fast on an unrecognized type value, surfacing the offending type in the message.
throw new UOE("Unknown type[%s]", type);
// Default switch arm: any field type without an explicit case is rejected outright.
break; default: throw new UOE("Unsupported field type[%s]", fieldSpec.getType());