/**
 * A dummy plugin class for {@link ServiceArtifactTestRun}.
 */
@Plugin
public class DummyPlugin extends DummyPluginBase {
  // Intentionally empty: the test only needs a concrete @Plugin subclass to exist.
}
// Builds the PluginClass metadata for a plugin class: extracts the name of the config
// field, its plugin properties, and any declared HTTP endpoints, then constructs the
// PluginClass descriptor. NOTE(review): this statement is truncated in this view — the
// remaining PluginClass constructor arguments are not visible here; confirm against the
// full file before editing.
String configField = getProperties(TypeToken.of(cls), pluginProperties); Set<String> pluginEndpoints = getPluginEndpoints(cls); PluginClass pluginClass = new PluginClass(pluginAnnotation.type(), getPluginName(cls), getPluginDescription(cls), cls.getName(), configField, pluginProperties, pluginEndpoints,
// Builds the PluginClass metadata for a plugin class: extracts the name of the config
// field, its plugin properties, and any declared HTTP endpoints, then constructs the
// PluginClass descriptor. NOTE(review): this statement is truncated in this view — the
// remaining PluginClass constructor arguments are not visible here; confirm against the
// full file before editing.
String configField = getProperties(TypeToken.of(cls), pluginProperties); Set<String> pluginEndpoints = getPluginEndpoints(cls); PluginClass pluginClass = new PluginClass(pluginAnnotation.type(), getPluginName(cls), getPluginDescription(cls), cls.getName(), configField, pluginProperties, pluginEndpoints,
/** * No-op plugin, for CLI testing */ @Plugin(type = "runnable") @Name("FakePlugin") public class FakePlugin implements Runnable { public static final String NAME = "FakePlugin"; @Override public void run() { // do nothing } }
/** * Transform used to test error emission. Writes all its input as errors. */ @Plugin(type = Transform.PLUGIN_TYPE) @Name("AllError") public class AllErrorTransform extends Transform<StructuredRecord, StructuredRecord> { public static final PluginClass PLUGIN_CLASS = getPluginClass(); @Override public void configurePipeline(PipelineConfigurer pipelineConfigurer) throws IllegalArgumentException { StageConfigurer stageConfigurer = pipelineConfigurer.getStageConfigurer(); stageConfigurer.setOutputSchema(stageConfigurer.getInputSchema()); } @Override public void transform(StructuredRecord input, Emitter<StructuredRecord> emitter) throws Exception { emitter.emitError(new InvalidEntry<>(500, "msg", input)); } public static ETLPlugin getPlugin() { return new ETLPlugin("AllError", Transform.PLUGIN_TYPE, new HashMap<String, String>(), null); } private static PluginClass getPluginClass() { Map<String, PluginPropertyField> properties = new HashMap<>(); return new PluginClass(Transform.PLUGIN_TYPE, "AllError", "", AllErrorTransform.class.getName(), null, properties); } }
/** * Transform that doubles every record it receives. */ @Plugin(type = Transform.PLUGIN_TYPE) @Name("Double") public class DoubleTransform extends Transform<StructuredRecord, StructuredRecord> { public static final PluginClass PLUGIN_CLASS = getPluginClass(); @Override public void configurePipeline(PipelineConfigurer pipelineConfigurer) throws IllegalArgumentException { StageConfigurer stageConfigurer = pipelineConfigurer.getStageConfigurer(); stageConfigurer.setOutputSchema(stageConfigurer.getInputSchema()); } @Override public void transform(StructuredRecord input, Emitter<StructuredRecord> emitter) throws Exception { emitter.emit(input); emitter.emit(input); } public static ETLPlugin getPlugin() { return new ETLPlugin("Double", Transform.PLUGIN_TYPE, new HashMap<String, String>(), null); } private static PluginClass getPluginClass() { Map<String, PluginPropertyField> properties = new HashMap<>(); return new PluginClass(Transform.PLUGIN_TYPE, "Double", "", DoubleTransform.class.getName(), null, properties); } }
// Post-action plugin. NOTE(review): registered under the plugin name "TokenWriter" while
// the class is named NodeStatesAction — confirm the mismatch is intentional. The class
// body continues beyond this view.
@Plugin(type = PostAction.PLUGIN_TYPE) @Name("TokenWriter") public class NodeStatesAction extends PostAction {
// ErrorTransform plugin registered as "Filter". Presumably filters error records —
// the class body continues beyond this view; confirm against the full file.
@Plugin(type = ErrorTransform.PLUGIN_TYPE) @Name("Filter") public class FilterErrorTransform extends ErrorTransform<StructuredRecord, StructuredRecord> {
// Batch aggregator plugin registered as "Identity". Presumably passes records through
// unchanged — the class body continues beyond this view; confirm against the full file.
@Plugin(type = BatchAggregator.PLUGIN_TYPE) @Name("Identity") public class IdentityAggregator extends BatchAggregator<StructuredRecord, StructuredRecord, StructuredRecord> {
// Transform plugin registered as "Identity". Presumably emits its input unchanged —
// the class body continues beyond this view; confirm against the full file.
@Plugin(type = Transform.PLUGIN_TYPE) @Name("Identity") public class IdentityTransform extends Transform<StructuredRecord, StructuredRecord> {
// Transform plugin named via the ExceptionTransform.NAME constant. The class body
// (including the NAME constant) continues beyond this view.
@Plugin(type = Transform.PLUGIN_TYPE) @Name(ExceptionTransform.NAME) public class ExceptionTransform extends Transform<StructuredRecord, StructuredRecord> {
// Transform plugin named via the NullAlertTransform.NAME constant. The class body
// (including the NAME constant) continues beyond this view.
@Plugin(type = Transform.PLUGIN_TYPE) @Name(NullAlertTransform.NAME) public class NullAlertTransform extends Transform<StructuredRecord, StructuredRecord> {
// Transform plugin registered as "IntValueFilter". Presumably filters records by an
// int field value — the class body continues beyond this view; confirm against the full file.
@Plugin(type = Transform.PLUGIN_TYPE) @Name("IntValueFilter") public class IntValueFilterTransform extends Transform<StructuredRecord, StructuredRecord> {
// ErrorTransform plugin registered as "Flatten". The class body continues beyond this view.
@Plugin(type = ErrorTransform.PLUGIN_TYPE) @Name("Flatten") public class FlattenErrorTransform extends ErrorTransform<StructuredRecord, StructuredRecord> {
// Splitter transform registered as "NullFieldSplitter". Presumably routes records to
// different ports based on a null field — the class body continues beyond this view.
@Plugin(type = SplitterTransform.PLUGIN_TYPE) @Name("NullFieldSplitter") public class NullFieldSplitterTransform extends SplitterTransform<StructuredRecord, StructuredRecord> {
/** * Source which has a requirement */ @Plugin(type = BatchSource.PLUGIN_TYPE) @Name(IncapableSource.NAME) @Requirements(datasetTypes = {Table.TYPE}) public class IncapableSource extends BatchSource<byte[], Row, StructuredRecord> { public static final String NAME = "IncapableSource"; @Override public void prepareRun(BatchSourceContext context) throws Exception { } /** * @return {@link IncapableSource} as the ETLPlugin */ public static ETLPlugin getPlugin() { return new ETLPlugin(IncapableSource.NAME, BatchSource.PLUGIN_TYPE, Collections.emptyMap(), null); } }
/** * Sink which has an requirement which can be meet with another special requirement */ @Plugin(type = BatchSink.PLUGIN_TYPE) @Name(IncapableSink.NAME) @Requirements(datasetTypes = {Table.TYPE, KeyValueTable.TYPE}) public class IncapableSink extends BatchSink<StructuredRecord, byte[], Put> { public static final String NAME = "IncapableSink"; @Override public void prepareRun(BatchSinkContext context) throws Exception { } /** * @return {@link IncapableSink} as the ETLPlugin */ public static ETLPlugin getPlugin() { return new ETLPlugin(IncapableSink.NAME, BatchSink.PLUGIN_TYPE, Collections.emptyMap(), null); } }
// Deprecated action plugin (superseded by File Delete, per its description).
// The class declaration and body continue beyond this view.
@Plugin(type = Action.PLUGIN_TYPE) @Name("HDFSDelete") @Description("Action to delete files on HDFS. (Deprecated. Use File Delete instead.)")
// Deprecated action plugin (superseded by File Move, per its description).
// The class declaration and body continue beyond this view.
@Plugin(type = Action.PLUGIN_TYPE) @Name("HDFSMove") @Description("Action to move files within HDFS. (Deprecated. Use File Move instead.)")
/**
 * UDT registered as "len" that returns the length of its string argument.
 */
@Plugin(type = "udt")
@Name("len")
public class StringLengthUDT implements UDT {

  @Override
  public int apply(String str) {
    // NOTE(review): throws NullPointerException for a null argument — confirm callers
    // never pass null.
    return str.length();
  }
}