  // DefaultValueFactory: derives a value from the pipeline's job name,
  // with hyphens rewritten to underscores.
  @Override
  public String create(PipelineOptions options) {
    return options.getJobName().replace('-', '_');
  }
}
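// Usage sketch (hedged): a DefaultValueFactory like the one above is typically wired to an
// option via Beam's @Default.InstanceFactory. The interface, factory, and getter names below
// (UnderscoredJobNameOptions, UnderscoredJobNameFactory, getUnderscoredJobName) are
// hypothetical, not from the source.
public interface UnderscoredJobNameOptions extends PipelineOptions {
  @Default.InstanceFactory(UnderscoredJobNameFactory.class)
  String getUnderscoredJobName();

  void setUnderscoredJobName(String value);
}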
  public static void configureJobName(PipelineOptions options, String dbName, String tableName) {
    try {
      options.as(ApplicationNameOptions.class).setAppName("JdbcAvroJob");
    } catch (Exception e) {
      LOGGER.warn("Unable to configure ApplicationName", e);
    }
    // Only synthesize a name when none was set or the sentinel "auto" was passed.
    if (options.getJobName() == null || "auto".equals(options.getJobName())) {
      String randomPart = Integer.toHexString(ThreadLocalRandom.current().nextInt());
      options.setJobName(
          String.join(
              "-", "dbeam", normalizeString(dbName), normalizeString(tableName), randomPart));
    }
  }
}
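// Usage sketch (hedged): values are illustrative, and normalizeString's exact behavior is not
// shown in this fragment. With the sentinel "auto", a name of the form
// "dbeam-<db>-<table>-<randomHex>" is generated.
PipelineOptions opts = PipelineOptionsFactory.create();
opts.setJobName("auto");
configureJobName(opts, "sales_db", "orders");
// opts.getJobName() -> e.g. "dbeam-sales_db-orders-1a2b3c4d"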
      @ProcessElement
      public void process(ProcessContext c) {
        // Build a unique load-job prefix from the sanitized job name plus a random UUID.
        c.output(
            String.format(
                "beam_load_%s_%s",
                c.getPipelineOptions().getJobName().replaceAll("-", ""),
                BigQueryHelpers.randomUUIDString()));
      }
    }))
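// Illustration (assumed values): for a pipeline named "my-load-job", each emitted element has
// the shape "beam_load_myloadjob_<uuid>" -- hyphens stripped from the job name, a random UUID
// string appended per element.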
@Override public String create(PipelineOptions options) { return "projects/" + options.as(GcpOptions.class).getProject() + "/topics/" + options.getJobName(); } }
@Override public String create(PipelineOptions options) { return "projects/" + options.as(GcpOptions.class).getProject() + "/subscriptions/" + options.getJobName(); } }
public FeastMetricsPubSubSink(PipelineOptions pipelineOptions) {
  this.pubsubHelper = new FeastPubsubHelper(pipelineOptions);
  this.jobName = pipelineOptions.getJobName();
  FeastMetricsPubSubSinkOptions sinkOptions =
      pipelineOptions.as(FeastMetricsPubSubSinkOptions.class);
  this.topicUrl = sinkOptions.getMetricsPubSubSinkTopicUrl();
  // Granularity is optional; default to hourly when not configured.
  String granularityString =
      Optional.ofNullable(sinkOptions.getMetricsPubSubSinkGranularity()).orElse("HOUR");
  this.granularity = Granularity.Enum.valueOf(granularityString.toUpperCase());
  Preconditions.checkNotNull(topicUrl, "FeastMetricsPubSubSink requires pubsub topic url");
}
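// Usage sketch (hedged): assumes Beam's PipelineOptions proxy generates the matching setter for
// getMetricsPubSubSinkTopicUrl, per the standard getter/setter convention; the topic value is
// illustrative.
FeastMetricsPubSubSinkOptions sinkOpts =
    PipelineOptionsFactory.create().as(FeastMetricsPubSubSinkOptions.class);
sinkOpts.setMetricsPubSubSinkTopicUrl("projects/my-project/topics/feast-metrics");
// Granularity is left unset here and falls back to HOUR.
FeastMetricsPubSubSink sink = new FeastMetricsPubSubSink(sinkOpts);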
GlobalWatermarkHolder.clear();
LOG.info("About to run test pipeline " + options.getJobName());
"Deferring combine transformation {} for job {}", transform, ctxt.getOptions().getJobName()); return true;
protected ExtractResult extractFiles(PipelineOptions options) throws Exception {
  BigQueryOptions bqOptions = options.as(BigQueryOptions.class);
  TableReference tableToExtract = getTableToExtract(bqOptions);
  BigQueryServices.DatasetService datasetService = bqServices.getDatasetService(bqOptions);
  Table table = datasetService.getTable(tableToExtract);
  if (table == null) {
    throw new IOException(
        String.format(
            "Cannot start an export job since table %s does not exist",
            BigQueryHelpers.toTableSpec(tableToExtract)));
  }
  TableSchema schema = table.getSchema();
  JobService jobService = bqServices.getJobService(bqOptions);
  // Derive a deterministic extract job id from the pipeline's job name and this step's UUID.
  String extractJobId = getExtractJobId(createJobIdToken(options.getJobName(), stepUuid));
  final String extractDestinationDir =
      resolveTempLocation(bqOptions.getTempLocation(), "BigQueryExtractTemp", stepUuid);
  String bqLocation =
      BigQueryHelpers.getDatasetLocation(
          datasetService, tableToExtract.getProjectId(), tableToExtract.getDatasetId());
  List<ResourceId> tempFiles =
      executeExtract(
          extractJobId,
          tableToExtract,
          jobService,
          bqOptions.getProject(),
          extractDestinationDir,
          bqLocation);
  return new ExtractResult(schema, tempFiles);
}
.setJobName(options.getJobName())
.setPipeline(PipelineTranslation.toProto(pipeline))
.setPipelineOptions(PipelineOptionsTranslation.toProto(options))
createTempTableReference("project-id", createJobIdToken(options.getJobName(), stepUuid)); List<TableRow> expected = ImmutableList.of(