public SpannerConfig withDatabaseId(String databaseId) {
  return withDatabaseId(ValueProvider.StaticValueProvider.of(databaseId));
}
/** Specifies the Cloud Spanner database. */
public ReadAll withDatabaseId(ValueProvider<String> databaseId) {
  SpannerConfig config = getSpannerConfig();
  return withSpannerConfig(config.withDatabaseId(databaseId));
}
/** Specifies the Cloud Spanner database. */
public Read withDatabaseId(ValueProvider<String> databaseId) {
  SpannerConfig config = getSpannerConfig();
  return withSpannerConfig(config.withDatabaseId(databaseId));
}
/** Specifies the Cloud Spanner database. */
public CreateTransaction withDatabaseId(ValueProvider<String> databaseId) {
  SpannerConfig config = getSpannerConfig();
  return withSpannerConfig(config.withDatabaseId(databaseId));
}
/** Specifies the Cloud Spanner database. */
public Write withDatabaseId(ValueProvider<String> databaseId) {
  SpannerConfig config = getSpannerConfig();
  return withSpannerConfig(config.withDatabaseId(databaseId));
}
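// The following is a minimal, hypothetical sketch of how these withDatabaseId setters are
// exercised on the read path. The instance ID, database ID, table, and column names
// ("my-instance", "my-database", "Users", "id", "name") are placeholder assumptions for
// illustration only, not values taken from the snippets above.
import com.google.cloud.spanner.Struct;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.values.PCollection;

public class SpannerReadSketch {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

    // withInstanceId/withDatabaseId point the read at one specific Spanner database;
    // internally the setters above simply fold these values into the SpannerConfig.
    PCollection<Struct> rows =
        p.apply(
            SpannerIO.read()
                .withInstanceId("my-instance")   // placeholder instance
                .withDatabaseId("my-database")   // placeholder database
                .withTable("Users")              // placeholder table
                .withColumns("id", "name"));     // placeholder columns

    p.run().waitUntilFinish();
  }
}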
public static void main(String[] args) {
  Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
  Pipeline p = Pipeline.create(options);

  SpannerConfig spannerConfig =
      SpannerConfig.create()
          .withInstanceId(options.getInstanceId())
          .withDatabaseId(options.getDatabaseId());

  // [START spanner_dataflow_readall]
  PCollection<Struct> allRecords =
      p.apply(
              SpannerIO.read()
                  .withSpannerConfig(spannerConfig)
                  .withQuery(
                      "SELECT t.table_name FROM information_schema.tables AS t WHERE t"
                          + ".table_catalog = '' AND t.table_schema = ''"))
          .apply(
              MapElements.into(TypeDescriptor.of(ReadOperation.class))
                  .via(
                      (SerializableFunction<Struct, ReadOperation>)
                          input -> {
                            String tableName = input.getString(0);
                            return ReadOperation.create().withQuery("SELECT * FROM " + tableName);
                          }))
          .apply(SpannerIO.readAll().withSpannerConfig(spannerConfig));
  // [END spanner_dataflow_readall]

  PCollection<Long> dbEstimatedSize =
      allRecords.apply(EstimateSize.create()).apply(Sum.longsGlobally());

  dbEstimatedSize
      .apply(ToString.elements())
      .apply(TextIO.write().to(options.getOutput()).withoutSharding());

  p.run().waitUntilFinish();
}
        .withDatabaseId(databaseId);

PCollectionView<Transaction> tx =
    p.apply(
        SpannerIO.createTransaction()
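// For reference, a self-contained sketch of the createTransaction pattern that the fragment
// above is cut from. The instance/database IDs and the strong timestamp bound are assumptions
// for illustration; only the shape of the call chain mirrors the snippet.
import com.google.cloud.spanner.TimestampBound;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.spanner.SpannerConfig;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO;
import org.apache.beam.sdk.io.gcp.spanner.Transaction;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.values.PCollectionView;

public class SpannerCreateTransactionSketch {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

    SpannerConfig spannerConfig =
        SpannerConfig.create()
            .withInstanceId("my-instance")   // placeholder
            .withDatabaseId("my-database");  // placeholder

    // One transaction handle, materialized as a side-input view, so that several
    // downstream reads can share a single consistent snapshot of the database.
    PCollectionView<Transaction> tx =
        p.apply(
            SpannerIO.createTransaction()
                .withSpannerConfig(spannerConfig)
                .withTimestampBound(TimestampBound.strong()));

    p.run().waitUntilFinish();
  }
}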
private SpannerConfig createSpannerConfig() {
  return SpannerConfig.create()
      .withProjectId(project)
      .withInstanceId(options.getInstanceId())
      .withDatabaseId(databaseName);
}
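// A hypothetical sketch of feeding a config like the one built by createSpannerConfig()
// into SpannerIO.write(). The project/instance/database IDs, the "Users" table, and its
// columns are illustrative assumptions, not taken from the surrounding snippets.
import com.google.cloud.spanner.Mutation;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.spanner.SpannerConfig;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;

public class SpannerWriteSketch {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

    SpannerConfig spannerConfig =
        SpannerConfig.create()
            .withProjectId("my-project")     // placeholder
            .withInstanceId("my-instance")   // placeholder
            .withDatabaseId("my-database");  // placeholder

    // Write a single upsert mutation; a real pipeline would produce mutations upstream.
    p.apply(
            Create.of(
                Mutation.newInsertOrUpdateBuilder("Users")
                    .set("id").to(1L)
                    .set("name").to("alice")
                    .build()))
        .apply(SpannerIO.write().withSpannerConfig(spannerConfig));

    p.run().waitUntilFinish();
  }
}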
.withProjectId("test") .withInstanceId("123") .withDatabaseId("aaa") .withServiceFactory(serviceFactory);
.withProjectId("test-project") .withInstanceId("test-instance") .withDatabaseId("test-database") .withServiceFactory(serviceFactory);
.withProjectId("test") .withInstanceId("123") .withDatabaseId("aaa") .withServiceFactory(serviceFactory);
public static void main(String[] args) {
  Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
  Pipeline p = Pipeline.create(options);

  SpannerConfig spannerConfig =
      SpannerConfig.create()
          .withHost(options.getSpannerHost())
          .withInstanceId(options.getInstanceId())
          .withDatabaseId(options.getDatabaseId());

  p.apply(new ImportTransform(spannerConfig, options.getInputDir(), options.getWaitForIndexes()));

  PipelineResult result = p.run();
  if (options.getWaitUntilFinish()
      &&
      /* Only if template location is null, there is a dataflow job to wait for. Else it's
       * template generation which doesn't start a dataflow job. */
      options.as(DataflowPipelineOptions.class).getTemplateLocation() == null) {
    result.waitUntilFinish();
  }
}
}
        .withHost(options.getSpannerHost())
        .withInstanceId(options.getInstanceId())
        .withDatabaseId(options.getDatabaseId());

p.begin()
    .apply(
    .withProjectId(options.getSpannerProjectId())
    .withInstanceId(options.getSpannerInstanceId())
    .withDatabaseId(options.getSpannerDatabaseId());