/**
 * Creates a wrapper that pairs the supplied pipeline options with the default
 * {@link BigQueryServicesImpl} implementation.
 *
 * @param bqOptions BigQuery pipeline options retained for later service calls
 */
public BigQueryServicesWrapper(BigQueryOptions bqOptions) {
  this.bqOptions = bqOptions;
  this.bqServices = new BigQueryServicesImpl();
}
/**
 * Creates a {@code CreateTables} transform backed by the default {@link BigQueryServicesImpl}.
 *
 * <p>Delegates to the full constructor, supplying the production BigQuery services instance.
 *
 * @param createDisposition policy controlling whether destination tables may be created
 * @param dynamicDestinations supplies the destination table for each element
 */
public CreateTables( CreateDisposition createDisposition, DynamicDestinations<?, DestinationT> dynamicDestinations) { this(createDisposition, new BigQueryServicesImpl(), dynamicDestinations); }
/**
 * Constructs a {@code BatchLoads} transform using the default {@link BigQueryServicesImpl}.
 *
 * <p>Tunables not exposed here (writer count, file size, shard count, triggering frequency)
 * are initialized to their defaults; triggering frequency starts unset ({@code null}).
 *
 * @param writeDisposition policy applied when the destination table already contains data
 * @param createDisposition policy controlling whether destination tables may be created
 * @param singletonTable whether all elements are written to a single known table
 * @param dynamicDestinations supplies the destination table for each element
 * @param destinationCoder coder for the destination type
 * @param customGcsTempLocation optional override of the GCS temp location for load files
 * @param loadJobProjectId optional project to bill for load jobs; may be {@code null}
 * @param ignoreUnknownValues whether rows with unknown fields are accepted rather than rejected
 */
BatchLoads(
    WriteDisposition writeDisposition,
    CreateDisposition createDisposition,
    boolean singletonTable,
    DynamicDestinations<?, DestinationT> dynamicDestinations,
    Coder<DestinationT> destinationCoder,
    ValueProvider<String> customGcsTempLocation,
    @Nullable ValueProvider<String> loadJobProjectId,
    boolean ignoreUnknownValues) {
  // Caller-provided configuration.
  this.writeDisposition = writeDisposition;
  this.createDisposition = createDisposition;
  this.singletonTable = singletonTable;
  this.dynamicDestinations = dynamicDestinations;
  this.destinationCoder = destinationCoder;
  this.customGcsTempLocation = customGcsTempLocation;
  this.loadJobProjectId = loadJobProjectId;
  this.ignoreUnknownValues = ignoreUnknownValues;
  // Defaults; adjustable via the corresponding setters.
  this.bigQueryServices = new BigQueryServicesImpl();
  this.maxNumWritersPerBundle = DEFAULT_MAX_NUM_WRITERS_PER_BUNDLE;
  this.maxFileSize = DEFAULT_MAX_FILE_SIZE;
  this.numFileShards = DEFAULT_NUM_FILE_SHARDS;
  this.triggeringFrequency = null;
}
/**
 * Private constructor capturing the full streaming-write configuration.
 *
 * @param bigQueryServices services facade used to talk to BigQuery
 * @param retryPolicy policy deciding whether failed inserts are retried
 * @param extendedErrorInfo whether failed inserts carry extended error details
 * @param skipInvalidRows whether invalid rows are skipped instead of failing the insert
 * @param ignoreUnknownValues whether unknown fields in a row are ignored
 */
private StreamingWriteTables(
    BigQueryServices bigQueryServices,
    InsertRetryPolicy retryPolicy,
    boolean extendedErrorInfo,
    boolean skipInvalidRows,
    boolean ignoreUnknownValues) {
  this.bigQueryServices = bigQueryServices;
  this.retryPolicy = retryPolicy;
  this.extendedErrorInfo = extendedErrorInfo;
  this.skipInvalidRows = skipInvalidRows;
  this.ignoreUnknownValues = ignoreUnknownValues;
}
/**
 * Creates a {@code StreamingInserts} transform with default settings: the production
 * {@link BigQueryServicesImpl}, an always-retry insert policy, and extended error info,
 * skip-invalid-rows, and ignore-unknown-values all disabled.
 *
 * @param createDisposition policy controlling whether destination tables may be created
 * @param dynamicDestinations supplies the destination table for each element
 */ public StreamingInserts( CreateDisposition createDisposition, DynamicDestinations<?, DestinationT> dynamicDestinations) { this( createDisposition, dynamicDestinations, new BigQueryServicesImpl(), InsertRetryPolicy.alwaysRetry(), false, false, false); }
/**
 * Initializes the BigQuery test fixtures: obtains a dataset service from the default
 * {@link BigQueryServicesImpl} and creates the table for the given test description.
 *
 * <p>NOTE(review): statement order looks significant — {@code createTable} presumably uses the
 * just-assigned {@code datasetService}; confirm before reordering.
 *
 * @param description identifies the current test; used to derive the table to create
 * @throws IOException if talking to BigQuery fails
 * @throws InterruptedException if the operation is interrupted
 */ private void initializeBigQuery(Description description) throws IOException, InterruptedException { this.datasetService = new BigQueryServicesImpl().getDatasetService(pipelineOptions); this.table = createTable(description); }
/**
 * Reads from a BigQuery table or query and returns a {@link PCollection} containing one element
 * per row of the table or query result, parsed from the BigQuery AVRO format using the given
 * function.
 *
 * <p>Each {@link SchemaAndRecord} carries the BigQuery {@link TableSchema} and a {@link
 * GenericRecord} representing the row, indexed by column name. Example parse function for click
 * events:
 *
 * <pre>{@code
 * class ClickEvent { long userId; String url; ... }
 *
 * p.apply(BigQueryIO.read(new SerializableFunction<SchemaAndRecord, ClickEvent>() {
 *   public ClickEvent apply(SchemaAndRecord record) {
 *     GenericRecord r = record.getRecord();
 *     return new ClickEvent((Long) r.get("userId"), (String) r.get("url"));
 *   }
 * }).from("..."));
 * }</pre>
 */
public static <T> TypedRead<T> read(SerializableFunction<SchemaAndRecord, T> parseFn) {
  // Defaults: validation on, template compatibility off, production BigQuery services.
  return new AutoValue_BigQueryIO_TypedRead.Builder<T>()
      .setParseFn(parseFn)
      .setBigQueryServices(new BigQueryServicesImpl())
      .setValidate(true)
      .setWithTemplateCompatibility(false)
      .build();
}
return new AutoValue_BigQueryIO_Write.Builder<T>() .setValidate(true) .setBigQueryServices(new BigQueryServicesImpl()) .setCreateDisposition(Write.CreateDisposition.CREATE_IF_NEEDED) .setWriteDisposition(Write.WriteDisposition.WRITE_EMPTY)