/**
 * Create a new PositionOutputStreamAdapter.
 *
 * @param out The Flink stream written to.
 */
PositionOutputStreamAdapter(FSDataOutputStream out) {
  this.out = checkNotNull(out, "out");
}
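// For context, a minimal hypothetical sketch of the rest of such an adapter,
// assuming it bridges Flink's org.apache.flink.core.fs.FSDataOutputStream to
// Parquet's org.apache.parquet.io.PositionOutputStream. The exact method set
// and closing behavior are assumptions, not the definitive implementation.
import java.io.IOException;
import java.util.Objects;

import org.apache.flink.core.fs.FSDataOutputStream;
import org.apache.parquet.io.PositionOutputStream;

class PositionOutputStreamAdapterSketch extends PositionOutputStream {

  private final FSDataOutputStream out;

  PositionOutputStreamAdapterSketch(FSDataOutputStream out) {
    this.out = Objects.requireNonNull(out, "out");
  }

  @Override
  public long getPos() throws IOException {
    return out.getPos(); // Flink streams track their own write position
  }

  @Override
  public void write(int b) throws IOException {
    out.write(b);
  }

  @Override
  public void write(byte[] buffer, int off, int len) throws IOException {
    out.write(buffer, off, len);
  }

  @Override
  public void flush() throws IOException {
    out.flush();
  }

  @Override
  public void close() throws IOException {
    // whether close() closes the wrapped Flink stream or only flushes it
    // is a design choice of the owning writer; delegating is assumed here
    out.close();
  }
}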
public static NanoTime fromBinary(Binary bytes) {
  Preconditions.checkArgument(bytes.length() == 12, "Must be 12 bytes");
  ByteBuffer buf = bytes.toByteBuffer();
  buf.order(ByteOrder.LITTLE_ENDIAN);
  long timeOfDayNanos = buf.getLong();
  int julianDay = buf.getInt();
  return new NanoTime(julianDay, timeOfDayNanos);
}
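// A round-trip sketch of the layout fromBinary expects: 8 bytes of nanos-of-day
// followed by a 4-byte Julian day, little-endian (the INT96 timestamp encoding).
// The import and the getJulianDay()/getTimeOfDayNanos() accessors assume the
// parquet-mr example NanoTime class; the concrete values are arbitrary.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import org.apache.parquet.example.data.simple.NanoTime;
import org.apache.parquet.io.api.Binary;

public class NanoTimeRoundTrip {
  public static void main(String[] args) {
    long timeOfDayNanos = 12L * 3_600 * 1_000_000_000L; // noon, as nanos-of-day
    int julianDay = 2458850;                            // an arbitrary Julian day

    ByteBuffer buf = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN);
    buf.putLong(timeOfDayNanos);
    buf.putInt(julianDay);

    NanoTime nt = NanoTime.fromBinary(Binary.fromConstantByteArray(buf.array()));
    System.out.println(nt.getJulianDay() + " " + nt.getTimeOfDayNanos());
  }
}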
private static long getCompactorTxnId(Configuration jobConf) {
  String snapshot = jobConf.get(ValidTxnList.VALID_TXNS_KEY);
  if (Strings.isNullOrEmpty(snapshot)) {
    throw new IllegalStateException(ValidTxnList.VALID_TXNS_KEY
        + " not found for writing to " + jobConf.get(FINAL_LOCATION));
  }
  ValidTxnList validTxnList = new ValidReadTxnList();
  validTxnList.readFromString(snapshot);
  // this is the id of the current (compactor) txn
  return validTxnList.getHighWatermark();
}

private void getWriter(Reporter reporter, ObjectInspector inspector,
/**
 * Initializes the internal state for decoding ints of `bitWidth`.
 */
private void init(int bitWidth) {
  Preconditions.checkArgument(bitWidth >= 0 && bitWidth <= 32,
      "bitWidth must be >= 0 and <= 32");
  this.bitWidth = bitWidth;
  this.bytesWidth = BytesUtils.paddedByteCountFromBits(bitWidth);
  this.packer = Packer.LITTLE_ENDIAN.newBytePacker(bitWidth);
}
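// A small sketch of the two helpers init() wires up, using the public
// parquet-column APIs shown above; the concrete values are illustrative only.
import java.util.Arrays;

import org.apache.parquet.bytes.BytesUtils;
import org.apache.parquet.column.values.bitpacking.BytePacker;
import org.apache.parquet.column.values.bitpacking.Packer;

public class BitWidthSketch {
  public static void main(String[] args) {
    int bitWidth = 3;
    // paddedByteCountFromBits rounds a bit count up to whole bytes: 3 -> 1
    System.out.println(BytesUtils.paddedByteCountFromBits(bitWidth));

    // pack and unpack 8 values at 3 bits each (8 * 3 = 24 bits = 3 bytes)
    BytePacker packer = Packer.LITTLE_ENDIAN.newBytePacker(bitWidth);
    int[] values = {0, 1, 2, 3, 4, 5, 6, 7};
    byte[] packed = new byte[3];
    packer.pack8Values(values, 0, packed, 0);

    int[] out = new int[8];
    packer.unpack8Values(packed, 0, out, 0);
    System.out.println(Arrays.toString(out)); // [0, 1, 2, 3, 4, 5, 6, 7]
  }
}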
@Override
public void add(Object value) {
  Preconditions.checkArgument(
      AvroUnionConverter.this.memberValue == null,
      "Union is resolving to more than one type");
  memberValue = value;
}
});
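// For context, a minimal Avro sketch of the kind of union this converter
// handles; the check above guards against a single datum materializing under
// more than one branch. The schema shown is illustrative, not from the source.
import java.util.Arrays;

import org.apache.avro.Schema;

public class UnionSchemaExample {
  public static void main(String[] args) {
    // a nullable string: each value must resolve to exactly one branch
    Schema union = Schema.createUnion(Arrays.asList(
        Schema.create(Schema.Type.NULL),
        Schema.create(Schema.Type.STRING)));
    System.out.println(union); // ["null","string"]
  }
}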
public Builder<T> withConf(Configuration conf) {
  this.conf = checkNotNull(conf, "conf");

  // previous versions didn't use the builder, so may set filter before conf. this maintains
  // compatibility for filter. other options are reset by a new conf.
  this.optionsBuilder = HadoopReadOptions.builder(conf);
  if (filter != null) {
    optionsBuilder.withRecordFilter(filter);
  }
  return this;
}
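// A usage sketch of withConf via the parquet-avro reader builder; the file
// path is hypothetical. Note that, per the comment above, a new Configuration
// resets every option except a previously set record filter.
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.hadoop.ParquetReader;

public class ReadWithConf {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    try (ParquetReader<GenericRecord> reader = AvroParquetReader
        .<GenericRecord>builder(new Path("/tmp/example.parquet")) // hypothetical
        .withConf(conf)
        .build()) {
      GenericRecord record;
      while ((record = reader.read()) != null) {
        System.out.println(record);
      }
    }
  }
}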
public ParquetReadOptions toReadOptions() {
  return ParquetReadOptions.builder()
      .useSignedStringMinMax(enableStringsSignedMinMax)
      .build();
}
@Override
public ParquetReadOptions build() {
  return new HadoopReadOptions(
      useSignedStringMinMax, useStatsFilter, useDictionaryFilter, useRecordFilter,
      recordFilter, metadataFilter, codecFactory, allocator, maxAllocationSize,
      properties, conf);
}
}
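// A sketch of building these options and opening a file with them. The path is
// hypothetical and useSignedStringMinMax(true) is just an illustrative option.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.HadoopReadOptions;
import org.apache.parquet.ParquetReadOptions;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.util.HadoopInputFile;

public class OpenWithOptions {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    ParquetReadOptions options = HadoopReadOptions.builder(conf)
        .useSignedStringMinMax(true)
        .build();
    try (ParquetFileReader reader = ParquetFileReader.open(
        HadoopInputFile.fromPath(new Path("/tmp/example.parquet"), conf), options)) {
      System.out.println(reader.getRecordCount());
    }
  }
}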
@Override
public void add(Object value) {
  Preconditions.checkArgument(memberValue == null,
      "Union is resolving to more than one type");
  memberValue = value;
}
});
/**
 * @param readSupport Object which helps read files of the given type, e.g. Thrift, Avro.
 * @param filter for filtering individual records
 */
public InternalParquetRecordReader(ReadSupport<T> readSupport, Filter filter) {
  this.readSupport = readSupport;
  this.filter = checkNotNull(filter, "filter");
}
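// A sketch of constructing the Filter argument with parquet's filter2 API;
// "id" is a hypothetical column name.
import org.apache.parquet.filter2.compat.FilterCompat;
import org.apache.parquet.filter2.predicate.FilterApi;
import org.apache.parquet.filter2.predicate.FilterPredicate;

public class BuildFilter {
  public static void main(String[] args) {
    // keep only records where the int column "id" equals 7
    FilterPredicate pred = FilterApi.eq(FilterApi.intColumn("id"), 7);
    FilterCompat.Filter filter = FilterCompat.get(pred);
    System.out.println(filter); // FilterCompat.NOOP disables filtering instead
  }
}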
while (!initialized && currentLocationIndex < locations.length) {
  String address = locations[currentLocationIndex++];
  if (Strings.isNullOrEmpty(address)) {
    throw new IOException("cannot fetch results from empty or null host value");
public SemanticVersion(int major, int minor, int patch, boolean hasUnknown) {
  Preconditions.checkArgument(major >= 0, "major must be >= 0");
  Preconditions.checkArgument(minor >= 0, "minor must be >= 0");
  Preconditions.checkArgument(patch >= 0, "patch must be >= 0");

  this.major = major;
  this.minor = minor;
  this.patch = patch;
  this.prerelease = hasUnknown;
  this.unknown = null;
  this.pre = null;
  this.buildInfo = null;
}
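// A usage sketch; it assumes this is parquet-common's SemanticVersion, whose
// parse(String) and Comparable implementation are used below.
import org.apache.parquet.SemanticVersion;

public class CompareVersions {
  public static void main(String[] args) throws Exception {
    // the boolean marks a version whose tail could not be parsed
    SemanticVersion built = new SemanticVersion(1, 8, 0, true);
    SemanticVersion parsed = SemanticVersion.parse("1.10.1");
    // components compare numerically, not lexicographically: 1.8.0 < 1.10.1
    System.out.println(built.compareTo(parsed) < 0); // true
  }
}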
public Builder withValuesWriterFactory(ValuesWriterFactory factory) {
  Preconditions.checkNotNull(factory, "ValuesWriterFactory");
  this.valuesWriterFactory = factory;
  return this;
}
+ "CMVAS or CTAS statement"); segmentGranularity = Strings.isNullOrEmpty(segmentGranularity) ? HiveConf .getVar(parseCtx.getConf(), HiveConf.ConfVars.HIVE_DRUID_INDEXING_GRANULARITY
protected ReadSupport<T> getReadSupport() {
  // if readSupport is null, the protected constructor must have been used
  Preconditions.checkArgument(readSupport != null,
      "[BUG] Classes that extend Builder should override getReadSupport()");
  return readSupport;
}
protected Column(ColumnPath columnPath, Class<T> columnType) {
  checkNotNull(columnPath, "columnPath");
  checkNotNull(columnType, "columnType");
  this.columnPath = columnPath;
  this.columnType = columnType;
}
/**
 * Set the Parquet format page size.
 *
 * @param pageSize an integer size in bytes
 * @return this builder for method chaining.
 */
public Builder withPageSize(int pageSize) {
  Preconditions.checkArgument(pageSize > 0,
      "Invalid page size (must be positive): %s", pageSize);
  this.pageSize = pageSize;
  return this;
}
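// A usage sketch of withPageSize via the parquet-avro writer builder; the
// output path and schema are hypothetical, and 64 KiB is an arbitrary choice
// of a page size smaller than the library default.
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetWriter;

public class PageSizeExample {
  public static void main(String[] args) throws Exception {
    Schema schema = SchemaBuilder.record("Rec").fields()
        .requiredInt("id")
        .endRecord();

    try (ParquetWriter<GenericRecord> writer = AvroParquetWriter
        .<GenericRecord>builder(new Path("/tmp/example.parquet")) // hypothetical
        .withSchema(schema)
        .withPageSize(64 * 1024) // 64 KiB data pages
        .build()) {
      GenericRecord rec = new GenericData.Record(schema);
      rec.put("id", 1);
      writer.write(rec);
    }
  }
}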