/**
 * Create a new PositionOutputStreamAdapter.
 *
 * @param out The Flink stream written to.
 */
PositionOutputStreamAdapter(FSDataOutputStream out) {
  this.out = checkNotNull(out, "out");
}
protected Column(ColumnPath columnPath, Class<T> columnType) {
  checkNotNull(columnPath, "columnPath");
  checkNotNull(columnType, "columnType");
  this.columnPath = columnPath;
  this.columnType = columnType;
}
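// Column is not instantiated directly in user code; instances are normally
// obtained through the FilterApi factory methods. A minimal sketch (the
// column paths here are hypothetical):
import org.apache.parquet.filter2.predicate.FilterApi;
import org.apache.parquet.filter2.predicate.Operators;

Operators.IntColumn age = FilterApi.intColumn("user.age");
Operators.BinaryColumn name = FilterApi.binaryColumn("user.name");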
public FilteringGroupConverter(
    GroupConverter delegate,
    List<Integer> indexFieldPath,
    Map<ColumnPath, List<ValueInspector>> valueInspectorsByColumn,
    Map<List<Integer>, PrimitiveColumnIO> columnIOsByIndexFieldPath) {
  this.delegate = checkNotNull(delegate, "delegate");
  this.indexFieldPath = checkNotNull(indexFieldPath, "indexFieldPath");
  this.columnIOsByIndexFieldPath = checkNotNull(columnIOsByIndexFieldPath, "columnIOsByIndexFieldPath");
  this.valueInspectorsByColumn = checkNotNull(valueInspectorsByColumn, "valueInspectorsByColumn");
}
/**
 * @param readSupport Object which helps read files of the given type, e.g. Thrift, Avro.
 * @param filter for filtering individual records
 */
public InternalParquetRecordReader(ReadSupport<T> readSupport, Filter filter) {
  this.readSupport = readSupport;
  this.filter = checkNotNull(filter, "filter");
}
public Builder withValuesWriterFactory(ValuesWriterFactory factory) {
  Preconditions.checkNotNull(factory, "ValuesWriterFactory");
  this.valuesWriterFactory = factory;
  return this;
}
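// A minimal usage sketch, assuming this builder is ParquetProperties.Builder
// from parquet-column and that DefaultValuesWriterFactory is the factory in
// use; adjust to whichever builder this method actually belongs to:
ParquetProperties props = ParquetProperties.builder()
    .withValuesWriterFactory(new DefaultValuesWriterFactory())
    .build();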
/**
 * @param schema the schema of the data
 * @param extraMetaData application specific metadata to add in the file
 */
public WriteContext(MessageType schema, Map<String, String> extraMetaData) {
  super();
  this.schema = checkNotNull(schema, "schema");
  this.extraMetaData = Collections.unmodifiableMap(checkNotNull(extraMetaData, "extraMetaData"));
}
/**
 * @param schema the schema for the file
 * @param keyValueMetaData the app specific metadata
 * @param createdBy the description of the library that created the file
 */
public FileMetaData(MessageType schema, Map<String, String> keyValueMetaData, String createdBy) {
  super();
  this.schema = checkNotNull(schema, "schema");
  this.keyValueMetaData = unmodifiableMap(checkNotNull(keyValueMetaData, "keyValueMetaData"));
  this.createdBy = createdBy;
}
/**
 * @param extraMetaData application specific metadata to add in the file
 */
public FinalizedWriteContext(Map<String, String> extraMetaData) {
  super();
  this.extraMetaData = Collections.unmodifiableMap(checkNotNull(extraMetaData, "extraMetaData"));
}
public static <E extends Enum> Predicate equalTo(final E target) {
  Preconditions.checkNotNull(target, "target");
  final String targetAsString = target.name();
  return new Predicate() {
    @Override
    public boolean apply(ColumnReader input) {
      return targetAsString.equals(input.getBinary().toStringUsingUTF8());
    }
  };
}
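// A minimal usage sketch, assuming this method lives in ColumnPredicates of
// the original record-filter API; the column path and enum are hypothetical:
enum EventType { CLICK, VIEW }

UnboundRecordFilter filter =
    ColumnRecordFilter.column("event.type", ColumnPredicates.equalTo(EventType.CLICK));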
private Builder(Path file) {
  this.file = checkNotNull(file, "file");
  this.conf = new Configuration();
  this.filter = FilterCompat.NOOP;
  this.thriftClass = null;
}
protected Builder(Path path) {
  this.readSupport = null;
  this.file = checkNotNull(path, "path");
  this.conf = new Configuration();
  this.filter = FilterCompat.NOOP;
}
Type(String name, Repetition repetition, OriginalType originalType,
    DecimalMetadata decimalMetadata, ID id) {
  super();
  this.name = checkNotNull(name, "name");
  this.repetition = checkNotNull(repetition, "repetition");
  this.logicalTypeAnnotation = originalType == null
      ? null
      : LogicalTypeAnnotation.fromOriginalType(originalType, decimalMetadata);
  this.id = id;
}
public WildcardPath(String parentGlobPath, String wildcardPath, char delim) {
  this.parentGlobPath = Preconditions.checkNotNull(parentGlobPath, "parentGlobPath");
  this.originalPattern = Preconditions.checkNotNull(wildcardPath, "wildcardPath");
  this.pattern = Pattern.compile(buildRegex(wildcardPath, delim));
}
@Deprecated
private Builder(ReadSupport<T> readSupport, Path path) {
  this.readSupport = checkNotNull(readSupport, "readSupport");
  this.file = null;
  this.path = checkNotNull(path, "path");
  this.conf = new Configuration();
  this.optionsBuilder = HadoopReadOptions.builder(conf);
}
public static boolean canDrop(
    FilterPredicate pred, List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
  checkNotNull(pred, "pred");
  checkNotNull(columns, "columns");
  return pred.accept(new DictionaryFilter(columns, dictionaries));
}
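// A minimal sketch of calling canDrop with a predicate built via FilterApi.
// The column path and value are hypothetical, and rowGroupColumns/dictStore
// are stand-ins for one row group's column metadata and dictionary pages,
// e.g. as obtained from a ParquetFileReader:
FilterPredicate pred = FilterApi.eq(FilterApi.intColumn("user.age"), 42);
boolean canSkipRowGroup = DictionaryFilter.canDrop(pred, rowGroupColumns, dictStore);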
public static void validateBuffer(byte[] buffer, int off, int len) {
  Preconditions.checkNotNull(buffer, "buffer");
  Preconditions.checkArgument(
      off >= 0 && len >= 0 && off <= buffer.length - len,
      "Invalid buffer offset or length: buffer.length=%s off=%s len=%s",
      buffer.length, off, len);
}
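// A minimal usage sketch: the first call passes validation, the second throws
// IllegalArgumentException because off + len overruns the buffer:
byte[] buf = new byte[8];
validateBuffer(buf, 2, 6);  // ok: 2 <= 8 - 6
validateBuffer(buf, 4, 6);  // throws: 4 > 8 - 6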
public DeprecatedFieldProjectionFilter(String filterDescStr) {
  Preconditions.checkNotNull(filterDescStr, "filterDescStr");
  filterPatterns = new LinkedList<PathGlobPatternStatus>();
  // filterDescStr cannot be null here; checkNotNull above would have thrown
  if (filterDescStr.isEmpty()) {
    return;
  }
  String[] rawPatterns = filterDescStr.split(PATTERN_SEPARATOR);
  for (String rawPattern : rawPatterns) {
    filterPatterns.add(new PathGlobPatternStatus(rawPattern));
  }
}
protected Builder(InputFile file) {
  this.readSupport = null;
  this.file = checkNotNull(file, "file");
  this.path = null;
  if (file instanceof HadoopInputFile) {
    this.conf = ((HadoopInputFile) file).getConfiguration();
  } else {
    this.conf = new Configuration();
  }
  optionsBuilder = HadoopReadOptions.builder(conf);
}
private ParquetReader(Configuration conf, Path file, ReadSupport<T> readSupport, FilterCompat.Filter filter)
    throws IOException {
  this(
      Collections.singletonList((InputFile) HadoopInputFile.fromPath(file, conf)),
      HadoopReadOptions.builder(conf)
          .withRecordFilter(checkNotNull(filter, "filter"))
          .build(),
      readSupport);
}
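// A minimal sketch of how callers reach this constructor through the public
// builder; GroupReadSupport, the file path, and the predicate are
// illustrative stand-ins:
ParquetReader<Group> reader = ParquetReader
    .builder(new GroupReadSupport(), new Path("/tmp/data.parquet"))
    .withFilter(FilterCompat.get(FilterApi.eq(FilterApi.intColumn("id"), 1)))
    .build();
Group record = reader.read();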