/**
 * Identity mapping with a type check: returns {@code input} unchanged after
 * verifying it is an instance of {@code entityClass}.
 *
 * @param input the entity to validate and pass through
 * @return {@code input}, unmodified
 * @throws DatasetException if {@code input} is null or not assignable to
 *     {@code entityClass}
 */
@Override
public E map(E input) {
  if (input == null || !entityClass.isAssignableFrom(input.getClass())) {
    throw new DatasetException(
        "Object does not match expected type " + entityClass + ": "
            + String.valueOf(input));
  }
  return input;
}
}
/**
 * Reads the Avro schema from the static {@code SCHEMA$} field that the Avro
 * compiler generates on specific record classes.
 *
 * @param entityClass an Avro-generated specific record class
 * @return the {@link Schema} declared by {@code entityClass}
 * @throws DatasetException if the field is missing, inaccessible, or does not
 *     hold a {@code Schema}
 */
private static Schema getSchemaFromEntityClass(Class<?> entityClass) {
  try {
    return (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (ReflectiveOperationException | ClassCastException
      | SecurityException e) {
    // Narrowed from catch (Throwable): catching Throwable would also swallow
    // JVM Errors (e.g. OutOfMemoryError), which must propagate.
    LOG.error(
        "Error getting schema from entity of type: " + entityClass.getName(),
        e);
    throw new DatasetException(e);
  }
}
/**
 * Reads the Avro schema from the static {@code SCHEMA$} field that the Avro
 * compiler generates on specific record classes.
 *
 * @param entityClass an Avro-generated specific record class
 * @return the {@link Schema} declared by {@code entityClass}
 * @throws DatasetException if the field is missing, inaccessible, or does not
 *     hold a {@code Schema}
 */
private static Schema getSchemaFromEntityClass(Class<?> entityClass) {
  try {
    return (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (ReflectiveOperationException | ClassCastException
      | SecurityException e) {
    // Narrowed from catch (Throwable): catching Throwable would also swallow
    // JVM Errors (e.g. OutOfMemoryError), which must propagate.
    LOG.error(
        "Error getting schema from entity of type: " + entityClass.getName(),
        e);
    throw new DatasetException(e);
  }
}
/**
 * Returns the number encoded in the file's name: the digits before the first
 * '.' in the final path component.
 *
 * @param fileStatus status of a file in the schema manager folder
 * @return the numeric prefix of the file name
 * @throws DatasetException if the name has no '.' separator or its prefix is
 *     not a valid integer
 */
private static int getFileNumber(FileStatus fileStatus) {
  String fileName = fileStatus.getPath().getName();
  int dotIndex = fileName.indexOf('.');
  if (dotIndex < 0) {
    // Without this check, substring(0, -1) below would throw an unrelated
    // StringIndexOutOfBoundsException for names lacking a '.'.
    throw new DatasetException("Unexpected file in schema manager folder "
        + fileStatus.getPath());
  }
  try {
    return Integer.parseInt(fileName.substring(0, dotIndex));
  } catch (NumberFormatException e) {
    throw new DatasetException("Unexpected file in schema manager folder "
        + fileStatus.getPath(), e);
  }
}
/**
 * Resolves the storage position of a field in keys with this KeySchema.
 *
 * @param fieldName
 *      The source field name.
 * @return
 *      The position of the field in keys with this schema.
 * @throws DatasetException
 *      If the field cannot be recovered from keys with this schema.
 */
public int position(String fieldName) {
  // Single lookup instead of containsKey + get (avoids a double hash probe).
  Integer position = fieldPositions.get(fieldName);
  if (position == null) {
    throw new DatasetException("Cannot recover " + fieldName + " from key");
  }
  return position;
}
}
/**
 * Constructs a composite DAO for an Avro specific record type, caching the
 * record's no-arg constructor and its generated {@code SCHEMA$} schema.
 *
 * @param tablePool pool of HBase table connections
 * @param tableName name of the backing HBase table
 * @param entityMappers mappers for the sub-entities of the composite
 * @param entityClass the Avro-generated composite record class
 * @throws DatasetException if the class has no accessible no-arg constructor
 *     or no readable {@code SCHEMA$} field
 */
public SpecificCompositeAvroDao(HTablePool tablePool, String tableName,
    List<EntityMapper<S>> entityMappers, Class<E> entityClass) {
  super(tablePool, tableName, entityMappers);
  this.entityClass = entityClass;
  try {
    entityConstructor = entityClass.getConstructor();
    entitySchema = (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (ReflectiveOperationException | ClassCastException
      | SecurityException e) {
    // Narrowed from catch (Throwable) so JVM Errors are not swallowed.
    LOG.error("Error getting constructor or schema field for entity of type: "
        + entityClass.getName(), e);
    throw new DatasetException(e);
  }
}
/**
 * Constructs a composite DAO for an Avro specific record type, caching the
 * record's no-arg constructor and its generated {@code SCHEMA$} schema.
 *
 * @param tablePool pool of HBase table connections
 * @param tableName name of the backing HBase table
 * @param entityMappers mappers for the sub-entities of the composite
 * @param entityClass the Avro-generated composite record class
 * @throws DatasetException if the class has no accessible no-arg constructor
 *     or no readable {@code SCHEMA$} field
 */
public SpecificCompositeAvroDao(HTablePool tablePool, String tableName,
    List<EntityMapper<S>> entityMappers, Class<E> entityClass) {
  super(tablePool, tableName, entityMappers);
  this.entityClass = entityClass;
  try {
    entityConstructor = entityClass.getConstructor();
    entitySchema = (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (ReflectiveOperationException | ClassCastException
      | SecurityException e) {
    // Narrowed from catch (Throwable) so JVM Errors are not swallowed.
    LOG.error("Error getting constructor or schema field for entity of type: "
        + entityClass.getName(), e);
    throw new DatasetException(e);
  }
}
/**
 * Maps a Java class to the corresponding Hive primitive type name.
 *
 * @param type the Java class used by a FieldPartitioner
 * @return the Hive type name for {@code type}
 * @throws DatasetException if Hive has no primitive mapping for {@code type}
 */
private static String getHiveType(Class<?> type) {
  String hiveTypeName =
      PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveJava(type);
  if (hiveTypeName != null) {
    return hiveTypeName;
  }
  throw new DatasetException("Unsupported FieldPartitioner type: " + type);
}
/**
 * Creates an identity field partitioner, loading the field's value class by
 * name.
 *
 * @param sourceName name of the source field
 * @param name name of the partition field
 * @param className fully qualified name of the field's value class
 * @param buckets cardinality hint for the partitioner
 * @return an {@code IdentityFieldPartitioner} over the resolved class
 * @throws DatasetException if {@code className} cannot be loaded
 */
public static FieldPartitioner identity(String sourceName, String name,
    String className, int buckets) {
  final Class<?> fieldType;
  try {
    fieldType = Class.forName(className);
  } catch (ClassNotFoundException e) {
    throw new DatasetException("Cannot find class: " + className, e);
  }
  return new IdentityFieldPartitioner(sourceName, name, fieldType, buckets);
}
/**
 * Builds a key converter for the given Crunch type.
 *
 * @param ptype the Crunch type of the keys; must be an {@code AvroType}
 * @return a {@code KeyConverter} wrapping {@code ptype}
 * @throws DatasetException if {@code ptype} is not Avro-based
 */
@Override
@SuppressWarnings("unchecked")
public Converter<?, ?, ?, ?> getConverter(PType<?> ptype) {
  if (!(ptype instanceof AvroType)) {
    throw new DatasetException(
        "Cannot create converter for non-Avro type: " + ptype);
  }
  return new KeyConverter<E>((AvroType<E>) ptype);
}
/**
 * Maps a Java class to the corresponding Hive primitive type name.
 *
 * @param type the Java class used by a FieldPartitioner
 * @return the Hive type name for {@code type}
 * @throws DatasetException if Hive has no primitive mapping for {@code type}
 */
private static String getHiveType(Class<?> type) {
  String hiveTypeName =
      PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveJava(type);
  if (hiveTypeName != null) {
    return hiveTypeName;
  }
  throw new DatasetException("Unsupported FieldPartitioner type: " + type);
}
/**
 * Returns an iterator over the directories backing the given view.
 *
 * @param view the view to enumerate; must be file-system backed
 * @return an iterator of partition directory paths
 * @throws DatasetException if the view is neither a {@code FileSystemView}
 *     nor a {@code FileSystemDataset}
 */
public Iterator<Path> getDirectoryIterator(View view) {
  // Check FileSystemView first, matching the original dispatch order.
  if (view instanceof FileSystemView) {
    return ((FileSystemView<?>) view).dirIterator();
  }
  if (view instanceof FileSystemDataset) {
    return ((FileSystemDataset<?>) view).dirIterator();
  }
  throw new DatasetException(
      "Underlying Dataset must be a FileSystemDataset");
}
/**
 * Extracts the post-increment value of a counter field from an HBase
 * increment result.
 *
 * @param result the HBase result returned by the increment
 * @param fieldName the schema field that was incremented
 * @return the counter's new value
 * @throws DatasetException if the field is unknown or not a COUNTER mapping
 */
@Override
public long mapFromIncrementResult(Result result, String fieldName) {
  FieldMapping mapping =
      entitySchema.getColumnMappingDescriptor().getFieldMapping(fieldName);
  if (mapping == null) {
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (mapping.getMappingType() != MappingType.COUNTER) {
    throw new DatasetException("Field is not a counter type: " + fieldName);
  }
  return (Long) entitySerDe.deserialize(mapping, result);
}
/**
 * Extracts the post-increment value of a counter field from an HBase
 * increment result.
 *
 * @param result the HBase result returned by the increment
 * @param fieldName the schema field that was incremented
 * @return the counter's new value
 * @throws DatasetException if the field is unknown or not a COUNTER mapping
 */
@Override
public long mapFromIncrementResult(Result result, String fieldName) {
  FieldMapping mapping =
      entitySchema.getColumnMappingDescriptor().getFieldMapping(fieldName);
  if (mapping == null) {
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (mapping.getMappingType() != MappingType.COUNTER) {
    throw new DatasetException("Field is not a counter type: " + fieldName);
  }
  return (Long) entitySerDe.deserialize(mapping, result);
}
/**
 * Creates a filter that matches (or rejects, when {@code isEqual} is false)
 * entities whose mapped column value matches the given regex.
 *
 * @param entitySchema schema describing the entity's column mappings
 * @param entitySerDe serializer/deserializer for the entity (unused here but
 *     part of the established constructor signature)
 * @param fieldName the schema field to filter on
 * @param regex the regular expression to compare against
 * @param isEqual whether matching values pass (true) or fail (false)
 * @throws DatasetException if the field is unknown or not a COLUMN mapping
 */
public RegexEntityFilter(EntitySchema entitySchema,
    EntitySerDe<?> entitySerDe, String fieldName, String regex,
    boolean isEqual) {
  FieldMapping fieldMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  if (fieldMapping == null) {
    // Consistent with other field lookups in this codebase: fail with a
    // clear message instead of an NPE on getMappingType() below.
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (fieldMapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  this.filter = constructFilter(regex, isEqual, fieldMapping);
}
/**
 * Creates a filter that matches (or rejects, when {@code isEqual} is false)
 * entities whose mapped column value matches the given regex.
 *
 * @param entitySchema schema describing the entity's column mappings
 * @param entitySerDe serializer/deserializer for the entity (unused here but
 *     part of the established constructor signature)
 * @param fieldName the schema field to filter on
 * @param regex the regular expression to compare against
 * @param isEqual whether matching values pass (true) or fail (false)
 * @throws DatasetException if the field is unknown or not a COLUMN mapping
 */
public RegexEntityFilter(EntitySchema entitySchema,
    EntitySerDe<?> entitySerDe, String fieldName, String regex,
    boolean isEqual) {
  FieldMapping fieldMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  if (fieldMapping == null) {
    // Consistent with other field lookups in this codebase: fail with a
    // clear message instead of an NPE on getMappingType() below.
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (fieldMapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  this.filter = constructFilter(regex, isEqual, fieldMapping);
}
/**
 * Builds an iterator over this view's partitions using the dataset's
 * partition strategy and schema.
 *
 * @return a partition iterator rooted at this view's directory
 * @throws DatasetException if the partition directories cannot be listed
 */
private FileSystemPartitionIterator partitionIterator() {
  DatasetDescriptor desc = dataset.getDescriptor();
  try {
    return new FileSystemPartitionIterator(
        fs, root, desc.getPartitionStrategy(), desc.getSchema(),
        getKeyPredicate());
  } catch (IOException ioe) {
    throw new DatasetException("Cannot list partitions in view:" + this, ioe);
  }
}
/**
 * Creates a key builder for a partitioned dataset.
 *
 * @param dataset the dataset whose partition strategy defines the key fields
 * @throws DatasetException if the dataset is not partitioned
 */
public Builder(Dataset dataset) {
  if (!dataset.getDescriptor().isPartitioned()) {
    throw new DatasetException("Dataset is not partitioned");
  }
  // The two assignments are independent; order does not matter.
  this.values = Maps.newHashMap();
  this.strategy = dataset.getDescriptor().getPartitionStrategy();
}
/**
 * Creates an HBase filter that compares a single mapped column against a
 * serialized value using the given comparison operator.
 *
 * @param entitySchema schema describing the entity's column mappings
 * @param entitySerDe serializer used to encode {@code filterValue}
 * @param fieldName the schema field to filter on
 * @param filterValue the value to compare stored cells against
 * @param equalityOperator the HBase comparison operator to apply
 * @throws DatasetException if the field is unknown or not a COLUMN mapping
 */
public SingleFieldEntityFilter(EntitySchema entitySchema,
    EntitySerDe<?> entitySerDe, String fieldName, Object filterValue,
    CompareFilter.CompareOp equalityOperator) {
  FieldMapping fieldMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  if (fieldMapping == null) {
    // Consistent with other field lookups in this codebase: fail with a
    // clear message instead of an NPE on getMappingType() below.
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (fieldMapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  byte[] family = fieldMapping.getFamily();
  byte[] qualifier = fieldMapping.getQualifier();
  byte[] comparisonBytes = entitySerDe.serializeColumnValueToBytes(fieldName,
      filterValue);
  this.filter = new SingleColumnValueFilter(family, qualifier,
      equalityOperator, comparisonBytes);
}
/**
 * Creates an HBase filter that compares a single mapped column against a
 * serialized value using the given comparison operator.
 *
 * @param entitySchema schema describing the entity's column mappings
 * @param entitySerDe serializer used to encode {@code filterValue}
 * @param fieldName the schema field to filter on
 * @param filterValue the value to compare stored cells against
 * @param equalityOperator the HBase comparison operator to apply
 * @throws DatasetException if the field is unknown or not a COLUMN mapping
 */
public SingleFieldEntityFilter(EntitySchema entitySchema,
    EntitySerDe<?> entitySerDe, String fieldName, Object filterValue,
    CompareFilter.CompareOp equalityOperator) {
  FieldMapping fieldMapping = entitySchema.getColumnMappingDescriptor()
      .getFieldMapping(fieldName);
  if (fieldMapping == null) {
    // Consistent with other field lookups in this codebase: fail with a
    // clear message instead of an NPE on getMappingType() below.
    throw new DatasetException("Unknown field in the schema: " + fieldName);
  }
  if (fieldMapping.getMappingType() != MappingType.COLUMN) {
    throw new DatasetException(
        "SingleColumnValueFilter only compatible with COLUMN mapping types.");
  }
  byte[] family = fieldMapping.getFamily();
  byte[] qualifier = fieldMapping.getQualifier();
  byte[] comparisonBytes = entitySerDe.serializeColumnValueToBytes(fieldName,
      filterValue);
  this.filter = new SingleColumnValueFilter(family, qualifier,
      equalityOperator, comparisonBytes);
}