/**
 * Validates that the given entity is of the expected type and passes it through
 * unchanged.
 *
 * @param input the entity to check
 * @return the same entity, unchanged
 * @throws DatasetException if {@code input} is null or is not assignable to the
 *     expected entity class
 */
@Override
public E map(E input) {
  if (input == null || !entityClass.isAssignableFrom(input.getClass())) {
    throw new DatasetException(
        "Object does not match expected type " + entityClass + ": "
            + String.valueOf(input));
  }
  return input;
}
}
/**
 * Reads the Avro {@code Schema} from the static {@code SCHEMA$} field that the
 * Avro compiler generates on specific record classes.
 *
 * @param entityClass the generated Avro entity class
 * @return the Avro schema declared by the class
 * @throws DatasetException if the SCHEMA$ field is missing or inaccessible
 */
private static Schema getSchemaFromEntityClass(Class<?> entityClass) {
  try {
    return (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (Exception e) {
    // Catch Exception, not Throwable: wrapping Errors (e.g. OutOfMemoryError)
    // in DatasetException would hide fatal JVM conditions from the caller.
    LOG.error(
        "Error getting schema from entity of type: " + entityClass.getName(), e);
    throw new DatasetException(e);
  }
}
/**
 * Reads the Avro {@code Schema} from the static {@code SCHEMA$} field that the
 * Avro compiler generates on specific record classes.
 *
 * @param entityClass the generated Avro entity class
 * @return the Avro schema declared by the class
 * @throws DatasetException if the SCHEMA$ field is missing or inaccessible
 */
private static Schema getSchemaFromEntityClass(Class<?> entityClass) {
  try {
    return (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (Exception e) {
    // Catch Exception, not Throwable: wrapping Errors (e.g. OutOfMemoryError)
    // in DatasetException would hide fatal JVM conditions from the caller.
    LOG.error(
        "Error getting schema from entity of type: " + entityClass.getName(), e);
    throw new DatasetException(e);
  }
}
/**
 * Returns the number used for the name of the file: the digits before the
 * first '.' in the final path component.
 *
 * @param fileStatus status of the schema-manager file to inspect
 * @return the numeric prefix of the file name
 * @throws DatasetException if the name has no '.' separator or its prefix is
 *     not a valid integer
 */
private static int getFileNumber(FileStatus fileStatus) {
  String fileName = fileStatus.getPath().getName();
  int dot = fileName.indexOf('.');
  if (dot < 0) {
    // Previously indexOf returning -1 made substring(0, -1) throw an
    // uncaught StringIndexOutOfBoundsException instead of DatasetException.
    throw new DatasetException("Unexpected file in schema manager folder "
        + fileStatus.getPath());
  }
  try {
    return Integer.parseInt(fileName.substring(0, dot));
  } catch (NumberFormatException e) {
    throw new DatasetException("Unexpected file in schema manager folder "
        + fileStatus.getPath(), e);
  }
}
/**
 * Resolves the storage position of a field in keys with this KeySchema.
 *
 * @param fieldName
 *          The source field name.
 * @return
 *          The position of the field in keys with this schema.
 * @throws DatasetException
 *          If the field cannot be recovered from keys with this schema.
 */
public int position(String fieldName) {
  // Single map lookup instead of containsKey followed by get.
  Integer position = fieldPositions.get(fieldName);
  if (position == null) {
    throw new DatasetException("Cannot recover " + fieldName + " from key");
  }
  return position;
}
}
/**
 * Resolves the storage position of a field in keys with this KeySchema.
 *
 * @param fieldName
 *          The source field name.
 * @return
 *          The position of the field in keys with this schema.
 * @throws DatasetException
 *          If the field cannot be recovered from keys with this schema.
 */
public int position(String fieldName) {
  // Single map lookup instead of containsKey followed by get.
  Integer position = fieldPositions.get(fieldName);
  if (position == null) {
    throw new DatasetException("Cannot recover " + fieldName + " from key");
  }
  return position;
}
}
/**
 * Constructs a DAO for composite Avro specific records.
 *
 * @param tablePool pool of HBase table handles
 * @param tableName name of the backing HBase table
 * @param entityMappers mappers for each sub-entity of the composite
 * @param entityClass the generated Avro class of the composite entity
 * @throws DatasetException if the class lacks a no-arg constructor or a
 *     readable SCHEMA$ field
 */
public SpecificCompositeAvroDao(HTablePool tablePool, String tableName,
    List<EntityMapper<S>> entityMappers, Class<E> entityClass) {
  super(tablePool, tableName, entityMappers);
  this.entityClass = entityClass;
  try {
    entityConstructor = entityClass.getConstructor();
    entitySchema = (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (Exception e) {
    // Catch Exception rather than Throwable so fatal JVM Errors propagate
    // instead of being wrapped in DatasetException.
    LOG.error(
        "Error getting constructor or schema field for entity of type: "
            + entityClass.getName(), e);
    throw new DatasetException(e);
  }
}
/**
 * Constructs a DAO for composite Avro specific records.
 *
 * @param tablePool pool of HBase table handles
 * @param tableName name of the backing HBase table
 * @param entityMappers mappers for each sub-entity of the composite
 * @param entityClass the generated Avro class of the composite entity
 * @throws DatasetException if the class lacks a no-arg constructor or a
 *     readable SCHEMA$ field
 */
public SpecificCompositeAvroDao(HTablePool tablePool, String tableName,
    List<EntityMapper<S>> entityMappers, Class<E> entityClass) {
  super(tablePool, tableName, entityMappers);
  this.entityClass = entityClass;
  try {
    entityConstructor = entityClass.getConstructor();
    entitySchema = (Schema) entityClass.getDeclaredField("SCHEMA$").get(null);
  } catch (Exception e) {
    // Catch Exception rather than Throwable so fatal JVM Errors propagate
    // instead of being wrapped in DatasetException.
    LOG.error(
        "Error getting constructor or schema field for entity of type: "
            + entityClass.getName(), e);
    throw new DatasetException(e);
  }
}
/**
 * Translates a Java class into the corresponding Hive primitive type name.
 *
 * @param type the Java class to translate
 * @return Hive's type name for {@code type}
 * @throws DatasetException if Hive defines no primitive type for the class
 */
private static String getHiveType(Class<?> type) {
  String typeName =
      PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveJava(type);
  if (typeName != null) {
    return typeName;
  }
  throw new DatasetException("Unsupported FieldPartitioner type: " + type);
}
/**
 * Builds an identity field partitioner over the named source field.
 *
 * @param sourceName name of the source field
 * @param name name of the partition field
 * @param className fully-qualified class name of the field's values
 * @param buckets number of partitions
 * @return a new identity partitioner for the field
 * @throws DatasetException if {@code className} cannot be loaded
 */
public static FieldPartitioner identity(String sourceName, String name,
    String className, int buckets) {
  try {
    return new IdentityFieldPartitioner(sourceName, name,
        Class.forName(className), buckets);
  } catch (ClassNotFoundException e) {
    throw new DatasetException("Cannot find class: " + className, e);
  }
}
/**
 * Returns a key converter for the given Crunch type.
 *
 * @param ptype the Crunch type to convert
 * @return a {@code KeyConverter} wrapping {@code ptype}
 * @throws DatasetException if {@code ptype} is not an Avro type
 */
@Override
@SuppressWarnings("unchecked")
public Converter<?, ?, ?, ?> getConverter(PType<?> ptype) {
  if (!(ptype instanceof AvroType)) {
    throw new DatasetException(
        "Cannot create converter for non-Avro type: " + ptype);
  }
  return new KeyConverter<E>((AvroType<E>) ptype);
}
/**
 * Translates a Java class into the corresponding Hive primitive type name.
 *
 * @param type the Java class to translate
 * @return Hive's type name for {@code type}
 * @throws DatasetException if Hive defines no primitive type for the class
 */
private static String getHiveType(Class<?> type) {
  final String hiveTypeName =
      PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveJava(type);
  if (hiveTypeName == null) {
    throw new DatasetException("Unsupported FieldPartitioner type: " + type);
  }
  return hiveTypeName;
}
/**
 * Construct the factory, giving it the class of the SpecificRecord the
 * builders will construct.
 *
 * @param recordClass
 *          The class of the SpecificRecords the builders will construct.
 */
public SpecificAvroRecordBuilderFactory(Class<T> recordClass) {
  this.recordClass = recordClass;
  try {
    // Get the constructor of the class so we don't have to
    // perform this expensive reflection call for every
    // builder constructed.
    this.recordClassConstructor = recordClass.getConstructor();
  } catch (Exception e) {
    // A number of reflection exceptions could be caught here.
    // No good way to handle these types of exceptions, so
    // throw a DatasetException up to the user.
    String msg = "Could not get a default constructor for class: "
        + recordClass.toString();
    LOG.error(msg, e);
    throw new DatasetException(msg, e);
  }
}
/**
 * Merges the string types declared by a specific record class's generated
 * {@code SCHEMA$} into the given entity schema.
 *
 * @param specificClass the generated Avro specific record class
 * @param entitySchema the entity schema to merge with
 * @return a new AvroEntitySchema combining the class schema with the entity
 *     schema's raw schema and column mapping descriptor
 * @throws DatasetException if SCHEMA$ cannot be read from the class
 */
public static AvroEntitySchema mergeSpecificStringTypes(
    Class<? extends SpecificRecord> specificClass,
    AvroEntitySchema entitySchema) {
  Schema schemaField;
  try {
    schemaField = (Schema) specificClass.getField("SCHEMA$").get(null);
  } catch (IllegalArgumentException | SecurityException
      | IllegalAccessException | NoSuchFieldException e) {
    // Multi-catch collapses four identical handlers into one.
    throw new DatasetException(e);
  }
  return new AvroEntitySchema(schemaField, entitySchema.getRawSchema(),
      entitySchema.getColumnMappingDescriptor());
}
private static File findJarForClass(Class<?> requiredClass) { ProtectionDomain domain = AccessController.doPrivileged( new GetProtectionDomain(requiredClass)); CodeSource codeSource = domain.getCodeSource(); if (codeSource != null) { try { return new File(codeSource.getLocation().toURI()); } catch (URISyntaxException e) { throw new DatasetException( "Cannot locate " + requiredClass.getName() + " jar", e); } } else { // this should only happen for system classes throw new DatasetException( "Cannot locate " + requiredClass.getName() + " jar"); } }
/** * Construct the factory, giving it the class of the SpecificRecor the * builders will construct. * * @param recordClass * The class of the SpecificRecords the builders will construct. */ public SpecificAvroRecordBuilderFactory(Class<T> recordClass) { this.recordClass = recordClass; try { // Get the constructor of the class so we don't have to // perform this expensive reflection call for every // builder constructed. this.recordClassConstructor = recordClass.getConstructor(); } catch (Exception e) { // A number of reflection exceptions could be caught here. // No good way to handle these types of exceptions, so // throw an DatasetException up to the user. String msg = "Could not get a default constructor for class: " + recordClass.toString(); LOG.error(msg, e); throw new DatasetException(msg, e); } }
/**
 * Merges the string types declared by a specific record class's generated
 * {@code SCHEMA$} into the given entity schema.
 *
 * @param specificClass the generated Avro specific record class
 * @param entitySchema the entity schema to merge with
 * @return a new AvroEntitySchema combining the class schema with the entity
 *     schema's raw schema and column mapping descriptor
 * @throws DatasetException if SCHEMA$ cannot be read from the class
 */
public static AvroEntitySchema mergeSpecificStringTypes(
    Class<? extends SpecificRecord> specificClass,
    AvroEntitySchema entitySchema) {
  Schema schemaField;
  try {
    schemaField = (Schema) specificClass.getField("SCHEMA$").get(null);
  } catch (IllegalArgumentException | SecurityException
      | IllegalAccessException | NoSuchFieldException e) {
    // Multi-catch collapses four identical handlers into one.
    throw new DatasetException(e);
  }
  return new AvroEntitySchema(schemaField, entitySchema.getRawSchema(),
      entitySchema.getColumnMappingDescriptor());
}
/**
 * Builds a composite entity from its sub-entities, assigning each non-null
 * sub-entity to the record field at its list index.
 *
 * @param subEntities sub-entities in field order; null entries are skipped
 * @return the assembled composite entity
 * @throws DatasetException if the entity cannot be instantiated
 */
@Override
public E compose(List<S> subEntities) {
  E entity;
  try {
    entity = entityConstructor.newInstance();
  } catch (Exception e) {
    // Catch Exception rather than Throwable so fatal JVM Errors propagate
    // instead of being wrapped in DatasetException.
    LOG.error(
        "Error trying to construct entity of type: " + entityClass.getName(), e);
    throw new DatasetException(e);
  }
  int fieldIndex = 0;
  for (S subEntity : subEntities) {
    if (subEntity != null) {
      entity.put(fieldIndex, subEntity);
    }
    fieldIndex++;
  }
  return entity;
}
/**
 * Builds a composite entity from its sub-entities, assigning each non-null
 * sub-entity to the record field at its list index.
 *
 * @param subEntities sub-entities in field order; null entries are skipped
 * @return the assembled composite entity
 * @throws DatasetException if the entity cannot be instantiated
 */
@Override
public E compose(List<S> subEntities) {
  E entity;
  try {
    entity = entityConstructor.newInstance();
  } catch (Exception e) {
    // Catch Exception rather than Throwable so fatal JVM Errors propagate
    // instead of being wrapped in DatasetException.
    LOG.error(
        "Error trying to construct entity of type: " + entityClass.getName(), e);
    throw new DatasetException(e);
  }
  int fieldIndex = 0;
  for (S subEntity : subEntities) {
    if (subEntity != null) {
      entity.put(fieldIndex, subEntity);
    }
    fieldIndex++;
  }
  return entity;
}
/**
 * Derives the legacy (pre-namespace) repository URI for a dataset. For hdfs,
 * file, and hive repositories the namespace is appended to the storage path;
 * other schemes are returned unchanged.
 *
 * @param datasetUri URI of the dataset
 * @param namespace namespace to fold into the legacy path
 * @return the legacy repository URI
 * @throws DatasetException if the rewritten URI is not syntactically valid
 */
@VisibleForTesting
URI getLegacyRepoUri(URI datasetUri, String namespace) {
  URI repoUri = DatasetRepositories.repositoryFor(datasetUri).getUri();
  URI specificUri = URI.create(repoUri.getSchemeSpecificPart());
  if (!Sets.newHashSet("hdfs", "file", "hive").contains(specificUri.getScheme())) {
    return repoUri;
  }
  try {
    URI withNamespace = new URI(specificUri.getScheme(),
        specificUri.getUserInfo(), specificUri.getHost(),
        specificUri.getPort(), specificUri.getPath() + "/" + namespace,
        specificUri.getQuery(), specificUri.getFragment());
    return URI.create("repo:" + withNamespace.toString());
  } catch (URISyntaxException ex) {
    throw new DatasetException("Error generating legacy URI", ex);
  }
}