/**
 * Creates a deserializer for this table from its metastore descriptor.
 *
 * @param skipConfError whether SerDe configuration errors should be tolerated
 * @return the deserializer built from the table metadata
 */
final public Deserializer getDeserializerFromMetaStore(boolean skipConfError) {
  try {
    return HiveMetaStoreUtils.getDeserializer(
        SessionState.getSessionConf(), tTable, skipConfError);
  } catch (MetaException ex) {
    // Metastore failures are not recoverable here; surface them unchecked.
    throw new RuntimeException(ex);
  }
}
/**
 * Creates a deserializer for this table from its metastore descriptor.
 *
 * @param skipConfError whether SerDe configuration errors should be tolerated
 * @return the deserializer built from the table metadata
 */
final public Deserializer getDeserializerFromMetaStore(boolean skipConfError) {
  try {
    return MetaStoreUtils.getDeserializer(
        SessionState.getSessionConf(), tTable, skipConfError);
  } catch (MetaException ex) {
    // Metastore failures are not recoverable here; surface them unchecked.
    throw new RuntimeException(ex);
  }
}
/**
 * Resolves the {@link Deserializer} implementation class recorded for this table.
 *
 * @return the deserializer class configured in the table metadata
 * @throws Exception if the class cannot be resolved or loaded
 */
final public Class<? extends Deserializer> getDeserializerClass() throws Exception {
  HiveConf sessionConf = SessionState.getSessionConf();
  return HiveMetaStoreUtils.getDeserializerClass(sessionConf, tTable);
}
/**
 * Resolves the {@link Deserializer} implementation class recorded for this table.
 *
 * @return the deserializer class configured in the table metadata
 * @throws Exception if the class cannot be resolved or loaded
 */
final public Class<? extends Deserializer> getDeserializerClass() throws Exception {
  HiveConf sessionConf = SessionState.getSessionConf();
  return MetaStoreUtils.getDeserializerClass(sessionConf, tTable);
}
/**
 * Lazily instantiates and caches the deserializer for this partition.
 * NOTE(review): lazy init is unsynchronized — presumably only used from a
 * single session thread; confirm before sharing across threads.
 *
 * @return the cached partition deserializer
 */
final public Deserializer getDeserializer() {
  if (deserializer != null) {
    return deserializer;
  }
  try {
    deserializer = HiveMetaStoreUtils.getDeserializer(
        SessionState.getSessionConf(), tPartition, table.getTTable());
  } catch (MetaException ex) {
    throw new RuntimeException(ex);
  }
  return deserializer;
}
/**
 * Lazily instantiates and caches the deserializer for this partition.
 * NOTE(review): lazy init is unsynchronized — presumably only used from a
 * single session thread; confirm before sharing across threads.
 *
 * @return the cached partition deserializer
 */
final public Deserializer getDeserializer() {
  if (deserializer != null) {
    return deserializer;
  }
  try {
    deserializer = MetaStoreUtils.getDeserializer(
        SessionState.getSessionConf(), tPartition, table.getTTable());
  } catch (MetaException ex) {
    throw new RuntimeException(ex);
  }
  return deserializer;
}
@SuppressWarnings("nls") public FileStatus[] getSortedPaths() { try { // Previously, this got the filesystem of the Table, which could be // different from the filesystem of the partition. FileSystem fs = FileSystem.get(getPath().toUri(), SessionState.getSessionConf()); String pathPattern = getPath().toString(); if (getNumBuckets() > 0) { pathPattern = pathPattern + "/*"; } LOG.info("Path pattern = " + pathPattern); FileStatus srcs[] = fs.globStatus(new Path(pathPattern), FileUtils.HIDDEN_FILES_PATH_FILTER); Arrays.sort(srcs); for (FileStatus src : srcs) { LOG.info("Got file: " + src.getPath()); } if (srcs.length == 0) { return null; } return srcs; } catch (Exception e) { throw new RuntimeException("Cannot get path ", e); } }
/**
 * Checks whether this table's location holds no visible (non-hidden) entries.
 *
 * @return true if the path does not exist or contains no non-hidden entries
 * @throws HiveException wrapping any filesystem access failure
 */
public boolean isEmpty() throws HiveException {
  Preconditions.checkNotNull(getPath());
  try {
    FileSystem fs = FileSystem.get(getPath().toUri(), SessionState.getSessionConf());
    if (!fs.exists(getPath())) {
      // A missing location counts as empty.
      return true;
    }
    FileStatus[] visible = fs.listStatus(getPath(), FileUtils.HIDDEN_FILES_PATH_FILTER);
    return visible.length == 0;
  } catch (IOException e) {
    throw new HiveException(e);
  }
}
/** * get all paths for this partition in a sorted manner */ @SuppressWarnings("nls") public FileStatus[] getSortedPaths() { try { // Previously, this got the filesystem of the Table, which could be // different from the filesystem of the partition. FileSystem fs = getDataLocation().getFileSystem(SessionState.getSessionConf()); String pathPattern = getDataLocation().toString(); if (getBucketCount() > 0) { pathPattern = pathPattern + "/*"; } LOG.info("Path pattern = " + pathPattern); FileStatus srcs[] = fs.globStatus(new Path(pathPattern), FileUtils.HIDDEN_FILES_PATH_FILTER); Arrays.sort(srcs); for (FileStatus src : srcs) { LOG.info("Got file: " + src.getPath()); } if (srcs.length == 0) { return null; } return srcs; } catch (Exception e) { throw new RuntimeException("Cannot get path ", e); } }
@SuppressWarnings("nls") public FileStatus[] getSortedPaths() { try { // Previously, this got the filesystem of the Table, which could be // different from the filesystem of the partition. FileSystem fs = FileSystem.get(getPath().toUri(), SessionState.getSessionConf()); String pathPattern = getPath().toString(); if (getNumBuckets() > 0) { pathPattern = pathPattern + "/*"; } LOG.info("Path pattern = " + pathPattern); FileStatus srcs[] = fs.globStatus(new Path(pathPattern), FileUtils.HIDDEN_FILES_PATH_FILTER); Arrays.sort(srcs); for (FileStatus src : srcs) { LOG.info("Got file: " + src.getPath()); } if (srcs.length == 0) { return null; } return srcs; } catch (Exception e) { throw new RuntimeException("Cannot get path ", e); } }
/**
 * Returns the storage handler for a non-native table, caching it after the
 * first lookup. Native tables have no handler, so {@code null} is returned.
 *
 * @return the cached storage handler, or {@code null} for native tables
 */
public HiveStorageHandler getStorageHandler() {
  if (storageHandler == null && isNonNative()) {
    try {
      String handlerName = getProperty(
          org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
      storageHandler = HiveUtils.getStorageHandler(SessionState.getSessionConf(), handlerName);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  return storageHandler;
}
/** * get all paths for this partition in a sorted manner */ @SuppressWarnings("nls") public FileStatus[] getSortedPaths() { try { // Previously, this got the filesystem of the Table, which could be // different from the filesystem of the partition. FileSystem fs = getDataLocation().getFileSystem(SessionState.getSessionConf()); String pathPattern = getDataLocation().toString(); if (getBucketCount() > 0) { pathPattern = pathPattern + "/*"; } LOG.info("Path pattern = " + pathPattern); FileStatus srcs[] = fs.globStatus(new Path(pathPattern), FileUtils.HIDDEN_FILES_PATH_FILTER); Arrays.sort(srcs); for (FileStatus src : srcs) { LOG.info("Got file: " + src.getPath()); } if (srcs.length == 0) { return null; } return srcs; } catch (Exception e) { throw new RuntimeException("Cannot get path ", e); } }
/**
 * Returns the storage handler for a non-native table, caching it after the
 * first lookup. Native tables have no handler, so {@code null} is returned.
 *
 * @return the cached storage handler, or {@code null} for native tables
 */
public HiveStorageHandler getStorageHandler() {
  if (storageHandler == null && isNonNative()) {
    try {
      String handlerName = getProperty(
          org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
      storageHandler = HiveUtils.getStorageHandler(SessionState.getSessionConf(), handlerName);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  return storageHandler;
}
// NOTE(review): fragment of a larger method — the enclosing block opens here and
// continues past this view.
// Resolve the filesystem from the table's data location, which may differ from
// the session's default filesystem; then create destPath, warning (rather than
// failing) if it already exists.
FileSystem fs = tbl.getDataLocation().getFileSystem(SessionState.getSessionConf()); if(!FileUtils.mkdir(fs, destPath, conf)) { LOG.warn(destPath + " already exists?!?!");
/**
 * Builds the Druid HTTP client, sized and timed out per the session
 * configuration, and wrapped for Kerberos when Hadoop security is enabled.
 *
 * @param lifecycle lifecycle the client is bound to
 * @return a plain or Kerberos-aware HTTP client
 */
private static HttpClient makeHttpClient(Lifecycle lifecycle) {
  final int numConnection = HiveConf.getIntVar(SessionState.getSessionConf(),
      HiveConf.ConfVars.HIVE_DRUID_NUM_HTTP_CONNECTION);
  final Period readTimeout = new Period(HiveConf.getVar(SessionState.getSessionConf(),
      HiveConf.ConfVars.HIVE_DRUID_HTTP_READ_TIMEOUT));
  LOG.info("Creating Druid HTTP client with {} max parallel connections and {}ms read timeout",
      numConnection, readTimeout.toStandardDuration().getMillis());
  final HttpClientConfig config = HttpClientConfig.builder()
      .withNumConnections(numConnection)
      .withReadTimeout(new Period(readTimeout).toStandardDuration())
      .build();
  final HttpClient httpClient = HttpClientInit.createClient(config, lifecycle);
  if (!UserGroupInformation.isSecurityEnabled()) {
    return httpClient;
  }
  LOG.info("building Kerberos Http Client");
  return new KerberosHttpClient(httpClient);
}
/**
 * Validates the UDF's arguments: only permitted in Hive test mode, and requires
 * exactly one primitive argument which is converted to a writable boolean.
 *
 * @param arguments argument inspectors supplied by the compiler
 * @return a writable void inspector (the UDF yields no value)
 * @throws UDFArgumentException if the argument count or category is wrong
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  HiveConf sessionConf = SessionState.getSessionConf();
  if (!sessionConf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) {
    throw new RuntimeException("this UDF is only available in testmode");
  }
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("ASSERT_TRUE_OOM() expects one argument.");
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Argument to ASSERT_TRUE_OOM() should be primitive.");
  }
  conditionConverter = ObjectInspectorConverters.getConverter(arguments[0],
      PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
  return PrimitiveObjectInspectorFactory.writableVoidObjectInspector;
}
private List<FieldSchema> getColsInternal(boolean forMs) { String serializationLib = getSerializationLib(); try { // Do the lightweight check for general case. if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) { return tTable.getSd().getCols(); } else if (forMs && !shouldStoreFieldsInMetastore( SessionState.getSessionConf(), serializationLib, tTable.getParameters())) { return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer()); } else { return HiveMetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer()); } } catch (Exception e) { LOG.error("Unable to get field from serde: " + serializationLib, e); } return new ArrayList<FieldSchema>(); }
private List<FieldSchema> getColsInternal(boolean forMs) { String serializationLib = getSerializationLib(); try { // Do the lightweight check for general case. if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) { return tTable.getSd().getCols(); } else if (forMs && !shouldStoreFieldsInMetastore( SessionState.getSessionConf(), serializationLib, tTable.getParameters())) { return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer()); } else { return MetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer()); } } catch (Exception e) { LOG.error("Unable to get field from serde: " + serializationLib, e); } return new ArrayList<FieldSchema>(); }
private List<FieldSchema> getColsInternal(boolean forMs) { try { String serializationLib = tPartition.getSd().getSerdeInfo().getSerializationLib(); // Do the lightweight check for general case. if (Table.hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) { return tPartition.getSd().getCols(); } else if (forMs && !Table.shouldStoreFieldsInMetastore( SessionState.getSessionConf(), serializationLib, table.getParameters())) { return Hive.getFieldsFromDeserializerForMsStorage(table, getDeserializer()); } return HiveMetaStoreUtils.getFieldsFromDeserializer(table.getTableName(), getDeserializer()); } catch (Exception e) { LOG.error("Unable to get cols from serde: " + tPartition.getSd().getSerdeInfo().getSerializationLib(), e); } return new ArrayList<FieldSchema>(); }
private List<FieldSchema> getColsInternal(boolean forMs) { try { String serializationLib = tPartition.getSd().getSerdeInfo().getSerializationLib(); // Do the lightweight check for general case. if (Table.hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) { return tPartition.getSd().getCols(); } else if (forMs && !Table.shouldStoreFieldsInMetastore( SessionState.getSessionConf(), serializationLib, table.getParameters())) { return Hive.getFieldsFromDeserializerForMsStorage(table, getDeserializer()); } return MetaStoreUtils.getFieldsFromDeserializer(table.getTableName(), getDeserializer()); } catch (Exception e) { LOG.error("Unable to get cols from serde: " + tPartition.getSd().getSerdeInfo().getSerializationLib(), e); } return new ArrayList<FieldSchema>(); }