@Override public String toString() { return indexExpr.getExprString(); } }
@Override
public String vectorExpressionParameters() {
  // Describe this vector expression using the underlying expression's string form.
  final String description = expr.getExprString();
  return description;
}
@Override public String toString() { return indexExpr.getExprString(); } }
@Override
public String vectorExpressionParameters() {
  // Delegate to the wrapped expression's textual representation.
  return expr.getExprString();
}
@Override
public String convertExprToFilter(byte[] exprBytes) throws MetaException {
  // Deserialize the encoded expression and render it as a filter string.
  final ExprNodeGenericFuncDesc deserialized = deserializeExpr(exprBytes);
  return deserialized.getExprString();
}
private void pushFilters(final JobConf jobConf, RowSchema rowSchema, ExprNodeGenericFuncDesc filterExpr) { // construct column name list for reference by filter push down Utilities.setColumnNameList(jobConf, rowSchema); // push down filters if (filterExpr == null) { LOG.debug("Not pushing filters because FilterExpr is null"); return; } final String filterText = filterExpr.getExprString(); final String filterExprSerialized = SerializationUtilities.serializeExpression(filterExpr); jobConf.set( TableScanDesc.FILTER_TEXT_CONF_STR, filterText); jobConf.set( TableScanDesc.FILTER_EXPR_CONF_STR, filterExprSerialized); }
private void pushFilters(final JobConf jobConf, RowSchema rowSchema, ExprNodeGenericFuncDesc filterExpr) { // construct column name list for reference by filter push down Utilities.setColumnNameList(jobConf, rowSchema); // push down filters if (filterExpr == null) { LOG.debug("Not pushing filters because FilterExpr is null"); return; } final String filterText = filterExpr.getExprString(); final String filterExprSerialized = SerializationUtilities.serializeExpression(filterExpr); jobConf.set( TableScanDesc.FILTER_TEXT_CONF_STR, filterText); jobConf.set( TableScanDesc.FILTER_EXPR_CONF_STR, filterExprSerialized); }
/**
 * Deserializes the encoded expression and renders it as a filter string.
 * When a default partition name is supplied, "is null" / "is not null"
 * predicates are first rewritten to compare against that name.
 *
 * @throws MetaException if the null-filter rewrite fails
 */
@Override
public String convertExprToFilter(byte[] exprBytes, String defaultPartitionName)
    throws MetaException {
  ExprNodeGenericFuncDesc expr = deserializeExpr(exprBytes);
  if ((defaultPartitionName != null) && (!defaultPartitionName.isEmpty())) {
    try {
      ExprNodeDescUtils.replaceNullFiltersWithDefaultPartition(expr, defaultPartitionName);
    } catch (SemanticException ex) {
      LOG.error("Failed to replace \"is null\" and \"is not null\" expression with default partition", ex);
      // Preserve the original failure as the cause. MetaException is
      // Thrift-generated and has no (message, cause) constructor, so the
      // cause must be chained explicitly instead of being dropped.
      MetaException me = new MetaException(ex.getMessage());
      me.initCause(ex);
      throw me;
    }
  }
  return expr.getExprString();
}
@Override public String getExprString(boolean sortChildren) { if (sortChildren) { UDFType udfType = genericUDF.getClass().getAnnotation(UDFType.class); if (udfType.commutative()) { // Get the sorted children expr strings String[] childrenExprStrings = new String[chidren.size()]; for (int i = 0; i < childrenExprStrings.length; i++) { childrenExprStrings[i] = chidren.get(i).getExprString(); } return genericUDF.getDisplayString( ImmutableSortedMultiset.copyOf(childrenExprStrings).toArray(new String[childrenExprStrings.length])); } } return getExprString(); }
} catch (Exception e) { throw new SemanticException( ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()), e); "Unexpected unknown partitions for " + partSpec.getExprString()); if (throwIfNonExistent) { throw new SemanticException( ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()));
LOG.info("Kafka trimmer working on Filter tree {}", filterExpr.getExprString()); Callable<List<KafkaInputSplit>> trimmerWorker = () -> kafkaScanTrimmer.computeOptimizedScan(filterExpr)
} catch (Exception e) { throw new SemanticException( ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()), e); "Unexpected unknown partitions for " + partSpec.getExprString()); if (throwIfNonExistent) { throw new SemanticException( ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()));
/**
 * Lists partitions matching {@code expr} via the metastore client and
 * asserts that exactly {@code numParts} partitions are returned.
 */
public void checkExpr(int numParts, String dbName, String tblName,
    ExprNodeGenericFuncDesc expr) throws Exception {
  final List<Partition> matched = new ArrayList<Partition>();
  final byte[] serialized = SerializationUtilities.serializeExpressionToKryo(expr);
  // -1 means no limit on the number of partitions returned.
  client.listPartitionsByExpr(dbName, tblName, serialized, null, (short) -1, matched);
  assertEquals("Partition check failed: " + expr.getExprString(), numParts, matched.size());
}
@Test
public void testSerializeTimestamp() {
  // Round-trip an expression holding a timestamp constant through
  // serialization and verify its textual form survives unchanged.
  Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
  children.add(new ExprNodeConstantDesc(ts));
  ExprNodeGenericFuncDesc original = new ExprNodeGenericFuncDesc(
      TypeInfoFactory.timestampTypeInfo, new GenericUDFFromUtcTimestamp(), children);
  ExprNodeGenericFuncDesc roundTripped = SerializationUtilities.deserializeExpression(
      SerializationUtilities.serializeExpression(original));
  assertEquals(original.getExprString(), roundTripped.getExprString());
}
serializedFilterExpr = SerializationUtilities.serializeExpression(filterExpr); String filterText = filterExpr.getExprString(); if (LOG.isDebugEnabled()) { LOG.debug("Pushdown initiated with filterText = " + filterText + ", filterExpr = "
if (ts.getConf() != null && ts.getConf().getFilterExpr() != null) { if (LOG.isDebugEnabled()) { LOG.debug("Serializing: " + ts.getConf().getFilterExpr().getExprString());
serializedFilterExpr = SerializationUtilities.serializeExpression(filterExpr); String filterText = filterExpr.getExprString(); if (LOG.isDebugEnabled()) { LOG.debug("Pushdown initiated with filterText = " + filterText + ", filterExpr = "
if (ts.getConf() != null && ts.getConf().getFilterExpr() != null) { if (LOG.isDebugEnabled()) { LOG.debug("Serializing: " + ts.getConf().getFilterExpr().getExprString());
ExprNodeGenericFuncDesc fd = (ExprNodeGenericFuncDesc) nd; if (LOG.isDebugEnabled()) { String err = "Processing " + fd.getExprString() + " " + fd.getGenericUDF().getUdfName() + " outputs "; for (Object child : nodeOutputs) {
ExprNodeGenericFuncDesc fd = (ExprNodeGenericFuncDesc) nd; if (LOG.isDebugEnabled()) { String err = "Processing " + fd.getExprString() + " " + fd.getGenericUDF().getUdfName() + " outputs "; for (Object child : nodeOutputs) {