/**
 * Constructs a new pair, inferring the element types from the passed arguments.
 *
 * @param <T1> type of the first element
 * @param <T2> type of the second element
 * @param a first element
 * @param b second element
 * @return a new pair containing the passed arguments
 */
public static <T1, T2> Pair<T1, T2> newPair(T1 a, T2 b) {
    return new Pair<T1, T2>(a, b);
}
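// A minimal usage sketch (values hypothetical): the static factory lets the
// compiler infer both type arguments from the call site, so no explicit
// Pair<String, Integer> is needed on the right-hand side. The getFirst/getSecond
// accessors are assumed from Kylin's Pair class.
Pair<String, Integer> metric = Pair.newPair("rowCount", 42);
String name = metric.getFirst();
Integer value = metric.getSecond();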
public Pair<TblColRef, TupleExpression> replaceColumnByIndex(int index, TblColRef newColumn,
        TupleExpression newTupleExpr) {
    if (index < 0 || index >= columns.size()) {
        return null;
    }
    TblColRef oldCol = columns.set(index, newColumn);
    TupleExpression oldExpr = sourceColumns.set(index, newTupleExpr);
    return new Pair<>(oldCol, oldExpr);
}
public static Pair<String, String> getTableNameSplits(String tableName) {
    if (Strings.isNullOrEmpty(tableName)) {
        return null;
    }
    String[] splits = tableName.split(Pattern.quote("."));
    int i = 0;
    // fall back to the default database when the table name is unqualified
    String database = splits.length == 1 ? KYLIN_PREFIX : splits[i++];
    String tableNameOnly = splits[i];
    return new Pair<>(database, tableNameOnly);
}
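// Illustrative calls (inputs hypothetical), assuming KYLIN_PREFIX holds the
// default database name:
//   getTableNameSplits("SALES")      -> (KYLIN_PREFIX, "SALES")
//   getTableNameSplits("EDW.SALES")  -> ("EDW", "SALES")
//   getTableNameSplits(null)         -> null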
public Map<Long, Map<Long, Pair<Long, Long>>> formatRollingUpStats(List<List<String>> orgRollingUpCount) {
    Map<Long, Map<Long, Pair<Long, Long>>> formattedRollingUpStats = Maps.newLinkedHashMap();
    for (List<String> rollingUp : orgRollingUpCount) {
        Map<Long, Pair<Long, Long>> childMap = Maps.newLinkedHashMap();
        // each row is [sourceCuboid, targetCuboid, rollupCount, returnCount]
        Long srcCuboid = Long.parseLong(rollingUp.get(0));
        Long tgtCuboid = Long.parseLong(rollingUp.get(1));
        Long rollupCount = (long) Double.parseDouble(rollingUp.get(2));
        Long returnCount = (long) Double.parseDouble(rollingUp.get(3));
        childMap.put(tgtCuboid, new Pair<>(rollupCount, returnCount));
        formattedRollingUpStats.put(srcCuboid, childMap);
    }
    return formattedRollingUpStats;
}
@Override
public Pair<Integer, String> call() throws Exception {
    Pair<Integer, String> result;
    try {
        result = exec.execute(cmd, patternedLogger);
    } catch (Exception e) {
        logger.error("error run spark job:", e);
        result = new Pair<>(-1, e.getMessage());
    }
    return result;
}
};
@Override
public Pair<byte[], byte[]> next() {
    try {
        cursorIdx++;
        int keyLen = dis.readInt();
        byte[] key = new byte[keyLen];
        // read() may return before filling the buffer; readFully guarantees a complete read
        dis.readFully(key);
        int valueLen = dis.readInt();
        byte[] value = new byte[valueLen];
        dis.readFully(value);
        return new Pair<>(key, value);
    } catch (Exception e) {
        // keep the original exception as the cause rather than flattening it to a message
        throw new RuntimeException("Cannot read AggregationCache from dumped file: " + e.getMessage(), e);
    }
}
/**
 * @param canonicalName realization name in canonical form, e.g. "CUBE[name=xxx]"
 * @return type and name pair for realization
 */
private static Pair<String, String> parseCanonicalName(String canonicalName) {
    // splitting "CUBE[name=xxx]" on any of "[]=," yields [type, "name", name, ...]
    Iterable<String> parts = Splitter.on(CharMatcher.anyOf("[]=,")).split(canonicalName);
    String[] partsStr = Iterables.toArray(parts, String.class);
    return new Pair<>(partsStr[0], partsStr[2]);
}
}
public static Pair<Map<Long, Long>, Map<Long, Double>> readCuboidStatsAndSizeFromCube(Set<Long> cuboidIds,
        CubeInstance cube) throws IOException {
    Preconditions.checkNotNull(cuboidIds, "The cuboid set can not be null");
    Preconditions.checkNotNull(cube, "The cube instance can not be null");

    List<CubeSegment> segmentList = cube.getSegments(SegmentStatusEnum.READY);
    Map<Long, Long> statisticsMerged = Maps.newHashMapWithExpectedSize(cuboidIds.size());
    Map<Long, Double> sizeMerged = Maps.newHashMapWithExpectedSize(cuboidIds.size());
    readCuboidStatsFromSegments(cuboidIds, segmentList, statisticsMerged, sizeMerged);
    return new Pair<>(statisticsMerged, sizeMerged);
}
public static List<Pair<String, String>> getPartitionKVsForHiveTable() {
    List<Pair<String, String>> partitionKVs = Lists.newLinkedList();
    partitionKVs.add(new Pair<>(TimePropertyEnum.DAY_DATE.toString(), HiveTypeEnum.HSTRING.toString()));
    return partitionKVs;
}
private Pair<Boolean, Set<String>> hasOverlap(ArrayList<Set<String>> dimsList, Set<String> dims) {
    // note: the dims parameter is not consulted; only dimsList is checked against itself
    Set<String> existing = new HashSet<>();
    Set<String> overlap = new HashSet<>();
    for (Set<String> dimSet : dimsList) {
        if (CollectionUtils.containsAny(existing, dimSet)) {
            overlap.addAll(ensureOrder(CollectionUtils.intersection(existing, dimSet)));
        }
        existing.addAll(dimSet);
    }
    return new Pair<>(!overlap.isEmpty(), overlap);
}
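// A hypothetical walk-through (names and values illustrative; runs inside the
// enclosing class since hasOverlap is private): with dimsList = [{A, B}, {B, C}]
// the second set shares B with the accumulated "existing" set, so the method
// returns (true, {B}); fully disjoint sets yield (false, {}).
ArrayList<Set<String>> groups = new ArrayList<>();
groups.add(new HashSet<>(Arrays.asList("A", "B")));
groups.add(new HashSet<>(Arrays.asList("B", "C")));
Pair<Boolean, Set<String>> result = hasOverlap(groups, new HashSet<>());
// result.getFirst() == true; result.getSecond() contains "B"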
public static List<Pair<String, String>> getTimeColumnsForMetrics() {
    List<Pair<String, String>> columns = Lists.newLinkedList();
    columns.add(new Pair<>(RecordEvent.RecordReserveKeyEnum.TIME.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(TimePropertyEnum.YEAR.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(TimePropertyEnum.MONTH.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(TimePropertyEnum.WEEK_BEGIN_DATE.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(TimePropertyEnum.DAY_TIME.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(TimePropertyEnum.TIME_HOUR.toString(), HiveTypeEnum.HINT.toString()));
    columns.add(new Pair<>(TimePropertyEnum.TIME_MINUTE.toString(), HiveTypeEnum.HINT.toString()));
    columns.add(new Pair<>(TimePropertyEnum.TIME_SECOND.toString(), HiveTypeEnum.HINT.toString()));
    return columns;
}
public static Pair<Boolean, String> handleTempStatement(String sql, KylinConfig config) {
    if (!config.isConvertCreateTableToWith()) {
        return new Pair<>(false, sql);
    }
    if (isDropTable(sql)) {
        return new Pair<>(true, sql);
    }
    if (isCreateTable(sql)) {
        try {
            translateCreateToWith(sql, config);
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
        return new Pair<>(true, sql);
    }
    sql = TempStatementUtil.appendWith(sql, config);
    return new Pair<>(false, sql);
}
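// Sketch of a call site branching on the returned flag; the shape and the
// preprocess name are assumed from the method contract above, not taken from
// Kylin's actual caller:
static String preprocess(String sql, KylinConfig config) {
    Pair<Boolean, String> handled = handleTempStatement(sql, config);
    if (handled.getFirst()) {
        return null; // CREATE/DROP TABLE was intercepted; nothing left to execute
    }
    return handled.getSecond(); // possibly rewritten with a WITH clause
}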
public static List<Pair<String, String>> getHiveColumnsForMetricsJobException() {
    List<Pair<String, String>> columns = Lists.newLinkedList();
    columns.add(new Pair<>(JobPropertyEnum.ID_CODE.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(RecordEvent.RecordReserveKeyEnum.HOST.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(JobPropertyEnum.USER.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(JobPropertyEnum.PROJECT.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(JobPropertyEnum.CUBE.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(JobPropertyEnum.TYPE.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(JobPropertyEnum.ALGORITHM.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(JobPropertyEnum.EXCEPTION.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.addAll(getTimeColumnsForMetrics());
    return columns;
}
public static Pair<Set<TblColRef>, Set<TblColRef>> collectColumnsPair(TupleExpression tupleExpression) {
    ExpressionColCollector collector = new ExpressionColCollector();
    tupleExpression.accept(collector);
    return new Pair<>(collector.filterColumns, collector.measureColumns);
}
public static List<Pair<String, String>> getHiveColumnsForMetricsQueryRPC() {
    List<Pair<String, String>> columns = Lists.newLinkedList();
    columns.add(new Pair<>(RecordEvent.RecordReserveKeyEnum.HOST.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.PROJECT.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.REALIZATION.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.RPC_SERVER.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.EXCEPTION.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.CALL_TIME.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.RETURN_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.SCAN_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.SKIP_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.AGGR_FILTER_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryRPCPropertyEnum.AGGR_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.addAll(getTimeColumnsForMetrics());
    return columns;
}
public static List<Pair<String, String>> getHiveColumnsForMetricsQuery() {
    List<Pair<String, String>> columns = Lists.newLinkedList();
    columns.add(new Pair<>(QueryPropertyEnum.ID_CODE.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(RecordEvent.RecordReserveKeyEnum.HOST.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.USER.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.PROJECT.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.REALIZATION.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.REALIZATION_TYPE.toString(), HiveTypeEnum.HINT.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.TYPE.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.EXCEPTION.toString(), HiveTypeEnum.HSTRING.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.TIME_COST.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.CALCITE_RETURN_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.STORAGE_RETURN_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.add(new Pair<>(QueryPropertyEnum.AGGR_FILTER_COUNT.toString(), HiveTypeEnum.HBIGINT.toString()));
    columns.addAll(getTimeColumnsForMetrics());
    return columns;
}
/**
 * Get the encoding name and version for the given col from Measure FunctionDesc
 *
 * @param functionDesc the measure function holding the encoding configuration
 * @param tblColRef the column whose encoding is being looked up
 * @return a pair of the encoding name and encoding version
 */
public static final Pair<String, String> getEncoding(FunctionDesc functionDesc, TblColRef tblColRef) {
    String encoding = functionDesc.getConfiguration().get(CONFIG_ENCODING_PREFIX + tblColRef.getIdentity());
    String encodingVersion = functionDesc.getConfiguration()
            .get(CONFIG_ENCODING_VERSION_PREFIX + tblColRef.getIdentity());
    if (StringUtils.isEmpty(encoding)) {
        // for backward compatibility: older metadata keyed the encoding by bare column name
        encoding = functionDesc.getConfiguration().get(CONFIG_ENCODING_PREFIX + tblColRef.getName());
        encodingVersion = functionDesc.getConfiguration().get(CONFIG_ENCODING_VERSION_PREFIX + tblColRef.getName());
    }
    return new Pair<>(encoding, encodingVersion);
}
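// Hypothetical configuration entries this lookup would resolve (the column
// identity and encoding values are illustrative, not Kylin defaults):
//   CONFIG_ENCODING_PREFIX + "DB.TABLE.SELLER_ID"         -> "int:4"
//   CONFIG_ENCODING_VERSION_PREFIX + "DB.TABLE.SELLER_ID" -> "1"
// Identity-qualified keys are preferred; the bare column name ("SELLER_ID")
// is only consulted as the backward-compatibility fallback.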
/**
 * Build the new key, returning a reused ByteArray object. Suitable for MR.
 *
 * @param parentCuboid the cuboid the source row belongs to
 * @param childCuboid the cuboid the new key is built for
 * @param splitBuffers the parent row key split into per-column buffers
 * @return the full key length paired with the reused key buffer
 */
public Pair<Integer, ByteArray> buildKey(Cuboid parentCuboid, Cuboid childCuboid, ByteArray[] splitBuffers) {
    RowKeyEncoder rowkeyEncoder = rowKeyEncoderProvider.getRowkeyEncoder(childCuboid);
    int fullKeySize = rowkeyEncoder.getBytesLength();
    if (newKeyBodyBuf == null || newKeyBodyBuf.length() < fullKeySize) {
        newKeyBodyBuf = new ByteArray(fullKeySize);
    }
    buildKeyInternal(parentCuboid, childCuboid, splitBuffers, newKeyBodyBuf);
    return new Pair<>(Integer.valueOf(fullKeySize), newKeyBodyBuf);
}
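// Caution implied by the javadoc: the ByteArray in the returned pair is the
// reused newKeyBodyBuf, so its contents are overwritten by the next call. A
// sketch of a caller that must retain the key, assuming ByteArray exposes its
// backing buffer via array():
Pair<Integer, ByteArray> key = buildKey(parentCuboid, childCuboid, splitBuffers);
byte[] retained = new byte[key.getFirst()];
System.arraycopy(key.getSecond().array(), 0, retained, 0, key.getFirst());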
private TupleExpression getCountColumnExpression(TblColRef colRef) {
    // builds the equivalent of: CASE WHEN colRef IS NULL THEN 0 ELSE <count column> END
    List<Pair<TupleFilter, TupleExpression>> whenList = Lists.newArrayListWithExpectedSize(1);
    TupleFilter whenFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.ISNULL);
    whenFilter.addChild(new ColumnTupleFilter(colRef));
    whenList.add(new Pair<TupleFilter, TupleExpression>(whenFilter, new NumberTupleExpression(0)));

    TupleExpression elseExpr = new ColumnTupleExpression(SumDynamicFunctionDesc.mockCntCol);
    TupleExpression ret = new CaseTupleExpression(whenList, elseExpr);
    ret.setDigest("_KY_COUNT(" + colRef.getName() + ")");
    return ret;
}
}
@Override
public void deserialize(IFilterCodeSystem<?> cs, ByteBuffer buffer) {
    int nWhenEntries = BytesUtil.readVInt(buffer);
    List<Pair<TupleFilter, TupleExpression>> whenList = Lists.newArrayListWithExpectedSize(nWhenEntries);
    for (int i = 0; i < nWhenEntries; i++) {
        TupleFilter tupleFilter = TupleFilterSerializer.deserialize(BytesUtil.readByteArray(buffer), cs);
        TupleExpression tupleExpression = TupleExpressionSerializer.deserialize(BytesUtil.readByteArray(buffer), cs);
        whenList.add(new Pair<>(tupleFilter, tupleExpression));
    }
    this.whenList = whenList;

    int flag = BytesUtil.readVInt(buffer);
    if (flag == 1) { // the flag marks whether an ELSE expression was serialized
        this.elseExpr = TupleExpressionSerializer.deserialize(BytesUtil.readByteArray(buffer), cs);
    }
}