public static String[] splitByComma(String str) {
    return split(str, ",");
}
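// Hedged usage sketch (not from the original source): this assumes StringUtil is
// org.apache.kylin.common.util.StringUtil on the classpath and that splitByComma
// simply delegates to split(str, ",") as shown above, yielding one token per
// comma-separated field. The input string below is made up for illustration.
import org.apache.kylin.common.util.StringUtil;

public class SplitByCommaSketch {
    public static void main(String[] args) {
        String[] parts = StringUtil.splitByComma("alice,ADMIN,ANALYST");
        for (String p : parts) {
            System.out.println(p); // expected output: alice, ADMIN, ANALYST, one per line
        }
    }
}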
public void parseUserInfo(String userInfo) {
    String[] info = StringUtil.splitByComma(userInfo);
    if (info.length > 0) // first element is the username
        this.username = info[0];
    for (int i = 1; i < info.length; i++) // the remaining elements, starting at index 1, are roles
        this.roles.add(info[i]);
}
public void setExternalDataPaths(List<String> externalDataPaths) {
    setParam("externalDataPaths", StringUtil.join(externalDataPaths, ","));
}
/**
 * Parse the lookup snapshot string into a map from lookup table name to snapshot path.
 *
 * @param snapshotsString comma-separated "table=path" entries
 * @return map of lookup table name to snapshot path
 */
public static Map<String, String> parseLookupSnapshots(String snapshotsString) {
    Map<String, String> lookupSnapshotMap = Maps.newHashMap();
    String[] lookupSnapshotEntries = StringUtil.splitByComma(snapshotsString);
    for (String lookupSnapshotEntryStr : lookupSnapshotEntries) {
        String[] split = StringUtil.split(lookupSnapshotEntryStr, "=");
        lookupSnapshotMap.put(split[0], split[1]);
    }
    return lookupSnapshotMap;
}
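// Hedged usage sketch (not from the original source): assuming parseLookupSnapshots
// receives comma-separated "table=path" pairs as described above, a call from the
// same class might look like this; the table names and paths below are made up
// for illustration.
Map<String, String> snapshots = parseLookupSnapshots(
        "DEFAULT.KYLIN_CAL_DT=/snapshots/cal_dt,DEFAULT.KYLIN_CATEGORY=/snapshots/category");
// Expected under that assumption:
//   "DEFAULT.KYLIN_CAL_DT"   -> "/snapshots/cal_dt"
//   "DEFAULT.KYLIN_CATEGORY" -> "/snapshots/category"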
public SparkExecutable createMergeDictionaryStep(CubeSegment seg, String jobID, List<String> mergingSegmentIds) {
    final SparkExecutable sparkExecutable = new SparkExecutable();
    sparkExecutable.setClassName(SparkMergingDictionary.class.getName());
    sparkExecutable.setParam(SparkMergingDictionary.OPTION_CUBE_NAME.getOpt(), seg.getRealization().getName());
    sparkExecutable.setParam(SparkMergingDictionary.OPTION_SEGMENT_ID.getOpt(), seg.getUuid());
    sparkExecutable.setParam(SparkMergingDictionary.OPTION_META_URL.getOpt(), getSegmentMetadataUrl(seg.getConfig(), jobID));
    sparkExecutable.setParam(SparkMergingDictionary.OPTION_MERGE_SEGMENT_IDS.getOpt(), StringUtil.join(mergingSegmentIds, ","));
    sparkExecutable.setParam(SparkMergingDictionary.OPTION_OUTPUT_PATH_DICT.getOpt(), getDictInfoPath(jobID));
    sparkExecutable.setParam(SparkMergingDictionary.OPTION_OUTPUT_PATH_STAT.getOpt(), getStatisticsPath(jobID));

    sparkExecutable.setJobId(jobID);
    sparkExecutable.setName(ExecutableConstants.STEP_NAME_MERGE_DICTIONARY);
    sparkExecutable.setSparkConfigName(ExecutableConstants.SPARK_SPECIFIC_CONFIG_NAME_MERGE_DICTIONARY);

    StringBuilder jars = new StringBuilder();
    StringUtil.appendWithSeparator(jars, seg.getConfig().getSparkAdditionalJars());
    sparkExecutable.setJars(jars.toString());

    return sparkExecutable;
}
public void init(CubeDesc cubeDesc) {
    cubeRef = cubeDesc;

    for (HBaseColumnFamilyDesc cf : columnFamily) {
        cf.setName(cf.getName().toUpperCase(Locale.ROOT));

        for (HBaseColumnDesc c : cf.getColumns()) {
            c.setQualifier(c.getQualifier().toUpperCase(Locale.ROOT));
            StringUtil.toUpperCaseArray(c.getMeasureRefs(), c.getMeasureRefs());
        }
    }
}
private List<String> getIntermediateTables() {
    List<String> intermediateTables = Lists.newArrayList();
    String[] tables = StringUtil.splitAndTrim(getParam("oldHiveTables"), ",");
    for (String t : tables) {
        intermediateTables.add(t);
    }
    return intermediateTables;
}
static DataModelManager newInstance(KylinConfig conf) {
    try {
        String cls = StringUtil.noBlank(conf.getDataModelManagerImpl(), DataModelManager.class.getName());
        Class<? extends DataModelManager> clz = ClassUtil.forName(cls, DataModelManager.class);
        return clz.getConstructor(KylinConfig.class).newInstance(conf);
    } catch (Exception e) {
        throw new RuntimeException("Failed to init DataModelManager from " + conf, e);
    }
}
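// Hedged sketch (not from the original source): assuming StringUtil.noBlank(value, dft)
// returns dft when value is null or blank and value itself otherwise, the factory
// above falls back to the default DataModelManager class name when no custom
// implementation is configured.
String impl = StringUtil.noBlank(null, DataModelManager.class.getName());
// impl is expected to equal DataModelManager.class.getName() under that assumption,
// and ClassUtil.forName / getConstructor then instantiate that class reflectively.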
logger.trace("Hive Dependencies After Filtered: " + filteredHive); StringUtil.appendWithSeparator(kylinDependency, filteredHive); } else { StringUtil.appendWithSeparator(kylinDependency, hiveExecJarPath); logger.debug("hive-exec jar file: " + hiveExecJarPath); StringUtil.appendWithSeparator(kylinDependency, hiveHCatJarPath); logger.debug("hive-catalog jar file: " + hiveHCatJarPath); StringUtil.appendWithSeparator(kylinDependency, hiveMetaStoreJarPath); logger.debug("hive-metastore jar file: " + hiveMetaStoreJarPath); } catch (ClassNotFoundException e) { kylinKafkaDependency = kylinKafkaDependency.replace(":", ","); logger.trace("Kafka Dependencies: " + kylinKafkaDependency); StringUtil.appendWithSeparator(kylinDependency, kylinKafkaDependency); } else { logger.debug("No Kafka dependency jar set in the environment, will find them from classpath:"); String kafkaClientJarPath = ClassUtil .findContainingJar(Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer")); StringUtil.appendWithSeparator(kylinDependency, kafkaClientJarPath); logger.debug("kafka jar file: " + kafkaClientJarPath); StringUtil.appendWithSeparator(kylinDependency, mrLibDir);
public static void main(String[] args) throws IOException {
    args = StringUtil.filterSystemArgs(args);
    // ...
void init(DataModelDesc model) {
    table = table.toUpperCase(Locale.ROOT);
    if (columns != null) {
        StringUtil.toUpperCaseArray(columns, columns);
    }

    if (model != null) {
        table = model.findTable(table).getAlias();
        if (columns != null) {
            for (int i = 0; i < columns.length; i++) {
                TblColRef column = model.findColumn(table, columns[i]);

                if (column.getColumnDesc().isComputedColumn() && !model.isFactTable(column.getTableRef())) {
                    throw new RuntimeException("Computed Column on lookup table is not allowed");
                }

                columns[i] = column.getName();
            }
        }
    }
}
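// Hedged sketch (not from the original source): assuming
// StringUtil.toUpperCaseArray(source, target) writes the upper-cased form of each
// source element into the corresponding slot of target (here the same array, so the
// conversion is effectively in place), column names are normalized like this:
String[] columns = { "cal_dt", "seller_id" };
StringUtil.toUpperCaseArray(columns, columns);
// columns is expected to be { "CAL_DT", "SELLER_ID" } under that assumption.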
public SparkExecutable createMergeCuboidDataStep(CubeSegment seg, List<CubeSegment> mergingSegments, String jobID) {
    final List<String> mergingCuboidPaths = Lists.newArrayList();
    for (CubeSegment merging : mergingSegments) {
        mergingCuboidPaths.add(getCuboidRootPath(merging));
    }
    String formattedPath = StringUtil.join(mergingCuboidPaths, ",");
    String outputPath = getCuboidRootPath(jobID);

    final SparkExecutable sparkExecutable = new SparkExecutable();
    sparkExecutable.setClassName(SparkCubingMerge.class.getName());
    sparkExecutable.setParam(SparkCubingMerge.OPTION_CUBE_NAME.getOpt(), seg.getRealization().getName());
    sparkExecutable.setParam(SparkCubingMerge.OPTION_SEGMENT_ID.getOpt(), seg.getUuid());
    sparkExecutable.setParam(SparkCubingMerge.OPTION_INPUT_PATH.getOpt(), formattedPath);
    sparkExecutable.setParam(SparkCubingMerge.OPTION_META_URL.getOpt(), getSegmentMetadataUrl(seg.getConfig(), jobID));
    sparkExecutable.setParam(SparkCubingMerge.OPTION_OUTPUT_PATH.getOpt(), outputPath);

    sparkExecutable.setJobId(jobID);
    sparkExecutable.setName(ExecutableConstants.STEP_NAME_MERGE_CUBOID);

    StringBuilder jars = new StringBuilder();
    StringUtil.appendWithSeparator(jars, seg.getConfig().getSparkAdditionalJars());
    sparkExecutable.setJars(jars.toString());

    return sparkExecutable;
}
private List<String> getExternalDataPaths() {
    String[] paths = StringUtil.splitAndTrim(getParam("externalDataPaths"), ",");
    List<String> result = Lists.newArrayList();
    for (String s : paths) {
        result.add(s);
    }
    return result;
}
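// Hedged round-trip sketch (not from the original source): setExternalDataPaths
// serializes the list with StringUtil.join(list, ",") and getExternalDataPaths reads
// it back with StringUtil.splitAndTrim(value, ","). Assuming join concatenates the
// elements with the given separator and splitAndTrim splits on it and trims each
// token, the pair round-trips cleanly for paths that contain no commas; the paths
// below are made up for illustration.
List<String> paths = Lists.newArrayList("/kylin/external/part-0", "/kylin/external/part-1");
String serialized = StringUtil.join(paths, ",");              // "/kylin/external/part-0,/kylin/external/part-1"
String[] restored = StringUtil.splitAndTrim(serialized, ","); // back to the original elements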
private Class<DataModelDesc> getDataModelImplClass() {
    try {
        String cls = StringUtil.noBlank(config.getDataModelImpl(), DataModelDesc.class.getName());
        Class<? extends DataModelDesc> clz = ClassUtil.forName(cls, DataModelDesc.class);
        return (Class<DataModelDesc>) clz;
    } catch (ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
}
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar(org.apache.hadoop.hbase.KeyValue.class));
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar(org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.class));
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar(org.apache.hadoop.hbase.regionserver.BloomType.class));
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.class)); // hbase-protocol.jar
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar(org.apache.hadoop.hbase.CompatibilityFactory.class)); // hbase-hadoop-compat.jar
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar("org.htrace.HTraceConfiguration", null)); // htrace-core.jar
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar("org.apache.htrace.Trace", null)); // htrace-core.jar
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar("com.yammer.metrics.core.MetricsRegistry", null)); // metrics-core.jar
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar("org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactory", null)); // hbase-hadoop-compat-1.1.1.jar
StringUtil.appendWithSeparator(jars, ClassUtil.findContainingJar("org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl", null)); // hbase-hadoop2-compat-1.1.1.jar
StringUtil.appendWithSeparator(jars, seg.getConfig().getSparkAdditionalJars());

sparkExecutable.setJars(jars.toString());
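// Hedged sketch (not from the original source): assuming
// StringUtil.appendWithSeparator(builder, value) appends value to the builder,
// inserting a comma first when the builder already has content and skipping
// null/blank values, the calls above accumulate one comma-separated jar list.
// The paths below are made up for illustration.
StringBuilder jars = new StringBuilder();
StringUtil.appendWithSeparator(jars, "/opt/hbase/lib/hbase-client.jar");
StringUtil.appendWithSeparator(jars, "/opt/hbase/lib/htrace-core.jar");
// jars.toString() is expected to be
// "/opt/hbase/lib/hbase-client.jar,/opt/hbase/lib/htrace-core.jar" under that assumption.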
public void printIssueExistingHTables() {
    logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
    for (String segFullName : issueExistHTables) {
        String[] sepNameList = StringUtil.splitByComma(segFullName);
        logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1]
                + " has some issues and cannot be read successfully!!!");
    }
    logger.info("----------------------------------------------------");
}
@Override
protected void doReduce(IntWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    for (Text text : values) {
        String value = text.toString();
        String[] splited = StringUtil.split(value, "=");
        if (splited != null && splited.length == 2) {
            logger.info("Dictionary for col {}, save at {}", splited[0], splited[1]);
            context.write(new Text(splited[0]), new Text(splited[1]));
        }
    }
}
    throw new IllegalStateException("Missing join conditions on table " + dimTable);
// ...
StringUtil.toUpperCaseArray(join.getForeignKey(), join.getForeignKey());
StringUtil.toUpperCaseArray(join.getPrimaryKey(), join.getPrimaryKey());