/**
 * Replaces everything in {@code dest} with the key/value pairs of {@code src}.
 *
 * @param src  configuration whose entries are copied
 * @param dest configuration that is cleared and then repopulated
 */
public static void copyConf(Configuration src, Configuration dest) {
    // Wipe the destination first so stale keys do not survive the copy.
    dest.clear();
    src.forEach(entry -> dest.set(entry.getKey(), entry.getValue()));
}
/**
 * Deserializes this configuration from {@code in}, replacing all current
 * properties. For each of the {@code size} entries, reads a key, a value,
 * and an optional compressed array naming the resources that set the key.
 *
 * @param in stream positioned at a configuration written by the matching write
 * @throws IOException if the underlying stream fails
 */
@Override
public void readFields(DataInput in) throws IOException {
    clear();
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; i++) {
        String key = org.apache.hadoop.io.Text.readString(in);
        String value = org.apache.hadoop.io.Text.readString(in);
        set(key, value);
        // Java-style array declaration (was C-style "String sources[]").
        String[] sources = WritableUtils.readCompressedStringArray(in);
        if (sources != null) {
            updatingResource.put(key, sources);
        }
    }
}
/**
 * Deserializes this configuration from {@code in}, replacing all current
 * properties. For each of the {@code size} entries, reads a key, a value,
 * and an optional compressed array naming the resources that set the key.
 *
 * @param in stream positioned at a configuration written by the matching write
 * @throws IOException if the underlying stream fails
 */
@Override
public void readFields(DataInput in) throws IOException {
    clear();
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; i++) {
        String key = org.apache.hadoop.io.Text.readString(in);
        String value = org.apache.hadoop.io.Text.readString(in);
        set(key, value);
        // Java-style array declaration (was C-style "String sources[]").
        String[] sources = WritableUtils.readCompressedStringArray(in);
        if (sources != null) {
            putIntoUpdatingResource(key, sources);
        }
    }
}
/**
 * Deserializes this configuration from {@code in}, replacing all current
 * properties. For each of the {@code size} entries, reads a key, a value,
 * and an optional compressed array naming the resources that set the key.
 *
 * @param in stream positioned at a configuration written by the matching write
 * @throws IOException if the underlying stream fails
 */
@Override
public void readFields(DataInput in) throws IOException {
    clear();
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; i++) {
        String key = org.apache.hadoop.io.Text.readString(in);
        String value = org.apache.hadoop.io.Text.readString(in);
        set(key, value);
        // Java-style array declaration (was C-style "String sources[]").
        String[] sources = WritableUtils.readCompressedStringArray(in);
        if (sources != null) {
            putIntoUpdatingResource(key, sources);
        }
    }
}
/**
 * With every property stripped from the configuration, ZKConfig must still
 * produce ZooKeeper properties containing the default client port.
 */
@Test
public void testShouldAssignDefaultZookeeperClientPort() {
    Configuration conf = HBaseConfiguration.create();
    // Drop all loaded defaults so ZKConfig has to fall back on its own.
    conf.clear();
    Properties zkProps = ZKConfig.makeZKProps(conf);
    assertNotNull(zkProps);
    assertEquals(2181, zkProps.get("clientPort"));
}
/**
 * Exercises the private {@code overwriteJobConf} hook via reflection:
 * with the mem-hungry flag set the "mem-" prefixed values must win, and
 * after clearing the configuration the plain values must be applied.
 *
 * @throws Exception on reflection or invocation failure
 */
@Test
public void testOverwriteJobConf() throws Exception {
    MapReduceExecutable executable = new MapReduceExecutable();
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    // String[].class replaces the roundabout "new String[] {}.getClass()".
    Method method = MapReduceExecutable.class.getDeclaredMethod("overwriteJobConf",
            Configuration.class, KylinConfig.class, String[].class);
    method.setAccessible(true);
    Configuration conf = new Configuration();
    conf.set("mapreduce.job.is-mem-hungry", "true");
    method.invoke(executable, conf, config, new String[] { "-cubename", "ci_inner_join_cube" });
    Assert.assertEquals("mem-test1", conf.get("test1"));
    Assert.assertEquals("mem-test2", conf.get("test2"));
    // Reset and rerun without the mem-hungry flag: plain values expected.
    conf.clear();
    method.invoke(executable, conf, config, new String[] { "-cubename", "ci_inner_join_cube" });
    Assert.assertEquals("test1", conf.get("test1"));
    Assert.assertEquals("test2", conf.get("test2"));
}
}
// Lifecycle hook: runs when the container tears this bean down.
@PreDestroy
public void preDestroy() {
    // Empty the configuration first, then drop the reference so it can be GC'd.
    conf.clear();
    conf = null;
}
/**
 * Replaces the contents of {@code dest} with the contents of {@code src}.
 *
 * @param src  source configuration (iterated as key/value entries)
 * @param dest destination configuration; cleared before the copy
 */
public static void copyConf(Configuration src, Configuration dest) {
    dest.clear();
    Iterator<Map.Entry<String, String>> it = src.iterator();
    while (it.hasNext()) {
        Map.Entry<String, String> entry = it.next();
        dest.set(entry.getKey(), entry.getValue());
    }
}
/**
 * Overwrites {@code dest} so that afterwards it holds exactly the
 * key/value pairs currently in {@code src}.
 *
 * @param src  configuration to read from
 * @param dest configuration to clear and repopulate
 */
public static void copyConf(Configuration src, Configuration dest) {
    // Start from an empty destination so no old entries linger.
    dest.clear();
    for (Map.Entry<String, String> property : src) {
        String key = property.getKey();
        String value = property.getValue();
        dest.set(key, value);
    }
}
/**
 * Builds a Configuration populated only from the file named by
 * {@code HCONF_FILE_NAME}, with all default resources discarded.
 *
 * @return the freshly built configuration
 * @throws MalformedURLException if the file path cannot form a valid URL
 */
private static Configuration createHConf() throws MalformedURLException {
    Configuration conf = new Configuration();
    // Discard default resources; only the explicit file should contribute.
    conf.clear();
    conf.addResource(new File(HCONF_FILE_NAME).toURI().toURL());
    return conf;
}
/**
 * Produces the dataset output settings as a plain String map, built by
 * applying {@code AbstractBatchWritableOutputFormat.setDataset} to an
 * otherwise empty Hadoop configuration.
 *
 * @param namespace   dataset namespace
 * @param datasetName dataset name
 * @param datasetArgs dataset runtime arguments
 * @return map view of the configured properties
 */
private Map<String, String> createDatasetConfiguration(String namespace, String datasetName,
                                                       Map<String, String> datasetArgs) {
    Configuration conf = new Configuration();
    // Start empty so only the dataset settings end up in the map.
    conf.clear();
    AbstractBatchWritableOutputFormat.setDataset(conf, namespace, datasetName, datasetArgs);
    return ConfigurationUtil.toMap(conf);
}
/**
 * Returns a String map holding only the dataset output settings for the
 * given namespace/name/arguments.
 *
 * @param namespace   dataset namespace
 * @param datasetName dataset name
 * @param datasetArgs dataset runtime arguments
 * @return map of the resulting configuration entries
 */
private Map<String, String> createDatasetConfiguration(String namespace, String datasetName,
                                                       Map<String, String> datasetArgs) {
    Configuration datasetConf = new Configuration();
    datasetConf.clear(); // no defaults — the map should contain dataset keys only
    AbstractBatchWritableOutputFormat.setDataset(datasetConf, namespace, datasetName, datasetArgs);
    return ConfigurationUtil.toMap(datasetConf);
}
/**
 * Deserializes this configuration from {@code in}: clears current state,
 * then reads the entry count followed by that many key/value string pairs.
 *
 * @param in stream positioned at serialized configuration data
 * @throws IOException if the underlying stream fails
 */
@Override
public void readFields(DataInput in) throws IOException {
    clear();
    int entryCount = WritableUtils.readVInt(in);
    for (int i = 0; i < entryCount; i++) {
        // Key is read before value, matching the write order.
        String key = org.apache.hadoop.io.Text.readString(in);
        String value = org.apache.hadoop.io.Text.readString(in);
        set(key, value);
    }
}
/**
 * ZKConfig must supply the default ZooKeeper client port when the
 * configuration contains no properties at all.
 */
@Test
public void testShouldAssignDefaultZookeeperClientPort() {
    Configuration emptyConf = HBaseConfiguration.create();
    emptyConf.clear(); // remove every loaded default
    Properties zkProperties = ZKConfig.makeZKProps(emptyConf);
    assertNotNull(zkProperties);
    assertEquals(2181, zkProperties.get("clientPort"));
}
/**
 * getRMHAId must return the configured RM id, and null once the
 * configuration is cleared.
 *
 * @throws Exception never expected; declared for the test framework
 */
@Test
public void testGetRMId() throws Exception {
    conf.set(YarnConfiguration.RM_HA_ID, RM1_NODE_ID);
    String honorMsg = "Does not honor " + YarnConfiguration.RM_HA_ID;
    assertEquals(honorMsg, RM1_NODE_ID, HAUtil.getRMHAId(conf));
    conf.clear();
    String unsetMsg = "Return null when " + YarnConfiguration.RM_HA_ID + " is not set";
    assertNull(unsetMsg, HAUtil.getRMHAId(conf));
}
/**
 * Verifies both branches of getRMHAId: honoring an explicit RM_HA_ID and
 * returning null after the setting is removed.
 *
 * @throws Exception never expected; declared for the test framework
 */
@Test
public void testGetRMId() throws Exception {
    // Configured case: the id we set must be echoed back.
    conf.set(YarnConfiguration.RM_HA_ID, RM1_NODE_ID);
    assertEquals("Does not honor " + YarnConfiguration.RM_HA_ID,
        RM1_NODE_ID, HAUtil.getRMHAId(conf));
    // Unconfigured case: wiping the conf must yield null.
    conf.clear();
    assertNull("Return null when " + YarnConfiguration.RM_HA_ID + " is not set",
        HAUtil.getRMHAId(conf));
}
/**
 * getRMHAId should reflect an explicitly configured id and degrade to
 * null when the configuration no longer carries one.
 *
 * @throws Exception never expected; declared for the test framework
 */
@Test
public void testGetRMId() throws Exception {
    final String idKey = YarnConfiguration.RM_HA_ID;
    conf.set(idKey, RM1_NODE_ID);
    assertEquals("Does not honor " + idKey, RM1_NODE_ID, HAUtil.getRMHAId(conf));
    conf.clear();
    assertNull("Return null when " + idKey + " is not set", HAUtil.getRMHAId(conf));
}
// Runs the RM-hostname HA scenario twice: once with the flag false, then —
// after resetting shared state via clear() + setUp() — with the flag true.
@Test public void testHAWithRMHostName() throws Exception { innerTestHAWithRMHostName(false); configuration.clear(); setUp(); innerTestHAWithRMHostName(true); }
// Exercises innerTestHAWithRMHostName for both flag values; the configuration
// must be cleared and the fixture re-initialized (setUp) between the two runs.
@Test public void testHAWithRMHostName() throws Exception { innerTestHAWithRMHostName(false); configuration.clear(); setUp(); innerTestHAWithRMHostName(true); }