public StorageURL getStorageUrl() { String url = getOptional("kylin.storage.url", "default@hbase"); // for backward compatibility if ("hbase".equals(url)) url = "default@hbase"; return StorageURL.valueOf(url); }
/** Returns the configured metadata URL, defaulting to "kylin_metadata@hbase". */
public StorageURL getMetadataUrl() {
    String url = getOptional("kylin.metadata.url", "kylin_metadata@hbase");
    return StorageURL.valueOf(url);
}
/**
 * Deletes the HDFS metadata directory referenced by the given metadata URL.
 *
 * @param metaUrl storage URL whose "path" parameter points at the HDFS metadata directory
 * @throws IOException if the HDFS delete fails
 * @throws IllegalArgumentException if the URL carries no "path" parameter
 */
public static void deleteHDFSMeta(String metaUrl) throws IOException {
    String realHdfsPath = StorageURL.valueOf(metaUrl).getParameter("path");
    // Fail fast with a clear message instead of an NPE inside the filesystem layer.
    if (realHdfsPath == null) {
        throw new IllegalArgumentException("Missing 'path' parameter in meta url: " + metaUrl);
    }
    HadoopUtil.getFileSystem(realHdfsPath).delete(new Path(realHdfsPath), true);
    // Parameterized logging (SLF4J idiom) instead of string concatenation.
    logger.info("Delete metadata in HDFS for this job: {}", realHdfsPath);
}
/**
 * Loads a KylinConfig from the kylin.properties file stored under the HDFS path
 * encoded in the given metadata URL, caching the result per URI.
 *
 * @param uri metadata URL; must contain "@hdfs" and carry a "path" parameter
 * @return the loaded (possibly cached) KylinConfig
 * @throws IllegalArgumentException if uri is null or is not an @hdfs URL
 * @throws RuntimeException if reading the properties file from HDFS fails
 */
public static KylinConfig loadKylinConfigFromHdfs(String uri) {
    if (uri == null)
        throw new IllegalArgumentException("meta url should not be null");
    if (!uri.contains("@hdfs"))
        throw new IllegalArgumentException("meta url should like @hdfs schema");

    // Single cache lookup instead of the original check-then-get double read.
    KylinConfig cached = kylinConfigCache.get(uri);
    if (cached != null) {
        logger.info("KylinConfig cached for : {}", uri);
        return cached;
    }

    logger.info("Ready to load KylinConfig from uri: {}", uri);
    String realHdfsPath = StorageURL.valueOf(uri).getParameter("path") + "/"
            + KylinConfig.KYLIN_CONF_PROPERTIES_FILE;
    KylinConfig config;
    try {
        FileSystem fs = HadoopUtil.getFileSystem(realHdfsPath);
        // try-with-resources: the HDFS stream was previously never closed (leak).
        try (InputStream is = fs.open(new Path(realHdfsPath))) {
            Properties prop = KylinConfig.streamToProps(is);
            config = KylinConfig.createKylinConfig(prop);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    kylinConfigCache.put(uri, config);
    return config;
}
/** Verifies the identity-caching and equality semantics of StorageURL.valueOf(). */
@Test
public void testValueOfCache() {
    // Identical input strings must hit the valueOf() cache and yield the same instance.
    StorageURL first = StorageURL.valueOf("hello@hbase");
    StorageURL second = StorageURL.valueOf("hello@hbase");
    assertTrue(first == second);

    // A whitespace variation parses to an equal URL but a distinct (uncached) instance.
    StorageURL spaced = StorageURL.valueOf("hello @ hbase");
    assertTrue(first != spaced);
    assertTrue(first.equals(spaced));

    // Extra parameters produce a URL that is neither the same instance nor equal.
    StorageURL withParam = StorageURL.valueOf("hello@hbase,a=b");
    assertTrue(second != withParam);
    assertTrue(!second.equals(withParam));
}
}
// Verifies HDFSResourceStore.getAllFilePath() lists the resource files under a directory.
@Test
public void testListResourcesImpl() throws Exception {
    // Use a local path as the store root so the test runs against the local FileSystem.
    String path = "../examples/test_metadata/";
    String cp = new File(path).getCanonicalFile().getPath();
    FileSystem fs = HadoopUtil.getFileSystem(cp);
    // The store is constructed with a placeholder URL; the real FileSystem is then
    // injected via reflection because the "fs" field is private and has no setter.
    HDFSResourceStore store = new HDFSResourceStore(KylinConfig.getInstanceFromEnv(),
            StorageURL.valueOf("hdfs@hdfs"));
    Field field = store.getClass().getDeclaredField("fs");
    field.setAccessible(true);
    field.set(store, fs);
    // Create two fixture files for the listing to discover.
    File f1 = new File(cp + "/resource/resource/e1.json");
    File f2 = new File(cp + "/resource/resource/e2.json");
    if (!f1.getParentFile().exists()) {
        if (!f1.getParentFile().mkdirs()) {
            throw new RuntimeException("Can not create dir.");
        }
    }
    if (!(f1.createNewFile() && f2.createNewFile())) {
        throw new RuntimeException("Can not create file.");
    }
    Path p = new Path(cp);
    // List everything under <root>/resource with the "/resource/" prefix applied.
    TreeSet<String> resources = store.getAllFilePath(new Path(p, "resource"), "/resource/");
    TreeSet<String> expected = new TreeSet<>();
    expected.add("/resource/resource/e1.json");
    expected.add("/resource/resource/e2.json");
    Assert.assertEquals(expected, resources);
}
}
public static void testGridTable(double hitRatio, double indexRatio) throws IOException { logger.info("Testing grid table scanning, hit ratio {}, index ratio {}", hitRatio, indexRatio); StorageURL hbaseUrl = StorageURL.valueOf("default@hbase"); // use hbase-site.xml on classpath Connection conn = HBaseConnection.get(hbaseUrl); createHTableIfNeeded(conn, TEST_TABLE); prepareData(conn); Hits hits = new Hits(N_ROWS, hitRatio, indexRatio); for (int i = 0; i < ROUND; i++) { logger.info("==================================== ROUND {} ========================================", (i + 1)); testRowScanWithIndex(conn, hits.getHitsForRowScanWithIndex()); testRowScanNoIndexFullScan(conn, hits.getHitsForRowScanNoIndex()); testRowScanNoIndexSkipScan(conn, hits.getHitsForRowScanNoIndex()); testColumnScan(conn, hits.getHitsForColumnScan()); } }
public StorageURL getStorageUrl() { String url = getOptional("kylin.storage.url", "default@hbase"); // for backward compatibility if ("hbase".equals(url)) url = "default@hbase"; return StorageURL.valueOf(url); }
/** Returns the configured metadata URL, defaulting to "kylin_metadata@hbase". */
public StorageURL getMetadataUrl() {
    String url = getOptional("kylin.metadata.url", "kylin_metadata@hbase");
    return StorageURL.valueOf(url);
}
/**
 * Deletes the HDFS metadata directory referenced by the given metadata URL.
 *
 * @param metaUrl storage URL whose "path" parameter points at the HDFS metadata directory
 * @throws IOException if the HDFS delete fails
 * @throws IllegalArgumentException if the URL carries no "path" parameter
 */
public static void deleteHDFSMeta(String metaUrl) throws IOException {
    String realHdfsPath = StorageURL.valueOf(metaUrl).getParameter("path");
    // Fail fast with a clear message instead of an NPE inside the filesystem layer.
    if (realHdfsPath == null) {
        throw new IllegalArgumentException("Missing 'path' parameter in meta url: " + metaUrl);
    }
    HadoopUtil.getFileSystem(realHdfsPath).delete(new Path(realHdfsPath), true);
    // Parameterized logging (SLF4J idiom) instead of string concatenation.
    logger.info("Delete metadata in HDFS for this job: {}", realHdfsPath);
}
/**
 * Loads a KylinConfig from the kylin.properties file stored under the HDFS path
 * encoded in the given metadata URL, caching the result per URI.
 *
 * @param uri metadata URL; must contain "@hdfs" and carry a "path" parameter
 * @return the loaded (possibly cached) KylinConfig
 * @throws IllegalArgumentException if uri is null or is not an @hdfs URL
 * @throws RuntimeException if reading the properties file from HDFS fails
 */
public static KylinConfig loadKylinConfigFromHdfs(String uri) {
    if (uri == null)
        throw new IllegalArgumentException("meta url should not be null");
    if (!uri.contains("@hdfs"))
        throw new IllegalArgumentException("meta url should like @hdfs schema");

    // Single cache lookup instead of the original check-then-get double read.
    KylinConfig cached = kylinConfigCache.get(uri);
    if (cached != null) {
        logger.info("KylinConfig cached for : {}", uri);
        return cached;
    }

    logger.info("Ready to load KylinConfig from uri: {}", uri);
    String realHdfsPath = StorageURL.valueOf(uri).getParameter("path") + "/"
            + KylinConfig.KYLIN_CONF_PROPERTIES_FILE;
    KylinConfig config;
    try {
        FileSystem fs = HadoopUtil.getFileSystem(realHdfsPath);
        // try-with-resources: the HDFS stream was previously never closed (leak).
        try (InputStream is = fs.open(new Path(realHdfsPath))) {
            Properties prop = KylinConfig.streamToProps(is);
            config = KylinConfig.createKylinConfig(prop);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    kylinConfigCache.put(uri, config);
    return config;
}
public static void testGridTable(double hitRatio, double indexRatio) throws IOException { logger.info("Testing grid table scanning, hit ratio {}, index ratio {}", hitRatio, indexRatio); StorageURL hbaseUrl = StorageURL.valueOf("default@hbase"); // use hbase-site.xml on classpath Connection conn = HBaseConnection.get(hbaseUrl); createHTableIfNeeded(conn, TEST_TABLE); prepareData(conn); Hits hits = new Hits(N_ROWS, hitRatio, indexRatio); for (int i = 0; i < ROUND; i++) { logger.info("==================================== ROUND {} ========================================", (i + 1)); testRowScanWithIndex(conn, hits.getHitsForRowScanWithIndex()); testRowScanNoIndexFullScan(conn, hits.getHitsForRowScanNoIndex()); testRowScanNoIndexSkipScan(conn, hits.getHitsForRowScanNoIndex()); testColumnScan(conn, hits.getHitsForColumnScan()); } }