/**
 * Resolves an inverted-index instance by name from the environment-wide metadata.
 *
 * @param iiName name of the inverted index to look up
 * @return the matching {@link IIInstance}, never {@code null}
 * @throws IllegalArgumentException if no inverted index with that name exists
 */
private IIInstance getII(String iiName) {
    IIManager manager = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
    IIInstance instance = manager.getII(iiName);
    if (instance == null) {
        throw new IllegalArgumentException("No Inverted Index found by name " + iiName);
    }
    return instance;
}
/**
 * Initializes this component from the Hadoop job configuration: loads the Kylin
 * metadata bundled with the job, then resolves the NEW segment of the configured
 * inverted index and prepares a reusable table record for it.
 *
 * @param conf Hadoop configuration carrying {@code CFG_II_NAME} and
 *             {@code CFG_II_SEGMENT_NAME}
 */
@Override
public void setConf(Configuration conf) {
    this.conf = conf;
    try {
        KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(conf);
        IIManager mgr = IIManager.getInstance(config);
        IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
        IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
        this.info = new TableRecordInfo(seg);
        this.rec = this.info.createTableRecord();
    } catch (IOException e) {
        // Fix: the original rethrew with an empty message ("") which hides the
        // failure context in job logs; describe what failed and keep the cause.
        throw new RuntimeException("Failed to load Kylin metadata from job configuration", e);
    }
}
/**
 * Collects the HBase table names backing every READY cube segment and every
 * READY inverted-index segment known to the given Kylin configuration.
 *
 * @param config Kylin configuration whose metadata store is scanned
 * @return list of storage table identifiers (may contain duplicates if segments share tables)
 */
private static List<String> getHTableNames(KylinConfig config) {
    CubeManager cubeMgr = CubeManager.getInstance(config);

    // Program to the List interface; the original declared the concrete ArrayList.
    List<String> result = new ArrayList<String>();

    // READY cube segments.
    for (CubeInstance cube : cubeMgr.listAllCubes()) {
        for (CubeSegment seg : cube.getSegments(SegmentStatusEnum.READY)) {
            String tableName = seg.getStorageLocationIdentifier();
            // Fix: "isBlank(x) == false" anti-idiom replaced by a plain negation.
            if (!StringUtils.isBlank(tableName)) {
                result.add(tableName);
                System.out.println("added new table: " + tableName);
            }
        }
    }

    // READY inverted-index segments.
    for (IIInstance ii : IIManager.getInstance(config).listAllIIs()) {
        for (IISegment seg : ii.getSegments(SegmentStatusEnum.READY)) {
            String tableName = seg.getStorageLocationIdentifier();
            if (!StringUtils.isBlank(tableName)) {
                result.add(tableName);
                System.out.println("added new table: " + tableName);
            }
        }
    }

    return result;
}
}
/**
 * Mapper setup: loads the Kylin metadata shipped with the job, resolves the NEW
 * segment of the configured inverted index, and prepares the reusable output
 * key/value plus the HCatalog input schema.
 */
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());

    Configuration conf = context.getConfiguration();
    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata(conf);
    IIManager iiManager = IIManager.getInstance(kylinConfig);
    IIInstance iiInstance = iiManager.getII(conf.get(BatchConstants.CFG_II_NAME));
    IISegment segment = iiInstance.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);

    this.info = new TableRecordInfo(segment);
    this.rec = this.info.createTableRecord();

    // Reused across map() calls; the value wraps the record's backing byte array.
    outputKey = new LongWritable();
    outputValue = new ImmutableBytesWritable(rec.getBytes());

    schema = HCatInputFormat.getTableSchema(context.getConfiguration());
    fields = schema.getFields();
}
@Override public int run(String[] args) throws Exception { Options options = new Options(); try { options.addOption(OPTION_JOB_NAME); options.addOption(OPTION_TABLE_NAME); options.addOption(OPTION_II_NAME); options.addOption(OPTION_OUTPUT_PATH); parseOptions(options, args); job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME)); String tableName = getOptionValue(OPTION_TABLE_NAME).toUpperCase(); String iiName = getOptionValue(OPTION_II_NAME); Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH)); // ---------------------------------------------------------------------------- log.info("Starting: " + job.getJobName() + " on table " + tableName); IIManager iiMgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv()); IIInstance ii = iiMgr.getII(iiName); job.getConfiguration().set(BatchConstants.TABLE_NAME, tableName); job.getConfiguration().set(BatchConstants.TABLE_COLUMNS, getColumns(ii)); setJobClasspath(job); setupMapper(); setupReducer(output); return waitForCompletion(job); } catch (Exception e) { printUsage(options); throw e; } }
/**
 * Builds the inverted-index dictionaries for the first segment of the named II
 * from the distinct-column files at the given input path.
 *
 * @param args command-line arguments (II name, fact-columns input path)
 * @return 0 on success
 * @throws Exception rethrown after printing usage on any failure
 */
@Override
public int run(String[] args) throws Exception {
    Options options = new Options();
    try {
        options.addOption(OPTION_II_NAME);
        options.addOption(OPTION_INPUT_PATH);
        parseOptions(options, args);

        String indexName = getOptionValue(OPTION_II_NAME);
        String factColumnsInputPath = getOptionValue(OPTION_INPUT_PATH);

        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
        IIManager iiManager = IIManager.getInstance(kylinConfig);
        IIInstance iiInstance = iiManager.getII(indexName);
        iiManager.buildInvertedIndexDictionary(iiInstance.getFirstSegment(), factColumnsInputPath);
        return 0;
    } catch (Exception e) {
        printUsage(options);
        throw e;
    }
}
// NOTE(review): fragment of an enclosing method not visible here.
// Resolves the II instance by name, then loads its descriptor via the desc manager.
KylinConfig config = KylinConfig.getInstanceFromEnv();
IIInstance iiInstance = IIManager.getInstance(config).getII(iiname);
IIDesc iidesc = IIDescManager.getInstance(config).getIIDesc(iiInstance.getDescName());
/**
 * Bulk-loads previously generated HFiles into an HBase table, then records the
 * table on the II's first segment and marks that segment READY.
 *
 * @param args command-line arguments (input path, HTable name, II name)
 * @return the exit code of the LoadIncrementalHFiles tool
 * @throws Exception rethrown after printing usage on any failure
 */
@Override
public int run(String[] args) throws Exception {
    Options options = new Options();
    try {
        options.addOption(OPTION_INPUT_PATH);
        options.addOption(OPTION_HTABLE_NAME);
        options.addOption(OPTION_II_NAME);
        parseOptions(options, args);

        String tableName = getOptionValue(OPTION_HTABLE_NAME);
        String inputPath = getOptionValue(OPTION_INPUT_PATH);
        String indexName = getOptionValue(OPTION_II_NAME);

        // Open permissions on the column-family directory so the HBase region
        // servers can move the HFiles in during the bulk load.
        FileSystem fileSystem = FileSystem.get(getConf());
        FsPermission worldWritable = new FsPermission((short) 0777);
        fileSystem.setPermission(new Path(inputPath, IIDesc.HBASE_FAMILY), worldWritable);

        int hbaseExitCode = ToolRunner.run(new LoadIncrementalHFiles(getConf()), new String[] { inputPath, tableName });

        // Persist the storage location on the segment and flip it to READY.
        IIManager iiManager = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
        IIInstance iiInstance = iiManager.getII(indexName);
        IISegment segment = iiInstance.getFirstSegment();
        segment.setStorageLocationIdentifier(tableName);
        segment.setStatus(SegmentStatusEnum.READY);
        iiManager.updateII(iiInstance);

        return hbaseExitCode;
    } catch (Exception e) {
        printUsage(options);
        throw e;
    }
}
/**
 * Reducer setup: loads the Kylin metadata shipped with the job, resolves the NEW
 * segment of the configured inverted index, and prepares the record/codec state
 * used while building slices.
 */
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());

    Configuration conf = context.getConfiguration();
    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata(conf);
    IIInstance iiInstance = IIManager.getInstance(kylinConfig).getII(conf.get(BatchConstants.CFG_II_NAME));
    IISegment segment = iiInstance.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);

    info = new TableRecordInfo(segment);
    rec = info.createTableRecord();
    // Builder is created lazily on the first record; codec encodes slices to KVs.
    builder = null;
    kv = new IIKeyValueCodec(info.getDigest());
}
/**
 * Debug tool: decodes the inverted-index key-values stored at a sequence-file
 * path and dumps every record to stdout, followed by a record count.
 *
 * @param args args[0] = II name, args[1] = HDFS path to read
 * @throws IOException on read failure
 */
public static void main(String[] args) throws IOException {
    Configuration hconf = HadoopUtil.getCurrentConfiguration();
    IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
    String iiName = args[0];
    IIInstance ii = mgr.getII(iiName);
    String path = args[1];
    System.out.println("Reading from " + path + " ...");

    TableRecordInfo info = new TableRecordInfo(ii.getFirstSegment());
    IIKeyValueCodec codec = new IIKeyValueCodec(info.getDigest());
    int count = 0;
    for (Slice slice : codec.decodeKeyValue(readSequenceKVs(hconf, path))) {
        for (RawTableRecord rec : slice) {
            // Fix: the record text was passed to printf() as the FORMAT string,
            // so any literal '%' in the data would throw IllegalFormatException.
            // Print the text verbatim instead (printf added no newline either).
            System.out.print(new TableRecord(rec, info).toString());
            count++;
        }
    }
    System.out.println("Total " + count + " records");
}
// NOTE(review): method body continues past this view — only the prologue is visible.
// Presumably drops HBase tables no longer referenced by any cube or II segment; verify against the rest of the method.
private void cleanUnusedHBaseTables(Configuration conf) throws MasterNotRunningException, ZooKeeperConnectionException, IOException {
    CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
    IIManager iiManager = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
// NOTE(review): fragment of an enclosing method not visible here.
// Looks up the II by name and reads its shard count from the descriptor.
IIManager iiManager = IIManager.getInstance(config);
IIInstance ii = iiManager.getII(iiName);
int sharding = ii.getDescriptor().getSharding();
/**
 * Verifies that at least one II instance exists for the test descriptor and that
 * the manager can serve a dictionary for one of its columns.
 */
@Test
public void testGetIIsByDesc() throws IOException {
    IIManager mgr = IIManager.getInstance(getTestConfig());
    List<IIInstance> iiInstances = mgr.getIIsByDesc("test_kylin_ii_desc");
    Assert.assertTrue(iiInstances.size() > 0);

    IIInstance instance = iiInstances.get(0);
    // Fix: use a wildcard instead of the raw Dictionary type — the value type is
    // irrelevant to this null check and raw types defeat generic type checking.
    Dictionary<?> dict = mgr.getDictionary(instance.getFirstSegment(), instance.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_SITE_ID"));
    Assert.assertNotNull(dict);
}
/**
 * Loads the test metadata and builds record info from the first segment of the
 * shipped "test_kylin_ii" fixture.
 */
@Before
public void setup() throws IOException {
    this.createTestMetadata();
    this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    this.tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
}
/**
 * Loads the test metadata and builds record info from the first segment of the
 * shipped "test_kylin_ii" fixture.
 */
@Before
public void setUp() throws Exception {
    this.createTestMetadata();
    this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    this.info = new TableRecordInfo(ii.getFirstSegment());
}
/**
 * Loads the test metadata, resolves the "test_kylin_ii" fixture's first segment,
 * and opens an HBase connection using the storage URL of the environment config.
 */
@Before
public void setup() throws Exception {
    this.createTestMetadata();

    this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    this.seg = ii.getFirstSegment();

    // Connect to the HBase cluster named by the storage URL.
    String hbaseUrl = KylinConfig.getInstanceFromEnv().getStorageUrl();
    Configuration hbaseConf = HadoopUtil.newHBaseConfiguration(hbaseUrl);
    hconn = HConnectionManager.createConnection(hbaseConf);

    this.info = new TableRecordInfo(seg);
}
/**
 * Verifies that a newly created II instance is retrievable from the manager by name.
 */
@Test
public void testCreateIIInstance() throws IOException {
    IIDesc iiDesc = IIDescManager.getInstance(getTestConfig()).getIIDesc("test_kylin_ii_desc");
    IIInstance ii = IIInstance.create("new ii", "default", iiDesc);
    IIManager iiMgr = IIManager.getInstance(getTestConfig());
    // Fix: removed the unused local `allIIList` (result of listAllIIs() was never read).
    iiMgr.createII(ii);
    Assert.assertNotNull(iiMgr.getII("new ii"));
}
/**
 * Looks up an inverted-index instance by name.
 *
 * @param iiName name of the inverted index
 * @return the matching {@link IIInstance}, never {@code null}
 * @throws IllegalArgumentException if no inverted index with that name exists
 */
private IIInstance getII(String iiName) {
    IIManager iiManager = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
    IIInstance found = iiManager.getII(iiName);
    if (found == null) {
        throw new IllegalArgumentException("No Inverted Index found by name " + iiName);
    }
    return found;
}
/**
 * Test fixture: loads metadata for "test_kylin_ii", then builds a coprocessor
 * projector (grouping by LSTG_FORMAT_NAME), endpoint aggregators, a filter on
 * LSTG_SITE_ID = '0', an aggregation cache, and mock table data.
 */
@Before
public void setup() throws IOException {
    this.createTestMetadata();
    this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    this.tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
    factTableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc("DEFAULT.TEST_KYLIN_FACT");

    TblColRef formatNameCol = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
    TblColRef siteIdCol = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_SITE_ID");

    // Project (group by) LSTG_FORMAT_NAME only.
    Collection<TblColRef> groupByColumns = new HashSet<>();
    groupByColumns.add(formatNameCol);
    projector = CoprocessorProjector.makeForEndpoint(tableRecordInfo, groupByColumns);
    aggregators = EndpointAggregators.fromFunctions(tableRecordInfo, buildAggregations());

    // Filter: LSTG_SITE_ID = '0'.
    CompareTupleFilter siteIdEqualsZero = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
    siteIdEqualsZero.addChild(new ColumnTupleFilter(siteIdCol));
    siteIdEqualsZero.addChild(new ConstantTupleFilter("0"));
    filter = CoprocessorFilter.fromFilter(this.ii.getFirstSegment(), siteIdEqualsZero);

    aggCache = new EndpointAggregationCache(aggregators);
    tableData = mockTable();
}
@Before public void before() throws Exception { HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA); DeployUtil.initCliWorkDir(); // DeployUtil.deployMetadata(); DeployUtil.overrideJobJarLocations(); final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); jobService = ExecutableManager.getInstance(kylinConfig); scheduler = DefaultScheduler.getInstance(); scheduler.init(new JobEngineConfig(kylinConfig)); if (!scheduler.hasStarted()) { throw new RuntimeException("scheduler has not been started"); } iiManager = IIManager.getInstance(kylinConfig); jobEngineConfig = new JobEngineConfig(kylinConfig); for (String jobId : jobService.getAllJobIds()) { if(jobService.getJob(jobId) instanceof IIJob){ jobService.deleteJob(jobId); } } IIInstance ii = iiManager.getII(TEST_II_NAME); if (ii.getStatus() != RealizationStatusEnum.DISABLED) { ii.setStatus(RealizationStatusEnum.DISABLED); iiManager.updateII(ii); } }