/**
 * Returns the {@link TableRecordInfo} for this object, creating and caching it
 * on first use. Not thread-safe; callers are expected to use it single-threaded.
 */
private TableRecordInfo getTableRecordInfo() {
    if (tableRecordInfo == null) {
        tableRecordInfo = new TableRecordInfo(this);
    }
    return tableRecordInfo;
}
/**
 * Loads Kylin metadata for the II segment named in the Hadoop job configuration
 * and prepares a reusable table record.
 *
 * Fix: the original wrapped the IOException as {@code new RuntimeException("", e)}
 * — an empty message that hides what failed; a descriptive message is used instead
 * (the cause is still preserved).
 */
@Override
public void setConf(Configuration conf) {
    this.conf = conf;
    try {
        KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(conf);
        IIManager mgr = IIManager.getInstance(config);
        IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
        IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
        this.info = new TableRecordInfo(seg);
        this.rec = this.info.createTableRecord();
    } catch (IOException e) {
        throw new RuntimeException("Failed to load Kylin metadata for II '" + conf.get(BatchConstants.CFG_II_NAME) + "'", e);
    }
}
/**
 * Mapper setup: loads the II segment metadata named in the job configuration,
 * prepares the reusable record and output key/value, and reads the HCatalog
 * schema of the input table.
 */
@Override
protected void setup(Context context) throws IOException {
    Configuration hadoopConf = context.getConfiguration();
    super.publishConfiguration(hadoopConf);

    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata(hadoopConf);
    IIManager iiManager = IIManager.getInstance(kylinConfig);
    IIInstance iiInstance = iiManager.getII(hadoopConf.get(BatchConstants.CFG_II_NAME));
    IISegment segment = iiInstance.getSegment(hadoopConf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);

    this.info = new TableRecordInfo(segment);
    this.rec = this.info.createTableRecord();
    this.outputKey = new LongWritable();
    this.outputValue = new ImmutableBytesWritable(this.rec.getBytes());

    this.schema = HCatInputFormat.getTableSchema(hadoopConf);
    this.fields = this.schema.getFields();
}
/**
 * Reducer setup: resolves the NEW II segment from the job configuration and
 * initializes the record template and key/value codec; the slice builder is
 * created lazily elsewhere, so it starts out null.
 */
@Override
protected void setup(Context context) throws IOException {
    final Configuration jobConf = context.getConfiguration();
    super.publishConfiguration(jobConf);

    final KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata(jobConf);
    final IIManager manager = IIManager.getInstance(kylinConfig);
    final IIInstance instance = manager.getII(jobConf.get(BatchConstants.CFG_II_NAME));
    final IISegment segment = instance.getSegment(jobConf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);

    info = new TableRecordInfo(segment);
    rec = info.createTableRecord();
    builder = null;
    kv = new IIKeyValueCodec(info.getDigest());
}
/**
 * Dumps the II records stored in a sequence file to stdout.
 *
 * Usage: {@code <iiName> <path>}
 *
 * Fixes: the original printed each record via {@code System.out.printf(rec.toString())},
 * which treats record content as a format string — any '%' in the data throws
 * IllegalFormatException; replaced with {@code print}. Also validates the
 * argument count up front instead of failing with ArrayIndexOutOfBoundsException.
 */
public static void main(String[] args) throws IOException {
    if (args.length < 2) {
        System.out.println("Usage: <iiName> <path>");
        return;
    }
    Configuration hconf = HadoopUtil.getCurrentConfiguration();
    IIManager mgr = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
    String iiName = args[0];
    IIInstance ii = mgr.getII(iiName);
    String path = args[1];
    System.out.println("Reading from " + path + " ...");
    TableRecordInfo info = new TableRecordInfo(ii.getFirstSegment());
    IIKeyValueCodec codec = new IIKeyValueCodec(info.getDigest());
    int count = 0;
    for (Slice slice : codec.decodeKeyValue(readSequenceKVs(hconf, path))) {
        for (RawTableRecord rec : slice) {
            // print, not printf: record text must not be interpreted as a format string
            System.out.print(new TableRecord(rec, info).toString());
            count++;
        }
    }
    System.out.println("Total " + count + " records");
}
// Rebuild the record-info metadata from this object's segment.
// NOTE(review): fragment of a larger method not visible here — presumably
// this.seg has already been assigned by the enclosing code; verify at the call site.
this.tableRecordInfo = new TableRecordInfo(this.seg);
/**
 * Lazily builds the {@link TableRecordInfo} for this object and returns the
 * cached instance on subsequent calls. No synchronization is performed.
 */
private TableRecordInfo getTableRecordInfo() {
    if (tableRecordInfo != null) {
        return tableRecordInfo;
    }
    tableRecordInfo = new TableRecordInfo(this);
    return tableRecordInfo;
}
/**
 * Test fixture: loads the test metadata store and builds record-info from the
 * first segment of the "test_kylin_ii" instance.
 */
@Before
public void setUp() throws Exception {
    createTestMetadata();
    ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    info = new TableRecordInfo(ii.getFirstSegment());
}
/**
 * Test fixture: initializes test metadata and the record-info of the first
 * segment of the "test_kylin_ii" instance.
 */
@Before
public void setup() throws IOException {
    createTestMetadata();
    ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
}
/**
 * Test fixture: loads test metadata, resolves the first segment of
 * "test_kylin_ii", and opens an HBase connection to the configured storage URL.
 */
@Before
public void setup() throws Exception {
    createTestMetadata();
    ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    seg = ii.getFirstSegment();
    String hbaseUrl = KylinConfig.getInstanceFromEnv().getStorageUrl();
    Configuration hbaseConf = HadoopUtil.newHBaseConfiguration(hbaseUrl);
    hconn = HConnectionManager.createConnection(hbaseConf);
    info = new TableRecordInfo(seg);
}
/**
 * Endpoint-test fixture: loads test metadata and wires up the projector,
 * aggregators, a filter on LSTG_SITE_ID = "0", the aggregation cache, and a
 * mocked data table for the "test_kylin_ii" instance.
 */
@Before
public void setup() throws IOException {
    createTestMetadata();
    ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
    factTableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc("DEFAULT.TEST_KYLIN_FACT");

    TblColRef formatNameCol = ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
    TblColRef siteIdCol = ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_SITE_ID");

    // project on the format-name dimension only
    Collection<TblColRef> dimensions = new HashSet<>();
    dimensions.add(formatNameCol);
    projector = CoprocessorProjector.makeForEndpoint(tableRecordInfo, dimensions);
    aggregators = EndpointAggregators.fromFunctions(tableRecordInfo, buildAggregations());

    // filter: LSTG_SITE_ID == "0"
    CompareTupleFilter siteIdEqualsZero = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
    siteIdEqualsZero.addChild(new ColumnTupleFilter(siteIdCol));
    siteIdEqualsZero.addChild(new ConstantTupleFilter("0"));
    filter = CoprocessorFilter.fromFilter(ii.getFirstSegment(), siteIdEqualsZero);

    aggCache = new EndpointAggregationCache(aggregators);
    tableData = mockTable();
}
/**
 * Builds a Slice from a streaming batch: projects each message to its raw
 * column values (as a lazy Guava view), builds per-column dictionaries, and
 * delegates to {@code build}.
 */
public Slice buildSlice(StreamingBatch microStreamBatch) throws IOException {
    final Function<StreamingMessage, List<String>> toColumnValues = new Function<StreamingMessage, List<String>>() {
        @Nullable
        @Override
        public List<String> apply(@Nullable StreamingMessage input) {
            return input.getData();
        }
    };
    // lazy transform view — elements are projected on access, matching the original
    final List<List<String>> rows = Lists.transform(microStreamBatch.getMessages(), toColumnValues);
    final Dictionary<?>[] dicts = IIDictionaryBuilder.buildDictionary(rows, iiDesc);
    final TableRecordInfo recordInfo = new TableRecordInfo(iiDesc, dicts);
    return build(rows, recordInfo, dicts);
}
/**
 * Loads Kylin metadata for the NEW II segment named in the Hadoop job
 * configuration and prepares a reusable table record.
 *
 * Fix: the original rethrew the IOException as {@code new RuntimeException("", e)}
 * with an empty message; a descriptive message is supplied (cause preserved).
 */
@Override
public void setConf(Configuration conf) {
    this.conf = conf;
    try {
        KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(conf);
        IIManager mgr = IIManager.getInstance(config);
        IIInstance ii = mgr.getII(conf.get(BatchConstants.CFG_II_NAME));
        IISegment seg = ii.getSegment(conf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);
        this.info = new TableRecordInfo(seg);
        this.rec = this.info.createTableRecord();
    } catch (IOException e) {
        throw new RuntimeException("Failed to load Kylin metadata for II '" + conf.get(BatchConstants.CFG_II_NAME) + "'", e);
    }
}
/**
 * Mapper setup: resolves the NEW II segment from the job configuration,
 * prepares the reusable record plus output key/value pair, and fetches the
 * HCatalog schema of the input table.
 */
@Override
protected void setup(Context context) throws IOException {
    final Configuration jobConf = context.getConfiguration();
    super.publishConfiguration(jobConf);

    final KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata(jobConf);
    final IIManager manager = IIManager.getInstance(kylinConfig);
    final IIInstance instance = manager.getII(jobConf.get(BatchConstants.CFG_II_NAME));
    final IISegment segment = instance.getSegment(jobConf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);

    this.info = new TableRecordInfo(segment);
    this.rec = this.info.createTableRecord();
    this.outputKey = new LongWritable();
    this.outputValue = new ImmutableBytesWritable(this.rec.getBytes());

    this.schema = HCatInputFormat.getTableSchema(jobConf);
    this.fields = this.schema.getFields();
}
/**
 * Reducer setup: loads the NEW II segment named in the job configuration and
 * initializes the record template and the key/value codec; the slice builder
 * is deliberately left null until first use.
 */
@Override
protected void setup(Context context) throws IOException {
    Configuration hadoopConf = context.getConfiguration();
    super.publishConfiguration(hadoopConf);

    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata(hadoopConf);
    IIManager iiManager = IIManager.getInstance(kylinConfig);
    IIInstance iiInstance = iiManager.getII(hadoopConf.get(BatchConstants.CFG_II_NAME));
    IISegment segment = iiInstance.getSegment(hadoopConf.get(BatchConstants.CFG_II_SEGMENT_NAME), SegmentStatusEnum.NEW);

    info = new TableRecordInfo(segment);
    rec = info.createTableRecord();
    builder = null;
    kv = new IIKeyValueCodec(info.getDigest());
}