/**
 * Builds N-dimensional cuboid rows for the given segment.
 *
 * @param cubeSegment           segment whose row-key layout drives splitting/encoding
 * @param rowKeyEncoderProvider supplies row-key encoders for target cuboids
 */
public NDCuboidBuilder(CubeSegment cubeSegment, RowKeyEncoderProvider rowKeyEncoderProvider) {
    this.cubeSegment = cubeSegment;
    this.rowKeySplitter = new RowKeySplitter(cubeSegment);
    this.rowKeyEncoderProvider = rowKeyEncoderProvider;
}
/**
 * Decodes encoded row keys of the given segment back into dimension value strings.
 *
 * @param cubeSegment segment supplying the cube descriptor and dimension encodings
 */
public RowKeyDecoder(CubeSegment cubeSegment) {
    this.cubeDesc = cubeSegment.getCubeDesc();
    this.colIO = new RowKeyColumnIO(cubeSegment.getDimensionEncodingMap());
    this.rowKeySplitter = new RowKeySplitter(cubeSegment);
    this.values = new ArrayList<>();
}
/**
 * Mapper setup: reads cube name, segment id and cuboid mode from the job
 * configuration, loads Kylin metadata, and prepares the cuboid scheduler,
 * N-D cuboid builder and row-key splitter for this segment.
 *
 * @throws IOException if the Kylin metadata cannot be loaded
 */
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    segmentID = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_ID);
    String cuboidModeName = context.getConfiguration().get(BatchConstants.CFG_CUBOID_MODE);
    // Config must be loaded before any CubeManager access.
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
    CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    cubeSegment = cube.getSegmentById(segmentID);
    ndCuboidBuilder = new NDCuboidBuilder(cubeSegment);
    // initialize CuboidScheduler according to the configured cuboid mode
    cuboidScheduler = CuboidSchedulerUtil.getCuboidSchedulerByMode(cubeSegment, cuboidModeName);
    rowKeySplitter = new RowKeySplitter(cubeSegment);
}
/**
 * Setup for the segment-optimization job: resolves the original segment behind
 * the segment being optimized (row keys are split with the original layout)
 * and caches the base cuboid id plus the recommended cuboid set.
 *
 * @throws IOException if the Kylin metadata cannot be loaded
 */
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    mos = new MultipleOutputs(context);

    String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    String segmentID = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_ID);

    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata();
    CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    CubeSegment optSegment = cube.getSegmentById(segmentID);
    // Incoming row keys were written with the original segment's layout.
    CubeSegment originalSegment = cube.getOriginalSegmentToOptimize(optSegment);
    rowKeySplitter = new RowKeySplitter(originalSegment);

    baseCuboid = cube.getCuboidScheduler().getBaseCuboidId();
    recommendCuboids = cube.getCuboidsRecommend();
    Preconditions.checkNotNull(recommendCuboids, "The recommend cuboid map could not be null");
}
/**
 * Setup for the optimize ND-cuboid step: row keys are split using the layout
 * of the segment the optimization started from, while new row keys are
 * encoded for the segment being built.
 *
 * @throws IOException if the Kylin metadata cannot be loaded
 */
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    mos = new MultipleOutputs(context);

    String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    String segmentID = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_ID);

    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata();
    CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    CubeSegment newSegment = cube.getSegmentById(segmentID);
    CubeSegment originalSegment = cube.getOriginalSegmentToOptimize(newSegment);

    cubeDesc = cube.getDescriptor();
    baseCuboid = cube.getCuboidScheduler().getBaseCuboidId();
    // Split with the source segment's layout; encode for the new segment.
    rowKeySplitter = new RowKeySplitter(originalSegment);
    rowKeyEncoderProvider = new RowKeyEncoderProvider(newSegment);
}
// NOTE(review): fragment of a merge-job setup — the enclosing method begins
// before this chunk, so only the field initialization is visible here.
// Row keys are split with the merging (source) segment's layout and re-encoded
// for the merged (target) segment — presumably because dictionaries differ
// between the two; confirm against the enclosing mapper.
newKeyBuf = ByteArray.allocate(RowConstants.ROWKEY_BUFFER_SIZE);
rowKeySplitter = new RowKeySplitter(mergingSeg);
rowKeyEncoderProvider = new RowKeyEncoderProvider(mergedSeg);
/**
 * Initializes cube metadata and row-key helpers from the Kylin config stored
 * on HDFS. The thread-local config is set only for the duration of the lookup.
 */
public void init() {
    KylinConfig config = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset =
            KylinConfig.setAndUnsetThreadLocalConfig(config)) {
        CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
        this.cubeSegment = cube.getSegmentById(segmentId);
        this.cubeDesc = cube.getDescriptor();
        this.rowKeySplitter = new RowKeySplitter(cubeSegment);
        this.ndCuboidBuilder = new NDCuboidBuilder(cubeSegment, new RowKeyEncoderProvider(cubeSegment));
    }
}
@Test public void testWithSlr() throws Exception { //has shard CubeInstance cube = CubeManager.getInstance(getTestConfig()).getCube("TEST_KYLIN_CUBE_WITH_SLR_READY"); RowKeySplitter rowKeySplitter = new RowKeySplitter(cube.getFirstSegment(), 11, 20); // base cuboid rowkey byte[] input = { 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, 0, -104, -106, -128, 11, 54, -105, 55, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 }; rowKeySplitter.split(input); assertEquals(11, rowKeySplitter.getBufferSize()); }
@Test public void testWithoutSlr() throws Exception { //no shard CubeInstance cube = CubeManager.getInstance(getTestConfig()).getCube("TEST_KYLIN_CUBE_WITHOUT_SLR_READY"); RowKeySplitter rowKeySplitter = new RowKeySplitter(cube.getFirstSegment(), 11, 20); // base cuboid rowkey byte[] input = { 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 }; rowKeySplitter.split(input); assertEquals(10, rowKeySplitter.getBufferSize()); } }
/**
 * Decodes encoded row keys of the given segment back into dimension value strings.
 *
 * @param cubeSegment segment supplying the cube descriptor and column encodings
 */
public RowKeyDecoder(CubeSegment cubeSegment) {
    this.cubeDesc = cubeSegment.getCubeDesc();
    // NOTE(review): 65/255 look like splitter capacity limits (parts / buffer
    // length) — confirm against the RowKeySplitter constructor.
    this.rowKeySplitter = new RowKeySplitter(cubeSegment, 65, 255);
    this.colIO = new RowKeyColumnIO(cubeSegment);
    this.values = new ArrayList<>();
}
/**
 * Mapper setup: resolves the NEW segment by name from the job configuration
 * and prepares the cuboid scheduler and row-key splitter.
 *
 * @throws IOException if the Kylin metadata cannot be loaded
 */
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    segmentName = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_NAME).toUpperCase();
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
    CubeSegment cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.NEW);
    cubeDesc = cube.getDescriptor();
    // initialize CuboidScheduler
    cuboidScheduler = new CuboidScheduler(cubeDesc);
    // NOTE(review): buffer length 256 here, but other call sites in this code
    // base pass 255 — confirm which is intended.
    rowKeySplitter = new RowKeySplitter(cubeSegment, 65, 256);
}
/**
 * Merge-mapper setup: resolves the merged (target) segment by name, then
 * inspects the input split's file path to find which SOURCE segment this
 * mapper is reading, and builds the row-key splitter for that source segment.
 *
 * @throws IOException          if the Kylin metadata cannot be loaded
 * @throws InterruptedException declared by the Mapper contract
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
    super.publishConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    segmentName = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_NAME).toUpperCase();
    config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    cubeManager = CubeManager.getInstance(config);
    cube = cubeManager.getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    mergedCubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.NEW);
    newKeyBuf = new byte[256];// size will auto-grow
    // decide which source segment
    InputSplit inputSplit = context.getInputSplit();
    String filePath = ((FileSplit) inputSplit).getPath().toString();
    // NOTE(review): System.out debug prints below — consider a logger instead.
    System.out.println("filePath:" + filePath);
    // The source segment is identified by the job id embedded in the path.
    String jobID = extractJobIDFromPath(filePath);
    System.out.println("jobID:" + jobID);
    sourceCubeSegment = findSegmentWithUuid(jobID, cube);
    System.out.println(sourceCubeSegment);
    this.rowKeySplitter = new RowKeySplitter(sourceCubeSegment, 65, 255);
}
/**
 * Constructs a builder for N-dimensional cuboid rows.
 *
 * @param cubeSegment           segment whose row-key layout is used for splitting
 * @param rowKeyEncoderProvider source of row-key encoders for output cuboids
 */
public NDCuboidBuilder(CubeSegment cubeSegment, RowKeyEncoderProvider rowKeyEncoderProvider) {
    this.rowKeyEncoderProvider = rowKeyEncoderProvider;
    this.cubeSegment = cubeSegment;
    this.rowKeySplitter = new RowKeySplitter(cubeSegment);
}
@Test public void testWithoutSlr() throws Exception { CubeInstance cube = CubeManager.getInstance(getTestConfig()).getCube("TEST_KYLIN_CUBE_WITHOUT_SLR_READY"); RowKeySplitter rowKeySplitter = new RowKeySplitter(cube.getFirstSegment(), 10, 20); // base cuboid rowkey byte[] input = { 0, 0, 0, 0, 0, 0, 0, -1, 11, 55, -13, 13, 22, 34, 121, 70, 80, 45, 71, 84, 67, 9, 9, 9, 9, 9, 9, 0, 10, 5 }; rowKeySplitter.split(input, input.length); assertEquals(9, rowKeySplitter.getBufferSize()); } }
@Test public void testWithSlr() throws Exception { CubeInstance cube = CubeManager.getInstance(getTestConfig()).getCube("TEST_KYLIN_CUBE_WITH_SLR_READY"); RowKeySplitter rowKeySplitter = new RowKeySplitter(cube.getFirstSegment(), 10, 20); // base cuboid rowkey byte[] input = { 0, 0, 0, 0, 0, 0, 1, -1, 49, 48, 48, 48, 48, 48, 48, 48, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 11, 54, -105, 55, 13, 71, 114, 65, 66, 73, 78, 9, 9, 9, 9, 9, 9, 9, 9, 0, 10, 0 }; rowKeySplitter.split(input, input.length); assertEquals(10, rowKeySplitter.getBufferSize()); }
/**
 * Builds a decoder that turns encoded row keys of the given segment into
 * dimension value strings.
 *
 * @param cubeSegment segment supplying the cube descriptor and column encodings
 */
public RowKeyDecoder(CubeSegment cubeSegment) {
    this.cubeDesc = cubeSegment.getCubeDesc();
    this.colIO = new RowKeyColumnIO(cubeSegment);
    // NOTE(review): 65/255 appear to be splitter capacity limits — confirm
    // against the RowKeySplitter constructor.
    this.rowKeySplitter = new RowKeySplitter(cubeSegment, 65, 255);
    this.values = new ArrayList<>();
}
/**
 * Builds a decoder that turns encoded row keys of the given segment into
 * dimension value strings, using the segment's dimension encoding map.
 *
 * @param cubeSegment segment supplying the cube descriptor and encodings
 */
public RowKeyDecoder(CubeSegment cubeSegment) {
    this.values = new ArrayList<>();
    this.cubeDesc = cubeSegment.getCubeDesc();
    this.rowKeySplitter = new RowKeySplitter(cubeSegment);
    this.colIO = new RowKeyColumnIO(cubeSegment.getDimensionEncodingMap());
}
/**
 * Mapper setup: resolves the NEW segment by name from the job configuration
 * and prepares the cuboid scheduler and row-key splitter.
 *
 * @throws IOException if the Kylin metadata cannot be loaded
 */
@Override
protected void setup(Context context) throws IOException {
    super.publishConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME).toUpperCase();
    segmentName = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_NAME).toUpperCase();
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata(context.getConfiguration());
    CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
    CubeSegment cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.NEW);
    cubeDesc = cube.getDescriptor();
    // initialize CuboidScheduler
    cuboidScheduler = new CuboidScheduler(cubeDesc);
    // NOTE(review): buffer length 256 here, but sibling call sites pass 255 —
    // confirm which is intended.
    rowKeySplitter = new RowKeySplitter(cubeSegment, 65, 256);
}
/**
 * Mapper setup: reads cube name, segment id and cuboid mode from the job
 * configuration, loads Kylin metadata, and prepares the cuboid scheduler,
 * N-D cuboid builder and row-key splitter for this segment.
 *
 * @throws IOException if the Kylin metadata cannot be loaded
 */
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    segmentID = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_ID);
    String cuboidModeName = context.getConfiguration().get(BatchConstants.CFG_CUBOID_MODE);
    // Config must be loaded before any CubeManager access.
    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();
    CubeInstance cube = CubeManager.getInstance(config).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    cubeSegment = cube.getSegmentById(segmentID);
    ndCuboidBuilder = new NDCuboidBuilder(cubeSegment);
    // initialize CuboidScheduler according to the configured cuboid mode
    cuboidScheduler = CuboidSchedulerUtil.getCuboidSchedulerByMode(cubeSegment, cuboidModeName);
    rowKeySplitter = new RowKeySplitter(cubeSegment);
}
/**
 * Loads the Kylin config snapshot from HDFS and initializes cube metadata,
 * the N-D cuboid builder and the row-key splitter for this segment. The
 * thread-local config is active only inside the try-with-resources scope.
 */
public void init() {
    KylinConfig hdfsConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset =
            KylinConfig.setAndUnsetThreadLocalConfig(hdfsConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(hdfsConfig).getCube(cubeName);
        this.cubeSegment = cubeInstance.getSegmentById(segmentId);
        this.cubeDesc = cubeInstance.getDescriptor();
        RowKeyEncoderProvider encoderProvider = new RowKeyEncoderProvider(cubeSegment);
        this.ndCuboidBuilder = new NDCuboidBuilder(cubeSegment, encoderProvider);
        this.rowKeySplitter = new RowKeySplitter(cubeSegment);
    }
}