public DataMapSchema getDataMapSchema() {
  return dataMap.getDataMapSchema();
}
/**
 * Get the blocklet datamap factory to get the detail information of blocklets
 *
 * @param table the carbon table whose default blocklet datamap should be resolved
 * @return the BlockletDetailsFetcher backing the table's blocklet datamap
 */
private BlockletDetailsFetcher getBlockletDetailsFetcher(CarbonTable table) {
  TableDataMap blockletMap = getDataMap(table, BlockletDataMapFactory.DATA_MAP_SCHEMA);
  return (BlockletDetailsFetcher) blockletMap.getDataMapFactory();
}
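// Illustrative caller (not from the original source): resolve the fetcher for a table and
// expand pruned Blocklets into ExtendedBlocklets for a segment. The getExtendedBlocklets
// signature matches its use in pruneWithFilter below; "table", "pruneBlocklets" and
// "segment" are assumed to be in scope.
BlockletDetailsFetcher fetcher = getBlockletDetailsFetcher(table);
List<ExtendedBlocklet> detailedBlocklets =
    fetcher.getExtendedBlocklets(pruneBlocklets, segment);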
public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
    List<PartitionSpec> partitionsToPrune) throws IOException {
  List<DataMap> dataMaps = dataMap.getTableDataMaps(distributable);
  return dataMap.prune(dataMaps, distributable, expression, partitionsToPrune);
}
/**
 * This method clears the datamaps of a table from memory.
 */
public void clearDataMaps(String tableUniqName) {
  List<TableDataMap> tableIndices = allDataMaps.get(tableUniqName);
  if (tableIndices != null) {
    for (TableDataMap tableDataMap : tableIndices) {
      if (tableDataMap != null) {
        // Also clear the segmentMap in the BlockletDetailsFetcher; otherwise the Segment
        // stays cached on the executor and queries can fail when checking whether a
        // blocklet is contained in the index.
        tableDataMap.getBlockletDetailsFetcher().clear();
        tableDataMap.clear();
      }
    }
  }
  allDataMaps.remove(tableUniqName);
  tablePathMap.remove(tableUniqName);
}
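// Hypothetical call site: drop all cached datamaps of a table, e.g. on executors before a
// rebuild. The same invocation appears verbatim in registerAllWriter and initialize below.
DataMapStoreManager.getInstance()
    .clearDataMaps(carbonTable.getCarbonTableIdentifier().getTableUniqueName());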
public DataMapChooser(CarbonTable carbonTable) throws IOException {
  this.carbonTable = carbonTable;
  // read all datamaps of this table and populate the CG and FG datamap lists
  List<TableDataMap> visibleDataMaps =
      DataMapStoreManager.getInstance().getAllVisibleDataMap(carbonTable);
  Map<String, DataMapStatusDetail> map = DataMapStatusManager.readDataMapStatusMap();
  cgDataMaps = new ArrayList<>(visibleDataMaps.size());
  fgDataMaps = new ArrayList<>(visibleDataMaps.size());
  for (TableDataMap visibleDataMap : visibleDataMaps) {
    DataMapStatusDetail status = map.get(visibleDataMap.getDataMapSchema().getDataMapName());
    if (status != null && status.isEnabled()) {
      DataMapLevel level = visibleDataMap.getDataMapFactory().getDataMapLevel();
      if (level == DataMapLevel.CG) {
        cgDataMaps.add(visibleDataMap);
      } else {
        fgDataMaps.add(visibleDataMap);
      }
    }
  }
}
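// Illustrative usage sketch: after construction the CG and FG lists hold the enabled,
// visible datamaps. The chooseCGDataMap selector and the in-scope "filterResolver" are
// assumptions for illustration, not confirmed by the snippet above.
DataMapChooser chooser = new DataMapChooser(carbonTable);
DataMapExprWrapper cgWrapper = chooser.chooseCGDataMap(filterResolver);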
/**
 * Clear the datamap/datamaps of a table from memory and disk.
 *
 * @param identifier Table identifier
 * @param dataMapName name of the datamap to clear
 */
public void clearDataMap(AbsoluteTableIdentifier identifier, String dataMapName) {
  CarbonTable carbonTable = getCarbonTable(identifier);
  String tableUniqueName = identifier.getCarbonTableIdentifier().getTableUniqueName();
  List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
  if (tableIndices != null) {
    int i = 0;
    for (TableDataMap tableDataMap : tableIndices) {
      if (carbonTable != null && tableDataMap != null && dataMapName
          .equalsIgnoreCase(tableDataMap.getDataMapSchema().getDataMapName())) {
        try {
          DataMapUtil.executeDataMapJobForClearingDataMaps(carbonTable);
          tableDataMap.clear();
        } catch (IOException e) {
          LOGGER.error("clear dataMap job failed", e);
          // ignoring the exception
        }
        tableDataMap.deleteDatamapData();
        tableIndices.remove(i);
        break;
      }
      i++;
    }
    allDataMaps.put(tableUniqueName, tableIndices);
  }
}
try {
  for (TableDataMap dataMap : DataMapStoreManager.getInstance().getAllDataMap(carbonTable)) {
    if (dataMap.getDataMapSchema().isIndexDataMap()) {
      indexDataMaps.add(dataMap);
      segments.clear();
      segments.add(new Segment(oneLoad.getLoadName()));
      dataMap.deleteDatamapData(segments);
    }
  }
} catch (IOException e) {
  // exception handling elided in the original fragment
}
@Override
public List<ExtendedBlocklet> prune(List<Segment> segments,
    List<PartitionSpec> partitionsToPrune) throws IOException {
  return dataMap.prune(segments, expression, partitionsToPrune);
}
    List<ExtendedBlocklet> blocklets, final Map<Segment, List<DataMap>> dataMaps,
    int totalFiles) {
  int numOfThreadsForPruning = getNumOfThreadsForPruning();
  LOG.info("Number of threads selected for multi-thread block pruning is "
      + numOfThreadsForPruning);
  // ...
  blocklets.addAll(addSegmentId(
      blockletDetailsFetcher.getExtendedBlocklets(entry.getValue(), entry.getKey()),
      entry.getKey().toString()));
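// A minimal sketch of how getNumOfThreadsForPruning() might resolve the thread count from a
// carbon property; the property key and default value below are assumptions for
// illustration, not the verbatim implementation.
private int getNumOfThreadsForPruning() {
  int numOfThreads = Integer.parseInt(CarbonProperties.getInstance()
      .getProperty("carbon.max.driver.threads.for.block.pruning", "4"));
  // guard against a non-positive configured value
  return numOfThreads <= 0 ? 4 : numOfThreads;
}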
private List<ExtendedBlocklet> pruneWithFilter(List<Segment> segments,
    FilterResolverIntf filterExp, List<PartitionSpec> partitions,
    List<ExtendedBlocklet> blocklets, Map<Segment, List<DataMap>> dataMaps)
    throws IOException {
  for (Segment segment : segments) {
    List<Blocklet> pruneBlocklets = new ArrayList<>();
    SegmentProperties segmentProperties =
        segmentPropertiesFetcher.getSegmentProperties(segment);
    for (DataMap dataMap : dataMaps.get(segment)) {
      pruneBlocklets.addAll(dataMap.prune(filterExp, segmentProperties, partitions));
    }
    blocklets.addAll(
        addSegmentId(blockletDetailsFetcher.getExtendedBlocklets(pruneBlocklets, segment),
            segment.toString()));
  }
  return blocklets;
}
/**
 * Clear the invalid segments from all the datamaps of the table.
 *
 * @param carbonTable table whose datamaps should be cleaned
 * @param segments invalid segments to evict
 */
public void clearInvalidSegments(CarbonTable carbonTable, List<Segment> segments)
    throws IOException {
  getDefaultDataMap(carbonTable).clear(segments);
  List<TableDataMap> allDataMap = getAllDataMap(carbonTable);
  for (TableDataMap dataMap : allDataMap) {
    dataMap.clear(segments);
  }
}
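// Hypothetical call site, mirroring the use in initialize below: purge entries for segments
// invalidated by compaction or update before pruning on the executor.
DataMapStoreManager.getInstance().clearInvalidSegments(carbonTable, invalidSegments);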
@Override
public boolean equals(Object o) {
  if (this == o) return true;
  if (o == null || getClass() != o.getClass()) return false;
  DataMapTuple that = (DataMapTuple) o;
  if (order != that.order) return false;
  return dataMap != null ? dataMap.equals(that.dataMap) : that.dataMap == null;
}
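// Companion hashCode consistent with the equals above: a standard sketch hashing the same
// fields (dataMap, order) so that equal tuples share a hash code.
@Override
public int hashCode() {
  int result = dataMap != null ? dataMap.hashCode() : 0;
  result = 31 * result + order;
  return result;
}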
TableDataMap dataMap = new TableDataMap(table.getAbsoluteTableIdentifier(), dataMapSchema,
    dataMapFactory, blockletDetailsFetcher, segmentPropertiesFetcher);
/**
 * Register all datamap writers for the specified table and segment.
 */
public void registerAllWriter(CarbonTable carbonTable, String segmentId, String taskNo,
    SegmentProperties segmentProperties) {
  // clear the cache on the executor side
  DataMapStoreManager.getInstance()
      .clearDataMaps(carbonTable.getCarbonTableIdentifier().getTableUniqueName());
  List<TableDataMap> tableIndices;
  try {
    tableIndices = DataMapStoreManager.getInstance().getAllDataMap(carbonTable);
  } catch (IOException e) {
    LOG.error("Error while retrieving datamaps", e);
    throw new RuntimeException(e);
  }
  if (tableIndices != null) {
    tblIdentifier = carbonTable.getCarbonTableIdentifier();
    for (TableDataMap tableDataMap : tableIndices) {
      // register only non-lazy datamaps; for a lazy datamap the user
      // rebuilds the datamap manually
      if (!tableDataMap.getDataMapSchema().isLazy()) {
        DataMapFactory factory = tableDataMap.getDataMapFactory();
        register(factory, segmentId, taskNo, segmentProperties);
      }
    }
  }
}
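// Illustrative call from a data-load flow ("listener" as an instance of the enclosing
// writer-listener class is an assumption): register writers for all non-lazy datamaps
// before writing data for segment "0", task "0".
listener.registerAllWriter(carbonTable, "0", "0", segmentProperties);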
private List<ExtendedBlocklet> pruneWithoutFilter(List<Segment> segments,
    List<PartitionSpec> partitions, List<ExtendedBlocklet> blocklets) throws IOException {
  for (Segment segment : segments) {
    List<Blocklet> allBlocklets = blockletDetailsFetcher.getAllBlocklets(segment, partitions);
    blocklets.addAll(
        addSegmentId(blockletDetailsFetcher.getExtendedBlocklets(allBlocklets, segment),
            segment.toString()));
  }
  return blocklets;
}
/**
 * After updating the table status file, clear the dataMap cache for all segmentIds on which
 * a dataMap has been created. Flows like merge-index-file creation modify the segment file,
 * and once the segment file is modified, the cache for that segment must be cleared;
 * otherwise the stale old cache would be used.
 *
 * @param carbonTable table whose cache should be cleared
 * @param segmentId segment whose cache entry is stale
 */
public static void clearBlockDataMapCache(CarbonTable carbonTable, String segmentId) {
  TableDataMap defaultDataMap =
      DataMapStoreManager.getInstance().getDefaultDataMap(carbonTable);
  Segment segment = new Segment(segmentId);
  List<Segment> segments = new ArrayList<>();
  segments.add(segment);
  LOGGER.info("clearing cache while updating segment file entry in table status file"
      + " for segmentId: " + segmentId);
  defaultDataMap.clear(segments);
}
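// Hypothetical call site: after a merge-index operation rewrites the segment file, evict
// the stale block datamap cache entry for that segment (static call within the same class).
clearBlockDataMapCache(carbonTable, segmentId);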
private TableDataMap getTableDataMap(String dataMapName, List<TableDataMap> tableIndices) {
  TableDataMap dataMap = null;
  for (TableDataMap tableDataMap : tableIndices) {
    if (tableDataMap.getDataMapSchema().getDataMapName().equals(dataMapName)) {
      dataMap = tableDataMap;
      break;
    }
  }
  return dataMap;
}
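// Illustrative lookup ("bloom_dm" is a made-up datamap name): a null result means no
// datamap with that name is present in the table's cached indices.
TableDataMap bloomDataMap = getTableDataMap("bloom_dm", tableIndices);
if (bloomDataMap == null) {
  // fall back to loading or creating the datamap
}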
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
    throws IOException, InterruptedException {
  distributable = (DataMapDistributableWrapper) inputSplit;
  // clear the segmentMap and the executor cache when there are invalid segments
  if (invalidSegments.size() > 0) {
    DataMapStoreManager.getInstance().clearInvalidSegments(table, invalidSegments);
  }
  TableDataMap tableDataMap = DataMapStoreManager.getInstance()
      .getDataMap(table, distributable.getDistributable().getDataMapSchema());
  if (isJobToClearDataMaps) {
    // if the job is only meant to clear datamaps, clear them from the cache and return
    DataMapStoreManager.getInstance()
        .clearDataMaps(table.getCarbonTableIdentifier().getTableUniqueName());
    // clear the segment properties cache on the executor
    SegmentPropertiesAndSchemaHolder.getInstance()
        .invalidate(table.getAbsoluteTableIdentifier());
    blockletIterator = Collections.emptyIterator();
    return;
  }
  dataMaps = tableDataMap.getTableDataMaps(distributable.getDistributable());
  List<ExtendedBlocklet> blocklets = tableDataMap.prune(dataMaps,
      distributable.getDistributable(),
      dataMapExprWrapper.getFilterResolverIntf(distributable.getUniqueId()), partitions);
  for (ExtendedBlocklet blocklet : blocklets) {
    blocklet.setDataMapUniqueId(distributable.getUniqueId());
  }
  blockletIterator = blocklets.iterator();
}
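// Sketch (not the verbatim implementation) of how the iterator prepared above is typically
// consumed by the rest of this RecordReader; the "currentBlocklet" field is an assumption.
@Override
public boolean nextKeyValue() {
  // advance to the next pruned blocklet, if any
  if (blockletIterator.hasNext()) {
    currentBlocklet = blockletIterator.next();
    return true;
  }
  return false;
}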
@Override
public DataMapLevel getDataMapLevel() {
  return dataMap.getDataMapFactory().getDataMapLevel();
}