/**
 * Clears every dataMap cached for this table from the DataMapStoreManager.
 *
 * @throws IOException if clearing the cached dataMaps fails
 */
public void clearDataMaps() throws IOException {
  DataMapStoreManager.getInstance().clearDataMaps(absoluteTableIdentifier);
}
/**
 * Looks up the stored schema of the dataMap with the given name.
 *
 * @param dataMapName name of the dataMap whose schema is required
 * @return the persisted schema for that dataMap
 * @throws NoSuchDataMapException if no dataMap with that name exists
 * @throws IOException if the schema store cannot be read
 */
private static DataMapSchema getDataMapSchema(String dataMapName)
    throws IOException, NoSuchDataMapException {
  return DataMapStoreManager.getInstance().getDataMapSchema(dataMapName);
}
/**
 * Returns the default blocklet datamap of the table, wrapped together with the
 * given filter resolver. Used when no other datamap exists.
 *
 * @param carbonTable table whose default (blocklet) datamap is required
 * @param resolverIntf filter resolver to attach to the wrapper
 * @return wrapper over the table's default datamap
 */
public static DataMapExprWrapper getDefaultDataMap(CarbonTable carbonTable,
    FilterResolverIntf resolverIntf) {
  // Return the default datamap if no other datamap exists.
  return new DataMapExprWrapperImpl(
      DataMapStoreManager.getInstance().getDefaultDataMap(carbonTable), resolverIntf);
}
/**
 * Evicts the block dataMap cache entry of one segment after the table status file
 * has been updated. Flows like merge index file creation rewrite the segment file,
 * and once it changes any cached entry for that segment is stale and must be cleared.
 *
 * @param carbonTable table whose default dataMap cache is cleared
 * @param segmentId   id of the segment whose cache entry must be invalidated
 */
public static void clearBlockDataMapCache(CarbonTable carbonTable, String segmentId) {
  TableDataMap blockDataMap = DataMapStoreManager.getInstance().getDefaultDataMap(carbonTable);
  List<Segment> segmentsToClear = new ArrayList<>(1);
  segmentsToClear.add(new Segment(segmentId));
  LOGGER.info(
      "clearing cache while updating segment file entry in table status file for segmentId: "
          + segmentId);
  blockDataMap.clear(segmentsToClear);
}
/** * This method will disable all lazy (DEFERRED REBUILD) datamap in the given table */ public static void disableAllLazyDataMaps(CarbonTable table) throws IOException { List<DataMapSchema> allDataMapSchemas = DataMapStoreManager.getInstance().getDataMapSchemasOfTable(table); List<DataMapSchema> dataMapToBeDisabled = new ArrayList<>(allDataMapSchemas.size()); for (DataMapSchema dataMap : allDataMapSchemas) { // TODO all non datamaps like MV is now supports only lazy. Once the support is made the // following check can be removed. if (dataMap.isLazy() || !dataMap.isIndexDataMap()) { dataMapToBeDisabled.add(dataMap); } } storageProvider.updateDataMapStatus(dataMapToBeDisabled, DataMapStatus.DISABLED); }
/**
 * Return true if an MV datamap is present in the specified table.
 *
 * @param carbonTable table to check
 * @return true if any dataMap of the table uses the MV provider
 * @throws IOException if reading the dataMap schemas fails
 */
public static boolean hasMVDataMap(CarbonTable carbonTable) throws IOException {
  List<DataMapSchema> dataMapSchemaList = DataMapStoreManager.getInstance()
      .getDataMapSchemasOfTable(carbonTable);
  for (DataMapSchema dataMapSchema : dataMapSchemaList) {
    // provider name comparison is case-insensitive
    if (dataMapSchema.getProviderName().equalsIgnoreCase(MV.toString())) {
      return true;
    }
  }
  return false;
}
/**
 * Builds a wrapper covering every dataMap of the table, chained with AND, for
 * clearing purposes.
 *
 * @param carbonTable table whose dataMaps are collected
 * @return the combined wrapper, or null when the table has no dataMaps
 * @throws IOException if the dataMaps cannot be retrieved
 */
public DataMapExprWrapper getAllDataMapsForClear(CarbonTable carbonTable) throws IOException {
  List<TableDataMap> dataMaps = DataMapStoreManager.getInstance().getAllDataMap(carbonTable);
  if (dataMaps.isEmpty()) {
    return null;
  }
  DataMapExprWrapper combined = new DataMapExprWrapperImpl(dataMaps.get(0), null);
  for (TableDataMap dataMap : dataMaps.subList(1, dataMaps.size())) {
    combined = new AndDataMapExprWrapper(combined,
        new DataMapExprWrapperImpl(dataMap, null), null);
  }
  return combined;
}
/**
 * Drops this dataMap's schema from the store. Requires a main table to be set.
 *
 * @throws IOException if dropping the schema fails
 */
@Override
public void cleanMeta() throws IOException {
  if (getMainTable() == null) {
    throw new UnsupportedOperationException("Table need to be specified in index datamaps");
  }
  String dataMapName = getDataMapSchema().getDataMapName();
  DataMapStoreManager.getInstance().dropDataMapSchema(dataMapName);
}
public DataMapChooser(CarbonTable carbonTable) throws IOException { this.carbonTable = carbonTable; // read all datamaps for this table and populate CG and FG datamap list List<TableDataMap> visibleDataMaps = DataMapStoreManager.getInstance().getAllVisibleDataMap(carbonTable); Map<String, DataMapStatusDetail> map = DataMapStatusManager.readDataMapStatusMap(); cgDataMaps = new ArrayList<>(visibleDataMaps.size()); fgDataMaps = new ArrayList<>(visibleDataMaps.size()); for (TableDataMap visibleDataMap : visibleDataMaps) { DataMapStatusDetail status = map.get(visibleDataMap.getDataMapSchema().getDataMapName()); if (status != null && status.isEnabled()) { DataMapLevel level = visibleDataMap.getDataMapFactory().getDataMapLevel(); if (level == DataMapLevel.CG) { cgDataMaps.add(visibleDataMap); } else { fgDataMaps.add(visibleDataMap); } } } }
List<TableDataMap> datamaps = DataMapStoreManager.getInstance().getAllDataMap(carbonTable); if (!datamaps.isEmpty()) { for (TableDataMap dataMap : datamaps) { DataMapFactory factoryClass = DataMapStoreManager.getInstance().getDataMapFactoryClass( carbonTable, dataMap.getDataMapSchema()); if (factoryClass.willBecomeStale(operation)) {
/**
 * Clears this dataMap's cached data for the main table. Requires a main table to be set.
 */
@Override
public void cleanData() {
  CarbonTable mainTable = getMainTable();
  if (null == mainTable) {
    throw new UnsupportedOperationException("Table need to be specified in index datamaps");
  }
  String dataMapName = getDataMapSchema().getDataMapName();
  DataMapStoreManager.getInstance()
      .clearDataMap(mainTable.getAbsoluteTableIdentifier(), dataMapName);
}
for (Segment filteredSegment : filteredSegmentToAccess) { boolean refreshNeeded = DataMapStoreManager.getInstance().getTableSegmentRefresher(carbonTable) .isRefreshNeeded(filteredSegment, updateStatusManager.getInvalidTimestampRange(filteredSegment.getSegmentNo())); if (DataMapStoreManager.getInstance().getTableSegmentRefresher(carbonTable) .isRefreshNeeded(segment.getSegmentNo())) { toBeCleanedSegments.add(segment); DataMapStoreManager.getInstance() .clearInvalidSegments(getOrCreateCarbonTable(job.getConfiguration()), toBeCleanedSegments);
dataMaps = DataMapStoreManager.getInstance().getAllDataMap(getCarbonTable()); } catch (IOException ex) { LOGGER.error("failed to get datamaps");
/** * register all datamap writer for specified table and segment */ public void registerAllWriter(CarbonTable carbonTable, String segmentId, String taskNo, SegmentProperties segmentProperties) { // clear cache in executor side DataMapStoreManager.getInstance() .clearDataMaps(carbonTable.getCarbonTableIdentifier().getTableUniqueName()); List<TableDataMap> tableIndices; try { tableIndices = DataMapStoreManager.getInstance().getAllDataMap(carbonTable); } catch (IOException e) { LOG.error("Error while retrieving datamaps", e); throw new RuntimeException(e); } if (tableIndices != null) { tblIdentifier = carbonTable.getCarbonTableIdentifier(); for (TableDataMap tableDataMap : tableIndices) { // register it only if it is not lazy datamap, for lazy datamap, user // will rebuild the datamap manually if (!tableDataMap.getDataMapSchema().isLazy()) { DataMapFactory factory = tableDataMap.getDataMapFactory(); register(factory, segmentId, taskNo, segmentProperties); } } } }
/**
 * Initializes this dataMap's metadata: links its schema to the parent (main) table,
 * registers the dataMap with the store manager, and persists the schema.
 *
 * @param ctasSqlStatement CTAS statement (unused by index datamaps)
 * @throws MalformedDataMapCommandException if no parent table is set
 * @throws IOException if registering or saving the schema fails
 */
@Override
public void initMeta(String ctasSqlStatement)
    throws MalformedDataMapCommandException, IOException {
  CarbonTable mainTable = getMainTable();
  DataMapSchema schema = getDataMapSchema();
  if (null == mainTable) {
    throw new MalformedDataMapCommandException(
        "Parent table is required to create index datamap");
  }
  RelationIdentifier parentIdentifier = new RelationIdentifier(mainTable.getDatabaseName(),
      mainTable.getTableName(), mainTable.getTableInfo().getFactTable().getTableId());
  ArrayList<RelationIdentifier> parentTables = new ArrayList<>();
  parentTables.add(parentIdentifier);
  schema.setRelationIdentifier(parentIdentifier);
  schema.setParentTables(parentTables);
  DataMapStoreManager.getInstance().registerDataMap(mainTable, schema, dataMapFactory);
  DataMapStoreManager.getInstance().saveDataMapSchema(schema);
}
/**
 * Closes this record reader: logs scan statistics, evicts the per-column dictionary
 * cache entries, optionally clears the table's dataMap cache, then closes the read
 * support and the underlying iterator before finishing the query executor.
 * The release order is deliberate; do not reorder these steps.
 *
 * @throws IOException if the query executor fails to finish
 */
@Override
public void close() throws IOException {
  logStatistics(rowCount, queryModel.getStatisticsRecorder());
  // clear dictionary cache
  Map<String, Dictionary> columnToDictionaryMapping = queryModel.getColumnToDictionaryMapping();
  if (null != columnToDictionaryMapping) {
    for (Map.Entry<String, Dictionary> entry : columnToDictionaryMapping.entrySet()) {
      CarbonUtil.clearDictionaryCache(entry.getValue());
    }
  }
  if (!skipClearDataMapAtClose) {
    // Clear the datamap cache
    DataMapStoreManager.getInstance().clearDataMaps(
        queryModel.getTable().getAbsoluteTableIdentifier());
  }
  // close read support
  readSupport.close();
  carbonIterator.close();
  try {
    queryExecutor.finish();
  } catch (QueryExecutionException e) {
    // surface executor failures as IOException, preserving the cause
    throw new IOException(e);
  }
}
/** * Return a chosen datamap based on input filter. See {@link DataMapChooser} */ public DataMapExprWrapper choose(FilterResolverIntf filter) { if (filter != null) { Expression expression = filter.getFilterExpression(); // First check for FG datamaps if any exist ExpressionTuple tuple = selectDataMap(expression, fgDataMaps, filter); if (tuple.dataMapExprWrapper == null) { // Check for CG datamap tuple = selectDataMap(expression, cgDataMaps, filter); } if (tuple.dataMapExprWrapper != null) { return tuple.dataMapExprWrapper; } } // Return the default datamap if no other datamap exists. return new DataMapExprWrapperImpl( DataMapStoreManager.getInstance().getDefaultDataMap(carbonTable), filter); }
/**
 * Initializes this reader for one dataMap distributable split.
 * First evicts caches for any invalid segments. Then either (a) when the job only
 * clears dataMaps, drops the table's dataMap and segment-properties caches and
 * exposes an empty blocklet iterator, or (b) prunes the distributable with the
 * resolved dataMap and iterates the resulting blocklets, tagging each with the
 * distributable's unique id.
 *
 * @param inputSplit         expected to be a DataMapDistributableWrapper
 * @param taskAttemptContext not used by this reader
 */
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
    throws IOException, InterruptedException {
  distributable = (DataMapDistributableWrapper) inputSplit;
  // clear the segmentMap and from cache in executor when there are invalid segments
  if (invalidSegments.size() > 0) {
    DataMapStoreManager.getInstance().clearInvalidSegments(table, invalidSegments);
  }
  TableDataMap tableDataMap = DataMapStoreManager.getInstance()
      .getDataMap(table, distributable.getDistributable().getDataMapSchema());
  if (isJobToClearDataMaps) {
    // if job is to clear datamaps just clear datamaps from cache and return
    DataMapStoreManager.getInstance()
        .clearDataMaps(table.getCarbonTableIdentifier().getTableUniqueName());
    // clear the segment properties cache from executor
    SegmentPropertiesAndSchemaHolder.getInstance()
        .invalidate(table.getAbsoluteTableIdentifier());
    blockletIterator = Collections.emptyIterator();
    return;
  }
  dataMaps = tableDataMap.getTableDataMaps(distributable.getDistributable());
  List<ExtendedBlocklet> blocklets = tableDataMap
      .prune(dataMaps, distributable.getDistributable(),
          dataMapExprWrapper.getFilterResolverIntf(distributable.getUniqueId()), partitions);
  for (ExtendedBlocklet blocklet : blocklets) {
    blocklet.setDataMapUniqueId(distributable.getUniqueId());
  }
  blockletIterator = blocklets.iterator();
}
List<Segment> invalidSegments = validAndInvalidSegmentsInfo.getInvalidSegments(); DataMapExprWrapper dataMapExprWrapper = null; if (DataMapStoreManager.getInstance().getAllDataMap(carbonTable).size() > 0) { DataMapChooser dataMapChooser = new DataMapChooser(carbonTable); dataMapExprWrapper = dataMapChooser.getAllDataMapsForClear(carbonTable);
List<PartitionSpec> partitionsToPrune = getPartitionsToPrune(job.getConfiguration()); TableDataMap defaultDataMap = DataMapStoreManager.getInstance().getDefaultDataMap(carbonTable); List<ExtendedBlocklet> prunedBlocklets = null;