// Fragment: doAs action that launches the compactor MR job (ValidWriteIdList variant,
// reporting through the metastore client `msc`). The trailing `});` closes an enclosing
// ugi.doAs(...) call not visible here — presumably a proxy-user UGI; confirm against caller.
@Override public Object run() throws Exception { mr.run(conf, jobName.toString(), t, fp, sd, tblValidWriteIds, ci, su, msc); return null; } });
/** * Parse tblproperties specified on "ALTER TABLE ... COMPACT ... WITH OVERWRITE TBLPROPERTIES ..." * and override two categories of properties: * 1. properties of the compactor MR job (with prefix "compactor.") * 2. general hive properties (with prefix "tblprops.") * @param job the compactor MR job * @param tblproperties existing tblproperties * @param properties table properties */ private void overrideTblProps(JobConf job, Map<String, String> tblproperties, String properties) { StringableMap stringableMap = new StringableMap(properties); overrideMRProps(job, stringableMap); // mingle existing tblproperties with those specified on the ALTER TABLE command for (String key : stringableMap.keySet()) { if (key.startsWith(TBLPROPS_PREFIX)) { String propKey = key.substring(9); // 9 is the length of "tblprops.". We only keep the rest tblproperties.put(propKey, stringableMap.get(key)); } } // re-set TABLE_PROPS with reloaded tblproperties job.set(TABLE_PROPS, new StringableMap(tblproperties).toString()); }
// NOTE(review): broken fragment — the try block is never closed, the createProxyUser(...)
// call is cut off mid-argument-list, and `mrJob = mr.getMrJob();` is fused on the end.
// Looks like two versions of Worker's job-launch code merged badly; needs manual repair.
// Intent: run the compactor MR job directly when runAs matches the current user,
// otherwise under a proxy UGI for the table owner (txnHandler-reporting variant).
final CompactorMR mr = new CompactorMR(); launchedJob = true; try { if (runJobAsSelf(runAs)) { mr.run(conf, jobName.toString(), t, sd, txns, ci, su, txnHandler); } else { UserGroupInformation ugi = UserGroupInformation.createProxyUser(t.getOwner(), mrJob = mr.getMrJob();
// Fragment of createBaseJobConf (write-id variant): seeds the JobConf with tmp location,
// I/O format classes, bucket count, and the ValidWriteIdList, then applies tblproperties
// overrides. NOTE(review): the `if (ci.properties != null) {` branch is not closed here —
// the closing brace presumably lives outside this fragment; confirm against the full file.
job.set(TMP_LOCATION, generateTmpPath(sd)); job.set(INPUT_FORMAT_CLASS_NAME, sd.getInputFormat()); job.set(OUTPUT_FORMAT_CLASS_NAME, sd.getOutputFormat()); job.setInt(NUM_BUCKETS, sd.getNumBuckets()); job.set(ValidWriteIdList.VALID_WRITEIDS_KEY, writeIds.toString()); overrideMRProps(job, t.getParameters()); // override MR properties from tblproperties if applicable if (ci.properties != null) { overrideTblProps(job, t.getParameters(), ci.properties); setColumnTypes(job, sd.getCols());
// Fragment of createBaseJobConf (ValidTxnList variant): bucket count plus the valid-txn
// list, then tblproperties overrides. NOTE(review): same unclosed `if` as the write-id
// variant above — a closing brace is missing from this view; confirm before use.
job.setInt(NUM_BUCKETS, sd.getNumBuckets()); job.set(ValidTxnList.VALID_TXNS_KEY, txns.toString()); overrideMRProps(job, t.getParameters()); // override MR properties from tblproperties if applicable if (ci.properties != null) { overrideTblProps(job, t.getParameters(), ci.properties); setColumnTypes(job, sd.getCols());
// NOTE(review): this fragment declares `String query` TWICE in the same scope — it will
// not compile. It appears to be two revisions of the MM-compaction path fused together:
// one builds a CREATE TABLE query into a "_base" tmp location, the other builds the
// compaction query directly via buildMmCompactionQuery. Keep exactly one; needs repair.
// Intent: read the ACID state of the table location, drop MM files, compact into a
// uniquely-named temp table under a compactor txn, commit, then drop the temp table.
AcidUtils.Directory dir = AcidUtils.getAcidState(new Path(sd.getLocation()), conf, writeIds, Ref.from(false), false, t.getParameters()); removeFilesForMmTable(conf, dir); String tmpLocation = generateTmpPath(sd); Path baseLocation = new Path(tmpLocation, "_base"); while (true) { tmpTableName = tmpPrefix + System.currentTimeMillis(); String query = buildMmCompactionCtQuery(tmpTableName, t, p == null ? t.getSd() : p.getSd(), baseLocation.toString()); LOG.info("Compacting a MM table into " + query); String query = buildMmCompactionQuery(driverConf, t, p, tmpTableName); LOG.info("Compacting a MM table via " + query); long compactorTxnId = CompactorMap.getCompactorTxnId(conf); DriverUtils.runOnDriver(driverConf, user, sessionState, query, writeIds, compactorTxnId); commitMmCompaction(tmpLocation, sd.getLocation(), conf, writeIds, compactorTxnId); DriverUtils.runOnDriver(driverConf, user, sessionState, "drop table if exists " + tmpTableName);
// Fragment of CompactorMR.run (write-id variant): splits a large set of deltas into
// multiple MINOR compaction jobs of at most maxDeltastoHandle deltas each, then launches
// the main job. NOTE(review): the first launchCompactionJob(...) call is truncated
// (argument list never closes) and a second launchCompactionJob call is fused directly
// after it — another bad merge; the real file must separate these two call sites.
runMmCompaction(conf, t, p, sd, writeIds, ci); JobConf job = createBaseJobConf(conf, jobName, t, sd, writeIds, ci); int numMinorCompactions = parsedDeltas.size() / maxDeltastoHandle; for(int jobSubId = 0; jobSubId < numMinorCompactions; jobSubId++) { JobConf jobMinorCompact = createBaseJobConf(conf, jobName + "_" + jobSubId, t, sd, writeIds, ci); launchCompactionJob(jobMinorCompact, null, CompactionType.MINOR, null, parsedDeltas.subList(jobSubId * maxDeltastoHandle, (jobSubId + 1) * maxDeltastoHandle), launchCompactionJob(job, baseDir, ci.type, dirsToSearch, dir.getCurrentDirectories(), dir.getCurrentDirectories().size(), dir.getObsolete().size(), conf, msc, ci.id, jobName);
// NOTE(review): broken fragment — try block unclosed and createProxyUser(...) cut off
// mid-argument. Older variant of Worker's job launch passing an `isMajor` flag instead
// of a CompactionInfo; run directly if runAs is self, otherwise via a proxy UGI.
final CompactorMR mr = new CompactorMR(); launchedJob = true; try { if (runJobAsSelf(runAs)) { mr.run(conf, jobName.toString(), t, sd, txns, isMajor, su); } else { UserGroupInformation ugi = UserGroupInformation.createProxyUser(t.getOwner(),
// Fragment of CompactorMR.run (txn variant): test-mode failure injection, then the same
// delta-splitting logic as the write-id variant (multiple MINOR jobs capped at
// maxDeltastoHandle deltas each). NOTE(review): as in the other variant, the first
// launchCompactionJob(...) call is truncated and fused with a second call — bad merge.
throw new RuntimeException(HiveConf.ConfVars.HIVETESTMODEFAILCOMPACTION.name() + "=true"); JobConf job = createBaseJobConf(conf, jobName, t, sd, txns, ci); int numMinorCompactions = parsedDeltas.size() / maxDeltastoHandle; for(int jobSubId = 0; jobSubId < numMinorCompactions; jobSubId++) { JobConf jobMinorCompact = createBaseJobConf(conf, jobName + "_" + jobSubId, t, sd, txns, ci); launchCompactionJob(jobMinorCompact, null, CompactionType.MINOR, null, parsedDeltas.subList(jobSubId * maxDeltastoHandle, (jobSubId + 1) * maxDeltastoHandle), launchCompactionJob(job, baseDir, ci.type, dirsToSearch, dir.getCurrentDirectories(), dir.getCurrentDirectories().size(), dir.getObsolete().size(), conf, txnHandler, ci.id, jobName);
// Fragment of createBaseJobConf (oldest variant): bucket count, valid-txn list, and
// column type wiring only — no tblproperties override step in this revision.
job.setInt(NUM_BUCKETS, sd.getNumBuckets()); job.set(ValidTxnList.VALID_TXNS_KEY, txns.toString()); setColumnTypes(job, sd.getCols());
// NOTE(review): broken fragment — starts mid-expression (a compactionInfoToStruct(...)
// argument list closing), opens a try that never closes, cuts createProxyUser(...) off
// mid-argument, and fuses `msc.commitTxn(...)` / test-mode `mrJob` capture on the end.
// Intent (partitioned, metastore-client variant): launch the compactor MR job as self or
// under a proxy UGI, commit the compactor txn, and expose the MR job handle in test mode.
CompactionInfo.compactionInfoToStruct(ci)), conf, runJobAsSelf(ci.runAs) ? ci.runAs : t.getOwner()); final CompactorMR mr = new CompactorMR(); launchedJob = true; try { if (runJobAsSelf(ci.runAs)) { mr.run(conf, jobName.toString(), t, p, sd, tblValidWriteIds, ci, su, msc); } else { UserGroupInformation ugi = UserGroupInformation.createProxyUser(t.getOwner(), msc.commitTxn(compactorTxnId); if (conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST)) { mrJob = mr.getMrJob();
// Fragment: doAs action that launches the compactor MR job (ValidTxnList variant,
// reporting via txnHandler). The trailing `});` closes an enclosing ugi.doAs(...)
// call outside this view — presumably a proxy-user UGI; confirm against the caller.
@Override public Object run() throws Exception { mr.run(conf, jobName.toString(), t, sd, txns, ci, su, txnHandler); return null; } });
/** * Parse tblproperties specified on "ALTER TABLE ... COMPACT ... WITH OVERWRITE TBLPROPERTIES ..." * and override two categories of properties: * 1. properties of the compactor MR job (with prefix "compactor.") * 2. general hive properties (with prefix "tblprops.") * @param job the compactor MR job * @param tblproperties existing tblproperties * @param properties table properties */ private void overrideTblProps(JobConf job, Map<String, String> tblproperties, String properties) { StringableMap stringableMap = new StringableMap(properties); overrideMRProps(job, stringableMap); // mingle existing tblproperties with those specified on the ALTER TABLE command for (String key : stringableMap.keySet()) { if (key.startsWith(TBLPROPS_PREFIX)) { String propKey = key.substring(9); // 9 is the length of "tblprops.". We only keep the rest tblproperties.put(propKey, stringableMap.get(key)); } } // re-set TABLE_PROPS with reloaded tblproperties job.set(TABLE_PROPS, new StringableMap(tblproperties).toString()); }
// Fragment: doAs action that launches the compactor MR job (oldest variant, passing an
// `isMajor` flag instead of a CompactionInfo). The trailing `});` closes an enclosing
// ugi.doAs(...) call outside this view — confirm against the caller.
@Override public Object run() throws Exception { mr.run(conf, jobName.toString(), t, sd, txns, isMajor, su); return null; } });