/**
 * Deserializes the stored string representation back into a Scan.
 *
 * @return the Scan reconstructed from this split's serialized scanner.
 * @throws IOException if the stored representation cannot be decoded.
 */
public Scan getScan() throws IOException {
  Scan decoded = TableMapReduceUtil.convertStringToScan(this.scan);
  return decoded;
}
/**
 * Builds a Scan from the job configuration.
 * <p>
 * Prefers the fully serialized scan stored under {@link TableInputFormat#SCAN};
 * otherwise falls back to the mapred-era space-separated column-family list.
 * Fails fast with a diagnostic message when neither property is present.
 *
 * @param conf job configuration to read the scan definition from.
 * @return the Scan described by the configuration.
 * @throws IOException if the serialized scan cannot be decoded.
 * @throws IllegalArgumentException if neither scan property is set.
 */
public static Scan extractScanFromConf(Configuration conf) throws IOException {
  String serializedScan = conf.get(TableInputFormat.SCAN);
  if (serializedScan != null) {
    return TableMapReduceUtil.convertStringToScan(serializedScan);
  }
  String columnList = conf.get(org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST);
  if (columnList != null) {
    Scan scan = new Scan();
    // The mapred-era property is a single space-separated list of column families.
    for (String col : columnList.split(" ")) {
      scan.addFamily(Bytes.toBytes(col));
    }
    return scan;
  }
  // Name the keys that were checked so a misconfigured job is easy to diagnose.
  throw new IllegalArgumentException("Unable to create scan: neither "
    + TableInputFormat.SCAN + " nor "
    + org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST + " is set");
}
/**
 * Sets the configuration. This is used to set the details for the tables to
 * be scanned.
 *
 * @param configuration The configuration to set.
 * @see org.apache.hadoop.conf.Configurable#setConf(
 *   org.apache.hadoop.conf.Configuration)
 */
@Override
public void setConf(Configuration configuration) {
  this.conf = configuration;
  String[] rawScans = conf.getStrings(SCANS);
  // getStrings returns null when the key is absent; treat that the same as an
  // empty list so the caller gets the intended IllegalArgumentException, not an NPE.
  if (rawScans == null || rawScans.length == 0) {
    throw new IllegalArgumentException(
      "There must be at least 1 scan configuration set to : " + SCANS);
  }
  List<Scan> scans = new ArrayList<>();
  for (String rawScan : rawScans) {
    try {
      scans.add(TableMapReduceUtil.convertStringToScan(rawScan));
    } catch (IOException e) {
      // Conversion direction is string -> Scan; the previous message had it backwards.
      throw new RuntimeException("Failed to convert string : " + rawScan + " to Scan", e);
    }
  }
  this.setScans(scans);
}
}
/** * Returns the details about this instance as a string. * * @return The values of this instance as a string. * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("HBase table split("); sb.append("table name: ").append(tableName); // null scan input is represented by "" String printScan = ""; if (!scan.equals("")) { try { // get the real scan here in toString, not the Base64 string printScan = TableMapReduceUtil.convertStringToScan(scan).toString(); } catch (IOException e) { printScan = ""; } } sb.append(", scan: ").append(printScan); sb.append(", start row: ").append(Bytes.toStringBinary(startRow)); sb.append(", end row: ").append(Bytes.toStringBinary(endRow)); sb.append(", region location: ").append(regionLocation); sb.append(", encoded region name: ").append(encodedRegionName); sb.append(")"); return sb.toString(); }
/**
 * Retrieve the snapshot name -> list&lt;scan&gt; mapping pushed to configuration by
 * {@link #setSnapshotToScans(org.apache.hadoop.conf.Configuration, java.util.Map)}
 *
 * @param conf Configuration to extract name -> list&lt;scan&gt; mappings from.
 * @return the snapshot name -> list&lt;scan&gt; mapping pushed to configuration
 * @throws IOException if a serialized scan cannot be decoded.
 */
public Map<String, Collection<Scan>> getSnapshotsToScans(Configuration conf) throws IOException {
  Map<String, Collection<Scan>> rtn = Maps.newHashMap();
  for (Map.Entry<String, String> entry :
      ConfigurationUtil.getKeyValues(conf, SNAPSHOT_TO_SCANS_KEY)) {
    String snapshotName = entry.getKey();
    String scan = entry.getValue();
    // computeIfAbsent replaces the manual get / null-check / put sequence.
    rtn.computeIfAbsent(snapshotName, key -> Lists.newArrayList())
      .add(TableMapReduceUtil.convertStringToScan(scan));
  }
  return rtn;
}
/**
 * Sets the configuration. This is used to set the details for the table to
 * be scanned.
 *
 * @param configuration The configuration to set.
 * @see org.apache.hadoop.conf.Configurable#setConf(
 *   org.apache.hadoop.conf.Configuration)
 */
@Override
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="REC_CATCH_EXCEPTION",
  justification="Intentional")
public void setConf(Configuration configuration) {
  this.conf = configuration;
  String serialized = conf.get(SCAN);
  Scan scan = null;
  if (serialized == null) {
    // No serialized scan: attempt to build one from individual config properties.
    try {
      scan = createScanFromConfiguration(conf);
    } catch (Exception e) {
      LOG.error(StringUtils.stringifyException(e));
    }
  } else {
    try {
      scan = TableMapReduceUtil.convertStringToScan(serialized);
    } catch (IOException e) {
      LOG.error("An error occurred.", e);
    }
  }
  // Failures above are intentionally swallowed; scan may be null here.
  setScan(scan);
}
public void initialize(InputSplit split, Configuration conf) throws IOException { this.scan = TableMapReduceUtil.convertStringToScan(split.getScan()); this.split = split; TableDescriptor htd = split.htd; HRegionInfo hri = this.split.getRegionInfo(); FileSystem fs = FSUtils.getCurrentFileSystem(conf); // region is immutable, this should be fine, // otherwise we have to set the thread read point scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED); // disable caching of data blocks scan.setCacheBlocks(false); scan.setScanMetricsEnabled(true); scanner = new ClientSideRegionScanner(conf, fs, new Path(split.restoreDir), htd, hri, scan, null); }
TableMapReduceUtil.convertStringToScan(snapshotRegionSplit.getDelegate().getScan()); if (startRow.length > 0) { Assert.assertTrue(
/**
 * Rebuilds the Scan from its serialized string form held by this instance.
 *
 * @return the deserialized Scan.
 * @throws IOException if decoding the stored representation fails.
 */
public Scan getScan() throws IOException {
  return TableMapReduceUtil.convertStringToScan(scan);
}
/**
 * Decodes and returns the Scan stored in string form on this object.
 *
 * @return a Scan built from the stored serialized scanner.
 * @throws IOException when the serialized form is not decodable.
 */
public Scan getScan() throws IOException {
  final String serialized = this.scan;
  return TableMapReduceUtil.convertStringToScan(serialized);
}
/**
 * Converts the stored string representation into a live Scan object.
 *
 * @return the Scan corresponding to the stored scanner string.
 * @throws IOException if the conversion fails.
 */
public Scan getScan() throws IOException {
  Scan result = TableMapReduceUtil.convertStringToScan(this.scan);
  return result;
}
/**
 * Returns the Scan reconstructed from the serialized representation kept here.
 *
 * @return the deserialized Scan object.
 * @throws IOException if the stored string cannot be turned into a Scan.
 */
public Scan getScan() throws IOException {
  return TableMapReduceUtil.convertStringToScan(this.scan);
}
/**
 * Delegates string-to-Scan conversion to {@link TableMapReduceUtil}.
 *
 * @param scan the serialized scan string.
 * @return the decoded Scan.
 * @throws IOException if the string cannot be decoded.
 */
public static Scan convertStringToScan(String scan) throws IOException {
  Scan decoded = TableMapReduceUtil.convertStringToScan(scan);
  return decoded;
}
/**
 * This method is just like the package-private version in TableMapReduceUtil,
 * except that it disables block caching by default.
 *
 * @param base64 the Base64-serialized scan string.
 * @return the decoded Scan with block caching disabled.
 * @throws IOException if the string cannot be decoded.
 */
public static Scan convertStringToScan(String base64) throws IOException {
  Scan decoded = TableMapReduceUtil.convertStringToScan(base64);
  // Differs from the delegate: block caching is switched off here.
  decoded.setCacheBlocks(false);
  return decoded;
}
/**
 * Builds a Scan from the job configuration.
 * <p>
 * Prefers the fully serialized scan stored under {@link TableInputFormat#SCAN};
 * otherwise falls back to the mapred-era space-separated column-family list.
 * Fails fast with a diagnostic message when neither property is present.
 *
 * @param conf job configuration to read the scan definition from.
 * @return the Scan described by the configuration.
 * @throws IOException if the serialized scan cannot be decoded.
 * @throws IllegalArgumentException if neither scan property is set.
 */
public static Scan extractScanFromConf(Configuration conf) throws IOException {
  String serializedScan = conf.get(TableInputFormat.SCAN);
  if (serializedScan != null) {
    return TableMapReduceUtil.convertStringToScan(serializedScan);
  }
  String columnList = conf.get(org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST);
  if (columnList != null) {
    Scan scan = new Scan();
    // The mapred-era property is a single space-separated list of column families.
    for (String col : columnList.split(" ")) {
      scan.addFamily(Bytes.toBytes(col));
    }
    return scan;
  }
  // Name the keys that were checked so a misconfigured job is easy to diagnose.
  throw new IllegalArgumentException("Unable to create scan: neither "
    + TableInputFormat.SCAN + " nor "
    + org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST + " is set");
}
/**
 * Builds a Scan from the job configuration.
 * <p>
 * Prefers the fully serialized scan stored under {@link TableInputFormat#SCAN};
 * otherwise falls back to the mapred-era space-separated column-family list.
 * Fails fast with a diagnostic message when neither property is present.
 *
 * @param conf job configuration to read the scan definition from.
 * @return the Scan described by the configuration.
 * @throws IOException if the serialized scan cannot be decoded.
 * @throws IllegalArgumentException if neither scan property is set.
 */
public static Scan extractScanFromConf(Configuration conf) throws IOException {
  String serializedScan = conf.get(TableInputFormat.SCAN);
  if (serializedScan != null) {
    return TableMapReduceUtil.convertStringToScan(serializedScan);
  }
  String columnList = conf.get(org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST);
  if (columnList != null) {
    Scan scan = new Scan();
    // The mapred-era property is a single space-separated list of column families.
    for (String col : columnList.split(" ")) {
      scan.addFamily(Bytes.toBytes(col));
    }
    return scan;
  }
  // Name the keys that were checked so a misconfigured job is easy to diagnose.
  throw new IllegalArgumentException("Unable to create scan: neither "
    + TableInputFormat.SCAN + " nor "
    + org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST + " is set");
}
/**
 * Builds a Scan from the job configuration.
 * <p>
 * Prefers the fully serialized scan stored under {@link TableInputFormat#SCAN};
 * otherwise falls back to the mapred-era space-separated column-family list.
 * Fails fast with a diagnostic message when neither property is present.
 *
 * @param conf job configuration to read the scan definition from.
 * @return the Scan described by the configuration.
 * @throws IOException if the serialized scan cannot be decoded.
 * @throws IllegalArgumentException if neither scan property is set.
 */
public static Scan extractScanFromConf(Configuration conf) throws IOException {
  String serializedScan = conf.get(TableInputFormat.SCAN);
  if (serializedScan != null) {
    return TableMapReduceUtil.convertStringToScan(serializedScan);
  }
  String columnList = conf.get(org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST);
  if (columnList != null) {
    Scan scan = new Scan();
    // The mapred-era property is a single space-separated list of column families.
    for (String col : columnList.split(" ")) {
      scan.addFamily(Bytes.toBytes(col));
    }
    return scan;
  }
  // Name the keys that were checked so a misconfigured job is easy to diagnose.
  throw new IllegalArgumentException("Unable to create scan: neither "
    + TableInputFormat.SCAN + " nor "
    + org.apache.hadoop.hbase.mapred.TableInputFormat.COLUMN_LIST + " is set");
}
public void initialize(InputSplit split, Configuration conf) throws IOException { this.scan = TableMapReduceUtil.convertStringToScan(split.getScan()); this.split = split; HTableDescriptor htd = split.htd; HRegionInfo hri = this.split.getRegionInfo(); FileSystem fs = FSUtils.getCurrentFileSystem(conf); // region is immutable, this should be fine, // otherwise we have to set the thread read point scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED); // disable caching of data blocks scan.setCacheBlocks(false); scanner = new ClientSideRegionScanner(conf, fs, new Path(split.restoreDir), htd, hri, scan, null); }
public void initialize(InputSplit split, Configuration conf) throws IOException { this.scan = TableMapReduceUtil.convertStringToScan(split.getScan()); this.split = split; TableDescriptor htd = split.htd; HRegionInfo hri = this.split.getRegionInfo(); FileSystem fs = FSUtils.getCurrentFileSystem(conf); // region is immutable, this should be fine, // otherwise we have to set the thread read point scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED); // disable caching of data blocks scan.setCacheBlocks(false); scan.setScanMetricsEnabled(true); scanner = new ClientSideRegionScanner(conf, fs, new Path(split.restoreDir), htd, hri, scan, null); }
public void initialize(InputSplit split, Configuration conf) throws IOException { this.scan = TableMapReduceUtil.convertStringToScan(split.getScan()); this.split = split; TableDescriptor htd = split.htd; HRegionInfo hri = this.split.getRegionInfo(); FileSystem fs = FSUtils.getCurrentFileSystem(conf); // region is immutable, this should be fine, // otherwise we have to set the thread read point scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED); // disable caching of data blocks scan.setCacheBlocks(false); scan.setScanMetricsEnabled(true); scanner = new ClientSideRegionScanner(conf, fs, new Path(split.restoreDir), htd, hri, scan, null); }