/**
 * Converts new-API (org.apache.hadoop.mapreduce) input splits into old-API
 * (mapred) table splits.
 *
 * @param splits the mapreduce splits to convert; every element must be an HBase
 *     {@code org.apache.hadoop.hbase.mapreduce.TableSplit}, otherwise a
 *     {@code ClassCastException} is raised.
 * @return an array of converted splits, in the same order as the input list.
 */
private InputSplit[] convertSplits(List<org.apache.hadoop.mapreduce.InputSplit> splits) {
  final InputSplit[] converted = new InputSplit[splits.size()];
  int index = 0;
  for (org.apache.hadoop.mapreduce.InputSplit rawSplit : splits) {
    final org.apache.hadoop.hbase.mapreduce.TableSplit source =
        (org.apache.hadoop.hbase.mapreduce.TableSplit) rawSplit;
    // Copy the four identifying fields into the old-API split type.
    converted[index++] = new TableSplit(
        source.getTableName(),
        source.getStartRow(),
        source.getEndRow(),
        source.getRegionLocation());
  }
  return converted;
}
/**
 * Creates a new FijiTableSplit instance from an HBase TableSplit, additionally
 * recording the start key of the region associated with the split.
 *
 * @param tableSplit the HBase TableSplit to clone; its table name, start row,
 *     end row and region location are forwarded to the superclass constructor.
 * @param regionStartKey the starting key of the region associated with this
 *     split; must not be null.
 */
public FijiTableSplit(TableSplit tableSplit, byte[] regionStartKey) {
  super(tableSplit.getTableName(),
      tableSplit.getStartRow(),
      tableSplit.getEndRow(),
      tableSplit.getRegionLocation());
  // Fail fast on a null key before storing it in the field.
  checkNotNull(regionStartKey);
  mRegionStartKey = regionStartKey;
}
/** * Compares this split against the given one. * * @param split The split to compare to. * @return The result of the comparison. * @see java.lang.Comparable#compareTo(java.lang.Object) */ @Override public int compareTo(TableSplit split) { // If The table name of the two splits is the same then compare start row // otherwise compare based on table names int tableNameComparison = Bytes.compareTo(getTableName(), split.getTableName()); return tableNameComparison != 0 ? tableNameComparison : Bytes.compareTo( getStartRow(), split.getStartRow()); }
/**
 * Creates a new FijiTableSplit instance from an HBase TableSplit by copying its
 * table name, start row, end row and region location into the superclass.
 *
 * @param tableSplit the HBase TableSplit to clone.
 */
public FijiTableSplit(TableSplit tableSplit) {
  super(tableSplit.getTableName(),
      tableSplit.getStartRow(),
      tableSplit.getEndRow(),
      tableSplit.getRegionLocation());
}
/**
 * Captures the table name from the current input split.
 * Assumes the input split is always a {@code TableSplit} — a different split
 * type would raise a {@code ClassCastException} here.
 */
@Override
public void setup(Context context) {
  final TableSplit currentSplit = (TableSplit) context.getInputSplit();
  tableName = currentSplit.getTableName();
}
@Override protected void map(ImmutableBytesWritable key, Result result, Context context) throws IOException, InterruptedException { context.progress(); context.getCounter(HBaseIndexerCounters.INPUT_ROWS).increment(1L); try { TableSplit tableSplit; if (context.getInputSplit() instanceof TableSplit) { tableSplit = (TableSplit) context.getInputSplit(); indexer.indexRowData(ImmutableList.<RowData>of(new ResultWrappingRowData(result, tableSplit.getTableName()))); } else { throw new IOException("Input split not of type " + TableSplit.class + " but " + context.getInputSplit().getClass()); } } catch (SolrServerException e) { // These will only be thrown through if there is an exception on the server side. // Document-based errors will be swallowed and the counter will be incremented throw new RuntimeException(e); } catch (SharderException e) { throw new RuntimeException(e); } }
@Override protected void map(ImmutableBytesWritable key, Result result, Context context) throws IOException, InterruptedException { context.progress(); context.getCounter(HBaseIndexerCounters.INPUT_ROWS).increment(1L); try { TableSplit tableSplit; if (context.getInputSplit() instanceof TableSplit) { tableSplit = (TableSplit) context.getInputSplit(); indexer.indexRowData(ImmutableList.<RowData>of(new ResultWrappingRowData(result, tableSplit.getTableName()))); } else { throw new IOException("Input split not of type " + TableSplit.class + " but " + context.getInputSplit().getClass()); } } catch (SolrServerException e) { // These will only be thrown through if there is an exception on the server side. // Document-based errors will be swallowed and the counter will be incremented throw new RuntimeException(e); } catch (SharderException e) { throw new RuntimeException(e); } }
@Override public RecordReader<ImmutableBytesWritable, Result> createRecordReader( InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { TableSplit tSplit = (TableSplit) split; if (tSplit.getTableName() == null) { throw new IOException("Cannot create a record reader because of a" + " previous error. Please look at the previous logs lines from" + " the task's full log for more details."); } ThemisTableRecordReader trr = this.themisTableRecordReader; // if no table record reader was provided use default if (trr == null) { trr = new ThemisTableRecordReader(); } Scan sc = tSplit.getScan(); sc.setStartRow(tSplit.getStartRow()); sc.setStopRow(tSplit.getEndRow()); trr.setScan(sc); trr.setConf(context.getConfiguration()); trr.setTableName(tSplit.getTableName()); trr.initialize(split, context); return trr; } }
TableSplit tSplit = (TableSplit) split; if (tSplit.getTableName() == null) { throw new IOException("Cannot create a record reader because of a" + " previous error. Please look at the previous logs lines from" new HTable(context.getConfiguration(), tSplit.getTableName());
tableSplit.getTableName(), startRow, stopRow,