Codota Logo
Mapper$Context.getCurrentKey
Code IndexAdd Codota to your IDE (free)

How to use
getCurrentKey
method
in
org.apache.hadoop.mapreduce.Mapper$Context

Best Java code snippets using org.apache.hadoop.mapreduce.Mapper$Context.getCurrentKey (Showing top 20 results out of 315)

  • Common ways to obtain Mapper$Context
private void myMethod () {
Mapper$Context m =
  • Codota IconWrappedMapper wrappedMapper;new Context(wrappedMapper)
  • Codota IconMockito mockito;mockito.mock(Mapper.Context.class)
  • Smart code suggestions by Codota
}
origin: apache/incubator-druid

@Override
public void run(Context context) throws IOException, InterruptedException
{
 // Map every input record (map() accumulates per-interval HyperLogLog
 // collectors into hyperLogLogs as a side effect), then emit one
 // (interval start millis, serialized collector) pair per interval.
 setup(context);
 try {
  while (context.nextKeyValue()) {
   map(context.getCurrentKey(), context.getCurrentValue(), context);
  }
  for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
   context.write(
     new LongWritable(entry.getKey().getStartMillis()),
     new BytesWritable(entry.getValue().toByteArray())
   );
  }
 } finally {
  // Fix: guarantee cleanup() runs even if map() or write() throws,
  // matching the try/finally contract of modern Mapper.run().
  cleanup(context);
 }
}
origin: apache/hbase

 @Override
 protected boolean filter(Context context, Cell cell) {
  // Keep only cells whose row key is in keysToFind; on a hit, log the
  // cell's WAL key and bump per-row and aggregate counters.
  // NOTE(review): contains(row) on a byte[] requires keysToFind to be a
  // byte[]-aware set (e.g. TreeSet with Bytes.BYTES_COMPARATOR); a plain
  // HashSet would compare by reference -- confirm keysToFind's type.
  // TODO: Can I do a better compare than this copying out key?
  byte [] row = new byte [cell.getRowLength()];
  System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
  boolean b = this.keysToFind.contains(row);
  if (b) {
   String keyStr = Bytes.toStringBinary(row);
   try {
    LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
   } catch (IOException e) {
    LOG.warn(e.toString(), e);
   } catch (InterruptedException e) {
    // Fix: restore the thread's interrupt status instead of swallowing
    // it, so code further up the stack can observe the interruption.
    Thread.currentThread().interrupt();
    LOG.warn(e.toString(), e);
   }
   if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
    context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
   }
   context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
  }
  return b;
 }
}
origin: apache/hbase

 @Override
 protected boolean filter(Context context, Cell cell) {
  // Keep only cells whose row key is in keysToFind; on a hit, log the
  // cell's WAL key and bump per-row and aggregate counters.
  // NOTE(review): contains(row) on a byte[] requires keysToFind to be a
  // byte[]-aware set (e.g. TreeSet with Bytes.BYTES_COMPARATOR); a plain
  // HashSet would compare by reference -- confirm keysToFind's type.
  // TODO: Can I do a better compare than this copying out key?
  byte [] row = new byte [cell.getRowLength()];
  System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
  boolean b = this.keysToFind.contains(row);
  if (b) {
   String keyStr = Bytes.toStringBinary(row);
   try {
    LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
   } catch (IOException e) {
    LOG.warn(e.toString(), e);
   } catch (InterruptedException e) {
    // Fix: restore the thread's interrupt status instead of swallowing
    // it, so code further up the stack can observe the interruption.
    Thread.currentThread().interrupt();
    LOG.warn(e.toString(), e);
   }
   if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
    context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
   }
   context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
  }
  return b;
 }
}
origin: pl.edu.icm.coansys/commons

@Override
// Sampling map loop: processes input records until `fequency` [sic -- field
// name from the enclosing class] records have been seen, mapping each record
// with a probability derived from the remaining quota.
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  while ((count < fequency) && context.nextKeyValue()) {
    // NOTE(review): diff is the remaining quota; whenever diff >= 1 the test
    // below always passes, so records are only probabilistically dropped once
    // count is within 1 of fequency -- confirm this matches the intended
    // sampling semantics.
    float diff = fequency - count;
    if (random.nextFloat() <= diff) {
      map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
    // count advances for every record read, mapped or not.
    count++;
  }
  cleanup(context);
}
origin: ShifuML/shifu

@Override
// Advances the wrapped ("outer") record reader under its lock and snapshots
// the current key/value into this reader's own buffers via deep copy, so the
// copies remain stable even if the outer reader reuses its objects.
public boolean nextKeyValue() throws IOException, InterruptedException {
  synchronized(outer) {
    if(!outer.nextKeyValue()) {
      // Outer input exhausted; nothing copied.
      return false;
    }
    // NOTE(review): the key copy uses outer.getConfiguration() while the
    // value copy uses the cached `conf` field -- presumably the same
    // Configuration; confirm and use one consistently.
    key = ReflectionUtils.copy(outer.getConfiguration(), outer.getCurrentKey(), key);
    value = ReflectionUtils.copy(conf, outer.getCurrentValue(), value);
    return true;
  }
}
origin: apache/incubator-rya

// Pulls the next (Key, Value) pair from the context, if any, and converts it
// into a RyaStatement. Returns null when the input is exhausted or when the
// row cannot be resolved into a triple (the failure is logged).
private static RyaStatement nextRyaStatement(final Context context, final RyaTripleContext ryaContext) throws IOException, InterruptedException {
  if (!context.nextKeyValue()) {
    return null;
  }
  final Key currentKey = context.getCurrentKey();
  final Value currentValue = context.getCurrentValue();
  try {
    return createRyaStatement(currentKey, currentValue, ryaContext);
  } catch (final TripleRowResolverException e) {
    log.error("TripleRowResolverException encountered while creating statement", e);
    return null;
  }
}
origin: org.apache.hadoop/hadoop-mapred

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper.
  * @param context the task context supplying input records
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
 public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
   while (context.nextKeyValue()) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
  } finally {
   // Fix: run cleanup() even when map() throws, so resources acquired in
   // setup() are released (same try/finally form newer Hadoop releases use).
   cleanup(context);
  }
 }
}
origin: pl.edu.icm.coansys/commons

@Override
// Map at most `limit` input records, then stop early; any remaining input
// is skipped.
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
    while ((count < limit) && context.nextKeyValue()) {
      map(context.getCurrentKey(), context.getCurrentValue(), context);
      count++;
    }
  } finally {
    // Fix: guarantee cleanup() runs even when map() throws.
    cleanup(context);
  }
}
origin: com.marklogic/mlcp

  @Override
  // Drives the standard map loop, but stops early when a global shutdown
  // has been requested via ContentPump.shutdown; cleanup() always runs.
  public void run(Context context) throws IOException, InterruptedException {
    setup(context);
    try {
      for (;;) {
        // Equivalent to the usual "while (!shutdown && nextKeyValue())";
        // nextKeyValue() is only consulted when no shutdown is pending.
        if (ContentPump.shutdown || !context.nextKeyValue()) {
          break;
        }
        map(context.getCurrentKey(), context.getCurrentValue(), context);
      }
    } finally {
      if (ContentPump.shutdown && LOG.isDebugEnabled()) {
        LOG.debug("Aborting task...");
      }
      cleanup(context);
    }
  }
}
origin: com.n3twork.druid/druid-indexing-hadoop

@Override
public void run(Context context) throws IOException, InterruptedException
{
 // Map every input record (map() accumulates per-interval HyperLogLog
 // collectors into hyperLogLogs as a side effect), then emit one
 // (interval start millis, serialized collector) pair per interval.
 setup(context);
 try {
  while (context.nextKeyValue()) {
   map(context.getCurrentKey(), context.getCurrentValue(), context);
  }
  for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
   context.write(
     new LongWritable(entry.getKey().getStartMillis()),
     new BytesWritable(entry.getValue().toByteArray())
   );
  }
 } finally {
  // Fix: guarantee cleanup() runs even if map() or write() throws,
  // matching the try/finally contract of modern Mapper.run().
  cleanup(context);
 }
}
origin: io.druid/druid-indexing-hadoop

@Override
public void run(Context context) throws IOException, InterruptedException
{
 // Map every input record (map() accumulates per-interval HyperLogLog
 // collectors into hyperLogLogs as a side effect), then emit one
 // (interval start millis, serialized collector) pair per interval.
 setup(context);
 try {
  while (context.nextKeyValue()) {
   map(context.getCurrentKey(), context.getCurrentValue(), context);
  }
  for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
   context.write(
     new LongWritable(entry.getKey().getStartMillis()),
     new BytesWritable(entry.getValue().toByteArray())
   );
  }
 } finally {
  // Fix: guarantee cleanup() runs even if map() or write() throws,
  // matching the try/finally contract of modern Mapper.run().
  cleanup(context);
 }
}
origin: org.apache.druid/druid-indexing-hadoop

@Override
public void run(Context context) throws IOException, InterruptedException
{
 // Map every input record (map() accumulates per-interval HyperLogLog
 // collectors into hyperLogLogs as a side effect), then emit one
 // (interval start millis, serialized collector) pair per interval.
 setup(context);
 try {
  while (context.nextKeyValue()) {
   map(context.getCurrentKey(), context.getCurrentValue(), context);
  }
  for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
   context.write(
     new LongWritable(entry.getKey().getStartMillis()),
     new BytesWritable(entry.getValue().toByteArray())
   );
  }
 } finally {
  // Fix: guarantee cleanup() runs even if map() or write() throws,
  // matching the try/finally contract of modern Mapper.run().
  cleanup(context);
 }
}
origin: opencb/opencga

@Override
// Standard map loop plus variant annotation: after each mapped record,
// annotateVariants(context, false) is invoked -- presumably to flush a
// partial annotation batch when it is full -- and a final
// annotateVariants(context, true) forces the last flush; confirm the
// boolean's exact semantics against the annotateVariants implementation.
public void run(Context context) throws IOException, InterruptedException {
  this.setup(context);
  try {
    while (context.nextKeyValue()) {
      this.map(context.getCurrentKey(), context.getCurrentValue(), context);
      annotateVariants(context, false);
    }
    annotateVariants(context, true);
  } catch (VariantAnnotatorException e) {
    // Annotation failures are not part of the Mapper contract, so they are
    // surfaced as unchecked exceptions (original cause preserved).
    throw new RuntimeException(e);
  } finally {
    // cleanup() runs regardless of success or failure.
    this.cleanup(context);
  }
}
origin: ShifuML/shifu

@Override
// Advances the wrapped ("outer") record reader under its lock and snapshots
// the current key/value into this reader's own buffers via deep copy, so the
// copies remain stable even if the outer reader reuses its objects.
public boolean nextKeyValue() throws IOException, InterruptedException {
  synchronized(outer) {
    if(!outer.nextKeyValue()) {
      // Outer input exhausted; nothing copied.
      return false;
    }
    // NOTE(review): the key copy uses outer.getConfiguration() while the
    // value copy uses the cached `conf` field -- presumably the same
    // Configuration; confirm and use one consistently.
    key = ReflectionUtils.copy(outer.getConfiguration(), outer.getCurrentKey(), key);
    value = ReflectionUtils.copy(conf, outer.getCurrentValue(), value);
    return true;
  }
}
origin: io.hops/hadoop-mapreduce-client-core

 /**
  * Drives this mapper: runs setup() once, feeds every remaining input
  * key/value pair to map(), and always finishes with cleanup().
  * Expert users can override this method for more complete control over the
  * execution of the Mapper.
  * @param context the task context supplying input records
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
 public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
   for (; context.nextKeyValue(); ) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
  } finally {
   cleanup(context);
  }
 }
}
origin: apache/incubator-gobblin

this.map(context.getCurrentKey(), context.getCurrentValue(), context);
origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core

 /**
  * Drives this mapper: runs setup() once, feeds every remaining input
  * key/value pair to map(), and always finishes with cleanup().
  * Expert users can override this method for more complete control over the
  * execution of the Mapper.
  * @param context the task context supplying input records
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
 public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
   for (; context.nextKeyValue(); ) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
  } finally {
   cleanup(context);
  }
 }
}
origin: io.prestosql.hadoop/hadoop-apache

 /**
  * Drives this mapper: runs setup() once, feeds every remaining input
  * key/value pair to map(), and always finishes with cleanup().
  * Expert users can override this method for more complete control over the
  * execution of the Mapper.
  * @param context the task context supplying input records
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
 public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
   for (; context.nextKeyValue(); ) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
  } finally {
   cleanup(context);
  }
 }
}
origin: ch.cern.hadoop/hadoop-mapreduce-client-core

 /**
  * Drives this mapper: runs setup() once, feeds every remaining input
  * key/value pair to map(), and always finishes with cleanup().
  * Expert users can override this method for more complete control over the
  * execution of the Mapper.
  * @param context the task context supplying input records
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
 public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
   for (; context.nextKeyValue(); ) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
  } finally {
   cleanup(context);
  }
 }
}
origin: com.facebook.hadoop/hadoop-core

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper.
  * @param context the task context supplying input records
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
 public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  try {
   while (context.nextKeyValue()) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
  } finally {
   // Fix: run cleanup() even when map() throws, so resources acquired in
   // setup() are released (same try/finally form newer Hadoop releases use).
   cleanup(context);
  }
 }
}
org.apache.hadoop.mapreduce.Mapper$Context.getCurrentKey

Popular methods of Mapper$Context

  • write
  • getConfiguration
  • getCounter
  • getInputSplit
  • progress
  • setStatus
  • getTaskAttemptID
  • nextKeyValue
  • getCurrentValue
  • getNumReduceTasks
  • getJobID
  • getInputFormatClass
  • getJobID,
  • getInputFormatClass,
  • getLocalCacheFiles,
  • getOutputCommitter,
  • getCredentials,
  • getLocalCacheArchives,
  • getStatus,
  • getCacheArchives,
  • getCacheFiles

Popular in Java

  • Start an intent from android
  • setScale (BigDecimal)
  • compareTo (BigDecimal)
    Compares this BigDecimal with the specified BigDecimal. Two BigDecimal objects that are equal in val
  • getOriginalFilename (MultipartFile)
    Return the original filename in the client's filesystem.This may contain path information depending
  • IOException (java.io)
    Signals that an I/O exception of some sort has occurred. This class is the general class of exceptio
  • BigInteger (java.math)
    Immutable arbitrary-precision integers. All operations behave as if BigIntegers were represented in
  • ByteBuffer (java.nio)
    A buffer for bytes. A byte buffer can be created in either one of the following ways: * #allocate(i
  • Hashtable (java.util)
    Hashtable is a synchronized implementation of Map. All optional operations are supported.Neither key
  • IOUtils (org.apache.commons.io)
    General IO stream manipulation utilities. This class provides static utility methods for input/outpu
  • Runner (org.openjdk.jmh.runner)
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now