Codota Logo
FSEditLogLoader$PositionTrackingInputStream.<init>
Code IndexAdd Codota to your IDE (free)

How to use
org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader$PositionTrackingInputStream
constructor

Best Java code snippets using org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader$PositionTrackingInputStream.<init> (Showing top 14 results out of 315)

  • Common ways to obtain FSEditLogLoader$PositionTrackingInputStream
private void myMethod () {
  FSEditLogLoader.PositionTrackingInputStream f =
  • InputStream is; new FSEditLogLoader.PositionTrackingInputStream(is)
  • String pathname; new FSEditLogLoader.PositionTrackingInputStream(new BufferedInputStream(new FileInputStream(new File(pathname))))
  • Smart code suggestions by Codota
}
origin: org.apache.hadoop/hadoop-hdfs

/**
 * Process image file.
 *
 * Opens the configured {@code inputFile}, detects its on-disk image layout
 * version, obtains a matching {@link ImageLoader}, and runs it over the
 * stream.
 *
 * @throws IOException if no image processor exists for the detected version
 *         or the image cannot be read
 */
public void go() throws IOException  {
  DataInputStream dataIn = null;
  PositionTrackingInputStream posIn = null;
  boolean succeeded = false;
  try {
    FileInputStream fileIn = new FileInputStream(new File(inputFile));
    posIn = new PositionTrackingInputStream(new BufferedInputStream(fileIn));
    dataIn = new DataInputStream(posIn);

    int imageVersionFile = findImageVersion(dataIn);
    ImageLoader loader = ImageLoader.LoaderFactory.getLoader(imageVersionFile);
    if (loader == null) {
      throw new IOException("No image processor to read version " +
          imageVersionFile + " is available.");
    }
    loader.loadImage(dataIn, processor, skipBlocks);
    succeeded = true;
  } finally {
    if (!succeeded) {
      // posIn is null only when opening the file itself failed; otherwise
      // report how far into the image we got before the failure.
      if (posIn != null) {
        LOG.error("image loading failed at offset " + posIn.getPos());
      } else {
        LOG.error("Failed to load image file.");
      }
    }
    IOUtils.cleanupWithLogger(LOG, dataIn, posIn);
  }
}
origin: org.apache.hadoop/hadoop-hdfs

 /**
  * Log every transaction still sitting unflushed in this edits buffer.
  *
  * Takes a snapshot of the buffered bytes, decodes ops one at a time and
  * warns for each; if decoding fails at any point, falls back to dumping
  * the remaining raw bytes as hex and stops.
  */
 private void dumpRemainingEditLogs() {
  byte[] buf = this.getData();
  // Copy only the valid region [0, size()) of the backing array.
  byte[] remainingRawEdits = Arrays.copyOfRange(buf, 0, this.size());
  ByteArrayInputStream bis = new ByteArrayInputStream(remainingRawEdits);
  DataInputStream dis = new DataInputStream(bis);
  // NOTE(review): dis and tracker both wrap bis directly, so bytes consumed
  // through dis do not advance tracker's internal position counter —
  // confirm Reader.create only uses tracker for its own limit/position
  // bookkeeping and not to mirror dis reads.
  FSEditLogLoader.PositionTrackingInputStream tracker =
    new FSEditLogLoader.PositionTrackingInputStream(bis);
  FSEditLogOp.Reader reader = FSEditLogOp.Reader.create(dis, tracker,
    NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
  FSEditLogOp op;
  LOG.warn("The edits buffer is " + size() + " bytes long with " + numTxns +
    " unflushed transactions. " +
    "Below is the list of unflushed transactions:");
  int numTransactions = 0;
  try {
   // readOp(false) returns null at clean end-of-stream.
   while ((op = reader.readOp(false)) != null) {
    LOG.warn("Unflushed op [" + numTransactions + "]: " + op);
    numTransactions++;
   }
  } catch (IOException ioe) {
   // If any exceptions, print raw bytes and stop.
   LOG.warn("Unable to dump remaining ops. Remaining raw bytes: " +
     Hex.encodeHexString(remainingRawEdits), ioe);
  }
 }
}
origin: org.apache.hadoop/hadoop-hdfs

/**
 * Reset this stream to decode {@code newBytes} from the beginning.
 *
 * Swaps the backing buffer, rebuilds the position-tracking and data-stream
 * wrappers on top of it, and creates a fresh op reader for the given
 * layout version.
 *
 * @param newBytes serialized edit-log bytes to read
 * @param version  edit-log layout version used to decode ops
 * @throws IOException if the reader cannot be created
 */
void setBytes(byte[] newBytes, int version) throws IOException {
 inner.setData(newBytes);
 // New tracker so positions are counted from offset 0 of the new buffer.
 tracker = new FSEditLogLoader.PositionTrackingInputStream(inner);
 in = new DataInputStream(tracker);
 this.version = version;
 reader = FSEditLogOp.Reader.create(in, tracker, version);
}
origin: org.apache.hadoop/hadoop-hdfs

fStream = log.getInputStream();
bin = new BufferedInputStream(fStream);
tracker = new FSEditLogLoader.PositionTrackingInputStream(bin);
dataIn = new DataInputStream(tracker);
try {
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Build an edit-log input stream over an in-memory byte array.
 *
 * Reads the log version header first, then layers the position tracker on
 * top of the already-advanced stream — so the tracker's position is
 * relative to the first op after the header, not to the start of the
 * array. NOTE(review): presumably intentional; confirm against how
 * FSEditLogOp.Reader interprets tracker positions.
 *
 * @param data serialized edit-log bytes, including the version header
 * @throws IOException if the version header cannot be read
 */
public EditLogByteInputStream(byte[] data) throws IOException {
 len = data.length;
 input = new ByteArrayInputStream(data);
 BufferedInputStream bin = new BufferedInputStream(input);
 DataInputStream in = new DataInputStream(bin);
 version = EditLogFileInputStream.readLogVersion(in, true);
 // Wrap the same DataInputStream the header was read from, then rewrap in
 // a new DataInputStream so subsequent reads flow through the tracker.
 tracker = new FSEditLogLoader.PositionTrackingInputStream(in);
 in = new DataInputStream(tracker);
    
 reader = new FSEditLogOp.Reader(in, tracker, version);
}
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Reset this stream to decode {@code newBytes} from the beginning.
 *
 * Swaps the backing buffer, rebuilds the position-tracking and data-stream
 * wrappers on top of it, and constructs a fresh op reader for the given
 * layout version.
 *
 * @param newBytes serialized edit-log bytes to read
 * @param version  edit-log layout version used to decode ops
 * @throws IOException if the reader cannot be constructed
 */
void setBytes(byte[] newBytes, int version) throws IOException {
 inner.setData(newBytes);
 // New tracker so positions are counted from offset 0 of the new buffer.
 tracker = new FSEditLogLoader.PositionTrackingInputStream(inner);
 in = new DataInputStream(tracker);
 this.version = version;
 reader = new FSEditLogOp.Reader(in, tracker, version);
}
origin: com.facebook.hadoop/hadoop-core

/**
 * Re-seek the underlying file channel to {@code position} and rebuild the
 * buffered/tracking stream chain plus the op reader on top of it.
 *
 * @param position byte offset in the edit log file to resume reading from
 * @throws IOException if the channel cannot be repositioned
 */
@Override
public void refresh(long position) throws IOException {
  // Reposition the channel before wrapping: the fresh BufferedInputStream
  // below must not see bytes buffered from the old position.
  fc.position(position);
  BufferedInputStream bufferedIn = new BufferedInputStream(fStream);
  tracker = new FSEditLogLoader.PositionTrackingInputStream(bufferedIn, position);
  DataInputStream dataIn = new DataInputStream(tracker);
  reader = new FSEditLogOp.Reader(dataIn, logVersion);
}

origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Process image file.
 *
 * Opens the configured {@code inputFile}, detects its image layout version,
 * obtains a matching {@link ImageLoader}, and runs it over the stream.
 *
 * @throws IOException if no image processor exists for the detected version
 *         or the image cannot be read
 */
public void go() throws IOException  {
 DataInputStream in = null;
 PositionTrackingInputStream tracker = null;
 ImageLoader fsip = null;
 boolean done = false;
 try {
  tracker = new PositionTrackingInputStream(new BufferedInputStream(
       new FileInputStream(new File(inputFile))));
  in = new DataInputStream(tracker);
  int imageVersionFile = findImageVersion(in);
  fsip = ImageLoader.LoaderFactory.getLoader(imageVersionFile);
  if (fsip == null) {
   throw new IOException("No image processor to read version " +
     imageVersionFile + " is available.");
  }
  fsip.loadImage(in, processor, skipBlocks);
  done = true;
 } finally {
  if (!done) {
   // Fix: tracker is null when opening the file itself threw, so calling
   // getPos() unconditionally raised an NPE here that masked the original
   // IOException propagating out of the try block.
   if (tracker != null) {
    LOG.error("image loading failed at offset " + tracker.getPos());
   } else {
    LOG.error("Failed to load image file.");
   }
  }
  IOUtils.cleanup(LOG, in, tracker);
 }
}
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Process image file.
 *
 * Opens the configured {@code inputFile}, detects its image layout version,
 * obtains a matching {@link ImageLoader}, and runs it over the stream.
 *
 * @throws IOException if no image processor exists for the detected version
 *         or the image cannot be read
 */
public void go() throws IOException  {
 DataInputStream in = null;
 PositionTrackingInputStream tracker = null;
 ImageLoader fsip = null;
 boolean done = false;
 try {
  tracker = new PositionTrackingInputStream(new BufferedInputStream(
       new FileInputStream(new File(inputFile))));
  in = new DataInputStream(tracker);
  int imageVersionFile = findImageVersion(in);
  fsip = ImageLoader.LoaderFactory.getLoader(imageVersionFile);
  if (fsip == null) {
   throw new IOException("No image processor to read version " +
     imageVersionFile + " is available.");
  }
  fsip.loadImage(in, processor, skipBlocks);
  done = true;
 } finally {
  if (!done) {
   // Fix: tracker is null when opening the file itself threw, so calling
   // getPos() unconditionally raised an NPE here that masked the original
   // IOException propagating out of the try block.
   if (tracker != null) {
    LOG.error("image loading failed at offset " + tracker.getPos());
   } else {
    LOG.error("Failed to load image file.");
   }
  }
  IOUtils.cleanup(LOG, in, tracker);
 }
}
origin: ch.cern.hadoop/hadoop-hdfs

fStream = log.getInputStream();
bin = new BufferedInputStream(fStream);
tracker = new FSEditLogLoader.PositionTrackingInputStream(bin);
dataIn = new DataInputStream(tracker);
try {
origin: io.prestosql.hadoop/hadoop-apache

fStream = log.getInputStream();
bin = new BufferedInputStream(fStream);
tracker = new FSEditLogLoader.PositionTrackingInputStream(bin);
dataIn = new DataInputStream(tracker);
try {
origin: com.facebook.hadoop/hadoop-core

/**
 * Open an EditLogInputStream for the given file.
 * @param name filename to open
 * @param firstTxId first transaction found in file
 * @param lastTxId last transaction id found in file
 * @throws LogHeaderCorruptException if the header is either missing or
 *         appears to be corrupt/truncated
 * @throws IOException if an actual IO error occurs while reading the
 *         header
 */
EditLogFileInputStream(File name, long firstTxId, long lastTxId)
  throws LogHeaderCorruptException, IOException {
 file = name;
 // Open via RandomAccessFile so the channel (fc) can be repositioned later;
 // the FileInputStream shares the same file descriptor.
 rp = new RandomAccessFile(file, "r");    
 fStream = new FileInputStream(rp.getFD());
 fc = rp.getChannel();
 BufferedInputStream bin = new BufferedInputStream(fStream);  
 tracker = new FSEditLogLoader.PositionTrackingInputStream(bin);  
 DataInputStream in = new DataInputStream(tracker);
 try {
  logVersion = readLogVersion(in);
 } catch (EOFException eofe) {
  // EOF before a complete version header: treat as missing/truncated log.
  // NOTE(review): the EOFException cause is dropped here, and rp/fStream
  // are not closed on this path — confirm LogHeaderCorruptException has no
  // cause-taking constructor and that callers clean up the open file.
  throw new LogHeaderCorruptException("No header found in log");
 }
 reader = new FSEditLogOp.Reader(in, logVersion);
 this.firstTxId = firstTxId;
 this.lastTxId = lastTxId;
}
origin: ch.cern.hadoop/hadoop-hdfs

BufferedInputStream bin = new BufferedInputStream(fin);
FSEditLogLoader.PositionTrackingInputStream tracker = 
  new FSEditLogLoader.PositionTrackingInputStream(bin);
try {
 tracker.setLimit(2);
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Reset this stream to decode {@code newBytes} from the beginning.
 *
 * Swaps the backing buffer, rebuilds the position-tracking and data-stream
 * wrappers on top of it, and constructs a fresh op reader for the given
 * layout version.
 *
 * @param newBytes serialized edit-log bytes to read
 * @param version  edit-log layout version used to decode ops
 * @throws IOException if the reader cannot be constructed
 */
void setBytes(byte[] newBytes, int version) throws IOException {
 inner.setData(newBytes);
 // New tracker so positions are counted from offset 0 of the new buffer.
 tracker = new FSEditLogLoader.PositionTrackingInputStream(inner);
 in = new DataInputStream(tracker);
 this.version = version;
 reader = new FSEditLogOp.Reader(in, tracker, version);
}
org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader$PositionTrackingInputStream.<init>

Popular methods of FSEditLogLoader$PositionTrackingInputStream

  • getPos
  • checkLimit
  • clearLimit
  • close
  • mark
  • read
  • reset
  • setLimit

Popular in Java

  • Parsing JSON documents to java classes using gson
  • addToBackStack (FragmentTransaction)
  • notifyDataSetChanged (ArrayAdapter)
  • getSystemService (Context)
  • Component (java.awt)
    A component is an object having a graphical representation that can be displayed on the screen and t
  • Selector (java.nio.channels)
    A controller for the selection of SelectableChannel objects. Selectable channels can be registered w
  • ResultSet (java.sql)
    An interface for an object which represents a database table entry, returned as the result of the qu
  • DateFormat (java.text)
    Formats or parses dates and times.This class provides factories for obtaining instances configured f
  • Collections (java.util)
    This class consists exclusively of static methods that operate on or return collections. It contains
  • ExecutorService (java.util.concurrent)
    An Executor that provides methods to manage termination and methods that can produce a Future for tr
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now