/**
 * Finds the {@code n} most recent snapshot files in the snapshot directory.
 *
 * @param n the number of recent snapshots to look for
 * @return the list of the {@code n} most recent snapshots, most recent first
 * @throws IOException if the snapshot directory cannot be read
 */
public List<File> findNRecentSnapshots(int n) throws IOException {
    // Delegate directly to a fresh reader over the snapshot directory.
    return new FileSnap(snapDir).findNRecentSnapshots(n);
}
/**
 * Reads the given snapshot file, verifying it with an Adler32 checksum
 * stream, and prints its data tree and session details.
 *
 * @param snapshotFileName path of the snapshot file to read
 * @throws IOException if the file cannot be read or deserialized
 */
public void run(String snapshotFileName) throws IOException {
    // try-with-resources: the original never closed the stream, leaking the
    // file handle (always, and especially when deserialize threw). This also
    // matches the sibling run(String, boolean, boolean) overload's style.
    try (InputStream is = new CheckedInputStream(
            new BufferedInputStream(new FileInputStream(snapshotFileName)),
            new Adler32())) {
        InputArchive ia = BinaryInputArchive.getArchive(is);
        FileSnap fileSnap = new FileSnap(null);
        DataTree dataTree = new DataTree();
        Map<Long, Integer> sessions = new HashMap<Long, Integer>();
        fileSnap.deserialize(dataTree, sessions, ia);
        printDetails(dataTree, sessions);
    }
}
/**
 * Returns the most recent snapshot file in the snapshot directory.
 *
 * @return the file that contains the most recent snapshot
 * @throws IOException if the snapshot directory cannot be read
 */
public File findMostRecentSnapshot() throws IOException {
    // Delegate to a fresh snapshot reader over the configured directory.
    return new FileSnap(snapDir).findMostRecentSnapshot();
}
// NOTE(review): truncated excerpt of a checksum-validation routine — the
// braces/parentheses are unbalanced in this view (the elided middle
// presumably opens the snapshot input stream and a try-with-resources
// block). Verify against the full source before editing.
List<File> snapList = findNValidSnapshots(100); if (snapList.size() == 0) { return -1L; CheckedInputStream crcIn = new CheckedInputStream(snapIS, new Adler32())) { InputArchive ia = BinaryInputArchive.getArchive(crcIn); deserialize(dt, sessions, ia); long checkSum = crcIn.getChecksum().getValue(); long val = ia.readLong("val");
// Fragment of a larger method (surrounding context not visible here):
// (re)assigns the snapshot log field to a new reader over snapDir.
snapLog = new FileSnap(this.snapDir);
/** return if checksum matches for a snapshot **/ private boolean getCheckSum(FileSnap snap, File snapFile) throws IOException { DataTree dt = new DataTree(); Map<Long, Integer> sessions = new ConcurrentHashMap<Long, Integer>(); InputStream snapIS = new BufferedInputStream(new FileInputStream( snapFile)); CheckedInputStream crcIn = new CheckedInputStream(snapIS, new Adler32()); InputArchive ia = BinaryInputArchive.getArchive(crcIn); try { snap.deserialize(dt, sessions, ia); } catch (IOException ie) { // we failed on the most recent snapshot // must be incomplete // try reading the next one // after corrupting snapIS.close(); crcIn.close(); throw ie; } long checksum = crcIn.getChecksum().getValue(); long val = ia.readLong("val"); snapIS.close(); crcIn.close(); return (val != checksum); }
/**
 * Finds the most recent valid snapshot in the database.
 *
 * @return the file containing the most recent snapshot, or null if none exists
 */
public File findMostRecentSnapshot() throws IOException {
    List<File> files = findNValidSnapshots(1);
    return files.isEmpty() ? null : files.get(0);
}
/** * serialize the datatree and session into the file snapshot * @param dt the datatree to be serialized * @param sessions the sessions to be serialized * @param snapShot the file to store snapshot into * @param fsync sync the file immediately after write */ public synchronized void serialize(DataTree dt, Map<Long, Integer> sessions, File snapShot, boolean fsync) throws IOException { if (!close) { try (CheckedOutputStream crcOut = new CheckedOutputStream(new BufferedOutputStream(fsync ? new AtomicFileOutputStream(snapShot) : new FileOutputStream(snapShot)), new Adler32())) { //CheckedOutputStream cout = new CheckedOutputStream() OutputArchive oa = BinaryOutputArchive.getArchive(crcOut); FileHeader header = new FileHeader(SNAP_MAGIC, VERSION, dbId); serialize(dt, sessions, oa, header); long val = crcOut.getChecksum().getValue(); oa.writeLong(val, "val"); oa.writeString("/", "path"); crcOut.flush(); } } }
// NOTE(review): truncated excerpt of a checksum-validation routine — the
// braces are unbalanced in this view ("return -1L;" is followed by code
// that clearly belongs after the if-block; the middle is elided, and
// crcIn/snapIS/dt are declared outside this excerpt). Verify against the
// full source before editing.
List<File> snapList = findNValidSnapshots(100); if (snapList.size() == 0) { return -1L; crcIn = new CheckedInputStream(snapIS, new Adler32()); InputArchive ia = BinaryInputArchive.getArchive(crcIn); deserialize(dt,sessions, ia); long checkSum = crcIn.getChecksum().getValue(); long val = ia.readLong("val");
// Fragment of a larger method (surrounding context not visible here):
// (re)assigns the snapshot log field to a new reader over snapDir.
snapLog = new FileSnap(this.snapDir);
/**
 * Finds the most recent valid snapshot in the database.
 *
 * @return the file containing the most recent snapshot, or null if none exists
 */
public File findMostRecentSnapshot() throws IOException {
    // Ask for a single valid snapshot; an empty result means none exist.
    for (File candidate : findNValidSnapshots(1)) {
        return candidate;
    }
    return null;
}
/** * serialize the datatree and session into the file snapshot * @param dt the datatree to be serialized * @param sessions the sessions to be serialized * @param snapShot the file to store snapshot into */ public synchronized void serialize(DataTree dt, Map<Long, Integer> sessions, File snapShot) throws IOException { if (!close) { OutputStream sessOS = new BufferedOutputStream(new FileOutputStream(snapShot)); CheckedOutputStream crcOut = new CheckedOutputStream(sessOS, new Adler32()); //CheckedOutputStream cout = new CheckedOutputStream() OutputArchive oa = BinaryOutputArchive.getArchive(crcOut); FileHeader header = new FileHeader(SNAP_MAGIC, VERSION, dbId); serialize(dt,sessions,oa, header); long val = crcOut.getChecksum().getValue(); oa.writeLong(val, "val"); oa.writeString("/", "path"); sessOS.flush(); crcOut.close(); sessOS.close(); } }
/**
 * Returns the {@code n} most recent snapshots from the snapshot directory.
 *
 * @param n the number of recent snapshots to look for
 * @return the list of the n most recent snapshots, most recent first
 * @throws IOException if the snapshot directory cannot be read
 */
public List<File> findNRecentSnapshots(int n) throws IOException {
    FileSnap reader = new FileSnap(snapDir);
    return reader.findNRecentSnapshots(n);
}
/**
 * Returns the most recent snapshot file from the snapshot directory.
 *
 * @return the file that contains the most recent snapshot
 * @throws IOException if the snapshot directory cannot be read
 */
public File findMostRecentSnapshot() throws IOException {
    FileSnap reader = new FileSnap(snapDir);
    return reader.findMostRecentSnapshot();
}
// NOTE(review): truncated excerpt of a checksum-validation routine — the
// braces are unbalanced in this view and crcIn/snapIS/dt/sessions are
// declared outside the excerpt. Verify against the full source before
// editing.
List<File> snapList = findNValidSnapshots(100); if (snapList.size() == 0) { return -1L; crcIn = new CheckedInputStream(snapIS, new Adler32()); InputArchive ia = BinaryInputArchive.getArchive(crcIn); deserialize(dt,sessions, ia); long checkSum = crcIn.getChecksum().getValue(); long val = ia.readLong("val");
/**
 * Reads the given snapshot file and dumps it, either as JSON or as a
 * detail listing (optionally including node data).
 *
 * @param snapshotFileName path of the snapshot file to read
 * @param dumpData whether to include node data in the detail listing
 * @param dumpJson whether to emit the snapshot as JSON instead
 * @throws IOException if the snapshot cannot be read or deserialized
 */
public void run(String snapshotFileName, boolean dumpData, boolean dumpJson) throws IOException {
    File snapshotFile = new File(snapshotFileName);
    try (InputStream is = new CheckedInputStream(
            new BufferedInputStream(new FileInputStream(snapshotFileName)),
            new Adler32())) {
        DataTree dataTree = new DataTree();
        Map<Long, Integer> sessions = new HashMap<Long, Integer>();
        // Deserialize the whole snapshot before deciding how to print it.
        new FileSnap(null).deserialize(dataTree, sessions, BinaryInputArchive.getArchive(is));
        long fileNameZxid = Util.getZxidFromName(snapshotFile.getName(), SNAPSHOT_FILE_PREFIX);
        if (dumpJson) {
            printSnapshotJson(dataTree);
        } else {
            printDetails(dataTree, sessions, dumpData, fileNameZxid);
        }
    }
}
/** * truncate the transaction logs the zxid * specified * @param zxid the zxid to truncate the logs to * @return true if able to truncate the log, false if not * @throws IOException */ public boolean truncateLog(long zxid) throws IOException { // close the existing txnLog and snapLog close(); // truncate it FileTxnLog truncLog = new FileTxnLog(dataDir); boolean truncated = truncLog.truncate(zxid); truncLog.close(); // re-open the txnLog and snapLog // I'd rather just close/reopen this object itself, however that // would have a big impact outside ZKDatabase as there are other // objects holding a reference to this object. txnLog = new FileTxnLog(dataDir); snapLog = new FileSnap(snapDir); return truncated; }
/**
 * Finds the most recent valid snapshot in the database.
 *
 * @return the file containing the most recent snapshot, or null if none exists
 */
public File findMostRecentSnapshot() throws IOException {
    List<File> valid = findNValidSnapshots(1);
    if (valid.isEmpty()) {
        return null;
    }
    return valid.get(0);
}
/** * serialize the datatree and session into the file snapshot * @param dt the datatree to be serialized * @param sessions the sessions to be serialized * @param snapShot the file to store snapshot into */ public synchronized void serialize(DataTree dt, Map<Long, Integer> sessions, File snapShot) throws IOException { if (!close) { OutputStream sessOS = new BufferedOutputStream(new FileOutputStream(snapShot)); CheckedOutputStream crcOut = new CheckedOutputStream(sessOS, new Adler32()); //CheckedOutputStream cout = new CheckedOutputStream() OutputArchive oa = BinaryOutputArchive.getArchive(crcOut); FileHeader header = new FileHeader(SNAP_MAGIC, VERSION, dbId); serialize(dt,sessions,oa, header); long val = crcOut.getChecksum().getValue(); oa.writeLong(val, "val"); oa.writeString("/", "path"); sessOS.flush(); crcOut.close(); sessOS.close(); } }
// NOTE(review): truncated test excerpt — the first two expressions have
// unbalanced closing parens (the Assert calls they belong to are cut off).
// Only the final assertion is complete: after swapping in a fresh FileSnap
// and saving once, exactly one recent snapshot is expected. Verify against
// the full test source before editing.
((FileSnap) snapLog.snapLog).findNRecentSnapshots(10).size()); ((FileSnap) snapLog.snapLog).findNRecentSnapshots(10).size()); snapLog.snapLog = new FileSnap(snapLog.dataDir); snapLog.save(tree, new ConcurrentHashMap<Long, Integer>(), false); Assert.assertEquals(1, ((FileSnap) snapLog.snapLog).findNRecentSnapshots(10).size());