public synchronized boolean delete() throws IOException { // Close all open file handles... appender.close(); accessorPool.close(); boolean result = true; for (Iterator<DataFile> i = fileMap.values().iterator(); i.hasNext();) { DataFile dataFile = i.next(); result &= dataFile.delete(); } if (preAllocateNextDataFileFuture != null) { preAllocateNextDataFileFuture.cancel(true); } synchronized (dataFileIdLock) { if (nextDataFile != null) { nextDataFile.delete(); nextDataFile = null; } } totalLength.set(0); synchronized (currentDataFile) { fileMap.clear(); fileByFileMap.clear(); lastAppendLocation.set(null); dataFiles = new LinkedNodeList<DataFile>(); } // reopen open file handles... accessorPool = new DataFileAccessorPool(this); appender = new DataFileAppender(this); return result; }
@Override public Location storeItem(ByteSequence data, byte type, boolean sync) throws IOException { // Write the packet our internal buffer. int size = data.getLength() + RECORD_HEAD_SPACE; final Location location = new Location(); location.setSize(size); location.setType(type); Journal.WriteCommand write = new Journal.WriteCommand(location, data, sync); WriteBatch batch = enqueue(write); location.setBatch(batch); if (sync) { try { batch.latch.await(); } catch (InterruptedException e) { throw new InterruptedIOException(); } IOException exception = batch.exception.get(); if (exception != null) { throw exception; } } return location; }
// NOTE(review): fragment of the enqueue loop — the enclosing method and the
// closing braces for this `if` (and the loop the `break` exits) are outside
// this view. When no batch is pending, start a new batch targeting the
// current data file and wake the writer thread parked on enqueueMutex.
if (nextWriteBatch == null) {
    DataFile file = journal.getCurrentDataFile(write.location.getSize());
    nextWriteBatch = newWriteBatch(write, file);
    enqueueMutex.notifyAll();
    break;
protected void signalError(WriteBatch wb, Throwable t) { if (wb != null) { if (t instanceof IOException) { wb.exception.set((IOException) t); // revert sync batch increment such that next write is contiguous if (syncBatch(wb.writes)) { wb.dataFile.decrementLength(wb.size); } } else { wb.exception.set(IOExceptionSupport.create(t)); } signalDone(wb); } }
// NOTE(review): fragment spanning the tail of the writer loop and its error
// handling — the enclosing method/try structure is outside this view.
// Happy path: record the last append location and release the batch's
// waiters. Failure path: flag shutdown, fail the in-flight batch, and also
// fail any queued next batch so no waiter hangs, then wake enqueuers.
journal.setLastAppendLocation(lastWrite.location);
signalDone(wb);
shutdown = true;
running = false;
signalError(wb, error);
if (nextWriteBatch != null) {
    signalError(nextWriteBatch, error);
    nextWriteBatch = null;
    enqueueMutex.notifyAll();
// NOTE(review): fragment — run() body of an anonymous worker (presumably the
// appender's writer thread; the enclosing `new Thread(...)`/Runnable
// expression that the trailing `};` closes is outside this view). It simply
// drains the queued write batches.
@Override
public void run() {
    processQueue();
}
};
/**
 * Marks the given write batch as failed and releases any threads waiting on
 * it via signalDone. Non-IOException causes are wrapped through
 * IOExceptionSupport.create so waiters always observe an IOException.
 * A null batch is ignored.
 */
protected void signalError(WriteBatch wb, Throwable t) {
    if (wb != null) {
        if (t instanceof IOException) {
            wb.exception.set((IOException) t);
            // revert sync batch increment such that next write is contiguous
            if (syncBatch(wb.writes)) {
                wb.dataFile.decrementLength(wb.size);
            }
        } else {
            wb.exception.set(IOExceptionSupport.create(t));
        }
        signalDone(wb);
    }
}
// NOTE(review): duplicate fragment of the writer loop's tail and error
// handling (enclosing method not visible in this chunk). Success path records
// the last append location and releases waiters; failure path flags shutdown
// and fails both the in-flight and any queued batch before waking enqueuers.
journal.setLastAppendLocation(lastWrite.location);
signalDone(wb);
shutdown = true;
running = false;
signalError(wb, error);
if (nextWriteBatch != null) {
    signalError(nextWriteBatch, error);
    nextWriteBatch = null;
    enqueueMutex.notifyAll();
// NOTE(review): duplicate fragment — anonymous worker's run() delegating to
// processQueue(); the enclosing expression closed by `};` is outside this view.
@Override
public void run() {
    processQueue();
}
};
/**
 * Duplicate of signalError: records a failure on the batch (wrapping
 * non-IOException causes as IOException), rolls back the data-file length
 * reserved for a sync batch so the next write stays contiguous, and releases
 * waiters via signalDone. Null batch is a no-op.
 */
protected void signalError(WriteBatch wb, Throwable t) {
    if (wb != null) {
        if (t instanceof IOException) {
            wb.exception.set((IOException) t);
            // revert sync batch increment such that next write is contiguous
            if (syncBatch(wb.writes)) {
                wb.dataFile.decrementLength(wb.size);
            }
        } else {
            wb.exception.set(IOExceptionSupport.create(t));
        }
        signalDone(wb);
    }
}
// NOTE(review): duplicate fragment of the writer loop's tail / error handling
// (enclosing structure not visible). Records the last append location,
// releases waiters, then on error flags shutdown and fails the in-flight and
// queued batches before notifying enqueuers.
journal.setLastAppendLocation(lastWrite.location);
signalDone(wb);
shutdown = true;
running = false;
signalError(wb, error);
if (nextWriteBatch != null) {
    signalError(nextWriteBatch, error);
    nextWriteBatch = null;
    enqueueMutex.notifyAll();
@Override public Location storeItem(ByteSequence data, byte type, Runnable onComplete) throws IOException { // Write the packet our internal buffer. int size = data.getLength() + RECORD_HEAD_SPACE; final Location location = new Location(); location.setSize(size); location.setType(type); Journal.WriteCommand write = new Journal.WriteCommand(location, data, onComplete); location.setBatch(enqueue(write)); return location; }
// NOTE(review): fragment of the journal's start/initialization path (the
// enclosing method is outside this view): mark the journal started and choose
// the appender implementation based on the callerBufferAppender flag.
started = true;
appender = callerBufferAppender ? new CallerBufferingDataFileAppender(this) : new DataFileAppender(this);
// NOTE(review): duplicate fragment — anonymous worker's run() delegating to
// processQueue(); the enclosing expression closed by `};` is outside this view.
@Override
public void run() {
    processQueue();
}
};
// NOTE(review): duplicate fragment of the enqueue loop (enclosing method and
// closing braces outside this view): when no batch is pending, start a new
// batch on the current data file and wake the writer thread.
if (nextWriteBatch == null) {
    DataFile file = journal.getCurrentDataFile(write.location.getSize());
    nextWriteBatch = newWriteBatch(write, file);
    enqueueMutex.notifyAll();
    break;
/**
 * Duplicate of signalError: marks the batch failed (wrapping non-IOException
 * causes), reverts the sync-batch length reservation so the next write stays
 * contiguous, and releases waiters. Null batch is a no-op.
 */
protected void signalError(WriteBatch wb, Throwable t) {
    if (wb != null) {
        if (t instanceof IOException) {
            wb.exception.set((IOException) t);
            // revert sync batch increment such that next write is contiguous
            if (syncBatch(wb.writes)) {
                wb.dataFile.decrementLength(wb.size);
            }
        } else {
            wb.exception.set(IOExceptionSupport.create(t));
        }
        signalDone(wb);
    }
}
// NOTE(review): duplicate fragment of the writer loop's tail / error handling
// (enclosing structure not visible). Records the last append location,
// releases waiters, then on error flags shutdown and fails the in-flight and
// queued batches before notifying enqueuers.
journal.setLastAppendLocation(lastWrite.location);
signalDone(wb);
shutdown = true;
running = false;
signalError(wb, error);
if (nextWriteBatch != null) {
    signalError(nextWriteBatch, error);
    nextWriteBatch = null;
    enqueueMutex.notifyAll();
/**
 * Duplicate of the synchronous storeItem: appends a record and, when sync is
 * true, waits on the batch latch until the writer thread has flushed the
 * batch, then rethrows any IOException recorded for it.
 *
 * NOTE(review): on InterruptedException the interrupt status is not restored
 * and the cause is dropped — consider Thread.currentThread().interrupt() and
 * initCause before throwing.
 */
@Override
public Location storeItem(ByteSequence data, byte type, boolean sync) throws IOException {
    // Write the packet to our internal buffer.
    int size = data.getLength() + RECORD_HEAD_SPACE;
    final Location location = new Location();
    location.setSize(size);
    location.setType(type);
    Journal.WriteCommand write = new Journal.WriteCommand(location, data, sync);
    WriteBatch batch = enqueue(write);
    location.setBatch(batch);
    if (sync) {
        try {
            batch.latch.await();
        } catch (InterruptedException e) {
            throw new InterruptedIOException();
        }
        // Propagate any failure the writer thread recorded for this batch.
        IOException exception = batch.exception.get();
        if (exception != null) {
            throw exception;
        }
    }
    return location;
}
/**
 * Duplicate of delete(): closes all handles, deletes every data file, cancels
 * any pending pre-allocation, resets the in-memory maps/lists, and reopens a
 * fresh accessor pool and appender.
 *
 * NOTE(review): the return value of nextDataFile.delete() is ignored here, so
 * the method can report success even if the pre-allocated file was not
 * deleted — consider folding it into `result`.
 *
 * @return true only if every data file in fileMap was deleted successfully
 */
public synchronized boolean delete() throws IOException {
    // Close all open file handles...
    appender.close();
    accessorPool.close();
    boolean result = true;
    for (Iterator<DataFile> i = fileMap.values().iterator(); i.hasNext();) {
        DataFile dataFile = i.next();
        result &= dataFile.delete();
    }
    // Stop any in-flight pre-allocation of the next data file before deleting it.
    if (preAllocateNextDataFileFuture != null) {
        preAllocateNextDataFileFuture.cancel(true);
    }
    synchronized (dataFileIdLock) {
        if (nextDataFile != null) {
            nextDataFile.delete();
            nextDataFile = null;
        }
    }
    totalLength.set(0);
    synchronized (currentDataFile) {
        fileMap.clear();
        fileByFileMap.clear();
        lastAppendLocation.set(null);
        dataFiles = new LinkedNodeList<DataFile>();
    }
    // reopen open file handles...
    accessorPool = new DataFileAccessorPool(this);
    appender = new DataFileAppender(this);
    return result;
}
// NOTE(review): duplicate fragment — anonymous worker's run() delegating to
// processQueue(); the enclosing expression closed by `};` is outside this view.
@Override
public void run() {
    processQueue();
}
};