/** Closes the wrapped input stream; repeated calls after a successful close are no-ops. */
@Override
public void close() throws IOException {
  if (!mClosed) {
    // Mark closed only after a successful close so a failed attempt can be retried.
    mInputStream.close();
    mClosed = true;
  }
}
/**
 * Closes the underlying open streams.
 *
 * <p>Uses nested try/finally so that the output stream is closed and the offset is
 * reset even when closing the input stream throws (the original leaked {@code mOut}
 * in that case).
 *
 * @throws IOException if closing either stream fails
 */
@Override
public void close() throws IOException {
  try {
    if (mIn != null) {
      mIn.close();
    }
  } finally {
    try {
      if (mOut != null) {
        mOut.close();
      }
    } finally {
      mOffset = -1;
    }
  }
}
}
/**
 * Reads the previously written file and verifies that it contains the integers
 * {@code 0..NUMBERS-1} in native byte order.
 *
 * <p>Fixes two defects in the original: {@code InputStream.read(byte[])} may return
 * fewer bytes than requested, so the buffer is now filled in a loop; and the stream
 * is opened in try-with-resources so it is closed even if a read throws.
 *
 * @param fileSystem the {@link FileSystem} client to read through
 * @return true if every integer read matches its expected value
 * @throws IOException if a non-Alluxio I/O error occurs
 * @throws AlluxioException if an Alluxio error occurs
 */
private boolean readFile(FileSystem fileSystem) throws IOException, AlluxioException {
  boolean pass = true;
  LOG.debug("Reading data...");
  final long startTimeMs = CommonUtils.getCurrentMs();
  try (FileInStream is = fileSystem.openFile(mFilePath, mReadOptions)) {
    ByteBuffer buf = ByteBuffer.allocate((int) is.remaining());
    byte[] data = buf.array();
    // read() is not guaranteed to fill the array in one call; loop until EOF or full.
    int total = 0;
    while (total < data.length) {
      int n = is.read(data, total, data.length - total);
      if (n == -1) {
        break;
      }
      total += n;
    }
    buf.order(ByteOrder.nativeOrder());
    for (int k = 0; k < NUMBERS; k++) {
      pass = pass && (buf.getInt() == k);
    }
  }
  LOG.info(FormatUtils.formatTimeTakenMs(startTimeMs, "readFile file " + mFilePath));
  return pass;
}
}
/**
 * Reads each of the {@code mNumFiles} part files and verifies that each contains the
 * integers {@code 0..mNumFiles-1} in native byte order.
 *
 * <p>Fixes two defects in the original: the return value of {@code read(byte[])} was
 * ignored even though it may read fewer bytes than requested, and the stream was
 * leaked if a read threw. The buffer is now filled in a loop inside
 * try-with-resources.
 *
 * @param fs the {@link FileSystem} client to read through
 * @return true if every integer in every file matches its expected value
 * @throws IOException if a non-Alluxio I/O error occurs
 * @throws AlluxioException if an Alluxio error occurs
 */
private boolean readFile(FileSystem fs) throws IOException, AlluxioException {
  boolean pass = true;
  for (int i = 0; i < mNumFiles; i++) {
    AlluxioURI filePath = new AlluxioURI(mFileFolder + "/part-" + i);
    LOG.debug("Reading data from {}", filePath);
    URIStatus status = fs.getStatus(filePath);
    try (FileInStream is = fs.openFile(filePath)) {
      ByteBuffer buf = ByteBuffer.allocate((int) status.getBlockSizeBytes());
      byte[] data = buf.array();
      // Fill the buffer fully; a single read() call may return early.
      int total = 0;
      while (total < data.length) {
        int n = is.read(data, total, data.length - total);
        if (n == -1) {
          break;
        }
        total += n;
      }
      buf.order(ByteOrder.nativeOrder());
      for (int k = 0; k < mNumFiles; k++) {
        pass = pass && (buf.getInt() == k);
      }
    }
  }
  return pass;
}
// Close the input stream, then report completion to stdout.
is.close(); System.out.println("done");
/**
 * Tests that reading dataRead bytes into a buffer will properly write those bytes to
 * the cache streams and that the correct bytes are read from the
 * {@link FileInStream}.
 *
 * <p>Also asserts the count returned by {@code read(byte[])}, which the original
 * discarded — a short read would previously pass as long as the prefix matched.
 *
 * @param dataRead the bytes to read
 */
private void testReadBuffer(int dataRead) throws Exception {
  byte[] buffer = new byte[dataRead];
  assertEquals(dataRead, mTestStream.read(buffer));
  mTestStream.close();
  assertArrayEquals(BufferUtils.getIncreasingByteArray(dataRead), buffer);
}
/**
 * Reads the file one byte at a time and checks that every byte equals its offset
 * modulo 256.
 */
@Test
public void singleByteRead() throws Exception {
  for (int pos = 0; pos < FILE_LENGTH; pos++) {
    int value = mTestStream.read();
    assertEquals(pos & 0xff, value);
  }
  mTestStream.close();
}
/** * Read through the file in small chunks and verify each chunk. */ @Test public void readManyChunks() throws IOException { int chunksize = 10; // chunksize must divide FILE_LENGTH evenly for this test to work assertEquals(0, FILE_LENGTH % chunksize); byte[] buffer = new byte[chunksize]; int offset = 0; for (int i = 0; i < FILE_LENGTH / chunksize; i++) { mTestStream.read(buffer, 0, chunksize); assertArrayEquals(BufferUtils.getIncreasingByteArray(offset, chunksize), buffer); offset += chunksize; } mTestStream.close(); }
@Test public void seekAndClose() throws IOException { OpenFilePOptions options = OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build(); mTestStream = new FileInStream(mStatus, new InStreamOptions(mStatus, options, sConf), mContext); int seekAmount = (int) (BLOCK_LENGTH / 2); mTestStream.seek(seekAmount); mTestStream.close(); // Block 0 is cached though it is not fully read. validatePartialCaching(0, 0); }
/** * Tests seeking with incomplete block caching enabled. It seeks forward for more than a block * and then seek to the file beginning. */ @Test public void seekBackwardToFileBeginning() throws IOException { OpenFilePOptions options = OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build(); mTestStream = new FileInStream(mStatus, new InStreamOptions(mStatus, options, sConf), mContext); int seekAmount = (int) (BLOCK_LENGTH / 4 + BLOCK_LENGTH); // Seek forward. mTestStream.seek(seekAmount); // Block 1 is partially cached though it is not fully read. validatePartialCaching(1, 0); // Seek backward. mTestStream.seek(0); // Block 1 is fully cached though it is not fully read. validatePartialCaching(1, 0); mTestStream.close(); // block 0 is cached validatePartialCaching(0, 0); }
/**
 * Tests seeking with incomplete block caching enabled. It reads part of block 0,
 * then seeks forward for more than a block, and verifies the partially read blocks
 * are still cached.
 */
@Test public void longSeekForwardCachingPartiallyReadBlocks() throws IOException { OpenFilePOptions options = OpenFilePOptions.newBuilder().setReadType(ReadPType.CACHE_PROMOTE).build(); mTestStream = new FileInStream(mStatus, new InStreamOptions(mStatus, options, sConf), mContext); int seekAmount = (int) (BLOCK_LENGTH / 4 + BLOCK_LENGTH); int readAmount = (int) (BLOCK_LENGTH / 2); byte[] buffer = new byte[readAmount]; mTestStream.read(buffer); // Seek forward past the end of block 1 (the original comment said "backward"). mTestStream.seek(readAmount + seekAmount); // Block 0 is cached though it is not fully read. validatePartialCaching(0, readAmount); // Block 1 is being cached though its prefix is not read. validatePartialCaching(1, 0); mTestStream.close(); validatePartialCaching(1, 0); }
/** Closes the underlying input stream exactly once; later calls do nothing. */
@Override
public void close() throws IOException {
  if (mClosed) {
    return;
  }
  mInputStream.close();
  // Flag is set only after the close succeeds, so a throwing close is retriable.
  mClosed = true;
}
/**
 * Closes the Alluxio and HDFS input streams if they are open and marks this stream
 * closed.
 *
 * <p>Uses nested try/finally so the HDFS stream is closed and {@code mClosed} is set
 * even when closing the Alluxio stream throws (the original leaked
 * {@code mHdfsInputStream} in that case).
 *
 * @throws IOException if closing either underlying stream fails
 */
@Override
public void close() throws IOException {
  try {
    if (mAlluxioFileInputStream != null) {
      mAlluxioFileInputStream.close();
    }
  } finally {
    try {
      if (mHdfsInputStream != null) {
        mHdfsInputStream.close();
      }
    } finally {
      mClosed = true;
    }
  }
}
/**
 * Reads the previously written file and verifies that it contains the integers
 * {@code 0..NUMBERS-1} in native byte order.
 *
 * <p>Fixes two defects in the original: {@code InputStream.read(byte[])} may return
 * fewer bytes than requested, so the buffer is now filled in a loop; and the stream
 * is opened in try-with-resources so it is closed even if a read throws.
 *
 * @param fileSystem the {@link FileSystem} client to read through
 * @return true if every integer read matches its expected value
 * @throws IOException if a non-Alluxio I/O error occurs
 * @throws AlluxioException if an Alluxio error occurs
 */
private boolean readFile(FileSystem fileSystem) throws IOException, AlluxioException {
  boolean pass = true;
  LOG.debug("Reading data...");
  final long startTimeMs = CommonUtils.getCurrentMs();
  try (FileInStream is = fileSystem.openFile(mFilePath, mReadOptions)) {
    ByteBuffer buf = ByteBuffer.allocate((int) is.remaining());
    byte[] data = buf.array();
    // read() is not guaranteed to fill the array in one call; loop until EOF or full.
    int total = 0;
    while (total < data.length) {
      int n = is.read(data, total, data.length - total);
      if (n == -1) {
        break;
      }
      total += n;
    }
    buf.order(ByteOrder.nativeOrder());
    for (int k = 0; k < NUMBERS; k++) {
      pass = pass && (buf.getInt() == k);
    }
  }
  LOG.info(FormatUtils.formatTimeTakenMs(startTimeMs, "readFile file " + mFilePath));
  return pass;
}
}
/**
 * Reads each of the {@code mNumFiles} part files and verifies that each contains the
 * integers {@code 0..mNumFiles-1} in native byte order.
 *
 * <p>Fixes two defects in the original: the return value of {@code read(byte[])} was
 * ignored even though it may read fewer bytes than requested, and the stream was
 * leaked if a read threw. The buffer is now filled in a loop inside
 * try-with-resources.
 *
 * @param fs the {@link FileSystem} client to read through
 * @return true if every integer in every file matches its expected value
 * @throws IOException if a non-Alluxio I/O error occurs
 * @throws AlluxioException if an Alluxio error occurs
 */
private boolean readFile(FileSystem fs) throws IOException, AlluxioException {
  boolean pass = true;
  for (int i = 0; i < mNumFiles; i++) {
    AlluxioURI filePath = new AlluxioURI(mFileFolder + "/part-" + i);
    LOG.debug("Reading data from {}", filePath);
    URIStatus status = fs.getStatus(filePath);
    try (FileInStream is = fs.openFile(filePath)) {
      ByteBuffer buf = ByteBuffer.allocate((int) status.getBlockSizeBytes());
      byte[] data = buf.array();
      // Fill the buffer fully; a single read() call may return early.
      int total = 0;
      while (total < data.length) {
        int n = is.read(data, total, data.length - total);
        if (n == -1) {
          break;
        }
        total += n;
      }
      buf.order(ByteOrder.nativeOrder());
      for (int k = 0; k < mNumFiles; k++) {
        pass = pass && (buf.getInt() == k);
      }
    }
  }
  return pass;
}
/**
 * Reads one byte, preferring the Alluxio stream and falling back to HDFS when the
 * Alluxio read fails.
 *
 * <p>Fixes a defect in the original: {@code mCurrentPosition} and the byte-read
 * statistic were updated even when the read returned -1 (EOF), so repeated reads at
 * EOF advanced the position past the end of the file.
 *
 * @return the byte read as an unsigned int, or -1 at end of stream
 * @throws IOException if the stream is closed or the HDFS fallback fails
 */
@Override
public int read() throws IOException {
  if (mClosed) {
    throw new IOException("Cannot read from a closed stream.");
  }
  if (mAlluxioFileInputStream != null) {
    try {
      int ret = mAlluxioFileInputStream.read();
      // Only account for the byte and advance the position when one was produced.
      if (ret != -1) {
        if (mStatistics != null) {
          mStatistics.incrementBytesRead(1);
        }
        mCurrentPosition++;
      }
      return ret;
    } catch (IOException e) {
      // Alluxio read failed; drop the Alluxio stream and fall back to HDFS.
      LOG.error(e.getMessage(), e);
      mAlluxioFileInputStream.close();
      mAlluxioFileInputStream = null;
    }
  }
  getHdfsInputStream();
  return readFromHdfsBuffer();
}
@Override public int read(byte[] b, int off, int len) throws IOException { if (mClosed) { throw new IOException("Cannot read from a closed stream."); } if (mAlluxioFileInputStream != null) { int ret = 0; try { ret = mAlluxioFileInputStream.read(b, off, len); if (mStatistics != null && ret != -1) { mStatistics.incrementBytesRead(ret); } mCurrentPosition += ret; return ret; } catch (IOException e) { LOG.error(e.getMessage(), e); mAlluxioFileInputStream.close(); mAlluxioFileInputStream = null; } } getHdfsInputStream(); int byteRead = readFromHdfsBuffer(); // byteRead is an unsigned byte, if its -1 then we have hit EOF if (byteRead == -1) { return -1; } // Convert byteRead back to a signed byte b[off] = (byte) byteRead; return 1; }
// Close the input stream, then report completion to stdout.
is.close(); System.out.println("done");