// Resolver callback: fetches block locations for the resolved path via the
// DFS client. `start` and `len` are presumably captured from an enclosing
// getFileBlockLocations call — the enclosing scope is outside this view.
@Override
public BlockLocation[] doCall(final Path p)
    throws IOException, UnresolvedLinkException {
  return dfs.getBlockLocations(getPathName(p), start, len);
}
// NOTE(review): this trailing annotation belongs to the next overridden
// method, which is cut off outside this view.
@Override
// Resolver callback: delegates to DFSClient#getBlockLocations after the
// symlink-resolving machinery supplies the concrete path `p`. `start` and
// `len` come from an enclosing scope not visible here.
@Override
public BlockLocation[] doCall(final Path p)
    throws IOException, UnresolvedLinkException {
  return dfs.getBlockLocations(getPathName(p), start, len);
}
// NOTE(review): trailing annotation for the following method, cut off here.
@Override
/**
 * Returns the block locations of {@code file} that overlap the byte range
 * {@code [start, start + len)}.
 *
 * @param file the file whose blocks are queried; {@code null} yields
 *     {@code null} (legacy contract preserved)
 * @param start first byte of the range of interest
 * @param len length of the range of interest
 * @return the matching block locations, or {@code null} for a null file
 * @throws IOException if the NameNode query fails
 */
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
    long len) throws IOException {
  return file == null
      ? null
      : dfs.getBlockLocations(getPathName(file.getPath()), start, len);
}
// Resolver callback variant without UnresolvedLinkException in its throws
// clause: forwards to DFSClient#getBlockLocations. `start` and `len` are
// presumably captured from an enclosing method — not visible here.
@Override
public BlockLocation[] doCall(final Path p) throws IOException {
  return dfs.getBlockLocations(getPathName(p), start, len);
}
// NOTE(review): trailing annotation for the following method, cut off here.
@Override
/**
 * Returns the block locations of the file at {@code p} covering the byte
 * range {@code [start, start + len)}.
 *
 * @param p path of the file to query
 * @param start first byte of the range of interest
 * @param len length of the range of interest
 * @return block locations for the requested range
 * @throws IOException if the query fails
 * @throws UnresolvedLinkException if {@code p} contains an unresolved symlink
 */
@Override
public BlockLocation[] getFileBlockLocations(Path p, long start, long len)
    throws IOException, UnresolvedLinkException {
  final String src = getUriPath(p);
  return dfs.getBlockLocations(src, start, len);
}
/**
 * Looks up the block locations of {@code file} for the byte range
 * {@code [start, start + len)}.
 *
 * @param file the file to query; a {@code null} file returns {@code null}
 *     (existing contract, kept for callers that rely on it)
 * @param start first byte of the range of interest
 * @param len length of the range of interest
 * @return block locations for the range, or {@code null}
 * @throws IOException if the NameNode query fails
 */
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
    long len) throws IOException {
  if (file == null) {
    return null;
  }
  final String src = getPathName(file.getPath());
  return dfs.getBlockLocations(src, start, len);
}
/**
 * Fetches the block locations of the file at {@code p} for the byte range
 * {@code [start, start + len)} by delegating to the DFS client.
 *
 * @param p path of the file to query
 * @param start first byte of the range of interest
 * @param len length of the range of interest
 * @return block locations for the requested range
 * @throws IOException if the query fails
 * @throws UnresolvedLinkException if {@code p} contains an unresolved symlink
 */
@Override
public BlockLocation[] getFileBlockLocations(Path p, long start, long len)
    throws IOException, UnresolvedLinkException {
  final String resolved = getUriPath(p);
  return dfs.getBlockLocations(resolved, start, len);
}
/**
 * The returned BlockLocation will have different formats for replicated
 * and erasure coded file.
 *
 * Please refer to
 * {@link FileContext#getFileBlockLocations(Path, long, long)}
 * for more details.
 *
 * @param p path of the file to query
 * @param start first byte of the range of interest
 * @param len length of the range of interest
 * @return block locations covering the requested range
 * @throws IOException if the query fails
 * @throws UnresolvedLinkException if {@code p} contains an unresolved symlink
 */
@Override
public BlockLocation[] getFileBlockLocations(Path p, long start, long len)
    throws IOException, UnresolvedLinkException {
  return dfs.getBlockLocations(getUriPath(p), start, len);
}
// Iterator#next for a located-status listing: serves the pre-fetched
// FileStatus entries in `stats` (cursor `i`), attaching block locations to
// each regular file. `src`, `stats`, `i`, and hasNext() come from the
// enclosing anonymous class, which begins outside this view.
@Override
public LocatedFileStatus next() throws IOException {
  if (!hasNext()) {
    throw new NoSuchElementException("No more entry in " + src);
  }
  FileStatus result = stats[i++];
  // Directories carry no block data, so their locations are null.
  BlockLocation[] locs = result.isDir() ? null : getBlockLocations(
      result.getPath().toUri().getPath(), 0, result.getLen());
  return new LocatedFileStatus(result, locs);
}
}; // closes the anonymous iterator class whose header is outside this view
/**
 * Get hints about the location of the indicated block(s).
 *
 * getHints() returns a list of hostnames that store data for
 * a specific file region. It returns a set of hostnames for
 * every block within the indicated region.
 *
 * This function is very useful when writing code that considers
 * data-placement when performing operations. For example, the
 * MapReduce system tries to schedule tasks on the same machines
 * as the data-block the task processes.
 *
 * @param src the file whose block hosts are queried
 * @param start first byte of the region of interest
 * @param length length of the region of interest
 * @return one hostname array per block in the region; empty if the file has
 *     no blocks in the region
 * @throws IOException if the block-location lookup fails
 * @deprecated Use getBlockLocations instead
 */
@Deprecated
public String[][] getHints(String src, long start, long length)
    throws IOException {
  BlockLocation[] blkLocations = getBlockLocations(src, start, length);
  if ((blkLocations == null) || (blkLocations.length == 0)) {
    return new String[0][];
  }
  int blkCount = blkLocations.length;
  String[][] hints = new String[blkCount][];
  for (int i = 0; i < blkCount; i++) {
    // Fix: the original allocated a fresh String[hosts.length] here and then
    // immediately overwrote the reference with getHosts()'s array — a dead
    // store. Assign the hosts array directly.
    hints[i] = blkLocations[i].getHosts();
  }
  return hints;
}
/**
 * Get hints about the location of the indicated block(s).
 *
 * getHints() returns a list of hostnames that store data for
 * a specific file region. It returns a set of hostnames for
 * every block within the indicated region.
 *
 * This function is very useful when writing code that considers
 * data-placement when performing operations. For example, the
 * MapReduce system tries to schedule tasks on the same machines
 * as the data-block the task processes.
 *
 * @param src the file whose block hosts are queried
 * @param start first byte of the region of interest
 * @param length length of the region of interest
 * @return one hostname array per block in the region; empty if the file has
 *     no blocks in the region
 * @throws IOException if the block-location lookup fails
 * @deprecated Use getBlockLocations instead
 */
@Deprecated
public String[][] getHints(String src, long start, long length)
    throws IOException {
  BlockLocation[] blkLocations = getBlockLocations(src, start, length);
  if ((blkLocations == null) || (blkLocations.length == 0)) {
    return new String[0][];
  }
  int blkCount = blkLocations.length;
  String[][] hints = new String[blkCount][];
  for (int i = 0; i < blkCount; i++) {
    // Fix: drop the dead store — the original allocated
    // new String[hosts.length] and then immediately replaced the reference
    // with the array returned by getHosts().
    hints[i] = blkLocations[i].getHosts();
  }
  return hints;
}
void writeFile(Path file, FSDataOutputStream stm, int size) throws IOException { long blocksBefore = stm.getPos() / BLOCK_SIZE; TestFileCreation.writeFile(stm, BLOCK_SIZE); int blocksAfter = 0; // wait until the block is allocated by DataStreamer BlockLocation[] locatedBlocks; while(blocksAfter <= blocksBefore) { locatedBlocks = hdfs.getClient().getBlockLocations( file.toString(), 0L, BLOCK_SIZE*NUM_BLOCKS); blocksAfter = locatedBlocks == null ? 0 : locatedBlocks.length; } }
// Re-fetches this file's located blocks from the client and caches both the
// raw LocatedBlocks and its block list, then clears the refetch flag.
// NOTE(review): the no-arg getBlockLocations() read on the second line is
// presumably an accessor for the field just assigned — confirm against the
// enclosing class, which is outside this view.
void refetchBlocks() throws IOException {
  this.blockLocations = getClient().getBlockLocations(getSrc(), getLength());
  this.locatedBlocks = getBlockLocations().getLocatedBlocks();
  this.isRefetchBlocks = false;
}
/**
 * Test helper: waits for {@code p} to reach replication factor 3, then
 * returns its block locations, asserting the file is a single block
 * hosted on exactly three nodes.
 *
 * @param p path of the file under test
 * @return the file's block locations (length 1)
 * @throws Exception if replication does not complete or the query fails
 */
private BlockLocation[] getBlockLocations(Path p) throws Exception {
  DFSTestUtil.waitReplication(dfs, p, (short) 3);
  final String src = p.toUri().getPath();
  BlockLocation[] locs =
      dfs.getClient().getBlockLocations(src, 0, Long.MAX_VALUE);
  assertTrue(locs.length == 1 && locs[0].getHosts().length == 3);
  return locs;
}
/**
 * Computes a checksum over the first {@code length} bytes of {@code src},
 * selecting a striped computer when the file has an erasure coding policy
 * and a replicated computer otherwise.
 *
 * @param src the file to checksum
 * @param length number of leading bytes to cover; must be non-negative
 * @param combineMode how per-block checksums are combined
 * @return the computed file checksum
 * @throws IOException if the client is closed or the computation fails
 */
private FileChecksum getFileChecksumInternal(
    String src, long length, ChecksumCombineMode combineMode)
    throws IOException {
  checkOpen();
  Preconditions.checkArgument(length >= 0);

  // A zero-length request skips the block lookup entirely; both fields
  // then stay null and the replicated computer is chosen below.
  LocatedBlocks blocks = null;
  ErasureCodingPolicy ecPolicy = null;
  if (length > 0) {
    blocks = getBlockLocations(src, length);
    ecPolicy = blocks.getErasureCodingPolicy();
  }

  final FileChecksumHelper.FileChecksumComputer computer;
  if (ecPolicy != null) {
    computer = new FileChecksumHelper.StripedFileNonStripedChecksumComputer(
        src, length, blocks, namenode, this, ecPolicy, combineMode);
  } else {
    computer = new FileChecksumHelper.ReplicatedFileChecksumComputer(
        src, length, blocks, namenode, this, combineMode);
  }
  computer.compute();
  return computer.getFileChecksum();
}
// Test helper: writes one BLOCK_SIZE block, flushes it to the DataNodes,
// then busy-waits until the DataStreamer has allocated the new block.
// `size` is unused — the write is always BLOCK_SIZE bytes.
void writeFile(Path file, FSDataOutputStream stm, int size)
    throws IOException {
  long blocksBefore = stm.getPos() / BLOCK_SIZE;
  TestFileCreation.writeFile(stm, BLOCK_SIZE);
  // need to make sure the full block is completely flushed to the DataNodes
  // (see FSOutputSummer#flush)
  stm.flush();
  int blocksAfter = 0;
  // wait until the block is allocated by DataStreamer
  BlockLocation[] locatedBlocks;
  while (blocksAfter <= blocksBefore) {
    locatedBlocks = DFSClientAdapter.getDFSClient(hdfs).getBlockLocations(
        file.toString(), 0L, BLOCK_SIZE * NUM_BLOCKS);
    blocksAfter = locatedBlocks == null ? 0 : locatedBlocks.length;
  }
}
// NOTE(review): fragment — the receiver expression and the declaration this
// first statement assigns to begin outside this view; presumably it fetches
// the file's first block location and then its underlying LocatedBlock.
.getBlockLocations(file.toString(), 0, BLOCK_SIZE)[0];
LocatedBlock lastBlock = blockLocation.getLocatedBlock();