/**
 * This method gets the pipeline for the current WAL.
 */
@Override
DatanodeInfo[] getPipeline() {
  if (this.hdfs_out != null) {
    if (this.hdfs_out.getWrappedStream() instanceof DFSOutputStream) {
      return ((DFSOutputStream) this.hdfs_out.getWrappedStream()).getPipeline();
    }
  }
  return new DatanodeInfo[0];
}
}
DatanodeInfo[] pipeline = out.getPipeline();
for (DatanodeInfo node : pipeline) {
  assertFalse("Write should be going on", failed.get());
  out.write("testagain".getBytes());
  assertTrue("There should be at least 2 nodes in pipeline still",
      out.getPipeline().length >= 2);
  out.close();
} finally {
/**
 * This method gets the pipeline for the current walog.
 *
 * @return non-null array of DatanodeInfo
 */
DatanodeInfo[] getPipeLine() {
  if (logFile != null) {
    OutputStream os = logFile.getWrappedStream();
    if (os instanceof DFSOutputStream) {
      return ((DFSOutputStream) os).getPipeline();
    }
  }
  // Don't have a pipeline or can't figure it out.
  return EMPTY_PIPELINE;
}
/**
 * This method gets the pipeline for the current walog.
 *
 * @return non-null array of DatanodeInfo
 */
DatanodeInfo[] getPipeLine() {
  if (null != logFile) {
    OutputStream os = logFile.getWrappedStream();
    if (os instanceof DFSOutputStream) {
      return ((DFSOutputStream) os).getPipeline();
    }
  }
  // Don't have a pipeline or can't figure it out.
  return EMPTY_PIPELINE;
}
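// EMPTY_PIPELINE is not declared in either walog excerpt above; a minimal
// sketch of the constant they presumably share, kept as a single reusable
// zero-length array so the accessor can honor its non-null contract.
private static final DatanodeInfo[] EMPTY_PIPELINE = new DatanodeInfo[0];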
/**
 * This method gets the pipeline for the current WAL.
 */
@VisibleForTesting
DatanodeInfo[] getPipeLine() {
  if (this.hdfs_out != null) {
    if (this.hdfs_out.getWrappedStream() instanceof DFSOutputStream) {
      return ((DFSOutputStream) this.hdfs_out.getWrappedStream()).getPipeline();
    }
  }
  return new DatanodeInfo[0];
}
}
stm.write(buffer, 0, mid);
DatanodeInfo[] targets = dfstream.getPipeline();
int count = 5;
// Poll until a pipeline is reported; the sleep body was elided in the
// original excerpt, so the interval below is an assumption.
while (count-- > 0 && targets == null) {
  try {
    Thread.sleep(100);
  } catch (InterruptedException e) {
    // ignore and retry
  }
  targets = dfstream.getPipeline();
}
DatanodeInfo[] orgNodes = dfsOut.getPipeline();
DatanodeInfo[] newNodes = dfsOut.getPipeline();
out.close();
DFSOutputStream dfstream = (DFSOutputStream) out.getWrappedStream();
DatanodeInfo[] targets = dfstream.getPipeline();
cluster.stopDataNode(targets[0].getXferAddr());
out.hflush();
assertFalse("The first datanode in the pipeline was not replaced.",
    Arrays.asList(dfstream.getPipeline()).contains(targets[0]));
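// The replacement assertion above only holds if the client is configured to
// swap a failed datanode back into the write pipeline. A minimal sketch of
// that setup, assuming a MiniDFSCluster-based test; the property names are
// the standard HDFS client keys, but this exact configuration is an
// assumption, not taken from the excerpt.
Configuration conf = new HdfsConfiguration();
conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "ALWAYS");
conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();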
int count = 0;
while (pipeline == null && count < 5) {
  pipeline = dout.getPipeline();
  System.out.println("Waiting for pipeline to be created.");
  Thread.sleep(1000);
  count++; // assumed: the increment was cut off in the original excerpt
}
for (DatanodeInfo dni : streams[i].getPipeline()) {
  DatanodeDescriptor dnd = dnm.getDatanode(dni);
  expectedTotalLoad -= 2;