/**
 * Wraps an already-created lock file and records an initial progress entry.
 *
 * @param fs             file system holding the lock file
 * @param lockFile       path of the lock file
 * @param lockFileStream open output stream to the lock file
 * @param spoutId        id of the spout that owns this lock
 * @throws IOException if writing the initial progress entry fails
 */
private FileLock(FileSystem fs, Path lockFile, FSDataOutputStream lockFileStream, String spoutId)
    throws IOException {
    this.fs = fs;
    this.lockFile = lockFile;
    this.lockFileStream = lockFileStream;
    this.componentID = spoutId;
    // Seed the lock file with a starting offset of "0"; the second argument
    // presumably distinguishes heartbeat entries — see logProgress.
    logProgress("0", false);
}
/** Validates that the configured value names a usable reader type. */
@Override
public void validateField(String name, Object value) {
    // Delegate the actual check to the spout's reader validation logic.
    HdfsSpout.checkValidReader((String) value);
}
}
/** Returns an independent copy of this offset. */
@Override
public Offset clone() {
    return new Offset(charOffset, lineNumber);
}
} //class Offset
/**
 * Releases the given file lock (if any) and logs the outcome.
 * A failed release is logged but never propagated to the caller.
 *
 * @param fLock   lock to release; may be null, in which case this is a no-op
 * @param spoutId id of the spout releasing the lock (for log context)
 */
private static void releaseLockAndLog(FileLock fLock, String spoutId) {
    if (fLock == null) {
        return;
    }
    try {
        fLock.release();
        LOG.debug("Spout {} released FileLock. SpoutId = {}", fLock.getLockFile(), spoutId);
    } catch (IOException e) {
        // Parameterized logging instead of string concatenation; the exception
        // is passed as the final argument so the stack trace is preserved.
        LOG.error("Unable to delete lock file : {} SpoutId = {}", fLock.getLockFile(), spoutId, e);
    }
}
/**
 * Mock override that injects failures at fixed read attempts:
 * attempts 3 and 4 throw IOException, attempts 6+ throw ParseException,
 * all other attempts delegate to the real reader.
 */
@Override
public List<Object> next() throws IOException, ParseException {
    readAttempts++;
    switch (readAttempts) {
        case 3:
        case 4:
            throw new IOException("mock test exception");
        default:
            if (readAttempts > 5) {
                throw new ParseException("mock test exception", null);
            }
            return super.next();
    }
}
}
/**
 * Archives the file currently being read and resets reader state.
 * Archival failures are logged and swallowed so the spout can continue.
 *
 * @param filePath path of the completed file (used for error reporting)
 */
private void markFileAsDone(Path filePath) {
    try {
        // NOTE(review): renames the reader's current file rather than the
        // filePath argument — presumably the same path; confirm with callers.
        Path newFile = renameCompletedFile(reader.getFilePath());
        LOG.info("Completed processing {}. Spout Id = {}", newFile, spoutId);
    } catch (IOException e) {
        // Parameterized logging; also restores the missing space that the
        // original concatenation dropped between "file" and the path.
        LOG.error("Unable to archive completed file {} Spout ID {}", filePath, spoutId, e);
    }
    closeReaderAndResetTrackers();
}
/** Opens the given spout against a fresh mock collector and topology context. */
private void openSpout(HdfsSpout spout, int spoutId, Map<String, Object> topoConf) {
    final MockCollector mockCollector = new MockCollector();
    final MockTopologyContext context = new MockTopologyContext(spoutId, topoConf);
    spout.open(topoConf, context, mockCollector);
}
public synchronized void recordAckedOffset(FileOffset newOffset) { if (newOffset == null) { return; } offsets.add(newOffset); FileOffset currHead = offsets.first(); if (currHead.isNextOffset(newOffset)) { // check is a minor optimization trimHead(); } }
/** Formats the reader's file path and current offset as "&lt;path&gt; &lt;offset&gt;". */
private static String getFileProgress(FileReader reader) {
    StringBuilder progress = new StringBuilder();
    progress.append(reader.getFilePath()).append(' ').append(reader.getFileOffset());
    return progress.toString();
}
/** Creates thdCount dir-locking threads on the given directory, then starts them all. */
private DirLockingThread[] startThreads(int thdCount, Path dir) throws IOException {
    final DirLockingThread[] threads = new DirLockingThread[thdCount];
    // Construct every thread before any of them is started.
    for (int i = 0; i < thdCount; i++) {
        threads[i] = new DirLockingThread(i, fs, dir);
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }
    return threads;
}
/** Creates thdCount file-deletion threads on the given file, then starts them all. */
private FileDeletionThread[] startThreads(int thdCount, Path file) throws IOException {
    final FileDeletionThread[] threads = new FileDeletionThread[thdCount];
    // Construct every thread before any of them is started.
    for (int i = 0; i < thdCount; i++) {
        threads[i] = new FileDeletionThread(i, fs, file);
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }
    return threads;
}
/** Returns an independent copy of this offset's tracking state. */
@Override
public Offset clone() {
    return new Offset(lastSyncPoint, recordsSinceLastSync, currentRecord,
                      currRecordEndOffset, prevRecordEndOffset);
}
/** Returns a defensive copy of the reader's current offset. */
public Offset getFileOffset() {
    return offset.clone();
}
/** Closes the wrapped spout when this resource is closed. */
@Override
public void close() throws Exception {
    spout.close();
}
}
/** Stream id is irrelevant for this mock; delegates to the default-stream emit. */
@Override
public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) {
    return emit(tuple, messageId);
}
/**
 * Drops leading offsets as long as each is immediately followed by the next
 * element in the set, always keeping at least one element.
 * Iterative form of the original tail recursion — identical behavior.
 */
private synchronized void trimHead() {
    while (offsets.size() > 1) {
        FileOffset smallest = offsets.first();
        FileOffset successor = offsets.higher(smallest);
        if (!smallest.isNextOffset(successor)) {
            break;
        }
        offsets.pollFirst();
    }
}
// Convenience constructor: starts reading from the beginning of the file
// (char offset 0, line number 0).
public TextFileReader(FileSystem fs, Path file, Map<String, Object> conf) throws IOException { this(fs, file, conf, new TextFileReader.Offset(0, 0)); }
/**
 * Records a heartbeat progress entry for the given file offset.
 *
 * @param fileOffset serialized offset to record
 * @throws IOException if the progress entry cannot be written
 */
public void heartbeat(String fileOffset) throws IOException {
    // 'true' presumably flags this entry as a heartbeat — see logProgress.
    logProgress(fileOffset, true);
}
// Convenience constructor: resumes reading from a previously serialized
// offset string (parsed by the Offset(String) constructor).
public TextFileReader(FileSystem fs, Path file, Map<String, Object> conf, String startOffset) throws IOException { this(fs, file, conf, new TextFileReader.Offset(startOffset)); }