/**
 * Base constructor for a stream operation.
 *
 * @param stream name of the stream this operation targets
 * @param statsLogger stats logger used to record operation latency
 * @param checksum optional request checksum supplied by the client (may be null)
 * @param checksumDisabledFeature feature flag controlling checksum validation
 */
public AbstractStreamOp(String stream,
                        OpStatsLogger statsLogger,
                        Long checksum,
                        Feature checksumDisabledFeature) {
    this.stream = stream;
    this.opStatsLogger = statsLogger;
    this.checksum = checksum;
    this.checksumDisabledFeature = checksumDisabledFeature;
    // Begin timing in the constructor so latency is still captured when the
    // operation fails before it ever executes.
    stopwatch.reset().start();
}
/** Zeroes the timer and starts it running again. */
public void start() {
    m_timer.reset();
    m_timer.start();
}
public synchronized void scheduleBackgroundRead() { // if the reader is already closed, we don't need to schedule background read again. if (null != closeFuture) { return; } long prevCount = scheduleCount.getAndIncrement(); if (0 == prevCount) { scheduleDelayStopwatch.reset().start(); executorService.submit(this); } }
/**
 * Rolls the destination log file over to a timestamped name when it has been
 * open longer than the configured maximum number of minutes or has grown past
 * the configured byte limit.
 *
 * @throws IOException if a file system operation fails
 */
private void createNewLogFileIfNeeded() throws IOException {
    if (!LogCopier.this.destFs.exists(this.destLogFile)) {
        return;
    }
    boolean tooOld = this.watch.elapsed(TimeUnit.MINUTES) > LogCopier.this.maxMinutesPerLogFile;
    // The size lookup runs only when the age check fails, preserving the
    // original short-circuit evaluation order.
    boolean tooBig = !tooOld
            && LogCopier.this.destFs.getFileStatus(this.destLogFile).getLen() > LogCopier.this.maxBytesPerLogFile;
    if (tooOld || tooBig) {
        Path rotated = new Path(this.destLogFile.toString() + "." + System.currentTimeMillis());
        HadoopUtils.renamePath(LogCopier.this.destFs, this.destLogFile, rotated);
        this.watch.reset();
        this.watch.start();
    }
}
@Override public void run() { Stopwatch sw = Stopwatch.createUnstarted(); Map<String, GarbageCollectorMXBean> gcBeanMapBeforeSleep = getGarbageCollectorMXBeans(); while (true) { sw.reset().start(); try { Thread.sleep(mGcSleepIntervalMs); } catch (InterruptedException ie) { LOG.warn(ie.getStackTrace()); return; } long extraTime = sw.elapsed(TimeUnit.MILLISECONDS) - mGcSleepIntervalMs; mTotalExtraTimeMs += extraTime; Map<String, GarbageCollectorMXBean> gcBeanMapAfterSleep = getGarbageCollectorMXBeans(); if (extraTime > mWarnThresholdMs) { mInfoTimeExceeded++; mWarnTimeExceeded++; LOG.warn(formatLogString(extraTime, gcBeanMapBeforeSleep, gcBeanMapAfterSleep)); } else if (extraTime > mInfoThresholdMs) { mInfoTimeExceeded++; LOG.info(formatLogString( extraTime, gcBeanMapBeforeSleep, gcBeanMapAfterSleep)); } gcBeanMapBeforeSleep = gcBeanMapAfterSleep; } } }
/**
 * Continues the readahead loop: runs immediately when the cache has room,
 * otherwise registers a callback so reading resumes once the cache drains.
 */
private void complete() {
    if (!cacheFull) {
        run();
        return;
    }
    LOG.trace("Cache for {} is full. Backoff reading until notified", fullyQualifiedName);
    readAheadCacheFullCounter.inc();
    // Time how long we stay backed off before the cache notifies us.
    resumeStopWatch.reset().start();
    stopPromise = null;
    readAheadCache.setReadAheadCallback(ReadAheadWorker.this);
}
@Override protected EventCompletionCallBack getCompletionCallBack() { CompletionCallBack callbackToReturn = m_lastCallback; if (m_stopwatch.elapsed(TimeUnit.SECONDS) > m_secondsTillCheckpoint) { //System.out.println("Checkpoint"); callbackToReturn.setFinalized(); m_lastCallback = new CompletionCallBack(); callbackToReturn.setChildCallBack(m_lastCallback); m_stopwatch.reset(); m_stopwatch.start(); } return callbackToReturn; }
/**
 * Adds a ledger entry to the readahead cache and notifies any registered
 * listener that an operation has completed.
 *
 * @param key read position of the entry
 * @param entry the ledger entry
 * @param reason why the entry is being added (used for logging)
 * @param envelopeEntries whether the entry contains enveloped entries
 * @param startSequenceId the start sequence id
 */
public void set(LedgerReadPosition key, LedgerEntry entry, String reason,
                boolean envelopeEntries, long startSequenceId) {
    processNewLedgerEntry(key, entry, reason, envelopeEntries, startSequenceId);
    lastEntryProcessTime.reset().start();
    // Read the notification field once into a local before the null check —
    // presumably guards against a concurrent clear of the field (confirm).
    AsyncNotification listener = notification;
    if (listener != null) {
        listener.notifyOnOperationComplete();
    }
}
/**
 * Executes this operation against the writer and records its latency on
 * success. Restarts the stopwatch so the registered stat measures only the
 * execution phase.
 */
@Override public Future<Void> execute(AsyncLogWriter writer, Sequencer sequencer, Object txnLock) {
    stopwatch.reset().start();
    return executeOp(writer, sequencer, txnLock)
        .addEventListener(new FutureEventListener<Response>() {
            @Override
            public void onSuccess(Response response) {
                // Latency recorded in microseconds for the stats logger.
                opStatsLogger.registerSuccessfulEvent(stopwatch.elapsed(TimeUnit.MICROSECONDS));
                setResponse(response);
            }
            @Override
            public void onFailure(Throwable cause) {
                // NOTE(review): intentionally empty? Failure stats/response are
                // presumably recorded elsewhere (e.g. by the caller's failure
                // path) — confirm before adding handling here.
            }
        }).voided();
}
/**
 * Fills {@code t} with the requested number of generated records, printing
 * how long the generation took.
 *
 * @param t table to populate
 * @param numberOfRecordsInTable how many records to generate
 * @param stopwatch stopwatch reused for timing (reset here)
 * @throws IOException if record generation fails
 */
private static void generateTestData(Table t, int numberOfRecordsInTable, Stopwatch stopwatch)
        throws IOException {
    stopwatch.reset().start();
    out.println("Generating test data");
    generateData(numberOfRecordsInTable, t);
    long seconds = stopwatch.elapsed(TimeUnit.SECONDS);
    out.println("Time to generate " + numberOfRecordsInTable + " records: " + seconds + " seconds");
}
/**
 * Fills {@code t} with the requested number of generated records starting
 * from a fixed date (2008-01-01 00:00:00), printing how long it took.
 *
 * @param t table to populate
 * @param numberOfRecordsInTable how many records to generate
 * @param stopwatch stopwatch reused for timing (reset here)
 * @throws IOException if record generation fails
 */
private static void generateTestData(Table t, int numberOfRecordsInTable, Stopwatch stopwatch)
        throws IOException {
    stopwatch.reset().start();
    out.println("Generating test data");
    LocalDateTime startDateTime = LocalDateTime.of(2008, 1, 1, 0, 0, 0);
    generateData(numberOfRecordsInTable, startDateTime, t);
    long seconds = stopwatch.elapsed(TimeUnit.SECONDS);
    out.println("Time to generate " + numberOfRecordsInTable + " records: " + seconds + " seconds");
}
/** Resetting a running stopwatch must stop it and zero the elapsed time. */
public void testReset_whileRunning() {
    // Ticks before start() must not count toward elapsed time.
    ticker.advance(1);
    stopwatch.start();
    assertEquals(0, stopwatch.elapsed(NANOSECONDS));
    ticker.advance(2);
    assertEquals(2, stopwatch.elapsed(NANOSECONDS));
    stopwatch.reset();
    // reset() on a running stopwatch also stops it.
    assertFalse(stopwatch.isRunning());
    ticker.advance(3);
    // Stopped after reset, so further ticks are ignored.
    assertEquals(0, stopwatch.elapsed(NANOSECONDS));
}
/** Resetting a never-started stopwatch leaves it stopped at zero. */
public void testReset_new() {
    ticker.advance(1);
    stopwatch.reset();
    assertFalse(stopwatch.isRunning());
    ticker.advance(2);
    // Still stopped: ticks before start() are ignored.
    assertEquals(0, stopwatch.elapsed(NANOSECONDS));
    stopwatch.start();
    ticker.advance(3);
    // Only ticks after start() count.
    assertEquals(3, stopwatch.elapsed(NANOSECONDS));
}
/**
 * Builds a renderable mesh for the given chunk view, recording how long the
 * vertex-generation and buffer-optimization phases each took.
 *
 * @param chunkView view of the chunk data to mesh
 * @param meshHeight number of vertical blocks to include
 * @param verticalOffset first y level to include
 * @return the generated chunk mesh
 */
public ChunkMesh generateMesh(ChunkView chunkView, int meshHeight, int verticalOffset) {
    PerformanceMonitor.startActivity("GenerateMesh");
    ChunkMesh mesh = new ChunkMesh(bufferPool);

    // Phase 1: emit block vertices. The (x, z, y) iteration order matches the
    // original so vertex ordering in the mesh is unchanged.
    final Stopwatch timer = Stopwatch.createStarted();
    final int yEnd = verticalOffset + meshHeight;
    for (int x = 0; x < ChunkConstants.SIZE_X; x++) {
        for (int z = 0; z < ChunkConstants.SIZE_Z; z++) {
            for (int y = verticalOffset; y < yEnd; y++) {
                Block block = chunkView.getBlock(x, y, z);
                if (block == null || block.getMeshGenerator() == null) {
                    continue;
                }
                block.getMeshGenerator().generateChunkMesh(chunkView, mesh, x, y, z);
            }
        }
    }
    timer.stop();
    mesh.setTimeToGenerateBlockVertices((int) timer.elapsed(TimeUnit.MILLISECONDS));

    // Phase 2: build the optimized vertex buffers, timed separately.
    timer.reset().start();
    generateOptimizedBuffers(chunkView, mesh);
    timer.stop();
    mesh.setTimeToGenerateOptimizedBuffers((int) timer.elapsed(TimeUnit.MILLISECONDS));

    statVertexArrayUpdateCount++;
    PerformanceMonitor.endActivity();
    return mesh;
}
// NOTE(review): fragment of a benchmark harness; the enclosing method is not
// visible here.
System.out.printf(Locale.ENGLISH, "[%,d]: insert = %,d ms%n", numPoints, stop);
stopwatch.reset().start();
ImmutableRTree searchTree = ImmutableRTree.newImmutableFromMutable(tree);
stop = stopwatch.elapsed(TimeUnit.MILLISECONDS);
System.out.printf(Locale.ENGLISH, "[%,d]: buildImmutable = %,d ms%n", numPoints, stop);
// The original restarted the stopwatch twice back to back; the duplicated
// reset().start() statement was redundant and has been removed.
stopwatch.reset().start();
// NOTE(review): fragment of a benchmark harness; the enclosing method is not
// visible here.
System.out.printf(Locale.ENGLISH, "[%,d]: insert = %,d ms%n", numPoints, stop);
stopwatch.reset().start();
ImmutableRTree searchTree = ImmutableRTree.newImmutableFromMutable(tree);
stop = stopwatch.elapsed(TimeUnit.MILLISECONDS);
System.out.printf(Locale.ENGLISH, "[%,d]: buildImmutable = %,d ms%n", numPoints, stop);
// The original restarted the stopwatch twice back to back; the duplicated
// reset().start() statement was redundant and has been removed.
stopwatch.reset().start();
/** Checks getLongestDepth over three cuboid sets, timing each call. */
@Test
public void testGetLongestDepth() {
    Stopwatch sw = new Stopwatch();

    sw.start();
    checkLongestDepth(sw, Sets.newHashSet(7L, 6L, 5L, 4L, 3L, 2L, 1L), 2);

    sw.reset();
    sw.start();
    checkLongestDepth(sw,
            Sets.newHashSet(1024L, 1666L, 1667L, 1728L, 1730L, 1731L, 1760L, 1762L, 1763L, 1776L,
                    1778L, 1779L, 1784L, 1788L, 1790L, 1791L, 1920L, 1922L, 1923L, 1984L, 1986L,
                    1987L, 2016L, 2018L, 2019L, 2032L, 2034L, 2035L, 2040L, 2044L, 2046L, 2047L),
            8);

    sw.reset();
    sw.start();
    checkLongestDepth(sw, Sets.newHashSet(31L, 11L, 5L, 3L, 1L), 3);
    sw.stop();
}

/** Asserts the longest depth of one cuboid set and prints the elapsed time. */
private void checkLongestDepth(Stopwatch sw, Set<Long> cuboidSet, int expectedDepth) {
    assertEquals(expectedDepth, CuboidUtil.getLongestDepth(cuboidSet));
    System.out.println("Time cost for GetLongestDepth: " + sw.elapsed(TimeUnit.MILLISECONDS) + "ms");
}
}
/**
 * Stress-tests direct-children-cache construction over a mass cuboid set,
 * printing how long each phase takes.
 */
@Test
public void createDirectChildrenCacheStressTest() {
    Stopwatch timer = new Stopwatch();
    timer.start();
    Set<Long> cuboidSet = generateMassCuboidSet();
    System.out.println("Time elapsed for creating sorted cuboid list: " + timer.elapsedMillis());

    // Restart the timer for the cache-construction phase.
    timer.reset();
    timer.start();
    checkDirectChildrenCacheStressTest(CuboidStatsUtil.createDirectChildrenCache(cuboidSet));
    System.out.println("Time elapsed for creating direct children cache: " + timer.elapsedMillis());
    timer.stop();
}
/**
 * Verifies that enumerating dictionary values by parent ordering and by
 * pre-order traversal yield the same value set, printing the timing of each
 * traversal.
 *
 * @param file path of the file whose lines seed the dictionary
 * @throws Exception if the file cannot be read or the dictionary fails to build
 */
private void testEnumeratorValues(String file) throws Exception {
    ArrayList<String> str;
    // try-with-resources: the original never closed this stream (leak).
    try (InputStream is = new FileInputStream(file)) {
        str = loadStrings(is);
    }
    TrieDictionaryBuilder<String> b = newDictBuilder(str);
    TrieDictionary<String> dict = b.build(0);
    System.out.println("Dictionary size for file " + file + " is " + dict.getSize());

    Stopwatch sw = new Stopwatch();
    sw.start();
    List<String> values1 = dict.enumeratorValuesByParent();
    System.out.println("By iterating id visit the time cost " + sw.elapsed(TimeUnit.MILLISECONDS) + " ms");
    sw.reset();
    sw.start();
    List<String> values2 = dict.enumeratorValues();
    System.out.println("By pre-order visit the time cost " + sw.elapsed(TimeUnit.MILLISECONDS) + " ms");
    sw.stop();

    // Order differs between the traversals; compare as sets.
    assertEquals(Sets.newHashSet(values1), Sets.newHashSet(values2));
}
// Fragment of a Stopwatch.toString() test: each case resets and restarts the
// stopwatch, advances the fake ticker by a known number of nanoseconds, and
// checks that toString() picks the right unit and rounding.
ticker.advance(8998);
assertEquals("9.999 \u03bcs", stopwatch.toString());
stopwatch.reset();
stopwatch.start();
ticker.advance(1234567);
assertEquals("1.235 ms", stopwatch.toString());
stopwatch.reset();
stopwatch.start();
ticker.advance(5000000000L);
assertEquals("5.000 s", stopwatch.toString());
stopwatch.reset();
stopwatch.start();
ticker.advance((long) (1.5 * 60 * 1000000000L));
assertEquals("1.500 min", stopwatch.toString());
stopwatch.reset();
stopwatch.start();
ticker.advance((long) (2.5 * 60 * 60 * 1000000000L));
assertEquals("2.500 h", stopwatch.toString());
stopwatch.reset();
stopwatch.start();
// Fragment ends here; the assertion for this advance is outside this view.
ticker.advance((long) (7.25 * 24 * 60 * 60 * 1000000000L));