// Cache-loader callback: builds a fresh RateBasedLimiter at this instance's configured
// rate when the limiter cache has no entry for the key. Declared to throw Exception
// only to satisfy Callable; the constructor itself is not expected to throw here.
@Override public RateBasedLimiter call() throws Exception { return new RateBasedLimiter(limitPerSecond); } };
++r; try { LIMITER.acquirePermits(1); } catch (InterruptedException e) { log.error("RateBasedLimiter: " + interruptedMsg, e);
public RateBasedLimiter(double rateLimit, TimeUnit timeUnit) { this.rateLimitPerSecond = convertRate(rateLimit, timeUnit, TimeUnit.SECONDS); this.rateLimiter = RateLimiter.create(this.rateLimitPerSecond); }
/**
 * Returns the shared {@link RateBasedLimiter} for this file system's URI, upgrading the cached
 * entry when this instance was configured with a higher rate than the cached limiter provides.
 *
 * <p>Fix: the original wrapped a failed upgrade in {@link ExecutionException} solely so the
 * enclosing catch could immediately re-wrap it in {@link RuntimeException} — exceptions used as
 * control flow, producing a doubly-nested cause chain. The upgrade failure is now rethrown
 * directly; callers still observe a {@code RuntimeException} either way.
 *
 * @throws RuntimeException if the cache lookup or the limiter construction fails
 */
protected Limiter getRateLimiter() {
  String key = getUri().toString();
  RateBasedLimiter limiter;
  try {
    limiter = FS_URI_TO_RATE_LIMITER_CACHE.get(key, this.callableLimiter);
  } catch (ExecutionException ee) {
    throw new RuntimeException(ee);
  }
  // NOTE(review): get-then-put is not atomic; two threads may race to upgrade. The last
  // put wins, which appears acceptable here since both produce an equivalent limiter.
  if (limiter.getRateLimitPerSecond() < this.limitPerSecond) {
    try {
      limiter = this.callableLimiter.call();
      FS_URI_TO_RATE_LIMITER_CACHE.put(key, limiter);
    } catch (Exception exc) {
      throw new RuntimeException(exc);
    }
  }
  return limiter;
}
}
/**
 * Returns the shared {@link RateBasedLimiter} for this file system's URI, upgrading the cached
 * entry when this instance was configured with a higher rate than the cached limiter provides.
 *
 * <p>Fix: the original wrapped a failed upgrade in {@link ExecutionException} solely so the
 * enclosing catch could immediately re-wrap it in {@link RuntimeException} — exceptions used as
 * control flow, producing a doubly-nested cause chain. The upgrade failure is now rethrown
 * directly; callers still observe a {@code RuntimeException} either way.
 *
 * @throws RuntimeException if the cache lookup or the limiter construction fails
 */
protected Limiter getRateLimiter() {
  String key = getUri().toString();
  RateBasedLimiter limiter;
  try {
    limiter = FS_URI_TO_RATE_LIMITER_CACHE.get(key, this.callableLimiter);
  } catch (ExecutionException ee) {
    throw new RuntimeException(ee);
  }
  // NOTE(review): get-then-put is not atomic; two threads may race to upgrade. The last
  // put wins, which appears acceptable here since both produce an equivalent limiter.
  if (limiter.getRateLimitPerSecond() < this.limitPerSecond) {
    try {
      limiter = this.callableLimiter.call();
      FS_URI_TO_RATE_LIMITER_CACHE.put(key, limiter);
    } catch (Exception exc) {
      throw new RuntimeException(exc);
    }
  }
  return limiter;
}
}
/**
 * Builds a {@link RateBasedLimiter} from the QPS value in the supplied config.
 *
 * @throws RuntimeException if {@code config} does not define the required QPS key
 */
@Override
public Limiter buildLimiter(Config config) {
  if (config.hasPath(QPS_KEY)) {
    return new RateBasedLimiter(config.getLong(QPS_KEY));
  }
  throw new RuntimeException("Missing key " + QPS_KEY);
}
}
@Override public void run() { try { LIMITER.acquirePermits(1); } catch (InterruptedException e) { throw new RuntimeException("RateBasedLimiter got interrupted.", e);
public RateBasedLimiter(double rateLimit, TimeUnit timeUnit) { this.rateLimitPerSecond = convertRate(rateLimit, timeUnit, TimeUnit.SECONDS); this.rateLimiter = RateLimiter.create(this.rateLimitPerSecond); }
/**
 * Composes the throttling chain for a copy job from its configuration: starts with a
 * no-op limiter, optionally stacks a local QPS limiter on top, and optionally stacks a
 * shared limiter obtained from the remote throttling service via the broker.
 *
 * @throws RuntimeException if the broker reports the shared limiter is not configured
 */
static Limiter createLimiter(Configuration configuration, SharedResourcesBroker<SimpleScopeType> broker) {
  try {
    Limiter limiter = new NoopLimiter();
    long localQps = configuration.getLong(LOCALLY_ENFORCED_QPS, 0);
    // A positive value enables local (per-process) rate limiting; 0 or absent disables it.
    if (localQps > 0) {
      log.info("Setting up local qps " + localQps);
      limiter = new MultiLimiter(limiter, new RateBasedLimiter(localQps));
    }
    // Remote throttling is additive: both local and global limits apply when enabled.
    if (configuration.getBoolean(USE_THROTTLING_SERVER, false)) {
      log.info("Setting up remote throttling.");
      String resourceId = configuration.get(RESOURCE_ID);
      Limiter globalLimiter = broker.getSharedResource(new RestliLimiterFactory<SimpleScopeType>(), new SharedLimiterKey(resourceId));
      limiter = new MultiLimiter(limiter, globalLimiter);
    }
    return limiter;
  } catch (NotConfiguredException nce) {
    throw new RuntimeException(nce);
  }
}
/**
 * Executes one search-analytics query for {@code job} under the shared rate limiter.
 * On success the results are published and reported; an IO failure hands the job back
 * for retry via {@code onFailure}.
 *
 * <p>Fix: the original caught {@link InterruptedException} without restoring the thread's
 * interrupt flag and logged only a message, dropping the exception object.
 */
@Override
public void run() {
  try {
    final ArrayList<ApiDimensionFilter> filters = new ArrayList<>();
    filters.addAll(_filterMap.values());
    filters.add(GoogleWebmasterFilter.pageFilter(job.getOperator(), job.getPage()));
    LIMITER.acquirePermits(1);
    List<String[]> results = _webmaster
        .performSearchAnalyticsQuery(job.getStartDate(), job.getEndDate(), QUERY_LIMIT, _requestedDimensions,
            _requestedMetrics, filters);
    onSuccess(job, results, responseQueue, retries);
    reporter.report(job.getPagesSize(), _country);
  } catch (IOException e) {
    // IO failures are treated as retryable.
    onFailure(e.getMessage(), job, retries);
  } catch (InterruptedException e) {
    // Restore the interrupt status so the owning executor can observe cancellation,
    // and include the exception in the log instead of dropping it.
    Thread.currentThread().interrupt();
    log.error(String
        .format("Interrupted while trying to get queries for job %s. Current retry size is %d.", job, retries.size()), e);
  }
}
};
/** Creates and starts the shared 20-permits-per-second limiter used by the tests in this class. */
@BeforeClass
public void setUp() {
  this.limiter = new RateBasedLimiter(20, TimeUnit.SECONDS);
  this.limiter.start();
}
LIMITER.acquirePermits(1); _webmaster .performSearchAnalyticsQueryInBatch(jobs, filterList, callbackList, _requestedDimensions, QUERY_LIMIT);
/**
 * Wraps {@code writer} so writes are throttled according to the type and rate configured
 * in {@code state}; optionally instruments time spent throttled when metrics are enabled.
 *
 * <p>Fix: the original computed {@code computeRateLimit(state)} into a local, logged it,
 * then called {@code computeRateLimit(state)} a second time to build the limiter — the
 * recomputation is redundant and could let the logged rate diverge from the enforced one.
 *
 * @param writer underlying writer to throttle; must not be null
 * @param state configuration source for throttle type and rate; must not be null
 */
public ThrottleWriter(DataWriter<D> writer, State state) {
  Preconditions.checkNotNull(writer, "DataWriter is required.");
  Preconditions.checkNotNull(state, "State is required.");
  this.state = state;
  this.writer = writer;
  this.type = ThrottleType.valueOf(state.getProp(WRITER_THROTTLE_TYPE_KEY));
  int rateLimit = computeRateLimit(state);
  LOG.info("Rate limit for each writer: " + rateLimit + " " + type);
  // Reuse the value computed (and logged) above.
  this.limiter = new RateBasedLimiter(rateLimit);
  if (GobblinMetrics.isEnabled(state)) {
    throttledTimer = Optional.<Timer>of(Instrumented.getMetricContext(state, getClass()).timer(WRITES_THROTTLED_TIMER));
  } else {
    throttledTimer = Optional.absent();
  }
}
++r; try { LIMITER.acquirePermits(1); } catch (InterruptedException e) { log.error("RateBasedLimiter: " + interruptedMsg, e);
/**
 * Wraps a local file system in a rate-controlled one (20 permits per second)
 * and starts throttling before any test runs.
 */
@BeforeClass
public void setUp() throws IOException, ExecutionException {
  final Limiter qpsLimiter = new RateBasedLimiter(20);
  final FileSystem localFs = FileSystem.getLocal(new Configuration());
  this.rateControlledFs = new TestRateControlledFileSystem(localFs, 20, qpsLimiter);
  this.rateControlledFs.startRateControl();
}
@Override public void run() { try { LIMITER.acquirePermits(1); } catch (InterruptedException e) { throw new RuntimeException("RateBasedLimiter got interrupted.", e);
/**
 * Creates a data fetcher for the given Google Search Console site property, reading tuning
 * knobs (request rate, cooldown times, retry count) from the work-unit state.
 *
 * @param siteProperty site URL as registered in Search Console; must end with "/"
 * @param client       API client used for all requests
 * @param wuState      work-unit state supplying tuning properties
 * @throws IOException presumably propagated from getHotStartJobs(wuState) — the only call
 *                     here that could perform I/O; confirm against its declaration
 * @throws IllegalArgumentException if {@code siteProperty} does not end with "/"
 */
GoogleWebmasterDataFetcherImpl(String siteProperty, GoogleWebmasterClient client, State wuState) throws IOException {
  _siteProperty = siteProperty;
  Preconditions.checkArgument(_siteProperty.endsWith("/"), "The site property must end in \"/\"");
  _client = client;
  _jobs = getHotStartJobs(wuState);
  // Defaults: 4.5 requests/sec, 30 cooldown for page-count probing, 5 for page fetching,
  // 120 retries. NOTE(review): cooldown units are not visible here — likely seconds; verify.
  API_REQUESTS_PER_SECOND = wuState.getPropAsDouble(GoogleWebMasterSource.KEY_PAGES_TUNING_REQUESTS_PER_SECOND, 4.5);
  PAGES_COUNT_COOLDOWN_TIME = wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_COUNT_TUNING_COOLDOWN_TIME, 30);
  PAGES_GET_COOLDOWN_TIME = wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_GET_TUNING_COOLDOWN_TIME, 5);
  LIMITER = new RateBasedLimiter(API_REQUESTS_PER_SECOND, TimeUnit.SECONDS);
  GET_PAGES_RETRIES = wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_TUNING_MAX_RETRIES, 120);
}
/**
 * Executes one search-analytics query for {@code job} under the shared rate limiter.
 * On success the results are published and reported; an IO failure hands the job back
 * for retry via {@code onFailure}.
 *
 * <p>Fix: the original caught {@link InterruptedException} without restoring the thread's
 * interrupt flag and logged only a message, dropping the exception object.
 */
@Override
public void run() {
  try {
    final ArrayList<ApiDimensionFilter> filters = new ArrayList<>();
    filters.addAll(_filterMap.values());
    filters.add(GoogleWebmasterFilter.pageFilter(job.getOperator(), job.getPage()));
    LIMITER.acquirePermits(1);
    List<String[]> results = _webmaster
        .performSearchAnalyticsQuery(job.getStartDate(), job.getEndDate(), QUERY_LIMIT, _requestedDimensions,
            _requestedMetrics, filters);
    onSuccess(job, results, responseQueue, retries);
    reporter.report(job.getPagesSize(), _country);
  } catch (IOException e) {
    // IO failures are treated as retryable.
    onFailure(e.getMessage(), job, retries);
  } catch (InterruptedException e) {
    // Restore the interrupt status so the owning executor can observe cancellation,
    // and include the exception in the log instead of dropping it.
    Thread.currentThread().interrupt();
    log.error(String
        .format("Interrupted while trying to get queries for job %s. Current retry size is %d.", job, retries.size()), e);
  }
}
};
if (state.contains(EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY)) { TimeUnit rateTimeUnit = TimeUnit.valueOf(state.getProp(EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY).toUpperCase()); return new RateBasedLimiter(rateLimit, rateTimeUnit); return new RateBasedLimiter(rateLimit); case TIME_BASED: Preconditions.checkArgument(state.contains(EXTRACT_LIMIT_TIME_LIMIT_KEY));
LIMITER.acquirePermits(1); _webmaster .performSearchAnalyticsQueryInBatch(jobs, filterList, callbackList, _requestedDimensions, QUERY_LIMIT);