/**
 * Validates that the requested row limit is acceptable to the Google Search Console API.
 *
 * @param rowLimit number of rows requested per API call; must be in range (0, API_ROW_LIMIT]
 * @throws IllegalArgumentException if rowLimit is non-positive or exceeds API_ROW_LIMIT
 */
private static void checkRowLimit(int rowLimit) {
  // API_ROW_LIMIT is the hard cap imposed by the Search Console API (5000 per the message below).
  Preconditions.checkArgument(rowLimit > 0 && rowLimit <= API_ROW_LIMIT,
      "Row limit for Google Search Console API must be within range (0, 5000]");
}
}
/**
 * Creates a post-order iterator over the given URL trie.
 *
 * @param trie the trie to traverse; its root becomes the starting node and its prefix
 *             (if non-null) seeds the prefix accumulator
 * @param stoppingSize group-size threshold; must be positive
 * @throws IllegalArgumentException if stoppingSize is not positive
 */
public UrlTriePostOrderIterator(UrlTrie trie, int stoppingSize) {
  Preconditions.checkArgument(stoppingSize > 0);
  _currentNode = trie.getRoot();
  String prefix = trie.getPrefix();
  _currentPrefixSb = new StringBuilder();
  // A null prefix simply means traversal starts with an empty prefix.
  if (prefix != null) {
    _currentPrefixSb.append(prefix);
  }
  _groupSize = stoppingSize;
}
wuState.getPropAsInt(GoggleIngestionConfigurationKeys.SOURCE_ASYNC_ITERATOR_POLL_BLOCKING_TIME, 1)); _wuState = wuState; Preconditions.checkArgument(!filterMap.containsKey(GoogleWebmasterFilter.Dimension.PAGE), "Doesn't support filters for page for the time being. Will implement support later. If page filter is provided, the code won't take the responsibility of get all pages, so it will just return all queries for that page."); Preconditions.checkArgument(PAGE_LIMIT >= 1, "Page limit must be at least 1."); Preconditions.checkArgument(QUERY_LIMIT >= 1, "Query limit must be at least 1."); Preconditions.checkArgument(ROUND_TIME_OUT > 0, "Time out must be positive."); Preconditions.checkArgument(MAX_RETRY_ROUNDS >= 0, "Retry rounds cannot be negative."); Preconditions.checkArgument(ROUND_COOL_DOWN >= 0, "Initial cool down time cannot be negative."); Preconditions.checkArgument(batchesPerSecond > 0, "Requests per second must be positive."); Preconditions.checkArgument(BATCH_SIZE >= 1, "Batch size must be at least 1."); Preconditions.checkArgument(TRIE_GROUP_SIZE >= 1, "Group size must be at least 1."); Preconditions.checkArgument(PAGE_LIMIT == GoogleWebmasterClient.API_ROW_LIMIT, "Page limit must be set at 5000 if you want to use the advanced algorithm. This indicates that you understand what you are doing.");
/**
 * Fetches the PAGE dimension values for one Search Analytics query page.
 *
 * @param siteProperty the Search Console site property to query
 * @param startDate inclusive start date of the report window
 * @param endDate inclusive end date of the report window
 * @param country country filter value
 *                (NOTE(review): this parameter is not referenced in this implementation — verify intent)
 * @param rowLimit maximum rows to request; validated by checkRowLimit
 * @param requestedDimensions dimensions for the query; must contain PAGE
 * @param filters dimension filters, AND-grouped before the request
 * @param startRow zero-based row offset for paging through results
 * @return the PAGE values of the returned rows, or an empty list if the API returned no rows
 * @throws IOException if the API call fails
 * @throws IllegalArgumentException if rowLimit is out of range or PAGE is not requested
 */
@Override
public List<String> getPages(String siteProperty, String startDate, String endDate, String country, int rowLimit,
    List<GoogleWebmasterFilter.Dimension> requestedDimensions, List<ApiDimensionFilter> filters, int startRow)
    throws IOException {
  checkRowLimit(rowLimit);
  Preconditions.checkArgument(requestedDimensions.contains(GoogleWebmasterFilter.Dimension.PAGE));
  SearchAnalyticsQueryResponse rspByCountry =
      createSearchAnalyticsQuery(siteProperty, startDate, endDate, requestedDimensions,
          GoogleWebmasterFilter.andGroupFilters(filters), rowLimit, startRow).execute();
  List<ApiDataRow> pageRows = rspByCountry.getRows();
  // Presize to rowLimit: the API returns at most that many rows.
  List<String> pages = new ArrayList<>(rowLimit);
  // getRows() is null (not empty) when the query matched nothing.
  if (pageRows != null) {
    // The position of the PAGE value inside each row's keys mirrors its position
    // in the requested dimension list.
    int pageIndex = requestedDimensions.indexOf(GoogleWebmasterFilter.Dimension.PAGE);
    for (ApiDataRow row : pageRows) {
      pages.add(row.getKeys().get(pageIndex));
    }
  }
  return pages;
}
/**
 * Builds a data fetcher for one Search Console site property, reading tuning knobs
 * from the work-unit state (with defaults) and constructing a rate limiter from them.
 *
 * @param siteProperty the site property URL; must end with "/"
 * @param client the Webmaster API client used for requests
 * @param wuState work-unit state supplying tuning properties
 * @throws IOException declared for construction-time I/O
 *                     (NOTE(review): possibly thrown by getHotStartJobs — confirm)
 * @throws IllegalArgumentException if siteProperty does not end with "/"
 */
GoogleWebmasterDataFetcherImpl(String siteProperty, GoogleWebmasterClient client, State wuState)
    throws IOException {
  _siteProperty = siteProperty;
  Preconditions.checkArgument(_siteProperty.endsWith("/"), "The site property must end in \"/\"");
  _client = client;
  _jobs = getHotStartJobs(wuState);
  // Tuning knobs: requests/sec for the limiter, cooldown times (seconds assumed —
  // TODO confirm units), and retry count for page fetching.
  API_REQUESTS_PER_SECOND =
      wuState.getPropAsDouble(GoogleWebMasterSource.KEY_PAGES_TUNING_REQUESTS_PER_SECOND, 4.5);
  PAGES_COUNT_COOLDOWN_TIME =
      wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_COUNT_TUNING_COOLDOWN_TIME, 30);
  PAGES_GET_COOLDOWN_TIME =
      wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_GET_TUNING_COOLDOWN_TIME, 5);
  // Limiter must be created after API_REQUESTS_PER_SECOND is read.
  LIMITER = new RateBasedLimiter(API_REQUESTS_PER_SECOND, TimeUnit.SECONDS);
  GET_PAGES_RETRIES = wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_TUNING_MAX_RETRIES, 120);
}
/** * Performs all necessary setup steps for running requests against the API. * * @param applicationName the name of the application: com.example.app * @param serviceAccountEmail the Service Account Email (empty if using * installed application) * @return the {@Link AndroidPublisher} service * @throws GeneralSecurityException * @throws IOException */ public static AndroidPublisher init( String applicationName, @Nullable String serviceAccountEmail ) throws IOException, GeneralSecurityException { Preconditions.checkArgument( !Strings.isNullOrEmpty( applicationName ), "applicationName cannot be null or empty!" ); // Authorization. newTrustedTransport(); Credential credential; credential = authorizeWithServiceAccount( serviceAccountEmail ); // Set up and return API client. return new AndroidPublisher.Builder( httpTransport, JSON_FACTORY, credential ).setApplicationName( applicationName ) .build(); }
/**
 * Validates that the requested row limit is acceptable to the Google Search Console API.
 *
 * @param rowLimit number of rows requested per API call; must be in range (0, API_ROW_LIMIT]
 * @throws IllegalArgumentException if rowLimit is non-positive or exceeds API_ROW_LIMIT
 */
private static void checkRowLimit(int rowLimit) {
  // Include the actual limit and offending value in the message (Guava %s formatting)
  // instead of a hard-coded "5000", so the error stays correct if API_ROW_LIMIT changes.
  Preconditions.checkArgument(rowLimit > 0 && rowLimit <= API_ROW_LIMIT,
      "Row limit for Google Search Console API must be within range (0, %s], but was %s",
      API_ROW_LIMIT, rowLimit);
}
}
/**
 * Asserts that a file size is non-negative and small enough to fit in an int.
 *
 * @param size file length in bytes
 * @throws IllegalArgumentException if size is negative or exceeds Integer.MAX_VALUE
 */
private static void checkFileSize(long size) {
  // Break the compound condition into named parts for readability.
  boolean nonNegative = size >= 0;
  boolean fitsInInt = size <= Integer.MAX_VALUE;
  Preconditions.checkArgument(nonNegative && fitsInInt,
      "file too big (" + size + " B): " + "tarTo should have thrown an IOException");
}
/**
 * Ensures the root git directory exists, creating it (and parents) if needed.
 *
 * @param rootGitDirectoryPath filesystem path of the desired root git directory
 * @return the directory as a {@code File}
 * @throws IllegalArgumentException if the path does not denote a directory after creation
 */
private File initRootGitDirectory(String rootGitDirectoryPath) {
  final File root = new File(rootGitDirectoryPath);
  // mkdirs() returning false is fine when the directory already exists;
  // the isDirectory() check below covers both failure modes.
  root.mkdirs();
  Preconditions.checkArgument(root.isDirectory(),
      "given root git directory " + "is not a directory: %s", root.getAbsolutePath());
  return root;
}
/**
 * Creates a post-order iterator over the given URL trie.
 *
 * @param trie trie whose root seeds the traversal; a non-null trie prefix seeds
 *             the prefix accumulator
 * @param stoppingSize group-size threshold; must be positive
 * @throws IllegalArgumentException if stoppingSize is not positive
 */
public UrlTriePostOrderIterator(UrlTrie trie, int stoppingSize) {
  Preconditions.checkArgument(stoppingSize > 0);
  _groupSize = stoppingSize;
  _currentNode = trie.getRoot();
  _currentPrefixSb = new StringBuilder();
  String triePrefix = trie.getPrefix();
  if (triePrefix != null) {
    _currentPrefixSb.append(triePrefix);
  }
}
/**
 * Writes a directory entry (relative to {@code base}) into the tar stream, then
 * recursively adds each child via addTarEntry.
 *
 * @param tout the tar output stream being written
 * @param base base path used to relativize entry names
 * @param dir the directory to add; must be a directory
 * @throws IOException if the directory's children cannot be listed or writing fails
 * @throws IllegalArgumentException if dir is not a directory
 */
private static void addTarDir(TarArchiveOutputStream tout, Path base, File dir) throws IOException {
  Preconditions.checkArgument(dir.isDirectory());
  String name = base.relativize(Paths.get(dir.getAbsolutePath())).toString();
  ArchiveEntry entry = tout.createArchiveEntry(dir, name);
  tout.putArchiveEntry(entry);
  tout.closeArchiveEntry();
  // File.listFiles() returns null (not an empty array) on I/O error; the original
  // code would NPE in the for-each. Fail explicitly instead.
  File[] children = dir.listFiles();
  if (children == null) {
    throw new IOException("failed to list directory contents: " + dir.getAbsolutePath());
  }
  for (File f : children) {
    addTarEntry(tout, base, f);
  }
}
/**
 * Builds an AndroidPublisher client using a service-account credential loaded
 * from an asset file.
 *
 * @param context Android context; its package name becomes the application name
 * @param fileName asset file containing the service-account credential JSON
 * @return a configured {@code AndroidPublisher} client
 * @throws IOException if the asset cannot be read or the credential cannot be parsed
 * @throws IllegalArgumentException if the context's package name is null or empty
 */
public static AndroidPublisher init(Context context, String fileName) throws IOException {
  Preconditions.checkArgument(!Strings.isNullOrEmpty(context.getPackageName()),
      "applicationId cannot be null or empty!");
  newTrustedTransport();
  // Fix: the asset InputStream was never closed; try-with-resources guarantees
  // it is released even if credential parsing throws.
  GoogleCredential credential;
  try (InputStream credentialStream = context.getAssets().open(fileName)) {
    credential = GoogleCredential.fromStream(credentialStream, HTTP_TRANSPORT, JSON_FACTORY);
  }
  credential = credential.createScoped(Collections.singleton(AndroidPublisherScopes.ANDROIDPUBLISHER));
  return new AndroidPublisher.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential)
      .setApplicationName(context.getPackageName())
      .build();
}
wuState.getPropAsInt(GoggleIngestionConfigurationKeys.SOURCE_ASYNC_ITERATOR_POLL_BLOCKING_TIME, 1)); _wuState = wuState; Preconditions.checkArgument(!filterMap.containsKey(GoogleWebmasterFilter.Dimension.PAGE), "Doesn't support filters for page for the time being. Will implement support later. If page filter is provided, the code won't take the responsibility of get all pages, so it will just return all queries for that page."); Preconditions.checkArgument(PAGE_LIMIT >= 1, "Page limit must be at least 1."); Preconditions.checkArgument(QUERY_LIMIT >= 1, "Query limit must be at least 1."); Preconditions.checkArgument(ROUND_TIME_OUT > 0, "Time out must be positive."); Preconditions.checkArgument(MAX_RETRY_ROUNDS >= 0, "Retry rounds cannot be negative."); Preconditions.checkArgument(ROUND_COOL_DOWN >= 0, "Initial cool down time cannot be negative."); Preconditions.checkArgument(batchesPerSecond > 0, "Requests per second must be positive."); Preconditions.checkArgument(BATCH_SIZE >= 1, "Batch size must be at least 1."); Preconditions.checkArgument(TRIE_GROUP_SIZE >= 1, "Group size must be at least 1."); Preconditions.checkArgument(PAGE_LIMIT == GoogleWebmasterClient.API_ROW_LIMIT, "Page limit must be set at 5000 if you want to use the advanced algorithm. This indicates that you understand what you are doing.");
/**
 * Writes a single regular file (named relative to {@code base}) into the tar stream,
 * copying its full contents.
 *
 * @param tout the tar output stream being written
 * @param base base path used to relativize the entry name
 * @param file the regular file to add
 * @throws IOException if reading the file or writing the stream fails
 * @throws IllegalArgumentException if file is not a regular file or is too large
 */
private static void addTarFile(TarArchiveOutputStream tout, Path base, File file) throws IOException {
  Preconditions.checkArgument(file.isFile(), "given file" + " is not file: %s", file);
  checkFileSize(file.length());
  Path absolutePath = Paths.get(file.getAbsolutePath());
  String entryName = base.relativize(absolutePath).toString();
  ArchiveEntry entry = tout.createArchiveEntry(file, entryName);
  tout.putArchiveEntry(entry);
  try (InputStream source = new FileInputStream(file)) {
    IOUtils.copy(source, tout);
  }
  tout.closeArchiveEntry();
}
/**
 * Fetches the PAGE dimension values for one Search Analytics query page.
 *
 * @param siteProperty the Search Console site property to query
 * @param startDate inclusive start date of the report window
 * @param endDate inclusive end date of the report window
 * @param country country filter value
 * @param rowLimit maximum rows to request; validated by checkRowLimit
 * @param requestedDimensions dimensions for the query; must contain PAGE
 * @param filters dimension filters, AND-grouped before the request
 * @param startRow zero-based row offset for paging through results
 * @return the PAGE values of the returned rows, or an empty list if no rows came back
 * @throws IOException if the API call fails
 * @throws IllegalArgumentException if rowLimit is out of range or PAGE is not requested
 */
@Override
public List<String> getPages(String siteProperty, String startDate, String endDate, String country, int rowLimit,
    List<GoogleWebmasterFilter.Dimension> requestedDimensions, List<ApiDimensionFilter> filters, int startRow)
    throws IOException {
  checkRowLimit(rowLimit);
  Preconditions.checkArgument(requestedDimensions.contains(GoogleWebmasterFilter.Dimension.PAGE));
  SearchAnalyticsQueryResponse response =
      createSearchAnalyticsQuery(siteProperty, startDate, endDate, requestedDimensions,
          GoogleWebmasterFilter.andGroupFilters(filters), rowLimit, startRow).execute();
  List<String> pages = new ArrayList<>(rowLimit);
  List<ApiDataRow> rows = response.getRows();
  // The API returns null rows (not an empty list) when nothing matched.
  if (rows == null) {
    return pages;
  }
  // PAGE's position in each row's keys mirrors its position in the dimension list.
  int pagePosition = requestedDimensions.indexOf(GoogleWebmasterFilter.Dimension.PAGE);
  for (ApiDataRow row : rows) {
    pages.add(row.getKeys().get(pagePosition));
  }
  return pages;
}
public boolean segmentsAreFromSamePartitionSet( final Set<DataSegment> segments ) { // Verify that these segments are all in the same partition set Preconditions.checkArgument(!segments.isEmpty(), "segments nonempty"); final DataSegment firstSegment = segments.iterator().next(); for (final DataSegment segment : segments) { if (!segment.getDataSource().equals(firstSegment.getDataSource()) || !segment.getInterval().equals(firstSegment.getInterval()) || !segment.getVersion().equals(firstSegment.getVersion())) { return false; } } return true; }
/**
 * Builds a data fetcher for one Search Console site property, reading tuning knobs
 * from the work-unit state (with defaults) and constructing a rate limiter from them.
 *
 * @param siteProperty the site property URL; must end with "/"
 * @param client the Webmaster API client used for requests
 * @param wuState work-unit state supplying tuning properties
 * @throws IOException if construction-time I/O fails
 * @throws IllegalArgumentException if siteProperty does not end with "/"
 */
GoogleWebmasterDataFetcherImpl(String siteProperty, GoogleWebmasterClient client, State wuState)
    throws IOException {
  // Validate the argument up front; the field holds the same reference afterwards.
  Preconditions.checkArgument(siteProperty.endsWith("/"), "The site property must end in \"/\"");
  _siteProperty = siteProperty;
  _client = client;
  _jobs = getHotStartJobs(wuState);
  API_REQUESTS_PER_SECOND =
      wuState.getPropAsDouble(GoogleWebMasterSource.KEY_PAGES_TUNING_REQUESTS_PER_SECOND, 4.5);
  PAGES_COUNT_COOLDOWN_TIME =
      wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_COUNT_TUNING_COOLDOWN_TIME, 30);
  PAGES_GET_COOLDOWN_TIME =
      wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_GET_TUNING_COOLDOWN_TIME, 5);
  GET_PAGES_RETRIES = wuState.getPropAsInt(GoogleWebMasterSource.KEY_PAGES_TUNING_MAX_RETRIES, 120);
  // Construct the limiter last, once the requests-per-second knob has been read.
  LIMITER = new RateBasedLimiter(API_REQUESTS_PER_SECOND, TimeUnit.SECONDS);
}
/**
 * Extracts a bzip2'd project tarball into a freshly created project directory.
 *
 * @param projectName name of the project; must pass Project.isValidProjectName
 * @param dataStream bzip2-compressed tar stream of the project contents
 * @throws IOException if extraction fails
 * @throws IllegalArgumentException if the project name is invalid
 * @throws IllegalStateException if the project's directories already exist
 */
@Override
public void unbzip2Project(String projectName, InputStream dataStream) throws IOException {
  // Fix: the original Guava format string was "[%s] invalid project name: " which put
  // the name in brackets and left a dangling trailing colon; put the placeholder
  // where the value belongs.
  Preconditions.checkArgument(Project.isValidProjectName(projectName),
      "invalid project name: %s", projectName);
  Preconditions.checkState(getDirForProject(projectName).mkdirs(),
      "[%s] directories for " + "evicted project already exist", projectName);
  Tar.bz2.unzip(dataStream, getDirForProject(projectName));
}
Preconditions.checkArgument(interval.toDurationMillis() > 0, "interval empty"); final String dataSource = task.getDataSource(); final List<TaskLockPosse> foundPosses = findLockPossesForInterval(dataSource, interval);