Refine search
// Builds the exclusion set for a NOT_IN predicate: each stringified long value is
// parsed exactly once up front so per-row evaluation is a plain hash lookup.
LongRawValueBasedNotInPredicateEvaluator(NotInPredicate notInPredicate) {
  String[] rawValues = notInPredicate.getValues();
  _nonMatchingValues = new LongOpenHashSet(rawValues.length);
  for (String raw : rawValues) {
    _nonMatchingValues.add(Long.parseLong(raw));
  }
}
// Snippet (truncated): scans backup files, recording discovered oplog ids in two
// hash sets — crf ids in foundCrfs, drf ids in foundDrfs. NOTE(review): the
// if/else branching is incomplete in this excerpt; the condition that routes an
// oplogId to crf vs drf, and the method's closing braces, are elided.
public void createOplogs(boolean needsOplogs, Map<File, DirectoryHolder> backupFiles) { LongOpenHashSet foundCrfs = new LongOpenHashSet(); LongOpenHashSet foundDrfs = new LongOpenHashSet(); foundCrfs.add(oplogId); } else { foundDrfs.add(oplogId);
// Total number of stored values across the int and long backing collections.
public int size() {
  int total = this.longs.size();
  total += this.ints.size();
  return total;
} }
// Returns the ids present in {@code expected} but absent from {@code found}.
private LongOpenHashSet calcMissing(LongOpenHashSet found, LongOpenHashSet expected) {
  LongOpenHashSet missing = new LongOpenHashSet(expected.size());
  for (long id : expected) {
    if (!found.contains(id)) {
      missing.add(id);
    }
  }
  return missing;
}
// Lazily builds the double-membership predicate using double-checked locking on
// initLock (cheap null check, then re-check under the lock). Each string value
// that parses as a double is canonicalized via Double.doubleToLongBits; when the
// collection exceeds NUMERIC_HASHING_THRESHOLD a hash set backs the predicate,
// otherwise a sorted long[] with binary search is used (better cache behavior for
// small sets). NOTE(review): safe publication requires the 'predicate' field to
// be volatile — its declaration is not visible in this excerpt, confirm. The
// trailing @Override belongs to the next (elided) method.
private void initDoubleValues() { if (predicate != null) { return; } synchronized (initLock) { if (predicate != null) { return; } LongArrayList doubleBits = new LongArrayList(values.size()); for (String value : values) { Double doubleValue = Doubles.tryParse(value); if (doubleValue != null) { doubleBits.add(Double.doubleToLongBits((doubleValue))); } } if (doubleBits.size() > NUMERIC_HASHING_THRESHOLD) { final LongOpenHashSet doubleBitsHashSet = new LongOpenHashSet(doubleBits); predicate = input -> doubleBitsHashSet.contains(Double.doubleToLongBits(input)); } else { final long[] doubleBitsArray = doubleBits.toLongArray(); Arrays.sort(doubleBitsArray); predicate = input -> Arrays.binarySearch(doubleBitsArray, Double.doubleToLongBits(input)) >= 0; } } } @Override
// Snippet (truncated): reads target tweet ids from the ID_OPTION file, streams
// statuses writing each matching status's JSON the first time its id is seen
// (deduplicated via 'seen'), then writes out every requested id that was never
// encountered. NOTE(review): the id-file read loop and several closing braces are
// elided in this excerpt, so the control flow shown is not complete.
String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION); LongOpenHashSet tweetids = new LongOpenHashSet(); File tweetidsFile = new File(cmdline.getOptionValue(ID_OPTION)); if (!tweetidsFile.exists()) { tweetids.add(Long.parseLong(s)); LOG.info("Read " + tweetids.size() + " tweetids."); LongOpenHashSet seen = new LongOpenHashSet(); Status status; while ((status = stream.next()) != null) { if (tweetids.contains(status.getId()) && !seen.contains(status.getId())) { out.write(status.getJsonObject().toString() + "\n"); seen.add(status.getId()); out.close(); LOG.info("Extracted " + seen.size() + " tweetids."); LOG.info("Storing missing tweetids..."); LongIterator iter = tweetids.iterator(); while (iter.hasNext()) { long t = iter.nextLong(); if (!seen.contains(t)) { out.write(t + "\n");
// Test snippet (truncated): exercises recommender.recommend() against a mutating
// candidate set, asserting which candidate item ids may appear in the results for
// each user (e.g. user 2 with candidates {7,8,9} yields only item 9).
// NOTE(review): the enclosing test method signature is elided in this excerpt.
LongOpenHashSet candidates = new LongOpenHashSet(); candidates.add(6); candidates.add(7); candidates.add(8); candidates.add(9); recs = recommender.recommend(1, -1, candidates, null); assertThat(recs, hasSize(0)); candidates.clear(); candidates.add(7); candidates.add(8); candidates.add(9); recs = recommender.recommend(2, -1, candidates, null); assertThat(recs, contains(9L)); candidates.add(6); candidates.remove(9); recs = recommender.recommend(2, -1, candidates, null); assertThat(recs, hasSize(0)); candidates.clear(); candidates.add(6); candidates.add(7); recs = recommender.recommend(5, -1, candidates, null); assertThat(recs, containsInAnyOrder(6L, 7L)); candidates.clear();
// Test snippet (truncated): checks that the IN and NOT_IN predicate evaluators
// agree with direct membership tests against a LongOpenHashSet of the predicate
// values (NOT_IN must be the exact complement of IN). NOTE(review): the loops
// populating stringValues/valueSet and the method's closing braces are elided.
@Test public void testLongPredicateEvaluators() { List<String> stringValues = new ArrayList<>(NUM_PREDICATE_VALUES); LongSet valueSet = new LongOpenHashSet(); valueSet.add(value); Assert.assertEquals(inPredicateEvaluator.applySV(value), valueSet.contains(value)); Assert.assertEquals(notInPredicateEvaluator.applySV(value), !valueSet.contains(value));
// Counts the distinct values in this column by pouring them into a hash set.
@Override
public int countUnique() {
  LongSet distinct = new LongOpenHashSet(data.size());
  for (long value : data) {
    distinct.add(value);
  }
  return distinct.size();
}
// Returns a new column containing each distinct value of this column exactly once.
@Override
public DateTimeColumn unique() {
  LongSet distinct = new LongOpenHashSet(data.size());
  for (long packed : data) {
    distinct.add(packed);
  }
  DateTimeColumn result = emptyCopy(distinct.size());
  result.setName(name() + " Unique values");
  result.data = LongArrayList.wrap(distinct.toLongArray());
  return result;
}
// Snippet (truncated): gathers the item ids to compare (the whole session when
// wholeSession is set, plus the currently clicked item), then unions the item
// sets of users who clicked the current item — excluding the clicked item itself
// — before walking each candidate's compressed click bitmap from itemClickMap.
// NOTE(review): loop bodies and closing braces are elided in this excerpt.
@Override public LongArrayList recommendInternal(ClickData clickData) { LongOpenHashSet itemIDsToCompare = new LongOpenHashSet(); if(wholeSession){ itemIDsToCompare.add(sessionTransaction.item.id); itemIDsToCompare.add(clickData.click.item.id); LongOpenHashSet items = new LongOpenHashSet(); if(currentItemClicks!=null){ for (Integer user : currentItemClicks) { items.addAll(userItemMap.get(user)); items.remove(clickData.click.item.id); for (LongIterator iterator = items.iterator(); iterator.hasNext();) { long item = iterator.nextLong(); EWAHCompressedBitmap entry = itemClickMap.get(item);
/**
 * Find the users who rated at least one of the relevant items; they are the
 * candidate neighbors for {@code user} (the user themself is excluded).
 *
 * @param user The user whose neighbors are sought.
 * @param userItems The user's rated items.
 * @param targetItems The set of target items.
 * @return The set of IDs of candidate neighbors.
 */
private LongSet findCandidateNeighbors(long user, LongSet userItems, LongCollection targetItems) {
    LongSet candidates = new LongOpenHashSet(100);
    // Query by whichever item collection is smaller to minimize DAO round trips.
    LongIterator itemIter = userItems.size() < targetItems.size()
            ? userItems.iterator()
            : targetItems.iterator();
    while (itemIter.hasNext()) {
        LongSet raters = dao.query(CommonTypes.RATING)
                .withAttribute(CommonAttributes.ITEM_ID, itemIter.nextLong())
                .valueSet(CommonAttributes.USER_ID);
        if (raters != null) {
            candidates.addAll(raters);
        }
    }
    candidates.remove(user);
    return candidates;
}
// Stats collector for a long column: tracks raw and aggregated values in two
// independent hash sets, each pre-sized to the configured initial capacity.
public LongColumnPreIndexStatsCollector(String column, StatsCollectorConfig statsCollectorConfig) {
  super(column, statsCollectorConfig);
  aggregatedLongSet = new LongOpenHashSet(INITIAL_HASH_SET_SIZE);
  rawLongSet = new LongOpenHashSet(INITIAL_HASH_SET_SIZE);
}
/**
 * The set of items this user has *seen* in either training or test.
 * The union is computed lazily on first call and cached thereafter.
 * @return The set of all seen items (training and test).
 */
public LongSet getSeenItems() {
    LongSet cached = seenItems;
    if (cached == null) {
        cached = new LongOpenHashSet(getTrainItems());
        cached.addAll(getTestItems());
        seenItems = cached;
    }
    return cached;
}
// Snippet (truncated): evaluates the top-realK recommendations against the user's
// transactions — bailing out when there are none, rejecting duplicate
// recommendations — and picks the metric divisor: realK for precision,
// |userTransactions| for recall. NOTE(review): the hit-counting loop body and
// several closing braces are elided in this excerpt.
public void evaluate(Transaction transaction, LongArrayList recommendations, LongOpenHashSet userTransactions) { if (userTransactions == null || userTransactions.isEmpty()) { return; LongOpenHashSet uniqueRecs = new LongOpenHashSet(); for (int i = 0; i < realK; i++) { if (!uniqueRecs.add(recommendations.getLong(i))) { throw new RuntimeException("Duplicate recommendation."); for (LongIterator iterator = userTransactions.iterator(); iterator.hasNext();) { long itemID = iterator.nextLong(); for (int i = 0; i < realK; i++) { divider = realK; }else if(type == Type.Recall){ divider = userTransactions.size(); }else{ throw new RuntimeException("Neither precision nor recall defined.");
// Test-fixture snippet (truncated): fills a LongOpenHashSet until it holds
// NUM_VALUES distinct random longs (the set deduplicates collisions), then stores
// them sorted in _longValues. NOTE(review): the while-loop's closing brace is
// elided, so the last two statements actually run after the loop completes.
Arrays.sort(_intValues); LongOpenHashSet longSet = new LongOpenHashSet(); while (longSet.size() < NUM_VALUES) { longSet.add(RANDOM.nextLong()); _longValues = longSet.toLongArray(); Arrays.sort(_longValues);
/**
 * Merge two ranked result lists into one list of up to {@code n} results, scoring
 * each item by a weighted blend of its rank-derived scores in the two lists.
 * Items missing from a list get a negative rank (the {@code >= 0} guards below),
 * so their original result is recorded as {@code null}.
 *
 * @param n Maximum number of merged results to keep.
 * @param left The first result list.
 * @param right The second result list.
 * @param weight Weight applied to the left score; the right score gets (1 - weight).
 * @return The blended result list.
 */
static ResultList merge(int n, ResultList left, ResultList right, double weight) {
    Long2IntMap leftRanks = LongUtils.itemRanks(LongUtils.asLongList(left.idList()));
    Long2IntMap rightRanks = LongUtils.itemRanks(LongUtils.asLongList(right.idList()));
    int nl = left.size();
    int nr = right.size();
    LongSet allItems = new LongOpenHashSet();
    allItems.addAll(leftRanks.keySet());
    allItems.addAll(rightRanks.keySet());
    ResultAccumulator accum = ResultAccumulator.create(n);
    for (LongIterator iter = allItems.iterator(); iter.hasNext();) {
        long item = iter.nextLong();
        int rl = leftRanks.get(item);
        int rr = rightRanks.get(item);
        double s1 = rankToScore(rl, nl);
        double s2 = rankToScore(rr, nr);
        double score = weight * s1 + (1.0 - weight) * s2;
        // BUG FIX: the right-hand rank argument was previously 'rl'; it must be 'rr'
        // to match the right-hand result passed alongside it.
        accum.add(new RankBlendResult(item, score,
                                      rl >= 0 ? left.get(rl) : null, rl,
                                      rr >= 0 ? right.get(rr) : null, rr));
    }
    return accum.finish();
}
/**
 * Create the tweet generator, optionally loading the set of deleted tweet ids
 * from the bzip2-compressed file named by {@code args.tweetDeletedIdsFile}.
 * Each line of that file is either a bare id or a tab-separated record whose
 * first field is the id.
 *
 * Fix: the file and reader streams are now closed via try-with-resources, so they
 * no longer leak when a read or parse throws mid-load (the original closed them
 * only on the success path).
 *
 * @throws IOException if the deletes file cannot be read.
 */
public TweetGenerator(IndexCollection.Args args, IndexCollection.Counters counters) throws IOException {
  super(args, counters);
  if (!args.tweetDeletedIdsFile.isEmpty()) {
    deletes = new LongOpenHashSet();
    File deletesFile = new File(args.tweetDeletedIdsFile);
    if (!deletesFile.exists()) {
      System.err.println("Error: " + deletesFile + " does not exist!");
      System.exit(-1);
    }
    LOG.info("Reading deletes from " + deletesFile);
    try (FileInputStream fin = new FileInputStream(deletesFile)) {
      // Skip the "B", "Z" magic bytes that command-line bzip2 tools emit;
      // CBZip2InputStream expects the stream to start after them.
      byte[] ignoreBytes = new byte[2];
      fin.read(ignoreBytes); // best-effort skip; a truncated file fails in the decoder anyway
      try (BufferedReader br =
               new BufferedReader(new InputStreamReader(new CBZip2InputStream(fin)))) {
        String s;
        while ((s = br.readLine()) != null) {
          if (s.contains("\t")) {
            deletes.add(Long.parseLong(s.split("\t")[0]));
          } else {
            deletes.add(Long.parseLong(s));
          }
        }
      }
    }
    LOG.info("Read " + deletes.size() + " tweetids from deletes file.");
  }
}
// Snippet (truncated): collects every node id into a set while tracking the
// maximum id, then times a full pass that sums each node's out-degree (reported
// as "edges traversed"), before iterating the node ids again. NOTE(review): the
// first while-loop's closing braces and the final loop body are elided.
LongOpenHashSet nodes = new LongOpenHashSet(); long maxNodeId = 0; while (iter.hasNext()) { Node n = iter.next(); nodes.add(n.id()); if (n.id() > maxNodeId) { maxNodeId = n.id(); long startTime = System.currentTimeMillis(); AtomicLong graphEdgeCounter = new AtomicLong(); nodes.forEach(v -> graphEdgeCounter.addAndGet(graph.getOutDegree(v))); System.out.println(graphEdgeCounter.get() + " edges traversed in " + (System.currentTimeMillis() - startTime) + "ms"); LongIterator nodeIter = nodes.iterator(); while (nodeIter.hasNext()) { long nodeId = nodeIter.nextLong();
// Snippet (truncated): records newly seen tweet ids (original and resolved),
// logs throughput and JVM memory statistics, then counts how many tweet (left)
// and hashtag (right) nodes of the user-tweet bigraph have non-zero degree.
// NOTE(review): the conditional and loop bodies are elided in this excerpt, so
// what happens on non-zero degree is not visible here.
if (!tweets.contains(tweetId)) { tweets.add(tweetId); if (!tweets.contains(resolvedTweetId)) { tweets.add(resolvedTweetId); new Date(), statusCnt, tweets.size(), hashtags.size(), (float) statusCnt / duration, Runtime.getRuntime().totalMemory(), Runtime.getRuntime().freeMemory())); LongIterator leftIter = tweets.iterator(); while (leftIter.hasNext()) { if (userTweetBigraph.getLeftNodeDegree(leftIter.nextLong()) != 0) LongIterator rightIter = hashtags.keySet().iterator(); while (rightIter.hasNext()) { if (userTweetBigraph.getRightNodeDegree(rightIter.nextLong()) != 0)