/** Adds the given string (which may be {@code null}) to the set of unique strings. */
void add(@Nullable String string) {
	uniqueStrings.add(string);
}
/**
 * Creates a new hash set using elements provided by a type-specific iterator.
 *
 * @param i a type-specific iterator whose elements will fill the set.
 * @param f the load factor.
 */
public ObjectOpenHashSet(final Iterator<? extends K> i, final float f) {
	this(DEFAULT_INITIAL_SIZE, f);
	while (i.hasNext()) {
		add(i.next());
	}
}
/**
 * Creates a new hash set and fills it with the elements of a given array.
 *
 * @param a an array whose elements will be used to fill the set.
 * @param offset the first element to use.
 * @param length the number of elements to use.
 * @param f the load factor.
 */
public ObjectOpenHashSet(final K[] a, final int offset, final int length, final float f) {
	// A negative length is rejected below by ensureOffsetLength; size the table at 0 meanwhile.
	this(length < 0 ? 0 : length, f);
	ObjectArrays.ensureOffsetLength(a, offset, length);
	for (int i = offset; i < offset + length; i++) {
		add(a[i]);
	}
}
/**
 * After visiting a {@link Remap} node, translates every index in the incoming
 * set through the node's index remapping; indices with no mapping are kept as is.
 */
public Set<String> visitPost(Remap node, Set<String> v) throws QueryBuilderVisitorException {
	final ObjectOpenHashSet<String> result = new ObjectOpenHashSet<String>();
	for (String index : v) {
		final String remapped = node.indexRemapping.get(index);
		if (remapped == null) result.add(index);
		else result.add(remapped);
	}
	return result;
}

/** A {@link Weight} node does not alter the index set: it is returned unchanged. */
public Set<String> visitPost(Weight node, Set<String> v) throws QueryBuilderVisitorException {
	return v;
}
/**
 * After visiting a {@link Remap} node, translates every index in the incoming
 * set through the node's index remapping; indices with no mapping are kept as is.
 */
public Set<String> visitPost(Remap node, Set<String> v) throws QueryBuilderVisitorException {
	final ObjectOpenHashSet<String> result = new ObjectOpenHashSet<String>();
	for (String index : v) {
		final String remapped = node.indexRemapping.get(index);
		if (remapped == null) result.add(index);
		else result.add(remapped);
	}
	return result;
}

/** A {@link Weight} node does not alter the index set: it is returned unchanged. */
public Set<String> visitPost(Weight node, Set<String> v) throws QueryBuilderVisitorException {
	return v;
}
/**
 * After visiting a {@link Remap} node, translates every index in the incoming
 * set through the node's index remapping; indices with no mapping are kept as is.
 */
public Set<String> visitPost(Remap node, Set<String> v) throws QueryBuilderVisitorException {
	final ObjectOpenHashSet<String> result = new ObjectOpenHashSet<String>();
	for (String index : v) {
		final String remapped = node.indexRemapping.get(index);
		if (remapped == null) result.add(index);
		else result.add(remapped);
	}
	return result;
}

/** A {@link Weight} node does not alter the index set: it is returned unchanged. */
public Set<String> visitPost(Weight node, Set<String> v) throws QueryBuilderVisitorException {
	return v;
}
/**
 * After visiting a {@link Remap} node, translates every index in the incoming
 * set through the node's index remapping; indices with no mapping are kept as is.
 */
public Set<String> visitPost(Remap node, Set<String> v) throws QueryBuilderVisitorException {
	final ObjectOpenHashSet<String> result = new ObjectOpenHashSet<String>();
	for (String index : v) {
		final String remapped = node.indexRemapping.get(index);
		if (remapped == null) result.add(index);
		else result.add(remapped);
	}
	return result;
}

/** A {@link Weight} node does not alter the index set: it is returned unchanged. */
public Set<String> visitPost(Weight node, Set<String> v) throws QueryBuilderVisitorException {
	return v;
}
/** Creates a new multi-term node.
 *
 * @param query a vector of nodes representing distinct terms; they must be either instances
 * of {@link Term}, or instances of {@link Weight} containing instances of {@link Term}.
 * @throws IllegalArgumentException if some term appears twice in <code>query</code>, or if
 * the specification is not followed.
 */
public MultiTerm(final Query... query) {
	super(query);
	// Collect the underlying terms; a duplicate makes the set smaller than the array.
	final ObjectOpenHashSet<MutableString> s = new ObjectOpenHashSet<MutableString>(query.length);
	for (Query q : query) {
		final Term term;
		if (q instanceof Term) term = (Term)q;
		else if (q instanceof Weight && ((Weight)q).query instanceof Term) term = (Term)((Weight)q).query;
		// Previously thrown without a message, which made misuse hard to diagnose.
		else throw new IllegalArgumentException("Multiterm nodes require terms, or weights wrapping terms (found: " + q + ")");
		s.add(new MutableString(term.term));
	}
	if (s.size() != query.length) throw new IllegalArgumentException("Multiterm nodes require distinct terms");
}
/** Creates a new multi-term node.
 *
 * @param query a vector of nodes representing distinct terms; they must be either instances
 * of {@link Term}, or instances of {@link Weight} containing instances of {@link Term}.
 * @throws IllegalArgumentException if some term appears twice in <code>query</code>, or if
 * the specification is not followed.
 */
public MultiTerm(final Query... query) {
	super(query);
	// Collect the underlying terms; a duplicate makes the set smaller than the array.
	final ObjectOpenHashSet<MutableString> s = new ObjectOpenHashSet<MutableString>(query.length);
	for (Query q : query) {
		final Term term;
		if (q instanceof Term) term = (Term)q;
		else if (q instanceof Weight && ((Weight)q).query instanceof Term) term = (Term)((Weight)q).query;
		// Previously thrown without a message, which made misuse hard to diagnose.
		else throw new IllegalArgumentException("Multiterm nodes require terms, or weights wrapping terms (found: " + q + ")");
		s.add(new MutableString(term.term));
	}
	if (s.size() != query.length) throw new IllegalArgumentException("Multiterm nodes require distinct terms");
}
/**
 * Returns the neighbors of {@code x} in the symmetrized graph: the union of the
 * successors of {@code x} in the graph and in its transpose (i.e., its predecessors).
 */
@Override
public Collection<Integer> getNeighbors(final Integer x) {
	final int node = x.intValue();
	// Degrees are queried before the successor enumerations start, as in the original
	// access order, to presize the result set.
	final int outdegree = graph.outdegree(node);
	final int indegree = transpose.outdegree(node);
	final LazyIntIterator successors = graph.successors(node);
	final LazyIntIterator predecessors = transpose.successors(node);
	final ObjectOpenHashSet<Integer> neighbors = new ObjectOpenHashSet<>(outdegree + indegree);
	for (int s; (s = successors.nextInt()) != -1;) {
		neighbors.add(Integer.valueOf(s));
	}
	for (int p; (p = predecessors.nextInt()) != -1;) {
		neighbors.add(Integer.valueOf(p));
	}
	return neighbors;
}
/** Creates a new multi-term node.
 *
 * @param query a vector of nodes representing distinct terms; they must be either instances
 * of {@link Term}, or instances of {@link Weight} containing instances of {@link Term}.
 * @throws IllegalArgumentException if some term appears twice in <code>query</code>, or if
 * the specification is not followed.
 */
public MultiTerm(final Query... query) {
	super(query);
	// Collect the underlying terms; a duplicate makes the set smaller than the array.
	final ObjectOpenHashSet<MutableString> s = new ObjectOpenHashSet<MutableString>(query.length);
	for (Query q : query) {
		final Term term;
		if (q instanceof Term) term = (Term)q;
		else if (q instanceof Weight && ((Weight)q).query instanceof Term) term = (Term)((Weight)q).query;
		// Previously thrown without a message, which made misuse hard to diagnose.
		else throw new IllegalArgumentException("Multiterm nodes require terms, or weights wrapping terms (found: " + q + ")");
		s.add(new MutableString(term.term));
	}
	if (s.size() != query.length) throw new IllegalArgumentException("Multiterm nodes require distinct terms");
}
/** Creates a new multi-term node.
 *
 * @param query a vector of nodes representing distinct terms; they must be either instances
 * of {@link Term}, or instances of {@link Weight} containing instances of {@link Term}.
 * @throws IllegalArgumentException if some term appears twice in <code>query</code>, or if
 * the specification is not followed.
 */
public MultiTerm(final Query... query) {
	super(query);
	// Collect the underlying terms; a duplicate makes the set smaller than the array.
	final ObjectOpenHashSet<MutableString> s = new ObjectOpenHashSet<MutableString>(query.length);
	for (Query q : query) {
		final Term term;
		if (q instanceof Term) term = (Term)q;
		else if (q instanceof Weight && ((Weight)q).query instanceof Term) term = (Term)((Weight)q).query;
		// Previously thrown without a message, which made misuse hard to diagnose.
		else throw new IllegalArgumentException("Multiterm nodes require terms, or weights wrapping terms (found: " + q + ")");
		s.add(new MutableString(term.term));
	}
	if (s.size() != query.length) throw new IllegalArgumentException("Multiterm nodes require distinct terms");
}
// Fans the MBean operation out to every known, non-suspected remote job manager over JMX,
// collecting each manager's result keyed by its identifier. Any failure during the fan-out
// is wrapped in an MBeanException. The `results` set gathers the distinct result values;
// it is only consulted by the commented-out FIXME block below (which tried to collapse
// identical results under a single key and reportedly caused an exception), so today it is
// effectively dead state kept for that pending fix.
// NOTE(review): each JMXConnector is closed on the happy path only — an exception mid-loop
// leaks the current connector. Presumably acceptable here, but worth confirming.
@Override public Object invoke( final String actionName, final Object[] params, final String[] signature ) throws MBeanException, ReflectionException { TreeMap<String,Object> jobManager2Results = new TreeMap<String, Object>(); ObjectOpenHashSet<Object> results = new ObjectOpenHashSet<Object>(); // Invoke the action on all known and not suspected job managers. try { for ( JGroupsRemoteJobManager<? extends Job> remoteJobManager: remoteJobManagers() ) { if ( remoteJobManager.suspected() ) continue; final JMXConnector connector = JMXConnectorFactory.connect( remoteJobManager.jmxServiceURL ); final MBeanServerConnection mBeanServerConnection = connector.getMBeanServerConnection(); final Object result = mBeanServerConnection.invoke( remoteJobManager.objectName, actionName, params, signature ); jobManager2Results.put( remoteJobManager.identifier(), result ); results.add( result ); connector.close(); } } catch( Exception e ) { throw new MBeanException( e ); } // If all results are the same, we return them using a single null key. /* FIXME: Causes an exception if ( results.size() == 1 ) { jobManager2Results.clear(); jobManager2Results.put( this, results.iterator().next() ); }*/ return jobManager2Results; }
/**
 * Reads a set previously written by the matching serializer. The leading header object
 * selects the concrete set type: an {@link Integer} header is the element count of a
 * generic {@code ObjectOpenHashSet} whose elements follow on the stream, while a primitive
 * array header becomes the corresponding primitive open hash set directly.
 *
 * @param is the input to read from.
 * @return the deserialized set.
 * @throws EOFException if the header object is of an unexpected type.
 */
private Set deserializeSet(final DataInput is) throws IOException, ClassNotFoundException {
	final Object header = deserialize(is);
	final Class headerClass = header.getClass();
	if (headerClass.equals(Integer.class)) {
		final int size = (Integer) header;
		final ObjectOpenHashSet set = new ObjectOpenHashSet(size);
		for (int i = 0; i < size; i++) {
			set.add(deserialize(is));
		}
		return set;
	}
	if (headerClass.equals(int[].class)) return new IntOpenHashSet((int[]) header);
	if (headerClass.equals(float[].class)) return new FloatOpenHashSet((float[]) header);
	if (headerClass.equals(double[].class)) return new DoubleOpenHashSet((double[]) header);
	if (headerClass.equals(short[].class)) return new ShortOpenHashSet((short[]) header);
	if (headerClass.equals(byte[].class)) return new ByteOpenHashSet((byte[]) header);
	if (headerClass.equals(long[].class)) return new LongOpenHashSet((long[]) header);
	if (headerClass.equals(boolean[].class)) return new BooleanOpenHashSet((boolean[]) header);
	if (headerClass.equals(char[].class)) return new CharOpenHashSet((char[]) header);
	throw new EOFException();
}
/**
 * Reads a set previously written by the matching serializer. The leading header object
 * selects the concrete set type: an {@link Integer} header is the element count of a
 * generic {@code ObjectOpenHashSet} whose elements follow on the stream, while a primitive
 * array header becomes the corresponding primitive open hash set directly.
 *
 * @param is the input to read from.
 * @return the deserialized set.
 * @throws EOFException if the header object is of an unexpected type.
 */
private Set deserializeSet(final DataInput is) throws IOException, ClassNotFoundException {
	final Object header = deserialize(is);
	final Class headerClass = header.getClass();
	if (headerClass.equals(Integer.class)) {
		final int size = (Integer) header;
		final ObjectOpenHashSet set = new ObjectOpenHashSet(size);
		for (int i = 0; i < size; i++) {
			set.add(deserialize(is));
		}
		return set;
	}
	if (headerClass.equals(int[].class)) return new IntOpenHashSet((int[]) header);
	if (headerClass.equals(float[].class)) return new FloatOpenHashSet((float[]) header);
	if (headerClass.equals(double[].class)) return new DoubleOpenHashSet((double[]) header);
	if (headerClass.equals(short[].class)) return new ShortOpenHashSet((short[]) header);
	if (headerClass.equals(byte[].class)) return new ByteOpenHashSet((byte[]) header);
	if (headerClass.equals(long[].class)) return new LongOpenHashSet((long[]) header);
	if (headerClass.equals(boolean[].class)) return new BooleanOpenHashSet((boolean[]) header);
	if (headerClass.equals(char[].class)) return new CharOpenHashSet((char[]) header);
	throw new EOFException();
}
/** * Tracks the given evidence * @param evidence */ public KmerSupportNode track(KmerSupportNode support) { long kmer = support.lastKmer(); LinkedList<KmerSupportNode> list = lookup.get(kmer); if (list == null) { list = new LinkedList<KmerSupportNode>(); lookup.put(kmer, list); } list.add(support); if (id.add(support.evidence().evidence().getEvidenceID())) { evidenceTotal++; } return support; } /**
// NOTE(review): fragment of an enclosing loop not visible here — the `continue` and the
// variables (titles, pageTitle, events, ew) are defined by surrounding code. Presumably:
// skip the current page on the preceding condition; otherwise record its title and replay
// its buffered XML events into the writer. TODO confirm against the full method.
continue; titles.add(pageTitle); for (XMLEvent e: events) ew.add(e);
// Debug-only recursive invariant checker for the trie: verifies parent/child extent
// prefixes, name lengths, handle registration, and that the jumpLeft/jumpRight pointers of
// each internal node match a freshly recomputed jump target; returns the number of nodes in
// the subtree rooted at `n`. Internal nodes are removed from `nodes` and leaves added to
// `leaves`/`references` as visited, so the caller can check overall coverage afterwards.
// NOTE(review): several asserts have side effects (references.add, nodes.remove,
// leaves.add) — this method only performs its bookkeeping when run with -ea, which appears
// intentional for a checker but means it must never be called for its side effects in
// production. Confirm callers treat it as assert-only.
private int visit(final Node<T> n, final Node<T> parent, final long nameLength, final int depth, ObjectOpenHashSet<Node<T>> nodes, ObjectOpenHashSet<Leaf<T>> leaves, ObjectOpenHashSet<T> references) { if (n == null) return 0; if (DEBUG) { for(int i = depth; i-- != 0;) System.err.print('\t'); System.err.println("Node " + n + " (name length: " + nameLength + ")" + (n.isInternal() ? " Jump left: " + ((InternalNode<T>)n).jumpLeft + " Jump right: " + ((InternalNode<T>)n).jumpRight : "")); } assert parent == null || parent.extent(transform).equals(n.extent(transform).subVector(0, ((InternalNode<T>)parent).extentLength)); assert nameLength <= n.extentLength(transform); assert n.nameLength == nameLength : n.nameLength + " != " + nameLength + " " + n; if (n.isInternal()) { assert references.add(((InternalNode<T>)n).reference.key); assert nodes.remove(n) : n; assert handle2Node.keySet().contains(n.handle(transform)) : n; /* Check that jumps are correct. */ final long jumpLength = ((InternalNode<T>)n).jumpLength(); Node<T> jumpLeft = ((InternalNode<T>)n).left; while(jumpLeft.isInternal() && jumpLength > ((InternalNode<T>)jumpLeft).extentLength) jumpLeft = ((InternalNode<T>)jumpLeft).left; assert jumpLeft == ((InternalNode<T>)n).jumpLeft : jumpLeft + " != " + ((InternalNode<T>)n).jumpLeft + " (node: " + n + ")"; Node<T> jumpRight = ((InternalNode<T>)n).right; while(jumpRight.isInternal() && jumpLength > ((InternalNode<T>)jumpRight).extentLength) jumpRight = ((InternalNode<T>)jumpRight).right; assert jumpRight == ((InternalNode<T>)n).jumpRight : jumpRight + " != " + ((InternalNode<T>)n).jumpRight + " (node: " + n + ")"; return 1 + visit(((InternalNode<T>)n).left, n, ((InternalNode<T>)n).extentLength + 1, depth + 1, nodes, leaves, references) + visit(((InternalNode<T>)n).right, n, n.extentLength(transform) + 1, depth + 1, nodes, leaves, references); } else { assert leaves.add((Leaf<T>)n); assert n.extentLength(transform) == n.key(transform).length(); return 1; } }
// NOTE(review): fragment of an enclosing method not visible here — `v`, `root`, `nodes`
// and `handle2Node` come from surrounding code. Presumably: keep `root` pointing at the
// node with the shortest handle seen so far, register the internal node for `v`, and
// assert the reference back-link is consistent. TODO confirm against the full method.
if (root == null || handle2Node.get(root, true).handleLength() > vHandleLength) root = v; final InternalNode<T> node = handle2Node.get(v, true); nodes.add(node); assert node.reference.reference == node : node + " -> " + node.reference + " -> " + node.reference.reference;