/**
 * Creates a hash map from two index-aligned arrays of key-value pairs. Default load factor is used.
 *
 * @param keys keys, aligned by index with {@code values}
 * @param values values, aligned by index with {@code keys}
 * @return a new map containing the pair {@code (keys[i], values[i])} for every index
 * @throws IllegalArgumentException if the two arrays differ in length
 */
public static <KType> ObjectIntHashMap<KType> from(final KType[] keys, final int[] values) {
    if (keys.length != values.length) {
        throw new IllegalArgumentException("Arrays of keys and values must have an identical length.");
    }
    // Presize with the number of pairs to avoid rehashing while filling.
    final ObjectIntHashMap<KType> result = new ObjectIntHashMap<KType>(keys.length);
    int index = 0;
    while (index < keys.length) {
        result.put(keys[index], values[index]);
        index++;
    }
    return result;
}
/**
 * Builds the predicate-priority map: a hand-picked list of well-known predicates gets the
 * lowest ids (in that fixed order), then every remaining {@code OWLRDFVocabulary} entry is
 * assigned an id in declaration order.
 * NOTE(review): {@code nextId} advances even when {@code putIfAbsent} does not insert, so the
 * ids of the non-ordered entries are not contiguous — presumably only relative order matters
 * here; confirm with callers.
 */
static ObjectIntHashMap<IRI> initMap() {
    ObjectIntHashMap<IRI> predicates = new ObjectIntHashMap<>();
    AtomicInteger nextId = new AtomicInteger(1);
    List<OWLRDFVocabulary> orderedUris = Arrays.asList(RDF_TYPE, RDFS_LABEL, OWL_DEPRECATED,
        RDFS_COMMENT, RDFS_IS_DEFINED_BY, RDF_FIRST, RDF_REST, OWL_EQUIVALENT_CLASS,
        OWL_EQUIVALENT_PROPERTY, RDFS_SUBCLASS_OF, RDFS_SUB_PROPERTY_OF, RDFS_DOMAIN,
        RDFS_RANGE, OWL_DISJOINT_WITH, OWL_ON_PROPERTY, OWL_DATA_RANGE, OWL_ON_CLASS,
        OWL_ANNOTATED_SOURCE, OWL_ANNOTATED_PROPERTY, OWL_ANNOTATED_TARGET);
    for (OWLRDFVocabulary vocabulary : orderedUris) {
        predicates.put(vocabulary.getIRI(), nextId.getAndIncrement());
    }
    for (OWLRDFVocabulary vocabulary : OWLRDFVocabulary.values()) {
        predicates.putIfAbsent(vocabulary.getIRI(), nextId.getAndIncrement());
    }
    return predicates;
}
/**
 * {@inheritDoc}
 */
@Override
public boolean putIfAbsent(final KType key, final int value) {
    // Guard clause: an existing key is never overwritten.
    if (containsKey(key)) {
        return false;
    }
    put(key, value);
    return true;
}
/**
 * Builds the predicate-priority map: a hand-picked list of well-known predicates gets the
 * lowest ids (in that fixed order), then every remaining {@code OWLRDFVocabulary} entry is
 * assigned an id in declaration order.
 * NOTE(review): {@code nextId} advances even when {@code putIfAbsent} does not insert, so the
 * ids of the non-ordered entries are not contiguous — presumably only relative order matters
 * here; confirm with callers.
 */
static ObjectIntHashMap<IRI> initMap() {
    ObjectIntHashMap<IRI> predicates = new ObjectIntHashMap<>();
    AtomicInteger nextId = new AtomicInteger(1);
    List<OWLRDFVocabulary> orderedUris = Arrays.asList(RDF_TYPE, RDFS_LABEL, OWL_DEPRECATED,
        RDFS_COMMENT, RDFS_IS_DEFINED_BY, RDF_FIRST, RDF_REST, OWL_EQUIVALENT_CLASS,
        OWL_EQUIVALENT_PROPERTY, RDFS_SUBCLASS_OF, RDFS_SUB_PROPERTY_OF, RDFS_DOMAIN,
        RDFS_RANGE, OWL_DISJOINT_WITH, OWL_ON_PROPERTY, OWL_DATA_RANGE, OWL_ON_CLASS,
        OWL_ANNOTATED_SOURCE, OWL_ANNOTATED_PROPERTY, OWL_ANNOTATED_TARGET);
    for (OWLRDFVocabulary vocabulary : orderedUris) {
        predicates.put(vocabulary.getIRI(), nextId.getAndIncrement());
    }
    for (OWLRDFVocabulary vocabulary : OWLRDFVocabulary.values()) {
        predicates.putIfAbsent(vocabulary.getIRI(), nextId.getAndIncrement());
    }
    return predicates;
}
/**
 * {@inheritDoc}
 */
@Override
public int putAll(final Iterable<? extends ObjectIntCursor<? extends KType>> iterable) {
    final int sizeBefore = size();
    for (final ObjectIntCursor<? extends KType> cursor : iterable) {
        put(cursor.key, cursor.value);
    }
    // Number of keys that were NEW to the map (overwrites don't change the size).
    return size() - sizeBefore;
}
/**
 * Builds the predicate-priority map: a hand-picked list of well-known predicates gets the
 * lowest ids (in that fixed order), then every remaining {@code OWLRDFVocabulary} entry is
 * assigned an id in declaration order.
 * NOTE(review): {@code nextId} advances even when {@code putIfAbsent} does not insert, so the
 * ids of the non-ordered entries are not contiguous — presumably only relative order matters
 * here; confirm with callers.
 */
static ObjectIntHashMap<IRI> initMap() {
    ObjectIntHashMap<IRI> predicates = new ObjectIntHashMap<>();
    AtomicInteger nextId = new AtomicInteger(1);
    List<OWLRDFVocabulary> orderedUris = Arrays.asList(RDF_TYPE, RDFS_LABEL, OWL_DEPRECATED,
        RDFS_COMMENT, RDFS_IS_DEFINED_BY, RDF_FIRST, RDF_REST, OWL_EQUIVALENT_CLASS,
        OWL_EQUIVALENT_PROPERTY, RDFS_SUBCLASS_OF, RDFS_SUB_PROPERTY_OF, RDFS_DOMAIN,
        RDFS_RANGE, OWL_DISJOINT_WITH, OWL_ON_PROPERTY, OWL_DATA_RANGE, OWL_ON_CLASS,
        OWL_ANNOTATED_SOURCE, OWL_ANNOTATED_PROPERTY, OWL_ANNOTATED_TARGET);
    for (OWLRDFVocabulary vocabulary : orderedUris) {
        predicates.put(vocabulary.getIRI(), nextId.getAndIncrement());
    }
    for (OWLRDFVocabulary vocabulary : OWLRDFVocabulary.values()) {
        predicates.putIfAbsent(vocabulary.getIRI(), nextId.getAndIncrement());
    }
    return predicates;
}
/**
 * If <code>key</code> does not exist, <code>putValue</code> is inserted into the map;
 * otherwise the existing value is incremented by <code>incrementValue</code>.
 * (The previous Javadoc described the two cases the wrong way round.)
 *
 * @param key
 *            The key of the value to adjust.
 * @param putValue
 *            The value to put if <code>key</code> does not exist.
 * @param incrementValue
 *            The value to add to the existing value if <code>key</code> exists.
 * @return Returns the current value associated with <code>key</code> (after
 *         changes).
 */
@Override
public int putOrAdd(final KType key, int putValue, final int incrementValue) {
    // Redundant (int) cast and its @SuppressWarnings("cast") removed: int + int is already int.
    if (containsKey(key)) {
        putValue = get(key) + incrementValue;
    }
    put(key, putValue);
    return putValue;
}
/**
 * @param axioms axioms
 * @param type type
 */
public AtomicDecompositionImpl(List<OWLAxiom> axioms, ModuleType type) {
    this.type = type;
    decomposer = new Decomposer(AxiomSelector.wrap(axioms), new SyntacticLocalityChecker());
    // getAOS(type) must be invoked first; later calls use the no-arg accessor.
    int atomCount = decomposer.getAOS(this.type).size();
    atoms = new ArrayList<>();
    // First pass: materialise each atom, index it by position and by signature entity.
    for (int index = 0; index < atomCount; index++) {
        final Atom current = new Atom(asSet(decomposer.getAOS().get(index).getAtomAxioms()));
        atoms.add(current);
        atomIndex.put(current, index);
        for (OWLEntity entity : current.getSignature()) {
            termBasedIndex.put(entity, current);
        }
    }
    // Second pass: record each dependency link in both directions.
    for (int index = 0; index < atomCount; index++) {
        Set<OntologyAtom> links = decomposer.getAOS().get(index).getDependencies();
        for (OntologyAtom link : links) {
            dependencies.put(atoms.get(index), atoms.get(link.getId()));
            dependents.put(atoms.get(link.getId()), atoms.get(index));
        }
    }
}
/**
 * @param axioms axioms
 * @param type type
 */
public AtomicDecompositionImpl(List<OWLAxiom> axioms, ModuleType type) {
    this.type = type;
    decomposer = new Decomposer(AxiomSelector.wrap(axioms), new SyntacticLocalityChecker());
    // getAOS(type) must be invoked first; later calls use the no-arg accessor.
    int atomCount = decomposer.getAOS(this.type).size();
    atoms = new ArrayList<>();
    // First pass: materialise each atom, index it by position and by signature entity.
    for (int index = 0; index < atomCount; index++) {
        final Atom current = new Atom(asSet(decomposer.getAOS().get(index).getAtomAxioms()));
        atoms.add(current);
        atomIndex.put(current, index);
        for (OWLEntity entity : current.getSignature()) {
            termBasedIndex.put(entity, current);
        }
    }
    // Second pass: record each dependency link in both directions.
    for (int index = 0; index < atomCount; index++) {
        Set<OntologyAtom> links = decomposer.getAOS().get(index).getDependencies();
        for (OntologyAtom link : links) {
            dependencies.put(atoms.get(index), atoms.get(link.getId()));
            dependents.put(atoms.get(link.getId()), atoms.get(index));
        }
    }
}
/**
 * @param axioms axioms
 * @param type type
 */
public AtomicDecompositionImpl(List<OWLAxiom> axioms, ModuleType type) {
    this.type = type;
    decomposer = new Decomposer(AxiomSelector.wrap(axioms), new SyntacticLocalityChecker());
    // getAOS(type) must be invoked first; later calls use the no-arg accessor.
    int atomCount = decomposer.getAOS(this.type).size();
    atoms = new ArrayList<>();
    // First pass: materialise each atom, index it by position and by signature entity.
    for (int index = 0; index < atomCount; index++) {
        final Atom current = new Atom(asSet(decomposer.getAOS().get(index).getAtomAxioms()));
        atoms.add(current);
        atomIndex.put(current, index);
        for (OWLEntity entity : current.getSignature()) {
            termBasedIndex.put(entity, current);
        }
    }
    // Second pass: record each dependency link in both directions.
    for (int index = 0; index < atomCount; index++) {
        Set<OntologyAtom> links = decomposer.getAOS().get(index).getDependencies();
        for (OntologyAtom link : links) {
            dependencies.put(atoms.get(index), atoms.get(link.getId()));
            dependents.put(atoms.get(link.getId()), atoms.get(index));
        }
    }
}
/**
 * @param axioms axioms
 * @param type type
 */
public AtomicDecompositionImpl(List<OWLAxiom> axioms, ModuleType type) {
    this.type = type;
    decomposer = new Decomposer(AxiomSelector.wrap(axioms), new SyntacticLocalityChecker());
    // getAOS(type) must be invoked first; later calls use the no-arg accessor.
    int atomCount = decomposer.getAOS(this.type).size();
    atoms = new ArrayList<>();
    // First pass: materialise each atom, index it by position and by signature entity.
    for (int index = 0; index < atomCount; index++) {
        final Atom current = new Atom(asSet(decomposer.getAOS().get(index).getAtomAxioms()));
        atoms.add(current);
        atomIndex.put(current, index);
        for (OWLEntity entity : current.getSignature()) {
            termBasedIndex.put(entity, current);
        }
    }
    // Second pass: record each dependency link in both directions.
    for (int index = 0; index < atomCount; index++) {
        Set<OntologyAtom> links = decomposer.getAOS().get(index).getDependencies();
        for (OntologyAtom link : links) {
            dependencies.put(atoms.get(index), atoms.get(link.getId()));
            dependents.put(atoms.get(link.getId()), atoms.get(index));
        }
    }
}