/**
 * Efficiently resolves/adds term identifiers for the buffered
 * {@link BigdataValue}s.
 * <p>
 * If {@link #readOnly}, then the term identifier for unknown values
 * will remain {@link IRawTripleStore#NULL}.
 */
protected void processBufferedValues() {

    if (INFO) {
        log.info("nvalues=" + nvalues);
    }

    // Batch write/resolve against the lexicon (no-op adds when readOnly).
    db.getLexiconRelation().addTerms(valueBuffer, nvalues, readOnly);

}
/**
 * Efficiently resolves/adds term identifiers for the buffered
 * {@link BigdataValue}s.
 * <p>
 * If {@link #readOnly}, then the term identifier for unknown values
 * will remain {@link IRawTripleStore#NULL}.
 */
protected void processBufferedValues() {

    if (INFO) {
        log.info("nvalues=" + nvalues);
    }

    // Batch write/resolve against the lexicon (no-op adds when readOnly).
    db.getLexiconRelation().addTerms(valueBuffer, nvalues, readOnly);

}
/**
 * Adds the given terms to the lexicon in write mode (readOnly is
 * {@code false}), delegating to the lexicon relation.
 *
 * @param terms the values to be added.
 */
@Override
public void addTerms(final BigdataValue[] terms) {

    final int numTerms = terms.length;

    getLexiconRelation().addTerms(terms, numTerms, false/* readOnly */);

}
/**
 * Adds the given terms to the lexicon in write mode (readOnly is
 * {@code false}), delegating to the lexicon relation.
 *
 * @param terms the values to be added.
 */
@Override
public void addTerms(final BigdataValue[] terms) {

    final int numTerms = terms.length;

    getLexiconRelation().addTerms(terms, numTerms, false/* readOnly */);

}
static private void addTerms( final AbstractTripleStore database, // final BigdataValue[] terms, // final int numTerms,// final boolean readOnly// ) { if(log.isInfoEnabled()) log.info("writing " + numTerms); if (DEBUG) { for (int i = 0; i < numTerms; i++) { log.debug("term: " + terms[i] + ", iv: " + terms[i].getIV()); } } final long l = database.getLexiconRelation().addTerms(terms, numTerms, readOnly); if (log.isInfoEnabled()) { log.info("# reported from addTerms: " + l); } }
static private void addTerms( final AbstractTripleStore database, // final BigdataValue[] terms, // final int numTerms,// final boolean readOnly// ) { if(log.isInfoEnabled()) log.info("writing " + numTerms); if (DEBUG) { for (int i = 0; i < numTerms; i++) { log.debug("term: " + terms[i] + ", iv: " + terms[i].getIV()); } } final long l = database.getLexiconRelation().addTerms(terms, numTerms, readOnly); if (log.isInfoEnabled()) { log.info("# reported from addTerms: " + l); } }
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
public IV addTerm(final Value value) { final BigdataValue[] terms = new BigdataValue[] {// getValueFactory().asValue(value) // }; getLexiconRelation().addTerms(terms, 1, false/* readOnly */); return terms[0].getIV(); }
public IV addTerm(final Value value) { final BigdataValue[] terms = new BigdataValue[] {// getValueFactory().asValue(value) // }; getLexiconRelation().addTerms(terms, 1, false/* readOnly */); return terms[0].getIV(); }
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Adds the given values to the lexicon (write mode) and then caches each
 * value on its resolved IV.
 *
 * @param values the values whose IVs will be resolved.
 */
@SuppressWarnings("unchecked")
private void addResolveIVs(final BigdataValue... values) {

    tripleStore.getLexiconRelation()
            .addTerms(values, values.length, false/* readOnly */);

    /*
     * Cache value on IVs to align with behavior of the SPARQL parser.
     *
     * Note: BatchRDFValueResolver does this, so we have to do it too in
     * order to have an exact structural match when we parse the generated
     * SPARQL query and then verify the AST model.
     */
    for (BigdataValue v : values) {

        v.getIV().setValue(v);

    }

}
/**
 * Returns a SPARQL 1.1 {@link ServiceNode} using the specified vars in its
 * body, with a constant endpoint.
 *
 * @param varNames the names of the variables used in the service's body.
 */
ServiceNode serviceSparql11WithConstant(final String... varNames) {

    final JoinGroupNode jgn = joinGroupWithVars(varNames);

    final BigdataValueFactory f = store.getValueFactory();

    // The fixed endpoint URI used for the SERVICE clause.
    final BigdataURI serviceEndpoint = f.createURI("http://custom.endpoint");

    final IV serviceEndpointIV = makeIV(serviceEndpoint);

    // Write the endpoint onto the lexicon so its IV is resolved/assigned.
    final BigdataValue[] values = new BigdataValue[] { serviceEndpoint };
    store.getLexiconRelation().addTerms(
            values, values.length, false/* readOnly */);

    // Build SERVICE <endpoint> { ...vars... } via the anonymous Helper
    // subclass whose instance initializer assigns tmp.
    final ServiceNode serviceNode = (ServiceNode) new Helper(){{
        tmp = service(constantNode(serviceEndpointIV),jgn);
    }}.getTmp();

    return serviceNode;

}
/**
 * Returns a SPARQL 1.1 {@link ServiceNode} using the specified vars in its
 * body, with a constant endpoint.
 *
 * @param varNames the names of the variables used in the service's body.
 */
ServiceNode serviceSparql11WithConstant(final String... varNames) {

    final JoinGroupNode jgn = joinGroupWithVars(varNames);

    final BigdataValueFactory f = store.getValueFactory();

    // The fixed endpoint URI used for the SERVICE clause.
    final BigdataURI serviceEndpoint = f.createURI("http://custom.endpoint");

    final IV serviceEndpointIV = makeIV(serviceEndpoint);

    // Write the endpoint onto the lexicon so its IV is resolved/assigned.
    final BigdataValue[] values = new BigdataValue[] { serviceEndpoint };
    store.getLexiconRelation().addTerms(
            values, values.length, false/* readOnly */);

    // Build SERVICE <endpoint> { ...vars... } via the anonymous Helper
    // subclass whose instance initializer assigns tmp.
    final ServiceNode serviceNode = (ServiceNode) new Helper(){{
        tmp = service(constantNode(serviceEndpointIV),jgn);
    }}.getTmp();

    return serviceNode;

}
/**
 * Returns a SPARQL 1.1 {@link ServiceNode} using the specified vars in its
 * body, with a constant endpoint.
 *
 * @param varNames the names of the variables used in the service's body.
 */
ServiceNode serviceSparql11WithConstant(final String... varNames) {

    final JoinGroupNode jgn = joinGroupWithVars(varNames);

    final BigdataValueFactory f = store.getValueFactory();

    // The fixed endpoint URI used for the SERVICE clause.
    final BigdataURI serviceEndpoint = f.createURI("http://custom.endpoint");

    final IV serviceEndpointIV = makeIV(serviceEndpoint);

    // Write the endpoint onto the lexicon so its IV is resolved/assigned.
    final BigdataValue[] values = new BigdataValue[] { serviceEndpoint };
    store.getLexiconRelation().addTerms(
            values, values.length, false/* readOnly */);

    // Build SERVICE <endpoint> { ...vars... } via the anonymous Helper
    // subclass whose instance initializer assigns tmp.
    final ServiceNode serviceNode = (ServiceNode) new Helper(){{
        tmp = service(constantNode(serviceEndpointIV),jgn);
    }}.getTmp();

    return serviceNode;

}